hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b9a879340258856243eed462835e1aaa3fe143c2
| 24,992
|
py
|
Python
|
test/lib/testRegex.py
|
animator/titus2
|
1d35fab2950bd9f0438b931a02996475271a695e
|
[
"Apache-2.0"
] | 18
|
2019-11-29T08:53:58.000Z
|
2021-11-19T05:33:33.000Z
|
test/lib/testRegex.py
|
animator/titus2
|
1d35fab2950bd9f0438b931a02996475271a695e
|
[
"Apache-2.0"
] | 2
|
2020-04-29T12:58:32.000Z
|
2021-03-23T05:55:43.000Z
|
test/lib/testRegex.py
|
animator/titus2
|
1d35fab2950bd9f0438b931a02996475271a695e
|
[
"Apache-2.0"
] | 1
|
2020-05-05T15:10:27.000Z
|
2020-05-05T15:10:27.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Open Data ("Open Data" refers to
# one or more of the following companies: Open Data Partners LLC,
# Open Data Research LLC, or Open Data Capital LLC.)
#
# This file is part of Hadrian.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import math
import struct
from titus.genpy import PFAEngine
from titus.errors import *
# libc regexp library has no support for multibyte characters. This causes a difference between
# hadrian and titus regex libs. Unittests for multibye characters (non-ascii) are commented out.
class TestLib1Regex(unittest.TestCase):
def testMemory(self):
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: int}
action:
- {re.rindex: [input, [ab(c|d)*]]}
""")
import resource, time
memusage_1 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
for i in range(0, 10000):
engine.action("abcccdc")
memusage_2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
print("\nMemory usage before: {0}, after: {1}".format(memusage_1, memusage_2))
def testPosix(self):
engine, = PFAEngine.fromYaml('''
input: string
output: {type: array, items: int}
action:
- {re.index: [input, {string: "[hc]+at"}]}
''')
self.assertEqual(engine.action("hat"), [0,3])
self.assertEqual(engine.action("cat"), [0,3])
self.assertEqual(engine.action("hhat"), [0,4])
self.assertEqual(engine.action("chat"), [0,4])
self.assertEqual(engine.action("hcat"), [0,4])
self.assertEqual(engine.action("cchchat"), [0,7])
self.assertEqual(engine.action("at"), [])
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: int}
action:
- {re.index: [input, {string: "[hc]?at"}]}
""")
self.assertEqual(engine.action("hat"), [0,3])
self.assertEqual(engine.action("cat"), [0,3])
self.assertEqual(engine.action("at"), [0,2])
self.assertEqual(engine.action("dog"), [])
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: int}
action:
- {re.index: [input, {string: "[hc]*at"}]}
""")
self.assertEqual(engine.action("hat"), [0,3])
self.assertEqual(engine.action("cat"), [0,3])
self.assertEqual(engine.action("hhat"), [0,4])
self.assertEqual(engine.action("chat"), [0,4])
self.assertEqual(engine.action("hcat"), [0,4])
self.assertEqual(engine.action("cchchat"), [0,7])
self.assertEqual(engine.action("at"), [0,2])
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: int}
action:
- {re.index: [input, {string: "cat|dog"}]}
""")
self.assertEqual(engine.action("dog"), [0,3])
self.assertEqual(engine.action("cat"), [0,3])
self.assertEqual(engine.action("mouse"),[])
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: int}
action:
- {re.index: [input, {string: "(abc){2}|(def){2}"}]}
""")
self.assertEqual(engine.action("abcabc"), [0,6])
self.assertEqual(engine.action("defdef"), [0,6])
self.assertEqual(engine.action("XKASGJ8"), [])
# backreferences
engine, = PFAEngine.fromYaml(r"""
input: string
output: {type: array, items: int}
action:
- {re.index: [input, [(the )\1]]}
""")
self.assertEqual(engine.action("Paris in the the spring"), [9,17])
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: int}
action:
- {re.index: [input, {string: "[[:upper:]ab]"}]}
""")
self.assertEqual(engine.action("GHab"), [0,1])
self.assertEqual(engine.action("ab"), [0,1])
self.assertEqual(engine.action("p"), [])
def testIndex(self):
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: int}
action:
- {re.index: [input, {string: "ab(c|d)*"}]}
""")
self.assertEqual(engine.action("abcccdc"), [0,7])
self.assertEqual(engine.action("abddddd"), [0,7])
self.assertEqual(engine.action("XKASGJ8"), [])
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: int}
action:
- {re.index: [input, [dog]]}
""")
self.assertEqual(engine.action("999dogggggg"), [3,6])
self.assertEqual(engine.action("cat"), [])
# test non ascii strings
# engine, = PFAEngine.fromYaml('''
# input: string
# output: {type: array, items: int}
# action:
# - {re.index: [input, {string: "对讲(机|p)*"}]}
# ''')
# self.assertEqual(engine.action("对讲机机机机机机"), [0,8])
# self.assertEqual(engine.action("对讲pppppppppp"), [0,12])
# check byte input
engine, = PFAEngine.fromYaml('''
input: bytes
output: {type: array, items: int}
action:
- re.index: [input, {bytes.encodeUtf8: {string: "ab(c|d)*"}}]
''')
self.assertEqual(engine.action("abcccdc"), [0,7])
self.assertEqual(engine.action("\xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xbaabcccdc"), [9,16])
self.assertEqual(engine.action("\xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xbaabcccdc\xe8\xae\xb2\xe6\x9c\xba"), [9,16])
def testContains(self):
engine, = PFAEngine.fromYaml('''
input: string
output: boolean
action:
- {re.contains: [input, [ab(c|d)*]]}
''')
self.assertEqual(engine.action("wio239fj6abcccdc"), True)
self.assertEqual(engine.action("938736362abddddd"), True)
self.assertEqual(engine.action("938272XKASGJ8"), False)
engine, = PFAEngine.fromYaml('''
input: string
output: boolean
action:
- {re.contains: [input, [dog]]}
''')
self.assertEqual(engine.action("9999doggggggg"), True)
self.assertEqual(engine.action("928373cat"), False)
# check non ascii strings
engine, = PFAEngine.fromYaml("""
input: string
output: boolean
action:
- {re.contains: [input, [对讲机(讲|机)*]]}
""")
self.assertEqual(engine.action("abcccdc"), False)
self.assertEqual(engine.action("xyzzzz对讲机机abcc"), True)
# check byte input
engine, = PFAEngine.fromYaml("""
input: bytes
output: boolean
action:
- re.contains: [input, {bytes.encodeUtf8: {string: "对讲机(讲|机)*"}}]
""")
self.assertEqual(engine.action("abcccdc"), False)
self.assertEqual(engine.action('xyzzzz\xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba\xe6\x9c\xbaabcc'), True)
def testCount(self):
engine, = PFAEngine.fromYaml('''
input: string
output: int
action:
- {re.count: [input, [ab(c|d)*]]}
''')
self.assertEqual(engine.action("938272XKASGJ8"), 0)
self.assertEqual(engine.action("iabc1abc2abc2abc"), 4)
self.assertEqual(engine.action("938736362abddddd"), 1)
engine, = PFAEngine.fromYaml('''
input: string
output: int
action:
- {re.count: [input, [dog]]}
''')
self.assertEqual(engine.action("999doggggggg"), 1)
self.assertEqual(engine.action("9233857cat"), 0)
self.assertEqual(engine.action("dogdogdogdogdog"), 5)
self.assertEqual(engine.action("dogDogdogdogdog"), 4)
self.assertEqual(engine.action("dogdog \n dogdogdog"), 5)
engine, = PFAEngine.fromYaml('''
input: string
output: int
action:
- {re.count: [input, [a*]]}
''')
self.assertEqual(engine.action("aaaaaaaaaaaaaaa"), 1)
engine, = PFAEngine.fromYaml('''
input: string
output: int
action:
- {re.count: [input, [ba]]}
''')
self.assertEqual(engine.action("ababababababababababa"), 10)
# check non ascii strings
engine, = PFAEngine.fromYaml("""
input: string
output: int
action:
- {re.count: [input, [对+]]}
""")
self.assertEqual(engine.action("abcccdc"), 0)
self.assertEqual(engine.action("xyzzzz对对对对讲机机abcc对讲机机mmmmm对对对讲机机aa"), 3)
# check byte input
engine, = PFAEngine.fromYaml("""
input: bytes
output: int
action:
- re.count: [input, {bytes.encodeUtf8: {string: "对讲机(讲|机)*"}}]
""")
self.assertEqual(engine.action("abcccdc"), 0)
self.assertEqual(engine.action("xyzzzz\xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba\xe6\x9c\xba\xe6\x9c\xbaabcc\xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba\xe6\x9c\xba\xe6\x9c\xbammmmm\xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba\xe6\x9c\xbaaa"), 3)
def testrIndex(self):
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: int}
action:
- {re.rindex: [input, [ab(c|d)*]]}
""")
self.assertEqual(engine.action("abcccdc"), [0,7])
self.assertEqual(engine.action("abddddd"), [0,7])
self.assertEqual(engine.action("XKASGJ8"), [])
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: int}
action:
- {re.rindex: [input, [dog]]}
""")
self.assertEqual(engine.action("999dogggggg"), [3,6])
self.assertEqual(engine.action("cat"), [])
self.assertEqual(engine.action("catdogpppdog"), [9,12])
# check non-ascii string input
# engine, = PFAEngine.fromYaml("""
# input: string
# output: {type: array, items: int}
# action:
# - {re.rindex: [input, [对讲机(讲|机)*]]}
# """)
# self.assertEqual(engine.action("abcccdc"), [])
# self.assertEqual(engine.action("xyzzzz对讲机机abcc对讲机机mmmmm对讲机机aa"), [23,27])
# check byte input
engine, = PFAEngine.fromYaml("""
input: bytes
output: {type: array, items: int}
action:
- re.rindex: [input, {bytes.encodeUtf8: {string: "对讲机(讲|机)*"}}]
""")
self.assertEqual(engine.action("abcccdc"), [])
self.assertEqual(engine.action("xyzzzz\xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba\xe6\x9c\xbaabcc\xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba\xe6\x9c\xbammmmm\xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba\xe6\x9c\xbaaa"), [39,51])
def testGroups(self):
engine, = PFAEngine.fromYaml('''
input: string
output: {type: array, items: {type: array, items: int}}
action:
- {re.groups: [input, {string: "(a(b)c)d"}]}
''')
self.assertEqual(engine.action("abcd"), [[0,4], [0,3], [1,2]])
engine, = PFAEngine.fromYaml('''
input: string
output: {type: array, items: {type: array, items: int}}
action:
- {re.groups: [input, {string: "(the )+"}]}
''')
self.assertEqual(engine.action("Paris in the the spring"), [[9,17], [13,17]])
engine, = PFAEngine.fromYaml(r'''
input: string
output: {type: array, items: {type: array, items: int}}
action:
- {re.groups: [input, {string: (the )\1}]}
''')
self.assertEqual(engine.action("Paris in the the spring"), [[9,17], [9,13]])
engine, = PFAEngine.fromYaml('''
input: string
output: {type: array, items: {type: array, items: int}}
action:
- {re.groups: [input, {string: "()(a)bc(def)ghijk"}]}
''')
self.assertEqual(engine.action("abcdefghijk"), [[0,11], [0,0], [0,1], [3,6]])
# check non-ascii string input
# engine, = PFAEngine.fromYaml("""
# input: string
# output: {type: array, items: {type: array, items: int}}
# action:
# - {re.groups: [input, [对讲机(讲|机)*]]}
# """)
# self.assertEqual(engine.action("abcccdc"), [])
# self.assertEqual(engine.action("xyzzzz对讲机机abcc对讲机机mmmmm对讲机机aa"), [[6,10], [9,10]])
# check byte input
engine, = PFAEngine.fromYaml("""
input: bytes
output: {type: array, items: {type: array, items: int}}
action:
- re.groups: [input, {bytes.encodeUtf8: {string: "对讲机(讲|机)*"}}]
""")
self.assertEqual(engine.action("abcccdc"), [])
self.assertEqual(engine.action("xyzzzz\xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba\xe6\x9c\xbaabcc\xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba\xe6\x9c\xbammmmm\xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba\xe6\x9c\xbaaa"), [[6,18], [15,18]])
def testindexAll(self):
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: {type: array, items: int}}
action:
- {re.indexall: [input, [ab]]}
""")
self.assertEqual(engine.action("abcabcabc"), [[0,2], [3,5], [6,8]])
self.assertEqual(engine.action("88cabcc"), [[3,5]])
# backref (include r in string)
engine, = PFAEngine.fromYaml(r"""
input: string
output: {type: array, items: {type: array, items: int}}
action:
- {re.indexall: [input, [(the )\1]]}
""")
self.assertEqual(engine.action("Paris in the the spring, LA in the the summer"), [[9,17], [31,39]])
# check non-ascii string input
# engine, = PFAEngine.fromYaml("""
# input: string
# output: {type: array, items: {type: array, items: int}}
# action:
# - {re.indexall: [input, [对讲机(讲|机)*]]}
# """)
# self.assertEqual(engine.action("abcccdc"), [])
# self.assertEqual(engine.action("xyzzzz对讲机机abcc对讲机机mmmmm对讲机机aa"), [[6,10], [14,18], [23,27]])
# check byte input
engine, = PFAEngine.fromYaml("""
input: bytes
output: {type: array, items: {type: array, items: int}}
action:
- re.indexall: [input, {bytes.encodeUtf8: {string: "对讲机(讲|机)*"}}]
""")
self.assertEqual(engine.action("abcccdc"), [])
self.assertEqual(engine.action("xyzzzz\xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba\xe6\x9c\xbaabcc\xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba\xe6\x9c\xbammmmm\xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba\xe6\x9c\xbaaa"), [[6,18], [22,34], [39,51]])
def testfindAll(self):
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: string}
action:
- {re.findall: [input, [ab]]}
""")
self.assertEqual(engine.action("abcabcabc"), ["ab","ab", "ab"])
self.assertEqual(engine.action("88cabcc"), ["ab"])
# check non-ascii string input
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: string}
action:
- {re.findall: [input, [猫机+猫]]}
""")
self.assertEqual(engine.action("猫机猫oooo猫机机猫ppp猫机机机猫bbbb猫机aaaa猫机机"), ["猫机猫" ,"猫机机猫","猫机机机猫"])
# check byte input
engine, = PFAEngine.fromYaml("""
input: bytes
output: {type: array, items: bytes}
action:
- re.findall: [input, {bytes.encodeUtf8: {string: "ab+"}}]
""")
self.assertEqual(engine.action("xyz"), [])
self.assertEqual(engine.action("abc\xe6\x9c\xba\xe6\x9c\xbaabcabc"), ["ab", "ab", "ab"] )
def testfindFirst(self):
engine, = PFAEngine.fromYaml("""
input: string
output: [string, "null"]
action:
- {re.findfirst: [input, [ab]]}
""")
self.assertEqual(engine.action("88ccc555"), None)
self.assertEqual(engine.action("abcabcabc"), {"string": "ab"})
# check non-ascii input
# engine, = PFAEngine.fromYaml("""
# input: string
# output: [string, "null"]
# action:
# - {re.findfirst: [input, [机机+]]}
# """)
# self.assertEqual(engine.action("abc机机机abca机机bc asdkj 机机机sd"), "机机机")
# self.assertEqual(engine.action("abdefg"), None)
# check byte input
engine, = PFAEngine.fromYaml("""
input: bytes
output: [bytes, "null"]
action:
- re.findfirst: [input, {bytes.encodeUtf8: {string: "对讲机(讲|机)*"}}]
""")
self.assertEqual(engine.action("abcde\xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba\xe8\xae\xb2fgg\xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba\xe8\xae\xb2h"), {"bytes": "\xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba\xe8\xae\xb2"})
self.assertEqual(engine.action("abcdefghijk"), None)
def testfindGroupsFirst(self):
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: string}
action:
- {re.findgroupsfirst: [input, [ab]]}
""")
self.assertEqual(engine.action("abcabcabc"), ["ab"])
self.assertEqual(engine.action("88ccc"), [])
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: string}
action:
- {re.findgroupsfirst: [input, [()(a)bc(def)ghijk]]}
""")
self.assertEqual(engine.action("abcdefghijk"), ["abcdefghijk", "", "a", "def"])
engine, = PFAEngine.fromYaml(r"""
input: string
output: {type: array, items: string}
action:
- {re.findgroupsfirst: [input, [(the.)\1]]}
""")
self.assertEqual(engine.action("Paris in the the spring"), ["the the ", "the "])
# check non-ascii input
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: string}
action:
- {re.findgroupsfirst: [input, [机+(机)]]}
""")
self.assertEqual(engine.action("abc机机机机abca机机bc"), ["机机机机","机"] )
self.assertEqual(engine.action("abcd"), [])
# check byte input
engine, = PFAEngine.fromYaml("""
input: bytes
output: {type: array, items: bytes}
action:
- re.findgroupsfirst: [input, {bytes.encodeUtf8: {string: "机(机)"}}]
""")
self.assertEqual(engine.action("abc\xe6\x9c\xba\xe6\x9c\xbaabca\xe6\x9c\xba\xe6\x9c\xbabc"), ["\xe6\x9c\xba\xe6\x9c\xba","\xe6\x9c\xba"] )
self.assertEqual(engine.action("abcd"), [])
def testfindGroupsAll(self):
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: {type: array, items: string}}
action:
- {re.findgroupsall: [input, [ab]]}
""")
self.assertEqual(engine.action("aabb"), [["ab"]])
self.assertEqual(engine.action("kkabkkabkkab"), [["ab"], ["ab"], ["ab"]])
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: {type: array, items: string}}
action:
- {re.findgroupsall: [input, [()(a)bc(def)ghijk]]}
""")
self.assertEqual(engine.action("abcdefghijkMMMMMabcdefghijkMMMM"), [["abcdefghijk", "", "a", "def"], ["abcdefghijk","", "a", "def"]])
# check non-ascii input
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: {type: array, items: string}}
action:
- {re.findgroupsall: [input, [机+(机)]]}
""")
self.assertEqual(engine.action("abc机机机机abca机机bc"), [["机机机机", "机"], ["机机", "机"]])
# check byte input
engine, = PFAEngine.fromYaml("""
input: bytes
output: {type: array, items: {type: array, items: bytes}}
action:
- re.findgroupsall: [input, {bytes.encodeUtf8: {string: "机(机)"}}]
""")
self.assertEqual(engine.action('abc\xe6\x9c\xba\xe6\x9c\xbaabca\xe6\x9c\xba\xe6\x9c\xbabc'), [['\xe6\x9c\xba\xe6\x9c\xba', '\xe6\x9c\xba'], ['\xe6\x9c\xba\xe6\x9c\xba', '\xe6\x9c\xba']])
self.assertEqual(engine.action("abcd"), [])
def testgroupsAll(self):
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: {type: array, items: {type: array, items: int}}}
action:
- {re.groupsall: [input, [()(a)bc(def)ghijk]]}
""")
self.assertEqual(engine.action("abcdefghijkMMMMMabcdefghijkMMMM"), [[[0,11], [0,0], [0,1], [3,6]], [[16, 27],[16,16],[16,17], [19,22]]])
## check non-ascii input
# engine, = PFAEngine.fromYaml("""
# input: string
# output: {type: array, items: {type: array, items: {type: array, items: int}}}
# action:
# - {re.groupsall: [input, [(机)机]]}
# """)
# self.assertEqual(engine.action("abc机机abca机机bc"), [[[3,5], [3,4]], [[9,11], [9,10]]])
# check byte input
engine, = PFAEngine.fromYaml("""
input: bytes
output: {type: array, items: {type: array, items: {type: array, items: int}}}
action:
- re.groupsall: [input, {bytes.encodeUtf8: {string: "(机)机"}}]
""")
self.assertEqual(engine.action('abc\xe6\x9c\xba\xe6\x9c\xbaabca\xe6\x9c\xba\xe6\x9c\xbabc'), [[[3,9], [3,6]], [[13,19], [13,16]]])
def testreplaceFirst(self):
engine, = PFAEngine.fromYaml("""
input: string
output: string
action:
- {re.replacefirst: [input, ["ab(c|d)*"], ["person"]]}
""")
self.assertEqual(engine.action("abcccdcPPPP"), "personPPPP")
self.assertEqual(engine.action("PPPPabcccdcPPPP"), "PPPPpersonPPPP")
self.assertEqual(engine.action("PPPPPPPP"), "PPPPPPPP")
engine, = PFAEngine.fromYaml("""
input: string
output: string
action:
- {re.replacefirst: [input, ["ab(c|d)*"], ["walkie talkie"]]}
""")
self.assertEqual(engine.action("This abcccdc works better than that abcccdc."), "This walkie talkie works better than that abcccdc.")
# check non-ascii input
engine, = PFAEngine.fromYaml("""
input: string
output: string
action:
- {re.replacefirst: [input, [对讲机+], ["walkie talkie"]]}
""")
self.assertEqual(engine.action("This 对讲机 works better than that 对讲机."), "This walkie talkie works better than that 对讲机.")
# check byte input
engine, = PFAEngine.fromYaml("""
input: bytes
output: bytes
action:
- {re.replacefirst: [input, {bytes.encodeUtf8: {string: "对讲机+"}}, {bytes.encodeUtf8: {string: "walkie talkie"}}]}
""")
self.assertEqual(engine.action('This \xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba works better than that \xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba.'), 'This walkie talkie works better than that \xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba.')
def testreplaceLast(self):
engine, = PFAEngine.fromYaml("""
input: string
output: string
action:
- {re.replacelast: [input, ["ab(c|d)*"], ["person"]]}
""")
self.assertEqual(engine.action("abcccdcPPPPabcccdc"), "abcccdcPPPPperson")
self.assertEqual(engine.action("abcccdcPPPPabcccdcPPPP"), "abcccdcPPPPpersonPPPP")
# check non-ascii input
engine, = PFAEngine.fromYaml("""
input: string
output: string
action:
- {re.replacelast: [input, [对讲机+], ["walkie talkie"]]}
""")
self.assertEqual(engine.action("This 对讲机 works better than that 对讲机."), "This 对讲机 works better than that walkie talkie.")
# check byte input
engine, = PFAEngine.fromYaml("""
input: bytes
output: bytes
action:
- {re.replacelast: [input, {bytes.encodeUtf8: {string: "对讲机+"}}, {bytes.encodeUtf8: {string: "walkie talkie"}}]}
""")
self.assertEqual(engine.action('This \xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba works better than that \xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba.'), 'This \xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba works better than that walkie talkie.')
def testreplaceAll(self):
engine, = PFAEngine.fromYaml("""
input: string
output: string
action:
- {re.replaceall: [input, [cow], [doggy]]}
""")
self.assertEqual(engine.action("pcowppcowpppcow"), "pdoggyppdoggypppdoggy")
self.assertEqual(engine.action("cowpcowppcowppp"), "doggypdoggyppdoggyppp")
engine, = PFAEngine.fromYaml("""
input: string
output: string
action:
- {re.replaceall: [input, [cow], [Y]]}
""")
self.assertEqual(engine.action("cowpcowppcowppp"), "YpYppYppp")
self.assertEqual(engine.action("pcowppcowpppcow"), "pYppYpppY")
engine, = PFAEngine.fromYaml("""
input: string
output: string
action:
- {re.replaceall: [input, [ab(c|d)*], [cow]]}
""")
self.assertEqual(engine.action("abcccdcPPPP"), "cowPPPP")
self.assertEqual(engine.action("PPPPabcccdc"), "PPPPcow")
self.assertEqual(engine.action("PPabcdddcPPabcccdcPPabcccdcPP"), "PPcowPPcowPPcowPP")
# check non-ascii input
engine, = PFAEngine.fromYaml("""
input: string
output: string
action:
- {re.replaceall: [input, [对讲机+], ["walkie talkie"]]}
""")
self.assertEqual(engine.action("This 对讲机机 works better than that 对讲机机."), "This walkie talkie works better than that walkie talkie.")
# check byte input
engine, = PFAEngine.fromYaml("""
input: bytes
output: bytes
action:
- {re.replaceall: [input, {bytes.encodeUtf8: {string: "对讲机+"}}, {bytes.encodeUtf8: {string: "walkie talkie"}}]}
""")
self.assertEqual(engine.action('This \xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba works better than that \xe5\xaf\xb9\xe8\xae\xb2\xe6\x9c\xba.'), "This walkie talkie works better than that walkie talkie.")
def testsplit(self):
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: string}
action:
- {re.split: [input, [cow]]}
""")
self.assertEqual(engine.action("cowpcowppcowppp"), ["p","pp","ppp"])
self.assertEqual(engine.action("pcowppcowpppcow"), ["p","pp","ppp"])
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: string}
action:
- {re.split: [input, [ab(c|d)*]]}
""")
self.assertEqual(engine.action("abcccdcPPPP"), ["PPPP"])
self.assertEqual(engine.action("PPPPabcccdc"), ["PPPP"])
self.assertEqual(engine.action("PPabcccdcPPabcccdcPPabcccdcPP"), ["PP","PP","PP","PP"])
# check non-ascii string input
engine, = PFAEngine.fromYaml("""
input: string
output: {type: array, items: string}
action:
- {re.split: [input, [机+]]}
""")
self.assertEqual(engine.action("abc机机机abca机机机bc asdkj 机机sd"), ["abc","abca","bc asdkj ", "sd" ])
# check byte input
engine, = PFAEngine.fromYaml("""
input: bytes
output: {type: array, items: bytes}
action:
- re.split: [input, {bytes.encodeUtf8: {string: "机机+"}}]
""")
self.assertEqual(engine.action("xyz"), ["xyz"])
self.assertEqual(engine.action("ab\xe6\x9c\xba\xe6\x9c\xbaab\xe6\x9c\xba\xe6\x9c\xbaabc\xe6\x9c\xba\xe6\x9c\xbaabc"), ["ab", "ab", "abc", "abc"])
| 33.322667
| 232
| 0.632802
| 3,053
| 24,992
| 5.17753
| 0.116279
| 0.1078
| 0.187322
| 0.240843
| 0.830012
| 0.739166
| 0.715253
| 0.696717
| 0.68438
| 0.660467
| 0
| 0.028986
| 0.168974
| 24,992
| 749
| 233
| 33.367156
| 0.732101
| 0.133323
| 0
| 0.657459
| 0
| 0.064457
| 0.492969
| 0.094584
| 0
| 0
| 0
| 0
| 0.239411
| 1
| 0.031308
| false
| 0
| 0.01105
| 0
| 0.044199
| 0.001842
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b9c01a392349a6ba4206138c741d80d3a8d7a77f
| 3,038
|
py
|
Python
|
pypresence/presence.py
|
SoulSen/BlendRPC
|
6ef02053c5721b3c92f90a9b78205a985abbf9ea
|
[
"MIT"
] | null | null | null |
pypresence/presence.py
|
SoulSen/BlendRPC
|
6ef02053c5721b3c92f90a9b78205a985abbf9ea
|
[
"MIT"
] | null | null | null |
pypresence/presence.py
|
SoulSen/BlendRPC
|
6ef02053c5721b3c92f90a9b78205a985abbf9ea
|
[
"MIT"
] | null | null | null |
import json
import os
import time
from .baseclient import BaseClient
from .payloads import Payload
from .utils import remove_none
class Presence(BaseClient):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def update(self, pid: int = os.getpid(),
state: str = None, details: str = None,
start: int = None, end: int = None,
large_image: str = None, large_text: str = None,
small_image: str = None, small_text: str = None,
party_id: str = None, party_size: list = None,
join: str = None, spectate: str = None,
match: str = None, instance: bool = True,
_donotuse=True):
if _donotuse is True:
payload = Payload.set_activity(pid, state, details, start, end, large_image, large_text,
small_image, small_text, party_id, party_size, join, spectate,
match, instance, activity=True)
else:
payload = _donotuse
self.send_data(1, payload)
return self.loop.run_until_complete(self.read_output())
def clear(self, pid: int = os.getpid()):
payload = Payload.set_activity(pid, activity=None)
self.send_data(1, payload)
return self.loop.run_until_complete(self.read_output())
def connect(self):
self.loop.run_until_complete(self.handshake())
def close(self):
self.send_data(2, {'v': 1, 'client_id': self.client_id})
self.sock_writer.close()
self.loop.close()
class AioPresence(BaseClient):
    """Asyncio variant of the presence client: same payloads as the blocking
    client, but ``update``/``clear``/``connect`` are coroutines awaited by the
    caller instead of driving the loop internally."""

    def __init__(self, *args, **kwargs):
        """Forward all arguments to BaseClient, forcing async mode."""
        super().__init__(*args, **kwargs, isasync=True)

    async def update(self, pid: int = None,
                     state: str = None, details: str = None,
                     start: int = None, end: int = None,
                     large_image: str = None, large_text: str = None,
                     small_image: str = None, small_text: str = None,
                     party_id: str = None, party_size: list = None,
                     join: str = None, spectate: str = None,
                     match: str = None, instance: bool = True):
        """Build an activity payload, send it (op 1) and await the reply.

        BUG FIX: the original default ``pid=os.getpid()`` was evaluated once
        at import time (stale after fork); ``None`` now means "current process
        at call time" and is backward compatible.
        """
        if pid is None:
            pid = os.getpid()  # resolved per call, not at definition time
        payload = Payload.set_activity(pid, state, details, start, end, large_image, large_text,
                                       small_image, small_text, party_id, party_size, join, spectate,
                                       match, instance, activity=True)
        self.send_data(1, payload)
        return await self.read_output()

    async def clear(self, pid: int = None):
        """Clear the current activity (op 1, ``activity=None``) and await the
        reply.  Same import-time-default fix as ``update``."""
        if pid is None:
            pid = os.getpid()
        payload = Payload.set_activity(pid, activity=None)
        self.send_data(1, payload)
        return await self.read_output()

    async def connect(self):
        """Await the handshake that establishes the connection."""
        await self.handshake()

    def close(self):
        """Send op 2 with the handshake payload, close the socket writer and
        the event loop; the client is unusable afterwards."""
        self.send_data(2, {'v': 1, 'client_id': self.client_id})
        self.sock_writer.close()
        self.loop.close()
| 38.455696
| 105
| 0.559908
| 355
| 3,038
| 4.594366
| 0.2
| 0.085837
| 0.044145
| 0.02943
| 0.863274
| 0.863274
| 0.848559
| 0.848559
| 0.848559
| 0.848559
| 0
| 0.003951
| 0.333443
| 3,038
| 78
| 106
| 38.948718
| 0.801481
| 0
| 0
| 0.612903
| 0
| 0
| 0.006583
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.112903
| false
| 0
| 0.096774
| 0
| 0.306452
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b9d8ea04f73670e704304a0d4208852331a1fdcb
| 8,318
|
py
|
Python
|
alibi/confidence/tests/test_model_linearity.py
|
markus583/alibi
|
ee709d6296b0d803707bce2ed8a47488cd9e9cee
|
[
"Apache-2.0"
] | 1,570
|
2019-05-03T06:43:03.000Z
|
2022-03-31T02:49:34.000Z
|
alibi/confidence/tests/test_model_linearity.py
|
markus583/alibi
|
ee709d6296b0d803707bce2ed8a47488cd9e9cee
|
[
"Apache-2.0"
] | 511
|
2019-05-02T16:36:15.000Z
|
2022-03-31T08:09:43.000Z
|
alibi/confidence/tests/test_model_linearity.py
|
markus583/alibi
|
ee709d6296b0d803707bce2ed8a47488cd9e9cee
|
[
"Apache-2.0"
] | 190
|
2019-05-02T13:41:38.000Z
|
2022-03-14T21:18:56.000Z
|
import pytest
import numpy as np
from sklearn.datasets import load_iris, load_boston
from sklearn.linear_model import LogisticRegression, LinearRegression
from sklearn.svm import SVR
from alibi.confidence.model_linearity import linearity_measure, LinearityMeasure
from alibi.confidence.model_linearity import _linear_superposition, _sample_grid, _sample_knn
from functools import reduce
@pytest.mark.parametrize('input_shape', ((3,), (4, 4, 1)))
@pytest.mark.parametrize('nb_instances', (1, 10))
def test_linear_superposition(input_shape, nb_instances):
    """A 0.5/0.5 superposition of an all-zeros and an all-ones vector must be
    0.5 everywhere, with one output row per instance."""
    weights = np.array([0.5, 0.5])
    per_instance = [
        np.stack((np.zeros((1,) + input_shape), np.ones((1,) + input_shape)), axis=1)
        for _ in range(nb_instances)
    ]
    vecs = reduce(lambda a, b: np.vstack((a, b)), per_instance)
    combo = _linear_superposition(weights, vecs, input_shape)
    assert combo.shape[0] == nb_instances
    assert combo.shape[1:] == input_shape
    assert (combo == 0.5).all()
@pytest.mark.parametrize('nb_instances', (1, 5))
@pytest.mark.parametrize('nb_samples', (2, 10))
def test_sample_knn(nb_instances, nb_samples):
    """_sample_knn must return nb_samples neighbours for each query point."""
    X_train = load_iris().data
    queries = np.ones((nb_instances, ) + X_train.shape[1:])
    sampled = _sample_knn(x=queries, X_train=X_train, nb_samples=nb_samples)
    assert sampled.shape[0] == nb_instances
    assert sampled.shape[1] == nb_samples
@pytest.mark.parametrize('nb_instances', (5, ))
@pytest.mark.parametrize('nb_samples', (3, ))
@pytest.mark.parametrize('input_shape', ((3,), (4, 4, 1)))
def test_sample_grid(nb_instances, nb_samples, input_shape):
    """_sample_grid must return nb_samples grid points per instance, for flat
    and multi-dimensional input shapes alike."""
    queries = np.ones((nb_instances, ) + input_shape)
    # Number of flattened features determines the feature_range length.
    nb_features = queries.reshape(queries.shape[0], -1).shape[1]
    feature_range = np.array([[0, 1] for _ in range(nb_features)])
    sampled = _sample_grid(queries, feature_range, nb_samples=nb_samples)
    assert sampled.shape[0] == nb_instances
    assert sampled.shape[1] == nb_samples
@pytest.mark.parametrize('method', ('knn', 'grid'))
@pytest.mark.parametrize('epsilon', (0.04,))
@pytest.mark.parametrize('res', (100,))
@pytest.mark.parametrize('nb_instances', (1, 10))
@pytest.mark.parametrize('agg', ('global', 'pairwise'))
def test_linearity_measure_class(method, epsilon, res, nb_instances, agg):
    """linearity_measure on a logistic-regression classifier returns one
    non-negative score per instance, both when sampling from the training
    data and from an explicit feature grid."""
    dataset = load_iris()
    X_train, y_train = dataset.data, dataset.target
    x = X_train[0: nb_instances].reshape(nb_instances, -1)
    clf = LogisticRegression()
    clf.fit(X_train, y_train)

    def predict_fn(z):
        return clf.predict_proba(z)

    # Scores computed with samples drawn around the training data.
    scores = linearity_measure(predict_fn, x, method=method, epsilon=epsilon,
                               X_train=X_train, res=res,
                               model_type='classifier', agg=agg)
    assert scores.shape[0] == nb_instances, 'Checking shapes'
    assert (scores >= 0).all(), 'Linearity measure must be >= 0'

    # Scores computed on an explicit unit hyper-cube feature grid.
    feature_range = [[0, 1] for _ in range(X_train.shape[1])]
    grid_scores = linearity_measure(predict_fn, x, method='grid', epsilon=epsilon,
                                    feature_range=feature_range, res=res,
                                    model_type='classifier', agg=agg)
    assert grid_scores.shape[0] == nb_instances, 'Nb of linearity values returned different from number of instances'
    assert (grid_scores >= 0).all(), 'Linearity measure must be >= 0'
@pytest.mark.parametrize('method', ('knn', 'grid'))
@pytest.mark.parametrize('epsilon', (0.04,))
@pytest.mark.parametrize('res', (100,))
@pytest.mark.parametrize('nb_instances', (1, 10))
@pytest.mark.parametrize('agg', ('global', 'pairwise'))
def test_linearity_measure_reg(method, epsilon, res, nb_instances, agg):
    """Linearity scores for regressors: they must be >= 0 always, and ~0 for
    exactly linear models (LinearRegression, incl. multi-output).

    NOTE(review): load_boston was deprecated in scikit-learn 1.0 and removed
    in 1.2 -- on newer sklearn this test needs a replacement dataset; confirm
    the pinned sklearn version.
    """
    boston = load_boston()
    X_train, y_train = boston.data, boston.target
    x = X_train[0: nb_instances].reshape(nb_instances, -1)
    lg = LinearRegression()
    lg.fit(X_train, y_train)
    svr = SVR(kernel='linear')
    svr.fit(X_train, y_train)

    def predict_fn_svr(x):
        return svr.predict(x)

    def predict_fn(x):
        return lg.predict(x)

    # knn-sampled measure: a fitted LinearRegression is exactly linear, so
    # the measure is asserted to vanish (allclose to zero).
    lin = linearity_measure(predict_fn, x, method=method, epsilon=epsilon, X_train=X_train, res=res,
                            model_type='regressor', agg=agg)
    assert lin.shape[0] == nb_instances, 'Checking shapes'
    assert (lin >= 0).all(), 'Linearity measure must be >= 0'
    assert np.allclose(lin, np.zeros(lin.shape))
    # Linear-kernel SVR: only non-negativity is asserted, not exact zero.
    lin_svr = linearity_measure(predict_fn_svr, x, method=method, epsilon=epsilon, X_train=X_train,
                                res=res, model_type='regressor', agg=agg)
    assert lin_svr.shape[0] == nb_instances, 'Checking shapes'
    assert (lin_svr >= 0).all(), 'Linearity measure must be >= 0'
    # Same checks with an explicit unit-cube feature grid instead of X_train.
    feature_range = [[0, 1] for _ in range(X_train.shape[1])]
    lin_2 = linearity_measure(predict_fn, x, method='grid', epsilon=epsilon, feature_range=feature_range,
                              res=res, model_type='regressor', agg=agg)
    assert lin_2.shape[0] == nb_instances, 'Checking shapes'
    assert (lin_2 >= 0).all(), 'Linearity measure must be >= 0'
    assert np.allclose(lin_2, np.zeros(lin_2.shape))
    feature_range = [[0, 1] for _ in range(X_train.shape[1])]
    lin_2_svr = linearity_measure(predict_fn_svr, x, method='grid', epsilon=epsilon,
                                  feature_range=feature_range, res=res, model_type='regressor', agg=agg)
    assert lin_2_svr.shape[0] == nb_instances, 'Checking shapes'
    assert (lin_2_svr >= 0).all(), 'Linearity measure must be >= 0'
    # Multi-output regression via the LinearityMeasure class API: stacking the
    # target twice keeps the model exactly linear, so scores must still be ~0.
    y_train_multi = np.stack((y_train, y_train), axis=1)
    lg_multi = LinearRegression()
    lg_multi.fit(X_train, y_train_multi)

    def predict_fn_multi(x):
        return lg_multi.predict(x)

    lm_multi = LinearityMeasure(method=method, epsilon=epsilon, res=res, model_type='regressor', agg=agg)
    lm_multi.fit(X_train)
    lin_multi = lm_multi.score(predict_fn_multi, x)
    assert lin_multi.shape[0] == nb_instances, 'Checking shapes'
    assert (lin_multi >= 0).all(), 'Linearity measure must be >= 0'
    assert np.allclose(lin_multi, np.zeros(lin_multi.shape))
@pytest.mark.parametrize('method', ('knn', 'grid'))
@pytest.mark.parametrize('epsilon', (0.04,))
@pytest.mark.parametrize('res', (100,))
@pytest.mark.parametrize('nb_instances', (1, 10))
@pytest.mark.parametrize('agg', ('global', 'pairwise'))
def test_LinearityMeasure_class(method, epsilon, res, nb_instances, agg):
    """The LinearityMeasure fit/score API yields one non-negative value per
    instance for a logistic-regression classifier."""
    dataset = load_iris()
    X_train, y_train = dataset.data, dataset.target
    x = X_train[0: nb_instances].reshape(nb_instances, -1)
    clf = LogisticRegression()
    clf.fit(X_train, y_train)

    def predict_fn(z):
        return clf.predict_proba(z)

    lm = LinearityMeasure(method=method, epsilon=epsilon, res=res,
                          model_type='classifier', agg=agg)
    lm.fit(X_train)
    scores = lm.score(predict_fn, x)
    assert scores.shape[0] == nb_instances, 'Checking shapes'
    assert (scores >= 0).all(), 'Linearity measure must be >= 0'
@pytest.mark.parametrize('method', ('knn', 'grid'))
@pytest.mark.parametrize('epsilon', (0.04,))
@pytest.mark.parametrize('res', (100,))
@pytest.mark.parametrize('nb_instances', (1, 10))
@pytest.mark.parametrize('agg', ('global', 'pairwise'))
def test_LinearityMeasure_reg(method, epsilon, res, nb_instances, agg):
    """LinearityMeasure fit/score on regressors: scores are >= 0 and ~0 for an
    exactly linear model, for both single- and multi-output targets.

    NOTE(review): load_boston was deprecated in scikit-learn 1.0 and removed
    in 1.2 -- needs a replacement dataset on newer sklearn; confirm the pinned
    sklearn version.
    """
    boston = load_boston()
    X_train, y_train = boston.data, boston.target
    x = X_train[0: nb_instances].reshape(nb_instances, -1)
    lg = LinearRegression()
    lg.fit(X_train, y_train)

    def predict_fn(x):
        return lg.predict(x)

    # Multi-output variant: the target stacked twice keeps the model linear.
    y_train_multi = np.stack((y_train, y_train), axis=1)
    lg_multi = LinearRegression()
    lg_multi.fit(X_train, y_train_multi)

    def predict_fn_multi(x):
        return lg_multi.predict(x)

    # Single-output: LinearRegression is exactly linear, so the score must
    # vanish (allclose to zero) as well as being non-negative.
    lm = LinearityMeasure(method=method, epsilon=epsilon, res=res, model_type='regressor', agg=agg)
    lm.fit(X_train)
    lin = lm.score(predict_fn, x)
    assert lin.shape[0] == nb_instances, 'Checking shapes'
    assert (lin >= 0).all(), 'Linearity measure must be >= 0'
    assert np.allclose(lin, np.zeros(lin.shape))
    # Multi-output: the same guarantees must hold.
    lm_multi = LinearityMeasure(method=method, epsilon=epsilon, res=res, model_type='regressor', agg=agg)
    lm_multi.fit(X_train)
    lin_multi = lm_multi.score(predict_fn_multi, x)
    assert lin_multi.shape[0] == nb_instances, 'Checking shapes'
    assert (lin_multi >= 0).all(), 'Linearity measure must be >= 0'
    assert np.allclose(lin_multi, np.zeros(lin_multi.shape))
| 38.869159
| 111
| 0.681414
| 1,203
| 8,318
| 4.497091
| 0.088944
| 0.077264
| 0.104806
| 0.04085
| 0.83475
| 0.826248
| 0.79427
| 0.770795
| 0.742884
| 0.698336
| 0
| 0.020195
| 0.172517
| 8,318
| 213
| 112
| 39.051643
| 0.7658
| 0
| 0
| 0.662577
| 0
| 0
| 0.107959
| 0
| 0
| 0
| 0
| 0
| 0.196319
| 1
| 0.08589
| false
| 0
| 0.04908
| 0.042945
| 0.177914
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b9d96af838dc52d405b86344ac3d244435be5d1b
| 75
|
py
|
Python
|
tests/test_fibonacci.py
|
jonathanmusila/simple-logic-tests
|
508b0af93e99e3645887fc229718e162ff0c91a0
|
[
"MIT"
] | null | null | null |
tests/test_fibonacci.py
|
jonathanmusila/simple-logic-tests
|
508b0af93e99e3645887fc229718e162ff0c91a0
|
[
"MIT"
] | null | null | null |
tests/test_fibonacci.py
|
jonathanmusila/simple-logic-tests
|
508b0af93e99e3645887fc229718e162ff0c91a0
|
[
"MIT"
] | null | null | null |
import pytest
from fibonacci import fib
def test_fibonacci():
    """Placeholder test for fib.

    BUG FIX: the original declared ``self`` on a module-level pytest test
    function; pytest treats unknown parameters as fixture requests, so
    collection errors with "fixture 'self' not found" before the test runs.

    TODO: add real assertions against fib().
    """
    pass
| 15
| 25
| 0.786667
| 11
| 75
| 5.272727
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173333
| 75
| 5
| 26
| 15
| 0.935484
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
b9e450f486758fcb0e3e8b0847401db963e1b071
| 2,183
|
py
|
Python
|
tests/feeds/uniswapv3/test_integration.py
|
overlay-market/v1-core
|
e18fabd242f21c243a555712d3f08ca059941f41
|
[
"MIT"
] | 3
|
2022-02-17T16:11:39.000Z
|
2022-03-10T23:46:19.000Z
|
tests/feeds/uniswapv3/test_integration.py
|
overlay-market/v1-core
|
e18fabd242f21c243a555712d3f08ca059941f41
|
[
"MIT"
] | 10
|
2022-01-25T21:49:20.000Z
|
2022-03-31T00:32:29.000Z
|
tests/feeds/uniswapv3/test_integration.py
|
overlay-market/v1-core
|
e18fabd242f21c243a555712d3f08ca059941f41
|
[
"MIT"
] | 2
|
2022-01-21T01:04:54.000Z
|
2022-02-23T08:38:20.000Z
|
from pytest import approx
def test_consult_for_daiweth(pool_daiweth_30bps, quanto_feed):
    """consult() on the DAI/WETH pool must reproduce the arithmetic-mean tick
    and harmonic-mean liquidity derived directly from observe() cumulatives."""
    seconds_agos = [7200, 3600, 600, 0]
    windows = [3600, 3600, 600]
    last = len(seconds_agos) - 1
    now_idxs = [1, last, last]
    tick_cums, secs_per_liq_cums = pool_daiweth_30bps.observe(seconds_agos)
    actual_avg_ticks, actual_avg_liqs = quanto_feed.consult(pool_daiweth_30bps,
                                                           seconds_agos,
                                                           windows,
                                                           now_idxs)
    for i, (window, now_idx) in enumerate(zip(windows, now_idxs)):
        # Arithmetic mean of ticks over the window.
        expect_tick = int((tick_cums[now_idx] - tick_cums[i]) / window)
        # Harmonic mean of liquidity; the (1 << 160) - 1 and << 32 factors
        # mirror the feed's fixed-point arithmetic.
        liq_delta = (secs_per_liq_cums[now_idx] - secs_per_liq_cums[i]) << 32
        expect_liq = int(window * ((1 << 160) - 1) / liq_delta)
        # rel=1e-4 absorbs integer rounding of the tick average.
        assert approx(expect_tick, rel=1e-4) == actual_avg_ticks[i]
        assert approx(expect_liq) == actual_avg_liqs[i]
def test_consult_for_uniweth(pool_uniweth_30bps, inverse_feed):
    """consult() on the UNI/WETH pool must reproduce the arithmetic-mean tick
    and harmonic-mean liquidity derived directly from observe() cumulatives."""
    seconds_agos = [7200, 3600, 600, 0]
    windows = [3600, 3600, 600]
    last = len(seconds_agos) - 1
    now_idxs = [1, last, last]
    tick_cums, secs_per_liq_cums = pool_uniweth_30bps.observe(seconds_agos)
    actual_avg_ticks, actual_avg_liqs = inverse_feed.consult(
        pool_uniweth_30bps, seconds_agos, windows, now_idxs)
    for i, (window, now_idx) in enumerate(zip(windows, now_idxs)):
        # Arithmetic mean of ticks over the window.
        expect_tick = int((tick_cums[now_idx] - tick_cums[i]) / window)
        # Harmonic mean of liquidity; the (1 << 160) - 1 and << 32 factors
        # mirror the feed's fixed-point arithmetic.
        liq_delta = (secs_per_liq_cums[now_idx] - secs_per_liq_cums[i]) << 32
        expect_liq = int(window * ((1 << 160) - 1) / liq_delta)
        # rel=1e-4 absorbs integer rounding of the tick average.
        assert approx(expect_tick, rel=1e-4) == actual_avg_ticks[i]
        assert approx(expect_liq) == actual_avg_liqs[i]
| 45.479167
| 79
| 0.611086
| 298
| 2,183
| 4.154362
| 0.208054
| 0.088853
| 0.048465
| 0.067851
| 0.849758
| 0.849758
| 0.849758
| 0.849758
| 0.849758
| 0.849758
| 0
| 0.055627
| 0.2918
| 2,183
| 47
| 80
| 46.446809
| 0.745149
| 0.127806
| 0
| 0.645161
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 1
| 0.064516
| false
| 0
| 0.032258
| 0
| 0.096774
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6a0d71dd2aa4b85a89ef36a4333bb489b3f9c1e0
| 199
|
py
|
Python
|
utils.py
|
sci-c0/python-misc-problems
|
a0827cc9cd290ca142bba3b7dda307234da63c3c
|
[
"BSD-3-Clause"
] | null | null | null |
utils.py
|
sci-c0/python-misc-problems
|
a0827cc9cd290ca142bba3b7dda307234da63c3c
|
[
"BSD-3-Clause"
] | null | null | null |
utils.py
|
sci-c0/python-misc-problems
|
a0827cc9cd290ca142bba3b7dda307234da63c3c
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
class Test:
    """Tiny assertion helper: ``Test.assert_equals(actual, expected)``."""

    @classmethod
    def assert_equals(cls, func_out, expected_out):
        """Assert ``func_out == expected_out``; on mismatch the AssertionError
        message shows both values.

        NOTE(review): plain ``assert`` is stripped under ``python -O`` --
        acceptable for a test helper, not for production validation.
        """
        assert func_out == expected_out, f"The function out '{func_out}' != '{expected_out}'"
| 28.428571
| 93
| 0.683417
| 28
| 199
| 4.607143
| 0.607143
| 0.162791
| 0.348837
| 0.418605
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006135
| 0.180905
| 199
| 6
| 94
| 33.166667
| 0.785276
| 0.105528
| 0
| 0
| 0
| 0
| 0.276836
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6a1ccefccf152044add6553e4a7f717485d169f2
| 116
|
py
|
Python
|
challenges/3.9.True_Operator/main.py
|
pradeepsaiu/python-coding-challenges
|
b435ab650d85de267eeaa31a55ff77ef5dbff86b
|
[
"BSD-3-Clause"
] | 141
|
2017-05-07T00:38:22.000Z
|
2022-03-25T10:14:25.000Z
|
challenges/3.9.True_Operator/main.py
|
pradeepsaiu/python-coding-challenges
|
b435ab650d85de267eeaa31a55ff77ef5dbff86b
|
[
"BSD-3-Clause"
] | 23
|
2017-05-06T23:57:37.000Z
|
2018-03-23T19:07:32.000Z
|
challenges/3.9.True_Operator/main.py
|
pradeepsaiu/python-coding-challenges
|
b435ab650d85de267eeaa31a55ff77ef5dbff86b
|
[
"BSD-3-Clause"
] | 143
|
2017-05-07T09:33:35.000Z
|
2022-03-12T21:04:13.000Z
|
def boolean_true():
    """Return the boolean literal True.

    BUG FIX: the original returned the undefined name ``value`` (NameError at
    call time; the inline comment -- with "varable" typo -- asked for it to be
    replaced with the correct answer).
    """
    return True  # the correct answer: the True singleton


print(boolean_true())
| 23.2
| 72
| 0.75
| 17
| 116
| 5
| 0.764706
| 0.258824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181034
| 116
| 4
| 73
| 29
| 0.894737
| 0.448276
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0
| 0.333333
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
6a2244b259bb8132854c877d29487e47d0ec4b30
| 10,571
|
py
|
Python
|
test/regression/daily/ledger_lte.py
|
WDeepali/blockchaindemo
|
b8e189ebd50de01026a43d3ff4219d278c404870
|
[
"Apache-2.0"
] | 172
|
2017-10-12T07:56:32.000Z
|
2022-03-10T11:08:02.000Z
|
test/regression/daily/ledger_lte.py
|
Cielo2017/hyperledger-fabric-gm
|
40f2d1d03a96872b52cf6c5ba8a5c634e36089a6
|
[
"Apache-2.0"
] | 6
|
2017-11-28T14:50:34.000Z
|
2021-12-30T13:40:00.000Z
|
test/regression/daily/ledger_lte.py
|
Cielo2017/hyperledger-fabric-gm
|
40f2d1d03a96872b52cf6c5ba8a5c634e36089a6
|
[
"Apache-2.0"
] | 89
|
2017-09-14T04:38:56.000Z
|
2021-05-21T17:24:48.000Z
|
# Copyright IBM Corp. All Rights Reserved.
#
# SPDX-License-Identifier: Apache-2.0
#
import unittest
import subprocess
tool_directory = '../../tools/LTE/scripts'
class perf_goleveldb(unittest.TestCase):
    """LTE benchmark performance tests with goleveldb as the state database.

    Every test follows the same pattern: shell out to runbenchmarks.sh (in
    tool_directory) with parameters_daily_CI.sh and a benchmark name, capture
    stdout/stderr in an output_<Name>.log file, and pass iff the script exits
    with code 0.  shell=True is used on a fixed command string (no untrusted
    input), which is acceptable here.
    """

    def test_FAB_3790_VaryNumParallelTxPerChain(self):
        '''
        In this Performance test, we observe the performance (time to
        complete a set number of Ledger operations) of the Ledger component,
        with goleveldb as the state database. We vary the number of parallel
        transactions per chain and observe the performance.

        Passing criteria: Underlying LTE test completed successfully with
        exit code 0
        '''
        logfile = open("output_VaryNumParallelTxPerChain.log", "w")
        returncode = subprocess.call(
                "./runbenchmarks.sh -f parameters_daily_CI.sh varyNumParallelTxPerChain",
                shell=True, stderr=subprocess.STDOUT, stdout=logfile,
                cwd=tool_directory)
        logfile.close()
        self.assertEqual(returncode, 0, msg="VaryNumParallelTxPerChain "
                "performance test failed. \nPlease check the logfile "
                +logfile.name+" for more details.")

    def test_FAB_3795_VaryNumChains(self):
        '''
        In this Performance test, we observe the performance (time to
        complete a set number of Ledger operations) of the Ledger component,
        with goleveldb as the state database. We vary the number of chains
        (ledgers).

        Passing criteria: Underlying LTE test completed successfully with
        exit code 0
        '''
        logfile = open("output_VaryNumChains.log", "w")
        returncode = subprocess.call(
                "./runbenchmarks.sh -f parameters_daily_CI.sh varyNumChains",
                shell=True, stderr=subprocess.STDOUT, stdout=logfile,
                cwd=tool_directory)
        logfile.close()
        self.assertEqual(returncode, 0, msg="VaryNumChains performance test"
                " failed. \nPlease check the logfile "+logfile.name+" for more "
                "details.")

    def test_FAB_3798_VaryNumParallelTxWithSingleChain(self):
        '''
        In this Performance test, we observe the performance (time to
        complete a set number of Ledger operations) of the Ledger component,
        with goleveldb as the state database. We vary the number of parallel
        transactions on a single chain.

        Passing criteria: Underlying LTE test completed successfully with
        exit code 0
        '''
        logfile = open("output_VaryNumParallelTxWithSingleChain.log", "w")
        returncode = subprocess.call(
                "./runbenchmarks.sh -f parameters_daily_CI.sh varyNumParallelTxWithSingleChain",
                shell=True, stderr=subprocess.STDOUT, stdout=logfile,
                cwd=tool_directory)
        logfile.close()
        self.assertEqual(returncode, 0, msg="VaryNumParallelTxWithSingleChain "
                "performance test failed. \nPlease check the logfile "
                +logfile.name+" for more details.")

    def test_FAB_3799_VaryNumChainsWithNoParallelism(self):
        '''
        In this Performance test, we observe the performance (time to
        complete a set number of Ledger operations) of the Ledger component,
        with goleveldb as the state database. We vary the number of chains
        without any parallelism within a single chain.

        Passing criteria: Underlying LTE test completed successfully with
        exit code 0
        '''
        logfile = open("output_VaryNumChainsWithNoParallelism.log", "w")
        returncode = subprocess.call(
                "./runbenchmarks.sh -f parameters_daily_CI.sh varyNumChainsWithNoParallelism",
                shell=True, stderr=subprocess.STDOUT, stdout=logfile,
                cwd=tool_directory)
        logfile.close()
        self.assertEqual(returncode, 0, msg="varyNumChainsWithNoParallelism "
                "performance test failed. \nPlease check the logfile "
                +logfile.name+" for more details.")

    def test_FAB_3801_VaryKVSize(self):
        '''
        In this Performance test, we observe the performance (time to
        complete a set number of Ledger operations) of the Ledger component,
        with goleveldb as the state database. We vary the size of key-value.

        Passing criteria: Underlying LTE test completed successfully with
        exit code 0
        '''
        logfile = open("output_VaryKVSize.log", "w")
        returncode = subprocess.call(
                "./runbenchmarks.sh -f parameters_daily_CI.sh varyKVSize",
                shell=True, stderr=subprocess.STDOUT, stdout=logfile,
                cwd=tool_directory)
        logfile.close()
        self.assertEqual(returncode, 0, msg="varyKVSize performance test"
                " failed. \nPlease check the logfile "+logfile.name+" for more "
                "details.")

    def test_FAB_3802_VaryBatchSize(self):
        '''
        In this Performance test, we observe the performance (time to
        complete a set number of Ledger operations) of the Ledger component,
        with goleveldb as the state database. We vary the value of the batch
        size

        Passing criteria: Underlying LTE test completed successfully with
        exit code 0
        '''
        logfile = open("output_VaryBatchSize.log", "w")
        returncode = subprocess.call(
                "./runbenchmarks.sh -f parameters_daily_CI.sh varyBatchSize",
                shell=True, stderr=subprocess.STDOUT, stdout=logfile,
                cwd=tool_directory)
        logfile.close()
        self.assertEqual(returncode, 0, msg="varyBatchSize performance test"
                " failed. \nPlease check the logfile "+logfile.name+" for more "
                "details.")

    def test_FAB_3800_VaryNumKeysInEachTx(self):
        '''
        In this Performance test, we observe the performance (time to
        complete a set number of Ledger operations) of the Ledger component,
        with goleveldb as the state database. We vary the number of keys in
        each transaction.

        Passing criteria: Underlying LTE test completed successfully with
        exit code 0
        '''
        logfile = open("output_VaryNumKeysInEachTx.log", "w")
        returncode = subprocess.call(
                "./runbenchmarks.sh -f parameters_daily_CI.sh varyNumKeysInEachTx",
                shell=True, stderr=subprocess.STDOUT, stdout=logfile,
                cwd=tool_directory)
        logfile.close()
        self.assertEqual(returncode, 0, msg="varyNumKeysInEachTx performance "
                "test failed. \nPlease check the logfile "+logfile.name
                +" for more details.")

    def test_FAB_3803_VaryNumTxs(self):
        '''
        In this Performance test, we observe the performance (time to
        complete a set number of Ledger operations) of the Ledger component,
        with goleveldb as the state database. We vary the number of
        transactions carried out.

        Passing criteria: Underlying LTE test completed successfully with
        exit code 0
        '''
        logfile = open("output_VaryNumTxs.log", "w")
        returncode = subprocess.call(
                "./runbenchmarks.sh -f parameters_daily_CI.sh varyNumTxs",
                shell=True, stderr=subprocess.STDOUT, stdout=logfile,
                cwd=tool_directory)
        logfile.close()
        self.assertEqual(returncode, 0, msg="varyNumTxs performance test"
                " failed. \nPlease check the logfile "+logfile.name+" for more "
                "details.")
class perf_couchdb(unittest.TestCase):
    """Placeholders for the CouchDB variants of the LTE performance tests.

    Every test is decorated with @unittest.skip ("WIP"); each body is a
    trivial assertTrue(True) so the suite stays green until the CouchDB
    benchmarks are wired up.
    """

    @unittest.skip("WIP, skipping")
    def test_FAB_3870_VaryNumParallelTxPerChain(self):
        '''
        In this Performance test, we observe the performance (operations
        per second) of the Ledger component, with CouchDB as the state
        database, as we vary the number of parallel transactions per chain.
        '''
        self.assertTrue(True)

    @unittest.skip("WIP, skipping")
    def test_FAB_3871_VaryNumChain(self):
        '''
        In this Performance test, we observe the performance (operations
        per second) of the Ledger component, with CouchDB as the state
        database, as we vary the number of chains (ledgers).
        '''
        self.assertTrue(True)

    @unittest.skip("WIP, skipping")
    def test_FAB_3872_VaryNumParallelTxWithSingleChain(self):
        '''
        In this Performance test, we observe the performance (operations
        per second) of the Ledger component, with CouchDB as the state
        database, vary the number of parallel transactions on a single chain.
        '''
        self.assertTrue(True)

    @unittest.skip("WIP, skipping")
    def test_FAB_3873_VaryNumChainWithNoParallelism(self):
        '''
        In this Performance test, we observe the performance (operations
        per second) of the Ledger component, with CouchDB as the state
        database, as we vary the number of chains without any parallelism.
        within a single chain.
        '''
        self.assertTrue(True)

    @unittest.skip("WIP, skipping")
    def test_FAB_3874_VaryKVSize(self):
        '''
        In this Performance test, we observe the performance (operations
        per second) of the Ledger component, with CouchDB as the state
        database, varying the size of key-value.
        '''
        self.assertTrue(True)

    @unittest.skip("WIP, skipping")
    def test_FAB_3875_VaryBatchSize(self):
        '''
        In this Performance test, we observe the performance (operations
        per second) of the Ledger component, with CouchDB as the state
        database, as we vary the value of the batch size.
        '''
        self.assertTrue(True)

    @unittest.skip("WIP, skipping")
    def test_FAB_3876_VaryNumKeysInEachTX(self):
        '''
        In this Performance test, we observe the performance (operations
        per second) of the Ledger component, with CouchDB as the state
        database, as we vary the number of keys in each transaction.
        '''
        self.assertTrue(True)

    @unittest.skip("WIP, skipping")
    def test_FAB_3877_VaryNumTxs(self):
        '''
        In this Performance test, we observe the performance (operations
        per second) of the Ledger component, with CouchDB as the state
        database, as we vary the number of transactions carried out.
        '''
        self.assertTrue(True)
| 42.971545
| 96
| 0.64535
| 1,183
| 10,571
| 5.696534
| 0.117498
| 0.05342
| 0.052975
| 0.049859
| 0.874017
| 0.869417
| 0.869417
| 0.862443
| 0.850275
| 0.839442
| 0
| 0.010798
| 0.28162
| 10,571
| 245
| 97
| 43.146939
| 0.876613
| 0.378772
| 0
| 0.568807
| 0
| 0
| 0.283172
| 0.108018
| 0
| 0
| 0
| 0
| 0.146789
| 1
| 0.146789
| false
| 0
| 0.018349
| 0
| 0.183486
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6a3e45954e855173ecfe01b8aa69a64cbf9d6a63
| 8,110
|
py
|
Python
|
exp/run_dist_matrix.py
|
nla-group/classix
|
8584162e4c89ba05a62faf1e20104768cf5bb43c
|
[
"MIT"
] | 56
|
2022-02-04T08:51:12.000Z
|
2022-03-30T10:54:18.000Z
|
exp/run_dist_matrix.py
|
nla-group/classix
|
8584162e4c89ba05a62faf1e20104768cf5bb43c
|
[
"MIT"
] | 5
|
2022-03-02T14:28:30.000Z
|
2022-03-19T21:03:17.000Z
|
exp/run_dist_matrix.py
|
nla-group/classix
|
8584162e4c89ba05a62faf1e20104768cf5bb43c
|
[
"MIT"
] | 5
|
2022-03-04T15:34:26.000Z
|
2022-03-28T09:44:03.000Z
|
import numpy as np
import pandas as pd
from sklearn.decomposition import PCA
from numpy.linalg import norm
import matplotlib.pyplot as plt
from sklearn import preprocessing
import seaborn as sns; sns.set_theme()
# Reproducibility: fix the global NumPy RNG before any experiment runs.
seed = 0
np.random.seed(seed)
"""We test without normalization"""
def normalize(data, shift='z-score'):
    """Shift-and-scale *data* (2-d array, rows = instances).

    Parameters
    ----------
    data : np.ndarray
        Data matrix to normalize.
    shift : str
        One of 'mean', 'min', 'z-score' (default) or 'pca'.

    Returns
    -------
    (cdata, (_mu, _scl)) : the normalized data and the shift/scale parameters.

    Raises
    ------
    ValueError
        If *shift* is not one of the recognised modes.
    """
    if shift not in ['mean', 'min', 'z-score', 'pca']:
        raise ValueError("please enter a correct shift parameter.")
    if shift == 'min':
        # NOTE(review): _mu is computed but never subtracted, so 'min' only
        # rescales by the global std -- confirm whether a min-shift was
        # actually intended here.
        _mu = data.min(axis=0)
        _scl = data.std()
        cdata = data / _scl
    elif shift == 'mean':
        _mu = data.mean(axis=0)
        cdata = data - _mu
        _scl = cdata.std()
        cdata = cdata / _scl
    elif shift == 'pca':
        _mu = data.mean(axis=0)
        cdata = data - _mu  # mean center
        # BUG FIX: cdata is already mean-centered; the original computed
        # norm(cdata - _mu, ...), subtracting the mean a second time, so the
        # radii were measured from 2*_mu instead of from the origin.
        rds = norm(cdata, axis=1)  # distance of each data point from 0
        _scl = np.median(rds)  # 50% of data points are within that radius
        cdata = cdata / _scl
    else:  # shift == 'z-score'
        _mu = data.mean(axis=0)
        _scl = data.std(axis=0)
        cdata = (data - _mu) / _scl
    return cdata, (_mu, _scl)
def sorting(data, sorting='pca'):
    """Order *data* rows by a 1-d 'size' criterion.

    'norm-mean' / 'norm-orthant' first normalize the data (mean- or min-based
    shift) and sort by Euclidean row norm; 'pca' sorts the raw data by its
    first principal component (the normalization step there is deliberately
    commented out).

    Returns the reordered data and the *unsorted* size values.  Any other
    value of *sorting* leaves `ind`/`size` unbound and raises NameError, as
    in the original control flow.

    Changes vs. original: the mutually-exclusive `if` chain is now `elif`,
    and the unused `parameters` local (normalize's second return value) is
    discarded.
    """
    if sorting == 'norm-mean':
        data, _ = normalize(data, shift='mean')
        size = np.linalg.norm(data, ord=2, axis=1)
        ind = np.argsort(size)
    elif sorting == 'norm-orthant':
        data, _ = normalize(data, shift='min')
        size = np.linalg.norm(data, ord=2, axis=1)
        ind = np.argsort(size)
    elif sorting == 'pca':
        # data, _ = normalize(data, shift='pca')  # intentionally disabled
        pca = PCA(n_components=1)
        size = pca.fit_transform(data).reshape(-1)
        ind = np.argsort(size)
    return data[ind], size
def _pairwise_dists(points):
    """Dense symmetric matrix of pairwise Euclidean (ord=2) distances."""
    n = len(points)
    dmat = np.zeros((n, n))
    for i in range(n):
        for j in range(i, n):
            dmat[j, i] = dmat[i, j] = np.linalg.norm(points[i] - points[j], ord=2, axis=0)
    return dmat


def _plot_dist_matrix(dmat, ticks, outfile, font_scale):
    """Render a distance matrix as a YlGnBu heatmap and save it to *outfile*."""
    sns.set(rc={'figure.figsize': (12, 10)}, font_scale=font_scale)
    fig, ax = plt.subplots()
    im = ax.imshow(dmat, cmap='YlGnBu', aspect='auto')
    fig.colorbar(im, ax=ax)
    plt.xticks(ticks)
    plt.yticks(ticks)
    plt.savefig(outfile, bbox_inches='tight')
    # plt.show()


def rn_wine_dataset():
    """Wine-dataset experiment: visualise the pairwise-distance structure for
    the original row order and for pca / norm-orthant / norm-mean sortings,
    then pair-plot the three sorting criteria against each other.

    Reads data/Real_data/Wine.csv and writes four heatmap PDFs plus a pair
    plot under results/.  (Refactor: the 4x duplicated distance-matrix and
    heatmap code now lives in _pairwise_dists/_plot_dist_matrix.)
    """
    plt.style.use('ggplot')
    data = pd.read_csv("data/Real_data/Wine.csv")
    X = data.drop(['14'], axis=1).values  # drop column '14' (presumably the label; confirm)
    font_scale = 3
    ticks = [0, 25, 50, 75, 100, 125, 150, 175]

    # Unsorted baseline.
    _plot_dist_matrix(_pairwise_dists(X), ticks, 'results/original_wine.pdf', font_scale)

    # PCA ordering (sorting() also returns the unsorted size criterion).
    ndata, size_pca = sorting(X, sorting='pca')
    _plot_dist_matrix(_pairwise_dists(ndata), ticks, 'results/pca_wine.pdf', font_scale)

    # Norm-orthant ordering.
    ndata, size_no = sorting(X, sorting='norm-orthant')
    _plot_dist_matrix(_pairwise_dists(ndata), ticks, 'results/norm-orthant_wine.pdf', font_scale)

    # Norm-mean ordering.
    ndata, size_nm = sorting(X, sorting='norm-mean')
    _plot_dist_matrix(_pairwise_dists(ndata), ticks, 'results/norm-mean_wine.pdf', font_scale)

    # Pairwise scatter of the three sorting criteria.
    sorting_df = pd.DataFrame()
    sorting_df['PCA'] = size_pca
    sorting_df['Norm-mean'] = size_nm
    sorting_df['Norm-orthant'] = size_no
    sns.set(style='ticks', color_codes=True, font_scale=3)
    g = sns.pairplot(sorting_df, corner=True, height=4.2, aspect=1)
    plt.savefig('results/sort_pair_plot_wine.pdf', bbox_inches='tight')
    # plt.show()
def rn_iris_dataset():
    """Visualize the effect of data-sorting strategies on the Iris dataset.

    Saves pairwise Euclidean-distance heatmaps of the raw data and of the
    data reordered by 'pca', 'norm-orthant' and 'norm-mean' sortings, plus a
    pair plot of the per-strategy size metrics, under results/.
    Relies on module-level ``sorting`` and the data file
    data/Real_data/Iris.csv.
    """
    plt.style.use('ggplot')
    data = pd.read_csv("data/Real_data/Iris.csv")
    le = preprocessing.LabelEncoder()
    data['Species'] = le.fit_transform(data['Species'])
    X = data.drop(['Species','Id'],axis=1).values
    font_scale = 3
    ticks = [0, 20, 40, 60, 80, 100, 120, 140]

    def _distance_heatmap(points, outfile):
        # Symmetric matrix of pairwise Euclidean distances, filled on the
        # upper triangle and mirrored to the lower one.
        dist = np.zeros((len(points), len(points)))
        for i in range(len(points)):
            for j in range(i, len(points)):
                dist[j, i] = dist[i, j] = np.linalg.norm(
                    points[i] - points[j], ord=2, axis=0)
        sns.set(rc={'figure.figsize':(12,10)}, font_scale=font_scale)
        fig, ax = plt.subplots()
        im = ax.imshow(dist, cmap='YlGnBu', aspect='auto')
        fig.colorbar(im, ax=ax)
        plt.xticks(ticks)
        plt.yticks(ticks)
        plt.savefig(outfile, bbox_inches='tight')
        # plt.show()

    _distance_heatmap(X, 'results/original_iris.pdf')
    ndata, size_pca = sorting(X, sorting='pca')
    _distance_heatmap(ndata, 'results/pca_iris.pdf')
    ndata, size_no = sorting(X, sorting='norm-orthant')
    _distance_heatmap(ndata, 'results/norm-orthant_iris.pdf')
    ndata, size_nm = sorting(X, sorting='norm-mean')
    _distance_heatmap(ndata, 'results/norm-mean_iris.pdf')
    # Pair plot comparing the size metrics returned by each sorting.
    sorting_df = pd.DataFrame()
    sorting_df['PCA'] = size_pca
    sorting_df['Norm-mean'] = size_nm
    sorting_df['Norm-orthant'] = size_no
    sns.set(style='ticks', color_codes=True, font_scale=3)
    sns.pairplot(sorting_df, corner=True, height=4.2, aspect=1)
    plt.savefig('results/sort_pair_plot_iris.pdf', bbox_inches='tight')
    # plt.show()
| 36.696833
| 104
| 0.607768
| 1,296
| 8,110
| 3.684414
| 0.126543
| 0.053613
| 0.060314
| 0.037696
| 0.821152
| 0.784503
| 0.779895
| 0.766073
| 0.755183
| 0.750157
| 0
| 0.058759
| 0.215166
| 8,110
| 221
| 105
| 36.696833
| 0.691438
| 0.032922
| 0
| 0.682635
| 0
| 0
| 0.103554
| 0.03439
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023952
| false
| 0
| 0.041916
| 0
| 0.077844
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e043a115f41a62aa87eb062f6e056563521400d4
| 117
|
py
|
Python
|
scripts/mlp/wholebody/croccodyl.py
|
JasonChmn/multicontact-locomotion-planning
|
061f89a58ea6363b2b0d5ee0156950e22040cc0d
|
[
"BSD-2-Clause"
] | null | null | null |
scripts/mlp/wholebody/croccodyl.py
|
JasonChmn/multicontact-locomotion-planning
|
061f89a58ea6363b2b0d5ee0156950e22040cc0d
|
[
"BSD-2-Clause"
] | null | null | null |
scripts/mlp/wholebody/croccodyl.py
|
JasonChmn/multicontact-locomotion-planning
|
061f89a58ea6363b2b0d5ee0156950e22040cc0d
|
[
"BSD-2-Clause"
] | null | null | null |
import mlp.config as cfg
def generateWholeBodyMotion(cs,fullBody=None,viewer=None):
    """Placeholder for the Crocoddyl whole-body motion generator.

    Raises:
        NotImplementedError: always — this backend is not implemented yet.
    """
    # Bug fix: `NotImplemented` is the binary-operator sentinel and is not
    # callable or raisable (calling it raises TypeError). The intended
    # exception class is NotImplementedError.
    raise NotImplementedError("TODO")
| 29.25
| 58
| 0.794872
| 15
| 117
| 6.2
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 117
| 4
| 59
| 29.25
| 0.885714
| 0
| 0
| 0
| 1
| 0
| 0.033898
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0eb24fdbbdaa54d4ea695ed637802e33f3c10428
| 4,285
|
py
|
Python
|
manage/fuzzytranslation.py
|
nfreear/browser-update
|
69ebaded86fa6941dd890097094e6fe5876ef7b5
|
[
"MIT"
] | null | null | null |
manage/fuzzytranslation.py
|
nfreear/browser-update
|
69ebaded86fa6941dd890097094e6fe5876ef7b5
|
[
"MIT"
] | null | null | null |
manage/fuzzytranslation.py
|
nfreear/browser-update
|
69ebaded86fa6941dd890097094e6fe5876ef7b5
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
replacing new strings with old translations
"""
import polib
# Notebook-style maintenance script (Spyder #%% cells): when a source string
# is renamed, re-attach the existing translations to the new msgid.
# Each `pairs` heredoc lists old/new string pairs, one pair per two lines,
# with a BLANK line between pairs (required by the split("\n\n") below).
# The dummy first/last pairs absorb the [1:-1] slice of the heredoc.
#old (translated) string
#new renamed string
pairs="""
An initiative by web designers to inform users about browser-updates
An initiative by websites to inform users to update their web browser

If you are on a computer that is maintained by an admin and you cannot install a new browser, ask your admin about it.
Ask your admin to update your browser if you cannot install updates yourself.

blaasdasdfsdaf
faselsdfsadf""";
pairs=pairs.replace("\r","")[1:-1].split("\n\n")
mappings={s.split("\n")[0]:s.split("\n")[1] for s in pairs}
#%%
po = polib.pofile('lang/de_DE/LC_MESSAGES/update.po')
valid_entries = [e for e in po if not e.obsolete]
for entry in valid_entries:
    #print(entry.msgid)
    if entry.msgid in mappings:
        print("replacing", entry.msgid[:10], "with",mappings[entry.msgid][:10])
        entry.msgid=mappings[entry.msgid]
po.save()
po.save_as_mofile('lang/de_DE/LC_MESSAGES/update.mo')
#%%
pairs="""aaa
bbb

Subtle
Unobtrusive

bla
fasel"""
pairs=pairs.replace("\r","")[1:-1].split("\n\n")
mappings={s.split("\n")[0]:s.split("\n")[1] for s in pairs}
#%%
po = polib.pofile('lang/de_DE/LC_MESSAGES/site.po')
valid_entries = [e for e in po if not e.obsolete]
for entry in valid_entries:
    #print(entry.msgid)
    if entry.msgid in mappings:
        print("replacing", entry.msgid[:10], "with",mappings[entry.msgid][:10])
        entry.msgid=mappings[entry.msgid]
po.save()
po.save_as_mofile('lang/de_DE/LC_MESSAGES/site.mo')
#%%
pot = polib.pofile('lang/update.pot')
for entry in pot:
    print (entry.msgid, entry.msgstr)
#%%
#%% display old translations
po = polib.pofile('lang/de_DE/LC_MESSAGES/update.po')
valid_entries = [e for e in po if not e.obsolete]
for entry in valid_entries:
    print(entry.msgid)
#%%
#%% getting files
from glob import glob
paths = glob('lang/*/LC_MESSAGES/')
# Keep only the locale code, e.g. 'lang/de_DE/LC_MESSAGES/' -> 'de_DE'.
paths=[p[5:10] for p in paths]
paths
#%% updating all site.po
for p in paths:
    print("updating %s"%p)
    try:
        po = polib.pofile('lang/%s/LC_MESSAGES/site.po'%p)
    except OSError:
        print("no file found")
        continue
    valid_entries = [e for e in po if not e.obsolete]
    for entry in valid_entries:
        #print(entry.msgid)
        if entry.msgid in mappings:
            print("    ", entry.msgid[:10], "-->",mappings[entry.msgid][:10])
            entry.msgid=mappings[entry.msgid]
    po.save()
    po.save_as_mofile('lang/%s/LC_MESSAGES/site.mo'%p)
#%% updating all update.po
for p in paths:
    print("updating %s"%p)
    try:
        po = polib.pofile('lang/%s/LC_MESSAGES/update.po'%p)
    except OSError:
        print("no file found")
        continue
    valid_entries = [e for e in po if not e.obsolete]
    for entry in valid_entries:
        #print(entry.msgid)
        if entry.msgid in mappings:
            print("    ", entry.msgid[:10], "-->",mappings[entry.msgid][:10])
            entry.msgid=mappings[entry.msgid]
    po.save()
    po.save_as_mofile('lang/%s/LC_MESSAGES/update.mo'%p)
#%%
pairs="""aaa
bbb

Optionally include up to two placeholders "%s" which will be replaced with the browser version and contents of the link tag. Example: "Your browser (%s) is old. Please <a%s>update</a>"
Optionally include up to two placeholders "%s" which will be replaced with the browser version and contents of the link tag. Example: "Your browser (%s) is old. Please <a%s>update</a>"

bla
fasel"""
pairs=pairs.replace("\r","")[1:-1].split("\n\n")
mappings={s.split("\n")[0]:s.split("\n")[1] for s in pairs}
#%%
from glob import glob
paths = glob('lang/*/LC_MESSAGES/')
paths=[p[5:10] for p in paths]
paths
#%% updating all site.po
for p in paths:
    print("customize %s"%p)
    try:
        po = polib.pofile('lang/%s/LC_MESSAGES/customize.po'%p)
    except OSError:
        print("no file found")
        continue
    valid_entries = [e for e in po if not e.obsolete]
    for entry in valid_entries:
        #print(entry.msgid)
        if entry.msgid in mappings:
            print("    ", entry.msgid[:10], "-->",mappings[entry.msgid][:10])
            entry.msgid=mappings[entry.msgid]
    po.save()
    po.save_as_mofile('lang/%s/LC_MESSAGES/customize.mo'%p)
| 27.120253
| 197
| 0.653442
| 678
| 4,285
| 4.070796
| 0.182891
| 0.115942
| 0.054348
| 0.036957
| 0.781884
| 0.768116
| 0.763043
| 0.763043
| 0.763043
| 0.763043
| 0
| 0.011278
| 0.192999
| 4,285
| 157
| 198
| 27.292994
| 0.786871
| 0.076313
| 0
| 0.737374
| 0
| 0.030303
| 0.342748
| 0.098473
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.030303
| 0
| 0.030303
| 0.131313
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0eda9e645a0c3cd1c329757730cc51c323063ac5
| 29
|
py
|
Python
|
Wrapping/Python/Packaging/__init__.py
|
pcarnah/SimpleElastix
|
4e3bf31b7997d26bbd904c18a9882188d4368790
|
[
"Apache-2.0"
] | null | null | null |
Wrapping/Python/Packaging/__init__.py
|
pcarnah/SimpleElastix
|
4e3bf31b7997d26bbd904c18a9882188d4368790
|
[
"Apache-2.0"
] | null | null | null |
Wrapping/Python/Packaging/__init__.py
|
pcarnah/SimpleElastix
|
4e3bf31b7997d26bbd904c18a9882188d4368790
|
[
"Apache-2.0"
] | null | null | null |
from .SimpleElastix import *
| 14.5
| 28
| 0.793103
| 3
| 29
| 7.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 29
| 1
| 29
| 29
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0efe7004250973a8cca1cb2f3af1dfe3d905c786
| 47,382
|
py
|
Python
|
ckan/tests/legacy/functional/api/model/test_vocabulary.py
|
brew/ckan
|
39039d8c04116b01dbd01334f350cfb3404ccc75
|
[
"Apache-2.0"
] | null | null | null |
ckan/tests/legacy/functional/api/model/test_vocabulary.py
|
brew/ckan
|
39039d8c04116b01dbd01334f350cfb3404ccc75
|
[
"Apache-2.0"
] | null | null | null |
ckan/tests/legacy/functional/api/model/test_vocabulary.py
|
brew/ckan
|
39039d8c04116b01dbd01334f350cfb3404ccc75
|
[
"Apache-2.0"
] | null | null | null |
import ckan
import pylons.test
import paste.fixture
import ckan.lib.helpers as helpers
import ckan.lib.dictization.model_dictize as model_dictize
class TestVocabulary(object):
@classmethod
def setup_class(self):
    """Create the WSGI test app once for the whole test class."""
    self.app = paste.fixture.TestApp(pylons.test.pylonsapp)
@classmethod
def teardown_class(self):
    """Rebuild the database so later test classes start from a clean state."""
    ckan.model.repo.rebuild_db()
def setup(self):
    """Reset tag/vocab tables and create fixture vocabularies and users.

    Creates three vocabularies (Genre, Time Period, Composers) and keeps
    their dictized forms on self; ensures an 'admin' sysadmin and a
    'normal' user exist and caches the sysadmin API key.
    """
    self.clean_vocab()
    model = ckan.model
    context = {'model': model}
    genre = model.Vocabulary("Genre")
    time_period = ckan.model.Vocabulary("Time Period")
    composers = ckan.model.Vocabulary("Composers")
    model.Session.add_all([genre, time_period, composers])
    self.genre_vocab = model_dictize.vocabulary_dictize(genre, context)
    self.timeperiod_vocab = model_dictize.vocabulary_dictize(time_period,
                                                             context)
    self.composers_vocab = model_dictize.vocabulary_dictize(composers,
                                                            context)
    ckan.model.Session.commit()
    self.sysadmin_user = ckan.model.User.get('admin')
    self.normal_user = ckan.model.User.get('normal')
    if not self.sysadmin_user:
        # First run: the fixture users don't exist yet, so create them.
        normal_user = ckan.model.User(name=u'normal', password=u'annafan')
        sysadmin_user = ckan.model.User(name=u'admin',
                                        password=u'testsysadmin')
        sysadmin_user.sysadmin = True
        ckan.model.Session.add(normal_user)
        ckan.model.Session.add(sysadmin_user)
        ckan.model.Session.commit()
        self.sysadmin_user = ckan.model.User.get('admin')
        self.normal_user = ckan.model.User.get('normal')
    self.sysadmin_apikey = self.sysadmin_user.apikey
def clean_vocab(self):
    """Delete all tag and vocabulary rows (children first for FK order)."""
    ckan.model.Session.execute('delete from package_tag_revision')
    ckan.model.Session.execute('delete from package_tag')
    ckan.model.Session.execute('delete from tag')
    ckan.model.Session.execute('delete from vocabulary')
    ckan.model.Session.commit()
@classmethod
def _post(self, url, params=None, extra_environ=None):
    """POST *params* as a JSON body to *url*; return the decoded JSON.

    Asserts the response has no errors.
    """
    # NOTE(review): declared @classmethod but names its first parameter
    # `self` — kept as-is to preserve the original interface.
    if params is None:
        params = {}
    param_string = helpers.json.dumps(params)
    response = self.app.post(url, params=param_string,
                             extra_environ=extra_environ)
    assert not response.errors
    return response.json
@classmethod
def _create_vocabulary(self, vocab_name=None, user=None):
    """Create a vocabulary via the action API, verify it, and return it.

    Checks the create response, that the vocab appears in vocabulary_list,
    and that vocabulary_show by id and by name agree.
    """
    # Create a new vocabulary.
    params = {'name': vocab_name}
    if user:
        extra_environ = {'Authorization': str(user.apikey)}
    else:
        extra_environ = None
    response = self._post('/api/action/vocabulary_create', params=params,
                          extra_environ=extra_environ)
    # Check the values of the response.
    assert response['success'] is True
    assert response['result']
    created_vocab = response['result']
    assert created_vocab['name'] == vocab_name
    assert created_vocab['id']
    # Get the list of vocabularies.
    response = self._post('/api/action/vocabulary_list')
    # Check that the vocabulary we created is in the list.
    assert response['success'] is True
    assert response['result']
    assert response['result'].count(created_vocab) == 1
    # Get the created vocabulary.
    params = {'id': created_vocab['id']}
    response = self._post('/api/action/vocabulary_show', params)
    # Check that retrieving the vocab by name gives the same result.
    by_name_params = {'id': created_vocab['name']}
    assert response == self._post('/api/action/vocabulary_show',
                                  by_name_params)
    # Check that it matches what we created.
    assert response['success'] is True
    assert response['result'] == created_vocab
    return created_vocab
def _update_vocabulary(self, params, user=None):
    """Update a vocabulary via the action API, verify it, and return it.

    Verifies the id is unchanged, that name/tags change only when given in
    *params*, and that vocabulary_list/vocabulary_show agree with the
    update response.
    """
    if user:
        extra_environ = {'Authorization': str(user.apikey)}
    else:
        extra_environ = None
    original_vocab = self._post('/api/action/vocabulary_show',
            {'id': params.get('id') or params.get('name')})['result']
    response = self._post('/api/action/vocabulary_update', params=params,
                          extra_environ=extra_environ)
    # Check the values of the response.
    assert response['success'] is True
    assert response['result']
    updated_vocab = response['result']
    # id should never change.
    assert updated_vocab['id'] == original_vocab['id']
    if 'id' in params:
        assert updated_vocab['id'] == params['id']
    # name should change only if given in params.
    if 'name' in params:
        assert updated_vocab['name'] == params['name']
    else:
        assert updated_vocab['name'] == original_vocab['name']
    # tags should change only if given in params.
    if 'tags' in params:
        assert sorted([tag['name'] for tag in params['tags']]) \
            == sorted([tag['name'] for tag in updated_vocab['tags']])
    else:
        assert updated_vocab['tags'] == original_vocab['tags']
    # Get the list of vocabularies.
    response = self._post('/api/action/vocabulary_list')
    # Check that the vocabulary we updated is in the list.
    assert response['success'] is True
    assert response['result']
    assert response['result'].count(updated_vocab) == 1
    # Get the updated vocabulary.
    params = {'id': updated_vocab['id']}
    response = self._post('/api/action/vocabulary_show', params)
    # Check that retrieving the vocab by name gives the same result.
    by_name_params = {'id': updated_vocab['name']}
    assert response == self._post('/api/action/vocabulary_show',
                                  by_name_params)
    # Check that it matches what we updated.
    assert response['success'] is True
    assert response['result'] == updated_vocab
    return updated_vocab
def _delete_vocabulary(self, vocab_id, user=None):
    """Delete a vocabulary via the action API and verify it is gone.

    Checks the delete response, that the vocab no longer appears in
    vocabulary_list, and that vocabulary_show for it returns 404.
    """
    if user:
        extra_environ = {'Authorization': str(user.apikey)}
    else:
        extra_environ = None
    params = {'id': vocab_id}
    response = self._post('/api/action/vocabulary_delete', params=params,
                          extra_environ=extra_environ)
    # Check the values of the response.
    assert response['success'] is True
    assert response['result'] is None
    # (Removed a dead no-op expression statement `response['result']`
    # that followed the assertion in the original.)
    # Get the list of vocabularies.
    response = self._post('/api/action/vocabulary_list')
    assert response['success'] is True
    assert response['result']
    # Check that the vocabulary we deleted is not in the list.
    assert vocab_id not in [vocab['id'] for vocab in response['result']]
    # Check that the deleted vocabulary can no longer be retrieved.
    response = self.app.post('/api/action/vocabulary_show',
                             params=helpers.json.dumps(params),
                             extra_environ={'Authorization':
                                 str(self.sysadmin_user.apikey)},
                             status=404)
    assert response.json['success'] is False
def _list_tags(self, vocabulary=None, user=None):
    """Call the tag_list action and return its result.

    If *vocabulary* is given, lists only that vocabulary's tags.
    """
    params = {}
    if vocabulary:
        params['vocabulary_id'] = vocabulary['id']
    if user:
        extra_environ = {'Authorization': str(user.apikey)}
    else:
        extra_environ = None
    response = self._post('/api/action/tag_list', params=params,
                          extra_environ=extra_environ)
    assert response['success'] is True
    return response['result']
def _create_tag(self, user, tag_name, vocabulary=None):
    """Create *tag_name* via the tag_create action and return the result.

    If *vocabulary* is given the tag is created inside that vocabulary.
    """
    tag_dict = {'name': tag_name}
    if vocabulary:
        tag_dict['vocabulary_id'] = vocabulary['id']
    if user:
        extra_environ = {'Authorization': str(user.apikey)}
    else:
        extra_environ = None
    response = self._post('/api/action/tag_create', params=tag_dict,
                          extra_environ=extra_environ)
    assert response['success'] is True
    return response['result']
def _delete_tag(self, user, tag_id_or_name, vocab_id_or_name=None):
    """Delete a tag via the tag_delete action and return the result."""
    params = {'id': tag_id_or_name}
    if vocab_id_or_name:
        params['vocabulary_id'] = vocab_id_or_name
    if user:
        extra_environ = {'Authorization': str(user.apikey)}
    else:
        extra_environ = None
    response = self._post('/api/action/tag_delete', params=params,
                          extra_environ=extra_environ)
    assert response['success'] is True
    return response['result']
def test_vocabulary_create(self):
    '''Test adding a new vocabulary to a CKAN instance via the action
    API.

    '''
    self._create_vocabulary(vocab_name="My cool vocab",
                            user=self.sysadmin_user)
def test_vocabulary_create_with_tags(self):
    '''Test adding a new vocabulary with some tags.

    '''
    params = {'name': 'foobar'}
    tag1 = {'name': 'foo'}
    tag2 = {'name': 'bar'}
    params['tags'] = [tag1, tag2]
    response = self._post('/api/action/vocabulary_create',
                          params=params,
                          extra_environ={'Authorization': str(self.sysadmin_apikey)})
    assert response['success'] is True
    assert response['result']
    created_vocab = response['result']
    assert created_vocab['name'] == 'foobar'
    assert created_vocab['id']
    # Get the list of vocabularies.
    response = self._post('/api/action/vocabulary_list')
    # Check that the vocabulary we created is in the list.
    assert response['success'] is True
    assert response['result']
    assert response['result'].count(created_vocab) == 1
    # Get the created vocabulary.
    params = {'id': created_vocab['id']}
    response = self._post('/api/action/vocabulary_show', params)
    # Check that retrieving the vocab by name gives the same result.
    by_name_params = {'id': created_vocab['name']}
    assert response == self._post('/api/action/vocabulary_show',
                                  by_name_params)
    # Check that it matches what we created.
    assert response['success'] is True
    assert response['result'] == created_vocab
    # Get the list of tags for the vocabulary.
    tags = self._list_tags(created_vocab)
    assert len(tags) == 2
    assert tags.count('foo') == 1
    assert tags.count('bar') == 1
def test_vocabulary_create_bad_tags(self):
    '''Test creating new vocabularies with invalid tags.

    '''
    # Each entry violates a different tag constraint (explicit id, None,
    # empty, too short, too long, invalid characters).
    for tags in (
            [{'id': 'xxx'}, {'name': 'foo'}],
            [{'name': 'foo'}, {'name': None}],
            [{'name': 'foo'}, {'name': ''}],
            [{'name': 'foo'}, {'name': 'f'}],
            [{'name': 'f' * 200}, {'name': 'foo'}],
            [{'name': 'Invalid!'}, {'name': 'foo'}],
            ):
        params = {'name': 'foobar', 'tags': tags}
        response = self.app.post('/api/action/vocabulary_create',
                params=helpers.json.dumps(params),
                extra_environ={'Authorization': str(self.sysadmin_apikey)},
                status=409)
        assert response.json['success'] is False
        assert 'tags' in response.json['error']
        assert len(response.json['error']) == 2
def test_vocabulary_create_none_tags(self):
    '''Test creating new vocabularies with None for 'tags'.

    '''
    params = {'name': 'foobar', 'tags': None}
    response = self.app.post('/api/action/vocabulary_create',
                             params=helpers.json.dumps(params),
                             extra_environ={'Authorization':
                                 str(self.sysadmin_user.apikey)},
                             status=400)
    assert "Integrity Error" in response.body
def test_vocabulary_create_empty_tags(self):
    '''Test creating new vocabularies with [] for 'tags'.

    '''
    params = {'name': 'foobar', 'tags': []}
    response = self.app.post('/api/action/vocabulary_create',
                             params=helpers.json.dumps(params),
                             extra_environ={'Authorization':
                                 str(self.sysadmin_user.apikey)},
                             status=200)
    assert response.json['success'] is True
    assert response.json['result']
    created_vocab = response.json['result']
    assert created_vocab['name'] == 'foobar'
    assert created_vocab['id']
    assert created_vocab['tags'] == []
    params = {'id': created_vocab['id']}
    response = self._post('/api/action/vocabulary_show', params)
    assert response['success'] is True
    assert response['result'] == created_vocab
    tags = self._list_tags(created_vocab)
    assert tags == []
def test_vocabulary_create_id(self):
    '''Test error response when user tries to supply their own ID when
    creating a vocabulary.

    '''
    params = {'id': 'xxx', 'name': 'foobar'}
    param_string = helpers.json.dumps(params)
    response = self.app.post('/api/action/vocabulary_create',
                             params=param_string,
                             extra_environ={'Authorization':
                                 str(self.sysadmin_user.apikey)},
                             status=409)
    assert response.json['success'] is False
    assert response.json['error']['id'] == [u'The input field id was '
                                            'not expected.']
def test_vocabulary_create_no_name(self):
    '''Test error response when user tries to create a vocab without a
    name.

    '''
    params = {}
    param_string = helpers.json.dumps(params)
    response = self.app.post('/api/action/vocabulary_create',
                             params=param_string,
                             extra_environ={'Authorization':
                                 str(self.sysadmin_user.apikey)},
                             status=409)
    assert response.json['success'] is False
    assert response.json['error']['name'] == [u'Missing value']
def test_vocabulary_create_invalid_name(self):
    '''Test error response when user tries to create a vocab with an
    invalid name.

    '''
    # None, empty, too short, and too long names must all be rejected.
    for name in (None, '', 'a', 'foobar' * 100):
        params = {'name': name}
        param_string = helpers.json.dumps(params)
        response = self.app.post('/api/action/vocabulary_create',
                                 params=param_string,
                                 extra_environ={'Authorization':
                                     str(self.sysadmin_apikey)},
                                 status=409)
        assert response.json['success'] is False
        assert response.json['error']['name']
def test_vocabulary_create_exists(self):
    '''Test error response when user tries to create a vocab that already
    exists.

    '''
    params = {'name': self.genre_vocab['name']}
    param_string = helpers.json.dumps(params)
    response = self.app.post('/api/action/vocabulary_create',
                             params=param_string,
                             extra_environ={'Authorization':
                                 str(self.sysadmin_user.apikey)},
                             status=409)
    assert response.json['success'] is False
    assert response.json['error']['name'] == [u'That vocabulary name is '
                                              'already in use.']
def test_vocabulary_create_not_logged_in(self):
    '''Test that users who are not logged in cannot create vocabularies.'''
    params = {'name':
        "Spam Vocabulary: SpamCo Duck Rental: Rent Your Ducks From Us!"}
    param_string = helpers.json.dumps(params)
    response = self.app.post('/api/action/vocabulary_create',
                             params=param_string,
                             status=403)
    assert response.json['success'] is False
    assert response.json['error']['__type'] == 'Authorization Error'
def test_vocabulary_create_not_authorized(self):
    '''Test that users who are not authorized cannot create vocabs.'''
    params = {'name': 'My Unauthorised Vocabulary'}
    param_string = helpers.json.dumps(params)
    response = self.app.post('/api/action/vocabulary_create',
                             params=param_string,
                             extra_environ={'Authorization':
                                 str(self.normal_user.apikey)},
                             status=403)
    assert response.json['success'] is False
    assert response.json['error']['__type'] == 'Authorization Error'
def test_vocabulary_update_id_only(self):
    '''Update giving only the id; name and tags should be preserved.'''
    self._update_vocabulary({'id': self.genre_vocab['id']},
                            self.sysadmin_user)
def test_vocabulary_update_id_and_same_name(self):
    '''Update giving the id and the unchanged name.'''
    self._update_vocabulary({'id': self.genre_vocab['id'],
        'name': self.genre_vocab['name']}, self.sysadmin_user)
def test_vocabulary_update_id_and_new_name(self):
    '''Update giving the id and a new name.'''
    self._update_vocabulary({'id': self.genre_vocab['id'],
        'name': 'new name'}, self.sysadmin_user)
def test_vocabulary_update_id_and_same_tags(self):
    '''Update giving the id and the unchanged tags.'''
    self._update_vocabulary({'id': self.genre_vocab['id'],
        'tags': self.genre_vocab['tags']}, self.sysadmin_user)
def test_vocabulary_update_id_and_new_tags(self):
    '''Update giving the id and a replacement tag list.'''
    tags = [
        {'name': 'new test tag one'},
        {'name': 'new test tag two'},
        {'name': 'new test tag three'},
    ]
    self._update_vocabulary({'id': self.genre_vocab['id'], 'tags': tags},
                            self.sysadmin_user)
def test_vocabulary_update_id_same_name_and_same_tags(self):
    '''Update giving the id plus unchanged name and tags.'''
    self._update_vocabulary({'id': self.genre_vocab['id'],
        'name': self.genre_vocab['name'],
        'tags': self.genre_vocab['tags']}, self.sysadmin_user)
def test_vocabulary_update_id_same_name_and_new_tags(self):
    '''Update giving the id, the unchanged name, and new tags.'''
    tags = [
        {'name': 'new test tag one'},
        {'name': 'new test tag two'},
        {'name': 'new test tag three'},
    ]
    self._update_vocabulary({'id': self.genre_vocab['id'],
        'name': self.genre_vocab['name'],
        'tags': tags}, self.sysadmin_user)
def test_vocabulary_update_id_new_name_and_same_tags(self):
    '''Update giving the id, a new name, and the unchanged tags.'''
    self._update_vocabulary({'id': self.genre_vocab['id'],
        'name': 'new name',
        'tags': self.genre_vocab['tags']}, self.sysadmin_user)
def test_vocabulary_update_not_exists(self):
    '''Test the error response given when a user tries to update a
    vocabulary that doesn't exist.

    '''
    params = {'id': 'xxxxxxx', 'name': 'updated_name'}
    param_string = helpers.json.dumps(params)
    response = self.app.post('/api/action/vocabulary_update',
                             params=param_string,
                             extra_environ={'Authorization':
                                 str(self.sysadmin_user.apikey)},
                             status=404)
    assert response.json['success'] is False
    assert response.json['error']['message'].startswith('Not found: ')
def test_vocabulary_update_no_id(self):
    '''Test the error response when updating without a vocabulary id.'''
    params = {'name': 'bagel radio'}
    param_string = helpers.json.dumps(params)
    response = self.app.post('/api/action/vocabulary_update',
                             params=param_string,
                             extra_environ={'Authorization':
                                 str(self.sysadmin_user.apikey)},
                             status=409)
    assert response.json['success'] is False
    assert 'id' in response.json['error']
    assert response.json['error']['id'] == 'id not in data'
def test_vocabulary_update_not_logged_in(self):
    '''Test that users who are not logged in cannot update vocabularies.'''
    params = {'id': self.genre_vocab['id']}
    param_string = helpers.json.dumps(params)
    response = self.app.post('/api/action/vocabulary_update',
                             params=param_string,
                             status=403)
    assert response.json['success'] is False
    assert response.json['error']['__type'] == 'Authorization Error'
def test_vocabulary_update_with_tags(self):
    '''Update a vocabulary's tag list and check the new tags are stored.'''
    tags = [
        {'name': 'drone'},
        {'name': 'noise'},
        {'name': 'fuzz'},
        {'name': 'field recordings'},
        {'name': 'hypnagogia'},
        {'name': 'textures without rhythm'},
    ]
    self._update_vocabulary(
        {
            'id': self.genre_vocab['id'],
            'name': self.genre_vocab['name'],
            'tags': tags
        },
        self.sysadmin_user)
    params = {'id': self.genre_vocab['id']}
    response = self._post('/api/action/vocabulary_show', params)
    # Check that the vocabulary now has exactly the new tags.
    assert len(response['result']['tags']) == len(tags)
def test_vocabulary_update_not_authorized(self):
    '''Test that users who are not authorized cannot update vocabs.'''
    params = {'id': self.genre_vocab['id']}
    param_string = helpers.json.dumps(params)
    response = self.app.post('/api/action/vocabulary_update',
                             params=param_string,
                             extra_environ={'Authorization':
                                 str(self.normal_user.apikey)},
                             status=403)
    assert response.json['success'] is False
    assert response.json['error']['message'] == 'Access denied'
def test_vocabulary_update_bad_tags(self):
    '''Test updating vocabularies with invalid tags.

    '''
    apikey = str(self.sysadmin_user.apikey)
    # Same invalid-tag cases as in test_vocabulary_create_bad_tags.
    for tags in (
            [{'id': 'xxx'}, {'name': 'foo'}],
            [{'name': 'foo'}, {'name': None}],
            [{'name': 'foo'}, {'name': ''}],
            [{'name': 'foo'}, {'name': 'f'}],
            [{'name': 'f' * 200}, {'name': 'foo'}],
            [{'name': 'Invalid!'}, {'name': 'foo'}],
            ):
        params = {'id': self.genre_vocab['name'], 'tags': tags}
        response = self.app.post('/api/action/vocabulary_update',
                                 params=helpers.json.dumps(params),
                                 extra_environ={'Authorization': apikey},
                                 status=409)
        assert response.json['success'] is False
        assert response.json['error']['tags']
def test_vocabulary_update_none_tags(self):
    '''Test updating vocabularies with None for 'tags'.

    '''
    params = {'id': self.genre_vocab['id'], 'tags': None}
    response = self.app.post('/api/action/vocabulary_update',
                             params=helpers.json.dumps(params),
                             extra_environ={'Authorization':
                                 str(self.sysadmin_user.apikey)},
                             status=400)
    assert "Integrity Error" in response.body, response.body
def test_vocabulary_update_empty_tags(self):
    '''Test updating vocabularies with [] for 'tags'.

    '''
    params = {'id': self.genre_vocab['id'], 'tags': []}
    response = self.app.post('/api/action/vocabulary_update',
                             params=helpers.json.dumps(params),
                             extra_environ={'Authorization':
                                 str(self.sysadmin_user.apikey)},
                             status=200)
    assert response.json['success'] is True
    assert response.json['result']
    updated_vocab = response.json['result']
    assert updated_vocab['name'] == self.genre_vocab['name']
    assert updated_vocab['id'] == self.genre_vocab['id']
    assert updated_vocab['tags'] == []
    params = {'id': updated_vocab['id']}
    response = self._post('/api/action/vocabulary_show', params)
    assert response['success'] is True
    assert response['result'] == updated_vocab
    tags = self._list_tags(updated_vocab)
    assert tags == []
def test_vocabulary_delete(self):
    '''Test that a sysadmin can delete a vocabulary.'''
    self._delete_vocabulary(self.genre_vocab['id'], self.sysadmin_user)
def test_vocabulary_delete_not_exists(self):
    '''Test the error response given when a user tries to delete a
    vocabulary that doesn't exist.

    '''
    params = {'id': 'xxxxxxx'}
    param_string = helpers.json.dumps(params)
    response = self.app.post('/api/action/vocabulary_delete',
                             params=param_string,
                             extra_environ={'Authorization':
                                 str(self.sysadmin_user.apikey)},
                             status=404)
    assert response.json['success'] is False
    assert response.json['error']['message'].startswith('Not found: '
                                                        'Could not find vocabulary')
def test_vocabulary_delete_no_id(self):
    '''Test the error response given when a user tries to delete a
    vocabulary without giving the vocabulary id.

    '''
    params = {}
    param_string = helpers.json.dumps(params)
    response = self.app.post('/api/action/vocabulary_delete',
                             params=param_string,
                             extra_environ={'Authorization':
                                 str(self.sysadmin_user.apikey)},
                             status=409)
    assert response.json['success'] is False
    assert 'id' in response.json['error']
    assert response.json['error']['id'] == 'id not in data'
def test_vocabulary_delete_not_logged_in(self):
    '''Test that users who are not logged in cannot delete vocabularies.'''
    params = {'id': self.genre_vocab['id']}
    param_string = helpers.json.dumps(params)
    response = self.app.post('/api/action/vocabulary_delete',
                             params=param_string,
                             status=403)
    assert response.json['success'] is False
    assert response.json['error']['__type'] == 'Authorization Error'
def test_vocabulary_delete_not_authorized(self):
    '''Test that users who are not authorized cannot delete vocabs.'''
    params = {'id': self.genre_vocab['id']}
    param_string = helpers.json.dumps(params)
    response = self.app.post('/api/action/vocabulary_delete',
                             params=param_string,
                             extra_environ={'Authorization':
                                 str(self.normal_user.apikey)},
                             status=403)
    assert response.json['success'] is False
    assert response.json['error']['__type'] == 'Authorization Error'
def test_add_tag_to_vocab(self):
    '''Test that a tag can be added to and then retrieved from a vocab.'''
    vocab = self.genre_vocab
    tags_before = self._list_tags(vocab)
    tag_created = self._create_tag(self.sysadmin_user, 'noise', vocab)
    tags_after = self._list_tags(vocab)
    new_tag_names = [tag_name for tag_name in tags_after if tag_name not in
                     tags_before]
    assert len(new_tag_names) == 1
    assert tag_created['name'] in new_tag_names
def test_add_tag_no_vocab(self):
'''Test the error response when a user tries to create a tag without
specifying a vocab.
'''
tag_dict = {'name': 'noise'}
tag_string = helpers.json.dumps(tag_dict)
response = self.app.post('/api/action/tag_create',
params=tag_string,
extra_environ={'Authorization':
str(self.sysadmin_user.apikey)},
status=409)
assert response.json['success'] is False
assert response.json['error']['vocabulary_id'] == ['Missing value']
def test_add_tag_vocab_not_exists(self):
'''Test the error response when a user tries to add a tag to a vocab
that doesn't exist.
'''
tag_dict = {'name': 'noise', 'vocabulary_id': 'does not exist'}
tag_string = helpers.json.dumps(tag_dict)
response = self.app.post('/api/action/tag_create',
params=tag_string,
extra_environ={'Authorization':
str(self.sysadmin_user.apikey)},
status=409)
assert response.json['success'] is False
assert response.json['error']['vocabulary_id'] == [
'Tag vocabulary was not found.']
def test_add_tag_already_added(self):
'''Test the error response when a user tries to add a tag to a vocab
that already has a tag with the same name.
'''
self.test_add_tag_to_vocab()
vocab = self.genre_vocab
tag_dict = {'name': 'noise', 'vocabulary_id': vocab['id']}
tag_string = helpers.json.dumps(tag_dict)
response = self.app.post('/api/action/tag_create',
params=tag_string,
extra_environ={'Authorization':
str(self.sysadmin_user.apikey)},
status=409)
assert response.json['success'] is False
assert response.json['error']['vocabulary_id'][0].startswith(
'Tag noise already belongs to vocabulary')
def test_add_tag_with_id(self):
'''Test the error response when a user tries to specify the tag ID when
adding a tag to a vocab.
'''
tag_dict = {
'id': 'dsagdsgsgsd',
'name': 'noise',
'vocabulary_id': self.genre_vocab['id']
}
tag_string = helpers.json.dumps(tag_dict)
response = self.app.post('/api/action/tag_create',
params=tag_string,
extra_environ={'Authorization':
str(self.sysadmin_user.apikey)},
status=409)
assert response.json['success'] is False
assert response.json['error']['id'] == [u'The input field id was not '
'expected.']
def test_add_tag_without_name(self):
'''Test the error response when a user tries to create a tag without a
name.
'''
tag_dict = {
'vocabulary_id': self.genre_vocab['id']
}
tag_string = helpers.json.dumps(tag_dict)
response = self.app.post('/api/action/tag_create',
params=tag_string,
extra_environ={'Authorization':
str(self.sysadmin_user.apikey)},
status=409)
assert response.json['success'] is False
assert response.json['error']['name'] == [u'Missing value']
def test_add_tag_invalid_name(self):
for name in ('Not a valid tag name!', '', None):
tag_dict = {
'name': name,
'vocabulary_id': self.genre_vocab['id']
}
tag_string = helpers.json.dumps(tag_dict)
response = self.app.post('/api/action/tag_create',
params=tag_string,
extra_environ={'Authorization':
str(self.sysadmin_apikey)},
status=409)
assert response.json['success'] is False
assert response.json['error']['name']
def test_add_tag_invalid_vocab_id(self):
tag_dict = {
'name': 'noise',
'vocabulary_id': 'xxcxzczxczxc',
}
tag_string = helpers.json.dumps(tag_dict)
response = self.app.post('/api/action/tag_create',
params=tag_string,
extra_environ={'Authorization':
str(self.sysadmin_user.apikey)},
status=409)
assert response.json['success'] is False
assert response.json['error']['vocabulary_id'] == [
u'Tag vocabulary was not found.']
def test_add_tag_not_logged_in(self):
tag_dict = {
'name': 'noise',
'vocabulary_id': self.genre_vocab['id']
}
tag_string = helpers.json.dumps(tag_dict)
response = self.app.post('/api/action/tag_create',
params=tag_string,
status=403)
assert response.json['success'] is False
assert response.json['error']['__type'] == 'Authorization Error'
def test_add_tag_not_authorized(self):
tag_dict = {
'name': 'noise',
'vocabulary_id': self.genre_vocab['id']
}
tag_string = helpers.json.dumps(tag_dict)
response = self.app.post('/api/action/tag_create',
params=tag_string,
extra_environ={'Authorization':
str(self.normal_user.apikey)},
status=403)
assert response.json['success'] is False
assert response.json['error']['__type'] == 'Authorization Error'
    def test_add_vocab_tag_to_dataset(self):
        '''Test that a tag belonging to a vocab can be added to a dataset,
        retrieved from the dataset, and then removed from the dataset.'''
        # Rebuild the db and re-run setup so this test starts from a clean,
        # known fixture state (it mutates packages).
        ckan.model.repo.rebuild_db()
        self.setup()
        ckan.tests.legacy.CreateTestData.create()
        # First add a tag to the vocab.
        vocab = self.genre_vocab
        tag = self._create_tag(self.sysadmin_user, 'noise', vocab)
        # Get a package from the API.
        # NOTE(review): the first entry of package_list is used as an
        # arbitrary test package -- assumes CreateTestData.create() made
        # at least one package.
        package = (self._post('/api/action/package_show',
            {'id': self._post('/api/action/package_list')['result'][0]})
            ['result'])
        # Add the new vocab tag to the package.
        package['tags'].append(tag)
        updated_package = self._post('/api/action/package_update',
            params={'id': package['id'], 'tags': package['tags']},
            extra_environ={'Authorization':
                str(self.sysadmin_user.apikey)})['result']
        # Test that the new vocab tag was added to the package.
        # Match on both name and vocabulary_id so a same-named free tag
        # cannot produce a false positive.
        tags_in_pkg = [tag_in_pkg for tag_in_pkg in updated_package['tags'] if
                tag_in_pkg['name'] == tag['name'] and
                tag_in_pkg['vocabulary_id'] == tag['vocabulary_id']]
        assert len(tags_in_pkg) == 1
        # Test that the package appears vocabulary_list.
        vocabs = self._post('/api/action/vocabulary_list')['result']
        genre_vocab = [vocab for vocab in vocabs if vocab['name'] == 'Genre']
        assert len(genre_vocab) == 1
        genre_vocab = genre_vocab[0]
        noise_tag = [tag_ for tag_ in genre_vocab['tags']
                     if tag_['name'] == 'noise']
        assert len(noise_tag) == 1
        noise_tag = noise_tag[0]
        # The tagged package must be listed under the tag's 'packages'.
        assert len([p for p in noise_tag['packages'] if
                    p['id'] == updated_package['id']]) == 1
        # Test that the tagged package appears in vocabulary_show.
        genre_vocab = self._post('/api/action/vocabulary_show',
                                 params={'id': genre_vocab['id']})['result']
        noise_tag = [tag_ for tag_ in genre_vocab['tags']
                     if tag_['name'] == 'noise']
        assert len(noise_tag) == 1
        noise_tag = noise_tag[0]
        assert len([p for p in noise_tag['packages'] if
                    p['id'] == updated_package['id']]) == 1
        # Remove the new vocab tag from the package.
        package['tags'].remove(tag)
        updated_package = self._post('/api/action/package_update',
            params={'id': package['id'], 'tags': package['tags']},
            extra_environ={'Authorization':
                str(self.sysadmin_user.apikey)})['result']
        # Test that the tag no longer appears in the list of tags for the
        # package.
        package = (self._post('/api/action/package_show',
            {'id': self._post('/api/action/package_list')['result'][0]})
            ['result'])
        tags_in_pkg = [tag_in_pkg for tag_in_pkg in package['tags'] if
                tag_in_pkg['name'] == tag['name'] and
                tag_in_pkg['vocabulary_id'] == tag['vocabulary_id']]
        assert len(tags_in_pkg) == 0
def test_delete_tag_from_vocab(self):
'''Test that a tag can be deleted from a vocab.'''
ckan.model.repo.rebuild_db()
self.setup()
ckan.tests.legacy.CreateTestData.create()
vocab = self.genre_vocab
# First add some tags to the vocab.
noise_tag = self._create_tag(self.sysadmin_user, 'noise', vocab)
ragga_tag = self._create_tag(self.sysadmin_user, 'ragga', vocab)
grunge_tag = self._create_tag(self.sysadmin_user, 'grunge', vocab)
funk_tag = self._create_tag(self.sysadmin_user, 'funk', vocab)
tags = (noise_tag, ragga_tag, grunge_tag, funk_tag)
# Get a package from the API.
package = (self._post('/api/action/package_show',
{'id': self._post('/api/action/package_list')['result'][0]})
['result'])
# Add the new vocab tags to the package.
for tag in tags:
package['tags'].append(tag)
updated_package = self._post('/api/action/package_update',
params={'id': package['id'], 'tags': package['tags']},
extra_environ={'Authorization':
str(self.sysadmin_user.apikey)})['result']
# Test that the new vocab tags were added to the package.
for tag in tags:
tags_in_pkg = [tag_in_pkg for tag_in_pkg in
updated_package['tags'] if tag_in_pkg['name'] ==
tag['name'] and tag_in_pkg['vocabulary_id'] ==
tag['vocabulary_id']]
assert len(tags_in_pkg) == 1
# Now delete the tags from the vocab.
tags_before = self._list_tags(vocab)
self._delete_tag(self.sysadmin_user, noise_tag['name'], vocab['name'])
self._delete_tag(self.sysadmin_user, ragga_tag['id'], vocab['name'])
self._delete_tag(self.sysadmin_user, grunge_tag['id'], vocab['id'])
self._delete_tag(self.sysadmin_user, funk_tag['name'], vocab['id'])
# Test that the tags no longer appear in the list of tags for the
# vocab.
tags_after = self._list_tags(vocab)
assert len(tags_after) == len(tags_before) - 4
assert tag['name'] not in tags_after
difference = [tag_name for tag_name in tags_before if tag_name not in
tags_after]
assert sorted(difference) == sorted([tag['name'] for tag in tags])
# Test that the tags no longer appear in the list of tags for the
# package.
package = (self._post('/api/action/package_show',
{'id': self._post('/api/action/package_list')['result'][0]})
['result'])
for tag in tags:
tags_in_pkg = [tag_in_pkg for tag_in_pkg in package['tags'] if
tag_in_pkg['name'] == tag['name'] and
tag_in_pkg['vocabulary_id'] == tag['vocabulary_id']]
assert len(tags_in_pkg) == 0
def test_delete_free_tag(self):
'''Test that a free tag can be deleted via the API, and is
automatically removed from datasets.
'''
ckan.model.repo.rebuild_db()
self.setup()
ckan.tests.legacy.CreateTestData.create()
# Get a package from the API.
package = (self._post('/api/action/package_show',
{'id': self._post('/api/action/package_list')['result'][0]})
['result'])
package_id = package['id']
# Add some new free tags to the package.
tags = package['tags']
tags.append({'name': 'ducks'})
tags.append({'name': 'birds'})
self._post('/api/action/package_update',
params={'id': package['id'], 'tags': tags},
extra_environ={'Authorization':
str(self.sysadmin_user.apikey)})
# Test that the new tags appear in the list of tags.
tags = self._list_tags()
assert [tag for tag in tags].count('ducks') == 1
assert [tag for tag in tags].count('birds') == 1
# Test that the new tags appear in the package's list of tags.
package = (self._post('/api/action/package_show',
{'id': package_id})['result'])
packages_tags = [tag['name'] for tag in package['tags']]
assert [tag for tag in packages_tags].count('ducks') == 1
assert [tag for tag in packages_tags].count('birds') == 1
# Now delete the tags.
self._delete_tag(self.sysadmin_user, 'ducks')
birds_tag_id = self._post('/api/action/tag_show',
{'id': 'birds'})['result']['id']
self._delete_tag(self.sysadmin_user, birds_tag_id)
# Test that the tags no longer appear in the list of tags.
tags = self._list_tags()
assert [tag for tag in tags].count('ducks') == 0
assert [tag for tag in tags].count('birds') == 0
# Test that the tags no longer appear in the package's list of tags.
package = (self._post('/api/action/package_show',
{'id': package_id})['result'])
packages_tags = [tag['name'] for tag in package['tags']]
assert [tag for tag in packages_tags].count('ducks') == 0
assert [tag for tag in packages_tags].count('birds') == 0
def test_delete_tag_no_id(self):
'''Test the error response when a user tries to delete a tag without
giving the tag id.
'''
vocab = self.genre_vocab
self._create_tag(self.sysadmin_user, 'noise', vocab)
for tag_id in ('missing', '', None):
# Now try to delete the tag from the vocab.
params = {'vocabulary_id': vocab['name']}
if tag_id != 'missing':
params['id'] = tag_id
response = self.app.post('/api/action/tag_delete',
params=helpers.json.dumps(params),
extra_environ={'Authorization':
str(self.sysadmin_user.apikey)},
status=409)
assert response.json['success'] is False
assert 'id' in response.json['error']
assert response.json['error']['id'] == 'id not in data'
def test_delete_tag_no_vocab(self):
'''Test the error response when a user tries to delete a vocab tag
without giving the vocab name.
'''
vocab = self.genre_vocab
tag = self._create_tag(self.sysadmin_user, 'noise', vocab)
# Now try to delete the tag from the vocab.
for vocab_name in ('', None, 'missing'):
params = {'id': tag['name']}
if vocab_name != 'missing':
params['vocabulary_id'] = vocab_name
response = self.app.post('/api/action/tag_delete',
params=helpers.json.dumps(params),
extra_environ={'Authorization':
str(self.sysadmin_user.apikey)},
status=404)
assert response.json['success'] is False
msg = response.json['error']['message']
assert msg == u'Not found: Could not find tag "{0}"'.format(
tag['name']), msg
def test_delete_tag_not_exists(self):
'''Test the error response when a user tries to delete a from a vocab
but there is no tag with that name in the vocab.
'''
vocab = self.genre_vocab
self._create_tag(self.sysadmin_user, 'noise', vocab)
params = {'id': 'nonexistent',
'vocabulary_id': self.genre_vocab['name']}
response = self.app.post('/api/action/tag_delete',
params=helpers.json.dumps(params),
extra_environ={'Authorization':
str(self.sysadmin_user.apikey)},
status=404)
assert response.json['success'] is False
msg = response.json['error']['message']
assert msg == u'Not found: Could not find tag "%s"' % 'nonexistent', \
msg
def test_delete_tag_vocab_not_exists(self):
'''Test the error response when a user tries to delete a from a vocab
but there is no vocab with that name.
'''
vocab = self.genre_vocab
tag = self._create_tag(self.sysadmin_user, 'noise', vocab)
params = {'id': tag['name'],
'vocabulary_id': 'nonexistent'}
response = self.app.post('/api/action/tag_delete',
params=helpers.json.dumps(params),
extra_environ={'Authorization':
str(self.sysadmin_user.apikey)},
status=404)
assert response.json['success'] is False
msg = response.json['error']['message']
assert msg == u"Not found: could not find vocabulary 'nonexistent'", \
msg
def test_delete_tag_invalid_tag(self):
'''Test the error response when a user tries to delete a tag but gives
an invalid tag name.
'''
vocab = self.genre_vocab
self._create_tag(self.sysadmin_user, 'noise', vocab)
for tag_name in ('Invalid!', ' '):
params = {'id': tag_name,
'vocabulary_id': self.genre_vocab['name']}
response = self.app.post('/api/action/tag_delete',
params=helpers.json.dumps(params),
extra_environ={'Authorization':
str(self.sysadmin_user.apikey)},
status=404)
assert response.json['success'] is False
msg = response.json['error']['message']
assert msg == u'Not found: Could not find tag "%s"' % tag_name, msg
def test_delete_tag_invalid_vocab(self):
'''Test the error response when a user tries to delete a tag but gives
an invalid vocab name.
'''
vocab = self.genre_vocab
tag = self._create_tag(self.sysadmin_user, 'noise', vocab)
for vocab_name in ('Invalid!', ' '):
params = {'id': tag['name'], 'vocabulary_id': vocab_name}
response = self.app.post('/api/action/tag_delete',
params=helpers.json.dumps(params),
extra_environ={'Authorization':
str(self.sysadmin_user.apikey)},
status=404)
assert response.json['success'] is False
msg = response.json['error']['message']
assert msg == u"Not found: could not find vocabulary '%s'" \
% vocab_name, msg
def test_delete_tag_not_logged_in(self):
vocab = self.genre_vocab
tag = self._create_tag(self.sysadmin_user, 'noise', vocab)
params = {'id': tag['name'],
'vocabulary_id': self.genre_vocab['name']}
response = self.app.post('/api/action/tag_delete',
params=helpers.json.dumps(params),
status=403)
assert response.json['success'] is False
error = response.json['error']['__type']
assert error == u"Authorization Error", error
def test_delete_tag_not_authorized(self):
vocab = self.genre_vocab
tag = self._create_tag(self.sysadmin_user, 'noise', vocab)
params = {'id': tag['name'],
'vocabulary_id': self.genre_vocab['name']}
response = self.app.post('/api/action/tag_delete',
params=helpers.json.dumps(params),
extra_environ={'Authorization':
str(self.normal_user.apikey)},
status=403)
assert response.json['success'] is False
msg = response.json['error']['__type']
assert msg == u"Authorization Error"
| 41.709507
| 79
| 0.580727
| 5,534
| 47,382
| 4.788941
| 0.047524
| 0.05177
| 0.038261
| 0.045431
| 0.8279
| 0.79145
| 0.758169
| 0.725191
| 0.703871
| 0.679458
| 0
| 0.004893
| 0.296948
| 47,382
| 1,135
| 80
| 41.746256
| 0.790676
| 0.106813
| 0
| 0.645123
| 0
| 0
| 0.156571
| 0.047101
| 0
| 0
| 0
| 0
| 0.185664
| 1
| 0.075206
| false
| 0.00235
| 0.005875
| 0
| 0.089307
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
160ee36272c93d3bfcf47db70290a3a1249dd22e
| 123
|
py
|
Python
|
h/services/search_index/__init__.py
|
pombredanne/h
|
9c4c2dc0d53ed5bed5183936c24b4c27b23070b4
|
[
"BSD-2-Clause"
] | 2,103
|
2015-01-07T12:47:49.000Z
|
2022-03-29T02:38:25.000Z
|
h/services/search_index/__init__.py
|
pombredanne/h
|
9c4c2dc0d53ed5bed5183936c24b4c27b23070b4
|
[
"BSD-2-Clause"
] | 4,322
|
2015-01-04T17:18:01.000Z
|
2022-03-31T17:06:02.000Z
|
h/services/search_index/__init__.py
|
admariner/h
|
25ef1b8d94889df86ace5a084f1aa0effd9f4e25
|
[
"BSD-2-Clause"
] | 389
|
2015-01-24T04:10:02.000Z
|
2022-03-28T08:00:16.000Z
|
from h.services.search_index.service import SearchIndexService
from h.services.search_index.service_factory import factory
| 41
| 62
| 0.886179
| 17
| 123
| 6.235294
| 0.529412
| 0.09434
| 0.245283
| 0.358491
| 0.584906
| 0.584906
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065041
| 123
| 2
| 63
| 61.5
| 0.921739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
162aa0b436b0e0f099bc01f1a3077f5ea59916dd
| 171,978
|
py
|
Python
|
yacctab.py
|
cynthia-doc/PA-final-project
|
8fe6a710313f2cc7507947ba9b5f1a0c0e5326a5
|
[
"BSD-3-Clause"
] | null | null | null |
yacctab.py
|
cynthia-doc/PA-final-project
|
8fe6a710313f2cc7507947ba9b5f1a0c0e5326a5
|
[
"BSD-3-Clause"
] | null | null | null |
yacctab.py
|
cynthia-doc/PA-final-project
|
8fe6a710313f2cc7507947ba9b5f1a0c0e5326a5
|
[
"BSD-3-Clause"
] | null | null | null |
# yacctab.py
# This file is automatically generated. Do not edit.
_tabversion = '3.10'
_lr_method = 'LALR'
_lr_signature = 'translation_unit_or_emptyleftLORleftLANDleftORleftXORleftANDleftEQNEleftGTGELTLEleftRSHIFTLSHIFTleftPLUSMINUSleftTIMESDIVIDEMOD_BOOL _COMPLEX AUTO BREAK CASE CHAR CONST CONTINUE DEFAULT DO DOUBLE ELSE ENUM EXTERN FLOAT FOR GOTO IF INLINE INT LONG REGISTER OFFSETOF RESTRICT RETURN SHORT SIGNED SIZEOF STATIC STRUCT SWITCH TYPEDEF UNION UNSIGNED VOID VOLATILE WHILE __INT128 ID TYPEID INT_CONST_DEC INT_CONST_OCT INT_CONST_HEX INT_CONST_BIN INT_CONST_CHAR FLOAT_CONST HEX_FLOAT_CONST CHAR_CONST WCHAR_CONST STRING_LITERAL WSTRING_LITERAL PLUS MINUS TIMES DIVIDE MOD OR AND NOT XOR LSHIFT RSHIFT LOR LAND LNOT LT LE GT GE EQ NE EQUALS TIMESEQUAL DIVEQUAL MODEQUAL PLUSEQUAL MINUSEQUAL LSHIFTEQUAL RSHIFTEQUAL ANDEQUAL XOREQUAL OREQUAL PLUSPLUS MINUSMINUS ARROW CONDOP LPAREN RPAREN LBRACKET RBRACKET LBRACE RBRACE COMMA PERIOD SEMI COLON ELLIPSIS PPHASH PPPRAGMA PPPRAGMASTRabstract_declarator_opt : empty\n| abstract_declaratorassignment_expression_opt : empty\n| assignment_expressionblock_item_list_opt : empty\n| block_item_listdeclaration_list_opt : empty\n| declaration_listdeclaration_specifiers_no_type_opt : empty\n| declaration_specifiers_no_typedesignation_opt : empty\n| designationexpression_opt : empty\n| expressionid_init_declarator_list_opt : empty\n| id_init_declarator_listidentifier_list_opt : empty\n| identifier_listinit_declarator_list_opt : empty\n| init_declarator_listinitializer_list_opt : empty\n| initializer_listparameter_type_list_opt : empty\n| parameter_type_liststruct_declarator_list_opt : empty\n| struct_declarator_listtype_qualifier_list_opt : empty\n| type_qualifier_list direct_id_declarator : ID\n direct_id_declarator : LPAREN id_declarator RPAREN\n direct_id_declarator : direct_id_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_id_declarator : direct_id_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_id_declarator LBRACKET type_qualifier_list STATIC 
assignment_expression RBRACKET\n direct_id_declarator : direct_id_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_id_declarator : direct_id_declarator LPAREN parameter_type_list RPAREN\n | direct_id_declarator LPAREN identifier_list_opt RPAREN\n direct_typeid_declarator : TYPEID\n direct_typeid_declarator : LPAREN typeid_declarator RPAREN\n direct_typeid_declarator : direct_typeid_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_typeid_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LPAREN parameter_type_list RPAREN\n | direct_typeid_declarator LPAREN identifier_list_opt RPAREN\n direct_typeid_noparen_declarator : TYPEID\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_typeid_noparen_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LPAREN parameter_type_list RPAREN\n | direct_typeid_noparen_declarator LPAREN identifier_list_opt RPAREN\n id_declarator : direct_id_declarator\n id_declarator : pointer direct_id_declarator\n typeid_declarator : direct_typeid_declarator\n typeid_declarator : pointer direct_typeid_declarator\n typeid_noparen_declarator : direct_typeid_noparen_declarator\n typeid_noparen_declarator : pointer 
direct_typeid_noparen_declarator\n translation_unit_or_empty : translation_unit\n | empty\n translation_unit : external_declaration\n translation_unit : translation_unit external_declaration\n external_declaration : function_definition\n external_declaration : declaration\n external_declaration : pp_directive\n | pppragma_directive\n external_declaration : SEMI\n pp_directive : PPHASH\n pppragma_directive : PPPRAGMA\n | PPPRAGMA PPPRAGMASTR\n function_definition : id_declarator declaration_list_opt compound_statement\n function_definition : declaration_specifiers id_declarator declaration_list_opt compound_statement\n statement : labeled_statement\n | expression_statement\n | compound_statement\n | selection_statement\n | iteration_statement\n | jump_statement\n | pppragma_directive\n pragmacomp_or_statement : pppragma_directive statement\n | statement\n decl_body : declaration_specifiers init_declarator_list_opt\n | declaration_specifiers_no_type id_init_declarator_list_opt\n declaration : decl_body SEMI\n declaration_list : declaration\n | declaration_list declaration\n declaration_specifiers_no_type : type_qualifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : storage_class_specifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : function_specifier declaration_specifiers_no_type_opt\n declaration_specifiers : declaration_specifiers type_qualifier\n declaration_specifiers : declaration_specifiers storage_class_specifier\n declaration_specifiers : declaration_specifiers function_specifier\n declaration_specifiers : declaration_specifiers type_specifier_no_typeid\n declaration_specifiers : type_specifier\n declaration_specifiers : declaration_specifiers_no_type type_specifier\n storage_class_specifier : AUTO\n | REGISTER\n | STATIC\n | EXTERN\n | TYPEDEF\n function_specifier : INLINE\n type_specifier_no_typeid : VOID\n | _BOOL\n | CHAR\n | SHORT\n | INT\n | LONG\n | FLOAT\n | DOUBLE\n | _COMPLEX\n | SIGNED\n | 
UNSIGNED\n | __INT128\n type_specifier : typedef_name\n | enum_specifier\n | struct_or_union_specifier\n | type_specifier_no_typeid\n type_qualifier : CONST\n | RESTRICT\n | VOLATILE\n init_declarator_list : init_declarator\n | init_declarator_list COMMA init_declarator\n init_declarator : declarator\n | declarator EQUALS initializer\n id_init_declarator_list : id_init_declarator\n | id_init_declarator_list COMMA init_declarator\n id_init_declarator : id_declarator\n | id_declarator EQUALS initializer\n specifier_qualifier_list : specifier_qualifier_list type_specifier_no_typeid\n specifier_qualifier_list : specifier_qualifier_list type_qualifier\n specifier_qualifier_list : type_specifier\n specifier_qualifier_list : type_qualifier_list type_specifier\n struct_or_union_specifier : struct_or_union ID\n | struct_or_union TYPEID\n struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close\n | struct_or_union brace_open brace_close\n struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close\n | struct_or_union ID brace_open brace_close\n | struct_or_union TYPEID brace_open struct_declaration_list brace_close\n | struct_or_union TYPEID brace_open brace_close\n struct_or_union : STRUCT\n | UNION\n struct_declaration_list : struct_declaration\n | struct_declaration_list struct_declaration\n struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI\n struct_declaration : SEMI\n struct_declaration : pppragma_directive\n struct_declarator_list : struct_declarator\n | struct_declarator_list COMMA struct_declarator\n struct_declarator : declarator\n struct_declarator : declarator COLON constant_expression\n | COLON constant_expression\n enum_specifier : ENUM ID\n | ENUM TYPEID\n enum_specifier : ENUM brace_open enumerator_list brace_close\n enum_specifier : ENUM ID brace_open enumerator_list brace_close\n | ENUM TYPEID brace_open enumerator_list brace_close\n enumerator_list : enumerator\n 
| enumerator_list COMMA\n | enumerator_list COMMA enumerator\n enumerator : ID\n | ID EQUALS constant_expression\n declarator : id_declarator\n | typeid_declarator\n pointer : TIMES type_qualifier_list_opt\n | TIMES type_qualifier_list_opt pointer\n type_qualifier_list : type_qualifier\n | type_qualifier_list type_qualifier\n parameter_type_list : parameter_list\n | parameter_list COMMA ELLIPSIS\n parameter_list : parameter_declaration\n | parameter_list COMMA parameter_declaration\n parameter_declaration : declaration_specifiers id_declarator\n | declaration_specifiers typeid_noparen_declarator\n parameter_declaration : declaration_specifiers abstract_declarator_opt\n identifier_list : identifier\n | identifier_list COMMA identifier\n initializer : assignment_expression\n initializer : brace_open initializer_list_opt brace_close\n | brace_open initializer_list COMMA brace_close\n initializer_list : designation_opt initializer\n | initializer_list COMMA designation_opt initializer\n designation : designator_list EQUALS\n designator_list : designator\n | designator_list designator\n designator : LBRACKET constant_expression RBRACKET\n | PERIOD identifier\n type_name : specifier_qualifier_list abstract_declarator_opt\n abstract_declarator : pointer\n abstract_declarator : pointer direct_abstract_declarator\n abstract_declarator : direct_abstract_declarator\n direct_abstract_declarator : LPAREN abstract_declarator RPAREN direct_abstract_declarator : direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET\n direct_abstract_declarator : LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET\n direct_abstract_declarator : LBRACKET TIMES RBRACKET\n direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN\n direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN\n block_item : declaration\n | statement\n 
block_item_list : block_item\n | block_item_list block_item\n compound_statement : brace_open block_item_list_opt brace_close labeled_statement : ID COLON pragmacomp_or_statement labeled_statement : CASE constant_expression COLON pragmacomp_or_statement labeled_statement : DEFAULT COLON pragmacomp_or_statement selection_statement : IF LPAREN expression RPAREN pragmacomp_or_statement selection_statement : IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement selection_statement : SWITCH LPAREN expression RPAREN pragmacomp_or_statement iteration_statement : WHILE LPAREN expression RPAREN pragmacomp_or_statement iteration_statement : DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement jump_statement : GOTO ID SEMI jump_statement : BREAK SEMI jump_statement : CONTINUE SEMI jump_statement : RETURN expression SEMI\n | RETURN SEMI\n expression_statement : expression_opt SEMI expression : assignment_expression\n | expression COMMA assignment_expression\n typedef_name : TYPEID assignment_expression : conditional_expression\n | unary_expression assignment_operator assignment_expression\n assignment_operator : EQUALS\n | XOREQUAL\n | TIMESEQUAL\n | DIVEQUAL\n | MODEQUAL\n | PLUSEQUAL\n | MINUSEQUAL\n | LSHIFTEQUAL\n | RSHIFTEQUAL\n | ANDEQUAL\n | OREQUAL\n constant_expression : conditional_expression conditional_expression : binary_expression\n | binary_expression CONDOP expression COLON conditional_expression\n binary_expression : cast_expression\n | binary_expression TIMES binary_expression\n | binary_expression DIVIDE binary_expression\n | binary_expression MOD binary_expression\n | binary_expression PLUS binary_expression\n | binary_expression MINUS binary_expression\n | binary_expression RSHIFT binary_expression\n | 
binary_expression LSHIFT binary_expression\n | binary_expression LT binary_expression\n | binary_expression LE binary_expression\n | binary_expression GE binary_expression\n | binary_expression GT binary_expression\n | binary_expression EQ binary_expression\n | binary_expression NE binary_expression\n | binary_expression AND binary_expression\n | binary_expression OR binary_expression\n | binary_expression XOR binary_expression\n | binary_expression LAND binary_expression\n | binary_expression LOR binary_expression\n cast_expression : unary_expression cast_expression : LPAREN type_name RPAREN cast_expression unary_expression : postfix_expression unary_expression : PLUSPLUS unary_expression\n | MINUSMINUS unary_expression\n | unary_operator cast_expression\n unary_expression : SIZEOF unary_expression\n | SIZEOF LPAREN type_name RPAREN\n unary_operator : AND\n | TIMES\n | PLUS\n | MINUS\n | NOT\n | LNOT\n postfix_expression : primary_expression postfix_expression : postfix_expression LBRACKET expression RBRACKET postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN\n | postfix_expression LPAREN RPAREN\n postfix_expression : postfix_expression PERIOD ID\n | postfix_expression PERIOD TYPEID\n | postfix_expression ARROW ID\n | postfix_expression ARROW TYPEID\n postfix_expression : postfix_expression PLUSPLUS\n | postfix_expression MINUSMINUS\n postfix_expression : LPAREN type_name RPAREN brace_open initializer_list brace_close\n | LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close\n primary_expression : identifier primary_expression : constant primary_expression : unified_string_literal\n | unified_wstring_literal\n primary_expression : LPAREN expression RPAREN primary_expression : OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN\n offsetof_member_designator : identifier\n | offsetof_member_designator PERIOD identifier\n | offsetof_member_designator LBRACKET expression RBRACKET\n argument_expression_list : 
assignment_expression\n | argument_expression_list COMMA assignment_expression\n identifier : ID constant : INT_CONST_DEC\n | INT_CONST_OCT\n | INT_CONST_HEX\n | INT_CONST_BIN\n | INT_CONST_CHAR\n constant : FLOAT_CONST\n | HEX_FLOAT_CONST\n constant : CHAR_CONST\n | WCHAR_CONST\n unified_string_literal : STRING_LITERAL\n | unified_string_literal STRING_LITERAL\n unified_wstring_literal : WSTRING_LITERAL\n | unified_wstring_literal WSTRING_LITERAL\n brace_open : LBRACE\n brace_close : RBRACE\n empty : '
_lr_action_items = {'$end':([0,1,2,3,4,5,6,7,8,9,13,14,55,77,78,105,144,211,265,],[-310,0,-58,-59,-60,-62,-63,-64,-65,-66,-67,-68,-61,-83,-69,-70,-309,-71,-202,]),'SEMI':([0,2,4,5,6,7,8,9,11,12,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,60,61,62,63,64,65,66,67,69,70,72,73,74,75,76,77,78,81,82,83,84,85,86,87,88,89,90,91,92,98,99,101,102,103,104,105,106,108,110,127,131,139,140,141,142,143,144,145,146,147,148,151,152,153,154,155,156,157,158,159,160,161,162,163,166,169,172,175,176,177,178,179,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,228,229,243,244,247,250,251,252,253,254,255,256,257,258,259,260,261,262,264,265,266,267,268,270,271,273,274,283,284,285,286,287,288,289,290,326,327,328,330,331,332,334,335,350,351,352,353,372,373,376,377,378,381,382,383,385,388,392,396,397,398,399,400,401,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,432,439,440,443,444,457,458,459,461,463,464,465,467,468,470,471,474,476,480,481,492,493,495,496,498,500,509,510,512,515,520,521,522,524,527,528,530,],[9,9,-60,-62,-63,-64,-65,-66,-310,77,-67,-68,-52,-310,-310,-310,-116,-93,-310,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,-310,-310,-162,-89,-90,-91,-92,-81,-19,-20,-120,-122,-163,-54,-37,-83,-69,-53,-86,-9,-10,-87,-88,-94,-82,-15,-16,-124,-126,-152,-153,-308,-132,-133,146,-70,-310,-162,-55,-294,-30,146,146,146,-135,-142,-309,-310,-145,-146,-130,-13,-310,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-294,274,-14,-310,287,288,290,-219,-222,-257,-236,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-71,-121,-38,-123,-177,-35,-36,-125,-127,-154,146,-137,146,-139,-134,-143,378,-128,-129,-25,-26,-147,-149,-131,-202,-201,-13,-310,-235,-257,-310,-218,-78,-80,-310,399,-214,-215,400,-217,-279,-280,-260,-261,-262,-263,-30
5,-307,-43,-44,-31,-34,-155,-156,-136,-138,-144,-151,-203,-310,-205,-287,-220,-79,467,-310,-213,-216,-223,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,-253,-254,-255,-256,-274,-275,-276,-277,-278,-178,-39,-42,-32,-33,-148,-150,-204,-310,-258,-310,-310,-310,499,-272,-273,-264,-179,-40,-41,-206,-80,-208,-209,513,-237,-310,-281,522,-288,-207,-282,-210,-310,-310,-212,-211,]),'PPHASH':([0,2,4,5,6,7,8,9,13,14,55,77,78,105,144,211,265,],[13,13,-60,-62,-63,-64,-65,-66,-67,-68,-61,-83,-69,-70,-309,-71,-202,]),'PPPRAGMA':([0,2,4,5,6,7,8,9,13,14,55,77,78,101,104,105,106,139,140,141,143,144,146,147,152,153,154,155,156,157,158,159,160,161,162,172,211,250,252,255,265,266,268,273,274,283,284,287,288,290,378,382,383,385,396,399,400,459,461,464,465,492,493,495,496,509,520,522,524,527,528,530,],[14,14,-60,-62,-63,-64,-65,-66,-67,-68,-61,-83,-69,-308,14,-70,14,14,14,14,-142,-309,-145,-146,14,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,14,-71,14,14,-143,-202,-201,14,14,-218,14,-80,-214,-215,-217,-144,-203,14,-205,-79,-213,-216,-204,14,14,14,-206,-80,-208,-209,14,-207,-210,14,14,-212,-211,]),'ID':([0,2,4,5,6,7,8,9,11,13,14,16,17,18,19,20,21,22,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,60,61,63,64,65,66,68,71,77,78,79,80,82,83,84,85,86,87,94,95,96,97,98,99,100,101,102,103,105,106,111,113,114,115,116,117,118,126,129,130,132,133,134,135,142,144,145,148,152,153,154,155,156,157,158,159,160,161,162,164,168,172,174,177,183,184,185,187,188,189,190,191,193,194,211,216,217,218,219,223,226,227,231,235,239,240,247,248,249,251,253,254,257,258,263,264,265,266,268,272,273,274,275,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,329,333,339,340,341,344,345,347,348,349,361,362,365,368,370,372,373,376,377,379,380,382,383,385,387,396,398,399,400,433,435,445,446,447,455,459,460,461,462,464,465,466,467,469
,472,475,477,478,483,484,485,492,493,495,496,499,509,511,513,516,517,520,522,524,527,528,530,],[23,23,-60,-62,-63,-64,-65,-66,23,-67,-68,23,-310,-310,-310,-116,-93,23,23,-97,-310,-113,-114,-115,-221,98,102,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-140,-141,-61,23,23,-89,-90,-91,-92,23,23,-83,-69,-310,127,-86,-9,-10,-87,-88,-94,-164,-27,-28,-166,-152,-153,138,-308,-132,-133,-70,163,23,127,-310,127,127,-310,-28,23,23,127,-165,-167,138,138,-135,-309,23,-130,163,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,127,127,163,286,127,127,127,127,127,-266,-267,-268,-265,-269,-270,-71,-310,127,-310,-28,-266,127,127,127,23,23,-310,-154,138,127,-137,-139,-134,-128,-129,127,-131,-202,-201,163,127,163,-218,127,127,127,127,163,-80,127,-214,-215,-217,127,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,425,427,127,127,-11,127,-12,127,127,-266,127,127,-310,127,23,127,127,-155,-156,-136,-138,23,127,-203,163,-205,127,-79,127,-213,-216,-310,-182,127,-310,-28,-266,-204,127,163,-310,163,163,127,127,127,127,127,127,-11,-266,127,127,-206,-80,-208,-209,127,163,-310,127,127,127,-207,-210,163,163,-212,-211,]),'LPAREN':([0,2,4,5,6,7,8,9,11,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,60,61,63,64,65,66,68,71,75,76,77,78,79,81,82,83,84,85,86,87,94,95,96,97,98,99,101,102,103,105,106,110,111,113,114,116,117,118,126,127,129,130,131,132,133,142,144,145,148,152,153,154,155,156,157,158,159,160,161,162,163,164,167,168,170,171,172,173,177,182,183,184,185,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,213,216,217,218,219,223,226,227,228,229,235,236,239,240,241,242,247,249,251,253,254,257,258,263,264,265,266,268,272,273,274,275,278,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,
311,312,313,314,315,316,317,318,319,320,321,322,323,326,327,329,333,334,335,339,340,341,344,347,348,349,350,351,352,353,359,360,361,365,368,370,372,373,376,377,379,380,382,383,385,387,388,390,391,395,396,398,399,400,423,425,426,427,428,433,435,439,440,443,444,445,446,447,450,451,453,455,459,460,461,462,464,465,466,467,469,470,471,472,477,478,480,481,483,484,485,486,487,488,489,490,491,492,493,495,496,499,505,506,509,510,511,513,515,517,518,519,520,521,522,524,527,528,530,],[24,24,-60,-62,-63,-64,-65,-66,71,-67,-68,80,24,-310,-310,-310,-116,-93,24,-29,24,-97,-310,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,71,24,-89,-90,-91,-92,71,71,115,-37,-83,-69,-310,80,-86,-9,-10,-87,-88,-94,-164,-27,-28,-166,-152,-153,-308,-132,-133,-70,168,115,71,168,-310,168,-310,-28,239,-294,71,168,-30,-165,-167,-135,-309,71,-130,168,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-294,272,275,168,280,281,168,285,168,323,329,329,272,333,-266,-267,-268,-265,-271,-269,-270,-283,-284,-285,-286,336,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-71,-38,-310,168,-310,-28,-266,168,168,-35,-36,239,362,239,-310,-45,371,-154,272,-137,-139,-134,-128,-129,272,-131,-202,-201,168,168,168,-218,168,391,168,168,168,168,-80,168,-214,-215,-217,168,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,168,272,272,272,272,272,272,272,272,272,272,272,272,272,272,272,272,272,272,168,168,-279,-280,168,168,-305,-307,-11,168,-12,272,-266,168,168,-43,-44,-31,-34,362,371,-310,239,168,168,-155,-156,-136,-138,71,272,-203,168,-205,272,-287,391,391,466,-79,168,-213,-216,-274,-275,-276,-277,-278,-310,-182,-39,-42,-32,-33,168,-310,-28,-191,-197,-195,-266,-204,272,168,-310,168,168,168,168,272,-272,-273,168,168,-11,-40,-41,-266,168,168,-50,-51,-193,-192,-194,-196,-206,-80,-208,-209,168,-46,-49,168,-281,-310,168,-288,168,-47,-48,-207,-282,-210,168,168,-212,-211,]),'TIMES':([0,2,4,5,6,7,8,9,11,13,14,17,18,19,20,21,22,24,25,26,27,28,29,30,33,34,35,36
,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,60,61,63,64,65,66,71,77,78,79,82,83,84,85,86,87,94,95,96,97,98,99,101,102,103,105,106,111,113,114,116,117,118,126,127,129,130,133,142,144,145,148,152,153,154,155,156,157,158,159,160,161,162,163,164,168,172,177,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,211,216,217,218,219,223,226,227,239,240,247,249,251,253,254,257,258,263,264,265,266,268,271,272,273,274,275,278,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,326,327,328,329,330,331,332,333,334,335,339,340,341,344,347,348,349,361,368,370,372,373,376,377,379,380,382,383,385,387,388,391,396,398,399,400,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,433,435,445,446,447,455,459,460,461,462,463,464,465,466,467,469,470,471,472,474,477,478,483,484,485,492,493,495,496,499,509,510,511,513,515,517,520,521,522,524,527,528,530,],[26,26,-60,-62,-63,-64,-65,-66,26,-67,-68,-310,-310,-310,-116,-93,26,26,-97,-310,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,26,26,-89,-90,-91,-92,26,-83,-69,-310,-86,-9,-10,-87,-88,-94,26,-27,-28,-166,-152,-153,-308,-132,-133,-70,188,26,188,-310,223,-310,-28,26,-294,26,188,-167,-135,-309,26,-130,188,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-294,188,188,188,188,-257,304,-259,188,188,188,-238,188,-266,-267,-268,-265,-271,-269,-270,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-71,-310,347,-310,-28,-266,188,188,26,369,-154,188,-137,-139,-134,-128,-129,188,-131,-202,-201,188,-257,188,188,-218,188,26,188,188,188,188,-80,188,-214,-215,-217,188,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,-279,-280,-260,188,-261,-262,-263,1
88,-305,-307,-11,188,-12,188,-266,188,188,-310,188,455,-155,-156,-136,-138,26,188,-203,188,-205,188,-287,26,-79,188,-213,-216,-239,-240,-241,304,304,304,304,304,304,304,304,304,304,304,304,304,304,304,-274,-275,-276,-277,-278,-310,-182,483,-310,-28,-266,-204,188,188,-310,-258,188,188,188,188,188,-272,-273,188,-264,188,-11,-266,188,188,-206,-80,-208,-209,188,188,-281,-310,188,-288,188,-207,-282,-210,188,188,-212,-211,]),'TYPEID':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,58,59,60,61,62,63,64,65,66,68,71,77,78,80,81,82,83,84,85,86,87,94,95,96,97,98,99,101,102,103,104,105,106,107,111,115,126,128,129,131,132,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,211,228,229,230,235,239,247,250,251,252,253,254,255,257,258,264,265,266,272,274,283,284,285,287,288,290,324,325,329,333,336,352,353,362,371,372,373,376,377,378,379,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[30,30,-60,-62,-63,-64,-65,-66,30,76,-67,-68,-52,-310,-310,-310,-116,-93,30,-29,-97,-310,-113,-114,-115,-221,99,103,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-140,-141,-61,30,-84,76,30,30,-89,-90,-91,-92,76,76,-83,-69,30,-53,-86,-9,-10,-87,-88,-94,-164,-27,-28,-166,-152,-153,-308,-132,-133,30,-70,30,-85,76,30,241,30,76,-30,-165,-167,30,30,30,-135,-142,-309,76,-145,-146,-130,30,30,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,30,-71,-35,-36,30,241,30,-154,30,-137,30,-139,-134,-143,-128,-129,-131,-202,-201,30,-218,-78,-80,30,-214,-215,-217,426,428,30,30,30,-31,-34,30,30,-155,-156,-136,-138,-144,76,-203,-205,30,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'ENUM':([0,2,4,5,6,7,8,9,10,13,14,15,17,18,19,22,23,25,45,46,47,48,49,50,51,52,55,58,59,61,62,77,78,80,81,82,83,84,85,86,97,101,104,105,106,107,115,128,131,133,139,140,141,143,144,146,147,149,152,153,154,155,156,157,15
8,159,160,161,162,168,211,228,229,230,239,250,252,255,265,266,272,274,283,284,285,287,288,290,329,333,336,352,353,362,371,378,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[31,31,-60,-62,-63,-64,-65,-66,31,-67,-68,-52,-310,-310,-310,31,-29,-97,-117,-118,-119,-95,-96,-98,-99,-100,-61,31,-84,31,31,-83,-69,31,-53,-86,-9,-10,-87,-88,-166,-308,31,-70,31,-85,31,31,-30,-167,31,31,31,-142,-309,-145,-146,31,31,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,31,-71,-35,-36,31,31,31,31,-143,-202,-201,31,-218,-78,-80,31,-214,-215,-217,31,31,31,-31,-34,31,31,-144,-203,-205,31,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'VOID':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,211,228,229,230,239,247,250,251,252,253,254,255,257,258,264,265,266,272,274,278,283,284,285,287,288,290,329,333,336,352,353,362,371,372,373,376,377,378,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[33,33,-60,-62,-63,-64,-65,-66,33,33,-67,-68,-52,-310,-310,-310,-116,-93,33,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,33,-84,33,33,33,-89,-90,-91,-92,-83,-69,33,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-308,-132,-133,33,-70,33,-85,33,33,33,-30,-167,33,33,33,-135,-142,-309,33,-145,-146,-130,33,33,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,33,-71,-35,-36,33,33,-154,33,-137,33,-139,-134,-143,-128,-129,-131,-202,-201,33,-218,33,-78,-80,33,-214,-215,-217,33,33,33,-31,-34,33,33,-155,-156,-136,-138,-144,-203,-205,33,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'_BOOL':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,
42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,211,228,229,230,239,247,250,251,252,253,254,255,257,258,264,265,266,272,274,278,283,284,285,287,288,290,329,333,336,352,353,362,371,372,373,376,377,378,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[34,34,-60,-62,-63,-64,-65,-66,34,34,-67,-68,-52,-310,-310,-310,-116,-93,34,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,34,-84,34,34,34,-89,-90,-91,-92,-83,-69,34,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-308,-132,-133,34,-70,34,-85,34,34,34,-30,-167,34,34,34,-135,-142,-309,34,-145,-146,-130,34,34,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,34,-71,-35,-36,34,34,-154,34,-137,34,-139,-134,-143,-128,-129,-131,-202,-201,34,-218,34,-78,-80,34,-214,-215,-217,34,34,34,-31,-34,34,34,-155,-156,-136,-138,-144,-203,-205,34,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'CHAR':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,211,228,229,230,239,247,250,251,252,253,254,255,257,258,264,265,266,272,274,278,283,284,285,287,288,290,329,333,336,352,353,362,371,372,373,376,377,378,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[35,35,-60,-62,-63,-64,-65,-66,35,35,-67,-68,-52,-310,-310,-310,-116,-93,35,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,35,-84,35,35,35,-89,-90,-91,-92,-83,-69,35,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-308,-132
,-133,35,-70,35,-85,35,35,35,-30,-167,35,35,35,-135,-142,-309,35,-145,-146,-130,35,35,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,35,-71,-35,-36,35,35,-154,35,-137,35,-139,-134,-143,-128,-129,-131,-202,-201,35,-218,35,-78,-80,35,-214,-215,-217,35,35,35,-31,-34,35,35,-155,-156,-136,-138,-144,-203,-205,35,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'SHORT':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,211,228,229,230,239,247,250,251,252,253,254,255,257,258,264,265,266,272,274,278,283,284,285,287,288,290,329,333,336,352,353,362,371,372,373,376,377,378,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[36,36,-60,-62,-63,-64,-65,-66,36,36,-67,-68,-52,-310,-310,-310,-116,-93,36,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,36,-84,36,36,36,-89,-90,-91,-92,-83,-69,36,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-308,-132,-133,36,-70,36,-85,36,36,36,-30,-167,36,36,36,-135,-142,-309,36,-145,-146,-130,36,36,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,36,-71,-35,-36,36,36,-154,36,-137,36,-139,-134,-143,-128,-129,-131,-202,-201,36,-218,36,-78,-80,36,-214,-215,-217,36,36,36,-31,-34,36,36,-155,-156,-136,-138,-144,-203,-205,36,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'INT':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,211,228,229,230,239,247,250,251,252,253,254,
255,257,258,264,265,266,272,274,278,283,284,285,287,288,290,329,333,336,352,353,362,371,372,373,376,377,378,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[37,37,-60,-62,-63,-64,-65,-66,37,37,-67,-68,-52,-310,-310,-310,-116,-93,37,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,37,-84,37,37,37,-89,-90,-91,-92,-83,-69,37,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-308,-132,-133,37,-70,37,-85,37,37,37,-30,-167,37,37,37,-135,-142,-309,37,-145,-146,-130,37,37,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,37,-71,-35,-36,37,37,-154,37,-137,37,-139,-134,-143,-128,-129,-131,-202,-201,37,-218,37,-78,-80,37,-214,-215,-217,37,37,37,-31,-34,37,37,-155,-156,-136,-138,-144,-203,-205,37,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'LONG':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,211,228,229,230,239,247,250,251,252,253,254,255,257,258,264,265,266,272,274,278,283,284,285,287,288,290,329,333,336,352,353,362,371,372,373,376,377,378,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[38,38,-60,-62,-63,-64,-65,-66,38,38,-67,-68,-52,-310,-310,-310,-116,-93,38,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,38,-84,38,38,38,-89,-90,-91,-92,-83,-69,38,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-308,-132,-133,38,-70,38,-85,38,38,38,-30,-167,38,38,38,-135,-142,-309,38,-145,-146,-130,38,38,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,38,-71,-35,-36,38,38,-154,38,-137,38,-139,-134,-143,-128,-129,-131,-202,-201,38,-218,38,-78,-80,38,-214,-215,-217,38,38,38,-31,-34,38,38,-155,-156,-136,-138,
-144,-203,-205,38,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'FLOAT':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,211,228,229,230,239,247,250,251,252,253,254,255,257,258,264,265,266,272,274,278,283,284,285,287,288,290,329,333,336,352,353,362,371,372,373,376,377,378,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[39,39,-60,-62,-63,-64,-65,-66,39,39,-67,-68,-52,-310,-310,-310,-116,-93,39,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,39,-84,39,39,39,-89,-90,-91,-92,-83,-69,39,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-308,-132,-133,39,-70,39,-85,39,39,39,-30,-167,39,39,39,-135,-142,-309,39,-145,-146,-130,39,39,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,39,-71,-35,-36,39,39,-154,39,-137,39,-139,-134,-143,-128,-129,-131,-202,-201,39,-218,39,-78,-80,39,-214,-215,-217,39,39,39,-31,-34,39,39,-155,-156,-136,-138,-144,-203,-205,39,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'DOUBLE':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,211,228,229,230,239,247,250,251,252,253,254,255,257,258,264,265,266,272,274,278,283,284,285,287,288,290,329,333,336,352,353,362,371,372,373,376,377,378,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[40,40,-60,-62,-63,-64,-65,-66,40,40,-67,-68,-52,-310,-310,-310,-116,-93,40,-29,-97,-113,-114,-115,-221,-
101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,40,-84,40,40,40,-89,-90,-91,-92,-83,-69,40,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-308,-132,-133,40,-70,40,-85,40,40,40,-30,-167,40,40,40,-135,-142,-309,40,-145,-146,-130,40,40,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,40,-71,-35,-36,40,40,-154,40,-137,40,-139,-134,-143,-128,-129,-131,-202,-201,40,-218,40,-78,-80,40,-214,-215,-217,40,40,40,-31,-34,40,40,-155,-156,-136,-138,-144,-203,-205,40,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'_COMPLEX':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,211,228,229,230,239,247,250,251,252,253,254,255,257,258,264,265,266,272,274,278,283,284,285,287,288,290,329,333,336,352,353,362,371,372,373,376,377,378,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[41,41,-60,-62,-63,-64,-65,-66,41,41,-67,-68,-52,-310,-310,-310,-116,-93,41,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,41,-84,41,41,41,-89,-90,-91,-92,-83,-69,41,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-308,-132,-133,41,-70,41,-85,41,41,41,-30,-167,41,41,41,-135,-142,-309,41,-145,-146,-130,41,41,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,41,-71,-35,-36,41,41,-154,41,-137,41,-139,-134,-143,-128,-129,-131,-202,-201,41,-218,41,-78,-80,41,-214,-215,-217,41,41,41,-31,-34,41,41,-155,-156,-136,-138,-144,-203,-205,41,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'SIGNED':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,
86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,211,228,229,230,239,247,250,251,252,253,254,255,257,258,264,265,266,272,274,278,283,284,285,287,288,290,329,333,336,352,353,362,371,372,373,376,377,378,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[42,42,-60,-62,-63,-64,-65,-66,42,42,-67,-68,-52,-310,-310,-310,-116,-93,42,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,42,-84,42,42,42,-89,-90,-91,-92,-83,-69,42,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-308,-132,-133,42,-70,42,-85,42,42,42,-30,-167,42,42,42,-135,-142,-309,42,-145,-146,-130,42,42,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,42,-71,-35,-36,42,42,-154,42,-137,42,-139,-134,-143,-128,-129,-131,-202,-201,42,-218,42,-78,-80,42,-214,-215,-217,42,42,42,-31,-34,42,42,-155,-156,-136,-138,-144,-203,-205,42,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'UNSIGNED':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,211,228,229,230,239,247,250,251,252,253,254,255,257,258,264,265,266,272,274,278,283,284,285,287,288,290,329,333,336,352,353,362,371,372,373,376,377,378,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[43,43,-60,-62,-63,-64,-65,-66,43,43,-67,-68,-52,-310,-310,-310,-116,-93,43,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,43,-84,43,43,43,-89,-90,-91,-92,-83,-69,43,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-308,-132,-133,43,-70,43,-85,43,43,43,-30,-167,43,43,43,-135,-142,-309,43,-145,-146,-130,43,
43,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,43,-71,-35,-36,43,43,-154,43,-137,43,-139,-134,-143,-128,-129,-131,-202,-201,43,-218,43,-78,-80,43,-214,-215,-217,43,43,43,-31,-34,43,43,-155,-156,-136,-138,-144,-203,-205,43,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'__INT128':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,211,228,229,230,239,247,250,251,252,253,254,255,257,258,264,265,266,272,274,278,283,284,285,287,288,290,329,333,336,352,353,362,371,372,373,376,377,378,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[44,44,-60,-62,-63,-64,-65,-66,44,44,-67,-68,-52,-310,-310,-310,-116,-93,44,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,44,-84,44,44,44,-89,-90,-91,-92,-83,-69,44,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-308,-132,-133,44,-70,44,-85,44,44,44,-30,-167,44,44,44,-135,-142,-309,44,-145,-146,-130,44,44,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,44,-71,-35,-36,44,44,-154,44,-137,44,-139,-134,-143,-128,-129,-131,-202,-201,44,-218,44,-78,-80,44,-214,-215,-217,44,44,44,-31,-34,44,44,-155,-156,-136,-138,-144,-203,-205,44,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'CONST':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,23,25,26,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,62,63,64,65,66,77,78,79,80,81,87,96,97,98,99,101,102,103,104,105,106,107,114,115,117,118,126,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,211,218,219,228,229,230,239,240,247,250,251,252,253,254,255,257,258,264,265,266,272,274,278,283,284,285,287,288,290,329,333,33
6,352,353,361,362,371,372,373,376,377,378,382,385,391,396,399,400,443,444,446,447,459,492,493,495,496,520,522,528,530,],[45,45,-60,-62,-63,-64,-65,-66,45,45,-67,-68,-52,45,45,45,-116,-93,-29,-97,45,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,45,-84,45,45,-89,-90,-91,-92,-83,-69,45,45,-53,-94,45,-166,-152,-153,-308,-132,-133,45,-70,45,-85,45,45,45,45,45,-30,-167,45,45,45,-135,-142,-309,45,-145,-146,-130,45,45,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,45,-71,45,45,-35,-36,45,45,45,-154,45,-137,45,-139,-134,-143,-128,-129,-131,-202,-201,45,-218,45,-78,-80,45,-214,-215,-217,45,45,45,-31,-34,45,45,45,-155,-156,-136,-138,-144,-203,-205,45,-79,-213,-216,-32,-33,45,45,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'RESTRICT':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,23,25,26,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,62,63,64,65,66,77,78,79,80,81,87,96,97,98,99,101,102,103,104,105,106,107,114,115,117,118,126,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,211,218,219,228,229,230,239,240,247,250,251,252,253,254,255,257,258,264,265,266,272,274,278,283,284,285,287,288,290,329,333,336,352,353,361,362,371,372,373,376,377,378,382,385,391,396,399,400,443,444,446,447,459,492,493,495,496,520,522,528,530,],[46,46,-60,-62,-63,-64,-65,-66,46,46,-67,-68,-52,46,46,46,-116,-93,-29,-97,46,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,46,-84,46,46,-89,-90,-91,-92,-83,-69,46,46,-53,-94,46,-166,-152,-153,-308,-132,-133,46,-70,46,-85,46,46,46,46,46,-30,-167,46,46,46,-135,-142,-309,46,-145,-146,-130,46,46,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,46,-71,46,46,-35,-36,46,46,46,-154,46,-137,46,-139,-134,-143,-128,-129,-131,-202,-201,46,-218,46,-78,-80,46,-214,-215,-217,46,46,46,-31,-34,46,46,46,-155,-156,-136,-138,-144,-203,-205,46,-79,-213,-216,-32,
-33,46,46,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'VOLATILE':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,23,25,26,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,62,63,64,65,66,77,78,79,80,81,87,96,97,98,99,101,102,103,104,105,106,107,114,115,117,118,126,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,211,218,219,228,229,230,239,240,247,250,251,252,253,254,255,257,258,264,265,266,272,274,278,283,284,285,287,288,290,329,333,336,352,353,361,362,371,372,373,376,377,378,382,385,391,396,399,400,443,444,446,447,459,492,493,495,496,520,522,528,530,],[47,47,-60,-62,-63,-64,-65,-66,47,47,-67,-68,-52,47,47,47,-116,-93,-29,-97,47,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,47,-84,47,47,-89,-90,-91,-92,-83,-69,47,47,-53,-94,47,-166,-152,-153,-308,-132,-133,47,-70,47,-85,47,47,47,47,47,-30,-167,47,47,47,-135,-142,-309,47,-145,-146,-130,47,47,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,47,-71,47,47,-35,-36,47,47,47,-154,47,-137,47,-139,-134,-143,-128,-129,-131,-202,-201,47,-218,47,-78,-80,47,-214,-215,-217,47,47,47,-31,-34,47,47,47,-155,-156,-136,-138,-144,-203,-205,47,-79,-213,-216,-32,-33,47,47,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'AUTO':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,62,63,64,65,66,77,78,80,81,87,98,99,101,102,103,105,106,107,115,126,131,142,144,152,153,154,155,156,157,158,159,160,161,162,211,228,229,230,239,247,251,253,254,265,266,274,283,284,285,287,288,290,352,353,362,371,372,373,376,377,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[48,48,-60,-62,-63,-64,-65,-66,48,48,-67,-68,-52,48,48,48,-116,-93,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,48,-84,48,48,-89,-90,-91,-92,-83,-69,48,-
53,-94,-152,-153,-308,-132,-133,-70,48,-85,48,48,-30,-135,-309,48,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-71,-35,-36,48,48,-154,-137,-139,-134,-202,-201,-218,-78,-80,48,-214,-215,-217,-31,-34,48,48,-155,-156,-136,-138,-203,-205,48,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'REGISTER':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,62,63,64,65,66,77,78,80,81,87,98,99,101,102,103,105,106,107,115,126,131,142,144,152,153,154,155,156,157,158,159,160,161,162,211,228,229,230,239,247,251,253,254,265,266,274,283,284,285,287,288,290,352,353,362,371,372,373,376,377,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[49,49,-60,-62,-63,-64,-65,-66,49,49,-67,-68,-52,49,49,49,-116,-93,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,49,-84,49,49,-89,-90,-91,-92,-83,-69,49,-53,-94,-152,-153,-308,-132,-133,-70,49,-85,49,49,-30,-135,-309,49,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-71,-35,-36,49,49,-154,-137,-139,-134,-202,-201,-218,-78,-80,49,-214,-215,-217,-31,-34,49,49,-155,-156,-136,-138,-203,-205,49,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'STATIC':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,62,63,64,65,66,77,78,79,80,81,87,97,98,99,101,102,103,105,106,107,114,115,118,126,131,133,142,144,152,153,154,155,156,157,158,159,160,161,162,211,219,228,229,230,239,247,251,253,254,265,266,274,283,284,285,287,288,290,352,353,361,362,371,372,373,376,377,382,385,391,396,399,400,443,444,447,459,492,493,495,496,520,522,528,530,],[25,25,-60,-62,-63,-64,-65,-66,25,25,-67,-68,-52,25,25,25,-116,-93,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,25,-84,25,25,-89,-90,-91,-92,-83,-69,11
7,25,-53,-94,-166,-152,-153,-308,-132,-133,-70,25,-85,218,25,227,25,-30,-167,-135,-309,25,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-71,349,-35,-36,25,25,-154,-137,-139,-134,-202,-201,-218,-78,-80,25,-214,-215,-217,-31,-34,446,25,25,-155,-156,-136,-138,-203,-205,25,-79,-213,-216,-32,-33,485,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'EXTERN':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,62,63,64,65,66,77,78,80,81,87,98,99,101,102,103,105,106,107,115,126,131,142,144,152,153,154,155,156,157,158,159,160,161,162,211,228,229,230,239,247,251,253,254,265,266,274,283,284,285,287,288,290,352,353,362,371,372,373,376,377,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[50,50,-60,-62,-63,-64,-65,-66,50,50,-67,-68,-52,50,50,50,-116,-93,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,50,-84,50,50,-89,-90,-91,-92,-83,-69,50,-53,-94,-152,-153,-308,-132,-133,-70,50,-85,50,50,-30,-135,-309,50,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-71,-35,-36,50,50,-154,-137,-139,-134,-202,-201,-218,-78,-80,50,-214,-215,-217,-31,-34,50,50,-155,-156,-136,-138,-203,-205,50,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'TYPEDEF':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,62,63,64,65,66,77,78,80,81,87,98,99,101,102,103,105,106,107,115,126,131,142,144,152,153,154,155,156,157,158,159,160,161,162,211,228,229,230,239,247,251,253,254,265,266,274,283,284,285,287,288,290,352,353,362,371,372,373,376,377,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[51,51,-60,-62,-63,-64,-65,-66,51,51,-67,-68,-52,51,51,51,-116,-93,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,51,-84,51,51,-89,-90,-91,-92,-83,-
69,51,-53,-94,-152,-153,-308,-132,-133,-70,51,-85,51,51,-30,-135,-309,51,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-71,-35,-36,51,51,-154,-137,-139,-134,-202,-201,-218,-78,-80,51,-214,-215,-217,-31,-34,51,51,-155,-156,-136,-138,-203,-205,51,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'INLINE':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,62,63,64,65,66,77,78,80,81,87,98,99,101,102,103,105,106,107,115,126,131,142,144,152,153,154,155,156,157,158,159,160,161,162,211,228,229,230,239,247,251,253,254,265,266,274,283,284,285,287,288,290,352,353,362,371,372,373,376,377,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[52,52,-60,-62,-63,-64,-65,-66,52,52,-67,-68,-52,52,52,52,-116,-93,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,52,-84,52,52,-89,-90,-91,-92,-83,-69,52,-53,-94,-152,-153,-308,-132,-133,-70,52,-85,52,52,-30,-135,-309,52,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-71,-35,-36,52,52,-154,-137,-139,-134,-202,-201,-218,-78,-80,52,-214,-215,-217,-31,-34,52,52,-155,-156,-136,-138,-203,-205,52,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'STRUCT':([0,2,4,5,6,7,8,9,10,13,14,15,17,18,19,22,23,25,45,46,47,48,49,50,51,52,55,58,59,61,62,77,78,80,81,82,83,84,85,86,97,101,104,105,106,107,115,128,131,133,139,140,141,143,144,146,147,149,152,153,154,155,156,157,158,159,160,161,162,168,211,228,229,230,239,250,252,255,265,266,272,274,283,284,285,287,288,290,329,333,336,352,353,362,371,378,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[53,53,-60,-62,-63,-64,-65,-66,53,-67,-68,-52,-310,-310,-310,53,-29,-97,-117,-118,-119,-95,-96,-98,-99,-100,-61,53,-84,53,53,-83,-69,53,-53,-86,-9,-10,-87,-88,-166,-308,53,-70,53,-85,53,53,-30,-167,53,53,53,-142,-309,-145,-146,53,53,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,53,-71,-35,-
36,53,53,53,53,-143,-202,-201,53,-218,-78,-80,53,-214,-215,-217,53,53,53,-31,-34,53,53,-144,-203,-205,53,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'UNION':([0,2,4,5,6,7,8,9,10,13,14,15,17,18,19,22,23,25,45,46,47,48,49,50,51,52,55,58,59,61,62,77,78,80,81,82,83,84,85,86,97,101,104,105,106,107,115,128,131,133,139,140,141,143,144,146,147,149,152,153,154,155,156,157,158,159,160,161,162,168,211,228,229,230,239,250,252,255,265,266,272,274,283,284,285,287,288,290,329,333,336,352,353,362,371,378,382,385,391,396,399,400,443,444,459,492,493,495,496,520,522,528,530,],[54,54,-60,-62,-63,-64,-65,-66,54,-67,-68,-52,-310,-310,-310,54,-29,-97,-117,-118,-119,-95,-96,-98,-99,-100,-61,54,-84,54,54,-83,-69,54,-53,-86,-9,-10,-87,-88,-166,-308,54,-70,54,-85,54,54,-30,-167,54,54,54,-142,-309,-145,-146,54,54,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,54,-71,-35,-36,54,54,54,54,-143,-202,-201,54,-218,-78,-80,54,-214,-215,-217,54,54,54,-31,-34,54,54,-144,-203,-205,54,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'LBRACE':([10,14,15,23,31,32,53,54,56,57,58,59,62,77,78,81,98,99,101,102,103,106,107,109,113,130,131,144,152,153,154,155,156,157,158,159,160,161,162,172,216,228,229,265,266,268,273,274,283,284,287,288,290,339,340,341,352,353,382,383,385,387,396,399,400,433,435,443,444,459,460,461,462,464,465,473,474,477,478,492,493,495,496,509,511,520,522,524,527,528,530,],[-310,-68,-52,-29,101,101,-140,-141,101,-7,-8,-84,-310,-83,-69,-53,101,101,-308,101,101,101,-85,101,101,101,-30,-309,101,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,101,-310,-35,-36,-202,-201,101,101,-218,101,-80,-214,-215,-217,-11,101,-12,-31,-34,-203,101,-205,101,-79,-213,-216,-310,-182,-32,-33,-204,101,101,-310,101,101,101,101,101,-11,-206,-80,-208,-209,101,-310,-207,-210,101,101,-212,-211,]),'RBRACE':([14,77,78,101,104,106,127,136,137,138,139,140,141,143,144,146,147,150,151,152,153,154,155,156,157,158,159,160,161,162,179,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,
206,207,208,209,210,215,216,245,246,248,250,252,255,265,266,270,271,274,283,284,287,288,290,326,327,328,330,331,332,334,335,337,338,339,374,375,378,382,385,388,396,399,400,401,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,432,433,434,459,463,470,471,474,476,492,493,494,495,496,500,504,510,511,515,520,521,522,528,530,],[-68,-83,-69,-308,144,-310,-294,144,-157,-160,144,144,144,-142,-309,-145,-146,144,-5,-6,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-222,-257,-236,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-177,-310,144,144,-158,144,144,-143,-202,-201,-235,-257,-218,-78,-80,-214,-215,-217,-279,-280,-260,-261,-262,-263,-305,-307,144,-22,-21,-159,-161,-144,-203,-205,-287,-79,-213,-216,-223,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,-253,-254,-255,-256,-274,-275,-276,-277,-278,-178,144,-180,-204,-258,-272,-273,-264,-179,-206,-80,144,-208,-209,-237,-181,-281,144,-288,-207,-282,-210,-212,-211,]),'CASE':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,265,266,268,273,274,283,284,287,288,290,382,383,385,396,399,400,459,461,464,465,492,493,495,496,509,520,522,524,527,528,530,],[-68,-83,-69,-308,164,-309,164,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,164,-202,-201,164,164,-218,164,-80,-214,-215,-217,-203,164,-205,-79,-213,-216,-204,164,164,164,-206,-80,-208,-209,164,-207,-210,164,164,-212,-211,]),'DEFAULT':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,265,266,268,273,274,283,284,287,288,290,382,383,385,396,399,400,459,461,464,465,492,493,495,496,509,520,522,524,527,528,530,],[-68,-83,-69,-308,165,-309,165,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,165,-202,-201,165,165,-218,165,-80,-214,-215,-217,-203,165,-205,-79,-213,-216,-204,165,165,165,-206,-80,-208,-209,165,-207,-210,165,165,-212,-211,]),'IF':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,265,266,268,273,274,283,284,287,288,290,382,38
3,385,396,399,400,459,461,464,465,492,493,495,496,509,520,522,524,527,528,530,],[-68,-83,-69,-308,167,-309,167,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,167,-202,-201,167,167,-218,167,-80,-214,-215,-217,-203,167,-205,-79,-213,-216,-204,167,167,167,-206,-80,-208,-209,167,-207,-210,167,167,-212,-211,]),'SWITCH':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,265,266,268,273,274,283,284,287,288,290,382,383,385,396,399,400,459,461,464,465,492,493,495,496,509,520,522,524,527,528,530,],[-68,-83,-69,-308,170,-309,170,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,170,-202,-201,170,170,-218,170,-80,-214,-215,-217,-203,170,-205,-79,-213,-216,-204,170,170,170,-206,-80,-208,-209,170,-207,-210,170,170,-212,-211,]),'WHILE':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,265,266,268,273,274,282,283,284,287,288,290,382,383,385,396,399,400,459,461,464,465,492,493,495,496,509,520,522,524,527,528,530,],[-68,-83,-69,-308,171,-309,171,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,171,-202,-201,171,171,-218,395,171,-80,-214,-215,-217,-203,171,-205,-79,-213,-216,-204,171,171,171,-206,-80,-208,-209,171,-207,-210,171,171,-212,-211,]),'DO':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,265,266,268,273,274,283,284,287,288,290,382,383,385,396,399,400,459,461,464,465,492,493,495,496,509,520,522,524,527,528,530,],[-68,-83,-69,-308,172,-309,172,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,172,-202,-201,172,172,-218,172,-80,-214,-215,-217,-203,172,-205,-79,-213,-216,-204,172,172,172,-206,-80,-208,-209,172,-207,-210,172,172,-212,-211,]),'FOR':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,265,266,268,273,274,283,284,287,288,290,382,383,385,396,399,400,459,461,464,465,492,493,495,496,509,520,522,524,527,528,530,],[-68,-83,-69,-308,173,-309,173,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,173,-202,-201,173,173,-218,173,-80,-214,-215,-217,-203,173,-205,-79,-213,-216,-204,173,173,173,-206,-80,-208,-209,173,-207,-210,17
3,173,-212,-211,]),'GOTO':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,265,266,268,273,274,283,284,287,288,290,382,383,385,396,399,400,459,461,464,465,492,493,495,496,509,520,522,524,527,528,530,],[-68,-83,-69,-308,174,-309,174,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,174,-202,-201,174,174,-218,174,-80,-214,-215,-217,-203,174,-205,-79,-213,-216,-204,174,174,174,-206,-80,-208,-209,174,-207,-210,174,174,-212,-211,]),'BREAK':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,265,266,268,273,274,283,284,287,288,290,382,383,385,396,399,400,459,461,464,465,492,493,495,496,509,520,522,524,527,528,530,],[-68,-83,-69,-308,175,-309,175,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,175,-202,-201,175,175,-218,175,-80,-214,-215,-217,-203,175,-205,-79,-213,-216,-204,175,175,175,-206,-80,-208,-209,175,-207,-210,175,175,-212,-211,]),'CONTINUE':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,265,266,268,273,274,283,284,287,288,290,382,383,385,396,399,400,459,461,464,465,492,493,495,496,509,520,522,524,527,528,530,],[-68,-83,-69,-308,176,-309,176,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,176,-202,-201,176,176,-218,176,-80,-214,-215,-217,-203,176,-205,-79,-213,-216,-204,176,176,176,-206,-80,-208,-209,176,-207,-210,176,176,-212,-211,]),'RETURN':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,265,266,268,273,274,283,284,287,288,290,382,383,385,396,399,400,459,461,464,465,492,493,495,496,509,520,522,524,527,528,530,],[-68,-83,-69,-308,177,-309,177,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,177,-202,-201,177,177,-218,177,-80,-214,-215,-217,-203,177,-205,-79,-213,-216,-204,177,177,177,-206,-80,-208,-209,177,-207,-210,177,177,-212,-211,]),'PLUSPLUS':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,127,130,133,144,152,153,154,155,156,157,158,159,160,161,162,163,164,168,172,177,182,183,184,185,187,188,189,190,191,192,193,194,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,216,217
,218,219,223,226,227,240,249,263,265,266,268,272,273,274,275,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,326,327,329,333,334,335,339,340,341,344,347,348,349,361,368,370,380,382,383,385,387,388,396,398,399,400,423,425,426,427,428,433,435,445,446,447,455,459,460,461,462,464,465,466,467,469,470,471,472,477,478,483,484,485,492,493,495,496,499,509,510,511,513,515,517,520,521,522,524,527,528,530,],[-68,-117,-118,-119,-83,-69,-310,-27,-28,-166,-308,183,183,-310,183,-310,-28,-294,183,-167,-309,183,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-294,183,183,183,183,326,183,183,183,183,-266,-267,-268,-265,-271,-269,-270,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-310,183,-310,-28,-266,183,183,-310,183,183,-202,-201,183,183,183,-218,183,183,183,183,183,-80,183,-214,-215,-217,183,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,-279,-280,183,183,-305,-307,-11,183,-12,183,-266,183,183,-310,183,183,183,-203,183,-205,183,-287,-79,183,-213,-216,-274,-275,-276,-277,-278,-310,-182,183,-310,-28,-266,-204,183,183,-310,183,183,183,183,183,-272,-273,183,183,-11,-266,183,183,-206,-80,-208,-209,183,183,-281,-310,183,-288,183,-207,-282,-210,183,183,-212,-211,]),'MINUSMINUS':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,127,130,133,144,152,153,154,155,156,157,158,159,160,161,162,163,164,168,172,177,182,183,184,185,187,188,189,190,191,192,193,194,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,216,217,218,219,223,226,227,240,249,263,265,266,268,272,273,274,275,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,326,327,329,333,334,335,339,340,341,344,347,348,349,361,368,370,380,382,383,385,387,388,396,398,399,400,42
3,425,426,427,428,433,435,445,446,447,455,459,460,461,462,464,465,466,467,469,470,471,472,477,478,483,484,485,492,493,495,496,499,509,510,511,513,515,517,520,521,522,524,527,528,530,],[-68,-117,-118,-119,-83,-69,-310,-27,-28,-166,-308,184,184,-310,184,-310,-28,-294,184,-167,-309,184,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-294,184,184,184,184,327,184,184,184,184,-266,-267,-268,-265,-271,-269,-270,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-310,184,-310,-28,-266,184,184,-310,184,184,-202,-201,184,184,184,-218,184,184,184,184,184,-80,184,-214,-215,-217,184,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,-279,-280,184,184,-305,-307,-11,184,-12,184,-266,184,184,-310,184,184,184,-203,184,-205,184,-287,-79,184,-213,-216,-274,-275,-276,-277,-278,-310,-182,184,-310,-28,-266,-204,184,184,-310,184,184,184,184,184,-272,-273,184,184,-11,-266,184,184,-206,-80,-208,-209,184,184,-281,-310,184,-288,184,-207,-282,-210,184,184,-212,-211,]),'SIZEOF':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,216,217,218,219,223,226,227,240,249,263,265,266,268,272,273,274,275,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,339,340,341,344,347,348,349,361,368,370,380,382,383,385,387,396,398,399,400,433,435,445,446,447,455,459,460,461,462,464,465,466,467,469,472,477,478,483,484,485,492,493,495,496,499,509,511,513,517,520,522,524,527,528,530,],[-68,-117,-118,-119,-83,-69,-310,-27,-28,-166,-308,187,187,-310,187,-310,-28,187,-167,-309,187,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,187,187,187,187,187,187,187,187,-266,-267,-268,-265,-269,-270,-310,187,-310,-28,-266,187,187,-310,187,187,-202,-201,187,187,187,-218,187,187,187,187,187,
-80,187,-214,-215,-217,187,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,-11,187,-12,187,-266,187,187,-310,187,187,187,-203,187,-205,187,-79,187,-213,-216,-310,-182,187,-310,-28,-266,-204,187,187,-310,187,187,187,187,187,187,187,-11,-266,187,187,-206,-80,-208,-209,187,187,-310,187,187,-207,-210,187,187,-212,-211,]),'AND':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,127,130,133,144,152,153,154,155,156,157,158,159,160,161,162,163,164,168,172,177,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,216,217,218,219,223,226,227,240,249,263,265,266,268,271,272,273,274,275,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,326,327,328,329,330,331,332,333,334,335,339,340,341,344,347,348,349,361,368,370,380,382,383,385,387,388,396,398,399,400,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,433,435,445,446,447,455,459,460,461,462,463,464,465,466,467,469,470,471,472,474,477,478,483,484,485,492,493,495,496,499,509,510,511,513,515,517,520,521,522,524,527,528,530,],[-68,-117,-118,-119,-83,-69,-310,-27,-28,-166,-308,191,191,-310,191,-310,-28,-294,191,-167,-309,191,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-294,191,191,191,191,-257,317,-259,191,191,191,-238,191,-266,-267,-268,-265,-271,-269,-270,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-310,191,-310,-28,-266,191,191,-310,191,191,-202,-201,191,-257,191,191,-218,191,191,191,191,191,-80,191,-214,-215,-217,191,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,-279,-280,-260,191,-261,-262,-263,191,-305,-307,-11,191,-12,191,-266,191,191,-310,191,191,191,-203,191,-205,191,-287,-7
9,191,-213,-216,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,317,317,317,317,-274,-275,-276,-277,-278,-310,-182,191,-310,-28,-266,-204,191,191,-310,-258,191,191,191,191,191,-272,-273,191,-264,191,-11,-266,191,191,-206,-80,-208,-209,191,191,-281,-310,191,-288,191,-207,-282,-210,191,191,-212,-211,]),'PLUS':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,127,130,133,144,152,153,154,155,156,157,158,159,160,161,162,163,164,168,172,177,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,216,217,218,219,223,226,227,240,249,263,265,266,268,271,272,273,274,275,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,326,327,328,329,330,331,332,333,334,335,339,340,341,344,347,348,349,361,368,370,380,382,383,385,387,388,396,398,399,400,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,433,435,445,446,447,455,459,460,461,462,463,464,465,466,467,469,470,471,472,474,477,478,483,484,485,492,493,495,496,499,509,510,511,513,515,517,520,521,522,524,527,528,530,],[-68,-117,-118,-119,-83,-69,-310,-27,-28,-166,-308,189,189,-310,189,-310,-28,-294,189,-167,-309,189,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-294,189,189,189,189,-257,307,-259,189,189,189,-238,189,-266,-267,-268,-265,-271,-269,-270,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-310,189,-310,-28,-266,189,189,-310,189,189,-202,-201,189,-257,189,189,-218,189,189,189,189,189,-80,189,-214,-215,-217,189,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,-279,-280,-260,189,-261,-262,-263,189,-305,-307,-11,189,-12,189,-266,189,189,-310,189,189,189,-203,189,-205,189,-287,-79,189,-213,-216,-239,-240,-241,-242,-243,307,307,307,307,307,307,307,307,307,307,307,307,307,
-274,-275,-276,-277,-278,-310,-182,189,-310,-28,-266,-204,189,189,-310,-258,189,189,189,189,189,-272,-273,189,-264,189,-11,-266,189,189,-206,-80,-208,-209,189,189,-281,-310,189,-288,189,-207,-282,-210,189,189,-212,-211,]),'MINUS':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,127,130,133,144,152,153,154,155,156,157,158,159,160,161,162,163,164,168,172,177,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,216,217,218,219,223,226,227,240,249,263,265,266,268,271,272,273,274,275,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,326,327,328,329,330,331,332,333,334,335,339,340,341,344,347,348,349,361,368,370,380,382,383,385,387,388,396,398,399,400,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,433,435,445,446,447,455,459,460,461,462,463,464,465,466,467,469,470,471,472,474,477,478,483,484,485,492,493,495,496,499,509,510,511,513,515,517,520,521,522,524,527,528,530,],[-68,-117,-118,-119,-83,-69,-310,-27,-28,-166,-308,190,190,-310,190,-310,-28,-294,190,-167,-309,190,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-294,190,190,190,190,-257,308,-259,190,190,190,-238,190,-266,-267,-268,-265,-271,-269,-270,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-310,190,-310,-28,-266,190,190,-310,190,190,-202,-201,190,-257,190,190,-218,190,190,190,190,190,-80,190,-214,-215,-217,190,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,190,190,190,190,190,190,190,190,190,190,190,190,190,190,190,190,190,190,190,190,190,-279,-280,-260,190,-261,-262,-263,190,-305,-307,-11,190,-12,190,-266,190,190,-310,190,190,190,-203,190,-205,190,-287,-79,190,-213,-216,-239,-240,-241,-242,-243,308,308,308,308,308,308,308,308,308,308,308,308,308,-274,-275,-276,-277,-278,-310,-182,190,-310,-28,-266,-204,190,190,-310,-258,190,190,190,190,190,-272,
-273,190,-264,190,-11,-266,190,190,-206,-80,-208,-209,190,190,-281,-310,190,-288,190,-207,-282,-210,190,190,-212,-211,]),'NOT':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,216,217,218,219,223,226,227,240,249,263,265,266,268,272,273,274,275,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,339,340,341,344,347,348,349,361,368,370,380,382,383,385,387,396,398,399,400,433,435,445,446,447,455,459,460,461,462,464,465,466,467,469,472,477,478,483,484,485,492,493,495,496,499,509,511,513,517,520,522,524,527,528,530,],[-68,-117,-118,-119,-83,-69,-310,-27,-28,-166,-308,193,193,-310,193,-310,-28,193,-167,-309,193,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,193,193,193,193,193,193,193,193,-266,-267,-268,-265,-269,-270,-310,193,-310,-28,-266,193,193,-310,193,193,-202,-201,193,193,193,-218,193,193,193,193,193,-80,193,-214,-215,-217,193,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,193,193,193,193,193,193,193,193,193,193,193,193,193,193,193,193,193,193,193,193,193,193,193,-11,193,-12,193,-266,193,193,-310,193,193,193,-203,193,-205,193,-79,193,-213,-216,-310,-182,193,-310,-28,-266,-204,193,193,-310,193,193,193,193,193,193,193,-11,-266,193,193,-206,-80,-208,-209,193,193,-310,193,193,-207,-210,193,193,-212,-211,]),'LNOT':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,216,217,218,219,223,226,227,240,249,263,265,266,268,272,273,274,275,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,339,340,341,344,347,348,349,361,368,370,380,382,383,385,387,396,398,399,400,433,435,445,446,447,455,459
,460,461,462,464,465,466,467,469,472,477,478,483,484,485,492,493,495,496,499,509,511,513,517,520,522,524,527,528,530,],[-68,-117,-118,-119,-83,-69,-310,-27,-28,-166,-308,194,194,-310,194,-310,-28,194,-167,-309,194,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,194,194,194,194,194,194,194,194,-266,-267,-268,-265,-269,-270,-310,194,-310,-28,-266,194,194,-310,194,194,-202,-201,194,194,194,-218,194,194,194,194,194,-80,194,-214,-215,-217,194,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,194,194,194,194,194,194,194,194,194,194,194,194,194,194,194,194,194,194,194,194,194,194,194,-11,194,-12,194,-266,194,194,-310,194,194,194,-203,194,-205,194,-79,194,-213,-216,-310,-182,194,-310,-28,-266,-204,194,194,-310,194,194,194,194,194,194,194,-11,-266,194,194,-206,-80,-208,-209,194,194,-310,194,194,-207,-210,194,194,-212,-211,]),'OFFSETOF':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,216,217,218,219,223,226,227,240,249,263,265,266,268,272,273,274,275,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,339,340,341,344,347,348,349,361,368,370,380,382,383,385,387,396,398,399,400,433,435,445,446,447,455,459,460,461,462,464,465,466,467,469,472,477,478,483,484,485,492,493,495,496,499,509,511,513,517,520,522,524,527,528,530,],[-68,-117,-118,-119,-83,-69,-310,-27,-28,-166,-308,199,199,-310,199,-310,-28,199,-167,-309,199,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,199,199,199,199,199,199,199,199,-266,-267,-268,-265,-269,-270,-310,199,-310,-28,-266,199,199,-310,199,199,-202,-201,199,199,199,-218,199,199,199,199,199,-80,199,-214,-215,-217,199,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,199,199,199,199,199,199,199,199,199,199,199,199,199,199,199,199,199,199,199,199,199,199,199,-11,199,-12,199,-266,199,199,-310,199,199,199,-203,199,-20
5,199,-79,199,-213,-216,-310,-182,199,-310,-28,-266,-204,199,199,-310,199,199,199,199,199,199,199,-11,-266,199,199,-206,-80,-208,-209,199,199,-310,199,199,-207,-210,199,199,-212,-211,]),'INT_CONST_DEC':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,216,217,218,219,223,226,227,240,249,263,265,266,268,272,273,274,275,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,339,340,341,344,347,348,349,361,368,370,380,382,383,385,387,396,398,399,400,433,435,445,446,447,455,459,460,461,462,464,465,466,467,469,472,477,478,483,484,485,492,493,495,496,499,509,511,513,517,520,522,524,527,528,530,],[-68,-117,-118,-119,-83,-69,-310,-27,-28,-166,-308,200,200,-310,200,-310,-28,200,-167,-309,200,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,200,200,200,200,200,200,200,200,-266,-267,-268,-265,-269,-270,-310,200,-310,-28,-266,200,200,-310,200,200,-202,-201,200,200,200,-218,200,200,200,200,200,-80,200,-214,-215,-217,200,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,200,200,200,200,200,200,200,200,200,200,200,200,200,200,200,200,200,200,200,200,200,200,200,-11,200,-12,200,-266,200,200,-310,200,200,200,-203,200,-205,200,-79,200,-213,-216,-310,-182,200,-310,-28,-266,-204,200,200,-310,200,200,200,200,200,200,200,-11,-266,200,200,-206,-80,-208,-209,200,200,-310,200,200,-207,-210,200,200,-212,-211,]),'INT_CONST_OCT':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,216,217,218,219,223,226,227,240,249,263,265,266,268,272,273,274,275,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,339,340,341,344,347
,348,349,361,368,370,380,382,383,385,387,396,398,399,400,433,435,445,446,447,455,459,460,461,462,464,465,466,467,469,472,477,478,483,484,485,492,493,495,496,499,509,511,513,517,520,522,524,527,528,530,],[-68,-117,-118,-119,-83,-69,-310,-27,-28,-166,-308,201,201,-310,201,-310,-28,201,-167,-309,201,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,201,201,201,201,201,201,201,201,-266,-267,-268,-265,-269,-270,-310,201,-310,-28,-266,201,201,-310,201,201,-202,-201,201,201,201,-218,201,201,201,201,201,-80,201,-214,-215,-217,201,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,201,201,201,201,201,201,201,201,201,201,201,201,201,201,201,201,201,201,201,201,201,201,201,-11,201,-12,201,-266,201,201,-310,201,201,201,-203,201,-205,201,-79,201,-213,-216,-310,-182,201,-310,-28,-266,-204,201,201,-310,201,201,201,201,201,201,201,-11,-266,201,201,-206,-80,-208,-209,201,201,-310,201,201,-207,-210,201,201,-212,-211,]),'INT_CONST_HEX':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,216,217,218,219,223,226,227,240,249,263,265,266,268,272,273,274,275,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,339,340,341,344,347,348,349,361,368,370,380,382,383,385,387,396,398,399,400,433,435,445,446,447,455,459,460,461,462,464,465,466,467,469,472,477,478,483,484,485,492,493,495,496,499,509,511,513,517,520,522,524,527,528,530,],[-68,-117,-118,-119,-83,-69,-310,-27,-28,-166,-308,202,202,-310,202,-310,-28,202,-167,-309,202,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,202,202,202,202,202,202,202,202,-266,-267,-268,-265,-269,-270,-310,202,-310,-28,-266,202,202,-310,202,202,-202,-201,202,202,202,-218,202,202,202,202,202,-80,202,-214,-215,-217,202,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,202,202,202,202,202,202,202,202,202,202,202,202,202,202,202,2
02,202,202,202,202,202,202,202,-11,202,-12,202,-266,202,202,-310,202,202,202,-203,202,-205,202,-79,202,-213,-216,-310,-182,202,-310,-28,-266,-204,202,202,-310,202,202,202,202,202,202,202,-11,-266,202,202,-206,-80,-208,-209,202,202,-310,202,202,-207,-210,202,202,-212,-211,]),'INT_CONST_BIN':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,216,217,218,219,223,226,227,240,249,263,265,266,268,272,273,274,275,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,339,340,341,344,347,348,349,361,368,370,380,382,383,385,387,396,398,399,400,433,435,445,446,447,455,459,460,461,462,464,465,466,467,469,472,477,478,483,484,485,492,493,495,496,499,509,511,513,517,520,522,524,527,528,530,],[-68,-117,-118,-119,-83,-69,-310,-27,-28,-166,-308,203,203,-310,203,-310,-28,203,-167,-309,203,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,203,203,203,203,203,203,203,203,-266,-267,-268,-265,-269,-270,-310,203,-310,-28,-266,203,203,-310,203,203,-202,-201,203,203,203,-218,203,203,203,203,203,-80,203,-214,-215,-217,203,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,203,203,203,203,203,203,203,203,203,203,203,203,203,203,203,203,203,203,203,203,203,203,203,-11,203,-12,203,-266,203,203,-310,203,203,203,-203,203,-205,203,-79,203,-213,-216,-310,-182,203,-310,-28,-266,-204,203,203,-310,203,203,203,203,203,203,203,-11,-266,203,203,-206,-80,-208,-209,203,203,-310,203,203,-207,-210,203,203,-212,-211,]),'INT_CONST_CHAR':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,216,217,218,219,223,226,227,240,249,263,265,266,268,272,273,274,275,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,3
08,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,339,340,341,344,347,348,349,361,368,370,380,382,383,385,387,396,398,399,400,433,435,445,446,447,455,459,460,461,462,464,465,466,467,469,472,477,478,483,484,485,492,493,495,496,499,509,511,513,517,520,522,524,527,528,530,],[-68,-117,-118,-119,-83,-69,-310,-27,-28,-166,-308,204,204,-310,204,-310,-28,204,-167,-309,204,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,204,204,204,204,204,204,204,204,-266,-267,-268,-265,-269,-270,-310,204,-310,-28,-266,204,204,-310,204,204,-202,-201,204,204,204,-218,204,204,204,204,204,-80,204,-214,-215,-217,204,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,204,204,204,204,204,204,204,204,204,204,204,204,204,204,204,204,204,204,204,204,204,204,204,-11,204,-12,204,-266,204,204,-310,204,204,204,-203,204,-205,204,-79,204,-213,-216,-310,-182,204,-310,-28,-266,-204,204,204,-310,204,204,204,204,204,204,204,-11,-266,204,204,-206,-80,-208,-209,204,204,-310,204,204,-207,-210,204,204,-212,-211,]),'FLOAT_CONST':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,216,217,218,219,223,226,227,240,249,263,265,266,268,272,273,274,275,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,339,340,341,344,347,348,349,361,368,370,380,382,383,385,387,396,398,399,400,433,435,445,446,447,455,459,460,461,462,464,465,466,467,469,472,477,478,483,484,485,492,493,495,496,499,509,511,513,517,520,522,524,527,528,530,],[-68,-117,-118,-119,-83,-69,-310,-27,-28,-166,-308,205,205,-310,205,-310,-28,205,-167,-309,205,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,205,205,205,205,205,205,205,205,-266,-267,-268,-265,-269,-270,-310,205,-310,-28,-266,205,205,-310,205,205,-202,-201,205,205,205,-218,205,205,205,205,205,-80,205,-214,-215,-217,205,-224,-225,-226,-227,-228,-22
9,-230,-231,-232,-233,-234,205,205,205,205,205,205,205,205,205,205,205,205,205,205,205,205,205,205,205,205,205,205,205,-11,205,-12,205,-266,205,205,-310,205,205,205,-203,205,-205,205,-79,205,-213,-216,-310,-182,205,-310,-28,-266,-204,205,205,-310,205,205,205,205,205,205,205,-11,-266,205,205,-206,-80,-208,-209,205,205,-310,205,205,-207,-210,205,205,-212,-211,]),'HEX_FLOAT_CONST':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,216,217,218,219,223,226,227,240,249,263,265,266,268,272,273,274,275,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,339,340,341,344,347,348,349,361,368,370,380,382,383,385,387,396,398,399,400,433,435,445,446,447,455,459,460,461,462,464,465,466,467,469,472,477,478,483,484,485,492,493,495,496,499,509,511,513,517,520,522,524,527,528,530,],[-68,-117,-118,-119,-83,-69,-310,-27,-28,-166,-308,206,206,-310,206,-310,-28,206,-167,-309,206,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,206,206,206,206,206,206,206,206,-266,-267,-268,-265,-269,-270,-310,206,-310,-28,-266,206,206,-310,206,206,-202,-201,206,206,206,-218,206,206,206,206,206,-80,206,-214,-215,-217,206,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,206,206,206,206,206,206,206,206,206,206,206,206,206,206,206,206,206,206,206,206,206,206,206,-11,206,-12,206,-266,206,206,-310,206,206,206,-203,206,-205,206,-79,206,-213,-216,-310,-182,206,-310,-28,-266,-204,206,206,-310,206,206,206,206,206,206,206,-11,-266,206,206,-206,-80,-208,-209,206,206,-310,206,206,-207,-210,206,206,-212,-211,]),'CHAR_CONST':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,216,217,218,219,223,226,227,240,249,263,265,266,268,272,273,274,275,279,280,281,283,284
,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,339,340,341,344,347,348,349,361,368,370,380,382,383,385,387,396,398,399,400,433,435,445,446,447,455,459,460,461,462,464,465,466,467,469,472,477,478,483,484,485,492,493,495,496,499,509,511,513,517,520,522,524,527,528,530,],[-68,-117,-118,-119,-83,-69,-310,-27,-28,-166,-308,207,207,-310,207,-310,-28,207,-167,-309,207,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,207,207,207,207,207,207,207,207,-266,-267,-268,-265,-269,-270,-310,207,-310,-28,-266,207,207,-310,207,207,-202,-201,207,207,207,-218,207,207,207,207,207,-80,207,-214,-215,-217,207,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,207,207,207,207,207,207,207,207,207,207,207,207,207,207,207,207,207,207,207,207,207,207,207,-11,207,-12,207,-266,207,207,-310,207,207,207,-203,207,-205,207,-79,207,-213,-216,-310,-182,207,-310,-28,-266,-204,207,207,-310,207,207,207,207,207,207,207,-11,-266,207,207,-206,-80,-208,-209,207,207,-310,207,207,-207,-210,207,207,-212,-211,]),'WCHAR_CONST':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,216,217,218,219,223,226,227,240,249,263,265,266,268,272,273,274,275,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,339,340,341,344,347,348,349,361,368,370,380,382,383,385,387,396,398,399,400,433,435,445,446,447,455,459,460,461,462,464,465,466,467,469,472,477,478,483,484,485,492,493,495,496,499,509,511,513,517,520,522,524,527,528,530,],[-68,-117,-118,-119,-83,-69,-310,-27,-28,-166,-308,208,208,-310,208,-310,-28,208,-167,-309,208,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,208,208,208,208,208,208,208,208,-266,-267,-268,-265,-269,-270,-310,208,-310,-28,-266,208,208,-310,208,208,-202,-201,208,20
8,208,-218,208,208,208,208,208,-80,208,-214,-215,-217,208,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,208,208,208,208,208,208,208,208,208,208,208,208,208,208,208,208,208,208,208,208,208,208,208,-11,208,-12,208,-266,208,208,-310,208,208,208,-203,208,-205,208,-79,208,-213,-216,-310,-182,208,-310,-28,-266,-204,208,208,-310,208,208,208,208,208,208,208,-11,-266,208,208,-206,-80,-208,-209,208,208,-310,208,208,-207,-210,208,208,-212,-211,]),'STRING_LITERAL':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,197,209,216,217,218,219,223,226,227,240,249,263,265,266,268,272,273,274,275,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,334,339,340,341,344,347,348,349,361,368,370,380,382,383,385,387,396,398,399,400,433,435,445,446,447,455,459,460,461,462,464,465,466,467,469,472,477,478,483,484,485,492,493,495,496,499,509,511,513,517,520,522,524,527,528,530,],[-68,-117,-118,-119,-83,-69,-310,-27,-28,-166,-308,209,209,-310,209,-310,-28,209,-167,-309,209,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,209,209,209,209,209,209,209,209,-266,-267,-268,-265,-269,-270,334,-304,-310,209,-310,-28,-266,209,209,-310,209,209,-202,-201,209,209,209,-218,209,209,209,209,209,-80,209,-214,-215,-217,209,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,209,209,209,209,209,209,209,209,209,209,209,209,209,209,209,209,209,209,209,209,209,209,209,-305,-11,209,-12,209,-266,209,209,-310,209,209,209,-203,209,-205,209,-79,209,-213,-216,-310,-182,209,-310,-28,-266,-204,209,209,-310,209,209,209,209,209,209,209,-11,-266,209,209,-206,-80,-208,-209,209,209,-310,209,209,-207,-210,209,209,-212,-211,]),'WSTRING_LITERAL':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185
,187,188,189,190,191,193,194,198,210,216,217,218,219,223,226,227,240,249,263,265,266,268,272,273,274,275,279,280,281,283,284,285,287,288,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,335,339,340,341,344,347,348,349,361,368,370,380,382,383,385,387,396,398,399,400,433,435,445,446,447,455,459,460,461,462,464,465,466,467,469,472,477,478,483,484,485,492,493,495,496,499,509,511,513,517,520,522,524,527,528,530,],[-68,-117,-118,-119,-83,-69,-310,-27,-28,-166,-308,210,210,-310,210,-310,-28,210,-167,-309,210,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,210,210,210,210,210,210,210,210,-266,-267,-268,-265,-269,-270,335,-306,-310,210,-310,-28,-266,210,210,-310,210,210,-202,-201,210,210,210,-218,210,210,210,210,210,-80,210,-214,-215,-217,210,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,210,210,210,210,210,210,210,210,210,210,210,210,210,210,210,210,210,210,210,210,210,210,210,-307,-11,210,-12,210,-266,210,210,-310,210,210,210,-203,210,-205,210,-79,210,-213,-216,-310,-182,210,-310,-28,-266,-204,210,210,-310,210,210,210,210,210,210,210,-11,-266,210,210,-206,-80,-208,-209,210,210,-310,210,210,-207,-210,210,210,-212,-211,]),'ELSE':([14,78,144,156,157,158,159,160,161,162,265,274,283,284,287,288,290,382,385,396,399,400,459,492,493,495,496,520,522,528,530,],[-68,-69,-309,-72,-73,-74,-75,-76,-77,-78,-202,-218,-78,-80,-214,-215,-217,-203,-205,-79,-213,-216,-204,-206,509,-208,-209,-207,-210,-212,-211,]),'PPPRAGMASTR':([14,],[78,]),'EQUALS':([15,23,62,73,74,75,76,81,92,108,110,127,131,138,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,213,228,229,271,326,327,328,330,331,332,334,335,342,343,350,351,352,353,388,423,425,426,427,428,436,438,439,440,443,444,463,470,471,474,479,480,481,510,515,521,],[-52,-29,-162,113,-163,-54,-37,-53,130,-162,-55,-294,-30,249,-309,-294,292,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-38
,-35,-36,-257,-279,-280,-260,-261,-262,-263,-305,-307,435,-183,-43,-44,-31,-34,-287,-274,-275,-276,-277,-278,-184,-186,-39,-42,-32,-33,-258,-272,-273,-264,-185,-40,-41,-281,-288,-282,]),'COMMA':([15,20,21,23,25,26,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,62,63,64,65,66,70,72,73,74,75,76,81,87,90,91,92,94,95,96,97,98,99,102,103,108,110,121,123,124,125,126,127,131,132,133,136,137,138,142,144,148,163,169,178,179,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,212,213,214,215,228,229,232,233,234,235,236,237,238,241,242,243,244,245,246,247,248,251,253,254,257,258,260,261,262,264,270,271,277,278,289,326,327,328,330,331,332,334,335,338,350,351,352,353,357,358,359,360,372,373,374,375,376,377,381,386,388,389,390,392,393,394,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,431,432,434,439,440,443,444,450,451,453,457,458,463,470,471,474,476,480,481,486,487,488,489,490,491,494,497,500,501,504,505,506,510,515,518,519,521,526,],[-52,-116,-93,-29,-97,-310,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-162,-89,-90,-91,-92,111,-120,-122,-163,-54,-37,-53,-94,129,-124,-126,-164,-27,-28,-166,-152,-153,-132,-133,-162,-55,230,231,-170,-175,-310,-294,-30,-165,-167,248,-157,-160,-135,-309,-130,-294,279,-219,-222,-257,-236,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-121,-38,-123,-177,-35,-36,-172,-173,-174,-188,-56,-1,-2,-45,-190,-125,-127,248,248,-154,-158,-137,-139,-134,-128,-129,379,-147,-149,-131,-235,-257,279,-310,279,-279,-280,-260,-261,-262,-263,-305,-307,433,-43,-44,-31,-34,-171,-176,-57,-189,-155,-156,-159,-161,-136,-138,-151,279,-287,-187,-188,-220,279,279,-223,279,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,-253,-254,-255,-256,279,472,-274,-292,-275,-276,-277,-278,475,-178,-180,-39,-42,-32,-33,-191,-197,-195,-148,-1
50,-258,-272,-273,-264,-179,-40,-41,-50,-51,-193,-192,-194,-196,511,279,-237,-293,-181,-46,-49,-281,-288,-47,-48,-282,279,]),'RPAREN':([15,20,21,23,25,26,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,63,64,65,66,75,76,80,81,87,93,94,95,96,97,98,99,102,103,110,112,115,119,120,121,122,123,124,125,126,127,131,132,133,142,144,148,169,178,179,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,213,220,221,228,229,232,233,234,235,236,237,238,239,241,242,247,251,253,254,257,258,264,267,271,276,277,278,323,326,327,328,330,331,332,334,335,350,351,352,353,356,357,358,359,360,362,363,364,365,366,367,371,372,373,376,377,384,386,388,389,390,391,392,393,394,401,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,422,423,424,425,426,427,428,429,430,439,440,443,444,448,449,450,451,453,456,463,470,471,474,480,481,486,487,488,489,490,491,497,499,500,501,502,503,505,506,510,513,514,515,518,519,521,523,525,529,],[-52,-116,-93,-29,-97,-310,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-89,-90,-91,-92,-54,-37,-310,-53,-94,131,-164,-27,-28,-166,-152,-153,-132,-133,-55,213,-310,228,229,-168,-17,-18,-170,-175,-310,-294,-30,-165,-167,-135,-309,-130,-14,-219,-222,-257,-236,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-38,350,351,-35,-36,-172,-173,-174,-188,-56,-1,-2,-310,-45,-190,-154,-137,-139,-134,-128,-129,-131,-13,-257,387,388,-310,423,-279,-280,-260,-261,-262,-263,-305,-307,-43,-44,-31,-34,-169,-171,-176,-57,-189,-310,450,451,-188,-23,-24,-310,-155,-156,-136,-138,460,461,-287,-187,-188,-310,-220,464,465,-223,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,-253,-254,-255,-256,471,-274,-292,-275,-276,-277,-278,473,474,-39,-42,-32,-33,486,487,-191,-197,-195,491,-258,-272,-273,-264,-40,-41,-50,-51,-193,-192,-194,-196,512,-310,-237,-293,515,-289,-46,-49,-281,-310,524,-288,-47,-48,-282,
527,-290,-291,]),'COLON':([15,20,23,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,74,75,76,81,98,99,102,103,108,110,127,131,142,144,145,148,163,165,178,179,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,213,228,229,247,251,253,254,257,258,262,264,269,270,271,326,327,328,330,331,332,334,335,350,351,352,353,372,373,376,377,379,388,392,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,439,440,443,444,463,470,471,474,480,481,500,510,515,521,],[-52,-116,-29,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-163,-54,-37,-53,-152,-153,-132,-133,-162,-55,-294,-30,-135,-309,263,-130,268,273,-219,-222,-257,-236,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-38,-35,-36,-154,-137,-139,-134,-128,-129,380,-131,383,-235,-257,-279,-280,-260,-261,-262,-263,-305,-307,-43,-44,-31,-34,-155,-156,-136,-138,263,-287,-220,-223,469,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,-253,-254,-255,-256,-274,-275,-276,-277,-278,-39,-42,-32,-33,-258,-272,-273,-264,-40,-41,-237,-281,-288,-282,]),'LBRACKET':([15,20,21,23,25,26,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,63,64,65,66,75,76,81,87,94,95,96,97,98,99,101,102,103,110,126,127,131,132,133,142,144,148,163,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,213,216,228,229,235,236,239,241,242,247,251,253,254,257,258,264,278,326,327,334,335,342,343,350,351,352,353,359,360,365,372,373,376,377,388,390,391,423,425,426,427,428,433,436,438,439,440,443,444,450,451,453,462,470,471,479,480,481,486,487,488,489,490,491,502,503,505,506,510,511,515,518,519,521,525,529,],[79,-116,-93,-29,-97,-310,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-89,-90,-91,-92,114,-37,79,-94,-164,-27,-28,-166,-152,-153,-308,-132,-133,114,240,-294,-30,-165,-167,-1
35,-309,-130,-294,322,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-38,344,-35,-36,240,361,240,-45,370,-154,-137,-139,-134,-128,-129,-131,240,-279,-280,-305,-307,344,-183,-43,-44,-31,-34,361,370,240,-155,-156,-136,-138,-287,240,240,-274,-275,-276,-277,-278,344,-184,-186,-39,-42,-32,-33,-191,-197,-195,344,-272,-273,-185,-40,-41,-50,-51,-193,-192,-194,-196,517,-289,-46,-49,-281,344,-288,-47,-48,-282,-290,-291,]),'RBRACKET':([45,46,47,79,95,96,97,114,116,118,127,133,144,178,179,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,217,219,222,223,224,225,240,270,271,326,327,328,330,331,332,334,335,346,347,354,355,361,368,369,370,388,392,401,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,423,425,426,427,428,437,441,442,445,447,452,454,455,463,470,471,474,482,483,500,507,508,510,515,521,526,],[-117,-118,-119,-310,-27,-28,-166,-310,-310,-28,-294,-167,-309,-219,-222,-257,-236,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-310,-28,352,353,-3,-4,-310,-235,-257,-279,-280,-260,-261,-262,-263,-305,-307,439,440,443,444,-310,-310,453,-310,-287,-220,-223,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,-253,-254,-255,-256,470,-274,-275,-276,-277,-278,479,480,481,-310,-28,488,489,490,-258,-272,-273,-264,505,506,-237,518,519,-281,-288,-282,529,]),'PERIOD':([101,127,144,163,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,216,326,327,334,335,342,343,388,423,425,426,427,428,433,436,438,462,470,471,479,502,503,510,511,515,521,525,529,],[-308,-294,-309,-294,324,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,345,-279,-280,-305,-307,345,-183,-287,-274,-275,-276,-277,-278,345,-184,-186,345,-272,-273,-185,516,-289,-281,345,-288,-282,-290,-291,]),'ARROW':([127,144,163,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,326,327,334,335,388,423,425,426,427,428,470,471,510,51
5,521,],[-294,-309,-294,325,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-279,-280,-305,-307,-287,-274,-275,-276,-277,-278,-272,-273,-281,-288,-282,]),'XOREQUAL':([127,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,293,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'TIMESEQUAL':([127,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,294,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'DIVEQUAL':([127,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,295,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'MODEQUAL':([127,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,296,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'PLUSEQUAL':([127,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,423,425,426,427,428,463,470,471,474,510,515,521,],[-29
4,-309,-294,297,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'MINUSEQUAL':([127,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,298,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'LSHIFTEQUAL':([127,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,299,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'RSHIFTEQUAL':([127,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,300,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'ANDEQUAL':([127,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,301,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'OREQUAL':([127,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,423,425,426,427,428,46
3,470,471,474,510,515,521,],[-294,-309,-294,302,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'CONDOP':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,-257,303,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,-253,-254,-255,-256,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'DIVIDE':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,-257,305,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-239,-240,-241,305,305,305,305,305,305,305,305,305,305,305,305,305,305,305,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'MOD':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,-257,306,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-239,-240,-241,306,306,306,306,306,306,306,306,306,306,306,306,306,306,306,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'RSHIFT':([127,144,163,180,181,182,186
,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,-257,309,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-239,-240,-241,-242,-243,-244,-245,309,309,309,309,309,309,309,309,309,309,309,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'LSHIFT':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,-257,310,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-239,-240,-241,-242,-243,-244,-245,310,310,310,310,310,310,310,310,310,310,310,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'LT':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,-257,311,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,311,311,311,311,311,311,311,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'LE':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,-257,312,-259,-238,-271,-283,-284,-285,-286,-
295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,312,312,312,312,312,312,312,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'GE':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,-257,313,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,313,313,313,313,313,313,313,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'GT':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,-257,314,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,314,314,314,314,314,314,314,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'EQ':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,-257,315,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,315,315,315,315,315,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'NE':([127,144,163,180,181,182,1
86,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,-257,316,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,316,316,316,316,316,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'OR':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,-257,318,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,-253,-254,318,318,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'XOR':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,-257,319,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,319,-254,319,319,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'LAND':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,-257,320,-259,-238,-271,-
283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,-253,-254,-255,320,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'LOR':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,271,326,327,328,330,331,332,334,335,388,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,423,425,426,427,428,463,470,471,474,510,515,521,],[-294,-309,-294,-257,321,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-304,-306,-257,-279,-280,-260,-261,-262,-263,-305,-307,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,-253,-254,-255,-256,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'ELLIPSIS':([230,],[356,]),}
# Expand the compressed LALR action table into its runtime form.
# _lr_action_items maps each terminal token to a pair of parallel lists
# (states, actions); here it is pivoted into _lr_action[state][token] ->
# action code (positive = shift to state, negative = reduce by production).
# The compressed source dict is deleted afterwards to free memory.
_lr_action = {}
for _k, _v in _lr_action_items.items():
    for _x, _y in zip(_v[0], _v[1]):
        # setdefault replaces the non-idiomatic "if not _x in ..." guard
        # (PEP 8 E713) and does the membership test plus first-time
        # row creation in a single step.
        _lr_action.setdefault(_x, {})[_k] = _y
del _lr_action_items
_lr_goto_items = {'translation_unit_or_empty':([0,],[1,]),'translation_unit':([0,],[2,]),'empty':([0,10,11,17,18,19,22,26,60,61,62,79,80,106,114,115,116,117,126,145,152,172,216,217,218,239,240,268,273,278,283,285,361,362,368,370,371,383,391,398,433,445,446,461,462,464,465,467,499,509,511,513,524,527,],[3,57,69,83,83,83,89,95,69,89,57,95,122,151,95,122,224,95,237,259,267,267,339,224,95,366,95,267,267,237,267,267,95,122,224,224,366,267,366,267,478,224,95,267,478,267,267,267,267,267,478,267,267,267,]),'external_declaration':([0,2,],[4,55,]),'function_definition':([0,2,],[5,5,]),'declaration':([0,2,10,58,62,106,152,285,],[6,6,59,107,59,154,154,398,]),'pp_directive':([0,2,],[7,7,]),'pppragma_directive':([0,2,104,106,139,140,141,152,172,250,252,268,273,283,383,461,464,465,509,524,527,],[8,8,147,162,147,147,147,162,283,147,147,283,283,162,283,283,283,283,283,283,283,]),'id_declarator':([0,2,11,22,24,60,61,71,111,126,129,145,239,379,],[10,10,62,92,93,108,92,93,108,232,108,108,93,108,]),'declaration_specifiers':([0,2,10,58,62,80,106,115,152,230,239,285,362,371,391,],[11,11,60,60,60,126,60,126,60,126,126,60,126,126,126,]),'decl_body':([0,2,10,58,62,106,152,285,],[12,12,12,12,12,12,12,12,]),'direct_id_declarator':([0,2,11,16,22,24,60,61,68,71,111,126,129,145,235,239,365,379,],[15,15,15,81,15,15,15,15,81,15,15,15,15,15,81,15,81,15,]),'pointer':([0,2,11,22,24,60,61,71,94,111,126,129,145,239,278,379,391,],[16,16,68,16,16,68,16,68,132,68,235,68,68,365,390,68,390,]),'type_qualifier':([0,2,10,11,17,18,19,26,58,60,62,79,80,96,104,106,114,115,117,118,126,139,140,141,145,149,152,168,218,219,230,239,240,250,252,272,278,285,329,333,336,361,362,371,391,446,447,],[17,17,17,63,17,17,17,97,17,63,17,97,17,133,97,17,97,17,97,133,63,97,97,97,258,133,17,97,97,133,17,17,97,97,97,97,258,17,97,97,97,97,17,17,17,97,133,]),'storage_class_specifier':([0,2,10,11,17,18,19,58,60,62,80,106,115,126,152,230,239,285,362,371,391,],[18,18,18,64,18,18,18,18,64,18,18,18,18,64,18,18,18,18,18,18,18,]),'function_sp
ecifier':([0,2,10,11,17,18,19,58,60,62,80,106,115,126,152,230,239,285,362,371,391,],[19,19,19,65,19,19,19,19,65,19,19,19,19,65,19,19,19,19,19,19,19,]),'type_specifier_no_typeid':([0,2,10,11,22,58,60,61,62,80,104,106,115,126,128,139,140,141,145,149,152,168,230,239,250,252,272,278,285,329,333,336,362,371,391,],[20,20,20,66,20,20,66,20,20,20,20,20,20,66,20,20,20,20,257,20,20,20,20,20,20,20,20,257,20,20,20,20,20,20,20,]),'type_specifier':([0,2,10,22,58,61,62,80,104,106,115,128,139,140,141,149,152,168,230,239,250,252,272,285,329,333,336,362,371,391,],[21,21,21,87,21,87,21,21,148,21,21,87,148,148,148,264,21,148,21,21,148,148,148,21,148,148,148,21,21,21,]),'declaration_specifiers_no_type':([0,2,10,17,18,19,58,62,80,106,115,152,230,239,285,362,371,391,],[22,22,61,84,84,84,61,61,128,61,128,61,128,128,61,128,128,128,]),'typedef_name':([0,2,10,22,58,61,62,80,104,106,115,128,139,140,141,149,152,168,230,239,250,252,272,285,329,333,336,362,371,391,],[27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,]),'enum_specifier':([0,2,10,22,58,61,62,80,104,106,115,128,139,140,141,149,152,168,230,239,250,252,272,285,329,333,336,362,371,391,],[28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,]),'struct_or_union_specifier':([0,2,10,22,58,61,62,80,104,106,115,128,139,140,141,149,152,168,230,239,250,252,272,285,329,333,336,362,371,391,],[29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,]),'struct_or_union':([0,2,10,22,58,61,62,80,104,106,115,128,139,140,141,149,152,168,230,239,250,252,272,285,329,333,336,362,371,391,],[32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,]),'declaration_list_opt':([10,62,],[56,109,]),'declaration_list':([10,62,],[58,58,]),'init_declarator_list_opt':([11,60,],[67,67,]),'init_declarator_list':([11,60,],[70,70,]),'init_declarator':([11,60,111,129,],[72,72,212,243,]),'declarator':([11,60,111,129,145,379,],[73
,73,73,73,262,262,]),'typeid_declarator':([11,60,71,111,129,145,379,],[74,74,112,74,74,74,74,]),'direct_typeid_declarator':([11,60,68,71,111,129,145,379,],[75,75,110,75,75,75,75,75,]),'declaration_specifiers_no_type_opt':([17,18,19,],[82,85,86,]),'id_init_declarator_list_opt':([22,61,],[88,88,]),'id_init_declarator_list':([22,61,],[90,90,]),'id_init_declarator':([22,61,],[91,91,]),'type_qualifier_list_opt':([26,79,114,117,218,240,361,446,],[94,116,217,226,348,368,445,484,]),'type_qualifier_list':([26,79,104,114,117,139,140,141,168,218,240,250,252,272,329,333,336,361,446,],[96,118,149,219,96,149,149,149,149,96,96,149,149,149,149,149,149,447,96,]),'brace_open':([31,32,56,98,99,102,103,106,109,113,130,152,172,268,273,283,340,383,387,460,461,464,465,473,474,477,509,524,527,],[100,104,106,134,135,139,140,106,106,216,216,106,106,106,106,106,216,106,462,462,106,106,106,462,462,216,106,106,106,]),'compound_statement':([56,106,109,152,172,268,273,283,383,461,464,465,509,524,527,],[105,158,211,158,158,158,158,158,158,158,158,158,158,158,158,]),'parameter_type_list':([80,115,239,362,371,391,],[119,220,367,448,367,367,]),'identifier_list_opt':([80,115,362,],[120,221,449,]),'parameter_list':([80,115,239,362,371,391,],[121,121,121,121,121,121,]),'identifier_list':([80,115,362,],[123,123,123,]),'parameter_declaration':([80,115,230,239,362,371,391,],[124,124,357,124,124,124,124,]),'identifier':([80,106,113,115,116,130,152,164,168,172,177,183,184,185,187,217,226,227,231,249,263,268,272,273,275,279,280,281,283,285,291,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,340,344,345,348,349,362,368,370,380,383,387,398,445,460,461,464,465,466,467,469,472,475,477,484,485,499,509,513,516,517,524,527,],[125,195,195,125,195,195,195,195,195,195,195,195,195,195,195,195,195,195,358,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,438,195,195,125,195,195,195
,195,195,195,195,195,195,195,195,195,195,195,195,503,195,195,195,195,195,195,525,195,195,195,]),'enumerator_list':([100,134,135,],[136,245,246,]),'enumerator':([100,134,135,248,],[137,137,137,374,]),'struct_declaration_list':([104,139,140,],[141,250,252,]),'brace_close':([104,136,139,140,141,150,245,246,250,252,337,433,494,511,],[142,247,251,253,254,265,372,373,376,377,432,476,510,521,]),'struct_declaration':([104,139,140,141,250,252,],[143,143,143,255,255,255,]),'specifier_qualifier_list':([104,139,140,141,168,250,252,272,329,333,336,],[145,145,145,145,278,145,145,278,278,278,278,]),'block_item_list_opt':([106,],[150,]),'block_item_list':([106,],[152,]),'block_item':([106,152,],[153,266,]),'statement':([106,152,172,268,273,283,383,461,464,465,509,524,527,],[155,155,284,284,284,396,284,493,284,284,284,284,284,]),'labeled_statement':([106,152,172,268,273,283,383,461,464,465,509,524,527,],[156,156,156,156,156,156,156,156,156,156,156,156,156,]),'expression_statement':([106,152,172,268,273,283,383,461,464,465,509,524,527,],[157,157,157,157,157,157,157,157,157,157,157,157,157,]),'selection_statement':([106,152,172,268,273,283,383,461,464,465,509,524,527,],[159,159,159,159,159,159,159,159,159,159,159,159,159,]),'iteration_statement':([106,152,172,268,273,283,383,461,464,465,509,524,527,],[160,160,160,160,160,160,160,160,160,160,160,160,160,]),'jump_statement':([106,152,172,268,273,283,383,461,464,465,509,524,527,],[161,161,161,161,161,161,161,161,161,161,161,161,161,]),'expression_opt':([106,152,172,268,273,283,285,383,398,461,464,465,467,499,509,513,524,527,],[166,166,166,166,166,166,397,166,468,166,166,166,498,514,166,523,166,166,]),'expression':([106,152,168,172,177,268,272,273,275,280,281,283,285,303,322,329,333,383,398,461,464,465,466,467,499,509,513,517,524,527,],[169,169,277,169,289,169,277,169,386,393,394,169,169,402,421,277,277,169,169,169,169,169,497,169,169,169,169,526,169,169,]),'assignment_expression':([106,113,116,130,152,168,172,177,217,226,227,268,272,273,
275,279,280,281,283,285,291,303,322,323,329,333,340,348,349,368,370,383,398,445,461,464,465,466,467,472,477,484,485,499,509,513,517,524,527,],[178,215,225,215,178,178,178,178,225,354,355,178,178,178,178,392,178,178,178,178,401,178,178,424,178,178,215,441,442,225,225,178,178,225,178,178,178,178,178,501,215,507,508,178,178,178,178,178,178,]),'conditional_expression':([106,113,116,130,152,164,168,172,177,217,226,227,249,263,268,272,273,275,279,280,281,283,285,291,303,322,323,329,333,340,344,348,349,368,370,380,383,398,445,461,464,465,466,467,469,472,477,484,485,499,509,513,517,524,527,],[179,179,179,179,179,270,179,179,179,179,179,179,270,270,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,270,179,179,179,179,270,179,179,179,179,179,179,179,179,500,179,179,179,179,179,179,179,179,179,179,]),'unary_expression':([106,113,116,130,152,164,168,172,177,183,184,185,187,217,226,227,249,263,268,272,273,275,279,280,281,283,285,291,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,340,344,348,349,368,370,380,383,387,398,445,460,461,464,465,466,467,469,472,477,484,485,499,509,513,517,524,527,],[180,180,180,180,180,271,180,180,180,328,330,271,332,180,180,180,271,271,180,180,180,180,180,180,180,180,180,180,180,271,271,271,271,271,271,271,271,271,271,271,271,271,271,271,271,271,271,180,180,180,180,180,271,180,180,180,180,271,180,271,180,180,271,180,180,180,180,180,271,180,180,180,180,180,180,180,180,180,180,]),'binary_expression':([106,113,116,130,152,164,168,172,177,217,226,227,249,263,268,272,273,275,279,280,281,283,285,291,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,340,344,348,349,368,370,380,383,398,445,461,464,465,466,467,469,472,477,484,485,499,509,513,517,524,527,],[181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,181,181,181,181,181,181,181,181,181,1
81,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,]),'postfix_expression':([106,113,116,130,152,164,168,172,177,183,184,185,187,217,226,227,249,263,268,272,273,275,279,280,281,283,285,291,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,340,344,348,349,368,370,380,383,387,398,445,460,461,464,465,466,467,469,472,477,484,485,499,509,513,517,524,527,],[182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,]),'unary_operator':([106,113,116,130,152,164,168,172,177,183,184,185,187,217,226,227,249,263,268,272,273,275,279,280,281,283,285,291,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,340,344,348,349,368,370,380,383,387,398,445,460,461,464,465,466,467,469,472,477,484,485,499,509,513,517,524,527,],[185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,]),'cast_expression':([106,113,116,130,152,164,168,172,177,185,217,226,227,249,263,268,272,273,275,279,280,281,283,285,291,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,340,344,348,349,368,370,380,383,387,398,445,460,461,464,465,466,467,469,472,477,484,485,499,509,513,517,524,527,],[186,186,186,186,186,186,186,186,186,331,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,463,186,186,463,186,186,186,186,186,186,186,1
86,186,186,186,186,186,186,186,186,]),'primary_expression':([106,113,116,130,152,164,168,172,177,183,184,185,187,217,226,227,249,263,268,272,273,275,279,280,281,283,285,291,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,340,344,348,349,368,370,380,383,387,398,445,460,461,464,465,466,467,469,472,477,484,485,499,509,513,517,524,527,],[192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,]),'constant':([106,113,116,130,152,164,168,172,177,183,184,185,187,217,226,227,249,263,268,272,273,275,279,280,281,283,285,291,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,340,344,348,349,368,370,380,383,387,398,445,460,461,464,465,466,467,469,472,477,484,485,499,509,513,517,524,527,],[196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,]),'unified_string_literal':([106,113,116,130,152,164,168,172,177,183,184,185,187,217,226,227,249,263,268,272,273,275,279,280,281,283,285,291,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,340,344,348,349,368,370,380,383,387,398,445,460,461,464,465,466,467,469,472,477,484,485,499,509,513,517,524,527,],[197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,
197,197,197,]),'unified_wstring_literal':([106,113,116,130,152,164,168,172,177,183,184,185,187,217,226,227,249,263,268,272,273,275,279,280,281,283,285,291,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,329,333,340,344,348,349,368,370,380,383,387,398,445,460,461,464,465,466,467,469,472,477,484,485,499,509,513,517,524,527,],[198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,]),'initializer':([113,130,340,477,],[214,244,434,504,]),'assignment_expression_opt':([116,217,368,370,445,],[222,346,452,454,482,]),'typeid_noparen_declarator':([126,],[233,]),'abstract_declarator_opt':([126,278,],[234,389,]),'direct_typeid_noparen_declarator':([126,235,],[236,359,]),'abstract_declarator':([126,239,278,391,],[238,363,238,363,]),'direct_abstract_declarator':([126,235,239,278,365,390,391,],[242,360,242,242,360,360,242,]),'struct_declarator_list_opt':([145,],[256,]),'struct_declarator_list':([145,],[260,]),'struct_declarator':([145,379,],[261,457,]),'constant_expression':([164,249,263,344,380,],[269,375,381,437,458,]),'type_name':([168,272,329,333,336,],[276,384,429,430,431,]),'pragmacomp_or_statement':([172,268,273,383,461,464,465,509,524,527,],[282,382,385,459,492,495,496,520,528,530,]),'assignment_operator':([180,],[291,]),'initializer_list_opt':([216,],[337,]),'initializer_list':([216,462,],[338,494,]),'designation_opt':([216,433,462,511,],[340,477,340,477,]),'designation':([216,433,462,511,],[341,341,341,341,]),'designator_list':([216,433,462,511,],[342,342,342,342,]),'designator':([216,342,433,462,511,],[343,436,343,343,343,]),'parameter_type_list_opt':([239,371,391,],[364,456,364,]),'argument_expression_list':([323,],[422,]),'offsetof_member_designator':([475,],[502,]),}
# Expand the compressed LALR goto table into its runtime form.
# _lr_goto_items maps each nonterminal to a pair of parallel lists
# (states, goto-states); here it is pivoted into
# _lr_goto[state][nonterminal] -> successor state.
# The compressed source dict is deleted afterwards to free memory.
_lr_goto = {}
for _k, _v in _lr_goto_items.items():
    for _x, _y in zip(_v[0], _v[1]):
        # setdefault replaces the non-idiomatic "if not _x in ..." guard
        # (PEP 8 E713), creating the per-state row on first use.
        _lr_goto.setdefault(_x, {})[_k] = _y
del _lr_goto_items
_lr_productions = [
("S' -> translation_unit_or_empty","S'",1,None,None,None),
('abstract_declarator_opt -> empty','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',43),
('abstract_declarator_opt -> abstract_declarator','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',44),
('assignment_expression_opt -> empty','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',43),
('assignment_expression_opt -> assignment_expression','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',44),
('block_item_list_opt -> empty','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',43),
('block_item_list_opt -> block_item_list','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',44),
('declaration_list_opt -> empty','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',43),
('declaration_list_opt -> declaration_list','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',44),
('declaration_specifiers_no_type_opt -> empty','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',43),
('declaration_specifiers_no_type_opt -> declaration_specifiers_no_type','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',44),
('designation_opt -> empty','designation_opt',1,'p_designation_opt','plyparser.py',43),
('designation_opt -> designation','designation_opt',1,'p_designation_opt','plyparser.py',44),
('expression_opt -> empty','expression_opt',1,'p_expression_opt','plyparser.py',43),
('expression_opt -> expression','expression_opt',1,'p_expression_opt','plyparser.py',44),
('id_init_declarator_list_opt -> empty','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',43),
('id_init_declarator_list_opt -> id_init_declarator_list','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',44),
('identifier_list_opt -> empty','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',43),
('identifier_list_opt -> identifier_list','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',44),
('init_declarator_list_opt -> empty','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',43),
('init_declarator_list_opt -> init_declarator_list','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',44),
('initializer_list_opt -> empty','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',43),
('initializer_list_opt -> initializer_list','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',44),
('parameter_type_list_opt -> empty','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',43),
('parameter_type_list_opt -> parameter_type_list','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',44),
('struct_declarator_list_opt -> empty','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',43),
('struct_declarator_list_opt -> struct_declarator_list','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',44),
('type_qualifier_list_opt -> empty','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',43),
('type_qualifier_list_opt -> type_qualifier_list','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',44),
('direct_id_declarator -> ID','direct_id_declarator',1,'p_direct_id_declarator_1','plyparser.py',126),
('direct_id_declarator -> LPAREN id_declarator RPAREN','direct_id_declarator',3,'p_direct_id_declarator_2','plyparser.py',126),
('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_3','plyparser.py',126),
('direct_id_declarator -> direct_id_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',126),
('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',127),
('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_5','plyparser.py',126),
('direct_id_declarator -> direct_id_declarator LPAREN parameter_type_list RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',126),
('direct_id_declarator -> direct_id_declarator LPAREN identifier_list_opt RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',127),
('direct_typeid_declarator -> TYPEID','direct_typeid_declarator',1,'p_direct_typeid_declarator_1','plyparser.py',126),
('direct_typeid_declarator -> LPAREN typeid_declarator RPAREN','direct_typeid_declarator',3,'p_direct_typeid_declarator_2','plyparser.py',126),
('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_3','plyparser.py',126),
('direct_typeid_declarator -> direct_typeid_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',126),
('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',127),
('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_5','plyparser.py',126),
('direct_typeid_declarator -> direct_typeid_declarator LPAREN parameter_type_list RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',126),
('direct_typeid_declarator -> direct_typeid_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',127),
('direct_typeid_noparen_declarator -> TYPEID','direct_typeid_noparen_declarator',1,'p_direct_typeid_noparen_declarator_1','plyparser.py',126),
('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_3','plyparser.py',126),
('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',126),
('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',127),
('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_5','plyparser.py',126),
('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN parameter_type_list RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',126),
('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',127),
('id_declarator -> direct_id_declarator','id_declarator',1,'p_id_declarator_1','plyparser.py',126),
('id_declarator -> pointer direct_id_declarator','id_declarator',2,'p_id_declarator_2','plyparser.py',126),
('typeid_declarator -> direct_typeid_declarator','typeid_declarator',1,'p_typeid_declarator_1','plyparser.py',126),
('typeid_declarator -> pointer direct_typeid_declarator','typeid_declarator',2,'p_typeid_declarator_2','plyparser.py',126),
('typeid_noparen_declarator -> direct_typeid_noparen_declarator','typeid_noparen_declarator',1,'p_typeid_noparen_declarator_1','plyparser.py',126),
('typeid_noparen_declarator -> pointer direct_typeid_noparen_declarator','typeid_noparen_declarator',2,'p_typeid_noparen_declarator_2','plyparser.py',126),
('translation_unit_or_empty -> translation_unit','translation_unit_or_empty',1,'p_translation_unit_or_empty','c_parser.py',517),
('translation_unit_or_empty -> empty','translation_unit_or_empty',1,'p_translation_unit_or_empty','c_parser.py',518),
('translation_unit -> external_declaration','translation_unit',1,'p_translation_unit_1','c_parser.py',526),
('translation_unit -> translation_unit external_declaration','translation_unit',2,'p_translation_unit_2','c_parser.py',533),
('external_declaration -> function_definition','external_declaration',1,'p_external_declaration_1','c_parser.py',544),
('external_declaration -> declaration','external_declaration',1,'p_external_declaration_2','c_parser.py',549),
('external_declaration -> pp_directive','external_declaration',1,'p_external_declaration_3','c_parser.py',554),
('external_declaration -> pppragma_directive','external_declaration',1,'p_external_declaration_3','c_parser.py',555),
('external_declaration -> SEMI','external_declaration',1,'p_external_declaration_4','c_parser.py',560),
('pp_directive -> PPHASH','pp_directive',1,'p_pp_directive','c_parser.py',565),
('pppragma_directive -> PPPRAGMA','pppragma_directive',1,'p_pppragma_directive','c_parser.py',571),
('pppragma_directive -> PPPRAGMA PPPRAGMASTR','pppragma_directive',2,'p_pppragma_directive','c_parser.py',572),
('function_definition -> id_declarator declaration_list_opt compound_statement','function_definition',3,'p_function_definition_1','c_parser.py',583),
('function_definition -> declaration_specifiers id_declarator declaration_list_opt compound_statement','function_definition',4,'p_function_definition_2','c_parser.py',600),
('statement -> labeled_statement','statement',1,'p_statement','c_parser.py',611),
('statement -> expression_statement','statement',1,'p_statement','c_parser.py',612),
('statement -> compound_statement','statement',1,'p_statement','c_parser.py',613),
('statement -> selection_statement','statement',1,'p_statement','c_parser.py',614),
('statement -> iteration_statement','statement',1,'p_statement','c_parser.py',615),
('statement -> jump_statement','statement',1,'p_statement','c_parser.py',616),
('statement -> pppragma_directive','statement',1,'p_statement','c_parser.py',617),
('pragmacomp_or_statement -> pppragma_directive statement','pragmacomp_or_statement',2,'p_pragmacomp_or_statement','c_parser.py',664),
('pragmacomp_or_statement -> statement','pragmacomp_or_statement',1,'p_pragmacomp_or_statement','c_parser.py',665),
('decl_body -> declaration_specifiers init_declarator_list_opt','decl_body',2,'p_decl_body','c_parser.py',684),
('decl_body -> declaration_specifiers_no_type id_init_declarator_list_opt','decl_body',2,'p_decl_body','c_parser.py',685),
('declaration -> decl_body SEMI','declaration',2,'p_declaration','c_parser.py',744),
('declaration_list -> declaration','declaration_list',1,'p_declaration_list','c_parser.py',753),
('declaration_list -> declaration_list declaration','declaration_list',2,'p_declaration_list','c_parser.py',754),
('declaration_specifiers_no_type -> type_qualifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_1','c_parser.py',764),
('declaration_specifiers_no_type -> storage_class_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_2','c_parser.py',769),
('declaration_specifiers_no_type -> function_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_3','c_parser.py',774),
('declaration_specifiers -> declaration_specifiers type_qualifier','declaration_specifiers',2,'p_declaration_specifiers_1','c_parser.py',780),
('declaration_specifiers -> declaration_specifiers storage_class_specifier','declaration_specifiers',2,'p_declaration_specifiers_2','c_parser.py',785),
('declaration_specifiers -> declaration_specifiers function_specifier','declaration_specifiers',2,'p_declaration_specifiers_3','c_parser.py',790),
('declaration_specifiers -> declaration_specifiers type_specifier_no_typeid','declaration_specifiers',2,'p_declaration_specifiers_4','c_parser.py',795),
('declaration_specifiers -> type_specifier','declaration_specifiers',1,'p_declaration_specifiers_5','c_parser.py',800),
('declaration_specifiers -> declaration_specifiers_no_type type_specifier','declaration_specifiers',2,'p_declaration_specifiers_6','c_parser.py',805),
('storage_class_specifier -> AUTO','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',811),
('storage_class_specifier -> REGISTER','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',812),
('storage_class_specifier -> STATIC','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',813),
('storage_class_specifier -> EXTERN','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',814),
('storage_class_specifier -> TYPEDEF','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',815),
('function_specifier -> INLINE','function_specifier',1,'p_function_specifier','c_parser.py',820),
('type_specifier_no_typeid -> VOID','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',825),
('type_specifier_no_typeid -> _BOOL','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',826),
('type_specifier_no_typeid -> CHAR','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',827),
('type_specifier_no_typeid -> SHORT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',828),
('type_specifier_no_typeid -> INT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',829),
('type_specifier_no_typeid -> LONG','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',830),
('type_specifier_no_typeid -> FLOAT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',831),
('type_specifier_no_typeid -> DOUBLE','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',832),
('type_specifier_no_typeid -> _COMPLEX','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',833),
('type_specifier_no_typeid -> SIGNED','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',834),
('type_specifier_no_typeid -> UNSIGNED','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',835),
('type_specifier_no_typeid -> __INT128','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',836),
('type_specifier -> typedef_name','type_specifier',1,'p_type_specifier','c_parser.py',841),
('type_specifier -> enum_specifier','type_specifier',1,'p_type_specifier','c_parser.py',842),
('type_specifier -> struct_or_union_specifier','type_specifier',1,'p_type_specifier','c_parser.py',843),
('type_specifier -> type_specifier_no_typeid','type_specifier',1,'p_type_specifier','c_parser.py',844),
('type_qualifier -> CONST','type_qualifier',1,'p_type_qualifier','c_parser.py',849),
('type_qualifier -> RESTRICT','type_qualifier',1,'p_type_qualifier','c_parser.py',850),
('type_qualifier -> VOLATILE','type_qualifier',1,'p_type_qualifier','c_parser.py',851),
('init_declarator_list -> init_declarator','init_declarator_list',1,'p_init_declarator_list','c_parser.py',856),
('init_declarator_list -> init_declarator_list COMMA init_declarator','init_declarator_list',3,'p_init_declarator_list','c_parser.py',857),
('init_declarator -> declarator','init_declarator',1,'p_init_declarator','c_parser.py',865),
('init_declarator -> declarator EQUALS initializer','init_declarator',3,'p_init_declarator','c_parser.py',866),
('id_init_declarator_list -> id_init_declarator','id_init_declarator_list',1,'p_id_init_declarator_list','c_parser.py',871),
('id_init_declarator_list -> id_init_declarator_list COMMA init_declarator','id_init_declarator_list',3,'p_id_init_declarator_list','c_parser.py',872),
('id_init_declarator -> id_declarator','id_init_declarator',1,'p_id_init_declarator','c_parser.py',877),
('id_init_declarator -> id_declarator EQUALS initializer','id_init_declarator',3,'p_id_init_declarator','c_parser.py',878),
('specifier_qualifier_list -> specifier_qualifier_list type_specifier_no_typeid','specifier_qualifier_list',2,'p_specifier_qualifier_list_1','c_parser.py',885),
('specifier_qualifier_list -> specifier_qualifier_list type_qualifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_2','c_parser.py',890),
('specifier_qualifier_list -> type_specifier','specifier_qualifier_list',1,'p_specifier_qualifier_list_3','c_parser.py',895),
('specifier_qualifier_list -> type_qualifier_list type_specifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_4','c_parser.py',900),
('struct_or_union_specifier -> struct_or_union ID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','c_parser.py',909),
('struct_or_union_specifier -> struct_or_union TYPEID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','c_parser.py',910),
('struct_or_union_specifier -> struct_or_union brace_open struct_declaration_list brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_2','c_parser.py',920),
('struct_or_union_specifier -> struct_or_union brace_open brace_close','struct_or_union_specifier',3,'p_struct_or_union_specifier_2','c_parser.py',921),
('struct_or_union_specifier -> struct_or_union ID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','c_parser.py',938),
('struct_or_union_specifier -> struct_or_union ID brace_open brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_3','c_parser.py',939),
('struct_or_union_specifier -> struct_or_union TYPEID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','c_parser.py',940),
('struct_or_union_specifier -> struct_or_union TYPEID brace_open brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_3','c_parser.py',941),
('struct_or_union -> STRUCT','struct_or_union',1,'p_struct_or_union','c_parser.py',957),
('struct_or_union -> UNION','struct_or_union',1,'p_struct_or_union','c_parser.py',958),
('struct_declaration_list -> struct_declaration','struct_declaration_list',1,'p_struct_declaration_list','c_parser.py',965),
('struct_declaration_list -> struct_declaration_list struct_declaration','struct_declaration_list',2,'p_struct_declaration_list','c_parser.py',966),
('struct_declaration -> specifier_qualifier_list struct_declarator_list_opt SEMI','struct_declaration',3,'p_struct_declaration_1','c_parser.py',974),
('struct_declaration -> SEMI','struct_declaration',1,'p_struct_declaration_2','c_parser.py',1012),
('struct_declaration -> pppragma_directive','struct_declaration',1,'p_struct_declaration_3','c_parser.py',1017),
('struct_declarator_list -> struct_declarator','struct_declarator_list',1,'p_struct_declarator_list','c_parser.py',1022),
('struct_declarator_list -> struct_declarator_list COMMA struct_declarator','struct_declarator_list',3,'p_struct_declarator_list','c_parser.py',1023),
('struct_declarator -> declarator','struct_declarator',1,'p_struct_declarator_1','c_parser.py',1031),
('struct_declarator -> declarator COLON constant_expression','struct_declarator',3,'p_struct_declarator_2','c_parser.py',1036),
('struct_declarator -> COLON constant_expression','struct_declarator',2,'p_struct_declarator_2','c_parser.py',1037),
('enum_specifier -> ENUM ID','enum_specifier',2,'p_enum_specifier_1','c_parser.py',1045),
('enum_specifier -> ENUM TYPEID','enum_specifier',2,'p_enum_specifier_1','c_parser.py',1046),
('enum_specifier -> ENUM brace_open enumerator_list brace_close','enum_specifier',4,'p_enum_specifier_2','c_parser.py',1051),
('enum_specifier -> ENUM ID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','c_parser.py',1056),
('enum_specifier -> ENUM TYPEID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','c_parser.py',1057),
('enumerator_list -> enumerator','enumerator_list',1,'p_enumerator_list','c_parser.py',1062),
('enumerator_list -> enumerator_list COMMA','enumerator_list',2,'p_enumerator_list','c_parser.py',1063),
('enumerator_list -> enumerator_list COMMA enumerator','enumerator_list',3,'p_enumerator_list','c_parser.py',1064),
('enumerator -> ID','enumerator',1,'p_enumerator','c_parser.py',1075),
('enumerator -> ID EQUALS constant_expression','enumerator',3,'p_enumerator','c_parser.py',1076),
('declarator -> id_declarator','declarator',1,'p_declarator','c_parser.py',1091),
('declarator -> typeid_declarator','declarator',1,'p_declarator','c_parser.py',1092),
('pointer -> TIMES type_qualifier_list_opt','pointer',2,'p_pointer','c_parser.py',1203),
('pointer -> TIMES type_qualifier_list_opt pointer','pointer',3,'p_pointer','c_parser.py',1204),
('type_qualifier_list -> type_qualifier','type_qualifier_list',1,'p_type_qualifier_list','c_parser.py',1233),
('type_qualifier_list -> type_qualifier_list type_qualifier','type_qualifier_list',2,'p_type_qualifier_list','c_parser.py',1234),
('parameter_type_list -> parameter_list','parameter_type_list',1,'p_parameter_type_list','c_parser.py',1239),
('parameter_type_list -> parameter_list COMMA ELLIPSIS','parameter_type_list',3,'p_parameter_type_list','c_parser.py',1240),
('parameter_list -> parameter_declaration','parameter_list',1,'p_parameter_list','c_parser.py',1248),
('parameter_list -> parameter_list COMMA parameter_declaration','parameter_list',3,'p_parameter_list','c_parser.py',1249),
('parameter_declaration -> declaration_specifiers id_declarator','parameter_declaration',2,'p_parameter_declaration_1','c_parser.py',1268),
('parameter_declaration -> declaration_specifiers typeid_noparen_declarator','parameter_declaration',2,'p_parameter_declaration_1','c_parser.py',1269),
('parameter_declaration -> declaration_specifiers abstract_declarator_opt','parameter_declaration',2,'p_parameter_declaration_2','c_parser.py',1280),
('identifier_list -> identifier','identifier_list',1,'p_identifier_list','c_parser.py',1311),
('identifier_list -> identifier_list COMMA identifier','identifier_list',3,'p_identifier_list','c_parser.py',1312),
('initializer -> assignment_expression','initializer',1,'p_initializer_1','c_parser.py',1321),
('initializer -> brace_open initializer_list_opt brace_close','initializer',3,'p_initializer_2','c_parser.py',1326),
('initializer -> brace_open initializer_list COMMA brace_close','initializer',4,'p_initializer_2','c_parser.py',1327),
('initializer_list -> designation_opt initializer','initializer_list',2,'p_initializer_list','c_parser.py',1335),
('initializer_list -> initializer_list COMMA designation_opt initializer','initializer_list',4,'p_initializer_list','c_parser.py',1336),
('designation -> designator_list EQUALS','designation',2,'p_designation','c_parser.py',1347),
('designator_list -> designator','designator_list',1,'p_designator_list','c_parser.py',1355),
('designator_list -> designator_list designator','designator_list',2,'p_designator_list','c_parser.py',1356),
('designator -> LBRACKET constant_expression RBRACKET','designator',3,'p_designator','c_parser.py',1361),
('designator -> PERIOD identifier','designator',2,'p_designator','c_parser.py',1362),
('type_name -> specifier_qualifier_list abstract_declarator_opt','type_name',2,'p_type_name','c_parser.py',1367),
('abstract_declarator -> pointer','abstract_declarator',1,'p_abstract_declarator_1','c_parser.py',1378),
('abstract_declarator -> pointer direct_abstract_declarator','abstract_declarator',2,'p_abstract_declarator_2','c_parser.py',1386),
('abstract_declarator -> direct_abstract_declarator','abstract_declarator',1,'p_abstract_declarator_3','c_parser.py',1391),
('direct_abstract_declarator -> LPAREN abstract_declarator RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_1','c_parser.py',1401),
('direct_abstract_declarator -> direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_2','c_parser.py',1405),
('direct_abstract_declarator -> LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_3','c_parser.py',1416),
('direct_abstract_declarator -> direct_abstract_declarator LBRACKET TIMES RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_4','c_parser.py',1426),
('direct_abstract_declarator -> LBRACKET TIMES RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator_5','c_parser.py',1437),
('direct_abstract_declarator -> direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',4,'p_direct_abstract_declarator_6','c_parser.py',1446),
('direct_abstract_declarator -> LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_7','c_parser.py',1456),
('block_item -> declaration','block_item',1,'p_block_item','c_parser.py',1467),
('block_item -> statement','block_item',1,'p_block_item','c_parser.py',1468),
('block_item_list -> block_item','block_item_list',1,'p_block_item_list','c_parser.py',1475),
('block_item_list -> block_item_list block_item','block_item_list',2,'p_block_item_list','c_parser.py',1476),
('compound_statement -> brace_open block_item_list_opt brace_close','compound_statement',3,'p_compound_statement_1','c_parser.py',1482),
('labeled_statement -> ID COLON pragmacomp_or_statement','labeled_statement',3,'p_labeled_statement_1','c_parser.py',1488),
('labeled_statement -> CASE constant_expression COLON pragmacomp_or_statement','labeled_statement',4,'p_labeled_statement_2','c_parser.py',1492),
('labeled_statement -> DEFAULT COLON pragmacomp_or_statement','labeled_statement',3,'p_labeled_statement_3','c_parser.py',1496),
('selection_statement -> IF LPAREN expression RPAREN pragmacomp_or_statement','selection_statement',5,'p_selection_statement_1','c_parser.py',1500),
('selection_statement -> IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement','selection_statement',7,'p_selection_statement_2','c_parser.py',1504),
('selection_statement -> SWITCH LPAREN expression RPAREN pragmacomp_or_statement','selection_statement',5,'p_selection_statement_3','c_parser.py',1508),
('iteration_statement -> WHILE LPAREN expression RPAREN pragmacomp_or_statement','iteration_statement',5,'p_iteration_statement_1','c_parser.py',1513),
('iteration_statement -> DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI','iteration_statement',7,'p_iteration_statement_2','c_parser.py',1517),
('iteration_statement -> FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement','iteration_statement',9,'p_iteration_statement_3','c_parser.py',1521),
('iteration_statement -> FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement','iteration_statement',8,'p_iteration_statement_4','c_parser.py',1525),
('jump_statement -> GOTO ID SEMI','jump_statement',3,'p_jump_statement_1','c_parser.py',1530),
('jump_statement -> BREAK SEMI','jump_statement',2,'p_jump_statement_2','c_parser.py',1534),
('jump_statement -> CONTINUE SEMI','jump_statement',2,'p_jump_statement_3','c_parser.py',1538),
('jump_statement -> RETURN expression SEMI','jump_statement',3,'p_jump_statement_4','c_parser.py',1542),
('jump_statement -> RETURN SEMI','jump_statement',2,'p_jump_statement_4','c_parser.py',1543),
('expression_statement -> expression_opt SEMI','expression_statement',2,'p_expression_statement','c_parser.py',1548),
('expression -> assignment_expression','expression',1,'p_expression','c_parser.py',1555),
('expression -> expression COMMA assignment_expression','expression',3,'p_expression','c_parser.py',1556),
('typedef_name -> TYPEID','typedef_name',1,'p_typedef_name','c_parser.py',1568),
('assignment_expression -> conditional_expression','assignment_expression',1,'p_assignment_expression','c_parser.py',1572),
('assignment_expression -> unary_expression assignment_operator assignment_expression','assignment_expression',3,'p_assignment_expression','c_parser.py',1573),
('assignment_operator -> EQUALS','assignment_operator',1,'p_assignment_operator','c_parser.py',1586),
('assignment_operator -> XOREQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1587),
('assignment_operator -> TIMESEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1588),
('assignment_operator -> DIVEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1589),
('assignment_operator -> MODEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1590),
('assignment_operator -> PLUSEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1591),
('assignment_operator -> MINUSEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1592),
('assignment_operator -> LSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1593),
('assignment_operator -> RSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1594),
('assignment_operator -> ANDEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1595),
('assignment_operator -> OREQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1596),
('constant_expression -> conditional_expression','constant_expression',1,'p_constant_expression','c_parser.py',1601),
('conditional_expression -> binary_expression','conditional_expression',1,'p_conditional_expression','c_parser.py',1605),
('conditional_expression -> binary_expression CONDOP expression COLON conditional_expression','conditional_expression',5,'p_conditional_expression','c_parser.py',1606),
('binary_expression -> cast_expression','binary_expression',1,'p_binary_expression','c_parser.py',1614),
('binary_expression -> binary_expression TIMES binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1615),
('binary_expression -> binary_expression DIVIDE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1616),
('binary_expression -> binary_expression MOD binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1617),
('binary_expression -> binary_expression PLUS binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1618),
('binary_expression -> binary_expression MINUS binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1619),
('binary_expression -> binary_expression RSHIFT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1620),
('binary_expression -> binary_expression LSHIFT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1621),
('binary_expression -> binary_expression LT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1622),
('binary_expression -> binary_expression LE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1623),
('binary_expression -> binary_expression GE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1624),
('binary_expression -> binary_expression GT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1625),
('binary_expression -> binary_expression EQ binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1626),
('binary_expression -> binary_expression NE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1627),
('binary_expression -> binary_expression AND binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1628),
('binary_expression -> binary_expression OR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1629),
('binary_expression -> binary_expression XOR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1630),
('binary_expression -> binary_expression LAND binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1631),
('binary_expression -> binary_expression LOR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1632),
('cast_expression -> unary_expression','cast_expression',1,'p_cast_expression_1','c_parser.py',1640),
('cast_expression -> LPAREN type_name RPAREN cast_expression','cast_expression',4,'p_cast_expression_2','c_parser.py',1644),
('unary_expression -> postfix_expression','unary_expression',1,'p_unary_expression_1','c_parser.py',1648),
('unary_expression -> PLUSPLUS unary_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1652),
('unary_expression -> MINUSMINUS unary_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1653),
('unary_expression -> unary_operator cast_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1654),
('unary_expression -> SIZEOF unary_expression','unary_expression',2,'p_unary_expression_3','c_parser.py',1659),
('unary_expression -> SIZEOF LPAREN type_name RPAREN','unary_expression',4,'p_unary_expression_3','c_parser.py',1660),
('unary_operator -> AND','unary_operator',1,'p_unary_operator','c_parser.py',1668),
('unary_operator -> TIMES','unary_operator',1,'p_unary_operator','c_parser.py',1669),
('unary_operator -> PLUS','unary_operator',1,'p_unary_operator','c_parser.py',1670),
('unary_operator -> MINUS','unary_operator',1,'p_unary_operator','c_parser.py',1671),
('unary_operator -> NOT','unary_operator',1,'p_unary_operator','c_parser.py',1672),
('unary_operator -> LNOT','unary_operator',1,'p_unary_operator','c_parser.py',1673),
('postfix_expression -> primary_expression','postfix_expression',1,'p_postfix_expression_1','c_parser.py',1678),
('postfix_expression -> postfix_expression LBRACKET expression RBRACKET','postfix_expression',4,'p_postfix_expression_2','c_parser.py',1682),
('postfix_expression -> postfix_expression LPAREN argument_expression_list RPAREN','postfix_expression',4,'p_postfix_expression_3','c_parser.py',1686),
('postfix_expression -> postfix_expression LPAREN RPAREN','postfix_expression',3,'p_postfix_expression_3','c_parser.py',1687),
('postfix_expression -> postfix_expression PERIOD ID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1692),
('postfix_expression -> postfix_expression PERIOD TYPEID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1693),
('postfix_expression -> postfix_expression ARROW ID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1694),
('postfix_expression -> postfix_expression ARROW TYPEID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1695),
('postfix_expression -> postfix_expression PLUSPLUS','postfix_expression',2,'p_postfix_expression_5','c_parser.py',1701),
('postfix_expression -> postfix_expression MINUSMINUS','postfix_expression',2,'p_postfix_expression_5','c_parser.py',1702),
('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list brace_close','postfix_expression',6,'p_postfix_expression_6','c_parser.py',1707),
('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close','postfix_expression',7,'p_postfix_expression_6','c_parser.py',1708),
('primary_expression -> identifier','primary_expression',1,'p_primary_expression_1','c_parser.py',1713),
('primary_expression -> constant','primary_expression',1,'p_primary_expression_2','c_parser.py',1717),
('primary_expression -> unified_string_literal','primary_expression',1,'p_primary_expression_3','c_parser.py',1721),
('primary_expression -> unified_wstring_literal','primary_expression',1,'p_primary_expression_3','c_parser.py',1722),
('primary_expression -> LPAREN expression RPAREN','primary_expression',3,'p_primary_expression_4','c_parser.py',1727),
('primary_expression -> OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN','primary_expression',6,'p_primary_expression_5','c_parser.py',1731),
('offsetof_member_designator -> identifier','offsetof_member_designator',1,'p_offsetof_member_designator','c_parser.py',1739),
('offsetof_member_designator -> offsetof_member_designator PERIOD identifier','offsetof_member_designator',3,'p_offsetof_member_designator','c_parser.py',1740),
('offsetof_member_designator -> offsetof_member_designator LBRACKET expression RBRACKET','offsetof_member_designator',4,'p_offsetof_member_designator','c_parser.py',1741),
('argument_expression_list -> assignment_expression','argument_expression_list',1,'p_argument_expression_list','c_parser.py',1753),
('argument_expression_list -> argument_expression_list COMMA assignment_expression','argument_expression_list',3,'p_argument_expression_list','c_parser.py',1754),
('identifier -> ID','identifier',1,'p_identifier','c_parser.py',1763),
('constant -> INT_CONST_DEC','constant',1,'p_constant_1','c_parser.py',1767),
('constant -> INT_CONST_OCT','constant',1,'p_constant_1','c_parser.py',1768),
('constant -> INT_CONST_HEX','constant',1,'p_constant_1','c_parser.py',1769),
('constant -> INT_CONST_BIN','constant',1,'p_constant_1','c_parser.py',1770),
('constant -> INT_CONST_CHAR','constant',1,'p_constant_1','c_parser.py',1771),
('constant -> FLOAT_CONST','constant',1,'p_constant_2','c_parser.py',1790),
('constant -> HEX_FLOAT_CONST','constant',1,'p_constant_2','c_parser.py',1791),
('constant -> CHAR_CONST','constant',1,'p_constant_3','c_parser.py',1807),
('constant -> WCHAR_CONST','constant',1,'p_constant_3','c_parser.py',1808),
('unified_string_literal -> STRING_LITERAL','unified_string_literal',1,'p_unified_string_literal','c_parser.py',1819),
('unified_string_literal -> unified_string_literal STRING_LITERAL','unified_string_literal',2,'p_unified_string_literal','c_parser.py',1820),
('unified_wstring_literal -> WSTRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','c_parser.py',1830),
('unified_wstring_literal -> unified_wstring_literal WSTRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','c_parser.py',1831),
('brace_open -> LBRACE','brace_open',1,'p_brace_open','c_parser.py',1841),
('brace_close -> RBRACE','brace_close',1,'p_brace_close','c_parser.py',1847),
('empty -> <empty>','empty',0,'p_empty','c_parser.py',1853),
]
| 505.817647
| 96,884
| 0.696996
| 34,455
| 171,978
| 3.375156
| 0.028472
| 0.015229
| 0.01958
| 0.007842
| 0.772674
| 0.716487
| 0.65699
| 0.620315
| 0.574283
| 0.544935
| 0
| 0.457852
| 0.048989
| 171,978
| 339
| 96,885
| 507.309735
| 0.253178
| 0.000355
| 0
| 0.006061
| 1
| 0.00303
| 0.312053
| 0.107008
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
164d0cd5ca1b14f598739b46a707cb0d5c3e645f
| 7,133
|
py
|
Python
|
packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py
|
colinRawlings/osparc-simcore
|
bf2f18d5bc1e574d5f4c238d08ad15156184c310
|
[
"MIT"
] | 25
|
2018-04-13T12:44:12.000Z
|
2022-03-12T15:01:17.000Z
|
packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py
|
colinRawlings/osparc-simcore
|
bf2f18d5bc1e574d5f4c238d08ad15156184c310
|
[
"MIT"
] | 2,553
|
2018-01-18T17:11:55.000Z
|
2022-03-31T16:26:40.000Z
|
packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py
|
colinRawlings/osparc-simcore
|
bf2f18d5bc1e574d5f4c238d08ad15156184c310
|
[
"MIT"
] | 20
|
2018-01-18T19:45:33.000Z
|
2022-03-29T07:08:47.000Z
|
# pylint:disable=unused-variable
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
# pylint:disable=too-many-arguments
import filecmp
from pathlib import Path
from typing import Callable
from uuid import uuid4
import np_helpers
import pytest
from simcore_sdk.node_ports_common import exceptions, filemanager
pytest_simcore_core_services_selection = ["postgres", "storage"]
pytest_simcore_ops_services_selection = ["minio", "adminer"]
async def test_valid_upload_download(
    tmpdir: Path,
    bucket: str,
    filemanager_cfg: None,
    user_id: int,
    create_valid_file_uuid: Callable[[Path], str],
    s3_simcore_location: str,
):
    """Round-trip a file through storage and check it comes back intact."""
    # Arrange: a small local file with a valid storage uuid.
    src_file = Path(tmpdir) / "test.test"
    src_file.write_text("I am a test file")
    assert src_file.exists()
    file_id = create_valid_file_uuid(src_file)

    # Upload must land in the requested store and yield an e-tag.
    store_id, e_tag = await filemanager.upload_file(
        user_id=user_id,
        store_id=s3_simcore_location,
        s3_object=file_id,
        local_file_path=src_file,
    )
    assert store_id == s3_simcore_location
    assert e_tag

    # The stored metadata must match what the upload reported.
    get_store_id, get_e_tag = await filemanager.get_file_metadata(
        user_id=user_id, store_id=store_id, s3_object=file_id
    )
    assert (get_store_id, get_e_tag) == (store_id, e_tag)

    # Downloading again must reproduce the original file byte-for-byte.
    dst_folder = Path(tmpdir) / "downloads"
    downloaded = await filemanager.download_file_from_s3(
        user_id=user_id,
        store_id=s3_simcore_location,
        s3_object=file_id,
        local_folder=dst_folder,
    )
    assert downloaded.exists()
    assert downloaded.name == "test.test"
    assert filecmp.cmp(downloaded, src_file)
async def test_invalid_file_path(
    tmpdir: Path,
    bucket: str,
    filemanager_cfg: None,
    user_id: int,
    create_valid_file_uuid: Callable[[Path], str],
    s3_simcore_location: str,
):
    """Uploading a missing local path or downloading a never-uploaded object raises."""
    existing = Path(tmpdir) / "test.test"
    existing.write_text("I am a test file")
    assert existing.exists()
    file_id = create_valid_file_uuid(existing)
    store = s3_simcore_location

    # A local path that does not exist cannot be uploaded.
    missing = Path(tmpdir) / "some other file.txt"
    with pytest.raises(FileNotFoundError):
        await filemanager.upload_file(
            user_id=user_id,
            store_id=store,
            s3_object=file_id,
            local_file_path=missing,
        )

    # Nothing was ever uploaded under file_id, so the download link is invalid.
    with pytest.raises(exceptions.InvalidDownloadLinkError):
        await filemanager.download_file_from_s3(
            user_id=user_id,
            store_id=store,
            s3_object=file_id,
            local_folder=Path(tmpdir) / "downloads",
        )
async def test_errors_upon_invalid_file_identifiers(
    tmpdir: Path,
    bucket: str,
    filemanager_cfg: None,
    user_id: int,
    project_id: str,
    s3_simcore_location: str,
):
    """Empty or malformed s3 object identifiers must be rejected."""
    local_file = Path(tmpdir) / "test.test"
    local_file.write_text("I am a test file")
    assert local_file.exists()
    store = s3_simcore_location

    # Upload: empty identifier, then one that is not a valid file uuid.
    for bad_id in ("", "file_id"):
        with pytest.raises(exceptions.StorageInvalidCall):
            await filemanager.upload_file(
                user_id=user_id,
                store_id=store,
                s3_object=bad_id,
                local_file_path=local_file,
            )

    downloads = Path(tmpdir) / "downloads"
    # Download: empty identifier.
    with pytest.raises(exceptions.StorageInvalidCall):
        await filemanager.download_file_from_s3(
            user_id=user_id, store_id=store, s3_object="", local_folder=downloads
        )
    # Download: well-formed uuid pointing at a file that was never uploaded.
    with pytest.raises(exceptions.StorageInvalidCall):
        await filemanager.download_file_from_s3(
            user_id=user_id,
            store_id=store,
            s3_object=np_helpers.file_uuid("invisible.txt", project_id, f"{uuid4()}"),
            local_folder=downloads,
        )
async def test_invalid_store(
    tmpdir: Path,
    bucket: str,
    filemanager_cfg: None,
    user_id: int,
    create_valid_file_uuid: Callable[[Path], str],
    s3_simcore_location: str,
):
    """An unknown store name raises S3InvalidStore for upload and download alike."""
    local_file = Path(tmpdir) / "test.test"
    local_file.write_text("I am a test file")
    assert local_file.exists()
    file_id = create_valid_file_uuid(local_file)
    bogus_store = "somefunkystore"

    with pytest.raises(exceptions.S3InvalidStore):
        await filemanager.upload_file(
            user_id=user_id,
            store_name=bogus_store,
            s3_object=file_id,
            local_file_path=local_file,
        )

    with pytest.raises(exceptions.S3InvalidStore):
        await filemanager.download_file_from_s3(
            user_id=user_id,
            store_name=bogus_store,
            s3_object=file_id,
            local_folder=Path(tmpdir) / "downloads",
        )
async def test_valid_metadata(
    tmpdir: Path,
    bucket: str,
    filemanager_cfg: None,
    user_id: int,
    create_valid_file_uuid: Callable[[Path], str],
    s3_simcore_location: str,
):
    """entry_exists is False before a file is uploaded and True right after."""
    # first we go with a non-existing file
    file_path = Path(tmpdir) / "test.test"
    file_id = create_valid_file_uuid(file_path)
    assert file_path.exists() is False
    is_metadata_present = await filemanager.entry_exists(
        user_id=user_id, store_id=s3_simcore_location, s3_object=file_id  # type: ignore
    )
    # fixed: compare booleans with `is`, not `==` (PEP 8 / pylint C0121)
    assert is_metadata_present is False
    # now really create the file and upload it
    file_path.write_text("I am a test file")
    assert file_path.exists()
    file_id = create_valid_file_uuid(file_path)
    store_id, e_tag = await filemanager.upload_file(
        user_id=user_id,
        store_id=s3_simcore_location,
        s3_object=file_id,
        local_file_path=file_path,
    )
    assert store_id == s3_simcore_location
    assert e_tag
    is_metadata_present = await filemanager.entry_exists(
        user_id=user_id, store_id=store_id, s3_object=file_id
    )
    assert is_metadata_present is True
async def test_invalid_call_raises_exception(
    tmpdir: Path,
    bucket: str,
    filemanager_cfg: None,
    user_id: int,
    create_valid_file_uuid: Callable[[Path], str],
    s3_simcore_location: str,
):
    """entry_exists rejects a None user_id, store_id or s3_object."""
    file_path = Path(tmpdir) / "test.test"
    file_id = create_valid_file_uuid(file_path)
    assert file_path.exists() is False

    # Each call has exactly one argument set to None; all must be rejected.
    for bad_kwargs in (
        dict(user_id=None, store_id=s3_simcore_location, s3_object=file_id),
        dict(user_id=user_id, store_id=None, s3_object=file_id),
        dict(user_id=user_id, store_id=s3_simcore_location, s3_object=None),
    ):
        with pytest.raises(exceptions.StorageInvalidCall):
            await filemanager.entry_exists(**bad_kwargs)  # type: ignore
| 31.285088
| 90
| 0.66662
| 908
| 7,133
| 4.881057
| 0.121145
| 0.074007
| 0.034522
| 0.043321
| 0.772112
| 0.761056
| 0.761056
| 0.728114
| 0.723601
| 0.704422
| 0
| 0.007878
| 0.252629
| 7,133
| 227
| 91
| 31.422907
| 0.823485
| 0.03659
| 0
| 0.712766
| 0
| 0
| 0.040398
| 0
| 0
| 0
| 0
| 0
| 0.095745
| 1
| 0
| false
| 0
| 0.037234
| 0
| 0.037234
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
168b17066852cdcc866e3c4a76e129b8ae24ab61
| 7,860
|
py
|
Python
|
fastreid/data/datasets/vehiclecarla.py
|
yangyueren/fast-reid-video
|
539b30d6a0ff4f6d2f5841bcbc49344795c36abe
|
[
"Apache-2.0"
] | 1
|
2020-12-24T09:32:21.000Z
|
2020-12-24T09:32:21.000Z
|
fastreid/data/datasets/vehiclecarla.py
|
yangyueren/fast-reid-video
|
539b30d6a0ff4f6d2f5841bcbc49344795c36abe
|
[
"Apache-2.0"
] | null | null | null |
fastreid/data/datasets/vehiclecarla.py
|
yangyueren/fast-reid-video
|
539b30d6a0ff4f6d2f5841bcbc49344795c36abe
|
[
"Apache-2.0"
] | null | null | null |
import glob
import os.path as osp
import re
from .bases import ImageDataset
from ..datasets import DATASET_REGISTRY
@DATASET_REGISTRY.register()
class CarlaVehicle(ImageDataset):
    """CARLA-generated vehicle re-identification dataset.

    Depending on ``EXTRACT``, either the random-1000 train/query/gallery
    split is loaded (``process_dir``) or the traffic-camera capture
    directories are parsed (``process_carla_camera_dir``).
    """

    dataset_dir = "carla_vehicles_random_1000"
    dataset_name = "carla_vehicles_random_1000"
    # alternative split: "carla_vehicles_108"
    # When True, switch to the big epic video camera-capture layout.
    EXTRACT = False

    def __init__(self, root='datasets', **kwargs):
        self.dataset_dir = osp.join(root, self.dataset_dir)
        if self.EXTRACT:  # fixed: idiomatic truth test instead of `== True`
            # change dir
            self.dataset_dir = "carla_cameras_big_epic_video_1000"
            self.dataset_name = "carla_cameras_big_epic_video_1000"
            self.dataset_dir = osp.join(root, self.dataset_dir)
            self.train_dir = osp.join(self.dataset_dir, 'train_nouse')
            self.query_dir = osp.join(self.dataset_dir, 'query_nouse')
            self.gallery_dir = osp.join(self.dataset_dir, 'traffic-camera-output-combine')
        else:
            self.train_dir = osp.join(self.dataset_dir, 'train_test')
            self.query_dir = osp.join(self.dataset_dir, 'query2_random1000')
            self.gallery_dir = osp.join(self.dataset_dir, 'test2_random1000')
            # other split variants previously used: train2 / query2 / test2,
            # query2_epic(1k) / test2_epic(1k)

        required_files = [
            self.dataset_dir,
            self.train_dir,
            self.query_dir,
            self.gallery_dir,
        ]
        self.check_before_run(required_files)

        if self.EXTRACT:
            train = self.process_carla_camera_dir(self.train_dir)
            query = self.process_carla_camera_dir(self.query_dir, is_train=False)
            gallery = self.process_carla_camera_dir(self.gallery_dir, is_train=False)
        else:
            train = self.process_dir(self.train_dir)
            query = self.process_dir(self.query_dir, is_train=False)
            gallery = self.process_dir(self.gallery_dir, is_train=False)
        super().__init__(train, query, gallery, **kwargs)

    def process_carla_camera_dir(self, dir_path, is_train=True):
        """Parse camera-capture images named like ``c0113_0000182_0000020.jpg``.

        The three numeric groups are camid, frame and index-in-frame.
        Returns a list of ``(img_path, pid, camid)`` tuples.
        """
        img_paths = glob.glob(osp.join(dir_path, '*.jpg'))
        pattern = re.compile(r'c([\d]+)[-_]([\d]+)[-_]([\d]+)')
        data = []
        for img_path in img_paths:
            camid, frame, idx_in_frame = map(int, pattern.search(img_path).groups())
            if frame == -1: continue  # junk images are just ignored
            assert 0 <= camid <= 192
            assert 0 <= frame <= 100000000
            if is_train:
                # training ids/camids are prefixed with the dataset name
                pid = self.dataset_name + "_" + str(frame)
                camid = self.dataset_name + "_" + str(camid)
            else:
                pid = str(frame) + '_' + str(idx_in_frame)
            data.append((img_path, pid, camid))
        return data

    def process_dir(self, dir_path, is_train=True):
        """Parse split images named like ``c0000_0980_0000006.jpg``
        (camid, vehicle id, junk info); returns ``(img_path, pid, camid)`` tuples.
        """
        img_paths = glob.glob(osp.join(dir_path, '*.jpg'))
        pattern = re.compile(r'c(\d\d\d\d)_(\d\d\d\d)')
        data = []
        for img_path in img_paths:
            camid, pid = map(int, pattern.search(img_path).groups())
            if pid == -1: continue  # junk images are just ignored
            assert 0 <= pid <= 1000
            assert 0 <= camid <= 2
            if is_train:
                pid = self.dataset_name + "_" + str(pid)
                camid = self.dataset_name + "_" + str(camid)
            data.append((img_path, pid, camid))
        return data
# import glob
# import os.path as osp
# import re
# from .bases import ImageDataset
# from ..datasets import DATASET_REGISTRY
# @DATASET_REGISTRY.register()
# class VeRi(ImageDataset):
# """VeRi.
# Reference:
# Liu et al. A Deep Learning based Approach for Progressive Vehicle Re-Identification. ECCV 2016.
# URL: `<https://vehiclereid.github.io/VeRi/>`_
# Dataset statistics:
# - identities: 775.
# - images: 37778 (train) + 1678 (query) + 11579 (gallery).
# """
# # dataset_dir = "carla_vehicles"
# dataset_dir = "carla_cameras"
# dataset_name = "veri"
# def __init__(self, root='datasets', **kwargs):
# self.dataset_dir = osp.join(root, self.dataset_dir)
# self.train_dir = osp.join(self.dataset_dir, 'train')
# self.query_dir = osp.join(self.dataset_dir, 'query')
# self.gallery_dir = osp.join(self.dataset_dir, 'traffic-camera-output-combine')
# # self.train_dir = osp.join(self.dataset_dir, 'train2')
# # self.query_dir = osp.join(self.dataset_dir, 'query2')
# # self.gallery_dir = osp.join(self.dataset_dir, 'test2')
# required_files = [
# self.dataset_dir,
# self.train_dir,
# self.query_dir,
# self.gallery_dir,
# ]
# self.check_before_run(required_files)
# train = self.process_carla_camera_dir(self.train_dir)
# query = self.process_carla_camera_dir(self.query_dir, is_train=False)
# gallery = self.process_carla_camera_dir(self.gallery_dir, is_train=False)
# super(VeRi, self).__init__(train, query, gallery, **kwargs)
# def process_carla_camera_dir(self, dir_path, is_train=True):
# img_paths = glob.glob(osp.join(dir_path, '*.jpg'))
# pattern = re.compile(r'c([\d]+)-(\d\d\d\d\d\d)-(\d\d\d)')
# # pattern = re.compile(r'([\d]+)_c(\d\d\d)')
# data = []
# for img_path in img_paths:
# camid, frame, idx_in_frame = map(int, pattern.search(img_path).groups())
# # pid, camid = map(int, pattern.search(img_path).groups())
# if frame == -1: continue # junk images are just ignored
# assert 0 <= camid <= 21
# # assert 0 <= pid <= 150
# assert 0 <= frame <= 10000000
# # camid -= 1 # index starts from 0
# if is_train:
# pid = self.dataset_name + "_" + str(frame)
# camid = self.dataset_name + "_" + str(camid)
# else:
# pid = str(frame) + '_' + str(idx_in_frame)
# data.append((img_path, pid, camid))
# return data
# def process_dir(self, dir_path, is_train=True):
# img_paths = glob.glob(osp.join(dir_path, '*.jpg'))
# pattern = re.compile(r'c([\d]+)-(\d\d\d\d\d\d)')
# # pattern = re.compile(r'([\d]+)_c(\d\d\d)')
# data = []
# for img_path in img_paths:
# camid, pid = map(int, pattern.search(img_path).groups())
# # pid, camid = map(int, pattern.search(img_path).groups())
# if pid == -1: continue # junk images are just ignored
# assert 0 <= camid <= 21
# # assert 0 <= pid <= 150
# assert 0 <= pid <= 10000000
# # camid -= 1 # index starts from 0
# if is_train:
# pid = self.dataset_name + "_" + str(pid)
# camid = self.dataset_name + "_" + str(camid)
# else:
# data.append((img_path, pid, camid))
# return data
| 37.971014
| 105
| 0.578626
| 983
| 7,860
| 4.380468
| 0.140387
| 0.094519
| 0.091036
| 0.061774
| 0.842778
| 0.828843
| 0.828843
| 0.813748
| 0.768927
| 0.687413
| 0
| 0.032478
| 0.290967
| 7,860
| 206
| 106
| 38.15534
| 0.740176
| 0.522519
| 0
| 0.275362
| 0
| 0
| 0.079085
| 0.054836
| 0
| 0
| 0
| 0
| 0.057971
| 1
| 0.043478
| false
| 0
| 0.072464
| 0
| 0.202899
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
16af454db4d26d0850d90c378082d1715eb46d8e
| 133
|
py
|
Python
|
web3/utils/empty.py
|
jsmeng324/web3.py
|
6f240dcf4f37f55f0ac09c90985674233f344c37
|
[
"MIT"
] | 326
|
2016-04-29T21:51:06.000Z
|
2022-03-31T03:20:54.000Z
|
web3/utils/empty.py
|
jsmeng324/web3.py
|
6f240dcf4f37f55f0ac09c90985674233f344c37
|
[
"MIT"
] | 283
|
2016-04-15T16:41:31.000Z
|
2017-11-28T16:41:36.000Z
|
web3/utils/empty.py
|
jsmeng324/web3.py
|
6f240dcf4f37f55f0ac09c90985674233f344c37
|
[
"MIT"
] | 146
|
2016-04-14T16:27:54.000Z
|
2021-10-03T13:31:07.000Z
|
class Empty(object):
    """Falsy placeholder object used to mean "no value provided"."""

    def __bool__(self):
        # Instances always evaluate as False (Python 3 truthiness hook).
        return False

    def __nonzero__(self):
        # Python 2 spelling of the same hook.
        return False


# Shared sentinel instance.
empty = Empty()
| 13.3
| 26
| 0.609023
| 15
| 133
| 4.866667
| 0.6
| 0.273973
| 0.410959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.300752
| 133
| 9
| 27
| 14.777778
| 0.784946
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.833333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
16b1562114e6791a339f59a35d497ab500cc2d03
| 86
|
py
|
Python
|
common/personal_info.py
|
oserikov/dream
|
109ba2df799025dcdada1fddbb7380e1c03100eb
|
[
"Apache-2.0"
] | 34
|
2021-08-18T14:51:44.000Z
|
2022-03-10T14:14:48.000Z
|
common/personal_info.py
|
oserikov/dream
|
109ba2df799025dcdada1fddbb7380e1c03100eb
|
[
"Apache-2.0"
] | 27
|
2021-08-30T14:42:09.000Z
|
2022-03-17T22:11:45.000Z
|
common/personal_info.py
|
oserikov/dream
|
109ba2df799025dcdada1fddbb7380e1c03100eb
|
[
"Apache-2.0"
] | 40
|
2021-08-22T07:13:32.000Z
|
2022-03-29T11:45:32.000Z
|
def skill_trigger_phrases():
    """Return the canned questions that trigger the personal-info skill."""
    phrases = [
        "What is your name?",
        "Where are you from?",
    ]
    return phrases
| 28.666667
| 56
| 0.686047
| 13
| 86
| 4.384615
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.174419
| 86
| 2
| 57
| 43
| 0.802817
| 0
| 0
| 0
| 0
| 0
| 0.430233
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
bc4321bcdb0370d8a4ad40fa0774b5b9023518ca
| 2,724
|
py
|
Python
|
pybit/account_asset.py
|
Cryptverse/pybit
|
e6452d2bb741dec9161a3a403ed99f2d646460eb
|
[
"MIT"
] | null | null | null |
pybit/account_asset.py
|
Cryptverse/pybit
|
e6452d2bb741dec9161a3a403ed99f2d646460eb
|
[
"MIT"
] | null | null | null |
pybit/account_asset.py
|
Cryptverse/pybit
|
e6452d2bb741dec9161a3a403ed99f2d646460eb
|
[
"MIT"
] | null | null | null |
from ._http_manager import _HTTPManager
class HTTP(_HTTPManager):
    """Bybit Account Asset HTTP endpoints (v1 ``/asset`` private routes)."""
    def create_internal_transfer(self, **kwargs):
        """
        Create an internal transfer between accounts.
        :param kwargs: See
            https://bybit-exchange.github.io/docs/account_asset/#t-createinternaltransfer.
        :returns: Request results as dictionary; returns None (after logging
            an error) when ``amount`` is not supplied as a string.
        """
        suffix = "/asset/v1/private/transfer"
        # The exchange requires `amount` as a string; reject other types early.
        if self._verify_string(kwargs, "amount"):
            return self._submit_request(
                method="POST",
                path=self.endpoint + suffix,
                query=kwargs,
                auth=True
            )
        else:
            self.logger.error("amount must be in string format")
    def create_subaccount_transfer(self, **kwargs):
        """
        Create a transfer between the main account and a sub-account.
        :param kwargs: See
            https://bybit-exchange.github.io/docs/account_asset/#t-createsubaccounttransfer.
        :returns: Request results as dictionary; returns None (after logging
            an error) when ``amount`` is not supplied as a string.
        """
        suffix = "/asset/v1/private/sub-member/transfer"
        # Same string requirement as create_internal_transfer.
        if self._verify_string(kwargs, "amount"):
            return self._submit_request(
                method="POST",
                path=self.endpoint + suffix,
                query=kwargs,
                auth=True
            )
        else:
            self.logger.error("amount must be in string format")
    def query_transfer_list(self, **kwargs):
        """
        Query the list of internal transfers.
        :param kwargs: See
            https://bybit-exchange.github.io/docs/account_asset/#t-querytransferlist.
        :returns: Request results as dictionary.
        """
        suffix = "/asset/v1/private/transfer/list"
        return self._submit_request(
            method="GET",
            path=self.endpoint + suffix,
            query=kwargs,
            auth=True
        )
    def query_subaccount_list(self):
        """
        Query the list of sub-account member ids.
        :returns: Request results as dictionary.
        """
        suffix = "/asset/v1/private/sub-member/member-ids"
        return self._submit_request(
            method="GET",
            path=self.endpoint + suffix,
            query={},
            auth=True
        )
    def query_subaccount_transfer_list(self, **kwargs):
        """
        Query the list of sub-account transfers.
        :param kwargs: See
            https://bybit-exchange.github.io/docs/account_asset/#t-querysubaccounttransferlist.
        :returns: Request results as dictionary.
        """
        suffix = "/asset/v1/private/sub-member/transfer/list"
        return self._submit_request(
            method="GET",
            path=self.endpoint + suffix,
            query=kwargs,
            auth=True
        )
| 28.082474
| 95
| 0.562775
| 265
| 2,724
| 5.660377
| 0.230189
| 0.056
| 0.088
| 0.076667
| 0.854
| 0.824667
| 0.824667
| 0.824667
| 0.824667
| 0.824667
| 0
| 0.002756
| 0.334068
| 2,724
| 96
| 96
| 28.375
| 0.824146
| 0.274963
| 0
| 0.625
| 0
| 0
| 0.151567
| 0.099715
| 0
| 0
| 0
| 0
| 0
| 1
| 0.104167
| false
| 0
| 0.020833
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
bc446035edcbc272376303ed3520e44cb5911c4b
| 1,089
|
py
|
Python
|
venv/lib/python3.6/site-packages/xero_python/assets/models/__init__.py
|
6enno/FarmXero
|
881b1e6648e927631b276e66a4c5287e4de2cbc1
|
[
"MIT"
] | null | null | null |
venv/lib/python3.6/site-packages/xero_python/assets/models/__init__.py
|
6enno/FarmXero
|
881b1e6648e927631b276e66a4c5287e4de2cbc1
|
[
"MIT"
] | null | null | null |
venv/lib/python3.6/site-packages/xero_python/assets/models/__init__.py
|
6enno/FarmXero
|
881b1e6648e927631b276e66a4c5287e4de2cbc1
|
[
"MIT"
] | null | null | null |
# coding: utf-8
# flake8: noqa
"""
Xero Assets API
This is the Xero Assets API # noqa: E501
Contact: api@xero.com
Generated by: https://openapi-generator.tech
"""
# import models into model package
from xero_python.assets.models.asset import Asset
from xero_python.assets.models.asset_status import AssetStatus
from xero_python.assets.models.asset_status_query_param import AssetStatusQueryParam
from xero_python.assets.models.asset_type import AssetType
from xero_python.assets.models.assets import Assets
from xero_python.assets.models.book_depreciation_detail import BookDepreciationDetail
from xero_python.assets.models.book_depreciation_setting import BookDepreciationSetting
from xero_python.assets.models.error import Error
from xero_python.assets.models.field_validation_errors_element import (
FieldValidationErrorsElement,
)
from xero_python.assets.models.pagination import Pagination
from xero_python.assets.models.resource_validation_errors_element import (
ResourceValidationErrorsElement,
)
from xero_python.assets.models.setting import Setting
| 35.129032
| 87
| 0.836547
| 142
| 1,089
| 6.225352
| 0.352113
| 0.108597
| 0.190045
| 0.271493
| 0.425339
| 0.248869
| 0.178733
| 0
| 0
| 0
| 0
| 0.005123
| 0.103765
| 1,089
| 30
| 88
| 36.3
| 0.900615
| 0.171717
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
bc494a9b82ebfc3e3b8468356a052ba3bc3862b1
| 2,735
|
py
|
Python
|
tests/test_view_permissions.py
|
dominicrodger/djohno
|
1b0891ee661eae4bd313b619fe2cc2b0965763c0
|
[
"BSD-2-Clause"
] | 3
|
2015-04-07T13:19:05.000Z
|
2016-02-22T08:42:47.000Z
|
tests/test_view_permissions.py
|
dominicrodger/djohno
|
1b0891ee661eae4bd313b619fe2cc2b0965763c0
|
[
"BSD-2-Clause"
] | 2
|
2018-01-30T10:41:55.000Z
|
2018-03-12T07:15:51.000Z
|
tests/test_view_permissions.py
|
dominicrodger/djohno
|
1b0891ee661eae4bd313b619fe2cc2b0965763c0
|
[
"BSD-2-Clause"
] | 2
|
2018-01-30T07:50:06.000Z
|
2021-12-01T00:05:04.000Z
|
from django.core.urlresolvers import reverse
from django.test import TestCase
class DjohnoViewPermissionTests(TestCase):
    """Every djohno view must answer 403 to an unauthenticated client."""

    def _assert_forbidden(self, url_name, method="get"):
        # Shared check: resolve the named URL, hit it anonymously, and
        # expect a 403 rendered with the standard 403 template.
        url = reverse(url_name)
        response = getattr(self.client, method)(url)
        self.assertEqual(response.status_code, 403)
        self.assertTemplateUsed(response, '403.html')

    def test_djohno_frame_403s_without_login(self):
        """The root (framing) djohno view requires authentication."""
        self._assert_forbidden('djohno_frame')

    def test_djohno_index_403s_without_login(self):
        """The framed djohno index view requires authentication."""
        self._assert_forbidden('djohno_index')

    def test_djohno_403_403s_without_login(self):
        """The framed djohno 403 test view requires authentication."""
        self._assert_forbidden('djohno_403')

    def test_djohno_404_403s_without_login(self):
        """The framed djohno 404 test view requires authentication."""
        self._assert_forbidden('djohno_404')

    def test_djohno_500_403s_without_login(self):
        """The framed djohno 500 test view requires authentication."""
        self._assert_forbidden('djohno_500')

    def test_djohno_idempotent_mail_403s_without_login(self):
        """GETting the framed djohno email test view requires authentication."""
        self._assert_forbidden('djohno_email')

    def test_djohno_mail_send_403s_without_login(self):
        """POSTing the framed djohno email test view requires authentication."""
        self._assert_forbidden('djohno_email', method='post')
| 36.466667
| 73
| 0.659598
| 321
| 2,735
| 5.461059
| 0.149533
| 0.027952
| 0.051911
| 0.079863
| 0.884769
| 0.884769
| 0.884769
| 0.884769
| 0.884769
| 0.86024
| 0
| 0.054572
| 0.256307
| 2,735
| 74
| 74
| 36.959459
| 0.807276
| 0.244973
| 0
| 0.578947
| 0
| 0
| 0.07255
| 0
| 0
| 0
| 0
| 0
| 0.368421
| 1
| 0.184211
| false
| 0
| 0.052632
| 0
| 0.263158
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
bccc79d946a744cd6193bac928d0b5518943534d
| 21
|
py
|
Python
|
resume/__init__.py
|
MayankFawkes/resume
|
c48007ccac1d593dcc0db5d8e84fdc5e8662b01b
|
[
"MIT"
] | null | null | null |
resume/__init__.py
|
MayankFawkes/resume
|
c48007ccac1d593dcc0db5d8e84fdc5e8662b01b
|
[
"MIT"
] | 7
|
2021-09-23T13:12:14.000Z
|
2022-03-31T13:16:49.000Z
|
resume/__init__.py
|
MayankFawkes/resume
|
c48007ccac1d593dcc0db5d8e84fdc5e8662b01b
|
[
"MIT"
] | null | null | null |
from .site import app
| 21
| 21
| 0.809524
| 4
| 21
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 21
| 1
| 21
| 21
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4c06509d88810f33e92bfd4def8548050ee1063b
| 117
|
py
|
Python
|
exception/argument_empty_exception.py
|
joaoteixeira88/pyguard
|
e9bdcb58034fd0db254121f71ac9bd76c7dec973
|
[
"MIT"
] | 2
|
2021-03-11T22:09:40.000Z
|
2022-01-09T16:16:43.000Z
|
exception/argument_empty_exception.py
|
joaoteixeira88/pyguard
|
e9bdcb58034fd0db254121f71ac9bd76c7dec973
|
[
"MIT"
] | 3
|
2021-03-11T08:20:43.000Z
|
2021-03-30T07:34:38.000Z
|
exception/argument_empty_exception.py
|
joaoteixeira88/python-guard
|
e9bdcb58034fd0db254121f71ac9bd76c7dec973
|
[
"MIT"
] | null | null | null |
from exception.base_exception import BaseGuardException
class ArgumentEmptyException(BaseGuardException):
pass
| 19.5
| 55
| 0.854701
| 10
| 117
| 9.9
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 117
| 5
| 56
| 23.4
| 0.951923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
4c0a06b65a806e2520356159f7063fbd3ecdec66
| 73
|
py
|
Python
|
tests/test_fingers/test_theme/test_finger.py
|
sonirico/wpoke
|
be193a41159dabf912d793eb5a6ebf2f0e9440bb
|
[
"MIT"
] | 4
|
2019-08-19T12:32:40.000Z
|
2019-10-25T20:57:29.000Z
|
tests/test_fingers/test_theme/test_finger.py
|
sonirico/wpoke
|
be193a41159dabf912d793eb5a6ebf2f0e9440bb
|
[
"MIT"
] | 15
|
2019-07-15T18:30:43.000Z
|
2020-09-25T08:10:05.000Z
|
tests/test_fingers/test_theme/test_finger.py
|
sonirico/wpoke
|
be193a41159dabf912d793eb5a6ebf2f0e9440bb
|
[
"MIT"
] | null | null | null |
import unittest
class ThemeFingerTestCase(unittest.TestCase):
pass
| 12.166667
| 45
| 0.794521
| 7
| 73
| 8.285714
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150685
| 73
| 5
| 46
| 14.6
| 0.935484
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
4c220e9e0f241793f14a4bbbd5723bca4437d305
| 13,417
|
py
|
Python
|
display4D/fieldsGUI/vectorfieldGUI.py
|
seVenVo1d/General-Relativity-Tensorial-Calculations
|
6c07823f74840352253c235af2e4dbe60044941a
|
[
"MIT"
] | 1
|
2021-06-16T07:29:30.000Z
|
2021-06-16T07:29:30.000Z
|
display4D/fieldsGUI/vectorfieldGUI.py
|
seVenVo1d/General-Relativity-Tensorial-Calculations
|
6c07823f74840352253c235af2e4dbe60044941a
|
[
"MIT"
] | null | null | null |
display4D/fieldsGUI/vectorfieldGUI.py
|
seVenVo1d/General-Relativity-Tensorial-Calculations
|
6c07823f74840352253c235af2e4dbe60044941a
|
[
"MIT"
] | 1
|
2021-12-02T15:11:06.000Z
|
2021-12-02T15:11:06.000Z
|
import PySimpleGUI as sg
from display4D.image_resizer_fields import *
from equations.FieldsEP.vectorfieldEP import *
from sympy import preview, sympify
def vectorfield_gui4d(event, metric_tensor, coord_sys):
"""
The main process of the GUI that produces the image of a vector field
for a given metric tensor and coordinate system in 4D
Args:
event: Events read from Vector Field GUI
metric_tensor [list]: The metric tensor, provided by the user
coord_sys [list]: The coordinate system given as a list (e.g., [t,x,y,z])
"""
if event == 'Type (1,0) Vector Field':
vector_field_10_layout = [
[sg.Image(r'display4D\input images\vectorfield_10_0.png'),
sg.InputText(default_text='0', font=('Tahoma', 11))],
[sg.Image(r'display4D\input images\vectorfield_10_1.png'),
sg.InputText(default_text='0', font=('Tahoma', 11))],
[sg.Image(r'display4D\input images\vectorfield_10_2.png'),
sg.InputText(default_text='0', font=('Tahoma', 11))],
[sg.Image(r'display4D\input images\vectorfield_10_3.png'),
sg.InputText(default_text='0', font=('Tahoma', 11))],
[sg.Frame(layout=[
[sg.Button('Calculate', button_color='purple'),
sg.Image(r'display4D\input images\cov_vectorfield_10.png'),
sg.Text('for', font=('Verdana', 11)),
sg.Image(r'display4D\input images\gamma.png'),
sg.InputCombo(coord_sys, default_value=coord_sys[0])
]], title='Covariant Derivative', font=('Verdana', 12))],
[sg.Frame(layout=[
[sg.Image(r'display4D\input images\LX0.png'),
sg.InputText(default_text='0', font=('Tahoma', 11))],
[sg.Image(r'display4D\input images\LX1.png'),
sg.InputText(default_text='0', font=('Tahoma', 11))],
[sg.Image(r'display4D\input images\LX2.png'),
sg.InputText(default_text='0', font=('Tahoma', 11))],
[sg.Image(r'display4D\input images\LX3.png'),
sg.InputText(default_text='0', font=('Tahoma', 11))],
[sg.Button('Calculate', button_color='purple'),
sg.Image(r'display4D\input images\LX_vectorfield_10.png')]], title='Lie Derivative', font=('Verdana', 12))],
[sg.Frame(layout=[
[sg.Button('Check', button_color='purple'),
sg.Image(r'display4D\input images\killingvector.png')]], title='Killing Field Condition', font=('Verdana', 12))]
]
windows_vector_field = sg.Window('Vector Field', vector_field_10_layout)
while True:
event, values = windows_vector_field.read()
if event == sg.WIN_CLOSED or event == 'Exit':
break
else:
vector_field = [sympify(values[i]) for i in range(1, 9, 2)] # Obtaining the vector field
# Calculation of the covariant derivative
if event == 'Calculate':
index_symbol = values[10]
cd_vector_field_eqn = cd_vectorfield10_ep(metric_tensor, coord_sys, vector_field, index_symbol)
preview(cd_vector_field_eqn, viewer='file', filename=r'display4D\output images\cd_vector_field_10.png', euler=True,
dvioptions=['-T', 'tight', '-z', '0', '--truecolor', '-D 1200', '-bg', 'Transparent'])
resize_cd_image4d('Type (1,0) Vector Field')
layout_cd_vector_field_result = [
[sg.Image(r'display4D\output images\cd_vector_field_10.png')],
]
window_cd_vector_field_result = sg.Window('Vector Field', layout_cd_vector_field_result)
while True:
event, values = window_cd_vector_field_result.read()
if event == sg.WIN_CLOSED:
break
# Calculation of the lie derivative
elif event == 'Calculate0':
X = [sympify(values[i]) for i in range(12, 20, 2)]
ld_vector_field_eqn = ld_vectorfield10_ep(metric_tensor, coord_sys, vector_field, X)
preview(ld_vector_field_eqn, viewer='file', filename=r'display4D\output images\ld_vector_field_10.png', euler=True,
dvioptions=['-T', 'tight', '-z', '0', '--truecolor', '-D 1200', '-bg', 'Transparent'])
resize_ld_image4d('Type (1,0) Vector Field')
layout_ld_vector_field_result = [
[sg.Image(r'display4D\output images\ld_vector_field_10.png')],
]
window_ld_vector_field_result = sg.Window('Vector Field', layout_ld_vector_field_result)
while True:
event, values = window_ld_vector_field_result.read()
if event == sg.WIN_CLOSED:
break
# Checking Killing Field Condition
elif event == 'Check':
killingfield_eqn = killingfield10_ep(metric_tensor, coord_sys, vector_field)
preview(killingfield_eqn, viewer='file', filename=r'display4D\output images\killing_field_10.png', euler=True,
dvioptions=['-T', 'tight', '-z', '0', '--truecolor', '-D 1200', '-bg', 'Transparent'])
resize_killing_image4d('Type (1,0) Vector Field')
layout_killing_field_result = [
[sg.Image(r'display4D\output images\killing_field_10.png')]
]
window_killing_field_result = sg.Window('Vector Field', layout_killing_field_result)
while True:
event, values = window_killing_field_result.read()
if event == sg.WIN_CLOSED:
break
else:
vector_field_01_layout = [
[sg.Image(r'display4D\input images\vectorfield_01_0.png'),
sg.InputText(default_text='0', font=('Tahoma', 11))],
[sg.Image(r'display4D\input images\vectorfield_01_1.png'),
sg.InputText(default_text='0', font=('Tahoma', 11))],
[sg.Image(r'display4D\input images\vectorfield_01_2.png'),
sg.InputText(default_text='0', font=('Tahoma', 11))],
[sg.Image(r'display4D\input images\vectorfield_01_3.png'),
sg.InputText(default_text='0', font=('Tahoma', 11))],
[sg.Frame(layout=[
[sg.Button('Calculate', button_color='purple'),
sg.Image(r'display4D\input images\cov_vectorfield_01.png'),
sg.Text('for', font=('Verdana', 11)),
sg.Image(r'display4D\input images\gamma.png'),
sg.InputCombo(coord_sys, default_value=coord_sys[0])
]], title='Covariant Derivative', font=('Verdana', 12))],
[sg.Frame(layout=[
[sg.Image(r'display4D\input images\LX0.png'),
sg.InputText(default_text='0', font=('Tahoma', 11))],
[sg.Image(r'display4D\input images\LX1.png'),
sg.InputText(default_text='0', font=('Tahoma', 11))],
[sg.Image(r'display4D\input images\LX2.png'),
sg.InputText(default_text='0', font=('Tahoma', 11))],
[sg.Image(r'display4D\input images\LX3.png'),
sg.InputText(default_text='0', font=('Tahoma', 11))],
[sg.Button('Calculate', button_color='purple'),
sg.Image(r'display4D\input images\LX_vectorfield_01.png')]], title='Lie Derivative', font=('Verdana', 12))],
[sg.Frame(layout=[
[sg.Button('Check', button_color='purple'),
sg.Image(r'display4D\input images\killingvector.png')]], title='Killing Field Condition', font=('Verdana', 12))]
]
windows_vector_field = sg.Window('Vector Field', vector_field_01_layout)
while True:
event, values = windows_vector_field.read()
if event == sg.WIN_CLOSED or event == 'Exit':
break
else:
vector_field = [sympify(values[i]) for i in range(1, 9, 2)] # Obtaining the vector field
# Calculation of the covariant derivative
if event == 'Calculate':
index_symbol = values[10]
cd_vector_field_eqn = cd_vectorfield01_ep(metric_tensor, coord_sys, vector_field, index_symbol)
preview(cd_vector_field_eqn, viewer='file', filename=r'display4D\output images\cd_vector_field_01.png', euler=True,
dvioptions=['-T', 'tight', '-z', '0', '--truecolor', '-D 1200', '-bg', 'Transparent'])
resize_cd_image4d('Type (0,1) Vector Field')
layout_cd_vector_field_result = [
[sg.Image(r'display4D\output images\cd_vector_field_01.png')],
]
window_cd_vector_field_result = sg.Window('Vector Field', layout_cd_vector_field_result)
while True:
event, values = window_cd_vector_field_result.read()
if event == sg.WIN_CLOSED:
break
# Calculation of the lie derivative
elif event == 'Calculate0':
X = [sympify(values[i]) for i in range(12, 20, 2)]
ld_vector_field_eqn = ld_vectorfield01_ep(metric_tensor, coord_sys, vector_field, X)
preview(ld_vector_field_eqn, viewer='file', filename=r'display4D\output images\ld_vector_field_01.png', euler=True,
dvioptions=['-T', 'tight', '-z', '0', '--truecolor', '-D 1200', '-bg', 'Transparent'])
resize_ld_image4d('Type (0,1) Vector Field')
layout_ld_vector_field_result = [
[sg.Image(r'display4D\output images\ld_vector_field_01.png')],
]
window_ld_vector_field_result = sg.Window('Vector Field', layout_ld_vector_field_result)
while True:
event, values = window_ld_vector_field_result.read()
if event == sg.WIN_CLOSED:
break
# Checking Killing Field Condition
elif event == 'Check':
killingfield_eqn = killingfield01_ep(metric_tensor, coord_sys, vector_field)
preview(killingfield_eqn, viewer='file', filename=r'display4D\output images\killing_field_01.png', euler=True,
dvioptions=['-T', 'tight', '-z', '0', '--truecolor', '-D 1200', '-bg', 'Transparent'])
resize_killing_image4d('Type (0,1) Vector Field')
layout_killing_field_result = [
[sg.Image(r'display4D\output images\killing_field_01.png')]
]
window_killing_field_result = sg.Window('Vector Field', layout_killing_field_result)
while True:
event, values = window_killing_field_result.read()
if event == sg.WIN_CLOSED:
break
| 68.454082
| 157
| 0.474324
| 1,294
| 13,417
| 4.700927
| 0.109737
| 0.121157
| 0.039454
| 0.08384
| 0.923229
| 0.922242
| 0.917968
| 0.915995
| 0.891337
| 0.890021
| 0
| 0.033492
| 0.41917
| 13,417
| 195
| 158
| 68.805128
| 0.747081
| 0.043825
| 0
| 0.672727
| 0
| 0
| 0.188409
| 0.056155
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006061
| false
| 0
| 0.024242
| 0
| 0.030303
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
4c49d15cf27d1b0891fc743e050152e67f6eeee6
| 46
|
py
|
Python
|
src/data_preparation/test_data.py
|
tobiasraabe/locus-of-control
|
fe249bf8c85d163527d82e0c018e86bd37eff345
|
[
"BSD-3-Clause"
] | null | null | null |
src/data_preparation/test_data.py
|
tobiasraabe/locus-of-control
|
fe249bf8c85d163527d82e0c018e86bd37eff345
|
[
"BSD-3-Clause"
] | 56
|
2019-01-27T14:39:33.000Z
|
2020-06-22T20:42:32.000Z
|
src/data_preparation/test_data.py
|
tobiasraabe/locus-of-control
|
fe249bf8c85d163527d82e0c018e86bd37eff345
|
[
"BSD-3-Clause"
] | 1
|
2018-03-01T09:19:00.000Z
|
2018-03-01T09:19:00.000Z
|
"""Test data files."""
def test():
pass
| 7.666667
| 22
| 0.521739
| 6
| 46
| 4
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.26087
| 46
| 5
| 23
| 9.2
| 0.705882
| 0.347826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
4c5004197ee944efe97974c7029e31ef92e32fc0
| 81
|
py
|
Python
|
sesameeg/io/__init__.py
|
gvluria/sesameeg
|
db32673d634e5db2ac9912779a7f4adfeae02bf7
|
[
"BSD-3-Clause"
] | 3
|
2019-11-27T11:59:16.000Z
|
2021-11-15T13:43:42.000Z
|
sesameeg/io/__init__.py
|
gvluria/sesameeg
|
db32673d634e5db2ac9912779a7f4adfeae02bf7
|
[
"BSD-3-Clause"
] | null | null | null |
sesameeg/io/__init__.py
|
gvluria/sesameeg
|
db32673d634e5db2ac9912779a7f4adfeae02bf7
|
[
"BSD-3-Clause"
] | 2
|
2019-11-15T09:13:43.000Z
|
2020-05-26T14:27:54.000Z
|
from .io import write_h5, write_pkl, read_h5, _export_to_stc, _export_to_vol_stc
| 40.5
| 80
| 0.839506
| 16
| 81
| 3.625
| 0.6875
| 0.275862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027397
| 0.098765
| 81
| 1
| 81
| 81
| 0.767123
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4c795dc2f9a9537f6b61dfbed98a95862ec1f639
| 200
|
py
|
Python
|
stella_nav_listener/src/stella_nav_listener/trigger_listener.py
|
ymd-stella/stella_nav
|
b92f2dcaf52d0bb03c9ea4228124dc3444af2681
|
[
"MIT"
] | null | null | null |
stella_nav_listener/src/stella_nav_listener/trigger_listener.py
|
ymd-stella/stella_nav
|
b92f2dcaf52d0bb03c9ea4228124dc3444af2681
|
[
"MIT"
] | null | null | null |
stella_nav_listener/src/stella_nav_listener/trigger_listener.py
|
ymd-stella/stella_nav
|
b92f2dcaf52d0bb03c9ea4228124dc3444af2681
|
[
"MIT"
] | 1
|
2022-01-14T07:55:22.000Z
|
2022-01-14T07:55:22.000Z
|
class TriggerListener(object):
def __init__(self, state_machine, **kwargs):
self._state_machine = state_machine
def __call__(self, msg):
self._state_machine.trigger(msg.data)
| 28.571429
| 48
| 0.705
| 24
| 200
| 5.291667
| 0.541667
| 0.377953
| 0.377953
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.19
| 200
| 6
| 49
| 33.333333
| 0.783951
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
d5bea77e5e22e3f1712b123bcab4ecdfe66776a1
| 3,322
|
py
|
Python
|
djangocms_bootstrap4/contrib/bootstrap4_collapse/migrations/0001_initial.py
|
jpVm5jYYRE1VIKL/djangocms-bootstrap4
|
d36a369af54850eddaa0299e5ae33ee5e78cf2b1
|
[
"BSD-3-Clause"
] | 59
|
2017-09-28T17:13:38.000Z
|
2020-09-22T02:55:47.000Z
|
djangocms_bootstrap4/contrib/bootstrap4_collapse/migrations/0001_initial.py
|
jpVm5jYYRE1VIKL/djangocms-bootstrap4
|
d36a369af54850eddaa0299e5ae33ee5e78cf2b1
|
[
"BSD-3-Clause"
] | 102
|
2017-10-20T09:37:52.000Z
|
2020-09-23T06:37:47.000Z
|
djangocms_bootstrap4/contrib/bootstrap4_collapse/migrations/0001_initial.py
|
jpVm5jYYRE1VIKL/djangocms-bootstrap4
|
d36a369af54850eddaa0299e5ae33ee5e78cf2b1
|
[
"BSD-3-Clause"
] | 40
|
2017-12-29T20:05:40.000Z
|
2020-09-21T08:33:48.000Z
|
# Generated by Django 1.9.13 on 2017-10-15 21:03
import django.db.models.deletion
from django.db import migrations, models
import djangocms_bootstrap4.fields
from djangocms_bootstrap4.constants import TAG_CHOICES
class Migration(migrations.Migration):
initial = True
dependencies = [
('cms', '0016_auto_20160608_1535'),
]
operations = [
migrations.CreateModel(
name='Bootstrap4Collapse',
fields=[
('cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, related_name='bootstrap4_collapse_bootstrap4collapse', serialize=False, to='cms.CMSPlugin')),
('siblings', models.CharField(default='.card', help_text='Element to be used to create accordions.', max_length=255, verbose_name='Siblings')),
('tag_type', djangocms_bootstrap4.fields.TagTypeField(choices=TAG_CHOICES, default=TAG_CHOICES[0][0], help_text='Select the HTML tag to be used.', max_length=255, verbose_name='Tag type')),
('attributes', djangocms_bootstrap4.fields.AttributesField(blank=True, default=dict, verbose_name='Attributes')),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
migrations.CreateModel(
name='Bootstrap4CollapseContainer',
fields=[
('cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, related_name='bootstrap4_collapse_bootstrap4collapsecontainer', serialize=False, to='cms.CMSPlugin')),
('identifier', models.SlugField(help_text='Identifier to connect trigger with container.', max_length=255, verbose_name='Unique identifier')),
('tag_type', djangocms_bootstrap4.fields.TagTypeField(choices=TAG_CHOICES, default=TAG_CHOICES[0][0], help_text='Select the HTML tag to be used.', max_length=255, verbose_name='Tag type')),
('attributes', djangocms_bootstrap4.fields.AttributesField(blank=True, default=dict, verbose_name='Attributes')),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
migrations.CreateModel(
name='Bootstrap4CollapseTrigger',
fields=[
('cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, related_name='bootstrap4_collapse_bootstrap4collapsetrigger', serialize=False, to='cms.CMSPlugin')),
('identifier', models.SlugField(help_text='Identifier to connect trigger with container.', max_length=255, verbose_name='Unique identifier')),
('tag_type', djangocms_bootstrap4.fields.TagTypeField(choices=TAG_CHOICES, default=TAG_CHOICES[0][0], help_text='Select the HTML tag to be used.', max_length=255, verbose_name='Tag type')),
('attributes', djangocms_bootstrap4.fields.AttributesField(blank=True, default=dict, verbose_name='Attributes')),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
| 57.275862
| 257
| 0.664058
| 352
| 3,322
| 6.079545
| 0.255682
| 0.046262
| 0.081776
| 0.053271
| 0.76028
| 0.736449
| 0.736449
| 0.736449
| 0.736449
| 0.736449
| 0
| 0.027948
| 0.213727
| 3,322
| 57
| 258
| 58.280702
| 0.791348
| 0.013847
| 0
| 0.58
| 1
| 0
| 0.236103
| 0.062615
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.08
| 0
| 0.16
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d5d0bb993da45a5cd4eaeadf0fa44c0be631f5ef
| 162
|
py
|
Python
|
scoda/blueprints/scoda/scoda.py
|
opendatadurban/scoda
|
d02be6ff792deffd2615a48fd50bfe1ec1da5065
|
[
"Apache-2.0"
] | null | null | null |
scoda/blueprints/scoda/scoda.py
|
opendatadurban/scoda
|
d02be6ff792deffd2615a48fd50bfe1ec1da5065
|
[
"Apache-2.0"
] | 24
|
2020-09-21T15:18:52.000Z
|
2022-03-03T09:35:32.000Z
|
scoda/blueprints/scoda/scoda.py
|
opendatadurban/scoda
|
d02be6ff792deffd2615a48fd50bfe1ec1da5065
|
[
"Apache-2.0"
] | 1
|
2020-09-17T14:51:58.000Z
|
2020-09-17T14:51:58.000Z
|
from flask import render_template, Blueprint
import json
SCODA = Blueprint('SCODA',__name__)
@SCODA.route('/')
def index():
return render_template("scoda.html")
| 23.142857
| 44
| 0.765432
| 21
| 162
| 5.619048
| 0.666667
| 0.237288
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098765
| 162
| 7
| 45
| 23.142857
| 0.808219
| 0
| 0
| 0
| 0
| 0
| 0.09816
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0.166667
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
d5db443ad3b15b8f3e1b2ac16bcb0d89995d90af
| 71
|
py
|
Python
|
practica3/software/FUENTES/algorithms/evolutionary/__init__.py
|
antoniomdk/practicas_mh_ugr
|
e933224f8a94cf8f4c8d04a1a10b2d2f66bc8c3f
|
[
"MIT"
] | null | null | null |
practica3/software/FUENTES/algorithms/evolutionary/__init__.py
|
antoniomdk/practicas_mh_ugr
|
e933224f8a94cf8f4c8d04a1a10b2d2f66bc8c3f
|
[
"MIT"
] | null | null | null |
practica3/software/FUENTES/algorithms/evolutionary/__init__.py
|
antoniomdk/practicas_mh_ugr
|
e933224f8a94cf8f4c8d04a1a10b2d2f66bc8c3f
|
[
"MIT"
] | null | null | null |
from .genetic_algorithm import EvolutionaryAlgorithm, MemeticAlgorithm
| 35.5
| 70
| 0.901408
| 6
| 71
| 10.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070423
| 71
| 1
| 71
| 71
| 0.954545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d5e1115d7ad9e3a6bb9df61cacfd2469408e68f2
| 18
|
py
|
Python
|
test/test_tracer.py
|
glongh/visual-run
|
252e160cec8906319baf0394fc3feb133df3947a
|
[
"MIT"
] | 4
|
2017-12-06T20:01:55.000Z
|
2020-02-22T15:07:58.000Z
|
tests/test_cohst.py
|
mitmedialab/CoMET
|
bb5d547c3a496c93ebcf77def5c46775e57adc80
|
[
"MIT"
] | 2
|
2017-12-08T03:15:38.000Z
|
2019-06-14T20:18:01.000Z
|
tests/test_cohst.py
|
mitmedialab/CoMET
|
bb5d547c3a496c93ebcf77def5c46775e57adc80
|
[
"MIT"
] | 2
|
2017-12-08T01:44:27.000Z
|
2019-05-24T13:08:42.000Z
|
# TODO: write test
| 18
| 18
| 0.722222
| 3
| 18
| 4.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 18
| 1
| 18
| 18
| 0.866667
| 0.888889
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 1
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
914a72d35c6fce7c8cae59e89eb8c99376a49080
| 46,092
|
py
|
Python
|
google/cloud/networkconnectivity_v1/services/hub_service/async_client.py
|
googleapis/python-network-connectivity
|
b24e098625273565d2d7959d8409c405f5ddd912
|
[
"Apache-2.0"
] | 2
|
2021-01-28T02:50:10.000Z
|
2021-03-05T22:34:53.000Z
|
google/cloud/networkconnectivity_v1/services/hub_service/async_client.py
|
googleapis/python-network-connectivity
|
b24e098625273565d2d7959d8409c405f5ddd912
|
[
"Apache-2.0"
] | 42
|
2021-01-26T13:47:10.000Z
|
2022-03-07T15:59:24.000Z
|
google/cloud/networkconnectivity_v1/services/hub_service/async_client.py
|
googleapis/python-network-connectivity
|
b24e098625273565d2d7959d8409c405f5ddd912
|
[
"Apache-2.0"
] | 2
|
2021-01-25T16:33:50.000Z
|
2022-01-29T08:11:31.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Sequence, Tuple, Type, Union
import pkg_resources
import google.api_core.client_options as ClientOptions # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.api_core import operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.cloud.networkconnectivity_v1.services.hub_service import pagers
from google.cloud.networkconnectivity_v1.types import common
from google.cloud.networkconnectivity_v1.types import hub
from google.cloud.networkconnectivity_v1.types import hub as gcn_hub
from google.protobuf import empty_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from .transports.base import HubServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import HubServiceGrpcAsyncIOTransport
from .client import HubServiceClient
class HubServiceAsyncClient:
"""Network Connectivity Center is a hub-and-spoke abstraction
for network connectivity management in Google Cloud. It reduces
operational complexity through a simple, centralized
connectivity management model.
"""
_client: HubServiceClient
DEFAULT_ENDPOINT = HubServiceClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = HubServiceClient.DEFAULT_MTLS_ENDPOINT
hub_path = staticmethod(HubServiceClient.hub_path)
parse_hub_path = staticmethod(HubServiceClient.parse_hub_path)
instance_path = staticmethod(HubServiceClient.instance_path)
parse_instance_path = staticmethod(HubServiceClient.parse_instance_path)
interconnect_attachment_path = staticmethod(
HubServiceClient.interconnect_attachment_path
)
parse_interconnect_attachment_path = staticmethod(
HubServiceClient.parse_interconnect_attachment_path
)
network_path = staticmethod(HubServiceClient.network_path)
parse_network_path = staticmethod(HubServiceClient.parse_network_path)
spoke_path = staticmethod(HubServiceClient.spoke_path)
parse_spoke_path = staticmethod(HubServiceClient.parse_spoke_path)
vpn_tunnel_path = staticmethod(HubServiceClient.vpn_tunnel_path)
parse_vpn_tunnel_path = staticmethod(HubServiceClient.parse_vpn_tunnel_path)
common_billing_account_path = staticmethod(
HubServiceClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
HubServiceClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(HubServiceClient.common_folder_path)
parse_common_folder_path = staticmethod(HubServiceClient.parse_common_folder_path)
common_organization_path = staticmethod(HubServiceClient.common_organization_path)
parse_common_organization_path = staticmethod(
HubServiceClient.parse_common_organization_path
)
common_project_path = staticmethod(HubServiceClient.common_project_path)
parse_common_project_path = staticmethod(HubServiceClient.parse_common_project_path)
common_location_path = staticmethod(HubServiceClient.common_location_path)
parse_common_location_path = staticmethod(
HubServiceClient.parse_common_location_path
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
HubServiceAsyncClient: The constructed client.
"""
return HubServiceClient.from_service_account_info.__func__(HubServiceAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
HubServiceAsyncClient: The constructed client.
"""
return HubServiceClient.from_service_account_file.__func__(HubServiceAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
@property
def transport(self) -> HubServiceTransport:
"""Returns the transport used by the client instance.
Returns:
HubServiceTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(
type(HubServiceClient).get_transport_class, type(HubServiceClient)
)
def __init__(
self,
*,
credentials: ga_credentials.Credentials = None,
transport: Union[str, HubServiceTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the hub service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.HubServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = HubServiceClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
async def list_hubs(
self,
request: hub.ListHubsRequest = None,
*,
parent: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListHubsAsyncPager:
r"""Lists hubs in a given project.
Args:
request (:class:`google.cloud.networkconnectivity_v1.types.ListHubsRequest`):
The request object. Request for
[HubService.ListHubs][google.cloud.networkconnectivity.v1.HubService.ListHubs]
method.
parent (:class:`str`):
Required. The parent resource's name.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.networkconnectivity_v1.services.hub_service.pagers.ListHubsAsyncPager:
Response for
[HubService.ListHubs][google.cloud.networkconnectivity.v1.HubService.ListHubs]
method.
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = hub.ListHubsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_hubs,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListHubsAsyncPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
async def get_hub(
self,
request: hub.GetHubRequest = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> hub.Hub:
r"""Gets details about the specified hub.
Args:
request (:class:`google.cloud.networkconnectivity_v1.types.GetHubRequest`):
The request object. Request for
[HubService.GetHub][google.cloud.networkconnectivity.v1.HubService.GetHub]
method.
name (:class:`str`):
Required. The name of the hub
resource to get.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.networkconnectivity_v1.types.Hub:
A hub is essentially a collection of
spokes. A single hub can contain spokes
from multiple regions. However, all of a
hub's spokes must be associated with
resources that reside in the same VPC
network.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = hub.GetHubRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_hub,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def create_hub(
self,
request: gcn_hub.CreateHubRequest = None,
*,
parent: str = None,
hub: gcn_hub.Hub = None,
hub_id: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Creates a new hub in the specified project.
Args:
request (:class:`google.cloud.networkconnectivity_v1.types.CreateHubRequest`):
The request object. Request for
[HubService.CreateHub][google.cloud.networkconnectivity.v1.HubService.CreateHub]
method.
parent (:class:`str`):
Required. The parent resource.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
hub (:class:`google.cloud.networkconnectivity_v1.types.Hub`):
Required. The initial values for a
new hub.
This corresponds to the ``hub`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
hub_id (:class:`str`):
Optional. A unique identifier for the
hub.
This corresponds to the ``hub_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.networkconnectivity_v1.types.Hub` A hub is essentially a collection of spokes. A single hub can contain spokes
from multiple regions. However, all of a hub's spokes
must be associated with resources that reside in the
same VPC network.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, hub, hub_id])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = gcn_hub.CreateHubRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if hub is not None:
request.hub = hub
if hub_id is not None:
request.hub_id = hub_id
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_hub,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
gcn_hub.Hub,
metadata_type=common.OperationMetadata,
)
# Done; return the response.
return response
async def update_hub(
self,
request: gcn_hub.UpdateHubRequest = None,
*,
hub: gcn_hub.Hub = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Updates the description and/or labels of the
specified hub.
Args:
request (:class:`google.cloud.networkconnectivity_v1.types.UpdateHubRequest`):
The request object. Request for
[HubService.UpdateHub][google.cloud.networkconnectivity.v1.HubService.UpdateHub]
method.
hub (:class:`google.cloud.networkconnectivity_v1.types.Hub`):
Required. The state that the hub
should be in after the update.
This corresponds to the ``hub`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
Optional. In the case of an update to an existing hub,
field mask is used to specify the fields to be
overwritten. The fields specified in the update_mask are
relative to the resource, not the full request. A field
is overwritten if it is in the mask. If the user does
not provide a mask, then all fields are overwritten.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.networkconnectivity_v1.types.Hub` A hub is essentially a collection of spokes. A single hub can contain spokes
from multiple regions. However, all of a hub's spokes
must be associated with resources that reside in the
same VPC network.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([hub, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = gcn_hub.UpdateHubRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if hub is not None:
request.hub = hub
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_hub,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("hub.name", request.hub.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
gcn_hub.Hub,
metadata_type=common.OperationMetadata,
)
# Done; return the response.
return response
async def delete_hub(
self,
request: hub.DeleteHubRequest = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Deletes the specified hub.
Args:
request (:class:`google.cloud.networkconnectivity_v1.types.DeleteHubRequest`):
The request object. The request for
[HubService.DeleteHub][google.cloud.networkconnectivity.v1.HubService.DeleteHub].
name (:class:`str`):
Required. The name of the hub to
delete.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
The JSON representation for Empty is empty JSON
object {}.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = hub.DeleteHubRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_hub,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
empty_pb2.Empty,
metadata_type=common.OperationMetadata,
)
# Done; return the response.
return response
async def list_spokes(
self,
request: hub.ListSpokesRequest = None,
*,
parent: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListSpokesAsyncPager:
r"""Lists the spokes in the specified project and
location.
Args:
request (:class:`google.cloud.networkconnectivity_v1.types.ListSpokesRequest`):
The request object. The request for
[HubService.ListSpokes][google.cloud.networkconnectivity.v1.HubService.ListSpokes].
parent (:class:`str`):
Required. The parent resource.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.networkconnectivity_v1.services.hub_service.pagers.ListSpokesAsyncPager:
The response for
[HubService.ListSpokes][google.cloud.networkconnectivity.v1.HubService.ListSpokes].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = hub.ListSpokesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_spokes,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListSpokesAsyncPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
async def get_spoke(
self,
request: hub.GetSpokeRequest = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> hub.Spoke:
r"""Gets details about the specified spoke.
Args:
request (:class:`google.cloud.networkconnectivity_v1.types.GetSpokeRequest`):
The request object. The request for
[HubService.GetSpoke][google.cloud.networkconnectivity.v1.HubService.GetSpoke].
name (:class:`str`):
Required. The name of the spoke
resource.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.networkconnectivity_v1.types.Spoke:
A spoke represents a connection between your Google Cloud network resources
and a non-Google-Cloud network.
When you create a spoke, you associate it with a hub.
You must also identify a value for exactly one of the
following fields:
- linked_vpn_tunnels
- linked_interconnect_attachments
- linked_router_appliance_instances
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = hub.GetSpokeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_spoke,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def create_spoke(
self,
request: hub.CreateSpokeRequest = None,
*,
parent: str = None,
spoke: hub.Spoke = None,
spoke_id: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Creates a spoke in the specified project and
location.
Args:
request (:class:`google.cloud.networkconnectivity_v1.types.CreateSpokeRequest`):
The request object. The request for
[HubService.CreateSpoke][google.cloud.networkconnectivity.v1.HubService.CreateSpoke].
parent (:class:`str`):
Required. The parent resource.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
spoke (:class:`google.cloud.networkconnectivity_v1.types.Spoke`):
Required. The initial values for a
new spoke.
This corresponds to the ``spoke`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
spoke_id (:class:`str`):
Optional. Unique id for the spoke to
create.
This corresponds to the ``spoke_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.networkconnectivity_v1.types.Spoke` A spoke represents a connection between your Google Cloud network resources
and a non-Google-Cloud network.
When you create a spoke, you associate it with a hub.
You must also identify a value for exactly one of the
following fields:
- linked_vpn_tunnels
- linked_interconnect_attachments
- linked_router_appliance_instances
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, spoke, spoke_id])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = hub.CreateSpokeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if spoke is not None:
request.spoke = spoke
if spoke_id is not None:
request.spoke_id = spoke_id
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_spoke,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
hub.Spoke,
metadata_type=common.OperationMetadata,
)
# Done; return the response.
return response
async def update_spoke(
self,
request: hub.UpdateSpokeRequest = None,
*,
spoke: hub.Spoke = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Updates the parameters of the specified spoke.
Args:
request (:class:`google.cloud.networkconnectivity_v1.types.UpdateSpokeRequest`):
The request object. Request for
[HubService.UpdateSpoke][google.cloud.networkconnectivity.v1.HubService.UpdateSpoke]
method.
spoke (:class:`google.cloud.networkconnectivity_v1.types.Spoke`):
Required. The state that the spoke
should be in after the update.
This corresponds to the ``spoke`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
Optional. In the case of an update to an existing spoke,
field mask is used to specify the fields to be
overwritten. The fields specified in the update_mask are
relative to the resource, not the full request. A field
is overwritten if it is in the mask. If the user does
not provide a mask, then all fields are overwritten.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.networkconnectivity_v1.types.Spoke` A spoke represents a connection between your Google Cloud network resources
and a non-Google-Cloud network.
When you create a spoke, you associate it with a hub.
You must also identify a value for exactly one of the
following fields:
- linked_vpn_tunnels
- linked_interconnect_attachments
- linked_router_appliance_instances
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([spoke, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = hub.UpdateSpokeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if spoke is not None:
request.spoke = spoke
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_spoke,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("spoke.name", request.spoke.name),)
),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
hub.Spoke,
metadata_type=common.OperationMetadata,
)
# Done; return the response.
return response
async def delete_spoke(
self,
request: hub.DeleteSpokeRequest = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Deletes the specified spoke.
Args:
request (:class:`google.cloud.networkconnectivity_v1.types.DeleteSpokeRequest`):
The request object. The request for
[HubService.DeleteSpoke][google.cloud.networkconnectivity.v1.HubService.DeleteSpoke].
name (:class:`str`):
Required. The name of the spoke to
delete.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
The JSON representation for Empty is empty JSON
object {}.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = hub.DeleteSpokeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_spoke,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
empty_pb2.Empty,
metadata_type=common.OperationMetadata,
)
# Done; return the response.
return response
    async def __aenter__(self):
        """Enter the async context manager, returning the client itself."""
        return self
    async def __aexit__(self, exc_type, exc, tb):
        """Exit the async context manager, closing the underlying transport."""
        await self.transport.close()
try:
    # Embed the installed package version in client telemetry headers.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            "google-cloud-networkconnectivity",
        ).version,
    )
except pkg_resources.DistributionNotFound:
    # Package metadata unavailable (e.g. running from source); omit the version.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("HubServiceAsyncClient",)
| 41.375224
| 190
| 0.620259
| 5,176
| 46,092
| 5.409776
| 0.081144
| 0.029642
| 0.041784
| 0.043427
| 0.812185
| 0.762294
| 0.746866
| 0.725474
| 0.714153
| 0.695225
| 0
| 0.003562
| 0.311681
| 46,092
| 1,113
| 191
| 41.412399
| 0.879027
| 0.183676
| 0
| 0.547461
| 0
| 0
| 0.05322
| 0.002704
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00883
| false
| 0
| 0.050773
| 0
| 0.152318
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e66d0d5cb10c4c64550fa759fcd53ee4db7921e7
| 129
|
py
|
Python
|
clearfile.py
|
redcapattitide/Python
|
f0493a9218caa15ae8aec735c3823bfe417b9ede
|
[
"MIT"
] | null | null | null |
clearfile.py
|
redcapattitide/Python
|
f0493a9218caa15ae8aec735c3823bfe417b9ede
|
[
"MIT"
] | null | null | null |
clearfile.py
|
redcapattitide/Python
|
f0493a9218caa15ae8aec735c3823bfe417b9ede
|
[
"MIT"
] | null | null | null |
# Truncate the bot's move-log file: opening in 'w' mode empties any existing
# content. Use a context manager so the handle is closed even if opening the
# file raises partway through.
with open('C:/Users/Tom/Documents/Python/Bot/Receptura/Recept/Recept/bin/Release/moves_info.txt', 'w', encoding='UTF-8'):
    pass
| 32.25
| 117
| 0.736434
| 22
| 129
| 4.272727
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008065
| 0.03876
| 129
| 3
| 118
| 43
| 0.75
| 0
| 0
| 0
| 0
| 0.5
| 0.703125
| 0.65625
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e6744a6c558c5d5b23a6948985e57e3636af5c58
| 119
|
py
|
Python
|
venv/lib/python3.9/site-packages/pytzdata/exceptions.py
|
qarik-hanrattyjen/apache-airflow-backport-providers-google-2021.3.3
|
630dcef73e6a258b6e9a52f934e2dd912ce741f8
|
[
"Apache-2.0"
] | 15
|
2016-09-29T17:06:21.000Z
|
2021-03-12T22:22:32.000Z
|
venv/lib/python3.9/site-packages/pytzdata/exceptions.py
|
qarik-hanrattyjen/apache-airflow-backport-providers-google-2021.3.3
|
630dcef73e6a258b6e9a52f934e2dd912ce741f8
|
[
"Apache-2.0"
] | 10
|
2017-03-13T21:24:19.000Z
|
2022-02-02T17:12:44.000Z
|
venv/lib/python3.9/site-packages/pytzdata/exceptions.py
|
qarik-hanrattyjen/apache-airflow-backport-providers-google-2021.3.3
|
630dcef73e6a258b6e9a52f934e2dd912ce741f8
|
[
"Apache-2.0"
] | 11
|
2017-03-08T15:22:41.000Z
|
2021-08-23T10:44:52.000Z
|
# -*- coding: utf-8 -*-
from ._compat import FileNotFoundError
class TimezoneNotFound(FileNotFoundError):
    """Raised when the data file for a requested timezone cannot be found."""
    pass
| 13.222222
| 42
| 0.722689
| 11
| 119
| 7.727273
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010101
| 0.168067
| 119
| 8
| 43
| 14.875
| 0.848485
| 0.176471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
e6823160623e1428e2618d3d7e94b1ace58f417f
| 1,625
|
py
|
Python
|
repstruct/analysis/pca.py
|
oscarlorentzon/repstruct
|
9af72e4a7e7483b5a89261d438cef4e606c67e1b
|
[
"BSD-3-Clause"
] | 2
|
2017-03-26T17:53:50.000Z
|
2021-03-26T07:30:17.000Z
|
repstruct/analysis/pca.py
|
oscarlorentzon/repstruct
|
9af72e4a7e7483b5a89261d438cef4e606c67e1b
|
[
"BSD-3-Clause"
] | null | null | null |
repstruct/analysis/pca.py
|
oscarlorentzon/repstruct
|
9af72e4a7e7483b5a89261d438cef4e606c67e1b
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
import process
def neutral_sub_pca(X, neut_factor=0.8):
    """Perform PCA by singular value decomposition after subtracting a
    generated neutral vector scaled by a specified factor.

    :param X: A 2-D array with normalized row vectors.
    :param neut_factor: The factor of the neutralization vector.

    :return Y: A 2-D array of projections of the row vectors of X on the
               principal components.
    :return V: The principal components of X.
    """
    # Unpack the 2-D shape directly instead of indexing a shape tuple.
    row_count, vector_length = X.shape

    # Subtract a neutral vector for each row in X before performing SVD.
    N = process.create_neutral_vector(np.array([[vector_length, 1.]]), row_count)
    X_neut = X - neut_factor * N

    # Only the right singular vectors are needed; discard U and S.
    _, _, VT = np.linalg.svd(X_neut)
    V = VT.T

    # Project the neutralized feature vectors on the principal components.
    Y = np.dot(X_neut, V)

    return Y, V
def neutral_sub_pca_vector(X, N):
    """ Performs PCA by singular value decomposition after subtracting
        a pre-computed neutral vector.

    :param X: A 2-D array with normalized row vectors.
    :param N: The neutral vector/matrix to subtract from X (must be
              broadcastable to X's shape).

    :return Y: A 2-D array of projections of the row vectors of X on
               the principal components.
    :return V: The principal components of X.
    """
    # Subtracting the neutral vector for each row in X before performing SVD.
    X_neut = X - N
    # Only the right singular vectors are needed; rows of VT are the
    # principal components.
    _, _, VT = np.linalg.svd(X_neut)

    # Projecting feature vectors on principal components.
    V = VT.T
    Y = np.dot(X_neut, V)

    return Y, V
| 29.017857
| 81
| 0.663385
| 252
| 1,625
| 4.18254
| 0.246032
| 0.037951
| 0.072106
| 0.094877
| 0.806452
| 0.806452
| 0.806452
| 0.806452
| 0.806452
| 0.806452
| 0
| 0.007531
| 0.264615
| 1,625
| 56
| 82
| 29.017857
| 0.874477
| 0.602462
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.111111
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e6a76672d985428024f80ad9c646c45cab36adbc
| 351
|
py
|
Python
|
configs/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_Pbr_09_10PottedMeatCan.py
|
THU-DA-6D-Pose-Group/self6dpp
|
c267cfa55e440e212136a5e9940598720fa21d16
|
[
"Apache-2.0"
] | 33
|
2021-12-15T07:11:47.000Z
|
2022-03-29T08:58:32.000Z
|
configs/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_Pbr_09_10PottedMeatCan.py
|
THU-DA-6D-Pose-Group/self6dpp
|
c267cfa55e440e212136a5e9940598720fa21d16
|
[
"Apache-2.0"
] | 3
|
2021-12-15T11:39:54.000Z
|
2022-03-29T07:24:23.000Z
|
configs/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_Pbr_09_10PottedMeatCan.py
|
THU-DA-6D-Pose-Group/self6dpp
|
c267cfa55e440e212136a5e9940598720fa21d16
|
[
"Apache-2.0"
] | null | null | null |
# Per-object DeepIM/FlowNet config for YCB-V object 09 (010_potted_meat_can).
# All settings are inherited from the MasterChefCan base config; only the
# output directory and the training dataset are overridden here.
_base_ = "./FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_Pbr_01_02MasterChefCan.py"
OUTPUT_DIR = "output/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/09_10PottedMeatCan"
DATASETS = dict(TRAIN=("ycbv_010_potted_meat_can_train_pbr",))
| 87.75
| 158
| 0.905983
| 46
| 351
| 6.195652
| 0.695652
| 0.077193
| 0.182456
| 0.259649
| 0.554386
| 0.554386
| 0.554386
| 0.554386
| 0.554386
| 0.554386
| 0
| 0.090643
| 0.025641
| 351
| 3
| 159
| 117
| 0.74269
| 0
| 0
| 0
| 0
| 0
| 0.837607
| 0.837607
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e6aabde95bf4900bbd84ffdf3261463a0aa2d9e9
| 43
|
py
|
Python
|
nitropyapp/exceptions.py
|
Niklas-Nitrokey/nitropy-app
|
c62af73caee5c4cf7536484e4ce16aefd6ec849a
|
[
"Apache-2.0"
] | 1
|
2021-04-06T10:32:21.000Z
|
2021-04-06T10:32:21.000Z
|
nitropyapp/exceptions.py
|
Nitrokey/nitrokey-app2
|
ab66bbceb854e1f18987b0331528e86e3e7ff702
|
[
"Apache-2.0"
] | null | null | null |
nitropyapp/exceptions.py
|
Nitrokey/nitrokey-app2
|
ab66bbceb854e1f18987b0331528e86e3e7ff702
|
[
"Apache-2.0"
] | 1
|
2021-04-13T09:23:56.000Z
|
2021-04-13T09:23:56.000Z
|
class BasePyNKException(Exception):
    """Base class for all application-specific exceptions."""
| 10.75
| 40
| 0.813953
| 4
| 43
| 8.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116279
| 43
| 3
| 41
| 14.333333
| 0.921053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
e6b7fbe30ba3e3eebc19c7995980cc768feb71c9
| 8,030
|
py
|
Python
|
Elastic/searchIndex.py
|
sourabhpoddar404/falcon
|
d47cd515e7c214658e8f2d50c3c742ee5fd2fc27
|
[
"MIT"
] | null | null | null |
Elastic/searchIndex.py
|
sourabhpoddar404/falcon
|
d47cd515e7c214658e8f2d50c3c742ee5fd2fc27
|
[
"MIT"
] | null | null | null |
Elastic/searchIndex.py
|
sourabhpoddar404/falcon
|
d47cd515e7c214658e8f2d50c3c742ee5fd2fc27
|
[
"MIT"
] | null | null | null |
from elasticsearch import Elasticsearch
es = Elasticsearch(['http://172.18.0.1:9200'])
docType = "doc"
def entitySearch(query):
    """Search the entity index for DBpedia resources matching query.

    Runs four searches (uri prefix, uri match, label match, label fuzzy)
    and scores every hit; an exact uri match earns a relevance bonus of 40,
    and a uri matching up to its first underscore earns 30 on the prefix
    search.

    :param query: free-text entity name.
    :return: list of [label, uri, weighted_score, bonus] entries.
    """
    indexName = "dbentityindex"
    results = []
    # uri form used inside the query bodies (capitalized) vs. the lowercase
    # form used for exact-match comparison against returned uris.
    query_uri = "http://dbpedia.org/resource/" + query.capitalize().replace(" ", "_")
    exact_uri = "http://dbpedia.org/resource/" + query.replace(" ", "_").lower()

    def _collect(clause, size, exact_mult, other_mult, prefix_bonus=False):
        # Run one search and append [label, uri, score, bonus] per hit.
        hits = es.search(index=indexName, doc_type=docType,
                         body={"query": clause, "size": size})
        for hit in hits['hits']['hits']:
            uri = hit["_source"]["uri"]
            label = hit["_source"]["label"]
            score = hit["_score"]
            if uri.lower() == exact_uri:
                results.append([label, uri, score * exact_mult, 40])
            elif prefix_bonus and "_" in uri and \
                    uri.lower()[:uri.index("_")] == exact_uri:
                # uri equals the query up to its first underscore.
                results.append([label, uri, score * exact_mult, 30])
            else:
                results.append([label, uri, score * other_mult, 0])

    _collect({"prefix": {"uri": query_uri}}, 5, 50, 10, prefix_bonus=True)
    _collect({"match": {"uri": query_uri}}, 5, 50, 20)
    _collect({"match": {"label": query}}, 10, 50, 40)
    _collect({"fuzzy": {"label": query}}, 5, 50, 25)
    return results
def ontologySearch(query):
    """Search the ontology index for DBpedia ontology terms matching query.

    Runs four searches (uri prefix, uri match, label match, label fuzzy);
    each search weights hit scores by its own multiplier, and an exact uri
    match earns a relevance bonus of 40.

    :param query: free-text ontology term.
    :return: list of [label, uri, weighted_score, bonus] entries.
    """
    indexName = "dbontologyindex"
    results = []
    target_uri = "http://dbpedia.org/ontology/" + query.replace(" ", "_")
    exact_uri = "http://dbpedia.org/ontology/" + query.replace(" ", "_").lower()

    # (query clause, result size, score multiplier) for each search pass.
    searches = [
        ({"prefix": {"uri": target_uri}}, 5, 10),
        ({"match": {"uri": target_uri}}, 5, 20),
        ({"match": {"label": query}}, 10, 40),
        ({"fuzzy": {"label": query}}, 5, 25),
    ]
    for clause, size, multiplier in searches:
        hits = es.search(index=indexName, doc_type=docType,
                         body={"query": clause, "size": size})
        for hit in hits['hits']['hits']:
            label = hit["_source"]["label"]
            uri = hit["_source"]["uri"]
            bonus = 40 if uri.lower() == exact_uri else 0
            results.append([label, uri, hit["_score"] * multiplier, bonus])
    return results
def classSearch(query):
    """Search the class index with a combined uri/label multi-match query.

    :param query: free-text class name.
    :return: list of [label, uri, score, 0] entries.
    """
    uri_query = "http://dbpedia.org/ontology/" + query.replace(" ", "")
    body = {
        "query": {
            "bool": {
                "must": {
                    "bool": {
                        "should": [
                            {"multi_match": {"query": uri_query,
                                             "fields": ["uri"]}},
                            {"multi_match": {"query": query,
                                             "fields": ["label"],
                                             "fuzziness": "AUTO"}},
                        ]
                    }
                }
            }
        },
        "size": 5,
    }
    response = es.search(index="dbclassindex", doc_type=docType, body=body)
    return [[hit["_source"]["label"], hit["_source"]["uri"], hit["_score"], 0]
            for hit in response['hits']['hits']]
def propertySearch(query):
    """Search the property index with a combined label/uri multi-match query.

    :param query: free-text property name.
    :return: list of [label, uri, doubled_score, 0] entries.
    """
    uri_query = "http://dbpedia.org/ontology/" + query.replace(" ", "")
    body = {
        "query": {
            "bool": {
                "must": {
                    "bool": {
                        "should": [
                            {"multi_match": {"query": query,
                                             "fields": ["label"]}},
                            {"multi_match": {"query": uri_query,
                                             "fields": ["uri"],
                                             "fuzziness": "AUTO"}},
                        ]
                    }
                }
            }
        },
        "size": 10,
    }
    response = es.search(index="dbpropertyindex", doc_type=docType, body=body)
    return [[hit["_source"]["label"], hit["_source"]["uri"],
             hit["_score"] * 2, 0]
            for hit in response['hits']['hits']]
| 43.879781
| 185
| 0.488543
| 724
| 8,030
| 5.273481
| 0.096685
| 0.166579
| 0.117863
| 0.124411
| 0.906234
| 0.906234
| 0.893924
| 0.893924
| 0.892352
| 0.892352
| 0
| 0.014437
| 0.249564
| 8,030
| 182
| 186
| 44.120879
| 0.61915
| 0.055915
| 0
| 0.586207
| 0
| 0
| 0.221104
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027586
| false
| 0
| 0.006897
| 0
| 0.062069
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e6b8d54d311f59f2c5e5cbe4f94db561c5c43fea
| 2,394
|
py
|
Python
|
tests/test_search.py
|
marco-team/opynfec
|
650dd7f63c68f4bed8cc6db5b7ae11c60920455d
|
[
"MIT"
] | 1
|
2022-02-25T19:43:22.000Z
|
2022-02-25T19:43:22.000Z
|
tests/test_search.py
|
marco-team/opynfec
|
650dd7f63c68f4bed8cc6db5b7ae11c60920455d
|
[
"MIT"
] | null | null | null |
tests/test_search.py
|
marco-team/opynfec
|
650dd7f63c68f4bed8cc6db5b7ae11c60920455d
|
[
"MIT"
] | null | null | null |
import unittest
from src.opynfec import OpynFEC
class TestSearch(unittest.TestCase):
    """Integration tests for OpynFEC.search against the live demo API."""

    def setUp(self) -> None:
        # DEMO_KEY is api.data.gov's public, rate-limited demo key.
        self.api_wrapper = OpynFEC(api_key="DEMO_KEY")

    def _check_results(self, res, min_len, expected_keys):
        """Shared assertions: res is a non-empty list of dicts whose first
        element carries exactly the expected keys."""
        self.assertIsInstance(res, list, "Search did not return list")
        self.assertGreaterEqual(
            len(res),
            min_len,
            "Search results does not contain at least {} results".format(min_len),
        )
        self.assertIsInstance(res[0], dict, "Search result list does not contain dicts")
        self.assertEqual(
            set(res[0].keys()),
            expected_keys,
            "Search results element keys not as expected",
        )

    def test_candidate(self):
        res = self.api_wrapper.search("Richard Blumenthal", "candidates")
        self._check_results(res, 1, {"id", "name", "office_sought"})

    def test_candidates(self):
        res = self.api_wrapper.search(
            ["Richard Blumenthal", "Chris Murphy"], "candidates"
        )
        self._check_results(res, 2, {"id", "name", "office_sought"})

    def test_committee(self):
        res = self.api_wrapper.search("justice", "committees")
        self._check_results(res, 1, {"id", "name", "is_active"})

    def test_committees(self):
        res = self.api_wrapper.search(["justice", "peace"], "committees")
        self._check_results(res, 2, {"id", "name", "is_active"})
| 39.9
| 88
| 0.608187
| 277
| 2,394
| 5.202166
| 0.216607
| 0.111034
| 0.127689
| 0.038862
| 0.866759
| 0.866759
| 0.866759
| 0.814712
| 0.753643
| 0.748786
| 0
| 0.008646
| 0.275272
| 2,394
| 59
| 89
| 40.576271
| 0.821902
| 0
| 0
| 0.54717
| 0
| 0
| 0.339599
| 0
| 0
| 0
| 0
| 0
| 0.301887
| 1
| 0.09434
| false
| 0
| 0.037736
| 0
| 0.150943
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
fc0bf6b6c46814466b2638a318ab39fee88a0b86
| 263
|
py
|
Python
|
cpc/asm/Symbol.py
|
U-Ar/Cpresto
|
f723458fb237c9e3e8bc8a6afdf7c81858a65363
|
[
"BSD-3-Clause"
] | 1
|
2021-05-09T07:10:19.000Z
|
2021-05-09T07:10:19.000Z
|
cpc/asm/Symbol.py
|
U-Ar/Cpresto
|
f723458fb237c9e3e8bc8a6afdf7c81858a65363
|
[
"BSD-3-Clause"
] | null | null | null |
cpc/asm/Symbol.py
|
U-Ar/Cpresto
|
f723458fb237c9e3e8bc8a6afdf7c81858a65363
|
[
"BSD-3-Clause"
] | null | null | null |
from abc import ABCMeta, abstractmethod
from .Literal import Literal
class Symbol(Literal):
    """Abstract base for assembly symbols: a Literal addressed by name.

    Concrete subclasses must provide the three methods below.
    """
    @abstractmethod
    def name(self):
        # The symbol's identifier.
        pass
    @abstractmethod
    def to_string(self):
        # String form of the symbol (presumably for assembly output —
        # verify against concrete subclasses).
        pass
    @abstractmethod
    def dump(self):
        # Debug/dump representation of the symbol.
        pass
| 16.4375
| 39
| 0.638783
| 28
| 263
| 5.964286
| 0.535714
| 0.305389
| 0.263473
| 0.299401
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.296578
| 263
| 16
| 40
| 16.4375
| 0.902703
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.166667
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
fc79b9e90f48d29abbab6efee7fa24c10fb6162e
| 8,200
|
py
|
Python
|
saucelab_api_client/base_classes/insights_api.py
|
Slamnlc/saucelab-api-client
|
82ccf1b675e69dc36047844c5b5e4fc032ab9a8d
|
[
"MIT"
] | null | null | null |
saucelab_api_client/base_classes/insights_api.py
|
Slamnlc/saucelab-api-client
|
82ccf1b675e69dc36047844c5b5e4fc032ab9a8d
|
[
"MIT"
] | null | null | null |
saucelab_api_client/base_classes/insights_api.py
|
Slamnlc/saucelab-api-client
|
82ccf1b675e69dc36047844c5b5e4fc032ab9a8d
|
[
"MIT"
] | null | null | null |
from datetime import datetime
from saucelab_api_client.category import Base
from saucelab_api_client.models.insights import Insight
from saucelab_api_client.models.service import get_dict_from_locals, get_datetime_for_insights
class Insights(Base):
    """API wrapper for the Sauce Labs Insights (analytics) REST endpoints.

    Each method forwards its keyword arguments as query-string parameters:
    `get_dict_from_locals(locals())` harvests the local variable names, so
    the parameter names here presumably must match the API's query-string
    parameter names — do not rename them (NOTE(review): confirm against
    `get_dict_from_locals`).
    """

    # Common URL prefix for all analytics endpoints (name-mangled attribute).
    __sub_host = '/v1/analytics'

    def test_results(self, start: datetime, end: datetime, scope=None, owner=None, status=None, build=None, from_=None,
                     max_results=None, missing_build=None, query=None, desc=None, error=None):
        """
        https://docs.saucelabs.com/dev/api/insights/#get-test-results
        Returns run data for all tests that match the request criteria

        :param start: The starting date of the period during which the test runs executed, in YYYY-MM-DDTHH:MM:SSZ
        or Unix time format.
        :param end: The ending date of the period during which the test runs executed, in YYYY-MM-DDTHH:MM:SSZ
        or Unix time format.
        :param scope: Specifies the scope of the owner parameter
        :param owner: The name of one or more users in the requestor's organization who executed the requested tests.
        This parameter is required if the scope parameter is set to single.
        :param status: Limit results to only those with a specified status
        :param build: Limit results to those grouped by this build name
        :param from_: Begin results list from this record number
        :param max_results: The maximum number of results to return
        :param missing_build: Requires no value. If this parameter is included in the query string,
        results will only include tests with no assigned build
        :param query: Limit results to only those with this test name
        :param desc: Set to true to sort results in descending order by creation time. Default value is false
        :param error: Limit results to only those that threw this error message
        :return:
        """
        # Normalize the date range before harvesting locals() below.
        start, end = get_datetime_for_insights(start, end)
        params = get_dict_from_locals(locals())
        # _valid maps the 'items' collection of the response onto Insight.
        return self._valid(self._session.request('get', f'{self.__sub_host}/tests', params=params), Insight, 'items')

    def get_summary_of_test_metric(self, start: datetime, end: datetime, scope=None, owner=None, status=None,
                                   query=None, os=None, browser=None):
        """
        https://docs.saucelabs.com/dev/api/insights/#get-summary-of-test-metrics
        Returns an aggregate of metric values for runs of a specified test during the specified time period

        :param start: The starting date of the period during which the test runs executed, in YYYY-MM-DDTHH:MM:SSZ
        or Unix time format.
        :param end: The ending date of the period during which the test runs executed, in YYYY-MM-DDTHH:MM:SSZ
        or Unix time format.
        :param scope: Specifies the scope of the owner parameter
        :param owner: The name of one or more users in the requestor's organization who executed the requested tests.
        This parameter is required if the scope parameter is set to single.
        :param status: Limit results to only those with a specified status
        :param query: The name of the test for which results are requested
        :param os: Limit results to only those run on the specified operating systems
        :param browser: Limit results to only those run on the specified browsers
        :return:
        """
        start, end = get_datetime_for_insights(start, end)
        params = get_dict_from_locals(locals())
        # Returns the raw response (no model mapping, unlike test_results).
        return self._session.request('get', f'{self.__sub_host}/insights/test-metrics', params=params)

    def get_test_trends(self, start: datetime, end: datetime, interval: str, scope=None, owner=None, status=None,
                        os=None, browser=None):
        """
        https://docs.saucelabs.com/dev/api/insights/#get-test-trends
        Returns a set of data "buckets" representing tests that were run in each time interval defined
        by the request parameters

        :param start: The starting date of the period during which the test runs executed, in YYYY-MM-DDTHH:MM:SSZ
        or Unix time format.
        :param end: The ending date of the period during which the test runs executed, in YYYY-MM-DDTHH:MM:SSZ
        or Unix time format.
        :param interval: The amount of time representing the boundary of each data bucket
        :param scope: Specifies the scope of the owner parameter
        :param owner: The name of one or more users in the requestor's organization who executed the requested tests.
        This parameter is required if the scope parameter is set to single.
        :param status: Limit results to only those with a specified status
        :param os: Limit results to only those run on the specified operating systems
        :param browser: Limit results to only those run on the specified browsers
        :return:
        """
        start, end = get_datetime_for_insights(start, end)
        params = get_dict_from_locals(locals())
        return self._session.request('get', f'{self.__sub_host}/trends/tests', params=params)

    def get_builds_and_tests(self, start: datetime, end: datetime, scope=None, owner=None, status=None, os=None,
                             browser=None):
        """
        https://docs.saucelabs.com/dev/api/insights/#get-builds-and-tests
        Returns the set of all tests run within the specified time period, grouped by whether
        each test was part of a build or not

        :param start: The starting date of the period during which the test runs executed, in YYYY-MM-DDTHH:MM:SSZ
        or Unix time format.
        :param end: The ending date of the period during which the test runs executed, in YYYY-MM-DDTHH:MM:SSZ
        or Unix time format.
        :param scope: Specifies the scope of the owner parameter
        :param owner: The name of one or more users in the requestor's organization who executed the requested tests.
        This parameter is required if the scope parameter is set to single.
        :param status: Limit results to only those with a specified status
        :param os: Limit results to only those run on the specified operating systems
        :param browser: Limit results to only those run on the specified browsers
        :return:
        """
        start, end = get_datetime_for_insights(start, end)
        params = get_dict_from_locals(locals())
        return self._session.request('get', f'{self.__sub_host}/trends/builds_tests', params=params)

    def get_error_trends(self, start: datetime, end: datetime, scope=None, owner=None, status=None, os=None,
                         browser=None):
        """
        https://docs.saucelabs.com/dev/api/insights/#get-error-trends
        Returns an array of errors that occurred on all tests run within the specified time period.

        :param start: The starting date of the period during which the test runs executed, in YYYY-MM-DDTHH:MM:SSZ
        or Unix time format.
        :param end: The ending date of the period during which the test runs executed, in YYYY-MM-DDTHH:MM:SSZ
        or Unix time format.
        :param scope: Specifies the scope of the owner parameter
        :param owner: The name of one or more users in the requestor's organization who executed the requested tests.
        This parameter is required if the scope parameter is set to single.
        :param status: Limit results to only those with a specified status
        :param os: Limit results to only those run on the specified operating systems
        :param browser: Limit results to only those run on the specified browsers
        :return:
        """
        start, end = get_datetime_for_insights(start, end)
        params = get_dict_from_locals(locals())
        return self._session.request('get', f'{self.__sub_host}/trends/errors', params=params)
| 60.294118
| 119
| 0.665976
| 1,155
| 8,200
| 4.654545
| 0.138528
| 0.02846
| 0.041667
| 0.050223
| 0.760789
| 0.737909
| 0.725446
| 0.725446
| 0.708891
| 0.708891
| 0
| 0.000167
| 0.270732
| 8,200
| 135
| 120
| 60.740741
| 0.898829
| 0.643171
| 0
| 0.387097
| 0
| 0
| 0.085778
| 0.071111
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16129
| false
| 0
| 0.129032
| 0
| 0.516129
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
fc7d7d69a887d630f23a7a99c4bb176a5ad7d52f
| 32,906
|
py
|
Python
|
image.py
|
snowsquizy/MiBand2
|
11901cd856b68b0dd4b72a6bb53890bb6c320a3d
|
[
"CC0-1.0"
] | 1
|
2021-07-19T22:53:44.000Z
|
2021-07-19T22:53:44.000Z
|
image.py
|
snowsquizy/MiBand2
|
11901cd856b68b0dd4b72a6bb53890bb6c320a3d
|
[
"CC0-1.0"
] | 1
|
2020-05-29T12:27:27.000Z
|
2020-05-29T12:29:11.000Z
|
image.py
|
snowsquizy/MiBand2
|
11901cd856b68b0dd4b72a6bb53890bb6c320a3d
|
[
"CC0-1.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# image.py
#
# Copyright 2020 Andrew Taylor <andrew@snowsquizy.id.au>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
import time
import d_base
import os # Used for testing and cleanup
import pylab # Used for plotting data
from PIL import Image # Used for creating Data images
from PIL import ImageDraw # Used for creating Data images
#from PIL import ImageFont # Used for creating Data images
#import pyecharts
file_names = ["blank.png", "icon.svg", "data.png", "plots.png", "test_file"]
testing = False
def create_start_images(s_image, a_icon):
    """
    Method to write the application's starting images to disk
    ARGS:    s_image - Byte stream of the blank plot image (saved as blank.png)
             a_icon  - Byte stream of the application icon (saved as icon.svg)
    Returns: None
    """
    if testing:
        print("converting byte stream")
    # Persist both byte streams under the module's well-known file names.
    with open(file_names[0], 'wb') as file:
        file.write(s_image)
    with open(file_names[1], 'wb') as file:
        file.write(a_icon)
def create_data_image(hours):
    """
    Method to create image with written word for the number of hours provided
    ARGS:    hours - number of hours to retrieve from database
    RETURNS: None (saves data.png)
    """
    if testing:
        print("creating data image")
    else:
        clean_up()
    # Get Database Data
    d_t, r_k, r_i, s_t, h_r = d_base.get_watch_data(hours)

    # Raw-kind codes grouped by activity (codes taken from the original
    # branch conditions; NOTE(review): meanings assumed from the comments
    # that accompanied them — confirm against the watch protocol).
    running_kinds = {98, 66, 50, 82}
    walking_kinds = {1, 16, 17, 33, 18, 34, 65, 49}
    sitting_kinds = {80, 96, 99}
    not_worn_kinds = {83, 115, 6, 3}

    running = walking = sleep_l = sleep_h = 0
    sitting = not_worn = unknown = sedentary = 0
    hs_count = 0
    denominator = round((hours / 24), 2)

    # Step Calculations
    tot_st = sum(s_t)
    aver_st = round(tot_st / len(s_t), 2)
    step_h = round(tot_st / hours, 2)

    # Heart Rate Calculations (after removing impossible readings)
    h_r = cleanse_hr(h_r)
    tot_hr = sum(h_r)
    aver_hr = round(tot_hr / len(h_r), 2)

    # Activity Type Calculation
    for i in range(len(d_t)):
        kind = r_k[i]
        if kind in running_kinds:
            running += 1
        elif kind in walking_kinds:
            walking += 1
        elif kind == 112:
            # Light sleep; 20+ consecutive low-intensity minutes are
            # re-classified as heavy sleep.
            if r_i[i] <= 12:
                hs_count += 1
                if hs_count >= 20:
                    sleep_h += 1
                else:
                    sleep_l += 1
            else:
                hs_count = 0
                sleep_l += 1
        elif kind == 122:
            # Heavy sleep
            sleep_h += 1
        elif kind in sitting_kinds:
            sitting += 1
        elif kind in not_worn_kinds:
            # Not worn right way up, not worn right way down, or charging
            not_worn += 1
        elif kind == 90:
            # Sedentary marker covers a 5 minute span
            sedentary += 5
        else:
            unknown += 1

    sle_la = round(sleep_l / denominator, 2)
    sle_ha = round(sleep_h / denominator, 2)
    sleepa = round((sleep_l + sleep_h) / denominator, 2)
    walk_a = round(walking / denominator, 2)
    runn_a = round(running / denominator, 2)

    # Assemble the report text exactly as rendered into the image.
    image_input = " ".join((
        "Data Analysis for last {} Hours\n".format(hours),
        "***** Step Data ******\n",
        "Total Steps Taken :{}\n".format(tot_st),
        "Hourly Step Average :{}\n".format(step_h),
        "Minute Step Average :{}\n".format(aver_st),
        "***** Heart Rate ******\n",
        "Total Heart Beats :{}\n".format(tot_hr),
        "Heart Beat Average :{}\n".format(aver_hr),
        "**** Sleeping Data ****\n",
        "Light Sleep Minutes :{}\n".format(sleep_l),
        "Light Sleep Average :{}\n".format(sle_la),
        "Heavy Sleep Minutes :{}\n".format(sleep_h),
        "Heavy Sleep Average :{}\n".format(sle_ha),
        "Average Sleep :{}\n".format(sleepa),
        "***** Activities *****\n",
        "Sitting Time :{}\n".format(sitting),
        "Sedentary Time :{}\n".format(sedentary),
        "Walking Time :{}\n".format(walking),
        "Walking Time Average :{}\n".format(walk_a),
        "Running Time :{}\n".format(running),
        "Running Time Average :{}\n".format(runn_a),
        "Bip Not Worn :{}\n".format(not_worn)))

    # Render the text onto a white 355x355 canvas and save it.
    image_base = Image.new('RGB', (355, 355), color='white')
    data_image = ImageDraw.Draw(image_base)
    data_image.multiline_text((2, 2), image_input, fill=(0, 0, 0))
    image_base.save("data.png")
def create_plot_image(hours):
    """
    Method to create a three-panel plot (steps/sleep, HRM/intensity,
    cumulative steps) for the number of hours provided
    ARGS:    hours - number of hours to retrieve from database
    RETURNS: None (saves plots.png)
    """
    if testing:
        print("creating plot image method started")
    else:
        clean_up()
    # Get Database Data
    d_t, r_k, r_i, s_t, h_r = d_base.get_watch_data(hours)
    # Running total of steps for the bottom panel.
    st_cum = [0]
    for i in range(1, len(s_t)):
        st_cum.append(st_cum[i-1] + s_t[i])
    h_r = cleanse_hr(h_r)
    sleep_l = []
    sleep_h = []
    hs_count = 0
    # Get data points for sleep; -10 marks "not this sleep state" so the
    # series plots below the visible y-range.
    for i in range(len(d_t)):
        # Heavy Sleep = 100
        if r_k[i] == 122:
            sleep_h.append(100)
            sleep_l.append(-10)
        elif r_k[i] == 112:
            if r_i[i] <= 12:
                hs_count += 1
                # 20 min light sleep min movement counts as heavy sleep
                if hs_count >= 20:
                    sleep_h.append(100)
                    sleep_l.append(-10)
                else:
                    sleep_h.append(-10)
                    sleep_l.append(50)
            else:
                # Movement resets the consecutive low-intensity counter.
                hs_count = 0
                sleep_h.append(-10)
                sleep_l.append(50)
        else:
            sleep_h.append(-10)
            sleep_l.append(-10)
    if testing:
        print("creating plot")
    # Plot Data
    pylab.figure(figsize=(6.4, 6.4), dpi=60)
    # Panel 1: steps plus light/heavy sleep markers.
    pylab.subplot(311)
    pylab.plot(sleep_h, color='blue')
    pylab.plot(sleep_l, color='cyan')
    pylab.plot(s_t, color='green')
    pylab.ylabel("Steps / Sleep")
    pylab.gca().set_xticklabels([])
    pylab.gca().set_ylim([0, None])
    pylab.title("Plots for Last {} Hours".format(hours))
    # Panel 2: heart rate and activity intensity.
    pylab.subplot(312)
    pylab.plot(r_i, color='midnightblue')
    pylab.plot(h_r, color='red')
    pylab.ylabel("HRM / Intensity")
    pylab.gca().set_xticklabels([])
    pylab.gca().set_ylim([0, None])
    # Panel 3: cumulative step count.
    pylab.subplot(313)
    pylab.plot(st_cum, '-b')
    pylab.ylabel("Total Steps")
    pylab.gca().set_xticklabels([])
    if testing:
        print("About to save Plot")
    pylab.savefig("plots.png", bbox_inches='tight')
"""
def create_plot_image(hours)
if testing:
print("creating plot image method started")
else:
clean_up()
# Get Database Data
d_t, r_k, r_i, s_t, h_r = d_base.get_watch_data(hours)
st_cum = [0]
for i in range(1, len(s_t)):
st_cum.append(st_cum[i-1] + s_t[i])
h_r = cleanse_hr(h_r)
if testing:
print("creating plot")
"""
def cleanse_hr(h_r):
    """Replace impossible heart-rate samples (above 220 bpm) with the
    preceding sample, in place, and return the list."""
    if testing:
        print("HR data being cleansed")
    # Correct heart rate by removing values greater than 220; the first
    # sample is never replaced since it has no predecessor.
    for idx in range(1, len(h_r)):
        if h_r[idx] > 220:
            h_r[idx] = h_r[idx - 1]
    return h_r
def clean_up():
    """
    Delete every generated file listed in the module-level ``file_names``.

    Files that do not exist are skipped, so the call is safe to repeat.
    ARGS: None
    RETURNS: None
    """
    if testing:
        print("Deleting all Files")
    # Iterate the names directly instead of indexing with range(len(...));
    # only remove files that actually exist on disk.
    for name in file_names:
        if os.path.isfile(name):
            os.remove(name)
if testing:
"""
Testing for all methods in this program
"""
image_file = b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x01h\x00\x00\x01h\x08\x02\x00\x00\x00\xf5\x87\xf6\x82\x00\x00\x00\tpHYs\x00\x00\t:\x00\x00\t:\x01\xf0d\x92J\x00\x00\x00\x18tEXtComment\x00Andrew Taylor(c)(\x89I\xeb\x00\x00\x03ZIDATx\xda\xed\xd41\x01\x00\x00\x08\xc30\xc0\x0f\xc7\xfc\x9bC\x03\x7f"\xa1G;\x9b\x02\xf8\x18\t\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x000\x0e\xc08\x00\xe3\x00\x8c\x030\x0e\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x000\x0e\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8c\x03\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8c\x03\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x000\x0e\xc08\x00\xe3\x00\x8c\x030\x0e\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x000\x0e\xc08\x00\xe3\x00\x8c\x030\x0e\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8c\x03\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8c\x03\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8c\x030\x0e\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x000\x0e\xc08\x00\xe3\x00\x8c\x030\x0e\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8c\x03\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8c\x03\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8c\x030\x0e\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x000\x0e\xc08\x00\xe3\x00\x8c\x030\x0e\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x000\x0e\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8c\x03\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8c\x03\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x000\x0e\xc08\x00\xe3\x00\x8c\x030\x0e\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x000\x0e\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8c\x03\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8c\x03\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x000\x0e\xc08\x00\xe3\x00\x8c\x030\x0e\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x000\x0e\xc08\x00\xe3\x00\x8c\x030\x0e\x00\xe3\x00\x8c\x0
30\x0e\xc08\x00\xe3\x00\x8c\x03\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8c\x03\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8c\x030\x0e\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x000\x0e\xc08\x00\xe3\x00\x8c\x030\x0e\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8cC\x02\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8c\x03\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8c\x030\x0e\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x000\x0e\xc08\x00\xe3\x00\x8c\x030\x0e\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x000\x0e\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8c\x03\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8c\x03\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x000\x0e\xc08\x00\xe3\x00\x8c\x030\x0e\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x000\x0e\xc08\x00\xe3\x00\x8c\x030\x0e\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8c\x03\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\x8c\x030\x0e\xc08\x00\xe3\x00\x8c\x03\xc08\x00\xe3\x00\x8c\x030\x0e\xc08\x00\xe3\x00\xf89}\x01\x03_\xb7\x12c \x00\x00\x00\x00IEND\xaeB`\x82'
icon_file = b'<?xml version="1.0" encoding="UTF-8"?>\n<!-- Created with Inkscape (http://www.inkscape.org/) -->\n<svg width="192" height="192" version="1.1" viewBox="0 0 192 192" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"><metadata><rdf:RDF><cc:Work rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/><dc:title/></cc:Work></rdf:RDF></metadata><defs><clipPath id="clipPath16"><path d="m0 192h192v-192h-192v192z"/></clipPath><clipPath id="clipPath26"><path d="m-4.883e-4 192h192v-192h-192v192z"/></clipPath><clipPath id="clipPath34"><path d="m-4.883e-4 192h192v-192h-192v192z"/></clipPath><clipPath id="clipPath38"><path d="m0 192h192v-192h-192v192z"/></clipPath><clipPath id="clipPath42"><path d="m0 192h192v-192h-192v192z"/></clipPath><clipPath id="clipPath116"><path d="m0 192h192v-192h-192v192z"/></clipPath><clipPath id="clipPath120"><path d="m0 192h192v-192h-192v192z"/></clipPath><clipPath id="clipPath124"><path d="m192 192h-192v-192h192v192z"/></clipPath></defs><g transform="matrix(1.25,0,0,-1.25,0,192)"><g transform="scale(.8)" fill="#666" fill-opacity=".7992"><g clip-path="url(#clipPath16)" fill="#666" fill-opacity=".7992"><g transform="matrix(1,0,0,-1,0,192)"><g fill="#666" fill-opacity=".7992"><g clip-path="url(#clipPath26)" fill="#666" fill-opacity=".7992"><g fill="#666" fill-opacity=".7992"><g fill="#666" fill-opacity=".7992"><g clip-path="url(#clipPath34)" fill="#666" fill-opacity=".7992" opacity=".4"><g clip-path="url(#clipPath38)" fill="#666" fill-opacity=".7992"><g transform="translate(.2497 .2497)" fill="#666" fill-opacity=".7992"><path d="m0 0 191.5 191.5" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g></g><path d="m0 0h192v192h-192zm0.249 191.8h191.5v-191.5h-191.5z"/><g clip-path="url(#clipPath42)" 
fill="#666" fill-opacity=".7992"><g transform="translate(191.7 .2497)"><path d="m0 0-191.5 191.5" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(124 -4e-4)"><path d="m0 0v192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(68 -4e-4)"><path d="m0 0v192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(192 124)"><path d="m0 0h-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(192 68)"><path d="m0 0h-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(136 96)"><path d="m0 0c0 22.09-17.91 40-40 40s-40-17.91-40-40 17.91-40 40-40 40 17.91 40 40z" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(-4e-4 96)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(96 -5e-4)"><path d="m0 0v192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(96 8)"><path d="m0 0c-48.6 0-88 39.4-88 88s39.4 88 88 88 88-39.4 88-88-39.4-88-88-88m0 0.25c48.39 0 87.75 39.36 87.75 87.75s-39.36 87.75-87.75 87.75-87.75-39.36-87.75-87.75 39.36-87.75 87.75-87.75" fill="#666" fill-opacity=".7992"/></g><g transform="translate(160 20)"><path d="m0 0h-128c-6.6 0-12 5.4-12 12v128c0 6.6 5.4 12 12 12h128c6.6 0 12-5.4 12-12v-128c0-6.6-5.4-12-12-12m0 0.25c6.479 0 11.75 5.271 11.75 11.75v128c0 6.479-5.271 11.75-11.75 11.75h-128c-6.479 0-11.75-5.271-11.75-11.75v-128c0-6.479 5.271-11.75 11.75-11.75h128" fill="#666" fill-opacity=".7992"/></g><g transform="translate(148 8)"><path d="m0 0h-104c-6.6 0-12 5.4-12 12v152c0 6.6 5.4 12 12 12h104c6.6 0 12-5.4 
12-12v-152c0-6.6-5.4-12-12-12m0 0.25c6.479 0 11.75 5.271 11.75 11.75v152c0 6.479-5.271 11.75-11.75 11.75h-104c-6.479 0-11.75-5.271-11.75-11.75v-152c0-6.479 5.271-11.75 11.75-11.75h104" fill="#666" fill-opacity=".7992"/></g><g transform="translate(172 32)"><path d="m0 0h-152c-6.6 0-12 5.4-12 12v104c0 6.6 5.4 12 12 12h152c6.6 0 12-5.4 12-12v-104c0-6.6-5.4-12-12-12m0 0.25c6.479 0 11.75 5.271 11.75 11.75v104c0 6.479-5.271 11.75-11.75 11.75h-152c-6.479 0-11.75-5.271-11.75-11.75v-104c0-6.479 5.271-11.75 11.75-11.75h152" fill="#666" fill-opacity=".7992"/></g></g></g></g></g></g></g></g><g fill="#666" fill-opacity=".7992"><g clip-path="url(#clipPath116)" fill="#666" fill-opacity=".7992" opacity=".15"><g transform="matrix(1,0,0,-1,0,192)" fill="#666" fill-opacity=".7992"><g fill="#666" fill-opacity=".7992"><g clip-path="url(#clipPath120)" fill="#666" fill-opacity=".7992"><g clip-path="url(#clipPath124)" fill="#666" fill-opacity=".7992"><g transform="translate(4 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(8 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(12 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(16 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(20 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(24 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(28 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(32 
192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(36 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(40 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(44 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(48 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(52 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(56 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(60 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(64 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(68 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(72 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(76 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(80 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(84 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" 
stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(88 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(92 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(96 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(100 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(104 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(108 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(112 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(116 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(120 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(124 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(128 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(132 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(136 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(140 
192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(144 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(148 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(152 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(156 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(160 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(164 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(168 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(172 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(176 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(180 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(184 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(188 192)"><path d="m0 0v-192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 4)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" 
stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 8)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 12)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 16)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 20)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 24)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 28)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 32)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 36)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 40)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 44)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 48)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 52)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 56)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 60)"><path d="m0 0h192" fill="#666" 
fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 64)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 68)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 72)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 76)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 80)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 84)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 88)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 92)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 96)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 100)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 104)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 108)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 112)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 116)"><path 
d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 120)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 124)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 128)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 132)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 136)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 140)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 144)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 148)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 152)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 156)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 160)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 164)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 168)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g 
transform="translate(0 172)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 176)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 180)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 184)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g><g transform="translate(0 188)"><path d="m0 0h192" fill="#666" fill-opacity=".7992" stroke="#000" stroke-miterlimit="10" stroke-width=".25"/></g></g><path d="m0 1e-3h192v192h-192zm0.25 191.7h191.5v-191.5h-191.5z"/></g></g></g></g></g></g></g></g><g><rect x="28.37" y="12.41" width="135.3" height="167.5" ry="8.225" fill-opacity=".8543" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="8.482"/><text x="35.333336" y="119.33334" fill="#ffffff" font-family="Sans" font-size="13.33px" letter-spacing="0px" stroke="#ffffff" stroke-width="1px" word-spacing="0px" style="line-height:125%" xml:space="preserve"><tspan x="35.333336" y="119.33334" fill="#ffffff" font-family="\'hooge 05_53\'" font-size="74.67px" stroke="#ffffff" stroke-width="1px" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal">BIP</tspan></text><g stroke="#000" stroke-linecap="round" stroke-linejoin="round">\n<rect x="47.08" y=".4557" width="9.422" height="7.749"/><rect x="135.3" y=".25" width="9.422" height="7.749"/><rect x="135.3" y="184" width="9.422" height="7.749"/><rect x="47.29" y="184" width="9.422" height="7.749"/></g><path d="m175.7 96a4 14 0 0 1-3.97 14 4 14 0 0 1-4.029-13.79 4 14 0 0 1 3.911-14.2 4 14 0 0 1 4.087 13.58l-3.998 0.4147z" fill="#0f0000" stroke="#000" stroke-linecap="round" stroke-linejoin="round"/></g></svg>\n'
# Self-test harness: call each image generator, then confirm its expected
# output file (file_names[n]) exists and delete it to leave a clean dir.
# NOTE(review): these statements appear to belong to the ``if testing:``
# suite above — indentation seems to have been lost in this copy; confirm
# against the original file.
create_start_images(image_file, icon_file)
if os.path.isfile(file_names[0]):
os.remove(file_names[0])
# NOTE(review): the message says 'create_start_image' but the function under
# test is ``create_start_images`` (plural) — confirm which name is intended.
print("*** Tested 'create_start_image' successfully ***")
else:
print("Failed 'create_start_image'")
# Data image for a 100-hour window.
create_data_image(100)
if os.path.isfile(file_names[1]):
os.remove(file_names[1])
print("*** Tested 'create_data_image' successfully ***")
else:
print("Failed 'create_data_image'")
# Plot image for a 100-hour window.
create_plot_image(100)
if os.path.isfile(file_names[2]):
os.remove(file_names[2])
print("*** Tested 'create_plot_image' successfully ***")
else:
print("Failed 'create_plot_image'")
# clean_up() must delete the freshly created marker file file_names[3].
os.mknod(file_names[3])
clean_up()
if not os.path.isfile(file_names[3]):
# NOTE(review): closing '***' is missing here, unlike the other messages.
print("*** Tested 'clean_up' successfully")
else:
print("Failed 'clean_up'")
| 100.323171
| 19,918
| 0.655169
| 5,588
| 32,906
| 3.804402
| 0.10451
| 0.064161
| 0.063644
| 0.104144
| 0.736159
| 0.707277
| 0.687097
| 0.678207
| 0.652194
| 0.623877
| 0
| 0.180056
| 0.126573
| 32,906
| 327
| 19,919
| 100.629969
| 0.559619
| 0.052544
| 0
| 0.283019
| 0
| 0.014151
| 0.790098
| 0.419282
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023585
| false
| 0
| 0.028302
| 0
| 0.056604
| 0.070755
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
fc887b7767ea4ad2eb8aa58cd5222e3b1086dc12
| 47,828
|
py
|
Python
|
cottonformation/res/wafregional.py
|
MacHu-GWU/cottonformation-project
|
23e28c08cfb5a7cc0db6dbfdb1d7e1585c773f3b
|
[
"BSD-2-Clause"
] | 5
|
2021-07-22T03:45:59.000Z
|
2021-12-17T21:07:14.000Z
|
cottonformation/res/wafregional.py
|
MacHu-GWU/cottonformation-project
|
23e28c08cfb5a7cc0db6dbfdb1d7e1585c773f3b
|
[
"BSD-2-Clause"
] | 1
|
2021-06-25T18:01:31.000Z
|
2021-06-25T18:01:31.000Z
|
cottonformation/res/wafregional.py
|
MacHu-GWU/cottonformation-project
|
23e28c08cfb5a7cc0db6dbfdb1d7e1585c773f3b
|
[
"BSD-2-Clause"
] | 2
|
2021-06-27T03:08:21.000Z
|
2021-06-28T22:15:51.000Z
|
# -*- coding: utf-8 -*-
"""
This module declares ``attrs``-based Property and Resource classes for AWS WAFRegional CloudFormation objects.
"""
import attr
import typing
from ..core.model import (
Property, Resource, Tag, GetAtt, TypeHint, TypeCheck,
)
from ..core.constant import AttrMeta
#--- Property declaration ---
@attr.s
class PropRulePredicate(Property):
    """
    AWS Object Type = "AWS::WAFRegional::Rule.Predicate"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-rule-predicate.html

    Property Document:

    - ``rp_DataId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-rule-predicate.html#cfn-wafregional-rule-predicate-dataid
    - ``rp_Negated``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-rule-predicate.html#cfn-wafregional-rule-predicate-negated
    - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-rule-predicate.html#cfn-wafregional-rule-predicate-type
    """
    AWS_OBJECT_TYPE = "AWS::WAFRegional::Rule.Predicate"

    # Non-optional validator: a value of the intrinsic-string type is required.
    rp_DataId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "DataId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-rule-predicate.html#cfn-wafregional-rule-predicate-dataid"""

    rp_Negated: bool = attr.ib(
        default=None,
        validator=attr.validators.instance_of(bool),
        metadata={AttrMeta.PROPERTY_NAME: "Negated"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-rule-predicate.html#cfn-wafregional-rule-predicate-negated"""

    rp_Type: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Type"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-rule-predicate.html#cfn-wafregional-rule-predicate-type"""
@attr.s
class PropByteMatchSetFieldToMatch(Property):
    """
    AWS Object Type = "AWS::WAFRegional::ByteMatchSet.FieldToMatch"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-bytematchset-fieldtomatch.html

    Property Document:

    - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-bytematchset-fieldtomatch.html#cfn-wafregional-bytematchset-fieldtomatch-type
    - ``p_Data``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-bytematchset-fieldtomatch.html#cfn-wafregional-bytematchset-fieldtomatch-data
    """
    AWS_OBJECT_TYPE = "AWS::WAFRegional::ByteMatchSet.FieldToMatch"

    # Non-optional validator: a value of the intrinsic-string type is required.
    rp_Type: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Type"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-bytematchset-fieldtomatch.html#cfn-wafregional-bytematchset-fieldtomatch-type"""

    # Optional validator: None is accepted for this field.
    p_Data: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Data"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-bytematchset-fieldtomatch.html#cfn-wafregional-bytematchset-fieldtomatch-data"""
@attr.s
class PropSizeConstraintSetFieldToMatch(Property):
    """
    AWS Object Type = "AWS::WAFRegional::SizeConstraintSet.FieldToMatch"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sizeconstraintset-fieldtomatch.html

    Property Document:

    - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sizeconstraintset-fieldtomatch.html#cfn-wafregional-sizeconstraintset-fieldtomatch-type
    - ``p_Data``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sizeconstraintset-fieldtomatch.html#cfn-wafregional-sizeconstraintset-fieldtomatch-data
    """
    AWS_OBJECT_TYPE = "AWS::WAFRegional::SizeConstraintSet.FieldToMatch"

    # Non-optional validator: a value of the intrinsic-string type is required.
    rp_Type: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Type"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sizeconstraintset-fieldtomatch.html#cfn-wafregional-sizeconstraintset-fieldtomatch-type"""

    # Optional validator: None is accepted for this field.
    p_Data: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Data"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sizeconstraintset-fieldtomatch.html#cfn-wafregional-sizeconstraintset-fieldtomatch-data"""
@attr.s
class PropGeoMatchSetGeoMatchConstraint(Property):
    """
    AWS Object Type = "AWS::WAFRegional::GeoMatchSet.GeoMatchConstraint"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-geomatchset-geomatchconstraint.html

    Property Document:

    - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-geomatchset-geomatchconstraint.html#cfn-wafregional-geomatchset-geomatchconstraint-type
    - ``rp_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-geomatchset-geomatchconstraint.html#cfn-wafregional-geomatchset-geomatchconstraint-value
    """
    AWS_OBJECT_TYPE = "AWS::WAFRegional::GeoMatchSet.GeoMatchConstraint"

    # Non-optional validator: a value of the intrinsic-string type is required.
    rp_Type: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Type"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-geomatchset-geomatchconstraint.html#cfn-wafregional-geomatchset-geomatchconstraint-type"""

    # Non-optional validator: a value of the intrinsic-string type is required.
    rp_Value: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Value"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-geomatchset-geomatchconstraint.html#cfn-wafregional-geomatchset-geomatchconstraint-value"""
@attr.s
class PropSqlInjectionMatchSetFieldToMatch(Property):
    """
    AWS Object Type = "AWS::WAFRegional::SqlInjectionMatchSet.FieldToMatch"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sqlinjectionmatchset-fieldtomatch.html

    Property Document:

    - ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sqlinjectionmatchset-fieldtomatch.html#cfn-wafregional-sqlinjectionmatchset-fieldtomatch-type
    - ``p_Data``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sqlinjectionmatchset-fieldtomatch.html#cfn-wafregional-sqlinjectionmatchset-fieldtomatch-data
    """
    AWS_OBJECT_TYPE = "AWS::WAFRegional::SqlInjectionMatchSet.FieldToMatch"

    # Non-optional validator: a value of the intrinsic-string type is required.
    rp_Type: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Type"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sqlinjectionmatchset-fieldtomatch.html#cfn-wafregional-sqlinjectionmatchset-fieldtomatch-type"""

    # Optional validator: None is accepted for this field.
    p_Data: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Data"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sqlinjectionmatchset-fieldtomatch.html#cfn-wafregional-sqlinjectionmatchset-fieldtomatch-data"""
@attr.s
class PropWebACLAction(Property):
"""
AWS Object Type = "AWS::WAFRegional::WebACL.Action"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-webacl-action.html
Property Document:
- ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-webacl-action.html#cfn-wafregional-webacl-action-type
"""
AWS_OBJECT_TYPE = "AWS::WAFRegional::WebACL.Action"
rp_Type: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Type"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-webacl-action.html#cfn-wafregional-webacl-action-type"""
@attr.s
class PropSqlInjectionMatchSetSqlInjectionMatchTuple(Property):
"""
AWS Object Type = "AWS::WAFRegional::SqlInjectionMatchSet.SqlInjectionMatchTuple"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sqlinjectionmatchset-sqlinjectionmatchtuple.html
Property Document:
- ``rp_FieldToMatch``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sqlinjectionmatchset-sqlinjectionmatchtuple.html#cfn-wafregional-sqlinjectionmatchset-sqlinjectionmatchtuple-fieldtomatch
- ``rp_TextTransformation``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sqlinjectionmatchset-sqlinjectionmatchtuple.html#cfn-wafregional-sqlinjectionmatchset-sqlinjectionmatchtuple-texttransformation
"""
AWS_OBJECT_TYPE = "AWS::WAFRegional::SqlInjectionMatchSet.SqlInjectionMatchTuple"
rp_FieldToMatch: typing.Union['PropSqlInjectionMatchSetFieldToMatch', dict] = attr.ib(
default=None,
converter=PropSqlInjectionMatchSetFieldToMatch.from_dict,
validator=attr.validators.instance_of(PropSqlInjectionMatchSetFieldToMatch),
metadata={AttrMeta.PROPERTY_NAME: "FieldToMatch"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sqlinjectionmatchset-sqlinjectionmatchtuple.html#cfn-wafregional-sqlinjectionmatchset-sqlinjectionmatchtuple-fieldtomatch"""
rp_TextTransformation: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "TextTransformation"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sqlinjectionmatchset-sqlinjectionmatchtuple.html#cfn-wafregional-sqlinjectionmatchset-sqlinjectionmatchtuple-texttransformation"""
@attr.s
class PropWebACLRule(Property):
"""
AWS Object Type = "AWS::WAFRegional::WebACL.Rule"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-webacl-rule.html
Property Document:
- ``rp_Action``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-webacl-rule.html#cfn-wafregional-webacl-rule-action
- ``rp_Priority``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-webacl-rule.html#cfn-wafregional-webacl-rule-priority
- ``rp_RuleId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-webacl-rule.html#cfn-wafregional-webacl-rule-ruleid
"""
AWS_OBJECT_TYPE = "AWS::WAFRegional::WebACL.Rule"
rp_Action: typing.Union['PropWebACLAction', dict] = attr.ib(
default=None,
converter=PropWebACLAction.from_dict,
validator=attr.validators.instance_of(PropWebACLAction),
metadata={AttrMeta.PROPERTY_NAME: "Action"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-webacl-rule.html#cfn-wafregional-webacl-rule-action"""
rp_Priority: int = attr.ib(
default=None,
validator=attr.validators.instance_of(int),
metadata={AttrMeta.PROPERTY_NAME: "Priority"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-webacl-rule.html#cfn-wafregional-webacl-rule-priority"""
rp_RuleId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "RuleId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-webacl-rule.html#cfn-wafregional-webacl-rule-ruleid"""
@attr.s
class PropIPSetIPSetDescriptor(Property):
"""
AWS Object Type = "AWS::WAFRegional::IPSet.IPSetDescriptor"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-ipset-ipsetdescriptor.html
Property Document:
- ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-ipset-ipsetdescriptor.html#cfn-wafregional-ipset-ipsetdescriptor-type
- ``rp_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-ipset-ipsetdescriptor.html#cfn-wafregional-ipset-ipsetdescriptor-value
"""
AWS_OBJECT_TYPE = "AWS::WAFRegional::IPSet.IPSetDescriptor"
rp_Type: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Type"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-ipset-ipsetdescriptor.html#cfn-wafregional-ipset-ipsetdescriptor-type"""
rp_Value: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Value"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-ipset-ipsetdescriptor.html#cfn-wafregional-ipset-ipsetdescriptor-value"""
@attr.s
class PropXssMatchSetFieldToMatch(Property):
"""
AWS Object Type = "AWS::WAFRegional::XssMatchSet.FieldToMatch"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-xssmatchset-fieldtomatch.html
Property Document:
- ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-xssmatchset-fieldtomatch.html#cfn-wafregional-xssmatchset-fieldtomatch-type
- ``p_Data``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-xssmatchset-fieldtomatch.html#cfn-wafregional-xssmatchset-fieldtomatch-data
"""
AWS_OBJECT_TYPE = "AWS::WAFRegional::XssMatchSet.FieldToMatch"
rp_Type: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Type"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-xssmatchset-fieldtomatch.html#cfn-wafregional-xssmatchset-fieldtomatch-type"""
p_Data: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Data"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-xssmatchset-fieldtomatch.html#cfn-wafregional-xssmatchset-fieldtomatch-data"""
@attr.s
class PropRateBasedRulePredicate(Property):
"""
AWS Object Type = "AWS::WAFRegional::RateBasedRule.Predicate"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-ratebasedrule-predicate.html
Property Document:
- ``rp_DataId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-ratebasedrule-predicate.html#cfn-wafregional-ratebasedrule-predicate-dataid
- ``rp_Negated``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-ratebasedrule-predicate.html#cfn-wafregional-ratebasedrule-predicate-negated
- ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-ratebasedrule-predicate.html#cfn-wafregional-ratebasedrule-predicate-type
"""
AWS_OBJECT_TYPE = "AWS::WAFRegional::RateBasedRule.Predicate"
rp_DataId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "DataId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-ratebasedrule-predicate.html#cfn-wafregional-ratebasedrule-predicate-dataid"""
rp_Negated: bool = attr.ib(
default=None,
validator=attr.validators.instance_of(bool),
metadata={AttrMeta.PROPERTY_NAME: "Negated"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-ratebasedrule-predicate.html#cfn-wafregional-ratebasedrule-predicate-negated"""
rp_Type: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Type"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-ratebasedrule-predicate.html#cfn-wafregional-ratebasedrule-predicate-type"""
@attr.s
class PropByteMatchSetByteMatchTuple(Property):
"""
AWS Object Type = "AWS::WAFRegional::ByteMatchSet.ByteMatchTuple"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-bytematchset-bytematchtuple.html
Property Document:
- ``rp_FieldToMatch``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-bytematchset-bytematchtuple.html#cfn-wafregional-bytematchset-bytematchtuple-fieldtomatch
- ``rp_PositionalConstraint``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-bytematchset-bytematchtuple.html#cfn-wafregional-bytematchset-bytematchtuple-positionalconstraint
- ``rp_TextTransformation``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-bytematchset-bytematchtuple.html#cfn-wafregional-bytematchset-bytematchtuple-texttransformation
- ``p_TargetString``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-bytematchset-bytematchtuple.html#cfn-wafregional-bytematchset-bytematchtuple-targetstring
- ``p_TargetStringBase64``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-bytematchset-bytematchtuple.html#cfn-wafregional-bytematchset-bytematchtuple-targetstringbase64
"""
AWS_OBJECT_TYPE = "AWS::WAFRegional::ByteMatchSet.ByteMatchTuple"
rp_FieldToMatch: typing.Union['PropByteMatchSetFieldToMatch', dict] = attr.ib(
default=None,
converter=PropByteMatchSetFieldToMatch.from_dict,
validator=attr.validators.instance_of(PropByteMatchSetFieldToMatch),
metadata={AttrMeta.PROPERTY_NAME: "FieldToMatch"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-bytematchset-bytematchtuple.html#cfn-wafregional-bytematchset-bytematchtuple-fieldtomatch"""
rp_PositionalConstraint: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "PositionalConstraint"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-bytematchset-bytematchtuple.html#cfn-wafregional-bytematchset-bytematchtuple-positionalconstraint"""
rp_TextTransformation: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "TextTransformation"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-bytematchset-bytematchtuple.html#cfn-wafregional-bytematchset-bytematchtuple-texttransformation"""
p_TargetString: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "TargetString"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-bytematchset-bytematchtuple.html#cfn-wafregional-bytematchset-bytematchtuple-targetstring"""
p_TargetStringBase64: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "TargetStringBase64"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-bytematchset-bytematchtuple.html#cfn-wafregional-bytematchset-bytematchtuple-targetstringbase64"""
@attr.s
class PropSizeConstraintSetSizeConstraint(Property):
"""
AWS Object Type = "AWS::WAFRegional::SizeConstraintSet.SizeConstraint"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sizeconstraintset-sizeconstraint.html
Property Document:
- ``rp_ComparisonOperator``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sizeconstraintset-sizeconstraint.html#cfn-wafregional-sizeconstraintset-sizeconstraint-comparisonoperator
- ``rp_FieldToMatch``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sizeconstraintset-sizeconstraint.html#cfn-wafregional-sizeconstraintset-sizeconstraint-fieldtomatch
- ``rp_Size``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sizeconstraintset-sizeconstraint.html#cfn-wafregional-sizeconstraintset-sizeconstraint-size
- ``rp_TextTransformation``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sizeconstraintset-sizeconstraint.html#cfn-wafregional-sizeconstraintset-sizeconstraint-texttransformation
"""
AWS_OBJECT_TYPE = "AWS::WAFRegional::SizeConstraintSet.SizeConstraint"
rp_ComparisonOperator: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ComparisonOperator"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sizeconstraintset-sizeconstraint.html#cfn-wafregional-sizeconstraintset-sizeconstraint-comparisonoperator"""
rp_FieldToMatch: typing.Union['PropSizeConstraintSetFieldToMatch', dict] = attr.ib(
default=None,
converter=PropSizeConstraintSetFieldToMatch.from_dict,
validator=attr.validators.instance_of(PropSizeConstraintSetFieldToMatch),
metadata={AttrMeta.PROPERTY_NAME: "FieldToMatch"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sizeconstraintset-sizeconstraint.html#cfn-wafregional-sizeconstraintset-sizeconstraint-fieldtomatch"""
rp_Size: int = attr.ib(
default=None,
validator=attr.validators.instance_of(int),
metadata={AttrMeta.PROPERTY_NAME: "Size"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sizeconstraintset-sizeconstraint.html#cfn-wafregional-sizeconstraintset-sizeconstraint-size"""
rp_TextTransformation: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "TextTransformation"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-sizeconstraintset-sizeconstraint.html#cfn-wafregional-sizeconstraintset-sizeconstraint-texttransformation"""
@attr.s
class PropXssMatchSetXssMatchTuple(Property):
"""
AWS Object Type = "AWS::WAFRegional::XssMatchSet.XssMatchTuple"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-xssmatchset-xssmatchtuple.html
Property Document:
- ``rp_FieldToMatch``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-xssmatchset-xssmatchtuple.html#cfn-wafregional-xssmatchset-xssmatchtuple-fieldtomatch
- ``rp_TextTransformation``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-xssmatchset-xssmatchtuple.html#cfn-wafregional-xssmatchset-xssmatchtuple-texttransformation
"""
AWS_OBJECT_TYPE = "AWS::WAFRegional::XssMatchSet.XssMatchTuple"
rp_FieldToMatch: typing.Union['PropXssMatchSetFieldToMatch', dict] = attr.ib(
default=None,
converter=PropXssMatchSetFieldToMatch.from_dict,
validator=attr.validators.instance_of(PropXssMatchSetFieldToMatch),
metadata={AttrMeta.PROPERTY_NAME: "FieldToMatch"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-xssmatchset-xssmatchtuple.html#cfn-wafregional-xssmatchset-xssmatchtuple-fieldtomatch"""
rp_TextTransformation: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "TextTransformation"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-xssmatchset-xssmatchtuple.html#cfn-wafregional-xssmatchset-xssmatchtuple-texttransformation"""
#--- Resource declaration ---
@attr.s
class SqlInjectionMatchSet(Resource):
"""
AWS Object Type = "AWS::WAFRegional::SqlInjectionMatchSet"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-sqlinjectionmatchset.html
Property Document:
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-sqlinjectionmatchset.html#cfn-wafregional-sqlinjectionmatchset-name
- ``p_SqlInjectionMatchTuples``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-sqlinjectionmatchset.html#cfn-wafregional-sqlinjectionmatchset-sqlinjectionmatchtuples
"""
AWS_OBJECT_TYPE = "AWS::WAFRegional::SqlInjectionMatchSet"
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-sqlinjectionmatchset.html#cfn-wafregional-sqlinjectionmatchset-name"""
p_SqlInjectionMatchTuples: typing.List[typing.Union['PropSqlInjectionMatchSetSqlInjectionMatchTuple', dict]] = attr.ib(
default=None,
converter=PropSqlInjectionMatchSetSqlInjectionMatchTuple.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropSqlInjectionMatchSetSqlInjectionMatchTuple), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "SqlInjectionMatchTuples"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-sqlinjectionmatchset.html#cfn-wafregional-sqlinjectionmatchset-sqlinjectionmatchtuples"""
@attr.s
class RegexPatternSet(Resource):
"""
AWS Object Type = "AWS::WAFRegional::RegexPatternSet"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-regexpatternset.html
Property Document:
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-regexpatternset.html#cfn-wafregional-regexpatternset-name
- ``rp_RegexPatternStrings``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-regexpatternset.html#cfn-wafregional-regexpatternset-regexpatternstrings
"""
AWS_OBJECT_TYPE = "AWS::WAFRegional::RegexPatternSet"
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-regexpatternset.html#cfn-wafregional-regexpatternset-name"""
rp_RegexPatternStrings: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "RegexPatternStrings"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-regexpatternset.html#cfn-wafregional-regexpatternset-regexpatternstrings"""
@attr.s
class WebACLAssociation(Resource):
"""
AWS Object Type = "AWS::WAFRegional::WebACLAssociation"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-webaclassociation.html
Property Document:
- ``rp_ResourceArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-webaclassociation.html#cfn-wafregional-webaclassociation-resourcearn
- ``rp_WebACLId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-webaclassociation.html#cfn-wafregional-webaclassociation-webaclid
"""
AWS_OBJECT_TYPE = "AWS::WAFRegional::WebACLAssociation"
rp_ResourceArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ResourceArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-webaclassociation.html#cfn-wafregional-webaclassociation-resourcearn"""
rp_WebACLId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "WebACLId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-webaclassociation.html#cfn-wafregional-webaclassociation-webaclid"""
@attr.s
class SizeConstraintSet(Resource):
"""
AWS Object Type = "AWS::WAFRegional::SizeConstraintSet"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-sizeconstraintset.html
Property Document:
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-sizeconstraintset.html#cfn-wafregional-sizeconstraintset-name
- ``p_SizeConstraints``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-sizeconstraintset.html#cfn-wafregional-sizeconstraintset-sizeconstraints
"""
AWS_OBJECT_TYPE = "AWS::WAFRegional::SizeConstraintSet"
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-sizeconstraintset.html#cfn-wafregional-sizeconstraintset-name"""
p_SizeConstraints: typing.List[typing.Union['PropSizeConstraintSetSizeConstraint', dict]] = attr.ib(
default=None,
converter=PropSizeConstraintSetSizeConstraint.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropSizeConstraintSetSizeConstraint), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "SizeConstraints"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-sizeconstraintset.html#cfn-wafregional-sizeconstraintset-sizeconstraints"""
@attr.s
class XssMatchSet(Resource):
"""
AWS Object Type = "AWS::WAFRegional::XssMatchSet"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-xssmatchset.html
Property Document:
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-xssmatchset.html#cfn-wafregional-xssmatchset-name
- ``p_XssMatchTuples``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-xssmatchset.html#cfn-wafregional-xssmatchset-xssmatchtuples
"""
AWS_OBJECT_TYPE = "AWS::WAFRegional::XssMatchSet"
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-xssmatchset.html#cfn-wafregional-xssmatchset-name"""
p_XssMatchTuples: typing.List[typing.Union['PropXssMatchSetXssMatchTuple', dict]] = attr.ib(
default=None,
converter=PropXssMatchSetXssMatchTuple.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropXssMatchSetXssMatchTuple), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "XssMatchTuples"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-xssmatchset.html#cfn-wafregional-xssmatchset-xssmatchtuples"""
@attr.s
class RateBasedRule(Resource):
"""
AWS Object Type = "AWS::WAFRegional::RateBasedRule"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-ratebasedrule.html
Property Document:
- ``rp_MetricName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-ratebasedrule.html#cfn-wafregional-ratebasedrule-metricname
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-ratebasedrule.html#cfn-wafregional-ratebasedrule-name
- ``rp_RateKey``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-ratebasedrule.html#cfn-wafregional-ratebasedrule-ratekey
- ``rp_RateLimit``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-ratebasedrule.html#cfn-wafregional-ratebasedrule-ratelimit
- ``p_MatchPredicates``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-ratebasedrule.html#cfn-wafregional-ratebasedrule-matchpredicates
"""
AWS_OBJECT_TYPE = "AWS::WAFRegional::RateBasedRule"
rp_MetricName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "MetricName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-ratebasedrule.html#cfn-wafregional-ratebasedrule-metricname"""
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-ratebasedrule.html#cfn-wafregional-ratebasedrule-name"""
rp_RateKey: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "RateKey"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-ratebasedrule.html#cfn-wafregional-ratebasedrule-ratekey"""
rp_RateLimit: int = attr.ib(
default=None,
validator=attr.validators.instance_of(int),
metadata={AttrMeta.PROPERTY_NAME: "RateLimit"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-ratebasedrule.html#cfn-wafregional-ratebasedrule-ratelimit"""
p_MatchPredicates: typing.List[typing.Union['PropRateBasedRulePredicate', dict]] = attr.ib(
default=None,
converter=PropRateBasedRulePredicate.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropRateBasedRulePredicate), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "MatchPredicates"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-ratebasedrule.html#cfn-wafregional-ratebasedrule-matchpredicates"""
@attr.s
class GeoMatchSet(Resource):
"""
AWS Object Type = "AWS::WAFRegional::GeoMatchSet"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-geomatchset.html
Property Document:
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-geomatchset.html#cfn-wafregional-geomatchset-name
- ``p_GeoMatchConstraints``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-geomatchset.html#cfn-wafregional-geomatchset-geomatchconstraints
"""
AWS_OBJECT_TYPE = "AWS::WAFRegional::GeoMatchSet"
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-geomatchset.html#cfn-wafregional-geomatchset-name"""
p_GeoMatchConstraints: typing.List[typing.Union['PropGeoMatchSetGeoMatchConstraint', dict]] = attr.ib(
default=None,
converter=PropGeoMatchSetGeoMatchConstraint.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropGeoMatchSetGeoMatchConstraint), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "GeoMatchConstraints"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-geomatchset.html#cfn-wafregional-geomatchset-geomatchconstraints"""
@attr.s
class WebACL(Resource):
"""
AWS Object Type = "AWS::WAFRegional::WebACL"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-webacl.html
Property Document:
- ``rp_DefaultAction``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-webacl.html#cfn-wafregional-webacl-defaultaction
- ``rp_MetricName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-webacl.html#cfn-wafregional-webacl-metricname
- ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-webacl.html#cfn-wafregional-webacl-name
- ``p_Rules``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-webacl.html#cfn-wafregional-webacl-rules
"""
AWS_OBJECT_TYPE = "AWS::WAFRegional::WebACL"
rp_DefaultAction: typing.Union['PropWebACLAction', dict] = attr.ib(
default=None,
converter=PropWebACLAction.from_dict,
validator=attr.validators.instance_of(PropWebACLAction),
metadata={AttrMeta.PROPERTY_NAME: "DefaultAction"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-webacl.html#cfn-wafregional-webacl-defaultaction"""
rp_MetricName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "MetricName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-webacl.html#cfn-wafregional-webacl-metricname"""
rp_Name: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Name"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-webacl.html#cfn-wafregional-webacl-name"""
p_Rules: typing.List[typing.Union['PropWebACLRule', dict]] = attr.ib(
default=None,
converter=PropWebACLRule.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropWebACLRule), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Rules"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-webacl.html#cfn-wafregional-webacl-rules"""
@attr.s
class IPSet(Resource):
    """
    AWS Object Type = "AWS::WAFRegional::IPSet"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-ipset.html

    Property Document:

    - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-ipset.html#cfn-wafregional-ipset-name
    - ``p_IPSetDescriptors``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-ipset.html#cfn-wafregional-ipset-ipsetdescriptors
    """
    AWS_OBJECT_TYPE = "AWS::WAFRegional::IPSet"

    # ``rp_`` prefix: presumably a required property — its validator is NOT
    # wrapped in ``attr.validators.optional``, so the ``None`` default fails
    # validation unless a string-like value is supplied at construction.
    rp_Name: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Name"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-ipset.html#cfn-wafregional-ipset-name"""

    # ``p_`` prefix: optional property — ``None`` is accepted, and plain dicts
    # are converted to ``PropIPSetIPSetDescriptor`` objects by ``from_list``.
    p_IPSetDescriptors: typing.List[typing.Union['PropIPSetIPSetDescriptor', dict]] = attr.ib(
        default=None,
        converter=PropIPSetIPSetDescriptor.from_list,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropIPSetIPSetDescriptor), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "IPSetDescriptors"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-ipset.html#cfn-wafregional-ipset-ipsetdescriptors"""
@attr.s
class ByteMatchSet(Resource):
    """
    AWS Object Type = "AWS::WAFRegional::ByteMatchSet"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-bytematchset.html

    Property Document:

    - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-bytematchset.html#cfn-wafregional-bytematchset-name
    - ``p_ByteMatchTuples``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-bytematchset.html#cfn-wafregional-bytematchset-bytematchtuples
    """
    AWS_OBJECT_TYPE = "AWS::WAFRegional::ByteMatchSet"

    # ``rp_`` prefix: presumably a required property — the validator is NOT
    # wrapped in ``attr.validators.optional``, so leaving the ``None`` default
    # in place fails validation at instantiation time.
    rp_Name: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Name"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-bytematchset.html#cfn-wafregional-bytematchset-name"""

    # ``p_`` prefix: optional property — ``None`` is accepted, and plain dicts
    # are converted to ``PropByteMatchSetByteMatchTuple`` objects by ``from_list``.
    p_ByteMatchTuples: typing.List[typing.Union['PropByteMatchSetByteMatchTuple', dict]] = attr.ib(
        default=None,
        converter=PropByteMatchSetByteMatchTuple.from_list,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropByteMatchSetByteMatchTuple), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "ByteMatchTuples"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-bytematchset.html#cfn-wafregional-bytematchset-bytematchtuples"""
@attr.s
class Rule(Resource):
    """
    AWS Object Type = "AWS::WAFRegional::Rule"

    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-rule.html

    Property Document:

    - ``rp_MetricName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-rule.html#cfn-wafregional-rule-metricname
    - ``rp_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-rule.html#cfn-wafregional-rule-name
    - ``p_Predicates``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-rule.html#cfn-wafregional-rule-predicates
    """
    AWS_OBJECT_TYPE = "AWS::WAFRegional::Rule"

    # ``rp_`` prefix: presumably required — the validators below are NOT
    # wrapped in ``attr.validators.optional``, so the ``None`` defaults fail
    # validation unless string-like values are supplied at construction.
    rp_MetricName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "MetricName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-rule.html#cfn-wafregional-rule-metricname"""

    rp_Name: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Name"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-rule.html#cfn-wafregional-rule-name"""

    # ``p_`` prefix: optional property — ``None`` is accepted, and plain dicts
    # are converted to ``PropRulePredicate`` objects by ``from_list``.
    p_Predicates: typing.List[typing.Union['PropRulePredicate', dict]] = attr.ib(
        default=None,
        converter=PropRulePredicate.from_list,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(PropRulePredicate), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "Predicates"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-wafregional-rule.html#cfn-wafregional-rule-predicates"""
| 57.416567
| 249
| 0.767061
| 5,040
| 47,828
| 7.182738
| 0.024405
| 0.033369
| 0.045883
| 0.07091
| 0.924035
| 0.921881
| 0.884257
| 0.841772
| 0.841772
| 0.841082
| 0
| 0.000257
| 0.105796
| 47,828
| 832
| 250
| 57.485577
| 0.846193
| 0.348833
| 0
| 0.523114
| 0
| 0
| 0.097713
| 0.066558
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.009732
| 0
| 0.284672
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5d7a1ebd0b7b902cc3d8af3581fc6a3e42a6b289
| 22
|
py
|
Python
|
upeval/__init__.py
|
jnjcc/upeval
|
551579be5007997eb792bdd0c9b05b6c9269c1d0
|
[
"MIT"
] | 1
|
2021-06-24T05:13:26.000Z
|
2021-06-24T05:13:26.000Z
|
upeval/__init__.py
|
jnjcc/upeval
|
551579be5007997eb792bdd0c9b05b6c9269c1d0
|
[
"MIT"
] | null | null | null |
upeval/__init__.py
|
jnjcc/upeval
|
551579be5007997eb792bdd0c9b05b6c9269c1d0
|
[
"MIT"
] | null | null | null |
from .upeval import *
| 11
| 21
| 0.727273
| 3
| 22
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 22
| 1
| 22
| 22
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5dc926860f14cb057181b77dde2bb7eca649f3e0
| 38
|
py
|
Python
|
example/python/in/ex2.py
|
Hiroshi123/CompilerCook
|
f1f07cb26cd9278daa901ea4b5660eedcd265ffc
|
[
"BSD-3-Clause"
] | null | null | null |
example/python/in/ex2.py
|
Hiroshi123/CompilerCook
|
f1f07cb26cd9278daa901ea4b5660eedcd265ffc
|
[
"BSD-3-Clause"
] | null | null | null |
example/python/in/ex2.py
|
Hiroshi123/CompilerCook
|
f1f07cb26cd9278daa901ea4b5660eedcd265ffc
|
[
"BSD-3-Clause"
] | null | null | null |
#this is com
#this is another
#third
| 7.6
| 16
| 0.710526
| 7
| 38
| 3.857143
| 0.714286
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 38
| 4
| 17
| 9.5
| 0.9
| 0.815789
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5dca2bea1177bcbbff535518157ea4123fb4b34f
| 65
|
py
|
Python
|
bunruija/filters/__init__.py
|
tma15/bunruija
|
64a5c993a06e9de75f8f382cc4b817f91965223f
|
[
"MIT"
] | 4
|
2020-12-22T11:12:35.000Z
|
2021-12-15T13:30:02.000Z
|
bunruija/filters/__init__.py
|
tma15/bunruija
|
64a5c993a06e9de75f8f382cc4b817f91965223f
|
[
"MIT"
] | 4
|
2021-01-16T07:34:22.000Z
|
2021-08-14T06:56:07.000Z
|
bunruija/filters/__init__.py
|
tma15/bunruija
|
64a5c993a06e9de75f8f382cc4b817f91965223f
|
[
"MIT"
] | null | null | null |
from .filter import BaseFilter
from .pos_filter import PosFilter
| 21.666667
| 33
| 0.846154
| 9
| 65
| 6
| 0.666667
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123077
| 65
| 2
| 34
| 32.5
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5de244106c27919ddfcedaae587a0f37bbe85f95
| 8,265
|
py
|
Python
|
api_gateway/rooms_endpoints.py
|
dearbornlavern/virtual-assistant-1
|
b3a2ffd87c95c1031cdf4df0e9baa0223d9e3c33
|
[
"Apache-2.0"
] | null | null | null |
api_gateway/rooms_endpoints.py
|
dearbornlavern/virtual-assistant-1
|
b3a2ffd87c95c1031cdf4df0e9baa0223d9e3c33
|
[
"Apache-2.0"
] | null | null | null |
api_gateway/rooms_endpoints.py
|
dearbornlavern/virtual-assistant-1
|
b3a2ffd87c95c1031cdf4df0e9baa0223d9e3c33
|
[
"Apache-2.0"
] | null | null | null |
from __main__ import sio
''' Events handlers for Join room and leave room ,
This method contains join room and leave room for all name spaces. This was introduced to ensure join room and leave
room happens within the namespace'''
# All namespaces that expose join_room / leave_room events.  The original file
# duplicated the two handlers twelve times with copy-paste errors: the '/nav'
# handlers logged "/dashboard" in their messages, and the '/aconversation'
# handlers entered/left rooms in the '/conversation' namespace instead of
# their own.  Both are treated here as copy-paste defects and fixed by
# parameterizing a single registrar over the namespace.
# NOTE(review): if '/aconversation' intentionally aliased '/conversation'
# rooms, restore that special case — verify against the front-end client.
_ROOM_NAMESPACES = (
    '/project', '/domain', '/intent', '/response', '/story', '/dashboard',
    '/nav', '/trynow', '/modelpublish', '/action', '/conversation',
    '/aconversation',
)


def _register_room_handlers(namespace):
    """Register join_room/leave_room handlers for one socket.io namespace.

    Joining puts the client both in the requested room and in a private room
    named after its own ``sid`` (same behavior as the original handlers);
    leaving removes it from both.
    """

    @sio.on('join_room', namespace=namespace)
    async def join_room(sid, room_name):
        print("________________________ User {} joined room {} with namespace {} _______________________".format(sid, room_name, namespace))
        sio.enter_room(sid, room=room_name, namespace=namespace)
        sio.enter_room(sid, room=sid, namespace=namespace)

    @sio.on('leave_room', namespace=namespace)
    async def leave_room(sid, room_name):
        print("________________________ User {} Left room {} with namespace {} _______________________".format(sid, room_name, namespace))
        sio.leave_room(sid, room=room_name, namespace=namespace)
        sio.leave_room(sid, room=sid, namespace=namespace)


for _namespace in _ROOM_NAMESPACES:
    _register_room_handlers(_namespace)
| 48.617647
| 136
| 0.748699
| 1,022
| 8,265
| 4.782779
| 0.046967
| 0.13748
| 0.162029
| 0.11784
| 0.975859
| 0.845949
| 0.845949
| 0.825491
| 0.64198
| 0.619067
| 0
| 0
| 0.11095
| 8,265
| 170
| 137
| 48.617647
| 0.665215
| 0
| 0
| 0.330579
| 0
| 0
| 0.388241
| 0.150087
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.008264
| 0
| 0.008264
| 0.198347
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b910b3536c88777b99e2d08a6024daffcd091bf7
| 367
|
py
|
Python
|
optapy-jpype/src/main/python/__init__.py
|
ge0ffrey/optapy
|
645a8c30fe5da5f7427c9f45ac0b6dca09295b21
|
[
"Apache-2.0"
] | null | null | null |
optapy-jpype/src/main/python/__init__.py
|
ge0ffrey/optapy
|
645a8c30fe5da5f7427c9f45ac0b6dca09295b21
|
[
"Apache-2.0"
] | null | null | null |
optapy-jpype/src/main/python/__init__.py
|
ge0ffrey/optapy
|
645a8c30fe5da5f7427c9f45ac0b6dca09295b21
|
[
"Apache-2.0"
] | null | null | null |
from .annotations import PlanningEntity, PlanningScore, PlanningSolution, PlanningId, PlanningVariable, \
PlanningEntityCollectionProperty, ProblemFactCollectionProperty, ProblemFact, PlanningScore, \
ValueRangeProvider, ConstraintProvider
from .optaplanner_java_interop import getClass, SolverConfig, solve
from .types import Joiners, HardSoftScore, Duration
| 73.4
| 105
| 0.852861
| 28
| 367
| 11.107143
| 0.821429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095368
| 367
| 5
| 106
| 73.4
| 0.936747
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f8d58520df026bc5f2fff1cb5480473b1cd91943
| 20
|
py
|
Python
|
mattspy/stats/__init__.py
|
beckermr/mattspy
|
e3752dab96479a9439f369000c6ea05c8b717113
|
[
"BSD-3-Clause"
] | null | null | null |
mattspy/stats/__init__.py
|
beckermr/mattspy
|
e3752dab96479a9439f369000c6ea05c8b717113
|
[
"BSD-3-Clause"
] | 4
|
2022-02-08T15:50:52.000Z
|
2022-02-11T21:08:31.000Z
|
mattspy/stats/__init__.py
|
beckermr/mattspy
|
e3752dab96479a9439f369000c6ea05c8b717113
|
[
"BSD-3-Clause"
] | null | null | null |
from mad import mad
| 10
| 19
| 0.8
| 4
| 20
| 4
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 20
| 1
| 20
| 20
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5d14c706c6e881469eee66b347f6d7095362c859
| 61
|
py
|
Python
|
extended_int/__init__.py
|
NeilGirdhar/extended_int
|
c288c97781024d4883e80c84af88ff50fda5a1b8
|
[
"MIT"
] | 2
|
2019-09-11T21:12:35.000Z
|
2022-01-14T12:01:50.000Z
|
extended_int/__init__.py
|
NeilGirdhar/extended_int
|
c288c97781024d4883e80c84af88ff50fda5a1b8
|
[
"MIT"
] | null | null | null |
extended_int/__init__.py
|
NeilGirdhar/extended_int
|
c288c97781024d4883e80c84af88ff50fda5a1b8
|
[
"MIT"
] | null | null | null |
from .extended_int import *
from .extended_integral import *
| 20.333333
| 32
| 0.803279
| 8
| 61
| 5.875
| 0.625
| 0.510638
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131148
| 61
| 2
| 33
| 30.5
| 0.886792
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5d15010299ffb4c6d44a4ed442843703e81534a3
| 34
|
py
|
Python
|
python/ql/test/2/library-tests/PointsTo/imports/test.py
|
vadi2/codeql
|
a806a4f08696d241ab295a286999251b56a6860c
|
[
"MIT"
] | 4,036
|
2020-04-29T00:09:57.000Z
|
2022-03-31T14:16:38.000Z
|
python/ql/test/2/library-tests/PointsTo/imports/test.py
|
vadi2/codeql
|
a806a4f08696d241ab295a286999251b56a6860c
|
[
"MIT"
] | 2,970
|
2020-04-28T17:24:18.000Z
|
2022-03-31T22:40:46.000Z
|
python/ql/test/2/library-tests/PointsTo/imports/test.py
|
ScriptBox99/github-codeql
|
2ecf0d3264db8fb4904b2056964da469372a235c
|
[
"MIT"
] | 794
|
2020-04-29T00:28:25.000Z
|
2022-03-30T08:21:46.000Z
|
from package import foo as myfoo
| 11.333333
| 32
| 0.794118
| 6
| 34
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.205882
| 34
| 2
| 33
| 17
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5d3f7361151cf02358a67ab78f2264b79127753a
| 112
|
py
|
Python
|
conftest.py
|
valdergallo/django-compress-field
|
311560fcffc07299d8641adbd1c16b130aa894dc
|
[
"BSD-2-Clause-FreeBSD"
] | 13
|
2015-10-02T20:25:08.000Z
|
2020-07-28T18:01:17.000Z
|
conftest.py
|
valdergallo/django-compress-field
|
311560fcffc07299d8641adbd1c16b130aa894dc
|
[
"BSD-2-Clause-FreeBSD"
] | 4
|
2015-10-14T12:36:38.000Z
|
2020-07-15T14:51:02.000Z
|
conftest.py
|
valdergallo/django-compress-field
|
311560fcffc07299d8641adbd1c16b130aa894dc
|
[
"BSD-2-Clause-FreeBSD"
] | 5
|
2017-03-07T07:37:17.000Z
|
2021-09-19T14:14:45.000Z
|
import os
def pytest_configure():
    """Point Django at the test settings module before tests are collected.

    An already-exported ``DJANGO_SETTINGS_MODULE`` takes precedence, so a
    developer can still override the settings from the environment.
    """
    if 'DJANGO_SETTINGS_MODULE' not in os.environ:
        os.environ['DJANGO_SETTINGS_MODULE'] = 'django_test_settings'
| 18.666667
| 75
| 0.785714
| 14
| 112
| 5.928571
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 112
| 5
| 76
| 22.4
| 0.83
| 0
| 0
| 0
| 0
| 0
| 0.378378
| 0.198198
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
53d029e3579cfa6b6f3e1ff4195ca1dbee10ac9c
| 8,632
|
py
|
Python
|
e3nn/non_linearities/norm_activation.py
|
zizai/e3nn
|
3efa8d7e110d23410d0e8c5975eaa552da1c2e0b
|
[
"MIT"
] | null | null | null |
e3nn/non_linearities/norm_activation.py
|
zizai/e3nn
|
3efa8d7e110d23410d0e8c5975eaa552da1c2e0b
|
[
"MIT"
] | null | null | null |
e3nn/non_linearities/norm_activation.py
|
zizai/e3nn
|
3efa8d7e110d23410d0e8c5975eaa552da1c2e0b
|
[
"MIT"
] | null | null | null |
# pylint: disable=C,R,E1101
import torch
import numpy as np
class NormActivation(torch.nn.Module):
    """Capsule nonlinearity acting on vector norms.

    Scalar (1-dimensional) capsules are passed through ``scalar_act``;
    higher-dimensional capsules keep their direction while a nonlinearity
    (``tensor_act``, Softplus by default) is applied to their L2 norm
    shifted by a learnable per-capsule bias.
    """

    def __init__(self, dimensionalities, tensor_act=None, scalar_act=None, eps=1e-6, bias_min=.5, bias_max=2):
        """
        :param dimensionalities: list of dimensionalities of the capsules
        :param tensor_act: nonlinearity applied to the bias-shifted norms;
            defaults to ``Softplus(beta=1, threshold=20)``
        :param scalar_act: activation applied to scalar capsules — in the
            last layer often set to ``None`` (identity)
        :param eps: regularizer added to the norm to prevent division by zero
        :param bias_min: lower cutoff of the uniform bias initialization
        :param bias_max: upper cutoff of the uniform bias initialization
        """
        super().__init__()
        self.dimensionalities = dimensionalities
        self.tensor_act = tensor_act if tensor_act else torch.nn.Softplus(beta=1, threshold=20)
        self.scalar_act = scalar_act
        self.is_scalar = [d == 1 for d in dimensionalities]
        # One learnable norm bias per non-scalar capsule.
        n_biased = sum(1 for d in dimensionalities if d != 1)
        self.bias = torch.nn.Parameter(torch.Tensor(n_biased)) if n_biased else None
        self.eps = eps
        self.bias_min = bias_min
        self.bias_max = bias_max
        self.reset_parameters()

    def reset_parameters(self):
        """Draw each norm bias uniformly from [bias_min, bias_max]."""
        if self.bias is None:
            return
        self.bias.data.uniform_(self.bias_min, self.bias_max)

    def forward(self, input):  # pylint: disable=W
        """
        :param input: [batch, feature, x, y, z]
        """
        pieces = []
        offset = 0
        next_bias = 0
        for dim, treat_as_scalar in zip(self.dimensionalities, self.is_scalar):
            chunk = input[:, offset:offset + dim]
            if treat_as_scalar:
                # Scalar capsule: identity when scalar_act is None.
                pieces.append(chunk if self.scalar_act is None else self.scalar_act(chunk))
            else:
                # Vector capsule: nonlinearity acts on the biased norm while
                # the direction chunk / norm is preserved.
                norm = torch.norm(chunk, p=2, dim=1, keepdim=True) + self.eps  # [batch, 1, x, y, z]
                shift = self.bias[next_bias].expand_as(norm)
                pieces.append(self.tensor_act(norm - shift) * (chunk / norm))
                next_bias += 1
            offset += dim
        # Every feature channel and every bias must have been consumed.
        assert offset == input.size(1)
        if self.bias is not None:
            assert next_bias == self.bias.size(0)
        return torch.cat(pieces, dim=1)
class NormSoftplus(torch.nn.Module):
    # NOTE(review): this is an almost verbatim copy of NormActivation above
    # with the norm nonlinearity hard-coded to Softplus (constructed anew on
    # every forward pass) instead of being configurable via ``tensor_act``.
    # Consider delegating to NormActivation to remove the duplication.
    def __init__(self, dimensionalities, scalar_act, eps=1e-6, bias_min=.5, bias_max=2):
        '''
        :param dimensionalities: list of dimensionalities of the capsules
        :param scalar_act: activation function applied to scalar capsules - in last layer often set to None
        :param eps: regularazier added to norm to prevent division by zero
        :param bias_min: lower cutoff of uniform bias initialization
        :param bias_max: upper cutoff of uniform bias initialization

        scalar capsules are acted on by a ReLU nonlinearity, higher order capsules with a nonlinearity acting on their norm
        '''
        super().__init__()
        self.dimensionalities = dimensionalities
        self.scalar_act = scalar_act
        # Capsules of dimensionality 1 are scalars and get no norm bias.
        self.is_scalar = [dim == 1 for dim in dimensionalities]
        # One learnable bias per non-scalar capsule.
        nbias = int(np.sum(np.array(dimensionalities) != 1))
        self.bias = torch.nn.Parameter(torch.Tensor(nbias)) if nbias > 0 else None
        self.eps = eps
        self.bias_min = bias_min
        self.bias_max = bias_max
        self.reset_parameters()

    def reset_parameters(self):
        # Biases start uniformly distributed in [bias_min, bias_max].
        if self.bias is not None:
            self.bias.data.uniform_(self.bias_min, self.bias_max)

    def forward(self, input):  # pylint: disable=W
        '''
        :param input: [batch, feature, x, y, z]
        '''
        capsule_activations = []
        idx_capsule_begin = 0
        idx_bias = 0
        for dim, scalar_bool in zip(self.dimensionalities, self.is_scalar):
            # take capsule out of input
            capsule = input[:, idx_capsule_begin:idx_capsule_begin + dim]
            # act on scalar capsules with scalar activation
            if scalar_bool:
                if self.scalar_act is None:
                    capsule_activ = capsule
                else:
                    capsule_activ = self.scalar_act(capsule)
            # act on norms of higher order capsules
            else:
                norm = torch.norm(capsule, p=2, dim=1, keepdim=True) + self.eps  # [batch, 1, x, y, z]
                b = self.bias[idx_bias].expand_as(norm)  # [batch, 1, x, y, z]
                activ_factor = torch.nn.Softplus(beta=1, threshold=20)(norm - b)  # [batch, 1, x, y, z]
                # activ_factor = 1 + torch.nn.ELU(norm - b.expand_as(norm)) # add 1 to make scaling factor positive
                capsule_activ = activ_factor * (capsule / norm)
                idx_bias += 1
            # append to list of nonlinearly transformed capsules
            capsule_activations.append(capsule_activ)
            idx_capsule_begin += dim
        # Sanity checks: all feature channels consumed and all biases used.
        assert idx_capsule_begin == input.size(1)
        if self.bias is not None:
            assert idx_bias == self.bias.size(0)
        return torch.cat(capsule_activations, dim=1)
class NormRelu(torch.nn.Module):
    # Applies NormReluFunction (a biased ReLU on the feature-vector norm)
    # to selected slices of the feature dimension.
    def __init__(self, enable):
        '''
        :param enable: list of tuple (dimension, boolean)

        If boolean is True a bias and relu will be applied
        '''
        super().__init__()
        self.enable = enable
        # One learnable bias per enabled slice; no parameter when nothing
        # is enabled (forward then degenerates to the identity).
        nbias = sum([1 for d, on in self.enable if on])
        self.bias = torch.nn.Parameter(torch.FloatTensor(nbias)) if nbias > 0 else None
        self.reset_parameters()

    def reset_parameters(self):
        # All biases start at the constant 0.1.
        if self.bias is not None:
            self.bias.data[:] = 0.1

    def forward(self, input):  # pylint: disable=W
        '''
        :param input: [batch, feature, x, y, z]
        '''
        # Nothing enabled: pass through unchanged.
        if self.bias is None:
            return input
        xs = []
        begin1 = 0  # read offset into the feature dimension
        begin2 = 0  # index of the next unused bias
        for d, on in self.enable:
            x = input[:, begin1:begin1 + d]
            if on:
                # Legacy autograd calling style; see NormReluFunction.
                x = NormReluFunction()(x, self.bias[begin2:begin2 + 1])
                begin2 += 1
            xs.append(x)
            begin1 += d
        # Sanity checks: all feature channels consumed and all biases used.
        assert begin1 == input.size(1)
        assert begin2 == self.bias.size(0)
        return torch.cat(xs, dim=1)
class NormReluFunction(torch.autograd.Function):
    # Scales each 5D feature vector x so its L2 norm becomes
    # max(||x|| - b, 0), i.e. a ReLU with learnable threshold b acting on
    # the norm while the direction is preserved.
    # NOTE(review): this is the legacy (pre-0.4) autograd.Function style —
    # instantiated and called directly, with state stashed on ``self``.
    # Recent PyTorch versions require the static forward/backward API.
    def forward(self, x, b):  # pylint: disable=W
        # Per-location norm over the feature dim; 1e-8 guards the division.
        norm = torch.sqrt(torch.sum(x * x, dim=1)) + 1e-8  # [batch, x, y, z]
        # Shrink the norm by the bias and clamp at zero (ReLU on the norm).
        newnorm = norm - b.expand_as(norm)  # [batch, x, y, z]
        newnorm[newnorm < 0] = 0
        ratio = newnorm / norm
        # Broadcast the per-location ratio back over the feature dimension.
        ratio = ratio.reshape(x.size(0), 1, x.size(2), x.size(3), x.size(4)).expand_as(x)
        self.save_for_backward(x, b)
        r = x * ratio
        return r

    def backward(self, grad_out):  # pylint: disable=W
        x, b = self.saved_tensors
        norm = torch.sqrt(torch.sum(x * x, dim=1)) + 1e-8  # [batch, x, y, z]
        grad_x = grad_b = None
        if self.needs_input_grad[0]:
            # Recompute the forward ratio rather than saving it.
            newnorm = norm - b.expand_as(norm)  # [batch, x, y, z]
            newnorm[newnorm < 0] = 0
            ratio = newnorm / norm
            ratio = ratio.reshape(x.size(0), 1, x.size(2), x.size(3), x.size(4)).expand_as(x)
            # First term: treat the ratio as a constant scale factor.
            grad_x = grad_out * ratio
            # Second term: contribution of d(ratio)/dx through the product
            # rule (checked numerically in test_norm_relu_gradient).
            grad_x += torch.sum(grad_out * x, dim=1, keepdim=True).expand_as(x) * x / \
                (norm ** 2).reshape(x.size(0), 1, x.size(2), x.size(3), x.size(4)).expand_as(x) * (1 - ratio)
            # No gradient where the ReLU on the norm was inactive.
            grad_x[ratio <= 0] = 0
        if self.needs_input_grad[1]:
            # dr/db = -x / norm, contracted with grad_out over features and
            # summed over all elements (b is a single scalar parameter).
            grad_b = -torch.sum(grad_out * x, dim=1) / norm
            grad_b[norm < b] = 0
            grad_b = torch.sum(grad_b.reshape(-1), dim=0)
        return grad_x, grad_b
def test_norm_relu_gradient():
    """Numerically verify the hand-written backward of NormReluFunction."""
    signal = torch.rand(1, 5, 3, 3, 3)
    threshold = torch.rand(1)
    args = tuple(
        torch.autograd.Variable(t, requires_grad=True)
        for t in (signal, threshold)
    )
    torch.autograd.gradcheck(NormReluFunction(), args, eps=1e-3, rtol=1e-2)
| 39.59633
| 123
| 0.592563
| 1,178
| 8,632
| 4.204584
| 0.135823
| 0.041995
| 0.007874
| 0.014537
| 0.823743
| 0.811226
| 0.780739
| 0.734302
| 0.734302
| 0.734302
| 0
| 0.019451
| 0.303174
| 8,632
| 217
| 124
| 39.778802
| 0.80399
| 0.230074
| 0
| 0.620438
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043796
| 1
| 0.087591
| false
| 0
| 0.014599
| 0
| 0.175182
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
53e98885a6e3ba8c2af0fd113e052151a9e5bafa
| 11,517
|
py
|
Python
|
theano/gpuarray/tests/test_pool.py
|
JimmyRetza/Theano
|
72d83bce0d547d54ab3513bcba35c166979f7a6f
|
[
"BSD-3-Clause"
] | 9
|
2018-10-29T20:25:25.000Z
|
2021-11-17T11:03:17.000Z
|
theano/gpuarray/tests/test_pool.py
|
EnjoyLifeFund/Debian_py36_packages
|
1985d4c73fabd5f08f54b922e73a9306e09c77a5
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null |
theano/gpuarray/tests/test_pool.py
|
EnjoyLifeFund/Debian_py36_packages
|
1985d4c73fabd5f08f54b922e73a9306e09c77a5
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 2
|
2019-08-12T13:45:09.000Z
|
2020-08-14T10:13:50.000Z
|
from __future__ import absolute_import, print_function, division
import unittest
import copy
import itertools
import numpy as np
import theano
from theano import gradient
from theano import tensor
from theano.tensor.signal.pool import (Pool, MaxPoolGrad, AveragePoolGrad,
DownsampleFactorMaxGradGrad)
from theano.tests import unittest_tools as utt
from .config import mode_with_gpu, mode_without_gpu
from .test_basic_ops import rand
from ..pool import (GpuPool, GpuMaxPoolGrad, GpuAveragePoolGrad,
GpuDownsampleFactorMaxGradGrad)
class TestPool(unittest.TestCase):
    """Argument-validation tests for the GPU pooling Ops."""

    def test_pool_py_interface(self):
        # Invalid configurations must be rejected at Op-call time (Python
        # graph construction), before any function is compiled.
        shp = (2, 2, 2, 2)
        inp = theano.shared(rand(*shp), 'a')
        inp = tensor.as_tensor_variable(inp)
        with self.assertRaises(ValueError):
            # test when pad >= ws
            ds_op = GpuPool(ignore_border=True, ndim=2)
            ds_op(inp, [2, 2], pad=[3, 3])
        with self.assertRaises(ValueError):
            # test when ignore_border and pad >= 0
            ds_op = GpuPool(ignore_border=False, ndim=2)
            ds_op(inp, [2, 2], pad=[1, 1])

    def test_pool_c_interface(self):
        # Same invalid padding, but with pad supplied as a symbolic variable
        # so the check has to happen in the compiled C path (py code is
        # disabled and cudnn excluded to force it).
        gpu_mode = mode_with_gpu.excluding("cudnn")
        gpu_mode.check_py_code = False
        shp = (2, 2, 2, 2)
        inp = theano.shared(rand(*shp), 'a')
        inp = tensor.as_tensor_variable(inp)
        with self.assertRaises(ValueError):
            # test when ignore_border and pad >= 0
            ds_op = GpuPool(ignore_border=False, ndim=2)
            pad = tensor.as_tensor_variable([1, 1])
            f = theano.function([], ds_op(inp, [2, 2], pad=pad), mode=gpu_mode)
            f()

    def test_pool_big_ws(self):
        # A window (5x5) larger than the input (2x2) must still execute in
        # 'average_exc_pad' mode without ignore_border.
        gpu_mode = mode_with_gpu.excluding("cudnn")
        gpu_mode.check_py_code = False
        shp = (2, 2, 2, 2)
        inp = theano.shared(rand(*shp), 'a')
        inp = tensor.as_tensor_variable(inp)
        ds_op = GpuPool(ignore_border=False, mode='average_exc_pad', ndim=2)
        pad = tensor.as_tensor_variable([0, 0])
        f = theano.function([], ds_op(inp, [5, 5], stride=[1, 1], pad=pad),
                            mode=gpu_mode)
        f()
def test_pool2d():
    """Check 2D GPU pooling against the CPU reference implementation.

    For many input shapes, window sizes, strides, paddings and pooling
    modes, verifies that (a) the GPU optimizer lifts the graph to the
    corresponding GPU op and (b) forward values, gradients, the R-op and
    grad-of-grad all match the CPU reference graph.
    """
    shps = [(1, 12),
            (1, 1, 12),
            (1, 1, 1, 12),
            (1, 1, 2, 2),
            (1, 1, 1, 1),
            (1, 1, 4, 4),
            (1, 1, 10, 11),
            (1, 2, 2, 2),
            (3, 5, 4, 4),
            (25, 1, 7, 7),
            (1, 1, 12, 12),
            (1, 1, 2, 14),
            (1, 1, 12, 14),
            (1, 1, 14, 14),
            (1, 1, 16, 16),
            (1, 1, 18, 18),
            (1, 1, 24, 24),
            (1, 6, 24, 24),
            (10, 1, 24, 24),
            (10, 6, 24, 24),
            (30, 6, 12, 12),
            (30, 2, 24, 24),
            (30, 6, 24, 24),
            (10, 10, 10, 11),
            (1, 1, 10, 1025),
            (1, 1, 10, 1023),
            (1, 1, 1025, 10),
            (1, 1, 1023, 10),
            (3, 2, 16, 16, 16),
            (3, 2, 6, 6, 6, 5),
            (3, 2, 6, 6, 6, 5, 7), ]
    # Deterministic shuffle so successive runs visit shapes in varied order.
    np.random.RandomState(utt.fetch_seed()).shuffle(shps)
    test_ws = (2, 2), (3, 2), (1, 1)
    test_st = (2, 2), (3, 2), (1, 1)
    test_mode = ['max', 'sum', 'average_inc_pad', 'average_exc_pad']
    ref_mode = copy.copy(mode_without_gpu)
    ref_mode.check_py_code = False
    gpu_mode = mode_with_gpu.excluding("cudnn")
    gpu_mode.check_py_code = False
    for shp in shps:
        for mode, ws, st in itertools.product(test_mode, test_ws, test_st):
            # Window must fit inside the last two dimensions.
            if ws[0] > shp[-2] or ws[1] > shp[-1]:
                continue
            for ignore_border, pad in zip((True, False), [(1, 1), (0, 0)]):
                # Pooling rejects pad >= ws.
                if pad[0] >= ws[0] or pad[1] >= ws[1]:
                    continue
                # 'average_exc_pad' does not support non-zero padding.
                if mode == 'average_exc_pad' and (pad[0] > 0 or pad[1] > 0):
                    continue
                ds_op = Pool(ndim=len(ws), mode=mode, ignore_border=ignore_border)
                a = theano.shared(rand(*shp), 'a')
                a_pooled = ds_op(tensor.as_tensor_variable(a), ws, st, pad)
                # Forward pass: GPU graph must use GpuPool and agree with CPU.
                f = theano.function([], a_pooled, mode=gpu_mode)
                f2 = theano.function([], a_pooled, mode=ref_mode)
                assert any(isinstance(node.op, GpuPool)
                           for node in f.maker.fgraph.toposort())
                assert any(isinstance(node.op, Pool)
                           for node in f2.maker.fgraph.toposort())
                assert np.allclose(f(), f2()), (shp, ws, st, pad, mode, ignore_border)
                # Gradient: lifted to the matching GPU grad op, values agree.
                a_pooled_grad = tensor.grad(a_pooled.sum(), a)
                g = theano.function([], a_pooled_grad, mode=gpu_mode)
                g2 = theano.function([], a_pooled_grad, mode=ref_mode)
                if mode == 'max':
                    gop = GpuMaxPoolGrad
                    gop2 = MaxPoolGrad
                else:
                    gop = GpuAveragePoolGrad
                    gop2 = AveragePoolGrad
                assert any(isinstance(node.op, gop)
                           for node in g.maker.fgraph.toposort())
                assert any(isinstance(node.op, gop2)
                           for node in g2.maker.fgraph.toposort())
                assert np.allclose(g(), g2()), (shp, ws, st, pad, mode, ignore_border)
                # test rop and grad grad for max pooling
                # for average pooling grad grad is just average pooling grad
                if mode != 'max':
                    continue
                ea = theano.shared(rand(*shp), 'ea')
                gr = theano.function([], tensor.Rop(a_pooled, a, ea), mode=gpu_mode)
                gr2 = theano.function([], tensor.Rop(a_pooled, a, ea), mode=ref_mode)
                assert any(
                    isinstance(node.op, GpuDownsampleFactorMaxGradGrad)
                    for node in gr.maker.fgraph.toposort()
                )
                assert any(
                    isinstance(node.op, DownsampleFactorMaxGradGrad)
                    for node in gr2.maker.fgraph.toposort()
                )
                assert np.allclose(gr(), gr2()), (shp, ws, st, pad, mode, ignore_border)
                ggf = gradient.Lop(tensor.grad((a_pooled**2).sum(), a), a, a)
                gg = theano.function([], ggf, mode=gpu_mode)
                gg2 = theano.function([], ggf, mode=ref_mode)
                assert any(
                    isinstance(node.op, GpuDownsampleFactorMaxGradGrad)
                    for node in gg.maker.fgraph.toposort()
                )
                assert any(
                    isinstance(node.op, DownsampleFactorMaxGradGrad)
                    for node in gg2.maker.fgraph.toposort()
                )
                assert np.allclose(gg(), gg2()), (shp, ws, st, pad, mode, ignore_border)
def test_pool3d():
    """Check 3D GPU pooling against the CPU reference implementation.

    Same structure as test_pool2d, but with 3D pooling windows over the
    last three dimensions of the input.
    """
    shps = [(1, 1, 12),
            (1, 1, 1, 1, 1),
            (1, 1, 1, 1, 1025),
            (1, 1, 2, 2, 2),
            (1, 1, 7, 7, 7),
            (1, 1, 9, 10, 11),
            (1, 6, 18, 18, 18),
            (1, 1, 6, 24, 24),
            (1, 10, 1, 24, 24),
            (1, 10, 6, 24, 24),
            (1, 30, 6, 12, 12),
            (1, 30, 2, 24, 24),
            (1, 30, 6, 24, 24),
            (1, 10, 10, 10, 11),
            (1, 1, 10, 10, 1025),
            (1, 1, 10, 10, 1023),
            (1, 1, 10, 1025, 10),
            (1, 1, 10, 1023, 10),
            (3, 2, 6, 6, 6, 5),
            (3, 2, 6, 6, 6, 5, 7), ]
    # Deterministic shuffle so successive runs visit shapes in varied order.
    np.random.RandomState(utt.fetch_seed()).shuffle(shps)
    test_ws = (2, 2, 2), (3, 2, 3), (1, 1, 1)
    test_st = (2, 2, 2), (2, 3, 2), (1, 1, 1)
    test_mode = ['max', 'sum', 'average_inc_pad', 'average_exc_pad']
    ref_mode = copy.copy(mode_without_gpu)
    ref_mode.check_py_code = False
    gpu_mode = mode_with_gpu.excluding("cudnn")
    gpu_mode.check_py_code = False
    for shp in shps:
        for mode, ws, st in itertools.product(test_mode, test_ws, test_st):
            # Window must fit inside the last three dimensions.
            if ws[0] > shp[-3] or ws[1] > shp[-2] or ws[2] > shp[-1]:
                continue
            for ignore_border, pad in zip((True, False), [(1, 1, 1), (0, 0, 0)]):
                # Pooling rejects pad >= ws.
                if pad[0] >= ws[0] or pad[1] >= ws[1] or pad[2] >= ws[2]:
                    continue
                # 'average_exc_pad' does not support non-zero padding.
                if mode == 'average_exc_pad' and (pad[0] > 0 or pad[1] > 0 or pad[2] > 0):
                    continue
                ds_op = Pool(ndim=len(ws), mode=mode, ignore_border=ignore_border)
                a = theano.shared(rand(*shp), 'a')
                a_pooled = ds_op(tensor.as_tensor_variable(a), ws, st, pad)
                # Forward pass: GPU graph must use GpuPool and agree with CPU.
                f = theano.function([], a_pooled, mode=gpu_mode)
                f2 = theano.function([], a_pooled, mode=ref_mode)
                assert any(isinstance(node.op, GpuPool)
                           for node in f.maker.fgraph.toposort())
                assert any(isinstance(node.op, Pool)
                           for node in f2.maker.fgraph.toposort())
                assert np.allclose(f(), f2()), (shp, ws, st, pad, mode, ignore_border)
                # Gradient: lifted to the matching GPU grad op, values agree.
                a_pooled_grad = tensor.grad(a_pooled.sum(), a)
                g = theano.function([], a_pooled_grad, mode=gpu_mode)
                g2 = theano.function([], a_pooled_grad, mode=ref_mode)
                if mode == 'max':
                    gop = GpuMaxPoolGrad
                    gop2 = MaxPoolGrad
                else:
                    gop = GpuAveragePoolGrad
                    gop2 = AveragePoolGrad
                assert any(isinstance(node.op, gop)
                           for node in g.maker.fgraph.toposort())
                assert any(isinstance(node.op, gop2)
                           for node in g2.maker.fgraph.toposort())
                assert np.allclose(g(), g2()), (shp, ws, st, pad, mode, ignore_border)
                # test rop and grad grad for max pooling
                # for average pooling grad grad is just average pooling grad
                if mode != 'max':
                    continue
                ea = theano.shared(rand(*shp), 'ea')
                gr = theano.function([], tensor.Rop(a_pooled, a, ea), mode=gpu_mode)
                gr2 = theano.function([], tensor.Rop(a_pooled, a, ea), mode=ref_mode)
                assert any(
                    isinstance(node.op, GpuDownsampleFactorMaxGradGrad)
                    for node in gr.maker.fgraph.toposort()
                )
                assert any(
                    isinstance(node.op, DownsampleFactorMaxGradGrad)
                    for node in gr2.maker.fgraph.toposort()
                )
                assert np.allclose(gr(), gr2()), (shp, ws, st, pad, mode, ignore_border)
                ggf = gradient.Lop(tensor.grad((a_pooled**2).sum(), a), a, a)
                gg = theano.function([], ggf, mode=gpu_mode)
                gg2 = theano.function([], ggf, mode=ref_mode)
                assert any(
                    isinstance(node.op, GpuDownsampleFactorMaxGradGrad)
                    for node in gg.maker.fgraph.toposort()
                )
                assert any(
                    isinstance(node.op, DownsampleFactorMaxGradGrad)
                    for node in gg2.maker.fgraph.toposort()
                )
                assert np.allclose(gg(), gg2()), (shp, ws, st, pad, mode, ignore_border)
| 39.307167
| 90
| 0.4911
| 1,452
| 11,517
| 3.769972
| 0.096419
| 0.018268
| 0.055535
| 0.067227
| 0.836683
| 0.822616
| 0.802886
| 0.786262
| 0.770186
| 0.770186
| 0
| 0.066556
| 0.375098
| 11,517
| 292
| 91
| 39.441781
| 0.694039
| 0.035513
| 0
| 0.621277
| 0
| 0
| 0.014237
| 0
| 0
| 0
| 0
| 0
| 0.114894
| 1
| 0.021277
| false
| 0
| 0.055319
| 0
| 0.080851
| 0.004255
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
54f283bf8a59dbddd54cb6cd0d1020af57731685
| 217
|
py
|
Python
|
pygfunction/__init__.py
|
mitchute/pygfunction
|
a32ac5dc7ca91ebcc7b61f604c7464e8172475b3
|
[
"BSD-3-Clause"
] | null | null | null |
pygfunction/__init__.py
|
mitchute/pygfunction
|
a32ac5dc7ca91ebcc7b61f604c7464e8172475b3
|
[
"BSD-3-Clause"
] | null | null | null |
pygfunction/__init__.py
|
mitchute/pygfunction
|
a32ac5dc7ca91ebcc7b61f604c7464e8172475b3
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import division, print_function, absolute_import
from . import boreholes
from . import gfunction
from . import heat_transfer
from . import load_aggregation
from . import pipes
from . import utilities
| 24.111111
| 64
| 0.820276
| 28
| 217
| 6.071429
| 0.535714
| 0.352941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 217
| 8
| 65
| 27.125
| 0.913978
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.142857
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
54f339d6b204951320006c2704fd8e779543139e
| 45
|
py
|
Python
|
tests/tests.py
|
PauCaBu/HappyRandomBirthday
|
5ef4820ec539362fde5aaec2066b80d91f055dbc
|
[
"MIT"
] | 3
|
2021-06-25T18:33:31.000Z
|
2021-06-25T21:32:33.000Z
|
tests/tests.py
|
PauCaBu/HappyRandomBirthday
|
5ef4820ec539362fde5aaec2066b80d91f055dbc
|
[
"MIT"
] | null | null | null |
tests/tests.py
|
PauCaBu/HappyRandomBirthday
|
5ef4820ec539362fde5aaec2066b80d91f055dbc
|
[
"MIT"
] | 2
|
2021-06-24T17:42:46.000Z
|
2021-06-25T17:54:33.000Z
|
def test_HBday():
    """Smoke test placeholder: prints a greeting and returns None."""
    print('hi')
| 11.25
| 17
| 0.577778
| 6
| 45
| 4.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.266667
| 45
| 3
| 18
| 15
| 0.757576
| 0
| 0
| 0
| 0
| 0
| 0.044444
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0
| 0
| 0.666667
| 0.333333
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
54fc4d3a058b7eb1ddfa9d8c3dae7f47e5586c3d
| 874
|
py
|
Python
|
roomtools/checks.py
|
entchen66/sinbad3.1
|
3353118b8693c84d5572ab2a7a2278a32be2a76c
|
[
"MIT"
] | null | null | null |
roomtools/checks.py
|
entchen66/sinbad3.1
|
3353118b8693c84d5572ab2a7a2278a32be2a76c
|
[
"MIT"
] | null | null | null |
roomtools/checks.py
|
entchen66/sinbad3.1
|
3353118b8693c84d5572ab2a7a2278a32be2a76c
|
[
"MIT"
] | 1
|
2020-02-29T10:57:21.000Z
|
2020-02-29T10:57:21.000Z
|
from typing import TYPE_CHECKING
from redbot.core import commands
if TYPE_CHECKING:
from . import RoomTools
def tmpc_active():
    """Command check: passes only when tmpc is active for the invoking guild."""
    async def predicate(ctx: commands.Context):
        if ctx.guild:
            cog = ctx.bot.get_cog("RoomTools")
            if TYPE_CHECKING:
                # Narrow the type for static analysis only; no runtime effect.
                assert isinstance(cog, RoomTools)  # nosec
            if cog:
                return await cog.tmpc_config.guild(ctx.guild).active()
        # No guild context or cog not loaded: deny.
        return False
    return commands.check(predicate)
def aa_active():
    """Command check: passes only when autoroom (ar) is active for the guild."""
    async def predicate(ctx: commands.Context):
        if ctx.guild:
            cog = ctx.bot.get_cog("RoomTools")
            if TYPE_CHECKING:
                # Narrow the type for static analysis only; no runtime effect.
                assert isinstance(cog, RoomTools)  # nosec
            if cog:
                return await cog.ar_config.guild(ctx.guild).active()
        # No guild context or cog not loaded: deny.
        return False
    return commands.check(predicate)
| 24.971429
| 62
| 0.62357
| 110
| 874
| 4.863636
| 0.281818
| 0.08972
| 0.078505
| 0.071028
| 0.8
| 0.8
| 0.8
| 0.8
| 0.8
| 0.616822
| 0
| 0
| 0.295195
| 874
| 34
| 63
| 25.705882
| 0.868506
| 0.012586
| 0
| 0.730769
| 0
| 0
| 0.02093
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 1
| 0.076923
| false
| 0
| 0.115385
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
074a7c3dc4eaf8460bd414f097940bf4265c632e
| 37
|
py
|
Python
|
vega/algorithms/nas/sgas/__init__.py
|
jie311/vega
|
1bba6100ead802697e691403b951e6652a99ccae
|
[
"MIT"
] | 724
|
2020-06-22T12:05:30.000Z
|
2022-03-31T07:10:54.000Z
|
vega/algorithms/nas/sgas/__init__.py
|
jie311/vega
|
1bba6100ead802697e691403b951e6652a99ccae
|
[
"MIT"
] | 147
|
2020-06-30T13:34:46.000Z
|
2022-03-29T11:30:17.000Z
|
vega/algorithms/nas/sgas/__init__.py
|
jie311/vega
|
1bba6100ead802697e691403b951e6652a99ccae
|
[
"MIT"
] | 160
|
2020-06-29T18:27:58.000Z
|
2022-03-23T08:42:21.000Z
|
from .sgas_trainer_callback import *
| 18.5
| 36
| 0.837838
| 5
| 37
| 5.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.878788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
074dc617f43f0d4d806ac7fdd336bb1a74badc7a
| 202
|
py
|
Python
|
tefla/utils/__init__.py
|
mkulariya1/tefla
|
8de25c1b67dcf025535f5e8c40539de59acd7fb8
|
[
"MIT"
] | 40
|
2017-09-10T17:11:17.000Z
|
2022-02-01T17:40:53.000Z
|
tefla/utils/__init__.py
|
mkulariya1/tefla
|
8de25c1b67dcf025535f5e8c40539de59acd7fb8
|
[
"MIT"
] | 21
|
2018-06-21T09:58:04.000Z
|
2022-03-11T23:11:37.000Z
|
tefla/utils/__init__.py
|
subex/Tefla
|
34f8fd0e2f2ee02aa73c6289753e08a95cc41880
|
[
"MIT"
] | 11
|
2017-10-13T13:10:02.000Z
|
2020-08-17T07:07:53.000Z
|
from __future__ import absolute_import
# from . import image_utils
from . import quadratic_weighted_kappa
from . import util
from . import postproc
from . import exceptions
from . import seq2seq_utils
| 22.444444
| 38
| 0.816832
| 27
| 202
| 5.777778
| 0.481481
| 0.384615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005814
| 0.148515
| 202
| 8
| 39
| 25.25
| 0.901163
| 0.123762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0768b37ce41126f378cad85b988e241b81720260
| 40
|
py
|
Python
|
turbustat/statistics/genus/__init__.py
|
keflavich/TurbuStat
|
a6fac4c0d10473a74c62cce4a9c6a30773a955b1
|
[
"MIT"
] | null | null | null |
turbustat/statistics/genus/__init__.py
|
keflavich/TurbuStat
|
a6fac4c0d10473a74c62cce4a9c6a30773a955b1
|
[
"MIT"
] | null | null | null |
turbustat/statistics/genus/__init__.py
|
keflavich/TurbuStat
|
a6fac4c0d10473a74c62cce4a9c6a30773a955b1
|
[
"MIT"
] | null | null | null |
# NOTE(review): implicit relative import — works only under Python 2; for
# Python 3 this would need "from .genus import ..." — confirm target version.
from genus import Genus, GenusDistance
| 13.333333
| 38
| 0.825
| 5
| 40
| 6.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 40
| 2
| 39
| 20
| 0.970588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4aceda566782284a94ae93a5b212784958baa5f8
| 647
|
py
|
Python
|
frontend/queries.py
|
jiz148/medical_app
|
2f8b3f299ff6a87e62ac7483b6d2bac156a08874
|
[
"MIT"
] | null | null | null |
frontend/queries.py
|
jiz148/medical_app
|
2f8b3f299ff6a87e62ac7483b6d2bac156a08874
|
[
"MIT"
] | null | null | null |
frontend/queries.py
|
jiz148/medical_app
|
2f8b3f299ff6a87e62ac7483b6d2bac156a08874
|
[
"MIT"
] | null | null | null |
from flask import Flask, request, render_template
# Flask application serving the page-navigation endpoints defined below.
app = Flask(__name__)
@app.route('/main', methods=['POST'])
def query_main():
    """Render the main page."""
    return render_template('main.html')
@app.route('/login', methods=['POST'])
def query_login():
    """Render the login page."""
    return render_template('login.html')
@app.route('/disclaimer', methods=['POST'])
def query_disclaimer():
    """Render the disclaimer page."""
    return render_template('disclaimer.html')
@app.route('/startregistration', methods=['POST'])
def query_startregistration():
    """Render the registration page."""
    return render_template('register.html')
@app.route('/', methods=['GET'])
def index():
    """Landing page: show the login screen."""
    return render_template('login.html')
| 24.884615
| 50
| 0.703246
| 78
| 647
| 5.641026
| 0.282051
| 0.222727
| 0.272727
| 0.172727
| 0.131818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111283
| 647
| 25
| 51
| 25.88
| 0.765217
| 0.055641
| 0
| 0
| 0
| 0
| 0.191803
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.294118
| false
| 0
| 0.058824
| 0.294118
| 0.647059
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
ab07df8396a3d22aec009b3cbc1094014aeeff7b
| 26,221
|
py
|
Python
|
code/counterfactual_graphs.py
|
jonathantelliott/mobile-telecommunications
|
32df68d05bf12088e6a1e635820452f6870f90db
|
[
"MIT"
] | 2
|
2021-07-21T15:37:41.000Z
|
2022-02-04T20:32:57.000Z
|
code/counterfactual_graphs.py
|
jonathantelliott/mobile-telecommunications
|
32df68d05bf12088e6a1e635820452f6870f90db
|
[
"MIT"
] | null | null | null |
code/counterfactual_graphs.py
|
jonathantelliott/mobile-telecommunications
|
32df68d05bf12088e6a1e635820452f6870f90db
|
[
"MIT"
] | 1
|
2021-07-30T07:09:09.000Z
|
2021-07-30T07:09:09.000Z
|
import os

import numpy as np
import matplotlib as mpl
# Fall back to a non-interactive backend on headless machines; this must
# happen before pyplot is imported.
if os.environ.get('DISPLAY', '') == '':
    print('no display found. Using non-interactive Agg backend')
    mpl.use('Agg')
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D

import paths
import counterfactuals.infrastructurefunctions as infr
import counterfactuals.infrastructureequilibrium as ie
# %%
avg_price_elasts = np.array([-4., -2.5, -1.8])
sigmas = np.array([0., 0.2, 0.4, 0.6, 0.8, 0.9])

# %%
# Define functions to load results.
# Each loader maps (elasticity index x, sigma index y) to the saved
# counterfactual array f"{name}_e{x}_n{y}.npy" under paths.arrays_path.
def _result_loader(name):
    """Return a loader(x, y) -> np.ndarray for the array family *name*."""
    return lambda x, y: np.load(f"{paths.arrays_path}{name}_e{x}_n{y}.npy")

# Point estimates.
p_stars = _result_loader("p_stars")
R_stars = _result_loader("R_stars")
q_stars = _result_loader("q_stars")
cs_by_type = _result_loader("cs_by_type")
cs = _result_loader("cs")
ps = _result_loader("ps")
ts = _result_loader("ts")
partial_elasts = _result_loader("partial_elasts")
full_elasts = _result_loader("full_elasts")
partial_Pif_partial_bf = _result_loader("partial_Pif_partial_bf")
partial_Pif_partial_b = _result_loader("partial_Pif_partial_b")
partial_CS_partial_b = _result_loader("partial_CS_partial_b")
# Standard errors matching each point estimate above.
p_stars_se = _result_loader("p_stars_se")
R_stars_se = _result_loader("R_stars_se")
q_stars_se = _result_loader("q_stars_se")
cs_by_type_se = _result_loader("cs_by_type_se")
cs_se = _result_loader("cs_se")
ps_se = _result_loader("ps_se")
ts_se = _result_loader("ts_se")
partial_elasts_se = _result_loader("partial_elasts_se")
full_elasts_se = _result_loader("full_elasts_se")
partial_Pif_partial_bf_se = _result_loader("partial_Pif_partial_bf_se")
partial_Pif_partial_b_se = _result_loader("partial_Pif_partial_b_se")
partial_CS_partial_b_se = _result_loader("partial_CS_partial_b_se")

# %%
# Define common graph features
num_firms_to_simulate = 6
num_firms_array = np.arange(num_firms_to_simulate, dtype=int) + 1  # 1..6 firms
elast_ids = np.array([1, 2])[::-1]  # ids 2, 1 -> rows for E = 1.8 then 2.5
alpha = 0.6
lw = 3.
# %%
# Plot effect of number of firms
# Grid: one row per elasticity spec, columns = (2,000 MB price, 10,000 MB
# price, investment, download speed) vs. number of firms. Dashed lines are
# +/- 1.96 s.e. bands around each point estimate.
# NOTE(review): second loader argument is hard-coded to 3 — presumably an
# index into sigmas (0.6); confirm against the script that saved the arrays.
fig, axs = plt.subplots(elast_ids.shape[0], 4, figsize=(15,3.5 * elast_ids.shape[0]), sharex=True)
for i, elast_id in enumerate(elast_ids):
    # dlim = 2,000 prices
    axs[i,0].plot(num_firms_array, p_stars(elast_id,3)[:,0], color="black", lw=lw, alpha=alpha)
    axs[i,0].plot(num_firms_array, p_stars(elast_id,3)[:,0] + 1.96 * p_stars_se(elast_id,3)[:,0], color="black", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
    axs[i,0].plot(num_firms_array, p_stars(elast_id,3)[:,0] - 1.96 * p_stars_se(elast_id,3)[:,0], color="black", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
    axs[i,0].set_xlabel("number of firms")
    axs[i,0].set_ylabel("$p_{j}^{*}$ (in \u20ac)")
    # dlim = 10,000 prices
    axs[i,1].plot(num_firms_array, p_stars(elast_id,3)[:,1], color="black", lw=lw, alpha=alpha)
    axs[i,1].plot(num_firms_array, p_stars(elast_id,3)[:,1] + 1.96 * p_stars_se(elast_id,3)[:,1], color="black", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
    axs[i,1].plot(num_firms_array, p_stars(elast_id,3)[:,1] - 1.96 * p_stars_se(elast_id,3)[:,1], color="black", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
    axs[i,1].set_xlabel("number of firms")
    axs[i,1].set_ylabel("$p_{j}^{*}$ (in \u20ac)")
    # investment
    axs[i,2].plot(num_firms_array, R_stars(elast_id,3), color="black", label=f"{-avg_price_elasts[i]}", lw=lw, alpha=alpha)
    axs[i,2].plot(num_firms_array, R_stars(elast_id,3) + 1.96 * R_stars_se(elast_id,3), color="black", label=f"{-avg_price_elasts[i]}", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
    axs[i,2].plot(num_firms_array, R_stars(elast_id,3) - 1.96 * R_stars_se(elast_id,3), color="black", label=f"{-avg_price_elasts[i]}", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
    axs[i,2].set_xlabel("number of firms")
    axs[i,2].set_ylabel("$R_{f}^{*}$ (in km)")
    # download speeds
    axs[i,3].plot(num_firms_array, q_stars(elast_id,3), color="black", lw=lw, alpha=alpha)
    axs[i,3].plot(num_firms_array, q_stars(elast_id,3) + 1.96 * q_stars_se(elast_id,3), color="black", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
    axs[i,3].plot(num_firms_array, q_stars(elast_id,3) - 1.96 * q_stars_se(elast_id,3), color="black", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
    axs[i,3].set_xlabel("number of firms")
    axs[i,3].set_ylabel("$q_{f}^{*}$ (in Mbps)")
# Set titles
fontsize = 13.5
pad = 14
cols = ["2$\,$000 MB plan prices", "10$\,$000 MB plan prices", "investment", "download speeds"]
for ax, col in zip(axs[0], cols):
    # Column headers above the top row of axes.
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size=fontsize, ha='center', va='baseline', weight="bold")
mathbfE = "$\\mathbf{E}$"
rows = [f"{mathbfE} = {-avg_price_elasts[elast_id]}" for elast_id in elast_ids]
for ax, row in zip(axs[:,0], rows):
    # Row labels (elasticity spec) left of the first column.
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size=fontsize, ha='right', va='center', weight="bold")
# Set axis limits
# Shared y-limits across rows (per variable) so rows are comparable.
min_y_p = np.min(np.concatenate(tuple([p_stars(elast_id,3) for elast_id in elast_ids]))) - 5.
max_y_p = np.max(np.concatenate(tuple([p_stars(elast_id,3) for elast_id in elast_ids]))) + 3.
min_y_R = np.min(np.concatenate(tuple([R_stars(elast_id,3) for elast_id in elast_ids]))) - 0.1
max_y_R = np.max(np.concatenate(tuple([R_stars(elast_id,3) for elast_id in elast_ids]))) + 0.1
min_y_q = np.min(np.concatenate(tuple([q_stars(elast_id,3) for elast_id in elast_ids]))) - 5.
max_y_q = np.max(np.concatenate(tuple([q_stars(elast_id,3) for elast_id in elast_ids]))) + 5.
for i, elast_id in enumerate(elast_ids):
    for j in range(2): # first two columns
        axs[i,j].set_ylim((min_y_p, max_y_p))
    axs[i,2].set_ylim((min_y_R, max_y_R))
    axs[i,3].set_ylim((min_y_q, max_y_q))
    for j in range(4): # all columns
        axs[i,j].set_xticks(num_firms_array)
plt.tight_layout()
plt.savefig(f"{paths.graphs_path}counterfactual_variables.pdf", bbox_inches = "tight")
# %%
# Plot elasticities
# Partial vs. full price elasticities for the 2,000 MB and 10,000 MB plans,
# one row per elasticity spec.
fig, axs = plt.subplots(elast_ids.shape[0], 2, figsize=(8,3.5 * elast_ids.shape[0]), sharex=True)
for i, elast_id in enumerate(elast_ids):
    # dlim = 2,000 elasticities
    axs[i,0].plot(num_firms_array, partial_elasts(elast_id,3)[:,0], color=plt.rcParams['axes.prop_cycle'].by_key()['color'][0], lw=lw, alpha=alpha, label="partial")
    axs[i,0].plot(num_firms_array, full_elasts(elast_id,3)[:,0], color=plt.rcParams['axes.prop_cycle'].by_key()['color'][1], lw=lw, alpha=alpha, label="full")
    axs[i,0].set_xlabel("number of firms")
    axs[i,0].legend(loc="lower left")
    # dlim = 10,000 elasticities
    axs[i,1].plot(num_firms_array, partial_elasts(elast_id,3)[:,1], color=plt.rcParams['axes.prop_cycle'].by_key()['color'][0], lw=lw, alpha=alpha, label="partial")
    axs[i,1].plot(num_firms_array, full_elasts(elast_id,3)[:,1], color=plt.rcParams['axes.prop_cycle'].by_key()['color'][1], lw=lw, alpha=alpha, label="full")
    axs[i,1].set_xlabel("number of firms")
    axs[i,1].legend(loc="lower left")
# Set titles
fontsize = 13.5
pad = 14
cols = ["2$\,$000 MB plan", "10$\,$000 MB plan"]
for ax, col in zip(axs[0], cols):
    # Column headers above the top row of axes.
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size=fontsize, ha='center', va='baseline', weight="bold")
mathbfE = "$\\mathbf{E}$"
rows = [f"{mathbfE} = {-avg_price_elasts[elast_id]}" for elast_id in elast_ids]
for ax, row in zip(axs[:,0], rows):
    # Row labels (elasticity spec) left of the first column.
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size=fontsize, ha='right', va='center', weight="bold")
# Set axis limits
# One shared y-range over both partial and full elasticities, all rows.
min_y = np.min(np.concatenate(tuple([full_elasts(elast_id,3) for elast_id in elast_ids] + [partial_elasts(elast_id,3) for elast_id in elast_ids]))) - 0.3
max_y = np.max(np.concatenate(tuple([full_elasts(elast_id,3) for elast_id in elast_ids] + [partial_elasts(elast_id,3) for elast_id in elast_ids]))) + 0.3
for i, elast_id in enumerate(elast_ids):
    for j in range(2): # all columns
        axs[i,j].set_ylim((min_y, max_y))
        axs[i,j].set_xticks(num_firms_array)
plt.tight_layout()
plt.savefig(f"{paths.graphs_path}counterfactual_elasticities.pdf", bbox_inches = "tight")
# %%
# Plot bw derivatives
# Bandwidth derivatives of own profit, total profit and consumer surplus
# vs. number of firms; dashed lines are +/- 1.96 s.e. bands.
fig, axs = plt.subplots(elast_ids.shape[0], 3, figsize=(11,3.5 * elast_ids.shape[0]), sharex=True)
for i, elast_id in enumerate(elast_ids):
    # partial_Pif_partial_bf
    axs[i,0].plot(num_firms_array, partial_Pif_partial_bf(elast_id,3), color="black", lw=lw, alpha=alpha)
    axs[i,0].plot(num_firms_array, partial_Pif_partial_bf(elast_id,3) + 1.96 * partial_Pif_partial_bf_se(elast_id,3), color="black", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
    axs[i,0].plot(num_firms_array, partial_Pif_partial_bf(elast_id,3) - 1.96 * partial_Pif_partial_bf_se(elast_id,3), color="black", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
    axs[i,0].set_xlabel("number of firms")
    axs[i,0].set_ylabel("\u20ac per person in market / MHz")
    # partial_Pif_partial_b
    axs[i,1].plot(num_firms_array, partial_Pif_partial_b(elast_id,3), color="black", lw=lw, alpha=alpha)
    axs[i,1].plot(num_firms_array, partial_Pif_partial_b(elast_id,3) + 1.96 * partial_Pif_partial_b_se(elast_id,3), color="black", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
    axs[i,1].plot(num_firms_array, partial_Pif_partial_b(elast_id,3) - 1.96 * partial_Pif_partial_b_se(elast_id,3), color="black", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
    axs[i,1].set_xlabel("number of firms")
    axs[i,1].set_ylabel("\u20ac per person in market / MHz")
    # partial_CS_partial_b
    axs[i,2].plot(num_firms_array, partial_CS_partial_b(elast_id,3), color="black", lw=lw, alpha=alpha)
    axs[i,2].plot(num_firms_array, partial_CS_partial_b(elast_id,3) + 1.96 * partial_CS_partial_b_se(elast_id,3), color="black", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
    axs[i,2].plot(num_firms_array, partial_CS_partial_b(elast_id,3) - 1.96 * partial_CS_partial_b_se(elast_id,3), color="black", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
    axs[i,2].set_xlabel("number of firms")
    axs[i,2].set_ylabel("\u20ac per person in market / MHz")
# Set titles
fontsize = 13.5
pad = 14
cols = ["$\\frac{\\partial \\Pi_{f}}{\\partial b_{f}}$", "$\\frac{\\partial \\Pi_{f}}{\\partial b}$", "$\\frac{\\partial CS}{\\partial b}$"]
for ax, col in zip(axs[0], cols):
    # Column headers (LaTeX derivative symbols) above the top row.
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size=fontsize + 3., ha='center', va='baseline', weight="bold")
mathbfE = "$\\mathbf{E}$"
rows = [f"{mathbfE} = {-avg_price_elasts[elast_id]}" for elast_id in elast_ids]
for ax, row in zip(axs[:,0], rows):
    # Row labels (elasticity spec) left of the first column.
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size=fontsize, ha='right', va='center', weight="bold")
# Set axis limits
# Shared y-limits per column so rows are comparable.
min_y_Pif_bf = np.min(np.concatenate(tuple([partial_Pif_partial_bf(elast_id,3) for elast_id in elast_ids]))) - 0.005
max_y_Pif_bf = np.max(np.concatenate(tuple([partial_Pif_partial_bf(elast_id,3) for elast_id in elast_ids]))) + 0.008
min_y_Pif_b = np.min(np.concatenate(tuple([partial_Pif_partial_b(elast_id,3) for elast_id in elast_ids]))) - 0.002
max_y_Pif_b = np.max(np.concatenate(tuple([partial_Pif_partial_b(elast_id,3) for elast_id in elast_ids]))) + 0.002
min_y_CS_b = np.min(np.concatenate(tuple([partial_CS_partial_b(elast_id,3) for elast_id in elast_ids]))) - 0.02
max_y_CS_b = np.max(np.concatenate(tuple([partial_CS_partial_b(elast_id,3) for elast_id in elast_ids]))) + 0.03
for i, elast_id in enumerate(elast_ids):
    axs[i,0].set_ylim((min_y_Pif_bf, max_y_Pif_bf))
    axs[i,1].set_ylim((min_y_Pif_b, max_y_Pif_b))
    axs[i,2].set_ylim((min_y_CS_b, max_y_CS_b))
    for j in range(3):
        axs[i,j].set_xticks(num_firms_array)
plt.tight_layout()
plt.savefig(f"{paths.graphs_path}counterfactual_bw_deriv.pdf", bbox_inches = "tight")
# %%
# Plot welfare for number of firms
# Consumer, producer and total surplus vs. number of firms; dashed curves
# are +/- 1.96 s.e. bands; the vertical dashed line marks the surplus-
# maximizing number of firms.
fig, axs = plt.subplots(elast_ids.shape[0], 3, figsize=(11,3.5 * elast_ids.shape[0]), sharex=True)
for i, elast_id in enumerate(elast_ids):
    # consumer surplus
    axs[i,0].plot(num_firms_array, cs(elast_id,3), color="black", lw=lw, alpha=alpha)
    axs[i,0].plot(num_firms_array, cs(elast_id,3) + 1.96 * cs_se(elast_id,3), color="black", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
    axs[i,0].plot(num_firms_array, cs(elast_id,3) - 1.96 * cs_se(elast_id,3), color="black", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
    axs[i,0].axvline(x=num_firms_array[np.argmax(cs(elast_id,3))], color="black", linestyle="--", alpha=0.25)
    axs[i,0].set_xlabel("number of firms")
    axs[i,0].set_ylabel("\u20ac")
    # producer surplus
    axs[i,1].plot(num_firms_array, ps(elast_id,3), color="black", lw=lw, alpha=alpha)
    axs[i,1].plot(num_firms_array, ps(elast_id,3) + 1.96 * ps_se(elast_id,3), color="black", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
    axs[i,1].plot(num_firms_array, ps(elast_id,3) - 1.96 * ps_se(elast_id,3), color="black", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
    axs[i,1].axvline(x=num_firms_array[np.argmax(ps(elast_id,3))], color="black", linestyle="--", alpha=0.25)
    axs[i,1].set_xlabel("number of firms")
    axs[i,1].set_ylabel("\u20ac")
    # total surplus
    axs[i,2].plot(num_firms_array, ts(elast_id,3), color="black", lw=lw, alpha=alpha)
    axs[i,2].plot(num_firms_array, ts(elast_id,3) + 1.96 * ts_se(elast_id,3), color="black", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
    axs[i,2].plot(num_firms_array, ts(elast_id,3) - 1.96 * ts_se(elast_id,3), color="black", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
    axs[i,2].axvline(x=num_firms_array[np.argmax(ts(elast_id,3))], color="black", linestyle="--", alpha=0.25)
    axs[i,2].set_xlabel("number of firms")
    axs[i,2].set_ylabel("\u20ac")
# Set titles
fontsize = 13.5
pad = 14
cols = ["consumer surplus", "producer surplus", "total surplus"]
for ax, col in zip(axs[0], cols):
    # Column headers above the top row of axes.
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size=fontsize, ha='center', va='baseline', weight="bold")
mathbfE = "$\\mathbf{E}$"
rows = [f"{mathbfE} = {-avg_price_elasts[elast_id]}" for elast_id in elast_ids]
for ax, row in zip(axs[:,0], rows):
    # Row labels (elasticity spec) left of the first column.
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size=fontsize, ha='right', va='center', weight="bold")
# Set axis limits
# Shared y-limits per surplus measure so rows are comparable.
min_y_cs = np.min(np.concatenate(tuple([cs(elast_id,3) for elast_id in elast_ids]))) - 5.
max_y_cs = np.max(np.concatenate(tuple([cs(elast_id,3) for elast_id in elast_ids]))) + 20.
min_y_ps = np.min(np.concatenate(tuple([ps(elast_id,3) for elast_id in elast_ids]))) - 5.
max_y_ps = np.max(np.concatenate(tuple([ps(elast_id,3) for elast_id in elast_ids]))) + 5.
min_y_ts = np.min(np.concatenate(tuple([ts(elast_id,3) for elast_id in elast_ids]))) - 5.
max_y_ts = np.max(np.concatenate(tuple([ts(elast_id,3) for elast_id in elast_ids]))) + 15.
for i, elast_id in enumerate(elast_ids):
    axs[i,0].set_ylim((min_y_cs, max_y_cs))
    axs[i,1].set_ylim((min_y_ps, max_y_ps))
    axs[i,2].set_ylim((min_y_ts, max_y_ts))
    for j in range(3):
        axs[i,j].set_xticks(num_firms_array)
plt.tight_layout()
plt.savefig(f"{paths.graphs_path}counterfactual_welfare.pdf", bbox_inches = "tight")
# %%
# Plot consumer surplus by type for number of firms
# Grid: one row per elasticity, one column per income percentile; columns plot
# every other type (indices 0, 2, 4, 6, 8 of the type dimension).
fig, axs = plt.subplots(elast_ids.shape[0], 5, figsize=(15,2.5 * elast_ids.shape[0]), sharex=True)
for i, elast_id in enumerate(elast_ids):
    for j in range(5):
        # Point estimate plus a 95% confidence band (+/- 1.96 standard errors).
        axs[i,j].plot(num_firms_array, cs_by_type(elast_id,3)[:,2*j], color="black", lw=lw, alpha=alpha)
        axs[i,j].plot(num_firms_array, cs_by_type(elast_id,3)[:,2*j] + 1.96 * cs_by_type_se(elast_id,3)[:,2*j], color="black", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
        axs[i,j].plot(num_firms_array, cs_by_type(elast_id,3)[:,2*j] - 1.96 * cs_by_type_se(elast_id,3)[:,2*j], color="black", lw=0.7 * lw, alpha=0.5 * alpha, ls="--")
        # Vertical marker at the surplus-maximizing number of firms.
        axs[i,j].axvline(x=num_firms_array[np.argmax(cs_by_type(elast_id,3)[:,2*j])], color="black", linestyle="--", alpha=0.25)
        axs[i,j].set_xlabel("number of firms")
        axs[i,j].set_ylabel("\u20ac")
# Set titles
fontsize = 13.5
pad = 14
# Column headers: 10th, 30th, 50th, 70th, 90th percentile.
cols = [f"{((2*i)+1)*10}th percentile" for i in range(5)]
for ax, col in zip(axs[0], cols):
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size=fontsize, ha='center', va='baseline', weight="bold")
mathbfE = "$\\mathbf{E}$"
rows = [f"{mathbfE} = {-avg_price_elasts[elast_id]}" for elast_id in elast_ids]
for ax, row in zip(axs[:,0], rows):
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size=fontsize, ha='right', va='center', weight="bold")
for i, elast_id in enumerate(elast_ids):
    for j in range(5):
        axs[i,j].set_xticks(num_firms_array)
plt.tight_layout()
plt.savefig(f"{paths.graphs_path}counterfactual_cs_by_income.pdf", bbox_inches = "tight")
# %%
# Plot effect of number of firms
# Four panels: prices of the two data plans, total station investment, and
# download speeds, each drawn for all five nesting parameters (sigmas).
num_firms_to_simulate = 6
num_firms_array = np.arange(num_firms_to_simulate, dtype=int) + 1
fig, axs = plt.subplots(1, 4, figsize=(14,4), sharex=True)
alpha = 0.6
lw = 3.
# Hoist the color cycle once instead of re-reading rcParams on every plot call.
colors = plt.rcParams['axes.prop_cycle'].by_key()['color']
# Shared y-limits for the two price panels.
# Fix: the limits previously spanned only sigma series 0-3 even though all five
# series (0-4) are plotted below, so the fifth series could fall outside the
# fixed axis range. Include every plotted series.
all_prices = np.concatenate(tuple([p_stars(1, i) for i in range(5)]))
min_y = np.min(all_prices) - 2.5
max_y = np.max(all_prices) + 5.
for i in range(5):
    axs[0].plot(num_firms_array, p_stars(1,i)[:,0], color=colors[i], lw=lw, alpha=alpha)
axs[0].set_xlabel("number of firms")
axs[0].set_ylabel("$p^{*}$ (in \u20ac)")
axs[0].set_ylim((min_y, max_y))
axs[0].set_title("$\\bar{d} = 2\\,000$ MB plan prices", fontsize=12)
for i in range(5):
    axs[1].plot(num_firms_array, p_stars(1,i)[:,1], color=colors[i], lw=lw, alpha=alpha)
axs[1].set_xlabel("number of firms")
axs[1].set_ylabel("$p^{*}$ (in \u20ac)")
axs[1].set_ylim((min_y, max_y))
axs[1].set_title("$\\bar{d} = 10\\,000$ MB plan prices", fontsize=12)
for i in range(5):
    # Total stations = number of firms * per-firm stations at the equilibrium
    # radius. 16.299135 is a hard-coded constant — TODO confirm its meaning
    # against infr.num_stations.
    axs[2].plot(num_firms_array, num_firms_array * infr.num_stations(R_stars(1,i), 16.299135), color=colors[i], label=f"{sigmas[i]}", lw=lw, alpha=alpha)
axs[2].set_xlabel("number of firms")
axs[2].set_ylabel("total number of stations")
axs[2].set_title("investment", fontsize=12)
for i in range(5):
    axs[3].plot(num_firms_array, q_stars(1,i), color=colors[i], lw=lw, alpha=alpha)
axs[3].set_xlabel("number of firms")
axs[3].set_ylabel("$q^{*}$ (in Mbps)")
axs[3].set_title("download speeds", fontsize=12)
fig.legend(loc="center right", ncol=1, title="Nesting Parameters", fontsize=12, bbox_to_anchor=(3., 0.5), bbox_transform=axs[2].transAxes)
plt.tight_layout()
plt.savefig(f"{paths.graphs_path}counterfactual_variables_sigmas.pdf", bbox_inches = "tight")
# %%
# Plot elasticities
# Partial (solid) vs. full (dashed) elasticities for each nesting parameter,
# one panel per data plan.
fig, axs = plt.subplots(1, 2, figsize=(8,4), sharex=True)
alpha = 0.6
lw = 3.
# Common y-limits spanning every partial and full elasticity series (0-4).
min_y = np.min(np.concatenate((partial_elasts(1,0), partial_elasts(1,1), partial_elasts(1,2), partial_elasts(1,3), partial_elasts(1,4), full_elasts(1,0), full_elasts(1,1), full_elasts(1,2), full_elasts(1,3), full_elasts(1,4)))) - 0.2
max_y = np.max(np.concatenate((partial_elasts(1,0), partial_elasts(1,1), partial_elasts(1,2), partial_elasts(1,3), partial_elasts(1,4), full_elasts(1,0), full_elasts(1,1), full_elasts(1,2), full_elasts(1,3), full_elasts(1,4)))) + 0.2
for i in range(5):
    axs[0].plot(num_firms_array, partial_elasts(1,i)[:,0], color=plt.rcParams['axes.prop_cycle'].by_key()['color'][i], lw=lw, alpha=alpha, label=f"{sigmas[i]} partial")
    axs[0].plot(num_firms_array, full_elasts(1,i)[:,0], color=plt.rcParams['axes.prop_cycle'].by_key()['color'][i], lw=lw, alpha=alpha, linestyle="--", label=f" full")
axs[0].set_xlabel("number of firms")
axs[0].set_ylim((min_y, max_y))
axs[0].set_title("$\\bar{d} = 2\\,000$ MB plan", fontsize=12)
# Second panel repeats the first for the larger plan (column 1), unlabeled so
# the shared legend lists each series only once.
for i in range(5):
    axs[1].plot(num_firms_array, partial_elasts(1,i)[:,1], color=plt.rcParams['axes.prop_cycle'].by_key()['color'][i], lw=lw, alpha=alpha)
    axs[1].plot(num_firms_array, full_elasts(1,i)[:,1], color=plt.rcParams['axes.prop_cycle'].by_key()['color'][i], lw=lw, alpha=alpha, linestyle="--")
axs[1].set_xlabel("number of firms")
axs[1].set_ylim((min_y, max_y))
axs[1].set_title("$\\bar{d} = 10\\,000$ MB plan", fontsize=12)
fig.legend(loc="center right", ncol=1, title="Nesting Parameters", fontsize=12, bbox_to_anchor=(1.565, 0.5), bbox_transform=axs[1].transAxes)
plt.tight_layout()
plt.savefig(f"{paths.graphs_path}counterfactual_elasticities_sigmas.pdf", bbox_inches = "tight")
# %%
# Plot bw derivatives
# Derivatives of firm profit and consumer surplus with respect to bandwidth
# (own b_f vs. total b), one curve per nesting parameter.
fig, axs = plt.subplots(1, 3, figsize=(11,4), sharex=True)
for i in range(5):
    axs[0].plot(num_firms_array, partial_Pif_partial_bf(1,i), color=plt.rcParams['axes.prop_cycle'].by_key()['color'][i], lw=lw, alpha=alpha, label=f"{sigmas[i]}")
axs[0].set_xlabel("number of firms")
axs[0].set_ylabel("\u20ac per person in market / MHz")
axs[0].set_title("$\\frac{\\partial \\Pi_{f}}{\\partial b_{f}}$", fontsize=17, y=1.05)
for i in range(5):
    axs[1].plot(num_firms_array, partial_Pif_partial_b(1,i), color=plt.rcParams['axes.prop_cycle'].by_key()['color'][i], lw=lw, alpha=alpha)
axs[1].set_xlabel("number of firms")
axs[1].set_ylabel("\u20ac per person in market / MHz")
axs[1].set_title("$\\frac{\\partial \\Pi_{f}}{\\partial b}$", fontsize=17, y=1.05)
for i in range(5):
    axs[2].plot(num_firms_array, partial_CS_partial_b(1,i), color=plt.rcParams['axes.prop_cycle'].by_key()['color'][i], lw=lw, alpha=alpha)
axs[2].set_xlabel("number of firms")
axs[2].set_ylabel("\u20ac per person in market / MHz")
axs[2].set_title("$\\frac{\\partial CS}{\\partial b}$", fontsize=17, y=1.05)
fig.legend(loc="center right", ncol=1, title="Nesting Parameters", fontsize=12, bbox_to_anchor=(1.75, 0.5), bbox_transform=axs[2].transAxes)
plt.tight_layout()
plt.savefig(f"{paths.graphs_path}counterfactual_bw_deriv_sigmas.pdf", bbox_inches = "tight")
# %%
# Plot welfare for number of firms
# CS / PS / TS (scaled to units of 10,000 EUR) for each nesting parameter.
# Dashed vertical lines mark each series' welfare-maximizing number of firms;
# the (-1.*(i/5.) + 1.*((5.-i)/5.)) * 0.15 term adds a small sigma-dependent
# horizontal offset — presumably so coincident argmax lines stay visible.
fig, axs = plt.subplots(1, 3, figsize=(10,4), sharex=True)
alpha = 0.6
lw = 3.
for i in range(5):
    axs[0].plot(num_firms_array, cs(1,i) / 10000., color=plt.rcParams['axes.prop_cycle'].by_key()['color'][i], lw=lw, alpha=alpha, label=f"{sigmas[i]}")
    axs[0].axvline(x=num_firms_array[np.argmax(cs(1,i))] + (-1. * (i/5.) + 1. * ((5.-i)/5.)) * 0.15, color=plt.rcParams['axes.prop_cycle'].by_key()['color'][i], linestyle="--", alpha=0.75 * alpha)
axs[0].set_xlabel("number of firms")
axs[0].set_ylabel("$10\\,000$ \u20ac")
axs[0].set_title("consumer surplus", fontsize=12)
for i in range(5):
    axs[1].plot(num_firms_array, ps(1,i) / 10000., color=plt.rcParams['axes.prop_cycle'].by_key()['color'][i], lw=lw, alpha=alpha)
    axs[1].axvline(x=num_firms_array[np.argmax(ps(1,i))] + (-1. * (i/5.) + 1. * ((5.-i)/5.)) * 0.15, color=plt.rcParams['axes.prop_cycle'].by_key()['color'][i], linestyle="--", alpha=0.75 * alpha)
axs[1].set_xlabel("number of firms")
axs[1].set_ylabel("$10\\,000$ \u20ac")
axs[1].set_title("producer surplus", fontsize=12)
for i in range(5):
    axs[2].plot(num_firms_array, ts(1,i) / 10000., color=plt.rcParams['axes.prop_cycle'].by_key()['color'][i], lw=lw, alpha=alpha)
    axs[2].axvline(x=num_firms_array[np.argmax(ts(1,i))] + (-1. * (i/5.) + 1. * ((5.-i)/5.)) * 0.15, color=plt.rcParams['axes.prop_cycle'].by_key()['color'][i], linestyle="--", alpha=0.75 * alpha)
axs[2].set_xlabel("number of firms")
axs[2].set_ylabel("$10\\,000$ \u20ac")
axs[2].set_title("total surplus", fontsize=12)
fig.legend(loc="center right", ncol=1, title="Nesting Parameters", fontsize=12, bbox_to_anchor=(1.8, 0.5), bbox_transform=axs[2].transAxes)
plt.tight_layout()
plt.savefig(f"{paths.graphs_path}counterfactual_welfare_sigmas.pdf", bbox_inches = "tight")
# %%
# Plot consumer surplus by type for number of firms
# One panel per income percentile; each panel overlays all five nesting
# parameters, with jittered argmax markers as in the welfare figure above.
fig, axs = plt.subplots(1, 5, figsize=(15,4.5), sharex=True)
for i in range(5):
    for j in range(5):
        # Label only in the first panel so the shared legend lists each sigma once.
        axs[i].plot(num_firms_array, cs_by_type(1,j)[:,2*i], color=plt.rcParams['axes.prop_cycle'].by_key()['color'][j], lw=lw, alpha=alpha, label=f"{sigmas[j]}" if i == 0 else None)
        axs[i].axvline(x=num_firms_array[np.argmax(cs_by_type(1,j)[:,2*i])] + (-1. * (j/5.) + 1. * ((5.-j)/5.)) * 0.15, color=plt.rcParams['axes.prop_cycle'].by_key()['color'][j], linestyle="--", alpha=0.75 * alpha)
    axs[i].set_xlabel("number of firms")
    axs[i].set_ylabel("\u20ac")
    axs[i].set_title(f"{((2*i)+1)*10}th percentile", fontsize=12)
fig.legend(loc="center right", ncol=1, title="Nesting Parameters", fontsize=12, bbox_to_anchor=(4.5, 0.5), bbox_transform=axs[2].transAxes)
plt.tight_layout()
plt.savefig(f"{paths.graphs_path}counterfactual_cs_by_income_sigmas.pdf", bbox_inches = "tight")
| 53.731557
| 233
| 0.671675
| 4,902
| 26,221
| 3.389841
| 0.048756
| 0.053499
| 0.040922
| 0.053199
| 0.921526
| 0.907264
| 0.889451
| 0.856713
| 0.832701
| 0.796835
| 0
| 0.043578
| 0.123107
| 26,221
| 487
| 234
| 53.841889
| 0.67912
| 0.039167
| 0
| 0.416185
| 0
| 0
| 0.192832
| 0.071596
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.020231
| 0
| 0.020231
| 0.00289
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ab19508e4f422a3bae7c1efcf11976f364f176ee
| 257
|
py
|
Python
|
Darlington/phase1/python Basic 2/day 27 solution/qtn10.py
|
CodedLadiesInnovateTech/-python-challenge-solutions
|
430cd3eb84a2905a286819eef384ee484d8eb9e7
|
[
"MIT"
] | 6
|
2020-05-23T19:53:25.000Z
|
2021-05-08T20:21:30.000Z
|
Darlington/phase1/python Basic 2/day 27 solution/qtn10.py
|
CodedLadiesInnovateTech/-python-challenge-solutions
|
430cd3eb84a2905a286819eef384ee484d8eb9e7
|
[
"MIT"
] | 8
|
2020-05-14T18:53:12.000Z
|
2020-07-03T00:06:20.000Z
|
Darlington/phase1/python Basic 2/day 27 solution/qtn10.py
|
CodedLadiesInnovateTech/-python-challenge-solutions
|
430cd3eb84a2905a286819eef384ee484d8eb9e7
|
[
"MIT"
] | 39
|
2020-05-10T20:55:02.000Z
|
2020-09-12T17:40:59.000Z
|
# Keep only the values of a list that are not duplicated.
def two_unique_nums(nums):
    """Return the elements of *nums* that occur exactly once, preserving order.

    Uses a single collections.Counter pass (O(n)) instead of calling
    list.count() for every element (O(n^2)).
    """
    from collections import Counter  # local import keeps the snippet self-contained
    counts = Counter(nums)
    return [n for n in nums if counts[n] == 1]
# Demonstrate the helper on a few sample lists (same output as three
# individual print calls).
for sample in ([1,2,3,2,3,4,5], [1,2,3,2,4,5], [1,2,3,4,5]):
    print(two_unique_nums(sample))
| 42.833333
| 69
| 0.723735
| 58
| 257
| 3.068966
| 0.465517
| 0.202247
| 0.292135
| 0.303371
| 0.38764
| 0.38764
| 0.38764
| 0.38764
| 0.258427
| 0
| 0
| 0.082251
| 0.101167
| 257
| 6
| 70
| 42.833333
| 0.688312
| 0.264591
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0
| 0.2
| 0.4
| 0.6
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
|
0
| 6
|
ab50045ef05f671e1094fc8645a6fc94eb474cff
| 4,764
|
py
|
Python
|
tests/unit/test_newsrec_model.py
|
suhoy901/recommenders
|
8ec9f1950d694a5aeaa3d463ac23cad661a30a11
|
[
"MIT"
] | 28
|
2021-11-12T08:26:40.000Z
|
2022-03-27T07:21:24.000Z
|
tests/unit/test_newsrec_model.py
|
shobhit-agarwal/recommenders
|
8ec9f1950d694a5aeaa3d463ac23cad661a30a11
|
[
"MIT"
] | 5
|
2021-11-10T02:58:32.000Z
|
2022-03-21T16:13:11.000Z
|
tests/unit/test_newsrec_model.py
|
shobhit-agarwal/recommenders
|
8ec9f1950d694a5aeaa3d463ac23cad661a30a11
|
[
"MIT"
] | 9
|
2021-11-03T07:14:47.000Z
|
2022-02-22T13:42:04.000Z
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import pytest
import os
from reco_utils.recommender.newsrec.newsrec_utils import prepare_hparams
from reco_utils.recommender.deeprec.deeprec_utils import download_deeprec_resources
from reco_utils.recommender.newsrec.models.nrms import NRMSModel
from reco_utils.recommender.newsrec.models.naml import NAMLModel
from reco_utils.recommender.newsrec.models.lstur import LSTURModel
from reco_utils.recommender.newsrec.models.npa import NPAModel
from reco_utils.recommender.newsrec.io.mind_iterator import MINDIterator
from reco_utils.recommender.newsrec.io.mind_all_iterator import MINDAllIterator
@pytest.fixture
def resource_path():
    """Directory containing this test module (symlinks resolved)."""
    real_file = os.path.realpath(__file__)
    return os.path.dirname(real_file)
def _newsrec_hparams(tmp, yaml_name, all_resources=False):
    """Build hparams for a newsrec model from the MIND demo resources.

    Downloads MINDdemo_utils.zip into tmp/utils when the model's yaml config is
    not already present. When all_resources is True, use the "*_all" embedding
    and word dict plus the vert/subvert dicts (required by NAML).
    """
    utils = os.path.join(tmp, "utils")
    yaml_file = os.path.join(utils, yaml_name)
    if not os.path.exists(yaml_file):
        download_deeprec_resources(
            r"https://recodatasets.blob.core.windows.net/newsrec/",
            utils,
            "MINDdemo_utils.zip",
        )
    kwargs = dict(
        wordEmb_file=os.path.join(utils, "embedding_all.npy" if all_resources else "embedding.npy"),
        wordDict_file=os.path.join(utils, "word_dict_all.pkl" if all_resources else "word_dict.pkl"),
        userDict_file=os.path.join(utils, "uid2index.pkl"),
        epochs=1,
    )
    if all_resources:
        kwargs["vertDict_file"] = os.path.join(utils, "vert_dict.pkl")
        kwargs["subvertDict_file"] = os.path.join(utils, "subvert_dict.pkl")
    return prepare_hparams(yaml_file, **kwargs)


def _assert_components_defined(model):
    """Check that constructing the model wired up all training components."""
    assert model.model is not None
    assert model.scorer is not None
    assert model.loss is not None
    assert model.train_optimizer is not None


@pytest.mark.gpu
def test_nrms_component_definition(tmp):
    """NRMS builds its keras model, scorer, loss and optimizer on init."""
    hparams = _newsrec_hparams(tmp, r"nrms.yaml")
    _assert_components_defined(NRMSModel(hparams, MINDIterator))


@pytest.mark.gpu
def test_naml_component_definition(tmp):
    """NAML (needs the *_all resources and MINDAllIterator) builds all components."""
    hparams = _newsrec_hparams(tmp, r"naml.yaml", all_resources=True)
    _assert_components_defined(NAMLModel(hparams, MINDAllIterator))


@pytest.mark.gpu
def test_npa_component_definition(tmp):
    """NPA builds its keras model, scorer, loss and optimizer on init."""
    hparams = _newsrec_hparams(tmp, r"npa.yaml")
    _assert_components_defined(NPAModel(hparams, MINDIterator))


@pytest.mark.gpu
def test_lstur_component_definition(tmp):
    """LSTUR builds its keras model, scorer, loss and optimizer on init."""
    hparams = _newsrec_hparams(tmp, r"lstur.yaml")
    _assert_components_defined(LSTURModel(hparams, MINDIterator))
| 33.314685
| 83
| 0.691856
| 629
| 4,764
| 5.057234
| 0.143084
| 0.052814
| 0.069161
| 0.089909
| 0.81138
| 0.795347
| 0.734989
| 0.711726
| 0.711726
| 0.711726
| 0
| 0.002106
| 0.202771
| 4,764
| 142
| 84
| 33.549296
| 0.83544
| 0.018682
| 0
| 0.65812
| 0
| 0
| 0.131635
| 0
| 0
| 0
| 0
| 0
| 0.136752
| 1
| 0.042735
| false
| 0
| 0.08547
| 0.008547
| 0.136752
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
db4e0ebac27e61d48a0439f6cee57cf046b54336
| 56,978
|
py
|
Python
|
tests/test_metadata_api.py
|
BookOps-CAT/bookops-worldcat
|
879f91f9fe929246c0c07ed771979f31a59288b3
|
[
"MIT"
] | 18
|
2020-05-01T13:35:12.000Z
|
2022-03-25T15:16:12.000Z
|
tests/test_metadata_api.py
|
BookOps-CAT/bookops-worldcat
|
879f91f9fe929246c0c07ed771979f31a59288b3
|
[
"MIT"
] | 37
|
2020-04-15T00:24:18.000Z
|
2022-03-31T14:07:24.000Z
|
tests/test_metadata_api.py
|
BookOps-CAT/bookops-worldcat
|
879f91f9fe929246c0c07ed771979f31a59288b3
|
[
"MIT"
] | 6
|
2020-04-27T23:25:15.000Z
|
2020-10-05T23:35:07.000Z
|
# -*- coding: utf-8 -*-
from contextlib import contextmanager
import datetime
import os
import pytest
from bookops_worldcat import MetadataSession, WorldcatAccessToken
from bookops_worldcat.errors import WorldcatSessionError
@contextmanager
def does_not_raise():
    """No-op context manager: the 'no exception expected' case for
    pytest.raises-style parametrized tests. Yields control and lets any
    exception propagate unchanged."""
    yield None
class TestMockedMetadataSession:
"""Tests MetadataSession methods with mocking"""
    def test_base_session_initiation(self, mock_token):
        # Session keeps the token object and derives the Bearer auth header from it.
        with MetadataSession(authorization=mock_token) as session:
            assert type(session.authorization).__name__ == "WorldcatAccessToken"
            # test header set up correctly:
            assert (
                session.headers["Authorization"]
                == "Bearer tk_Yebz4BpEp9dAsghA7KpWx6dYD1OZKWBlHjqW"
            )

    def test_missing_authorization(self):
        # 'authorization' is a required constructor argument.
        with pytest.raises(TypeError):
            MetadataSession()

    def test_invalid_authorizaiton(self):
        # A value that is not a WorldcatAccessToken is rejected with a session error.
        err_msg = "Argument 'authorization' must include 'WorldcatAccessToken' object."
        with pytest.raises(WorldcatSessionError) as exc:
            MetadataSession(authorization="my_token")
        assert err_msg in str(exc.value)

    def test_get_new_access_token(self, mock_token, mock_utcnow):
        assert mock_token.is_expired() is False
        with MetadataSession(authorization=mock_token) as session:
            # Force the token to look expired, then verify the session refreshes it
            # (mock_utcnow pins 'now', so the refreshed expiry is deterministic).
            session.authorization.token_expires_at = datetime.datetime.strftime(
                datetime.datetime.utcnow() - datetime.timedelta(0, 1),
                "%Y-%m-%d %H:%M:%SZ",
            )
            assert session.authorization.is_expired() is True
            session._get_new_access_token()
            assert session.authorization.token_expires_at == "2020-01-01 17:19:58Z"
            assert session.authorization.is_expired() is False

    def test_get_new_access_token_exceptions(self, mock_token, mock_timeout):
        # Network timeouts during token refresh surface as WorldcatSessionError.
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError):
                session._get_new_access_token()
    # _split_into_legal_volume chunks OCLC numbers into comma-joined strings of
    # at most 50 numbers each (per the parametrized expectations below).
    @pytest.mark.parametrize(
        "oclcNumbers,buckets,expectation",
        [
            ([], 0, []),
            (["1", "2", "3"], 1, ["1,2,3"]),
            ([1, 2, 3], 1, ["1,2,3"]),
            (["1"], 1, ["1"]),
            (["1"] * 50, 1, [",".join(["1"] * 50)]),
            (["1"] * 51, 2, [",".join(["1"] * 50), "1"]),
            (
                ["1"] * 103,
                3,
                [",".join(["1"] * 50), ",".join(["1"] * 50), "1,1,1"],
            ),
        ],
    )
    def test_split_into_legal_volume(
        self, mock_token, oclcNumbers, buckets, expectation
    ):
        token = mock_token
        with MetadataSession(authorization=token) as session:
            assert session._split_into_legal_volume(oclcNumbers) == expectation
    # Each test below pins the exact endpoint string produced by one of the
    # session's private _url_* builder methods.

    def test_url_base(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            assert session._url_base() == "https://worldcat.org"

    def test_url_search_base(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            assert (
                session._url_search_base()
                == "https://americas.metadata.api.oclc.org/worldcat/search/v1"
            )

    def test_url_shared_print_holdings(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            assert (
                session._url_member_shared_print_holdings()
                == "https://americas.metadata.api.oclc.org/worldcat/search/v1/bibs-retained-holdings"
            )

    def test_url_member_general_holdings(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            assert (
                session._url_member_general_holdings()
                == "https://americas.metadata.api.oclc.org/worldcat/search/v1/bibs-summary-holdings"
            )

    def test_url_brief_bib_search(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            assert (
                session._url_brief_bib_search()
                == "https://americas.metadata.api.oclc.org/worldcat/search/v1/brief-bibs"
            )

    # Both str and int OCLC numbers must yield the same endpoint.
    @pytest.mark.parametrize(
        "argm, expectation",
        [
            (
                "12345",
                "https://americas.metadata.api.oclc.org/worldcat/search/v1/brief-bibs/12345",
            ),
            (
                12345,
                "https://americas.metadata.api.oclc.org/worldcat/search/v1/brief-bibs/12345",
            ),
        ],
    )
    def test_url_brief_bib_oclc_number(self, argm, expectation, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            assert (
                session._url_brief_bib_oclc_number(oclcNumber=argm)
                == "https://americas.metadata.api.oclc.org/worldcat/search/v1/brief-bibs/12345"
            )

    def test_url_brief_bib_other_editions(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            assert (
                session._url_brief_bib_other_editions(oclcNumber="12345")
                == "https://americas.metadata.api.oclc.org/worldcat/search/v1/brief-bibs/12345/other-editions"
            )

    def test_url_lhr_control_number(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            assert (
                session._url_lhr_control_number(controlNumber="12345")
                == "https://americas.metadata.api.oclc.org/worldcat/search/v1/my-holdings/12345"
            )

    def test_url_lhr_search(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            assert (
                session._url_lhr_search()
                == "https://americas.metadata.api.oclc.org/worldcat/search/v1/my-holdings"
            )

    def test_url_lhr_shared_print(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            assert (
                session._url_lhr_shared_print()
                == "https://americas.metadata.api.oclc.org/worldcat/search/v1/retained-holdings"
            )

    def test_url_bib_oclc_number(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            assert (
                session._url_bib_oclc_number(oclcNumber="12345")
                == "https://worldcat.org/bib/data/12345"
            )

    def test_url_bib_check_oclc_numbers(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            assert (
                session._url_bib_check_oclc_numbers()
                == "https://worldcat.org/bib/checkcontrolnumbers"
            )

    def test_url_bib_holding_libraries(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            assert (
                session._url_bib_holding_libraries()
                == "https://worldcat.org/bib/holdinglibraries"
            )

    def test_url_bib_holdings_action(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            assert session._url_bib_holdings_action() == "https://worldcat.org/ih/data"

    def test_url_bib_holdings_check(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            assert (
                session._url_bib_holdings_check()
                == "https://worldcat.org/ih/checkholdings"
            )

    def test_url_bib_holdings_batch_action(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            assert (
                session._url_bib_holdings_batch_action()
                == "https://worldcat.org/ih/datalist"
            )

    def test_url_bib_holdings_multi_institution_batch_action(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            assert (
                session._url_bib_holdings_multi_institution_batch_action()
                == "https://worldcat.org/ih/institutionlist"
            )
    # get_brief_bib: happy path, argument validation, stale-token refresh, and
    # each mocked transport failure mapping to WorldcatSessionError.

    def test_get_brief_bib(self, mock_token, mock_successful_session_get_request):
        with MetadataSession(authorization=mock_token) as session:
            assert session.get_brief_bib(12345).status_code == 200

    def test_get_brief_bib_no_oclcNumber_passed(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(TypeError):
                session.get_brief_bib()

    def test_get_brief_bib_None_oclcNumber_passed(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError):
                session.get_brief_bib(oclcNumber=None)

    def test_get_brief_bib_with_stale_token(
        self, mock_utcnow, mock_token, mock_successful_session_get_request
    ):
        with MetadataSession(authorization=mock_token) as session:
            # Expire the token, then confirm the request transparently refreshes it.
            session.authorization.token_expires_at = datetime.datetime.strftime(
                datetime.datetime.utcnow() - datetime.timedelta(0, 1),
                "%Y-%m-%d %H:%M:%SZ",
            )
            assert session.authorization.is_expired() is True
            response = session.get_brief_bib(oclcNumber=12345)
            assert session.authorization.token_expires_at == "2020-01-01 17:19:58Z"
            assert session.authorization.is_expired() is False
            assert response.status_code == 200

    def test_get_brief_bib_timeout(self, mock_token, mock_timeout):
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError):
                session.get_brief_bib(12345)

    def test_get_brief_bib_connectionerror(self, mock_token, mock_connectionerror):
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError):
                session.get_brief_bib(12345)

    def test_get_brief_bib_unexpected_error(self, mock_token, mock_unexpected_error):
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError):
                session.get_brief_bib(12345)

    def test_get_brief_bib_400_error_response(self, mock_token, mock_400_response):
        # HTTP 400 payload details are folded into the raised error message.
        msg = "Web service returned 400 error: {'type': 'MISSING_QUERY_PARAMETER', 'title': 'Validation Failure', 'detail': 'details here'}; https://test.org/some_endpoint"
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError) as exc:
                session.get_brief_bib(12345)
            assert msg in str(exc.value)
    # get_full_bib: same coverage pattern as the get_brief_bib tests above.

    def test_get_full_bib(self, mock_token, mock_successful_session_get_request):
        with MetadataSession(authorization=mock_token) as session:
            assert session.get_full_bib(12345).status_code == 200

    def test_get_full_bib_no_oclcNumber_passed(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(TypeError):
                session.get_full_bib()

    def test_get_full_bib_None_oclcNumber_passed(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError):
                session.get_full_bib(oclcNumber=None)

    def test_get_full_bib_with_stale_token(
        self, mock_token, mock_successful_session_get_request
    ):
        with MetadataSession(authorization=mock_token) as session:
            # Expire the token, then confirm the request transparently refreshes it.
            session.authorization.token_expires_at = datetime.datetime.strftime(
                datetime.datetime.utcnow() - datetime.timedelta(0, 1),
                "%Y-%m-%d %H:%M:%SZ",
            )
            assert session.authorization.is_expired() is True
            response = session.get_full_bib(12345)
            assert session.authorization.is_expired() is False
            assert session.authorization.token_expires_at == "2020-01-01 17:19:58Z"
            assert response.status_code == 200

    def test_get_full_bib_timeout(self, mock_token, mock_timeout):
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError):
                session.get_full_bib(12345)

    def test_get_full_bib_connectionerror(self, mock_token, mock_connectionerror):
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError):
                session.get_full_bib(12345)

    def test_get_full_bib_unexpected_error(self, mock_token, mock_unexpected_error):
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError):
                session.get_full_bib(12345)

    def test_get_full_bib_400_error_response(self, mock_token, mock_400_response):
        msg = "Web service returned 400 error: {'type': 'MISSING_QUERY_PARAMETER', 'title': 'Validation Failure', 'detail': 'details here'}; https://test.org/some_endpoint"
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError) as exc:
                session.get_full_bib(12345)
            assert msg in str(exc.value)
    # holding_get_status: same coverage pattern as the get_* tests above.

    def test_holding_get_status(self, mock_token, mock_successful_session_get_request):
        with MetadataSession(authorization=mock_token) as session:
            assert session.holding_get_status(12345).status_code == 200

    def test_holding_get_status_no_oclcNumber_passed(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(TypeError):
                session.holding_get_status()

    def test_holding_get_status_None_oclcNumber_passed(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError):
                session.holding_get_status(oclcNumber=None)

    def test_holding_get_status_with_stale_token(
        self, mock_token, mock_successful_session_get_request
    ):
        with MetadataSession(authorization=mock_token) as session:
            # Expire the token, then confirm the request transparently refreshes it.
            session.authorization.token_expires_at = datetime.datetime.strftime(
                datetime.datetime.utcnow() - datetime.timedelta(0, 1),
                "%Y-%m-%d %H:%M:%SZ",
            )
            assert session.authorization.is_expired() is True
            response = session.holding_get_status(12345)
            assert session.authorization.is_expired() is False
            assert session.authorization.token_expires_at == "2020-01-01 17:19:58Z"
            assert response.status_code == 200

    def test_holding_get_status_timeout(self, mock_token, mock_timeout):
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError):
                session.holding_get_status(12345)

    def test_holding_get_status_connectionerror(self, mock_token, mock_connectionerror):
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError):
                session.holding_get_status(12345)

    def test_holding_get_status_unexpected_error(
        self, mock_token, mock_unexpected_error
    ):
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError):
                session.holding_get_status(12345)

    def test_holding_status_400_error_response(self, mock_token, mock_400_response):
        msg = "Web service returned 400 error: {'type': 'MISSING_QUERY_PARAMETER', 'title': 'Validation Failure', 'detail': 'details here'}; https://test.org/some_endpoint"
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError) as exc:
                session.holding_get_status(12345)
            assert msg in str(exc.value)
    # holding_set: a successful set returns 201; a 409 (holding already exists)
    # is NOT raised — its JSON payload is returned to the caller.

    def test_holding_set(self, mock_token, mock_successful_holdings_post_request):
        with MetadataSession(authorization=mock_token) as session:
            assert session.holding_set(850940548).status_code == 201

    def test_holding_set_no_oclcNumber_passed(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(TypeError):
                session.holding_set()

    def test_holding_set_None_oclcNumber_passed(self, mock_token):
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError):
                session.holding_set(oclcNumber=None)

    def test_holding_set_stale_token(
        self, mock_token, mock_successful_holdings_post_request
    ):
        with MetadataSession(authorization=mock_token) as session:
            # Expire the token, then confirm the request transparently refreshes it.
            session.authorization.token_expires_at = datetime.datetime.strftime(
                datetime.datetime.utcnow() - datetime.timedelta(0, 1),
                "%Y-%m-%d %H:%M:%SZ",
            )
            assert session.authorization.is_expired() is True
            response = session.holding_set(850940548)
            assert session.authorization.token_expires_at == "2020-01-01 17:19:58Z"
            assert session.authorization.is_expired() is False
            assert response.status_code == 201

    def test_holding_set_timeout(self, mock_token, mock_timeout):
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError):
                session.holding_set(850940548)

    def test_holding_set_connectionerror(self, mock_token, mock_connectionerror):
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError):
                session.holding_set(850940548)

    def test_holding_set_unexpected_error(self, mock_token, mock_unexpected_error):
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError):
                session.holding_set(850940548)

    def test_holding_set_409_error_response(self, mock_token, mock_409_response):
        msg = {
            "code": {"value": "WS-409", "type": "application"},
            "message": "Trying to set hold while holding already exists",
            "detail": None,
        }
        with MetadataSession(authorization=mock_token) as session:
            response = session.holding_set(850940548)
            assert response.json() == msg

    def test_holding_set_400_error_response(self, mock_token, mock_400_response):
        msg = "Web service returned 400 error: {'type': 'MISSING_QUERY_PARAMETER', 'title': 'Validation Failure', 'detail': 'details here'}; https://test.org/some_endpoint"
        with MetadataSession(authorization=mock_token) as session:
            with pytest.raises(WorldcatSessionError) as exc:
                session.holding_set(850940548)
            assert msg in str(exc.value)
def test_holding_unset(self, mock_token, mock_successful_holdings_delete_request):
with MetadataSession(authorization=mock_token) as session:
assert session.holding_unset(850940548).status_code == 200
def test_holding_unset_no_oclcNumber_passed(self, mock_token):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(TypeError):
session.holding_unset()
def test_holding_unset_None_oclcNumber_passed(self, mock_token):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.holding_unset(oclcNumber=None)
def test_holding_unset_stale_token(
self, mock_utcnow, mock_token, mock_successful_holdings_delete_request
):
with MetadataSession(authorization=mock_token) as session:
session.authorization.token_expires_at = datetime.datetime.strftime(
datetime.datetime.utcnow() - datetime.timedelta(0, 1),
"%Y-%m-%d %H:%M:%SZ",
)
assert session.authorization.is_expired() is True
response = session.holding_unset(850940548)
assert session.authorization.token_expires_at == "2020-01-01 17:19:58Z"
assert session.authorization.is_expired() is False
assert response.status_code == 200
def test_holding_unset_timeout(self, mock_token, mock_timeout):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.holding_unset(850940548)
def test_holding_unset_connectionerror(self, mock_token, mock_connectionerror):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.holding_unset(850940548)
def test_holding_unset_unexpected_error(self, mock_token, mock_unexpected_error):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.holding_unset(850940548)
def test_holding_unset_409_error_response(self, mock_token, mock_409_response):
# cheating here a bit, response is bit different
msg = {
"code": {"value": "WS-409", "type": "application"},
"message": "Trying to set hold while holding already exists",
"detail": None,
}
with MetadataSession(authorization=mock_token) as session:
response = session.holding_unset(850940548)
assert response.json() == msg
def test_holding_unset_400_error_response(self, mock_token, mock_400_response):
msg = "Web service returned 400 error: {'type': 'MISSING_QUERY_PARAMETER', 'title': 'Validation Failure', 'detail': 'details here'}; https://test.org/some_endpoint"
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError) as exc:
session.holding_unset(850940548)
assert msg in str(exc.value)
@pytest.mark.parametrize(
"argm,expectation",
[
(None, pytest.raises(WorldcatSessionError)),
([], pytest.raises(WorldcatSessionError)),
(["bt2111111111"], pytest.raises(WorldcatSessionError)),
(["850940548"], does_not_raise()),
(["ocn850940548"], does_not_raise()),
("850940548,850940552, 850940554", does_not_raise()),
(["850940548", "850940552", "850940554"], does_not_raise()),
([850940548, 850940552, 850940554], does_not_raise()),
],
)
def test_holdings_set(
self, argm, expectation, mock_token, mock_successful_multi_status_request
):
with MetadataSession(authorization=mock_token) as session:
with expectation:
session.holdings_set(argm)
def test_holdings_set_no_oclcNumber_passed(self, mock_token):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(TypeError):
session.holdings_set()
def test_holdings_set_stale_token(
self, mock_utcnow, mock_token, mock_successful_multi_status_request
):
with MetadataSession(authorization=mock_token) as session:
session.authorization.token_expires_at = datetime.datetime.strftime(
datetime.datetime.utcnow() - datetime.timedelta(0, 1),
"%Y-%m-%d %H:%M:%SZ",
)
with does_not_raise():
assert session.authorization.is_expired() is True
session.holdings_set([850940548, 850940552, 850940554])
assert session.authorization.token_expires_at == "2020-01-01 17:19:58Z"
assert session.authorization.is_expired() is False
def test_holdings_set_timeout(self, mock_token, mock_timeout):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.holdings_set([850940548, 850940552, 850940554])
def test_holdings_set_connectionerror(self, mock_token, mock_connectionerror):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.holding_set([850940548, 850940552, 850940554])
def test_holdings_set_unexpected_error(self, mock_token, mock_unexpected_error):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.holdings_set([850940548, 850940552, 850940554])
def test_holdings_set_400_error_response(self, mock_token, mock_400_response):
msg = "Web service returned 400 error: {'type': 'MISSING_QUERY_PARAMETER', 'title': 'Validation Failure', 'detail': 'details here'}; https://test.org/some_endpoint"
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError) as exc:
session.holdings_set([850940548, 850940552, 850940554])
assert msg in str(exc.value)
@pytest.mark.parametrize(
"argm,expectation",
[
(None, pytest.raises(WorldcatSessionError)),
([], pytest.raises(WorldcatSessionError)),
(["bt2111111111"], pytest.raises(WorldcatSessionError)),
(["850940548"], does_not_raise()),
(["ocn850940548"], does_not_raise()),
("850940548,850940552, 850940554", does_not_raise()),
(["850940548", "850940552", "850940554"], does_not_raise()),
([850940548, 850940552, 850940554], does_not_raise()),
],
)
def test_holdings_unset(
self, argm, expectation, mock_token, mock_successful_multi_status_request
):
with MetadataSession(authorization=mock_token) as session:
with expectation:
session.holdings_unset(argm)
def test_holdings_unset_no_oclcNumber_passed(self, mock_token):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(TypeError):
session.holdings_unset()
def test_holdings_unset_stale_token(
self, mock_utcnow, mock_token, mock_successful_multi_status_request
):
with MetadataSession(authorization=mock_token) as session:
session.authorization.token_expires_at = datetime.datetime.strftime(
datetime.datetime.utcnow() - datetime.timedelta(0, 1),
"%Y-%m-%d %H:%M:%SZ",
)
with does_not_raise():
assert session.authorization.is_expired() is True
session.holdings_unset([850940548, 850940552, 850940554])
assert session.authorization.token_expires_at == "2020-01-01 17:19:58Z"
assert session.authorization.is_expired() is False
def test_holdings_uset_timeout(self, mock_token, mock_timeout):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.holdings_unset([850940548, 850940552, 850940554])
def test_holdings_unset_connectionerror(self, mock_token, mock_connectionerror):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.holding_unset([850940548, 850940552, 850940554])
def test_holdings_unset_unexpected_error(self, mock_token, mock_unexpected_error):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.holdings_unset([850940548, 850940552, 850940554])
def test_holdings_unset_400_error_response(self, mock_token, mock_400_response):
msg = "Web service returned 400 error: {'type': 'MISSING_QUERY_PARAMETER', 'title': 'Validation Failure', 'detail': 'details here'}; https://test.org/some_endpoint"
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError) as exc:
session.holdings_unset([850940548, 850940552, 850940554])
assert msg in str(exc.value)
def test_search_brief_bib_other_editions(
self, mock_token, mock_successful_session_get_request
):
with MetadataSession(authorization=mock_token) as session:
assert session.search_brief_bib_other_editions(12345).status_code == 200
def test_search_brief_bibs_other_editions_stale_token(
self, mock_utcnow, mock_token, mock_successful_session_get_request
):
with MetadataSession(authorization=mock_token) as session:
session.authorization.token_expires_at = datetime.datetime.strftime(
datetime.datetime.utcnow() - datetime.timedelta(0, 1),
"%Y-%m-%d %H:%M:%SZ",
)
assert session.authorization.is_expired() is True
response = session.search_brief_bib_other_editions(12345)
assert session.authorization.token_expires_at == "2020-01-01 17:19:58Z"
assert session.authorization.is_expired() is False
assert response.status_code == 200
def test_search_brief_bib_other_editions_timeout(self, mock_token, mock_timeout):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.search_brief_bib_other_editions(12345)
def test_search_brief_bib_other_editions_connectionerror(
self, mock_token, mock_connectionerror
):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.search_brief_bib_other_editions(12345)
def test_search_brief_bib_other_editions_unexpected_error(
self, mock_token, mock_unexpected_error
):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.search_brief_bib_other_editions(12345)
def test_search_brief_bibs_other_editions_invalid_oclc_number(self, mock_token):
msg = "Invalid OCLC # was passed as an argument"
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError) as exc:
session.search_brief_bib_other_editions("odn12345")
assert msg in str(exc.value)
def test_search_brief_bib_other_editions_400_error_response(
self, mock_token, mock_400_response
):
msg = "Web service returned 400 error: {'type': 'MISSING_QUERY_PARAMETER', 'title': 'Validation Failure', 'detail': 'details here'}; https://test.org/some_endpoint"
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError) as exc:
session.search_brief_bib_other_editions(oclcNumber=12345)
assert msg in str(exc.value)
def test_seach_brief_bibs(self, mock_token, mock_successful_session_get_request):
with MetadataSession(authorization=mock_token) as session:
assert session.search_brief_bibs(q="ti:Zendegi").status_code == 200
@pytest.mark.parametrize("argm", [(None), ("")])
def test_search_brief_bibs_missing_query(self, mock_token, argm):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError) as exc:
session.search_brief_bibs(argm)
assert "Argument 'q' is requried to construct query." in str(exc.value)
def test_search_brief_bibs_with_stale_token(
self, mock_utcnow, mock_token, mock_successful_session_get_request
):
with MetadataSession(authorization=mock_token) as session:
session.authorization.token_expires_at = datetime.datetime.strftime(
datetime.datetime.utcnow() - datetime.timedelta(0, 1),
"%Y-%m-%d %H:%M:%SZ",
)
assert session.authorization.is_expired() is True
response = session.search_brief_bibs(q="ti:foo")
assert session.authorization.token_expires_at == "2020-01-01 17:19:58Z"
assert session.authorization.is_expired() is False
assert response.status_code == 200
def test_search_brief_bibs_timeout(self, mock_token, mock_timeout):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.search_brief_bibs("ti:foo")
def test_search_brief_bibs_connectionerror(self, mock_token, mock_connectionerror):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.search_brief_bibs("ti:foo")
def test_search_brief_bibs_unexpected_error(
self, mock_token, mock_unexpected_error
):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.search_brief_bibs("ti:foo")
def test_search_brief_bibs_400_error_response(self, mock_token, mock_400_response):
msg = "Web service returned 400 error: {'type': 'MISSING_QUERY_PARAMETER', 'title': 'Validation Failure', 'detail': 'details here'}; https://test.org/some_endpoint"
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError) as exc:
session.search_brief_bibs("ti:foo")
assert msg in str(exc.value)
def test_seach_current_control_numbers(
self, mock_token, mock_successful_multi_status_request
):
with MetadataSession(authorization=mock_token) as session:
assert (
session.search_current_control_numbers(
oclcNumbers=["12345", "65891"]
).status_code
== 207
)
def test_seach_current_control_numbers_passed_as_str(
self, mock_token, mock_successful_multi_status_request
):
with MetadataSession(authorization=mock_token) as session:
assert (
session.search_current_control_numbers(
oclcNumbers="12345,65891"
).status_code
== 207
)
@pytest.mark.parametrize("argm", [(None), (""), ([])])
def test_search_current_control_numbers_missing_numbers(self, mock_token, argm):
err_msg = "Argument 'oclcNumbers' must be a list or comma separated string of valid OCLC #."
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError) as exc:
session.search_current_control_numbers(argm)
assert err_msg in str(exc.value)
def test_search_current_control_numbers_with_stale_token(
self, mock_utcnow, mock_token, mock_successful_multi_status_request
):
with MetadataSession(authorization=mock_token) as session:
session.authorization.token_expires_at = datetime.datetime.strftime(
datetime.datetime.utcnow() - datetime.timedelta(0, 1),
"%Y-%m-%d %H:%M:%SZ",
)
assert session.authorization.is_expired() is True
response = session.search_current_control_numbers(["12345", "65891"])
assert session.authorization.token_expires_at == "2020-01-01 17:19:58Z"
assert session.authorization.is_expired() is False
assert response.status_code == 207
def test_search_current_control_numbers_timeout(self, mock_token, mock_timeout):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.search_current_control_numbers(["12345", "65891"])
def test_search_current_control_numbers_connectionerror(
self, mock_token, mock_connectionerror
):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.search_current_control_numbers(["12345", "65891"])
def test_search_current_control_numbers_unexpected_error(
self, mock_token, mock_unexpected_error
):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.search_current_control_numbers(["12345", "65891"])
def test_search_current_control_numbers_400_error_response(
self, mock_token, mock_400_response
):
msg = "Web service returned 400 error: {'type': 'MISSING_QUERY_PARAMETER', 'title': 'Validation Failure', 'detail': 'details here'}; https://test.org/some_endpoint"
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError) as exc:
session.search_current_control_numbers(["12345", "65891"])
assert msg in str(exc.value)
def test_search_general_holdings(
self, mock_token, mock_successful_session_get_request
):
with MetadataSession(authorization=mock_token) as session:
assert session.search_general_holdings(oclcNumber=12345).status_code == 200
def test_search_general_holdings_missing_arguments(self, mock_token):
msg = "Missing required argument. One of the following args are required: oclcNumber, issn, isbn"
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError) as exc:
session.search_general_holdings(holdingsAllEditions=True, limit=20)
assert msg in str(exc.value)
def test_search_general_holdings_invalid_oclc_number(self, mock_token):
msg = "Invalid OCLC # was passed as an argument"
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError) as exc:
session.search_general_holdings(oclcNumber="odn12345")
assert msg in str(exc.value)
def test_search_general_holdings_with_stale_token(
self, mock_utcnow, mock_token, mock_successful_session_get_request
):
with MetadataSession(authorization=mock_token) as session:
session.authorization.token_expires_at = datetime.datetime.strftime(
datetime.datetime.utcnow() - datetime.timedelta(0, 1),
"%Y-%m-%d %H:%M:%SZ",
)
assert session.authorization.is_expired() is True
response = session.search_general_holdings(oclcNumber=12345)
assert session.authorization.token_expires_at == "2020-01-01 17:19:58Z"
assert session.authorization.is_expired() is False
assert response.status_code == 200
def test_search_general_holdings_timeout(self, mock_token, mock_timeout):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.search_general_holdings(oclcNumber="12345")
def test_search_general_holdings_connectionerror(
self, mock_token, mock_connectionerror
):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.search_general_holdings(oclcNumber=12345)
def test_search_general_holdings_unexpectederror(
self, mock_token, mock_unexpected_error
):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.search_general_holdings(oclcNumber="12345")
def test_search_general_holdings_400_error_response(
self, mock_token, mock_400_response
):
msg = "Web service returned 400 error: {'type': 'MISSING_QUERY_PARAMETER', 'title': 'Validation Failure', 'detail': 'details here'}; https://test.org/some_endpoint"
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError) as exc:
session.search_general_holdings(oclcNumber=12345)
assert msg in str(exc.value)
def test_search_shared_print_holdings(
self, mock_token, mock_successful_session_get_request
):
with MetadataSession(authorization=mock_token) as session:
assert (
session.search_shared_print_holdings(oclcNumber=12345).status_code
== 200
)
def test_search_shared_print_holdings_missing_arguments(self, mock_token):
msg = "Missing required argument. One of the following args are required: oclcNumber, issn, isbn"
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError) as exc:
session.search_shared_print_holdings(heldInState="NY", limit=20)
assert msg in str(exc.value)
def test_search_shared_print_holdings_with_invalid_oclc_number_passsed(
self, mock_token
):
msg = "Invalid OCLC # was passed as an argument"
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError) as exc:
session.search_shared_print_holdings(oclcNumber="odn12345")
assert msg in str(exc.value)
def test_search_shared_print_holdings_with_stale_token(
self, mock_utcnow, mock_token, mock_successful_session_get_request
):
with MetadataSession(authorization=mock_token) as session:
session.authorization.token_expires_at = datetime.datetime.strftime(
datetime.datetime.utcnow() - datetime.timedelta(0, 1),
"%Y-%m-%d %H:%M:%SZ",
)
assert session.authorization.is_expired() is True
response = session.search_shared_print_holdings(oclcNumber=12345)
assert session.authorization.token_expires_at == "2020-01-01 17:19:58Z"
assert session.authorization.is_expired() is False
assert response.status_code == 200
def test_search_shared_print_holdings_timeout(self, mock_token, mock_timeout):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.search_shared_print_holdings(oclcNumber="12345")
def test_search_shared_print_holdings_connectionerror(
self, mock_token, mock_connectionerror
):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.search_shared_print_holdings(oclcNumber=12345)
def test_search_shared_print_holdings_unexpectederror(
self, mock_token, mock_unexpected_error
):
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError):
session.search_shared_print_holdings(oclcNumber="12345")
def test_search_shared_print_holdings_400_error_response(
self, mock_token, mock_400_response
):
msg = "Web service returned 400 error: {'type': 'MISSING_QUERY_PARAMETER', 'title': 'Validation Failure', 'detail': 'details here'}; https://test.org/some_endpoint"
with MetadataSession(authorization=mock_token) as session:
with pytest.raises(WorldcatSessionError) as exc:
session.search_shared_print_holdings(oclcNumber=12345)
assert msg in str(exc.value)
@pytest.mark.webtest
class TestLiveMetadataSession:
    """Rudimentary smoke tests against the live Metadata API.

    Credentials are read from WC* environment variables; the ``live_keys``
    fixture is presumed to populate them — verify against conftest.

    Fixes in this revision:
    - test_get_brief_bib_with_stale_token: the first freshness check was a
      bare expression (``... is False``) with no ``assert`` — a silent no-op.
    - test_get_brief_bib_401_error: removed an f-string prefix on a literal
      with no placeholders.
    - DRY: the identical 7-line WorldcatAccessToken construction repeated in
      every test is extracted to ``_live_token``.
    """

    def _live_token(self):
        # Build a fresh access token from the WC* environment variables.
        return WorldcatAccessToken(
            key=os.getenv("WCKey"),
            secret=os.getenv("WCSecret"),
            scopes=os.getenv("WCScopes"),
            principal_id=os.getenv("WCPrincipalID"),
            principal_idns=os.getenv("WCPrincipalIDNS"),
        )

    def test_get_brief_bib_print_mat_request(self, live_keys):
        """Brief bib of a print title returns 200 with the documented fields."""
        fields = sorted(
            [
                "catalogingInfo",
                "creator",
                "date",
                "edition",
                "generalFormat",
                "isbns",
                "language",
                "mergedOclcNumbers",
                "oclcNumber",
                "publicationPlace",
                "publisher",
                "specificFormat",
                "title",
            ]
        )
        with MetadataSession(authorization=self._live_token()) as session:
            response = session.get_brief_bib(41266045)
            assert response.status_code == 200
            assert sorted(response.json().keys()) == fields

    def test_get_brief_bib_401_error(self, live_keys):
        """An invalid bearer token yields a 401 wrapped in WorldcatSessionError."""
        token = self._live_token()
        token.token_str = "invalid-token"
        err_msg = 'Web service returned 401 error: {"message":"Unauthorized"}; https://americas.metadata.api.oclc.org/worldcat/search/v1/brief-bibs/41266045'
        with MetadataSession(authorization=token) as session:
            # Overwrite the header directly so the session sends the bad token.
            session.headers.update({"Authorization": "Bearer invalid-token"})
            with pytest.raises(WorldcatSessionError) as exc:
                session.get_brief_bib(41266045)
            assert err_msg in str(exc.value)

    def test_get_brief_bib_with_stale_token(self, live_keys):
        """An expired token is transparently refreshed on the next request."""
        with MetadataSession(authorization=self._live_token()) as session:
            # BUG FIX: original line was a bare expression with no `assert`.
            assert session.authorization.is_expired() is False
            session.authorization.token_expires_at = datetime.datetime.strftime(
                datetime.datetime.utcnow() - datetime.timedelta(0, 1),
                "%Y-%m-%d %H:%M:%SZ",
            )
            assert session.authorization.is_expired() is True
            response = session.get_brief_bib(oclcNumber=41266045)
            assert session.authorization.is_expired() is False
            assert response.status_code == 200

    def test_get_full_bib(self, live_keys):
        """Full bib retrieval hits the expected URL and returns 200."""
        with MetadataSession(authorization=self._live_token()) as session:
            response = session.get_full_bib(41266045)
            assert response.url == "https://worldcat.org/bib/data/41266045"
            assert response.status_code == 200

    def test_holding_get_status(self, live_keys):
        """Holding status check returns the documented payload shape."""
        with MetadataSession(authorization=self._live_token()) as session:
            response = session.holding_get_status(982651100)
            assert (
                response.url
                == "https://worldcat.org/ih/checkholdings?oclcNumber=982651100"
            )
            assert response.status_code == 200
            assert sorted(response.json().keys()) == ["content", "title", "updated"]
            assert sorted(response.json()["content"].keys()) == sorted(
                [
                    "requestedOclcNumber",
                    "currentOclcNumber",
                    "institution",
                    "holdingCurrentlySet",
                    "id",
                ]
            )

    @pytest.mark.holdings
    def test_holding_set_unset(self, live_keys):
        """Round-trips set -> conflicting set -> unset -> conflicting unset."""
        with MetadataSession(authorization=self._live_token()) as session:
            response = session.holding_get_status(850940548)
            holdings = response.json()["content"]["holdingCurrentlySet"]
            # make sure no holdings are set initially
            if holdings is True:
                response = session.holding_unset(850940548)
            response = session.holding_set(
                850940548, response_format="application/atom+json"
            )
            assert response.url == "https://worldcat.org/ih/data?oclcNumber=850940548"
            assert response.status_code == 201
            assert response.text == ""
            # test setting holdings on bib with already existing holding
            response = session.holding_set(850940548)
            assert response.status_code == 409
            assert response.url == "https://worldcat.org/ih/data?oclcNumber=850940548"
            assert response.json() == {
                "code": {"value": "WS-409", "type": "application"},
                "message": "Trying to set hold while holding already exists",
                "detail": None,
            }
            # test deleting holdings
            response = session.holding_unset(850940548)
            assert response.status_code == 200
            assert (
                response.request.url
                == "https://worldcat.org/ih/data?oclcNumber=850940548&cascade=0"
            )
            assert response.text == ""
            # test deleting holdings on bib without any
            response = session.holding_unset(850940548)
            assert response.status_code == 409
            assert (
                response.request.url
                == "https://worldcat.org/ih/data?oclcNumber=850940548&cascade=0"
            )
            assert response.json() == {
                "code": {"value": "WS-409", "type": "application"},
                "message": "Trying to unset hold while holding does not exist",
                "detail": None,
            }

    @pytest.mark.holdings
    def test_holdings_set(self, live_keys):
        """Batch set returns a list of 207 multi-status responses."""
        with MetadataSession(authorization=self._live_token()) as session:
            response = session.holdings_set([850940548, 850940552, 850940554])
            assert type(response) is list
            assert response[0].status_code == 207
            assert (
                response[0].url
                == "https://worldcat.org/ih/datalist?oclcNumbers=850940548%2C850940552%2C850940554"
            )
            assert sorted(response[0].json().keys()) == sorted(
                ["entries", "extensions"]
            )
            assert sorted(response[0].json()["entries"][0]) == sorted(
                ["title", "content", "updated"]
            )
            assert sorted(response[0].json()["entries"][0]["content"]) == sorted(
                [
                    "requestedOclcNumber",
                    "currentOclcNumber",
                    "institution",
                    "status",
                    "detail",
                ]
            )

    @pytest.mark.holdings
    def test_holdings_unset(self, live_keys):
        """Batch unset returns a list of 207 multi-status responses."""
        with MetadataSession(authorization=self._live_token()) as session:
            response = session.holdings_unset([850940548, 850940552, 850940554])
            assert type(response) is list
            assert response[0].status_code == 207
            assert (
                response[0].url
                == "https://worldcat.org/ih/datalist?oclcNumbers=850940548%2C850940552%2C850940554&cascade=0"
            )
            assert sorted(response[0].json().keys()) == sorted(
                ["entries", "extensions"]
            )
            assert sorted(response[0].json()["entries"][0]) == sorted(
                ["title", "content", "updated"]
            )
            assert sorted(response[0].json()["entries"][0]["content"]) == sorted(
                [
                    "requestedOclcNumber",
                    "currentOclcNumber",
                    "institution",
                    "status",
                    "detail",
                ]
            )

    def test_brief_bib_other_editions(self, live_keys):
        """Other-editions search returns 200 with briefRecords/numberOfRecords."""
        fields = sorted(["briefRecords", "numberOfRecords"])
        with MetadataSession(authorization=self._live_token()) as session:
            response = session.search_brief_bib_other_editions(41266045)
            assert response.status_code == 200
            assert sorted(response.json().keys()) == fields

    def test_search_brief_bibs(self, live_keys):
        """Filtered brief-bib search builds the expected query URL."""
        fields = sorted(["briefRecords", "numberOfRecords"])
        with MetadataSession(authorization=self._live_token()) as session:
            response = session.search_brief_bibs(
                "ti:zendegi AND au:egan",
                inLanguage="eng",
                inCatalogLanguage="eng",
                itemType="book",
                # itemSubType="printbook",
                catalogSource="dlc",
                orderBy="mostWidelyHeld",
                limit=5,
            )
            assert response.status_code == 200
            assert sorted(response.json().keys()) == fields
            # removed temp &itemSubType=printbook due to OCLC error/issue
            assert (
                response.request.url
                == "https://americas.metadata.api.oclc.org/worldcat/search/v1/brief-bibs?q=ti%3Azendegi+AND+au%3Aegan&inLanguage=eng&inCatalogLanguage=eng&catalogSource=dlc&itemType=book&orderBy=mostWidelyHeld&limit=5"
            )

    def test_search_general_holdings(self, live_keys):
        """General holdings search by ISBN returns 200 with expected fields."""
        fields = sorted(["briefRecords", "numberOfRecords"])
        with MetadataSession(authorization=self._live_token()) as session:
            response = session.search_general_holdings(isbn="9781597801744")
            assert response.status_code == 200
            assert sorted(response.json().keys()) == fields

    def test_search_current_control_numbers(self, live_keys):
        """Control-number check returns 207 with entry content per number."""
        with MetadataSession(authorization=self._live_token()) as session:
            response = session.search_current_control_numbers([41266045, 519740398])
            assert response.status_code == 207
            assert (
                response.request.url
                == "https://worldcat.org/bib/checkcontrolnumbers?oclcNumbers=41266045%2C519740398"
            )
            jres = response.json()
            assert sorted(jres.keys()) == ["entries", "extensions"]
            assert sorted(jres["entries"][0].keys()) == ["content", "title", "updated"]
            assert sorted(jres["entries"][0]["content"].keys()) == sorted(
                [
                    "currentOclcNumber",
                    "detail",
                    "found",
                    "id",
                    "institution",
                    "merged",
                    "requestedOclcNumber",
                    "status",
                ]
            )
| 45.839099
| 218
| 0.648092
| 6,001
| 56,978
| 5.896517
| 0.054824
| 0.058754
| 0.114851
| 0.115981
| 0.911968
| 0.883256
| 0.862315
| 0.847336
| 0.831143
| 0.812774
| 0
| 0.042675
| 0.258082
| 56,978
| 1,242
| 219
| 45.876006
| 0.794384
| 0.007722
| 0
| 0.632959
| 0
| 0.023408
| 0.127125
| 0.00798
| 0
| 0
| 0
| 0
| 0.141386
| 1
| 0.121723
| false
| 0.015918
| 0.005618
| 0
| 0.129213
| 0.019663
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
db72ca611509472a1e150b8e2c48c6e097bd10de
| 178
|
py
|
Python
|
smtbx/array_family/__init__.py
|
hbrunie/cctbx_project
|
2d8cb383d50fe20cdbbe4bebae8ed35fabce61e5
|
[
"BSD-3-Clause-LBNL"
] | 2
|
2021-03-18T12:31:57.000Z
|
2022-03-14T06:27:06.000Z
|
smtbx/array_family/__init__.py
|
hbrunie/cctbx_project
|
2d8cb383d50fe20cdbbe4bebae8ed35fabce61e5
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
smtbx/array_family/__init__.py
|
hbrunie/cctbx_project
|
2d8cb383d50fe20cdbbe4bebae8ed35fabce61e5
|
[
"BSD-3-Clause-LBNL"
] | 1
|
2021-03-26T12:52:30.000Z
|
2021-03-26T12:52:30.000Z
|
# Package initializer: re-export the names of the compiled C++ extension
# so callers can use ``smtbx.array_family`` directly.
from __future__ import absolute_import, division, print_function
import boost.python

# Load the Boost.Python extension module built by the cctbx build system.
ext = boost.python.import_ext("smtbx_array_family_ext")

# Expose the extension's public names at package level.
from smtbx_array_family_ext import *
| 35.6
| 64
| 0.853933
| 26
| 178
| 5.346154
| 0.5
| 0.158273
| 0.230216
| 0.273381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08427
| 178
| 4
| 65
| 44.5
| 0.852761
| 0
| 0
| 0
| 0
| 0
| 0.123596
| 0.123596
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 1
| 0
| 1
| 0.25
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
dba3006024a8196a76cbd89c2a97a42ef62a4910
| 128
|
py
|
Python
|
jina/types/request/train.py
|
Immich/jina
|
1f5f7cf4d82029d76ab41df157526fe6f6e0da50
|
[
"Apache-2.0"
] | null | null | null |
jina/types/request/train.py
|
Immich/jina
|
1f5f7cf4d82029d76ab41df157526fe6f6e0da50
|
[
"Apache-2.0"
] | null | null | null |
jina/types/request/train.py
|
Immich/jina
|
1f5f7cf4d82029d76ab41df157526fe6f6e0da50
|
[
"Apache-2.0"
] | null | null | null |
from . import Request
from .mixin import *
class TrainRequest(Request, DocsPropertyMixin, GroundtruthPropertyMixin):
    """Request subtype for train commands.

    Gains document access from :class:`DocsPropertyMixin` and ground-truth
    access from :class:`GroundtruthPropertyMixin`; adds no behavior of its
    own.
    """

    pass
| 18.285714
| 73
| 0.789063
| 12
| 128
| 8.416667
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148438
| 128
| 6
| 74
| 21.333333
| 0.926606
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
dbb60b42644c98cfaed40bb79fe4b3b9c00c9532
| 23,705
|
py
|
Python
|
tests/integration_test.py
|
pa2515-group2/server
|
b1118f5f67cb5a0faeeb958a6afdcde3305b66b1
|
[
"MIT"
] | null | null | null |
tests/integration_test.py
|
pa2515-group2/server
|
b1118f5f67cb5a0faeeb958a6afdcde3305b66b1
|
[
"MIT"
] | null | null | null |
tests/integration_test.py
|
pa2515-group2/server
|
b1118f5f67cb5a0faeeb958a6afdcde3305b66b1
|
[
"MIT"
] | null | null | null |
import os
import unittest
import uuid
import ast
import time
import threading
from wsgiref import simple_server
import urllib
import json
import requests
from server.gateway import app
from server.verbs import Visit
from server.doctor import register_doctor, get_doctor, edit_doctor
from server.patient import register_patient, get_patient, edit_patient
from server.appointment import make_appointment, get_appointment, check_appointment
from server.obj import upload_obj, get_obj, get_objs, delete_obj
from server.models import create_tables, DoctorModel, PatientModel, ObjectModel, LoginModel
from server.auth import authentication, get_token
from server import rediscli
from server.config import Config
from server.utils import logger
# Address the in-process test server binds to (see runserver below) and the
# versioned API root the tests hit.
HOST = 'http://192.168.59.200:8080'
ENDPOINT = HOST + '/v1'

# HTTP status codes the assertions below accept as success / expect as failure.
SUCCESS_STATUS_CODES = [200, 201, 202, 204]
FAILURE_STATUS_CODES = [400, 401, 403, 404, 405]
def runserver():
    """Serve the WSGI ``app`` on 192.168.59.200:8080 forever (blocking)."""
    httpd = simple_server.make_server('192.168.59.200', 8080, app)
    httpd.serve_forever()
def run_server():
    """Launch the test API server on a background daemon thread.

    The daemon flag lets the process exit when the test run finishes even
    though the server loop never returns.
    """
    server_thread = threading.Thread(target=runserver, daemon=True)
    server_thread.start()
    # Give the server a moment to bind before tests start issuing requests.
    time.sleep(0.5)
class BaseTestCase(unittest.TestCase):
    """Shared fixture: a fresh SQLite database per test, removed afterwards."""

    def setUp(self):
        # Build throwaway tables from the test configuration.
        self.test_conf = Config('tests/configuration_test')
        create_tables(self.test_conf)

    def tearDown(self):
        # Delete the per-test SQLite file so tests stay independent.
        os.remove('{}.sqlite3'.format(self.test_conf.db_filename))
class TestApiv1(BaseTestCase):
    """End-to-end tests for the /v1 REST API.

    ``setUp`` provisions a unique admin, doctor and patient (with auth
    tokens) for every test; the ``_admin_headers``/``_login`` helpers factor
    out the register/login flows the individual tests repeat.
    TODO: test 40x situations
    """

    def setUp(self):
        self.test_conf = Config('tests/configuration_test')
        create_tables(self.test_conf)
        # Unique identities per run so repeated runs never collide.
        self.adminid = 'admin_{}'.format(str(uuid.uuid4()))
        self.doctorid = '{}@hms.com'.format(str(uuid.uuid4()))
        self.patientid = '{}@hms.com'.format(str(uuid.uuid4()))
        self.admin_auth()
        self.doctor_auth()
        self.patient_auth()

    def _login(self, userid, password, role):
        """Create a login record, authenticate via auth/<role>, return the token."""
        LoginModel.create(
            username=userid,
            password=password,
            role=role
        )
        headers = {'content-type': 'application/json'}
        credentials = {'username': userid, 'password': password}
        visit = Visit(ENDPOINT)
        auth_code, resp_auth = visit.post(suffix_url='auth/{}'.format(role),
                                          headers=headers, data=credentials)
        logger.info('auth_code:{}, resp_auth:{}'.format(auth_code, resp_auth))
        self.assertIn('token', resp_auth)
        self.assertIn(auth_code, SUCCESS_STATUS_CODES)
        return json.loads(resp_auth)['token']

    def _admin_headers(self):
        """Register and log in a fresh admin; return JSON headers with its token."""
        adminid = 'admin_{}'.format(str(uuid.uuid4()))
        headers = {'content-type': 'application/json'}
        headers['token'] = self._login(adminid, 'admin', 'admin')
        headers['role'] = 'admin'
        return headers

    def admin_auth(self):
        """Authenticate the per-test admin and store its token."""
        self.admin_token = self._login(self.adminid, 'admin', 'admin')

    def doctor_auth(self):
        """Register the per-test doctor via the admin token, then log it in."""
        visit = Visit(ENDPOINT)
        logger.debug('before doctor_auth')
        headers = {'content-type': 'application/json'}
        headers['token'] = self.admin_token
        headers['role'] = 'admin'
        regdoc_data = {
            'email': self.doctorid,
            'first_name': 'intest',
            'last_name': 'intest',
            'experience': 10,
            'patients': '["{}"]'.format(self.patientid)
        }
        doc_code, resp_doc = visit.post(suffix_url='doctor', headers=headers,
                                        data=regdoc_data)
        resp_doc = json.loads(resp_doc)
        self.assertEqual(self.doctorid, resp_doc['doctorid'])
        self.assertIn(doc_code, SUCCESS_STATUS_CODES)
        self.doctorpass = 'doctor'
        self.doctor_token = self._login(self.doctorid, self.doctorpass, 'doctor')

    def patient_auth(self):
        """Register the per-test patient via the admin token, then log it in."""
        visit = Visit(ENDPOINT)
        logger.debug('before patient_auth')
        headers = {'content-type': 'application/json'}
        headers['token'] = self.admin_token
        headers['role'] = 'admin'
        regpt_data = {
            'email': self.patientid,
            'first_name': 'intest',
            'last_name': 'intest',
            'height': '177'
        }
        doc_code, resp_doc = visit.post(suffix_url='patient', headers=headers,
                                        data=regpt_data)
        # Fixed log label: this is the registration response, not resp_auth.
        logger.info('doc_code:{}, resp_doc:{}'.format(doc_code, resp_doc))
        self.assertIn(doc_code, SUCCESS_STATUS_CODES)
        self.patientpass = 'patient'
        self.pat_token = self._login(self.patientid, self.patientpass, 'patient')

    def test_reg_doctor(self):
        """An admin can register a doctor; a bogus token is rejected."""
        visit = Visit(ENDPOINT)
        headers = self._admin_headers()
        doctorid = '{}@hms.com'.format(str(uuid.uuid4()))
        regdoc_data = {
            'email': doctorid,
            'first_name': 'intest',
            'last_name': 'intest',
            'experience': 10
        }
        doc_code, resp_doc = visit.post(suffix_url='doctor', headers=headers,
                                        data=regdoc_data)
        resp_doc = json.loads(resp_doc)
        self.assertEqual(doctorid, resp_doc['doctorid'])
        self.assertIn(doc_code, SUCCESS_STATUS_CODES)
        # The same registration with an invalid token must fail.
        headers['token'] = 'wrong_token'
        doc_code, resp_doc = visit.post(suffix_url='doctor', headers=headers,
                                        data=regdoc_data)
        self.assertIn(doc_code, FAILURE_STATUS_CODES)

    def test_put_doctor(self):
        """A doctor can update its own record; the admin role cannot PUT it."""
        visit = Visit(ENDPOINT)
        headers = self._admin_headers()
        doctorid = '{}@hms.com'.format(str(uuid.uuid4()))
        regdoc_data = {
            'email': doctorid,
            'first_name': 'intest',
            'last_name': 'intest',
            'experience': 10
        }
        doc_code, resp_doc = visit.post(suffix_url='doctor', headers=headers,
                                        data=regdoc_data)
        resp_doc = json.loads(resp_doc)
        self.assertEqual(doctorid, resp_doc['doctorid'])
        self.assertIn(doc_code, SUCCESS_STATUS_CODES)
        doctor_token = self._login(doctorid, 'doctor', 'doctor')
        logger.debug('before doctor requests')
        headers['token'] = doctor_token
        headers['role'] = 'doctor'
        putdoc_data = {
            'email': doctorid,
            'first_name': 'intest_modi',
            'last_name': 'intest_modi',
            'experience': 11
        }
        doc_code, resp_doc = visit.put(suffix_url='doctor/{}'.format(doctorid),
                                       headers=headers, data=putdoc_data)
        logger.info('doc_code:{}, resp_doc:{}'.format(doc_code, resp_doc))
        resp_doc = json.loads(resp_doc)
        self.assertEqual(doctorid, resp_doc['doctorid'])
        self.assertIn(doc_code, SUCCESS_STATUS_CODES)
        # Same token but the admin role must be rejected for this PUT.
        headers['role'] = 'admin'
        doc_code, resp_doc = visit.put(suffix_url='doctor/{}'.format(doctorid),
                                       headers=headers, data=putdoc_data)
        logger.info('doc_code:{}, resp_doc:{}'.format(doc_code, resp_doc))
        self.assertIn(doc_code, FAILURE_STATUS_CODES)

    def test_get_patient(self):
        """A registered patient can retrieve its own record."""
        visit = Visit(ENDPOINT)
        headers = self._admin_headers()
        patientid = '{}@hms.com'.format(str(uuid.uuid4()))
        regpt_data = {
            'email': patientid,
            'first_name': 'intest',
            'last_name': 'intest',
            'height': '177'
        }
        doc_code, resp_doc = visit.post(suffix_url='patient', headers=headers,
                                        data=regpt_data)
        # Fixed: previously logged the stale auth response instead of resp_doc.
        logger.info('doc_code:{}, resp_doc:{}'.format(doc_code, resp_doc))
        resp_doc = json.loads(resp_doc)
        self.assertEqual(patientid, resp_doc['patientid'])
        self.assertIn(doc_code, SUCCESS_STATUS_CODES)
        pat_token = self._login(patientid, 'patient', 'patient')
        logger.debug('before patient get request')
        headers['token'] = pat_token
        headers['role'] = 'patient'
        pat_code, resp_pat = visit.get(suffix_url='patient/{}'.format(patientid),
                                       headers=headers)
        logger.info('pat_code:{}, resp_pat:{}'.format(pat_code, resp_pat))
        resp_pat = json.loads(resp_pat)
        self.assertEqual(patientid, resp_pat['email'])
        self.assertIn(pat_code, SUCCESS_STATUS_CODES)

    def test_prescription(self):
        """The doctor posts two prescriptions; the patient can list both."""
        visit = Visit(ENDPOINT)
        headers = {'content-type': 'application/json'}
        logger.debug('before test_post_prescription')
        headers['token'] = self.doctor_token
        headers['role'] = 'doctor'
        regprescription_data = {
            'datetime': '20160101',
            'drug_name': 'drug1',
            'after_meal': 'yes',
            'amount': '60',
            'dosage_per_day': '2',
            'description': 'with water'
        }
        pat_code, resp_presc = visit.post(suffix_url='prescription/{}/{}'.format(
            self.doctorid, self.patientid), headers=headers, data=regprescription_data)
        logger.info('pat_code:{}, resp_presc:{}'.format(pat_code, resp_presc))
        self.assertIn(pat_code, SUCCESS_STATUS_CODES)
        regprescription_data2 = {
            'datetime': '20160102',
            'drug_name': 'drug2',
            'after_meal': 'yes',
            'amount': '10',
            'dosage_per_day': '1',
            'description': 'with water'
        }
        pat_code, resp_presc = visit.post(suffix_url='prescription/{}/{}'.format(
            self.doctorid, self.patientid), headers=headers, data=regprescription_data2)
        logger.info('pat_code:{}, resp_presc:{}'.format(pat_code, resp_presc))
        self.assertIn(pat_code, SUCCESS_STATUS_CODES)
        logger.debug('before test_get_prescriptions')
        headers['token'] = self.pat_token
        headers['role'] = 'patient'
        pat_code, resp_prescs = visit.get(suffix_url='prescriptions/{}'.format(
            self.patientid), headers=headers)
        logger.info('pat_code:{}, resp_prescs:{}'.format(pat_code, resp_prescs))
        self.assertIn(pat_code, SUCCESS_STATUS_CODES)
        self.assertIn(self.doctorid, resp_prescs)
        self.assertIn('drug1', resp_prescs)
        self.assertIn('drug2', resp_prescs)

    def test_comment(self):
        """The doctor posts two comments; the patient can list both."""
        visit = Visit(ENDPOINT)
        headers = {'content-type': 'application/json'}
        logger.debug('before test_comment')
        headers['token'] = self.doctor_token
        headers['role'] = 'doctor'
        comment_data = {
            'datetime': '20160101',
            'comment': 'drink water'
        }
        pat_code, resp_presc = visit.post(suffix_url='comment/{}/{}'.format(
            self.doctorid, self.patientid), headers=headers, data=comment_data)
        logger.info('pat_code:{}, resp_presc:{}'.format(pat_code, resp_presc))
        self.assertIn(pat_code, SUCCESS_STATUS_CODES)
        comment_data2 = {
            'datetime': '20160102',
            'comment': 'eat drug'
        }
        pat_code, resp_presc = visit.post(suffix_url='comment/{}/{}'.format(
            self.doctorid, self.patientid), headers=headers, data=comment_data2)
        logger.info('pat_code:{}, resp_presc:{}'.format(pat_code, resp_presc))
        self.assertIn(pat_code, SUCCESS_STATUS_CODES)
        logger.debug('before test_get_comments')
        headers['token'] = self.pat_token
        headers['role'] = 'patient'
        pat_code, resp_prescs = visit.get(suffix_url='comments/{}'.format(
            self.patientid), headers=headers)
        logger.info('pat_code:{}, resp_prescs:{}'.format(pat_code, resp_prescs))
        self.assertIn(pat_code, SUCCESS_STATUS_CODES)
        self.assertIn(self.doctorid, resp_prescs)
        self.assertIn('drink water', resp_prescs)
        # Fixed: this previously re-checked 'drink water'; the second comment
        # posted above is 'eat drug' and was never verified.
        self.assertIn('eat drug', resp_prescs)

    def test_discharge(self):
        """The doctor records admissions and a discharge; the patient lists them."""
        visit = Visit(ENDPOINT)
        headers = {'content-type': 'application/json'}
        logger.debug('before test_discharge')
        headers['token'] = self.doctor_token
        headers['role'] = 'doctor'
        comment_data = {
            'datetime': '20160101',
            'indate': '20151111',
            'room': '301',
            'bed': '2',
        }
        pat_code, resp_presc = visit.post(suffix_url='discharge/{}/{}'.format(
            self.doctorid, self.patientid), headers=headers, data=comment_data)
        logger.info('pat_code:{}, resp_presc:{}'.format(pat_code, resp_presc))
        self.assertIn(pat_code, SUCCESS_STATUS_CODES)
        comment_data2 = {
            'datetime': '20160102',
            'indate': '20151212',
            'room': '402',
            'bed': '3',
        }
        pat_code, resp_presc = visit.post(suffix_url='discharge/{}/{}'.format(
            self.doctorid, self.patientid), headers=headers, data=comment_data2)
        logger.info('pat_code:{}, resp_presc:{}'.format(pat_code, resp_presc))
        self.assertIn(pat_code, SUCCESS_STATUS_CODES)
        # Update the second admission (indate 20151212) with discharge details.
        comment_data3 = {
            'datetime': '20160201',
            'outdate': '20160202',
            'description': 'well',
            'bed': '9'
        }
        pat_code, resp_presc = visit.put(suffix_url='discharge/{}/{}/{}'.format(
            self.doctorid, self.patientid, '20151212'), headers=headers, data=comment_data3)
        logger.info('pat_code:{}, resp_presc:{}'.format(pat_code, resp_presc))
        self.assertIn(pat_code, SUCCESS_STATUS_CODES)
        logger.debug('before test_get_discharges')
        headers['token'] = self.pat_token
        headers['role'] = 'patient'
        pat_code, resp_prescs = visit.get(suffix_url='discharges/{}'.format(
            self.patientid), headers=headers)
        logger.info('pat_code:{}, resp_prescs:{}'.format(pat_code, resp_prescs))
        self.assertIn(pat_code, SUCCESS_STATUS_CODES)
        self.assertIn(self.doctorid, resp_prescs)
        self.assertIn('20160202', resp_prescs)
        self.assertIn('20151111', resp_prescs)
        self.assertIn('9', resp_prescs)

    def test_appointment(self):
        """Book, list, and cancel appointments in a doctor's day schedule."""
        visit = Visit(ENDPOINT)
        headers = {'content-type': 'application/json'}
        logger.debug('before test_appointment')
        headers['token'] = self.doctor_token
        headers['role'] = 'doctor'
        apmt_data = {
            'doctorid': self.doctorid,
            'patientid': self.patientid,
            'datetimeslot': '201511111300',
            'illness': 'headache'
        }
        pat_code, resp_presc = visit.post(suffix_url='appointment', headers=headers, data=apmt_data)
        logger.info('pat_code:{}, resp_presc:{}'.format(pat_code, resp_presc))
        self.assertIn(pat_code, SUCCESS_STATUS_CODES)
        pat_code, resp_prescs = visit.get(suffix_url='appointment/{}/{}/{}'.format(
            self.doctorid, '201511111300', self.patientid), headers=headers)
        logger.info('pat_code:{}, resp_prescs:{}'.format(pat_code, resp_prescs))
        self.assertIn(pat_code, SUCCESS_STATUS_CODES)
        self.assertIn('headache', resp_prescs)
        apmt_data = {
            'doctorid': self.doctorid,
            'patientid': self.patientid,
            'datetimeslot': '201511111430',
            'illness': 'cold'
        }
        pat_code, resp_presc = visit.post(suffix_url='appointment', headers=headers, data=apmt_data)
        logger.info('pat_code:{}, resp_presc:{}'.format(pat_code, resp_presc))
        self.assertIn(pat_code, SUCCESS_STATUS_CODES)
        # The patient sees both booked slots for the day.
        headers['token'] = self.pat_token
        headers['role'] = 'patient'
        pat_code, resp_prescs = visit.get(suffix_url='appointment/{}/{}'.format(
            self.doctorid, '20151111'), headers=headers)
        logger.info('pat_code:{}, resp_prescs:{}'.format(pat_code, resp_prescs))
        resp_prescs = json.loads(resp_prescs)
        self.assertIn(pat_code, SUCCESS_STATUS_CODES)
        self.assertIn('1', resp_prescs['1300'])
        self.assertIn('1', resp_prescs['1430'])
        # Cancel the 13:00 slot; only 14:30 should remain.
        pat_code, resp_prescs = visit.delete(suffix_url='appointment/{}/{}/{}'.format(
            self.doctorid, '201511111300', self.patientid), headers=headers)
        logger.info('pat_code:{}, resp_presc:{}'.format(pat_code, resp_presc))
        self.assertIn(pat_code, SUCCESS_STATUS_CODES)
        pat_code, resp_prescs = visit.get(suffix_url='appointment/{}/{}'.format(
            self.doctorid, '20151111'), headers=headers)
        logger.info('pat_code:{}, resp_prescs:{}'.format(pat_code, resp_prescs))
        self.assertIn(pat_code, SUCCESS_STATUS_CODES)
        self.assertNotIn('1300', resp_prescs)
        resp_prescs = json.loads(resp_prescs)
        self.assertIn('1', resp_prescs['1430'])
if __name__ == '__main__':
    # Start the API server on a background daemon thread, then run the suite.
    run_server()
    unittest.main()
| 37.272013
| 100
| 0.589876
| 2,579
| 23,705
| 5.185343
| 0.084917
| 0.059224
| 0.04195
| 0.049353
| 0.827787
| 0.811486
| 0.792866
| 0.775667
| 0.748598
| 0.721304
| 0
| 0.017729
| 0.276651
| 23,705
| 635
| 101
| 37.330709
| 0.762174
| 0.070196
| 0
| 0.648594
| 0
| 0
| 0.165734
| 0.004187
| 0
| 0
| 0
| 0.001575
| 0.120482
| 1
| 0.03012
| false
| 0.036145
| 0.042169
| 0
| 0.076305
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
dbc012b8677c149675fd89590be2fbb4228e2795
| 34
|
py
|
Python
|
__init__.py
|
WindfallLabs/spatialreferenceapi
|
3542ea0e9681389836dbeffaa94609085dabbb95
|
[
"Unlicense"
] | null | null | null |
__init__.py
|
WindfallLabs/spatialreferenceapi
|
3542ea0e9681389836dbeffaa94609085dabbb95
|
[
"Unlicense"
] | null | null | null |
__init__.py
|
WindfallLabs/spatialreferenceapi
|
3542ea0e9681389836dbeffaa94609085dabbb95
|
[
"Unlicense"
] | null | null | null |
from spatialreferenceapi import *
| 17
| 33
| 0.852941
| 3
| 34
| 9.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 34
| 1
| 34
| 34
| 0.966667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
dbc540f081ce7833cc314c7b69d0ced5239a58fd
| 8,160
|
py
|
Python
|
server/tests/test_api/test_accounts.py
|
guiloga/scalade
|
fd59b239fb35e8a7028baea3ed6d4b23282c200d
|
[
"MIT"
] | 4
|
2021-12-22T18:07:10.000Z
|
2021-12-29T09:22:44.000Z
|
server/tests/test_api/test_accounts.py
|
guiloga/scalade
|
fd59b239fb35e8a7028baea3ed6d4b23282c200d
|
[
"MIT"
] | null | null | null |
server/tests/test_api/test_accounts.py
|
guiloga/scalade
|
fd59b239fb35e8a7028baea3ed6d4b23282c200d
|
[
"MIT"
] | null | null | null |
from uuid import uuid4
import pytest
from asgiref.sync import sync_to_async
from django.urls import reverse
from tests.conftest import (
assert_no_auth_forbidden,
check_create_entities_api_call,
check_list_entities_api_call,
check_method_not_allowed,
check_retrieve_entities_api_call,
get_api_uri,
query_account_from_session_id,
)
class TestSetCSRFTokenView:
    """GET set-csrftoken answers 200 with an empty body and a csrftoken cookie."""

    @pytest.mark.asyncio
    async def test_get(self, async_client):
        resp = await async_client.get(reverse("set-csrftoken"))
        assert resp.status_code == 200
        assert resp.content.decode() == ""
        assert resp.cookies["csrftoken"]
class TestCheckActiveSessionView:
    """Without a login, check-session answers 200 but reports no active session."""

    @pytest.mark.asyncio
    async def test_get(self, async_client):
        resp = await async_client.get(reverse("check-session"))
        assert resp.status_code == 200
        assert not resp.json()["success"]
class TestWorkspaceViewSet:
    """Workspace endpoint: list/create allowed, retrieve by id, PUT/DELETE rejected."""

    @pytest.fixture
    def url(self):
        # Collection endpoint for workspace entities.
        return get_api_uri("entities/workspaces/")

    @pytest.fixture
    def sid(self):
        # Seeded workspace id (name=test_user-default) from the test fixtures.
        return "0be47ce4-a2e9-471f-b92a-a25c7811e261"

    @pytest.mark.asyncio
    async def test_no_auth_forbidden(self, async_client, url):
        # Unauthenticated access must be rejected.
        assert_no_auth_forbidden(await async_client.get(url))

    @pytest.mark.asyncio
    @pytest.mark.django_db
    async def test_list(self, async_client, url):
        await check_list_entities_api_call(async_client, url)

    @pytest.mark.asyncio
    @pytest.mark.django_db
    async def test_create(self, async_client, url):
        request_body = {
            "name": "foobar-test-ws",
            "business": "0461f477-8194-4d3d-8c3a-1311f3d8eba4",
        }
        await check_create_entities_api_call(async_client, url, request_body)
        # NOTE(review): an identical second POST is expected to 400 —
        # presumably a duplicate-name rejection; confirm against the view.
        bad_response = await async_client.post(
            url, request_body, content_type="application/json"
        )
        assert bad_response.status_code == 400
        # An unknown (random) business uuid must also be rejected.
        request_body["business"] = str(uuid4())
        bad_response = await async_client.post(
            url, request_body, content_type="application/json"
        )
        assert bad_response.status_code == 400

    @pytest.mark.asyncio
    @pytest.mark.django_db
    async def test_retrieve(self, async_client, url, sid):
        response = await check_retrieve_entities_api_call(
            async_client, f"{url}{sid}/"
        )
        body = response.json()
        # Matches the seeded workspace referenced by the sid fixture.
        assert body["name"] == "test_user-default"

    @pytest.mark.asyncio
    @pytest.mark.django_db
    async def test_update(self, async_client, url, sid):
        # PUT is not exposed on this viewset.
        await check_method_not_allowed(async_client, "put", f"{url}{sid}/")

    @pytest.mark.asyncio
    @pytest.mark.django_db
    async def test_destroy(self, async_client, url, sid):
        # DELETE is not exposed on this viewset.
        await check_method_not_allowed(async_client, "delete", f"{url}{sid}/")
class TestAccountViewSet:
    """Account endpoint: read-only (POST/PUT/DELETE rejected); list supports
    pagination and a related_workspace filter."""

    @pytest.fixture
    def url(self):
        return get_api_uri("entities/accounts/")

    @pytest.fixture
    def sid(self):
        # Seeded account id from the test fixtures.
        return "561ac10c-f66d-49f9-862f-598e621ecafa"

    @pytest.mark.asyncio
    async def test_no_auth_forbidden(self, async_client, url):
        assert_no_auth_forbidden(await async_client.get(url))

    @pytest.mark.asyncio
    @pytest.mark.django_db
    async def test_list(self, async_client, url):
        def _get_account_workspace(account):
            # Runs via sync_to_async below: Django ORM access is synchronous.
            workspaces = account.workspaces.all()
            return workspaces[0]

        await check_list_entities_api_call(async_client, url)
        # Pagination: limit=2/offset=1 yields 2 items out of a larger queryset.
        response = await check_list_entities_api_call(
            async_client, f"{url}?is_active=true&limit=2&offset=1"
        )
        body = response.json()
        assert body["count"] == 2
        assert body["total_queryset"] > 2
        account = await query_account_from_session_id(
            async_client.session.session_key
        )
        ws = await sync_to_async(_get_account_workspace)(account)
        # Filter by related_workspace: every returned account must link
        # back to that workspace.
        response = await check_list_entities_api_call(
            async_client, f"{url}?related_workspace={ws.uuid}"
        )
        assert response.status_code == 200
        for item in response.json()["data"]:
            ws_response = await async_client.get(item["url"])
            assert str(ws.uuid) in ws_response.json()["workspaces"]
        # A random workspace uuid is rejected with 400.
        bad_response = await async_client.get(
            f"{url}?related_workspace={uuid4()}"
        )
        assert bad_response.status_code == 400

    @pytest.mark.asyncio
    @pytest.mark.django_db
    async def test_create(self, async_client, url):
        # POST is not exposed on this viewset.
        await check_method_not_allowed(async_client, "post", url)

    @pytest.mark.asyncio
    @pytest.mark.django_db
    async def test_retrieve(self, async_client, url, sid):
        response = await check_retrieve_entities_api_call(
            async_client, f"{url}{sid}/"
        )
        body = response.json()
        # Matches the seeded account referenced by the sid fixture.
        assert body["auth_id"] == "my-company:master"

    @pytest.mark.asyncio
    @pytest.mark.django_db
    async def test_update(self, async_client, url, sid):
        await check_method_not_allowed(async_client, "put", f"{url}{sid}/")

    @pytest.mark.asyncio
    @pytest.mark.django_db
    async def test_destroy(self, async_client, url, sid):
        await check_method_not_allowed(async_client, "delete", f"{url}{sid}/")
class TestBusinessViewSet:
    """Business endpoint: read-only — list and retrieve work, writes are rejected."""

    @pytest.fixture
    def url(self):
        return get_api_uri("entities/businesses/")

    @pytest.fixture
    def sid(self):
        return "0461f477-8194-4d3d-8c3a-1311f3d8eba4"

    @pytest.mark.asyncio
    async def test_no_auth_forbidden(self, async_client, url):
        resp = await async_client.get(url)
        assert_no_auth_forbidden(resp)

    @pytest.mark.asyncio
    @pytest.mark.django_db
    async def test_list(self, async_client, url):
        await check_list_entities_api_call(async_client, url)

    @pytest.mark.asyncio
    @pytest.mark.django_db
    async def test_create(self, async_client, url):
        await check_method_not_allowed(async_client, "post", url)

    @pytest.mark.asyncio
    @pytest.mark.django_db
    async def test_retrieve(self, async_client, url, sid):
        detail_url = f"{url}{sid}/"
        resp = await check_retrieve_entities_api_call(async_client, detail_url)
        assert resp.json()["organization_slug"] == "my-company"

    @pytest.mark.asyncio
    @pytest.mark.django_db
    async def test_update(self, async_client, url, sid):
        detail_url = f"{url}{sid}/"
        await check_method_not_allowed(async_client, "put", detail_url)

    @pytest.mark.asyncio
    @pytest.mark.django_db
    async def test_destroy(self, async_client, url, sid):
        detail_url = f"{url}{sid}/"
        await check_method_not_allowed(async_client, "delete", detail_url)
class TestUserViewSet:
    """User endpoint: read-only — list and retrieve work, writes are rejected."""

    @pytest.fixture
    def url(self):
        return get_api_uri("entities/users/")

    @pytest.fixture
    def sid(self):
        return "b0442be3-6d21-431a-9e30-45f909eb9ac8"

    @pytest.mark.asyncio
    async def test_no_auth_forbidden(self, async_client, url):
        resp = await async_client.get(url)
        assert_no_auth_forbidden(resp)

    @pytest.mark.asyncio
    @pytest.mark.django_db
    async def test_list(self, async_client, url):
        await check_list_entities_api_call(async_client, url)

    @pytest.mark.asyncio
    @pytest.mark.django_db
    async def test_create(self, async_client, url):
        await check_method_not_allowed(async_client, "post", url)

    @pytest.mark.asyncio
    @pytest.mark.django_db
    async def test_retrieve(self, async_client, url, sid):
        detail_url = f"{url}{sid}/"
        resp = await check_retrieve_entities_api_call(async_client, detail_url)
        payload = resp.json()
        assert payload["business"] == "0461f477-8194-4d3d-8c3a-1311f3d8eba4"
        assert payload["first_name"] == "User 1"

    @pytest.mark.asyncio
    @pytest.mark.django_db
    async def test_update(self, async_client, url, sid):
        detail_url = f"{url}{sid}/"
        await check_method_not_allowed(async_client, "put", detail_url)

    @pytest.mark.asyncio
    @pytest.mark.django_db
    async def test_destroy(self, async_client, url, sid):
        detail_url = f"{url}{sid}/"
        await check_method_not_allowed(async_client, "delete", detail_url)
| 32.64
| 78
| 0.675245
| 1,053
| 8,160
| 4.967711
| 0.125356
| 0.124068
| 0.082967
| 0.089467
| 0.804626
| 0.742305
| 0.704837
| 0.704837
| 0.704837
| 0.696616
| 0
| 0.024008
| 0.218995
| 8,160
| 249
| 79
| 32.771084
| 0.796799
| 0.002696
| 0
| 0.663366
| 0
| 0
| 0.099803
| 0.039208
| 0
| 0
| 0
| 0
| 0.108911
| 1
| 0.044554
| false
| 0
| 0.024752
| 0.039604
| 0.143564
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
dbcd95158a64a6d55a2640840f2fb8bb7718cb1d
| 34
|
py
|
Python
|
pycdb/__init__.py
|
moriyoshi/wozozo-cdb
|
73affe565c128161b87997e57ee10e75c0c76347
|
[
"Python-2.0"
] | 1
|
2018-12-07T04:41:28.000Z
|
2018-12-07T04:41:28.000Z
|
pycdb/__init__.py
|
moriyoshi/wozozo-cdb
|
73affe565c128161b87997e57ee10e75c0c76347
|
[
"Python-2.0"
] | null | null | null |
pycdb/__init__.py
|
moriyoshi/wozozo-cdb
|
73affe565c128161b87997e57ee10e75c0c76347
|
[
"Python-2.0"
] | null | null | null |
from ._pycdb import CDB, CDBMake
| 11.333333
| 32
| 0.764706
| 5
| 34
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 34
| 2
| 33
| 17
| 0.892857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
91622c6137bdc00e73e09b847385c572c280c1a4
| 3,583
|
py
|
Python
|
app/conference/migrations/0001_initial.py
|
confbot-telegram-conferences/confbot
|
52fa307275b679748b5a7a6e7cb29bfc7b792875
|
[
"MIT"
] | 1
|
2022-01-18T05:19:45.000Z
|
2022-01-18T05:19:45.000Z
|
app/conference/migrations/0001_initial.py
|
confbot-telegram-conferences/confbot
|
52fa307275b679748b5a7a6e7cb29bfc7b792875
|
[
"MIT"
] | null | null | null |
app/conference/migrations/0001_initial.py
|
confbot-telegram-conferences/confbot
|
52fa307275b679748b5a7a6e7cb29bfc7b792875
|
[
"MIT"
] | 1
|
2022-01-18T13:54:57.000Z
|
2022-01-18T13:54:57.000Z
|
# Generated by Django 3.0.11 on 2021-03-26 16:15
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import model_utils.fields
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Conference',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('created_at', model_utils.fields.AutoCreatedField(db_index=True, default=django.utils.timezone.now, editable=False, verbose_name='Created at')),
('updated_at', model_utils.fields.AutoLastModifiedField(db_index=True, default=django.utils.timezone.now, editable=False, verbose_name='Updated at')),
('name', models.CharField(max_length=255, verbose_name='Name of Conference')),
('description', models.TextField(blank=True, default='', null=True, verbose_name='Description')),
('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='UserConference',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('created_at', model_utils.fields.AutoCreatedField(db_index=True, default=django.utils.timezone.now, editable=False, verbose_name='Created at')),
('updated_at', model_utils.fields.AutoLastModifiedField(db_index=True, default=django.utils.timezone.now, editable=False, verbose_name='Updated at')),
('slide_position', models.IntegerField(default=1, verbose_name='Slide Position')),
('conference', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_conferences', to='conference.Conference')),
('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='user_conferences', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Slide',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('created_at', model_utils.fields.AutoCreatedField(db_index=True, default=django.utils.timezone.now, editable=False, verbose_name='Created at')),
('updated_at', model_utils.fields.AutoLastModifiedField(db_index=True, default=django.utils.timezone.now, editable=False, verbose_name='Updated at')),
('text', models.TextField(blank=True, default='', null=True, verbose_name='Text')),
('image', models.CharField(blank=True, max_length=255, null=True, verbose_name='Image')),
('voice', models.CharField(blank=True, max_length=255, null=True, verbose_name='Voice')),
('position', models.IntegerField(default=1, verbose_name='Position')),
('conference', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='slides', to='conference.Conference')),
],
options={
'abstract': False,
},
),
]
| 55.123077
| 166
| 0.643874
| 385
| 3,583
| 5.846753
| 0.215584
| 0.063527
| 0.059085
| 0.047979
| 0.753887
| 0.753887
| 0.753887
| 0.713905
| 0.713905
| 0.66948
| 0
| 0.010764
| 0.22216
| 3,583
| 64
| 167
| 55.984375
| 0.796914
| 0.012838
| 0
| 0.473684
| 1
| 0
| 0.114286
| 0.011881
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.105263
| 0
| 0.175439
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9180e183f0285a079865e52caad692b09637cffa
| 7,774
|
py
|
Python
|
modules/samtools.py
|
tyrmi/STAPLER
|
fd83eee4be0bb78c67a111fd1c1c1dff4c16aefe
|
[
"BSD-3-Clause"
] | 4
|
2017-07-17T07:45:39.000Z
|
2021-01-12T00:33:10.000Z
|
modules/samtools.py
|
tyrmi/STAPLER
|
fd83eee4be0bb78c67a111fd1c1c1dff4c16aefe
|
[
"BSD-3-Clause"
] | null | null | null |
modules/samtools.py
|
tyrmi/STAPLER
|
fd83eee4be0bb78c67a111fd1c1c1dff4c16aefe
|
[
"BSD-3-Clause"
] | null | null | null |
import os
from GenericBase import GenericBase
from STAPLERerror import VirtualIOError
from STAPLERerror import STAPLERerror
import utils
class samtools_index(GenericBase):
"""Class for creating command lines for samtools index.
Parameters:
in_cmd: String containing a command line
in_dir: Directory object containing input files
out_dir: Directory object containing output files
NOTICE! Keep the directory objects up to date about file edits!
Attributes:
name: Name of the function.
input_type: Input types accepted by this application.
output_types: List of output types produced by the application.
mandatory_args: Args the user be provided in in_cmd when initializing.
user_mandatory_args: Args the user must provide.
remove_user_args: Args that will be removed from the final command.
optional_args: Args that may be part of the command line.
in_cmd: Command entered by user.
parsed_cmd: Final output command as option:value dict.
file_names: Names of output files.
command_ids: File names of input file(s) with no file extensions.
Methods:
get_cmd: Method for getting the final cmd line string for output.
"""
name = 'stapler_samtools_index'
#Accept all defined types:
input_types = {'.bam'}
output_types = []
require_output_dir = False
hidden_mandatory_args = ['--!i']
user_mandatory_args = []
remove_user_args = user_mandatory_args
user_optional_args = ['-b', '-c', '-m']
parallelizable = True
help_description = '''
Tested with samtools 1.2.
The index files are generated into the input directory.
'''
def _select_IO(self, out_cmd, in_dir, out_dir):
"""Infers the input and output file paths.
This method must keep the directory objects up to date of the file
edits!
Parameters:
in_cmd: A dict containing the command line.
in_dir: Input directory (instance of filetypes.Directory).
out_dir: Output directory (instance of filetypes.Directory).
Returns:
out_cmd: Dict containing the output commands
command_identifier: Input file name based identifier for the current command
Raises:
VirtualIOError: No valid input file can be found.
"""
IO_files = {}
file_names = set()
for fl in in_dir.files:
if self.name not in fl.users:
if utils.splitext(fl.name)[-1] in self.input_types:
# Infer input file
IO_files['--!i'] = os.path.join(in_dir.path, fl.name)
command_ids = [utils.infer_path_id(IO_files['--!i'])]
in_dir.use_file(fl.name, self.name)
# Add index file to the input directory
in_dir.add_file(fl.name + '.bai')
break
if not IO_files:
raise VirtualIOError('No more unused input files')
out_cmd.update(IO_files)
return out_cmd, command_ids
def get_cmd(self):
"""Returns the final command line.
Returns:
final_cmd: List of command line produced by the object (line breaks not allowed within command lines!).
"""
run_command = utils.parse_config(self.name, 'cmd_name', 'execute')
final_cmd = [run_command]
for arg, val in self.out_cmd.iteritems():
if arg in {'--!i', '--!o'}: continue
final_cmd.append(arg + ' ' + val)
final_cmd.append(self.out_cmd['--!i'])
return [' '.join(final_cmd)]
class samtools_rmdup(GenericBase):
"""Class for creating command lines for samtools rmdup.
Parameters:
in_cmd: String containing a command line
in_dir: Directory object containing input files
out_dir: Directory object containing output files
NOTICE! Keep the directory objects up to date about file edits!
Attributes:
name: Name of the function.
input_type: Input types accepted by this application.
output_types: List of output types produced by the application.
mandatory_args: Args the user be provided in in_cmd when initializing.
user_mandatory_args: Args the user must provide.
remove_user_args: Args that will be removed from the final command.
optional_args: Args that may be part of the command line.
in_cmd: Command entered by user.
parsed_cmd: Final output command as option:value dict.
file_names: Names of output files.
command_ids: File names of input file(s) with no file extensions.
Methods:
get_cmd: Method for getting the final cmd line string for output.
"""
name = 'stapler_samtools_rmdup'
#Accept all defined types:
input_types = {'.bam'}
output_types = ['.bam']
hidden_mandatory_args = ['--!i', '--!o']
user_mandatory_args = []
remove_user_args = user_mandatory_args
user_optional_args = ['-s', '-S']
parallelizable = True
help_description = '''
Tested with samtools 0.1.19
Notice that this function does not seem to work in the current version of
samtools (1.2), use of older version is therefore recommended. Beware module
conflicts!
'''
def _select_IO(self, out_cmd, in_dir, out_dir):
"""Infers the input and output file paths.
This method must keep the directory objects up to date of the file
edits!
Parameters:
in_cmd: A dict containing the command line.
in_dir: Input directory (instance of filetypes.Directory).
out_dir: Output directory (instance of filetypes.Directory).
Returns:
out_cmd: Dict containing the output commands
command_identifier: Input file name based identifier for the current command
Raises:
VirtualIOError: No valid input file can be found.
"""
IO_files = {}
file_names = set()
for fl in in_dir.files:
if self.name not in fl.users:
if utils.splitext(fl.name)[-1] in self.input_types:
IO_files['--!i'] = os.path.join(in_dir.path, fl.name)
command_ids = [utils.infer_path_id(IO_files['--!i'])]
in_dir.use_file(fl.name, self.name)
assert len(self.output_types) == 1, 'Several output ' \
'types, override ' \
'this method!'
output_name = utils.splitext(fl.name)[0] + \
self.output_types[0]
output_path = os.path.join(out_dir.path, output_name)
IO_files['--!o'] = output_path
file_names.add(output_name)
out_dir.add_file(output_name)
break
if not IO_files:
raise VirtualIOError('No more unused input files')
out_cmd.update(IO_files)
return out_cmd, command_ids
def get_cmd(self):
"""Returns the final command line.
Returns:
final_cmd: List of command line produced by the object (line breaks not allowed within command lines!).
"""
run_command = utils.parse_config(self.name, 'cmd_name', 'execute')
final_cmd = [run_command]
final_cmd.append(self.out_cmd['--!i'])
final_cmd.append(self.out_cmd['--!o'])
for arg, val in self.out_cmd.iteritems():
if arg in {'--!i', '--!o'}: continue
final_cmd.append(arg + ' ' + val)
return [' '.join(final_cmd)]
| 37.375
| 112
| 0.614741
| 995
| 7,774
| 4.639196
| 0.173869
| 0.014081
| 0.015165
| 0.013865
| 0.839255
| 0.839255
| 0.834055
| 0.801127
| 0.779463
| 0.759965
| 0
| 0.00241
| 0.306149
| 7,774
| 207
| 113
| 37.555556
| 0.853356
| 0.41716
| 0
| 0.629213
| 0
| 0
| 0.13984
| 0.011331
| 0
| 0
| 0
| 0
| 0.011236
| 1
| 0.044944
| false
| 0
| 0.05618
| 0
| 0.382022
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9183e72d5a538d43e16bc28b574dbb6147230a7c
| 67
|
py
|
Python
|
lil_lib/__init__.py
|
remingtonc/python-force-ipv4
|
3265881bf994866f1762e3de7e111d8741793389
|
[
"Apache-2.0"
] | null | null | null |
lil_lib/__init__.py
|
remingtonc/python-force-ipv4
|
3265881bf994866f1762e3de7e111d8741793389
|
[
"Apache-2.0"
] | null | null | null |
lil_lib/__init__.py
|
remingtonc/python-force-ipv4
|
3265881bf994866f1762e3de7e111d8741793389
|
[
"Apache-2.0"
] | null | null | null |
from .profile import profile_getaddrinfo, profile_getaddrinfo_async
| 67
| 67
| 0.910448
| 8
| 67
| 7.25
| 0.625
| 0.62069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059701
| 67
| 1
| 67
| 67
| 0.920635
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
37d1c90552b600196789ec7d3bcb55a433feb103
| 33
|
py
|
Python
|
testpython.py
|
chapman-cs510-2016f/cw-01-nv-shen
|
bf273af15284a5fba837957f533ec7066d78e490
|
[
"MIT"
] | null | null | null |
testpython.py
|
chapman-cs510-2016f/cw-01-nv-shen
|
bf273af15284a5fba837957f533ec7066d78e490
|
[
"MIT"
] | null | null | null |
testpython.py
|
chapman-cs510-2016f/cw-01-nv-shen
|
bf273af15284a5fba837957f533ec7066d78e490
|
[
"MIT"
] | null | null | null |
def test_trivial():
assert True
| 11
| 19
| 0.757576
| 5
| 33
| 4.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151515
| 33
| 2
| 20
| 16.5
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
37f44e98cf5066210ee16c79918c3cef6288040c
| 149
|
py
|
Python
|
apps/users/tests/test_api/public/__init__.py
|
GiannisClipper/payments
|
94e08144597b3f4cd0de8485edf3f5535aeb9da6
|
[
"MIT"
] | null | null | null |
apps/users/tests/test_api/public/__init__.py
|
GiannisClipper/payments
|
94e08144597b3f4cd0de8485edf3f5535aeb9da6
|
[
"MIT"
] | null | null | null |
apps/users/tests/test_api/public/__init__.py
|
GiannisClipper/payments
|
94e08144597b3f4cd0de8485edf3f5535aeb9da6
|
[
"MIT"
] | null | null | null |
from .. import UsersAPITests
class PublicUsersAPITests(UsersAPITests):
'''Test users API requests that not require authentication'''
pass
| 18.625
| 65
| 0.751678
| 15
| 149
| 7.466667
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.174497
| 149
| 7
| 66
| 21.285714
| 0.910569
| 0.369128
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
5326ca40563c91a8e17e2439ed633b58a633ee36
| 62,857
|
py
|
Python
|
Plot_Results.py
|
sukritranjan/ranjansasselov2016b
|
7931caa57961736a939b67f6b5f87d7612341b7f
|
[
"MIT"
] | 1
|
2017-12-14T03:48:30.000Z
|
2017-12-14T03:48:30.000Z
|
Plot_Results.py
|
sukritranjan/ranjansasselov2016b
|
7931caa57961736a939b67f6b5f87d7612341b7f
|
[
"MIT"
] | null | null | null |
Plot_Results.py
|
sukritranjan/ranjansasselov2016b
|
7931caa57961736a939b67f6b5f87d7612341b7f
|
[
"MIT"
] | null | null | null |
# -*- coding: iso-8859-1 -*-
"""
This code creates the Results plots for Ranjan & Sasselov 2016b.
"""
########################
###Import useful libraries
########################
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import pdb
from matplotlib.pyplot import cm
def cm2inch(cm): #function to convert cm to inches; useful for complying with Astrobiology size guidelines
return cm/2.54
########################
###Set important constants
########################
hc=1.98645e-9 #value of h*c in erg*nm, useful to convert from ergs/cm2/s/nm to photons/cm2/s/nm
########################
###Specific which family of plots to generate
########################
plot_intvsflux=False #Plot to demonstrate difference between surface radiance and surface flux.
plot_alb_zenithangle=False #plot to demonstrate the impact of albedo and zenith angle on emergent surface intensity
plot_co2_limits=False #this time, using a fixed level of N2 and variable amounts of CO2.
plot_altgas_limits=False #fixed level of N2, various levels of other gases.
plot_dosimeters_co2=True #Plots the convolution of the various action spectra with the surficial spectra for the co2 study, integrated, and normalized. Helps us see how these parameters vary.
plot_dosimeters_h2o=True #Plots the convolution of the various action spectra with the surficial spectra for h2o
plot_dosimeters_so2=True #Plots the convolution of the various action spectra with the surficial spectra for so2
plot_dosimeters_h2s=True #Plots the convolution of the various action spectra with the surficial spectra for h2s
plot_dosimeters_ch4=False #Plots the convolution of the various action spectra with the surficial spectra for ch4
plot_dosimeters_o2=False #Plots the convolution of the various action spectra with the surficial spectra for o2
plot_dosimeters_o3=False #Plots the convolution of the various action spectra with the surficial spectra for o3
############################
#######Plot to demonstrate the difference between surface flux and surface intensity
############################
if plot_intvsflux:
wav, toa_intensity, surface_flux, total_intensity,surface_intensity=np.genfromtxt('./TwoStreamOutput/rugheimer_earth_epoch0_a=newsnow_z=60.dat', skip_header=1, skip_footer=0, usecols=(2,3,4,5,6), unpack=True)
fig1, (ax1)=plt.subplots(1, figsize=(8,5), sharex=True)
ax1.plot(wav, toa_intensity*wav/hc, linestyle='-', color='black',marker='s', label='TOA Flux')
ax1.plot(wav, total_intensity*wav/hc, linestyle='-', color='red', marker='s',label='BOA Actinic Flux')
ax1.plot(wav, surface_intensity*wav/hc,linestyle='-', color='purple', marker='s',label='Surface Radiance')
ax1.plot(wav, surface_flux*wav/hc, linestyle='-', color='blue', marker='s',label='Surface Flux')
ylimits=[1.e9, 1.e15]
ax1.set_title('R+2015 Atmosphere, A=New Snow, SZA=60')
ax1.legend(loc=0, ncol=1, borderaxespad=0.)
ax1.set_yscale('log')
ax1.set_ylabel(r'photons/s/cm$^2$/nm')
ax1.set_ylim(ylimits)
ax1.set_xlim([130.,500.])
ax1.set_xlabel('nm')
#plt.tight_layout(rect=(0,0,1,0.9))
plt.savefig('./Plots/intvsflux.eps', orientation='portrait',papertype='letter', format='eps')
############################
#######Plot to demonstrate impact of albedo and zenith angle on emergent surface intensity. We use the Rugheimer prebiotic atmosphere model (mixing ratios, surface pressure, T/P profile) for this reference system.
############################
if plot_alb_zenithangle:
###When importing files, variables are:
###wav_x: centers of wavelength bins, nm
###toa_intensity_x: top-of-atmosphere intensity (incident), erg/s/nm/cm2
###surface_flux_x: total flux incident on surface, erg/s/nm/cm2
###surface_intensity_x: total intensity incident on the surface (direct+diffuse), erg/s/nm/cm2
###surface_intensity_diffuse_x: diffuse total intensity incident on the surface, erg/s/nm/cm2
###surface_intensity_direct_x: direct total intensity incident on the surface, erg/s/nm/cm2
#####Set up info about the files to extract
albedolist=['1', 'newsnow', 'oldsnow', '0.2', 'desert', 'ocean', 'tundra', '0'] #list of albedos we consider
AlbedoLabels=['1', 'New Snow', 'Old Snow', '0.2', 'Desert', 'Ocean', 'Tundra', '0']#labels for the figure
#####Plot Figure
#Step 1: Initialize Figure
fig1, (ax1, ax2, ax3)=plt.subplots(3, figsize=(cm2inch(16.5),10), sharex=True)
colorseq1=iter(cm.rainbow(np.linspace(0,1,len(albedolist))))
colorseq2=iter(cm.rainbow(np.linspace(0,1,len(albedolist))))
colorseq3=iter(cm.rainbow(np.linspace(0,1,len(albedolist))))
#Step 2: loop over all files, plot figure
for albind in range(0, len(albedolist)):
albedo=albedolist[albind]
#Load file
wav_z_0, toa_intensity_z_0, surface_flux_z_0, surface_intensity_z_0, surface_intensity_diffuse_z_0, surface_intensity_direct_z_0=np.genfromtxt('./TwoStreamOutput/AlbZen/rugheimer_earth_epoch0_a='+albedo+'_z=0.dat', skip_header=1, skip_footer=0, usecols=(2,3,4,6,7,8), unpack=True) #albedo=desert, zenith angle=0 degrees
wav_z_48p2, toa_intensity_z_48p2, surface_flux_z_48p2, surface_intensity_z_48p2, surface_intensity_diffuse_z_48p2, surface_intensity_direct_z_48p2=np.genfromtxt('./TwoStreamOutput/AlbZen/rugheimer_earth_epoch0_a='+albedo+'_z=48.2.dat', skip_header=1, skip_footer=0, usecols=(2,3,4,6,7,8), unpack=True) #albedo=desert, zenith angle=0 degrees
wav_z_66p5, toa_intensity_z_66p5, surface_flux_z_66p5, surface_intensity_z_66p5, surface_intensity_diffuse_z_66p5, surface_intensity_direct_z_66p5=np.genfromtxt('./TwoStreamOutput/AlbZen/rugheimer_earth_epoch0_a='+albedo+'_z=66.5.dat', skip_header=1, skip_footer=0, usecols=(2,3,4,6,7,8), unpack=True) #albedo=desert, zenith angle=0 degrees
if albind==0: #Initialize with TOA flux
ax1.plot(wav_z_0, toa_intensity_z_0*wav_z_0/hc, marker='.', color='black', label=r'TOA Flux')
ax2.plot(wav_z_48p2, toa_intensity_z_48p2*wav_z_48p2/hc, marker='.', color='black', label=r'TOA Flux')
ax3.plot(wav_z_66p5, toa_intensity_z_66p5*wav_z_66p5/hc, marker='.', color='black', label=r'TOA Flux')
ax1.plot(wav_z_0, surface_intensity_z_0*wav_z_0/hc, marker='.', color=next(colorseq1), label=r'A='+AlbedoLabels[albind])
ax2.plot(wav_z_48p2, surface_intensity_z_48p2*wav_z_48p2/hc, marker='.', color=next(colorseq2), label=r'A='+AlbedoLabels[albind])
ax3.plot(wav_z_66p5, surface_intensity_z_66p5*wav_z_66p5/hc, marker='.', color=next(colorseq3), label=r'A='+AlbedoLabels[albind])
#Step 3: Clean up figure
ylimits=[1.e9, 1.e15]
ax1.set_title(r'z=0$^\circ$')
ax1.legend(bbox_to_anchor=[0, 1.13, 1., .152], loc=3, ncol=3, mode='expand', borderaxespad=0., fontsize=10)
ax2.set_title(r'z=48.2$^\circ$')
ax3.set_title(r'z=66.5$^\circ$')
ax1.set_yscale('log')
ax1.set_ylabel(r'photons/s/cm$^2$/nm')
ax1.set_ylim(ylimits)
ax2.set_yscale('log')
ax2.set_ylabel(r'photons/s/cm$^2$/nm')
ax2.set_ylim(ylimits)
ax3.set_yscale('log')
ax3.set_ylabel(r'photons/s/cm$^2$/nm')
ax3.set_ylim(ylimits)
ax3.set_xlim([100.,500.])
ax3.set_xlabel('nm')
plt.tight_layout(rect=(0,0,1,0.9))
plt.savefig('./Plots/paperplots_a_z_dependence.eps', orientation='portrait',papertype='letter', format='eps')
if plot_co2_limits:
###When importing files, variables are:
###ind 0: wav_x: centers of wavelength bins, nm
###ind 1: toa_intensity_x: top-of-atmosphere intensity (incident), erg/s/nm/cm2
###ind 2: surface_flux_x: total flux incident on surface, erg/s/nm/cm2
###ind 3: surface_intensity_x: total intensity incident on the surface (direct+diffuse), erg/s/nm/cm2
###ind 4: surface_intensity_diffuse_x: diffuse total intensity incident on the surface, erg/s/nm/cm2
###ind 5: surface_intensity_direct_x: direct total intensity incident on the surface, erg/s/nm/cm2
###############Set up info about files to extract
N_co2_base=2.09e24 #column density of CO2 in base case (Rugheimer+2015)
co2multiplelist=[0., 1.e-6,1.e-5, 1.e-4, 1.e-3, 0.00893, 1.e-2, 1.e-1, 0.6, 1., 1.33, 1.e1, 46.6, 1.e2, 470., 1.e3]
co2dict={}
isphysical=[False, False, False, False, False, True, False, False, True, True, True, False, True, False, True, False] #which of these models have a physically motivated column depth
###############Read in base Rugheimer abundance cases
wav_max_rugheimer, toa_intensity_max_rugheimer, surface_flux_max_rugheimer, surface_intensity_max_rugheimer, surface_intensity_diffuse_max_rugheimer, surface_intensity_direct_max_rugheimer=np.genfromtxt('./TwoStreamOutput/AlbZen/rugheimer_earth_epoch0_a=newsnow_z=0.dat', skip_header=1, skip_footer=0, usecols=(2,3,4,6,7,8), unpack=True)
wav_min_rugheimer, toa_intensity_min_rugheimer, surface_flux_min_rugheimer, surface_intensity_min_rugheimer, surface_intensity_diffuse_min_rugheimer, surface_intensity_direct_min_rugheimer=np.genfromtxt('./TwoStreamOutput/AlbZen/rugheimer_earth_epoch0_a=tundra_z=66.5.dat', skip_header=1, skip_footer=0, usecols=(2,3,4,6,7,8), unpack=True)
##############Figure comparing surface intensity under different levels of pCO2, and different values for A and Z
#Set up figure basics outside the loop.
fig1, (ax1, ax2)=plt.subplots(2, figsize=(cm2inch(16.5),10), sharex=True)
colorseq1=iter(cm.rainbow(np.linspace(0,1,len(co2multiplelist))))
colorseq2=iter(cm.rainbow(np.linspace(0,1,len(co2multiplelist))))
#Plot TOA intensities
ax1.plot(wav_max_rugheimer, toa_intensity_max_rugheimer*wav_min_rugheimer/hc, linestyle='-', color='black', label='TOA Flux')
ax2.plot(wav_min_rugheimer, toa_intensity_min_rugheimer*wav_min_rugheimer/hc, linestyle='-', color='black', label='TOA Flux')
#In a loop, load the intensities and plot
for ind in range(0, len(co2multiplelist)):
multiple=co2multiplelist[ind]
colden_co2=multiple*N_co2_base
if isphysical[ind]: #have the physically motivated models represented differently
linestylevar='-'
linewidthvar=1.
elif multiple==1.: #represent the base fiducial Rugheimer model with a different line
linestylevar='-'
linewidthvar=2.5
else: #the parametric exploration
linestylevar='--'
linewidthvar=1.
wav_max, toa_intensity_max, surface_flux_max, surface_intensity_max, surface_intensity_diffuse_max, surface_intensity_direct_max=np.genfromtxt('./TwoStreamOutput/CO2lim/surface_intensities_co2limits_co2multiple='+str(multiple)+'_a=newsnow_z=0.dat', skip_header=1, skip_footer=0, usecols=(2,3,4,6,7,8), unpack=True) #maximum intensity for given atmosphere
wav_min, toa_intensity_min, surface_flux_min, surface_intensity_min, surface_intensity_diffuse_min, surface_intensity_direct_min=np.genfromtxt('./TwoStreamOutput/CO2lim/surface_intensities_co2limits_co2multiple='+str(multiple)+'_a=tundra_z=66.5.dat', skip_header=1, skip_footer=0, usecols=(2,3,4,6,7,8), unpack=True) #minimum maximum intensity for given atmosphere
co2dict[str(multiple)]=surface_intensity_max
ax1.plot(wav_max,surface_intensity_max*wav_max/hc, linestyle=linestylevar, linewidth=linewidthvar, color=next(colorseq1), label=r'$N_{CO_{2}}=$'+'{:.2E}'.format(colden_co2)+' cm$^{-2}$')
ax2.plot(wav_min,surface_intensity_min*wav_min/hc, linestyle=linestylevar, linewidth=linewidthvar, color=next(colorseq2), label=r'$N_{CO_{2}}=$'+'{:.2E}'.format(colden_co2)+' cm$^{-2}$')
#print (co2dict[str(co2multiplelist[8])])/(co2dict[str(co2multiplelist[0])])
#pdb.set_trace()
#Set up fine detail on figure
ylimits=[1e7, 1e15]
ax1.set_title(r'z=0$^\circ$, A=Fresh Snow')
ax2.set_title(r'z=66.5$^\circ$, A=Tundra')
ax1.legend(bbox_to_anchor=[0, 1.1, 1., .5], loc=3, ncol=2, mode='expand', borderaxespad=0., fontsize=10)
ax1.set_yscale('log')
ax1.set_ylabel(r'photons/s/cm$^2$/nm')
ax1.set_ylim(ylimits)
ax2.set_yscale('log')
ax2.set_ylabel(r'photons/s/cm$^2$/nm')
ax2.set_ylim(ylimits)
ax2.set_xlim([100.,500.])
ax2.set_xlabel('nm')
plt.tight_layout(rect=(0,0,1,0.75))
plt.savefig('./Plots/paperplots_co2_radiance.eps', orientation='portrait',papertype='letter', format='eps')
if plot_altgas_limits:
###When importing files, variables are:
###ind 0: wav_x: centers of wavelength bins, nm
###ind 1: toa_intensity_x: top-of-atmosphere intensity (incident), erg/s/nm/cm2
###ind 2: surface_flux_x: total flux incident on surface, erg/s/nm/cm2
###ind 3: surface_intensity_x: total intensity incident on the surface (direct+diffuse), erg/s/nm/cm2
###ind 4: surface_intensity_diffuse_x: diffuse total intensity incident on the surface, erg/s/nm/cm2
###ind 5: surface_intensity_direct_x: direct total intensity incident on the surface, erg/s/nm/cm2
#####Set up info about the files to extract ##Maximum possible natural surface radiance case (z=0, albedo=fresh snow) aka "max"
N_tot=2.0925e25#total column density of Rugheimer+2015 model in cm**-2
gaslist=['h2o', 'ch4', 'so2', 'o2', 'o3', 'h2s'] #list of gases we are doing this for
gaslabellist=['H2O', 'CH4', 'SO2', 'O2', 'O3', 'H2S'] #list of nicely formated gas names for plotting
base_abundances=np.array([4.762e-3, 1.647e-6, 3.371e-11, 2.707e-6, 9.160e-11, 6.742e-11]) #molar concentration of each of these gases in the Rugheimer model.
#dict holding the multiples of the molar concentration we are using
gasmultiples={}
gasmultiples['h2o']=np.array([1.e-5, 1.e-4, 1.e-3, 1.e-2, 1.e-1, 1., 1.e1, 1.e2, 1.e3])
gasmultiples['ch4']=np.array([1.e-2, 1.e-1, 1., 1.e1, 1.e2, 1.e3])
gasmultiples['so2']=np.array([1., 1.e1, 1.e2, 1.e3, 1.e4, 1.e5, 1.e6, 1.e7])
gasmultiples['o2']=np.array([1.e-5, 1.e-4, 1.e-3, 1.e-2, 1.e-1, 1., 1.e1, 1.e2, 1.e3, 1.e4, 1.e5])
gasmultiples['o3']=np.array([1., 1.e1, 1.e2, 1.e3])
gasmultiples['h2s']=np.array([1., 1.e1, 1.e2, 1.e3, 1.e4, 1.e5, 1.e6, 1.e7])
#####In a loop, extract and plot the files
for gasind in range(0, len(gaslist)):
gas=gaslist[gasind]
base_abundance=base_abundances[gasind]
multiples=gasmultiples[gas]
gaslabel=gaslabellist[gasind]
#####Set up figure basics
fig, (ax1)=plt.subplots(1, figsize=(cm2inch(16.5),7), sharex=True)
colorseq=iter(cm.rainbow(np.linspace(0,1,len(multiples))))
for multind in range(0, len(multiples)):
multiple=multiples[multind]
colden_X=base_abundance*multiple*N_tot #total column density of gas X
datafile='./TwoStreamOutput/gaslim/surface_intensities_'+gas+'limits_'+gas+'multiple='+str(multiple)+'_a=newsnow_z=0.dat'
wav, toa_intensity, surface_flux, surface_intensity, surface_intensity_diffuse, surface_intensity_direct=np.genfromtxt(datafile, skip_header=1, skip_footer=0, usecols=(2,3,4,6,7,8), unpack=True)
if multind==0:
ax1.plot(wav,toa_intensity*wav/hc, linestyle='-', linewidth=1, color='black', label=r'TOA Flux')
if multiple==1.: #represent the base fiducial Rugheimer model with a different line
linestylevar='-'
linewidthvar=2.0
else: #the parametric exploration
linestylevar='--'
linewidthvar=1.
ax1.plot(wav,surface_intensity*wav/hc, linestyle=linestylevar,linewidth=linewidthvar, color=next(colorseq), label=r'$N_{'+gaslabel+'}=$'+'{:.2E}'.format(colden_X)+' cm$^{-2}$')
#####Finalize and save figure
ax1.set_title(r'Varying Levels of '+gaslabel+r', (z=0$^\circ$, A=Fresh Snow)')
ax1.legend(bbox_to_anchor=[0, 1.1, 1., .5], loc=3, ncol=2, mode='expand', borderaxespad=0., fontsize=10)
ax1.set_yscale('log')
ax1.set_ylabel(r'photons/s/cm$^2$/nm')
ax1.set_ylim([1.e7, 1.e15])
ax1.set_xlim([100.,500.])
ax1.set_xlabel('nm')
plt.tight_layout(rect=(0,0,1,0.75))
plt.savefig('./Plots/paperplots_'+gas+'_radiance.eps', orientation='portrait',papertype='letter', format='eps')
if plot_dosimeters_co2:
###########First, import dosimeters
SZAs, albedos, N_CO2s, rad100_165s, rad200_300s, umpgly_193s, umpgly_230s, umpgly_254s,cucn3_254s, cucn3_300s=np.genfromtxt('./Doses/co2_uv_doses.dat', skip_header=2, skip_footer=0, usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8, 9), unpack=True, delimiter=' ') #wavelength in nm, relative efficiency unitless
max_inds=np.arange(0, len(albedos)/2) #first half of data is max radiance case (SZA=0, A=fresh snow)
min_inds=np.arange(len(albedos)/2, len(albedos))#last half of the data is the minimum radiance case (SZA=66.5, A=tundra)
wordsworthind=5
kastingupperind=-4
###########Now, plot the dosimeters vs CO2 concentration
umpgly_193s_max_normed=umpgly_193s[max_inds]/umpgly_193s[max_inds[-7]]
umpgly_230s_max_normed=umpgly_230s[max_inds]/umpgly_230s[max_inds[-7]]
umpgly_254s_max_normed=umpgly_254s[max_inds]/umpgly_254s[max_inds[-7]]
cucn3_254s_max_normed=cucn3_254s[max_inds]/cucn3_254s[max_inds[-7]]
cucn3_300s_max_normed=cucn3_300s[max_inds]/cucn3_300s[max_inds[-7]]
#Normalize the min-radiance (SZA=66.5, tundra-albedo) dose rates to the value at the Rugheimer fiducial CO2 column (index -7 within min_inds).
umpgly_193s_min_normed=umpgly_193s[min_inds]/umpgly_193s[min_inds[-7]]
umpgly_230s_min_normed=umpgly_230s[min_inds]/umpgly_230s[min_inds[-7]]
umpgly_254s_min_normed=umpgly_254s[min_inds]/umpgly_254s[min_inds[-7]]
cucn3_254s_min_normed=cucn3_254s[min_inds]/cucn3_254s[min_inds[-7]]
cucn3_300s_min_normed=cucn3_300s[min_inds]/cucn3_300s[min_inds[-7]]
#Initialize plot basics
#2x2 grid: left column (ax1/ax2) covers the low-N_CO2 range with a linear y-axis, right column (ax3/ax4) the high-N_CO2 range with a log y-axis; top row is the max-radiance case, bottom row the min-radiance case.
fig=plt.figure(figsize=(cm2inch(16.5),7))
gs=gridspec.GridSpec(2,2, hspace=0.40,wspace=0.35, width_ratios=[2,1], top=.77, bottom=.1, left=.1, right=.95)
ax1=plt.subplot(gs[0])
ax2=plt.subplot(gs[2])
ax3=plt.subplot(gs[1])
ax4=plt.subplot(gs[3])
#One independent 5-color iterator per panel so each panel cycles through the same colors in the same order.
colorseq1=iter(cm.rainbow(np.linspace(0,1,5)))
colorseq2=iter(cm.rainbow(np.linspace(0,1,5)))
colorseq3=iter(cm.rainbow(np.linspace(0,1,5)))
colorseq4=iter(cm.rainbow(np.linspace(0,1,5)))
#Plot max case
ax1.plot(N_CO2s[max_inds], umpgly_193s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP Gly Bond Cleavage ($\lambda_0=193$)')
ax1.plot(N_CO2s[max_inds], umpgly_230s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP Gly Bond Cleavage ($\lambda_0=230$)')
ax1.plot(N_CO2s[max_inds], umpgly_254s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP Gly Bond Cleavage ($\lambda_0=254$)')
ax1.plot(N_CO2s[max_inds], cucn3_254s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'CuCN$_3$ Photoionization ($\lambda_0=254$)')
ax1.plot(N_CO2s[max_inds], cucn3_300s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'CuCN$_3$ Photoionization ($\lambda_0=300$)')
ax1.set_title('SZA=0, Albedo=New Snow')
ax1.axvline(N_CO2s[max_inds[-7]], color='black', linewidth=2) #Mark the Rugheimer fiducial value
ax1.axhline(1., color='black', linewidth=1) #Mark a relative dose rate of 1 (equal to the fiducial case)
ax1.axvline(N_CO2s[max_inds[wordsworthind]], color='black', linewidth=1, linestyle='--') #Wordsworth lower limit
#ax1.axvline(N_CO2s[max_inds[kastingupperind]], color='black', linewidth=1, linestyle='--') #Kasting upper limit
#Plot min case
ax2.set_title('SZA=66.5, Albedo=Tundra')
ax2.plot(N_CO2s[min_inds], umpgly_193s_min_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP Gly Bond Cleavage ($\lambda_0=193$)')
ax2.plot(N_CO2s[min_inds], umpgly_230s_min_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP Gly Bond Cleavage ($\lambda_0=230$)')
ax2.plot(N_CO2s[min_inds], umpgly_254s_min_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP Gly Bond Cleavage ($\lambda_0=254$)')
ax2.plot(N_CO2s[min_inds], cucn3_254s_min_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'CuCN$_3$ Photoionization ($\lambda_0=254$)')
ax2.plot(N_CO2s[min_inds], cucn3_300s_min_normed,linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'CuCN$_3$ Photoionization ($\lambda_0=300$)')
ax2.axvline(N_CO2s[min_inds[-7]], color='black', linewidth=2) #Mark the Rugheimer fiducial value
ax2.axhline(1., color='black', linewidth=1) #Mark a relative dose rate of 1 (equal to the fiducial case)
ax2.axvline(N_CO2s[min_inds[wordsworthind]], color='black', linewidth=1, linestyle='--') #Wordsworth lower limit
#ax2.axvline(N_CO2s[min_inds[kastingupperind]], color='black', linewidth=1, linestyle='--') #Kasting upper limit
#Plot max case (high-N_CO2 panel; same data as ax1 but the xlim below selects the high-column range)
ax3.plot(N_CO2s[max_inds], umpgly_193s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq3), label=r'UMP Gly Bond Cleavage ($\lambda_0=193$)')
ax3.plot(N_CO2s[max_inds], umpgly_230s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq3), label=r'UMP Gly Bond Cleavage ($\lambda_0=230$)')
ax3.plot(N_CO2s[max_inds], umpgly_254s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq3), label=r'UMP Gly Bond Cleavage ($\lambda_0=254$)')
ax3.plot(N_CO2s[max_inds], cucn3_254s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq3), label=r'CuCN$_3$ Photoionization ($\lambda_0=254$)')
ax3.plot(N_CO2s[max_inds], cucn3_300s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq3), label=r'CuCN$_3$ Photoionization ($\lambda_0=300$)')
ax3.axvline(N_CO2s[max_inds[-7]], color='black', linewidth=2) #Mark the Rugheimer fiducial value
ax3.axhline(1., color='black', linewidth=1) #Mark a relative dose rate of 1 (equal to the fiducial case)
ax3.axvline(N_CO2s[max_inds[wordsworthind]], color='black', linewidth=1, linestyle='--') #Wordsworth lower limit
#ax3.axvline(N_CO2s[max_inds[kastingupperind]], color='black', linewidth=1, linestyle='--') #Kasting upper limit
#Plot min case (high-N_CO2 panel; same data as ax2 but the xlim below selects the high-column range)
ax4.plot(N_CO2s[min_inds], umpgly_193s_min_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq4), label=r'UMP Gly Bond Cleavage ($\lambda_0=193$)')
ax4.plot(N_CO2s[min_inds], umpgly_230s_min_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq4), label=r'UMP Gly Bond Cleavage ($\lambda_0=230$)')
ax4.plot(N_CO2s[min_inds], umpgly_254s_min_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq4), label=r'UMP Gly Bond Cleavage ($\lambda_0=254$)')
ax4.plot(N_CO2s[min_inds], cucn3_254s_min_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq4), label=r'CuCN$_3$ Photoionization ($\lambda_0=254$)')
ax4.plot(N_CO2s[min_inds], cucn3_300s_min_normed,linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq4), label=r'CuCN$_3$ Photoionization ($\lambda_0=300$)')
ax4.axvline(N_CO2s[min_inds[-7]], color='black', linewidth=2) #Mark the Rugheimer fiducial value
ax4.axhline(1., color='black', linewidth=1)
ax4.axvline(N_CO2s[min_inds[wordsworthind]], color='black', linewidth=1, linestyle='--') #Wordsworth lower limit
#ax4.axvline(N_CO2s[min_inds[kastingupperind]], color='black', linewidth=1, linestyle='--') #Kasting upper limit
#print umpgly_193s_max_normed
#pdb.set_trace()
#Finalize plot details.
ax1.set_ylabel(r'Relative Dose Rate $D$')
ax2.set_ylabel(r'Relative Dose Rate $D$')
ax3.set_ylabel(r'Relative Dose Rate $D$')
ax4.set_ylabel(r'Relative Dose Rate $D$')
ax1.set_xlabel(r'N$_{CO2}$ (cm$^{-2}$)')
ax2.set_xlabel(r'N$_{CO2}$ (cm$^{-2}$)')
ax3.set_xlabel(r'N$_{CO2}$ (cm$^{-2}$)')
ax4.set_xlabel(r'N$_{CO2}$ (cm$^{-2}$)')
#Left panels: linear y over the low-column range; right panels: log y over the high-column range.
ax1.set_yscale('linear')
ax2.set_yscale('linear')
ax3.set_yscale('log')
ax4.set_yscale('log')
ax1.set_xscale('log')
ax2.set_xscale('log')
ax3.set_xscale('log')
ax4.set_xscale('log')
ax1.set_xlim([2.09e18, 2.09e24])
ax2.set_xlim([2.09e18, 2.09e24])
ax3.set_xlim([2.09e24, 9.85e26])
ax4.set_xlim([2.09e24, 9.85e26])
ax1.set_ylim([0.95,1.9])
ax2.set_ylim([0.95,1.6])
ax3.set_ylim([1.e-2, 1.e1])
ax4.set_ylim([1.e-2, 1.e1])
#Single shared legend above the grid (labels are identical across panels).
ax1.legend(bbox_to_anchor=[0, 1.2, 1.78, 0.5], loc=3, ncol=2, mode='expand', borderaxespad=0., fontsize=10)
plt.savefig('./Plots/paperplots_co2_uvdoses.eps', orientation='portrait',papertype='letter', format='eps')
#####Now, plot stressors normalized by eustressors. In this limit, values greater than 1 mean that the relative photoreaction balance has shifted toward the eustressor.
#Ratios are only meaningful within a single panel; they cannot be compared between panels.
#Initialize plot basics
#2x2 grid: left column (ax1/ax2) covers the low-N_CO2 range, right column (ax3/ax4) the high-N_CO2 range; top row is the max-radiance case, bottom row the min-radiance case.
fig2=plt.figure(figsize=(cm2inch(16.5),7))
gs=gridspec.GridSpec(2,2, hspace=0.40,wspace=0.35, width_ratios=[2,1], top=.77, bottom=.1, left=.1, right=.95)
ax1=plt.subplot(gs[0])
ax2=plt.subplot(gs[2])
ax3=plt.subplot(gs[1])
ax4=plt.subplot(gs[3])
#One independent 6-color iterator per panel (six UMP/CuCN3 ratio curves per panel).
colorseq1=iter(cm.rainbow(np.linspace(0,1,6)))
colorseq2=iter(cm.rainbow(np.linspace(0,1,6)))
colorseq3=iter(cm.rainbow(np.linspace(0,1,6)))
colorseq4=iter(cm.rainbow(np.linspace(0,1,6)))
#Plot max case
ax1.plot(N_CO2s[max_inds], umpgly_193s_max_normed/cucn3_254s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP-193/CuCN3-254')
ax1.plot(N_CO2s[max_inds], umpgly_230s_max_normed/cucn3_254s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP-230/CuCN3-254')
ax1.plot(N_CO2s[max_inds], umpgly_254s_max_normed/cucn3_254s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP-254/CuCN3-254')
ax1.plot(N_CO2s[max_inds], umpgly_193s_max_normed/cucn3_300s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP-193/CuCN3-300')
ax1.plot(N_CO2s[max_inds], umpgly_230s_max_normed/cucn3_300s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP-230/CuCN3-300')
ax1.plot(N_CO2s[max_inds], umpgly_254s_max_normed/cucn3_300s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP-254/CuCN3-300')
ax1.set_title('SZA=0, Albedo=New Snow')
ax1.axvline(N_CO2s[max_inds[-7]], color='black', linewidth=2) #Mark the Rugheimer fiducial value
ax1.axhline(1., color='black', linewidth=1) #Mark a ratio of 1 (balance unchanged from fiducial case)
ax1.axvline(N_CO2s[max_inds[wordsworthind]], color='black', linewidth=1, linestyle='--') #Wordsworth lower limit
ax1.axvline(N_CO2s[max_inds[kastingupperind]], color='black', linewidth=1, linestyle='--') #Kasting upper limit
#Plot min case
#NOTE: legend labels below were corrected -- the originals mislabeled the UMP-230 and UMP-254 ratio curves as 'UMP-193' (copy-paste error; compare the ax1 labels above).
ax2.set_title('SZA=66.5, Albedo=Tundra')
ax2.plot(N_CO2s[min_inds], umpgly_193s_min_normed/cucn3_254s_min_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP-193/CuCN3-254')
ax2.plot(N_CO2s[min_inds], umpgly_230s_min_normed/cucn3_254s_min_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP-230/CuCN3-254')
ax2.plot(N_CO2s[min_inds], umpgly_254s_min_normed/cucn3_254s_min_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP-254/CuCN3-254')
ax2.plot(N_CO2s[min_inds], umpgly_193s_min_normed/cucn3_300s_min_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP-193/CuCN3-300')
ax2.plot(N_CO2s[min_inds], umpgly_230s_min_normed/cucn3_300s_min_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP-230/CuCN3-300')
ax2.plot(N_CO2s[min_inds], umpgly_254s_min_normed/cucn3_300s_min_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP-254/CuCN3-300')
ax2.axvline(N_CO2s[min_inds[-7]], color='black', linewidth=2) #Mark the Rugheimer fiducial value
ax2.axhline(1., color='black', linewidth=1) #Mark a ratio of 1 (balance unchanged from fiducial case)
ax2.axvline(N_CO2s[min_inds[wordsworthind]], color='black', linewidth=1, linestyle='--') #Wordsworth lower limit
ax2.axvline(N_CO2s[min_inds[kastingupperind]], color='black', linewidth=1, linestyle='--') #Kasting upper limit
#Plot max case (high-N_CO2 panel; labels corrected as above)
ax3.plot(N_CO2s[max_inds], umpgly_193s_max_normed/cucn3_254s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq3), label=r'UMP-193/CuCN3-254')
ax3.plot(N_CO2s[max_inds], umpgly_230s_max_normed/cucn3_254s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq3), label=r'UMP-230/CuCN3-254')
ax3.plot(N_CO2s[max_inds], umpgly_254s_max_normed/cucn3_254s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq3), label=r'UMP-254/CuCN3-254')
ax3.plot(N_CO2s[max_inds], umpgly_193s_max_normed/cucn3_300s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq3), label=r'UMP-193/CuCN3-300')
ax3.plot(N_CO2s[max_inds], umpgly_230s_max_normed/cucn3_300s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq3), label=r'UMP-230/CuCN3-300')
ax3.plot(N_CO2s[max_inds], umpgly_254s_max_normed/cucn3_300s_max_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq3), label=r'UMP-254/CuCN3-300')
ax3.axvline(N_CO2s[max_inds[-7]], color='black', linewidth=2) #Mark the Rugheimer fiducial value
ax3.axhline(1., color='black', linewidth=1) #Mark a ratio of 1 (balance unchanged from fiducial case)
ax3.axvline(N_CO2s[max_inds[wordsworthind]], color='black', linewidth=1, linestyle='--') #Wordsworth lower limit
ax3.axvline(N_CO2s[max_inds[kastingupperind]], color='black', linewidth=1, linestyle='--') #Kasting upper limit
#Plot min case (high-N_CO2 panel; labels corrected as above)
ax4.plot(N_CO2s[min_inds], umpgly_193s_min_normed/cucn3_254s_min_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq4), label=r'UMP-193/CuCN3-254')
ax4.plot(N_CO2s[min_inds], umpgly_230s_min_normed/cucn3_254s_min_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq4), label=r'UMP-230/CuCN3-254')
ax4.plot(N_CO2s[min_inds], umpgly_254s_min_normed/cucn3_254s_min_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq4), label=r'UMP-254/CuCN3-254')
ax4.plot(N_CO2s[min_inds], umpgly_193s_min_normed/cucn3_300s_min_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq4), label=r'UMP-193/CuCN3-300')
ax4.plot(N_CO2s[min_inds], umpgly_230s_min_normed/cucn3_300s_min_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq4), label=r'UMP-230/CuCN3-300')
ax4.plot(N_CO2s[min_inds], umpgly_254s_min_normed/cucn3_300s_min_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq4), label=r'UMP-254/CuCN3-300')
ax4.axvline(N_CO2s[min_inds[-7]], color='black', linewidth=2) #Mark the Rugheimer fiducial value
ax4.axhline(1., color='black', linewidth=1) #Mark a ratio of 1 (balance unchanged from fiducial case)
ax4.axvline(N_CO2s[min_inds[wordsworthind]], color='black', linewidth=1, linestyle='--') #Wordsworth lower limit
ax4.axvline(N_CO2s[min_inds[kastingupperind]], color='black', linewidth=1, linestyle='--') #Kasting upper limit
#Finalize plot details.
ax1.set_ylabel(r'Relative Dose Rate $D$')
ax2.set_ylabel(r'Relative Dose Rate $D$')
ax3.set_ylabel(r'Relative Dose Rate $D$')
ax4.set_ylabel(r'Relative Dose Rate $D$')
ax1.set_xlabel(r'N$_{CO2}$ (cm$^{-2}$)')
ax2.set_xlabel(r'N$_{CO2}$ (cm$^{-2}$)')
ax3.set_xlabel(r'N$_{CO2}$ (cm$^{-2}$)')
ax4.set_xlabel(r'N$_{CO2}$ (cm$^{-2}$)')
ax1.set_yscale('linear')
ax2.set_yscale('linear')
ax3.set_yscale('linear')
ax4.set_yscale('linear')
ax1.set_xscale('log')
ax2.set_xscale('log')
ax3.set_xscale('log')
ax4.set_xscale('log')
ax1.set_xlim([2.09e18, 2.09e24])
ax2.set_xlim([2.09e18, 2.09e24])
ax3.set_xlim([2.09e24, 9.85e26])
ax4.set_xlim([2.09e24, 9.85e26])
ax1.set_ylim([0.90,1.8])
ax2.set_ylim([0.95,1.3])
ax3.set_ylim([0.5, 1.8])
ax4.set_ylim([0.8, 1.2])
#Single shared legend above the grid (labels are identical across panels).
ax1.legend(bbox_to_anchor=[0, 1.2, 1.78, 0.5], loc=3, ncol=2, mode='expand', borderaxespad=0., fontsize=10)
plt.savefig('./Plots/paperplots_co2_uvdoses_norm.eps', orientation='portrait',papertype='letter', format='eps')
if plot_dosimeters_ch4:
    ###########First, load the CO2 0.1 bar base case; all CH4 dose rates are expressed relative to it.
    #Columns: SZA, albedo, N_CO2, then the radiance and dose-rate dosimeters.
    SZAs, albedos, N_CO2s, rad100_165s, rad200_300s, umpgly_193s, umpgly_230s, umpgly_254s, cucn3_254s, cucn3_300s = np.genfromtxt('./Doses/co2_uv_doses.dat', skip_header=2, skip_footer=0, usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8, 9), unpack=True, delimiter=' ')
    basecaseind = 9  # 0.1 bar CO2 case (N_CO2=2.09e24 cm**-2), max-radiance conditions (A=new snow, SZA=0)
    umpgly_193s_base = umpgly_193s[basecaseind]
    umpgly_230s_base = umpgly_230s[basecaseind]
    umpgly_254s_base = umpgly_254s[basecaseind]
    cucn3_254s_base = cucn3_254s[basecaseind]
    cucn3_300s_base = cucn3_300s[basecaseind]
    ###########Next, set up information about the CH4 file.
    gas = 'ch4'
    gaslabel = 'CH4'
    minind = 3       # index of minimum plausible CH4 column density
    fiducialind = 5  # index of the fiducial Rugheimer CH4 column density
    maxind = 8       # index of maximum plausible CH4 column density
    ###########Next, load the CH4 dose file and normalize each dosimeter by its base-case value.
    N_gas, rad100_165s, rad200_300s, umpgly_193s, umpgly_230s, umpgly_254s, cucn3_254s, cucn3_300s = np.genfromtxt('./Doses/'+gas+'_uv_doses.dat', skip_header=2, skip_footer=0, usecols=(1, 2, 3, 4, 5, 6, 7, 8), unpack=True, delimiter=' & ')
    #Values <1: the reaction proceeds slower than in the base case. Values >1: faster.
    umpgly_193s_normed = umpgly_193s/umpgly_193s_base
    umpgly_230s_normed = umpgly_230s/umpgly_230s_base
    umpgly_254s_normed = umpgly_254s/umpgly_254s_base
    cucn3_254s_normed = cucn3_254s/cucn3_254s_base
    cucn3_300s_normed = cucn3_300s/cucn3_300s_base
    ###########Now, plot all five dosimeters against CH4 column density.
    fig, ax1 = plt.subplots(1, figsize=(cm2inch(16.5), 7))
    colorseq1 = iter(cm.rainbow(np.linspace(0, 1, 5)))
    #Draw the curves in a fixed order so each keeps its color from the iterator.
    dose_series = (umpgly_193s_normed, umpgly_230s_normed, umpgly_254s_normed, cucn3_254s_normed, cucn3_300s_normed)
    dose_labels = (r'UMP Gly Bond Cleavage ($\lambda_0=193$)',
                   r'UMP Gly Bond Cleavage ($\lambda_0=230$)',
                   r'UMP Gly Bond Cleavage ($\lambda_0=254$)',
                   r'CuCN$_3$ Photoionization ($\lambda_0=254$)',
                   r'CuCN$_3$ Photoionization ($\lambda_0=300$)')
    for dose_curve, dose_label in zip(dose_series, dose_labels):
        ax1.plot(N_gas, dose_curve, linestyle='--', markersize=4, marker='s', linewidth=1, color=next(colorseq1), label=dose_label)
    ax1.set_title(gaslabel+' (SZA=0, Albedo=New Snow)')
    ax1.axvline(N_gas[fiducialind], color='black', linewidth=2)  # mark the Rugheimer fiducial value
    ax1.axhline(1., color='black', linewidth=1)  # above this: faster rxn than under 0.1 bar CO2
    #Finalize plot details.
    ax1.set_ylabel(r'Relative Dose Rate $D/D_{NCO2=2.09e24 cm^{-2}}$')
    ax1.set_xlabel(r'N$_{'+gaslabel+'}$ (cm$^{-2}$)')
    ax1.set_yscale('linear')
    ax1.set_xscale('log')
    ax1.set_xlim([N_gas[minind], N_gas[maxind]])
    ax1.set_ylim([0.95, 1.8])
    plt.tight_layout(rect=(0, 0, 1., 0.7))
    ax1.legend(bbox_to_anchor=[0, 1.2, 1., 0.5], loc=3, ncol=2, mode='expand', borderaxespad=0., fontsize=10)
    plt.savefig('./Plots/paperplots_'+gas+'_uvdoses.eps', orientation='portrait', papertype='letter', format='eps')
if plot_dosimeters_h2o:
    ###########First, load the CO2 0.1 bar base case; all H2O dose rates are expressed relative to it.
    #Columns: SZA, albedo, N_CO2, then the radiance and dose-rate dosimeters.
    SZAs, albedos, N_CO2s, rad100_165s, rad200_300s, umpgly_193s, umpgly_230s, umpgly_254s, cucn3_254s, cucn3_300s = np.genfromtxt('./Doses/co2_uv_doses.dat', skip_header=2, skip_footer=0, usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8, 9), unpack=True, delimiter=' ')
    basecaseind = 9  # 0.1 bar CO2 case (N_CO2=2.09e24 cm**-2), max-radiance conditions (A=new snow, SZA=0)
    umpgly_193s_base = umpgly_193s[basecaseind]
    umpgly_230s_base = umpgly_230s[basecaseind]
    umpgly_254s_base = umpgly_254s[basecaseind]
    cucn3_254s_base = cucn3_254s[basecaseind]
    cucn3_300s_base = cucn3_300s[basecaseind]
    ###########Next, set up information about the H2O file.
    gas = 'h2o'
    gaslabel = 'H2O'
    minind = 0       # index of minimum plausible H2O column density
    fiducialind = 5  # index of the fiducial H2O vapor column density
    maxind = 8       # index of maximum plausible H2O column density
    ###########Next, load the H2O dose file and normalize each dosimeter by its base-case value.
    N_gas, rad100_165s, rad200_300s, umpgly_193s, umpgly_230s, umpgly_254s, cucn3_254s, cucn3_300s = np.genfromtxt('./Doses/'+gas+'_uv_doses.dat', skip_header=2, skip_footer=0, usecols=(1, 2, 3, 4, 5, 6, 7, 8), unpack=True, delimiter=' & ')
    #Values <1: the reaction proceeds slower than in the base case. Values >1: faster.
    umpgly_193s_normed = umpgly_193s/umpgly_193s_base
    umpgly_230s_normed = umpgly_230s/umpgly_230s_base
    umpgly_254s_normed = umpgly_254s/umpgly_254s_base
    cucn3_254s_normed = cucn3_254s/cucn3_254s_base
    cucn3_300s_normed = cucn3_300s/cucn3_300s_base
    ###########Now, plot all five dosimeters against H2O column density.
    fig, ax1 = plt.subplots(1, figsize=(cm2inch(16.5), 7))
    colorseq1 = iter(cm.rainbow(np.linspace(0, 1, 5)))
    #Draw the curves in a fixed order so each keeps its color from the iterator.
    for dose_curve, dose_label in [
            (umpgly_193s_normed, r'UMP Gly Bond Cleavage ($\lambda_0=193$)'),
            (umpgly_230s_normed, r'UMP Gly Bond Cleavage ($\lambda_0=230$)'),
            (umpgly_254s_normed, r'UMP Gly Bond Cleavage ($\lambda_0=254$)'),
            (cucn3_254s_normed, r'CuCN$_3$ Photoionization ($\lambda_0=254$)'),
            (cucn3_300s_normed, r'CuCN$_3$ Photoionization ($\lambda_0=300$)')]:
        ax1.plot(N_gas, dose_curve, linestyle='--', markersize=4, marker='s', linewidth=1, color=next(colorseq1), label=dose_label)
    ax1.set_title(gaslabel+' (SZA=0, Albedo=New Snow)')
    ax1.axvline(N_gas[fiducialind], color='black', linewidth=2)  # mark the fiducial value
    ax1.axhline(1., color='black', linewidth=1)  # above this: faster rxn than under 0.1 bar CO2
    #Finalize plot details.
    ax1.set_ylabel(r'Relative Dose Rate $D/D_{NCO2=2.09e24 cm^{-2}}$')
    ax1.set_xlabel(r'N$_{'+gaslabel+'}$ (cm$^{-2}$)')
    ax1.set_yscale('linear')
    ax1.set_xscale('log')
    ax1.set_xlim([N_gas[minind], N_gas[maxind]])
    ax1.set_ylim([0.7, 1.7])
    plt.tight_layout(rect=(0, 0, 1., 0.7))
    ax1.legend(bbox_to_anchor=[0, 1.2, 1., 0.5], loc=3, ncol=2, mode='expand', borderaxespad=0., fontsize=10)
    plt.savefig('./Plots/paperplots_'+gas+'_uvdoses.eps', orientation='portrait', papertype='letter', format='eps')
if plot_dosimeters_so2:
    ###########First, import the CO2 0.1 bar base case for comparison
    #Columns: SZA, albedo, N_CO2, then the radiance and dose-rate dosimeters.
    SZAs, albedos, N_CO2s, rad100_165s, rad200_300s, umpgly_193s, umpgly_230s, umpgly_254s,cucn3_254s, cucn3_300s=np.genfromtxt('./Doses/co2_uv_doses.dat', skip_header=2, skip_footer=0, usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8, 9), unpack=True, delimiter=' ')
    basecaseind=9 #index of 0.1 bar CO2 case (NCO2=2.09e24 cm**-2), max radiance (A=new snow, sza=0) case
    umpgly_193s_base=umpgly_193s[basecaseind]
    umpgly_230s_base=umpgly_230s[basecaseind]
    umpgly_254s_base=umpgly_254s[basecaseind]
    cucn3_254s_base=cucn3_254s[basecaseind]
    cucn3_300s_base=cucn3_300s[basecaseind]
    ###########Next, set up information about SO2 file
    gas='so2'
    gaslabel='SO2'
    minind=0 #index of minimum plausible SO2 concentration
    fiducialind=5 #index of the fiducial SO2 concentration
    maxind=12# index of maximum plausible SO2 concentration
    breakind=9 #index of where to break the plots
    ###########Next, import SO2, and normalize by the base case.
    N_gas, rad100_165s, rad200_300s, umpgly_193s, umpgly_230s, umpgly_254s,cucn3_254s, cucn3_300s=np.genfromtxt('./Doses/'+gas+'_uv_doses.dat', skip_header=2, skip_footer=0, usecols=(1, 2, 3, 4, 5, 6, 7, 8), unpack=True, delimiter=' & ')
    #values<1: the reaction proceeds slower than in the base case. Values>1: the reaction proceeds faster than in the base case.
    umpgly_193s_normed=umpgly_193s/umpgly_193s_base
    umpgly_230s_normed=umpgly_230s/umpgly_230s_base
    umpgly_254s_normed=umpgly_254s/umpgly_254s_base
    cucn3_254s_normed=cucn3_254s/cucn3_254s_base
    cucn3_300s_normed=cucn3_300s/cucn3_300s_base
    ###########Now, plot the dosimeters vs SO2 column density
    #Initialize plot basics
    #Broken-axis layout: ax1 covers columns below breakind with linear y, ax2 the remainder with log y; panel widths proportional to the index span each covers.
    fig2=plt.figure(figsize=(cm2inch(16.5),7))
    gs=gridspec.GridSpec(1,2, hspace=0.40,wspace=0.35, width_ratios=[breakind-minind,maxind-breakind], top=.70, bottom=.1, left=.1, right=.95)
    ax1=plt.subplot(gs[0])
    ax2=plt.subplot(gs[1])
    colorseq1=iter(cm.rainbow(np.linspace(0,1,5)))
    colorseq2=iter(cm.rainbow(np.linspace(0,1,5)))
    #Plot linear half
    ax1.plot(N_gas, umpgly_193s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP Gly Bond Cleavage ($\lambda_0=193$)')
    ax1.plot(N_gas, umpgly_230s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP Gly Bond Cleavage ($\lambda_0=230$)')
    ax1.plot(N_gas, umpgly_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP Gly Bond Cleavage ($\lambda_0=254$)')
    ax1.plot(N_gas, cucn3_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'CuCN$_3$ Photoionization ($\lambda_0=254$)')
    ax1.plot(N_gas, cucn3_300s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'CuCN$_3$ Photoionization ($\lambda_0=300$)')
    ax1.set_title(gaslabel+' (SZA=0, Albedo=New Snow)')
    ax1.axvline(N_gas[fiducialind], color='black', linewidth=2) #Mark the Rugheimer fiducial value
    ax1.axhline(1., color='black', linewidth=1) #Values above this: faster rxn than under 0.1 bar CO2
    #Plot log half
    ax2.plot(N_gas, umpgly_193s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP Gly Bond Cleavage ($\lambda_0=193$)')
    ax2.plot(N_gas, umpgly_230s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP Gly Bond Cleavage ($\lambda_0=230$)')
    ax2.plot(N_gas, umpgly_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP Gly Bond Cleavage ($\lambda_0=254$)')
    ax2.plot(N_gas, cucn3_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'CuCN$_3$ Photoionization ($\lambda_0=254$)')
    ax2.plot(N_gas, cucn3_300s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'CuCN$_3$ Photoionization ($\lambda_0=300$)')
    #ax2.set_title(gaslabel+' (SZA=0, Albedo=New Snow)')
    ax2.axvline(N_gas[fiducialind], color='black', linewidth=2) #Mark the Rugheimer fiducial value
    ax2.axhline(1., color='black', linewidth=1) #Values above this: faster rxn than under 0.1 bar CO2
    #Finalize plot details.
    ax1.set_ylabel(r'Relative Dose Rate $D/D_{NCO2=2.09e24 cm^{-2}}$')
    ax1.set_xlabel(r'N$_{'+gaslabel+'}$ (cm$^{-2}$)')
    ax1.set_yscale('linear')
    ax1.set_xscale('log')
    ax1.set_xlim([N_gas[minind],N_gas[breakind]])
    #ax1.set_ylim([0.4,1.8])
    ax2.set_ylabel(r'Relative Dose Rate $D/D_{NCO2=2.09e24 cm^{-2}}$')
    ax2.set_xlabel(r'N$_{'+gaslabel+'}$ (cm$^{-2}$)')
    ax2.set_yscale('log')
    ax2.set_xscale('log')
    ax2.set_xlim([N_gas[breakind],N_gas[maxind]])
    #ax2.set_ylim([0.4,1.8])
    #plt.tight_layout(rect=(0,0,1., 0.7))
    ax1.legend(bbox_to_anchor=[0, 1.2, 1.6, 0.5], loc=3, ncol=2, mode='expand', borderaxespad=0., fontsize=10)
    plt.savefig('./Plots/paperplots_'+gas+'_uvdoses.eps', orientation='portrait',papertype='letter', format='eps')
    ###########Now, plot the good/bad balance as a function of changing column density
    #Ratios >1 mean the photoreaction balance has shifted toward the eustressor (UMP cleavage) relative to the base case.
    #Initialize plot basics
    fig3=plt.figure(figsize=(cm2inch(16.5),7))
    gs=gridspec.GridSpec(1,2, hspace=0.40,wspace=0.35, width_ratios=[breakind-minind,maxind-breakind], top=.70, bottom=.1, left=.1, right=.95)
    ax1=plt.subplot(gs[0])
    ax2=plt.subplot(gs[1])
    colorseq1=iter(cm.rainbow(np.linspace(0,1,6)))
    colorseq2=iter(cm.rainbow(np.linspace(0,1,6)))
    #Plot linear half
    ax1.plot(N_gas, umpgly_193s_normed/cucn3_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP-193/CuCN3-254')
    ax1.plot(N_gas, umpgly_230s_normed/cucn3_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP-230/CuCN3-254')
    ax1.plot(N_gas, umpgly_254s_normed/cucn3_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP-254/CuCN3-254')
    ax1.plot(N_gas, umpgly_193s_normed/cucn3_300s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP-193/CuCN3-300')
    ax1.plot(N_gas, umpgly_230s_normed/cucn3_300s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP-230/CuCN3-300')
    ax1.plot(N_gas, umpgly_254s_normed/cucn3_300s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP-254/CuCN3-300')
    ax1.set_title(gaslabel+' (SZA=0, Albedo=New Snow)')
    ax1.axvline(N_gas[fiducialind], color='black', linewidth=2) #Mark the Rugheimer fiducial value
    ax1.axhline(1., color='black', linewidth=1) #Values above this: faster rxn than under 0.1 bar CO2
    #Plot log half
    ax2.plot(N_gas, umpgly_193s_normed/cucn3_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP-193/CuCN3-254')
    ax2.plot(N_gas, umpgly_230s_normed/cucn3_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP-230/CuCN3-254')
    ax2.plot(N_gas, umpgly_254s_normed/cucn3_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP-254/CuCN3-254')
    ax2.plot(N_gas, umpgly_193s_normed/cucn3_300s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP-193/CuCN3-300')
    ax2.plot(N_gas, umpgly_230s_normed/cucn3_300s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP-230/CuCN3-300')
    ax2.plot(N_gas, umpgly_254s_normed/cucn3_300s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP-254/CuCN3-300')
    ax2.axvline(N_gas[fiducialind], color='black', linewidth=2) #Mark the Rugheimer fiducial value
    ax2.axhline(1., color='black', linewidth=1) #Values above this: faster rxn than under 0.1 bar CO2
    #Finalize plot details.
    ax1.set_ylabel(r'Relative Dose Rate $D^{UMP-X}/D^{CuCN3-Y}$')
    ax1.set_xlabel(r'N$_{'+gaslabel+'}$ (cm$^{-2}$)')
    ax1.set_yscale('linear')
    ax1.set_xscale('log')
    ax1.set_xlim([N_gas[minind],N_gas[breakind]])
    #ax1.set_ylim([0.4,1.8])
    ax2.set_ylabel(r'Relative Dose Rate $D^{UMP-X}/D^{CuCN3-Y}$')
    ax2.set_xlabel(r'N$_{'+gaslabel+'}$ (cm$^{-2}$)')
    ax2.set_yscale('log')
    ax2.set_xscale('log')
    ax2.set_xlim([N_gas[breakind],N_gas[maxind]])
    #ax2.set_ylim([0.4,1.8])
    #plt.tight_layout(rect=(0,0,1., 0.7))
    ax1.legend(bbox_to_anchor=[0, 1.2, 1.6, 0.5], loc=3, ncol=2, mode='expand', borderaxespad=0., fontsize=10)
    plt.savefig('./Plots/paperplots_'+gas+'_uvdoses_balance.eps', orientation='portrait',papertype='letter', format='eps')
if plot_dosimeters_h2s:
###########First, import the CO2 0.1 bar base case for comparison
SZAs, albedos, N_CO2s, rad100_165s, rad200_300s, umpgly_193s, umpgly_230s, umpgly_254s,cucn3_254s, cucn3_300s=np.genfromtxt('./Doses/co2_uv_doses.dat', skip_header=2, skip_footer=0, usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8, 9), unpack=True, delimiter=' ') #wavelength in nm, relative efficiency unitless
basecaseind=9 #index of 0.1 bar CO2 case (NCO2=2.09e24 cm**-2), max radiance (A=new snow, sza=0) case
umpgly_193s_base=umpgly_193s[basecaseind]
umpgly_230s_base=umpgly_230s[basecaseind]
umpgly_254s_base=umpgly_254s[basecaseind]
cucn3_254s_base=cucn3_254s[basecaseind]
cucn3_300s_base=cucn3_300s[basecaseind]
###########Next, set up information about H2O file
gas='h2s'
gaslabel='H2S'
minind=0 #index of minimum plausible H2O concentration
fiducialind=5 #index of the fiducial H2O vapor concentration
maxind=12# index of maximum plausible H2O concentration
breakind=9 #index of where to break the plots
breakind2=8 #index of where to break the second set of plots
###########Next, import H2O, and normalize by the base case.
N_gas, rad100_165s, rad200_300s, umpgly_193s, umpgly_230s, umpgly_254s,cucn3_254s, cucn3_300s=np.genfromtxt('./Doses/'+gas+'_uv_doses.dat', skip_header=2, skip_footer=0, usecols=(1, 2, 3, 4, 5, 6, 7, 8), unpack=True, delimiter=' & ') #wavelength in nm, relative efficiency unitless
#values<1: the reaction proceeds slower than in the base case. Values>1: the reaction proceeds faster than in the base case.
umpgly_193s_normed=umpgly_193s/umpgly_193s_base
umpgly_230s_normed=umpgly_230s/umpgly_230s_base
umpgly_254s_normed=umpgly_254s/umpgly_254s_base
cucn3_254s_normed=cucn3_254s/cucn3_254s_base
cucn3_300s_normed=cucn3_300s/cucn3_300s_base
###########Now, plot the dosimeters vs CO2 concentration
#Initialize plot basics
fig2=plt.figure(figsize=(cm2inch(16.5),7))
gs=gridspec.GridSpec(1,2, hspace=0.40,wspace=0.35, width_ratios=[breakind-minind,maxind-breakind], top=.70, bottom=.1, left=.1, right=.95)
ax1=plt.subplot(gs[0])
ax2=plt.subplot(gs[1])
colorseq1=iter(cm.rainbow(np.linspace(0,1,5)))
colorseq2=iter(cm.rainbow(np.linspace(0,1,5)))
#Plot linear half
ax1.plot(N_gas, umpgly_193s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP Gly Bond Cleavage ($\lambda_0=193$)')
ax1.plot(N_gas, umpgly_230s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP Gly Bond Cleavage ($\lambda_0=230$)')
ax1.plot(N_gas, umpgly_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP Gly Bond Cleavage ($\lambda_0=254$)')
ax1.plot(N_gas, cucn3_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'CuCN$_3$ Photoionization ($\lambda_0=254$)')
ax1.plot(N_gas, cucn3_300s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'CuCN$_3$ Photoionization ($\lambda_0=300$)')
ax1.set_title(gaslabel+' (SZA=0, Albedo=New Snow)')
ax1.axvline(N_gas[fiducialind], color='black', linewidth=2) #Mark the Rugheimer fiducial value
ax1.axhline(1., color='black', linewidth=1) #Values above this: faster rxn than under 0.1 bar CO2
#Plot log half
ax2.plot(N_gas, umpgly_193s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP Gly Bond Cleavage ($\lambda_0=193$)')
ax2.plot(N_gas, umpgly_230s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP Gly Bond Cleavage ($\lambda_0=230$)')
ax2.plot(N_gas, umpgly_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP Gly Bond Cleavage ($\lambda_0=254$)')
ax2.plot(N_gas, cucn3_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'CuCN$_3$ Photoionization ($\lambda_0=254$)')
ax2.plot(N_gas, cucn3_300s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'CuCN$_3$ Photoionization ($\lambda_0=300$)')
#ax2.set_title(gaslabel+' (SZA=0, Albedo=New Snow)')
ax2.axvline(N_gas[fiducialind], color='black', linewidth=2) #Mark the Rugheimer fiducial value
ax2.axhline(1., color='black', linewidth=1) #Values above this: faster rxn than under 0.1 bar CO2
#Finalize plot details.
ax1.set_ylabel(r'Relative Dose Rate $D/D_{NCO2=2.09e24 cm^{-2}}$')
ax1.set_xlabel(r'N$_{'+gaslabel+'}$ (cm$^{-2}$)')
ax1.set_yscale('linear')
ax1.set_xscale('log')
ax1.set_xlim([N_gas[minind],N_gas[breakind]])
#ax1.set_ylim([0.4,1.8])
ax2.set_ylabel(r'Relative Dose Rate $D/D_{NCO2=2.09e24 cm^{-2}}$')
ax2.set_xlabel(r'N$_{'+gaslabel+'}$ (cm$^{-2}$)')
ax2.set_yscale('log')
ax2.set_xscale('log')
ax2.set_xlim([N_gas[breakind],N_gas[maxind]])
#ax2.set_ylim([0.4,1.8])
#plt.tight_layout(rect=(0,0,1., 0.7))
ax1.legend(bbox_to_anchor=[0, 1.2, 1.6, 0.5], loc=3, ncol=2, mode='expand', borderaxespad=0., fontsize=10)
plt.savefig('./Plots/paperplots_'+gas+'_uvdoses.eps', orientation='portrait',papertype='letter', format='eps')
###########Now, plot the good/bad balance as a function of changing column density
#Initialize plot basics
fig3=plt.figure(figsize=(cm2inch(16.5),7))
gs=gridspec.GridSpec(1,2, hspace=0.40,wspace=0.35, width_ratios=[breakind2-minind,maxind-breakind2], top=.70, bottom=.1, left=.1, right=.95)
ax1=plt.subplot(gs[0])
ax2=plt.subplot(gs[1])
colorseq1=iter(cm.rainbow(np.linspace(0,1,6)))
colorseq2=iter(cm.rainbow(np.linspace(0,1,6)))
#Plot linear half
ax1.plot(N_gas, umpgly_193s_normed/cucn3_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP-193/CuCN3-254')
ax1.plot(N_gas, umpgly_230s_normed/cucn3_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP-230/CuCN3-254')
ax1.plot(N_gas, umpgly_254s_normed/cucn3_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP-254/CuCN3-254')
ax1.plot(N_gas, umpgly_193s_normed/cucn3_300s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP-193/CuCN3-300')
ax1.plot(N_gas, umpgly_230s_normed/cucn3_300s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP-230/CuCN3-300')
ax1.plot(N_gas, umpgly_254s_normed/cucn3_300s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP-254/CuCN3-300')
ax1.set_title(gaslabel+' (SZA=0, Albedo=New Snow)')
ax1.axvline(N_gas[fiducialind], color='black', linewidth=2) #Mark the Rugheimer fiducial value
ax1.axhline(1., color='black', linewidth=1) #Values above this: faster rxn than under 0.1 bar CO2
#Plot log half
ax2.plot(N_gas, umpgly_193s_normed/cucn3_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP-193/CuCN3-254')
ax2.plot(N_gas, umpgly_230s_normed/cucn3_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP-230/CuCN3-254')
ax2.plot(N_gas, umpgly_254s_normed/cucn3_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP-254/CuCN3-254')
ax2.plot(N_gas, umpgly_193s_normed/cucn3_300s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP-193/CuCN3-300')
ax2.plot(N_gas, umpgly_230s_normed/cucn3_300s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP-230/CuCN3-300')
ax2.plot(N_gas, umpgly_254s_normed/cucn3_300s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq2), label=r'UMP-254/CuCN3-300')
ax2.axvline(N_gas[fiducialind], color='black', linewidth=2) #Mark the Rugheimer fiducial value
ax2.axhline(1., color='black', linewidth=1) #Values above this: faster rxn than under 0.1 bar CO2
#Finalize plot details.
ax1.set_ylabel(r'Relative Dose Rate $D^{UMP-X}/D^{CuCN3-Y}$')
ax1.set_xlabel(r'N$_{'+gaslabel+'}$ (cm$^{-2}$)')
ax1.set_yscale('linear')
ax1.set_xscale('log')
ax1.set_xlim([N_gas[minind],N_gas[breakind2]])
ax1.set_ylim([0.,4])
ax2.set_ylabel(r'Relative Dose Rate $D^{UMP-X}/D^{CuCN3-Y}$')
ax2.set_xlabel(r'N$_{'+gaslabel+'}$ (cm$^{-2}$)')
ax2.set_yscale('log')
ax2.set_xscale('log')
ax2.set_xlim([N_gas[breakind2],N_gas[maxind]])
#ax2.set_ylim([0.4,1.8])
#plt.tight_layout(rect=(0,0,1., 0.7))
ax1.legend(bbox_to_anchor=[0, 1.2, 1.7, 0.5], loc=3, ncol=2, mode='expand', borderaxespad=0., fontsize=10)
plt.savefig('./Plots/paperplots_'+gas+'_uvdoses_balance.eps', orientation='portrait',papertype='letter', format='eps')
if plot_dosimeters_o2:
    ###########First, import the CO2 0.1 bar base case for comparison
    # Columns: SZA, albedo, N_CO2, then the radiance and dosimeter dose rates.
    SZAs, albedos, N_CO2s, rad100_165s, rad200_300s, umpgly_193s, umpgly_230s, umpgly_254s,cucn3_254s, cucn3_300s=np.genfromtxt('./Doses/co2_uv_doses.dat', skip_header=2, skip_footer=0, usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8, 9), unpack=True, delimiter=' ')
    basecaseind=9 #index of 0.1 bar CO2 case (NCO2=2.09e24 cm**-2), max radiance (A=new snow, sza=0) case
    umpgly_193s_base=umpgly_193s[basecaseind]
    umpgly_230s_base=umpgly_230s[basecaseind]
    umpgly_254s_base=umpgly_254s[basecaseind]
    cucn3_254s_base=cucn3_254s[basecaseind]
    cucn3_300s_base=cucn3_300s[basecaseind]
    ###########Next, set up information about the O2 file
    gas='o2'
    gaslabel='O2'
    minind=0 #index of minimum plausible O2 column density
    fiducialind=5 #index of the fiducial O2 column density
    maxind=10 #index of maximum plausible O2 column density
    ###########Next, import the O2 doses, and normalize by the base case.
    # NOTE(review): this per-gas file is ' & '-delimited, unlike the
    # space-delimited CO2 base-case file above.
    N_gas, rad100_165s, rad200_300s, umpgly_193s, umpgly_230s, umpgly_254s,cucn3_254s, cucn3_300s=np.genfromtxt('./Doses/'+gas+'_uv_doses.dat', skip_header=2, skip_footer=0, usecols=(1, 2, 3, 4, 5, 6, 7, 8), unpack=True, delimiter=' & ')
    #values<1: the reaction proceeds slower than in the base case. Values>1: the reaction proceeds faster than in the base case.
    umpgly_193s_normed=umpgly_193s/umpgly_193s_base
    umpgly_230s_normed=umpgly_230s/umpgly_230s_base
    umpgly_254s_normed=umpgly_254s/umpgly_254s_base
    cucn3_254s_normed=cucn3_254s/cucn3_254s_base
    cucn3_300s_normed=cucn3_300s/cucn3_300s_base
    ###########Now, plot the dosimeters vs O2 column density
    #Initialize plot basics
    fig, ax1=plt.subplots(1, figsize=(cm2inch(16.5),7))
    colorseq1=iter(cm.rainbow(np.linspace(0,1,5))) #one color per dosimeter curve
    #Plot each dosimeter dose rate, normalized to the 0.1-bar-CO2 base case
    ax1.plot(N_gas, umpgly_193s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP Gly Bond Cleavage ($\lambda_0=193$)')
    ax1.plot(N_gas, umpgly_230s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP Gly Bond Cleavage ($\lambda_0=230$)')
    ax1.plot(N_gas, umpgly_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP Gly Bond Cleavage ($\lambda_0=254$)')
    ax1.plot(N_gas, cucn3_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'CuCN$_3$ Photoionization ($\lambda_0=254$)')
    ax1.plot(N_gas, cucn3_300s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'CuCN$_3$ Photoionization ($\lambda_0=300$)')
    ax1.set_title(gaslabel+' (SZA=0, Albedo=New Snow)')
    ax1.axvline(N_gas[fiducialind], color='black', linewidth=2) #Mark the Rugheimer fiducial value
    ax1.axhline(1., color='black', linewidth=1) #Values above this: faster rxn than under 0.1 bar CO2
    #Finalize plot details.
    ax1.set_ylabel(r'Relative Dose Rate $D/D_{NCO2=2.09e24 cm^{-2}}$')
    ax1.set_xlabel(r'N$_{'+gaslabel+'}$ (cm$^{-2}$)')
    ax1.set_yscale('linear')
    ax1.set_xscale('log')
    ax1.set_xlim([N_gas[minind],N_gas[maxind]])
    ax1.set_ylim([0.,1.8])
    plt.tight_layout(rect=(0,0,1., 0.80))
    ax1.legend(bbox_to_anchor=[0, 1.1, 1., 0.5], loc=3, ncol=2, mode='expand', borderaxespad=0., fontsize=10)
    # NOTE(review): the papertype kwarg was removed in matplotlib >= 3.5;
    # drop it if the environment is upgraded.
    plt.savefig('./Plots/paperplots_'+gas+'_uvdoses.eps', orientation='portrait',papertype='letter', format='eps')
if plot_dosimeters_o3:
    ###########First, import the CO2 0.1 bar base case for comparison
    # Columns: SZA, albedo, N_CO2, then the radiance and dosimeter dose rates.
    SZAs, albedos, N_CO2s, rad100_165s, rad200_300s, umpgly_193s, umpgly_230s, umpgly_254s,cucn3_254s, cucn3_300s=np.genfromtxt('./Doses/co2_uv_doses.dat', skip_header=2, skip_footer=0, usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8, 9), unpack=True, delimiter=' ')
    basecaseind=9 #index of 0.1 bar CO2 case (NCO2=2.09e24 cm**-2), max radiance (A=new snow, sza=0) case
    umpgly_193s_base=umpgly_193s[basecaseind]
    umpgly_230s_base=umpgly_230s[basecaseind]
    umpgly_254s_base=umpgly_254s[basecaseind]
    cucn3_254s_base=cucn3_254s[basecaseind]
    cucn3_300s_base=cucn3_300s[basecaseind]
    ###########Next, set up information about the O3 file
    gas='o3'
    gaslabel='O3'
    minind=0 #index of minimum plausible O3 column density
    fiducialind=5 #index of the fiducial O3 column density
    maxind=8 #index of maximum plausible O3 column density
    ###########Next, import the O3 doses, and normalize by the base case.
    # NOTE(review): this per-gas file is ' & '-delimited, unlike the
    # space-delimited CO2 base-case file above.
    N_gas, rad100_165s, rad200_300s, umpgly_193s, umpgly_230s, umpgly_254s,cucn3_254s, cucn3_300s=np.genfromtxt('./Doses/'+gas+'_uv_doses.dat', skip_header=2, skip_footer=0, usecols=(1, 2, 3, 4, 5, 6, 7, 8), unpack=True, delimiter=' & ')
    #values<1: the reaction proceeds slower than in the base case. Values>1: the reaction proceeds faster than in the base case.
    umpgly_193s_normed=umpgly_193s/umpgly_193s_base
    umpgly_230s_normed=umpgly_230s/umpgly_230s_base
    umpgly_254s_normed=umpgly_254s/umpgly_254s_base
    cucn3_254s_normed=cucn3_254s/cucn3_254s_base
    cucn3_300s_normed=cucn3_300s/cucn3_300s_base
    ###########Now, plot the dosimeters vs O3 column density
    #Initialize plot basics
    fig, ax1=plt.subplots(1, figsize=(cm2inch(16.5),7))
    colorseq1=iter(cm.rainbow(np.linspace(0,1,5))) #one color per dosimeter curve
    #Plot each dosimeter dose rate, normalized to the 0.1-bar-CO2 base case
    ax1.plot(N_gas, umpgly_193s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP Gly Bond Cleavage ($\lambda_0=193$)')
    ax1.plot(N_gas, umpgly_230s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP Gly Bond Cleavage ($\lambda_0=230$)')
    ax1.plot(N_gas, umpgly_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'UMP Gly Bond Cleavage ($\lambda_0=254$)')
    ax1.plot(N_gas, cucn3_254s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'CuCN$_3$ Photoionization ($\lambda_0=254$)')
    ax1.plot(N_gas, cucn3_300s_normed, linestyle='--',markersize=4, marker='s',linewidth=1, color=next(colorseq1), label=r'CuCN$_3$ Photoionization ($\lambda_0=300$)')
    ax1.set_title(gaslabel+' (SZA=0, Albedo=New Snow)')
    ax1.axvline(N_gas[fiducialind], color='black', linewidth=2) #Mark the Rugheimer fiducial value
    ax1.axhline(1., color='black', linewidth=1) #Values above this: faster rxn than under 0.1 bar CO2
    #Finalize plot details. O3 doses span orders of magnitude, hence log y-axis.
    ax1.set_ylabel(r'Relative Dose Rate $D/D_{NCO2=2.09e24 cm^{-2}}$')
    ax1.set_xlabel(r'N$_{'+gaslabel+'}$ (cm$^{-2}$)')
    ax1.set_yscale('log')
    ax1.set_xscale('log')
    ax1.set_xlim([N_gas[minind],N_gas[maxind]])
    ax1.set_ylim([1.8e-3,1.8])
    plt.tight_layout(rect=(0,0,1., 0.80))
    ax1.legend(bbox_to_anchor=[0, 1.1, 1., 0.5], loc=3, ncol=2, mode='expand', borderaxespad=0., fontsize=9.5)
    # NOTE(review): the papertype kwarg was removed in matplotlib >= 3.5;
    # drop it if the environment is upgraded.
    plt.savefig('./Plots/paperplots_'+gas+'_uvdoses.eps', orientation='portrait',papertype='letter', format='eps')
#Display all figures generated above.
plt.show()
| 64.734295
| 366
| 0.739583
| 10,315
| 62,857
| 4.331653
| 0.048861
| 0.032452
| 0.036593
| 0.062846
| 0.881292
| 0.864864
| 0.857255
| 0.837202
| 0.828025
| 0.813433
| 0
| 0.082857
| 0.086816
| 62,857
| 971
| 367
| 64.734295
| 0.695557
| 0.200375
| 0
| 0.639571
| 0
| 0
| 0.159279
| 0.019182
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001534
| false
| 0
| 0.007669
| 0.001534
| 0.010736
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
53530ae02a6b81d9b9d3e7db4ea585c7c6f61b8e
| 203
|
py
|
Python
|
mysite/form.py
|
andriy47/TFG
|
6574a441513356c11b30a86eb381ae4e5f91831d
|
[
"MIT"
] | null | null | null |
mysite/form.py
|
andriy47/TFG
|
6574a441513356c11b30a86eb381ae4e5f91831d
|
[
"MIT"
] | null | null | null |
mysite/form.py
|
andriy47/TFG
|
6574a441513356c11b30a86eb381ae4e5f91831d
|
[
"MIT"
] | null | null | null |
# from django import forms
#
# class PostForm(forms.Form):
# content = forms.CharField(max_length=256)
from django import forms
class InputNumeroForm(forms.Form):
    """Form that accepts a single integer input named ``numero``."""

    # IntegerField validates that the submitted value parses as an int.
    numero = forms.IntegerField()
| 20.3
| 47
| 0.738916
| 25
| 203
| 5.96
| 0.6
| 0.134228
| 0.214765
| 0.281879
| 0.348993
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017544
| 0.157635
| 203
| 9
| 48
| 22.555556
| 0.853801
| 0.482759
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7269c9ded58475f465028d2ae657b53631eb05a4
| 103
|
py
|
Python
|
functions/default.py
|
Lynxtickler/rest-api-terraform
|
d8b8125de80228a4bee2d8ad4ff7593b77eff121
|
[
"MIT"
] | null | null | null |
functions/default.py
|
Lynxtickler/rest-api-terraform
|
d8b8125de80228a4bee2d8ad4ff7593b77eff121
|
[
"MIT"
] | 4
|
2021-10-16T06:37:28.000Z
|
2022-01-05T19:49:44.000Z
|
functions/default.py
|
Lynxtickler/rest-api-terraform
|
d8b8125de80228a4bee2d8ad4ff7593b77eff121
|
[
"MIT"
] | 1
|
2021-11-22T14:24:17.000Z
|
2021-11-22T14:24:17.000Z
|
import shared
def lambda_handler(event, context):
    """Default AWS Lambda handler: always return a 404 'Bad request.' error.

    Both the standard Lambda ``event`` and ``context`` arguments are ignored;
    the response payload is built by the shared error helper.
    """
    return shared.create_error(404, 'Bad request.')
| 20.6
| 51
| 0.757282
| 14
| 103
| 5.428571
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033708
| 0.135922
| 103
| 4
| 52
| 25.75
| 0.820225
| 0
| 0
| 0
| 0
| 0
| 0.116505
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
727b847510fd6f93d056ba70cdf58cbe3e3a98ab
| 7,501
|
py
|
Python
|
tests/seleniumwire/proxy/test_modifier.py
|
nck/selenium-wire
|
be2cca0dc556ebf84daac84a3a0315378f871f48
|
[
"MIT"
] | null | null | null |
tests/seleniumwire/proxy/test_modifier.py
|
nck/selenium-wire
|
be2cca0dc556ebf84daac84a3a0315378f871f48
|
[
"MIT"
] | null | null | null |
tests/seleniumwire/proxy/test_modifier.py
|
nck/selenium-wire
|
be2cca0dc556ebf84daac84a3a0315378f871f48
|
[
"MIT"
] | 1
|
2020-05-23T15:34:49.000Z
|
2020-05-23T15:34:49.000Z
|
from unittest import TestCase
from unittest.mock import Mock
from seleniumwire.proxy.modifier import RequestModifier
class RequestModifierTest(TestCase):
    """Unit tests for RequestModifier.

    Covers the two capabilities exercised by the visible assertions:
    header overrides (plain dict, per-URL-pattern list, case-insensitive
    matching, add/remove/clear) and URL rewrite rules (regex substitution,
    first-match-wins, Host header synchronisation).
    """

    def setUp(self):
        self.modifier = RequestModifier()

    def test_override_header(self):
        self.modifier.headers = {
            'User-Agent': 'Test_User_Agent_String'
        }
        mock_request = self._create_mock_request()

        self.modifier.modify(mock_request)

        self.assertEqual(
            mock_request.headers['User-Agent'], 'Test_User_Agent_String')

    def test_override_header_with_single_url_matching(self):
        self.modifier.headers = [
            (".*prod1.server.com.*", {'User-Agent': 'Test_User_Agent_String'})]
        mock_request = self._create_mock_request()

        self.modifier.modify(mock_request)

        self.assertEqual(
            mock_request.headers['User-Agent'], 'Test_User_Agent_String')

    def test_override_multiple_headers_with_single_url_matching(self):
        self.modifier.headers = [
            (".*prod1.server.com.*", {'User-Agent': 'Test_User_Agent_String',
                                      'New-Header': 'HeaderValue'})]
        mock_request = self._create_mock_request()

        self.modifier.modify(mock_request)

        self.assertEqual(
            mock_request.headers['User-Agent'], 'Test_User_Agent_String')
        self.assertEqual(
            mock_request.headers['New-Header'], 'HeaderValue')

    def test_override_headers_with_multiple_url_matching(self):
        self.modifier.headers = [
            (".*prod1.server.com.*", {'User-Agent': 'Test_User_Agent_String',
                                      'New-Header': 'HeaderValue'}),
            (".*prod2.server.com.*", {'User-Agent2': 'Test_User_Agent_String2',
                                      'New-Header2': 'HeaderValue'})]

        # Only the rule whose pattern matches the request URL may apply.
        path = "https://prod1.server.com/some/path/12345"
        mock_request = self._create_mock_request(path)
        self.modifier.modify(mock_request)
        self.assertEqual(
            mock_request.headers['User-Agent'], 'Test_User_Agent_String')
        self.assertEqual(mock_request.path, path)
        self.assertFalse('User-Agent2' in mock_request.headers
                         or 'New-Header2' in mock_request.headers)

        path = "https://prod2.server.com/some/path/12345"
        mock_request = self._create_mock_request(path)
        self.modifier.modify(mock_request)
        self.assertEqual(
            mock_request.headers['New-Header2'], 'HeaderValue')
        self.assertEqual(mock_request.path, path)
        self.assertFalse('New-Header' in mock_request.headers)

    def test_not_override_header_withurl_matching(self):
        # Pattern does not match the request URL, so the original
        # User-Agent must be left untouched.
        self.modifier.headers = [
            (".*prod.server.com.*", {'User-Agent': 'Test_User_Agent_String'})]
        mock_request = self._create_mock_request()

        self.modifier.modify(mock_request)

        self.assertEqual(
            mock_request.headers['User-Agent'],
            'Mozilla/5.0 (X11; Linux x86_64; rv:10.0) Gecko/20100101 '
            'Firefox/10.0')

    def test_override_header_case_insensitive(self):
        self.modifier.headers = {
            'user-agent': 'Test_User_Agent_String'
        }
        mock_request = self._create_mock_request()

        self.modifier.modify(mock_request)

        self.assertEqual(
            mock_request.headers['User-Agent'], 'Test_User_Agent_String')

    def test_add_new_header(self):
        self.modifier.headers = {
            'New-Header': 'Some-Value'
        }
        mock_request = self._create_mock_request()

        self.modifier.modify(mock_request)

        self.assertEqual(mock_request.headers['New-Header'], 'Some-Value')

    def test_filter_out_header(self):
        # A None override value removes the header from the request.
        self.modifier.headers = {
            'User-Agent': None
        }
        mock_request = self._create_mock_request()

        self.modifier.modify(mock_request)

        self.assertNotIn('User-Agent', mock_request.headers)

    def test_filter_out_non_existent_header(self):
        self.modifier.headers = {
            'Host': None  # Does not exist in the request
        }
        mock_request = self._create_mock_request()

        self.modifier.modify(mock_request)

        self.assertNotIn('Host', mock_request.headers)

    def test_clear_header_overrides(self):
        self.modifier.headers = {
            'User-Agent': 'Test_User_Agent_String'
        }
        mock_request = self._create_mock_request()

        # Deleting the attribute clears all overrides before modify() runs.
        del self.modifier.headers
        self.modifier.modify(mock_request)

        self.assertEqual(mock_request.headers['User-Agent'],
                         'Mozilla/5.0 (X11; Linux x86_64; rv:10.0) Gecko/'
                         '20100101 Firefox/10.0')

    def test_get_header_overrides(self):
        self.modifier.headers = {
            'User-Agent': 'Test_User_Agent_String'
        }

        self.assertEqual(self.modifier.headers, {
            'User-Agent': 'Test_User_Agent_String'
        })

    def test_rewrite_url(self):
        self.modifier.rewrite_rules = [
            (r'(https?://)prod1.server.com(.*)', r'\1prod2.server.com\2/foo/'),
        ]
        mock_request = self._create_mock_request()

        self.modifier.modify(mock_request)

        self.assertEqual(mock_request.path,
                         'https://prod2.server.com/some/path/12345/foo/')

    def test_rewrite_url_first_match(self):
        # When several rules match, only the first one is applied.
        self.modifier.rewrite_rules = [
            (r'(https?://)prod1.server.com(.*)', r'\1prod2.server.com\2/foo/'),
            (r'(https?://)prod1.server.com(.*)', r'\1prod2.server.com\2/bar/'),
        ]
        mock_request = self._create_mock_request()

        self.modifier.modify(mock_request)

        self.assertEqual(mock_request.path,
                         'https://prod2.server.com/some/path/12345/foo/')

    def test_does_not_rewrite_url(self):
        self.modifier.rewrite_rules = [
            (r'(https?://)prod1.server.com(.*)', r'\1prod2.server.com\2/foo/'),
        ]
        mock_request = self._create_mock_request()
        mock_request.path = 'https://prod3.server.com/some/path/12345'

        self.modifier.modify(mock_request)

        self.assertEqual(mock_request.path,
                         'https://prod3.server.com/some/path/12345')

    def test_rewrite_url_updates_host_header(self):
        self.modifier.rewrite_rules = [
            (r'(https?://)prod1.server.com(.*)', r'\1prod2.server.com\2/foo/'),
        ]
        mock_request = self._create_mock_request()
        mock_request.headers['Host'] = 'prod1.server.com'

        self.modifier.modify(mock_request)

        self.assertEqual(mock_request.headers['Host'], 'prod2.server.com')

    def test_rewrite_url_does_not_update_host_header(self):
        """Should not update the Host header if it does not already exist."""
        self.modifier.rewrite_rules = [
            (r'(https?://)prod1.server.com(.*)', r'\1prod2.server.com\2/foo/'),
        ]
        mock_request = self._create_mock_request()

        self.modifier.modify(mock_request)

        self.assertNotIn('Host', mock_request.headers)

    def _create_mock_request(self,
                             path="https://prod1.server.com/some/path/12345"):
        # Minimal request double exposing just the attributes the modifier
        # touches: .path and .headers.
        mock_request = Mock()
        mock_request.path = path
        mock_request.headers = {
            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:10.0) Gecko/'
                          '20100101 Firefox/10.0'
        }
        return mock_request
| 35.215962
| 79
| 0.619251
| 859
| 7,501
| 5.136205
| 0.111758
| 0.191976
| 0.149592
| 0.076156
| 0.808024
| 0.769492
| 0.769492
| 0.754533
| 0.735041
| 0.719402
| 0
| 0.025737
| 0.254099
| 7,501
| 212
| 80
| 35.382075
| 0.762824
| 0.012532
| 0
| 0.515924
| 0
| 0
| 0.228618
| 0.090123
| 0
| 0
| 0
| 0
| 0.140127
| 1
| 0.11465
| false
| 0
| 0.019108
| 0
| 0.146497
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
72934cd731c0551daca0e67d6d3ebf32074457f1
| 33
|
py
|
Python
|
but/users/admin/__init__.py
|
yevgnenll/but
|
2cb3d7b8fd4b898440f9a74ee4b6b8fbdff32bb1
|
[
"MIT"
] | 4
|
2017-02-25T04:46:41.000Z
|
2021-03-16T21:41:51.000Z
|
but/users/admin/__init__.py
|
yevgnenll/but
|
2cb3d7b8fd4b898440f9a74ee4b6b8fbdff32bb1
|
[
"MIT"
] | 18
|
2016-04-09T07:29:33.000Z
|
2017-04-06T04:39:54.000Z
|
but/users/admin/__init__.py
|
yevgnenll/but
|
2cb3d7b8fd4b898440f9a74ee4b6b8fbdff32bb1
|
[
"MIT"
] | null | null | null |
from .user import UserAdminModel
| 16.5
| 32
| 0.848485
| 4
| 33
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 33
| 1
| 33
| 33
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
729987231d2fd613b630b4004ee8825872d946c9
| 1,891
|
py
|
Python
|
holoviews/tests/plotting/plotly/testimageplot.py
|
Jacob-Barhak/holoviews
|
5df0269595ca7befca202f9d05522c68983dc974
|
[
"BSD-3-Clause"
] | null | null | null |
holoviews/tests/plotting/plotly/testimageplot.py
|
Jacob-Barhak/holoviews
|
5df0269595ca7befca202f9d05522c68983dc974
|
[
"BSD-3-Clause"
] | 1
|
2021-04-17T15:31:36.000Z
|
2021-04-17T15:31:36.000Z
|
holoviews/tests/plotting/plotly/testimageplot.py
|
Jacob-Barhak/holoviews
|
5df0269595ca7befca202f9d05522c68983dc974
|
[
"BSD-3-Clause"
] | 1
|
2018-11-22T18:45:51.000Z
|
2018-11-22T18:45:51.000Z
|
import numpy as np
from holoviews.element import Image
from .testplot import TestPlotlyPlot
class TestImagePlot(TestPlotlyPlot):
    """Tests for the Plotly figure state produced from Image elements."""

    def test_image_state(self):
        img = Image(([1, 2, 3], [0, 1], np.array([[0, 1, 2], [2, 3, 4]])))
        state = self._get_plot_state(img)
        # Image renders as a heatmap trace with x0/dx, y0/dy grid metadata.
        self.assertEqual(state['data'][0]['type'], 'heatmap')
        self.assertEqual(state['data'][0]['x0'], 1)
        self.assertEqual(state['data'][0]['dx'], 1)
        self.assertEqual(state['data'][0]['y0'], 0)
        self.assertEqual(state['data'][0]['dy'], 1)
        self.assertEqual(state['data'][0]['z'], np.array([[0, 1, 2], [2, 3, 4]]))
        self.assertEqual(state['data'][0]['zmin'], 0)
        self.assertEqual(state['data'][0]['zmax'], 4)
        # Axis ranges extend half a cell beyond the sample coordinates.
        self.assertEqual(state['layout']['xaxis']['range'], [0.5, 3.5])
        self.assertEqual(state['layout']['yaxis']['range'], [-0.5, 1.5])

    def test_image_state_inverted(self):
        img = Image(([1, 2, 3], [0, 1], np.array([[0, 1, 2], [2, 3, 4]]))).options(
            invert_axes=True)
        state = self._get_plot_state(img)
        # invert_axes swaps x/y metadata and transposes the z matrix.
        self.assertEqual(state['data'][0]['y0'], 1)
        self.assertEqual(state['data'][0]['dy'], 1)
        self.assertEqual(state['data'][0]['x0'], 0)
        self.assertEqual(state['data'][0]['dx'], 1)
        self.assertEqual(state['data'][0]['z'], np.array([[0, 1, 2], [2, 3, 4]]).T)
        self.assertEqual(state['data'][0]['zmin'], 0)
        self.assertEqual(state['data'][0]['zmax'], 4)
        self.assertEqual(state['layout']['yaxis']['range'], [0.5, 3.5])
        self.assertEqual(state['layout']['xaxis']['range'], [-0.5, 1.5])

    def test_visible(self):
        element = Image(
            ([1, 2, 3], [0, 1], np.array([[0, 1, 2], [2, 3, 4]]))
        ).options(visible=False)
        state = self._get_plot_state(element)
        self.assertEqual(state['data'][0]['visible'], False)
| 42.977273
| 83
| 0.554204
| 268
| 1,891
| 3.850746
| 0.182836
| 0.290698
| 0.387597
| 0.372093
| 0.786822
| 0.742248
| 0.706395
| 0.688953
| 0.656008
| 0.588178
| 0
| 0.061712
| 0.203067
| 1,891
| 43
| 84
| 43.976744
| 0.623092
| 0
| 0
| 0.277778
| 0
| 0
| 0.095188
| 0
| 0
| 0
| 0
| 0
| 0.555556
| 1
| 0.083333
| false
| 0
| 0.083333
| 0
| 0.194444
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
72c708d23803d8a4db3c86afb4f0f3f9c44c8aa9
| 13,381
|
py
|
Python
|
tests/test_clustering.py
|
ElsevierSoftwareX/SOFTX-D-20-00048
|
14ad08a492ec23d70e0a18d2b9a8493b2d681616
|
[
"MIT"
] | 2
|
2021-06-23T12:57:40.000Z
|
2021-06-24T17:17:23.000Z
|
tests/test_clustering.py
|
ElsevierSoftwareX/SOFTX-D-20-00048
|
14ad08a492ec23d70e0a18d2b9a8493b2d681616
|
[
"MIT"
] | null | null | null |
tests/test_clustering.py
|
ElsevierSoftwareX/SOFTX-D-20-00048
|
14ad08a492ec23d70e0a18d2b9a8493b2d681616
|
[
"MIT"
] | null | null | null |
import unittest
import numpy as np
from PCAfold import preprocess
class TestClustering(unittest.TestCase):
################################################################################
#
# Clustering functions
#
################################################################################
def test_variable_bins_allowed_calls(self):
try:
idx = preprocess.variable_bins(np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 4, verbose=False)
self.assertTrue(True)
except:
self.assertTrue(False)
self.assertTrue(isinstance(idx, np.ndarray))
self.assertTrue(idx.ndim == 1)
def test_variable_bins_not_allowed_calls(self):
with self.assertRaises(ValueError):
idx = preprocess.variable_bins(np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 0, verbose=False)
with self.assertRaises(ValueError):
idx = preprocess.variable_bins(np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), -1, verbose=False)
with self.assertRaises(ValueError):
idx = preprocess.variable_bins(np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 4, verbose=1)
with self.assertRaises(ValueError):
idx = preprocess.variable_bins(np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 4, verbose='True')
def test_predefined_variable_bins_allowed_calls(self):
try:
idx = preprocess.predefined_variable_bins(np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), [3.5, 8.5], verbose=False)
self.assertTrue(True)
except:
self.assertTrue(False)
self.assertTrue(isinstance(idx, np.ndarray))
self.assertTrue(idx.ndim == 1)
def test_predefined_variable_bins_not_allowed_calls(self):
with self.assertRaises(ValueError):
idx = preprocess.predefined_variable_bins(np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), [3, 11], verbose=False)
with self.assertRaises(ValueError):
idx = preprocess.predefined_variable_bins(np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), [0, 6], verbose=False)
with self.assertRaises(ValueError):
idx = preprocess.predefined_variable_bins(np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), [3, 8], verbose=1)
with self.assertRaises(ValueError):
idx = preprocess.predefined_variable_bins(np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), [3, 8], verbose='True')
def test_mixture_fraction_bins_allowed_calls(self):
try:
idx = preprocess.mixture_fraction_bins(np.array([0.1, 0.15, 0.2, 0.25, 0.6, 0.8, 1]), 2, 0.2)
self.assertTrue(True)
except:
self.assertTrue(False)
try:
idx = preprocess.mixture_fraction_bins(np.array([0.1, 0.15, 0.2, 0.25, 0.6, 0.8, 1]), 1, 0.2)
self.assertTrue(True)
except:
self.assertTrue(False)
self.assertTrue(isinstance(idx, np.ndarray))
self.assertTrue(idx.ndim == 1)
def test_mixture_fraction_bins_not_allowed_calls(self):
with self.assertRaises(ValueError):
idx = preprocess.mixture_fraction_bins(np.array([0.1, 0.15, 0.2, 0.25, 0.6, 0.8, 1]), 0, 0.2)
with self.assertRaises(ValueError):
idx = preprocess.mixture_fraction_bins(np.array([0.1, 0.15, 0.2, 0.25, 0.6, 0.8, 1]), -1, 0.2)
with self.assertRaises(ValueError):
idx = preprocess.mixture_fraction_bins(np.array([0.1, 0.15, 0.2, 0.25, 0.6, 0.8, 1]), 2, 0.2, verbose=1)
with self.assertRaises(ValueError):
idx = preprocess.mixture_fraction_bins(np.array([0.1, 0.15, 0.2, 0.25, 0.6, 0.8, 1]), 2, 0.2, verbose='True')
def test_zero_neighborhood_bins_allowed_calls(self):
try:
idx = preprocess.zero_neighborhood_bins(np.array([-100, -20, -0.1, 0, 0.1, 1, 10, 20, 200, 300, 400]), k=4, split_at_zero=True, verbose=False)
self.assertTrue(True)
except:
self.assertTrue(False)
self.assertTrue(isinstance(idx, np.ndarray))
self.assertTrue(idx.ndim == 1)
def test_zero_neighborhood_bins_not_allowed_calls(self):
with self.assertRaises(ValueError):
idx = preprocess.zero_neighborhood_bins(np.array([-100, -20, -0.1, 0, 0.1, 1, 10, 20, 200, 300, 400]), k=0, split_at_zero=True, verbose=False)
with self.assertRaises(ValueError):
idx = preprocess.zero_neighborhood_bins(np.array([-100, -20, -0.1, 0, 0.1, 1, 10, 20, 200, 300, 400]), k=-1, split_at_zero=True, verbose=False)
with self.assertRaises(ValueError):
idx = preprocess.zero_neighborhood_bins(np.array([-100, -20, -0.1, 0, 0.1, 1, 10, 20, 200, 300, 400]), k=4, split_at_zero=True, verbose=1)
with self.assertRaises(ValueError):
idx = preprocess.zero_neighborhood_bins(np.array([-100, -20, -0.1, 0, 0.1, 1, 10, 20, 200, 300, 400]), k=4, split_at_zero=True, verbose='True')
################################################################################
#
# Auxiliary functions
#
################################################################################
def test_degrade_clusters_allowed_calls(self):
try:
idx_undegraded = [1, 1, 2, 2, 3, 3]
idx_degraded = [0, 0, 1, 1, 2, 2]
(idx, k) = preprocess.degrade_clusters(idx_undegraded, verbose=False)
self.assertTrue(np.min(idx) == 0)
self.assertTrue(k == 3)
self.assertTrue(list(idx) == idx_degraded)
except:
self.assertTrue(False)
try:
idx_undegraded = [-1, -1, 1, 1, 2, 2, 3, 3]
idx_degraded = [0, 0, 1, 1, 2, 2, 3, 3]
(idx, k) = preprocess.degrade_clusters(idx_undegraded, verbose=False)
self.assertTrue(np.min(idx) == 0)
self.assertTrue(k == 4)
self.assertTrue(list(idx) == idx_degraded)
except:
self.assertTrue(False)
try:
idx_undegraded = [-1, 1, 3, -1, 1, 1, 2, 2, 3, 3]
idx_degraded = [0, 1, 3, 0, 1, 1, 2, 2, 3, 3]
(idx, k) = preprocess.degrade_clusters(idx_undegraded, verbose=False)
self.assertTrue(np.min(idx) == 0)
self.assertTrue(k == 4)
self.assertTrue(list(idx) == idx_degraded)
except:
self.assertTrue(False)
try:
idx = np.array([-1,-1,0,0,0,0,1,1,1,1,5])
(idx, k) = preprocess.degrade_clusters(idx, verbose=False)
self.assertTrue(np.min(idx) == 0)
self.assertTrue(k == 4)
except:
self.assertTrue(False)
def test_degrade_clusters_not_allowed_calls(self):
idx_test = [0,0,0,1,1,1,True,2,2,2]
with self.assertRaises(ValueError):
(idx, k) = preprocess.degrade_clusters(idx_test, verbose=False)
idx_test = [0,0,0,1,1,1,5.1,2,2,2]
with self.assertRaises(ValueError):
(idx, k) = preprocess.degrade_clusters(idx_test, verbose=False)
idx_test = np.array([0,0,0,1.1,1,1,2,2,2])
with self.assertRaises(ValueError):
(idx, k) = preprocess.degrade_clusters(idx_test, verbose=False)
idx_test = np.array([-1.2,0,0,0,1,1,1,2,2,2])
with self.assertRaises(ValueError):
(idx, k) = preprocess.degrade_clusters(idx_test, verbose=False)
with self.assertRaises(ValueError):
(idx, k) = preprocess.degrade_clusters(1, verbose=False)
with self.assertRaises(ValueError):
(idx, k) = preprocess.degrade_clusters('list', verbose=False)
def test_flip_clusters_allowed_calls(self):
try:
idx_unflipped = np.array([0,0,0,1,1,1,2,2,2])
idx_flipped = np.array([0,0,0,2,2,2,1,1,1])
idx = preprocess.flip_clusters(idx_unflipped, dictionary={1:2, 2:1})
comparison = idx_flipped == idx
self.assertTrue(comparison.all())
except:
self.assertTrue(False)
try:
idx_unflipped = np.array([0,0,0,1,1,1,2,2,2])
idx_flipped = np.array([0,0,0,10,10,10,20,20,20])
idx = preprocess.flip_clusters(idx_unflipped, dictionary={1:10, 2:20})
comparison = idx_flipped == idx
self.assertTrue(comparison.all())
except:
self.assertTrue(False)
def test_flip_clusters_not_allowed_calls(self):
    """flip_clusters must raise ValueError for unknown labels or non-integer targets."""
    idx_unflipped = np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])
    # First dict references label 3 which is absent; second maps to a float.
    for bad_dictionary in ({3: 2, 2: 3}, {0: 1, 1: 1.5}):
        with self.assertRaises(ValueError):
            preprocess.flip_clusters(idx_unflipped, dictionary=bad_dictionary)
def test_get_centroids_allowed_calls(self):
    """Valid get_centroids calls return the per-cluster centroid rows.

    Fix: the original wrapped the assertions themselves inside
    ``try/except Exception: self.assertTrue(False)``, so a genuine assertion
    failure (AssertionError is an Exception) was swallowed and re-reported as
    an uninformative ``assertTrue(False)``.  Only the call under test is now
    inside the try block; unexpected exceptions are reported via ``self.fail``.
    """
    # (data, cluster labels, expected centroids)
    cases = [
        (np.array([[1, 2, 10], [1, 2, 10], [1, 2, 10]]),
         np.array([0, 0, 0]),
         np.array([[1, 2, 10]])),
        (np.array([[1, 2, 10], [1, 2, 10], [20, 30, 40]]),
         np.array([0, 0, 1]),
         np.array([[1, 2, 10], [20, 30, 40]])),
    ]
    for x, idx, idx_centroids in cases:
        try:
            centroids = preprocess.get_centroids(x, idx)
        except Exception as error:
            self.fail("get_centroids raised unexpectedly: %s" % error)
        comparison = (idx_centroids == centroids)
        self.assertTrue(comparison.all())
def test_get_centroidss_not_allowed_calls(self):
    """get_centroids must raise ValueError when idx length mismatches the data rows."""
    # NOTE(review): method name has a typo ("centroidss"); kept unchanged so
    # the test id reported by the runner stays stable.
    X = np.random.rand(100, 10)
    for n_labels in (90, 110):  # shorter and longer than the 100 rows of X
        idx = np.zeros((n_labels,))
        with self.assertRaises(ValueError):
            preprocess.get_centroids(X, idx)
def test_get_partition_allowed_calls(self):
    """Valid get_partition calls split data and observation indices by cluster.

    Fix: the original wrapped the assertions inside
    ``try/except Exception: self.assertTrue(False)``, which swallowed genuine
    AssertionErrors and replaced them with an uninformative failure.  Only the
    call under test is now inside the try block, and the per-cluster
    comparisons run in a loop instead of hand-numbered variables.
    """
    # (data, labels, expected per-cluster data, expected per-cluster indices)
    cases = [
        (np.array([[1, 2, 10], [1, 2, 10], [1, 2, 10]]),
         np.array([0, 0, 0]),
         [np.array([[1, 2, 10], [1, 2, 10], [1, 2, 10]])],
         [np.array([0, 1, 2])]),
        (np.array([[1, 2, 10], [1, 2, 10], [30, 40, 50]]),
         np.array([0, 0, 1]),
         [np.array([[1, 2, 10], [1, 2, 10]]), np.array([[30, 40, 50]])],
         [np.array([0, 1]), np.array([2])]),
    ]
    for x, idx, pre_x_in_clusters, pre_idx_in_clusters in cases:
        try:
            (x_in_clusters, idx_in_clusters) = preprocess.get_partition(x, idx)
        except Exception as error:
            self.fail("get_partition raised unexpectedly: %s" % error)
        for expected, actual in zip(pre_x_in_clusters, x_in_clusters):
            self.assertTrue((expected == actual).all())
        for expected, actual in zip(pre_idx_in_clusters, idx_in_clusters):
            self.assertTrue((expected == actual).all())
def test_get_parition_not_allowed_calls(self):
    """get_partition must raise ValueError when idx length mismatches the data rows."""
    # NOTE(review): method name has a typo ("parition"); kept unchanged so
    # the test id reported by the runner stays stable.
    X = np.random.rand(100, 10)
    for n_labels in (90, 110):  # shorter and longer than the 100 rows of X
        idx = np.zeros((n_labels,))
        with self.assertRaises(ValueError):
            preprocess.get_partition(X, idx)
def test_get_populations_allowed_calls(self):
    """Valid get_populations calls count observations per bin.

    Fix: the original wrapped assertions inside
    ``try/except Exception: self.assertTrue(False)``, swallowing genuine
    AssertionErrors and producing an uninformative failure.  Only the calls
    under test are inside the try block now; the four equal-width binnings of
    the same 100-point grid are table-driven instead of copy-pasted.
    """
    x = np.linspace(-1, 1, 100)
    # (number of bins, expected per-bin populations for 100 uniform points)
    cases = [
        (4, [25, 25, 25, 25]),
        (5, [20, 20, 20, 20, 20]),
        (2, [50, 50]),
        (1, [100]),
    ]
    for n_bins, idx_populations in cases:
        try:
            idx = preprocess.variable_bins(x, n_bins, verbose=False)
            populations = preprocess.get_populations(idx)
        except Exception as error:
            self.fail("get_populations raised unexpectedly: %s" % error)
        self.assertTrue(populations == idx_populations)
    # Degenerate input: one observation forms a single cluster of size 1.
    try:
        populations = preprocess.get_populations(np.array([0]))
    except Exception as error:
        self.fail("get_populations raised unexpectedly: %s" % error)
    self.assertTrue(populations == [1])
| 39.011662
| 155
| 0.574845
| 1,788
| 13,381
| 4.14821
| 0.052573
| 0.111366
| 0.075502
| 0.113253
| 0.914251
| 0.885264
| 0.836187
| 0.823244
| 0.79129
| 0.769449
| 0
| 0.075862
| 0.263134
| 13,381
| 342
| 156
| 39.125731
| 0.676369
| 0.002989
| 0
| 0.653543
| 0
| 0
| 0.001537
| 0
| 0
| 0
| 0
| 0
| 0.34252
| 1
| 0.066929
| false
| 0
| 0.011811
| 0
| 0.082677
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
72c7770c60783c071b9b01ccf7456b8e9afcc277
| 7,490
|
py
|
Python
|
tests/behaviour/concept/thing/attribute/attribute_steps.py
|
rpatil524/client-python
|
e8daba79842a81669f4f4c2799bcc8610e610551
|
[
"Apache-2.0"
] | 47
|
2019-01-22T19:17:13.000Z
|
2021-02-06T15:39:59.000Z
|
tests/behaviour/concept/thing/attribute/attribute_steps.py
|
rpatil524/client-python
|
e8daba79842a81669f4f4c2799bcc8610e610551
|
[
"Apache-2.0"
] | 85
|
2019-01-22T14:51:34.000Z
|
2021-04-08T15:41:43.000Z
|
tests/behaviour/concept/thing/attribute/attribute_steps.py
|
rpatil524/client-python
|
e8daba79842a81669f4f4c2799bcc8610e610551
|
[
"Apache-2.0"
] | 24
|
2019-01-22T13:21:42.000Z
|
2021-03-02T18:06:03.000Z
|
#
# Copyright (C) 2021 Vaticle
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from datetime import datetime
from behave import *
from hamcrest import *
from typedb.client import *
from tests.behaviour.context import Context
@step("attribute({type_label}) get instances contain: {var:Var}")
def step_impl(context: Context, type_label: str, var: str):
assert_that(context.get(var), is_in(context.tx().concepts().get_attribute_type(type_label).as_remote(context.tx()).get_instances()))
@step("attribute({type_label}) get instances is empty")
def step_impl(context: Context, type_label: str):
assert_that(calling(next).with_args(context.tx().concepts().get_attribute_type(type_label).as_remote(context.tx()).get_instances()), raises(StopIteration))
@step("attribute {var1:Var} get owners contain: {var2:Var}")
def step_impl(context: Context, var1: str, var2: str):
assert_that(context.get(var2), is_in(context.get(var1).as_attribute().as_remote(context.tx()).get_owners()))
@step("attribute {var1:Var} get owners do not contain: {var2:Var}")
def step_impl(context: Context, var1: str, var2: str):
assert_that(context.get(var2), not_(is_in(context.get(var1).as_attribute().as_remote(context.tx()).get_owners())))
@step("attribute {var:Var} has value type: {value_type:ValueType}")
def step_impl(context: Context, var: str, value_type: AttributeType.ValueType):
assert_that(context.get(var).as_attribute().get_type().get_value_type(), is_(value_type))
@step("attribute({type_label}) as(boolean) put: {value:Bool}; throws exception")
def step_impl(context: Context, type_label: str, value: bool):
assert_that(calling(context.tx().concepts().get_attribute_type(type_label).as_remote(context.tx()).as_boolean().put).with_args(value), raises(TypeDBClientException))
@step("{var:Var} = attribute({type_label}) as(boolean) put: {value:Bool}")
def step_impl(context: Context, var: str, type_label: str, value: bool):
context.put(var, context.tx().concepts().get_attribute_type(type_label).as_remote(context.tx()).as_boolean().put(value))
@step("attribute({type_label}) as(long) put: {value:Int}; throws exception")
def step_impl(context: Context, type_label: str, value: int):
assert_that(calling(context.tx().concepts().get_attribute_type(type_label).as_remote(context.tx()).as_long().put).with_args(value), raises(TypeDBClientException))
@step("{var:Var} = attribute({type_label}) as(long) put: {value:Int}")
def step_impl(context: Context, var: str, type_label: str, value: int):
context.put(var, context.tx().concepts().get_attribute_type(type_label).as_remote(context.tx()).as_long().put(value))
@step("attribute({type_label}) as(double) put: {value:Float}; throws exception")
def step_impl(context: Context, type_label: str, value: float):
assert_that(calling(context.tx().concepts().get_attribute_type(type_label).as_remote(context.tx()).as_double().put).with_args(value), raises(TypeDBClientException))
@step("{var:Var} = attribute({type_label}) as(double) put: {value:Float}")
def step_impl(context: Context, var: str, type_label: str, value: float):
context.put(var, context.tx().concepts().get_attribute_type(type_label).as_remote(context.tx()).as_double().put(value))
@step("attribute({type_label}) as(string) put: {value}; throws exception")
def step_impl(context: Context, type_label: str, value: str):
assert_that(calling(context.tx().concepts().get_attribute_type(type_label).as_remote(context.tx()).as_string().put).with_args(value), raises(TypeDBClientException))
@step("{var:Var} = attribute({type_label}) as(string) put: {value}")
def step_impl(context: Context, var: str, type_label: str, value: str):
context.put(var, context.tx().concepts().get_attribute_type(type_label).as_remote(context.tx()).as_string().put(value))
@step("attribute({type_label}) as(datetime) put: {value:DateTime}; throws exception")
def step_impl(context: Context, type_label: str, value: datetime):
assert_that(calling(context.tx().concepts().get_attribute_type(type_label).as_remote(context.tx()).as_datetime().put).with_args(value), raises(TypeDBClientException))
@step("{var:Var} = attribute({type_label}) as(datetime) put: {value:DateTime}")
def step_impl(context: Context, var: str, type_label: str, value: datetime):
context.put(var, context.tx().concepts().get_attribute_type(type_label).as_remote(context.tx()).as_datetime().put(value))
@step("{var:Var} = attribute({type_label}) as(boolean) get: {value:Bool}")
def step_impl(context: Context, var: str, type_label: str, value: bool):
context.put(var, context.tx().concepts().get_attribute_type(type_label).as_remote(context.tx()).as_boolean().get(value))
@step("{var:Var} = attribute({type_label}) as(long) get: {value:Int}")
def step_impl(context: Context, var: str, type_label: str, value: int):
context.put(var, context.tx().concepts().get_attribute_type(type_label).as_remote(context.tx()).as_long().get(value))
@step("{var:Var} = attribute({type_label}) as(double) get: {value:Float}")
def step_impl(context: Context, var: str, type_label: str, value: float):
context.put(var, context.tx().concepts().get_attribute_type(type_label).as_remote(context.tx()).as_double().get(value))
@step("{var:Var} = attribute({type_label}) as(string) get: {value}")
def step_impl(context: Context, var: str, type_label: str, value: str):
context.put(var, context.tx().concepts().get_attribute_type(type_label).as_remote(context.tx()).as_string().get(value))
@step("{var:Var} = attribute({type_label}) as(datetime) get: {value:DateTime}")
def step_impl(context: Context, var: str, type_label: str, value: datetime):
context.put(var, context.tx().concepts().get_attribute_type(type_label).as_remote(context.tx()).as_datetime().get(value))
@step("attribute {var:Var} has boolean value: {value:Bool}")
def step_impl(context: Context, var: str, value: bool):
assert_that(context.get(var).as_attribute().get_value(), is_(value))
@step("attribute {var:Var} has long value: {value:Int}")
def step_impl(context: Context, var: str, value: int):
assert_that(context.get(var).as_attribute().get_value(), is_(value))
@step("attribute {var:Var} has double value: {value:Float}")
def step_impl(context: Context, var: str, value: float):
assert_that(context.get(var).as_attribute().get_value(), is_(value))
@step("attribute {var:Var} has string value: {value}")
def step_impl(context: Context, var: str, value: str):
assert_that(context.get(var).as_attribute().get_value(), is_(value))
@step("attribute {var:Var} has datetime value: {value:DateTime}")
def step_impl(context: Context, var: str, value: datetime):
assert_that(context.get(var).as_attribute().get_value(), is_(value))
| 48.954248
| 170
| 0.739653
| 1,114
| 7,490
| 4.795332
| 0.113106
| 0.085923
| 0.065893
| 0.084238
| 0.816174
| 0.805691
| 0.773493
| 0.759828
| 0.671097
| 0.611943
| 0
| 0.002949
| 0.094393
| 7,490
| 152
| 171
| 49.276316
| 0.784609
| 0.104005
| 0
| 0.2125
| 0
| 0.1
| 0.225561
| 0.061734
| 0
| 0
| 0
| 0
| 0.1875
| 1
| 0.3125
| false
| 0
| 0.0625
| 0
| 0.375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f4142cf5c616746464b2fd554d5be4da9db42ee2
| 31,974
|
py
|
Python
|
thirdweb/abi/ierc1155.py
|
nftlabs/nftlabs-sdk-python
|
ea533142dc0881872b347cd8ce635dc0bfff3153
|
[
"Apache-2.0"
] | 30
|
2021-10-31T13:17:58.000Z
|
2022-02-04T13:41:13.000Z
|
thirdweb/abi/ierc1155.py
|
nftlabs/nftlabs-sdk-python
|
ea533142dc0881872b347cd8ce635dc0bfff3153
|
[
"Apache-2.0"
] | 36
|
2021-11-03T20:30:38.000Z
|
2022-02-14T10:15:40.000Z
|
thirdweb/abi/ierc1155.py
|
nftlabs/nftlabs-sdk-python
|
ea533142dc0881872b347cd8ce635dc0bfff3153
|
[
"Apache-2.0"
] | 10
|
2021-11-10T19:59:41.000Z
|
2022-01-21T21:26:55.000Z
|
"""Generated wrapper for IERC1155 Solidity contract."""
# pylint: disable=too-many-arguments
import json
from typing import ( # pylint: disable=unused-import
Any,
List,
Optional,
Tuple,
Union,
)
from eth_utils import to_checksum_address
from mypy_extensions import TypedDict # pylint: disable=unused-import
from hexbytes import HexBytes
from web3 import Web3
from web3.contract import ContractFunction
from web3.datastructures import AttributeDict
from web3.providers.base import BaseProvider
from zero_ex.contract_wrappers.bases import ContractMethod, Validator
from zero_ex.contract_wrappers.tx_params import TxParams
# Try to import a custom validator class definition; if there isn't one,
# declare one that we can instantiate for the default argument to the
# constructor for IERC1155 below.
try:
    # both mypy and pylint complain about what we're doing here, but this
    # works just fine, so their messages have been disabled here.
    from . import (  # type: ignore # pylint: disable=import-self
        IERC1155Validator,
    )
except ImportError:

    class IERC1155Validator(Validator):  # type: ignore
        """No-op input validator."""


# Optional chain middleware; silently absent if the package doesn't ship one.
try:
    from .middleware import MIDDLEWARE  # type: ignore
except ImportError:
    pass
class BalanceOfMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the balanceOf method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Optional[Validator] = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, owner: str, _id: int):
        """Validate the inputs to the balanceOf method.

        :param owner: account address; checksummed before use.
        :param _id: token id; coerced to ``int``.
        :returns: the normalized ``(owner, _id)`` pair.
        """
        self.validator.assert_valid(
            method_name="balanceOf",
            parameter_name="_owner",
            argument_value=owner,
        )
        owner = self.validate_and_checksum_address(owner)
        self.validator.assert_valid(
            method_name="balanceOf",
            parameter_name="_id",
            argument_value=_id,
        )
        # safeguard against fractional inputs
        _id = int(_id)
        return (owner, _id)

    def call(
        self, owner: str, _id: int, tx_params: Optional[TxParams] = None
    ) -> int:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        (owner, _id) = self.validate_and_normalize_inputs(owner, _id)
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method(owner, _id).call(
            tx_params.as_dict()
        )
        return int(returned)

    def send_transaction(
        self, owner: str, _id: int, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        (owner, _id) = self.validate_and_normalize_inputs(owner, _id)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(owner, _id).transact(
            tx_params.as_dict()
        )

    def build_transaction(
        self, owner: str, _id: int, tx_params: Optional[TxParams] = None
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        (owner, _id) = self.validate_and_normalize_inputs(owner, _id)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(owner, _id).buildTransaction(
            tx_params.as_dict()
        )

    def estimate_gas(
        self, owner: str, _id: int, tx_params: Optional[TxParams] = None
    ) -> int:
        """Estimate gas consumption of method call."""
        (owner, _id) = self.validate_and_normalize_inputs(owner, _id)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(owner, _id).estimateGas(
            tx_params.as_dict()
        )
class BalanceOfBatchMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the balanceOfBatch method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Optional[Validator] = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, owners: List[str], ids: List[int]):
        """Validate the inputs to the balanceOfBatch method.

        :param owners: account addresses to query.
        :param ids: token ids, positionally paired with ``owners``.
        :returns: the ``(owners, ids)`` pair unchanged.
        """
        self.validator.assert_valid(
            method_name="balanceOfBatch",
            parameter_name="_owners",
            argument_value=owners,
        )
        self.validator.assert_valid(
            method_name="balanceOfBatch",
            parameter_name="_ids",
            argument_value=ids,
        )
        return (owners, ids)

    def call(
        self,
        owners: List[str],
        ids: List[int],
        tx_params: Optional[TxParams] = None,
    ) -> List[int]:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        (owners, ids) = self.validate_and_normalize_inputs(owners, ids)
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method(owners, ids).call(
            tx_params.as_dict()
        )
        return [int(element) for element in returned]

    def send_transaction(
        self,
        owners: List[str],
        ids: List[int],
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        (owners, ids) = self.validate_and_normalize_inputs(owners, ids)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(owners, ids).transact(
            tx_params.as_dict()
        )

    def build_transaction(
        self,
        owners: List[str],
        ids: List[int],
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        (owners, ids) = self.validate_and_normalize_inputs(owners, ids)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(owners, ids).buildTransaction(
            tx_params.as_dict()
        )

    def estimate_gas(
        self,
        owners: List[str],
        ids: List[int],
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Estimate gas consumption of method call."""
        (owners, ids) = self.validate_and_normalize_inputs(owners, ids)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(owners, ids).estimateGas(
            tx_params.as_dict()
        )
class IsApprovedForAllMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the isApprovedForAll method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Optional[Validator] = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, owner: str, operator: str):
        """Validate the inputs to the isApprovedForAll method.

        :param owner: token owner address; checksummed before use.
        :param operator: operator address; checksummed before use.
        :returns: the normalized ``(owner, operator)`` pair.
        """
        self.validator.assert_valid(
            method_name="isApprovedForAll",
            parameter_name="_owner",
            argument_value=owner,
        )
        owner = self.validate_and_checksum_address(owner)
        self.validator.assert_valid(
            method_name="isApprovedForAll",
            parameter_name="_operator",
            argument_value=operator,
        )
        operator = self.validate_and_checksum_address(operator)
        return (owner, operator)

    def call(
        self, owner: str, operator: str, tx_params: Optional[TxParams] = None
    ) -> bool:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        (owner, operator) = self.validate_and_normalize_inputs(owner, operator)
        tx_params = super().normalize_tx_params(tx_params)
        returned = self._underlying_method(owner, operator).call(
            tx_params.as_dict()
        )
        return bool(returned)

    def send_transaction(
        self, owner: str, operator: str, tx_params: Optional[TxParams] = None
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        (owner, operator) = self.validate_and_normalize_inputs(owner, operator)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(owner, operator).transact(
            tx_params.as_dict()
        )

    def build_transaction(
        self, owner: str, operator: str, tx_params: Optional[TxParams] = None
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        (owner, operator) = self.validate_and_normalize_inputs(owner, operator)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(owner, operator).buildTransaction(
            tx_params.as_dict()
        )

    def estimate_gas(
        self, owner: str, operator: str, tx_params: Optional[TxParams] = None
    ) -> int:
        """Estimate gas consumption of method call."""
        (owner, operator) = self.validate_and_normalize_inputs(owner, operator)
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(owner, operator).estimateGas(
            tx_params.as_dict()
        )
class SafeBatchTransferFromMethod(
    ContractMethod
):  # pylint: disable=invalid-name
    """Various interfaces to the safeBatchTransferFrom method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Optional[Validator] = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(
        self,
        _from: str,
        to: str,
        ids: List[int],
        values: List[int],
        data: Union[bytes, str],
    ):
        """Validate the inputs to the safeBatchTransferFrom method.

        :param _from: source address; checksummed before use.
        :param to: destination address; checksummed before use.
        :param ids: token ids to transfer.
        :param values: amounts, positionally paired with ``ids``.
        :param data: extra bytes forwarded to the receiver hook.
        :returns: the normalized argument tuple.
        """
        self.validator.assert_valid(
            method_name="safeBatchTransferFrom",
            parameter_name="_from",
            argument_value=_from,
        )
        _from = self.validate_and_checksum_address(_from)
        self.validator.assert_valid(
            method_name="safeBatchTransferFrom",
            parameter_name="_to",
            argument_value=to,
        )
        to = self.validate_and_checksum_address(to)
        self.validator.assert_valid(
            method_name="safeBatchTransferFrom",
            parameter_name="_ids",
            argument_value=ids,
        )
        self.validator.assert_valid(
            method_name="safeBatchTransferFrom",
            parameter_name="_values",
            argument_value=values,
        )
        self.validator.assert_valid(
            method_name="safeBatchTransferFrom",
            parameter_name="_data",
            argument_value=data,
        )
        return (_from, to, ids, values, data)

    def call(
        self,
        _from: str,
        to: str,
        ids: List[int],
        values: List[int],
        data: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> None:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        (_from, to, ids, values, data) = self.validate_and_normalize_inputs(
            _from, to, ids, values, data
        )
        tx_params = super().normalize_tx_params(tx_params)
        self._underlying_method(_from, to, ids, values, data).call(
            tx_params.as_dict()
        )

    def send_transaction(
        self,
        _from: str,
        to: str,
        ids: List[int],
        values: List[int],
        data: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        (_from, to, ids, values, data) = self.validate_and_normalize_inputs(
            _from, to, ids, values, data
        )
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(_from, to, ids, values, data).transact(
            tx_params.as_dict()
        )

    def build_transaction(
        self,
        _from: str,
        to: str,
        ids: List[int],
        values: List[int],
        data: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        (_from, to, ids, values, data) = self.validate_and_normalize_inputs(
            _from, to, ids, values, data
        )
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(
            _from, to, ids, values, data
        ).buildTransaction(tx_params.as_dict())

    def estimate_gas(
        self,
        _from: str,
        to: str,
        ids: List[int],
        values: List[int],
        data: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Estimate gas consumption of method call."""
        (_from, to, ids, values, data) = self.validate_and_normalize_inputs(
            _from, to, ids, values, data
        )
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(
            _from, to, ids, values, data
        ).estimateGas(tx_params.as_dict())
class SafeTransferFromMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the safeTransferFrom method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Optional[Validator] = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(
        self,
        _from: str,
        to: str,
        _id: int,
        value: int,
        data: Union[bytes, str],
    ):
        """Validate the inputs to the safeTransferFrom method.

        :param _from: source address; checksummed before use.
        :param to: destination address; checksummed before use.
        :param _id: token id; coerced to ``int``.
        :param value: amount to transfer; coerced to ``int``.
        :param data: extra bytes forwarded to the receiver hook.
        :returns: the normalized argument tuple.
        """
        self.validator.assert_valid(
            method_name="safeTransferFrom",
            parameter_name="_from",
            argument_value=_from,
        )
        _from = self.validate_and_checksum_address(_from)
        self.validator.assert_valid(
            method_name="safeTransferFrom",
            parameter_name="_to",
            argument_value=to,
        )
        to = self.validate_and_checksum_address(to)
        self.validator.assert_valid(
            method_name="safeTransferFrom",
            parameter_name="_id",
            argument_value=_id,
        )
        # safeguard against fractional inputs
        _id = int(_id)
        self.validator.assert_valid(
            method_name="safeTransferFrom",
            parameter_name="_value",
            argument_value=value,
        )
        # safeguard against fractional inputs
        value = int(value)
        self.validator.assert_valid(
            method_name="safeTransferFrom",
            parameter_name="_data",
            argument_value=data,
        )
        return (_from, to, _id, value, data)

    def call(
        self,
        _from: str,
        to: str,
        _id: int,
        value: int,
        data: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> None:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        (_from, to, _id, value, data) = self.validate_and_normalize_inputs(
            _from, to, _id, value, data
        )
        tx_params = super().normalize_tx_params(tx_params)
        self._underlying_method(_from, to, _id, value, data).call(
            tx_params.as_dict()
        )

    def send_transaction(
        self,
        _from: str,
        to: str,
        _id: int,
        value: int,
        data: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        (_from, to, _id, value, data) = self.validate_and_normalize_inputs(
            _from, to, _id, value, data
        )
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(_from, to, _id, value, data).transact(
            tx_params.as_dict()
        )

    def build_transaction(
        self,
        _from: str,
        to: str,
        _id: int,
        value: int,
        data: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        (_from, to, _id, value, data) = self.validate_and_normalize_inputs(
            _from, to, _id, value, data
        )
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(
            _from, to, _id, value, data
        ).buildTransaction(tx_params.as_dict())

    def estimate_gas(
        self,
        _from: str,
        to: str,
        _id: int,
        value: int,
        data: Union[bytes, str],
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Estimate gas consumption of method call."""
        (_from, to, _id, value, data) = self.validate_and_normalize_inputs(
            _from, to, _id, value, data
        )
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(
            _from, to, _id, value, data
        ).estimateGas(tx_params.as_dict())
class SetApprovalForAllMethod(ContractMethod):  # pylint: disable=invalid-name
    """Various interfaces to the setApprovalForAll method."""

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        contract_function: ContractFunction,
        validator: Optional[Validator] = None,
    ):
        """Persist instance data."""
        super().__init__(web3_or_provider, contract_address, validator)
        self._underlying_method = contract_function

    def validate_and_normalize_inputs(self, operator: str, approved: bool):
        """Validate the inputs to the setApprovalForAll method.

        :param operator: operator address; checksummed before use.
        :param approved: whether the operator is being granted approval.
        :returns: the normalized ``(operator, approved)`` pair.
        """
        self.validator.assert_valid(
            method_name="setApprovalForAll",
            parameter_name="_operator",
            argument_value=operator,
        )
        operator = self.validate_and_checksum_address(operator)
        self.validator.assert_valid(
            method_name="setApprovalForAll",
            parameter_name="_approved",
            argument_value=approved,
        )
        return (operator, approved)

    def call(
        self,
        operator: str,
        approved: bool,
        tx_params: Optional[TxParams] = None,
    ) -> None:
        """Execute underlying contract method via eth_call.

        :param tx_params: transaction parameters
        :returns: the return value of the underlying method.
        """
        (operator, approved) = self.validate_and_normalize_inputs(
            operator, approved
        )
        tx_params = super().normalize_tx_params(tx_params)
        self._underlying_method(operator, approved).call(tx_params.as_dict())

    def send_transaction(
        self,
        operator: str,
        approved: bool,
        tx_params: Optional[TxParams] = None,
    ) -> Union[HexBytes, bytes]:
        """Execute underlying contract method via eth_sendTransaction.

        :param tx_params: transaction parameters
        """
        (operator, approved) = self.validate_and_normalize_inputs(
            operator, approved
        )
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(operator, approved).transact(
            tx_params.as_dict()
        )

    def build_transaction(
        self,
        operator: str,
        approved: bool,
        tx_params: Optional[TxParams] = None,
    ) -> dict:
        """Construct calldata to be used as input to the method."""
        (operator, approved) = self.validate_and_normalize_inputs(
            operator, approved
        )
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(operator, approved).buildTransaction(
            tx_params.as_dict()
        )

    def estimate_gas(
        self,
        operator: str,
        approved: bool,
        tx_params: Optional[TxParams] = None,
    ) -> int:
        """Estimate gas consumption of method call."""
        (operator, approved) = self.validate_and_normalize_inputs(
            operator, approved
        )
        tx_params = super().normalize_tx_params(tx_params)
        return self._underlying_method(operator, approved).estimateGas(
            tx_params.as_dict()
        )
# pylint: disable=too-many-public-methods,too-many-instance-attributes
class IERC1155:
    """Wrapper class for IERC1155 Solidity contract.

    All method parameters of type `bytes`:code: should be encoded as UTF-8,
    which can be accomplished via `str.encode("utf_8")`:code:.
    """

    balance_of: BalanceOfMethod
    """Constructor-initialized instance of
    :class:`BalanceOfMethod`.
    """

    balance_of_batch: BalanceOfBatchMethod
    """Constructor-initialized instance of
    :class:`BalanceOfBatchMethod`.
    """

    is_approved_for_all: IsApprovedForAllMethod
    """Constructor-initialized instance of
    :class:`IsApprovedForAllMethod`.
    """

    safe_batch_transfer_from: SafeBatchTransferFromMethod
    """Constructor-initialized instance of
    :class:`SafeBatchTransferFromMethod`.
    """

    safe_transfer_from: SafeTransferFromMethod
    """Constructor-initialized instance of
    :class:`SafeTransferFromMethod`.
    """

    set_approval_for_all: SetApprovalForAllMethod
    """Constructor-initialized instance of
    :class:`SetApprovalForAllMethod`.
    """

    def __init__(
        self,
        web3_or_provider: Union[Web3, BaseProvider],
        contract_address: str,
        validator: IERC1155Validator = None,
    ):
        """Get an instance of wrapper for smart contract.

        :param web3_or_provider: Either an instance of `web3.Web3`:code: or
            `web3.providers.base.BaseProvider`:code:
        :param contract_address: where the contract has been deployed
        :param validator: for validation of method inputs.
        :raises TypeError: if `web3_or_provider` is neither a Web3 instance
            nor a BaseProvider.
        """
        # pylint: disable=too-many-statements
        self.contract_address = contract_address

        # Fall back to the default validator when the caller supplies none.
        if not validator:
            validator = IERC1155Validator(web3_or_provider, contract_address)

        # Accept either a ready Web3 instance or a bare provider.
        web3 = None
        if isinstance(web3_or_provider, BaseProvider):
            web3 = Web3(web3_or_provider)
        elif isinstance(web3_or_provider, Web3):
            web3 = web3_or_provider
        else:
            raise TypeError(
                "Expected parameter 'web3_or_provider' to be an instance of either"
                + " Web3 or BaseProvider"
            )

        # if any middleware was imported, inject it
        try:
            MIDDLEWARE
        except NameError:
            # No MIDDLEWARE name defined at module level: nothing to inject.
            pass
        else:
            try:
                for middleware in MIDDLEWARE:
                    web3.middleware_onion.inject(
                        middleware["function"],
                        layer=middleware["layer"],
                    )
            except ValueError as value_error:
                # Re-injecting the same middleware is harmless; ignore it.
                if value_error.args == (
                    "You can't add the same un-named instance twice",
                ):
                    pass
                # NOTE(review): any other ValueError is silently swallowed
                # here as well (no else/raise branch) — confirm whether it
                # should be re-raised.

        self._web3_eth = web3.eth

        # Bind one wrapper object per contract function; all share the
        # same validator instance.
        functions = self._web3_eth.contract(
            address=to_checksum_address(contract_address), abi=IERC1155.abi()
        ).functions

        self.balance_of = BalanceOfMethod(
            web3_or_provider, contract_address, functions.balanceOf, validator
        )

        self.balance_of_batch = BalanceOfBatchMethod(
            web3_or_provider,
            contract_address,
            functions.balanceOfBatch,
            validator,
        )

        self.is_approved_for_all = IsApprovedForAllMethod(
            web3_or_provider,
            contract_address,
            functions.isApprovedForAll,
            validator,
        )

        self.safe_batch_transfer_from = SafeBatchTransferFromMethod(
            web3_or_provider,
            contract_address,
            functions.safeBatchTransferFrom,
            validator,
        )

        self.safe_transfer_from = SafeTransferFromMethod(
            web3_or_provider,
            contract_address,
            functions.safeTransferFrom,
            validator,
        )

        self.set_approval_for_all = SetApprovalForAllMethod(
            web3_or_provider,
            contract_address,
            functions.setApprovalForAll,
            validator,
        )

    def get_approval_for_all_event(
        self, tx_hash: Union[HexBytes, bytes]
    ) -> Tuple[AttributeDict]:
        """Get log entry for ApprovalForAll event.

        :param tx_hash: hash of transaction emitting ApprovalForAll event
        :returns: decoded ApprovalForAll log entries from the receipt
        """
        tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
        return (
            self._web3_eth.contract(
                address=to_checksum_address(self.contract_address),
                abi=IERC1155.abi(),
            )
            .events.ApprovalForAll()
            .processReceipt(tx_receipt)
        )

    def get_transfer_batch_event(
        self, tx_hash: Union[HexBytes, bytes]
    ) -> Tuple[AttributeDict]:
        """Get log entry for TransferBatch event.

        :param tx_hash: hash of transaction emitting TransferBatch event
        :returns: decoded TransferBatch log entries from the receipt
        """
        tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
        return (
            self._web3_eth.contract(
                address=to_checksum_address(self.contract_address),
                abi=IERC1155.abi(),
            )
            .events.TransferBatch()
            .processReceipt(tx_receipt)
        )

    def get_transfer_single_event(
        self, tx_hash: Union[HexBytes, bytes]
    ) -> Tuple[AttributeDict]:
        """Get log entry for TransferSingle event.

        :param tx_hash: hash of transaction emitting TransferSingle event
        :returns: decoded TransferSingle log entries from the receipt
        """
        tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
        return (
            self._web3_eth.contract(
                address=to_checksum_address(self.contract_address),
                abi=IERC1155.abi(),
            )
            .events.TransferSingle()
            .processReceipt(tx_receipt)
        )

    def get_uri_event(
        self, tx_hash: Union[HexBytes, bytes]
    ) -> Tuple[AttributeDict]:
        """Get log entry for URI event.

        :param tx_hash: hash of transaction emitting URI event
        :returns: decoded URI log entries from the receipt
        """
        tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
        return (
            self._web3_eth.contract(
                address=to_checksum_address(self.contract_address),
                abi=IERC1155.abi(),
            )
            .events.URI()
            .processReceipt(tx_receipt)
        )

    @staticmethod
    def abi():
        """Return the ABI to the underlying contract."""
        # Inline JSON ABI emitted by the code generator; kept verbatim.
        return json.loads(
            '[{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"_owner","type":"address"},{"indexed":true,"internalType":"address","name":"_operator","type":"address"},{"indexed":false,"internalType":"bool","name":"_approved","type":"bool"}],"name":"ApprovalForAll","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"_operator","type":"address"},{"indexed":true,"internalType":"address","name":"_from","type":"address"},{"indexed":true,"internalType":"address","name":"_to","type":"address"},{"indexed":false,"internalType":"uint256[]","name":"_ids","type":"uint256[]"},{"indexed":false,"internalType":"uint256[]","name":"_values","type":"uint256[]"}],"name":"TransferBatch","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"_operator","type":"address"},{"indexed":true,"internalType":"address","name":"_from","type":"address"},{"indexed":true,"internalType":"address","name":"_to","type":"address"},{"indexed":false,"internalType":"uint256","name":"_id","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"_value","type":"uint256"}],"name":"TransferSingle","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"string","name":"_value","type":"string"},{"indexed":true,"internalType":"uint256","name":"_id","type":"uint256"}],"name":"URI","type":"event"},{"inputs":[{"internalType":"address","name":"_owner","type":"address"},{"internalType":"uint256","name":"_id","type":"uint256"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address[]","name":"_owners","type":"address[]"},{"internalType":"uint256[]","name":"_ids","type":"uint256[]"}],"name":"balanceOfBatch","outputs":[{"internalType":"uint256[]","name":"","type":"uint256[]"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_owner","type":"address"},{"internalType":"address","name":"_operator","type":"address"}],"name":"isApprovedForAll","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_from","type":"address"},{"internalType":"address","name":"_to","type":"address"},{"internalType":"uint256[]","name":"_ids","type":"uint256[]"},{"internalType":"uint256[]","name":"_values","type":"uint256[]"},{"internalType":"bytes","name":"_data","type":"bytes"}],"name":"safeBatchTransferFrom","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"_from","type":"address"},{"internalType":"address","name":"_to","type":"address"},{"internalType":"uint256","name":"_id","type":"uint256"},{"internalType":"uint256","name":"_value","type":"uint256"},{"internalType":"bytes","name":"_data","type":"bytes"}],"name":"safeTransferFrom","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"_operator","type":"address"},{"internalType":"bool","name":"_approved","type":"bool"}],"name":"setApprovalForAll","outputs":[],"stateMutability":"nonpayable","type":"function"}]' # noqa: E501 (line-too-long)
        )
# pylint: disable=too-many-lines
| 36.794016
| 3,256
| 0.622068
| 3,302
| 31,974
| 5.769836
| 0.078134
| 0.055847
| 0.025194
| 0.040941
| 0.81456
| 0.769106
| 0.731787
| 0.709217
| 0.682868
| 0.595161
| 0
| 0.008213
| 0.261275
| 31,974
| 868
| 3,257
| 36.836406
| 0.798391
| 0.147526
| 0
| 0.693878
| 1
| 0.00157
| 0.143755
| 0.127054
| 0
| 0
| 0
| 0
| 0.028257
| 1
| 0.065934
| false
| 0.00471
| 0.023548
| 0
| 0.161695
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f445df571ed470337f97aa74ceb203d28fe8c4af
| 28
|
py
|
Python
|
glue/plugins/tools/spectrum_tool/qt/__init__.py
|
ejeschke/glue
|
21689e3474aeaeb70e258d76c60755596856976c
|
[
"BSD-3-Clause"
] | 3
|
2015-09-10T22:23:55.000Z
|
2019-04-04T18:47:33.000Z
|
glue/plugins/tools/spectrum_tool/qt/__init__.py
|
ejeschke/glue
|
21689e3474aeaeb70e258d76c60755596856976c
|
[
"BSD-3-Clause"
] | null | null | null |
glue/plugins/tools/spectrum_tool/qt/__init__.py
|
ejeschke/glue
|
21689e3474aeaeb70e258d76c60755596856976c
|
[
"BSD-3-Clause"
] | null | null | null |
from .spectrum_tool import *
| 28
| 28
| 0.821429
| 4
| 28
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 28
| 1
| 28
| 28
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f48c7561590846037b25065c3c0b2580db128a7f
| 27
|
py
|
Python
|
game/__init__.py
|
Kulbear/endless-2048
|
939479e6ae5d4dae6fb636c9803f8d4ebf5be0e8
|
[
"MIT"
] | 11
|
2017-05-14T19:29:56.000Z
|
2020-05-24T07:02:03.000Z
|
game/__init__.py
|
Kulbear/endless-2048
|
939479e6ae5d4dae6fb636c9803f8d4ebf5be0e8
|
[
"MIT"
] | null | null | null |
game/__init__.py
|
Kulbear/endless-2048
|
939479e6ae5d4dae6fb636c9803f8d4ebf5be0e8
|
[
"MIT"
] | 7
|
2017-07-08T05:54:55.000Z
|
2021-11-13T14:01:39.000Z
|
from .game import Game2048
| 13.5
| 26
| 0.814815
| 4
| 27
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 0.148148
| 27
| 1
| 27
| 27
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
be766349ae45b8d5dfd96b5fcc8fc5ae1a54df25
| 37
|
py
|
Python
|
srcflib/__init__.py
|
mas90/srcf-python
|
09ce45c65d2ddbec2cdfc559a7b5983398dbdfa0
|
[
"MIT"
] | null | null | null |
srcflib/__init__.py
|
mas90/srcf-python
|
09ce45c65d2ddbec2cdfc559a7b5983398dbdfa0
|
[
"MIT"
] | 2
|
2020-08-23T17:23:44.000Z
|
2020-12-21T17:05:50.000Z
|
srcflib/__init__.py
|
mas90/srcf-python
|
09ce45c65d2ddbec2cdfc559a7b5983398dbdfa0
|
[
"MIT"
] | null | null | null |
from . import email, plumbing, tasks
| 18.5
| 36
| 0.756757
| 5
| 37
| 5.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162162
| 37
| 1
| 37
| 37
| 0.903226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
be819efb2465099705e29b479f3450aebce4ba8c
| 267
|
py
|
Python
|
dsutils/__init__.py
|
RTJ19/dsutils_dev
|
92dc6f6583d80cd23a1afa935ec33df796efdc39
|
[
"MIT"
] | null | null | null |
dsutils/__init__.py
|
RTJ19/dsutils_dev
|
92dc6f6583d80cd23a1afa935ec33df796efdc39
|
[
"MIT"
] | null | null | null |
dsutils/__init__.py
|
RTJ19/dsutils_dev
|
92dc6f6583d80cd23a1afa935ec33df796efdc39
|
[
"MIT"
] | null | null | null |
from dsutils_dev.dsutils.evaluate import get_eda_plots
from dsutils_dev.dsutils.convert import DataFrameConverter
from dsutils_dev.dsutils.colab_utils import mount_drive #, get_spark_environment
__version__ = '0.0.1'
#from dsutils_dev.evaluate import get_eda_plots
| 33.375
| 80
| 0.857678
| 40
| 267
| 5.325
| 0.475
| 0.206573
| 0.262911
| 0.295775
| 0.234742
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012295
| 0.086142
| 267
| 7
| 81
| 38.142857
| 0.860656
| 0.258427
| 0
| 0
| 0
| 0
| 0.02551
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
fe45223de10387a883213a6e51c3ff826b62daaf
| 532
|
py
|
Python
|
leanai/training/losses/__init__.py
|
penguinmenac3/leanai
|
6d26575b248ff03c4a24009cd82f26ea99d96d15
|
[
"MIT"
] | 1
|
2021-03-28T21:32:59.000Z
|
2021-03-28T21:32:59.000Z
|
leanai/training/losses/__init__.py
|
penguinmenac3/leanai
|
6d26575b248ff03c4a24009cd82f26ea99d96d15
|
[
"MIT"
] | null | null | null |
leanai/training/losses/__init__.py
|
penguinmenac3/leanai
|
6d26575b248ff03c4a24009cd82f26ea99d96d15
|
[
"MIT"
] | null | null | null |
from leanai.training.losses.classification import BinaryCrossEntropyLossFromLogits, SparseCrossEntropyLossFromLogits, SparseCategoricalAccuracy
from leanai.training.losses.masking import NaNMaskedLoss, NegMaskedLoss, MaskedLoss
from leanai.training.losses.regression import SmoothL1Loss
from leanai.training.losses.multiloss import MultiLossV2, NormalizedLoss
from leanai.training.losses.sumloss import SumLoss, WeightedSumLoss
from leanai.training.losses.detection import DetectionLoss
from leanai.training.losses.loss import Loss
| 66.5
| 143
| 0.885338
| 55
| 532
| 8.563636
| 0.418182
| 0.14862
| 0.267516
| 0.356688
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004016
| 0.06391
| 532
| 7
| 144
| 76
| 0.941767
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
fe8dac2a7f59492c2b4068a8cf70d417f4e77610
| 9,256
|
py
|
Python
|
src/the_tale/the_tale/linguistics/lexicon/groups/action_event.py
|
devapromix/the-tale
|
2a10efd3270734f8cf482b4cfbc5353ef8f0494c
|
[
"BSD-3-Clause"
] | 1
|
2020-04-02T11:51:20.000Z
|
2020-04-02T11:51:20.000Z
|
src/the_tale/the_tale/linguistics/lexicon/groups/action_event.py
|
devapromix/the-tale
|
2a10efd3270734f8cf482b4cfbc5353ef8f0494c
|
[
"BSD-3-Clause"
] | null | null | null |
src/the_tale/the_tale/linguistics/lexicon/groups/action_event.py
|
devapromix/the-tale
|
2a10efd3270734f8cf482b4cfbc5353ef8f0494c
|
[
"BSD-3-Clause"
] | null | null | null |
# Lexicon key table for hero "habit" diary events (the-tale linguistics).
import smart_imports

smart_imports.all()

# Shorthand alias for the lexicon variable enum used throughout KEYS below.
V = lexicon_relations.VARIABLE

# Each tuple: (key name, numeric id, editor caption (Russian), lexicon group,
# description (Russian), required substitution variables, trailing markup
# string or None — presumably a reward-display template; confirm with the
# lexicon renderer).
KEYS = [('ACTION_EVENT_HABIT_IN_PLACE_AGGRESSIVE_ARTIFACT', 40000, 'Дневник: В городе, черты, агрессивность (артефакт)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при посещении города агрессивным героем (артефакт)',
         [V.DATE, V.TIME, V.HERO, V.PLACE, V.ARTIFACT], None),

        ('ACTION_EVENT_HABIT_IN_PLACE_AGGRESSIVE_EXPERIENCE', 40001, 'Дневник: В городе, черты, агрессивность (опыт)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при посещении города агрессивным героем (опыт)',
         [V.DATE, V.TIME, V.HERO, V.PLACE, V.EXPERIENCE], 'hero#N +experience#EXP'),

        ('ACTION_EVENT_HABIT_IN_PLACE_AGGRESSIVE_MONEY', 40002, 'Дневник: В городе, черты, агрессивность (деньги)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при посещении города агрессивным героем (деньги)',
         [V.DATE, V.TIME, V.HERO, V.COINS, V.PLACE], 'hero#N +coins#G'),

        ('ACTION_EVENT_HABIT_IN_PLACE_AGGRESSIVE_NOTHING', 40003, 'Дневник: В городе, черты, агрессивность (без бонуса)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при посещении города агрессивным героем (без бонуса)',
         [V.DATE, V.TIME, V.HERO, V.PLACE], None),

        ('ACTION_EVENT_HABIT_IN_PLACE_DISHONORABLE_ARTIFACT', 40004, 'Дневник: В городе, черты, бесчестие (артефакт)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при посещении города бесчестным героем (артефакт)',
         [V.DATE, V.TIME, V.HERO, V.PLACE, V.ARTIFACT], None),

        ('ACTION_EVENT_HABIT_IN_PLACE_DISHONORABLE_EXPERIENCE', 40005, 'Дневник: В городе, черты, бесчестие (опыт)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при посещении города бесчестным героем (опыт)',
         [V.DATE, V.TIME, V.HERO, V.PLACE, V.EXPERIENCE], 'hero#N +experience#EXP'),

        ('ACTION_EVENT_HABIT_IN_PLACE_DISHONORABLE_MONEY', 40006, 'Дневник: В городе, черты, бесчестие (деньги)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при посещении города бесчестным героем (деньги)',
         [V.DATE, V.TIME, V.HERO, V.COINS, V.PLACE], 'hero#N +coins#G'),

        ('ACTION_EVENT_HABIT_IN_PLACE_DISHONORABLE_NOTHING', 40007, 'Дневник: В городе, черты, бесчестие (без бонуса)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при посещении города бесчестным героем (без бонуса)',
         [V.DATE, V.TIME, V.HERO, V.PLACE], None),

        ('ACTION_EVENT_HABIT_IN_PLACE_NOBLE_ARTIFACT', 40008, 'Дневник: В городе, черты, благородство (артефакт)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при посещении города благородным героем (артефакт)',
         [V.DATE, V.TIME, V.HERO, V.PLACE, V.ARTIFACT], None),

        ('ACTION_EVENT_HABIT_IN_PLACE_NOBLE_EXPERIENCE', 40009, 'Дневник: В городе, черты, благородство (опыт)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при посещении города благородным героем (опыт)',
         [V.DATE, V.TIME, V.HERO, V.PLACE, V.EXPERIENCE], 'hero#N +experience#EXP'),

        ('ACTION_EVENT_HABIT_IN_PLACE_NOBLE_MONEY', 40010, 'Дневник: В городе, черты, благородство (деньги)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при посещении города благородным героем (деньги)',
         [V.DATE, V.TIME, V.HERO, V.COINS, V.PLACE], 'hero#N +coins#G'),

        ('ACTION_EVENT_HABIT_IN_PLACE_NOBLE_NOTHING', 40011, 'Дневник: В городе, черты, благородство (без бонуса)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при посещении города благородным героем (без бонуса)',
         [V.DATE, V.TIME, V.HERO, V.PLACE], None),

        ('ACTION_EVENT_HABIT_IN_PLACE_PEACEABLE_ARTIFACT', 40012, 'Дневник: В городе, черты, миролюбие (артефакт)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при посещении города миролюбивым героем (артефакт)',
         [V.DATE, V.TIME, V.HERO, V.PLACE, V.ARTIFACT], None),

        ('ACTION_EVENT_HABIT_IN_PLACE_PEACEABLE_EXPERIENCE', 40013, 'Дневник: В городе, черты, миролюбие (опыт)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при посещении города миролюбивым героем (опыт)',
         [V.DATE, V.TIME, V.HERO, V.PLACE, V.EXPERIENCE], 'hero#N +experience#EXP'),

        ('ACTION_EVENT_HABIT_IN_PLACE_PEACEABLE_MONEY', 40014, 'Дневник: В городе, черты, миролюбие (деньги)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при посещении города миролюбивым героем (деньги)',
         [V.DATE, V.TIME, V.HERO, V.COINS, V.PLACE], 'hero#N +coins#G'),

        ('ACTION_EVENT_HABIT_IN_PLACE_PEACEABLE_NOTHING', 40015, 'Дневник: В городе, черты, миролюбие (без бонуса)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при посещении города миролюбивым героем (без бонуса)',
         [V.DATE, V.TIME, V.HERO, V.PLACE], None),

        # MOVE_TO entries: same id scheme, but while travelling, so no V.PLACE.
        ('ACTION_EVENT_HABIT_MOVE_TO_AGGRESSIVE_ARTIFACT', 40016, 'Дневник: В движении, черты, агрессивность (артефакт)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при путешествии агрессивного героя (артефакт)',
         [V.DATE, V.TIME, V.HERO, V.ARTIFACT], None),

        ('ACTION_EVENT_HABIT_MOVE_TO_AGGRESSIVE_EXPERIENCE', 40017, 'Дневник: В движении, черты, агрессивность (опыт)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при путешествии агрессивного героя (опыт)',
         [V.DATE, V.TIME, V.HERO, V.EXPERIENCE], 'hero#N +experience#EXP'),

        ('ACTION_EVENT_HABIT_MOVE_TO_AGGRESSIVE_MONEY', 40018, 'Дневник: В движении, черты, агрессивность (деньги)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при путешествии агрессивного героя (деньги)',
         [V.DATE, V.TIME, V.COINS, V.HERO], 'hero#N +coins#G'),

        ('ACTION_EVENT_HABIT_MOVE_TO_AGGRESSIVE_NOTHING', 40019, 'Дневник: В движении, черты, агрессивность (без бонуса)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при путешествии агрессивного героя (без бонуса)',
         [V.DATE, V.TIME, V.HERO], None),

        ('ACTION_EVENT_HABIT_MOVE_TO_DISHONORABLE_ARTIFACT', 40020, 'Дневник: В движении, черты, бесчестие (артефакт)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при путешествии бесчестного героя (артефакт)',
         [V.DATE, V.TIME, V.HERO, V.ARTIFACT], None),

        ('ACTION_EVENT_HABIT_MOVE_TO_DISHONORABLE_EXPERIENCE', 40021, 'Дневник: В движении, черты, бесчестие (опыт)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при путешествии бесчестного героя (опыт)',
         [V.DATE, V.TIME, V.HERO, V.EXPERIENCE], 'hero#N +experience#EXP'),

        ('ACTION_EVENT_HABIT_MOVE_TO_DISHONORABLE_MONEY', 40022, 'Дневник: В движении, черты, бесчестие (деньги)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при путешествии бесчестного героя (деньги)',
         [V.DATE, V.TIME, V.COINS, V.HERO], 'hero#N +coins#G'),

        ('ACTION_EVENT_HABIT_MOVE_TO_DISHONORABLE_NOTHING', 40023, 'Дневник: В движении, черты, бесчестие (без бонуса)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при путешествии бесчестного героя (без бонуса)',
         [V.DATE, V.TIME, V.HERO], None),

        ('ACTION_EVENT_HABIT_MOVE_TO_NOBLE_ARTIFACT', 40024, 'Дневник: В движении, черты, благородство (артефакт)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при путешествии благородного героя (артефакт)',
         [V.DATE, V.TIME, V.HERO, V.ARTIFACT], None),

        ('ACTION_EVENT_HABIT_MOVE_TO_NOBLE_EXPERIENCE', 40025, 'Дневник: В движении, черты, благородство (опыт)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при путешествии благородного героя (опыт)',
         [V.DATE, V.TIME, V.HERO, V.EXPERIENCE], 'hero#N +experience#EXP'),

        ('ACTION_EVENT_HABIT_MOVE_TO_NOBLE_MONEY', 40026, 'Дневник: В движении, черты, благородство (деньги)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при путешествии благородного героя (деньги)',
         [V.DATE, V.TIME, V.COINS, V.HERO], 'hero#N +coins#G'),

        ('ACTION_EVENT_HABIT_MOVE_TO_NOBLE_NOTHING', 40027, 'Дневник: В движении, черты, благородство (без бонуса)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при путешествии благородного героя (без бонуса)',
         [V.DATE, V.TIME, V.HERO], None),

        ('ACTION_EVENT_HABIT_MOVE_TO_PEACEABLE_ARTIFACT', 40028, 'Дневник: В движении, черты, миролюбие (артефакт)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при путешествии миролюбивого героя (артефакт)',
         [V.DATE, V.TIME, V.HERO, V.ARTIFACT], None),

        ('ACTION_EVENT_HABIT_MOVE_TO_PEACEABLE_EXPERIENCE', 40029, 'Дневник: В движении, черты, миролюбие (опыт)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при путешествии миролюбивого героя (опыт)',
         [V.DATE, V.TIME, V.HERO, V.EXPERIENCE], 'hero#N +experience#EXP'),

        ('ACTION_EVENT_HABIT_MOVE_TO_PEACEABLE_MONEY', 40030, 'Дневник: В движении, черты, миролюбие (деньги)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при путешествии миролюбивого героя (деньги)',
         [V.DATE, V.TIME, V.COINS, V.HERO], 'hero#N +coins#G'),

        ('ACTION_EVENT_HABIT_MOVE_TO_PEACEABLE_NOTHING', 40031, 'Дневник: В движении, черты, миролюбие (без бонуса)', relations.LEXICON_GROUP.ACTION_EVENT,
         'События при путешествии миролюбивого героя (без бонуса)',
         [V.DATE, V.TIME, V.HERO], None),
        ]
| 66.589928
| 160
| 0.704516
| 1,195
| 9,256
| 5.240167
| 0.078661
| 0.112424
| 0.081763
| 0.137975
| 0.926222
| 0.849569
| 0.819706
| 0.798946
| 0.798946
| 0.798946
| 0
| 0.020831
| 0.17016
| 9,256
| 138
| 161
| 67.072464
| 0.794428
| 0
| 0
| 0.32
| 0
| 0
| 0.540249
| 0.155592
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02
| 0
| 0.02
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
228d8f8cd9ec4e71fa40c1fea41844ee921f6e72
| 68
|
py
|
Python
|
adbc/zql/parsers/sqlite.py
|
aleontiev/apg
|
c6a10a9b0a576913c63ed4f093e2a0fa7469af87
|
[
"MIT"
] | 2
|
2020-07-17T16:33:42.000Z
|
2020-07-21T04:48:38.000Z
|
adbc/zql/parsers/sqlite.py
|
aleontiev/apg
|
c6a10a9b0a576913c63ed4f093e2a0fa7469af87
|
[
"MIT"
] | null | null | null |
adbc/zql/parsers/sqlite.py
|
aleontiev/apg
|
c6a10a9b0a576913c63ed4f093e2a0fa7469af87
|
[
"MIT"
] | null | null | null |
from .sql import SQLParser
class SqliteParser(SQLParser):
    """Parser for the SQLite SQL dialect.

    Currently adds nothing of its own — all behavior is inherited
    unchanged from :class:`SQLParser`.
    """

    pass
| 13.6
| 30
| 0.764706
| 8
| 68
| 6.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 68
| 4
| 31
| 17
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
22978578538554a5bce017f796cafc82244d3aea
| 58,125
|
py
|
Python
|
parsers/C/CLexer.py
|
philok55/CRDS
|
b7fc3a7f461505d0ba41a7da68da85c3055d98cf
|
[
"Apache-2.0"
] | null | null | null |
parsers/C/CLexer.py
|
philok55/CRDS
|
b7fc3a7f461505d0ba41a7da68da85c3055d98cf
|
[
"Apache-2.0"
] | null | null | null |
parsers/C/CLexer.py
|
philok55/CRDS
|
b7fc3a7f461505d0ba41a7da68da85c3055d98cf
|
[
"Apache-2.0"
] | null | null | null |
# Generated from .\C.g4 by ANTLR 4.9.2
from antlr4 import *
from io import StringIO
import sys
if sys.version_info[1] > 5:
from typing import TextIO
else:
from typing.io import TextIO
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2x")
buf.write("\u054c\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7")
buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r")
buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23")
buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30")
buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36")
buf.write("\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%")
buf.write("\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.")
buf.write("\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64")
buf.write("\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:")
buf.write("\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\t")
buf.write("C\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\t")
buf.write("L\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\4U\t")
buf.write("U\4V\tV\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4")
buf.write("^\t^\4_\t_\4`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4")
buf.write("g\tg\4h\th\4i\ti\4j\tj\4k\tk\4l\tl\4m\tm\4n\tn\4o\to\4")
buf.write("p\tp\4q\tq\4r\tr\4s\ts\4t\tt\4u\tu\4v\tv\4w\tw\4x\tx\4")
buf.write("y\ty\4z\tz\4{\t{\4|\t|\4}\t}\4~\t~\4\177\t\177\4\u0080")
buf.write("\t\u0080\4\u0081\t\u0081\4\u0082\t\u0082\4\u0083\t\u0083")
buf.write("\4\u0084\t\u0084\4\u0085\t\u0085\4\u0086\t\u0086\4\u0087")
buf.write("\t\u0087\4\u0088\t\u0088\4\u0089\t\u0089\4\u008a\t\u008a")
buf.write("\4\u008b\t\u008b\4\u008c\t\u008c\4\u008d\t\u008d\4\u008e")
buf.write("\t\u008e\4\u008f\t\u008f\4\u0090\t\u0090\4\u0091\t\u0091")
buf.write("\4\u0092\t\u0092\4\u0093\t\u0093\4\u0094\t\u0094\4\u0095")
buf.write("\t\u0095\4\u0096\t\u0096\4\u0097\t\u0097\4\u0098\t\u0098")
buf.write("\4\u0099\t\u0099\4\u009a\t\u009a\4\u009b\t\u009b\4\u009c")
buf.write("\t\u009c\4\u009d\t\u009d\3\2\3\2\3\2\3\2\3\2\3\2\3\2\3")
buf.write("\2\3\2\3\2\3\2\3\2\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3\3\3")
buf.write("\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\4\3\4\3\4\3")
buf.write("\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4")
buf.write("\3\4\3\4\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\6\3\6\3\6\3\6\3")
buf.write("\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3\b")
buf.write("\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3")
buf.write("\t\3\t\3\t\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\n\3\n\3\n")
buf.write("\3\n\3\n\3\n\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3")
buf.write("\13\3\13\3\13\3\f\3\f\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r")
buf.write("\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\16\3\16\3\16")
buf.write("\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\17\3\17")
buf.write("\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\20\3\20")
buf.write("\3\20\3\21\3\21\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22")
buf.write("\3\22\3\23\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24")
buf.write("\3\24\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\26")
buf.write("\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3\30")
buf.write("\3\30\3\30\3\30\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\31")
buf.write("\3\32\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\33")
buf.write("\3\33\3\34\3\34\3\34\3\34\3\34\3\34\3\35\3\35\3\35\3\35")
buf.write("\3\36\3\36\3\36\3\36\3\36\3\37\3\37\3\37\3 \3 \3 \3 \3")
buf.write(" \3 \3 \3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3")
buf.write("#\3#\3#\3#\3#\3$\3$\3$\3$\3$\3$\3$\3$\3$\3%\3%\3%\3%\3")
buf.write("%\3%\3%\3&\3&\3&\3&\3&\3&\3\'\3\'\3\'\3\'\3\'\3\'\3\'")
buf.write("\3(\3(\3(\3(\3(\3(\3(\3)\3)\3)\3)\3)\3)\3)\3*\3*\3*\3")
buf.write("*\3*\3*\3*\3+\3+\3+\3+\3+\3+\3+\3,\3,\3,\3,\3,\3,\3,\3")
buf.write(",\3-\3-\3-\3-\3-\3-\3.\3.\3.\3.\3.\3.\3.\3.\3.\3/\3/\3")
buf.write("/\3/\3/\3\60\3\60\3\60\3\60\3\60\3\60\3\60\3\60\3\60\3")
buf.write("\61\3\61\3\61\3\61\3\61\3\61\3\62\3\62\3\62\3\62\3\62")
buf.write("\3\62\3\62\3\62\3\62\3\63\3\63\3\63\3\63\3\63\3\63\3\63")
buf.write("\3\63\3\63\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\65")
buf.write("\3\65\3\65\3\65\3\65\3\65\3\66\3\66\3\66\3\66\3\66\3\66")
buf.write("\3\66\3\66\3\66\3\67\3\67\3\67\3\67\3\67\3\67\3\67\3\67")
buf.write("\3\67\38\38\38\38\38\38\38\38\38\38\38\39\39\39\39\39")
buf.write("\39\39\39\39\39\3:\3:\3:\3:\3:\3:\3:\3:\3:\3:\3:\3:\3")
buf.write(":\3:\3:\3;\3;\3;\3;\3;\3;\3;\3;\3;\3;\3;\3;\3;\3;\3<\3")
buf.write("<\3=\3=\3>\3>\3?\3?\3@\3@\3A\3A\3B\3B\3C\3C\3C\3D\3D\3")
buf.write("E\3E\3E\3F\3F\3F\3G\3G\3G\3H\3H\3I\3I\3I\3J\3J\3K\3K\3")
buf.write("K\3L\3L\3M\3M\3N\3N\3O\3O\3P\3P\3Q\3Q\3Q\3R\3R\3R\3S\3")
buf.write("S\3T\3T\3U\3U\3V\3V\3W\3W\3X\3X\3Y\3Y\3Z\3Z\3[\3[\3[\3")
buf.write("\\\3\\\3\\\3]\3]\3]\3^\3^\3^\3_\3_\3_\3`\3`\3`\3`\3a\3")
buf.write("a\3a\3a\3b\3b\3b\3c\3c\3c\3d\3d\3d\3e\3e\3e\3f\3f\3f\3")
buf.write("g\3g\3g\3h\3h\3i\3i\3i\3i\3j\3j\3j\7j\u038b\nj\fj\16j")
buf.write("\u038e\13j\3k\3k\5k\u0392\nk\3l\3l\3m\3m\3n\3n\3n\3n\3")
buf.write("n\3n\3n\3n\3n\3n\5n\u03a2\nn\3o\3o\3o\3o\3o\3p\3p\3p\5")
buf.write("p\u03ac\np\3q\3q\5q\u03b0\nq\3q\3q\5q\u03b4\nq\3q\3q\5")
buf.write("q\u03b8\nq\3q\5q\u03bb\nq\3r\3r\3r\6r\u03c0\nr\rr\16r")
buf.write("\u03c1\3s\3s\7s\u03c6\ns\fs\16s\u03c9\13s\3t\3t\7t\u03cd")
buf.write("\nt\ft\16t\u03d0\13t\3u\3u\6u\u03d4\nu\ru\16u\u03d5\3")
buf.write("v\3v\3v\3w\3w\3x\3x\3y\3y\3z\3z\5z\u03e3\nz\3z\3z\3z\3")
buf.write("z\3z\5z\u03ea\nz\3z\3z\5z\u03ee\nz\5z\u03f0\nz\3{\3{\3")
buf.write("|\3|\3}\3}\3}\3}\5}\u03fa\n}\3~\3~\5~\u03fe\n~\3\177\3")
buf.write("\177\5\177\u0402\n\177\3\177\5\177\u0405\n\177\3\177\3")
buf.write("\177\3\177\5\177\u040a\n\177\5\177\u040c\n\177\3\u0080")
buf.write("\3\u0080\3\u0080\5\u0080\u0411\n\u0080\3\u0080\3\u0080")
buf.write("\5\u0080\u0415\n\u0080\3\u0081\5\u0081\u0418\n\u0081\3")
buf.write("\u0081\3\u0081\3\u0081\3\u0081\3\u0081\5\u0081\u041f\n")
buf.write("\u0081\3\u0082\3\u0082\5\u0082\u0423\n\u0082\3\u0082\3")
buf.write("\u0082\3\u0083\3\u0083\3\u0084\6\u0084\u042a\n\u0084\r")
buf.write("\u0084\16\u0084\u042b\3\u0085\5\u0085\u042f\n\u0085\3")
buf.write("\u0085\3\u0085\3\u0085\3\u0085\3\u0085\5\u0085\u0436\n")
buf.write("\u0085\3\u0086\3\u0086\5\u0086\u043a\n\u0086\3\u0086\3")
buf.write("\u0086\3\u0087\6\u0087\u043f\n\u0087\r\u0087\16\u0087")
buf.write("\u0440\3\u0088\3\u0088\3\u0089\3\u0089\3\u0089\3\u0089")
buf.write("\3\u0089\3\u0089\3\u0089\3\u0089\3\u0089\3\u0089\3\u0089")
buf.write("\3\u0089\3\u0089\3\u0089\3\u0089\3\u0089\3\u0089\3\u0089")
buf.write("\3\u0089\3\u0089\3\u0089\3\u0089\5\u0089\u045b\n\u0089")
buf.write("\3\u008a\6\u008a\u045e\n\u008a\r\u008a\16\u008a\u045f")
buf.write("\3\u008b\3\u008b\5\u008b\u0464\n\u008b\3\u008c\3\u008c")
buf.write("\3\u008c\3\u008c\5\u008c\u046a\n\u008c\3\u008d\3\u008d")
buf.write("\3\u008d\3\u008e\3\u008e\3\u008e\5\u008e\u0472\n\u008e")
buf.write("\3\u008e\5\u008e\u0475\n\u008e\3\u008f\3\u008f\3\u008f")
buf.write("\3\u008f\6\u008f\u047b\n\u008f\r\u008f\16\u008f\u047c")
buf.write("\3\u0090\5\u0090\u0480\n\u0090\3\u0090\3\u0090\5\u0090")
buf.write("\u0484\n\u0090\3\u0090\3\u0090\3\u0091\3\u0091\3\u0091")
buf.write("\5\u0091\u048b\n\u0091\3\u0092\6\u0092\u048e\n\u0092\r")
buf.write("\u0092\16\u0092\u048f\3\u0093\3\u0093\3\u0093\3\u0093")
buf.write("\3\u0093\3\u0093\3\u0093\5\u0093\u0499\n\u0093\3\u0094")
buf.write("\3\u0094\5\u0094\u049d\n\u0094\3\u0094\3\u0094\3\u0094")
buf.write("\3\u0094\3\u0094\3\u0094\3\u0094\3\u0094\7\u0094\u04a7")
buf.write("\n\u0094\f\u0094\16\u0094\u04aa\13\u0094\3\u0094\3\u0094")
buf.write("\3\u0095\3\u0095\5\u0095\u04b0\n\u0095\3\u0095\3\u0095")
buf.write("\3\u0095\3\u0095\3\u0095\3\u0095\3\u0095\3\u0095\3\u0095")
buf.write("\5\u0095\u04bb\n\u0095\3\u0095\3\u0095\7\u0095\u04bf\n")
buf.write("\u0095\f\u0095\16\u0095\u04c2\13\u0095\3\u0095\3\u0095")
buf.write("\3\u0095\7\u0095\u04c7\n\u0095\f\u0095\16\u0095\u04ca")
buf.write("\13\u0095\3\u0095\5\u0095\u04cd\n\u0095\3\u0095\5\u0095")
buf.write("\u04d0\n\u0095\3\u0095\3\u0095\3\u0095\3\u0095\3\u0096")
buf.write("\3\u0096\3\u0096\3\u0096\3\u0096\7\u0096\u04db\n\u0096")
buf.write("\f\u0096\16\u0096\u04de\13\u0096\3\u0096\3\u0096\7\u0096")
buf.write("\u04e2\n\u0096\f\u0096\16\u0096\u04e5\13\u0096\3\u0096")
buf.write("\3\u0096\3\u0096\3\u0096\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\7\u0097\u04f2\n\u0097\f\u0097")
buf.write("\16\u0097\u04f5\13\u0097\3\u0097\7\u0097\u04f8\n\u0097")
buf.write("\f\u0097\16\u0097\u04fb\13\u0097\3\u0097\3\u0097\3\u0098")
buf.write("\3\u0098\5\u0098\u0501\n\u0098\3\u0098\3\u0098\5\u0098")
buf.write("\u0505\n\u0098\3\u0098\3\u0098\7\u0098\u0509\n\u0098\f")
buf.write("\u0098\16\u0098\u050c\13\u0098\3\u0098\3\u0098\3\u0099")
buf.write("\3\u0099\5\u0099\u0512\n\u0099\3\u0099\3\u0099\3\u0099")
buf.write("\3\u0099\3\u0099\3\u0099\3\u0099\3\u0099\3\u0099\7\u0099")
buf.write("\u051d\n\u0099\f\u0099\16\u0099\u0520\13\u0099\3\u0099")
buf.write("\3\u0099\3\u009a\6\u009a\u0525\n\u009a\r\u009a\16\u009a")
buf.write("\u0526\3\u009a\3\u009a\3\u009b\3\u009b\5\u009b\u052d\n")
buf.write("\u009b\3\u009b\5\u009b\u0530\n\u009b\3\u009b\3\u009b\3")
buf.write("\u009c\3\u009c\3\u009c\3\u009c\7\u009c\u0538\n\u009c\f")
buf.write("\u009c\16\u009c\u053b\13\u009c\3\u009c\3\u009c\3\u009c")
buf.write("\3\u009c\3\u009c\3\u009d\3\u009d\3\u009d\3\u009d\7\u009d")
buf.write("\u0546\n\u009d\f\u009d\16\u009d\u0549\13\u009d\3\u009d")
buf.write("\3\u009d\3\u0539\2\u009e\3\3\5\4\7\5\t\6\13\7\r\b\17\t")
buf.write("\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23")
buf.write("%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67\359\36")
buf.write(";\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63")
buf.write("e\64g\65i\66k\67m8o9q:s;u<w=y>{?}@\177A\u0081B\u0083C")
buf.write("\u0085D\u0087E\u0089F\u008bG\u008dH\u008fI\u0091J\u0093")
buf.write("K\u0095L\u0097M\u0099N\u009bO\u009dP\u009fQ\u00a1R\u00a3")
buf.write("S\u00a5T\u00a7U\u00a9V\u00abW\u00adX\u00afY\u00b1Z\u00b3")
buf.write("[\u00b5\\\u00b7]\u00b9^\u00bb_\u00bd`\u00bfa\u00c1b\u00c3")
buf.write("c\u00c5d\u00c7e\u00c9f\u00cbg\u00cdh\u00cfi\u00d1j\u00d3")
buf.write("k\u00d5\2\u00d7\2\u00d9\2\u00db\2\u00dd\2\u00dfl\u00e1")
buf.write("\2\u00e3\2\u00e5\2\u00e7\2\u00e9\2\u00eb\2\u00ed\2\u00ef")
buf.write("\2\u00f1\2\u00f3\2\u00f5\2\u00f7\2\u00f9\2\u00fb\2\u00fd")
buf.write("\2\u00ff\2\u0101\2\u0103\2\u0105\2\u0107m\u0109\2\u010b")
buf.write("\2\u010d\2\u010f\2\u0111\2\u0113\2\u0115\2\u0117\2\u0119")
buf.write("\2\u011b\2\u011d\2\u011fn\u0121\2\u0123\2\u0125\2\u0127")
buf.write("o\u0129p\u012bq\u012dr\u012fs\u0131t\u0133u\u0135v\u0137")
buf.write("w\u0139x\3\2\31\5\2C\\aac|\3\2\62;\4\2DDdd\3\2\62\63\4")
buf.write("\2ZZzz\3\2\63;\3\2\629\5\2\62;CHch\4\2WWww\4\2NNnn\4\2")
buf.write("GGgg\4\2--//\4\2RRrr\6\2HHNNhhnn\6\2\f\f\17\17))^^\f\2")
buf.write("$$))AA^^cdhhppttvvxx\5\2NNWWww\6\2\f\f\17\17$$^^\5\2\f")
buf.write("\f\17\17%%\4\2\f\f\17\17\3\2}}\3\2\177\177\4\2\13\13\"")
buf.write("\"\2\u0573\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2")
buf.write("\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2")
buf.write("\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2")
buf.write("\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#")
buf.write("\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2")
buf.write("\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65")
buf.write("\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2")
buf.write("\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2")
buf.write("\2\2I\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2")
buf.write("\2\2\2S\3\2\2\2\2U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3")
buf.write("\2\2\2\2]\3\2\2\2\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e")
buf.write("\3\2\2\2\2g\3\2\2\2\2i\3\2\2\2\2k\3\2\2\2\2m\3\2\2\2\2")
buf.write("o\3\2\2\2\2q\3\2\2\2\2s\3\2\2\2\2u\3\2\2\2\2w\3\2\2\2")
buf.write("\2y\3\2\2\2\2{\3\2\2\2\2}\3\2\2\2\2\177\3\2\2\2\2\u0081")
buf.write("\3\2\2\2\2\u0083\3\2\2\2\2\u0085\3\2\2\2\2\u0087\3\2\2")
buf.write("\2\2\u0089\3\2\2\2\2\u008b\3\2\2\2\2\u008d\3\2\2\2\2\u008f")
buf.write("\3\2\2\2\2\u0091\3\2\2\2\2\u0093\3\2\2\2\2\u0095\3\2\2")
buf.write("\2\2\u0097\3\2\2\2\2\u0099\3\2\2\2\2\u009b\3\2\2\2\2\u009d")
buf.write("\3\2\2\2\2\u009f\3\2\2\2\2\u00a1\3\2\2\2\2\u00a3\3\2\2")
buf.write("\2\2\u00a5\3\2\2\2\2\u00a7\3\2\2\2\2\u00a9\3\2\2\2\2\u00ab")
buf.write("\3\2\2\2\2\u00ad\3\2\2\2\2\u00af\3\2\2\2\2\u00b1\3\2\2")
buf.write("\2\2\u00b3\3\2\2\2\2\u00b5\3\2\2\2\2\u00b7\3\2\2\2\2\u00b9")
buf.write("\3\2\2\2\2\u00bb\3\2\2\2\2\u00bd\3\2\2\2\2\u00bf\3\2\2")
buf.write("\2\2\u00c1\3\2\2\2\2\u00c3\3\2\2\2\2\u00c5\3\2\2\2\2\u00c7")
buf.write("\3\2\2\2\2\u00c9\3\2\2\2\2\u00cb\3\2\2\2\2\u00cd\3\2\2")
buf.write("\2\2\u00cf\3\2\2\2\2\u00d1\3\2\2\2\2\u00d3\3\2\2\2\2\u00df")
buf.write("\3\2\2\2\2\u0107\3\2\2\2\2\u011f\3\2\2\2\2\u0127\3\2\2")
buf.write("\2\2\u0129\3\2\2\2\2\u012b\3\2\2\2\2\u012d\3\2\2\2\2\u012f")
buf.write("\3\2\2\2\2\u0131\3\2\2\2\2\u0133\3\2\2\2\2\u0135\3\2\2")
buf.write("\2\2\u0137\3\2\2\2\2\u0139\3\2\2\2\3\u013b\3\2\2\2\5\u0149")
buf.write("\3\2\2\2\7\u015a\3\2\2\2\t\u016d\3\2\2\2\13\u0174\3\2")
buf.write("\2\2\r\u017c\3\2\2\2\17\u0184\3\2\2\2\21\u018f\3\2\2\2")
buf.write("\23\u019a\3\2\2\2\25\u01a4\3\2\2\2\27\u01af\3\2\2\2\31")
buf.write("\u01b5\3\2\2\2\33\u01c3\3\2\2\2\35\u01cb\3\2\2\2\37\u01d8")
buf.write("\3\2\2\2!\u01dd\3\2\2\2#\u01e3\3\2\2\2%\u01e8\3\2\2\2")
buf.write("\'\u01ed\3\2\2\2)\u01f3\3\2\2\2+\u01fc\3\2\2\2-\u0204")
buf.write("\3\2\2\2/\u0207\3\2\2\2\61\u020e\3\2\2\2\63\u0213\3\2")
buf.write("\2\2\65\u0218\3\2\2\2\67\u021f\3\2\2\29\u0225\3\2\2\2")
buf.write(";\u0229\3\2\2\2=\u022e\3\2\2\2?\u0231\3\2\2\2A\u0238\3")
buf.write("\2\2\2C\u023c\3\2\2\2E\u0241\3\2\2\2G\u024a\3\2\2\2I\u0253")
buf.write("\3\2\2\2K\u025a\3\2\2\2M\u0260\3\2\2\2O\u0267\3\2\2\2")
buf.write("Q\u026e\3\2\2\2S\u0275\3\2\2\2U\u027c\3\2\2\2W\u0283\3")
buf.write("\2\2\2Y\u028b\3\2\2\2[\u0291\3\2\2\2]\u029a\3\2\2\2_\u029f")
buf.write("\3\2\2\2a\u02a8\3\2\2\2c\u02ae\3\2\2\2e\u02b7\3\2\2\2")
buf.write("g\u02c0\3\2\2\2i\u02c8\3\2\2\2k\u02ce\3\2\2\2m\u02d7\3")
buf.write("\2\2\2o\u02e0\3\2\2\2q\u02eb\3\2\2\2s\u02f5\3\2\2\2u\u0304")
buf.write("\3\2\2\2w\u0312\3\2\2\2y\u0314\3\2\2\2{\u0316\3\2\2\2")
buf.write("}\u0318\3\2\2\2\177\u031a\3\2\2\2\u0081\u031c\3\2\2\2")
buf.write("\u0083\u031e\3\2\2\2\u0085\u0320\3\2\2\2\u0087\u0323\3")
buf.write("\2\2\2\u0089\u0325\3\2\2\2\u008b\u0328\3\2\2\2\u008d\u032b")
buf.write("\3\2\2\2\u008f\u032e\3\2\2\2\u0091\u0330\3\2\2\2\u0093")
buf.write("\u0333\3\2\2\2\u0095\u0335\3\2\2\2\u0097\u0338\3\2\2\2")
buf.write("\u0099\u033a\3\2\2\2\u009b\u033c\3\2\2\2\u009d\u033e\3")
buf.write("\2\2\2\u009f\u0340\3\2\2\2\u00a1\u0342\3\2\2\2\u00a3\u0345")
buf.write("\3\2\2\2\u00a5\u0348\3\2\2\2\u00a7\u034a\3\2\2\2\u00a9")
buf.write("\u034c\3\2\2\2\u00ab\u034e\3\2\2\2\u00ad\u0350\3\2\2\2")
buf.write("\u00af\u0352\3\2\2\2\u00b1\u0354\3\2\2\2\u00b3\u0356\3")
buf.write("\2\2\2\u00b5\u0358\3\2\2\2\u00b7\u035b\3\2\2\2\u00b9\u035e")
buf.write("\3\2\2\2\u00bb\u0361\3\2\2\2\u00bd\u0364\3\2\2\2\u00bf")
buf.write("\u0367\3\2\2\2\u00c1\u036b\3\2\2\2\u00c3\u036f\3\2\2\2")
buf.write("\u00c5\u0372\3\2\2\2\u00c7\u0375\3\2\2\2\u00c9\u0378\3")
buf.write("\2\2\2\u00cb\u037b\3\2\2\2\u00cd\u037e\3\2\2\2\u00cf\u0381")
buf.write("\3\2\2\2\u00d1\u0383\3\2\2\2\u00d3\u0387\3\2\2\2\u00d5")
buf.write("\u0391\3\2\2\2\u00d7\u0393\3\2\2\2\u00d9\u0395\3\2\2\2")
buf.write("\u00db\u03a1\3\2\2\2\u00dd\u03a3\3\2\2\2\u00df\u03ab\3")
buf.write("\2\2\2\u00e1\u03ba\3\2\2\2\u00e3\u03bc\3\2\2\2\u00e5\u03c3")
buf.write("\3\2\2\2\u00e7\u03ca\3\2\2\2\u00e9\u03d1\3\2\2\2\u00eb")
buf.write("\u03d7\3\2\2\2\u00ed\u03da\3\2\2\2\u00ef\u03dc\3\2\2\2")
buf.write("\u00f1\u03de\3\2\2\2\u00f3\u03ef\3\2\2\2\u00f5\u03f1\3")
buf.write("\2\2\2\u00f7\u03f3\3\2\2\2\u00f9\u03f9\3\2\2\2\u00fb\u03fd")
buf.write("\3\2\2\2\u00fd\u040b\3\2\2\2\u00ff\u040d\3\2\2\2\u0101")
buf.write("\u041e\3\2\2\2\u0103\u0420\3\2\2\2\u0105\u0426\3\2\2\2")
buf.write("\u0107\u0429\3\2\2\2\u0109\u0435\3\2\2\2\u010b\u0437\3")
buf.write("\2\2\2\u010d\u043e\3\2\2\2\u010f\u0442\3\2\2\2\u0111\u045a")
buf.write("\3\2\2\2\u0113\u045d\3\2\2\2\u0115\u0463\3\2\2\2\u0117")
buf.write("\u0469\3\2\2\2\u0119\u046b\3\2\2\2\u011b\u046e\3\2\2\2")
buf.write("\u011d\u0476\3\2\2\2\u011f\u047f\3\2\2\2\u0121\u048a\3")
buf.write("\2\2\2\u0123\u048d\3\2\2\2\u0125\u0498\3\2\2\2\u0127\u049a")
buf.write("\3\2\2\2\u0129\u04ad\3\2\2\2\u012b\u04d5\3\2\2\2\u012d")
buf.write("\u04ea\3\2\2\2\u012f\u04fe\3\2\2\2\u0131\u050f\3\2\2\2")
buf.write("\u0133\u0524\3\2\2\2\u0135\u052f\3\2\2\2\u0137\u0533\3")
buf.write("\2\2\2\u0139\u0541\3\2\2\2\u013b\u013c\7a\2\2\u013c\u013d")
buf.write("\7a\2\2\u013d\u013e\7g\2\2\u013e\u013f\7z\2\2\u013f\u0140")
buf.write("\7v\2\2\u0140\u0141\7g\2\2\u0141\u0142\7p\2\2\u0142\u0143")
buf.write("\7u\2\2\u0143\u0144\7k\2\2\u0144\u0145\7q\2\2\u0145\u0146")
buf.write("\7p\2\2\u0146\u0147\7a\2\2\u0147\u0148\7a\2\2\u0148\4")
buf.write("\3\2\2\2\u0149\u014a\7a\2\2\u014a\u014b\7a\2\2\u014b\u014c")
buf.write("\7d\2\2\u014c\u014d\7w\2\2\u014d\u014e\7k\2\2\u014e\u014f")
buf.write("\7n\2\2\u014f\u0150\7v\2\2\u0150\u0151\7k\2\2\u0151\u0152")
buf.write("\7p\2\2\u0152\u0153\7a\2\2\u0153\u0154\7x\2\2\u0154\u0155")
buf.write("\7c\2\2\u0155\u0156\7a\2\2\u0156\u0157\7c\2\2\u0157\u0158")
buf.write("\7t\2\2\u0158\u0159\7i\2\2\u0159\6\3\2\2\2\u015a\u015b")
buf.write("\7a\2\2\u015b\u015c\7a\2\2\u015c\u015d\7d\2\2\u015d\u015e")
buf.write("\7w\2\2\u015e\u015f\7k\2\2\u015f\u0160\7n\2\2\u0160\u0161")
buf.write("\7v\2\2\u0161\u0162\7k\2\2\u0162\u0163\7p\2\2\u0163\u0164")
buf.write("\7a\2\2\u0164\u0165\7q\2\2\u0165\u0166\7h\2\2\u0166\u0167")
buf.write("\7h\2\2\u0167\u0168\7u\2\2\u0168\u0169\7g\2\2\u0169\u016a")
buf.write("\7v\2\2\u016a\u016b\7q\2\2\u016b\u016c\7h\2\2\u016c\b")
buf.write("\3\2\2\2\u016d\u016e\7a\2\2\u016e\u016f\7a\2\2\u016f\u0170")
buf.write("\7o\2\2\u0170\u0171\7\63\2\2\u0171\u0172\7\64\2\2\u0172")
buf.write("\u0173\7:\2\2\u0173\n\3\2\2\2\u0174\u0175\7a\2\2\u0175")
buf.write("\u0176\7a\2\2\u0176\u0177\7o\2\2\u0177\u0178\7\63\2\2")
buf.write("\u0178\u0179\7\64\2\2\u0179\u017a\7:\2\2\u017a\u017b\7")
buf.write("f\2\2\u017b\f\3\2\2\2\u017c\u017d\7a\2\2\u017d\u017e\7")
buf.write("a\2\2\u017e\u017f\7o\2\2\u017f\u0180\7\63\2\2\u0180\u0181")
buf.write("\7\64\2\2\u0181\u0182\7:\2\2\u0182\u0183\7k\2\2\u0183")
buf.write("\16\3\2\2\2\u0184\u0185\7a\2\2\u0185\u0186\7a\2\2\u0186")
buf.write("\u0187\7v\2\2\u0187\u0188\7{\2\2\u0188\u0189\7r\2\2\u0189")
buf.write("\u018a\7g\2\2\u018a\u018b\7q\2\2\u018b\u018c\7h\2\2\u018c")
buf.write("\u018d\7a\2\2\u018d\u018e\7a\2\2\u018e\20\3\2\2\2\u018f")
buf.write("\u0190\7a\2\2\u0190\u0191\7a\2\2\u0191\u0192\7k\2\2\u0192")
buf.write("\u0193\7p\2\2\u0193\u0194\7n\2\2\u0194\u0195\7k\2\2\u0195")
buf.write("\u0196\7p\2\2\u0196\u0197\7g\2\2\u0197\u0198\7a\2\2\u0198")
buf.write("\u0199\7a\2\2\u0199\22\3\2\2\2\u019a\u019b\7a\2\2\u019b")
buf.write("\u019c\7a\2\2\u019c\u019d\7u\2\2\u019d\u019e\7v\2\2\u019e")
buf.write("\u019f\7f\2\2\u019f\u01a0\7e\2\2\u01a0\u01a1\7c\2\2\u01a1")
buf.write("\u01a2\7n\2\2\u01a2\u01a3\7n\2\2\u01a3\24\3\2\2\2\u01a4")
buf.write("\u01a5\7a\2\2\u01a5\u01a6\7a\2\2\u01a6\u01a7\7f\2\2\u01a7")
buf.write("\u01a8\7g\2\2\u01a8\u01a9\7e\2\2\u01a9\u01aa\7n\2\2\u01aa")
buf.write("\u01ab\7u\2\2\u01ab\u01ac\7r\2\2\u01ac\u01ad\7g\2\2\u01ad")
buf.write("\u01ae\7e\2\2\u01ae\26\3\2\2\2\u01af\u01b0\7a\2\2\u01b0")
buf.write("\u01b1\7a\2\2\u01b1\u01b2\7c\2\2\u01b2\u01b3\7u\2\2\u01b3")
buf.write("\u01b4\7o\2\2\u01b4\30\3\2\2\2\u01b5\u01b6\7a\2\2\u01b6")
buf.write("\u01b7\7a\2\2\u01b7\u01b8\7c\2\2\u01b8\u01b9\7v\2\2\u01b9")
buf.write("\u01ba\7v\2\2\u01ba\u01bb\7t\2\2\u01bb\u01bc\7k\2\2\u01bc")
buf.write("\u01bd\7d\2\2\u01bd\u01be\7w\2\2\u01be\u01bf\7v\2\2\u01bf")
buf.write("\u01c0\7g\2\2\u01c0\u01c1\7a\2\2\u01c1\u01c2\7a\2\2\u01c2")
buf.write("\32\3\2\2\2\u01c3\u01c4\7a\2\2\u01c4\u01c5\7a\2\2\u01c5")
buf.write("\u01c6\7c\2\2\u01c6\u01c7\7u\2\2\u01c7\u01c8\7o\2\2\u01c8")
buf.write("\u01c9\7a\2\2\u01c9\u01ca\7a\2\2\u01ca\34\3\2\2\2\u01cb")
buf.write("\u01cc\7a\2\2\u01cc\u01cd\7a\2\2\u01cd\u01ce\7x\2\2\u01ce")
buf.write("\u01cf\7q\2\2\u01cf\u01d0\7n\2\2\u01d0\u01d1\7c\2\2\u01d1")
buf.write("\u01d2\7v\2\2\u01d2\u01d3\7k\2\2\u01d3\u01d4\7n\2\2\u01d4")
buf.write("\u01d5\7g\2\2\u01d5\u01d6\7a\2\2\u01d6\u01d7\7a\2\2\u01d7")
buf.write("\36\3\2\2\2\u01d8\u01d9\7c\2\2\u01d9\u01da\7w\2\2\u01da")
buf.write("\u01db\7v\2\2\u01db\u01dc\7q\2\2\u01dc \3\2\2\2\u01dd")
buf.write("\u01de\7d\2\2\u01de\u01df\7t\2\2\u01df\u01e0\7g\2\2\u01e0")
buf.write("\u01e1\7c\2\2\u01e1\u01e2\7m\2\2\u01e2\"\3\2\2\2\u01e3")
buf.write("\u01e4\7e\2\2\u01e4\u01e5\7c\2\2\u01e5\u01e6\7u\2\2\u01e6")
buf.write("\u01e7\7g\2\2\u01e7$\3\2\2\2\u01e8\u01e9\7e\2\2\u01e9")
buf.write("\u01ea\7j\2\2\u01ea\u01eb\7c\2\2\u01eb\u01ec\7t\2\2\u01ec")
buf.write("&\3\2\2\2\u01ed\u01ee\7e\2\2\u01ee\u01ef\7q\2\2\u01ef")
buf.write("\u01f0\7p\2\2\u01f0\u01f1\7u\2\2\u01f1\u01f2\7v\2\2\u01f2")
buf.write("(\3\2\2\2\u01f3\u01f4\7e\2\2\u01f4\u01f5\7q\2\2\u01f5")
buf.write("\u01f6\7p\2\2\u01f6\u01f7\7v\2\2\u01f7\u01f8\7k\2\2\u01f8")
buf.write("\u01f9\7p\2\2\u01f9\u01fa\7w\2\2\u01fa\u01fb\7g\2\2\u01fb")
buf.write("*\3\2\2\2\u01fc\u01fd\7f\2\2\u01fd\u01fe\7g\2\2\u01fe")
buf.write("\u01ff\7h\2\2\u01ff\u0200\7c\2\2\u0200\u0201\7w\2\2\u0201")
buf.write("\u0202\7n\2\2\u0202\u0203\7v\2\2\u0203,\3\2\2\2\u0204")
buf.write("\u0205\7f\2\2\u0205\u0206\7q\2\2\u0206.\3\2\2\2\u0207")
buf.write("\u0208\7f\2\2\u0208\u0209\7q\2\2\u0209\u020a\7w\2\2\u020a")
buf.write("\u020b\7d\2\2\u020b\u020c\7n\2\2\u020c\u020d\7g\2\2\u020d")
buf.write("\60\3\2\2\2\u020e\u020f\7g\2\2\u020f\u0210\7n\2\2\u0210")
buf.write("\u0211\7u\2\2\u0211\u0212\7g\2\2\u0212\62\3\2\2\2\u0213")
buf.write("\u0214\7g\2\2\u0214\u0215\7p\2\2\u0215\u0216\7w\2\2\u0216")
buf.write("\u0217\7o\2\2\u0217\64\3\2\2\2\u0218\u0219\7g\2\2\u0219")
buf.write("\u021a\7z\2\2\u021a\u021b\7v\2\2\u021b\u021c\7g\2\2\u021c")
buf.write("\u021d\7t\2\2\u021d\u021e\7p\2\2\u021e\66\3\2\2\2\u021f")
buf.write("\u0220\7h\2\2\u0220\u0221\7n\2\2\u0221\u0222\7q\2\2\u0222")
buf.write("\u0223\7c\2\2\u0223\u0224\7v\2\2\u02248\3\2\2\2\u0225")
buf.write("\u0226\7h\2\2\u0226\u0227\7q\2\2\u0227\u0228\7t\2\2\u0228")
buf.write(":\3\2\2\2\u0229\u022a\7i\2\2\u022a\u022b\7q\2\2\u022b")
buf.write("\u022c\7v\2\2\u022c\u022d\7q\2\2\u022d<\3\2\2\2\u022e")
buf.write("\u022f\7k\2\2\u022f\u0230\7h\2\2\u0230>\3\2\2\2\u0231")
buf.write("\u0232\7k\2\2\u0232\u0233\7p\2\2\u0233\u0234\7n\2\2\u0234")
buf.write("\u0235\7k\2\2\u0235\u0236\7p\2\2\u0236\u0237\7g\2\2\u0237")
buf.write("@\3\2\2\2\u0238\u0239\7k\2\2\u0239\u023a\7p\2\2\u023a")
buf.write("\u023b\7v\2\2\u023bB\3\2\2\2\u023c\u023d\7n\2\2\u023d")
buf.write("\u023e\7q\2\2\u023e\u023f\7p\2\2\u023f\u0240\7i\2\2\u0240")
buf.write("D\3\2\2\2\u0241\u0242\7t\2\2\u0242\u0243\7g\2\2\u0243")
buf.write("\u0244\7i\2\2\u0244\u0245\7k\2\2\u0245\u0246\7u\2\2\u0246")
buf.write("\u0247\7v\2\2\u0247\u0248\7g\2\2\u0248\u0249\7t\2\2\u0249")
buf.write("F\3\2\2\2\u024a\u024b\7t\2\2\u024b\u024c\7g\2\2\u024c")
buf.write("\u024d\7u\2\2\u024d\u024e\7v\2\2\u024e\u024f\7t\2\2\u024f")
buf.write("\u0250\7k\2\2\u0250\u0251\7e\2\2\u0251\u0252\7v\2\2\u0252")
buf.write("H\3\2\2\2\u0253\u0254\7t\2\2\u0254\u0255\7g\2\2\u0255")
buf.write("\u0256\7v\2\2\u0256\u0257\7w\2\2\u0257\u0258\7t\2\2\u0258")
buf.write("\u0259\7p\2\2\u0259J\3\2\2\2\u025a\u025b\7u\2\2\u025b")
buf.write("\u025c\7j\2\2\u025c\u025d\7q\2\2\u025d\u025e\7t\2\2\u025e")
buf.write("\u025f\7v\2\2\u025fL\3\2\2\2\u0260\u0261\7u\2\2\u0261")
buf.write("\u0262\7k\2\2\u0262\u0263\7i\2\2\u0263\u0264\7p\2\2\u0264")
buf.write("\u0265\7g\2\2\u0265\u0266\7f\2\2\u0266N\3\2\2\2\u0267")
buf.write("\u0268\7u\2\2\u0268\u0269\7k\2\2\u0269\u026a\7|\2\2\u026a")
buf.write("\u026b\7g\2\2\u026b\u026c\7q\2\2\u026c\u026d\7h\2\2\u026d")
buf.write("P\3\2\2\2\u026e\u026f\7u\2\2\u026f\u0270\7v\2\2\u0270")
buf.write("\u0271\7c\2\2\u0271\u0272\7v\2\2\u0272\u0273\7k\2\2\u0273")
buf.write("\u0274\7e\2\2\u0274R\3\2\2\2\u0275\u0276\7u\2\2\u0276")
buf.write("\u0277\7v\2\2\u0277\u0278\7t\2\2\u0278\u0279\7w\2\2\u0279")
buf.write("\u027a\7e\2\2\u027a\u027b\7v\2\2\u027bT\3\2\2\2\u027c")
buf.write("\u027d\7u\2\2\u027d\u027e\7y\2\2\u027e\u027f\7k\2\2\u027f")
buf.write("\u0280\7v\2\2\u0280\u0281\7e\2\2\u0281\u0282\7j\2\2\u0282")
buf.write("V\3\2\2\2\u0283\u0284\7v\2\2\u0284\u0285\7{\2\2\u0285")
buf.write("\u0286\7r\2\2\u0286\u0287\7g\2\2\u0287\u0288\7f\2\2\u0288")
buf.write("\u0289\7g\2\2\u0289\u028a\7h\2\2\u028aX\3\2\2\2\u028b")
buf.write("\u028c\7w\2\2\u028c\u028d\7p\2\2\u028d\u028e\7k\2\2\u028e")
buf.write("\u028f\7q\2\2\u028f\u0290\7p\2\2\u0290Z\3\2\2\2\u0291")
buf.write("\u0292\7w\2\2\u0292\u0293\7p\2\2\u0293\u0294\7u\2\2\u0294")
buf.write("\u0295\7k\2\2\u0295\u0296\7i\2\2\u0296\u0297\7p\2\2\u0297")
buf.write("\u0298\7g\2\2\u0298\u0299\7f\2\2\u0299\\\3\2\2\2\u029a")
buf.write("\u029b\7x\2\2\u029b\u029c\7q\2\2\u029c\u029d\7k\2\2\u029d")
buf.write("\u029e\7f\2\2\u029e^\3\2\2\2\u029f\u02a0\7x\2\2\u02a0")
buf.write("\u02a1\7q\2\2\u02a1\u02a2\7n\2\2\u02a2\u02a3\7c\2\2\u02a3")
buf.write("\u02a4\7v\2\2\u02a4\u02a5\7k\2\2\u02a5\u02a6\7n\2\2\u02a6")
buf.write("\u02a7\7g\2\2\u02a7`\3\2\2\2\u02a8\u02a9\7y\2\2\u02a9")
buf.write("\u02aa\7j\2\2\u02aa\u02ab\7k\2\2\u02ab\u02ac\7n\2\2\u02ac")
buf.write("\u02ad\7g\2\2\u02adb\3\2\2\2\u02ae\u02af\7a\2\2\u02af")
buf.write("\u02b0\7C\2\2\u02b0\u02b1\7n\2\2\u02b1\u02b2\7k\2\2\u02b2")
buf.write("\u02b3\7i\2\2\u02b3\u02b4\7p\2\2\u02b4\u02b5\7c\2\2\u02b5")
buf.write("\u02b6\7u\2\2\u02b6d\3\2\2\2\u02b7\u02b8\7a\2\2\u02b8")
buf.write("\u02b9\7C\2\2\u02b9\u02ba\7n\2\2\u02ba\u02bb\7k\2\2\u02bb")
buf.write("\u02bc\7i\2\2\u02bc\u02bd\7p\2\2\u02bd\u02be\7q\2\2\u02be")
buf.write("\u02bf\7h\2\2\u02bff\3\2\2\2\u02c0\u02c1\7a\2\2\u02c1")
buf.write("\u02c2\7C\2\2\u02c2\u02c3\7v\2\2\u02c3\u02c4\7q\2\2\u02c4")
buf.write("\u02c5\7o\2\2\u02c5\u02c6\7k\2\2\u02c6\u02c7\7e\2\2\u02c7")
buf.write("h\3\2\2\2\u02c8\u02c9\7a\2\2\u02c9\u02ca\7D\2\2\u02ca")
buf.write("\u02cb\7q\2\2\u02cb\u02cc\7q\2\2\u02cc\u02cd\7n\2\2\u02cd")
buf.write("j\3\2\2\2\u02ce\u02cf\7a\2\2\u02cf\u02d0\7E\2\2\u02d0")
buf.write("\u02d1\7q\2\2\u02d1\u02d2\7o\2\2\u02d2\u02d3\7r\2\2\u02d3")
buf.write("\u02d4\7n\2\2\u02d4\u02d5\7g\2\2\u02d5\u02d6\7z\2\2\u02d6")
buf.write("l\3\2\2\2\u02d7\u02d8\7a\2\2\u02d8\u02d9\7I\2\2\u02d9")
buf.write("\u02da\7g\2\2\u02da\u02db\7p\2\2\u02db\u02dc\7g\2\2\u02dc")
buf.write("\u02dd\7t\2\2\u02dd\u02de\7k\2\2\u02de\u02df\7e\2\2\u02df")
buf.write("n\3\2\2\2\u02e0\u02e1\7a\2\2\u02e1\u02e2\7K\2\2\u02e2")
buf.write("\u02e3\7o\2\2\u02e3\u02e4\7c\2\2\u02e4\u02e5\7i\2\2\u02e5")
buf.write("\u02e6\7k\2\2\u02e6\u02e7\7p\2\2\u02e7\u02e8\7c\2\2\u02e8")
buf.write("\u02e9\7t\2\2\u02e9\u02ea\7{\2\2\u02eap\3\2\2\2\u02eb")
buf.write("\u02ec\7a\2\2\u02ec\u02ed\7P\2\2\u02ed\u02ee\7q\2\2\u02ee")
buf.write("\u02ef\7t\2\2\u02ef\u02f0\7g\2\2\u02f0\u02f1\7v\2\2\u02f1")
buf.write("\u02f2\7w\2\2\u02f2\u02f3\7t\2\2\u02f3\u02f4\7p\2\2\u02f4")
buf.write("r\3\2\2\2\u02f5\u02f6\7a\2\2\u02f6\u02f7\7U\2\2\u02f7")
buf.write("\u02f8\7v\2\2\u02f8\u02f9\7c\2\2\u02f9\u02fa\7v\2\2\u02fa")
buf.write("\u02fb\7k\2\2\u02fb\u02fc\7e\2\2\u02fc\u02fd\7a\2\2\u02fd")
buf.write("\u02fe\7c\2\2\u02fe\u02ff\7u\2\2\u02ff\u0300\7u\2\2\u0300")
buf.write("\u0301\7g\2\2\u0301\u0302\7t\2\2\u0302\u0303\7v\2\2\u0303")
buf.write("t\3\2\2\2\u0304\u0305\7a\2\2\u0305\u0306\7V\2\2\u0306")
buf.write("\u0307\7j\2\2\u0307\u0308\7t\2\2\u0308\u0309\7g\2\2\u0309")
buf.write("\u030a\7c\2\2\u030a\u030b\7f\2\2\u030b\u030c\7a\2\2\u030c")
buf.write("\u030d\7n\2\2\u030d\u030e\7q\2\2\u030e\u030f\7e\2\2\u030f")
buf.write("\u0310\7c\2\2\u0310\u0311\7n\2\2\u0311v\3\2\2\2\u0312")
buf.write("\u0313\7*\2\2\u0313x\3\2\2\2\u0314\u0315\7+\2\2\u0315")
buf.write("z\3\2\2\2\u0316\u0317\7]\2\2\u0317|\3\2\2\2\u0318\u0319")
buf.write("\7_\2\2\u0319~\3\2\2\2\u031a\u031b\7}\2\2\u031b\u0080")
buf.write("\3\2\2\2\u031c\u031d\7\177\2\2\u031d\u0082\3\2\2\2\u031e")
buf.write("\u031f\7>\2\2\u031f\u0084\3\2\2\2\u0320\u0321\7>\2\2\u0321")
buf.write("\u0322\7?\2\2\u0322\u0086\3\2\2\2\u0323\u0324\7@\2\2\u0324")
buf.write("\u0088\3\2\2\2\u0325\u0326\7@\2\2\u0326\u0327\7?\2\2\u0327")
buf.write("\u008a\3\2\2\2\u0328\u0329\7>\2\2\u0329\u032a\7>\2\2\u032a")
buf.write("\u008c\3\2\2\2\u032b\u032c\7@\2\2\u032c\u032d\7@\2\2\u032d")
buf.write("\u008e\3\2\2\2\u032e\u032f\7-\2\2\u032f\u0090\3\2\2\2")
buf.write("\u0330\u0331\7-\2\2\u0331\u0332\7-\2\2\u0332\u0092\3\2")
buf.write("\2\2\u0333\u0334\7/\2\2\u0334\u0094\3\2\2\2\u0335\u0336")
buf.write("\7/\2\2\u0336\u0337\7/\2\2\u0337\u0096\3\2\2\2\u0338\u0339")
buf.write("\7,\2\2\u0339\u0098\3\2\2\2\u033a\u033b\7\61\2\2\u033b")
buf.write("\u009a\3\2\2\2\u033c\u033d\7\'\2\2\u033d\u009c\3\2\2\2")
buf.write("\u033e\u033f\7(\2\2\u033f\u009e\3\2\2\2\u0340\u0341\7")
buf.write("~\2\2\u0341\u00a0\3\2\2\2\u0342\u0343\7(\2\2\u0343\u0344")
buf.write("\7(\2\2\u0344\u00a2\3\2\2\2\u0345\u0346\7~\2\2\u0346\u0347")
buf.write("\7~\2\2\u0347\u00a4\3\2\2\2\u0348\u0349\7`\2\2\u0349\u00a6")
buf.write("\3\2\2\2\u034a\u034b\7#\2\2\u034b\u00a8\3\2\2\2\u034c")
buf.write("\u034d\7\u0080\2\2\u034d\u00aa\3\2\2\2\u034e\u034f\7A")
buf.write("\2\2\u034f\u00ac\3\2\2\2\u0350\u0351\7<\2\2\u0351\u00ae")
buf.write("\3\2\2\2\u0352\u0353\7=\2\2\u0353\u00b0\3\2\2\2\u0354")
buf.write("\u0355\7.\2\2\u0355\u00b2\3\2\2\2\u0356\u0357\7?\2\2\u0357")
buf.write("\u00b4\3\2\2\2\u0358\u0359\7,\2\2\u0359\u035a\7?\2\2\u035a")
buf.write("\u00b6\3\2\2\2\u035b\u035c\7\61\2\2\u035c\u035d\7?\2\2")
buf.write("\u035d\u00b8\3\2\2\2\u035e\u035f\7\'\2\2\u035f\u0360\7")
buf.write("?\2\2\u0360\u00ba\3\2\2\2\u0361\u0362\7-\2\2\u0362\u0363")
buf.write("\7?\2\2\u0363\u00bc\3\2\2\2\u0364\u0365\7/\2\2\u0365\u0366")
buf.write("\7?\2\2\u0366\u00be\3\2\2\2\u0367\u0368\7>\2\2\u0368\u0369")
buf.write("\7>\2\2\u0369\u036a\7?\2\2\u036a\u00c0\3\2\2\2\u036b\u036c")
buf.write("\7@\2\2\u036c\u036d\7@\2\2\u036d\u036e\7?\2\2\u036e\u00c2")
buf.write("\3\2\2\2\u036f\u0370\7(\2\2\u0370\u0371\7?\2\2\u0371\u00c4")
buf.write("\3\2\2\2\u0372\u0373\7`\2\2\u0373\u0374\7?\2\2\u0374\u00c6")
buf.write("\3\2\2\2\u0375\u0376\7~\2\2\u0376\u0377\7?\2\2\u0377\u00c8")
buf.write("\3\2\2\2\u0378\u0379\7?\2\2\u0379\u037a\7?\2\2\u037a\u00ca")
buf.write("\3\2\2\2\u037b\u037c\7#\2\2\u037c\u037d\7?\2\2\u037d\u00cc")
buf.write("\3\2\2\2\u037e\u037f\7/\2\2\u037f\u0380\7@\2\2\u0380\u00ce")
buf.write("\3\2\2\2\u0381\u0382\7\60\2\2\u0382\u00d0\3\2\2\2\u0383")
buf.write("\u0384\7\60\2\2\u0384\u0385\7\60\2\2\u0385\u0386\7\60")
buf.write("\2\2\u0386\u00d2\3\2\2\2\u0387\u038c\5\u00d5k\2\u0388")
buf.write("\u038b\5\u00d5k\2\u0389\u038b\5\u00d9m\2\u038a\u0388\3")
buf.write("\2\2\2\u038a\u0389\3\2\2\2\u038b\u038e\3\2\2\2\u038c\u038a")
buf.write("\3\2\2\2\u038c\u038d\3\2\2\2\u038d\u00d4\3\2\2\2\u038e")
buf.write("\u038c\3\2\2\2\u038f\u0392\5\u00d7l\2\u0390\u0392\5\u00db")
buf.write("n\2\u0391\u038f\3\2\2\2\u0391\u0390\3\2\2\2\u0392\u00d6")
buf.write("\3\2\2\2\u0393\u0394\t\2\2\2\u0394\u00d8\3\2\2\2\u0395")
buf.write("\u0396\t\3\2\2\u0396\u00da\3\2\2\2\u0397\u0398\7^\2\2")
buf.write("\u0398\u0399\7w\2\2\u0399\u039a\3\2\2\2\u039a\u03a2\5")
buf.write("\u00ddo\2\u039b\u039c\7^\2\2\u039c\u039d\7W\2\2\u039d")
buf.write("\u039e\3\2\2\2\u039e\u039f\5\u00ddo\2\u039f\u03a0\5\u00dd")
buf.write("o\2\u03a0\u03a2\3\2\2\2\u03a1\u0397\3\2\2\2\u03a1\u039b")
buf.write("\3\2\2\2\u03a2\u00dc\3\2\2\2\u03a3\u03a4\5\u00f1y\2\u03a4")
buf.write("\u03a5\5\u00f1y\2\u03a5\u03a6\5\u00f1y\2\u03a6\u03a7\5")
buf.write("\u00f1y\2\u03a7\u00de\3\2\2\2\u03a8\u03ac\5\u00e1q\2\u03a9")
buf.write("\u03ac\5\u00fb~\2\u03aa\u03ac\5\u0111\u0089\2\u03ab\u03a8")
buf.write("\3\2\2\2\u03ab\u03a9\3\2\2\2\u03ab\u03aa\3\2\2\2\u03ac")
buf.write("\u00e0\3\2\2\2\u03ad\u03af\5\u00e5s\2\u03ae\u03b0\5\u00f3")
buf.write("z\2\u03af\u03ae\3\2\2\2\u03af\u03b0\3\2\2\2\u03b0\u03bb")
buf.write("\3\2\2\2\u03b1\u03b3\5\u00e7t\2\u03b2\u03b4\5\u00f3z\2")
buf.write("\u03b3\u03b2\3\2\2\2\u03b3\u03b4\3\2\2\2\u03b4\u03bb\3")
buf.write("\2\2\2\u03b5\u03b7\5\u00e9u\2\u03b6\u03b8\5\u00f3z\2\u03b7")
buf.write("\u03b6\3\2\2\2\u03b7\u03b8\3\2\2\2\u03b8\u03bb\3\2\2\2")
buf.write("\u03b9\u03bb\5\u00e3r\2\u03ba\u03ad\3\2\2\2\u03ba\u03b1")
buf.write("\3\2\2\2\u03ba\u03b5\3\2\2\2\u03ba\u03b9\3\2\2\2\u03bb")
buf.write("\u00e2\3\2\2\2\u03bc\u03bd\7\62\2\2\u03bd\u03bf\t\4\2")
buf.write("\2\u03be\u03c0\t\5\2\2\u03bf\u03be\3\2\2\2\u03c0\u03c1")
buf.write("\3\2\2\2\u03c1\u03bf\3\2\2\2\u03c1\u03c2\3\2\2\2\u03c2")
buf.write("\u00e4\3\2\2\2\u03c3\u03c7\5\u00edw\2\u03c4\u03c6\5\u00d9")
buf.write("m\2\u03c5\u03c4\3\2\2\2\u03c6\u03c9\3\2\2\2\u03c7\u03c5")
buf.write("\3\2\2\2\u03c7\u03c8\3\2\2\2\u03c8\u00e6\3\2\2\2\u03c9")
buf.write("\u03c7\3\2\2\2\u03ca\u03ce\7\62\2\2\u03cb\u03cd\5\u00ef")
buf.write("x\2\u03cc\u03cb\3\2\2\2\u03cd\u03d0\3\2\2\2\u03ce\u03cc")
buf.write("\3\2\2\2\u03ce\u03cf\3\2\2\2\u03cf\u00e8\3\2\2\2\u03d0")
buf.write("\u03ce\3\2\2\2\u03d1\u03d3\5\u00ebv\2\u03d2\u03d4\5\u00f1")
buf.write("y\2\u03d3\u03d2\3\2\2\2\u03d4\u03d5\3\2\2\2\u03d5\u03d3")
buf.write("\3\2\2\2\u03d5\u03d6\3\2\2\2\u03d6\u00ea\3\2\2\2\u03d7")
buf.write("\u03d8\7\62\2\2\u03d8\u03d9\t\6\2\2\u03d9\u00ec\3\2\2")
buf.write("\2\u03da\u03db\t\7\2\2\u03db\u00ee\3\2\2\2\u03dc\u03dd")
buf.write("\t\b\2\2\u03dd\u00f0\3\2\2\2\u03de\u03df\t\t\2\2\u03df")
buf.write("\u00f2\3\2\2\2\u03e0\u03e2\5\u00f5{\2\u03e1\u03e3\5\u00f7")
buf.write("|\2\u03e2\u03e1\3\2\2\2\u03e2\u03e3\3\2\2\2\u03e3\u03f0")
buf.write("\3\2\2\2\u03e4\u03e5\5\u00f5{\2\u03e5\u03e6\5\u00f9}\2")
buf.write("\u03e6\u03f0\3\2\2\2\u03e7\u03e9\5\u00f7|\2\u03e8\u03ea")
buf.write("\5\u00f5{\2\u03e9\u03e8\3\2\2\2\u03e9\u03ea\3\2\2\2\u03ea")
buf.write("\u03f0\3\2\2\2\u03eb\u03ed\5\u00f9}\2\u03ec\u03ee\5\u00f5")
buf.write("{\2\u03ed\u03ec\3\2\2\2\u03ed\u03ee\3\2\2\2\u03ee\u03f0")
buf.write("\3\2\2\2\u03ef\u03e0\3\2\2\2\u03ef\u03e4\3\2\2\2\u03ef")
buf.write("\u03e7\3\2\2\2\u03ef\u03eb\3\2\2\2\u03f0\u00f4\3\2\2\2")
buf.write("\u03f1\u03f2\t\n\2\2\u03f2\u00f6\3\2\2\2\u03f3\u03f4\t")
buf.write("\13\2\2\u03f4\u00f8\3\2\2\2\u03f5\u03f6\7n\2\2\u03f6\u03fa")
buf.write("\7n\2\2\u03f7\u03f8\7N\2\2\u03f8\u03fa\7N\2\2\u03f9\u03f5")
buf.write("\3\2\2\2\u03f9\u03f7\3\2\2\2\u03fa\u00fa\3\2\2\2\u03fb")
buf.write("\u03fe\5\u00fd\177\2\u03fc\u03fe\5\u00ff\u0080\2\u03fd")
buf.write("\u03fb\3\2\2\2\u03fd\u03fc\3\2\2\2\u03fe\u00fc\3\2\2\2")
buf.write("\u03ff\u0401\5\u0101\u0081\2\u0400\u0402\5\u0103\u0082")
buf.write("\2\u0401\u0400\3\2\2\2\u0401\u0402\3\2\2\2\u0402\u0404")
buf.write("\3\2\2\2\u0403\u0405\5\u010f\u0088\2\u0404\u0403\3\2\2")
buf.write("\2\u0404\u0405\3\2\2\2\u0405\u040c\3\2\2\2\u0406\u0407")
buf.write("\5\u0107\u0084\2\u0407\u0409\5\u0103\u0082\2\u0408\u040a")
buf.write("\5\u010f\u0088\2\u0409\u0408\3\2\2\2\u0409\u040a\3\2\2")
buf.write("\2\u040a\u040c\3\2\2\2\u040b\u03ff\3\2\2\2\u040b\u0406")
buf.write("\3\2\2\2\u040c\u00fe\3\2\2\2\u040d\u0410\5\u00ebv\2\u040e")
buf.write("\u0411\5\u0109\u0085\2\u040f\u0411\5\u010d\u0087\2\u0410")
buf.write("\u040e\3\2\2\2\u0410\u040f\3\2\2\2\u0411\u0412\3\2\2\2")
buf.write("\u0412\u0414\5\u010b\u0086\2\u0413\u0415\5\u010f\u0088")
buf.write("\2\u0414\u0413\3\2\2\2\u0414\u0415\3\2\2\2\u0415\u0100")
buf.write("\3\2\2\2\u0416\u0418\5\u0107\u0084\2\u0417\u0416\3\2\2")
buf.write("\2\u0417\u0418\3\2\2\2\u0418\u0419\3\2\2\2\u0419\u041a")
buf.write("\7\60\2\2\u041a\u041f\5\u0107\u0084\2\u041b\u041c\5\u0107")
buf.write("\u0084\2\u041c\u041d\7\60\2\2\u041d\u041f\3\2\2\2\u041e")
buf.write("\u0417\3\2\2\2\u041e\u041b\3\2\2\2\u041f\u0102\3\2\2\2")
buf.write("\u0420\u0422\t\f\2\2\u0421\u0423\5\u0105\u0083\2\u0422")
buf.write("\u0421\3\2\2\2\u0422\u0423\3\2\2\2\u0423\u0424\3\2\2\2")
buf.write("\u0424\u0425\5\u0107\u0084\2\u0425\u0104\3\2\2\2\u0426")
buf.write("\u0427\t\r\2\2\u0427\u0106\3\2\2\2\u0428\u042a\5\u00d9")
buf.write("m\2\u0429\u0428\3\2\2\2\u042a\u042b\3\2\2\2\u042b\u0429")
buf.write("\3\2\2\2\u042b\u042c\3\2\2\2\u042c\u0108\3\2\2\2\u042d")
buf.write("\u042f\5\u010d\u0087\2\u042e\u042d\3\2\2\2\u042e\u042f")
buf.write("\3\2\2\2\u042f\u0430\3\2\2\2\u0430\u0431\7\60\2\2\u0431")
buf.write("\u0436\5\u010d\u0087\2\u0432\u0433\5\u010d\u0087\2\u0433")
buf.write("\u0434\7\60\2\2\u0434\u0436\3\2\2\2\u0435\u042e\3\2\2")
buf.write("\2\u0435\u0432\3\2\2\2\u0436\u010a\3\2\2\2\u0437\u0439")
buf.write("\t\16\2\2\u0438\u043a\5\u0105\u0083\2\u0439\u0438\3\2")
buf.write("\2\2\u0439\u043a\3\2\2\2\u043a\u043b\3\2\2\2\u043b\u043c")
buf.write("\5\u0107\u0084\2\u043c\u010c\3\2\2\2\u043d\u043f\5\u00f1")
buf.write("y\2\u043e\u043d\3\2\2\2\u043f\u0440\3\2\2\2\u0440\u043e")
buf.write("\3\2\2\2\u0440\u0441\3\2\2\2\u0441\u010e\3\2\2\2\u0442")
buf.write("\u0443\t\17\2\2\u0443\u0110\3\2\2\2\u0444\u0445\7)\2\2")
buf.write("\u0445\u0446\5\u0113\u008a\2\u0446\u0447\7)\2\2\u0447")
buf.write("\u045b\3\2\2\2\u0448\u0449\7N\2\2\u0449\u044a\7)\2\2\u044a")
buf.write("\u044b\3\2\2\2\u044b\u044c\5\u0113\u008a\2\u044c\u044d")
buf.write("\7)\2\2\u044d\u045b\3\2\2\2\u044e\u044f\7w\2\2\u044f\u0450")
buf.write("\7)\2\2\u0450\u0451\3\2\2\2\u0451\u0452\5\u0113\u008a")
buf.write("\2\u0452\u0453\7)\2\2\u0453\u045b\3\2\2\2\u0454\u0455")
buf.write("\7W\2\2\u0455\u0456\7)\2\2\u0456\u0457\3\2\2\2\u0457\u0458")
buf.write("\5\u0113\u008a\2\u0458\u0459\7)\2\2\u0459\u045b\3\2\2")
buf.write("\2\u045a\u0444\3\2\2\2\u045a\u0448\3\2\2\2\u045a\u044e")
buf.write("\3\2\2\2\u045a\u0454\3\2\2\2\u045b\u0112\3\2\2\2\u045c")
buf.write("\u045e\5\u0115\u008b\2\u045d\u045c\3\2\2\2\u045e\u045f")
buf.write("\3\2\2\2\u045f\u045d\3\2\2\2\u045f\u0460\3\2\2\2\u0460")
buf.write("\u0114\3\2\2\2\u0461\u0464\n\20\2\2\u0462\u0464\5\u0117")
buf.write("\u008c\2\u0463\u0461\3\2\2\2\u0463\u0462\3\2\2\2\u0464")
buf.write("\u0116\3\2\2\2\u0465\u046a\5\u0119\u008d\2\u0466\u046a")
buf.write("\5\u011b\u008e\2\u0467\u046a\5\u011d\u008f\2\u0468\u046a")
buf.write("\5\u00dbn\2\u0469\u0465\3\2\2\2\u0469\u0466\3\2\2\2\u0469")
buf.write("\u0467\3\2\2\2\u0469\u0468\3\2\2\2\u046a\u0118\3\2\2\2")
buf.write("\u046b\u046c\7^\2\2\u046c\u046d\t\21\2\2\u046d\u011a\3")
buf.write("\2\2\2\u046e\u046f\7^\2\2\u046f\u0471\5\u00efx\2\u0470")
buf.write("\u0472\5\u00efx\2\u0471\u0470\3\2\2\2\u0471\u0472\3\2")
buf.write("\2\2\u0472\u0474\3\2\2\2\u0473\u0475\5\u00efx\2\u0474")
buf.write("\u0473\3\2\2\2\u0474\u0475\3\2\2\2\u0475\u011c\3\2\2\2")
buf.write("\u0476\u0477\7^\2\2\u0477\u0478\7z\2\2\u0478\u047a\3\2")
buf.write("\2\2\u0479\u047b\5\u00f1y\2\u047a\u0479\3\2\2\2\u047b")
buf.write("\u047c\3\2\2\2\u047c\u047a\3\2\2\2\u047c\u047d\3\2\2\2")
buf.write("\u047d\u011e\3\2\2\2\u047e\u0480\5\u0121\u0091\2\u047f")
buf.write("\u047e\3\2\2\2\u047f\u0480\3\2\2\2\u0480\u0481\3\2\2\2")
buf.write("\u0481\u0483\7$\2\2\u0482\u0484\5\u0123\u0092\2\u0483")
buf.write("\u0482\3\2\2\2\u0483\u0484\3\2\2\2\u0484\u0485\3\2\2\2")
buf.write("\u0485\u0486\7$\2\2\u0486\u0120\3\2\2\2\u0487\u0488\7")
buf.write("w\2\2\u0488\u048b\7:\2\2\u0489\u048b\t\22\2\2\u048a\u0487")
buf.write("\3\2\2\2\u048a\u0489\3\2\2\2\u048b\u0122\3\2\2\2\u048c")
buf.write("\u048e\5\u0125\u0093\2\u048d\u048c\3\2\2\2\u048e\u048f")
buf.write("\3\2\2\2\u048f\u048d\3\2\2\2\u048f\u0490\3\2\2\2\u0490")
buf.write("\u0124\3\2\2\2\u0491\u0499\n\23\2\2\u0492\u0499\5\u0117")
buf.write("\u008c\2\u0493\u0494\7^\2\2\u0494\u0499\7\f\2\2\u0495")
buf.write("\u0496\7^\2\2\u0496\u0497\7\17\2\2\u0497\u0499\7\f\2\2")
buf.write("\u0498\u0491\3\2\2\2\u0498\u0492\3\2\2\2\u0498\u0493\3")
buf.write("\2\2\2\u0498\u0495\3\2\2\2\u0499\u0126\3\2\2\2\u049a\u049c")
buf.write("\7%\2\2\u049b\u049d\5\u0133\u009a\2\u049c\u049b\3\2\2")
buf.write("\2\u049c\u049d\3\2\2\2\u049d\u049e\3\2\2\2\u049e\u049f")
buf.write("\7f\2\2\u049f\u04a0\7g\2\2\u04a0\u04a1\7h\2\2\u04a1\u04a2")
buf.write("\7k\2\2\u04a2\u04a3\7p\2\2\u04a3\u04a4\7g\2\2\u04a4\u04a8")
buf.write("\3\2\2\2\u04a5\u04a7\n\24\2\2\u04a6\u04a5\3\2\2\2\u04a7")
buf.write("\u04aa\3\2\2\2\u04a8\u04a6\3\2\2\2\u04a8\u04a9\3\2\2\2")
buf.write("\u04a9\u04ab\3\2\2\2\u04aa\u04a8\3\2\2\2\u04ab\u04ac\b")
buf.write("\u0094\2\2\u04ac\u0128\3\2\2\2\u04ad\u04af\7%\2\2\u04ae")
buf.write("\u04b0\5\u0133\u009a\2\u04af\u04ae\3\2\2\2\u04af\u04b0")
buf.write("\3\2\2\2\u04b0\u04b1\3\2\2\2\u04b1\u04b2\7k\2\2\u04b2")
buf.write("\u04b3\7p\2\2\u04b3\u04b4\7e\2\2\u04b4\u04b5\7n\2\2\u04b5")
buf.write("\u04b6\7w\2\2\u04b6\u04b7\7f\2\2\u04b7\u04b8\7g\2\2\u04b8")
buf.write("\u04ba\3\2\2\2\u04b9\u04bb\5\u0133\u009a\2\u04ba\u04b9")
buf.write("\3\2\2\2\u04ba\u04bb\3\2\2\2\u04bb\u04cc\3\2\2\2\u04bc")
buf.write("\u04c0\7$\2\2\u04bd\u04bf\n\25\2\2\u04be\u04bd\3\2\2\2")
buf.write("\u04bf\u04c2\3\2\2\2\u04c0\u04be\3\2\2\2\u04c0\u04c1\3")
buf.write("\2\2\2\u04c1\u04c3\3\2\2\2\u04c2\u04c0\3\2\2\2\u04c3\u04cd")
buf.write("\7$\2\2\u04c4\u04c8\7>\2\2\u04c5\u04c7\n\25\2\2\u04c6")
buf.write("\u04c5\3\2\2\2\u04c7\u04ca\3\2\2\2\u04c8\u04c6\3\2\2\2")
buf.write("\u04c8\u04c9\3\2\2\2\u04c9\u04cb\3\2\2\2\u04ca\u04c8\3")
buf.write("\2\2\2\u04cb\u04cd\7@\2\2\u04cc\u04bc\3\2\2\2\u04cc\u04c4")
buf.write("\3\2\2\2\u04cd\u04cf\3\2\2\2\u04ce\u04d0\5\u0133\u009a")
buf.write("\2\u04cf\u04ce\3\2\2\2\u04cf\u04d0\3\2\2\2\u04d0\u04d1")
buf.write("\3\2\2\2\u04d1\u04d2\5\u0135\u009b\2\u04d2\u04d3\3\2\2")
buf.write("\2\u04d3\u04d4\b\u0095\2\2\u04d4\u012a\3\2\2\2\u04d5\u04d6")
buf.write("\7c\2\2\u04d6\u04d7\7u\2\2\u04d7\u04d8\7o\2\2\u04d8\u04dc")
buf.write("\3\2\2\2\u04d9\u04db\n\26\2\2\u04da\u04d9\3\2\2\2\u04db")
buf.write("\u04de\3\2\2\2\u04dc\u04da\3\2\2\2\u04dc\u04dd\3\2\2\2")
buf.write("\u04dd\u04df\3\2\2\2\u04de\u04dc\3\2\2\2\u04df\u04e3\7")
buf.write("}\2\2\u04e0\u04e2\n\27\2\2\u04e1\u04e0\3\2\2\2\u04e2\u04e5")
buf.write("\3\2\2\2\u04e3\u04e1\3\2\2\2\u04e3\u04e4\3\2\2\2\u04e4")
buf.write("\u04e6\3\2\2\2\u04e5\u04e3\3\2\2\2\u04e6\u04e7\7\177\2")
buf.write("\2\u04e7\u04e8\3\2\2\2\u04e8\u04e9\b\u0096\2\2\u04e9\u012c")
buf.write("\3\2\2\2\u04ea\u04eb\7%\2\2\u04eb\u04ec\7n\2\2\u04ec\u04ed")
buf.write("\7k\2\2\u04ed\u04ee\7p\2\2\u04ee\u04ef\7g\2\2\u04ef\u04f3")
buf.write("\3\2\2\2\u04f0\u04f2\5\u0133\u009a\2\u04f1\u04f0\3\2\2")
buf.write("\2\u04f2\u04f5\3\2\2\2\u04f3\u04f1\3\2\2\2\u04f3\u04f4")
buf.write("\3\2\2\2\u04f4\u04f9\3\2\2\2\u04f5\u04f3\3\2\2\2\u04f6")
buf.write("\u04f8\n\25\2\2\u04f7\u04f6\3\2\2\2\u04f8\u04fb\3\2\2")
buf.write("\2\u04f9\u04f7\3\2\2\2\u04f9\u04fa\3\2\2\2\u04fa\u04fc")
buf.write("\3\2\2\2\u04fb\u04f9\3\2\2\2\u04fc\u04fd\b\u0097\2\2\u04fd")
buf.write("\u012e\3\2\2\2\u04fe\u0500\7%\2\2\u04ff\u0501\5\u0133")
buf.write("\u009a\2\u0500\u04ff\3\2\2\2\u0500\u0501\3\2\2\2\u0501")
buf.write("\u0502\3\2\2\2\u0502\u0504\5\u00e5s\2\u0503\u0505\5\u0133")
buf.write("\u009a\2\u0504\u0503\3\2\2\2\u0504\u0505\3\2\2\2\u0505")
buf.write("\u0506\3\2\2\2\u0506\u050a\5\u011f\u0090\2\u0507\u0509")
buf.write("\n\25\2\2\u0508\u0507\3\2\2\2\u0509\u050c\3\2\2\2\u050a")
buf.write("\u0508\3\2\2\2\u050a\u050b\3\2\2\2\u050b\u050d\3\2\2\2")
buf.write("\u050c\u050a\3\2\2\2\u050d\u050e\b\u0098\2\2\u050e\u0130")
buf.write("\3\2\2\2\u050f\u0511\7%\2\2\u0510\u0512\5\u0133\u009a")
buf.write("\2\u0511\u0510\3\2\2\2\u0511\u0512\3\2\2\2\u0512\u0513")
buf.write("\3\2\2\2\u0513\u0514\7r\2\2\u0514\u0515\7t\2\2\u0515\u0516")
buf.write("\7c\2\2\u0516\u0517\7i\2\2\u0517\u0518\7o\2\2\u0518\u0519")
buf.write("\7c\2\2\u0519\u051a\3\2\2\2\u051a\u051e\5\u0133\u009a")
buf.write("\2\u051b\u051d\n\25\2\2\u051c\u051b\3\2\2\2\u051d\u0520")
buf.write("\3\2\2\2\u051e\u051c\3\2\2\2\u051e\u051f\3\2\2\2\u051f")
buf.write("\u0521\3\2\2\2\u0520\u051e\3\2\2\2\u0521\u0522\b\u0099")
buf.write("\2\2\u0522\u0132\3\2\2\2\u0523\u0525\t\30\2\2\u0524\u0523")
buf.write("\3\2\2\2\u0525\u0526\3\2\2\2\u0526\u0524\3\2\2\2\u0526")
buf.write("\u0527\3\2\2\2\u0527\u0528\3\2\2\2\u0528\u0529\b\u009a")
buf.write("\2\2\u0529\u0134\3\2\2\2\u052a\u052c\7\17\2\2\u052b\u052d")
buf.write("\7\f\2\2\u052c\u052b\3\2\2\2\u052c\u052d\3\2\2\2\u052d")
buf.write("\u0530\3\2\2\2\u052e\u0530\7\f\2\2\u052f\u052a\3\2\2\2")
buf.write("\u052f\u052e\3\2\2\2\u0530\u0531\3\2\2\2\u0531\u0532\b")
buf.write("\u009b\2\2\u0532\u0136\3\2\2\2\u0533\u0534\7\61\2\2\u0534")
buf.write("\u0535\7,\2\2\u0535\u0539\3\2\2\2\u0536\u0538\13\2\2\2")
buf.write("\u0537\u0536\3\2\2\2\u0538\u053b\3\2\2\2\u0539\u053a\3")
buf.write("\2\2\2\u0539\u0537\3\2\2\2\u053a\u053c\3\2\2\2\u053b\u0539")
buf.write("\3\2\2\2\u053c\u053d\7,\2\2\u053d\u053e\7\61\2\2\u053e")
buf.write("\u053f\3\2\2\2\u053f\u0540\b\u009c\2\2\u0540\u0138\3\2")
buf.write("\2\2\u0541\u0542\7\61\2\2\u0542\u0543\7\61\2\2\u0543\u0547")
buf.write("\3\2\2\2\u0544\u0546\n\25\2\2\u0545\u0544\3\2\2\2\u0546")
buf.write("\u0549\3\2\2\2\u0547\u0545\3\2\2\2\u0547\u0548\3\2\2\2")
buf.write("\u0548\u054a\3\2\2\2\u0549\u0547\3\2\2\2\u054a\u054b\b")
buf.write("\u009d\2\2\u054b\u013a\3\2\2\2F\2\u038a\u038c\u0391\u03a1")
buf.write("\u03ab\u03af\u03b3\u03b7\u03ba\u03c1\u03c7\u03ce\u03d5")
buf.write("\u03e2\u03e9\u03ed\u03ef\u03f9\u03fd\u0401\u0404\u0409")
buf.write("\u040b\u0410\u0414\u0417\u041e\u0422\u042b\u042e\u0435")
buf.write("\u0439\u0440\u045a\u045f\u0463\u0469\u0471\u0474\u047c")
buf.write("\u047f\u0483\u048a\u048f\u0498\u049c\u04a8\u04af\u04ba")
buf.write("\u04c0\u04c8\u04cc\u04cf\u04dc\u04e3\u04f3\u04f9\u0500")
buf.write("\u0504\u050a\u0511\u051e\u0526\u052c\u052f\u0539\u0547")
buf.write("\3\b\2\2")
return buf.getvalue()
class CLexer(Lexer):
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
T__0 = 1
T__1 = 2
T__2 = 3
T__3 = 4
T__4 = 5
T__5 = 6
T__6 = 7
T__7 = 8
T__8 = 9
T__9 = 10
T__10 = 11
T__11 = 12
T__12 = 13
T__13 = 14
Auto = 15
Break = 16
Case = 17
Char = 18
Const = 19
Continue = 20
Default = 21
Do = 22
Double = 23
Else = 24
Enum = 25
Extern = 26
Float = 27
For = 28
Goto = 29
If = 30
Inline = 31
Int = 32
Long = 33
Register = 34
Restrict = 35
Return = 36
Short = 37
Signed = 38
Sizeof = 39
Static = 40
Struct = 41
Switch = 42
Typedef = 43
Union = 44
Unsigned = 45
Void = 46
Volatile = 47
While = 48
Alignas = 49
Alignof = 50
Atomic = 51
Bool = 52
Complex = 53
Generic = 54
Imaginary = 55
Noreturn = 56
StaticAssert = 57
ThreadLocal = 58
LeftParen = 59
RightParen = 60
LeftBracket = 61
RightBracket = 62
LeftBrace = 63
RightBrace = 64
Less = 65
LessEqual = 66
Greater = 67
GreaterEqual = 68
LeftShift = 69
RightShift = 70
Plus = 71
PlusPlus = 72
Minus = 73
MinusMinus = 74
Star = 75
Div = 76
Mod = 77
And = 78
Or = 79
AndAnd = 80
OrOr = 81
Caret = 82
Not = 83
Tilde = 84
Question = 85
Colon = 86
Semi = 87
Comma = 88
Assign = 89
StarAssign = 90
DivAssign = 91
ModAssign = 92
PlusAssign = 93
MinusAssign = 94
LeftShiftAssign = 95
RightShiftAssign = 96
AndAssign = 97
XorAssign = 98
OrAssign = 99
Equal = 100
NotEqual = 101
Arrow = 102
Dot = 103
Ellipsis = 104
Identifier = 105
Constant = 106
DigitSequence = 107
StringLiteral = 108
ComplexDefine = 109
IncludeDirective = 110
AsmBlock = 111
LineAfterPreprocessing = 112
LineDirective = 113
PragmaDirective = 114
Whitespace = 115
Newline = 116
BlockComment = 117
LineComment = 118
channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
modeNames = [ "DEFAULT_MODE" ]
literalNames = [ "<INVALID>",
"'__extension__'", "'__builtin_va_arg'", "'__builtin_offsetof'",
"'__m128'", "'__m128d'", "'__m128i'", "'__typeof__'", "'__inline__'",
"'__stdcall'", "'__declspec'", "'__asm'", "'__attribute__'",
"'__asm__'", "'__volatile__'", "'auto'", "'break'", "'case'",
"'char'", "'const'", "'continue'", "'default'", "'do'", "'double'",
"'else'", "'enum'", "'extern'", "'float'", "'for'", "'goto'",
"'if'", "'inline'", "'int'", "'long'", "'register'", "'restrict'",
"'return'", "'short'", "'signed'", "'sizeof'", "'static'", "'struct'",
"'switch'", "'typedef'", "'union'", "'unsigned'", "'void'",
"'volatile'", "'while'", "'_Alignas'", "'_Alignof'", "'_Atomic'",
"'_Bool'", "'_Complex'", "'_Generic'", "'_Imaginary'", "'_Noreturn'",
"'_Static_assert'", "'_Thread_local'", "'('", "')'", "'['",
"']'", "'{'", "'}'", "'<'", "'<='", "'>'", "'>='", "'<<'", "'>>'",
"'+'", "'++'", "'-'", "'--'", "'*'", "'/'", "'%'", "'&'", "'|'",
"'&&'", "'||'", "'^'", "'!'", "'~'", "'?'", "':'", "';'", "','",
"'='", "'*='", "'/='", "'%='", "'+='", "'-='", "'<<='", "'>>='",
"'&='", "'^='", "'|='", "'=='", "'!='", "'->'", "'.'", "'...'" ]
symbolicNames = [ "<INVALID>",
"Auto", "Break", "Case", "Char", "Const", "Continue", "Default",
"Do", "Double", "Else", "Enum", "Extern", "Float", "For", "Goto",
"If", "Inline", "Int", "Long", "Register", "Restrict", "Return",
"Short", "Signed", "Sizeof", "Static", "Struct", "Switch", "Typedef",
"Union", "Unsigned", "Void", "Volatile", "While", "Alignas",
"Alignof", "Atomic", "Bool", "Complex", "Generic", "Imaginary",
"Noreturn", "StaticAssert", "ThreadLocal", "LeftParen", "RightParen",
"LeftBracket", "RightBracket", "LeftBrace", "RightBrace", "Less",
"LessEqual", "Greater", "GreaterEqual", "LeftShift", "RightShift",
"Plus", "PlusPlus", "Minus", "MinusMinus", "Star", "Div", "Mod",
"And", "Or", "AndAnd", "OrOr", "Caret", "Not", "Tilde", "Question",
"Colon", "Semi", "Comma", "Assign", "StarAssign", "DivAssign",
"ModAssign", "PlusAssign", "MinusAssign", "LeftShiftAssign",
"RightShiftAssign", "AndAssign", "XorAssign", "OrAssign", "Equal",
"NotEqual", "Arrow", "Dot", "Ellipsis", "Identifier", "Constant",
"DigitSequence", "StringLiteral", "ComplexDefine", "IncludeDirective",
"AsmBlock", "LineAfterPreprocessing", "LineDirective", "PragmaDirective",
"Whitespace", "Newline", "BlockComment", "LineComment" ]
ruleNames = [ "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6",
"T__7", "T__8", "T__9", "T__10", "T__11", "T__12", "T__13",
"Auto", "Break", "Case", "Char", "Const", "Continue",
"Default", "Do", "Double", "Else", "Enum", "Extern", "Float",
"For", "Goto", "If", "Inline", "Int", "Long", "Register",
"Restrict", "Return", "Short", "Signed", "Sizeof", "Static",
"Struct", "Switch", "Typedef", "Union", "Unsigned", "Void",
"Volatile", "While", "Alignas", "Alignof", "Atomic", "Bool",
"Complex", "Generic", "Imaginary", "Noreturn", "StaticAssert",
"ThreadLocal", "LeftParen", "RightParen", "LeftBracket",
"RightBracket", "LeftBrace", "RightBrace", "Less", "LessEqual",
"Greater", "GreaterEqual", "LeftShift", "RightShift",
"Plus", "PlusPlus", "Minus", "MinusMinus", "Star", "Div",
"Mod", "And", "Or", "AndAnd", "OrOr", "Caret", "Not",
"Tilde", "Question", "Colon", "Semi", "Comma", "Assign",
"StarAssign", "DivAssign", "ModAssign", "PlusAssign",
"MinusAssign", "LeftShiftAssign", "RightShiftAssign",
"AndAssign", "XorAssign", "OrAssign", "Equal", "NotEqual",
"Arrow", "Dot", "Ellipsis", "Identifier", "IdentifierNondigit",
"Nondigit", "Digit", "UniversalCharacterName", "HexQuad",
"Constant", "IntegerConstant", "BinaryConstant", "DecimalConstant",
"OctalConstant", "HexadecimalConstant", "HexadecimalPrefix",
"NonzeroDigit", "OctalDigit", "HexadecimalDigit", "IntegerSuffix",
"UnsignedSuffix", "LongSuffix", "LongLongSuffix", "FloatingConstant",
"DecimalFloatingConstant", "HexadecimalFloatingConstant",
"FractionalConstant", "ExponentPart", "Sign", "DigitSequence",
"HexadecimalFractionalConstant", "BinaryExponentPart",
"HexadecimalDigitSequence", "FloatingSuffix", "CharacterConstant",
"CCharSequence", "CChar", "EscapeSequence", "SimpleEscapeSequence",
"OctalEscapeSequence", "HexadecimalEscapeSequence", "StringLiteral",
"EncodingPrefix", "SCharSequence", "SChar", "ComplexDefine",
"IncludeDirective", "AsmBlock", "LineAfterPreprocessing",
"LineDirective", "PragmaDirective", "Whitespace", "Newline",
"BlockComment", "LineComment" ]
grammarFileName = "C.g4"
def __init__(self, input=None, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("4.9.2")
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
self._actions = None
self._predicates = None
| 66.277081
| 103
| 0.589247
| 12,289
| 58,125
| 2.775979
| 0.149809
| 0.11913
| 0.063405
| 0.073049
| 0.250777
| 0.195433
| 0.126341
| 0.10802
| 0.102275
| 0.097262
| 0
| 0.3641
| 0.14314
| 58,125
| 876
| 104
| 66.35274
| 0.320851
| 0.000619
| 0
| 0.002331
| 1
| 0.344988
| 0.65931
| 0.610502
| 0
| 0
| 0
| 0
| 0.004662
| 1
| 0.002331
| false
| 0
| 0.005828
| 0
| 0.157343
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a3a935bd8c97b8c242aaf501bd31dc6d11b2ed9b
| 99
|
py
|
Python
|
jumpscale/tools/notificationsqueue/__init__.py
|
zaibon/js-sdk
|
cd1d26f2c3343884c1927ceef7c1e12e3f7da905
|
[
"Apache-2.0"
] | 13
|
2020-09-02T09:05:08.000Z
|
2022-03-12T02:43:24.000Z
|
jumpscale/tools/notificationsqueue/__init__.py
|
zaibon/js-sdk
|
cd1d26f2c3343884c1927ceef7c1e12e3f7da905
|
[
"Apache-2.0"
] | 1,998
|
2020-06-15T11:46:10.000Z
|
2022-03-24T22:12:41.000Z
|
jumpscale/tools/notificationsqueue/__init__.py
|
zaibon/js-sdk
|
cd1d26f2c3343884c1927ceef7c1e12e3f7da905
|
[
"Apache-2.0"
] | 8
|
2020-09-29T06:50:35.000Z
|
2021-06-14T03:30:52.000Z
|
def export_module_as():
from .queue import NotificationsQueue
return NotificationsQueue()
| 19.8
| 41
| 0.767677
| 10
| 99
| 7.4
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171717
| 99
| 4
| 42
| 24.75
| 0.902439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a3af87179a60a2c4e9d1c09a6ec3f3e00a1e8246
| 1,814
|
py
|
Python
|
lesleyloraine/apps/core/migrations/0002_auto_20160130_1756.py
|
ninapavlich/lesleyloraine
|
33bb44755208ef3dd879ea12d514671076085bcb
|
[
"MIT"
] | null | null | null |
lesleyloraine/apps/core/migrations/0002_auto_20160130_1756.py
|
ninapavlich/lesleyloraine
|
33bb44755208ef3dd879ea12d514671076085bcb
|
[
"MIT"
] | null | null | null |
lesleyloraine/apps/core/migrations/0002_auto_20160130_1756.py
|
ninapavlich/lesleyloraine
|
33bb44755208ef3dd879ea12d514671076085bcb
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import carbon.compounds.core.models
import lesleyloraine.s3utils
class Migration(migrations.Migration):
dependencies = [
('core', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='csspackage',
name='file_minified',
field=models.FileField(storage=lesleyloraine.s3utils._MediaS3BotoStorage(bucket=b'lesleyloraine-dev', custom_domain=b'lesleyloraine-dev.s3.amazonaws.com', location=b'media'), null=True, upload_to=carbon.compounds.core.models.title_file_name, blank=True),
),
migrations.AlterField(
model_name='csspackage',
name='file_source',
field=models.FileField(storage=lesleyloraine.s3utils._MediaS3BotoStorage(bucket=b'lesleyloraine-dev', custom_domain=b'lesleyloraine-dev.s3.amazonaws.com', location=b'media'), null=True, upload_to=carbon.compounds.core.models.title_file_name, blank=True),
),
migrations.AlterField(
model_name='jspackage',
name='file_minified',
field=models.FileField(storage=lesleyloraine.s3utils._MediaS3BotoStorage(bucket=b'lesleyloraine-dev', custom_domain=b'lesleyloraine-dev.s3.amazonaws.com', location=b'media'), null=True, upload_to=carbon.compounds.core.models.title_file_name, blank=True),
),
migrations.AlterField(
model_name='jspackage',
name='file_source',
field=models.FileField(storage=lesleyloraine.s3utils._MediaS3BotoStorage(bucket=b'lesleyloraine-dev', custom_domain=b'lesleyloraine-dev.s3.amazonaws.com', location=b'media'), null=True, upload_to=carbon.compounds.core.models.title_file_name, blank=True),
),
]
| 49.027027
| 266
| 0.706725
| 203
| 1,814
| 6.147783
| 0.251232
| 0.089744
| 0.108974
| 0.10016
| 0.838141
| 0.838141
| 0.838141
| 0.798878
| 0.798878
| 0.798878
| 0
| 0.011944
| 0.169239
| 1,814
| 36
| 267
| 50.388889
| 0.816191
| 0.011577
| 0
| 0.666667
| 0
| 0
| 0.182021
| 0.075935
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.133333
| 0
| 0.233333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a3c4399776028ce2132563f702351ac8df043899
| 161,632
|
py
|
Python
|
spytest/apis/routing/evpn.py
|
shubav/sonic-mgmt
|
0ff71b907a55489bb4ed7d17b1682380fd459bf2
|
[
"Apache-2.0"
] | 132
|
2016-10-19T12:34:44.000Z
|
2022-03-16T09:00:39.000Z
|
spytest/apis/routing/evpn.py
|
shubav/sonic-mgmt
|
0ff71b907a55489bb4ed7d17b1682380fd459bf2
|
[
"Apache-2.0"
] | 3,152
|
2016-09-21T23:05:58.000Z
|
2022-03-31T23:29:08.000Z
|
spytest/apis/routing/evpn.py
|
shubav/sonic-mgmt
|
0ff71b907a55489bb4ed7d17b1682380fd459bf2
|
[
"Apache-2.0"
] | 563
|
2016-09-20T01:00:15.000Z
|
2022-03-31T22:43:54.000Z
|
import re
from spytest.utils import filter_and_select
from spytest import st, utils
import apis.system.port as port1
from apis.system.rest import get_rest,delete_rest,config_rest
from utilities.utils import get_interface_number_from_name
def config_bgp_evpn(dut, **kwargs):
"""
Author: Gangadhara Sahu (gangadhara.sahu@broadcom.com)
config_bgp_evpn(dut=data.dut1,neighbor ='21.1.1.2',remote_as='20',config='yes',config_type_list =["activate"])
config_bgp_evpn(dut=dut1,config = 'yes',config_type_list=["advertise_all_vni"],local_as="10")
config_bgp_evpn(dut=dut1,config_type_list=["vrf_rd_rt"],vrf_name="Vrf1",l3_rd="8:8",config="yes",local_as=evpn_dict["leaf3"]['local_as'])
config_bgp_evpn(dut=dut1,config_type_list=["vrf_rd_rt"],vrf_name="Vrf1",l3_both_rt="50:50",config="no", local_as=evpn_dict["leaf3"]['local_as'])
config_bgp_evpn(dut=dut1,config_type_list=["vrf_rd_rt"],vrf_name="Vrf1",l3_import_rt="51:50",config="yes", local_as=evpn_dict["leaf3"]['local_as'])
config_bgp_evpn(dut=dut1,config_type_list=["vrf_rd_rt"],vrf_name="Vrf1",l3_export_rt="52:50",config="yes", local_as=evpn_dict["leaf3"]['local_as'])
config_bgp_evpn(dut=dut1,config_type_list=["vrf_rd_rt"],vrf_name="Vrf1",l3_rd="8:8",config="no", local_as=evpn_dict["leaf3"]['local_as'])
config_bgp_evpn(dut=dut1,config_type_list=["vrf_rd_rt"],vrf_name="Vrf1",l3_rd="9:9",l3_both_rt="50:50",config="no",local_as=evpn_dict["leaf3"]['local_as'])
config_bgp_evpn(dut=data.dut1,neighbor ='21.1.1.2',remote_as='20',config='yes',config_type_list =["activate"], cli_type='klish')
config_bgp_evpn(dut=dut1,config = 'yes',config_type_list=["advertise_all_vni"],local_as="10", cli_type='klish')
Configure bgp l2vpn evpn specific commands
:param dut:
:param neighbor:
:param local_as:
:param config_type_list:
:param allowas_in:
:param attribute_unchanged:
:param route_map:
:param direction:
:param network:
:param rd:
:param vni:
:param vrf_name:
:param l3_vni_id:
:param ethtag:
:param bgp_label:
:param esi_id:
:param gw_ip:
:param router_mac:
:return:
"""
cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
if cli_type == 'click': cli_type = "vtysh"
skip_rest_cfg_type_list = [ 'nexthop_self', 'route_map', 'allowas_in', 'network', 'route_target', 'autort',
'attribute_unchanged', 'default_originate_ipv4', 'default_originate_ipv6',
'default_originate_ipv4_vrf', 'default_originate_ipv6_vrf',
'dup_addr_detection', 'flooding_disable', 'flooding_head_end_replication',
"route_server_client", "route_reflector_client" ]
if 'config' in kwargs:
config = kwargs['config']
else:
config = 'yes'
if 'vrf_name' in kwargs:
vrf_name = kwargs['vrf_name']
else:
vrf_name = "default"
if 'l3_vni_id' in kwargs:
l3_vni_id = kwargs['l3_vni_id']
if 'vtep_name' in kwargs:
vtep_name = kwargs['vtep_name']
if 'config_type_list' in kwargs:
config_type_list = kwargs['config_type_list']
if 'neighbor' in kwargs:
neighbor = kwargs['neighbor']
if 'peergroup' in kwargs and 'neighbor' not in kwargs:
neighbor = kwargs['peergroup']
if 'addr_family' in kwargs:
addr_family = kwargs['addr_family']
else:
addr_family = 'l2vpn'
if 'addr_family_modifier' in kwargs:
addr_family_modifier = kwargs['addr_family_modifier']
else:
addr_family_modifier = "evpn"
st.log('Configure BGP L2VPN address family')
addr_family_str = addr_family.upper() + '_' + addr_family_modifier.upper()
if cli_type in ['rest-put','rest-patch']:
st.banner("CFG list: {}, cli_type:{}".format(config_type_list,cli_type))
for cfg_type in config_type_list:
if cfg_type in skip_rest_cfg_type_list:
cli_type = 'klish'
st.banner("CFG type skipped: {}, cli_type:{}".format(cfg_type, cli_type))
break
if cli_type in ['rest-put','rest-patch']:
rest_urls = st.get_datastore(dut, "rest_urls")
if config.lower() == 'yes' and 'vrf_vni' not in config_type_list:
if 'local_as' in kwargs:
### AS URI
url = rest_urls['bgp_as_config'].format(vrf_name)
payload = {'openconfig-network-instance:as': int(kwargs['local_as'])}
response = config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload)
if not response:
st.banner('FAIL-OCYANG: BGP local-as config Failed')
return False
### L2VPN global URI
url = rest_urls['bgp_l2vpn_global_config'].format(vrf_name)
payload = { 'openconfig-network-instance:afi-safis': {
'afi-safi': [
{'afi-safi-name': addr_family_str,
'config':{
'afi-safi-name': addr_family_str,
}
}
]
}}
response = config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload)
if not response:
st.banner('FAIL-OCYANG: BGP {} address-family global config Failed'.format(addr_family_str))
return False
else:
if 'local_as' in kwargs:
my_cmd = 'router bgp {}\n'.format(kwargs['local_as'])
else:
my_cmd = 'router bgp\n'
my_cmd += 'address-family {} {}\n'.format(addr_family,addr_family_modifier)
if 'allowas_in' in kwargs:
allowas_in = kwargs['allowas_in']
if 'attribute_unchanged' in kwargs:
attribute_unchanged = kwargs['attribute_unchanged']
if 'route_map' in kwargs:
route_map = kwargs['route_map']
if 'direction' in kwargs:
direction = kwargs['direction']
else:
direction = 'in'
if 'advertise_ipv4' in kwargs:
advertise_ipv4 = kwargs['advertise_ipv4']
if 'advertise_ipv6' in kwargs:
advertise_ipv6 = kwargs['advertise_ipv6']
if 'advertise_ipv4_vrf' in kwargs:
advertise_ipv4 = kwargs['advertise_ipv4_vrf']
if 'advertise_ipv6_vrf' in kwargs:
advertise_ipv6 = kwargs['advertise_ipv6_vrf']
if 'dup_addr_detection' in kwargs:
dup_addr_detection = kwargs['dup_addr_detection']
if 'network' in kwargs:
network = kwargs['network']
rd = kwargs['rd']
ethtag = kwargs['ethtag']
bgp_label = kwargs['bgp_label']
esi_id = kwargs['esi_id']
gw_ip = kwargs['gw_ip']
router_mac = kwargs['router_mac']
if config.lower() == 'yes':
config_cmd = ''
elif config.lower() == 'remove_vrf':
config_cmd = 'remove_vrf'
elif config.lower() == 'remove_vni':
config_cmd = 'remove_vni'
else:
config_cmd = 'no'
if 'vni_unconfig' not in kwargs:
vni_unconfig = ''
elif kwargs['vni_unconfig'] == "yes":
vni_unconfig = 'no'
for type1 in config_type_list:
cur_type = type1
if type1 == 'vrf_vni' and config_cmd == '':
if cli_type in ['klish','rest-put','rest-patch']:
map_vrf_vni(dut, vrf_name, l3_vni_id, config='yes', vtep_name=vtep_name, cli_type=cli_type)
my_cmd = ''
else:
my_cmd = ''
my_cmd += 'vrf {} \n'.format(vrf_name)
my_cmd += 'vni {} \n'.format(l3_vni_id)
elif type1 == 'vrf_vni' and config_cmd != '':
my_cmd = ''
if cli_type in ['klish','rest-put','rest-patch']:
if config_cmd == 'remove_vrf' or config_cmd == 'remove_vni' or config_cmd == 'no':
map_vrf_vni(dut, vrf_name, l3_vni_id, config='no', vtep_name=vtep_name, cli_type=cli_type)
my_cmd = ''
else:
if config_cmd == 'remove_vrf':
my_cmd += 'no vrf {} \n'.format(vrf_name)
if config_cmd == 'remove_vni' or config_cmd == 'no':
my_cmd += 'vrf {} \n'.format(vrf_name)
my_cmd += 'no vni {} \n'.format(l3_vni_id)
elif type1 == 'activate':
if cli_type == 'klish':
neigh_name = get_interface_number_from_name(neighbor)
if isinstance(neigh_name, dict):
my_cmd += "neighbor interface {} {}\n".format(neigh_name["type"],neigh_name["number"])
else:
my_cmd += "neighbor {}\n".format(neigh_name)
my_cmd += "remote-as {}\n".format(kwargs['remote_as'])
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += "{} activate\n".format(config_cmd)
my_cmd += "exit\n"
my_cmd += "exit\n"
elif cli_type in ['click','vtysh']:
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} neighbor {} activate\n'.format(config_cmd, neighbor)
elif cli_type in ['rest-put','rest-patch']:
if config.lower() == 'yes':
st.log("BGP EVPN neigh config")
url = rest_urls['bgp_neighbor_config'].format(vrf_name)
if kwargs['remote_as'] == 'external':
payload = {'openconfig-network-instance:neighbors':
{'neighbor': [
{'neighbor-address': neighbor,
'config': {
'neighbor-address': neighbor,
'peer-type': kwargs['remote_as'].upper(),
'enabled': bool(1)
}
}
]}
}
else:
payload = {'openconfig-network-instance:neighbors':
{'neighbor': [
{'neighbor-address': neighbor,
'config': {
'neighbor-address': neighbor,
'peer-as': int(kwargs['remote_as']),
'enabled': bool(1)
}
}
]}
}
response = config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload)
if not response:
st.banner('FAIL-OCYANG: BGP EVPN neighbor configuration Failed')
return False
url = rest_urls['bgp_l2vpn_neighbor_config'].format(vrf_name,neighbor)
payload = {'openconfig-network-instance:afi-safis': {
'afi-safi':[
{
'afi-safi-name': addr_family_str,
'config':{
'afi-safi-name': addr_family_str,
'enabled': True
}
}
]}
}
response = config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload)
if not response:
st.banner('FAIL-OCYANG: BGP {} address-family configuration Failed'.format(addr_family_str))
return False
else:
url = rest_urls['bgp_l2vpn_neighbor_config'].format(vrf_name, neighbor)
payload = {'openconfig-network-instance:afi-safis': {
'afi-safi':[
{
'afi-safi-name': addr_family_str,
'config':{
'afi-safi-name': addr_family_str,
'enabled': False
}
}
]}
}
response = config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload)
if not response:
st.banner('FAIL-OCYANG: BGP {} address-family no activate Failed'.format(addr_family_str))
return False
elif type1 == 'allowas_in':
#convert to REST as and when used
if cli_type == 'klish':
my_cmd += "neighbor {}\n".format(neighbor)
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += "{} allowas-in\n".format(config_cmd)
my_cmd += "exit\n"
my_cmd += "exit\n"
else:
my_cmd += '{} neighbor {} allowas-in {}\n'.format(config_cmd,neighbor,allowas_in)
elif type1 == 'attribute_unchanged':
#convert to REST as and when used
if cli_type == 'klish':
my_cmd += "neighbor {}\n".format(neighbor)
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += "{} attribute-unchanged\n".format(config_cmd)
my_cmd += "exit\n"
my_cmd += "exit\n"
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
else:
my_cmd += '{} neighbor {} attribute-unchanged {}\n'.format(config_cmd,neighbor,attribute_unchanged)
elif type1 == 'nexthop_self':
#convert to REST as and when used
if cli_type == 'klish':
my_cmd += "neighbor {}\n".format(neighbor)
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += "{} next-hop-self\n".format(config_cmd)
my_cmd += "exit\n"
my_cmd += "exit\n"
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
else:
my_cmd += '{} neighbor {} next-hop-self\n'.format(config_cmd, neighbor)
elif type1 == 'route_map':
# convert to REST as and when used
if cli_type == 'klish':
my_cmd += "neighbor {}\n".format(neighbor)
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += "{} route-map {} {}\n".format(config_cmd,route_map,direction)
my_cmd += "exit\n"
my_cmd += "exit\n"
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
else:
my_cmd += '{} neighbor {} route-map {} {}\n'.format(config_cmd,neighbor,route_map,direction)
my_cmd += 'exit\n'
elif type1 == 'route_reflector_client':
# convert to REST as and when used
if cli_type == 'klish':
my_cmd += "neighbor {}\n".format(neighbor)
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += "{} route-reflector-client\n".format(config_cmd)
my_cmd += "exit\n"
my_cmd += "exit\n"
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
else:
my_cmd += '{} neighbor {} route-reflector-client\n'.format(config_cmd, neighbor)
elif type1 == 'route_server_client':
# convert to REST as and when used
if cli_type == 'klish':
my_cmd += "neighbor {}\n".format(neighbor)
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += "{} route-server-client\n".format(config_cmd)
my_cmd += "exit\n"
my_cmd += "exit\n"
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
else:
my_cmd += '{} neighbor {} route-server-client\n'.format(config_cmd, neighbor)
elif type1 == 'disable_ebgp_connected_route_check':
if cli_type == 'klish':
my_cmd += '{} disable-ebgp-connected-route-check \n'.format(config_cmd)
my_cmd += "exit\n"
elif cli_type in ['click','vtysh']:
my_cmd += '{} bgp disable-ebgp-connected-route-check \n'.format(config_cmd)
elif cli_type in ['rest-put','rest-patch']:
url = rest_urls['ebgp_connected_route_check'].format(vrf_name)
if config.lower() == 'yes':
payload = {'openconfig-bgp-ext:disable-ebgp-connected-route-check': True}
elif config.lower() == 'no':
payload = {'openconfig-bgp-ext:disable-ebgp-connected-route-check': False}
response = config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload)
if not response:
st.banner('FAIL-OCYANG: disable-ebgp-connected-route-check configuration:{} Failed'.format(config_cmd))
return False
else:
st.error("Invalid CLI type - {}".format(cli_type))
return False
elif type1 == 'advertise_ipv4':
if cli_type in ["rest-put", "rest-patch"]:
if config.lower() == 'yes':
url = rest_urls['bgp_advertise_config'].format(vrf_name)
payload = {'openconfig-bgp-evpn-ext:advertise-list': ['IPV4_UNICAST']}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN advertise-ipv4 in vrf:{} config Failed'.format(vrf_name))
return False
elif config.lower() == 'no':
url = rest_urls['bgp_advertise_config'].format(vrf_name)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: BGP EVPN advertise-ipv4 in vrf:{} delete Failed'.format(vrf_name))
return False
else:
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} advertise ipv4 {}\n'.format(config_cmd,advertise_ipv4)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'advertise_ipv6':
if cli_type in ["rest-put", "rest-patch"]:
if config.lower() == 'yes':
url = rest_urls['bgp_advertise_config'].format(vrf_name)
payload = {'openconfig-bgp-evpn-ext:advertise-list': ['IPV6_UNICAST']}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN advertise-ipv6 in vrf:{} config Failed'.format(vrf_name))
return False
elif config.lower() == 'no':
url = rest_urls['bgp_advertise_config'].format(vrf_name)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: BGP EVPN advertise-ipv6 in vrf:{} delete Failed'.format(vrf_name))
return False
else:
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} advertise ipv6 {}\n'.format(config_cmd,advertise_ipv6)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'default_originate_ipv4_vrf':
# convert to REST as and when used
my_cmd = 'router bgp {} vrf {}\n'.format(kwargs['local_as'],vrf_name)
my_cmd += 'address-family l2vpn evpn\n'
my_cmd += '{} default-originate ipv4\n'.format(config_cmd)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'default_originate_ipv6_vrf':
# convert to REST as and when used
my_cmd = 'router bgp {} vrf {}\n'.format(kwargs['local_as'],vrf_name)
my_cmd += 'address-family l2vpn evpn\n'
my_cmd += '{} default-originate ipv6\n'.format(config_cmd)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'advertise_ipv4_vrf':
if cli_type in ["rest-put", "rest-patch"]:
if config.lower() == 'yes':
url = rest_urls['bgp_advertise_config'].format(vrf_name)
payload = {'openconfig-bgp-evpn-ext:advertise-list':['IPV4_UNICAST']}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN advertise-ipv4 in vrf:{} config Failed'.format(vrf_name))
return False
elif config.lower() == 'no':
url = rest_urls['bgp_advertise_config'].format(vrf_name)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: BGP EVPN advertise-ipv4 in vrf:{} delete Failed'.format(vrf_name))
return False
else:
my_cmd = 'router bgp {} vrf {}\n'.format(kwargs['local_as'],vrf_name)
my_cmd += 'address-family l2vpn evpn\n'
my_cmd += '{} advertise ipv4 {}\n'.format(config_cmd,advertise_ipv4)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'advertise_ipv6_vrf':
if cli_type in ["rest-put", "rest-patch"]:
if config.lower() == 'yes':
url = rest_urls['bgp_advertise_config'].format(vrf_name)
payload = {'openconfig-bgp-evpn-ext:advertise-list': ['IPV6_UNICAST']}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN advertise-ipv6 in vrf:{} config Failed'.format(vrf_name))
return False
elif config.lower() == 'no':
url = rest_urls['bgp_advertise_config'].format(vrf_name)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: BGP EVPN advertise-ipv6 in vrf:{} delete Failed'.format(vrf_name))
return False
else:
my_cmd = 'router bgp {} vrf {}\n'.format(kwargs['local_as'],vrf_name)
my_cmd += 'address-family l2vpn evpn\n'
my_cmd += '{} advertise ipv6 {}\n'.format(config_cmd,advertise_ipv6)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'vrf_rd_rt':
if cli_type in ["rest-put", "rest-patch"]:
if 'l3_rd' in kwargs:
url = rest_urls['bgp_route_distinguisher'].format(vrf_name)
if config.lower() == 'yes':
payload = {'openconfig-bgp-evpn-ext:route-distinguisher': kwargs['l3_rd']}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN route-distinguisher config Failed')
return False
elif config.lower() == 'no':
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: BGP EVPN route-distinguisher delete Failed')
return False
if 'l3_both_rt' in kwargs:
url_i = rest_urls['bgp_import_rt'].format(vrf_name)
url_e = rest_urls['bgp_export_rt'].format(vrf_name)
if config.lower() == 'yes':
payload = {'openconfig-bgp-evpn-ext:import-rts': [kwargs['l3_both_rt']]}
if not config_rest(dut, http_method=cli_type, rest_url=url_i, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN import rt config Failed')
return False
payload = {'openconfig-bgp-evpn-ext:export-rts': [kwargs['l3_both_rt']]}
if not config_rest(dut, http_method=cli_type, rest_url=url_e, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN export rt config Failed')
return False
elif config.lower() == 'no':
if not delete_rest(dut, rest_url=url_i):
st.banner('FAIL-OCYANG: BGP EVPN import rt delete Failed')
return False
if not delete_rest(dut, rest_url=url_e):
st.banner('FAIL-OCYANG: BGP EVPN export rt delete Failed')
return False
if 'l3_import_rt' in kwargs:
url = rest_urls['bgp_import_rt'].format(vrf_name)
if config.lower() == 'yes':
payload = {'openconfig-bgp-evpn-ext:import-rts': [kwargs['l3_import_rt']]}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN import rt config Failed')
return False
elif config.lower() == 'no':
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: BGP EVPN import rt delete Failed')
return False
if 'l3_export_rt' in kwargs:
url = rest_urls['bgp_export_rt'].format(vrf_name)
if config.lower() == 'yes':
payload = {'openconfig-bgp-evpn-ext:export-rts': [kwargs['l3_export_rt']]}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN export rt config Failed')
return False
elif config.lower() == 'no':
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: BGP EVPN export rt delete Failed')
return False
else:
my_cmd = 'router bgp {} vrf {}\n'.format(kwargs['local_as'],vrf_name)
my_cmd += 'address-family l2vpn evpn\n'
if 'l3_rd' in kwargs:
my_cmd += '{} rd {}\n'.format(config_cmd,kwargs['l3_rd'])
if 'l3_both_rt' in kwargs:
my_cmd += '{} route-target both {}\n'.format(config_cmd,kwargs['l3_both_rt'])
if 'l3_import_rt' in kwargs:
my_cmd += '{} route-target import {}\n'.format(config_cmd,kwargs['l3_import_rt'])
if 'l3_export_rt' in kwargs:
my_cmd += '{} route-target export {}\n'.format(config_cmd,kwargs['l3_export_rt'])
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'advertise_all_vni':
if cli_type in ["rest-put", "rest-patch"]:
if config.lower() == 'yes':
url = rest_urls['bgp_advertise_all_vni'].format(vrf_name)
payload = { 'openconfig-bgp-evpn-ext:advertise-all-vni': True}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN advertise-all-vni config Failed')
return False
elif config.lower() == 'no':
url = rest_urls['bgp_advertise_all_vni'].format(vrf_name)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: BGP EVPN advertise-all-vni delete Failed')
return False
else:
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} advertise-all-vni\n'.format(config_cmd)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'advertise_default_gw':
if cli_type in ["rest-put", "rest-patch"]:
if config.lower() == 'yes':
url = rest_urls['bgp_advertise_default_gw'].format(vrf_name)
payload = {'openconfig-bgp-evpn-ext:advertise-default-gw': True}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN advertise-default-gw config Failed')
return False
elif config.lower() == 'no':
url = rest_urls['bgp_advertise_default_gw'].format(vrf_name)
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: BGP EVPN advertise-default-gw delete Failed')
return False
else:
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} advertise-default-gw\n'.format(config_cmd)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'autort':
# convert to REST as and when used
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} autort rfc8365-compatible\n'.format(config_cmd)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'default_originate_ipv4':
# convert to REST as and when used
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} default-originate ipv4\n'.format(config_cmd)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'default_originate_ipv6':
# convert to REST as and when used
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} default-originate ipv6\n'.format(config_cmd)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'dup_addr_detection':
# convert to REST as and when used
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} dup-addr-detection {}\n'.format(config_cmd,dup_addr_detection)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'flooding_disable':
# convert to REST as and when used
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} flooding disable\n'.format(config_cmd)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'flooding_head_end_replication':
# convert to REST as and when used
my_cmd += "address-family l2vpn {}\n".format(addr_family_modifier)
my_cmd += '{} flooding head-end-replication\n'.format(config_cmd)
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'network' and config_cmd == '':
# convert to REST as and when used
if cli_type not in ['klish']:
my_cmd += 'network {} rd {} ethtag {} label {} esi {} gwip {} routermac {}\n'.format(network,rd,ethtag,bgp_label,esi_id,gw_ip,router_mac)
else:
st.error("Support not added to config - 'network'")
elif type1 == 'network' and config_cmd == 'no':
# convert to REST as and when used
if cli_type not in ['klish']:
my_cmd += '{} network {} rd {} ethtag {} label {} esi {} gwip {}\n'.format(config_cmd,network,rd,ethtag,bgp_label,esi_id,gw_ip)
else:
st.error("Support not added to config - 'network'")
elif type1 == 'route_target':
# convert to REST as and when used
if 'both_rt' in kwargs:
my_cmd += '{} route-target both {}\n'.format(config_cmd,kwargs['both_rt'])
if 'import_rt' in kwargs:
my_cmd += '{} route-target import {}\n'.format(config_cmd,kwargs['import_rt'])
if 'export_rt' in kwargs:
my_cmd += '{} route-target export {}\n'.format(config_cmd,kwargs['export_rt'])
if cli_type == 'klish':
my_cmd += "exit\n"
elif type1 == 'vni':
if cli_type in ["rest-put", "rest-patch"]:
if config.lower() == 'yes':
url_vni = rest_urls['bgp_vni_config'].format(vrf_name)
payload = {'openconfig-bgp-evpn-ext:vni': [{
'vni-number': int(kwargs['vni']) ,
'config':{
'vni-number': int(kwargs['vni']) ,
'advertise-default-gw': True
}
}]
}
if not config_rest(dut, http_method=cli_type, rest_url=url_vni, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN vni config Failed')
return False
if vni_unconfig == 'no':
url_vni = rest_urls['bgp_vni_unconfig'].format(vrf_name,kwargs['vni'])
if not delete_rest(dut, rest_url=url_vni):
st.banner('FAIL-OCYANG: BGP EVPN vni delete Failed')
return False
if 'vni_rd' in kwargs and vni_unconfig == '':
url = rest_urls['bgp_vni_route_distinguisher'].format(vrf_name,kwargs['vni'])
if config.lower() == 'yes':
payload = {'openconfig-bgp-evpn-ext:route-distinguisher': kwargs['vni_rd']}
if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN vni route-distinguisher config Failed')
return False
elif config.lower() == 'no':
if not delete_rest(dut, rest_url=url):
st.banner('FAIL-OCYANG: BGP EVPN vni route-distinguisher delete Failed')
return False
if 'vni_both_rt' in kwargs and vni_unconfig == '':
url_i = rest_urls['bgp_vni_import_rt'].format(vrf_name,kwargs['vni'])
url_e = rest_urls['bgp_vni_export_rt'].format(vrf_name,kwargs['vni'])
if config.lower() == 'yes':
payload = {'openconfig-bgp-evpn-ext:import-rts': [kwargs['vni_both_rt']]}
if not config_rest(dut, http_method=cli_type, rest_url=url_i, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN vni import rt config Failed')
return False
payload = {'openconfig-bgp-evpn-ext:export-rts': [kwargs['vni_both_rt']]}
if not config_rest(dut, http_method=cli_type, rest_url=url_e, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN vni export rt config Failed')
return False
elif config.lower() == 'no':
if not delete_rest(dut, rest_url=url_i):
st.banner('FAIL-OCYANG: BGP EVPN vni import rt delete Failed')
return False
if not delete_rest(dut, rest_url=url_e):
st.banner('FAIL-OCYANG: BGP EVPN vni export rt delete Failed')
return False
if 'vni_import_rt' in kwargs and vni_unconfig == '':
url_i = rest_urls['bgp_vni_import_rt'].format(vrf_name, kwargs['vni'])
if config.lower() == 'yes':
payload = {'openconfig-bgp-evpn-ext:import-rts': [kwargs['vni_import_rt']]}
if not config_rest(dut, http_method=cli_type, rest_url=url_i, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN vni import rt config Failed')
return False
elif config.lower() == 'no':
if not delete_rest(dut, rest_url=url_i):
st.banner('FAIL-OCYANG: BGP EVPN vni import rt delete Failed')
return False
if 'vni_export_rt' in kwargs and vni_unconfig == '':
url_e = rest_urls['bgp_vni_export_rt'].format(vrf_name, kwargs['vni'])
if config.lower() == 'yes':
payload = {'openconfig-bgp-evpn-ext:export-rts': [kwargs['vni_export_rt']]}
if not config_rest(dut, http_method=cli_type, rest_url=url_e, json_data=payload):
st.banner('FAIL-OCYANG: BGP EVPN vni export rt config Failed')
return False
elif config.lower() == 'no':
if not delete_rest(dut, rest_url=url_e):
st.banner('FAIL-OCYANG: BGP EVPN vni export rt delete Failed')
return False
else:
my_cmd += '{} vni {}\n'.format(vni_unconfig,kwargs['vni'])
if 'vni_rd' in kwargs and vni_unconfig == '':
my_cmd += '{} rd {}\n'.format(config_cmd,kwargs['vni_rd'])
if 'vni_both_rt' in kwargs and vni_unconfig == '':
my_cmd += '{} route-target both {}\n'.format(config_cmd,kwargs['vni_both_rt'])
if 'vni_import_rt' in kwargs and vni_unconfig == '':
my_cmd += '{} route-target import {}\n'.format(config_cmd,kwargs['vni_import_rt'])
if 'vni_export_rt' in kwargs and vni_unconfig == '':
my_cmd += '{} route-target export {}\n'.format(config_cmd,kwargs['vni_export_rt'])
if vni_unconfig != 'no':
my_cmd += 'exit\n'
if cli_type == 'klish':
my_cmd += "exit\n"
else:
st.error("config_type_list is not matching - {}".format(type1))
return False
if cli_type in ['klish'] and cur_type != 'vrf_vni':
#my_cmd += 'exit\n'
my_cmd += 'exit\n'
if cli_type not in ['rest-put', 'rest-patch']:
st.debug('\n'+my_cmd+'\n')
st.debug(my_cmd.split("\n"))
st.config(dut, my_cmd.split("\n") if cli_type == 'klish' else my_cmd, type=cli_type)
def parse_rest_output_l2vpn_evpn_vni(response):
    """
    Parse the REST GET response for a BGP L2VPN EVPN VNI into the same
    dict layout produced by the CLI template for 'show bgp l2vpn evpn vni <id>'.

    :param response: dict returned by get_rest(); the VNI payload is expected
                     under response['output']['openconfig-bgp-evpn-ext:vni'] as a list.
    :return: single-element list containing the parsed dict, or [] when the
             response carries no VNI data.
    """
    vni_data = response['output'].get('openconfig-bgp-evpn-ext:vni', [])
    if not vni_data:
        return []
    # Only the first VNI entry is parsed (a single VNI id was queried).
    state = vni_data[0].get('state', {})
    parsed = {
        'vni': str(state.get('vni-number', 0)),
        'type': state.get('type', ''),
        # vrfname is not present in this REST container; left empty on purpose.
        'vrfname': '',
        'rd': state.get('route-distinguisher', ''),
        'originip': state.get('originator', ''),
        'gwmac': state.get('advertise-gw-mac', False),
        # 'rt' is the concatenation of import RTs followed by export RTs.
        'rt': state.get('import-rts', []) + state.get('export-rts', []),
    }
    return [parsed]
def verify_bgp_l2vpn_evpn_vni_id(dut,**kwargs):
    """
    Author: Gangadhara Sahu (gangadhara.sahu@broadcom.com)
    Verify the output of 'show bgp l2vpn evpn vni <vni-id>' against the
    key/value pairs passed in kwargs.

    verify_bgp_l2vpn_evpn_vni_id(dut=dut1,vni="100",rd="11:11",type="L2",vrfname="default",originip="1.1.1.1",gwmac="No",rt=['20:20','20:20'])
    :param dut:
    :param vni: mandatory VNI id to query
    :param type:
    :param vrfname:
    :param rd:
    :param rt:
    :param gwmac:
    :param originip:
    :return: True when a parsed entry matches every passed kwarg, else False
    """
    cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
    if cli_type == 'click':
        cli_type = "vtysh"
    if 'vni' not in kwargs:
        st.error("Mandatory arg vni is not present")
        return False
    if cli_type in ['rest-put', 'rest-patch']:
        # Dump the KLISH view as a debugging aid alongside the REST query.
        st.log('KLISH output for debugging REST')
        st.show(dut, "show bgp l2vpn evpn vni {}".format(kwargs['vni']), type='klish')
        rest_urls = st.get_datastore(dut, "rest_urls")
        url = rest_urls['bgp_vni_unconfig'].format('default', kwargs['vni'])
        response = get_rest(dut, rest_url=url)
        if not response['output']:
            st.error("OCYANG-FAIL: verify bgp l2vpn evpn vni <id> - Get Response is empty")
            return False
        result = parse_rest_output_l2vpn_evpn_vni(response)
    else:
        result = st.show(dut, "show bgp l2vpn evpn vni {}".format(kwargs['vni']), type=cli_type)
        if not result:
            st.error("Output is Empty")
            return False
    ret_val = False
    for entry in result:
        hits = sum(1 for key in kwargs if entry[key] == kwargs[key])
        if hits == len(kwargs):
            # All requested fields matched this entry; log and stop searching.
            ret_val = True
            for key in kwargs:
                st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], entry[key]))
            break
        # Partial/no match: log the per-key comparison to help debugging.
        for key in kwargs:
            if entry[key] == kwargs[key]:
                st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], entry[key]))
            else:
                st.log("No-Match: Match key {} NOT found => {} : {}".format(key, kwargs[key], entry[key]))
        st.log("\n")
    if ret_val is False:
        st.log("Fail: Not Matched all args in passed dict {} from parsed dict".format(kwargs))
    return ret_val
def verify_bgp_l2vpn_evpn_summary(dut,**kwargs):
    """
    Author: Gangadhara Sahu (gangadhara.sahu@broadcom.com)
    Verify the output of 'show bgp l2vpn evpn summary' against the
    key/value pairs passed in kwargs.

    verify_bgp_l2vpn_evpn_summary(dut=dut1,identifier="1.1.1.1",local_as="10",vrf_id="0",neighbor="21.1.1.2",version="4",pfxrcd="1",inq="0",outq="0",tblver="0",msgrcvd="3552")
    verify_bgp_l2vpn_evpn_summary(dut=dut1,neighbor=["21.1.1.2","2001::2"],version=["4","4"],pfxrcd=["1","1"],inq=["0","0"],outq=["0","0"],tblver=["0","0"],as_no=["20","20"])

    :param dut:
    :param identifier:
    :param local_as:
    :param vrf_id:
    :param neighbor: single neighbor or list of neighbors
    :param version:
    :param updown: expected per-neighbor session state ('up'/'down'), REST path only
    :return: True when all passed fields match, False otherwise
    """
    cli_type = kwargs.pop('cli_type', st.get_ui_type(dut, **kwargs))
    if cli_type == 'click':
        cli_type = "vtysh"
    if cli_type in ["rest-put", "rest-patch"]:
        rest_urls = st.get_datastore(dut, "rest_urls")
        url = rest_urls["get_evpn_neigh"]
        rest_out = get_rest(dut, rest_url=url, timeout=30)
        # Bug fix: a non-200 status previously fell through to the CLI
        # matching code with 'output' undefined, raising NameError.
        if rest_out["status"] != 200:
            st.error("OCYANG-FAIL: get_evpn_neigh GET failed with status {}".format(rest_out["status"]))
            return False
        no_match = match = False
        # Normalize scalar kwargs to lists so single and multiple neighbor
        # verification share the same code path.
        for key, val in kwargs.items():
            if not isinstance(val, list):
                kwargs[key] = [val]
        neigh_entries = rest_out["output"]["openconfig-network-instance:neighbors"]["neighbor"]
        for elem in neigh_entries:
            neigh_list = elem['afi-safis']['afi-safi']
            for neigh in neigh_list:
                if neigh["state"]['afi-safi-name'] == "openconfig-bgp-types:L2VPN_EVPN":
                    evpn_neigh = elem['neighbor-address']
                    if 'neighbor' in kwargs:
                        try:
                            index_num = kwargs["neighbor"].index(evpn_neigh)
                            exp_status = kwargs["updown"][index_num]
                            # A present (non-negative) received-prefix counter
                            # is treated as an Established session.
                            if neigh['state']['prefixes']['received'] >= 0:
                                status = "up"
                            else:
                                status = "down"
                            if exp_status == status:
                                st.log("Match found for neighbor {} with status as {}".format(evpn_neigh, status))
                                match = True
                            else:
                                st.log("Match NOT found for neighbor {}; expected status: {}"
                                       " but found: {}".format(evpn_neigh, exp_status, status))
                                no_match = True
                        except Exception:
                            # Neighbor not requested, or counters missing; skip it.
                            continue
                    else:
                        st.log("specify the neighbor argument to be verified ")
                        return False
        if no_match:
            st.log("At least one of the neighbor status is wrong;"
                   "kindly check above logs")
            return False
        if match:
            return True
        st.log("Neighbors {} not present in show output".format(kwargs["neighbor"]))
        return False
    output = st.show(dut, "show bgp l2vpn evpn summary", type=cli_type)
    if len(output) == 0:
        st.error("Output is Empty")
        return False
    # Derive 'updown' from the PfxRcd column: a numeric value means the
    # session is Established ('up'); any non-numeric state string means 'down'.
    for row in output:
        row['updown'] = 'up' if row['pfxrcd'].isdigit() else 'down'
    no_common_key = 0
    ret_val1 = False
    dict1 = {}
    # These fields appear once in the output header, not once per neighbor row.
    common_key_list = ['identifier', 'local_as', 'vrf_id', 'rib_entries', 'no_peers']
    for key in kwargs:
        if key in common_key_list:
            no_common_key += 1
    if no_common_key > 0:
        rlist = output[0]
        count = 0
        # Guard with 'key in common_key_list' first so a per-row key that is
        # absent from the header row cannot raise KeyError.
        for key in kwargs:
            if key in common_key_list and rlist[key] == kwargs[key]:
                count += 1
        if no_common_key == count:
            ret_val1 = True
            for key in kwargs:
                if key in common_key_list:
                    st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], rlist[key]))
        else:
            for key in kwargs:
                if key in common_key_list:
                    if rlist[key] == kwargs[key]:
                        st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], rlist[key]))
                    else:
                        st.log("No-Match: Match key {} NOT found => {} : {}".format(key, kwargs[key], rlist[key]))
            st.log("\n")
    # Header keys are now verified; drop them before the per-row match below.
    for key in common_key_list:
        if key in kwargs:
            dict1[key] = kwargs[key]
            del kwargs[key]
    if no_common_key > 0 and ret_val1 is False:
        st.error("DUT {} -> Match Not Found {}".format(dut, dict1))
        return ret_val1
    ret_val = True
    # Convert remaining kwargs to lists to handle single or list of instances.
    for key in kwargs:
        if not isinstance(kwargs[key], list):
            kwargs[key] = [kwargs[key]]
    if not kwargs:
        return ret_val
    # Bug fix: dict.keys() is not subscriptable in Python 3; use next(iter()).
    first_key = next(iter(kwargs))
    # Re-shape kwargs (dict of lists) into a list of per-row match dicts.
    input_dict_list = []
    for i in range(len(kwargs[first_key])):
        input_dict_list.append({key: kwargs[key][i] for key in kwargs})
    for input_dict in input_dict_list:
        entries = filter_and_select(output, None, match=input_dict)
        if entries:
            st.log("DUT {} -> Match Found {} ".format(dut, input_dict))
        else:
            st.error("DUT {} -> Match Not Found {}".format(dut, input_dict))
            ret_val = False
    return ret_val
def parse_rest_output_l2vpn_evpn_route(route):
    """
    Parse one EVPN route entry from the REST GET response into the dict
    layout produced by the CLI template for 'show bgp l2vpn evpn route'.

    :param route: one element of the 'route' list under
                  'openconfig-bgp-evpn-ext:routes' in the REST output.
    :return: dict with keys evpn_prefix, rd, status_code, next_hop, path
    """
    parsed = {}
    parsed['evpn_prefix'] = route.get('prefix', '')
    # The RD string may carry surrounding text; keep only the 'asn:nn'
    # or dotted-IPv4 portion, matching the CLI template output.
    rd_match = re.search(r"\d+\:\d+|\d+\.\d+\.\d+\.\d+", route.get('route-distinguisher', ''))
    parsed['rd'] = rd_match.group() if rd_match else ''
    state = route.get("state", {})
    parsed['status_code'] = '*' if state.get('valid-route', False) else ''
    if state.get('openconfig-rib-bgp-ext:best-path', False):
        parsed['status_code'] += '>'
    attr_sets = route.get("attr-sets", {})
    parsed['next_hop'] = attr_sets.get("next-hop", '')
    as_segments = attr_sets.get('as-path', {}).get('as-segment', [])
    # Bug fix: the fallback for a missing 'state' key must be a dict, not a
    # list, so the chained .get('member') cannot raise AttributeError.
    as_list = as_segments[0].get('state', {}).get('member', []) if as_segments else []
    # Space-separated AS path string, e.g. "100 200" (empty for local routes).
    parsed["path"] = " ".join(str(as_num) for as_num in as_list)
    return parsed
def verify_bgp_l2vpn_evpn_route(dut,**kwargs):
    """
    Author: Gangadhara Sahu (gangadhara.sahu@broadcom.com)
    Verify the output of 'show bgp l2vpn evpn route' against the
    key/value pairs passed in kwargs.

    verify_bgp_l2vpn_evpn_route(dut=dut1,evpn_prefix="[5]:[0]:[24]:[15.1.1.0]",rd="13:2",status_code="*>",metric="0",next_hop="0.0.0.0",weight="32768",path="",origin_code="i",displayed_prefixes="5",no_of_paths="5",bgp_version="1",router_id="1.1.1.1")
    verify_bgp_l2vpn_evpn_route(dut=dut1,evpn_prefix="[2]:[0]:[48]:[00:21:ee:00:10:17]:[32]:[59.1.1.7]",rd="1.1.1.1:2",status_code="*>",metric="",next_hop="11.1.1.1",weight="32768",path="",origin_code="i")

    :param dut:
    :param bgp_version:
    :param router_id:
    :param evpn_prefix:
    :param rd:
    :param path:
    :param status_code:
    :param weight:
    :param metric:
    :param next_hop:
    :param origin_code:
    :param displayed_prefixes:
    :param no_of_paths:
    :return: True when all passed fields match, False otherwise
    """
    cli_type = kwargs.pop('cli_type', st.get_ui_type(dut, **kwargs))
    cli_type = "vtysh" if cli_type == 'click' else cli_type
    if cli_type in ['rest-put', 'rest-patch']:
        # Dump the KLISH view as a debugging aid alongside the REST query.
        st.log('KLISH output for debugging REST')
        st.show(dut, "show bgp l2vpn evpn route", type='klish')
        rest_urls = st.get_datastore(dut, "rest_urls")
        url = rest_urls['get_evpn_routes']
        response = get_rest(dut, rest_url=url)
        if response['output']:
            route_list = response["output"].get("openconfig-bgp-evpn-ext:routes", {}).get("route", [])
            output = []
            for route in route_list:
                out_dict = {}
                try:
                    # Parse only the routes selected by evpn_prefix (preferred)
                    # or rd; other routes contribute an empty placeholder dict.
                    if 'evpn_prefix' in kwargs:
                        if kwargs['evpn_prefix'] == route['prefix']:
                            out_dict = parse_rest_output_l2vpn_evpn_route(route)
                    elif 'rd' in kwargs:
                        rd_str = route.get('route-distinguisher', '')
                        regexp_match = re.search(r"\d+\:\d+|\d+\.\d+\.\d+\.\d+", rd_str)
                        current_rd = regexp_match.group() if regexp_match else ''
                        if kwargs['rd'] == current_rd:
                            out_dict = parse_rest_output_l2vpn_evpn_route(route)
                    output.append(out_dict)
                except Exception as e:
                    st.log("{}".format(e))
                    continue
        else:
            st.error("OCYANG-FAIL: show bgp l2vpn evpn route - Get Response is empty")
            return False
        # These fields are not available/reliable via REST; skip verifying them.
        skip_key_list = ['bgp_version', 'router_id', 'metric', 'weight', 'origin_code', 'rt', 'et', 'rmac',
                         'displayed_prefixes', 'no_of_paths']
        for skip_key in skip_key_list:
            if skip_key in kwargs:
                del kwargs[skip_key]
    else:
        output = st.show(dut, "show bgp l2vpn evpn route", type=cli_type)
        if len(output) == 0:
            st.error("Output is Empty")
            return False
        if "return_output" in kwargs:
            return True
    # Trim stray whitespace left by the CLI template around every value.
    # (Replaces the O(n^2) output.index() lookup, which was also wrong for
    # duplicate rows.)
    for row in output:
        for key in row:
            row[key] = row[key].strip()
    no_common_key = 0
    ret_val1 = False
    dict1 = {}
    # These fields appear once in the output header, not once per route row.
    common_key_list = ['bgp_version', 'router_id', 'displayed_prefixes', 'no_of_paths']
    for key in kwargs:
        if key in common_key_list:
            no_common_key += 1
    if no_common_key > 0:
        rlist = output[0]
        count = 0
        # Guard with 'key in common_key_list' first so a per-row key that is
        # absent from the header row cannot raise KeyError.
        for key in kwargs:
            if key in common_key_list and rlist[key] == kwargs[key]:
                count += 1
        if no_common_key == count:
            ret_val1 = True
            for key in kwargs:
                if key in common_key_list:
                    st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], rlist[key]))
        else:
            for key in kwargs:
                if key in common_key_list:
                    if rlist[key] == kwargs[key]:
                        st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], rlist[key]))
                    else:
                        st.log("No-Match: Match key {} NOT found => {} : {}".format(key, kwargs[key], rlist[key]))
            st.log("\n")
    # Header keys are now verified; drop them before the per-row match below.
    for key in common_key_list:
        if key in kwargs:
            dict1[key] = kwargs[key]
            del kwargs[key]
    if no_common_key > 0 and ret_val1 is False:
        st.error("DUT {} -> Match Not Found {}".format(dut, dict1))
        return ret_val1
    ret_val = True
    # Convert remaining kwargs to lists to handle single or list of instances.
    for key in kwargs:
        if not isinstance(kwargs[key], list):
            kwargs[key] = [kwargs[key]]
    if not kwargs:
        return ret_val
    # Bug fix: dict.keys() is not subscriptable in Python 3; use next(iter()).
    first_key = next(iter(kwargs))
    # Re-shape kwargs (dict of lists) into a list of per-row match dicts.
    input_dict_list = []
    for i in range(len(kwargs[first_key])):
        input_dict_list.append({key: kwargs[key][i] for key in kwargs})
    for input_dict in input_dict_list:
        entries = filter_and_select(output, None, match=input_dict)
        if entries:
            st.log("DUT {} -> Match Found {} ".format(dut, input_dict))
        else:
            st.error("DUT {} -> Match Not Found {}".format(dut, input_dict))
            ret_val = False
    return ret_val
def verify_bgp_l2vpn_evpn_vni(dut,**kwargs):
    ### NOT USED
    """
    Author: Gangadhara Sahu (gangadhara.sahu@broadcom.com)
    Verify the output of 'show bgp l2vpn evpn vni' against the
    key/value pairs passed in kwargs.

    verify_bgp_l2vpn_evpn_vni(dut=dut1,vni="100",rd="11:11",type="L2",tenant_vrf="default",import_rt='20:20',export_rt='20:20',gw_macip="Enabled")
    verify_bgp_l2vpn_evpn_vni(dut=dut1,vni=["100","200"],rd=["11:11","15:15"],type=["L2","L2"],tenant_vrf=["default","default"],import_rt=['20:20','5:5'],export_rt=['20:20','6:6'])

    :param dut:
    :param vni:
    :param type:
    :param tenant_vrf:
    :param rd:
    :param bum_flooding:
    :param all_vni_flag:
    :param no_l2vni:
    :param no_l3vni:
    :param gw_macip:
    :param import_rt:
    :param export_rt:
    :return: True when all passed fields match, False otherwise
    """
    output = st.show(dut, "show bgp l2vpn evpn vni", type="vtysh")
    if len(output) == 0:
        st.error("Output is Empty")
        return False
    no_common_key = 0
    ret_val1 = False
    dict1 = {}
    # These fields appear once in the output header, not once per VNI row.
    common_key_list = ['gw_macip', 'all_vni_flag', 'bum_flooding', 'no_l2vni', 'no_l3vni']
    for key in kwargs:
        if key in common_key_list:
            no_common_key += 1
    if no_common_key > 0:
        rlist = output[0]
        count = 0
        # Guard with 'key in common_key_list' first so a per-row key that is
        # absent from the header row cannot raise KeyError.
        for key in kwargs:
            if key in common_key_list and rlist[key] == kwargs[key]:
                count += 1
        if no_common_key == count:
            ret_val1 = True
            for key in kwargs:
                if key in common_key_list:
                    st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], rlist[key]))
        else:
            for key in kwargs:
                if key in common_key_list:
                    if rlist[key] == kwargs[key]:
                        st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], rlist[key]))
                    else:
                        st.log("No-Match: Match key {} NOT found => {} : {}".format(key, kwargs[key], rlist[key]))
            st.log("\n")
    # Header keys are now verified; drop them before the per-row match below.
    for key in common_key_list:
        if key in kwargs:
            dict1[key] = kwargs[key]
            del kwargs[key]
    if no_common_key > 0 and ret_val1 is False:
        st.error("DUT {} -> Match Not Found {}".format(dut, dict1))
        return ret_val1
    ret_val = True
    # Convert remaining kwargs to lists to handle single or list of instances.
    for key in kwargs:
        if not isinstance(kwargs[key], list):
            kwargs[key] = [kwargs[key]]
    if not kwargs:
        return ret_val
    # Bug fix: dict.keys() is not subscriptable in Python 3; use next(iter()).
    first_key = next(iter(kwargs))
    # Re-shape kwargs (dict of lists) into a list of per-row match dicts.
    input_dict_list = []
    for i in range(len(kwargs[first_key])):
        input_dict_list.append({key: kwargs[key][i] for key in kwargs})
    for input_dict in input_dict_list:
        entries = filter_and_select(output, None, match=input_dict)
        if entries:
            st.log("DUT {} -> Match Found {} ".format(dut, input_dict))
        else:
            st.error("DUT {} -> Match Not Found {}".format(dut, input_dict))
            ret_val = False
    return ret_val
def verify_bgp_l2vpn_evpn_rd(dut, **kwargs):
    """
    Author: Gangadhara Sahu (gangadhara.sahu@broadcom.com)
    Verify the output of "show bgp l2vpn evpn rd <rd-value>".

    verify_bgp_l2vpn_evpn_rd(dut=dut1,evpn_type_5_prefix="[5]:[0]:[24]:[15.1.1.0]",rd="13:1",rd_name="as2",status_code="*>",metric="0",next_hop="0.0.0.0",weight="32768",origin_code="i",displayed_prefixes="1")

    :param dut: device handle
    :param evpn_type_2_prefix:
    :param evpn_type_3_prefix:
    :param evpn_type_4_prefix:
    :param evpn_type_5_prefix:
    :param rd: mandatory; route distinguisher used in the show command
    :param rd_name:
    :param status_code:
    :param metric:
    :param next_hop:
    :param origin_code:
    :param displayed_prefixes:
    :param total_prefixes:
    :return: True when every requested field matches, else False
    """
    if 'rd' not in kwargs:
        st.error("Mandatory arg rd is not present")  # typo fixed: "Mandetory"
        return False
    output = st.show(dut, "show bgp l2vpn evpn rd {}".format(kwargs['rd']), type="vtysh")
    if len(output) == 0:
        st.error("Output is Empty")
        return False
    # Strip surrounding whitespace from every parsed field. The original in-place
    # loop indexed via output.index(a), which resolves to the FIRST equal row and
    # therefore mishandled duplicate rows; rebuilding the list avoids that.
    output = [{key: val.strip() for key, val in row.items()} for row in output]
    # Keys that appear once in the show-output header rather than per-prefix row.
    common_key_list = ['rd_name', 'rd', 'displayed_prefixes', 'total_prefixes']
    no_common_key = sum(1 for key in kwargs if key in common_key_list)
    ret_val1 = False
    dict1 = {}
    if no_common_key > 0:
        rlist = output[0]
        count = 0
        for key in kwargs:
            # Membership checked first so row-only keys can never raise KeyError.
            if key in common_key_list and rlist[key] == kwargs[key]:
                count = count + 1
        if no_common_key == count:
            ret_val1 = True
            for key in kwargs:
                if key in common_key_list:
                    st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], rlist[key]))
        else:
            for key in kwargs:
                if key in common_key_list:
                    if rlist[key] == kwargs[key]:
                        st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], rlist[key]))
                    else:
                        st.log("No-Match: Match key {} NOT found => {} : {}".format(key, kwargs[key], rlist[key]))
    st.log("\n")
    # Header keys are consumed here; the remaining kwargs are matched per row below.
    for key in common_key_list:
        if key in kwargs:
            dict1[key] = kwargs[key]
            del kwargs[key]
    if no_common_key > 0 and ret_val1 is False:
        st.error("DUT {} -> Match Not Found {}".format(dut, dict1))
        return ret_val1
    ret_val = True  # bugfix: was the string "True"
    # Normalise every kwarg to a list so scalar and list inputs are handled alike.
    for key in kwargs:
        if not isinstance(kwargs[key], list):
            kwargs[key] = [kwargs[key]]
    if not kwargs:
        # Only header-level keys were requested and they all matched.
        return ret_val
    # Build one match-dict per instance (py3 fix: dict.keys() is not subscriptable).
    input_dict_list = []
    for i in range(len(kwargs[list(kwargs)[0]])):
        input_dict_list.append({key: kwargs[key][i] for key in kwargs})
    for input_dict in input_dict_list:
        entries = filter_and_select(output, None, match=input_dict)
        if entries:
            st.log("DUT {} -> Match Found {} ".format(dut, input_dict))
        else:
            st.error("DUT {} -> Match Not Found {}".format(dut, input_dict))
            ret_val = False
    return ret_val
def verify_bgp_l2vpn_evpn_route_type_prefix(dut, **kwargs):
    """
    Author: Gangadhara Sahu (gangadhara.sahu@broadcom.com)
    Verify the output of "show bgp l2vpn evpn route type prefix".

    verify_bgp_l2vpn_evpn_route_type_prefix(dut=dut1,evpn_type_5_prefix="[5]:[0]:[24]:[15.1.1.0]",rd="13:1",rd_name="as2",status_code="*>",metric="0",next_hop="0.0.0.0",weight="32768",origin_code="i",displayed_prefixes="1")

    :param dut: device handle
    :param evpn_type_5_prefix:
    :param rd:
    :param path:
    :param status_code:
    :param weight:
    :param metric:
    :param next_hop:
    :param origin_code:
    :param displayed_prefixes:
    :param no_of_paths:
    :return: True when every requested field matches, else False
    """
    output = st.show(dut, "show bgp l2vpn evpn route type prefix", type="vtysh")
    if len(output) == 0:
        st.error("Output is Empty")
        return False
    # Strip surrounding whitespace from every parsed field (the original in-place
    # loop via output.index(a) mishandled duplicate rows).
    output = [{key: val.strip() for key, val in row.items()} for row in output]
    # Keys that appear once in the show-output header rather than per-prefix row.
    common_key_list = ['bgp_version', 'router_id', 'displayed_prefixes', 'no_of_paths']
    no_common_key = sum(1 for key in kwargs if key in common_key_list)
    ret_val1 = False
    dict1 = {}
    if no_common_key > 0:
        rlist = output[0]
        count = 0
        for key in kwargs:
            # Membership checked first so row-only keys can never raise KeyError.
            if key in common_key_list and rlist[key] == kwargs[key]:
                count = count + 1
        if no_common_key == count:
            ret_val1 = True
            for key in kwargs:
                if key in common_key_list:
                    st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], rlist[key]))
        else:
            for key in kwargs:
                if key in common_key_list:
                    if rlist[key] == kwargs[key]:
                        st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], rlist[key]))
                    else:
                        st.log("No-Match: Match key {} NOT found => {} : {}".format(key, kwargs[key], rlist[key]))
    st.log("\n")
    # Header keys are consumed here; the remaining kwargs are matched per row below.
    for key in common_key_list:
        if key in kwargs:
            dict1[key] = kwargs[key]
            del kwargs[key]
    if no_common_key > 0 and ret_val1 is False:
        st.error("DUT {} -> Match Not Found {}".format(dut, dict1))
        return ret_val1
    ret_val = True  # bugfix: was the string "True"
    # Normalise every kwarg to a list so scalar and list inputs are handled alike.
    for key in kwargs:
        if not isinstance(kwargs[key], list):
            kwargs[key] = [kwargs[key]]
    if not kwargs:
        # Only header-level keys were requested and they all matched.
        return ret_val
    # Build one match-dict per instance (py3 fix: dict.keys() is not subscriptable).
    input_dict_list = []
    for i in range(len(kwargs[list(kwargs)[0]])):
        input_dict_list.append({key: kwargs[key][i] for key in kwargs})
    for input_dict in input_dict_list:
        entries = filter_and_select(output, None, match=input_dict)
        if entries:
            st.log("DUT {} -> Match Found {} ".format(dut, input_dict))
        else:
            st.error("DUT {} -> Match Not Found {}".format(dut, input_dict))
            ret_val = False
    return ret_val
def create_overlay_intf(dut, vtep_name, ip_addr, config='yes', skip_error=False, cli_type=''):
    """
    purpose:
            This definition is used to create or remove a VxLAN overlay interface (VTEP).
    Arguments:
    :param dut: device to be configured
    :type dut: string
    :param vtep_name: VTEP name to be created
    :type vtep_name: string
    :param ip_addr: ip address to be bound to overlay gateway
    :type ip_addr: string
    :param config: 'yes' to configure, 'no' to remove the overlay
    :type config: string
    :param skip_error: pass True to tolerate CLI errors
    :param cli_type: click | klish | rest-put | rest-patch
    :return: st.config() result for CLI types; True/False/None for REST
    usage:
            create_overlay_intf(dut1, "dut1VTEP", "1.1.1.1", cli_type='click')
            create_overlay_intf(dut1, "dut1VTEP", "1.1.1.1", config='no', cli_type='klish')
    Created by: Julius <julius.mariyan@broadcom.com
    """
    cli_type = st.get_ui_type(dut, cli_type=cli_type)
    if config == 'yes':
        conf_str = ''
        action = 'add'
    else:
        conf_str = 'no'
        ip_addr = ''
        action = 'del'
    if cli_type == 'click':
        command = "config vxlan {} {} {}".format(action, vtep_name, ip_addr)
    elif cli_type == 'klish':
        command = []
        command.append('interface vxlan {}'.format(vtep_name))
        command.append('{} source-ip {}'.format(conf_str, ip_addr))
        command.append('exit')
    elif cli_type in ["rest-put", "rest-patch"]:
        rest_urls = st.get_datastore(dut, "rest_urls")
        if config == 'yes':
            url = rest_urls['config_vxlan_with_ip']
            payload = { "openconfig-interfaces:interface":
                          [ { "name": vtep_name,
                              "config": { "name": vtep_name, "type": "IF_NVE" },
                              "openconfig-vxlan:vxlan-if": { "config": { "source-vtep-ip": ip_addr } }
                            } ]
                      }
            ### PUT and PATCH doesn't work for this URI hence use POST
            ### PUT and PATCH URIs does config similar to klish clis
            if not config_rest(dut, http_method='post', rest_url=url, json_data=payload):
                st.banner('FAIL-OCYANG: Create Vxlan Interface with src vtep IP failed')
                return False
            # Bugfix: must return here. Previously the success path fell through
            # to st.debug(command) with 'command' undefined, raising NameError.
            return True
        else:
            url = rest_urls['delete_vxlan_ip'].format(vtep_name)
            if not delete_rest(dut, rest_url=url):
                st.banner('FAIL-OCYANG')
            url = rest_urls['delete_vxlan'].format(vtep_name)
            if not delete_rest(dut, rest_url=url):
                st.banner('FAIL-OCYANG')
            return
    else:
        st.error("Invalid CLI type - {}".format(cli_type))
        return False
    st.debug(command)
    return st.config(dut, command, type=cli_type, skip_error_check=skip_error)
def create_evpn_instance(dut, nvo_name, vtep_name, config='yes', skip_error=False, cli_type=''):
    """
    purpose:
            Create or delete an EVPN NVO instance bound to a VTEP.
    Arguments:
    :param dut: device to be configured
    :type dut: string
    :param nvo_name: evpn instance name to be created
    :type nvo_name: string
    :param vtep_name: vtep name to be bound to evpn instance
    :type vtep_name: string
    :param config: 'yes' to configure, 'no' to remove the evpn instance
    :type config: string
    :param skip_error: pass True to tolerate CLI errors
    :param cli_type: only 'click' is supported for NVO configuration
    :return: st.config() result for click; False otherwise
    usage:
            create_evpn_instance(dut1, "dut1EVPN", "dut1VTEP", cli_type='click')
            create_evpn_instance(dut1, "dut1EVPN", "dut1VTEP", config='no', cli_type='klish')
    Created by: Julius <julius.mariyan@broadcom.com
    """
    cli_type = st.get_ui_type(dut, cli_type=cli_type)
    # On delete the vtep name is omitted from the click command.
    if config == 'yes':
        action = 'add'
    else:
        action = 'del'
        vtep_name = ''
    if cli_type == 'click':
        command = "config vxlan evpn_nvo {} {} {}".format(action, nvo_name, vtep_name)
        st.debug(command)
        return st.config(dut, command, skip_error_check=skip_error, type=cli_type)
    # NVO configuration exists only in click; every other UI type is rejected.
    if cli_type == 'klish':
        st.error("NVO command is not supported in klish")
    elif cli_type in ['rest-put', 'rest-patch']:
        st.error("NVO config through OCYANG URI not supported")
    else:
        st.error("Invalid CLI type - {}".format(cli_type))
    return False
def map_vlan_vni(dut, vtep_name, vlan_id, vni_id, range_val='1', config='yes', skip_error=False, cli_type=''):
    """
    purpose:
            This definition is used to create VLAN to VNI mapping under EVPN instance
    Arguments:
    :param dut: device to be configured
    :type dut: string
    :param vtep_name: VTEP name where VLAN to VNI mapping needs to be done
    :type vtep_name: string
    :param vlan_id: vlan id to be mapped to VNI
    :type vlan_id: string
    :param vni_id: VNI id where vlan to be mapped
    :type vni_id: string
    :param range_val: range of vlans to be mapped to VNI
    :type range_val: string
    :param config: it takes value as 'yes' or 'no' to configure or remove evpn instance respectively
    :type config: string
    :param : cli_type
    :param : skip_error
    :return: st.config() result for CLI types; for REST: error message string /
             False / None depending on outcome
    usage:
            map_vlan_vni(dut1, "dut1VTEP", "100", "100", cli_type='click')
            map_vlan_vni(dut1, "dut1VTEP", "100", "100", config="no", cli_type='click')
            map_vlan_vni(dut1, "dut1VTEP", "100", "100", range="10")
            map_vlan_vni(dut1, "dut1VTEP", "100", "100", range="10", config="no")
    Created by: Julius <julius.mariyan@broadcom.com
    """
    cli_type = st.get_ui_type(dut,cli_type=cli_type)
    range_val = int(range_val)
    if config == 'yes':
        conf_str = ''
        action = 'add'
    else:
        conf_str = 'no'
        action = 'del'
    # NOTE(review): if range_val < 1, neither branch below assigns 'command'
    # for click/klish, causing a NameError at st.debug(command) — confirm callers
    # never pass range_val below 1.
    if cli_type == 'click':
        if range_val > 1:
            # click uses an inclusive vlan range: start .. start+count-1
            vlan_end = int(vlan_id) + range_val - 1
            command = "config vxlan map_range {} {} {} {} {}".format(action, vtep_name, vlan_id, vlan_end, vni_id)
        elif range_val == 1:
            command = "config vxlan map {} {} {} {}".format(action, vtep_name, vlan_id, vni_id)
    elif cli_type == 'klish':
        command = []
        command.append('interface vxlan {}'.format(vtep_name))
        if range_val == 1:
            command.append('{} map vni {} vlan {}'.format(conf_str, vni_id, vlan_id))
        elif range_val > 1:
            command.append('{} map vni {} vlan {} count {}'.format(conf_str, vni_id, vlan_id, range_val))
        command.append('exit')
    elif cli_type in ['rest-put','rest-patch']:
        if range_val == 1:
            rest_urls = st.get_datastore(dut, "rest_urls")
            # REST body needs 'VlanN' interface name and an integer vni-id.
            vlan_data = str(vlan_id) if type(vlan_id) is not str else vlan_id
            vlan_str = 'Vlan'+vlan_data
            vni_id = int(vni_id) if type(vni_id) is not int else vni_id
            if config == 'yes':
                url = rest_urls['config_vlan_vni_mapping'].format(vlan_str)
                payload = { "openconfig-vxlan:vni-instance":
                              [{"vni-id": vni_id,
                                "source-nve": vtep_name,
                                "config": {"vni-id": vni_id, "source-nve": vtep_name}
                               }]
                          }
                # get_response=True so any RESTCONF error message can be
                # extracted and returned to the caller for validation.
                response = config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload,get_response=True)
                error_list = response['output'].get('ietf-restconf:errors', {}).get('error', [])
                if error_list:
                    err_msg = error_list[0].get('error-message', '')
                    st.banner('FAIL-OCYANG: vlan-vni map failed')
                    # Returns the device's error text (possibly '') on failure.
                    return err_msg
            else:
                url = rest_urls['delete_vlan_vni_mapping'].format(vlan_str,vni_id,vtep_name)
                if not delete_rest(dut, rest_url=url):
                    st.banner('FAIL-OCYANG')
                    return False
            # REST success path returns None.
            return
        elif range_val > 1:
            ### In case of range , need to call above URI multiple times, instead fallback to klish
            cli_type = 'klish'
            command = []
            command.append('interface vxlan {}'.format(vtep_name))
            command.append('{} map vni {} vlan {} count {}'.format(conf_str, vni_id, vlan_id, range_val))
            command.append('exit')
    else:
        st.error("Invalid CLI type - {}".format(cli_type))
        return False
    st.debug(command)
    return st.config(dut, command, skip_error_check=skip_error, type=cli_type)
def parse_rest_output_vxlan_tunnel(response):
    """
    Convert an OC-YANG vxlan-tunnel-infos GET response into the list-of-dict
    shape produced by the 'show vxlan tunnel' CLI template.

    :param response: dict returned by get_rest(); JSON body under 'output'
    :return: list of dicts with keys total_count, src_vtep, rem_vtep, tun_status
    """
    tunnel_list = response['output'].get('openconfig-vxlan:vxlan-tunnel-infos', {}).get('vxlan-tunnel-info', [])
    tunnel_count = len(tunnel_list)
    result = []
    for tunnel in tunnel_list:
        entry = {}  # renamed from 'dict' to avoid shadowing the builtin
        entry['total_count'] = tunnel_count
        state = tunnel.get('state', {})
        entry['src_vtep'] = state.get('source-ip', "")
        entry['rem_vtep'] = state.get('peer-ip', "")
        tunnel_status = state.get('status', "")
        if tunnel_status == 'UP':
            entry['tun_status'] = 'oper_up'
        elif tunnel_status == 'DOWN':
            entry['tun_status'] = 'oper_down'
        else:
            # Pass any other status through unchanged (was a direct
            # tunnel['state']['status'] lookup that could raise KeyError
            # when 'state' is absent).
            entry['tun_status'] = tunnel_status
        result.append(entry)
    return result
def verify_vxlan_tunnel_status(dut, src_vtep, rem_vtep_list, exp_status_list, cli_type=''):
    '''
    purpose:
            Verify the operational status of VxLAN tunnels.
    Arguments:
    :param dut: Device name where the command to be executed
    :type dut: string
    :param src_vtep: ip address of local VTEP
    :type src_vtep: string
    :param rem_vtep_list: list of remote VTEP ip address
    :type rem_vtep_list: list
    :param exp_status_list: expected operational status per remote VTEP,
                            e.g. ['oper_down','oper_up']
    :type exp_status_list: list
    :param cli_type: click | klish | rest-put | rest-patch
    :return: True when every remote VTEP matches its expected status, else False
    usage:  verify_vxlan_tunnel_status(dut1,'1.1.1.1',['2.2.2.2','3.3.3.3'],['oper_up','oper_up'])
            verify_vxlan_tunnel_status(dut1,'1.1.1.1',['2.2.2.2','3.3.3.3'],['oper_down','oper_up'])
    Created by: Julius <julius.mariyan@broadcom.com
    '''
    cli_type = st.get_ui_type(dut, cli_type=cli_type)
    success = True
    if cli_type in ['click', 'klish']:
        cli_out = st.show(dut, 'show vxlan tunnel', type=cli_type)
    elif cli_type in ['rest-put', 'rest-patch']:
        rest_urls = st.get_datastore(dut, "rest_urls")
        url = rest_urls['vxlan_tunnel_info']
        response = get_rest(dut, rest_url=url)
        if response['output']:
            # Normalise the REST payload to the CLI-template row format.
            cli_out = parse_rest_output_vxlan_tunnel(response)
        else:
            st.error("OCYANG-FAIL: verify vxlan tunnel - Get Response is empty")
            return False
    else:
        st.error("Invalid CLI type - {}".format(cli_type))
        return False
    # Check each remote VTEP against its expected status; keep going on failure
    # so every mismatch is logged.
    for rem_vtep, status in zip(rem_vtep_list, exp_status_list):
        fil_out = filter_and_select(cli_out, ["tun_status"], {"src_vtep": src_vtep,
                                                              "rem_vtep": rem_vtep})
        if not fil_out:
            st.error('No entry found for source VTEP: {} and remote VTEP: {} in '
                     'output: {}'.format(src_vtep, rem_vtep, cli_out))
            success = False
            continue
        fil_out = fil_out[0]
        if fil_out["tun_status"] == status:
            st.log('Match found; remote VTEP {} status {}; expected '
                   '{}'.format(rem_vtep, fil_out["tun_status"], status))
        else:
            st.error('Match NOT found; expected status for remote VTEP: {} is : {} '
                     'but found: {}'.format(rem_vtep, status, fil_out["tun_status"]))
            success = False
    return success  # idiom fix: was "True if success else False"
def verify_bgp_l2vpn_evpn_route_type_macip(dut, **kwargs):
    """
    Author: Gangadhara Sahu (gangadhara.sahu@broadcom.com)
    Verify the output of "show bgp l2vpn evpn route type macip".

    verify_bgp_l2vpn_evpn_route_type_macip(dut=data.dut1,evpn_type_2_prefix="[2]:[0]:[48]:[00:21:ee:00:10:16]",rd="1.1.1.1:2",status_code="*>",metric="",next_hop="11.1.1.1",weight="32768",path="",origin_code="i")

    :param dut: device handle
    :param evpn_type_2_prefix: type-2 (MAC/IP) route prefix to look for
    :param rd:
    :param path:
    :param status_code:
    :param weight:
    :param metric:
    :param next_hop:
    :param origin_code:
    :param displayed_prefixes:
    :param no_of_paths:
    :return: True when every requested field matches, else False
    """
    cli_type = kwargs.pop('cli_type', st.get_ui_type(dut, **kwargs))
    cli_type = "vtysh" if cli_type == 'click' else cli_type
    if cli_type in ["rest-put", "rest-patch"]:
        ret_val = True
        rest_urls = st.get_datastore(dut, "rest_urls")
        url = rest_urls["get_evpn_routes"]
        rest_out = get_rest(dut, rest_url=url, timeout=30)
        if rest_out["status"] == 200:
            out_dict = {}
            rest_out = rest_out["output"]["openconfig-bgp-evpn-ext:routes"]["route"]
            match = False
            for i in rest_out:
                try:
                    prefix = i["prefix"]
                    # Only the requested prefix and only valid routes are checked.
                    if prefix == kwargs["evpn_type_2_prefix"] and i["state"]['valid-route']:
                        out_dict["next_hop"] = i["attr-sets"]["next-hop"]
                        if 'rd' in kwargs:
                            out_dict["rd"] = i["route-distinguisher"]
                        # Map OC origin enum to the single-letter CLI code.
                        if 'origin_code' in kwargs and i['attr-sets']['origin'] == "IGP":
                            out_dict["origin_code"] = "i"
                        if 'origin_code' in kwargs and i['attr-sets']['origin'] == "EGP":
                            out_dict["origin_code"] = "e"
                        if 'origin_code' in kwargs and i['attr-sets']['origin'] == "incomplete":
                            out_dict["origin_code"] = "?"
                        if 'path' in kwargs:
                            # Rebuild the space-separated AS-path string.
                            as_path = ""
                            for as1 in i['attr-sets']['as-path']['as-segment'][0]['state']['member']:
                                as_path = as_path + str(as1) + " "
                            as_path = as_path.strip()
                            out_dict["path"] = as_path
                        for key in out_dict.keys():
                            if key in kwargs:
                                if out_dict[key] == kwargs[key]:
                                    st.log("Expected value {} found for key: {} for route {}".format(out_dict[key], key, prefix))
                                    match = True
                                else:
                                    st.log("Match NOT found; expected value {} but got"
                                           " {}".format(kwargs[key], out_dict[key]))
                                    ret_val = False
                        if match:
                            break
                except Exception:
                    # Best-effort: skip routes that lack the expected keys.
                    continue
            if not match:
                st.log("MAC IP Route {} was not found in the rest output".format(kwargs["evpn_type_2_prefix"]))
                return False
            elif not ret_val:
                return False
            else:
                return True
        else:
            st.log("REST command execution failed")
            ret_val = False
    else:
        output = st.show(dut, "show bgp l2vpn evpn route type macip", type=cli_type)
        if len(output) == 0:
            st.error("Output is Empty")
            return False
        # Strip surrounding whitespace from every parsed field (the original
        # in-place loop via output.index(a) mishandled duplicate rows).
        output = [{key: val.strip() for key, val in row.items()} for row in output]
        # Keys that appear once in the show-output header rather than per row.
        common_key_list = ['bgp_version', 'router_id', 'displayed_prefixes', 'no_of_paths']
        no_common_key = sum(1 for key in kwargs if key in common_key_list)
        ret_val1 = False
        dict1 = {}
        if no_common_key > 0:
            rlist = output[0]
            count = 0
            for key in kwargs:
                # Membership checked first so row-only keys can never raise KeyError.
                if key in common_key_list and rlist[key] == kwargs[key]:
                    count = count + 1
            if no_common_key == count:
                ret_val1 = True
                for key in kwargs:
                    if key in common_key_list:
                        st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], rlist[key]))
            else:
                for key in kwargs:
                    if key in common_key_list:
                        if rlist[key] == kwargs[key]:
                            st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], rlist[key]))
                        else:
                            st.log("No-Match: Match key {} NOT found => {} : {}".format(key, kwargs[key], rlist[key]))
        st.log("\n")
        # Header keys are consumed here; the remaining kwargs match per row.
        for key in common_key_list:
            if key in kwargs:
                dict1[key] = kwargs[key]
                del kwargs[key]
        if no_common_key > 0 and ret_val1 is False:
            st.error("DUT {} -> Match Not Found {}".format(dut, dict1))
            return ret_val1
        ret_val = True  # bugfix: was the string "True"
        # Normalise every kwarg to a list so scalar and list inputs are handled alike.
        for key in kwargs:
            if not isinstance(kwargs[key], list):
                kwargs[key] = [kwargs[key]]
        if not kwargs:
            return ret_val
        # Build one match-dict per instance (py3 fix: dict.keys() is not subscriptable).
        input_dict_list = []
        for i in range(len(kwargs[list(kwargs)[0]])):
            input_dict_list.append({key: kwargs[key][i] for key in kwargs})
        for input_dict in input_dict_list:
            entries = filter_and_select(output, None, match=input_dict)
            if entries:
                st.log("DUT {} -> Match Found {} ".format(dut, input_dict))
            else:
                st.error("DUT {} -> Match Not Found {}".format(dut, input_dict))
                st.log("output is {}".format(output))
                st.log("input_dict is {}".format(input_dict))
                ret_val = False
    return ret_val
def map_vrf_vni(dut, vrf_name, vni, config='yes', vtep_name='', skip_error=False, cli_type=''):
    """
    purpose:
            Configure or remove a VRF to VNI mapping.
    Arguments:
    :param dut: device to be configured
    :type dut: string
    :param vrf_name: name of the vrf to be mapped to VNI
    :type vrf_name: string
    :param vni: VNI to be mapped to the VRF
    :type vni: string
    :param config: 'yes' to configure, 'no' to remove the mapping
    :type config: string
    :param vtep_name: VTEP name; mandatory for klish and REST
    :param skip_error: pass True to tolerate CLI errors
    :param cli_type: click | klish | rest-put | rest-patch
    :return: st.config() result for CLI types; False/None for REST
    usage:
            map_vrf_vni(dut1, "Vrf-1", "100", cli_type='click')
            map_vrf_vni(dut1, "Vrf-1", "100", config='no', cli_type='click')
    Created by: Gangadhara Sahu <gangadhara.sahu@broadcom.com>
    """
    cli_type = st.get_ui_type(dut, cli_type=cli_type)
    removing = config != 'yes'
    conf_str = 'no' if removing else ''
    action = 'add_vrf_vni_map'
    if removing and cli_type == 'click':
        # click delete takes no vni argument.
        vni = ''
        action = 'del_vrf_vni_map'
    if cli_type == 'click':
        command = "config vrf {} {} {}".format(action, vrf_name, vni)
    elif cli_type == 'klish':
        if not vtep_name:
            st.error('Mandatory argument vtep_name MISSING')
            return False
        command = ['interface vxlan {}'.format(vtep_name),
                   '{} map vni {} vrf {}'.format(conf_str, vni, vrf_name),
                   'exit']
    elif cli_type in ['rest-put', 'rest-patch']:
        if not vtep_name:
            st.error('Mandatory argument vtep_name MISSING')
            return False
        rest_urls = st.get_datastore(dut, "rest_urls")
        # The OC-YANG body expects an integer vni-id.
        if not isinstance(vni, int):
            vni = int(vni)
        if removing:
            url = rest_urls['delete_vlan_vni_mapping'].format(vrf_name, vni, vtep_name)
            if not delete_rest(dut, rest_url=url):
                st.banner('FAIL-OCYANG')
                return False
        else:
            url = rest_urls['config_vlan_vni_mapping'].format(vrf_name)
            payload = { "openconfig-vxlan:vni-instance":
                          [{"vni-id": vni,
                            "source-nve": vtep_name,
                            "config": {"vni-id": vni, "source-nve": vtep_name}
                           }]
                      }
            if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
                st.banner('FAIL-OCYANG')
                return False
        return
    else:
        st.error("Invalid CLI type - {}".format(cli_type))
        return False
    st.debug(command)
    return st.config(dut, command, skip_error_check=skip_error, type=cli_type)
def parse_rest_output_remote_vni(response):
    """
    Convert an OC-YANG vxlan-vni-peer-infos GET response into the list-of-dict
    shape produced by the 'show vxlan remote vni' CLI template.

    :param response: dict returned by get_rest(); JSON body under 'output'
    :return: list of dicts with keys total_count (str), vlan, rvtep, vni
    """
    tunnel_vni_list = response['output'].get('openconfig-vxlan:vxlan-vni-peer-infos', {}).get('vxlan-vni-peer-info', [])
    tunnel_vni_count = len(tunnel_vni_list)
    result = []
    for tunnel in tunnel_vni_list:
        entry = {}  # renamed from 'dict' to avoid shadowing the builtin
        entry['total_count'] = str(tunnel_vni_count)
        ### vlan missing in ocyang output
        entry['vlan'] = ''
        entry['rvtep'] = tunnel.get('peer-ip', "")
        entry['vni'] = tunnel.get('state', {}).get('vni-id', 0)
        result.append(entry)
    return result
def verify_vxlan_evpn_remote_vni_id(dut, **kwargs):
    """
    Author: Gangadhara Sahu (gangadhara.sahu@broadcom.com)
    Verify the output of "show vxlan evpn_remote_vni <vni-id|all>".

    verify_vxlan_evpn_remote_vni_id(dut=dut1,vni="100",vlan="Vlan100",rvtep="11.1.1.1",type="dynamic",identifier="all")

    :param dut: device handle
    :param vni:
    :param vlan:
    :param rvtep:
    :param type:
    :param total_count:
    :param identifier: mandatory; 'all' or a specific vni id for the show command
    :return: True when every requested field matches, else False
    """
    cli_type = kwargs.pop('cli_type', st.get_ui_type(dut, **kwargs))
    if 'identifier' not in kwargs:
        st.error("Mandatory arg identifier is not present")  # typo fixed: "Mandetory"
        return False
    # klish has no 'all' token; an empty identifier lists everything.
    if cli_type == 'klish':
        if kwargs['identifier'] == 'all':
            kwargs['identifier'] = ''
    cmd = 'evpn_remote_vni' if cli_type == 'click' else 'remote vni'
    command = 'show vxlan {}'.format(cmd)
    if kwargs['identifier']:
        command += " {}".format(kwargs['identifier'])
    if cli_type in ['rest-put', 'rest-patch']:
        rest_urls = st.get_datastore(dut, "rest_urls")
        url = rest_urls['vxlan_vni_peer_info']
        response = get_rest(dut, rest_url=url)
        st.log('KLISH output for debugging REST')
        st.show(dut, 'show vxlan remote vni', type='klish')
        if response['output']:
            output = parse_rest_output_remote_vni(response)
        else:
            st.error("OCYANG-FAIL: verify vxlan remote vni - Get Response is empty")
            return False
        # vlan column is not present in the OC-YANG response; do not match it.
        if 'vlan' in kwargs:
            del kwargs['vlan']
    else:
        output = st.show(dut, command, type=cli_type)
    if len(output) == 0:
        st.error("Output is Empty")
        return False
    if "return_output" in kwargs:
        return True
    del kwargs['identifier']
    # Keys that appear once in the show-output header rather than per row.
    common_key_list = ['total_count']
    no_common_key = sum(1 for key in kwargs if key in common_key_list)
    ret_val1 = False
    dict1 = {}
    if no_common_key > 0:
        rlist = output[0]
        count = 0
        for key in kwargs:
            # Membership checked first so row-only keys can never raise KeyError.
            if key in common_key_list and rlist[key] == kwargs[key]:
                count = count + 1
        if no_common_key == count:
            ret_val1 = True
            for key in kwargs:
                if key in common_key_list:
                    st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], rlist[key]))
        else:
            for key in kwargs:
                if key in common_key_list:
                    if rlist[key] == kwargs[key]:
                        st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], rlist[key]))
                    else:
                        st.log("No-Match: Match key {} NOT found => {} : {}".format(key, kwargs[key], rlist[key]))
    st.log("\n")
    # Header keys are consumed here; the remaining kwargs are matched per row below.
    for key in common_key_list:
        if key in kwargs:
            dict1[key] = kwargs[key]
            del kwargs[key]
    if no_common_key > 0 and ret_val1 is False:
        st.error("DUT {} -> Match Not Found {}".format(dut, dict1))
        return ret_val1
    ret_val = True  # bugfix: was the string "True"
    # Normalise every kwarg to a list so scalar and list inputs are handled alike.
    for key in kwargs:
        if not isinstance(kwargs[key], list):
            kwargs[key] = [kwargs[key]]
    if not kwargs:
        return ret_val
    # Build one match-dict per instance (py3 fix: dict.keys() is not subscriptable).
    input_dict_list = []
    for i in range(len(kwargs[list(kwargs)[0]])):
        input_dict_list.append({key: kwargs[key][i] for key in kwargs})
    for input_dict in input_dict_list:
        entries = filter_and_select(output, None, match=input_dict)
        if entries:
            st.log("DUT {} -> Match Found {} ".format(dut, input_dict))
        else:
            st.error("DUT {} -> Match Not Found {}".format(dut, input_dict))
            ret_val = False
    return ret_val
def verify_vxlan_evpn_remote_mac_id(dut, **kwargs):
    """
    Author: Gangadhara Sahu (gangadhara.sahu@broadcom.com)
    Verify the output of "show vxlan evpn_remote_mac <mac|all>".

    verify_vxlan_evpn_remote_mac_id(dut=dut1,vni="100",vlan="Vlan100",rvtep="11.1.1.1",type="dynamic",identifier="all",mac="00:21:ee:00:10:33")

    :param dut: device handle
    :param vni:
    :param vlan:
    :param rvtep:
    :param type:
    :param mac:
    :param total_count: exact expected entry count
    :param min_total_count: minimum acceptable entry count (numeric compare)
    :param identifier: mandatory; 'all' or a specific mac for the show command
    :return: True when every requested field matches, else False
    """
    cli_type = kwargs.pop('cli_type', st.get_ui_type(dut, **kwargs))
    ### NO ocyang URI support for <show vxlan remote mac". Hence fallback to klish
    cli_type = "klish" if cli_type in ["rest-put", "rest-patch"] else cli_type
    if 'identifier' not in kwargs:
        st.error("Mandatory arg identifier is not present")
        return False
    # klish has no 'all' token; an empty identifier lists everything.
    if cli_type == 'klish':
        if kwargs['identifier'] == 'all':
            kwargs['identifier'] = ''
    cmd = 'evpn_remote_mac' if cli_type == 'click' else 'remote mac'
    command = 'show vxlan {}'.format(cmd)
    if kwargs['identifier']:
        command += " {}".format(kwargs['identifier'])
    output = st.show(dut, command, type=cli_type)
    if len(output) == 0:
        st.error("Output is Empty")
        return False
    if "return_output" in kwargs:
        return True
    del kwargs['identifier']
    # Keys that appear once in the show-output header rather than per row.
    common_key_list = ['total_count', 'min_total_count']
    no_common_key = sum(1 for key in kwargs if key in common_key_list)
    ret_val1 = False
    dict1 = {}
    if no_common_key > 0:
        rlist = output[0]
        count = 0
        for key in kwargs:
            if key == 'min_total_count':
                # bugfix: compare as integers; the old string comparison made
                # '9' >= '10' evaluate True lexicographically.
                if int(rlist['total_count']) >= int(kwargs[key]):
                    count = count + 1
                    st.log("Match: Match key {} found => {} out of {}".format(key, kwargs[key], rlist['total_count']))
            elif key in common_key_list and rlist[key] == kwargs[key]:
                count = count + 1
        if 'min_total_count' in kwargs:
            del kwargs['min_total_count']
        if no_common_key == count:
            ret_val1 = True
            for key in kwargs:
                if key in common_key_list:
                    st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], rlist[key]))
        else:
            for key in kwargs:
                if key in common_key_list:
                    if rlist[key] == kwargs[key]:
                        st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], rlist[key]))
                    else:
                        st.log("No-Match: Match key {} NOT found => {} : {}".format(key, kwargs[key], rlist[key]))
    st.log("\n")
    # Header keys are consumed here; the remaining kwargs are matched per row below.
    for key in common_key_list:
        if key in kwargs:
            dict1[key] = kwargs[key]
            del kwargs[key]
    if no_common_key > 0 and ret_val1 is False:
        st.error("DUT {} -> Match Not Found {}".format(dut, dict1))
        return ret_val1
    ret_val = True  # bugfix: was the string "True"
    # Normalise every kwarg to a list so scalar and list inputs are handled alike.
    for key in kwargs:
        if not isinstance(kwargs[key], list):
            kwargs[key] = [kwargs[key]]
    if not kwargs:
        return ret_val
    # Build one match-dict per instance (py3 fix: dict.keys() is not subscriptable).
    input_dict_list = []
    for i in range(len(kwargs[list(kwargs)[0]])):
        input_dict_list.append({key: kwargs[key][i] for key in kwargs})
    for input_dict in input_dict_list:
        entries = filter_and_select(output, None, match=input_dict)
        if entries:
            st.log("DUT {} -> Match Found {} ".format(dut, input_dict))
        else:
            st.error("DUT {} -> Match Not Found {}".format(dut, input_dict))
            ret_val = False
    return ret_val
def parse_rest_output_vlanvni_map(dut, vlan_data, url):
    """
    Fetch the vni-instance mapped to one VLAN via REST and return it in the
    'show vxlan vlanvnimap' row format.

    :param dut: device handle
    :param vlan_data: vlan id (string, without the 'Vlan' prefix)
    :param url: fully formatted REST URL for the vlan's vni-instance
    :return: dict with keys 'vlan' and 'vni' (vni as string, '0' when unmapped),
             or False when the GET response is empty
    """
    response = get_rest(dut, rest_url=url)
    if response['output']:
        entry = {}  # renamed from 'dict' to avoid shadowing the builtin
        entry['vlan'] = vlan_data
        vni_map = response['output'].get('openconfig-vxlan:vni-instance', [])
        if vni_map:
            vni_id = vni_map[0].get('state', {}).get('vni-id', 0)
        else:
            # No mapping configured for this vlan.
            vni_id = 0
        entry['vni'] = str(vni_id) if type(vni_id) is int else vni_id
        return entry
    else:
        st.error("OCYANG-FAIL: verify vxlan vlanvnimap - Get Response is empty for vlan:{}".format(vlan_data))
        return False
def verify_vxlan_vlanvnimap(dut, **kwargs):
    """
    Author: Gangadhara Sahu (gangadhara.sahu@broadcom.com)
    Verify the output of "show vxlan vlanvnimap".

    verify_vxlan_vlanvnimap(dut=dut1,vni=["100","101'],vlan=["Vlan100","Vlan100"],total_count="2")

    :param dut: device handle
    :param vni: VNI id(s) expected (scalar or list)
    :param vlan: VLAN name(s)/id(s) expected (scalar or list)
    :param total_count: expected number of mappings
    :return: True when every requested field matches, else False
    """
    cli_type = kwargs.pop('cli_type', st.get_ui_type(dut, **kwargs))
    ### There is no direct vlan-vni mapping output in ocyang.
    if "return_output" in kwargs:
        cli_type = "klish" if cli_type in ["rest-put", "rest-patch"] else cli_type
    if cli_type in ['rest-put', 'rest-patch']:
        st.log('KLISH output for debugging REST')
        st.show(dut, 'show vxlan vlanvnimap', type='klish')
        rest_urls = st.get_datastore(dut, "rest_urls")
        result = []
        vlan_list = kwargs['vlan'] if isinstance(kwargs['vlan'], list) else [kwargs['vlan']]
        # One GET per vlan; assemble rows in the CLI template format.
        for vlan_id in vlan_list:
            vlan_data = vlan_id if isinstance(vlan_id, str) else str(vlan_id)
            vlan_str = 'Vlan' + vlan_data if 'Vlan' not in vlan_data else vlan_data
            url = rest_urls['config_vlan_vni_mapping'].format(vlan_str)
            entry = parse_rest_output_vlanvni_map(dut, vlan_data, url)
            if entry:
                result.append(entry)
        count = len(result)
        for entry in result:
            entry.update({'total_count': count})
        output = result
        st.log("parsed output:{}".format(result))
        # bugfix: pop with a default; plain pop raised KeyError when the caller
        # did not pass total_count.
        kwargs.pop('total_count', None)
    else:
        output = st.show(dut, "show vxlan vlanvnimap", type=cli_type)
    if len(output) == 0:
        st.error("Output is Empty")
        return False
    if "return_output" in kwargs:
        return True
    # Keys that appear once in the show-output header rather than per row.
    common_key_list = ['total_count']
    no_common_key = sum(1 for key in kwargs if key in common_key_list)
    ret_val1 = False
    dict1 = {}
    if no_common_key > 0:
        rlist = output[0]
        count = 0
        for key in kwargs:
            # Membership checked first so row-only keys can never raise KeyError.
            if key in common_key_list and rlist[key] == kwargs[key]:
                count = count + 1
        if no_common_key == count:
            ret_val1 = True
            for key in kwargs:
                if key in common_key_list:
                    st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], rlist[key]))
        else:
            for key in kwargs:
                if key in common_key_list:
                    if rlist[key] == kwargs[key]:
                        st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], rlist[key]))
                    else:
                        st.log("No-Match: Match key {} NOT found => {} : {}".format(key, kwargs[key], rlist[key]))
    st.log("\n")
    # Header keys are consumed here; the remaining kwargs are matched per row below.
    for key in common_key_list:
        if key in kwargs:
            dict1[key] = kwargs[key]
            del kwargs[key]
    if no_common_key > 0 and ret_val1 is False:
        st.error("DUT {} -> Match Not Found {}".format(dut, dict1))
        return ret_val1
    ret_val = True  # bugfix: was the string "True"
    # Normalise every kwarg to a list so scalar and list inputs are handled alike.
    for key in kwargs:
        if not isinstance(kwargs[key], list):
            kwargs[key] = [kwargs[key]]
    if not kwargs:
        return ret_val
    # Build one match-dict per instance (py3 fix: dict.keys() is not subscriptable).
    input_dict_list = []
    for i in range(len(kwargs[list(kwargs)[0]])):
        input_dict_list.append({key: kwargs[key][i] for key in kwargs})
    for input_dict in input_dict_list:
        entries = filter_and_select(output, None, match=input_dict)
        if entries:
            st.log("DUT {} -> Match Found {} ".format(dut, input_dict))
        else:
            st.error("DUT {} -> Match Not Found {}".format(dut, input_dict))
            ret_val = False
    return ret_val
def parse_rest_output_vrfvni_map(dut, vrf_str, url):
    """
    Fetch the VRF -> L3VNI mapping for one VRF via REST and normalize it into a
    dict shaped like one row of 'show vxlan vrfvnimap'.

    :param dut: device handle
    :param vrf_str: VRF name used to tag the parsed row
    :param url: REST URL for the vni-instance query
    :return: {'vrf': <vrf_str>, 'vni': <str>} on success, False when the GET
             response payload is empty
    """
    response = get_rest(dut, rest_url=url)
    if not response['output']:
        # Fix: message previously said "vlanvnimap" although this parses vrfvnimap.
        st.error("OCYANG-FAIL: verify vxlan vrfvnimap - Get Response is empty for vrf:{}".format(vrf_str))
        return False
    entry = {}  # renamed from 'dict' to avoid shadowing the builtin
    entry['vrf'] = vrf_str
    vni_map = response['output'].get('openconfig-vxlan:vni-instance', [])
    # Only the first instance is expected; default the VNI to 0 when absent.
    if vni_map:
        vni_id = vni_map[0].get('state', {}).get('vni-id', 0)
    else:
        vni_id = 0
    entry['vni'] = str(vni_id) if isinstance(vni_id, int) else vni_id
    return entry
def verify_vxlan_vrfvnimap(dut,**kwargs):
    """
    Author: Gangadhara Sahu (gangadhara.sahu@broadcom.com)
    verify_vxlan_vrfvnimap(dut=dut1,vni=["500","501'],vrf=["Vrf1","Vrf2"],total_count="2")
    To verify show vxlan vrfvnimap
    :param dut:
    :param vni: single VNI or list of VNIs
    :param vrf: single VRF name or list of VRF names
    :param total_count: expected number of mappings
    :return: True on full match, False otherwise
    """
    cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
    ### There is no direct vrf-vni mapping output in ocyang.
    if "return_output" in kwargs:
        # Raw output dump is only meaningful through the CLI; fall back to klish.
        cli_type = "klish" if cli_type in ["rest-put", "rest-patch"] else cli_type
    if cli_type in ['rest-put', 'rest-patch']:
        st.log('KLISH output for debugging REST')
        st.show(dut, 'show vxlan vrfvnimap', type='klish')
        rest_urls = st.get_datastore(dut, "rest_urls")
        result = []
        # REST path requires the 'vrf' kwarg to build per-VRF queries.
        vrf_list = [kwargs['vrf']] if type(kwargs['vrf']) is str else kwargs['vrf']
        for vrf in vrf_list:
            vrf_str = str(vrf) if type(vrf) is not str else vrf
            # NOTE(review): reuses the vlan-vni mapping URL key for a VRF query --
            # confirm the datastore key against rest_urls.
            url = rest_urls['config_vlan_vni_mapping'].format(vrf_str)
            entry = parse_rest_output_vrfvni_map(dut, vrf_str, url)
            if entry:
                result.append(entry)
        count = len(result)
        for entry in result:
            entry.update({'total_count': count})
        output = result
        st.log("parsed output:{}".format(result))
        # Fix: pop with a default so a missing 'total_count' no longer raises KeyError.
        kwargs.pop('total_count', None)
    else:
        output = st.show(dut, "show vxlan vrfvnimap", type=cli_type)
    if len(output) == 0:
        st.error("Output is Empty")
        return False
    if "return_output" in kwargs:
        return True
    no_common_key = 0
    ret_val1 = False
    dict1 = {}
    # Keys that describe the whole table (first row) rather than one mapping.
    common_key_list = ['total_count']
    for key in kwargs:
        if key in common_key_list:
            no_common_key = no_common_key + 1
    if no_common_key > 0:
        rlist = output[0]
        count = 0
        for key in kwargs:
            if rlist[key] == kwargs[key] and key in common_key_list:
                count = count + 1
        if no_common_key == count:
            ret_val1 = True
            for key in kwargs:
                if key in common_key_list:
                    st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
        else:
            for key in kwargs:
                if key in common_key_list:
                    if rlist[key] == kwargs[key]:
                        st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
                    else:
                        st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
        st.log("\n")
    # Remove the table-level keys so only per-row keys remain for matching.
    for key in common_key_list:
        if key in kwargs:
            dict1[key] = kwargs[key]
            del kwargs[key]
    if no_common_key > 0 and ret_val1 is False:
        st.error("DUT {} -> Match Not Found {}".format(dut,dict1))
        return ret_val1
    ret_val = True  # Fix: was the string "True"
    #Converting all kwargs to list type to handle single or list of instances
    for key in kwargs:
        if type(kwargs[key]) is list:
            kwargs[key] = list(kwargs[key])
        else:
            kwargs[key] = [kwargs[key]]
    #convert kwargs into list of dictionary
    input_dict_list = []
    if kwargs:
        # Fix: dict.keys() is not subscriptable on Python 3; also guard against
        # kwargs having been emptied by the common-key removal above.
        first_key = next(iter(kwargs))
        for i in range(len(kwargs[first_key])):
            temp_dict = {}
            for key in kwargs:
                temp_dict[key] = kwargs[key][i]
            input_dict_list.append(temp_dict)
    for input_dict in input_dict_list:
        entries = filter_and_select(output,None,match=input_dict)
        if entries:
            st.log("DUT {} -> Match Found {} ".format(dut,input_dict))
        else:
            st.error("DUT {} -> Match Not Found {}".format(dut,input_dict))
            ret_val = False
    return ret_val
def verify_bgp_l2vpn_evpn_route_detail_type_prefix(dut,**kwargs):
    """
    Author: Gangadhara Sahu (gangadhara.sahu@broadcom.com)
    verify_bgp_l2vpn_evpn_route_detail_type_prefix(dut=evpn_dict["leaf_node_list"][3],
        prefix="[5]:[0]:[24]:[55.55.1.0]",rd="9:9",rt="500:500",rvtep="5.5.5.2")
    To verify show bgp l2vpn evpn route detail type prefix
    :param dut:
    :param rd:
    :param as_path:
    :param vni_id:
    :param prefix:
    :param rvtep:
    :param bgp_peer:
    :param origin:
    :param rt:
    :param et:
    :param rmac:
    :return: True when all requested fields match, False otherwise
    """
    cli_type = kwargs.pop('cli_type', st.get_ui_type(dut, **kwargs))
    ret_val = True
    if cli_type in ["rest-put", "rest-patch"]:
        rest_urls = st.get_datastore(dut, "rest_urls")
        url = rest_urls["get_evpn_routes"]
        rest_out = get_rest(dut, rest_url=url, timeout=30)
        if rest_out["status"] == 200:
            out_dict = {}
            rest_out = rest_out["output"]["openconfig-bgp-evpn-ext:routes"]["route"]
            for i in rest_out:
                try:
                    if "rmac" in kwargs:
                        rmac = i["attr-sets"]["ext-community"][2]
                        rmac = ":".join(rmac.split(":")[1:7])
                    rt = i["attr-sets"]["ext-community"][0]
                    rt = ":".join(rt.split(":")[1:3])
                    out_dict["rt"] = rt
                    prefix = i["prefix"]
                    nexthop = i["attr-sets"]["next-hop"]
                    # NOTE(review): when 'rmac' is absent from kwargs the name below is
                    # unbound and the NameError is swallowed by the except -- the REST
                    # path effectively requires prefix, rvtep and rmac; confirm callers.
                    if prefix == kwargs["prefix"] and nexthop == kwargs["rvtep"] and rmac == kwargs["rmac"]:
                        if i["state"]["openconfig-rib-bgp-ext:best-path"]:
                            vni = i["attr-sets"]["tag"]
                            out_dict["vni_id"] = vni
                            rd = i["route-distinguisher"]
                            rd = rd.split(":")[0]
                            out_dict["rd"] = rd
                            for key in out_dict.keys():
                                if key in kwargs:
                                    if out_dict[key] == kwargs[key]:
                                        st.log("Expected value {} found for key: {}".format(out_dict[key],key))
                                    else:
                                        st.log("Match NOT found; expected value {} but got"
                                               " {}".format(kwargs[key],out_dict[key]))
                                        ret_val = False
                            if ret_val:
                                return True
                except Exception:
                    # Routes missing the expected keys are skipped.
                    continue
        else:
            st.log("REST command execution failed")
            ret_val = False
    else:
        cli_type = "vtysh" if cli_type == 'click' else "klish"
        output = st.show(dut,"show bgp l2vpn evpn route detail type prefix",type=cli_type)
        if len(output) == 0:
            st.error("Output is Empty")
            return False
        ret_val = True  # Fix: was the string "True"
        #Converting all kwargs to list type to handle single or list of instances
        for key in kwargs:
            if type(kwargs[key]) is list:
                kwargs[key] = list(kwargs[key])
            else:
                kwargs[key] = [kwargs[key]]
        #convert kwargs into list of dictionary
        input_dict_list = []
        if kwargs:
            # Fix: dict.keys() is not subscriptable on Python 3.
            first_key = next(iter(kwargs))
            for i in range(len(kwargs[first_key])):
                temp_dict = {}
                for key in kwargs:
                    temp_dict[key] = kwargs[key][i]
                input_dict_list.append(temp_dict)
        for input_dict in input_dict_list:
            entries = filter_and_select(output,None,match=input_dict)
            if entries:
                st.log("DUT {} -> Match Found {} ".format(dut,input_dict))
            else:
                st.error("DUT {} -> Match Not Found {}".format(dut,input_dict))
                ret_val = False
    return ret_val
def clear_bgp_evpn(dut,clear_type,**kwargs):
    '''
    Issue 'clear bgp l2vpn evpn' for all or for a specific neighbor.

    :param dut: device handle
    :type dut: string
    :param clear_type: "*" for all neighbors or a neighbor IP address
    :type clear_type: string
    :param kwargs: optional modifiers -- dir ("in"/"out"), prefix ("yes" with
                   dir="in" adds prefix-filter), soft_dir ("in"/"out"), cli_type
    :type kwargs: dictionary
    :return: None on unsupported argument, otherwise the st.config() result
    Usage:
        clear_bgp_evpn(dut1,"*")
        clear_bgp_evpn(dut1,"*",dir="in",prefix="yes")
        clear_bgp_evpn(dut1,"1.1.1.1",soft_dir="out")
    '''
    cli_type = kwargs.get('cli_type', st.get_ui_type(dut,**kwargs))
    cli_type = "vtysh" if cli_type == 'click' else "klish"
    supported_args = ["dir","prefix","soft_dir","cli_type"]
    # Reject the call outright if any unknown keyword was supplied.
    for arg in kwargs:
        if arg not in supported_args:
            st.error("kindly specify the supported argument among {}".format(supported_args))
            return None
    command = "clear bgp l2vpn evpn {}".format(clear_type)
    if "dir" in kwargs:
        command += " {}".format(kwargs["dir"])
        # prefix-filter is only valid together with the inbound direction.
        if kwargs["dir"] == "in" and "prefix" in kwargs:
            command += " prefix-filter"
    if "soft_dir" in kwargs:
        command += " soft {}".format(kwargs["soft_dir"])
    return st.config(dut,command,type=cli_type,skip_tmpl=True,conf=False)
def fetch_evpn_neigh_output(dut,**kwargs):
    '''
    Run 'show bgp l2vpn evpn summary' and return the parsed rows.

    :param dut: device handle
    :type dut: string
    :return: parsed CLI output on success; False when the output is empty
    Usage:
        fetch_evpn_neigh_output(dut1)
    '''
    cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
    cli_type = "vtysh" if cli_type == 'click' else "klish"
    output = st.show(dut,"show bgp l2vpn evpn summary",type=cli_type)
    if not output:
        st.error("Output is Empty")
        return False
    return output
def verify_bgp_l2vpn_evpn_route_type_multicast(dut,**kwargs):
    """
    Author: Gangadhara Sahu (gangadhara.sahu@broadcom.com)
    verify_bgp_l2vpn_evpn_route_type_multicast(dut=dut1,evpn_type_3_prefix="[3]:[0]:[32]:[11.1.1.1]",rd="1.1.1.1:2",status_code="*>",metric="3276",next_hop="11.1.1.1",weight="8",path="",origin_code="i",displayed_prefixes="5",no_of_paths="5",bgp_version="1",router_id="1.1.1.1")
    To verify bgp l2vpn evpn route type multicast
    :param dut:
    :param bgp_version:
    :param router_id:
    :param evpn_type_3_prefix:
    :param rd:
    :param path:
    :param status_code:
    :param weight:
    :param metric:
    :param next_hop:
    :param origin_code:
    :param displayed_prefixes:
    :param no_of_paths:
    :return: True when all requested fields match, False otherwise
    """
    cli_type = kwargs.pop('cli_type', st.get_ui_type(dut, **kwargs))
    if cli_type in ["rest-put", "rest-patch"]:
        ret_val = True
        rest_urls = st.get_datastore(dut, "rest_urls")
        url = rest_urls["get_evpn_routes"]
        rest_out = get_rest(dut, rest_url=url, timeout=30)
        if rest_out["status"] == 200:
            out_dict = {}
            rest_out = rest_out["output"]["openconfig-bgp-evpn-ext:routes"]["route"]
            for i in rest_out:
                try:
                    prefix = i["prefix"]
                    if prefix == kwargs["evpn_type_3_prefix"] and i["state"]["openconfig-rib-bgp-ext:best-path"]:
                        nexthop = i["attr-sets"]["next-hop"]
                        out_dict["next_hop"] = nexthop
                        for key in out_dict.keys():
                            if key in kwargs:
                                if out_dict[key] == kwargs[key]:
                                    st.log("Expected value {} found for key: {}".format(out_dict[key], key))
                                else:
                                    st.log("Match NOT found; expected value {} but got"
                                           " {}".format(kwargs[key], out_dict[key]))
                                    ret_val = False
                        if ret_val:
                            return True
                except Exception:
                    # Routes missing the expected keys are skipped.
                    continue
        else:
            st.log("REST command execution failed")
            ret_val = False
    else:
        cli_type = "vtysh" if cli_type == 'click' else "klish"
        output = st.show(dut,"show bgp l2vpn evpn route type multicast",type=cli_type)
        if len(output) == 0:
            st.error("Output is Empty")
            return False
        # Fix: strip each cell in place. The original looked rows up again via
        # output.index(a) (always hitting the first duplicate row) and did
        # lstrip/rstrip in two passes.
        for row in output:
            for key in row:
                row[key] = row[key].strip()
        no_common_key = 0
        ret_val1 = False
        dict1 = {}
        # Keys that describe the whole table (first row) rather than one route.
        common_key_list = ['bgp_version','router_id','displayed_prefixes','no_of_paths']
        for key in kwargs:
            if key in common_key_list:
                no_common_key = no_common_key + 1
        if no_common_key > 0:
            rlist = output[0]
            count = 0
            for key in kwargs:
                if rlist[key] == kwargs[key] and key in common_key_list:
                    count = count + 1
            if no_common_key == count:
                ret_val1 = True
                for key in kwargs:
                    if key in common_key_list:
                        st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
            else:
                for key in kwargs:
                    if key in common_key_list:
                        if rlist[key] == kwargs[key]:
                            st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
                        else:
                            st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
            st.log("\n")
        # Remove the table-level keys so only per-route keys remain for matching.
        for key in common_key_list:
            if key in kwargs:
                dict1[key] = kwargs[key]
                del kwargs[key]
        if no_common_key > 0 and ret_val1 is False:
            st.error("DUT {} -> Match Not Found {}".format(dut,dict1))
            return ret_val1
        ret_val = True  # Fix: was the string "True"
        #Converting all kwargs to list type to handle single or list of instances
        for key in kwargs:
            if type(kwargs[key]) is list:
                kwargs[key] = list(kwargs[key])
            else:
                kwargs[key] = [kwargs[key]]
        #convert kwargs into list of dictionary
        input_dict_list = []
        if kwargs:
            # Fix: dict.keys() is not subscriptable on Python 3.
            first_key = next(iter(kwargs))
            for i in range(len(kwargs[first_key])):
                temp_dict = {}
                for key in kwargs:
                    temp_dict[key] = kwargs[key][i]
                input_dict_list.append(temp_dict)
        for input_dict in input_dict_list:
            entries = filter_and_select(output,None,match=input_dict)
            if entries:
                st.log("DUT {} -> Match Found {} ".format(dut,input_dict))
            else:
                st.error("DUT {} -> Match Not Found {}".format(dut,input_dict))
                ret_val = False
    return ret_val
def verify_vxlan_tunnel_count(dut, exp_count, cli_type=''):
    """
    Verify that the number of VXLAN tunnels on the DUT equals exp_count.

    :param dut: device handle
    :param exp_count: expected number of tunnels (int)
    :param cli_type: click/klish/rest-put/rest-patch (auto-detected when empty)
    :return: True on match, False on mismatch or invalid cli_type,
             -1 when the click output has no numeric count line (legacy sentinel)
    """
    cli_type = st.get_ui_type(dut,cli_type=cli_type)
    if cli_type == "click":
        command = 'show vxlan tunnel | grep "Total count "'
        output = st.show(dut, command, skip_tmpl=True, type=cli_type)
        match = re.search(r"\d+", output)
        if not match:
            # Preserve the historical sentinel when no count line is present.
            return -1
        if int(match.group()) == exp_count:
            return True
        st.log('FAIL: Expected tunnel count not found.')
        return False
    elif cli_type == "klish":
        command = 'show vxlan tunnel | grep "EVPN"'
        output = st.show(dut, command, skip_tmpl=True, type=cli_type)
        tunnel_count = output.count("EVPN_")
    elif cli_type in ['rest-put','rest-patch']:
        rest_urls = st.get_datastore(dut, "rest_urls")
        url = rest_urls['vxlan_tunnel_info']
        response = get_rest(dut, rest_url=url)
        st.log('KLISH output for debugging REST')
        st.show(dut, 'show vxlan tunnel', type='klish')
        tunnel_list = response['output']['openconfig-vxlan:vxlan-tunnel-infos']['vxlan-tunnel-info']
        tunnel_count = len(tunnel_list)
    else:
        st.error("Invalid CLI type - {}".format(cli_type))
        return False
    # Fix: compare the count directly. The old 'if x:' guard made a count of 0
    # fall through to 'return -1' (truthy!) instead of being checked.
    if tunnel_count == exp_count:
        return True
    st.log('FAIL: Expected tunnel count not found.')
    return False
def create_linktrack(dut, track_group_name, config='yes', **kwargs):
    '''
    purpose:
        Create or remove an interface link state tracking group.
    Arguments:
    :param dut: device to be configured
    :type dut: string
    :param track_group_name: interface track group name to be created/removed
    :param config: 'yes' to configure, 'no' to remove the tracking group
    :type config: string
    :return: None (REST success), False (REST failure / bad cli_type),
             or the st.config() result for click/klish
    usage:
        create_linktrack(dut1, "group1")
        create_linktrack(dut1, "group1",config='no')
    Created by: Gangadhara <gangadhara.sahu@broadcom.com>
    '''
    cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
    if cli_type == 'click':
        if config == 'yes':
            cmd = "config linktrack add {}".format(track_group_name)
        else:
            cmd = "config linktrack del {}".format(track_group_name)
        return st.config(dut=dut,cmd=cmd)
    if cli_type == 'klish':
        prefix = '' if config == 'yes' else 'no '
        # Creation enters the lst-group sub-mode, so it needs a trailing exit.
        trailer = '\nexit' if prefix == '' else ''
        klish_cmd = '{}link state track {}{}'.format(prefix, track_group_name, trailer)
        return st.config(dut=dut,cmd=klish_cmd, type="klish", conf=True)
    if cli_type in ['rest-put','rest-patch']:
        rest_urls = st.get_datastore(dut, "rest_urls")
        if config == 'yes':
            url = rest_urls['config_link_track']
            payload = {"openconfig-lst-ext:lst-group":[{"name":track_group_name}]}
            if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
                st.banner('FAIL-OCYANG: Config Link track group Failed')
                return False
        elif config == 'no':
            url = rest_urls['delete_link_track'].format(track_group_name)
            if not delete_rest(dut, rest_url=url):
                st.banner('FAIL-OCYANG: Delete Link track group Failed')
                return False
        return
    st.error("Invalid CLI type - {}".format(cli_type))
    return False
def update_linktrack_interface(dut, track_group_name, upinterface, timeout, config='yes', **kwargs):
    '''
    purpose:
        This definition is used to update link track interface
    Arguments:
    :param dut: device to be configured
    :type dut: string
    :param track_group_name: interface track group name to be created or deleted
    :param upinterface: upstream interface to be added or removed
    :param timeout: timeout value to be configured ('' to skip)
    :param config: it takes value as 'yes' or 'no' to configure or remove interface link tracking
    :type config: string
    :param description: description string for the track group ('' to skip)
    :param downinterface: downstream interface; defaults to 'all-mclag'
    :return: None / False on REST failure, st.config() result for click/klish
    usage:
        update_linktrack_interface(dut1, "Ethernet0,Vlan10","10")
        update_linktrack_interface(dut1, "Ethernet0,Vlan10","",config='no')
    Created by: Gangadhara <gangadhara.sahu@broadcom.com>
    '''
    cli_type = kwargs.get('cli_type', st.get_ui_type(dut,**kwargs))
    description = kwargs.get('description', '')
    downinterface = kwargs.get('downinterface', 'all-mclag')
    if cli_type == 'click':
        if config == 'yes':
            if description == '':
                command = "config linktrack update {} --upstream {} --downstream {} --timeout {}".format(track_group_name,upinterface,downinterface,timeout)
            else:
                command = "config linktrack update {} --upstream {} --downstream {} --timeout {} --description {}".format(track_group_name,upinterface,downinterface,timeout,description)
        else:
            command = "config linktrack update {} -nu {} -nd {}".format(track_group_name,upinterface,downinterface)
        return st.config(dut=dut,cmd=command)
    elif cli_type == 'klish':
        config = 'no ' if config != 'yes' else ''
        command = 'link state track {}'.format(track_group_name)
        intf = get_interface_number_from_name(upinterface)
        dintf = get_interface_number_from_name(downinterface)
        if config == '':
            # Configure group parameters, then bind upstream/downstream interfaces.
            if downinterface == 'all-mclag':
                command = command + "\n" + "downstream {}".format(downinterface)
            if timeout != '':
                command = command + "\n" + "timeout {}".format(timeout)
            if description != '':
                command = command + "\n" + "description {}".format(description)
            command = command + "\n" + "exit"
            command = command + "\n" + "interface {} {}".format(intf["type"], intf["number"])
            command = command + "\n" + "link state track {} upstream".format(track_group_name)
            command = command + "\n" + "exit"
            if downinterface != 'all-mclag':
                command = command + "\n" + "interface {} {}".format(dintf["type"], dintf["number"])
                command = command + "\n" + "link state track {} downstream".format(track_group_name)
                command = command + "\n" + "exit"
        else:
            # Unconfigure in the same order, with the 'no ' prefix.
            if downinterface == 'all-mclag':
                command = command + "\n" + "{}downstream {}".format(config,downinterface)
            if timeout != '':
                command = command + "\n" + "{}timeout".format(config)
            if description != '':
                command = command + "\n" + "{}description".format(config)
            command = command + "\n" + "exit"
            command = command + "\n" + "interface {} {}".format(intf["type"], intf["number"])
            command = command + "\n" + "{}link state track {} upstream".format(config, track_group_name)
            command = command + "\n" + "exit"
            if downinterface != 'all-mclag':
                command = command + "\n" + "interface {} {}".format(dintf["type"], dintf["number"])
                command = command + "\n" + "{}link state track {} downstream".format(config,track_group_name)
                command = command + "\n" + "exit"
        return st.config(dut, command, type="klish", conf=True)
    elif cli_type in ['rest-put','rest-patch']:
        rest_urls = st.get_datastore(dut, "rest_urls")
        if config == 'yes':
            url = rest_urls['config_link_track_params'].format(track_group_name)
            payload = {"openconfig-lst-ext:config":
                           {"name": track_group_name}
                       }
            if downinterface == 'all-mclag':
                payload["openconfig-lst-ext:config"].update({'all-mclags-downstream':True})
            if timeout != '':
                payload["openconfig-lst-ext:config"].update({'timeout':int(timeout)})
            if description != '':
                payload["openconfig-lst-ext:config"].update({'description':description})
            if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
                st.banner('FAIL-OCYANG: Config Link track group parameters Failed')
                return False
            if downinterface != 'all-mclag':
                url = rest_urls['add_rem_link_track_downstream'].format(downinterface)
                payload = { "openconfig-lst-ext:group-name": track_group_name }
                if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
                    st.banner('FAIL-OCYANG: Assign Link track to downstream interface Failed')
                    return False
            url = rest_urls['add_rem_link_track_upstream'].format(upinterface)
            payload = {"openconfig-lst-ext:upstream-groups":
                           {"upstream-group":
                                [{"group-name":track_group_name,
                                  "config":{"group-name":track_group_name}
                                  }]
                            }
                       }
            if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
                st.banner('FAIL-OCYANG: Assign Link track to upstream interface Failed')
                return False
        elif config == 'no':
            url = rest_urls['add_rem_link_track_upstream'].format(upinterface)
            if not delete_rest(dut, rest_url=url):
                st.banner('FAIL-OCYANG: Remove Link track from upstream interface Failed')
                return False
            if downinterface == 'all-mclag':
                url = rest_urls['link_track_all_mclag'].format(track_group_name)
                if not delete_rest(dut, rest_url=url):
                    st.banner('FAIL-OCYANG: Remove all-mclag downstream Failed')
                    return False
            else:
                # Fix: the downstream removal URL was built with upinterface.
                url = rest_urls['add_rem_link_track_downstream'].format(downinterface)
                if not delete_rest(dut, rest_url=url):
                    st.banner('FAIL-OCYANG: Remove Link track from downstream interface Failed')
                    return False
            if timeout != '':
                url = rest_urls['link_track_timeout'].format(track_group_name)
                if not delete_rest(dut, rest_url=url):
                    st.banner('FAIL-OCYANG: Remove Link track group timeout Failed')
                    return False
            if description != '':
                url = rest_urls['link_track_description'].format(track_group_name)
                if not delete_rest(dut, rest_url=url):
                    st.banner('FAIL-OCYANG: Remove Link track group description Failed')
                    return False
        return
    else:
        st.error("Invalid CLI type - {}".format(cli_type))
        return False
def verify_mac(dut,**kwargs):
    """
    Author: Gangadhara Sahu (gangadhara.sahu@broadcom.com)
    verify_mac(dut=dut1,mac="00:11:00:22:00:11",total="1")
    To verify mac
    :param dut:
    :param macaddress:
    :param vlan:
    :param port:
    :param type:
    :param dest_ip:
    :param total:
    :return: True when all requested fields match, False otherwise
    """
    cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
    cli_type = "klish" if cli_type in ["rest-put", "rest-patch"] else cli_type
    if cli_type == "click":
        cmd = "show mac"
    elif cli_type == "klish":
        cmd = "show mac address-table"
    output = st.show(dut,cmd,type=cli_type)
    if len(output) == 0:
        st.error("Output is Empty")
        return False
    if cli_type == "klish":
        # klish reports the MAC entry type in upper case; normalize the expectation.
        if 'type' in kwargs:
            if not kwargs['type'].isupper():
                kwargs['type'] = kwargs['type'].upper()
    no_common_key = 0
    ret_val1 = False
    dict1 = {}
    # Keys that describe the whole table (first row) rather than one entry.
    common_key_list = ['total']
    for key in kwargs:
        if key in common_key_list:
            no_common_key = no_common_key + 1
    if no_common_key > 0:
        rlist = output[0]
        count = 0
        for key in kwargs:
            if rlist[key] == kwargs[key] and key in common_key_list:
                count = count + 1
        if no_common_key == count:
            ret_val1 = True
            for key in kwargs:
                if key in common_key_list:
                    st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
        else:
            for key in kwargs:
                if key in common_key_list:
                    if rlist[key] == kwargs[key]:
                        st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
                    else:
                        st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
        st.log("\n")
    # Remove the table-level keys so only per-entry keys remain for matching.
    for key in common_key_list:
        if key in kwargs:
            dict1[key] = kwargs[key]
            del kwargs[key]
    if no_common_key > 0 and ret_val1 is False:
        st.error("DUT {} -> Match Not Found {}".format(dut,dict1))
        return ret_val1
    ret_val = True  # Fix: was the string "True"
    #Converting all kwargs to list type to handle single or list of instances
    for key in kwargs:
        if type(kwargs[key]) is list:
            kwargs[key] = list(kwargs[key])
        else:
            kwargs[key] = [kwargs[key]]
    #convert kwargs into list of dictionary
    input_dict_list = []
    if kwargs:
        # Fix: dict.keys() is not subscriptable on Python 3; also guard against
        # kwargs having been emptied by the common-key removal above.
        first_key = next(iter(kwargs))
        for i in range(len(kwargs[first_key])):
            temp_dict = {}
            for key in kwargs:
                temp_dict[key] = kwargs[key][i]
            input_dict_list.append(temp_dict)
    for input_dict in input_dict_list:
        entries = filter_and_select(output,None,match=input_dict)
        if entries:
            st.log("DUT {} -> Match Found {} ".format(dut,input_dict))
        else:
            st.error("DUT {} -> Match Not Found {}".format(dut,input_dict))
            ret_val = False
    return ret_val
def _clear_vxlan_config_helper(dut_list, cli_type='click'):
    """
    Helper routine to cleanup vxlan config from devices.

    For each DUT: reads 'show vxlan interface' and, when a VTEP is present,
    removes VRF->L3VNI maps, then VLAN->VNI maps, then the EVPN NVO instance,
    and finally the VTEP interface itself (reverse of configuration order).

    :param dut_list: single DUT handle or list of handles
    :param cli_type: CLI flavour passed to the show/config helpers
    :return: True (always; per-DUT cleanup failures are not propagated)
    """
    dut_li = list(dut_list) if isinstance(dut_list, list) else [dut_list]
    for dut in dut_li:
        st.log("############## {} : VxLAN Config Cleanup ################".format(dut))
        output = st.show(dut, "show vxlan interface")
        st.log("##### VXLAN INTERFACE: {}".format(output))
        # No parsed rows -> no VTEP on this DUT, nothing to clean.
        if len(output) == 0:
            continue
        entry = output[0]
        if entry['vtep_name']:
            vtep_name = entry['vtep_name']
            nvo_name = entry['nvo_name']
            # Remove VRF->L3VNI mappings first (they depend on the VTEP).
            vrf_vni = st.show(dut, "show vxlan vrfvnimap", type=cli_type)
            st.log("##### [{}] VXLAN VRF L3VNI MAP: {}".format(dut, vrf_vni))
            for entry in vrf_vni:
                # Skip placeholder rows (empty fields or an unmapped VNI of '0').
                if not entry['vrf'] or not entry['vni'] or entry['vni'] == '0':
                    continue
                vrf = entry['vrf']
                map_vrf_vni(dut, vrf, entry['vni'], config="no")
            # Then remove VLAN->VNI mappings.
            vlan_vni = st.show(dut, "show vxlan vlanvnimap", type=cli_type)
            st.log("##### [{}] VXLAN VLAN VNI MAP: {}".format(dut, vlan_vni))
            for entry in vlan_vni:
                if not entry['vlan'] or not entry['vni']:
                    continue
                vlan = entry['vlan']
                # Strip the 'Vlan' prefix; map_vlan_vni expects the numeric id.
                if vlan[:4] == "Vlan":
                    vlan = vlan[4:]
                map_vlan_vni(dut, vtep_name, vlan, entry['vni'], config='no', cli_type=cli_type)
            # Finally remove the EVPN NVO instance and the VTEP interface.
            if nvo_name:
                create_evpn_instance(dut, nvo_name, vtep_name, config='no', cli_type=cli_type)
            create_overlay_intf(dut, vtep_name, '0.0.0.0', config='no', cli_type=cli_type)
    return True
def clear_vxlan_configuration(dut_list, thread=True, cli_type='click'):
    """
    Find and cleanup all vxlan configuration on one or more devices.

    :param dut_list: single DUT handle or list of handles
    :param thread: run the per-DUT helper concurrently when True
    :param cli_type: CLI flavour passed through to the helper
    :return: True when every DUT cleaned up successfully, else False
    """
    devices = list(dut_list) if isinstance(dut_list, list) else [dut_list]
    out, exceptions = utils.exec_foreach(thread, devices, _clear_vxlan_config_helper, cli_type)
    st.log(exceptions)
    # Any helper returning False marks the whole cleanup as failed.
    return False not in out
def parse_rest_output_linktrack_summary(response):
    """
    Normalize a REST GET response for an LST group into the row format produced
    by the 'show link state tracking' CLI template.

    :param response: dict returned by get_rest(); must contain
                     response['output']['openconfig-lst-ext:state']
    :return: single-element list of {'timeout', 'name', 'description'}
             (timeout coerced to str; missing fields default to "")
    """
    lst_group = response['output']['openconfig-lst-ext:state']
    # Renamed from 'dict' to avoid shadowing the builtin.
    row = {
        'timeout': str(lst_group.get('timeout', "")),
        'name': lst_group.get('name', ""),
        'description': lst_group.get('description', ""),
    }
    return [row]
def verify_linktrack_summary(dut,**kwargs):
    """
    Author: Gangadhara Sahu (gangadhara.sahu@broadcom.com)
    verify_linktrack_summary(dut=dut1,name="group1",description="MLAG_LINK_TRACK",timeout="10")
    Verify the link state tracking summary against the supplied fields.
    :param dut:
    :param name: mandatory track group name
    :param description:
    :param timeout:
    :return: True when some row matches every supplied field, else False
    """
    cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
    if 'name' not in kwargs:
        st.error("Mandatory arg name is not present")
        return False
    if cli_type == 'click':
        result = st.show(dut,"show linktrack summary")
    elif cli_type == 'klish':
        result = st.show(dut, 'show link state tracking', type='klish')
    elif cli_type in ['rest-put','rest-patch']:
        rest_urls = st.get_datastore(dut, "rest_urls")
        url = rest_urls['get_link_track_summary'].format(kwargs['name'])
        response = get_rest(dut,rest_url=url)
        st.log('KLISH output for debugging REST')
        st.show(dut, 'show link state tracking', type='klish')
        if not response['output']:
            st.error("OCYANG-FAIL: verify link track summary - Get Response is empty")
            return False
        result = parse_rest_output_linktrack_summary(response)
    else:
        st.error("Invalid CLI type - {}".format(cli_type))
        return False
    if not result:
        st.error("Output is Empty")
        return False
    if "return_output" in kwargs:
        return True
    ret_val = False
    for rlist in result:
        # A row wins only when every supplied field matches it.
        if all(rlist[key] == kwargs[key] for key in kwargs):
            ret_val = True
            for key in kwargs:
                st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
            break
        # Otherwise report field-by-field what did and did not match.
        for key in kwargs:
            if rlist[key] == kwargs[key]:
                st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
            else:
                st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
        st.log("\n")
    if not ret_val:
        st.log("Fail: Not Matched all args in passed dict {} from parsed dict".format(kwargs))
    return ret_val
def parse_rest_output_linktrack_group(dut,response,timeout,description='',lst_bringup_time='0'):
    """
    Convert a REST 'openconfig-lst-ext:interface' list into rows shaped like
    the 'show link state tracking <group>' CLI template.

    :param dut: device handle (used to query interface oper state)
    :param response: get_rest() result containing
                     response['output']['openconfig-lst-ext:interface']
    :param timeout: group timeout (string) copied into every row
    :param description: group description copied into every row
    :param lst_bringup_time: startup remaining time copied into every row
    :return: list of row dicts with keys name/direction/interface/
             direction_state/description/timeout/startup_remain_time
    """
    lst_interfaces = response['output']['openconfig-lst-ext:interface']
    result = []
    for interface in lst_interfaces:
        lst_dict = {}
        # Group-level fields are replicated onto every interface row.
        lst_dict['description'] = description
        lst_dict['timeout'] = timeout
        lst_dict['startup_remain_time'] = lst_bringup_time
        if 'upstream-groups' in interface.keys():
            # Upstream binding: group name comes from the first upstream-group entry.
            lst_dict['name'] = interface.get('upstream-groups',{}).get('upstream-group',[])[0].get('group-name',"")
            lst_dict['direction'] = "Upstream"
            lst_dict['interface'] = interface.get('id',"")
            # First call appears to be for debug/log only; the second provides the
            # state actually used -- TODO confirm the duplicate call is intentional.
            port1.get_interface_status(dut, lst_dict['interface'],cli_type='click')
            interface_state = str(port1.get_interface_status(dut,lst_dict['interface']))
            st.log("DEBUG==>Interface:{}, Inf state from Rest:{}".format(lst_dict['interface'],interface_state))
            # Normalize the oper state capitalization to match CLI output.
            if interface_state.lower() == 'up':
                lst_dict['direction_state'] = 'Up'
            elif interface_state.lower() == 'down':
                lst_dict['direction_state'] = 'Down'
            else:
                lst_dict['direction_state'] = interface_state
        elif 'downstream-group' in interface.keys():
            # Downstream binding: group name lives under downstream-group/state.
            lst_dict['name'] = interface.get('downstream-group',{}).get('state',{}).get('group-name',"")
            lst_dict['direction'] = "Downstream"
            lst_dict['interface'] = interface.get('id',"")
            # A disabled downstream is reported as 'Disabled' regardless of oper state.
            if interface.get('downstream-group',{}).get('state',{}).get('disabled',""):
                lst_dict['direction_state'] = 'Disabled'
            else:
                port1.get_interface_status(dut, lst_dict['interface'], cli_type='click')
                interface_state = str(port1.get_interface_status(dut, lst_dict['interface']))
                st.log("DEBUG==>Interface:{}, Inf state from Rest:{}".format(lst_dict['interface'], interface_state))
                if interface_state.lower() == 'up':
                    lst_dict['direction_state'] = 'Up'
                elif interface_state.lower() == 'down':
                    lst_dict['direction_state'] = 'Down'
                else:
                    lst_dict['direction_state'] = interface_state
        result.append(lst_dict)
    return result
def verify_linktrack_group_name(dut,**kwargs):
    """
    Author: Gangadhara Sahu (gangadhara.sahu@broadcom.com)
    verify_linktrack_group_name(dut=dut1,name="group1",description="MLAG_LINK_TRACK",timeout="10",
        upstream_plist=["Ethernet3","Ethernet9"],downstream_plist=["PortChannel10"])
    To verify linktrack group <group-name>
    :param dut:
    :param name: mandatory track group name
    :param description:
    :param timeout:
    :param upstream_plist: List of upstream interfaces
    :param downstream_plist: List of downstream portchannels
    :return: True or False
    """
    cli_type = kwargs.pop('cli_type', st.get_ui_type(dut,**kwargs))
    if 'name' not in kwargs:
        st.error("Mandatory arg name is not present")
        return False
    if cli_type == 'click':
        output = st.show(dut,"show linktrack group {}".format(kwargs['name']))
    elif cli_type == 'klish':
        output = st.show(dut,"show link state tracking {}".format(kwargs['name']), type='klish')
    elif cli_type in ['rest-put','rest-patch']:
        rest_urls = st.get_datastore(dut, "rest_urls")
        lst_name = kwargs['name']
        # The group attributes live under separate leaves; fetch each one.
        url = rest_urls['get_link_track_description'].format(lst_name)
        lst_description = get_rest(dut, rest_url=url)['output']['openconfig-lst-ext:description']
        url = rest_urls['get_link_track_timeout'].format(lst_name)
        lst_timeout = str(get_rest(dut, rest_url=url)['output']['openconfig-lst-ext:timeout'])
        url = rest_urls['get_link_track_bringup_remain_time'].format(lst_name)
        lst_bringup_time = str(get_rest(dut, rest_url=url)['output']['openconfig-lst-ext:bringup-remaining-time'])
        url = rest_urls['get_link_track_interfaces']
        response = get_rest(dut, rest_url=url)
        st.log('KLISH output for debugging REST')
        st.show(dut, 'show link state tracking {}'.format(kwargs['name']), type='klish')
        if response['output']:
            output = parse_rest_output_linktrack_group(dut,response,lst_timeout,lst_description,lst_bringup_time)
        else:
            st.error("OCYANG-FAIL: verify link track group - Get Response is empty")
            return False
    else:
        st.error("Invalid CLI type - {}".format(cli_type))
        return False
    if len(output) == 0:
        st.error("Output is Empty")
        return False
    if "return_output" in kwargs:
        return True
    no_common_key = 0
    ret_val1 = False
    dict1 = {}
    # Keys that describe the group as a whole (first row) rather than one port.
    common_key_list = ['name','description','timeout','startup_remain_time']
    for key in kwargs:
        if key in common_key_list:
            no_common_key = no_common_key + 1
    if no_common_key > 0:
        rlist = output[0]
        count = 0
        for key in kwargs:
            if rlist[key] == kwargs[key] and key in common_key_list:
                count = count + 1
        if no_common_key == count:
            ret_val1 = True
            for key in kwargs:
                if key in common_key_list:
                    st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
        else:
            for key in kwargs:
                if key in common_key_list:
                    if rlist[key] == kwargs[key]:
                        st.log("Match: Match key {} found => {} : {}".format(key,kwargs[key],rlist[key]))
                    else:
                        st.log("No-Match: Match key {} NOT found => {} : {}".format(key,kwargs[key],rlist[key]))
        st.log("\n")
    # Remove the group-level keys so only per-port keys remain for matching.
    for key in common_key_list:
        if key in kwargs:
            dict1[key] = kwargs[key]
            del kwargs[key]
    if no_common_key > 0 and ret_val1 is False:
        st.error("DUT {} -> Match Not Found {}".format(dut,dict1))
        return ret_val1
    ret_val = True  # Fix: was the string "True"
    if len(kwargs.keys()) > 0:
        #Converting all kwargs to list type to handle single or list of instances
        input_dict_list = []
        for key in kwargs:
            if type(kwargs[key]) is list:
                kwargs[key] = list(kwargs[key])
            else:
                kwargs[key] = [kwargs[key]]
        #convert kwargs into list of dictionary
        # Fix: dict.keys() is not subscriptable on Python 3.
        first_key = next(iter(kwargs))
        for i in range(len(kwargs[first_key])):
            temp_dict = {}
            for key in kwargs:
                temp_dict[key] = kwargs[key][i]
            input_dict_list.append(temp_dict)
        for input_dict in input_dict_list:
            entries = filter_and_select(output,None,match=input_dict)
            if entries:
                st.log("DUT {} -> Match Found {} ".format(dut,input_dict))
            else:
                st.error("DUT {} -> Match Not Found {}".format(dut,input_dict))
                ret_val = False
    return ret_val
def get_port_counters(dut, port, counter, **kwargs):
    """
    Clear interface counters, wait for them to repopulate, and return the
    requested rate counter(s) for one or more ports.

    :param dut: device under test
    :param port: single port name or list of port names
    :param counter: single counter field or list of fields (zipped with ports)
    :param kwargs: cli_type etc.
    :return: list of dicts, one per port, each holding the selected counter
             (klish values are annotated with " MB/s"/" KB/s" units)
    """
    cli_type = kwargs.pop('cli_type', st.get_ui_type(dut, **kwargs))
    # REST has no counter-rate show; fall back to klish for those UI types.
    cli_type = "klish" if cli_type in ["rest-put", "rest-patch"] else cli_type
    port1.clear_interface_counters(dut, cli_type=cli_type)
    port_range_list = list(port) if isinstance(port, list) else [port]
    cntr_range_list = list(counter) if isinstance(counter, list) else [counter]
    # Give the cleared counters time to accumulate before reading.
    if cli_type == 'click':
        st.wait(3)
    else:
        st.wait(1)
    list1 = []
    for prt, cntr in zip(port_range_list, cntr_range_list):
        if cli_type == "click":
            if '/' in prt:
                # Translate the alias form (Ethernet x/y) to the internal name.
                prt = st.get_other_names(dut, [prt])[0]
            # Bug fix: previously this show was issued twice and the first
            # result silently discarded; a single read is sufficient.
            output = st.show(dut, "show interface counters -i {}".format(prt), type=cli_type)
            entries = filter_and_select(output, (cntr,), {'iface': prt})
            if entries:
                list1.append(entries[0])
            else:
                # Bug fix: guard against IndexError when the port is missing
                # from the counters output.
                st.log("interface {} is not found in the show interface counters O/P".format(prt))
        if cli_type == "klish":
            output = port1.get_interface_counters_all(dut, port=prt, cli_type=cli_type)
            entries = filter_and_select(output, (cntr,), {'iface': prt})
            if output == [] or entries == []:
                st.log("interface {} is not found in the show interface counters O/P".format(prt))
                dict1 = {}
                dict1.update({"rx_bps": "0.0 KB/s"})
                list1.append(dict1)
            else:
                # klish reports the raw rate in MB/s; annotate with units and
                # rescale small values to KB/s for readability.
                if float(entries[0][cntr]) >= 1.0:
                    value = float(entries[0][cntr])
                    entries[0][cntr] = str(value) + " MB/s"
                elif float(entries[0][cntr]) < 1.0 and float(entries[0][cntr]) >= 0.001:
                    value = float(entries[0][cntr]) * 1024
                    entries[0][cntr] = str(value) + " KB/s"
                elif float(entries[0][cntr]) < 0.001:
                    # Rate too small to trust a single sample; poll with retries.
                    entries[0][cntr] = get_port_rate_inklish(dut, prt=prt, cntr=cntr)
                list1.append(entries[0])
    return list1
def get_port_rate_inklish(dut, prt, cntr):
    """
    Poll a klish interface-rate counter up to 4 times (5s apart) until it
    reaches a measurable value, and return it annotated with units.

    :param dut: device under test
    :param prt: interface name
    :param cntr: counter field to read
    :return: "<value> MB/s" / "<value> KB/s", or "0.0 B/s" when the rate never
             rises above 0.001 MB/s or the port is absent from the output
    """
    for i in range(4):
        st.wait(5, "\n\n###### Retry attempt {} for interface {} {} check #### \n".format(i, prt, cntr))
        output = port1.get_interface_counters_all(dut, port=prt, cli_type="klish")
        entries = filter_and_select(output, (cntr,), {'iface': prt})
        # Bug fix: the emptiness check must come BEFORE any entries[0] access;
        # previously the debug log below indexed entries[0] first and raised
        # IndexError when the port was missing.
        if output == [] or entries == []:
            st.log("interface {} is not found in the show interface counters O/P".format(prt))
            return "0.0 B/s"
        st.log("\n\n###### interface {} {} shows {} #####\n".format(prt, cntr, float(entries[0][cntr])))
        if float(entries[0][cntr]) >= 1.0:
            value = float(entries[0][cntr])
            entries[0][cntr] = str(value) + " MB/s"
            return str(value) + " MB/s"
        elif float(entries[0][cntr]) < 1.0 and float(entries[0][cntr]) >= 0.001:
            value = float(entries[0][cntr]) * 1024
            entries[0][cntr] = str(value) + " KB/s"
            return str(value) + " KB/s"
        elif float(entries[0][cntr]) < 0.001:
            # Rate still negligible; retry after the next wait.
            continue
    return "0.0 B/s"
def neigh_suppress_config(dut, vlan, config='yes', skip_error=False, cli_type=''):
    """
    Enable or disable ARP/ND (neighbor) suppression on a VLAN.

    :param dut: device to be configured
    :param vlan: VLAN id or name (e.g. "100" or "Vlan100")
    :param config: 'yes' to enable suppression, anything else to disable
    :param skip_error: pass-through to st.config error handling
    :param cli_type: click/klish/rest-put/rest-patch (auto-detected if empty)
    :return: st.config() result for CLI types, None/False for REST types

    usage:
        neigh_suppress_config(dut1, "Vlan100", config="yes", cli_type='click')
        neigh_suppress_config(dut1, "Vlan100", config="yes", cli_type='klish')
        neigh_suppress_config(dut1, "Vlan100", config="no", cli_type='click')
    Created by: Ganagadhar <gangadhara.sahu@broadcom.com>
    """
    cli_type = st.get_ui_type(dut, cli_type=cli_type)
    enable = (config == 'yes')
    conf_str, action = ('', 'enable') if enable else ('no', 'disable')
    if cli_type == 'click':
        command = "config neigh_suppress {} {}".format(action, vlan)
    elif cli_type == 'klish':
        command = [
            'interface Vlan {}'.format(vlan),
            '{} neigh-suppress'.format(conf_str),
            'exit',
        ]
    elif cli_type in ['rest-put', 'rest-patch']:
        rest_urls = st.get_datastore(dut, "rest_urls")
        vlan_data = str(vlan) if type(vlan) is not str else vlan
        # Normalise to the "VlanN" form expected by the REST URL templates.
        vlan_str = vlan_data if 'Vlan' in vlan_data else 'Vlan' + vlan_data
        payload = {"openconfig-vxlan:config": {"arp-and-nd-suppress": "ENABLE"}}
        if enable:
            url = rest_urls['vxlan_arp_nd_suppress'].format(vlan_str)
            if not config_rest(dut, http_method=cli_type, rest_url=url, json_data=payload):
                st.banner('FAIL-OCYANG: ARP and ND suppress config on vlan Failed')
                return False
        else:
            # Disable is modelled as a DELETE of the suppress leaf.
            url = rest_urls['vxlan_arp_nd_suppress_delete'].format(vlan_str)
            if not delete_rest(dut, rest_url=url):
                st.banner('FAIL-OCYANG: ARP and ND suppress UnConfig on vlan Failed')
                return False
        return
    else:
        st.error("Invalid CLI type - {}".format(cli_type))
        return False
    st.debug(command)
    return st.config(dut, command, skip_error_check=skip_error, type=cli_type)
def verify_neigh_suppress(dut, **kwargs):
    """
    Verify neighbor-suppression status for a specific VLAN or all VLANs.

    Author: Gangadhara Sahu (gangadhara.sahu@broadcom.com)

    :param dut: device under test
    :param identifier: mandatory; "all" or a specific vlan id
    :param total_count: optional; matched against the first output row
    :param kwargs: remaining key/value pairs are matched row-wise against the
        show output (values may be lists for multiple instances)
    :return: True when all requested fields match, False otherwise

    usage:
        verify_neigh_suppress(dut=dut1,identifier="all",cli_type="click",vlan="Vlan450",status="Configured",netdevice="vtepLeaf4-450")
        verify_neigh_suppress(dut=dut1,identifier="450",cli_type="klish",vlan="Vlan450",status="on")
    """
    cli_type = kwargs.pop('cli_type', st.get_ui_type(dut, **kwargs))
    if 'identifier' not in kwargs:
        st.error("Mandetory arg identifier is not present")
        return False
    cmd = 'neigh-suppress' if cli_type == 'click' else 'neighbor-suppress-status'
    command = 'show {}'.format(cmd)
    if kwargs['identifier'] == "all" and cli_type == 'click':
        command += " all"
    elif kwargs['identifier'] != "all" and cli_type == 'click':
        command += " vlan {}".format(kwargs['identifier'])
    elif kwargs['identifier'] != "all" and cli_type == 'klish':
        command += " {}".format(kwargs['identifier'])
    elif kwargs['identifier'] == "all" and cli_type == 'klish':
        command += ""
    elif cli_type in ['rest-put', 'rest-patch']:
        st.log('KLISH output for debugging REST')
        output = st.show(dut, 'show neighbor-suppress-status', type='klish')
        ### URI to be used only if neighbor-suppression is enabled for the VLAN -SONIC-31990
        ## So When expected status is off, verification wil continue based on above klish output
        if kwargs['status'].lower() == 'on':
            rest_urls = st.get_datastore(dut, "rest_urls")
            vlan = kwargs['vlan']
            vlan_data = str(vlan) if type(vlan) is not str else vlan
            vlan_str = 'Vlan' + vlan_data if 'Vlan' not in vlan_data else vlan_data
            url = rest_urls['vxlan_arp_nd_suppress_state'].format(vlan_str)
            response = get_rest(dut, rest_url=url)
            output = {}
            if response['output']:
                output['vlan'] = vlan_str
                if response.get('output', {}).get('openconfig-vxlan:arp-and-nd-suppress', "") == "ENABLE":
                    output['status'] = 'on'
                elif response.get('output', {}).get('openconfig-vxlan:arp-and-nd-suppress', "") == "DISABLE":
                    output['status'] = 'off'
            output = [output]
    else:
        st.error("Invalid CLI type - {}".format(cli_type))
        return False
    if cli_type not in ['rest-put', 'rest-patch']:
        output = st.show(dut, command, type=cli_type)
    if len(output) == 0:
        st.error("Output is Empty")
        return False
    del kwargs['identifier']
    # Phase 1: verify "common" keys against the first output row only.
    no_common_key = 0
    ret_val1 = False
    dict1 = {}
    common_key_list = ['total_count']
    for key in kwargs:
        if key in common_key_list:
            no_common_key = no_common_key + 1
    if no_common_key > 0:
        rlist = output[0]
        count = 0
        for key in kwargs:
            # Bug fix: test membership BEFORE indexing rlist, otherwise a
            # non-common kwarg absent from the row raises KeyError.
            if key in common_key_list and rlist[key] == kwargs[key]:
                count = count + 1
        if no_common_key == count:
            ret_val1 = True
            for key in kwargs:
                if key in common_key_list:
                    st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], rlist[key]))
        else:
            for key in kwargs:
                if key in common_key_list:
                    if rlist[key] == kwargs[key]:
                        st.log("Match: Match key {} found => {} : {}".format(key, kwargs[key], rlist[key]))
                    else:
                        st.log("No-Match: Match key {} NOT found => {} : {}".format(key, kwargs[key], rlist[key]))
            st.log("\n")
        # Remove the common keys so phase 2 only sees row-wise keys.
        for key in common_key_list:
            if key in kwargs:
                dict1[key] = kwargs[key]
                del kwargs[key]
        if no_common_key > 0 and ret_val1 is False:
            st.error("DUT {} -> Match Not Found {}".format(dut, dict1))
            return ret_val1
    # Phase 2: row-wise verification of any remaining kwargs.
    ret_val = True  # bug fix: was the string "True" (always truthy by accident)
    if kwargs:
        # Convert every value to a list to handle single or multiple instances.
        for key in kwargs:
            if type(kwargs[key]) is list:
                kwargs[key] = list(kwargs[key])
            else:
                kwargs[key] = [kwargs[key]]
        # Convert kwargs into a list of per-instance dictionaries.
        input_dict_list = []
        first_key = next(iter(kwargs))  # bug fix: kwargs.keys()[0] fails on Py3
        for i in range(len(kwargs[first_key])):
            temp_dict = {}
            for key in kwargs.keys():
                temp_dict[key] = kwargs[key][i]
            input_dict_list.append(temp_dict)
        for input_dict in input_dict_list:
            entries = filter_and_select(output, None, match=input_dict)
            if entries:
                st.log("DUT {} -> Match Found {} ".format(dut, input_dict))
            else:
                st.error("DUT {} -> Match Not Found {}".format(dut, input_dict))
                ret_val = False
    return ret_val
def show_mclag_uniqueip(dut, **kwargs):
    """
    Display the MCLAG unique/separate IP interfaces (output not parsed).

    :param dut: device under test
    :param cli_type: click/klish (REST types fall back to klish)
    :param mclag_id: MCLAG domain id (required for click)
    :return: None (output is shown for debugging only, skip_tmpl=True)
    """
    cli_type = kwargs.pop('cli_type', st.get_ui_type(dut, **kwargs))
    cli_type = "klish" if cli_type in ["rest-put", "rest-patch"] else cli_type
    if cli_type == "klish":
        cmd = "show mclag separate-ip-interfaces"
    elif cli_type == "click":
        cmd = "mclagdctl -i {} dump unique_ip".format(kwargs['mclag_id'])
    st.show(dut, cmd, skip_tmpl=True, type=cli_type)
def show_ip_neigh(dut, **kwargs):
    """
    Display the IPv4 ARP and IPv6 neighbor tables (output not parsed).

    :param dut: device under test
    :param cli_type: click/klish (REST types fall back to klish)
    :return: None (output is shown for debugging only, skip_tmpl=True)
    """
    cli_type = kwargs.pop('cli_type', st.get_ui_type(dut, **kwargs))
    cli_type = "klish" if cli_type in ["rest-put", "rest-patch"] else cli_type
    if cli_type == "klish":
        # klish splits v4/v6 into two commands.
        for cmd in ("show ip arp", "show ipv6 neighbors"):
            st.show(dut, cmd, skip_tmpl=True, type=cli_type)
    elif cli_type == "click":
        st.show(dut, "ip neigh show", skip_tmpl=True, type=cli_type)
def get_tunnel_list(dut, **kwargs):
    """
    Return the list of remote VTEP addresses of the VXLAN tunnels on the DUT.

    :param dut: device under test
    :param cli_type: click/klish (REST types fall back to klish)
    :return: list of remote VTEP strings (one per tunnel row)
    """
    cli_type = kwargs.pop('cli_type', st.get_ui_type(dut, **kwargs))
    cli_type = "klish" if cli_type in ["rest-put", "rest-patch"] else cli_type
    parsed = st.show(dut, 'show vxlan tunnel', type=cli_type)
    return [row['rem_vtep'] for row in parsed]
def config_vxlan_qos_mode(dut, vtep_name, **kwargs):
    """
    Configure the VxLAN QOS mode on a VTEP interface.

    :param dut: device to be configured
    :param vtep_name: VTEP interface name
    :param kwargs["qos_mode"]: "uniform" or "pipe" (default "pipe dscp 0")
    :param kwargs["pipe_dscp"]: DSCP value to set for PIPE mode
    :return: st.config()/config_rest() result, or False on unsupported UI type

    usage:
        config_vxlan_qos_mode(dut1, "dut1VTEP", qos_mode="uniform")
        config_vxlan_qos_mode(dut1, "dut1VTEP", qos_mode="pipe", pipe_dscp="10")
    Created by: Julius <julius.mariyan@broadcom.com
    """
    cli_type = st.get_ui_type(dut, **kwargs)
    qosMode = kwargs.get("qos_mode", "pipe dscp 0")
    if cli_type == "klish":
        command = []
        command.append('interface vxlan {}'.format(vtep_name))
        if qosMode == "pipe" and "pipe_dscp" in kwargs:
            command.append("qos-mode pipe dscp {}".format(kwargs["pipe_dscp"]))
        elif qosMode == "uniform":
            command.append("qos-mode uniform")
        else:
            # Default / pass-through mode string (e.g. "pipe dscp 0").
            command.append("qos-mode {}".format(qosMode))
        command.append('exit')
        return st.config(dut, command, type=cli_type)
    elif cli_type == "rest-put":
        rest_urls = st.get_datastore(dut, "rest_urls")
        url = rest_urls["config_vxlan_qos"]
        if qosMode == "uniform":
            payload = {"openconfig-interfaces:interface" : [{"name": vtep_name,"config":{"name": vtep_name,
                       "type": "IF_NVE"}, "openconfig-vxlan:vxlan-if": {"config": {"qos-mode": "UNIFORM"}}}]}
        elif qosMode == "pipe" and "pipe_dscp" in kwargs:
            payload = {"openconfig-interfaces:interface" : [{"name": vtep_name,"config":{"name": vtep_name,
                       "type": "IF_NVE"}, "openconfig-vxlan:vxlan-if": {"config": {"qos-mode": "PIPE",
                       "dscp" : int(kwargs["pipe_dscp"])}}}]}
        else:
            # No explicit DSCP supplied: default PIPE with DSCP 0.
            payload = {"openconfig-interfaces:interface" : [{"name": vtep_name,"config":{"name": vtep_name,
                       "type": "IF_NVE"}, "openconfig-vxlan:vxlan-if": {"config": {"qos-mode": "PIPE",
                       "dscp" : 0}}}]}
        return config_rest(dut, http_method='post', rest_url=url, json_data=payload, timeout=10)
    elif cli_type == "rest-patch":
        rest_urls = st.get_datastore(dut, "rest_urls")
        url = rest_urls["modify_vxlan_qos"].format(vtep_name)
        if qosMode == "uniform":
            payload = {"openconfig-vxlan:config":{"qos-mode": qosMode.upper()}}
        elif qosMode == "pipe" and "pipe_dscp" in kwargs:
            payload = {"openconfig-vxlan:config": {"qos-mode": qosMode.upper(),"dscp": int(kwargs["pipe_dscp"])}}
        else:
            payload = {"openconfig-vxlan:config": {"qos-mode": qosMode.upper(),"dscp": 0}}
        return config_rest(dut, http_method='patch', rest_url=url, json_data=payload, timeout=10)
    else:
        # Bug fix: .format() was previously applied to st.error()'s return
        # value instead of to the message string, so the placeholder was
        # never filled and the call could raise AttributeError.
        st.error("Nothing configured for this UI-TYPE {}".format(cli_type))
        return False
def verify_vxlan_qos_mode(dut, vtep_name, qos_mode,**kwargs):
    '''
    purpose:
            This definition is used to verify VxLAN interface QOS mode
    Arguments:
    :param dut: Device name where the command to be executed
    :type dut: string
    :param vtep_name: vtep name to be verified
    :type vtep_name: string
    :param qos_mode: qos mode name to be verified ("uniform" or "pipe")
    :type qos_mode: string
    :param kwargs["pipe_dscp"]:PIPE DSCP value to be verified
    :type kwargs["pipe_dscp"]: dict
    :return: True/False  True - success case; False - Failure case
    usage:  verify_vxlan_qos_mode(dut1,qos_mode="uniform")
            verify_vxlan_qos_mode(dut1,qos_mode="pipe",pipe_dscp=10)
    Created by: Julius <julius.mariyan@broadcom.com
    '''
    success = True
    cli_type = st.get_ui_type(dut, **kwargs)
    if cli_type == "klish":
        # Build the expected field set; with no explicit pipe_dscp, the
        # default expectation is pipe mode with DSCP "0".
        if qos_mode == "pipe" and "pipe_dscp" in kwargs:
            verify_dict = {"qos_mode" : "pipe", "pipe_dscp" : kwargs["pipe_dscp"]}
        elif qos_mode == "uniform":
            verify_dict = {"qos_mode" : "uniform"}
        else:
            verify_dict = {"qos_mode" : "pipe", "pipe_dscp" : "0"}
        cli_out = st.show(dut, 'show vxlan interface', type=cli_type)
        fil_out = filter_and_select(cli_out, verify_dict.keys(), {"vtep_name": vtep_name})
        if len(fil_out) == 0:
            st.error("QOS details {} not found in show output".format(verify_dict.keys()))
            return False
        else:
            # Compare each expected field against the first matching row.
            dut_out = fil_out[0]
            for key in verify_dict.keys():
                if dut_out[key] == verify_dict[key]:
                    st.log("Match found for key {}; expected val: {} and "
                           "obtained val: {}".format(key, verify_dict[key], dut_out[key]))
                else:
                    st.error("Match NOT found for key {}; expected val: {} but "
                             "obtained val: {}".format(key, verify_dict[key], dut_out[key]))
                    success = False
    elif cli_type in ["rest-put", "rest-patch"]:
        rest_urls = st.get_datastore(dut, "rest_urls")
        if "pipe_dscp" not in kwargs:
            # Only the qos-mode leaf is checked; REST reports it upper-case
            # (e.g. "PIPE"), hence the .lower() before comparing.
            url = rest_urls["get_vxlan_qos_mode"].format(vtep_name)
            rest_out = get_rest(dut,rest_url=url,timeout=30)
            if rest_out["status"] == 200:
                if qos_mode == rest_out["output"]["openconfig-vxlan:qos-mode"].lower():
                    st.log("Match found for QOS mode; expected val: {} and "
                           "obtained val: {}".format(qos_mode,
                                                     rest_out["output"]["openconfig-vxlan:qos-mode"].lower()))
                else:
                    st.error("Match NOT found for QOS mode; expected val: {} "
                             "but got: {}".format(qos_mode,
                                                  rest_out["output"]["openconfig-vxlan:qos-mode"].lower()))
                    success = False
            else:
                st.error("VxLAN QOS mode value NOT found in rest output")
                return False
        else:
            # pipe_dscp supplied: only the DSCP leaf is verified (as int).
            url = rest_urls["get_vxlan_qos_pipe_val"].format(vtep_name)
            rest_out = get_rest(dut,rest_url=url,timeout=30)
            if rest_out["status"] == 200:
                if int(kwargs["pipe_dscp"]) == rest_out["output"]["openconfig-vxlan:dscp"]:
                    st.log("Match found for PIPE DSCP; expected val: {} and "
                           "obtained val: {}".format(int(kwargs["pipe_dscp"]),
                                                     rest_out["output"]["openconfig-vxlan:dscp"]))
                else:
                    st.error("Match NOT found for PIPE DSCP; expected val: {} "
                             "but got: {}".format(int(kwargs["pipe_dscp"]),
                                                  rest_out["output"]["openconfig-vxlan:dscp"]))
                    success = False
            else:
                st.error("PIPE DSCP value not found in rest output")
                return False
    return success
| 42.759788
| 277
| 0.550497
| 20,337
| 161,632
| 4.186753
| 0.029552
| 0.031816
| 0.01804
| 0.015291
| 0.818394
| 0.781657
| 0.742407
| 0.706199
| 0.672985
| 0.64333
| 0
| 0.012891
| 0.320896
| 161,632
| 3,779
| 278
| 42.771103
| 0.762821
| 0.133012
| 0
| 0.711285
| 0
| 0
| 0.19898
| 0.028128
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016376
| false
| 0.000712
| 0.011036
| 0
| 0.100036
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a3e80c16655b119386446787f82457faa01024f6
| 887
|
py
|
Python
|
mimic/internet.py
|
ramazanpolat/mimic
|
ac2c0b749d7027147acb2f48eb7c0edc3480b643
|
[
"MIT"
] | null | null | null |
mimic/internet.py
|
ramazanpolat/mimic
|
ac2c0b749d7027147acb2f48eb7c0edc3480b643
|
[
"MIT"
] | null | null | null |
mimic/internet.py
|
ramazanpolat/mimic
|
ac2c0b749d7027147acb2f48eb7c0edc3480b643
|
[
"MIT"
] | null | null | null |
import random
class Internet:
    """Placeholder generator for internet-related fake data.

    Every method is currently a stub that returns ``None``; the signatures
    define the intended public API.
    """

    @classmethod
    def top_level_domain(cls, domain=None):
        """Stub: return a top-level domain."""
        return None

    @classmethod
    def domain(cls, top_level_domain=None):
        """Stub: return a domain name."""
        return None

    @classmethod
    def url(cls, domain=None):
        """Stub: return a URL."""
        return None

    @classmethod
    def ip_v4(cls):
        """Stub: return an IPv4 address."""
        return None

    @classmethod
    def ip_v6(cls):
        """Stub: return an IPv6 address."""
        return None

    @classmethod
    def subnet(cls, net=None):
        """Stub: return a subnet."""
        return None

    @classmethod
    def mac_address(cls):
        """Stub: return a MAC address."""
        return None

    @classmethod
    def sha1(cls):
        """Stub: return a SHA-1 digest string."""
        return None

    @classmethod
    def sha256(cls):
        """Stub: return a SHA-256 digest string."""
        return None

    @classmethod
    def user_agent(cls):
        """Stub: return a browser user-agent string."""
        return None

    @classmethod
    def browser(cls):
        """Stub: return a browser name."""
        return None

    @classmethod
    def mime_type(cls):
        """Stub: return a MIME type."""
        return None

    @classmethod
    def color(cls, as_hex=False):
        """Stub: return a color (hex form when as_hex is True)."""
        return None
| 15.561404
| 43
| 0.594138
| 102
| 887
| 5.068627
| 0.303922
| 0.352031
| 0.487427
| 0.55706
| 0.688588
| 0.208897
| 0.143133
| 0
| 0
| 0
| 0
| 0.010169
| 0.334837
| 887
| 56
| 44
| 15.839286
| 0.866102
| 0
| 0
| 0.634146
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.317073
| false
| 0
| 0.02439
| 0.317073
| 0.682927
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
a3ef35f3a44d6eecad9b48be287d005949e09d7b
| 23
|
py
|
Python
|
ImageEdit/__init__.py
|
stonzw/PythonBasicImageProcessingTools
|
143a02e7ecc47f29ea0bc15d0afd376243a1cb37
|
[
"MIT"
] | null | null | null |
ImageEdit/__init__.py
|
stonzw/PythonBasicImageProcessingTools
|
143a02e7ecc47f29ea0bc15d0afd376243a1cb37
|
[
"MIT"
] | null | null | null |
ImageEdit/__init__.py
|
stonzw/PythonBasicImageProcessingTools
|
143a02e7ecc47f29ea0bc15d0afd376243a1cb37
|
[
"MIT"
] | null | null | null |
from .CutTools import *
| 23
| 23
| 0.782609
| 3
| 23
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 23
| 1
| 23
| 23
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.