hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
df9dbb72754e868f1162abb0dc064d8e2d7df267
| 204
|
py
|
Python
|
PYTHON/pythonDesafios/desa107/moeda.py
|
Santos1000/Curso-Python
|
549223a1633f6f619c87554dd8078cf7841bb1df
|
[
"MIT"
] | null | null | null |
PYTHON/pythonDesafios/desa107/moeda.py
|
Santos1000/Curso-Python
|
549223a1633f6f619c87554dd8078cf7841bb1df
|
[
"MIT"
] | null | null | null |
PYTHON/pythonDesafios/desa107/moeda.py
|
Santos1000/Curso-Python
|
549223a1633f6f619c87554dd8078cf7841bb1df
|
[
"MIT"
] | null | null | null |
def aumentar(preço, taxa):
    """Return *preço* increased by *taxa* percent.

    Fixes two defects in the original: the return expression misspelled
    ``preço`` as ``peço`` (NameError), and ``taxa`` was read without ever
    being declared as a parameter — now declared, matching ``diminuir``.
    """
    return preço + (preço * taxa / 100)


def diminuir(preço, taxa):
    """Return *preço* decreased by *taxa* percent."""
    return preço - (preço * taxa / 100)


def dobro(preço):
    """Return twice *preço*."""
    return preço * 2


def metade(preço):
    """Return half of *preço*."""
    return preço / 2
| 18.545455
| 37
| 0.647059
| 29
| 204
| 4.551724
| 0.37931
| 0.25
| 0.181818
| 0.227273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.050955
| 0.230392
| 204
| 11
| 38
| 18.545455
| 0.789809
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
dfb7679fee8b4f4ccd14d1b4b3323d95af978da4
| 4,550
|
py
|
Python
|
test/emit.py
|
JustAnotherArchivist/ircstates
|
1e187db35f0af072985827d2b9bfde81fc3b524a
|
[
"MIT"
] | 11
|
2020-03-12T15:11:24.000Z
|
2021-09-24T14:23:20.000Z
|
test/emit.py
|
JustAnotherArchivist/ircstates
|
1e187db35f0af072985827d2b9bfde81fc3b524a
|
[
"MIT"
] | 4
|
2020-09-24T17:08:16.000Z
|
2021-02-16T22:10:22.000Z
|
test/emit.py
|
JustAnotherArchivist/ircstates
|
1e187db35f0af072985827d2b9bfde81fc3b524a
|
[
"MIT"
] | 6
|
2020-03-12T17:27:50.000Z
|
2022-01-09T17:09:40.000Z
|
import unittest
import ircstates, irctokens
class EmitTest(unittest.TestCase):
    """Verify the Emit objects that Server.parse_tokens() returns."""

    def test_join(self):
        sv = ircstates.Server("test")
        sv.parse_tokens(irctokens.tokenise("001 nickname *"))
        # JOIN by ourselves: the .self flag is set.
        ev = sv.parse_tokens(irctokens.tokenise(":nickname JOIN #chan"))
        self.assertEqual(ev.command, "JOIN")
        self.assertEqual(ev.self, True)
        self.assertEqual(ev.user, sv.users["nickname"])
        self.assertEqual(ev.channel, sv.channels["#chan"])
        # JOIN by a third party: .self stays None.
        ev = sv.parse_tokens(irctokens.tokenise(":other JOIN #chan"))
        self.assertIsNotNone(ev)
        self.assertEqual(ev.command, "JOIN")
        self.assertEqual(ev.self, None)
        self.assertEqual(ev.user, sv.users["other"])
        self.assertEqual(ev.channel, sv.channels["#chan"])

    def test_privmsg(self):
        sv = ircstates.Server("test")
        sv.parse_tokens(irctokens.tokenise("001 nickname *"))
        sv.parse_tokens(irctokens.tokenise(":nickname JOIN #chan"))
        # Message sent by ourselves.
        ev = sv.parse_tokens(irctokens.tokenise(":nickname PRIVMSG #chan :hello"))
        self.assertIsNotNone(ev)
        self.assertEqual(ev.command, "PRIVMSG")
        self.assertEqual(ev.text, "hello")
        self.assertEqual(ev.self_source, True)
        self.assertEqual(ev.user, sv.users["nickname"])
        self.assertEqual(ev.channel, sv.channels["#chan"])
        # Message sent by a joined third party.
        sv.parse_tokens(irctokens.tokenise(":other JOIN #chan"))
        ev = sv.parse_tokens(irctokens.tokenise(":other PRIVMSG #chan :hello2"))
        self.assertIsNotNone(ev)
        self.assertEqual(ev.command, "PRIVMSG")
        self.assertEqual(ev.text, "hello2")
        self.assertEqual(ev.self_source, None)
        self.assertEqual(ev.user, sv.users["other"])
        self.assertEqual(ev.channel, sv.channels["#chan"])

    def test_privmsg_nojoin(self):
        sv = ircstates.Server("test")
        sv.parse_tokens(irctokens.tokenise("001 nickname *"))
        sv.parse_tokens(irctokens.tokenise(":nickname JOIN #chan"))
        # Sender was never seen joining; an Emit with a user is still produced.
        ev = sv.parse_tokens(irctokens.tokenise(":other PRIVMSG #chan :hello"))
        self.assertIsNotNone(ev)
        self.assertEqual(ev.command, "PRIVMSG")
        self.assertEqual(ev.text, "hello")
        self.assertEqual(ev.self_source, None)
        self.assertIsNotNone(ev.user)
        wanted_channel = sv.channels["#chan"]
        self.assertEqual(ev.channel, wanted_channel)

    def test_kick(self):
        sv = ircstates.Server("test")
        sv.parse_tokens(irctokens.tokenise("001 nickname *"))
        sv.parse_tokens(irctokens.tokenise(":nickname JOIN #chan"))
        kicker = sv.users["nickname"]
        chan = sv.channels["#chan"]
        sv.parse_tokens(irctokens.tokenise(":other JOIN #chan"))
        kicked = sv.users["other"]
        ev = sv.parse_tokens(irctokens.tokenise(":nickname KICK #chan other :reason"))
        self.assertIsNotNone(ev)
        self.assertEqual(ev.command, "KICK")
        self.assertEqual(ev.text, "reason")
        self.assertEqual(ev.self_source, True)
        self.assertEqual(ev.user_source, kicker)
        self.assertEqual(ev.user_target, kicked)
        self.assertEqual(ev.channel, chan)

    def test_mode_self(self):
        sv = ircstates.Server("test")
        sv.parse_tokens(irctokens.tokenise("001 nickname *"))
        ev = sv.parse_tokens(irctokens.tokenise("MODE nickname x+i-i+wi-wi"))
        self.assertIsNotNone(ev)
        self.assertEqual(ev.command, "MODE")
        self.assertTrue(ev.self_target)
        # Leading flag without a sigil comes back as "+x"; later flags keep
        # their explicit +/- and their original order.
        self.assertEqual(ev.tokens,
            ["+x", "+i", "-i", "+w", "+i", "-w", "-i"])

    def test_mode_channel(self):
        sv = ircstates.Server("test")
        sv.parse_tokens(irctokens.tokenise("001 nickname *"))
        sv.parse_tokens(irctokens.tokenise(":nickname JOIN #chan"))
        chan = sv.channels["#chan"]
        ev = sv.parse_tokens(irctokens.tokenise(":server MODE #chan +im-m+b-k asd!*@* key"))
        self.assertIsNotNone(ev)
        self.assertEqual(ev.command, "MODE")
        self.assertEqual(ev.channel, chan)
        # Modes that take a parameter keep it attached to their token.
        self.assertEqual(ev.tokens,
            ["+i", "+m", "-m", "+b asd!*@*", "-k key"])
| 42.523364
| 75
| 0.63011
| 488
| 4,550
| 5.797131
| 0.102459
| 0.174973
| 0.221633
| 0.183811
| 0.839873
| 0.827854
| 0.827854
| 0.741958
| 0.731707
| 0.653941
| 0
| 0.005729
| 0.232747
| 4,550
| 106
| 76
| 42.924528
| 0.804641
| 0
| 0
| 0.673913
| 0
| 0
| 0.135385
| 0
| 0
| 0
| 0
| 0
| 0.456522
| 1
| 0.065217
| false
| 0
| 0.021739
| 0
| 0.097826
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
dfecf137dfc9e249dd8f91b4f80be15d4ddee81e
| 539
|
py
|
Python
|
test.py
|
Oprax/sampling
|
85cd07f38be3d54a052afaa293bcee74f6c4d31b
|
[
"MIT"
] | null | null | null |
test.py
|
Oprax/sampling
|
85cd07f38be3d54a052afaa293bcee74f6c4d31b
|
[
"MIT"
] | null | null | null |
test.py
|
Oprax/sampling
|
85cd07f38be3d54a052afaa293bcee74f6c4d31b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# coding: utf-8
"""Print the same four Sampling results with the default and 15-digit precision."""
from sampling import Sampling


def _show(sampler):
    # Identical call sequence for every precision setting.
    print(sampler.intervalFluctuation(10000, 1 / 1000))
    print(sampler.valideSample(15, 10000, 1 / 1000))
    print(sampler.intervalEstimate(22, 100))
    print(sampler.intervalEstimate(22, 100, strict=True))


_show(Sampling())
print('====================================')
_show(Sampling(precision=15))
| 25.666667
| 51
| 0.71243
| 68
| 539
| 5.647059
| 0.352941
| 0.234375
| 0.104167
| 0.15625
| 0.755208
| 0.755208
| 0.755208
| 0.755208
| 0.755208
| 0.755208
| 0
| 0.136821
| 0.077922
| 539
| 20
| 52
| 26.95
| 0.635815
| 0.064935
| 0
| 0.666667
| 0
| 0
| 0.071713
| 0.071713
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.083333
| 0.75
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
5f073e265ef3123e8b993fb84ecbc89c342fd859
| 7,523
|
py
|
Python
|
tests/modifier/TestMergeEvent.py
|
dstore-dbap/LumberMill
|
b7cbadc209a83386871735b8ad88b61da917a6ab
|
[
"Apache-2.0"
] | 15
|
2015-12-14T19:07:28.000Z
|
2022-02-28T13:32:11.000Z
|
tests/modifier/TestMergeEvent.py
|
dstore-dbap/LumberMill
|
b7cbadc209a83386871735b8ad88b61da917a6ab
|
[
"Apache-2.0"
] | null | null | null |
tests/modifier/TestMergeEvent.py
|
dstore-dbap/LumberMill
|
b7cbadc209a83386871735b8ad88b61da917a6ab
|
[
"Apache-2.0"
] | 4
|
2017-02-08T10:49:55.000Z
|
2019-03-19T18:47:46.000Z
|
import os
import time
import mock
import lumbermill.utils.DictUtils as DictUtils
from tests.ModuleBaseTestCase import ModuleBaseTestCase
from lumbermill.modifier import MergeEvent
class TestMergeEvent(ModuleBaseTestCase):
    """Tests for the MergeEvent modifier.

    Each test feeds log lines into the module one event at a time and then
    counts how many (merged) events arrive at the receiver.  The fixtures
    are Java stack traces whose first line matches a timestamp pattern;
    continuation lines do not match and are presumably folded into the
    preceding event — TODO confirm against MergeEvent's implementation.
    """

    def setUp(self):
        # Run MergeEvent against a mocked LumberMill instance.
        super(TestMergeEvent, self).setUp(MergeEvent.MergeEvent(mock.Mock()))

    def testMergeEventWithMatchingLines(self):
        # Three full stack traces plus one trailing timestamp line -> 4 events.
        example_input_data = """2015-02-18 14:25:10,661 [http-bio-8080] ERROR errors.GrailsExceptionResolver - IllegalArgumentException occurred when processing request: [GET] /en
no category found for name: en. Stacktrace follows:
java.lang.IllegalArgumentException: no category found for name: en
at de.dbap.data.ECategory.getByName(ECategory.java:26)
at de.dbap.controller.FacetedNavController.index(FacetedNavController.groovy:37)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
2015-02-18 14:52:08,829 [http-bio-8080] ERROR errors.GrailsExceptionResolver - IllegalArgumentException occurred when processing request: [GET] /en
no category found for name: en. Stacktrace follows:
java.lang.IllegalArgumentException: no category found for name: en
at de.dbap.data.ECategory.getByName(ECategory.java:26)
at de.dbap.controller.FacetedNavController.index(FacetedNavController.groovy:37)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
2015-02-18 14:53:35,493 [http-bio-8080] ERROR errors.GrailsExceptionResolver - IllegalArgumentException occurred when processing request: [GET] /en
no category found for name: en. Stacktrace follows:
java.lang.IllegalArgumentException: no category found for name: en
at de.dbap.data.ECategory.getByName(ECategory.java:26)
at de.dbap.controller.FacetedNavController.index(FacetedNavController.groovy:37)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
2015-02-18 14:53:35,493 [http-bio-8080]"""
        # Merge marker: "YYYY-MM-DD HH:MM:SS,ms [...]" timestamp prefix.
        self.test_object.configure({'pattern': '\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d+ [^]]*\]'})
        self.checkConfiguration()
        self.test_object.initAfterFork()
        for input_line in example_input_data.split("\n"):
            event = DictUtils.getDefaultEventDict({'data': input_line}, received_from='TestMergeEvent_%s' % os.getpid())
            self.test_object.receiveEvent(event)
        # NOTE(review): timing-dependent — the sleep apparently lets the module
        # flush buffered lines before we count; verify against MergeEvent.
        time.sleep(2)
        events = []
        for event in self.receiver.getEvent():
            events.append(event)
        self.assertEqual(4, len(events))

    def testMergeEventWithNonMatchingLines(self):
        # No line matches the timestamp pattern -> nothing is merged,
        # all 4 input lines come out as separate events.
        example_input_data = """Beethoven, Mozart, Chopin, Liszt, Brahms, Panties...I'm sorry...Schumann, Schubert, Mendelssohn and Bach. Names that will live for ever.
Spam, Spam, Spam, lovely Spam
Beethoven, Mozart, Chopin, Liszt, Brahms, Panties...I'm sorry...Schumann, Schubert, Mendelssohn and Bach. Names that will live for ever.
Wonderful Spam, Lovely Spam."""
        self.test_object.configure({'pattern': '\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d+ [^]]*\]'})
        self.checkConfiguration()
        self.test_object.initAfterFork()
        for input_line in example_input_data.split("\n"):
            event = DictUtils.getDefaultEventDict({'data': input_line}, received_from='TestMergeEvent_%s' % os.getpid())
            self.test_object.receiveEvent(event)
        time.sleep(1)
        events = []
        for event in self.receiver.getEvent():
            events.append(event)
        self.assertEqual(len(events), 4)

    def testMergeEventWithMixedMatchingLines(self):
        # Stack traces interleaved with single-line timestamped entries
        # -> 3 merged traces + 2 standalone lines = 5 events.
        example_input_data = """2015-02-18 14:25:10,661 [http-bio-8080] ERROR errors.GrailsExceptionResolver - IllegalArgumentException occurred when processing request: [GET] /en
no category found for name: en. Stacktrace follows:
java.lang.IllegalArgumentException: no category found for name: en
at de.dbap.data.ECategory.getByName(ECategory.java:26)
at de.dbap.controller.FacetedNavController.index(FacetedNavController.groovy:37)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
2015-02-18 14:52:00,829 [http-bio-8080] Beethoven, Mozart, Chopin, Liszt, Brahms, Panties...I'm sorry...Schumann, Schubert, Mendelssohn and Bach. Names that will live for ever.
2015-02-18 14:52:08,829 [http-bio-8080] ERROR errors.GrailsExceptionResolver - IllegalArgumentException occurred when processing request: [GET] /en
no category found for name: en. Stacktrace follows:
java.lang.IllegalArgumentException: no category found for name: en
at de.dbap.data.ECategory.getByName(ECategory.java:26)
at de.dbap.controller.FacetedNavController.index(FacetedNavController.groovy:37)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
2015-02-18 14:53:01,829 [http-bio-8080] Beethoven, Mozart, Chopin, Liszt, Brahms, Panties...I'm sorry...Schumann, Schubert, Mendelssohn and Bach. Names that will live for ever.
2015-02-18 14:53:35,493 [http-bio-8080] ERROR errors.GrailsExceptionResolver - IllegalArgumentException occurred when processing request: [GET] /en
no category found for name: en. Stacktrace follows:
java.lang.IllegalArgumentException: no category found for name: en
at de.dbap.data.ECategory.getByName(ECategory.java:26)
at de.dbap.controller.FacetedNavController.index(FacetedNavController.groovy:37)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)"""
        self.test_object.configure({'pattern': '\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d+ [^]]*\]'})
        self.checkConfiguration()
        self.test_object.initAfterFork()
        for input_line in example_input_data.split("\n"):
            event = DictUtils.getDefaultEventDict({'data': input_line}, received_from='TestMergeEvent_%s' % os.getpid())
            self.test_object.receiveEvent(event)
        time.sleep(1.5)
        events = []
        for event in self.receiver.getEvent():
            events.append(event)
        self.assertEqual(len(events), 5)

    def testNewlineEndEvent(self):
        # 'EndOfEvent' marker: a trailing newline closes the merged event,
        # so both input lines end up joined in a single event.
        self.test_object.configure({'pattern': "\n$",
            'pattern_marks': 'EndOfEvent'})
        self.checkConfiguration()
        self.test_object.initAfterFork()
        event = DictUtils.getDefaultEventDict({'data': 'No newline.'}, received_from='TestMergeEvent_%s' % os.getpid())
        self.test_object.receiveEvent(event)
        event = DictUtils.getDefaultEventDict({'data': "But now: \n"}, received_from='TestMergeEvent_%s' % os.getpid())
        self.test_object.receiveEvent(event)
        time.sleep(1.5)
        events = []
        for event in self.receiver.getEvent():
            events.append(event)
        self.assertEqual(len(events), 1)
        self.assertEqual(events[0]['data'], 'No newline.But now: \n')
| 61.162602
| 180
| 0.737206
| 952
| 7,523
| 5.781513
| 0.147059
| 0.019622
| 0.008176
| 0.039244
| 0.884266
| 0.878815
| 0.869913
| 0.869913
| 0.869913
| 0.869913
| 0
| 0.046605
| 0.138642
| 7,523
| 123
| 181
| 61.162602
| 0.802778
| 0
| 0
| 0.721739
| 0
| 0.113043
| 0.637693
| 0.336922
| 0
| 0
| 0
| 0
| 0.043478
| 1
| 0.043478
| false
| 0
| 0.052174
| 0
| 0.104348
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5f0aeca4c2aad9706f0727b0cfd4f0678aa3b727
| 5,341
|
py
|
Python
|
imtoolkit/tests/IdealRicianChannelTest.py
|
ishikawalab/imtoolk
|
e0b2d06fe734d7084644c1357ba68bd9cf79a309
|
[
"MIT"
] | 7
|
2020-10-15T11:28:21.000Z
|
2022-01-01T07:46:09.000Z
|
imtoolkit/tests/IdealRicianChannelTest.py
|
ishikawalab/imtoolk
|
e0b2d06fe734d7084644c1357ba68bd9cf79a309
|
[
"MIT"
] | null | null | null |
imtoolkit/tests/IdealRicianChannelTest.py
|
ishikawalab/imtoolk
|
e0b2d06fe734d7084644c1357ba68bd9cf79a309
|
[
"MIT"
] | 3
|
2021-01-15T08:50:24.000Z
|
2022-01-19T03:19:17.000Z
|
# Copyright (c) IMToolkit Development Team
# This toolkit is released under the MIT License, see LICENSE.txt
import unittest
import numpy as np
from imtoolkit.Util import frequencyToWavelength
from imtoolkit.IdealRicianChannel import IdealRicianChannel
class IdealRicianChannelTest(unittest.TestCase):
    """Statistical sanity checks for IdealRicianChannel realizations.

    Every test builds antenna geometries, draws IT random channel matrices
    and verifies (a) the mean squared Frobenius norm is M * N (to 1 decimal
    place) and (b) the mean matrix rank equals the expected value.  The
    duplicated verification tail of the original six tests is factored into
    the private helper ``_verify_channel``; the public test methods and
    their observable behavior are unchanged.
    """

    def _verify_channel(self, channel, IT, M, N, expected_rank, show_svd=True):
        """Randomize *channel* and assert its norm/rank statistics.

        :param channel: an IdealRicianChannel instance to draw from
        :param IT: number of channel realizations
        :param M: number of transmit antennas
        :param N: number of receive antennas
        :param expected_rank: expected mean rank of the realizations
        :param show_svd: when True, print the mean singular values
            (matches the original tests, which printed after asserting)
        """
        channel.randomize()
        H = channel.getChannel().reshape(IT, N, M)
        norms = np.square(np.linalg.norm(H, axis=(1, 2)))
        self.assertAlmostEqual(np.mean(norms), M * N, places=1)
        meanrank = np.mean(np.linalg.matrix_rank(H))
        self.assertAlmostEqual(meanrank, expected_rank)
        if show_svd:
            print(np.mean(np.linalg.svd(H)[1], axis=0))

    def test_ChannelM2ULA(self):
        np.set_printoptions(linewidth=np.inf)
        wavelength = frequencyToWavelength(5.0 * 10 ** 9)  # 5 [GHz]
        IT, M, N, ae_spacing, distance_tx_rx = 10000, 2, 2, wavelength / 2, 5.0
        tx, ty, tz = IdealRicianChannel.getPositionsUniformLinearArray(M, ae_spacing, 0)
        rx, ry, rz = IdealRicianChannel.getPositionsUniformLinearArray(N, ae_spacing, distance_tx_rx)
        channel = IdealRicianChannel(IT, 10, wavelength, tx, ty, tz, rx, ry, rz)
        # 2x2 full-rank case; the original test did not print singular values.
        self._verify_channel(channel, IT, M, N, expected_rank=M, show_svd=False)

    def test_ChannelM4ULA(self):
        np.set_printoptions(linewidth=np.inf)
        wavelength = frequencyToWavelength(5.0 * 10 ** 9)  # 5 [GHz]
        IT, M, N, ae_spacing, distance_tx_rx = 100000, 4, 4, wavelength / 2, 5.0
        tx, ty, tz = IdealRicianChannel.getPositionsUniformLinearArray(M, ae_spacing, 0)
        rx, ry, rz = IdealRicianChannel.getPositionsUniformLinearArray(N, ae_spacing, distance_tx_rx)
        channel = IdealRicianChannel(IT, 2.5, wavelength, tx, ty, tz, rx, ry, rz)
        self._verify_channel(channel, IT, M, N, expected_rank=M)

    def test_ChannelM16ULA(self):
        np.set_printoptions(linewidth=np.inf)
        wavelength = frequencyToWavelength(5.0 * 10 ** 9)  # 5 [GHz]
        IT, M, N, ae_spacing, distance_tx_rx = 100000, 16, 4, wavelength / 2, 5.0
        tx, ty, tz = IdealRicianChannel.getPositionsUniformLinearArray(M, ae_spacing, 0)
        rx, ry, rz = IdealRicianChannel.getPositionsUniformLinearArray(N, ae_spacing, distance_tx_rx)
        channel = IdealRicianChannel(IT, 2.5, wavelength, tx, ty, tz, rx, ry, rz)
        # Rank is limited by the smaller antenna count N.
        self._verify_channel(channel, IT, M, N, expected_rank=N)

    def test_ChannelM16ULAbohagen(self):
        np.set_printoptions(linewidth=np.inf)
        wavelength = frequencyToWavelength(5.0 * 10 ** 9)  # 5 [GHz]
        IT, M, N, ae_spacing, distance_tx_rx = 100000, 16, 4, wavelength, 5.0
        rx, ry, rz = IdealRicianChannel.getPositionsUniformLinearArray(N, ae_spacing, distance_tx_rx)
        # Transmit spacing derived from the Tx-Rx distance instead of wavelength.
        dtx = distance_tx_rx / max(M, N)
        tx, ty, tz = IdealRicianChannel.getPositionsUniformLinearArray(M, dtx, 0)
        channel = IdealRicianChannel(IT, 2.5, wavelength, tx, ty, tz, rx, ry, rz)
        self._verify_channel(channel, IT, M, N, expected_rank=N)

    def test_ChannelM16N4Rec(self):
        np.set_printoptions(linewidth=np.inf)
        wavelength = frequencyToWavelength(5.0 * 10 ** 9)  # 5 [GHz]
        IT, M, N = 100000, 16, 4
        tx, ty, tz = IdealRicianChannel.getPositionsRectangular2d(M, wavelength, 3.0)
        rx, ry, rz = IdealRicianChannel.getPositionsRectangular2d(N, wavelength, 0.0)
        channel = IdealRicianChannel(IT, 2.5, wavelength, tx, ty, tz, rx, ry, rz)
        self._verify_channel(channel, IT, M, N, expected_rank=N)

    def test_ChannelM16N8Rec(self):
        np.set_printoptions(linewidth=np.inf)
        wavelength = frequencyToWavelength(5.0 * 10 ** 9)  # 5 [GHz]
        IT, M, N = 100000, 16, 8
        tx, ty, tz = IdealRicianChannel.getPositionsRectangular2d(M, wavelength, 3.0)
        rx, ry, rz = IdealRicianChannel.getPositionsRectangular2d(N, wavelength, 0.0)
        channel = IdealRicianChannel(IT, 2.5, wavelength, tx, ty, tz, rx, ry, rz)
        self._verify_channel(channel, IT, M, N, expected_rank=N)
# Allow running this test module directly with the unittest CLI runner.
if __name__ == '__main__':
    unittest.main()
| 46.043103
| 101
| 0.652687
| 703
| 5,341
| 4.880512
| 0.129445
| 0.039639
| 0.020985
| 0.044885
| 0.878753
| 0.878753
| 0.862722
| 0.862722
| 0.862722
| 0.862722
| 0
| 0.039618
| 0.215503
| 5,341
| 115
| 102
| 46.443478
| 0.779236
| 0.028459
| 0
| 0.758242
| 0
| 0
| 0.001544
| 0
| 0
| 0
| 0
| 0
| 0.131868
| 1
| 0.065934
| false
| 0
| 0.043956
| 0
| 0.120879
| 0.120879
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5f0c75aad0e8bfdb428515d873019c3f2b800757
| 419
|
py
|
Python
|
tests/parser/wellfounded.9.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/wellfounded.9.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/wellfounded.9.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
# Parser round-trip fixture for well-founded semantics: the expected output
# is the program echoed back verbatim, so both variables hold one literal.
input = """
bluePath( X, Y ) :- blue( X, Y ).
bluePath( X, Y ) :- blue( X, Z ), bluePath( Z, Y ).
monopoly( X, Y ) :- red( X, Y ), not bluePath( X, Y ).
red( 1, 2 ).
red( 2, 3 ).
blue( 1, 2 ).
"""

output = input
| 18.217391
| 55
| 0.436754
| 70
| 419
| 2.614286
| 0.185714
| 0.131148
| 0.327869
| 0.306011
| 0.939891
| 0.939891
| 0.939891
| 0.939891
| 0.939891
| 0.939891
| 0
| 0.041237
| 0.305489
| 419
| 22
| 56
| 19.045455
| 0.587629
| 0
| 0
| 0.875
| 0
| 0
| 0.922693
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
a029fbd7c45c3bd831e54a70314867d460f5b307
| 15,461
|
py
|
Python
|
keystone/test/functional/test_authentication.py
|
admiyo/keystone
|
9452cf04bc8b0a4dc66dc640615d5ace1ca715f2
|
[
"Apache-2.0"
] | null | null | null |
keystone/test/functional/test_authentication.py
|
admiyo/keystone
|
9452cf04bc8b0a4dc66dc640615d5ace1ca715f2
|
[
"Apache-2.0"
] | null | null | null |
keystone/test/functional/test_authentication.py
|
admiyo/keystone
|
9452cf04bc8b0a4dc66dc640615d5ace1ca715f2
|
[
"Apache-2.0"
] | null | null | null |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010-2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2 as unittest
from keystone.test.functional import common
class AuthenticationTest(common.FunctionalTestCase):
    """Password-based authentication against the service and admin APIs.

    setUp provisions a tenant, a user belonging to it, and one service with
    an endpoint template mapped to the tenant, so successful auth responses
    carry a non-empty service catalog.
    """

    def setUp(self, *args, **kwargs):
        super(AuthenticationTest, self).setUp(*args, **kwargs)
        password = common.unique_str()
        self.tenant = self.create_tenant().json['tenant']
        self.user = self.create_user(user_password=password,
            tenant_id=self.tenant['id']).json['user']
        # Remember the plaintext password for the auth calls below.
        self.user['password'] = password
        self.services = {}
        self.endpoint_templates = {}
        self.services = self.create_service().json['OS-KSADM:service']
        self.endpoint_templates = self.create_endpoint_template(
            name=self.services['name'], \
            type=self.services['type']).\
            json['OS-KSCATALOG:endpointTemplate']
        # Map the endpoint to the tenant so it shows up in the catalog.
        self.create_endpoint_for_tenant(self.tenant['id'],
            self.endpoint_templates['id'])

    def test_authenticate_for_a_tenant(self):
        # JSON auth scoped to a tenant: expect a token and a catalog
        # containing only regular-user URLs.
        response = self.authenticate(self.user['name'], self.user['password'],
            self.tenant['id'], assert_status=200)
        self.assertIsNotNone(response.json['access']['token'])
        service_catalog = response.json['access']['serviceCatalog']
        self.assertIsNotNone(service_catalog)
        self.check_urls_for_regular_user(service_catalog)

    def test_authenticate_for_a_tenant_xml(self):
        # Same request in the XML representation.
        data = ('<?xml version="1.0" encoding="UTF-8"?> '
            '<auth xmlns="%s" tenantId="%s">'
            '<passwordCredentials username="%s" password="%s" '
            '/> </auth>') % (
                self.xmlns, self.tenant['id'],
                self.user['name'], self.user['password'])
        response = self.post_token(as_xml=data, assert_status=200)
        self.assertEquals(response.xml.tag, '{%s}access' % self.xmlns)
        service_catalog = response.xml.find('{%s}serviceCatalog' % self.xmlns)
        self.check_urls_for_regular_user_xml(service_catalog)

    def test_authenticate_for_a_tenant_on_admin_api(self):
        # Same JSON auth, but issued against the admin API port.
        response = self.authenticate(self.user['name'], self.user['password'],
            self.tenant['id'], assert_status=200, request_type='admin')
        self.assertIsNotNone(response.json['access']['token'])
        self.assertIsNotNone(response.json['access']['serviceCatalog'])
        service_catalog = response.json['access']['serviceCatalog']
        self.check_urls_for_regular_user(service_catalog)

    def test_authenticate_for_a_tenant_xml_on_admin_api(self):
        # XML auth against the admin API.
        data = ('<?xml version="1.0" encoding="UTF-8"?> '
            '<auth xmlns="%s" tenantId="%s">'
            '<passwordCredentials username="%s" password="%s" '
            '/> </auth>') % (
                self.xmlns, self.tenant['id'],
                self.user['name'], self.user['password'])
        response = self.post_token(as_xml=data, assert_status=200,
            request_type='admin')
        self.assertEquals(response.xml.tag, '{%s}access' % self.xmlns)
        service_catalog = response.xml.find('{%s}serviceCatalog' % self.xmlns)
        self.check_urls_for_regular_user_xml(service_catalog)

    def test_authenticate_user_disabled(self):
        # A disabled user must be rejected with 403 Forbidden.
        self.disable_user(self.user['id'])
        self.authenticate(self.user['name'], self.user['password'],
            self.tenant['id'], assert_status=403)

    def test_authenticate_user_wrong(self):
        # Malformed JSON credentials (bad field name) -> 400 Bad Request.
        data = {
            "auth": {
                "passwordCredentials": {
                    "username-field-completely-wrong": self.user['name'],
                    "password": self.user['password']},
                "tenantId": self.tenant['id']}}
        self.post_token(as_json=data, assert_status=400)

    def test_authenticate_user_wrong_xml(self):
        # Malformed XML credentials (bad attribute name) -> 400 Bad Request.
        data = ('<?xml version="1.0" encoding="UTF-8"?> '
            '<passwordCredentials '
            'xmlns="http://docs.openstack.org/identity/api/v2.0" '
            'usernamefieldcompletelywrong="%s" '
            'password="%s" '
            'tenantId="%s"/>') % (
                self.user['name'], self.user['password'], self.tenant['id'])
        self.post_token(as_xml=data, assert_status=400)
class AuthenticationUsingTokenTest(common.FunctionalTestCase):
    """Tests that exchange an existing token for a tenant-scoped token."""

    def setUp(self, *args, **kwargs):
        super(AuthenticationUsingTokenTest, self).setUp(*args, **kwargs)
        password = common.unique_str()
        self.tenant = self.create_tenant().json['tenant']
        self.user = self.create_user(
            user_password=password,
            tenant_id=self.tenant['id']).json['user']
        self.user['password'] = password
        self.services = {}
        self.endpoint_templates = {}
        # Build a five-entry service catalog for the tenant.
        for x in range(5):
            self.services[x] = self.create_service().json['OS-KSADM:service']
            self.endpoint_templates[x] = self.create_endpoint_template(
                name=self.services[x]['name'],
                type=self.services[x]['type']).json[
                    'OS-KSCATALOG:endpointTemplate']
            self.create_endpoint_for_tenant(
                self.tenant['id'], self.endpoint_templates[x]['id'])
        # Unscoped token used as the credential in the tests below.
        self.token = self.authenticate(
            self.user['name'],
            self.user['password']).json['access']['token']['id']

    def test_authenticate_for_a_tenant_using_token(self):
        """Token auth scoped to a tenant returns a service catalog."""
        response = self.authenticate_using_token(
            self.token, self.tenant['id'], assert_status=200)
        self.assertIsNotNone(response.json['access']['token'])
        service_catalog = response.json['access']['serviceCatalog']
        self.assertIsNotNone(service_catalog)
        self.check_urls_for_regular_user(service_catalog)

    def test_authenticate_for_a_tenant_xml(self):
        """Token auth via an XML request body."""
        data = ('<?xml version="1.0" encoding="UTF-8"?> '
                '<auth xmlns="%s" tenantId="%s">'
                '<token id="%s" '
                '/> </auth>') % (
                    self.xmlns, self.tenant['id'],
                    self.token)
        response = self.post_token(as_xml=data, assert_status=200)
        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(response.xml.tag, '{%s}access' % self.xmlns)
        service_catalog = response.xml.find('{%s}serviceCatalog' % self.xmlns)
        self.check_urls_for_regular_user_xml(service_catalog)

    def test_authenticate_for_a_tenant_on_admin_api(self):
        """Token auth scoped to a tenant through the admin API."""
        # Consistency fix: assert the expected 200 like the sibling tests;
        # the original omitted assert_status here.
        response = self.authenticate_using_token(
            self.token, self.tenant['id'], assert_status=200,
            request_type='admin')
        self.assertIsNotNone(response.json['access']['token'])
        self.assertIsNotNone(response.json['access']['serviceCatalog'])
        service_catalog = response.json['access']['serviceCatalog']
        self.check_urls_for_regular_user(service_catalog)

    def test_authenticate_for_a_tenant_xml_on_admin_api(self):
        """Token auth via XML through the admin API."""
        data = ('<?xml version="1.0" encoding="UTF-8"?> '
                '<auth xmlns="%s" tenantId="%s">'
                '<token id="%s" '
                '/> </auth>') % (
                    self.xmlns, self.tenant['id'],
                    self.token)
        response = self.post_token(as_xml=data, assert_status=200,
                                   request_type='admin')
        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(response.xml.tag, '{%s}access' % self.xmlns)
        service_catalog = response.xml.find('{%s}serviceCatalog' % self.xmlns)
        self.check_urls_for_regular_user_xml(service_catalog)
class UnScopedAuthenticationTest(common.FunctionalTestCase):
    """Authentication tests that do not scope the token to a tenant."""

    def setUp(self, *args, **kwargs):
        super(UnScopedAuthenticationTest, self).setUp(*args, **kwargs)
        self.tenant = self.create_tenant().json['tenant']
        self.user = self.create_user_with_known_password(
            tenant_id=self.tenant['id']).json['user']
        self.services = {}
        self.endpoint_templates = {}
        # Build a five-entry service catalog for the tenant.
        for x in range(5):
            self.services[x] = self.create_service().json['OS-KSADM:service']
            self.endpoint_templates[x] = self.create_endpoint_template(
                name=self.services[x]['name'],
                type=self.services[x]['type']).json[
                    'OS-KSCATALOG:endpointTemplate']
            self.create_endpoint_for_tenant(
                self.tenant['id'], self.endpoint_templates[x]['id'])

    def test_authenticate(self):
        """Unscoped password auth still returns a service catalog."""
        response = self.authenticate(self.user['name'], self.user['password'],
                                     assert_status=200)
        self.assertIsNotNone(response.json['access']['token'])
        service_catalog = response.json['access'].get('serviceCatalog')
        self.assertIsNotNone(service_catalog, response.json)
        self.check_urls_for_regular_user(service_catalog)

    def test_authenticate_xml(self):
        """Unscoped password auth with an XML request body."""
        data = ('<?xml version="1.0" encoding="UTF-8"?> '
                '<auth xmlns="%s" >'
                '<passwordCredentials username="%s" password="%s" '
                '/> </auth>') % (
                    self.xmlns, self.user['name'],
                    self.user['password'])
        response = self.post_token(as_xml=data, assert_status=200)
        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(response.xml.tag, '{%s}access' % self.xmlns)
        service_catalog = response.xml.find('{%s}serviceCatalog' % self.xmlns)
        self.check_urls_for_regular_user_xml(service_catalog)

    def test_authenticate_on_admin_api(self):
        """Unscoped password auth through the admin API."""
        response = self.authenticate(self.user['name'], self.user['password'],
                                     assert_status=200, request_type='admin')
        self.assertIsNotNone(response.json['access'].get('token'),
                             response.json)
        self.assertIsNotNone(response.json['access'].get('serviceCatalog'),
                             response.json)
        service_catalog = response.json['access']['serviceCatalog']
        self.check_urls_for_regular_user(service_catalog)

    def test_authenticate_for_a_tenant_xml_on_admin_api(self):
        """Tenant-scoped XML password auth through the admin API."""
        data = ('<?xml version="1.0" encoding="UTF-8"?> '
                '<auth xmlns="%s" tenantId="%s">'
                '<passwordCredentials username="%s" password="%s" '
                '/> </auth>') % (
                    self.xmlns, self.tenant['id'],
                    self.user['name'], self.user['password'])
        response = self.post_token(as_xml=data,
                                   assert_status=200, request_type='admin')
        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(response.xml.tag, '{%s}access' % self.xmlns)
        service_catalog = response.xml.find('{%s}serviceCatalog' % self.xmlns)
        self.check_urls_for_regular_user_xml(service_catalog)

    def test_authenticate_without_default_tenant(self):
        """Auth succeeds for a user with a role but no default tenant."""
        # Create user with no default tenant set (but granted a role).
        self.nodefaultuser = self.create_user_with_known_password(
            ).json['user']
        self.role = self.create_role().json['role']
        self.grant_role_to_user(self.nodefaultuser['id'], self.role['id'],
                                self.tenant['id'])
        response = self.authenticate(self.nodefaultuser['name'],
                                     self.nodefaultuser['password'],
                                     tenant_id=None, assert_status=200)
        self.assertIsNotNone(response.json['access']['token'])
        # No tenant was requested, so the token must not be tenant-scoped.
        self.assertNotIn('tenant', response.json['access']['token'])
class AdminUserAuthenticationTest(common.FunctionalTestCase):
    """Authentication tests for a user holding the global Admin role."""

    def setUp(self, *args, **kwargs):
        super(AdminUserAuthenticationTest, self).setUp(*args, **kwargs)
        password = common.unique_str()
        self.tenant = self.create_tenant().json['tenant']
        self.user = self.create_user(
            user_password=password,
            tenant_id=self.tenant['id']).json['user']
        self.role = self.get_role_by_name('Admin').json['role']
        self.grant_global_role_to_user(self.user['id'], self.role['id'])
        self.user['password'] = password
        self.services = {}
        self.endpoint_templates = {}
        # Build a five-entry service catalog for the tenant.
        for x in range(5):
            self.services[x] = self.create_service().json['OS-KSADM:service']
            self.endpoint_templates[x] = self.create_endpoint_template(
                name=self.services[x]['name'],
                type=self.services[x]['type']).json[
                    'OS-KSCATALOG:endpointTemplate']
            self.create_endpoint_for_tenant(
                self.tenant['id'], self.endpoint_templates[x]['id'])

    def test_authenticate(self):
        """Unscoped auth returns an admin-appropriate catalog."""
        response = self.authenticate(self.user['name'], self.user['password'],
                                     assert_status=200)
        self.assertIsNotNone(response.json['access']['token'])
        service_catalog = response.json['access']['serviceCatalog']
        self.assertIsNotNone(service_catalog)
        self.check_urls_for_admin_user(service_catalog)

    def test_authenticate_xml(self):
        """Unscoped XML auth returns an admin-appropriate catalog."""
        data = ('<?xml version="1.0" encoding="UTF-8"?> '
                '<auth xmlns="%s" >'
                '<passwordCredentials username="%s" password="%s" '
                '/> </auth>') % (
                    self.xmlns, self.user['name'],
                    self.user['password'])
        response = self.post_token(as_xml=data, assert_status=200)
        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(response.xml.tag, '{%s}access' % self.xmlns)
        service_catalog = response.xml.find('{%s}serviceCatalog' % self.xmlns)
        self.check_urls_for_admin_user_xml(service_catalog)

    def test_authenticate_for_a_tenant(self):
        """Tenant-scoped auth returns an admin-appropriate catalog."""
        response = self.authenticate(self.user['name'], self.user['password'],
                                     self.tenant['id'], assert_status=200)
        self.assertIsNotNone(response.json['access']['token'])
        service_catalog = response.json['access']['serviceCatalog']
        self.assertIsNotNone(service_catalog)
        self.check_urls_for_admin_user(service_catalog)

    def test_authenticate_for_a_tenant_xml(self):
        """Tenant-scoped XML auth returns an admin-appropriate catalog."""
        data = ('<?xml version="1.0" encoding="UTF-8"?> '
                '<auth xmlns="%s" tenantId="%s">'
                '<passwordCredentials username="%s" password="%s" '
                '/> </auth>') % (
                    self.xmlns, self.tenant['id'],
                    self.user['name'], self.user['password'])
        response = self.post_token(as_xml=data, assert_status=200)
        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(response.xml.tag, '{%s}access' % self.xmlns)
        service_catalog = response.xml.find('{%s}serviceCatalog' % self.xmlns)
        self.check_urls_for_admin_user_xml(service_catalog)
class MultiTokenTest(common.FunctionalTestCase):
    """Checks authentication behaviour across multiple tenants and users."""

    def setUp(self, *args, **kwargs):
        super(MultiTokenTest, self).setUp(*args, **kwargs)
        self.tenants = {}
        self.users = {}
        # Provision two independent (tenant, user) pairs.
        for index in range(2):
            self.tenants[index] = self.create_tenant().json['tenant']
            password = common.unique_str()
            self.users[index] = self.create_user(
                user_password=password,
                tenant_id=self.tenants[index]['id']).json['user']
            self.users[index]['password'] = password

    def test_unassigned_user(self):
        """A user may not authenticate against a tenant they don't belong to."""
        self.authenticate(
            self.users[1]['name'], self.users[1]['password'],
            self.tenants[0]['id'], assert_status=401)
if __name__ == '__main__':
    # Allow running this test module directly with the unittest runner.
    unittest.main()
| 43.798867
| 79
| 0.625186
| 1,769
| 15,461
| 5.27247
| 0.099491
| 0.035167
| 0.029592
| 0.027447
| 0.820199
| 0.785998
| 0.777099
| 0.765734
| 0.730031
| 0.71084
| 0
| 0.009727
| 0.22864
| 15,461
| 352
| 80
| 43.923295
| 0.772346
| 0.042817
| 0
| 0.731618
| 0
| 0
| 0.161615
| 0.012177
| 0
| 0
| 0
| 0
| 0.169118
| 1
| 0.095588
| false
| 0.150735
| 0.007353
| 0
| 0.121324
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
a053564c3722299b4e19c6c85d30e3fb6a395b00
| 16,157
|
py
|
Python
|
calc/lib/python3.6/site-packages/_rinterface_cffi_abi.py
|
mxc42/calc
|
09d68df01d4ed72e0524afa7b0b1b4f3755d8075
|
[
"MIT"
] | null | null | null |
calc/lib/python3.6/site-packages/_rinterface_cffi_abi.py
|
mxc42/calc
|
09d68df01d4ed72e0524afa7b0b1b4f3755d8075
|
[
"MIT"
] | null | null | null |
calc/lib/python3.6/site-packages/_rinterface_cffi_abi.py
|
mxc42/calc
|
09d68df01d4ed72e0524afa7b0b1b4f3755d8075
|
[
"MIT"
] | null | null | null |
# auto-generated file
import _cffi_backend
ffi = _cffi_backend.FFI('_rinterface_cffi_abi',
_version = 0x2601,
_types = b'\x00\x00\x1D\x0D\x00\x00\x00\x0F\x00\x00\x40\x0D\x00\x01\x1D\x03\x00\x00\x00\x0F\x00\x01\x1C\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x59\x0D\x00\x00\x03\x11\x00\x00\x1B\x01\x00\x00\x00\x0F\x00\x00\x60\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x24\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x01\x22\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x68\x0D\x00\x00\x03\x11\x00\x00\x1B\x01\x00\x00\x00\x0F\x00\x00\x99\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x29\x0D\x00\x01\x16\x03\x00\x01\x1A\x03\x00\x01\x1B\x03\x00\x00\x1E\x11\x00\x00\x1F\x11\x00\x00\x00\x0F\x00\x00\x29\x0D\x00\x01\x21\x03\x00\x00\x00\x0F\x00\x00\x29\x0D\x00\x00\x24\x11\x00\x00\xBA\x03\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x29\x0D\x00\x00\x07\x01\x00\x00\x32\x03\x00\x00\x00\x0F\x00\x00\x29\x0D\x00\x00\x07\x01\x00\x01\x21\x03\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x29\x0D\x00\x00\x07\x01\x00\x00\x24\x03\x00\x00\x37\x11\x00\x00\x24\x11\x00\x00\x00\x0F\x00\x00\x29\x0D\x00\x00\x07\x01\x00\x00\x37\x11\x00\x00\x37\x11\x00\x00\x24\x11\x00\x00\x01\x0B\x00\x00\x24\x11\x00\x00\x00\x0F\x00\x00\x29\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x29\x0D\x00\x00\x03\x11\x00\x00\x04\x0B\x00\x00\x40\x11\x00\x00\x40\x11\x00\x00\x24\x11\x00\x00\x00\x0F\x00\x00\x29\x0D\x00\x00\x03\x11\x00\x00\x1B\x01\x00\x00\x00\x0F\x00\x00\x29\x0D\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x0A\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x09\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x24\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x24\x11\x00\x00\x03\x0B\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x24\x11\x00\x00\x07\x01\x00\x00\x60\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\xC5\x03\x00\x01\x3D\x03\x00\x00\xB5\x03\x00\x00\x6F\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x40\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\
x00\x07\x01\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x07\x01\x00\x01\x19\x03\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x1B\x01\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x1B\x01\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x40\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x29\x03\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x6F\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x04\x01\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x08\x01\x00\x00\x1B\x01\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\xD2\x03\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x6F\x11\x00\x00\x00\x0F\x00\x00\x28\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\xBA\x0D\x00\x00\x03\x11\x00\x00\x1B\x01\x00\x00\x00\x0F\x00\x00\x6F\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x6F\x0D\x00\x00\x00\x0F\x00\x01\x3D\x0D\x00\x00\x02\x0B\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x01\x3D\x0D\x00\x00\x24\x11\x00\x00\x00\x0F\x00\x01\x3D\x0D\x00\x00\x24\x11\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x01\x3D\x0D\x00\x00\x24\x11\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x01\x3D\x0D\x00\x00\x99\x11\x00\x00\x2E\x11\x00\x01\x32\x03\x00\x00\x00\x0F\x00\x01\x3D\x0D\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x01\x3D\x0D\x00\x00\x07\x01\x00\x00\x2E\x11\x00\x00\x00\x0F\x00\x01\x3D\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x01\x3D\x0D\x00\x00\x03\x11\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x01\x3D\x0D\x00\
x00\x03\x11\x00\x00\x1B\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x01\x3D\x0D\x00\x00\x03\x11\x00\x00\x1B\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x01\x3D\x0D\x00\x00\x03\x11\x00\x00\x1B\x01\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x01\x3D\x0D\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x01\x3D\x0D\x00\x00\x03\x11\x00\x00\xF1\x03\x00\x00\x00\x0F\x00\x01\x3D\x0D\x00\x00\xE8\x11\x00\x00\x00\x0F\x00\x01\x3D\x0D\x00\x00\x00\x0F\x00\x00\x08\x09\x00\x01\x18\x03\x00\x00\x09\x09\x00\x00\x00\x0B\x00\x00\x01\x09\x00\x00\x02\x09\x00\x00\x59\x03\x00\x00\x07\x09\x00\x00\x04\x09\x00\x01\x20\x03\x00\x00\x05\x09\x00\x00\x02\x01\x00\x00\x68\x03\x00\x00\x23\x03\x00\x00\x26\x03\x00\x00\x30\x03\x00\x00\x35\x03\x00\x00\x3B\x03\x00\x00\x1C\x01\x00\x00\xA0\x03\x00\x00\x0A\x09\x00\x00\x0B\x09\x00\x00\x0C\x09\x00\x00\x0D\x09\x00\x00\x0E\x09\x00\x00\x0F\x09\x00\x00\x10\x09\x00\x00\x11\x09\x00\x00\x06\x09\x00\x00\x1A\x01\x00\x00\x00\x09\x00\x00\xBD\x03\x00\x00\xD4\x03\x00\x00\xD9\x03\x00\x00\xDC\x03\x00\x00\xE0\x03\x00\x00\xEA\x03\x00\x01\x07\x03\x00\x01\x14\x03\x00\x00\x00\x01',
_globals = (b'\xFF\xFF\xFF\x1FANYSXP',18,b'\x00\x00\x73\x23ATTRIB',0,b'\xFF\xFF\xFF\x1FBCODESXP',21,b'\xFF\xFF\xFF\x1FBUILTINSXP',8,b'\xFF\xFF\xFF\x0BBytes',0,b'\x00\x00\x73\x23CAR',0,b'\x00\x00\x73\x23CDR',0,b'\xFF\xFF\xFF\x0BCE_ANY',99,b'\xFF\xFF\xFF\x0BCE_BYTES',3,b'\xFF\xFF\xFF\x0BCE_LATIN1',2,b'\xFF\xFF\xFF\x0BCE_NATIVE',0,b'\xFF\xFF\xFF\x0BCE_SYMBOL',5,b'\xFF\xFF\xFF\x0BCE_UTF8',1,b'\xFF\xFF\xFF\x1FCHARSXP',9,b'\x00\x00\x73\x23CLOENV',0,b'\xFF\xFF\xFF\x1FCLOSXP',3,b'\x00\x00\x05\x23COMPLEX',0,b'\x00\x00\x08\x23COMPLEX_ELT',0,b'\xFF\xFF\xFF\x1FCPLXSXP',15,b'\xFF\xFF\xFF\x0BChars',1,b'\x00\x00\xCF\x23DATAPTR',0,b'\xFF\xFF\xFF\x1FDOTSXP',17,b'\x00\x00\x73\x23ENCLOS',0,b'\x00\x00\x43\x23ENVFLAGS',0,b'\xFF\xFF\xFF\x1FENVSXP',4,b'\xFF\xFF\xFF\x1FEXPRSXP',20,b'\xFF\xFF\xFF\x1FEXTPTRSXP',22,b'\xFF\xFF\xFF\x0BFALSE',0,b'\x00\x00\x73\x23FRAME',0,b'\xFF\xFF\xFF\x1FFREESXP',31,b'\xFF\xFF\xFF\x1FFUNSXP',99,b'\x00\x00\x73\x23HASHTAB',0,b'\x00\x00\x19\x23INTEGER',0,b'\x00\x00\x4D\x23INTEGER_ELT',0,b'\xFF\xFF\xFF\x1FINTSXP',13,b'\xFF\xFF\xFF\x1FLANGSXP',6,b'\xFF\xFF\xFF\x1FLGLSXP',10,b'\xFF\xFF\xFF\x1FLISTSXP',2,b'\x00\x00\x19\x23LOGICAL',0,b'\x00\x00\x4D\x23LOGICAL_ELT',0,b'\xFF\xFF\xFF\x1FNEWSXP',30,b'\xFF\xFF\xFF\x1FNILSXP',0,b'\xFF\xFF\xFF\x0BPARSE_EOF',4,b'\xFF\xFF\xFF\x0BPARSE_ERROR',3,b'\xFF\xFF\xFF\x0BPARSE_INCOMPLETE',2,b'\xFF\xFF\xFF\x0BPARSE_NULL',0,b'\xFF\xFF\xFF\x0BPARSE_OK',1,b'\x00\x00\x73\x23PRINTNAME',0,b'\xFF\xFF\xFF\x1FPROMSXP',5,b'\x00\x00\xC8\x23RAW',0,b'\xFF\xFF\xFF\x1FRAWSXP',24,b'\x00\x00\xCB\x23RAW_ELT',0,b'\x00\x00\x12\x23REAL',0,b'\xFF\xFF\xFF\x1FREALSXP',14,b'\x00\x00\x15\x23REAL_ELT',0,b'\x00\x00\x03\x21R_BaseEnv',0,b'\x00\x00\x03\x21R_BaseNamespace',0,b'\x00\x00\x03\x21R_BlankScalarString',0,b'\x00\x00\x03\x21R_BlankString',0,b'\x00\x00\x0F\x23R_CHAR',0,b'\x00\x01\x33\x21R_CStackLimit',0,b'\x00\x00\x03\x21R_ClassSymbol',0,b'\x00\x01\x14\x23R_CleanTempDir',0,b'\x00\x00\xF1\x23R_ClearExternalPtr',0,b'\x00\x01\x17\x21R_Consolefile',0,b'\x00\x01\x11\
x23R_DefParams',0,b'\x00\x00\x03\x21R_DimSymbol',0,b'\x00\x00\x03\x21R_EmptyEnv',0,b'\x00\x00\x02\x23R_EnvironmentIsLocked',0,b'\x00\x00\xCF\x23R_ExternalPtrAddr',0,b'\x00\x00\x03\x21R_GlobalEnv',0,b'\x00\x00\x40\x21R_Interactive',0,b'\x00\x00\xC0\x23R_MakeExternalPtr',0,b'\x00\x00\x03\x21R_MissingArg',0,b'\x00\x00\x29\x21R_NaInt',0,b'\x00\x00\x68\x21R_NaN',0,b'\x00\x00\x68\x21R_NaReal',0,b'\x00\x00\x03\x21R_NaString',0,b'\x00\x00\x03\x21R_NameSymbol',0,b'\x00\x00\x68\x21R_NegInf',0,b'\x00\x00\x03\x21R_NilValue',0,b'\x00\x01\x17\x21R_Outputfile',0,b'\x00\x00\x7E\x23R_ParseVector',0,b'\x00\x00\x68\x21R_PosInf',0,b'\x00\x00\xF1\x23R_PreserveObject',0,b'\x00\x01\x0D\x23R_RegisterCFinalizer',0,b'\x00\x00\xF1\x23R_ReleaseObject',0,b'\x00\x01\x14\x23R_RunExitFinalizers',0,b'\x00\x01\x11\x23R_SetParams',0,b'\x00\x00\x29\x21R_SignalHandlers',0,b'\x00\x01\x11\x23R_SizeFromEnv',0,b'\x00\x00\x03\x21R_UnboundValue',0,b'\x00\x00\xE5\x23R_common_command_line',0,b'\x00\x00\x8D\x23R_do_slot',0,b'\x00\x00\x9B\x23R_do_slot_assign',0,b'\x00\x01\x14\x23R_dot_Last',0,b'\x00\x01\x14\x23R_gc',0,b'\x00\x00\x00\x23R_getEmbeddingDllInfo',0,b'\x00\x00\x51\x23R_has_slot',0,b'\x00\x00\x76\x23R_lsInternal',0,b'\x00\x00\x46\x23R_nchar',0,b'\x00\x00\x1C\x23R_registerRoutines',0,b'\x00\x00\xED\x23R_set_command_line_arguments',0,b'\x00\x00\x6D\x23R_tryCatchError',0,b'\x00\x00\x96\x23R_tryEval',0,b'\x00\x01\x14\x23Rf_KillAllDevices',0,b'\x00\x00\x58\x23Rf_ScalarComplex',0,b'\x00\x00\x6A\x23Rf_ScalarInteger',0,b'\x00\x00\x6A\x23Rf_ScalarLogical',0,b'\x00\x00\xB9\x23Rf_ScalarRaw',0,b'\x00\x00\x67\x23Rf_ScalarReal',0,b'\x00\x00\x73\x23Rf_ScalarString',0,b'\x00\x00\x6A\x23Rf_allocList',0,b'\x00\x00\xBC\x23Rf_allocVector',0,b'\x00\x00\x73\x23Rf_asChar',0,b'\x00\x00\x9B\x23Rf_defineVar',0,b'\x00\x00\x73\x23Rf_duplicate',0,b'\x00\x00\x7A\x23Rf_elt',0,b'\x00\x00\xEA\x23Rf_endEmbeddedR',0,b'\x00\x00\x8D\x23Rf_eval',0,b'\x00\x00\x8D\x23Rf_findFun',0,b'\x00\x00\x8D\x23Rf_findVar',0,b'\x00\x00\x8D\x23Rf_findVarI
nFrame',0,b'\x00\x00\x91\x23Rf_findVarInFrame3',0,b'\x00\x00\x8D\x23Rf_getAttrib',0,b'\x00\x00\x0C\x23Rf_getCharCE',0,b'\x00\x00\xED\x23Rf_initEmbeddedR',0,b'\x00\x00\x2C\x23Rf_initialize_R',0,b'\x00\x00\x5B\x23Rf_install',0,b'\x00\x00\x73\x23Rf_installChar',0,b'\x00\x00\x02\x23Rf_isList',0,b'\x00\x00\x02\x23Rf_isNull',0,b'\x00\x00\x73\x23Rf_lang1',0,b'\x00\x00\x8D\x23Rf_lang2',0,b'\x00\x00\x9B\x23Rf_lang3',0,b'\x00\x00\xA0\x23Rf_lang4',0,b'\x00\x00\xA6\x23Rf_lang5',0,b'\x00\x00\xAD\x23Rf_lang6',0,b'\x00\x00\x43\x23Rf_length',0,b'\x00\x00\x5B\x23Rf_mkChar',0,b'\x00\x00\x5E\x23Rf_mkCharCE',0,b'\x00\x00\x62\x23Rf_mkCharLenCE',0,b'\x00\x00\x5B\x23Rf_mkString',0,b'\x00\x00\x8D\x23Rf_namesgets',0,b'\x00\x00\x7A\x23Rf_nthcdr',0,b'\x00\x00\x73\x23Rf_protect',0,b'\x00\x00\x9B\x23Rf_setAttrib',0,b'\x00\x00\xEA\x23Rf_unprotect',0,b'\x00\x00\x55\x23Rf_xlength',0,b'\xFF\xFF\xFF\x1FS4SXP',25,b'\xFF\xFF\xFF\x0BSA_DEFAULT',2,b'\xFF\xFF\xFF\x0BSA_NORESTORE',0,b'\xFF\xFF\xFF\x0BSA_NOSAVE',3,b'\xFF\xFF\xFF\x0BSA_RESTORE',1,b'\xFF\xFF\xFF\x0BSA_SAVE',4,b'\xFF\xFF\xFF\x0BSA_SAVEASK',5,b'\xFF\xFF\xFF\x0BSA_SUICIDE',6,b'\x00\x00\x8D\x23SETCAR',0,b'\x00\x00\x8D\x23SETCDR',0,b'\x00\x00\xF4\x23SET_ENVFLAGS',0,b'\x00\x00\xFD\x23SET_INTEGER_ELT',0,b'\x00\x00\xFD\x23SET_LOGICAL_ELT',0,b'\x00\x00\xF8\x23SET_REAL_ELT',0,b'\x00\x01\x02\x23SET_STRING_ELT',0,b'\x00\x00\x8D\x23SET_TAG',0,b'\x00\x00\x88\x23SET_VECTOR_ELT',0,b'\xFF\xFF\xFF\x1FSPECIALSXP',7,b'\x00\x00\xCF\x23STDVEC_DATAPTR',0,b'\x00\x00\x84\x23STRING_ELT',0,b'\xFF\xFF\xFF\x1FSTRSXP',16,b'\xFF\xFF\xFF\x1FSYMSXP',1,b'\x00\x00\x73\x23TAG',0,b'\xFF\xFF\xFF\x0BTRUE',1,b'\xFF\xFF\xFF\x1FVECSXP',19,b'\x00\x00\x84\x23VECTOR_ELT',0,b'\xFF\xFF\xFF\x1FWEAKREFSXP',23,b'\xFF\xFF\xFF\x0BWidth',2,b'\x00\x01\x3A\x21ptr_R_Busy',0,b'\x00\x01\x25\x21ptr_R_ChooseFile',0,b'\x00\x01\x36\x21ptr_R_CleanUp',0,b'\x00\x01\x3C\x21ptr_R_ClearerrConsole',0,b'\x00\x01\x23\x21ptr_R_EditFile',0,b'\x00\x01\x26\x21ptr_R_EditFiles',0,b'\x00\x01\x3C\x21ptr_R_FlushConsole'
,0,b'\x00\x01\x3C\x21ptr_R_ProcessEvents',0,b'\x00\x01\x24\x21ptr_R_ReadConsole',0,b'\x00\x01\x3C\x21ptr_R_ResetConsole',0,b'\x00\x01\x27\x21ptr_R_ShowFiles',0,b'\x00\x01\x37\x21ptr_R_ShowMessage',0,b'\x00\x01\x37\x21ptr_R_Suicide',0,b'\x00\x01\x38\x21ptr_R_WriteConsole',0,b'\x00\x01\x39\x21ptr_R_WriteConsoleEx',0,b'\x00\x01\x3B\x21ptr_R_addhistory',0,b'\x00\x01\x3B\x21ptr_R_loadhistory',0,b'\x00\x01\x3B\x21ptr_R_savehistory',0,b'\x00\x01\x29\x21ptr_do_dataentry',0,b'\x00\x01\x29\x21ptr_do_dataviewer',0,b'\x00\x01\x29\x21ptr_do_selectlist',0,b'\x00\x01\x14\x23setup_Rmainloop',0),
_struct_unions = ((b'\x00\x00\x01\x34\x00\x00\x00\x03$1',b'\x00\x01\x2D\x11primsxp',b'\x00\x01\x30\x11symsxp',b'\x00\x01\x2C\x11listsxp',b'\x00\x01\x2B\x11envsxp',b'\x00\x01\x2A\x11closxp',b'\x00\x01\x2E\x11promsxp'),(b'\x00\x00\x01\x1A\x00\x00\x00\x02$R_CMethodDef',b'\x00\x00\x24\x11name',b'\x00\x00\xC1\x11fun',b'\x00\x00\x29\x11numArgs',b'\x00\x01\x35\x11types'),(b'\x00\x00\x01\x1B\x00\x00\x00\x02$R_CallMethodDef',b'\x00\x00\x24\x11name',b'\x00\x00\xC1\x11fun',b'\x00\x00\x29\x11numArgs'),(b'\x00\x00\x00\x59\x00\x00\x00\x02$Rcomplex',b'\x00\x00\x68\x11r',b'\x00\x00\x68\x11i'),(b'\x00\x00\x01\x1E\x00\x00\x00\x03$SEXPREC_ALIGN',b'\x00\x01\x20\x11s',b'\x00\x00\x68\x11align'),(b'\x00\x00\x01\x20\x00\x00\x00\x02$VECTOR_SEXPREC',b'\x00\x01\x2F\x11sxpinfo',b'\x00\x00\x03\x11attrib',b'\x00\x00\x03\x11gengc_next_node',b'\x00\x00\x03\x11gengc_prev_node',b'\x00\x01\x31\x11vecsxp'),(b'\x00\x00\x01\x32\x00\x00\x00\x02$structRstart',b'\x00\x00\x40\x11R_Quiet',b'\x00\x00\x40\x11R_Slave',b'\x00\x00\x40\x11R_Interactive',b'\x00\x00\x40\x11R_Verbose',b'\x00\x00\x40\x11LoadSiteFile',b'\x00\x00\x40\x11LoadInitFile',b'\x00\x00\x40\x11DebugInitFile',b'\x00\x00\xD5\x11RestoreAction',b'\x00\x00\xD5\x11SaveAction',b'\x00\x01\x28\x11vsize',b'\x00\x01\x28\x11nsize',b'\x00\x01\x28\x11max_vsize',b'\x00\x01\x28\x11max_nsize',b'\x00\x01\x28\x11ppsize',b'\x00\x00\x29\x11NoRenviron'),(b'\x00\x00\x01\x1D\x00\x00\x00\x02SEXPREC',b'\x00\x01\x2F\x11sxpinfo',b'\x00\x00\x03\x11attrib',b'\x00\x00\x03\x11gengc_next_node',b'\x00\x00\x03\x11gengc_prev_node',b'\x00\x01\x34\x11u'),(b'\x00\x00\x01\x16\x00\x00\x00\x10_DllInfo',),(b'\x00\x00\x01\x18\x00\x00\x00\x10_IO_FILE',),(b'\x00\x00\x01\x2A\x00\x00\x00\x02closxp_struct',b'\x00\x00\x03\x11formals',b'\x00\x00\x03\x11body',b'\x00\x00\x03\x11env'),(b'\x00\x00\x01\x2B\x00\x00\x00\x02envsxp_struct',b'\x00\x00\x03\x11frame',b'\x00\x00\x03\x11enclos',b'\x00\x00\x03\x11hashtab'),(b'\x00\x00\x01\x2C\x00\x00\x00\x02listsxp_struct',b'\x00\x00\x03\x11carval',b'\x00\x00\
x03\x11cdrval',b'\x00\x00\x03\x11tagval'),(b'\x00\x00\x01\x2D\x00\x00\x00\x02primsxp_struct',b'\x00\x00\x29\x11offset'),(b'\x00\x00\x01\x2E\x00\x00\x00\x02promsxp_struct',b'\x00\x00\x03\x11value',b'\x00\x00\x03\x11expr',b'\x00\x00\x03\x11env'),(b'\x00\x00\x01\x2F\x00\x00\x00\x02sxpinfo_struct',b'\x00\x00\xBD\x13\x00\x00\x00\x05type',b'\x00\x00\xBD\x13\x00\x00\x00\x01scalar',b'\x00\x00\xBD\x13\x00\x00\x00\x01alt',b'\x00\x00\xBD\x13\x00\x00\x00\x01obj',b'\x00\x00\xBD\x13\x00\x00\x00\x10gp',b'\x00\x00\xBD\x13\x00\x00\x00\x01mark',b'\x00\x00\xBD\x13\x00\x00\x00\x01debug',b'\x00\x00\xBD\x13\x00\x00\x00\x01trace',b'\x00\x00\xBD\x13\x00\x00\x00\x01spare',b'\x00\x00\xBD\x13\x00\x00\x00\x01gcgen',b'\x00\x00\xBD\x13\x00\x00\x00\x03gccls',b'\x00\x00\xBD\x13\x00\x00\x00\x10named',b'\x00\x00\xBD\x13\x00\x00\x00\x20extra'),(b'\x00\x00\x01\x30\x00\x00\x00\x02symsxp_struct',b'\x00\x00\x03\x11pname',b'\x00\x00\x03\x11value',b'\x00\x00\x03\x11internal'),(b'\x00\x00\x01\x31\x00\x00\x00\x02vecsxp_struct',b'\x00\x00\x0A\x11length',b'\x00\x00\x0A\x11truelength')),
_enums = (b'\x00\x00\x01\x19\x00\x00\x00\x16$ParseStatus\x00PARSE_NULL,PARSE_OK,PARSE_INCOMPLETE,PARSE_ERROR,PARSE_EOF',b'\x00\x00\x00\x40\x00\x00\x00\x16$Rboolean\x00FALSE,TRUE',b'\x00\x00\x00\xD5\x00\x00\x00\x16$SA_TYPE\x00SA_NORESTORE,SA_RESTORE,SA_DEFAULT,SA_NOSAVE,SA_SAVE,SA_SAVEASK,SA_SUICIDE',b'\x00\x00\x00\x60\x00\x00\x00\x16$cetype_t\x00CE_NATIVE,CE_UTF8,CE_LATIN1,CE_BYTES,CE_SYMBOL,CE_ANY',b'\x00\x00\x00\x48\x00\x00\x00\x16$nchar_type\x00Bytes,Chars,Width'),
_typenames = (b'\x00\x00\x00\xC1DL_FUNC',b'\x00\x00\x01\x16DllInfo',b'\x00\x00\x01\x18FILE',b'\x00\x00\x01\x19ParseStatus',b'\x00\x00\x01\x0FR_CFinalizer_t',b'\x00\x00\x01\x1AR_CMethodDef',b'\x00\x00\x01\x1BR_CallMethodDef',b'\x00\x00\x01\x1BR_ExternalMethodDef',b'\x00\x00\x01\x1AR_FortranMethodDef',b'\x00\x00\x00\xBDR_NativePrimitiveArgType',b'\x00\x00\x00\x29R_len_t',b'\x00\x00\x00\x0AR_xlen_t',b'\x00\x00\x00\x40Rboolean',b'\x00\x00\x00\xBARbyte',b'\x00\x00\x00\x59Rcomplex',b'\x00\x00\x00\xE8Rstart',b'\x00\x00\x00\xD5SA_TYPE',b'\x00\x00\x00\x03SEXP',b'\x00\x00\x01\x1DSEXPREC',b'\x00\x00\x01\x1ESEXPREC_ALIGN',b'\x00\x00\x00\xBDSEXPTYPE',b'\x00\x00\x01\x1FVECSEXP',b'\x00\x00\x01\x20VECTOR_SEXPREC',b'\x00\x00\x00\x60cetype_t',b'\x00\x00\x00\x48nchar_type',b'\x00\x00\x01\x32structRstart'),
)
| 1,346.416667
| 6,590
| 0.751563
| 3,530
| 16,157
| 3.364306
| 0.142776
| 0.336477
| 0.129673
| 0.066689
| 0.509768
| 0.43289
| 0.337908
| 0.311132
| 0.28705
| 0.27745
| 0
| 0.306861
| 0.003218
| 16,157
| 11
| 6,591
| 1,468.818182
| 0.43055
| 0.001176
| 0
| 0
| 1
| 0.666667
| 0.881631
| 0.855602
| 0
| 1
| 0.000372
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
2681bd1cbbd180488bca8f880afe17697dd22e8f
| 6,897
|
py
|
Python
|
lib/animations.py
|
danechitoaie/entropy
|
70fd1bdd82281ec04e26b75fe399651ca82af2b3
|
[
"MIT"
] | null | null | null |
lib/animations.py
|
danechitoaie/entropy
|
70fd1bdd82281ec04e26b75fe399651ca82af2b3
|
[
"MIT"
] | null | null | null |
lib/animations.py
|
danechitoaie/entropy
|
70fd1bdd82281ec04e26b75fe399651ca82af2b3
|
[
"MIT"
] | null | null | null |
# Sublime Text 3 Modules
import sublime
# Lib Modules
from .constants import PROJECT_DATA_KEY
class EntropyServerConfigurationAnimation():
    """Status-bar "spinner" shown while the list of code directories loads.

    Cycles through ``self.frames`` every 500 ms via ``sublime.set_timeout``
    until :meth:`stop` is called, then clears the status message.
    """

    def __init__(self, window):
        # Status key is namespaced per class so animations don't clobber
        # each other's status-bar entries.
        self.SET_STATUS_KEY = "{0}__{1}".format(
            PROJECT_DATA_KEY, self.__class__.__name__)
        self.window = window
        self.view = window.active_view()
        self.run_animation = False
        self.current_frame = 0
        self.frame_text = "Retrieving the list of code directories"
        # All 18 frames in the original were byte-identical, so build the
        # list once instead of repeating the literal 18 times.
        # NOTE(review): if a moving-marker animation was intended, the
        # frame strings should differ -- confirm against upstream.
        self.frames = ["[ = ] {0}".format(self.frame_text)] * 18

    def start(self):
        """Reset state and begin cycling frames."""
        self.view.erase_status(self.SET_STATUS_KEY)
        self.run_animation = True
        self.current_frame = 0
        self.next_frame()

    def next_frame(self):
        """Show the current frame and schedule the next one (if running)."""
        self.view.set_status(self.SET_STATUS_KEY,
                             self.frames[self.current_frame])
        # Wrap around to frame 0 at the end of the cycle.
        self.current_frame = (self.current_frame + 1) % len(self.frames)
        if self.run_animation:
            # Bound method is already callable; no lambda wrapper needed.
            sublime.set_timeout(self.next_frame, 500)
        else:
            self.view.erase_status(self.SET_STATUS_KEY)

    def stop(self):
        """Stop the animation and clear the status message."""
        self.view.erase_status(self.SET_STATUS_KEY)
        self.run_animation = False
        self.current_frame = 0
class EntropyOnPostSaveAnimation():
    """Status-bar "spinner" shown while an on-save file upload is in flight.

    Cycles through ``self.frames`` every 500 ms via ``sublime.set_timeout``
    until :meth:`stop` is called, then clears the status message.
    """

    def __init__(self, view, file_path):
        # Status key is namespaced per class so animations don't clobber
        # each other's status-bar entries.
        self.SET_STATUS_KEY = "{0}__{1}".format(
            PROJECT_DATA_KEY, self.__class__.__name__)
        self.window = view.window()
        self.view = view
        self.run_animation = False
        self.current_frame = 0
        # Long paths are truncated to the trailing 61 chars (64 total with
        # the "..." prefix) so the status line stays readable.
        if len(file_path) <= 64:
            shown_path = file_path
        else:
            shown_path = "...{0}".format(file_path[-61:])
        self.frame_text = "Uploading {0}".format(shown_path)
        # All 18 frames in the original were byte-identical, so build the
        # list once instead of repeating the literal 18 times.
        # NOTE(review): if a moving-marker animation was intended, the
        # frame strings should differ -- confirm against upstream.
        self.frames = ["[ = ] {0}".format(self.frame_text)] * 18

    def start(self):
        """Reset state and begin cycling frames."""
        self.view.erase_status(self.SET_STATUS_KEY)
        self.run_animation = True
        self.current_frame = 0
        self.next_frame()

    def next_frame(self):
        """Show the current frame and schedule the next one (if running)."""
        self.view.set_status(self.SET_STATUS_KEY,
                             self.frames[self.current_frame])
        # Wrap around to frame 0 at the end of the cycle.
        self.current_frame = (self.current_frame + 1) % len(self.frames)
        if self.run_animation:
            # Bound method is already callable; no lambda wrapper needed.
            sublime.set_timeout(self.next_frame, 500)
        else:
            self.view.erase_status(self.SET_STATUS_KEY)

    def stop(self):
        """Stop the animation and clear the status message."""
        self.view.erase_status(self.SET_STATUS_KEY)
        self.run_animation = False
        self.current_frame = 0
class EntropyCleanProjectAnimation():
    """Status-bar "spinner" shown while the project is being cleaned up.

    Cycles through ``self.frames`` every 500 ms via ``sublime.set_timeout``
    until :meth:`stop` is called, then clears the status message.
    """

    def __init__(self, window):
        # Status key is namespaced per class so animations don't clobber
        # each other's status-bar entries.
        self.SET_STATUS_KEY = "{0}__{1}".format(
            PROJECT_DATA_KEY, self.__class__.__name__)
        self.window = window
        self.view = window.active_view()
        self.run_animation = False
        self.current_frame = 0
        self.frame_text = "Cleaning up the project"
        # All 18 frames in the original were byte-identical, so build the
        # list once instead of repeating the literal 18 times.
        # NOTE(review): if a moving-marker animation was intended, the
        # frame strings should differ -- confirm against upstream.
        self.frames = ["[ = ] {0}".format(self.frame_text)] * 18

    def start(self):
        """Reset state and begin cycling frames."""
        self.view.erase_status(self.SET_STATUS_KEY)
        self.run_animation = True
        self.current_frame = 0
        self.next_frame()

    def next_frame(self):
        """Show the current frame and schedule the next one (if running)."""
        self.view.set_status(self.SET_STATUS_KEY,
                             self.frames[self.current_frame])
        # Wrap around to frame 0 at the end of the cycle.
        self.current_frame = (self.current_frame + 1) % len(self.frames)
        if self.run_animation:
            # Bound method is already callable; no lambda wrapper needed.
            sublime.set_timeout(self.next_frame, 500)
        else:
            self.view.erase_status(self.SET_STATUS_KEY)

    def stop(self):
        """Stop the animation and clear the status message."""
        self.view.erase_status(self.SET_STATUS_KEY)
        self.run_animation = False
        self.current_frame = 0
| 41.053571
| 125
| 0.482674
| 713
| 6,897
| 4.388499
| 0.079944
| 0.16395
| 0.236817
| 0.276127
| 0.896453
| 0.896453
| 0.896453
| 0.896453
| 0.896453
| 0.896453
| 0
| 0.021026
| 0.372481
| 6,897
| 167
| 126
| 41.299401
| 0.701941
| 0.00493
| 0
| 0.9
| 0
| 0
| 0.156997
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.085714
| false
| 0
| 0.014286
| 0
| 0.121429
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
cd0a9ae59320524fe5dcbd94d9b8a8cd8af48a14
| 8
|
py
|
Python
|
Src/Hosts/Silverlight/Tests/tests/regressions/fixtures/x_dividebyzero.py
|
jdhardy/dlr
|
dca078fbf9d103fad4dcabda76795a23d82106bc
|
[
"Apache-2.0"
] | null | null | null |
Src/Hosts/Silverlight/Tests/tests/regressions/fixtures/x_dividebyzero.py
|
jdhardy/dlr
|
dca078fbf9d103fad4dcabda76795a23d82106bc
|
[
"Apache-2.0"
] | null | null | null |
Src/Hosts/Silverlight/Tests/tests/regressions/fixtures/x_dividebyzero.py
|
jdhardy/dlr
|
dca078fbf9d103fad4dcabda76795a23d82106bc
|
[
"Apache-2.0"
] | null | null | null |
2 / 0
| 2
| 5
| 0.25
| 2
| 8
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 0.625
| 8
| 3
| 6
| 2.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
2699e9b505df391075fd2b3100d0b62aee9261f6
| 4,483
|
py
|
Python
|
defences/cifar10.py
|
calinbiberea/imperial-individual-project
|
86f224f183b8348d21b4c7a4aed408cd1ca41df1
|
[
"MIT"
] | null | null | null |
defences/cifar10.py
|
calinbiberea/imperial-individual-project
|
86f224f183b8348d21b4c7a4aed408cd1ca41df1
|
[
"MIT"
] | null | null | null |
defences/cifar10.py
|
calinbiberea/imperial-individual-project
|
86f224f183b8348d21b4c7a4aed408cd1ca41df1
|
[
"MIT"
] | null | null | null |
# This is a wrapper for keeping evidence of all the defences implemented for CIFAR-10
# Imports all the module paths
import sys
sys.path.append("../../")
import defences.CIFAR10.standard_training as standard_training_utils
import defences.CIFAR10.adversarial_training as adversarial_training_utils
import defences.CIFAR10.dual_adversarial_training as dual_adversarial_training_utils
import defences.CIFAR10.regularization as regularization_utils
import defences.CIFAR10.framework as framework
def standard_training(
    trainSetLoader,
    long_training=True,
    load_if_available=False,
    load_path="../models_data/CIFAR10/cifar10_standard"
):
    """Delegate plain (undefended) CIFAR-10 training to the utils module."""
    return standard_training_utils.standard_training(
        trainSetLoader, long_training, load_if_available, load_path
    )
def adversarial_training(
    trainSetLoader,
    attack_name,
    attack_function,
    long_training=True,
    load_if_available=False,
    load_path="../models_data/CIFAR10/cifar10_adversarial",
    **kwargs
):
    """Delegate CIFAR-10 adversarial training to the utils module."""
    positional = (trainSetLoader, attack_name, attack_function,
                  long_training, load_if_available, load_path)
    return adversarial_training_utils.adversarial_training(*positional, **kwargs)
def cw_adversarial_training(
    trainSetLoader,
    long_training=True,
    load_if_available=False,
    load_path="../models_data/CIFAR10/cifar10_adversarial",
    **kwargs
):
    """Delegate Carlini-Wagner adversarial training for CIFAR-10."""
    positional = (trainSetLoader, long_training, load_if_available, load_path)
    return adversarial_training_utils.cw_adversarial_training(*positional, **kwargs)
def interpolated_adversarial_training(
    trainSetLoader,
    attack_name,
    attack_function,
    long_training=True,
    load_if_available=False,
    clip=True,
    verbose=False,
    test=False,
    load_path="../models_data/CIFAR10/cifar10_interpolated_adversarial",
    **kwargs
):
    """Delegate interpolated adversarial training for CIFAR-10."""
    positional = (trainSetLoader, attack_name, attack_function, long_training,
                  load_if_available, clip, verbose, test, load_path)
    return adversarial_training_utils.interpolated_adversarial_training(
        *positional, **kwargs
    )
def dual_adversarial_training(
    trainSetLoader,
    attack_function1,
    attack_function2,
    long_training=True,
    load_if_available=False,
    load_path="../models_data/CIFAR10/cifar10_dual",
    **kwargs
):
    """Delegate two-attack adversarial training for CIFAR-10."""
    positional = (trainSetLoader, attack_function1, attack_function2,
                  long_training, load_if_available, load_path)
    return dual_adversarial_training_utils.dual_adversarial_training(*positional, **kwargs)
def triple_adversarial_training(
    trainSetLoader,
    attack_function1,
    attack_function2,
    attack_function3,
    long_training=True,
    load_if_available=False,
    load_path="../models_data/CIFAR10/cifar10_triple",
    **kwargs
):
    """Delegate three-attack adversarial training for CIFAR-10."""
    positional = (trainSetLoader, attack_function1, attack_function2,
                  attack_function3, long_training, load_if_available, load_path)
    return dual_adversarial_training_utils.triple_adversarial_training(*positional, **kwargs)
def jacobian_training(
    trainSetLoader,
    long_training=True,
    load_if_available=False,
    load_path="../models_data/CIFAR10/cifar10_jacobian",
    **kwargs
):
    """Delegate Jacobian-regularized training for CIFAR-10."""
    positional = (trainSetLoader, long_training, load_if_available, load_path)
    return regularization_utils.jacobian_training(*positional, **kwargs)
def ALP_training(
    trainSetLoader,
    attack_name,
    attack_function,
    long_training=True,
    load_if_available=False,
    load_path="../models_data/CIFAR10/cifar10_alp",
    **kwargs
):
    """Delegate Adversarial Logit Pairing (ALP) training for CIFAR-10."""
    positional = (trainSetLoader, attack_name, attack_function,
                  long_training, load_if_available, load_path)
    return regularization_utils.ALP_training(*positional, **kwargs)
def jacobian_ALP_training(
    trainSetLoader,
    attack_name,
    attack_function,
    long_training=True,
    load_if_available=False,
    load_path="../models_data/CIFAR10/cifar10_jacobian_alp",
    **kwargs
):
    """Delegate combined Jacobian + ALP training for CIFAR-10.

    Fix: the default ``load_path`` previously pointed at
    ``../models_data/FashionMNIST/fashion_mnist_alp`` — a copy-paste
    leftover from the FashionMNIST wrapper — so this CIFAR-10 wrapper
    loaded/saved models under the wrong dataset directory. It now follows
    the ``../models_data/CIFAR10/cifar10_*`` convention used by every
    sibling function in this module.
    """
    return regularization_utils.jacobian_ALP_training(
        trainSetLoader,
        attack_name,
        attack_function,
        long_training,
        load_if_available,
        load_path,
        **kwargs
    )
def framework_training(
    trainSetLoader,
    attack_function1,
    attack_function2,
    long_training=True,
    load_if_available=False,
    load_path="../models_data/CIFAR10/cifar10_framework",
    **kwargs
):
    """Delegate the combined-defence framework training for CIFAR-10.

    Fix: the default ``load_path`` previously pointed at
    ``../models_data/FashionMNIST/fashion_mnist_alp`` — a copy-paste
    leftover from another dataset's wrapper — so this CIFAR-10 wrapper
    loaded/saved models under the wrong dataset directory. It now follows
    the ``../models_data/CIFAR10/cifar10_*`` convention used by every
    sibling function in this module.
    """
    return framework.framework_training(
        trainSetLoader,
        attack_function1,
        attack_function2,
        long_training,
        load_if_available,
        load_path,
        **kwargs
    )
| 21.762136
| 85
| 0.729199
| 484
| 4,483
| 6.357438
| 0.121901
| 0.142996
| 0.097498
| 0.064998
| 0.862203
| 0.816055
| 0.737082
| 0.725057
| 0.725057
| 0.699708
| 0
| 0.01608
| 0.195405
| 4,483
| 205
| 86
| 21.868293
| 0.836984
| 0.024983
| 0
| 0.723757
| 0
| 0
| 0.095925
| 0.094551
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055249
| false
| 0
| 0.033149
| 0.055249
| 0.143646
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
26dacd717cacfdcb0b242ca040e7179539700d6b
| 7,843
|
py
|
Python
|
example_model/policy/cnn/discrete.py
|
SunandBean/tensorflow_RL
|
a248cbfb99b2041f6f7cc008fcad53fb83ac486e
|
[
"MIT"
] | 60
|
2019-01-29T14:13:00.000Z
|
2020-11-24T09:08:05.000Z
|
example_model/policy/cnn/discrete.py
|
SunandBean/tensorflow_RL
|
a248cbfb99b2041f6f7cc008fcad53fb83ac486e
|
[
"MIT"
] | 2
|
2019-08-14T06:44:32.000Z
|
2020-11-12T12:57:55.000Z
|
example_model/policy/cnn/discrete.py
|
SunandBean/tensorflow_RL
|
a248cbfb99b2041f6f7cc008fcad53fb83ac486e
|
[
"MIT"
] | 37
|
2019-01-22T05:19:34.000Z
|
2021-04-12T02:27:50.000Z
|
import tensorflow as tf
import numpy as np
class CNNLSTMActor:
    """Actor network: a shared per-frame CNN feeds a GRU stack whose last
    output drives a softmax policy head (TF1-style graph construction)."""

    def __init__(self, name, window_size, obs_stack, output_size, lstm_units, lstm_layers):
        # window_size: height/width of the square observation frames.
        # obs_stack: number of stacked frames, treated as a time sequence.
        # lstm_units/lstm_layers: size and depth of the recurrent encoder.
        self.window_size = window_size
        self.output_size = output_size
        self.obs_stack = obs_stack
        self.reuse = []
        # First frame creates the conv variables; later frames reuse them so
        # all time steps share one CNN.
        for i in range(self.obs_stack):
            if i == 0:
                self.reuse.append(False)
            else:
                self.reuse.append(True)
        self.lstm_list = [lstm_units for i in range(lstm_layers)]
        with tf.variable_scope(name):
            # Input: [batch, H, W, obs_stack]; expand to 5-D then slice one
            # single-channel image per stacked frame.
            self.input = tf.placeholder(dtype=tf.float32, shape=[None, self.window_size, self.window_size, self.obs_stack])
            self.expand_input = tf.expand_dims(self.input, axis=3)
            self.split = [self.expand_input[:, :, :, :, i] for i in range(self.obs_stack)]
            self.conv1 = [tf.layers.conv2d(inputs=self.split[i], filters=8, kernel_size=[8, 8], strides=[4, 4], padding='VALID', activation=tf.nn.relu, name='conv1', reuse=self.reuse[i]) for i in range(self.obs_stack)]
            self.conv2 = [tf.layers.conv2d(inputs=self.conv1[i], filters=16, kernel_size=[4, 4], strides=[2, 2], padding='VALID', activation=tf.nn.relu, name='conv2', reuse=self.reuse[i]) for i in range(self.obs_stack)]
            self.conv3 = [tf.layers.conv2d(inputs=self.conv2[i], filters=16, kernel_size=[3, 3], strides=[1, 1], padding='VALID', activation=tf.nn.relu, name='conv3', reuse=self.reuse[i]) for i in range(self.obs_stack)]
            # NOTE(review): the 7*7 flatten assumes a specific window_size
            # (84 with these kernels/strides) — confirm against callers.
            self.reshape = [tf.reshape(self.conv3[i], [-1, 7 * 7 * 16]) for i in range(self.obs_stack)]
            # Stack per-frame features into [batch, obs_stack, features] for the RNN.
            self.concat = tf.stack(self.reshape, axis=1)
            enc_cell = [tf.nn.rnn_cell.GRUCell(size) for size in self.lstm_list]
            enc_cell = tf.nn.rnn_cell.MultiRNNCell(enc_cell)
            self.outputs_enc, enc_states = tf.nn.dynamic_rnn(cell=enc_cell, inputs=self.concat, dtype=tf.float32)
            # Use only the final time step's output for the policy head.
            self.last_layer = self.outputs_enc[:, -1]
            self.actor = tf.layers.dense(inputs=self.last_layer, units=self.output_size, activation=tf.nn.softmax)
            self.scope = tf.get_variable_scope().name

    def get_action_prob(self, obs):
        # NOTE(review): self.sess / self.act_probs / self.obs are never set in
        # this class — calling this raises AttributeError unless a caller
        # attaches them externally; confirm intended usage.
        return self.sess.run(self.act_probs, feed_dict={self.obs: obs})

    def get_variables(self):
        # All variables (trainable or not) created under this network's scope.
        return tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, self.scope)

    def get_trainable_variables(self):
        # Only the trainable variables under this network's scope.
        return tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, self.scope)
class CNNLSTMCritic:
    """Critic network: same CNN+GRU encoder as CNNLSTMActor, but with a
    single linear value head instead of a softmax policy."""

    def __init__(self, name, window_size, obs_stack, output_size, lstm_units, lstm_layers):
        # output_size is stored but unused here — the value head is fixed to
        # one unit (kept for signature symmetry with CNNLSTMActor).
        self.window_size = window_size
        self.output_size = output_size
        self.obs_stack = obs_stack
        self.reuse = []
        # First frame creates the conv variables; later frames reuse them.
        for i in range(self.obs_stack):
            if i == 0:
                self.reuse.append(False)
            else:
                self.reuse.append(True)
        self.lstm_list = [lstm_units for i in range(lstm_layers)]
        with tf.variable_scope(name):
            # Input: [batch, H, W, obs_stack]; slice one single-channel image
            # per stacked frame.
            self.input = tf.placeholder(dtype=tf.float32, shape=[None, self.window_size, self.window_size, self.obs_stack])
            self.expand_input = tf.expand_dims(self.input, axis=3)
            self.split = [self.expand_input[:, :, :, :, i] for i in range(self.obs_stack)]
            self.conv1 = [tf.layers.conv2d(inputs=self.split[i], filters=8, kernel_size=[8, 8], strides=[4, 4], padding='VALID', activation=tf.nn.relu, name='conv1', reuse=self.reuse[i]) for i in range(self.obs_stack)]
            self.conv2 = [tf.layers.conv2d(inputs=self.conv1[i], filters=16, kernel_size=[4, 4], strides=[2, 2], padding='VALID', activation=tf.nn.relu, name='conv2', reuse=self.reuse[i]) for i in range(self.obs_stack)]
            self.conv3 = [tf.layers.conv2d(inputs=self.conv2[i], filters=16, kernel_size=[3, 3], strides=[1, 1], padding='VALID', activation=tf.nn.relu, name='conv3', reuse=self.reuse[i]) for i in range(self.obs_stack)]
            # NOTE(review): the 7*7 flatten assumes a specific window_size
            # (84 with these kernels/strides) — confirm against callers.
            self.reshape = [tf.reshape(self.conv3[i], [-1, 7 * 7 * 16]) for i in range(self.obs_stack)]
            self.concat = tf.stack(self.reshape, axis=1)
            enc_cell = [tf.nn.rnn_cell.GRUCell(size) for size in self.lstm_list]
            enc_cell = tf.nn.rnn_cell.MultiRNNCell(enc_cell)
            self.outputs_enc, enc_states = tf.nn.dynamic_rnn(cell=enc_cell, inputs=self.concat, dtype=tf.float32)
            # Final time step's output feeds the scalar value head.
            self.last_layer = self.outputs_enc[:, -1]
            self.critic = tf.layers.dense(inputs=self.last_layer, units=1, activation=None)
            self.scope = tf.get_variable_scope().name

    def get_action_prob(self, obs):
        # NOTE(review): self.sess / self.act_probs / self.obs are never set in
        # this class — calling this raises AttributeError unless a caller
        # attaches them externally; likely copy-pasted from the actor.
        return self.sess.run(self.act_probs, feed_dict={self.obs: obs})

    def get_variables(self):
        # All variables (trainable or not) created under this network's scope.
        return tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, self.scope)

    def get_trainable_variables(self):
        # Only the trainable variables under this network's scope.
        return tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, self.scope)
class CNNActor:
    """Feed-forward CNN actor: three conv layers and a dense layer feeding a
    softmax policy head (TF1-style graph construction)."""

    def __init__(self, name, window_size, obs_stack, output_size):
        # window_size: spatial size of the square input; obs_stack: channels.
        self.window_size = window_size
        self.output_size = output_size
        self.obs_stack = obs_stack
        with tf.variable_scope(name):
            self.input = tf.placeholder(dtype=tf.float32, shape=[None, window_size, window_size, obs_stack])
            self.conv1 = tf.layers.conv2d(inputs=self.input, filters=32, kernel_size=[8, 8], strides=[4, 4], padding='VALID', activation=tf.nn.relu)
            self.conv2 = tf.layers.conv2d(inputs=self.conv1, filters=64, kernel_size=[4, 4], strides=[2, 2], padding='VALID', activation=tf.nn.relu)
            self.conv3 = tf.layers.conv2d(inputs=self.conv2, filters=64, kernel_size=[3, 3], strides=[1, 1], padding='VALID', activation=tf.nn.relu)
            # NOTE(review): the 7*7 flatten assumes a specific window_size
            # (84 with these kernels/strides) — confirm against callers.
            self.reshape = tf.reshape(self.conv3, [-1, 7 * 7 * 64])
            self.dense_3 = tf.layers.dense(inputs=self.reshape, units=512, activation=tf.nn.relu)
            self.actor = tf.layers.dense(inputs=self.dense_3, units=self.output_size, activation=tf.nn.softmax)
            self.scope = tf.get_variable_scope().name

    def get_action_prob(self, obs):
        # NOTE(review): self.sess / self.act_probs / self.obs are never set in
        # this class — calling this raises AttributeError unless a caller
        # attaches them externally; confirm intended usage.
        return self.sess.run(self.act_probs, feed_dict={self.obs: obs})

    def get_variables(self):
        # All variables (trainable or not) created under this network's scope.
        return tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, self.scope)

    def get_trainable_variables(self):
        # Only the trainable variables under this network's scope.
        return tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, self.scope)
class CNNCritic:
    """Feed-forward CNN critic: same conv/dense trunk as CNNActor, with a
    single linear unit as the state-value head."""

    def __init__(self, name, window_size, obs_stack):
        # window_size: spatial size of the square input; obs_stack: channels.
        self.window_size = window_size
        self.obs_stack = obs_stack
        with tf.variable_scope(name):
            self.input = tf.placeholder(dtype=tf.float32, shape=[None, window_size, window_size, obs_stack])
            self.conv1 = tf.layers.conv2d(inputs=self.input, filters=32, kernel_size=[8, 8], strides=[4, 4], padding='VALID', activation=tf.nn.relu)
            self.conv2 = tf.layers.conv2d(inputs=self.conv1, filters=64, kernel_size=[4, 4], strides=[2, 2], padding='VALID', activation=tf.nn.relu)
            self.conv3 = tf.layers.conv2d(inputs=self.conv2, filters=64, kernel_size=[3, 3], strides=[1, 1], padding='VALID', activation=tf.nn.relu)
            # NOTE(review): the 7*7 flatten assumes a specific window_size
            # (84 with these kernels/strides) — confirm against callers.
            self.reshape = tf.reshape(self.conv3, [-1, 7 * 7 * 64])
            self.dense_3 = tf.layers.dense(inputs=self.reshape, units=512, activation=tf.nn.relu)
            self.critic = tf.layers.dense(inputs=self.dense_3, units=1, activation=None)
            self.scope = tf.get_variable_scope().name

    def get_action_prob(self, obs):
        # NOTE(review): self.sess / self.act_probs / self.obs are never set in
        # this class — calling this raises AttributeError unless a caller
        # attaches them externally; likely copy-pasted from the actor.
        return self.sess.run(self.act_probs, feed_dict={self.obs: obs})

    def get_variables(self):
        # All variables (trainable or not) created under this network's scope.
        return tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, self.scope)

    def get_trainable_variables(self):
        # Only the trainable variables under this network's scope.
        return tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, self.scope)
| 54.465278
| 219
| 0.653321
| 1,147
| 7,843
| 4.302528
| 0.088928
| 0.04539
| 0.043769
| 0.031206
| 0.983384
| 0.983384
| 0.979331
| 0.974873
| 0.943465
| 0.943465
| 0
| 0.026464
| 0.209869
| 7,843
| 144
| 220
| 54.465278
| 0.769889
| 0
| 0
| 0.88785
| 0
| 0
| 0.011474
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.149533
| false
| 0
| 0.018692
| 0.11215
| 0.317757
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 8
|
f82d2c8532f916a0b8fe97ff24880215fa607b40
| 75,882
|
py
|
Python
|
spoonacular/com/spoonacular/meal_planning_api.py
|
Lowe-Man/spoonacular-python-api
|
c5522abdc2ef48258434e22b4f2038d64bcebd86
|
[
"MIT"
] | 21
|
2019-08-09T18:53:26.000Z
|
2022-03-14T22:10:10.000Z
|
spoonacular/com/spoonacular/meal_planning_api.py
|
Lowe-Man/spoonacular-python-api
|
c5522abdc2ef48258434e22b4f2038d64bcebd86
|
[
"MIT"
] | null | null | null |
spoonacular/com/spoonacular/meal_planning_api.py
|
Lowe-Man/spoonacular-python-api
|
c5522abdc2ef48258434e22b4f2038d64bcebd86
|
[
"MIT"
] | 55
|
2019-08-13T17:52:47.000Z
|
2022-03-27T04:29:34.000Z
|
# coding: utf-8
"""
spoonacular API
The spoonacular Nutrition, Recipe, and Food API allows you to access over 380,000 recipes, thousands of ingredients, 800,000 food products, and 100,000 menu items. Our food ontology and semantic recipe search engine makes it possible to search for recipes using natural language queries, such as \"gluten free brownies without sugar\" or \"low fat vegan cupcakes.\" You can automatically calculate the nutritional information for any recipe, analyze recipe costs, visualize ingredient lists, find recipes for what's in your fridge, find recipes based on special diets, nutritional requirements, or favorite ingredients, classify recipes into types and cuisines, convert ingredient amounts, or even compute an entire meal plan. With our powerful API, you can create many kinds of food and especially nutrition apps. Special diets/dietary requirements currently available include: vegan, vegetarian, pescetarian, gluten free, grain free, dairy free, high protein, whole 30, low sodium, low carb, Paleo, ketogenic, FODMAP, and Primal. # noqa: E501
The version of the OpenAPI document: 1.0
Contact: mail@spoonacular.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from spoonacular.api_client import ApiClient
from spoonacular.exceptions import (
ApiTypeError,
ApiValueError
)
class MealPlanningApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def add_meal_plan_template(self, username, hash, inline_object6, **kwargs): # noqa: E501
"""Add Meal Plan Template # noqa: E501
Add a meal plan template for a user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_meal_plan_template(username, hash, inline_object6, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: The username. (required)
:param str hash: The private hash for the username. (required)
:param InlineObject6 inline_object6: (required)
:return: InlineResponse20040
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_meal_plan_template_with_http_info(username, hash, inline_object6, **kwargs) # noqa: E501
else:
(data) = self.add_meal_plan_template_with_http_info(username, hash, inline_object6, **kwargs) # noqa: E501
return data
    def add_meal_plan_template_with_http_info(self, username, hash, inline_object6, **kwargs):  # noqa: E501
        """Add Meal Plan Template  # noqa: E501

        Add a meal plan template for a user.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.add_meal_plan_template_with_http_info(username, hash, inline_object6, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str username: The username. (required)
        :param str hash: The private hash for the username. (required)
        :param InlineObject6 inline_object6: (required)
        :return: InlineResponse20040
            If the method is called asynchronously,
            returns the request thread.
        """
        local_var_params = locals()

        # Whitelist of accepted keyword arguments for this endpoint.
        all_params = ['username', 'hash', 'inline_object6']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Fail fast on any keyword argument the endpoint does not accept.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method add_meal_plan_template" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'username' is set
        if ('username' not in local_var_params or
                local_var_params['username'] is None):
            raise ApiValueError("Missing the required parameter `username` when calling `add_meal_plan_template`")  # noqa: E501
        # verify the required parameter 'hash' is set
        if ('hash' not in local_var_params or
                local_var_params['hash'] is None):
            raise ApiValueError("Missing the required parameter `hash` when calling `add_meal_plan_template`")  # noqa: E501
        # verify the required parameter 'inline_object6' is set
        if ('inline_object6' not in local_var_params or
                local_var_params['inline_object6'] is None):
            raise ApiValueError("Missing the required parameter `inline_object6` when calling `add_meal_plan_template`")  # noqa: E501

        collection_formats = {}

        # `username` travels in the URL path; `hash` in the query string.
        path_params = {}
        if 'username' in local_var_params:
            path_params['username'] = local_var_params['username']  # noqa: E501

        query_params = []
        if 'hash' in local_var_params:
            query_params.append(('hash', local_var_params['hash']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        # The InlineObject6 payload becomes the request body.
        body_params = None
        if 'inline_object6' in local_var_params:
            body_params = local_var_params['inline_object6']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            [''])  # noqa: E501

        # Authentication setting
        auth_settings = ['apiKeyScheme']  # noqa: E501

        return self.api_client.call_api(
            '/mealplanner/{username}/templates', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='InlineResponse20040',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def add_to_meal_plan(self, username, hash, inline_object4, **kwargs): # noqa: E501
"""Add to Meal Plan # noqa: E501
Add an item to the user's meal plan. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_to_meal_plan(username, hash, inline_object4, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: The username. (required)
:param str hash: The private hash for the username. (required)
:param InlineObject4 inline_object4: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_to_meal_plan_with_http_info(username, hash, inline_object4, **kwargs) # noqa: E501
else:
(data) = self.add_to_meal_plan_with_http_info(username, hash, inline_object4, **kwargs) # noqa: E501
return data
    def add_to_meal_plan_with_http_info(self, username, hash, inline_object4, **kwargs):  # noqa: E501
        """Add to Meal Plan  # noqa: E501

        Add an item to the user's meal plan.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.add_to_meal_plan_with_http_info(username, hash, inline_object4, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str username: The username. (required)
        :param str hash: The private hash for the username. (required)
        :param InlineObject4 inline_object4: (required)
        :return: object
            If the method is called asynchronously,
            returns the request thread.
        """
        local_var_params = locals()

        # Whitelist of accepted keyword arguments for this endpoint.
        all_params = ['username', 'hash', 'inline_object4']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Fail fast on any keyword argument the endpoint does not accept.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method add_to_meal_plan" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'username' is set
        if ('username' not in local_var_params or
                local_var_params['username'] is None):
            raise ApiValueError("Missing the required parameter `username` when calling `add_to_meal_plan`")  # noqa: E501
        # verify the required parameter 'hash' is set
        if ('hash' not in local_var_params or
                local_var_params['hash'] is None):
            raise ApiValueError("Missing the required parameter `hash` when calling `add_to_meal_plan`")  # noqa: E501
        # verify the required parameter 'inline_object4' is set
        if ('inline_object4' not in local_var_params or
                local_var_params['inline_object4'] is None):
            raise ApiValueError("Missing the required parameter `inline_object4` when calling `add_to_meal_plan`")  # noqa: E501

        collection_formats = {}

        # `username` travels in the URL path; `hash` in the query string.
        path_params = {}
        if 'username' in local_var_params:
            path_params['username'] = local_var_params['username']  # noqa: E501

        query_params = []
        if 'hash' in local_var_params:
            query_params.append(('hash', local_var_params['hash']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        # The InlineObject4 payload becomes the request body.
        body_params = None
        if 'inline_object4' in local_var_params:
            body_params = local_var_params['inline_object4']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['', 'application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['apiKeyScheme']  # noqa: E501

        return self.api_client.call_api(
            '/mealplanner/{username}/items', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def add_to_shopping_list(self, username, hash, inline_object9, **kwargs): # noqa: E501
"""Add to Shopping List # noqa: E501
Add an item to the current shopping list of a user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_to_shopping_list(username, hash, inline_object9, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: The username. (required)
:param str hash: The private hash for the username. (required)
:param InlineObject9 inline_object9: (required)
:return: InlineResponse20042
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_to_shopping_list_with_http_info(username, hash, inline_object9, **kwargs) # noqa: E501
else:
(data) = self.add_to_shopping_list_with_http_info(username, hash, inline_object9, **kwargs) # noqa: E501
return data
    def add_to_shopping_list_with_http_info(self, username, hash, inline_object9, **kwargs):  # noqa: E501
        """Add to Shopping List  # noqa: E501

        Add an item to the current shopping list of a user.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.add_to_shopping_list_with_http_info(username, hash, inline_object9, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str username: The username. (required)
        :param str hash: The private hash for the username. (required)
        :param InlineObject9 inline_object9: (required)
        :return: InlineResponse20042
            If the method is called asynchronously,
            returns the request thread.
        """
        local_var_params = locals()

        # Whitelist of accepted keyword arguments for this endpoint.
        all_params = ['username', 'hash', 'inline_object9']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Fail fast on any keyword argument the endpoint does not accept.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method add_to_shopping_list" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'username' is set
        if ('username' not in local_var_params or
                local_var_params['username'] is None):
            raise ApiValueError("Missing the required parameter `username` when calling `add_to_shopping_list`")  # noqa: E501
        # verify the required parameter 'hash' is set
        if ('hash' not in local_var_params or
                local_var_params['hash'] is None):
            raise ApiValueError("Missing the required parameter `hash` when calling `add_to_shopping_list`")  # noqa: E501
        # verify the required parameter 'inline_object9' is set
        if ('inline_object9' not in local_var_params or
                local_var_params['inline_object9'] is None):
            raise ApiValueError("Missing the required parameter `inline_object9` when calling `add_to_shopping_list`")  # noqa: E501

        collection_formats = {}

        # `username` travels in the URL path; `hash` in the query string.
        path_params = {}
        if 'username' in local_var_params:
            path_params['username'] = local_var_params['username']  # noqa: E501

        query_params = []
        if 'hash' in local_var_params:
            query_params.append(('hash', local_var_params['hash']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        # The InlineObject9 payload becomes the request body.
        body_params = None
        if 'inline_object9' in local_var_params:
            body_params = local_var_params['inline_object9']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['', 'application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['apiKeyScheme']  # noqa: E501

        return self.api_client.call_api(
            '/mealplanner/{username}/shopping-list/items', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='InlineResponse20042',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def clear_meal_plan_day(self, username, date, hash, inline_object3, **kwargs): # noqa: E501
"""Clear Meal Plan Day # noqa: E501
Delete all planned items from the user's meal plan for a specific day. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.clear_meal_plan_day(username, date, hash, inline_object3, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: The username. (required)
:param str date: The date in the format yyyy-mm-dd. (required)
:param str hash: The private hash for the username. (required)
:param InlineObject3 inline_object3: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.clear_meal_plan_day_with_http_info(username, date, hash, inline_object3, **kwargs) # noqa: E501
else:
(data) = self.clear_meal_plan_day_with_http_info(username, date, hash, inline_object3, **kwargs) # noqa: E501
return data
    def clear_meal_plan_day_with_http_info(self, username, date, hash, inline_object3, **kwargs):  # noqa: E501
        """Clear Meal Plan Day  # noqa: E501
        Delete all planned items from the user's meal plan for a specific day.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.clear_meal_plan_day_with_http_info(username, date, hash, inline_object3, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str username: The username. (required)
        :param str date: The date in the format yyyy-mm-dd. (required)
        :param str hash: The private hash for the username. (required)
        :param InlineObject3 inline_object3: (required)
        :return: object
        If the method is called asynchronously,
        returns the request thread.
        """
        # Snapshot the declared parameters (plus self and the raw kwargs
        # dict) before any other local is created, so the dict stays clean.
        local_var_params = locals()
        all_params = ['username', 'date', 'hash', 'inline_object3'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject unknown keyword arguments, then fold the accepted ones into
        # local_var_params so every option is read from one place below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method clear_meal_plan_day" % key
                )
            local_var_params[key] = val
        # The raw kwargs dict itself must not be treated as a request option.
        del local_var_params['kwargs']
        # verify the required parameter 'username' is set
        if ('username' not in local_var_params or
                local_var_params['username'] is None):
            raise ApiValueError("Missing the required parameter `username` when calling `clear_meal_plan_day`")  # noqa: E501
        # verify the required parameter 'date' is set
        if ('date' not in local_var_params or
                local_var_params['date'] is None):
            raise ApiValueError("Missing the required parameter `date` when calling `clear_meal_plan_day`")  # noqa: E501
        # verify the required parameter 'hash' is set
        if ('hash' not in local_var_params or
                local_var_params['hash'] is None):
            raise ApiValueError("Missing the required parameter `hash` when calling `clear_meal_plan_day`")  # noqa: E501
        # verify the required parameter 'inline_object3' is set
        if ('inline_object3' not in local_var_params or
                local_var_params['inline_object3'] is None):
            raise ApiValueError("Missing the required parameter `inline_object3` when calling `clear_meal_plan_day`")  # noqa: E501
        collection_formats = {}
        # username and date are substituted into the URL path template below.
        path_params = {}
        if 'username' in local_var_params:
            path_params['username'] = local_var_params['username']  # noqa: E501
        if 'date' in local_var_params:
            path_params['date'] = local_var_params['date']  # noqa: E501
        # The private hash travels as a query parameter, not in the body.
        query_params = []
        if 'hash' in local_var_params:
            query_params.append(('hash', local_var_params['hash']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'inline_object3' in local_var_params:
            body_params = local_var_params['inline_object3']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        # NOTE(review): '' presumably lets select_header_content_type fall
        # back to its default -- confirm against ApiClient.
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            [''])  # noqa: E501
        # Authentication setting
        auth_settings = ['apiKeyScheme']  # noqa: E501
        return self.api_client.call_api(
            '/mealplanner/{username}/day/{date}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def connect_user(self, body, **kwargs): # noqa: E501
"""Connect User # noqa: E501
In order to call user-specific endpoints, you need to connect your app's users to spoonacular users. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.connect_user(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param object body: (required)
:return: InlineResponse20043
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.connect_user_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.connect_user_with_http_info(body, **kwargs) # noqa: E501
return data
    def connect_user_with_http_info(self, body, **kwargs):  # noqa: E501
        """Connect User  # noqa: E501
        In order to call user-specific endpoints, you need to connect your app's users to spoonacular users.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.connect_user_with_http_info(body, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param object body: (required)
        :return: InlineResponse20043
        If the method is called asynchronously,
        returns the request thread.
        """
        # Snapshot the declared parameters (plus self and the raw kwargs
        # dict) before any other local is created, so the dict stays clean.
        local_var_params = locals()
        all_params = ['body'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject unknown keyword arguments, then fold the accepted ones into
        # local_var_params so every option is read from one place below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method connect_user" % key
                )
            local_var_params[key] = val
        # The raw kwargs dict itself must not be treated as a request option.
        del local_var_params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in local_var_params or
                local_var_params['body'] is None):
            raise ApiValueError("Missing the required parameter `body` when calling `connect_user`")  # noqa: E501
        collection_formats = {}
        # No path or query parameters: everything goes in the request body.
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in local_var_params:
            body_params = local_var_params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['', 'application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['apiKeyScheme']  # noqa: E501
        return self.api_client.call_api(
            '/users/connect', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='InlineResponse20043',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_from_meal_plan(self, username, id, hash, inline_object5, **kwargs): # noqa: E501
"""Delete from Meal Plan # noqa: E501
Delete an item from the user's meal plan. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_from_meal_plan(username, id, hash, inline_object5, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: The username. (required)
:param float id: The shopping list item id. (required)
:param str hash: The private hash for the username. (required)
:param InlineObject5 inline_object5: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_from_meal_plan_with_http_info(username, id, hash, inline_object5, **kwargs) # noqa: E501
else:
(data) = self.delete_from_meal_plan_with_http_info(username, id, hash, inline_object5, **kwargs) # noqa: E501
return data
    def delete_from_meal_plan_with_http_info(self, username, id, hash, inline_object5, **kwargs):  # noqa: E501
        """Delete from Meal Plan  # noqa: E501
        Delete an item from the user's meal plan.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_from_meal_plan_with_http_info(username, id, hash, inline_object5, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str username: The username. (required)
        :param float id: The shopping list item id. (required)
        :param str hash: The private hash for the username. (required)
        :param InlineObject5 inline_object5: (required)
        :return: object
        If the method is called asynchronously,
        returns the request thread.
        """
        # Snapshot the declared parameters (plus self and the raw kwargs
        # dict) before any other local is created, so the dict stays clean.
        local_var_params = locals()
        all_params = ['username', 'id', 'hash', 'inline_object5'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject unknown keyword arguments, then fold the accepted ones into
        # local_var_params so every option is read from one place below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_from_meal_plan" % key
                )
            local_var_params[key] = val
        # The raw kwargs dict itself must not be treated as a request option.
        del local_var_params['kwargs']
        # verify the required parameter 'username' is set
        if ('username' not in local_var_params or
                local_var_params['username'] is None):
            raise ApiValueError("Missing the required parameter `username` when calling `delete_from_meal_plan`")  # noqa: E501
        # verify the required parameter 'id' is set
        if ('id' not in local_var_params or
                local_var_params['id'] is None):
            raise ApiValueError("Missing the required parameter `id` when calling `delete_from_meal_plan`")  # noqa: E501
        # verify the required parameter 'hash' is set
        if ('hash' not in local_var_params or
                local_var_params['hash'] is None):
            raise ApiValueError("Missing the required parameter `hash` when calling `delete_from_meal_plan`")  # noqa: E501
        # verify the required parameter 'inline_object5' is set
        if ('inline_object5' not in local_var_params or
                local_var_params['inline_object5'] is None):
            raise ApiValueError("Missing the required parameter `inline_object5` when calling `delete_from_meal_plan`")  # noqa: E501
        collection_formats = {}
        # username and id are substituted into the URL path template below.
        path_params = {}
        if 'username' in local_var_params:
            path_params['username'] = local_var_params['username']  # noqa: E501
        if 'id' in local_var_params:
            path_params['id'] = local_var_params['id']  # noqa: E501
        # The private hash travels as a query parameter, not in the body.
        query_params = []
        if 'hash' in local_var_params:
            query_params.append(('hash', local_var_params['hash']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'inline_object5' in local_var_params:
            body_params = local_var_params['inline_object5']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        # NOTE(review): '' presumably lets select_header_content_type fall
        # back to its default -- confirm against ApiClient.
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            [''])  # noqa: E501
        # Authentication setting
        auth_settings = ['apiKeyScheme']  # noqa: E501
        return self.api_client.call_api(
            '/mealplanner/{username}/items/{id}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_from_shopping_list(self, username, id, hash, inline_object10, **kwargs): # noqa: E501
"""Delete from Shopping List # noqa: E501
Delete an item from the current shopping list of the user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_from_shopping_list(username, id, hash, inline_object10, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: The username. (required)
:param int id: The item's id. (required)
:param str hash: The private hash for the username. (required)
:param InlineObject10 inline_object10: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_from_shopping_list_with_http_info(username, id, hash, inline_object10, **kwargs) # noqa: E501
else:
(data) = self.delete_from_shopping_list_with_http_info(username, id, hash, inline_object10, **kwargs) # noqa: E501
return data
    def delete_from_shopping_list_with_http_info(self, username, id, hash, inline_object10, **kwargs):  # noqa: E501
        """Delete from Shopping List  # noqa: E501
        Delete an item from the current shopping list of the user.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_from_shopping_list_with_http_info(username, id, hash, inline_object10, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str username: The username. (required)
        :param int id: The item's id. (required)
        :param str hash: The private hash for the username. (required)
        :param InlineObject10 inline_object10: (required)
        :return: object
        If the method is called asynchronously,
        returns the request thread.
        """
        # Snapshot the declared parameters (plus self and the raw kwargs
        # dict) before any other local is created, so the dict stays clean.
        local_var_params = locals()
        all_params = ['username', 'id', 'hash', 'inline_object10'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject unknown keyword arguments, then fold the accepted ones into
        # local_var_params so every option is read from one place below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_from_shopping_list" % key
                )
            local_var_params[key] = val
        # The raw kwargs dict itself must not be treated as a request option.
        del local_var_params['kwargs']
        # verify the required parameter 'username' is set
        if ('username' not in local_var_params or
                local_var_params['username'] is None):
            raise ApiValueError("Missing the required parameter `username` when calling `delete_from_shopping_list`")  # noqa: E501
        # verify the required parameter 'id' is set
        if ('id' not in local_var_params or
                local_var_params['id'] is None):
            raise ApiValueError("Missing the required parameter `id` when calling `delete_from_shopping_list`")  # noqa: E501
        # verify the required parameter 'hash' is set
        if ('hash' not in local_var_params or
                local_var_params['hash'] is None):
            raise ApiValueError("Missing the required parameter `hash` when calling `delete_from_shopping_list`")  # noqa: E501
        # verify the required parameter 'inline_object10' is set
        if ('inline_object10' not in local_var_params or
                local_var_params['inline_object10'] is None):
            raise ApiValueError("Missing the required parameter `inline_object10` when calling `delete_from_shopping_list`")  # noqa: E501
        collection_formats = {}
        # username and id are substituted into the URL path template below.
        path_params = {}
        if 'username' in local_var_params:
            path_params['username'] = local_var_params['username']  # noqa: E501
        if 'id' in local_var_params:
            path_params['id'] = local_var_params['id']  # noqa: E501
        # The private hash travels as a query parameter, not in the body.
        query_params = []
        if 'hash' in local_var_params:
            query_params.append(('hash', local_var_params['hash']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'inline_object10' in local_var_params:
            body_params = local_var_params['inline_object10']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        # NOTE(review): '' presumably lets select_header_content_type fall
        # back to its default -- confirm against ApiClient.
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            [''])  # noqa: E501
        # Authentication setting
        auth_settings = ['apiKeyScheme']  # noqa: E501
        return self.api_client.call_api(
            '/mealplanner/{username}/shopping-list/items/{id}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_meal_plan_template(self, username, id, hash, inline_object7, **kwargs): # noqa: E501
"""Delete Meal Plan Template # noqa: E501
Delete a meal plan template for a user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_meal_plan_template(username, id, hash, inline_object7, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: The username. (required)
:param int id: The item's id. (required)
:param str hash: The private hash for the username. (required)
:param InlineObject7 inline_object7: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_meal_plan_template_with_http_info(username, id, hash, inline_object7, **kwargs) # noqa: E501
else:
(data) = self.delete_meal_plan_template_with_http_info(username, id, hash, inline_object7, **kwargs) # noqa: E501
return data
    def delete_meal_plan_template_with_http_info(self, username, id, hash, inline_object7, **kwargs):  # noqa: E501
        """Delete Meal Plan Template  # noqa: E501
        Delete a meal plan template for a user.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_meal_plan_template_with_http_info(username, id, hash, inline_object7, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str username: The username. (required)
        :param int id: The item's id. (required)
        :param str hash: The private hash for the username. (required)
        :param InlineObject7 inline_object7: (required)
        :return: object
        If the method is called asynchronously,
        returns the request thread.
        """
        # Snapshot the declared parameters (plus self and the raw kwargs
        # dict) before any other local is created, so the dict stays clean.
        local_var_params = locals()
        all_params = ['username', 'id', 'hash', 'inline_object7'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject unknown keyword arguments, then fold the accepted ones into
        # local_var_params so every option is read from one place below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_meal_plan_template" % key
                )
            local_var_params[key] = val
        # The raw kwargs dict itself must not be treated as a request option.
        del local_var_params['kwargs']
        # verify the required parameter 'username' is set
        if ('username' not in local_var_params or
                local_var_params['username'] is None):
            raise ApiValueError("Missing the required parameter `username` when calling `delete_meal_plan_template`")  # noqa: E501
        # verify the required parameter 'id' is set
        if ('id' not in local_var_params or
                local_var_params['id'] is None):
            raise ApiValueError("Missing the required parameter `id` when calling `delete_meal_plan_template`")  # noqa: E501
        # verify the required parameter 'hash' is set
        if ('hash' not in local_var_params or
                local_var_params['hash'] is None):
            raise ApiValueError("Missing the required parameter `hash` when calling `delete_meal_plan_template`")  # noqa: E501
        # verify the required parameter 'inline_object7' is set
        if ('inline_object7' not in local_var_params or
                local_var_params['inline_object7'] is None):
            raise ApiValueError("Missing the required parameter `inline_object7` when calling `delete_meal_plan_template`")  # noqa: E501
        collection_formats = {}
        # username and id are substituted into the URL path template below.
        path_params = {}
        if 'username' in local_var_params:
            path_params['username'] = local_var_params['username']  # noqa: E501
        if 'id' in local_var_params:
            path_params['id'] = local_var_params['id']  # noqa: E501
        # The private hash travels as a query parameter, not in the body.
        query_params = []
        if 'hash' in local_var_params:
            query_params.append(('hash', local_var_params['hash']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'inline_object7' in local_var_params:
            body_params = local_var_params['inline_object7']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        # NOTE(review): '' presumably lets select_header_content_type fall
        # back to its default -- confirm against ApiClient.
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            [''])  # noqa: E501
        # Authentication setting
        auth_settings = ['apiKeyScheme']  # noqa: E501
        return self.api_client.call_api(
            '/mealplanner/{username}/templates/{id}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def generate_meal_plan(self, **kwargs): # noqa: E501
"""Generate Meal Plan # noqa: E501
Generate a meal plan with three meals per day (breakfast, lunch, and dinner). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.generate_meal_plan(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str time_frame: Either for one \"day\" or an entire \"week\".
:param float target_calories: What is the caloric target for one day? The meal plan generator will try to get as close as possible to that goal.
:param str diet: Enter a diet that the meal plan has to adhere to. See a full list of supported diets.
:param str exclude: A comma-separated list of allergens or ingredients that must be excluded.
:return: InlineResponse20037
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.generate_meal_plan_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.generate_meal_plan_with_http_info(**kwargs) # noqa: E501
return data
    def generate_meal_plan_with_http_info(self, **kwargs):  # noqa: E501
        """Generate Meal Plan  # noqa: E501
        Generate a meal plan with three meals per day (breakfast, lunch, and dinner).  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.generate_meal_plan_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str time_frame: Either for one \"day\" or an entire \"week\".
        :param float target_calories: What is the caloric target for one day? The meal plan generator will try to get as close as possible to that goal.
        :param str diet: Enter a diet that the meal plan has to adhere to. See a full list of supported diets.
        :param str exclude: A comma-separated list of allergens or ingredients that must be excluded.
        :return: InlineResponse20037
        If the method is called asynchronously,
        returns the request thread.
        """
        # Snapshot the declared parameters (here just self and the raw
        # kwargs dict) before any other local is created.
        local_var_params = locals()
        all_params = ['time_frame', 'target_calories', 'diet', 'exclude'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject unknown keyword arguments, then fold the accepted ones into
        # local_var_params so every option is read from one place below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method generate_meal_plan" % key
                )
            local_var_params[key] = val
        # The raw kwargs dict itself must not be treated as a request option.
        del local_var_params['kwargs']
        # All parameters are optional, so there are no required-value checks.
        collection_formats = {}
        path_params = {}
        # Query names map the snake_case kwargs onto the API's camelCase.
        query_params = []
        if 'time_frame' in local_var_params:
            query_params.append(('timeFrame', local_var_params['time_frame']))  # noqa: E501
        if 'target_calories' in local_var_params:
            query_params.append(('targetCalories', local_var_params['target_calories']))  # noqa: E501
        if 'diet' in local_var_params:
            query_params.append(('diet', local_var_params['diet']))  # noqa: E501
        if 'exclude' in local_var_params:
            query_params.append(('exclude', local_var_params['exclude']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        # GET request: no body, so no Content-Type header is selected.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['apiKeyScheme']  # noqa: E501
        return self.api_client.call_api(
            '/mealplanner/generate', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='InlineResponse20037',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def generate_shopping_list(self, username, start_date, end_date, hash, inline_object8, **kwargs): # noqa: E501
"""Generate Shopping List # noqa: E501
Generate the shopping list for a user from the meal planner in a given time frame. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.generate_shopping_list(username, start_date, end_date, hash, inline_object8, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: The username. (required)
:param str start_date: The start date in the format yyyy-mm-dd. (required)
:param str end_date: The end date in the format yyyy-mm-dd. (required)
:param str hash: The private hash for the username. (required)
:param InlineObject8 inline_object8: (required)
:return: InlineResponse20042
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.generate_shopping_list_with_http_info(username, start_date, end_date, hash, inline_object8, **kwargs) # noqa: E501
else:
(data) = self.generate_shopping_list_with_http_info(username, start_date, end_date, hash, inline_object8, **kwargs) # noqa: E501
return data
    def generate_shopping_list_with_http_info(self, username, start_date, end_date, hash, inline_object8, **kwargs):  # noqa: E501
        """Generate Shopping List  # noqa: E501
        Generate the shopping list for a user from the meal planner in a given time frame.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.generate_shopping_list_with_http_info(username, start_date, end_date, hash, inline_object8, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str username: The username. (required)
        :param str start_date: The start date in the format yyyy-mm-dd. (required)
        :param str end_date: The end date in the format yyyy-mm-dd. (required)
        :param str hash: The private hash for the username. (required)
        :param InlineObject8 inline_object8: (required)
        :return: InlineResponse20042
        If the method is called asynchronously,
        returns the request thread.
        """
        # Snapshot the declared parameters (plus self and the raw kwargs
        # dict) before any other local is created, so the dict stays clean.
        local_var_params = locals()
        all_params = ['username', 'start_date', 'end_date', 'hash', 'inline_object8'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject unknown keyword arguments, then fold the accepted ones into
        # local_var_params so every option is read from one place below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method generate_shopping_list" % key
                )
            local_var_params[key] = val
        # The raw kwargs dict itself must not be treated as a request option.
        del local_var_params['kwargs']
        # verify the required parameter 'username' is set
        if ('username' not in local_var_params or
                local_var_params['username'] is None):
            raise ApiValueError("Missing the required parameter `username` when calling `generate_shopping_list`")  # noqa: E501
        # verify the required parameter 'start_date' is set
        if ('start_date' not in local_var_params or
                local_var_params['start_date'] is None):
            raise ApiValueError("Missing the required parameter `start_date` when calling `generate_shopping_list`")  # noqa: E501
        # verify the required parameter 'end_date' is set
        if ('end_date' not in local_var_params or
                local_var_params['end_date'] is None):
            raise ApiValueError("Missing the required parameter `end_date` when calling `generate_shopping_list`")  # noqa: E501
        # verify the required parameter 'hash' is set
        if ('hash' not in local_var_params or
                local_var_params['hash'] is None):
            raise ApiValueError("Missing the required parameter `hash` when calling `generate_shopping_list`")  # noqa: E501
        # verify the required parameter 'inline_object8' is set
        if ('inline_object8' not in local_var_params or
                local_var_params['inline_object8'] is None):
            raise ApiValueError("Missing the required parameter `inline_object8` when calling `generate_shopping_list`")  # noqa: E501
        collection_formats = {}
        # Path keys use the hyphenated names from the URL template below.
        path_params = {}
        if 'username' in local_var_params:
            path_params['username'] = local_var_params['username']  # noqa: E501
        if 'start_date' in local_var_params:
            path_params['start-date'] = local_var_params['start_date']  # noqa: E501
        if 'end_date' in local_var_params:
            path_params['end-date'] = local_var_params['end_date']  # noqa: E501
        # The private hash travels as a query parameter, not in the body.
        query_params = []
        if 'hash' in local_var_params:
            query_params.append(('hash', local_var_params['hash']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'inline_object8' in local_var_params:
            body_params = local_var_params['inline_object8']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        # NOTE(review): '' presumably lets select_header_content_type fall
        # back to its default -- confirm against ApiClient.
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            [''])  # noqa: E501
        # Authentication setting
        auth_settings = ['apiKeyScheme']  # noqa: E501
        return self.api_client.call_api(
            '/mealplanner/{username}/shopping-list/{start-date}/{end-date}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='InlineResponse20042',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_meal_plan_template(self, username, id, hash, **kwargs): # noqa: E501
"""Get Meal Plan Template # noqa: E501
Get information about a meal plan template. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_meal_plan_template(username, id, hash, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: The username. (required)
:param int id: The item's id. (required)
:param str hash: The private hash for the username. (required)
:return: InlineResponse20041
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_meal_plan_template_with_http_info(username, id, hash, **kwargs) # noqa: E501
else:
(data) = self.get_meal_plan_template_with_http_info(username, id, hash, **kwargs) # noqa: E501
return data
def get_meal_plan_template_with_http_info(self, username, id, hash, **kwargs):  # noqa: E501
    """Get Meal Plan Template  # noqa: E501

    Get information about a meal plan template.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_meal_plan_template_with_http_info(username, id, hash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param int id: The item's id. (required)
    :param str hash: The private hash for the username. (required)
    :return: InlineResponse20041
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: must stay the first statement so the snapshot captures only
    # the declared parameters (plus the ``kwargs`` dict).
    local_var_params = locals()

    all_params = ['username', 'id', 'hash']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments; fold known ones into the snapshot.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_meal_plan_template" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'username' is set
    if ('username' not in local_var_params or
            local_var_params['username'] is None):
        raise ApiValueError("Missing the required parameter `username` when calling `get_meal_plan_template`")  # noqa: E501
    # verify the required parameter 'id' is set
    if ('id' not in local_var_params or
            local_var_params['id'] is None):
        raise ApiValueError("Missing the required parameter `id` when calling `get_meal_plan_template`")  # noqa: E501
    # verify the required parameter 'hash' is set
    if ('hash' not in local_var_params or
            local_var_params['hash'] is None):
        raise ApiValueError("Missing the required parameter `hash` when calling `get_meal_plan_template`")  # noqa: E501

    collection_formats = {}

    # `username` and `id` are substituted into the URL template below.
    path_params = {}
    if 'username' in local_var_params:
        path_params['username'] = local_var_params['username']  # noqa: E501
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    # `hash` is sent as a query-string parameter.
    query_params = []
    if 'hash' in local_var_params:
        query_params.append(('hash', local_var_params['hash']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/mealplanner/{username}/templates/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='InlineResponse20041',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_meal_plan_templates(self, username, hash, **kwargs):  # noqa: E501
    """Get Meal Plan Templates  # noqa: E501

    Retrieve meal plan templates from the user or public ones.

    Synchronous by default; pass ``async_req=True`` to make the request
    asynchronously, in which case the request thread is returned.

    >>> thread = api.get_meal_plan_templates(username, hash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param str hash: The private hash for the username. (required)
    :return: InlineResponse20039
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers always want just the deserialized payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous path: hand the request thread straight back.
        return self.get_meal_plan_templates_with_http_info(username, hash, **kwargs)  # noqa: E501
    # Synchronous path: the call already returns only the data.
    return self.get_meal_plan_templates_with_http_info(username, hash, **kwargs)  # noqa: E501
def get_meal_plan_templates_with_http_info(self, username, hash, **kwargs):  # noqa: E501
    """Get Meal Plan Templates  # noqa: E501

    Get meal plan templates from user or public ones.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_meal_plan_templates_with_http_info(username, hash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param str hash: The private hash for the username. (required)
    :return: InlineResponse20039
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: must stay the first statement so the snapshot captures only
    # the declared parameters (plus the ``kwargs`` dict).
    local_var_params = locals()

    all_params = ['username', 'hash']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments; fold known ones into the snapshot.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_meal_plan_templates" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'username' is set
    if ('username' not in local_var_params or
            local_var_params['username'] is None):
        raise ApiValueError("Missing the required parameter `username` when calling `get_meal_plan_templates`")  # noqa: E501
    # verify the required parameter 'hash' is set
    if ('hash' not in local_var_params or
            local_var_params['hash'] is None):
        raise ApiValueError("Missing the required parameter `hash` when calling `get_meal_plan_templates`")  # noqa: E501

    collection_formats = {}

    # `username` is substituted into the URL template below.
    path_params = {}
    if 'username' in local_var_params:
        path_params['username'] = local_var_params['username']  # noqa: E501

    # `hash` is sent as a query-string parameter.
    query_params = []
    if 'hash' in local_var_params:
        query_params.append(('hash', local_var_params['hash']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/mealplanner/{username}/templates', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='InlineResponse20039',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_meal_plan_week(self, username, start_date, hash, **kwargs):  # noqa: E501
    """Get Meal Plan Week  # noqa: E501

    Retrieve a planned week of meals for the given user. The username must
    belong to a spoonacular user and the hash must be the user's private
    hash, found in his/her account.

    Synchronous by default; pass ``async_req=True`` to make the request
    asynchronously, in which case the request thread is returned.

    >>> thread = api.get_meal_plan_week(username, start_date, hash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param str start_date: The start date of the meal planned week in the format yyyy-mm-dd. (required)
    :param str hash: The private hash for the username. (required)
    :return: InlineResponse20038
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers always want just the deserialized payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous path: hand the request thread straight back.
        return self.get_meal_plan_week_with_http_info(username, start_date, hash, **kwargs)  # noqa: E501
    # Synchronous path: the call already returns only the data.
    return self.get_meal_plan_week_with_http_info(username, start_date, hash, **kwargs)  # noqa: E501
def get_meal_plan_week_with_http_info(self, username, start_date, hash, **kwargs):  # noqa: E501
    """Get Meal Plan Week  # noqa: E501

    Retrieve a meal planned week for the given user. The username must be a spoonacular user and the hash must be the user's hash that can be found in his/her account.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_meal_plan_week_with_http_info(username, start_date, hash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param str start_date: The start date of the meal planned week in the format yyyy-mm-dd. (required)
    :param str hash: The private hash for the username. (required)
    :return: InlineResponse20038
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: must stay the first statement so the snapshot captures only
    # the declared parameters (plus the ``kwargs`` dict).
    local_var_params = locals()

    all_params = ['username', 'start_date', 'hash']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments; fold known ones into the snapshot.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_meal_plan_week" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'username' is set
    if ('username' not in local_var_params or
            local_var_params['username'] is None):
        raise ApiValueError("Missing the required parameter `username` when calling `get_meal_plan_week`")  # noqa: E501
    # verify the required parameter 'start_date' is set
    if ('start_date' not in local_var_params or
            local_var_params['start_date'] is None):
        raise ApiValueError("Missing the required parameter `start_date` when calling `get_meal_plan_week`")  # noqa: E501
    # verify the required parameter 'hash' is set
    if ('hash' not in local_var_params or
            local_var_params['hash'] is None):
        raise ApiValueError("Missing the required parameter `hash` when calling `get_meal_plan_week`")  # noqa: E501

    collection_formats = {}

    # `username` and `start_date` are substituted into the URL template;
    # note the path placeholder uses a hyphen: {start-date}.
    path_params = {}
    if 'username' in local_var_params:
        path_params['username'] = local_var_params['username']  # noqa: E501
    if 'start_date' in local_var_params:
        path_params['start-date'] = local_var_params['start_date']  # noqa: E501

    # `hash` is sent as a query-string parameter.
    query_params = []
    if 'hash' in local_var_params:
        query_params.append(('hash', local_var_params['hash']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/mealplanner/{username}/week/{start-date}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='InlineResponse20038',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_shopping_list(self, username, hash, **kwargs):  # noqa: E501
    """Get Shopping List  # noqa: E501

    Retrieve the current shopping list for the given user.

    Synchronous by default; pass ``async_req=True`` to make the request
    asynchronously, in which case the request thread is returned.

    >>> thread = api.get_shopping_list(username, hash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param str hash: The private hash for the username. (required)
    :return: InlineResponse20042
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers always want just the deserialized payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous path: hand the request thread straight back.
        return self.get_shopping_list_with_http_info(username, hash, **kwargs)  # noqa: E501
    # Synchronous path: the call already returns only the data.
    return self.get_shopping_list_with_http_info(username, hash, **kwargs)  # noqa: E501
def get_shopping_list_with_http_info(self, username, hash, **kwargs):  # noqa: E501
    """Get Shopping List  # noqa: E501

    Retrieve the current shopping list for the given user, including the
    full HTTP response information.

    Synchronous by default; pass ``async_req=True`` to make the request
    asynchronously, in which case the request thread is returned.

    >>> thread = api.get_shopping_list_with_http_info(username, hash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param str hash: The private hash for the username. (required)
    :return: InlineResponse20042
             If the method is called asynchronously,
             returns the request thread.
    """
    # Must be the very first statement: snapshot only the declared
    # parameters (plus the ``kwargs`` dict).
    local_var_params = locals()

    all_params = [
        'username', 'hash',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout'
    ]  # noqa: E501

    # Validate keyword arguments and fold them into the snapshot.
    for arg_name, arg_value in six.iteritems(local_var_params['kwargs']):
        if arg_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_shopping_list" % arg_name
            )
        local_var_params[arg_name] = arg_value
    del local_var_params['kwargs']

    # Both parameters are required and may not be None.
    for required in ('username', 'hash'):
        if local_var_params.get(required) is None:
            raise ApiValueError(
                "Missing the required parameter `%s` when calling "
                "`get_shopping_list`" % required)  # noqa: E501

    collection_formats = {}

    # `username` goes into the URL path; `hash` into the query string.
    # Both are guaranteed present and non-None by the checks above.
    path_params = {'username': local_var_params['username']}
    query_params = [('hash', local_var_params['hash'])]

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
    }

    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/mealplanner/{username}/shopping-list', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='InlineResponse20042',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
| 45.961236
| 1,050
| 0.635605
| 9,209
| 75,882
| 4.977088
| 0.036595
| 0.054457
| 0.086748
| 0.030021
| 0.952655
| 0.943339
| 0.93963
| 0.929441
| 0.920038
| 0.905398
| 0
| 0.020967
| 0.277194
| 75,882
| 1,650
| 1,051
| 45.989091
| 0.814688
| 0.335047
| 0
| 0.75388
| 0
| 0
| 0.207341
| 0.049946
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032151
| false
| 0
| 0.005543
| 0
| 0.085366
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f8704ae364fefc7fb3e4adeb76bfe60bb337a2f6
| 44
|
py
|
Python
|
laspytest/__init__.py
|
kannes/laspy
|
f51b8c6c3e88b3b2423eb75efbd48d19a36c1cce
|
[
"BSD-2-Clause"
] | 60
|
2015-01-15T10:28:31.000Z
|
2022-03-28T10:57:59.000Z
|
laspytest/__init__.py
|
kannes/laspy
|
f51b8c6c3e88b3b2423eb75efbd48d19a36c1cce
|
[
"BSD-2-Clause"
] | 32
|
2015-02-26T23:52:07.000Z
|
2016-11-28T16:05:19.000Z
|
laspytest/__init__.py
|
kannes/laspy
|
f51b8c6c3e88b3b2423eb75efbd48d19a36c1cce
|
[
"BSD-2-Clause"
] | 38
|
2015-01-04T05:06:36.000Z
|
2021-07-27T15:06:10.000Z
|
from laspytest.test_laspy import test_laspy
| 22
| 43
| 0.886364
| 7
| 44
| 5.285714
| 0.714286
| 0.486486
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 44
| 1
| 44
| 44
| 0.925
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f8a406de98b12ab03f4e78795d6ac5ad5ed48847
| 10,742
|
py
|
Python
|
test/5703/actions_test.py
|
dburkart/check-sieve
|
667f0e9670e8820e37a8162ec09e794e6e4f1cb4
|
[
"MIT"
] | 20
|
2015-09-06T04:16:04.000Z
|
2022-03-24T16:34:56.000Z
|
test/5703/actions_test.py
|
dburkart/mail-sieve-verifier
|
cb51fda06c933dd1e1d0ded05ccba9bedbe67e7f
|
[
"MIT"
] | 24
|
2015-06-14T01:44:30.000Z
|
2015-09-05T17:25:11.000Z
|
test/5703/actions_test.py
|
dburkart/mail-sieve-verifier
|
cb51fda06c933dd1e1d0ded05ccba9bedbe67e7f
|
[
"MIT"
] | 3
|
2015-09-08T05:24:08.000Z
|
2019-04-01T00:15:29.000Z
|
import unittest
import checksieve
class TestActions(unittest.TestCase):
    """Exercise sieve actions from RFC 5703 (foreverypart, break, mime,
    replace, enclose, extracttext) through checksieve.

    Each test hands a sieve script to ``checksieve.parse_string(sieve,
    expect_failure)`` — the second argument states whether parsing is
    expected to fail, and the call returns True on a mismatch.

    BUGFIX: the second duplicate definition of ``test_enclose`` shadowed
    the first, so the valid-enclose script was never tested; the duplicate
    (which exercises the invalid ``:mime`` tag on enclose) is now named
    ``test_enclose_with_mime_tag``.
    """

    def test_foreverypart(self):
        sieve = '''
        require ["foreverypart"];

        foreverypart {
            discard;
        }
        '''
        self.assertFalse(checksieve.parse_string(sieve, False))

    def test_foreverypart_no_require(self):
        sieve = '''
        foreverypart {
            discard;
        }
        '''
        self.assertTrue(checksieve.parse_string(sieve, True))

    def test_foreverypart_with_name(self):
        sieve = '''
        require "foreverypart";

        foreverypart :name "Cc" {
            discard;
        }
        '''
        self.assertFalse(checksieve.parse_string(sieve, False))

    def test_foreverypart_invalid_tag(self):
        sieve = '''
        require ["foreverypart", "fileinto"];

        foreverypart :foo "bar" {
            fileinto "Nonsense";
        }
        '''
        self.assertTrue(checksieve.parse_string(sieve, True))

    def test_break(self):
        sieve = '''
        require "foreverypart";

        foreverypart
        {
            break;
        }
        '''
        self.assertFalse(checksieve.parse_string(sieve, False))

    def test_break_no_require(self):
        sieve = '''
        foreverypart
        {
            break;
        }
        '''
        self.assertTrue(checksieve.parse_string(sieve, True))

    def test_break_with_name(self):
        sieve = '''
        require "foreverypart";

        foreverypart {
            break :name "Subject";
        }
        '''
        self.assertFalse(checksieve.parse_string(sieve, False))

    def test_break_with_invalid_tag(self):
        sieve = '''
        require "foreverypart";

        foreverypart {
            break :foo "bar";
        }
        '''
        self.assertTrue(checksieve.parse_string(sieve, True))

    def test_mime(self):
        sieve = '''
        require ["mime", "foreverypart", "fileinto"];

        foreverypart
        {
            if allof (
                header :mime :param "filename" :contains
                    "Content-Disposition" "important",
                header :mime :subtype "Content-Type" "pdf",
                size :over "100K")
            {
                fileinto "INBOX.important";
                break;
            }
        }
        '''
        self.assertFalse(checksieve.parse_string(sieve, False))

    def test_mime_no_require(self):
        sieve = '''
        require ["foreverypart", "fileinto"];

        foreverypart
        {
            if allof (
                header :mime :param "filename" :contains
                    "Content-Disposition" "important",
                header :mime :subtype "Content-Type" "pdf",
                size :over "100K")
            {
                fileinto "INBOX.important";
                break;
            }
        }
        '''
        self.assertTrue(checksieve.parse_string(sieve, True))

    def test_replace(self):
        sieve = '''
        require [ "foreverypart", "mime", "replace" ];

        foreverypart
        {
            if anyof (
                header :mime :contenttype :is
                    "Content-Type" "application/exe",
                header :mime :param "filename"
                    :matches ["Content-Type", "Content-Disposition"] "*.com" )
            {
                replace "Executable attachment removed by user filter";
            }
        }
        '''
        self.assertFalse(checksieve.parse_string(sieve, False))

    def test_replace_with_mime(self):
        sieve = '''
        require [ "foreverypart", "mime", "replace" ];

        foreverypart
        {
            if anyof (
                header :mime :contenttype :is
                    "Content-Type" "application/exe",
                header :mime :param "filename"
                    :matches ["Content-Type", "Content-Disposition"] "*.com" )
            {
                replace :mime "Executable attachment removed by user filter";
            }
        }
        '''
        self.assertFalse(checksieve.parse_string(sieve, False))

    def test_replace_with_from(self):
        sieve = '''
        require [ "foreverypart", "mime", "replace" ];

        foreverypart
        {
            if anyof (
                header :mime :contenttype :is
                    "Content-Type" "application/exe",
                header :mime :param "filename"
                    :matches ["Content-Type", "Content-Disposition"] "*.com" )
            {
                replace :from "bob@foo.com" "bob@bar.com";
            }
        }
        '''
        self.assertFalse(checksieve.parse_string(sieve, False))

    def test_replace_with_weird_tag(self):
        sieve = '''
        require [ "foreverypart", "mime", "replace" ];

        foreverypart
        {
            if anyof (
                header :mime :contenttype :is
                    "Content-Type" "application/exe",
                header :mime :param "filename"
                    :matches ["Content-Type", "Content-Disposition"] "*.com" )
            {
                replace :param "filename" "Executable attachment removed by user filter";
            }
        }
        '''
        self.assertTrue(checksieve.parse_string(sieve, True))

    def test_replace_no_require(self):
        sieve = '''
        require [ "foreverypart", "mime" ];

        foreverypart
        {
            if anyof (
                header :mime :contenttype :is
                    "Content-Type" "application/exe",
                header :mime :param "filename"
                    :matches ["Content-Type", "Content-Disposition"] "*.com" )
            {
                replace "Executable attachment removed by user filter";
            }
        }
        '''
        self.assertTrue(checksieve.parse_string(sieve, True))

    def test_enclose(self):
        # The dot-terminated multiline text must stay at column 0 inside
        # the string: sieve text: literals are line-start sensitive.
        sieve = '''
        require [ "foreverypart", "mime", "enclose" ];

        foreverypart
        {
            if header :mime :param "filename"
                :matches ["Content-Type", "Content-Disposition"]
                    ["*.com", "*.exe", "*.vbs", "*.scr",
                     "*.pif", "*.hta", "*.bat", "*.zip" ]
            {
                # these attachment types are executable
                enclose :subject "Warning" text:
WARNING! The enclosed message contains executable attachments.
These attachment types may contain a computer virus program
that can infect your computer and potentially damage your data.

Before clicking on these message attachments, you should verify
with the sender that this message was sent by them and not a
computer virus.
.
;
                break;
            }
        }
        '''
        self.assertFalse(checksieve.parse_string(sieve, False))

    def test_enclose_with_mime_tag(self):
        # Renamed from a duplicate ``test_enclose`` that shadowed the one
        # above; enclose does not accept the :mime tag, so this must fail.
        sieve = '''
        require [ "foreverypart", "mime", "enclose" ];

        foreverypart
        {
            if header :mime :param "filename"
                :matches ["Content-Type", "Content-Disposition"]
                    ["*.com", "*.exe", "*.vbs", "*.scr",
                     "*.pif", "*.hta", "*.bat", "*.zip" ]
            {
                # these attachment types are executable
                enclose :mime text:
WARNING! The enclosed message contains executable attachments.
These attachment types may contain a computer virus program
that can infect your computer and potentially damage your data.

Before clicking on these message attachments, you should verify
with the sender that this message was sent by them and not a
computer virus.
.
;
                break;
            }
        }
        '''
        self.assertTrue(checksieve.parse_string(sieve, True))

    def test_enclose_no_require(self):
        sieve = '''
        require [ "foreverypart", "mime" ];

        foreverypart
        {
            if header :mime :param "filename"
                :matches ["Content-Type", "Content-Disposition"]
                    ["*.com", "*.exe", "*.vbs", "*.scr",
                     "*.pif", "*.hta", "*.bat", "*.zip" ]
            {
                # these attachment types are executable
                enclose :subject "Warning" text:
WARNING! The enclosed message contains executable attachments.
These attachment types may contain a computer virus program
that can infect your computer and potentially damage your data.

Before clicking on these message attachments, you should verify
with the sender that this message was sent by them and not a
computer virus.
.
;
                break;
            }
        }
        '''
        self.assertTrue(checksieve.parse_string(sieve, True))

    def test_extracttext(self):
        sieve = '''
        require ["mime", "variables", "extracttext", "foreverypart"];

        if header :contains "from" "boss@example.org"
        {
            # :matches is used to get the value of the Subject header
            if header :matches "Subject" "*"
            {
                set "subject" "${1}";
            }

            # extract the first 100 characters of the first text/* part
            foreverypart
            {
                if header :mime :type :is "Content-Type" "text"
                {
                    extracttext :first 100 "msgcontent";
                    break;
                }
            }

            # if it's not a 'for your information' message
            if not header :contains "subject" "FYI:"
            {
                # do something using ${subject} and ${msgcontent}
                # such as sending a notification using a
                # notification extension
            }
        }
        '''
        self.assertFalse(checksieve.parse_string(sieve, False))

    def test_extracttext_no_require(self):
        sieve = '''
        require ["mime", "variables", "foreverypart"];

        if header :contains "from" "boss@example.org"
        {
            # :matches is used to get the value of the Subject header
            if header :matches "Subject" "*"
            {
                set "subject" "${1}";
            }

            # extract the first 100 characters of the first text/* part
            foreverypart
            {
                if header :mime :type :is "Content-Type" "text"
                {
                    extracttext :first 100 "msgcontent";
                    break;
                }
            }

            # if it's not a 'for your information' message
            if not header :contains "subject" "FYI:"
            {
                # do something using ${subject} and ${msgcontent}
                # such as sending a notification using a
                # notification extension
            }
        }
        '''
        self.assertTrue(checksieve.parse_string(sieve, True))
if __name__ == '__main__':
    # Allow running this module directly: discovers and runs all test_* methods.
    unittest.main()
| 29.922006
| 88
| 0.516943
| 951
| 10,742
| 5.759201
| 0.146162
| 0.025561
| 0.076684
| 0.094942
| 0.947416
| 0.916378
| 0.882965
| 0.858134
| 0.858134
| 0.815592
| 0
| 0.002982
| 0.375721
| 10,742
| 358
| 89
| 30.005587
| 0.813749
| 0
| 0
| 0.64557
| 0
| 0
| 0.760101
| 0.020294
| 0
| 0
| 0
| 0
| 0.063291
| 1
| 0.063291
| false
| 0
| 0.018987
| 0
| 0.085443
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f8bc4d814aaeb2bee264f9fe721dbda4d85ebe0d
| 40
|
py
|
Python
|
backend/flask/website/__init__.py
|
Darky2020/SauerDemos
|
27474ab922ac6bacb6c6a549b9dc42c93802ba88
|
[
"MIT"
] | 1
|
2022-03-05T06:08:35.000Z
|
2022-03-05T06:08:35.000Z
|
backend/flask/website/__init__.py
|
Darky2020/SauerDemos
|
27474ab922ac6bacb6c6a549b9dc42c93802ba88
|
[
"MIT"
] | null | null | null |
backend/flask/website/__init__.py
|
Darky2020/SauerDemos
|
27474ab922ac6bacb6c6a549b9dc42c93802ba88
|
[
"MIT"
] | null | null | null |
from .routes import blueprint as website
| 40
| 40
| 0.85
| 6
| 40
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 40
| 1
| 40
| 40
| 0.971429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
3e545fb54e0aa4ff91ee25cae7ab10355a3ad836
| 11,004
|
py
|
Python
|
tests/test.py
|
EFavDB/linselect
|
1e91b5adb1f833db2e7a53ad23fd2684927832d6
|
[
"MIT"
] | 93
|
2018-06-04T14:15:04.000Z
|
2022-01-03T20:53:48.000Z
|
tests/test.py
|
sohrabtowfighi/linselect
|
1e91b5adb1f833db2e7a53ad23fd2684927832d6
|
[
"MIT"
] | null | null | null |
tests/test.py
|
sohrabtowfighi/linselect
|
1e91b5adb1f833db2e7a53ad23fd2684927832d6
|
[
"MIT"
] | 5
|
2018-06-05T04:34:56.000Z
|
2020-05-01T14:28:14.000Z
|
from unittest import TestCase
import numpy as np
import linselect
def _shift_scale(x):
"""
Subtract mean and normalize an array.
Returns one with mean zero and variance
one.
"""
mean = np.mean(x)
sigma = np.std(x)
return (x - mean) / sigma
def _generate_normalized_array(m, n):
    """Return an (m, n) array of standard-normal draws, column-standardized.

    m = number of rows, n = number of columns.  Each column is passed
    through ``_shift_scale`` so it has mean zero and variance one.

    BUGFIX: removed two leftover Python-2 debug statements
    (``print m, type(m)`` / ``print n, type(n)``) — they polluted test
    output and are a syntax error under Python 3.
    """
    x = np.random.randn(m, n)
    for col in range(n):
        x[:, col] = _shift_scale(x[:, col])
    return x
class TestSelectionMethods():
"""
Check that the cod returned by our method
agrees with external package fit.
"""
n = 11
m = 2000
N = 5
clusters = 3
cluster_size = 5
# First tests check we report correct COD with all classes.
def test_gen_supervised_cod(TestCase, m=m, n=n, N=N):
# generate a data set
X = _generate_normalized_array(m, n)
# start with first two features in s
s = [False for i in range(n)]
s[0] = True
s[1] = True
# make last column the target variable
targets = [False for i in range(n)]
targets[-1] = True
# make all mobile except first two and target
mobile = [True for i in range(n)]
mobile[0] = False
mobile[1] = False
mobile[-1] = False
# now search with a particular protocol
selector = linselect.GenSelect()
selector.position(
X=X, s=s, mobile=mobile, targets=targets)
selector.search(protocol=(3, 2), steps=25)
# check best cod found using N features
s_at_N = selector.best_results[N]['s']
cod_at_N = selector.best_results[N]['cod']
# compare to cod of fit from numpy using same features
X_at_N = X[:, s_at_N]
y = X[:, -1]
squared_error = np.linalg.lstsq(X_at_N, y)[1]
assert(np.isclose(cod_at_N, 1 - squared_error[0] / m, atol=1e-05))
def test_gen_unsupervised_cod(TestCase, m=m, n=n, N=N):
# generate a data set
X = _generate_normalized_array(m, n)
# now search with default protocol
selector = linselect.GenSelect()
selector.position(X=X)
selector.search(protocol=(3, 2), steps=25)
# check best cod found using N features
s_at_N = selector.best_results[N]['s']
cod_at_N = selector.best_results[N]['cod']
# compare to cod of fit from numpy using same features
X_at_N = X[:, s_at_N]
y_at_N = X[:, ~s_at_N]
squared_error = np.linalg.lstsq(X_at_N, y_at_N)[1]
assert(np.isclose(cod_at_N, n - np.sum(squared_error) / m, atol=1e-05))
def test_fwd_supervised_cod(TestCase, m=m, n=n, N=N):
# generate a data set with three target (y) vars.
X = _generate_normalized_array(m, n)
y = _generate_normalized_array(m, 3)
# now carry out reverse selection
selector = linselect.FwdSelect()
selector.fit(X, y)
# check best cod found using N features
s_at_N = selector.ordered_features[:N]
cod_at_N = selector.ordered_cods[N - 1]
# compare to cod of fit from numpy using same features
X_at_N = X[:, s_at_N]
squared_error = np.linalg.lstsq(X_at_N, y)[1]
assert(np.isclose(cod_at_N, 3 - np.sum(squared_error) / m, atol=1e-05))
def test_fwd_unsupervised_cod(TestCase, m=m, n=n, N=N):
# generate a data set with two target (y) vars.
X = _generate_normalized_array(m, n)
# now carry out reverse selection
selector = linselect.FwdSelect()
selector.fit(X)
# check best cod found using N features
cod_at_N = selector.ordered_cods[N - 1]
# compare to cod of fit from numpy using same features
X_at_N = X[:, selector.ordered_features[:N]]
y_at_N = X[:, selector.ordered_features[N:]]
squared_error = np.linalg.lstsq(X_at_N, y_at_N)[1]
assert(np.isclose(cod_at_N, n - np.sum(squared_error) / m, atol=1e-05))
def test_rev_supervised_cod(TestCase, m=m, n=n, N=N):
# generate a data set with two target (y) vars.
X = _generate_normalized_array(m, n)
y = _generate_normalized_array(m, 2)
# now carry out reverse selection
selector = linselect.RevSelect()
selector.fit(X, y)
# check best cod found using N features
cod_at_N = selector.ordered_cods[N - 1]
# compare to cod of fit from numpy using same features
X_at_N = X[:, selector.ordered_features[:N]]
squared_error = np.linalg.lstsq(X_at_N, y)[1]
assert(np.isclose(cod_at_N, 2 - np.sum(squared_error) / m, atol=1e-05))
def test_rev_unsupervised_cod(TestCase, m=m, n=n, N=N):
# generate a data set with two target (y) vars.
X = _generate_normalized_array(m, n)
# now carry out reverse selection
selector = linselect.RevSelect()
selector.fit(X)
# check best cod found using N features
cod_at_N = selector.ordered_cods[N - 1]
# compare to cod of fit from numpy using same features
X_at_N = X[:, selector.ordered_features[:N]]
y_at_N = X[:, selector.ordered_features[N:]]
squared_error = np.linalg.lstsq(X_at_N, y_at_N)[1]
assert(np.isclose(cod_at_N, n - np.sum(squared_error) / m, atol=1e-05))
# Tests below ensure we select the best candidate each time.
def test_fwd_supervised_ordering(TestCase, m=m, n=n):
# Take y linear in X's columns, with coefficient increasing with index.
X = _generate_normalized_array(m, n)
y = np.dot(X, np.arange(1, n + 1)).reshape(-1, 1)
# Forward selection
selector = linselect.FwdSelect()
selector.fit(X, y)
# Ensure correct feature order
assert(selector.ordered_features == range(n)[::-1])
def test_fwd_unsupervised_ordering(
TestCase, m=m, clusters=clusters, cluster_size=cluster_size):
# Generate well-separated clusters of features
centroids = 100 * np.random.rand(m, clusters)
X = np.random.rand(m, clusters * cluster_size)
for i in range(clusters):
X[:, i * cluster_size:(i+1) * cluster_size] += centroids[:, [i]]
# Forward selection
selector = linselect.FwdSelect()
selector.fit(X)
# Ensure top features are each from a different cluster
first_features = selector.ordered_features[:clusters]
first_clusters = sorted([c // cluster_size for c in first_features])
assert first_clusters == range(clusters)
def test_rev_supervised_ordering(TestCase, m=m, n=n):
    """Reverse supervised selection must retain features in decreasing
    coefficient order when y is linear in X with increasing coefficients.
    """
    # Take y linear in X's columns, with coefficient increasing with index.
    X = _generate_normalized_array(m, n)
    y = np.dot(X, np.arange(1, n + 1)).reshape(-1, 1)
    # Reverse selection
    selector = linselect.RevSelect()
    selector.fit(X, y)
    # Ensure correct feature order.  Compare against a concrete list: on
    # Python 3 `range(n)[::-1]` is a range object and `list == range` is
    # always False, which broke this assertion under Python 3.
    assert(selector.ordered_features == list(range(n))[::-1])
def test_rev_unsupervised_ordering(
        TestCase, m=m, clusters=clusters, cluster_size=cluster_size):
    """Reverse unsupervised selection must keep its top `clusters`
    features from distinct, well-separated feature clusters.
    """
    # Generate well-separated clusters of features
    centroids = 100 * np.random.rand(m, clusters)
    X = np.random.rand(m, clusters * cluster_size)
    for i in range(clusters):
        X[:, i * cluster_size:(i+1) * cluster_size] += centroids[:, [i]]
    # Reverse selection
    selector = linselect.RevSelect()
    selector.fit(X)
    # Ensure top features are each from a different cluster.  Compare
    # against a concrete list: on Python 3 `range(...)` is lazy and
    # `list == range` is always False, which broke this assertion.
    first_features = selector.ordered_features[:clusters]
    first_clusters = sorted([c // cluster_size for c in first_features])
    assert first_clusters == list(range(clusters))
def test_gen_supervised_ordering(TestCase, m=m, n=n):
    """General (bidirectional) supervised selection must include the
    highest-coefficient features in every best subset of each size.
    """
    # Take last col linear others, with coefficient increasing with index.
    X = _generate_normalized_array(m, n)
    X[:, -1] = np.dot(X[:, :-1], np.arange(1, n))
    # General selection set up: all columns are movable except the last,
    # which is held fixed as the regression target.
    mobile = np.array([True for i in range(n)])
    mobile[-1] = False
    targets = ~mobile
    selector = linselect.GenSelect()
    selector.position(X, mobile=mobile, targets=targets)
    # Now sweep back and forth a few times (forward, reverse, forward) so
    # best_results is populated for every subset size.
    selector.search(protocol=(1, 0), steps=n)
    selector.search(protocol=(0, 1), steps=n)
    selector.search(protocol=(1, 0), steps=n)
    # Ensure correct features included with each subset size: for subset
    # size k the slice [-(k + 1):-1] covers the k highest-coefficient
    # columns (the very last column is the target, hence excluded).
    # NOTE(review): assumes best_results[k]['s'] is a boolean selection
    # mask over columns -- confirm against linselect's GenSelect docs.
    for k in range(n):
        assert np.all(
            selector.best_results[k]['s'][-(k + 1):-1])
def test_gen_unsupervised_ordering(
        TestCase, m=m, clusters=clusters, cluster_size=cluster_size):
    """General unsupervised selection must choose its best
    `clusters`-sized subset with one feature per feature cluster.
    """
    # Generate well-separated clusters of features
    centroids = 100 * np.random.rand(m, clusters)
    X = np.random.rand(m, clusters * cluster_size)
    for i in range(clusters):
        X[:, i * cluster_size:(i+1) * cluster_size] += centroids[:, [i]]
    # Now sweep back and forth a few times
    selector = linselect.GenSelect()
    selector.position(X)
    selector.search(protocol=(1, 0), steps=clusters * cluster_size)
    selector.search(protocol=(0, 1), steps=clusters * cluster_size)
    selector.search(protocol=(1, 0), steps=clusters * cluster_size)
    # Ensure top features are each from a different cluster.  Compare
    # against a concrete list: on Python 3 `range(...)` is lazy and
    # `list == range` is always False, which broke this assertion.
    first_features = np.where(selector.best_results[clusters]['s'])[0]
    first_clusters = sorted([c // cluster_size for c in first_features])
    assert first_clusters == list(range(clusters))
# Tests below ensure dtype is respected throughout algorithms
def test_gen_dtype_maintained(TestCase, m=m, n=n):
    """General selection must record every best-result COD in the dtype
    the selector was configured with.
    """
    data = _generate_normalized_array(m, n)
    # All columns are movable except the last, which is the target.
    movable = np.array([True for i in range(n)])
    movable[-1] = False
    selector = linselect.GenSelect(dtype=np.float32)
    selector.position(data, mobile=movable, targets=~movable)
    # Sweep forward, backward, then forward again.
    for direction in [(1, 0), (0, 1), (1, 0)]:
        selector.search(protocol=direction, steps=n)
    # Every recorded best COD must carry the configured dtype.
    for key in selector.best_results:
        assert isinstance(selector.best_results[key]['cod'], selector.dtype)
def test_fwd_dtype_maintained(TestCase, m=m, n=n):
    """Forward selection must report all ordered CODs in the configured dtype."""
    data = _generate_normalized_array(m, n)
    selector = linselect.FwdSelect(dtype=np.float32)
    selector.fit(data)
    assert all(
        isinstance(cod, selector.dtype) for cod in selector.ordered_cods)
def test_rev_dtype_maintained(TestCase, m=m, n=n):
    """Reverse selection must report all ordered CODs in the configured dtype."""
    data = _generate_normalized_array(m, n)
    selector = linselect.RevSelect(dtype=np.float32)
    selector.fit(data)
    assert all(
        isinstance(cod, selector.dtype) for cod in selector.ordered_cods)
| 36.926174
| 79
| 0.6255
| 1,561
| 11,004
| 4.256887
| 0.119795
| 0.016704
| 0.051919
| 0.054176
| 0.823025
| 0.814748
| 0.801655
| 0.790971
| 0.751242
| 0.724906
| 0
| 0.01391
| 0.268266
| 11,004
| 297
| 80
| 37.050505
| 0.811351
| 0.196838
| 0
| 0.622857
| 0
| 0
| 0.001524
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 0
| null | null | 0
| 0.017143
| null | null | 0.011429
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3e838d7cce39e9a2f938317b5f8bfeae4a54746c
| 266
|
py
|
Python
|
OpenGLCffi/GL/EXT/NV/framebuffer_multisample_coverage.py
|
cydenix/OpenGLCffi
|
c78f51ae5e6b655eb2ea98f072771cf69e2197f3
|
[
"MIT"
] | null | null | null |
OpenGLCffi/GL/EXT/NV/framebuffer_multisample_coverage.py
|
cydenix/OpenGLCffi
|
c78f51ae5e6b655eb2ea98f072771cf69e2197f3
|
[
"MIT"
] | null | null | null |
OpenGLCffi/GL/EXT/NV/framebuffer_multisample_coverage.py
|
cydenix/OpenGLCffi
|
c78f51ae5e6b655eb2ea98f072771cf69e2197f3
|
[
"MIT"
] | null | null | null |
from OpenGLCffi.GL import params
@params(api='gl', prms=['target', 'coverageSamples', 'colorSamples', 'internalformat', 'width', 'height'])
def glRenderbufferStorageMultisampleCoverageNV(target, coverageSamples, colorSamples, internalformat, width, height):
	# Stub: the `params` decorator (imported from OpenGLCffi.GL above)
	# binds this signature to the native GL entry point of the same name;
	# the Python body is intentionally empty.
	pass
| 38
| 117
| 0.781955
| 24
| 266
| 8.666667
| 0.666667
| 0.201923
| 0.317308
| 0.451923
| 0.557692
| 0.557692
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082707
| 266
| 6
| 118
| 44.333333
| 0.852459
| 0
| 0
| 0
| 0
| 0
| 0.227273
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
3e929aba60d802da65bebcfba4e5f244b1b6ae19
| 111
|
py
|
Python
|
setup.py
|
causm/new-proj2
|
d622e01e416c174f289d3b929a2257c4f5a617bf
|
[
"MIT"
] | null | null | null |
setup.py
|
causm/new-proj2
|
d622e01e416c174f289d3b929a2257c4f5a617bf
|
[
"MIT"
] | 2
|
2019-02-12T19:53:20.000Z
|
2019-02-12T20:00:21.000Z
|
setup.py
|
causm/new-proj2
|
d622e01e416c174f289d3b929a2257c4f5a617bf
|
[
"MIT"
] | null | null | null |
import socket

# Resolve and print IP addresses for a local and a remote hostname.
# The original used Python 2 `print` statements, which are a SyntaxError
# on Python 3; `print(...)` calls behave the same on both versions.
print(socket.gethostbyname('localhost'))
print(socket.gethostbyname('kesten.praetorianlabs.com'))
| 27.75
| 55
| 0.828829
| 12
| 111
| 7.666667
| 0.666667
| 0.23913
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.063063
| 111
| 3
| 56
| 37
| 0.884615
| 0
| 0
| 0
| 0
| 0
| 0.306306
| 0.225225
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.333333
| null | null | 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
e45d417f2fd4ab5f8d36b82bbf54cfed947f8053
| 48,356
|
py
|
Python
|
advent_selfTraining/domain_adaptation/train_UDA.py
|
gritYCDA/boundaryOCDA
|
d93f2d4ad1f41d7ec19ba2a2fc7e98ecce914ccb
|
[
"Apache-2.0"
] | null | null | null |
advent_selfTraining/domain_adaptation/train_UDA.py
|
gritYCDA/boundaryOCDA
|
d93f2d4ad1f41d7ec19ba2a2fc7e98ecce914ccb
|
[
"Apache-2.0"
] | null | null | null |
advent_selfTraining/domain_adaptation/train_UDA.py
|
gritYCDA/boundaryOCDA
|
d93f2d4ad1f41d7ec19ba2a2fc7e98ecce914ccb
|
[
"Apache-2.0"
] | null | null | null |
# --------------------------------------------------------
# Domain adpatation training
# Copyright (c) 2019 valeo.ai
#
# Written by Tuan-Hung Vu
# --------------------------------------------------------
import os
import sys
from pathlib import Path
import os.path as osp
import numpy as np
import torch
import torch.backends.cudnn as cudnn
import torch.nn.functional as F
import torch.optim as optim
from tensorboardX import SummaryWriter
from torch import nn
from torchvision.utils import make_grid
from tqdm import tqdm
import copy
from advent.model.discriminator import get_fc_discriminator
from advent.model.conv_abstract import get_conv_abstract
from advent.utils.func import adjust_learning_rate, adjust_learning_rate_discriminator
from advent.utils.func import loss_calc, bce_loss, mse_loss, reg_loss_calc_ign
from advent.utils.loss import entropy_loss
from advent.utils.simclr_loss import NTXentLoss
from advent.utils.func import prob_2_entropy
from advent.utils.viz_segmask import colorize_mask
import random
def train_advent(model, trainloader, targetloader, cfg):
    '''UDA training with ADVENT (adversarial entropy minimization).

    Each iteration runs three phases:
      1. Supervised segmentation loss on a source batch.
      2. Adversarial loss on a target batch: with the discriminators
         frozen, the segmenter is pushed to make target entropy maps be
         classified as source (label ``source_label``).
      3. Discriminator training on detached source/target predictions.

    Args:
        model: segmentation network; returns (aux, main) prediction maps.
        trainloader: source-domain loader yielding (images, labels, _, _).
        targetloader: target-domain loader yielding (images, _, _, _).
        cfg: config namespace (cfg.TRAIN.*, cfg.GPU_ID, cfg.NUM_CLASSES).
    '''
    # Create the model and start the training.
    input_size_source = cfg.TRAIN.INPUT_SIZE_SOURCE
    input_size_target = cfg.TRAIN.INPUT_SIZE_TARGET
    device = cfg.GPU_ID
    num_classes = cfg.NUM_CLASSES
    viz_tensorboard = os.path.exists(cfg.TRAIN.TENSORBOARD_LOGDIR)
    if viz_tensorboard:
        writer = SummaryWriter(log_dir=cfg.TRAIN.TENSORBOARD_LOGDIR)
    # SEGMNETATION NETWORK
    model.train()
    model.to(device)
    cudnn.benchmark = True
    cudnn.enabled = True
    # DISCRIMINATOR NETWORK
    # feature-level
    d_aux = get_fc_discriminator(num_classes=num_classes)
    d_aux.train()
    d_aux.to(device)
    # seg maps, i.e. output, level
    d_main = get_fc_discriminator(num_classes=num_classes)
    d_main.train()
    d_main.to(device)
    # OPTIMIZERS
    # segnet's optimizer
    optimizer = optim.SGD(model.optim_parameters(cfg.TRAIN.LEARNING_RATE),
                          lr=cfg.TRAIN.LEARNING_RATE,
                          momentum=cfg.TRAIN.MOMENTUM,
                          weight_decay=cfg.TRAIN.WEIGHT_DECAY)
    # discriminators' optimizers
    optimizer_d_aux = optim.Adam(d_aux.parameters(), lr=cfg.TRAIN.LEARNING_RATE_D,
                                 betas=(0.9, 0.99))
    optimizer_d_main = optim.Adam(d_main.parameters(), lr=cfg.TRAIN.LEARNING_RATE_D,
                                  betas=(0.9, 0.99))
    # interpolate output segmaps
    # (size is (H, W) built from a (W, H) config tuple)
    interp = nn.Upsample(size=(input_size_source[1], input_size_source[0]), mode='bilinear',
                         align_corners=True)
    interp_target = nn.Upsample(size=(input_size_target[1], input_size_target[0]), mode='bilinear',
                                align_corners=True)
    # labels for adversarial training
    source_label = 0
    target_label = 1
    trainloader_iter = enumerate(trainloader)
    targetloader_iter = enumerate(targetloader)
    for i_iter in tqdm(range(cfg.TRAIN.EARLY_STOP + 1)):
        # reset optimizers
        optimizer.zero_grad()
        optimizer_d_aux.zero_grad()
        optimizer_d_main.zero_grad()
        # adapt LR if needed
        adjust_learning_rate(optimizer, i_iter, cfg)
        adjust_learning_rate_discriminator(optimizer_d_aux, i_iter, cfg)
        adjust_learning_rate_discriminator(optimizer_d_main, i_iter, cfg)
        # UDA Training
        # only train segnet. Don't accumulate grads in disciminators
        for param in d_aux.parameters():
            param.requires_grad = False
        for param in d_main.parameters():
            param.requires_grad = False
        # train on source
        _, batch = trainloader_iter.__next__()
        images_source, labels, _, _ = batch
        pred_src_aux, pred_src_main = model(images_source.cuda(device))
        if cfg.TRAIN.MULTI_LEVEL:
            pred_src_aux = interp(pred_src_aux)
            loss_seg_src_aux = loss_calc(pred_src_aux, labels, device)
        else:
            loss_seg_src_aux = 0
        pred_src_main = interp(pred_src_main)
        loss_seg_src_main = loss_calc(pred_src_main, labels, device)
        loss = (cfg.TRAIN.LAMBDA_SEG_MAIN * loss_seg_src_main
                + cfg.TRAIN.LAMBDA_SEG_AUX * loss_seg_src_aux)
        loss.backward()
        # adversarial training ot fool the discriminator
        _, batch = targetloader_iter.__next__()
        images, _, _, _ = batch
        pred_trg_aux, pred_trg_main = model(images.cuda(device))
        if cfg.TRAIN.MULTI_LEVEL:
            pred_trg_aux = interp_target(pred_trg_aux)
            # NOTE(review): F.softmax without an explicit dim relies on the
            # deprecated implicit-dim behavior -- confirm dim=1 is intended.
            d_out_aux = d_aux(prob_2_entropy(F.softmax(pred_trg_aux)))
            loss_adv_trg_aux = bce_loss(d_out_aux, source_label)
        else:
            loss_adv_trg_aux = 0
        pred_trg_main = interp_target(pred_trg_main)
        d_out_main = d_main(prob_2_entropy(F.softmax(pred_trg_main)))
        # Target predictions labeled as "source" to fool the discriminator.
        loss_adv_trg_main = bce_loss(d_out_main, source_label)
        loss = (cfg.TRAIN.LAMBDA_ADV_MAIN * loss_adv_trg_main
                + cfg.TRAIN.LAMBDA_ADV_AUX * loss_adv_trg_aux)
        loss = loss
        loss.backward()
        # Train discriminator networks
        # enable training mode on discriminator networks
        for param in d_aux.parameters():
            param.requires_grad = True
        for param in d_main.parameters():
            param.requires_grad = True
        # train with source (detached so segmenter gets no gradient)
        if cfg.TRAIN.MULTI_LEVEL:
            pred_src_aux = pred_src_aux.detach()
            d_out_aux = d_aux(prob_2_entropy(F.softmax(pred_src_aux)))
            loss_d_aux = bce_loss(d_out_aux, source_label)
            loss_d_aux = loss_d_aux / 2
            loss_d_aux.backward()
        pred_src_main = pred_src_main.detach()
        d_out_main = d_main(prob_2_entropy(F.softmax(pred_src_main)))
        loss_d_main = bce_loss(d_out_main, source_label)
        loss_d_main = loss_d_main / 2
        loss_d_main.backward()
        # train with target
        if cfg.TRAIN.MULTI_LEVEL:
            pred_trg_aux = pred_trg_aux.detach()
            d_out_aux = d_aux(prob_2_entropy(F.softmax(pred_trg_aux)))
            loss_d_aux = bce_loss(d_out_aux, target_label)
            loss_d_aux = loss_d_aux / 2
            loss_d_aux.backward()
        else:
            loss_d_aux = 0
        pred_trg_main = pred_trg_main.detach()
        d_out_main = d_main(prob_2_entropy(F.softmax(pred_trg_main)))
        loss_d_main = bce_loss(d_out_main, target_label)
        loss_d_main = loss_d_main / 2
        loss_d_main.backward()
        optimizer.step()
        if cfg.TRAIN.MULTI_LEVEL:
            optimizer_d_aux.step()
        optimizer_d_main.step()
        current_losses = {'loss_seg_src_aux': loss_seg_src_aux,
                          'loss_seg_src_main': loss_seg_src_main,
                          'loss_adv_trg_aux': loss_adv_trg_aux,
                          'loss_adv_trg_main': loss_adv_trg_main,
                          'loss_d_aux': loss_d_aux,
                          'loss_d_main': loss_d_main}
        print_losses(current_losses, i_iter)
        if i_iter % cfg.TRAIN.SAVE_PRED_EVERY == 0 and i_iter != 0:
            print('taking snapshot ...')
            print('exp =', cfg.TRAIN.SNAPSHOT_DIR)
            snapshot_dir = Path(cfg.TRAIN.SNAPSHOT_DIR)
            torch.save(model.state_dict(), snapshot_dir / f'model_{i_iter}.pth')
            torch.save(d_aux.state_dict(), snapshot_dir / f'model_{i_iter}_D_aux.pth')
            torch.save(d_main.state_dict(), snapshot_dir / f'model_{i_iter}_D_main.pth')
            if i_iter >= cfg.TRAIN.EARLY_STOP - 1:
                break
        sys.stdout.flush()
        # Visualize with tensorboard
        if viz_tensorboard:
            log_losses_tensorboard(writer, current_losses, i_iter)
            if i_iter % cfg.TRAIN.TENSORBOARD_VIZRATE == cfg.TRAIN.TENSORBOARD_VIZRATE - 1:
                draw_in_tensorboard(writer, images, i_iter, pred_trg_main, num_classes, 'T')
                draw_in_tensorboard(writer, images_source, i_iter, pred_src_main, num_classes, 'S')
def train_adaptseg(model, trainloader, targetloader, cfg):
    '''UDA training in the AdaptSegNet style (output-space adversarial).

    Same loop structure as train_advent, but the discriminators see raw
    softmax maps (no entropy transform) and are trained with an MSE
    (least-squares GAN) loss instead of BCE.

    NOTE(review): here `model` returns a single prediction map
    (`pred_src_main = model(...)`), yet the MULTI_LEVEL branches below
    read `pred_src_aux` / `pred_trg_aux`, which are never assigned in
    this function -- a NameError if cfg.TRAIN.MULTI_LEVEL is True.
    Confirm this entry point is only used with MULTI_LEVEL disabled.

    Args:
        model: segmentation network returning a single prediction map.
        trainloader: source-domain loader yielding (images, labels, _, _).
        targetloader: target-domain loader yielding (images, _, _, _).
        cfg: config namespace (cfg.TRAIN.*, cfg.GPU_ID, cfg.NUM_CLASSES).
    '''
    # Create the model and start the training.
    input_size_source = cfg.TRAIN.INPUT_SIZE_SOURCE
    input_size_target = cfg.TRAIN.INPUT_SIZE_TARGET
    device = cfg.GPU_ID
    num_classes = cfg.NUM_CLASSES
    viz_tensorboard = os.path.exists(cfg.TRAIN.TENSORBOARD_LOGDIR)
    if viz_tensorboard:
        writer = SummaryWriter(log_dir=cfg.TRAIN.TENSORBOARD_LOGDIR)
    # SEGMNETATION NETWORK
    model.train()
    model.to(device)
    cudnn.benchmark = True
    cudnn.enabled = True
    # DISCRIMINATOR NETWORK
    # feature-level
    d_aux = get_fc_discriminator(num_classes=num_classes)
    d_aux.train()
    d_aux.to(device)
    # seg maps, i.e. output, level
    d_main = get_fc_discriminator(num_classes=num_classes)
    d_main.train()
    d_main.to(device)
    # OPTIMIZERS
    # segnet's optimizer
    optimizer = optim.SGD(model.optim_parameters(cfg.TRAIN.LEARNING_RATE),
                          lr=cfg.TRAIN.LEARNING_RATE,
                          momentum=cfg.TRAIN.MOMENTUM,
                          weight_decay=cfg.TRAIN.WEIGHT_DECAY)
    # discriminators' optimizers
    optimizer_d_aux = optim.Adam(d_aux.parameters(), lr=cfg.TRAIN.LEARNING_RATE_D,
                                 betas=(0.9, 0.99))
    optimizer_d_main = optim.Adam(d_main.parameters(), lr=cfg.TRAIN.LEARNING_RATE_D,
                                  betas=(0.9, 0.99))
    # interpolate output segmaps
    interp = nn.Upsample(size=(input_size_source[1], input_size_source[0]), mode='bilinear',
                         align_corners=True)
    interp_target = nn.Upsample(size=(input_size_target[1], input_size_target[0]), mode='bilinear',
                                align_corners=True)
    # labels for adversarial training
    source_label = 0
    target_label = 1
    trainloader_iter = enumerate(trainloader)
    targetloader_iter = enumerate(targetloader)
    for i_iter in tqdm(range(cfg.TRAIN.EARLY_STOP + 1)):
        # reset optimizers
        optimizer.zero_grad()
        optimizer_d_aux.zero_grad()
        optimizer_d_main.zero_grad()
        # adapt LR if needed
        adjust_learning_rate(optimizer, i_iter, cfg)
        adjust_learning_rate_discriminator(optimizer_d_aux, i_iter, cfg)
        adjust_learning_rate_discriminator(optimizer_d_main, i_iter, cfg)
        # UDA Training
        # only train segnet. Don't accumulate grads in disciminators
        for param in d_aux.parameters():
            param.requires_grad = False
        for param in d_main.parameters():
            param.requires_grad = False
        # train on source
        _, batch = trainloader_iter.__next__()
        images_source, labels, _, _ = batch
        pred_src_main = model(images_source.cuda(device))
        if cfg.TRAIN.MULTI_LEVEL:
            # NOTE(review): pred_src_aux is undefined here -- see docstring.
            pred_src_aux = interp(pred_src_aux)
            loss_seg_src_aux = loss_calc(pred_src_aux, labels, device)
        else:
            loss_seg_src_aux = 0
        pred_src_main = interp(pred_src_main)
        loss_seg_src_main = loss_calc(pred_src_main, labels, device)
        loss = (cfg.TRAIN.LAMBDA_SEG_MAIN * loss_seg_src_main
                + cfg.TRAIN.LAMBDA_SEG_AUX * loss_seg_src_aux)
        loss.backward()
        # adversarial training ot fool the discriminator
        _, batch = targetloader_iter.__next__()
        images, _, _, _ = batch
        pred_trg_main = model(images.cuda(device))
        if cfg.TRAIN.MULTI_LEVEL:
            # NOTE(review): pred_trg_aux is undefined here -- see docstring.
            pred_trg_aux = interp_target(pred_trg_aux)
            d_out_aux = d_aux(F.softmax(pred_trg_aux))
            loss_adv_trg_aux = mse_loss(d_out_aux, source_label)
        else:
            loss_adv_trg_aux = 0
        pred_trg_main = interp_target(pred_trg_main)
        d_out_main = d_main(F.softmax(pred_trg_main))
        # Target predictions labeled as "source" to fool the discriminator.
        loss_adv_trg_main = mse_loss(d_out_main, source_label)
        loss = (cfg.TRAIN.LAMBDA_ADV_MAIN * loss_adv_trg_main
                + cfg.TRAIN.LAMBDA_ADV_AUX * loss_adv_trg_aux)
        loss = loss
        loss.backward()
        # Train discriminator networks
        # enable training mode on discriminator networks
        for param in d_aux.parameters():
            param.requires_grad = True
        for param in d_main.parameters():
            param.requires_grad = True
        # train with source (detached so segmenter gets no gradient)
        if cfg.TRAIN.MULTI_LEVEL:
            pred_src_aux = pred_src_aux.detach()
            d_out_aux = d_aux(F.softmax(pred_src_aux))
            loss_d_aux = mse_loss(d_out_aux, source_label)
            loss_d_aux = loss_d_aux / 2
            loss_d_aux.backward()
        pred_src_main = pred_src_main.detach()
        d_out_main = d_main(F.softmax(pred_src_main))
        loss_d_main = mse_loss(d_out_main, source_label)
        loss_d_main = loss_d_main / 2
        loss_d_main.backward()
        # train with target
        if cfg.TRAIN.MULTI_LEVEL:
            pred_trg_aux = pred_trg_aux.detach()
            d_out_aux = d_aux(F.softmax(pred_trg_aux))
            loss_d_aux = mse_loss(d_out_aux, target_label)
            loss_d_aux = loss_d_aux / 2
            loss_d_aux.backward()
        else:
            loss_d_aux = 0
        pred_trg_main = pred_trg_main.detach()
        d_out_main = d_main(F.softmax(pred_trg_main))
        loss_d_main = mse_loss(d_out_main, target_label)
        loss_d_main = loss_d_main / 2
        loss_d_main.backward()
        optimizer.step()
        if cfg.TRAIN.MULTI_LEVEL:
            optimizer_d_aux.step()
        optimizer_d_main.step()
        current_losses = {'loss_seg_src_aux': loss_seg_src_aux,
                          'loss_seg_src_main': loss_seg_src_main,
                          'loss_adv_trg_aux': loss_adv_trg_aux,
                          'loss_adv_trg_main': loss_adv_trg_main,
                          'loss_d_aux': loss_d_aux,
                          'loss_d_main': loss_d_main}
        print_losses(current_losses, i_iter)
        if i_iter % cfg.TRAIN.SAVE_PRED_EVERY == 0 and i_iter != 0:
            print('taking snapshot ...')
            print('exp =', cfg.TRAIN.SNAPSHOT_DIR)
            snapshot_dir = Path(cfg.TRAIN.SNAPSHOT_DIR)
            torch.save(model.state_dict(), snapshot_dir / f'model_{i_iter}.pth')
            torch.save(d_aux.state_dict(), snapshot_dir / f'model_{i_iter}_D_aux.pth')
            torch.save(d_main.state_dict(), snapshot_dir / f'model_{i_iter}_D_main.pth')
            if i_iter >= cfg.TRAIN.EARLY_STOP - 1:
                break
        sys.stdout.flush()
        # Visualize with tensorboard
        if viz_tensorboard:
            log_losses_tensorboard(writer, current_losses, i_iter)
            if i_iter % cfg.TRAIN.TENSORBOARD_VIZRATE == cfg.TRAIN.TENSORBOARD_VIZRATE - 1:
                draw_in_tensorboard(writer, images, i_iter, pred_trg_main, num_classes, 'T')
                draw_in_tensorboard(writer, images_source, i_iter, pred_src_main, num_classes, 'S')
def train_adaptseg_w_trans(model, trainloader, targetloader, cfg):
    '''AdaptSeg-style UDA training plus a consistency loss on augmented
    target images.

    Target batches carry an (image, augmented-image) pair; the adversarial
    loss is computed on the augmented view, and an MSE consistency loss
    pulls the clean-view softmax toward the (detached) augmented-view
    softmax.

    NOTE(review): `model` here returns (prediction, extra); the
    MULTI_LEVEL branches read `pred_src_aux` / `pred_trg_aux`, which are
    never assigned in this function -- a NameError if
    cfg.TRAIN.MULTI_LEVEL is True.  Confirm MULTI_LEVEL stays disabled
    for this entry point.

    Args:
        model: segmentation network returning (prediction map, features).
        trainloader: source loader yielding (images, labels, _, _).
        targetloader: target loader yielding (images, images_aug, _, _, _).
        cfg: config namespace (cfg.TRAIN.*, cfg.GPU_ID, cfg.NUM_CLASSES).
    '''
    # Create the model and start the training.
    input_size_source = cfg.TRAIN.INPUT_SIZE_SOURCE
    input_size_target = cfg.TRAIN.INPUT_SIZE_TARGET
    device = cfg.GPU_ID
    num_classes = cfg.NUM_CLASSES
    viz_tensorboard = os.path.exists(cfg.TRAIN.TENSORBOARD_LOGDIR)
    if viz_tensorboard:
        writer = SummaryWriter(log_dir=cfg.TRAIN.TENSORBOARD_LOGDIR)
    # SEGMNETATION NETWORK
    model.train()
    model.to(device)
    cudnn.benchmark = True
    cudnn.enabled = True
    # DISCRIMINATOR NETWORK
    # feature-level
    d_aux = get_fc_discriminator(num_classes=num_classes)
    d_aux.train()
    d_aux.to(device)
    # seg maps, i.e. output, level
    d_main = get_fc_discriminator(num_classes=num_classes)
    d_main.train()
    d_main.to(device)
    # OPTIMIZERS
    # segnet's optimizer
    optimizer = optim.SGD(model.optim_parameters(cfg.TRAIN.LEARNING_RATE),
                          lr=cfg.TRAIN.LEARNING_RATE,
                          momentum=cfg.TRAIN.MOMENTUM,
                          weight_decay=cfg.TRAIN.WEIGHT_DECAY)
    # discriminators' optimizers
    optimizer_d_aux = optim.Adam(d_aux.parameters(), lr=cfg.TRAIN.LEARNING_RATE_D,
                                 betas=(0.9, 0.99))
    optimizer_d_main = optim.Adam(d_main.parameters(), lr=cfg.TRAIN.LEARNING_RATE_D,
                                  betas=(0.9, 0.99))
    # interpolate output segmaps
    interp = nn.Upsample(size=(input_size_source[1], input_size_source[0]), mode='bilinear',
                         align_corners=True)
    interp_target = nn.Upsample(size=(input_size_target[1], input_size_target[0]), mode='bilinear',
                                align_corners=True)
    # labels for adversarial training
    source_label = 0
    target_label = 1
    trainloader_iter = enumerate(trainloader)
    targetloader_iter = enumerate(targetloader)
    # MSE used for the clean/augmented consistency loss below.
    criterion = nn.MSELoss()
    for i_iter in tqdm(range(cfg.TRAIN.EARLY_STOP + 1)):
        # reset optimizers
        optimizer.zero_grad()
        optimizer_d_aux.zero_grad()
        optimizer_d_main.zero_grad()
        # adapt LR if needed
        adjust_learning_rate(optimizer, i_iter, cfg)
        adjust_learning_rate_discriminator(optimizer_d_aux, i_iter, cfg)
        adjust_learning_rate_discriminator(optimizer_d_main, i_iter, cfg)
        # UDA Training
        # only train segnet. Don't accumulate grads in disciminators
        for param in d_aux.parameters():
            param.requires_grad = False
        for param in d_main.parameters():
            param.requires_grad = False
        # train on source
        _, batch = trainloader_iter.__next__()
        images_source, labels, _, _ = batch
        pred_src_main, _ = model(images_source.cuda(device))
        if cfg.TRAIN.MULTI_LEVEL:
            # NOTE(review): pred_src_aux is undefined here -- see docstring.
            pred_src_aux = interp(pred_src_aux)
            loss_seg_src_aux = loss_calc(pred_src_aux, labels, device)
        else:
            loss_seg_src_aux = 0
        pred_src_main = interp(pred_src_main)
        loss_seg_src_main = loss_calc(pred_src_main, labels, device)
        loss = (cfg.TRAIN.LAMBDA_SEG_MAIN * loss_seg_src_main
                + cfg.TRAIN.LAMBDA_SEG_AUX * loss_seg_src_aux)
        loss.backward()
        # adversarial training ot fool the discriminator
        _, batch = targetloader_iter.__next__()
        images, images_aug, _, _, _ = batch
        # Adversarial loss uses the augmented view; the clean view is kept
        # for the consistency term.
        pred_trg_main, _ = model(images_aug.cuda(device))
        pred_trg_main_real, _ = model(images.cuda(device))
        if cfg.TRAIN.MULTI_LEVEL:
            # NOTE(review): pred_trg_aux is undefined here -- see docstring.
            pred_trg_aux = interp_target(pred_trg_aux)
            d_out_aux = d_aux(F.softmax(pred_trg_aux))
            loss_adv_trg_aux = mse_loss(d_out_aux, source_label)
        else:
            loss_adv_trg_aux = 0
        pred_trg_main = interp_target(pred_trg_main)
        pred_trg_main_real = interp_target(pred_trg_main_real)
        d_out_main = d_main(F.softmax(pred_trg_main))
        loss_adv_trg_main = mse_loss(d_out_main, source_label)
        loss = (cfg.TRAIN.LAMBDA_ADV_MAIN * loss_adv_trg_main
                + cfg.TRAIN.LAMBDA_ADV_AUX * loss_adv_trg_aux)
        loss = loss
        loss.backward()
        # Train discriminator networks
        # enable training mode on discriminator networks
        for param in d_aux.parameters():
            param.requires_grad = True
        for param in d_main.parameters():
            param.requires_grad = True
        # train with source
        if cfg.TRAIN.MULTI_LEVEL:
            pred_src_aux = pred_src_aux.detach()
            d_out_aux = d_aux(F.softmax(pred_src_aux))
            loss_d_aux = mse_loss(d_out_aux, source_label)
            loss_d_aux = loss_d_aux / 2
            loss_d_aux.backward()
        pred_src_main = pred_src_main.detach()
        d_out_main = d_main(F.softmax(pred_src_main))
        loss_d_main = mse_loss(d_out_main, source_label)
        loss_d_main = loss_d_main / 2
        loss_d_main.backward()
        # train with target
        if cfg.TRAIN.MULTI_LEVEL:
            pred_trg_aux = pred_trg_aux.detach()
            d_out_aux = d_aux(F.softmax(pred_trg_aux))
            loss_d_aux = mse_loss(d_out_aux, target_label)
            loss_d_aux = loss_d_aux / 2
            loss_d_aux.backward()
        else:
            loss_d_aux = 0
        pred_trg_main = pred_trg_main.detach()
        d_out_main = d_main(F.softmax(pred_trg_main))
        loss_d_main = mse_loss(d_out_main, target_label)
        loss_d_main = loss_d_main / 2
        loss_d_main.backward()
        # import pdb
        # pdb.set_trace()
        # Consistency: clean-view softmax chases the detached augmented-view
        # softmax (weight 10).
        loss_consistency = 10 * criterion(F.softmax(pred_trg_main_real), F.softmax(pred_trg_main).detach())
        loss_consistency.backward()
        optimizer.step()
        if cfg.TRAIN.MULTI_LEVEL:
            optimizer_d_aux.step()
        optimizer_d_main.step()
        current_losses = {'loss_seg_src_aux': loss_seg_src_aux,
                          'loss_seg_src_main': loss_seg_src_main,
                          'loss_adv_trg_aux': loss_adv_trg_aux,
                          'loss_adv_trg_main': loss_adv_trg_main,
                          'loss_d_aux': loss_d_aux,
                          'loss_d_main': loss_d_main,
                          'loss_consistency': loss_consistency}
        print_losses(current_losses, i_iter)
        if i_iter % cfg.TRAIN.SAVE_PRED_EVERY == 0 and i_iter != 0:
            print('taking snapshot ...')
            print('exp =', cfg.TRAIN.SNAPSHOT_DIR)
            snapshot_dir = Path(cfg.TRAIN.SNAPSHOT_DIR)
            torch.save(model.state_dict(), snapshot_dir / f'model_{i_iter}.pth')
            torch.save(d_aux.state_dict(), snapshot_dir / f'model_{i_iter}_D_aux.pth')
            torch.save(d_main.state_dict(), snapshot_dir / f'model_{i_iter}_D_main.pth')
            if i_iter >= cfg.TRAIN.EARLY_STOP - 1:
                break
        sys.stdout.flush()
        # Visualize with tensorboard
        if viz_tensorboard:
            log_losses_tensorboard(writer, current_losses, i_iter)
            if i_iter % cfg.TRAIN.TENSORBOARD_VIZRATE == 0:
                draw_in_tensorboard_trans(writer, images_aug, images, i_iter, pred_trg_main, pred_trg_main_real, num_classes, 'T')
                draw_in_tensorboard(writer, images_source, i_iter, pred_src_main, num_classes, 'S')
def label_generator(pred_trg, cls_thresh_tot, cfg, i_iter, tot_iter):
    """Generate confidence-thresholded pseudo-labels for a target batch.

    Per-class confidence thresholds are estimated from the current
    predictions (keeping a fraction of confident pixels that decays over
    training), then blended into the running thresholds with an
    exponential moving average.  Pixels whose class-weighted confidence
    falls below 1 are assigned 255 (the ignore index).

    Args:
        pred_trg: raw segmentation logits; softmax is taken over dim 1,
            so shape is assumed (B, C, H, W) -- confirm with callers.
        cls_thresh_tot: running per-class thresholds, shape (C,); entries
            equal to 1.0 are treated as "not yet initialized".
        cfg: config namespace providing GPU_ID and NUM_CLASSES.
        i_iter: current training iteration (drives the keep-fraction decay).
        tot_iter: total number of iterations (decay horizon).

    Returns:
        (pseudo_labels, cls_thresh_tot): uint8 label map with 255 for
        rejected pixels, and the updated running per-class thresholds.
    """
    import math
    device = cfg.GPU_ID
    # Softmax over classes; argmax yields the tentative hard labels.
    output_main = F.softmax(pred_trg, dim=1)
    amax_output = torch.argmax(output_main, dim=1).type(torch.uint8)
    pred_label = amax_output.clone()
    conf, _ = torch.max(output_main, dim=1)
    # Collect a subsample (every 16th pixel) of confidences per class.
    conf_dict = {k: [] for k in range(cfg.NUM_CLASSES)}
    pred_cls_num = torch.zeros(cfg.NUM_CLASSES)
    for idx_cls in range(cfg.NUM_CLASSES):
        idx_temp = pred_label == idx_cls
        pred_cls_num[idx_cls] = pred_cls_num[idx_cls] + torch.sum(idx_temp)
        if idx_temp.any():
            conf_cls_temp = conf[idx_temp].type(torch.float32)
            len_cls_temp = len(conf_cls_temp)
            conf_cls = conf_cls_temp[0:len_cls_temp:16]
            conf_dict[idx_cls].extend(conf_cls)
    cls_thresh = torch.ones(cfg.NUM_CLASSES).type(torch.float32)
    cls_sel_size = torch.zeros(cfg.NUM_CLASSES).type(torch.float32)
    for idx_cls in range(cfg.NUM_CLASSES):
        # Identity comparison with None (the value is a list or None);
        # the original `!= None` equality test was a PEP 8 violation.
        if conf_dict[idx_cls] is not None:
            # Sort confidences in descending order.
            conf_dict[idx_cls], _ = torch.sort(
                torch.FloatTensor(conf_dict[idx_cls]), descending=True)
            len_cls = len(conf_dict[idx_cls])
            # Keep fraction shrinks as training progresses (sqrt decay,
            # starting from 20% of sampled pixels).
            iter_ratio = 1.0 - float(i_iter / (tot_iter + 1))
            coeff = 0.2 * (iter_ratio ** 0.5)
            cls_sel_size[idx_cls] = int(math.floor(len_cls * coeff))
            len_cls_thresh = int(cls_sel_size[idx_cls])
            if len_cls_thresh != 0:
                # Threshold = confidence of the last kept pixel.
                cls_thresh[idx_cls] = conf_dict[idx_cls][len_cls_thresh - 1]
            conf_dict[idx_cls] = None
    # EMA update of the running thresholds; 1.0 marks uninitialized entries.
    cls_thresh_tot_ = torch.where(
        cls_thresh_tot == 1.0, cls_thresh, 0.9 * cls_thresh_tot + 0.1 * cls_thresh)
    # Where no new threshold was found this round but the running one is
    # already initialized, keep the running value instead of decaying it.
    cls_thresh_mask = (cls_thresh == 1.0) * (cls_thresh_tot != 1.0)
    cls_thresh_tot = torch.where(cls_thresh_mask == 1.0, cls_thresh_tot, cls_thresh_tot_)
    # Re-label using class-balanced confidences; reject low-confidence pixels.
    weighted_prob = output_main / cls_thresh_tot.to(device).unsqueeze(0).unsqueeze(2).unsqueeze(3)
    weighted_pred_trainIDs = torch.argmax(weighted_prob, dim=1).type(torch.uint8)
    weighted_conf, _ = torch.max(weighted_prob, dim=1)
    weighted_pred_trainIDs[weighted_conf < 1] = 255
    return weighted_pred_trainIDs, cls_thresh_tot
def train_selfself(model, trainloader, targetloader, cfg):
    '''UDA self-training with a periodically refreshed "runner" model.

    A frozen deep copy of the model (`model_runner`) produces pseudo-labels
    for target batches via `label_generator`; the live model is trained on
    source ground truth, target pseudo-labels, a self-supervised contrastive
    term, and an output-space adversarial loss against reversed target
    images.

    Fixes relative to the original:
      * `if i_iter+1 % 500 == 0` parsed as `i_iter + (1 % 500)` and was
        never true, so the runner was never refreshed; parenthesized.
      * `label_generator` requires a `tot_iter` argument but was called
        with only four -- a TypeError on the first iteration.  We pass
        `len(targetloader)`, matching the sibling `train_self_domain_swarp`.

    NOTE(review): `classSimCLR` is not defined or imported in this file's
    visible portion -- confirm it is provided elsewhere in the module.

    Args:
        model: segmentation network returning (prediction map, features).
        trainloader: source loader yielding (images, labels, _, _).
        targetloader: target loader yielding
            (images, images_rev, _, _, name, name_next).
        cfg: config namespace (cfg.TRAIN.*, cfg.GPU_ID, cfg.NUM_CLASSES).
    '''
    # Create the model and start the training.
    input_size_source = cfg.TRAIN.INPUT_SIZE_SOURCE
    input_size_target = cfg.TRAIN.INPUT_SIZE_TARGET
    device = cfg.GPU_ID
    num_classes = cfg.NUM_CLASSES
    viz_tensorboard = os.path.exists(cfg.TRAIN.TENSORBOARD_LOGDIR)
    if viz_tensorboard:
        writer = SummaryWriter(log_dir=cfg.TRAIN.TENSORBOARD_LOGDIR)
    # SEGMNETATION NETWORK
    model.train()
    model.to(device)
    # Model clone: frozen pseudo-label generator, refreshed every 500 iters.
    model_runner = copy.deepcopy(model)
    model_runner.eval()
    model_runner.to(device)
    # Small projection head for the contrastive (SimCLR-style) loss.
    conv3x3_tgt = get_conv_abstract(cfg)
    conv3x3_tgt.train()
    conv3x3_tgt.to(device)
    d_main = get_fc_discriminator(num_classes=num_classes)
    d_main.train()
    d_main.to(device)
    tgt_dict_tot = {}
    cudnn.benchmark = True
    cudnn.enabled = True
    # OPTIMIZERS: segmenter and projection head share one SGD optimizer.
    params = list(model.parameters()) + list(conv3x3_tgt.parameters())
    optimizer = optim.SGD(params,
                          lr=cfg.TRAIN.LEARNING_RATE,
                          momentum=cfg.TRAIN.MOMENTUM,
                          weight_decay=cfg.TRAIN.WEIGHT_DECAY)
    # interpolate output segmaps
    interp = nn.Upsample(size=(input_size_source[1], input_size_source[0]), mode='bilinear',
                         align_corners=True)
    interp_target = nn.Upsample(size=(input_size_target[1], input_size_target[0]), mode='bilinear',
                                align_corners=True)
    # Running per-class pseudo-label thresholds (1.0 = uninitialized).
    cls_thresh = torch.ones(num_classes).type(torch.float32)
    optimizer_d_main = optim.Adam(d_main.parameters(), lr=cfg.TRAIN.LEARNING_RATE_D,
                                  betas=(0.9, 0.99))
    trainloader_iter = enumerate(trainloader)
    targetloader_iter = enumerate(targetloader)
    source_label = 0
    target_label = 1
    tot_iter = len(targetloader)
    for i_iter in tqdm(range(tot_iter)):
        # reset optimizers
        optimizer.zero_grad()
        optimizer_d_main.zero_grad()
        # adapt LR if needed
        adjust_learning_rate(optimizer, i_iter, cfg)
        adjust_learning_rate_discriminator(optimizer_d_main, i_iter, cfg)
        # train on source
        _, batch = trainloader_iter.__next__()
        images_source, labels, _, _ = batch
        pred_src_main, _ = model(images_source.cuda(device))
        pred_src_main = interp(pred_src_main)
        loss_seg_src_main = loss_calc(pred_src_main, labels, device)
        loss = cfg.TRAIN.LAMBDA_SEG_MAIN * loss_seg_src_main
        loss.backward()
        # train on target with pseudo-labels from the frozen runner
        _, batch = targetloader_iter.__next__()
        images, images_rev, _, _, name, name_next = batch
        pred_trg_main, feat_trg_main = model(images.cuda(device))
        pred_trg_main = interp_target(pred_trg_main)
        with torch.no_grad():
            pred_trg_main_run, feat_trg_main_run = model_runner(images.cuda(device))
            pred_trg_main_run = interp_target(pred_trg_main_run)
        ##### Label generator for target #####
        # BUGFIX: label_generator takes 5 positional args; tot_iter was
        # missing, which raised TypeError.  len(targetloader) matches the
        # usage in train_self_domain_swarp.
        label_trg, cls_thresh = label_generator(
            pred_trg_main_run, cls_thresh, cfg, i_iter, tot_iter)
        ##### CE loss for trg
        # MRKLD + Ign Region
        loss_seg_trg_main = reg_loss_calc_ign(pred_trg_main, label_trg, device)
        loss_tgt_seg = cfg.TRAIN.LAMBDA_SEG_MAIN * loss_seg_trg_main
        loss_tgt_selfsup, tgt_dict_tot = classSimCLR(feat_trg_main, label_trg, conv3x3_tgt, tgt_dict_tot, device)
        loss = loss_tgt_seg + 0.1 * loss_tgt_selfsup
        loss.backward()
        ##### Discriminator: fool it with reversed target images #####
        for param in d_main.parameters():
            param.requires_grad = False
        pred_trg_main_rev, _ = model(images_rev.cuda(device))
        pred_trg_main_rev = interp_target(pred_trg_main_rev)
        d_out_main = d_main(F.softmax(pred_trg_main_rev))
        loss_adv_trg_main = mse_loss(d_out_main, source_label)
        loss = cfg.TRAIN.LAMBDA_ADV_MAIN * loss_adv_trg_main
        loss.backward()
        # Train the discriminator on detached predictions.
        # NOTE(review): plain target predictions are labeled as *source*
        # here; only reversed-target gets the target label.  This looks
        # intentional for the reverse-image discriminator -- confirm.
        for param in d_main.parameters():
            param.requires_grad = True
        pred_src_main = pred_src_main.detach()
        d_out_main = d_main(F.softmax(pred_src_main))
        loss_d_main_src = mse_loss(d_out_main, source_label)
        loss_d_main = loss_d_main_src / 2
        loss_d_main.backward()
        pred_trg_main = pred_trg_main.detach()
        d_out_main = d_main(F.softmax(pred_trg_main))
        loss_d_main_trg = mse_loss(d_out_main, source_label)
        loss_d_main = loss_d_main_trg / 2
        loss_d_main.backward()
        pred_trg_main_rev = pred_trg_main_rev.detach()
        d_out_main = d_main(F.softmax(pred_trg_main_rev))
        loss_d_main_trg_rev = mse_loss(d_out_main, target_label)
        loss_d_main = loss_d_main_trg_rev / 2
        loss_d_main.backward()
        optimizer.step()
        optimizer_d_main.step()
        # BUGFIX: the original `if i_iter+1 % 500 == 0` parsed as
        # `i_iter + (1 % 500)`, which is never 0, so the runner was never
        # refreshed.  Refresh it every 500 iterations.
        if (i_iter + 1) % 500 == 0:
            # NOTE(review): the refreshed copy inherits train() mode,
            # unlike the initial eval() copy -- confirm that is intended.
            model_runner = copy.deepcopy(model)
        current_losses = {'loss_seg_trg_main': loss_seg_trg_main,
                          'loss_seg_src_main': loss_seg_src_main,
                          'loss_tgt_selfsup': loss_tgt_selfsup,
                          'loss_adv_trg_main': loss_adv_trg_main,
                          'loss_d_main_src': loss_d_main_src,
                          'loss_d_main_trg': loss_d_main_trg,
                          'loss_d_main_trg_rev': loss_d_main_trg_rev
                          }
        print_losses(current_losses, i_iter)
        if i_iter % cfg.TRAIN.SAVE_PRED_EVERY == 0 and i_iter != 0:
            print('taking snapshot ...')
            print('exp =', cfg.TRAIN.SNAPSHOT_DIR)
            snapshot_dir = Path(cfg.TRAIN.SNAPSHOT_DIR)
            torch.save(model.state_dict(), snapshot_dir / f'model_{i_iter}.pth')
            torch.save(model_runner.state_dict(), snapshot_dir / f'model_{i_iter}_run.pth')
            torch.save(d_main.state_dict(), snapshot_dir / f'model_{i_iter}_D.pth')
            if i_iter >= cfg.TRAIN.EARLY_STOP - 1:
                break
        sys.stdout.flush()
        # Visualize with tensorboard
        if viz_tensorboard:
            log_losses_tensorboard(writer, current_losses, i_iter)
            if i_iter % cfg.TRAIN.TENSORBOARD_VIZRATE == 0:
                draw_in_tensorboard(writer, images, i_iter, pred_trg_main, num_classes, 'T')
#TODO: self-training here !!!
def train_self_domain_swarp(model, trainloader, targetloader, cfg):
    ''' UDA training with advent

    Self-training with domain swarping: the segmentation model is trained on
    source labels plus pseudo-labels on target images; target features are
    additionally replaced by running per-class prototypes ("swarped") and
    trained against the same pseudo-labels.
    '''
    # Create the model and start the training.
    input_size_source = cfg.TRAIN.INPUT_SIZE_SOURCE
    input_size_target = cfg.TRAIN.INPUT_SIZE_TARGET
    device = cfg.GPU_ID
    num_classes = cfg.NUM_CLASSES
    viz_tensorboard = os.path.exists(cfg.TRAIN.TENSORBOARD_LOGDIR)
    if viz_tensorboard:
        writer = SummaryWriter(log_dir=cfg.TRAIN.TENSORBOARD_LOGDIR)
    # SEGMNETATION NETWORK
    model.train()
    model.to(device)
    # Model clone: frozen (eval-mode) copy used only to generate pseudo-labels.
    # NOTE(review): model_runner is never re-synchronized with `model` inside
    # this loop -- confirm a fixed pseudo-label generator is intended.
    model_runner = copy.deepcopy(model)
    model_runner.eval()
    model_runner.to(device)
    # conv3x3_tgt = get_conv_abstract(cfg)
    # conv3x3_tgt.train()
    # conv3x3_tgt.to(device)
    # d_main = get_fc_discriminator(num_classes=num_classes)
    # d_main.train()
    # d_main.to(device)
    # Running per-class feature prototypes, updated in-place by DomainSwarping.
    tgt_dict_tot = {}
    cudnn.benchmark = True
    cudnn.enabled = True
    # OPTIMIZERS
    # params = list(model.parameters()) + list(conv3x3_tgt.parameters())
    optimizer = optim.SGD(model.parameters(),
                          lr=cfg.TRAIN.LEARNING_RATE,
                          momentum=cfg.TRAIN.MOMENTUM,
                          weight_decay=cfg.TRAIN.WEIGHT_DECAY)
    # interpolate output segmaps
    interp = nn.Upsample(size=(input_size_source[1], input_size_source[0]), mode='bilinear',
                         align_corners=True)
    interp_target = nn.Upsample(size=(input_size_target[1], input_size_target[0]), mode='bilinear',
                                align_corners=True)
    # Per-class confidence thresholds for pseudo-label selection; adapted
    # over the run by label_generator.
    cls_thresh = torch.ones(num_classes).type(torch.float32)
    # optimizer_d_main = optim.Adam(d_main.parameters(), lr=cfg.TRAIN.LEARNING_RATE_D,
    #                               betas=(0.9, 0.99))
    # for round in range(3):
    trainloader_iter = enumerate(trainloader)
    targetloader_iter = enumerate(targetloader)
    source_label = 0
    target_label = 1
    tot_iter = len(targetloader)
    for i_iter in tqdm(range(tot_iter)):
        # reset optimizers
        optimizer.zero_grad()
        # optimizer_d_main.zero_grad()
        # adapt LR if needed
        adjust_learning_rate(optimizer, i_iter, cfg)
        # adjust_learning_rate_discriminator(optimizer_d_main, i_iter, cfg)
        # train on source
        _, batch = trainloader_iter.__next__()
        images_source, labels, _, _ = batch
        pred_src_main, _ = model(images_source.cuda(device))
        pred_src_main = interp(pred_src_main)
        loss_seg_src_main = loss_calc(pred_src_main, labels, device)
        loss = cfg.TRAIN.LAMBDA_SEG_MAIN * loss_seg_src_main
        loss.backward()
        # adversarial training ot fool the discriminator
        _, batch = targetloader_iter.__next__()
        images, images_rev, _, _, name, name_next = batch
        pred_trg_main, feat_trg_main = model(images.cuda(device))
        pred_trg_main = interp_target(pred_trg_main)
        # Pseudo-label source: the frozen runner model, without gradients.
        with torch.no_grad():
            pred_trg_main_run, feat_trg_main_run = model_runner(images.cuda(device))
            pred_trg_main_run = interp_target(pred_trg_main_run)
        ##### Label generator for target #####
        label_trg, cls_thresh = label_generator(pred_trg_main_run, cls_thresh, cfg, i_iter, tot_iter)
        ##### CE loss for trg
        # MRKLD + Ign Region
        loss_seg_trg_main = reg_loss_calc_ign(pred_trg_main, label_trg, device)
        loss_tgt_seg = cfg.TRAIN.LAMBDA_SEG_MAIN * loss_seg_trg_main
        ##### Domain swarping ####
        feat_tgt_swarped, tgt_dict_tot, tgt_label = DomainSwarping(feat_trg_main, label_trg, tgt_dict_tot, device)
        # Keep the original features wherever the pseudo-label is ignored (255).
        ignore_mask = tgt_label == 255
        feat_tgt_swarped = ~ignore_mask*feat_tgt_swarped + ignore_mask*feat_trg_main
        # Classify the swarped features and supervise with the same pseudo-labels.
        pred_tgt_swarped = model.classifier_(feat_tgt_swarped)
        pred_tgt_swarped = interp_target(pred_tgt_swarped)
        loss_seg_trg_swarped = reg_loss_calc_ign(pred_tgt_swarped, label_trg, device)
        loss_tgt_seg_swarped = cfg.TRAIN.LAMBDA_SEG_MAIN * loss_seg_trg_swarped
        loss_tgt = loss_tgt_seg + loss_tgt_seg_swarped
        loss_tgt.backward()
        optimizer.step()
        current_losses = {'loss_seg_trg_main': loss_seg_trg_main,
                          'loss_seg_src_main': loss_seg_src_main,
                          'loss_seg_trg_swarped': loss_seg_trg_swarped
                          }
        print_losses(current_losses, i_iter)
        if i_iter % cfg.TRAIN.SAVE_PRED_EVERY == 0 and i_iter != 0:
            print('taking snapshot ...')
            print('exp =', cfg.TRAIN.SNAPSHOT_DIR)
            snapshot_dir = Path(cfg.TRAIN.SNAPSHOT_DIR)
            torch.save(model.state_dict(), snapshot_dir / f'model_{i_iter}.pth')
            torch.save(model_runner.state_dict(), snapshot_dir / f'model_{i_iter}_run.pth')
            if i_iter >= cfg.TRAIN.EARLY_STOP - 1:
                break
        sys.stdout.flush()
        # Visualize with tensorboard
        if viz_tensorboard:
            log_losses_tensorboard(writer, current_losses, i_iter)
            if i_iter % cfg.TRAIN.TENSORBOARD_VIZRATE == 0:
                # draw_in_tensorboard_trg(writer, images, images_rev, label_trg, i_iter, pred_trg_main, pred_trg_main_rev, num_classes, 'T')
                draw_in_tensorboard(writer, images, label_trg, i_iter, pred_trg_main, pred_tgt_swarped, num_classes, 'T')
                # draw_in_tensorboard(writer, images_source, i_iter, pred_src_main, num_classes, 'S')
def classSimCLR(tgt_feat_warped_cat, tgt_label, conv3x3_tgt, tgt_dict_tot, device):
    """Class-wise SimCLR-style contrastive loss between per-class average
    features of the current batch and their running (EMA) class prototypes.

    Args:
        tgt_feat_warped_cat: feature map of shape (N, C, H, W).
        tgt_label: pseudo-label map (255 = ignore); downsampled with nearest
            interpolation to the feature resolution.
        conv3x3_tgt: projection module applied to the features before pooling.
        tgt_dict_tot: dict {class_id: prototype of shape (1, C', 1, 1)},
            updated in place with an EMA (0.99 * old + 0.01 * new).
        device: device the concatenated embeddings are moved to.

    Returns:
        (cls_sim_loss, tgt_dict_tot); the loss falls back to 0 when it cannot
        be computed (e.g. only one class present in the batch).
    """
    tgt_feat_warped_cat_abs = conv3x3_tgt(tgt_feat_warped_cat)
    ##### class-wise SimCLR #####
    tgt_label = F.interpolate(tgt_label.unsqueeze(0).float(),
                              (tgt_feat_warped_cat.size(2), tgt_feat_warped_cat.size(3)),
                              mode='nearest')
    tgt_label = tgt_label.long()
    tgt_dict = {}
    tgt_dict_tot_temp = {}
    pool = nn.AdaptiveAvgPool2d(1)
    for label_ele in torch.unique(tgt_label).tolist():
        if label_ele == 255:  # ignore index
            continue
        cls_mask = tgt_label == label_ele
        masked_tgt = cls_mask * tgt_feat_warped_cat_abs
        # Masked spatial mean: plain average rescaled by H*W / #class pixels.
        avg_masked_tgt = pool(masked_tgt) * (cls_mask.size(2) * cls_mask.size(3) / cls_mask.sum())
        tgt_dict[label_ele] = avg_masked_tgt
        if label_ele not in tgt_dict_tot:
            tgt_dict_tot[label_ele] = tgt_dict[label_ele]
        # Snapshot the prototype *before* the EMA update; it is the (detached)
        # positive target for the contrastive loss below.  (The original
        # guarded the EMA with `if label_ele in tgt_dict_tot`, which is always
        # true at this point -- the guard was redundant and is removed.)
        tgt_dict_tot_temp[label_ele] = tgt_dict_tot[label_ele]
        tgt_dict_tot[label_ele] = 0.99 * tgt_dict_tot[label_ele] + 0.01 * tgt_dict[label_ele]
    try:
        tgt_list = [value for _, value in sorted(tgt_dict.items())]
        tgt_cat = torch.cat(tgt_list, dim=0).squeeze().to(device)
        tgt_cat = F.normalize(tgt_cat, dim=1)
        tgt_tot_temp_list = [value for _, value in sorted(tgt_dict_tot_temp.items())]
        tgt_dict_temp_cat = torch.cat(tgt_tot_temp_list, dim=0).squeeze().to(device)
        tgt_dict_temp_cat = F.normalize(tgt_dict_temp_cat, dim=1)
        batch_size = tgt_dict_temp_cat.size(0)
        simloss_xent = NTXentLoss(device, batch_size=batch_size, temperature=0.5, use_cosine_similarity=True)
        cls_sim_loss = simloss_xent(tgt_dict_temp_cat.detach(), tgt_cat)
    except Exception:
        # BUG FIX: was a bare `except:` which also swallowed SystemExit /
        # KeyboardInterrupt.  A single class (squeeze -> 1-D tensor) or an
        # empty class set makes the normalize/NTXent step fail; fall back
        # to a zero loss in that case.
        cls_sim_loss = 0
    return cls_sim_loss, tgt_dict_tot
def DomainSwarping(tgt_feat_warped_cat, tgt_label, tgt_dict_tot, device):
    """Replace each class region of the feature map with the running class
    prototype ("domain swarping") and update the prototypes with a randomly
    weighted moving average.

    Args:
        tgt_feat_warped_cat: feature map of shape (N, C, H, W).
        tgt_label: pseudo-label map (255 = ignore), downsampled (nearest) to
            the feature resolution.
        tgt_dict_tot: dict {class_id: prototype (1, C, 1, 1)}; updated in place.
        device: unused here (kept for interface compatibility).

    Returns:
        (swarped feature map, updated prototype dict, downsampled label map).
    """
    # Random EMA weight in {0.1, ..., 0.9} for the prototype update.
    alpha = random.choice([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9])
    feat_h, feat_w = tgt_feat_warped_cat.size(2), tgt_feat_warped_cat.size(3)
    tgt_label = F.interpolate(tgt_label.unsqueeze(0).float(), (feat_h, feat_w), mode='nearest').long()
    pool = nn.AdaptiveAvgPool2d(1)
    swarped = 0
    for cls_id in torch.unique(tgt_label).tolist():
        if cls_id == 255:  # ignore label
            continue
        region = tgt_label == cls_id
        # Masked spatial mean: plain average rescaled by H*W / #class pixels.
        cls_avg = pool(region * tgt_feat_warped_cat) * (region.size(2) * region.size(3) / region.sum())
        if cls_id not in tgt_dict_tot:
            print('new class info inserted')
            tgt_dict_tot[cls_id] = cls_avg
        # Paste the *previous* prototype into this class's region ...
        swarped = swarped + region * tgt_dict_tot[cls_id]
        # ... then move the prototype toward the current batch average.
        tgt_dict_tot[cls_id] = (alpha * tgt_dict_tot[cls_id] + (1 - alpha) * cls_avg).detach()
    return swarped, tgt_dict_tot, tgt_label
def draw_in_tensorboard_trg(writer, images, images_rev, label_trg, i_iter, pred_main, pred_trg_main_rev, num_classes, type_):
    """Log target images (plain and reversed), the colorized argmax of both
    predictions, and the pseudo-labels to tensorboard.

    ``pred_main`` / ``pred_trg_main_rev`` are logits of shape (N, C, H, W);
    only the first sample of each batch is colorized.
    """
    grid_image = make_grid(images[:3].clone().cpu().data, 3, normalize=True)
    writer.add_image(f'Image - {type_}', grid_image, i_iter)
    grid_image = make_grid(images_rev[:3].clone().cpu().data, 3, normalize=True)
    writer.add_image(f'images_rev - {type_}', grid_image, i_iter)
    # BUG FIX: softmax dim made explicit (dim=1, the class axis); the implicit
    # dim is deprecated and only resolved to 1 by a legacy fallback.
    grid_image = make_grid(torch.from_numpy(np.array(colorize_mask(np.asarray(
        np.argmax(F.softmax(pred_main, dim=1).cpu().data[0].numpy().transpose(1, 2, 0),
                  axis=2), dtype=np.uint8)).convert('RGB')).transpose(2, 0, 1)), 3,
        normalize=False, range=(0, 255))
    writer.add_image(f'Prediction - {type_}', grid_image, i_iter)
    grid_image = make_grid(torch.from_numpy(np.array(colorize_mask(np.asarray(
        np.argmax(F.softmax(pred_trg_main_rev, dim=1).cpu().data[0].numpy().transpose(1, 2, 0),
                  axis=2), dtype=np.uint8)).convert('RGB')).transpose(2, 0, 1)), 3,
        normalize=False, range=(0, 255))
    writer.add_image(f'Prediction_rev - {type_}', grid_image, i_iter)
    grid_image = make_grid(torch.from_numpy(np.array(colorize_mask(np.asarray(
        label_trg.cpu().squeeze(), dtype=np.uint8)).convert('RGB')).transpose(2, 0, 1)), 3,
        normalize=False, range=(0, 255))
    writer.add_image(f'Labels_IAST - {type_}', grid_image, i_iter)
def draw_in_tensorboard(writer, images, label_trg, i_iter, pred_main, pred_main_swarp, num_classes, type_):
    """Log target images, the colorized argmax of the plain and swarped
    predictions (concatenated side by side), and the pseudo-labels.

    ``pred_main`` / ``pred_main_swarp`` are logits of shape (N, C, H, W);
    only the first sample of the batch is colorized.
    """
    grid_image = make_grid(images[:3].clone().cpu().data, 3, normalize=True)
    writer.add_image(f'Image - {type_}', grid_image, i_iter)
    # Concatenate along the width so both predictions share one panel.
    pred_main_cat = torch.cat((pred_main, pred_main_swarp), dim=-1)
    # BUG FIX: softmax dim made explicit (dim=1, the class axis); the implicit
    # dim is deprecated and only resolved to 1 by a legacy fallback.
    grid_image = make_grid(torch.from_numpy(np.array(colorize_mask(np.asarray(
        np.argmax(F.softmax(pred_main_cat, dim=1).cpu().data[0].numpy().transpose(1, 2, 0),
                  axis=2), dtype=np.uint8)).convert('RGB')).transpose(2, 0, 1)), 3,
        normalize=False, range=(0, 255))
    writer.add_image(f'Prediction_main_swarp - {type_}', grid_image, i_iter)
    grid_image = make_grid(torch.from_numpy(np.array(colorize_mask(np.asarray(
        label_trg.cpu().squeeze(), dtype=np.uint8)).convert('RGB')).transpose(2, 0, 1)), 3,
        normalize=False, range=(0, 255))
    writer.add_image(f'Labels_IAST - {type_}', grid_image, i_iter)
def draw_in_tensorboard_trans(writer, images, images_real, i_iter, pred_main, pred_main_real, num_classes, type_):
    """Log translated and real images plus the colorized argmax of their
    respective predictions to tensorboard.

    ``pred_main`` / ``pred_main_real`` are logits of shape (N, C, H, W);
    only the first sample of each batch is colorized.
    """
    grid_image = make_grid(images[:3].clone().cpu().data, 3, normalize=True)
    writer.add_image(f'Image - {type_}', grid_image, i_iter)
    grid_image = make_grid(images_real[:3].clone().cpu().data, 3, normalize=True)
    writer.add_image(f'Image_real - {type_}', grid_image, i_iter)
    # BUG FIX: softmax dim made explicit (dim=1, the class axis); the implicit
    # dim is deprecated and only resolved to 1 by a legacy fallback.
    grid_image = make_grid(torch.from_numpy(np.array(colorize_mask(np.asarray(
        np.argmax(F.softmax(pred_main, dim=1).cpu().data[0].numpy().transpose(1, 2, 0),
                  axis=2), dtype=np.uint8)).convert('RGB')).transpose(2, 0, 1)), 3,
        normalize=False, range=(0, 255))
    writer.add_image(f'Prediction - {type_}', grid_image, i_iter)
    grid_image = make_grid(torch.from_numpy(np.array(colorize_mask(np.asarray(
        np.argmax(F.softmax(pred_main_real, dim=1).cpu().data[0].numpy().transpose(1, 2, 0),
                  axis=2), dtype=np.uint8)).convert('RGB')).transpose(2, 0, 1)), 3,
        normalize=False, range=(0, 255))
    writer.add_image(f'Prediction_real - {type_}', grid_image, i_iter)
def print_losses(current_losses, i_iter):
    """Pretty-print the current loss values for one iteration via tqdm."""
    formatted = [f'{name} = {to_numpy(value):.3f} '
                 for name, value in current_losses.items()]
    tqdm.write(f'iter = {i_iter} ' + ' '.join(formatted))
def log_losses_tensorboard(writer, current_losses, i_iter):
    """Write every entry of ``current_losses`` as a tensorboard scalar."""
    for name in current_losses:
        writer.add_scalar(f'data/{name}', to_numpy(current_losses[name]), i_iter)
def to_numpy(tensor):
    """Convert a torch tensor (or a plain int/float) to a numpy value.

    Plain numbers are returned unchanged so loss dicts may mix tensors with
    constants (e.g. a contrastive loss that fell back to 0).
    """
    if isinstance(tensor, (int, float)):
        return tensor
    # detach() is the supported replacement for the legacy .data attribute.
    return tensor.detach().cpu().numpy()
def train_domain_adaptation(model, trainloader, targetloader, cfg):
    """Dispatch to the training routine selected by ``cfg.TRAIN.DA_METHOD``.

    Raises NotImplementedError for an unknown method name.
    """
    method = cfg.TRAIN.DA_METHOD
    if method == 'AdvEnt':
        train_advent(model, trainloader, targetloader, cfg)
    elif method == 'AdaptSeg':
        train_adaptseg(model, trainloader, targetloader, cfg)
    elif method == 'AdaptSeg_w_trans':
        train_adaptseg_w_trans(model, trainloader, targetloader, cfg)
    elif method == 'self_domain_swarp':
        train_self_domain_swarp(model, trainloader, targetloader, cfg)
    else:
        raise NotImplementedError(f"Not yet supported DA method {method}")
| 41.650301
| 162
| 0.631421
| 6,544
| 48,356
| 4.280868
| 0.055318
| 0.033126
| 0.02513
| 0.009745
| 0.874099
| 0.846969
| 0.827194
| 0.812844
| 0.794603
| 0.781574
| 0
| 0.01153
| 0.269998
| 48,356
| 1,160
| 163
| 41.686207
| 0.782068
| 0.111403
| 0
| 0.74359
| 0
| 0
| 0.034737
| 0.006231
| 0
| 0
| 0
| 0.000862
| 0
| 1
| 0.019231
| false
| 0
| 0.030769
| 0
| 0.05641
| 0.021795
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5f6cafd5555cb1380e6d3a4f49101b98fbdab681
| 66,294
|
py
|
Python
|
tests/test_job.py
|
xadams/signac
|
e53930db1897b6b7c59334a672fff4d00bcb8ac5
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_job.py
|
xadams/signac
|
e53930db1897b6b7c59334a672fff4d00bcb8ac5
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_job.py
|
xadams/signac
|
e53930db1897b6b7c59334a672fff4d00bcb8ac5
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2018 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the BSD 3-Clause License.
from __future__ import absolute_import
import unittest
import os
import io
import warnings
import logging
import uuid
import copy
import random
import json
from contextlib import contextmanager
import signac.contrib
import signac.common.config
from signac.common import six
from signac.errors import DestinationExistsError
from signac.errors import JobsCorruptedError
from signac.errors import InvalidKeyError
if six.PY2:
from tempdir import TemporaryDirectory
else:
from tempfile import TemporaryDirectory
try:
import h5py # noqa
H5PY = True
except ImportError:
H5PY = False
# Make sure the jobs created for this test are unique.
test_token = {'test_token': str(uuid.uuid4())}

# Surface DeprecationWarnings raised by signac itself as errors, while
# keeping the pending "Cache API" deprecation silent.
warnings.simplefilter('default')
warnings.filterwarnings('error', category=DeprecationWarning, module='signac')
warnings.filterwarnings(
    'ignore', category=PendingDeprecationWarning, message=r'.*Cache API.*')
# Pairs of (statepoint, expected job id) covering the JSON builtin types.
BUILTINS = [
    ({'e': [1.0, '1.0', 1, True]}, '4d8058a305b940005be419b30e99bb53'),
    ({'d': True}, '33cf9999de25a715a56339c6c1b28b41'),
    ({'f': (1.0, '1.0', 1, True)}, 'e998db9b595e170bdff936f88ccdbf75'),
    ({'a': 1}, '42b7b4f2921788ea14dac5566e6f06d0'),
    ({'c': '1.0'}, '80fa45716dd3b83fa970877489beb42e'),
    ({'b': 1.0}, '0ba6c5a46111313f11c41a6642520451'),
]


def builtins_dict():
    """Merge all BUILTINS statepoints into one dict in a random order.

    BUG FIX: shuffles a *copy* so the module-level BUILTINS list is no longer
    mutated as a side effect; the merged result is order-independent, which is
    exactly what the shuffling is meant to exercise.
    """
    entries = list(BUILTINS)
    random.shuffle(entries)
    merged = dict()
    for statepoint, _ in entries:
        merged.update(statepoint)
    return merged
# Known job ids for the merged builtins statepoint and its nested variant.
BUILTINS_HASH = '7a80b58db53bbc544fc27fcaaba2ce44'
NESTED_HASH = 'bd6f5828f4410b665bffcec46abeb8f3'


def config_from_cfg(cfg):
    """Build a signac config object from an iterable of config-file lines."""
    buffer = io.StringIO('\n'.join(cfg))
    return signac.common.config.get_config(buffer)
def testdata():
    """Return a fresh random string (a UUID4) usable as a unique test value."""
    token = uuid.uuid4()
    return str(token)
class BaseJobTest(unittest.TestCase):
    """Common fixture: a temporary signac project with its own workspace."""

    project_class = signac.Project

    def setUp(self):
        # Fresh temporary project and workspace dirs per test; the cleanup
        # is registered so it also runs when setUp itself fails later on.
        self._tmp_dir = TemporaryDirectory(prefix='signac_')
        self.addCleanup(self._tmp_dir.cleanup)
        self._tmp_pr = os.path.join(self._tmp_dir.name, 'pr')
        self._tmp_wd = os.path.join(self._tmp_dir.name, 'wd')
        os.mkdir(self._tmp_pr)
        self.config = signac.common.config.load_config()
        self.project = self.project_class.init_project(
            name='testing_test_project',
            root=self._tmp_pr,
            workspace=self._tmp_wd)
        self.project.config['default_host'] = 'testing'

    def tearDown(self):
        pass

    def open_job(self, *args, **kwargs):
        """Open a job on this fixture's project."""
        project = self.project
        return project.open_job(*args, **kwargs)

    @classmethod
    def nested_dict(cls):
        """Return a builtins statepoint with one nested level under 'g'.

        BUG FIX: the first parameter of a classmethod receives the class, so
        it is named ``cls`` (it was misleadingly named ``self``).
        """
        d = dict(builtins_dict())
        d['g'] = builtins_dict()
        return d
class JobIDTest(BaseJobTest):
    """Job ids are deterministic functions of the statepoint content."""

    def test_builtins(self):
        """Every builtin statepoint hashes to its known id."""
        for statepoint, expected_id in BUILTINS:
            self.assertEqual(str(self.project.open_job(statepoint)), expected_id)
        combined = builtins_dict()
        self.assertEqual(str(self.project.open_job(combined)), BUILTINS_HASH)

    def test_shuffle(self):
        """The id does not depend on the statepoint key insertion order."""
        for _ in range(10):
            self.assertEqual(
                str(self.project.open_job(builtins_dict())), BUILTINS_HASH)

    def test_nested(self):
        """Nested statepoints also hash deterministically."""
        for _ in range(10):
            self.assertEqual(
                str(self.project.open_job(self.nested_dict())), NESTED_HASH)

    def test_sequences_identity(self):
        """Lists and tuples with equal content yield the same job."""
        job_from_list = self.project.open_job({'a': [1.0, '1.0', 1, True]})
        job_from_tuple = self.project.open_job({'a': (1.0, '1.0', 1, True)})
        self.assertEqual(str(job_from_list), str(job_from_tuple))
        self.assertEqual(job_from_list.statepoint(), job_from_tuple.statepoint())
class JobTest(BaseJobTest):
    """Basic job object behavior: repr, str, and workspace file membership."""

    def test_repr(self):
        """Jobs with identical statepoints share a repr and compare equal."""
        first = self.project.open_job({'a': 0})
        second = self.project.open_job({'a': 0})
        self.assertEqual(repr(first), repr(second))
        self.assertEqual(first, second)

    def test_str(self):
        """str(job) is the job id."""
        job = self.project.open_job({'a': 0})
        self.assertEqual(str(job), job.get_id())

    def test_isfile(self):
        """isfile() reports only files that actually exist in the workspace."""
        job = self.project.open_job({'a': 0})
        fn = 'test.txt'
        target_path = os.path.join(job.workspace(), fn)
        self.assertFalse(job.isfile(fn))
        job.init()
        self.assertFalse(job.isfile(fn))
        with open(target_path, 'w') as handle:
            handle.write('hello')
        self.assertTrue(job.isfile(fn))
class JobSPInterfaceTest(BaseJobTest):
    """Tests for the job.sp statepoint attribute/mapping interface."""

    def test_interface_read_only(self):
        """Read access works via attribute, item, and get() with defaults."""
        sp = self.nested_dict()
        job = self.open_job(sp)
        # Round-trip through JSON to normalize types (e.g. tuples -> lists).
        self.assertEqual(job.statepoint(), json.loads(json.dumps(sp)))
        for x in ('a', 'b', 'c', 'd', 'e'):
            self.assertEqual(getattr(job.sp, x), sp[x])
            self.assertEqual(job.sp[x], sp[x])
        for x in ('a', 'b', 'c', 'd', 'e'):
            self.assertEqual(getattr(job.sp.g, x), sp['g'][x])
            self.assertEqual(job.sp[x], sp[x])
        for x in ('a', 'b', 'c', 'd', 'e'):
            self.assertEqual(job.sp.get(x), sp[x])
            self.assertEqual(job.sp.get(x), sp[x])
            self.assertEqual(job.sp.g.get(x), sp['g'][x])
        self.assertIsNone(job.sp.get('not_in_sp'))
        self.assertIsNone(job.sp.g.get('not_in_sp'))
        self.assertIsNone(job.sp.get('not_in_sp', None))
        self.assertIsNone(job.sp.g.get('not_in_sp', None))
        self.assertEqual(job.sp.get('not_in_sp', 23), 23)
        self.assertEqual(job.sp.g.get('not_in_sp', 23), 23)

    def test_interface_contains(self):
        """`in` works on both the top level and nested statepoint mappings."""
        sp = self.nested_dict()
        job = self.open_job(sp)
        for x in ('a', 'b', 'c', 'd', 'e'):
            self.assertIn(x, job.sp)
            self.assertIn(x, job.sp.g)

    def test_interface_read_write(self):
        """Attribute assignment accepts all JSON-encodable value types."""
        sp = self.nested_dict()
        job = self.open_job(sp)
        job.init()
        for x in ('a', 'b', 'c', 'd', 'e'):
            self.assertEqual(getattr(job.sp, x), sp[x])
            self.assertEqual(job.sp[x], sp[x])
        for x in ('a', 'b', 'c', 'd', 'e'):
            self.assertEqual(getattr(job.sp.g, x), sp['g'][x])
            self.assertEqual(job.sp[x], sp[x])
        # Values to write back: scalars, a nested list, and a tuple.
        a = [1, 1.0, '1.0', True, None]
        b = list(a) + [a] + [tuple(a)]
        for v in b:
            for x in ('a', 'b', 'c', 'd', 'e'):
                setattr(job.sp, x, v)
                self.assertEqual(getattr(job.sp, x), v)
                setattr(job.sp.g, x, v)
                self.assertEqual(getattr(job.sp.g, x), v)

    def test_interface_job_identity_change(self):
        """Modifying the statepoint changes the job id."""
        job = self.open_job({'a': 0})
        old_id = job.get_id()
        job.sp.a = 1
        self.assertNotEqual(old_id, job.get_id())

    def test_interface_nested_kws(self):
        """Dotted keys are rejected; real nesting is accessible both ways."""
        with self.assertRaises(InvalidKeyError):
            job = self.open_job({'a.b.c': 0})
        job = self.open_job(dict(a=dict(b=dict(c=2))))
        self.assertEqual(job.sp.a.b.c, 2)
        self.assertEqual(job.sp['a']['b']['c'], 2)

    def test_interface_lists(self):
        """In-place list mutation is tracked and changes the job id."""
        job = self.open_job({'a': [1, 2, 3]})
        self.assertEqual(job.sp.a, [1, 2, 3])
        old_id = job.get_id()
        job.sp.a.append(4)
        self.assertEqual(job.sp.a, [1, 2, 3, 4])
        self.assertNotEqual(old_id, job.get_id())

    def test_interface_reserved_keywords(self):
        """Python keywords / dict method names still work as mapping keys."""
        job = self.open_job({'with': 0, 'pop': 1})
        self.assertEqual(job.sp['with'], 0)
        self.assertEqual(job.sp['pop'], 1)
        self.assertEqual(job.sp.pop('with'), 0)
        self.assertNotIn('with', job.sp)

    def test_interface_illegal_type(self):
        """Assigning a non-JSON-encodable object raises TypeError."""
        job = self.open_job(dict(a=0))
        self.assertEqual(job.sp.a, 0)
        class Foo(object):
            pass
        with self.assertRaises(TypeError):
            job.sp.a = Foo()

    def test_interface_rename(self):
        """A key can be renamed via pop() + reassignment."""
        job = self.open_job(dict(a=0))
        job.init()
        self.assertEqual(job.sp.a, 0)
        job.sp.b = job.sp.pop('a')
        self.assertNotIn('a', job.sp)
        self.assertEqual(job.sp.b, 0)

    def test_interface_add(self):
        """Missing attributes raise; assignment creates them."""
        job = self.open_job(dict(a=0))
        job.init()
        with self.assertRaises(AttributeError):
            job.sp.b
        job.sp.b = 1
        self.assertIn('b', job.sp)
        self.assertEqual(job.sp.b, 1)

    def test_interface_delete(self):
        """Keys can be removed with del by item or by attribute."""
        job = self.open_job(dict(a=0, b=0))
        job.init()
        self.assertIn('b', job.sp)
        self.assertEqual(job.sp.b, 0)
        del job.sp['b']
        self.assertNotIn('b', job.sp)
        with self.assertRaises(AttributeError):
            job.sp.b
        job.sp.b = 0
        self.assertIn('b', job.sp)
        self.assertEqual(job.sp.b, 0)
        del job.sp.b
        self.assertNotIn('b', job.sp)
        with self.assertRaises(AttributeError):
            job.sp.b

    def test_interface_destination_conflict(self):
        """Reassigning a statepoint moves the job unless the target exists."""
        job_a = self.open_job(dict(a=0))
        job_b = self.open_job(dict(b=0))
        job_a.init()
        id_a = job_a.get_id()
        job_a.sp = dict(b=0)
        self.assertEqual(job_a.statepoint(), dict(b=0))
        self.assertEqual(job_a, job_b)
        self.assertNotEqual(job_a.get_id(), id_a)
        job_a = self.open_job(dict(a=0))
        # Moving to existing job, no problem while empty:
        self.assertNotEqual(job_a, job_b)
        job_a.sp = dict(b=0)
        job_a = self.open_job(dict(a=0))
        job_b.init()
        # Moving to an existing job with data leads
        # to an error:
        job_a.document['a'] = 0
        job_b.document['a'] = 0
        self.assertNotEqual(job_a, job_b)
        # NOTE(review): both blocks assign the same statepoint; this assumes
        # DestinationExistsError is a RuntimeError subclass -- confirm.
        with self.assertRaises(RuntimeError):
            job_a.sp = dict(b=0)
        with self.assertRaises(DestinationExistsError):
            job_a.sp = dict(b=0)

    def test_interface_multiple_changes(self):
        """Repeated statepoint edits keep object identity and round-trip ids."""
        for i in range(1, 4):
            job = self.project.open_job(dict(a=i))
            job.init()
        for job in self.project:
            self.assertTrue(job.sp.a > 0)
        for job in self.project:
            obj_id = id(job)
            id0 = job.get_id()
            sp0 = job.statepoint()
            self.assertEqual(id(job), obj_id)
            self.assertTrue(job.sp.a > 0)
            self.assertEqual(job.get_id(), id0)
            self.assertEqual(job.sp, sp0)
            # Flip the sign, then flip it back: id and statepoint must
            # change and then be fully restored.
            job.sp.a = - job.sp.a
            self.assertEqual(id(job), obj_id)
            self.assertTrue(job.sp.a < 0)
            self.assertNotEqual(job.get_id(), id0)
            self.assertNotEqual(job.sp, sp0)
            job.sp.a = - job.sp.a
            self.assertEqual(id(job), obj_id)
            self.assertTrue(job.sp.a > 0)
            self.assertEqual(job.get_id(), id0)
            self.assertEqual(job.sp, sp0)
            job2 = self.project.open_job(id=id0)
            self.assertEqual(job.sp, job2.sp)
            self.assertEqual(job.get_id(), job2.get_id())
class ConfigTest(BaseJobTest):
    """Project configuration get/set/delete/update behavior."""

    def test_set_get_delete(self):
        """A key can be set, read back, and deleted on a config copy."""
        # BUG FIX: the original first unpacked list(test_token.items())[0]
        # and then immediately overwrote both names -- the dead assignment
        # is removed.
        key, value = 'author_name', list(test_token.values())[0]
        config = copy.deepcopy(self.project.config)
        config[key] = value
        self.assertEqual(config[key], value)
        self.assertIn(key, config)
        del config[key]
        self.assertNotIn(key, config)

    def test_update(self):
        """dict.update-style assignment works on a config copy."""
        key, value = 'author_name', list(test_token.values())[0]
        config = copy.deepcopy(self.project.config)
        config.update({key: value})
        self.assertEqual(config[key], value)
        self.assertIn(key, config)

    def test_set_and_retrieve_version(self):
        """A custom signac_version tuple round-trips through the config."""
        fake_version = 0, 0, 0
        self.project.config['signac_version'] = fake_version
        self.assertEqual(self.project.config['signac_version'], fake_version)

    def test_str(self):
        """str() on the config does not raise."""
        str(self.project.config)
class JobOpenAndClosingTest(BaseJobTest):
    """Job lifecycle: init, open/close context management, cwd handling,
    and workspace corruption recovery."""

    def test_init(self):
        """init() creates the workspace directory and the manifest file."""
        job = self.open_job(test_token)
        self.assertFalse(os.path.isdir(job.workspace()))
        job.init()
        self.assertEqual(job.workspace(), job.ws)
        self.assertTrue(os.path.isdir(job.workspace()))
        self.assertTrue(os.path.isdir(job.ws))
        self.assertTrue(os.path.exists(os.path.join(job.workspace(), job.FN_MANIFEST)))

    def test_chained_init(self):
        """open_job(...).init() returns the job, allowing call chaining."""
        job = self.open_job(test_token)
        self.assertFalse(os.path.isdir(job.workspace()))
        job = self.open_job(test_token).init()
        self.assertEqual(job.workspace(), job.ws)
        self.assertTrue(os.path.isdir(job.workspace()))
        self.assertTrue(os.path.isdir(job.ws))
        self.assertTrue(os.path.exists(os.path.join(job.workspace(), job.FN_MANIFEST)))

    def test_construction(self):
        """repr() is a valid expression that reconstructs an equal job."""
        job = self.open_job(test_token)
        job2 = eval(repr(job))
        self.assertEqual(job, job2)

    def test_open_job_close(self):
        """A job used as a context manager can be removed afterwards."""
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            with self.open_job(test_token) as job:
                pass
            job.remove()

    def test_open_job_close_manual(self):
        """Explicit open()/close() calls mirror the context manager."""
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            job = self.open_job(test_token)
            job.open()
            job.close()
            job.remove()

    def test_open_job_close_with_error(self):
        """The context manager propagates exceptions raised inside it."""
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            job = self.open_job(test_token)
            class TestError(Exception):
                pass
            with self.assertRaises(TestError):
                with job:
                    raise TestError()
            job.remove()

    def test_reopen_job(self):
        """Reopening the same statepoint yields the same job id."""
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            with self.open_job(test_token) as job:
                job_id = job.get_id()
                self.assertEqual(str(job_id), str(job))
            with self.open_job(test_token) as job:
                self.assertEqual(job.get_id(), job_id)
            job.remove()

    def test_close_nonopen_job(self):
        """close() on a never-opened job is harmless."""
        job = self.open_job(test_token)
        job.close()
        with job:
            pass

    def test_close_job_while_open(self):
        """close() inside the context still restores the original cwd."""
        rp = os.path.realpath
        cwd = rp(os.getcwd())
        job = self.open_job(test_token)
        with job:
            job.close()
        self.assertEqual(cwd, rp(os.getcwd()))

    def test_open_job_recursive(self):
        """Nested (re-entrant) job contexts restore the cwd level by level."""
        rp = os.path.realpath
        cwd = rp(os.getcwd())
        job = self.open_job(test_token)
        with job:
            self.assertEqual(rp(job.workspace()), rp(os.getcwd()))
        self.assertEqual(cwd, rp(os.getcwd()))
        with job:
            self.assertEqual(rp(job.workspace()), rp(os.getcwd()))
            os.chdir(self.project.root_directory())
        self.assertEqual(cwd, rp(os.getcwd()))
        with job:
            self.assertEqual(rp(job.workspace()), rp(os.getcwd()))
            with job:
                self.assertEqual(rp(job.workspace()), rp(os.getcwd()))
            self.assertEqual(rp(job.workspace()), rp(os.getcwd()))
        self.assertEqual(cwd, rp(os.getcwd()))
        with job:
            self.assertEqual(rp(job.workspace()), rp(os.getcwd()))
            os.chdir(self.project.root_directory())
            with job:
                self.assertEqual(rp(job.workspace()), rp(os.getcwd()))
            # An inner chdir is undone by the inner context exit only.
            self.assertEqual(rp(os.getcwd()), rp(self.project.root_directory()))
        self.assertEqual(cwd, rp(os.getcwd()))
        with job:
            job.close()
            self.assertEqual(cwd, rp(os.getcwd()))
            with job:
                self.assertEqual(rp(job.workspace()), rp(os.getcwd()))
            self.assertEqual(cwd, rp(os.getcwd()))
        self.assertEqual(cwd, rp(os.getcwd()))

    def test_corrupt_workspace(self):
        """A corrupted manifest raises JobsCorruptedError until force-init."""
        job = self.open_job(test_token)
        job.init()
        fn_manifest = os.path.join(job.workspace(), job.FN_MANIFEST)
        with open(fn_manifest, 'w') as file:
            file.write("corrupted")
        job2 = self.open_job(test_token)
        try:
            # Silence the expected error log while provoking the failure.
            logging.disable(logging.ERROR)
            with self.assertRaises(JobsCorruptedError):
                job2.init()
        finally:
            logging.disable(logging.NOTSET)
        job2.init(force=True)
        job2.init()
class JobDocumentTest(BaseJobTest):
def test_get_set(self):
key = 'get_set'
d = testdata()
job = self.open_job(test_token)
self.assertFalse(bool(job.document))
self.assertEqual(len(job.document), 0)
self.assertNotIn(key, job.document)
job.document[key] = d
self.assertTrue(bool(job.document))
self.assertEqual(len(job.document), 1)
self.assertIn(key, job.document)
self.assertEqual(job.document[key], d)
self.assertEqual(job.document.get(key), d)
self.assertEqual(job.document.get('non-existent-key', d), d)
def test_del(self):
key = 'del0'
key1 = 'del1'
d = testdata()
d1 = testdata()
job = self.open_job(test_token)
self.assertEqual(len(job.document), 0)
self.assertNotIn(key, job.document)
job.document[key] = d
self.assertEqual(len(job.document), 1)
self.assertIn(key, job.document)
job.document[key1] = d1
self.assertEqual(len(job.document), 2)
self.assertIn(key, job.document)
self.assertIn(key1, job.document)
self.assertEqual(job.document[key], d)
self.assertEqual(job.document[key1], d1)
del job.document[key]
self.assertEqual(len(job.document), 1)
self.assertIn(key1, job.document)
self.assertNotIn(key, job.document)
def test_get_set_doc(self):
key = 'get_set'
d = testdata()
job = self.open_job(test_token)
self.assertFalse(bool(job.doc))
self.assertEqual(len(job.doc), 0)
self.assertNotIn(key, job.doc)
job.doc[key] = d
self.assertTrue(bool(job.doc))
self.assertEqual(len(job.doc), 1)
self.assertIn(key, job.doc)
self.assertEqual(job.doc[key], d)
self.assertEqual(job.doc.get(key), d)
self.assertEqual(job.doc.get('non-existent-key', d), d)
def test_set_set_doc(self):
key0, key1 = 'set_set0', 'set_set1'
d0, d1 = testdata(), testdata()
job = self.open_job(test_token)
self.assertFalse(bool(job.doc))
self.assertEqual(len(job.doc), 0)
self.assertNotIn(key0, job.doc)
job.doc[key0] = d0
self.assertTrue(bool(job.doc))
self.assertEqual(len(job.doc), 1)
self.assertIn(key0, job.doc)
self.assertEqual(job.doc[key0], d0)
job = self.open_job(test_token)
self.assertTrue(bool(job.doc))
self.assertEqual(len(job.doc), 1)
self.assertIn(key0, job.doc)
self.assertEqual(job.doc[key0], d0)
job = self.open_job(test_token)
job.document[key1] = d1
self.assertTrue(bool(job.doc))
self.assertEqual(len(job.doc), 2)
self.assertIn(key0, job.doc)
self.assertIn(key1, job.doc)
self.assertEqual(job.doc[key0], d0)
self.assertEqual(job.doc[key1], d1)
def test_get_set_nested(self):
d0 = testdata()
d1 = testdata()
d2 = testdata()
assert d0 != d1 != d2
job = self.open_job(test_token)
self.assertEqual(len(job.document), 0)
self.assertNotIn('key0', job.document)
job.document['key0'] = d0
self.assertEqual(len(job.document), 1)
self.assertIn('key0', job.document)
self.assertEqual(job.document['key0'], d0)
with self.assertRaises(AttributeError):
job.document.key0.key1
job.document.key0 = {'key1': d0}
self.assertEqual(len(job.document), 1)
self.assertIn('key0', job.document)
self.assertEqual(job.document(), {'key0': {'key1': d0}})
self.assertEqual(job.document['key0'], {'key1': d0})
self.assertEqual(job.document['key0']['key1'], d0)
self.assertEqual(job.document.key0, {'key1': d0})
self.assertEqual(job.document.key0.key1, d0)
job.document.key0.key1 = d1
self.assertEqual(job.document, {'key0': {'key1': d1}})
self.assertEqual(job.document['key0'], {'key1': d1})
self.assertEqual(job.document['key0']['key1'], d1)
self.assertEqual(job.document.key0, {'key1': d1})
self.assertEqual(job.document.key0.key1, d1)
job.document['key0']['key1'] = d2
self.assertEqual(job.document, {'key0': {'key1': d2}})
self.assertEqual(job.document['key0'], {'key1': d2})
self.assertEqual(job.document['key0']['key1'], d2)
self.assertEqual(job.document.key0, {'key1': d2})
self.assertEqual(job.document.key0.key1, d2)
def test_get_set_nested_doc(self):
d0 = testdata()
d1 = testdata()
d2 = testdata()
assert d0 != d1 != d2
job = self.open_job(test_token)
self.assertEqual(len(job.doc), 0)
self.assertNotIn('key0', job.doc)
job.doc['key0'] = d0
self.assertEqual(len(job.doc), 1)
self.assertIn('key0', job.doc)
self.assertEqual(job.doc['key0'], d0)
with self.assertRaises(AttributeError):
job.doc.key0.key1
job.doc.key0 = {'key1': d0}
self.assertEqual(len(job.doc), 1)
self.assertIn('key0', job.doc)
self.assertEqual(job.doc(), {'key0': {'key1': d0}})
self.assertEqual(job.doc['key0'], {'key1': d0})
self.assertEqual(job.doc['key0']['key1'], d0)
self.assertEqual(job.doc.key0, {'key1': d0})
self.assertEqual(job.doc.key0.key1, d0)
job.doc.key0.key1 = d1
self.assertEqual(job.doc, {'key0': {'key1': d1}})
self.assertEqual(job.doc['key0'], {'key1': d1})
self.assertEqual(job.doc['key0']['key1'], d1)
self.assertEqual(job.doc.key0, {'key1': d1})
self.assertEqual(job.doc.key0.key1, d1)
job.doc['key0']['key1'] = d2
self.assertEqual(job.doc, {'key0': {'key1': d2}})
self.assertEqual(job.doc['key0'], {'key1': d2})
self.assertEqual(job.doc['key0']['key1'], d2)
self.assertEqual(job.doc.key0, {'key1': d2})
self.assertEqual(job.doc.key0.key1, d2)
def test_assign(self):
    """Wholesale assignment to `job.document` requires a mapping; non-mappings raise ValueError."""
    key = 'assign'
    d0 = testdata()
    d1 = testdata()
    job = self.open_job(test_token)
    self.assertEqual(len(job.document), 0)
    job.document[key] = d0
    self.assertEqual(len(job.document), 1)
    self.assertEqual(job.document(), {key: d0})
    # Assigning a bare (non-dict) value to the whole document is rejected.
    with self.assertRaises(ValueError):
        job.document = d1
    # Assigning a dict replaces the document contents.
    job.document = {key: d1}
    self.assertEqual(len(job.document), 1)
    self.assertEqual(job.document(), {key: d1})
def test_assign_doc(self):
    """Same as test_assign, exercised through the `job.doc` alias."""
    key = 'assign'
    d0 = testdata()
    d1 = testdata()
    job = self.open_job(test_token)
    self.assertEqual(len(job.doc), 0)
    job.doc[key] = d0
    self.assertEqual(len(job.doc), 1)
    self.assertEqual(job.doc(), {key: d0})
    # Non-mapping assignment must raise.
    with self.assertRaises(ValueError):
        job.doc = d1
    job.doc = {key: d1}
    self.assertEqual(len(job.doc), 1)
    self.assertEqual(job.doc(), {key: d1})
def test_copy_document(self):
    """`dict(job.document)` yields a plain-dict snapshot equal to the live document."""
    key = 'get_set'
    d = testdata()
    job = self.open_job(test_token)
    job.document[key] = d
    self.assertTrue(bool(job.document))
    self.assertEqual(len(job.document), 1)
    self.assertIn(key, job.document)
    self.assertEqual(job.document[key], d)
    self.assertEqual(job.document.get(key), d)
    self.assertEqual(job.document.get('non-existent-key', d), d)
    # The copy is a regular dict supporting the same read operations.
    copy = dict(job.document)
    self.assertTrue(bool(copy))
    self.assertEqual(len(copy), 1)
    self.assertIn(key, copy)
    self.assertEqual(copy[key], d)
    self.assertEqual(copy.get(key), d)
    self.assertEqual(copy.get('non-existent-key', d), d)
def test_update(self):
    """`job.document.update(mapping)` merges keys like dict.update."""
    key = 'get_set'
    d = testdata()
    job = self.open_job(test_token)
    job.document.update({key: d})
    self.assertIn(key, job.document)
def test_clear_document(self):
    """`job.document.clear()` removes all keys."""
    key = 'clear'
    d = testdata()
    job = self.open_job(test_token)
    job.document[key] = d
    self.assertIn(key, job.document)
    self.assertEqual(len(job.document), 1)
    job.document.clear()
    self.assertNotIn(key, job.document)
    self.assertEqual(len(job.document), 0)
def test_reopen(self):
    """Document contents persist across independent job handles for the same statepoint."""
    key = 'clear'
    d = testdata()
    job = self.open_job(test_token)
    job.document[key] = d
    self.assertIn(key, job.document)
    self.assertEqual(len(job.document), 1)
    # A second handle opened on the same statepoint sees the same document.
    job2 = self.open_job(test_token)
    self.assertIn(key, job2.document)
    self.assertEqual(len(job2.document), 1)
def test_concurrency(self):
    """A write through one open handle is immediately visible through another."""
    key = 'concurrent'
    d = testdata()
    job = self.open_job(test_token)
    job2 = self.open_job(test_token)
    self.assertNotIn(key, job.document)
    self.assertNotIn(key, job2.document)
    job.document[key] = d
    self.assertIn(key, job.document)
    self.assertIn(key, job2.document)
def test_remove(self):
    """`job.remove()` deletes the document and the workspace files."""
    key = 'remove'
    job = self.open_job(test_token)
    job.remove()  # removing a fresh job must not fail
    d = testdata()
    job.document[key] = d
    self.assertIn(key, job.document)
    self.assertEqual(len(job.document), 1)
    fn_test = os.path.join(job.workspace(), 'test')
    with open(fn_test, 'w') as file:
        file.write('test')
    self.assertTrue(os.path.isfile(fn_test))
    job.remove()
    # Both the document entry and the workspace file are gone.
    self.assertNotIn(key, job.document)
    self.assertFalse(os.path.isfile(fn_test))
def test_clear_job(self):
    """`job.clear()` empties document and files but keeps the initialized job in the project."""
    key = 'clear'
    job = self.open_job(test_token)
    self.assertNotIn(job, self.project)
    # Clearing an uninitialized job is a no-op and must not initialize it.
    job.clear()
    self.assertNotIn(job, self.project)
    job.clear()
    self.assertNotIn(job, self.project)
    job.init()
    self.assertIn(job, self.project)
    # Clearing (repeatedly) never removes an initialized job from the project.
    job.clear()
    self.assertIn(job, self.project)
    job.clear()
    job.clear()
    self.assertIn(job, self.project)
    d = testdata()
    job.document[key] = d
    self.assertIn(job, self.project)
    self.assertIn(key, job.document)
    self.assertEqual(len(job.document), 1)
    job.clear()
    self.assertEqual(len(job.document), 0)
    with open(job.fn('test'), 'w') as file:
        file.write('test')
    self.assertTrue(job.isfile('test'))
    self.assertIn(job, self.project)
    job.clear()
    # Workspace files are deleted along with document contents.
    self.assertFalse(job.isfile('test'))
    self.assertEqual(len(job.document), 0)
def test_reset(self):
    """`job.reset()` initializes the job if needed and empties its document."""
    key = 'reset'
    job = self.open_job(test_token)
    self.assertNotIn(job, self.project)
    job.reset()  # reset on an uninitialized job initializes it
    self.assertIn(job, self.project)
    self.assertEqual(len(job.document), 0)
    job.document[key] = testdata()
    self.assertEqual(len(job.document), 1)
    job.reset()
    self.assertIn(job, self.project)
    self.assertEqual(len(job.document), 0)
def test_doc(self):
    """Writes via `job.doc`, `job.document`, or setattr are visible through every access form."""
    key = 'test_doc'
    job = self.open_job(test_token)

    def check_content(key, d):
        # Item access, attribute access, and the callable snapshot must agree
        # for both the `doc` and `document` spellings.
        self.assertEqual(job.doc[key], d)
        self.assertEqual(getattr(job.doc, key), d)
        self.assertEqual(job.doc()[key], d)
        self.assertEqual(job.document[key], d)
        self.assertEqual(getattr(job.document, key), d)
        self.assertEqual(job.document()[key], d)
    d = testdata()
    job.doc[key] = d
    check_content(key, d)
    d2 = testdata()
    job.doc[key] = d2
    check_content(key, d2)
    d3 = testdata()
    job.document[key] = d3
    check_content(key, d3)
    d4 = testdata()
    setattr(job.doc, key, d4)
    check_content(key, d4)
def test_sp_formatting(self):
    """State-point values are reachable from str.format fields, via attribute and item syntax."""
    job = self.open_job({'a': 0})
    self.assertEqual('{job.statepoint.a}'.format(job=job), str(job.sp.a))
    self.assertEqual('{job.sp.a}'.format(job=job), str(job.sp.a))
    self.assertEqual('{job.statepoint[a]}'.format(job=job), str(job.sp.a))
    self.assertEqual('{job.sp[a]}'.format(job=job), str(job.sp.a))
    # Nested state points format the same way.
    job.sp.a = dict(b=0)
    self.assertEqual('{job.statepoint.a.b}'.format(job=job), str(job.sp.a.b))
    self.assertEqual('{job.sp.a.b}'.format(job=job), str(job.sp.a.b))
    self.assertEqual('{job.statepoint[a][b]}'.format(job=job), str(job.sp.a.b))
    self.assertEqual('{job.sp[a][b]}'.format(job=job), str(job.sp.a.b))
def test_doc_formatting(self):
    """Document values are reachable from str.format fields, via attribute and item syntax.

    Covers both the `job.doc` and `job.document` spellings, for flat and
    nested keys, mirroring test_sp_formatting.
    """
    job = self.open_job(test_token)
    job.doc.a = 0
    self.assertEqual('{job.doc.a}'.format(job=job), str(job.doc.a))
    self.assertEqual('{job.doc[a]}'.format(job=job), str(job.doc.a))
    self.assertEqual('{job.document.a}'.format(job=job), str(job.doc.a))
    self.assertEqual('{job.document[a]}'.format(job=job), str(job.doc.a))
    job.doc.a = dict(b=0)
    self.assertEqual('{job.doc.a.b}'.format(job=job), str(job.doc.a.b))
    # Fixed copy-paste defect: this line previously repeated the attribute
    # form '{job.doc.a.b}', leaving nested item access on `doc` untested.
    # Use the item form, matching '{job.sp[a][b]}' in test_sp_formatting.
    self.assertEqual('{job.doc[a][b]}'.format(job=job), str(job.doc.a.b))
    self.assertEqual('{job.document.a.b}'.format(job=job), str(job.doc.a.b))
    self.assertEqual('{job.document[a][b]}'.format(job=job), str(job.doc.a.b))
@unittest.skipIf(not H5PY, 'test requires the h5py package')
def test_reset_statepoint_job(self):
    """`job.reset_statepoint(dst)` migrates document and data to the new statepoint."""
    key = 'move_job'
    d = testdata()
    src = test_token
    dst = dict(test_token)
    dst['dst'] = True
    src_job = self.open_job(src)
    src_job.document[key] = d
    self.assertIn(key, src_job.document)
    self.assertEqual(len(src_job.document), 1)
    src_job.data[key] = d
    self.assertIn(key, src_job.data)
    self.assertEqual(len(src_job.data), 1)
    src_job.reset_statepoint(dst)
    # Re-open both statepoints: content moved from src to dst.
    src_job = self.open_job(src)
    dst_job = self.open_job(dst)
    self.assertIn(key, dst_job.document)
    self.assertEqual(len(dst_job.document), 1)
    self.assertNotIn(key, src_job.document)
    self.assertIn(key, dst_job.data)
    self.assertEqual(len(dst_job.data), 1)
    self.assertNotIn(key, src_job.data)
    # Moving onto an occupied destination fails; checked both as the base
    # RuntimeError and the specific DestinationExistsError (presumably a
    # RuntimeError subclass — TODO confirm in the errors module).
    with self.assertRaises(RuntimeError):
        src_job.reset_statepoint(dst)
    with self.assertRaises(DestinationExistsError):
        src_job.reset_statepoint(dst)
@unittest.skipIf(not H5PY, 'test requires the h5py package')
def test_reset_statepoint_project(self):
    """`project.reset_statepoint(job, dst)` behaves like the job-level variant."""
    key = 'move_job'
    d = testdata()
    src = test_token
    dst = dict(test_token)
    dst['dst'] = True
    src_job = self.open_job(src)
    src_job.document[key] = d
    self.assertIn(key, src_job.document)
    self.assertEqual(len(src_job.document), 1)
    src_job.data[key] = d
    self.assertIn(key, src_job.data)
    self.assertEqual(len(src_job.data), 1)
    self.project.reset_statepoint(src_job, dst)
    # Re-open both statepoints: content moved from src to dst.
    src_job = self.open_job(src)
    dst_job = self.open_job(dst)
    self.assertIn(key, dst_job.document)
    self.assertEqual(len(dst_job.document), 1)
    self.assertNotIn(key, src_job.document)
    self.assertIn(key, dst_job.data)
    self.assertEqual(len(dst_job.data), 1)
    self.assertNotIn(key, src_job.data)
    # Occupied destination: base RuntimeError and the specific subclass.
    with self.assertRaises(RuntimeError):
        self.project.reset_statepoint(src_job, dst)
    with self.assertRaises(DestinationExistsError):
        self.project.reset_statepoint(src_job, dst)
@unittest.skipIf(not H5PY, 'test requires the h5py package')
def test_update_statepoint(self):
    """`project.update_statepoint` extends a statepoint; conflicting keys need overwrite=True."""
    key = 'move_job'
    d = testdata()
    src = test_token
    extension = {'dst': True}
    dst = dict(src)
    dst.update(extension)
    extension2 = {'dst': False}  # conflicts with `extension` on key 'dst'
    dst2 = dict(src)
    dst2.update(extension2)
    src_job = self.open_job(src)
    src_job.document[key] = d
    self.assertIn(key, src_job.document)
    self.assertEqual(len(src_job.document), 1)
    src_job.data[key] = d
    self.assertIn(key, src_job.data)
    self.assertEqual(len(src_job.data), 1)
    self.project.update_statepoint(src_job, extension)
    src_job = self.open_job(src)
    dst_job = self.open_job(dst)
    self.assertEqual(dst_job.statepoint(), dst)
    self.assertIn(key, dst_job.document)
    self.assertEqual(len(dst_job.document), 1)
    self.assertNotIn(key, src_job.document)
    self.assertIn(key, dst_job.data)
    self.assertEqual(len(dst_job.data), 1)
    self.assertNotIn(key, src_job.data)
    with self.assertRaises(RuntimeError):
        self.project.reset_statepoint(src_job, dst)
    with self.assertRaises(DestinationExistsError):
        self.project.reset_statepoint(src_job, dst)
    # Updating an existing key without overwrite raises KeyError...
    with self.assertRaises(KeyError):
        self.project.update_statepoint(dst_job, extension2)
    # ...and succeeds with overwrite=True, moving the job again.
    self.project.update_statepoint(dst_job, extension2, overwrite=True)
    dst2_job = self.open_job(dst2)
    self.assertEqual(dst2_job.statepoint(), dst2)
    self.assertIn(key, dst2_job.document)
    self.assertEqual(len(dst2_job.document), 1)
    self.assertIn(key, dst2_job.data)
    self.assertEqual(len(dst2_job.data), 1)
@unittest.skipIf(not H5PY, 'test requires the h5py package')
class JobOpenDataTest(BaseJobTest):
    """Exercise the `job.data` (HDF5-backed) store with the data file held open.

    Each test wraps its accesses in `self.open_data(job)`, which here keeps
    `job.data` open for the duration; the `JobClosedDataTest` subclass
    overrides the context manager to a no-op so the same tests run against
    per-access open/close behavior.
    """

    @staticmethod
    @contextmanager
    def open_data(job):
        """Hold job.data open while the managed block runs."""
        with job.data:
            yield

    def test_get_set(self):
        """Basic set/get/contains/len on an open job.data store."""
        key = 'get_set'
        d = testdata()
        job = self.open_job(test_token)
        with self.open_data(job):
            self.assertFalse(bool(job.data))
            self.assertEqual(len(job.data), 0)
            self.assertNotIn(key, job.data)
            job.data[key] = d
            self.assertTrue(bool(job.data))
            self.assertEqual(len(job.data), 1)
            self.assertIn(key, job.data)
            self.assertEqual(job.data[key], d)
            self.assertEqual(job.data.get(key), d)
            self.assertEqual(job.data.get('non-existent-key', d), d)

    def test_del(self):
        """`del job.data[key]` removes exactly that key."""
        key = 'del0'
        key1 = 'del1'
        d = testdata()
        d1 = testdata()
        job = self.open_job(test_token)
        with self.open_data(job):
            self.assertEqual(len(job.data), 0)
            self.assertNotIn(key, job.data)
            job.data[key] = d
            self.assertEqual(len(job.data), 1)
            self.assertIn(key, job.data)
            job.data[key1] = d1
            self.assertEqual(len(job.data), 2)
            self.assertIn(key, job.data)
            self.assertIn(key1, job.data)
            self.assertEqual(job.data[key], d)
            self.assertEqual(job.data[key1], d1)
            del job.data[key]
            self.assertEqual(len(job.data), 1)
            self.assertIn(key1, job.data)
            self.assertNotIn(key, job.data)

    def test_get_set_data(self):
        """Same coverage as test_get_set (kept for symmetry with the document tests)."""
        key = 'get_set'
        d = testdata()
        job = self.open_job(test_token)
        with self.open_data(job):
            self.assertFalse(bool(job.data))
            self.assertEqual(len(job.data), 0)
            self.assertNotIn(key, job.data)
            job.data[key] = d
            self.assertTrue(bool(job.data))
            self.assertEqual(len(job.data), 1)
            self.assertIn(key, job.data)
            self.assertEqual(job.data[key], d)
            self.assertEqual(job.data.get(key), d)
            self.assertEqual(job.data.get('non-existent-key', d), d)

    def test_set_set_data(self):
        """Values persist across re-opened job handles and accumulate."""
        key0, key1 = 'set_set0', 'set_set1'
        d0, d1 = testdata(), testdata()
        job = self.open_job(test_token)
        with self.open_data(job):
            self.assertFalse(bool(job.data))
            self.assertEqual(len(job.data), 0)
            self.assertNotIn(key0, job.data)
            job.data[key0] = d0
            self.assertTrue(bool(job.data))
            self.assertEqual(len(job.data), 1)
            self.assertIn(key0, job.data)
            self.assertEqual(job.data[key0], d0)
        # A fresh handle sees the previously stored value.
        job = self.open_job(test_token)
        with self.open_data(job):
            self.assertTrue(bool(job.data))
            self.assertEqual(len(job.data), 1)
            self.assertIn(key0, job.data)
            self.assertEqual(job.data[key0], d0)
        # Adding a second key via yet another handle keeps the first.
        job = self.open_job(test_token)
        with self.open_data(job):
            job.data[key1] = d1
            self.assertTrue(bool(job.data))
            self.assertEqual(len(job.data), 2)
            self.assertIn(key0, job.data)
            self.assertIn(key1, job.data)
            self.assertEqual(job.data[key0], d0)
            self.assertEqual(job.data[key1], d1)

    def test_get_set_nested(self):
        """Nested values in job.data via item and attribute access."""
        d0 = testdata()
        d1 = testdata()
        d2 = testdata()
        assert d0 != d1 != d2
        job = self.open_job(test_token)
        with self.open_data(job):
            self.assertEqual(len(job.data), 0)
            self.assertNotIn('key0', job.data)
            job.data['key0'] = d0
            self.assertEqual(len(job.data), 1)
            self.assertIn('key0', job.data)
            self.assertEqual(job.data['key0'], d0)
            with self.assertRaises(AttributeError):
                job.data.key0.key1
            job.data.key0 = {'key1': d0}
            self.assertEqual(len(job.data), 1)
            self.assertIn('key0', job.data)
            self.assertEqual(dict(job.data), {'key0': {'key1': d0}})
            self.assertEqual(job.data['key0'], {'key1': d0})
            self.assertEqual(job.data['key0']['key1'], d0)
            self.assertEqual(job.data.key0, {'key1': d0})
            self.assertEqual(job.data.key0.key1, d0)
            job.data.key0.key1 = d1
            self.assertEqual(job.data, {'key0': {'key1': d1}})
            self.assertEqual(job.data['key0'], {'key1': d1})
            self.assertEqual(job.data['key0']['key1'], d1)
            self.assertEqual(job.data.key0, {'key1': d1})
            self.assertEqual(job.data.key0.key1, d1)
            job.data['key0']['key1'] = d2
            self.assertEqual(job.data, {'key0': {'key1': d2}})
            self.assertEqual(job.data['key0'], {'key1': d2})
            self.assertEqual(job.data['key0']['key1'], d2)
            self.assertEqual(job.data.key0, {'key1': d2})
            self.assertEqual(job.data.key0.key1, d2)

    def test_get_set_nested_data(self):
        """Duplicate of test_get_set_nested (kept for naming symmetry)."""
        d0 = testdata()
        d1 = testdata()
        d2 = testdata()
        assert d0 != d1 != d2
        job = self.open_job(test_token)
        with self.open_data(job):
            self.assertEqual(len(job.data), 0)
            self.assertNotIn('key0', job.data)
            job.data['key0'] = d0
            self.assertEqual(len(job.data), 1)
            self.assertIn('key0', job.data)
            self.assertEqual(job.data['key0'], d0)
            with self.assertRaises(AttributeError):
                job.data.key0.key1
            job.data.key0 = {'key1': d0}
            self.assertEqual(len(job.data), 1)
            self.assertIn('key0', job.data)
            self.assertEqual(dict(job.data), {'key0': {'key1': d0}})
            self.assertEqual(job.data['key0'], {'key1': d0})
            self.assertEqual(job.data['key0']['key1'], d0)
            self.assertEqual(job.data.key0, {'key1': d0})
            self.assertEqual(job.data.key0.key1, d0)
            job.data.key0.key1 = d1
            self.assertEqual(job.data, {'key0': {'key1': d1}})
            self.assertEqual(job.data['key0'], {'key1': d1})
            self.assertEqual(job.data['key0']['key1'], d1)
            self.assertEqual(job.data.key0, {'key1': d1})
            self.assertEqual(job.data.key0.key1, d1)
            job.data['key0']['key1'] = d2
            self.assertEqual(job.data, {'key0': {'key1': d2}})
            self.assertEqual(job.data['key0'], {'key1': d2})
            self.assertEqual(job.data['key0']['key1'], d2)
            self.assertEqual(job.data.key0, {'key1': d2})
            self.assertEqual(job.data.key0.key1, d2)

    def test_assign(self):
        """Wholesale assignment to job.data requires a mapping."""
        key = 'assign'
        d0 = testdata()
        d1 = testdata()
        job = self.open_job(test_token)
        with self.open_data(job):
            self.assertEqual(len(job.data), 0)
            job.data[key] = d0
            self.assertEqual(len(job.data), 1)
            self.assertEqual(dict(job.data), {key: d0})
            with self.assertRaises(ValueError):
                job.data = d1
            job.data = {key: d1}
            self.assertEqual(len(job.data), 1)
            self.assertEqual(dict(job.data), {key: d1})

    def test_assign_data(self):
        """Duplicate of test_assign (kept for naming symmetry)."""
        key = 'assign'
        d0 = testdata()
        d1 = testdata()
        job = self.open_job(test_token)
        with self.open_data(job):
            self.assertEqual(len(job.data), 0)
            job.data[key] = d0
            self.assertEqual(len(job.data), 1)
            self.assertEqual(dict(job.data), {key: d0})
            with self.assertRaises(ValueError):
                job.data = d1
            job.data = {key: d1}
            self.assertEqual(len(job.data), 1)
            self.assertEqual(dict(job.data), {key: d1})

    def test_copy_data(self):
        """`dict(job.data)` yields a plain-dict snapshot."""
        key = 'get_set'
        d = testdata()
        job = self.open_job(test_token)
        with self.open_data(job):
            job.data[key] = d
            self.assertTrue(bool(job.data))
            self.assertEqual(len(job.data), 1)
            self.assertIn(key, job.data)
            self.assertEqual(job.data[key], d)
            self.assertEqual(job.data.get(key), d)
            self.assertEqual(job.data.get('non-existent-key', d), d)
            copy = dict(job.data)
            self.assertTrue(bool(copy))
            self.assertEqual(len(copy), 1)
            self.assertIn(key, copy)
            self.assertEqual(copy[key], d)
            self.assertEqual(copy.get(key), d)
            self.assertEqual(copy.get('non-existent-key', d), d)

    def test_update(self):
        """`job.data.update(mapping)` merges keys."""
        key = 'get_set'
        d = testdata()
        job = self.open_job(test_token)
        with self.open_data(job):
            job.data.update({key: d})
            self.assertIn(key, job.data)

    def test_clear_data(self):
        """`job.data.clear()` removes all keys."""
        key = 'clear'
        d = testdata()
        job = self.open_job(test_token)
        with self.open_data(job):
            job.data[key] = d
            self.assertIn(key, job.data)
            self.assertEqual(len(job.data), 1)
            job.data.clear()
            self.assertNotIn(key, job.data)
            self.assertEqual(len(job.data), 0)

    def test_reopen(self):
        """Data persists across independent job handles."""
        key = 'clear'
        d = testdata()
        job = self.open_job(test_token)
        with self.open_data(job):
            job.data[key] = d
            self.assertIn(key, job.data)
            self.assertEqual(len(job.data), 1)
        job2 = self.open_job(test_token)
        with self.open_data(job2):
            self.assertIn(key, job2.data)
            self.assertEqual(len(job2.data), 1)

    def test_concurrency(self):
        """A write via one open handle is visible via another handle open at the same time."""
        key = 'concurrent'
        d = testdata()
        job = self.open_job(test_token)
        job2 = self.open_job(test_token)
        with self.open_data(job):
            with self.open_data(job2):
                self.assertNotIn(key, job.data)
                self.assertNotIn(key, job2.data)
                job.data[key] = d
                self.assertIn(key, job.data)
                self.assertIn(key, job2.data)

    def test_move_not_initialized(self):
        """Moving an uninitialized job raises RuntimeError."""
        job = self.open_job(test_token)
        with self.assertRaises(RuntimeError):
            job.move(job._project)

    def test_move_intra_project(self):
        """Moving a job to its own project is a no-op."""
        job = self.open_job(test_token).init()
        job.move(self.project)  # no-op

    def test_move_inter_project(self):
        """Jobs move between projects; moving onto an existing clone raises."""
        job = self.open_job(test_token).init()
        project_a = self.project
        project_b = self.project_class.init_project(
            name='project_b',
            root=os.path.join(self._tmp_pr, 'project_b'))
        job.move(project_b)
        job.move(project_a)
        project_b.clone(job)
        # The clone now occupies the destination workspace.
        with self.assertRaises(DestinationExistsError):
            job.move(project_b)

    def test_remove(self):
        """`job.remove()` deletes the data store and workspace files."""
        key = 'remove'
        job = self.open_job(test_token)
        job.remove()
        d = testdata()
        with self.open_data(job):
            job.data[key] = d
            self.assertIn(key, job.data)
            self.assertEqual(len(job.data), 1)
            fn_test = os.path.join(job.workspace(), 'test')
            with open(fn_test, 'w') as file:
                file.write('test')
            self.assertTrue(os.path.isfile(fn_test))
        job.remove()
        with self.open_data(job):
            self.assertNotIn(key, job.data)
        self.assertFalse(os.path.isfile(fn_test))

    def test_clear_job(self):
        """`job.clear()` empties data and files but keeps the job initialized."""
        key = 'clear'
        job = self.open_job(test_token)
        self.assertNotIn(job, self.project)
        job.clear()
        self.assertNotIn(job, self.project)
        job.clear()
        self.assertNotIn(job, self.project)
        job.init()
        self.assertIn(job, self.project)
        job.clear()
        self.assertIn(job, self.project)
        job.clear()
        job.clear()
        self.assertIn(job, self.project)
        d = testdata()
        with self.open_data(job):
            job.data[key] = d
            self.assertIn(job, self.project)
            self.assertIn(key, job.data)
            self.assertEqual(len(job.data), 1)
        job.clear()
        with self.open_data(job):
            self.assertEqual(len(job.data), 0)
        with open(job.fn('test'), 'w') as file:
            file.write('test')
        self.assertTrue(job.isfile('test'))
        self.assertIn(job, self.project)
        job.clear()
        self.assertFalse(job.isfile('test'))
        with self.open_data(job):
            self.assertEqual(len(job.data), 0)

    def test_reset(self):
        """`job.reset()` initializes the job if needed and empties its data store."""
        key = 'reset'
        job = self.open_job(test_token)
        self.assertNotIn(job, self.project)
        job.reset()
        self.assertIn(job, self.project)
        with self.open_data(job):
            self.assertEqual(len(job.data), 0)
            job.data[key] = testdata()
            self.assertEqual(len(job.data), 1)
        job.reset()
        self.assertIn(job, self.project)
        with self.open_data(job):
            self.assertEqual(len(job.data), 0)

    def test_data(self):
        """Writes via item access or setattr are visible through every read form."""
        key = 'test_data'
        job = self.open_job(test_token)

        def check_content(key, d):
            # Item access, attribute access, and the dict snapshot must agree.
            self.assertEqual(job.data[key], d)
            self.assertEqual(getattr(job.data, key), d)
            self.assertEqual(dict(job.data)[key], d)
            self.assertEqual(job.data[key], d)
            self.assertEqual(getattr(job.data, key), d)
            self.assertEqual(dict(job.data)[key], d)
        with self.open_data(job):
            d = testdata()
            job.data[key] = d
            check_content(key, d)
            d2 = testdata()
            job.data[key] = d2
            check_content(key, d2)
            d3 = testdata()
            job.data[key] = d3
            check_content(key, d3)
            d4 = testdata()
            setattr(job.data, key, d4)
            check_content(key, d4)

    def test_reset_statepoint_job(self):
        """`job.reset_statepoint(dst)` migrates the data store to the new statepoint."""
        key = 'move_job'
        d = testdata()
        src = test_token
        dst = dict(test_token)
        dst['dst'] = True
        src_job = self.open_job(src)
        with self.open_data(src_job):
            src_job.data[key] = d
            self.assertIn(key, src_job.data)
            self.assertEqual(len(src_job.data), 1)
        src_job.reset_statepoint(dst)
        src_job = self.open_job(src)
        dst_job = self.open_job(dst)
        with self.open_data(dst_job):
            self.assertIn(key, dst_job.data)
            self.assertEqual(len(dst_job.data), 1)
        with self.open_data(src_job):
            self.assertNotIn(key, src_job.data)
        # Occupied destination: base RuntimeError plus the specific subclass
        # (presumably DestinationExistsError derives from RuntimeError — TODO confirm).
        with self.assertRaises(RuntimeError):
            src_job.reset_statepoint(dst)
        with self.assertRaises(DestinationExistsError):
            src_job.reset_statepoint(dst)

    def test_reset_statepoint_project(self):
        """`project.reset_statepoint` migrates the data store like the job-level variant."""
        key = 'move_job'
        d = testdata()
        src = test_token
        dst = dict(test_token)
        dst['dst'] = True
        src_job = self.open_job(src)
        with self.open_data(src_job):
            src_job.data[key] = d
            self.assertIn(key, src_job.data)
            self.assertEqual(len(src_job.data), 1)
        self.project.reset_statepoint(src_job, dst)
        src_job = self.open_job(src)
        dst_job = self.open_job(dst)
        with self.open_data(dst_job):
            self.assertIn(key, dst_job.data)
            self.assertEqual(len(dst_job.data), 1)
        with self.open_data(src_job):
            self.assertNotIn(key, src_job.data)
        with self.assertRaises(RuntimeError):
            self.project.reset_statepoint(src_job, dst)
        with self.assertRaises(DestinationExistsError):
            self.project.reset_statepoint(src_job, dst)

    def test_update_statepoint(self):
        """`project.update_statepoint` moves the data store; conflicts need overwrite=True."""
        key = 'move_job'
        d = testdata()
        src = test_token
        extension = {'dst': True}
        dst = dict(src)
        dst.update(extension)
        extension2 = {'dst': False}  # conflicts with `extension` on key 'dst'
        dst2 = dict(src)
        dst2.update(extension2)
        src_job = self.open_job(src)
        with self.open_data(src_job):
            src_job.data[key] = d
            self.assertIn(key, src_job.data)
            self.assertEqual(len(src_job.data), 1)
        self.project.update_statepoint(src_job, extension)
        src_job = self.open_job(src)
        dst_job = self.open_job(dst)
        self.assertEqual(dst_job.statepoint(), dst)
        with self.open_data(dst_job):
            self.assertIn(key, dst_job.data)
            self.assertEqual(len(dst_job.data), 1)
        with self.open_data(src_job):
            self.assertNotIn(key, src_job.data)
        with self.assertRaises(RuntimeError):
            self.project.reset_statepoint(src_job, dst)
        with self.assertRaises(DestinationExistsError):
            self.project.reset_statepoint(src_job, dst)
        with self.assertRaises(KeyError):
            self.project.update_statepoint(dst_job, extension2)
        self.project.update_statepoint(dst_job, extension2, overwrite=True)
        dst2_job = self.open_job(dst2)
        self.assertEqual(dst2_job.statepoint(), dst2)
        with self.open_data(dst2_job):
            self.assertIn(key, dst2_job.data)
            self.assertEqual(len(dst2_job.data), 1)
@unittest.skipIf(not H5PY, 'test requires the h5py package')
class JobClosedDataTest(JobOpenDataTest):
    """Re-run all JobOpenDataTest tests without holding the data file open.

    `open_data` is overridden to a no-op context manager, so every access in
    the inherited tests opens and closes the underlying store on demand.
    """

    @staticmethod
    @contextmanager
    def open_data(job):
        """No-op: leave the data store closed between accesses."""
        yield

    def test_implicit_initialization(self):
        """Reading from `job.stores` never creates a store; writing does."""
        job = self.open_job(test_token)
        self.assertNotIn('test', job.stores)
        self.assertNotIn('foo', job.stores.test)
        self.assertEqual(list(job.stores.keys()), [])
        self.assertEqual(list(job.stores), [])
        # The read of job.stores.test above must not have created 'test'.
        self.assertNotIn('test', job.stores)
        job.stores.test.foo = True
        self.assertIn('test', job.stores)
        self.assertIn('foo', job.stores.test)
        self.assertEqual(list(job.stores.keys()), ['test'])
        self.assertEqual(list(job.stores), ['test'])
@unittest.skipIf(not H5PY, 'test requires the h5py package')
class JobOpenCustomDataTest(BaseJobTest):
@staticmethod
@contextmanager
def open_data(job):
with job.stores.test:
yield
def test_get_set(self):
key = 'get_set'
d = testdata()
job = self.open_job(test_token)
with self.open_data(job):
self.assertFalse(bool(job.stores.test))
self.assertEqual(len(job.stores.test), 0)
self.assertNotIn(key, job.stores.test)
job.stores.test[key] = d
self.assertTrue(bool(job.stores.test))
self.assertEqual(len(job.stores.test), 1)
self.assertIn(key, job.stores.test)
self.assertEqual(job.stores.test[key], d)
self.assertEqual(job.stores.test.get(key), d)
self.assertEqual(job.stores.test.get('non-existent-key', d), d)
def test_del(self):
key = 'del0'
key1 = 'del1'
d = testdata()
d1 = testdata()
job = self.open_job(test_token)
with self.open_data(job):
self.assertEqual(len(job.stores.test), 0)
self.assertNotIn(key, job.stores.test)
job.stores.test[key] = d
self.assertEqual(len(job.stores.test), 1)
self.assertIn(key, job.stores.test)
job.stores.test[key1] = d1
self.assertEqual(len(job.stores.test), 2)
self.assertIn(key, job.stores.test)
self.assertIn(key1, job.stores.test)
self.assertEqual(job.stores.test[key], d)
self.assertEqual(job.stores.test[key1], d1)
del job.stores.test[key]
self.assertEqual(len(job.stores.test), 1)
self.assertIn(key1, job.stores.test)
self.assertNotIn(key, job.stores.test)
def test_get_set_data(self):
key = 'get_set'
d = testdata()
job = self.open_job(test_token)
with self.open_data(job):
self.assertFalse(bool(job.stores.test))
self.assertEqual(len(job.stores.test), 0)
self.assertNotIn(key, job.stores.test)
job.stores.test[key] = d
self.assertTrue(bool(job.stores.test))
self.assertEqual(len(job.stores.test), 1)
self.assertIn(key, job.stores.test)
self.assertEqual(job.stores.test[key], d)
self.assertEqual(job.stores.test.get(key), d)
self.assertEqual(job.stores.test.get('non-existent-key', d), d)
def test_set_set_data(self):
key0, key1 = 'set_set0', 'set_set1'
d0, d1 = testdata(), testdata()
job = self.open_job(test_token)
with self.open_data(job):
self.assertFalse(bool(job.stores.test))
self.assertEqual(len(job.stores.test), 0)
self.assertNotIn(key0, job.stores.test)
job.stores.test[key0] = d0
self.assertTrue(bool(job.stores.test))
self.assertEqual(len(job.stores.test), 1)
self.assertIn(key0, job.stores.test)
self.assertEqual(job.stores.test[key0], d0)
job = self.open_job(test_token)
with self.open_data(job):
self.assertTrue(bool(job.stores.test))
self.assertEqual(len(job.stores.test), 1)
self.assertIn(key0, job.stores.test)
self.assertEqual(job.stores.test[key0], d0)
job = self.open_job(test_token)
with self.open_data(job):
job.stores.test[key1] = d1
self.assertTrue(bool(job.stores.test))
self.assertEqual(len(job.stores.test), 2)
self.assertIn(key0, job.stores.test)
self.assertIn(key1, job.stores.test)
self.assertEqual(job.stores.test[key0], d0)
self.assertEqual(job.stores.test[key1], d1)
def test_get_set_nested(self):
d0 = testdata()
d1 = testdata()
d2 = testdata()
assert d0 != d1 != d2
job = self.open_job(test_token)
with self.open_data(job):
self.assertEqual(len(job.stores.test), 0)
self.assertNotIn('key0', job.stores.test)
job.stores.test['key0'] = d0
self.assertEqual(len(job.stores.test), 1)
self.assertIn('key0', job.stores.test)
self.assertEqual(job.stores.test['key0'], d0)
with self.assertRaises(AttributeError):
job.stores.test.key0.key1
job.stores.test.key0 = {'key1': d0}
self.assertEqual(len(job.stores.test), 1)
self.assertIn('key0', job.stores.test)
self.assertEqual(dict(job.stores.test), {'key0': {'key1': d0}})
self.assertEqual(job.stores.test['key0'], {'key1': d0})
self.assertEqual(job.stores.test['key0']['key1'], d0)
self.assertEqual(job.stores.test.key0, {'key1': d0})
self.assertEqual(job.stores.test.key0.key1, d0)
job.stores.test.key0.key1 = d1
self.assertEqual(job.stores.test, {'key0': {'key1': d1}})
self.assertEqual(job.stores.test['key0'], {'key1': d1})
self.assertEqual(job.stores.test['key0']['key1'], d1)
self.assertEqual(job.stores.test.key0, {'key1': d1})
self.assertEqual(job.stores.test.key0.key1, d1)
job.stores.test['key0']['key1'] = d2
self.assertEqual(job.stores.test, {'key0': {'key1': d2}})
self.assertEqual(job.stores.test['key0'], {'key1': d2})
self.assertEqual(job.stores.test['key0']['key1'], d2)
self.assertEqual(job.stores.test.key0, {'key1': d2})
self.assertEqual(job.stores.test.key0.key1, d2)
def test_get_set_nested_data(self):
d0 = testdata()
d1 = testdata()
d2 = testdata()
assert d0 != d1 != d2
job = self.open_job(test_token)
with self.open_data(job):
self.assertEqual(len(job.stores.test), 0)
self.assertNotIn('key0', job.stores.test)
job.stores.test['key0'] = d0
self.assertEqual(len(job.stores.test), 1)
self.assertIn('key0', job.stores.test)
self.assertEqual(job.stores.test['key0'], d0)
with self.assertRaises(AttributeError):
job.stores.test.key0.key1
job.stores.test.key0 = {'key1': d0}
self.assertEqual(len(job.stores.test), 1)
self.assertIn('key0', job.stores.test)
self.assertEqual(dict(job.stores.test), {'key0': {'key1': d0}})
self.assertEqual(job.stores.test['key0'], {'key1': d0})
self.assertEqual(job.stores.test['key0']['key1'], d0)
self.assertEqual(job.stores.test.key0, {'key1': d0})
self.assertEqual(job.stores.test.key0.key1, d0)
job.stores.test.key0.key1 = d1
self.assertEqual(job.stores.test, {'key0': {'key1': d1}})
self.assertEqual(job.stores.test['key0'], {'key1': d1})
self.assertEqual(job.stores.test['key0']['key1'], d1)
self.assertEqual(job.stores.test.key0, {'key1': d1})
self.assertEqual(job.stores.test.key0.key1, d1)
job.stores.test['key0']['key1'] = d2
self.assertEqual(job.stores.test, {'key0': {'key1': d2}})
self.assertEqual(job.stores.test['key0'], {'key1': d2})
self.assertEqual(job.stores.test['key0']['key1'], d2)
self.assertEqual(job.stores.test.key0, {'key1': d2})
self.assertEqual(job.stores.test.key0.key1, d2)
def test_assign(self):
key = 'assign'
d0 = testdata()
d1 = testdata()
job = self.open_job(test_token)
with self.open_data(job):
self.assertEqual(len(job.stores.test), 0)
job.stores.test[key] = d0
self.assertEqual(len(job.stores.test), 1)
self.assertEqual(dict(job.stores.test), {key: d0})
with self.assertRaises(ValueError):
job.stores.test = d1
job.stores.test = {key: d1}
self.assertEqual(len(job.stores.test), 1)
self.assertEqual(dict(job.stores.test), {key: d1})
def test_assign_data(self):
key = 'assign'
d0 = testdata()
d1 = testdata()
job = self.open_job(test_token)
with self.open_data(job):
self.assertEqual(len(job.stores.test), 0)
job.stores.test[key] = d0
self.assertEqual(len(job.stores.test), 1)
self.assertEqual(dict(job.stores.test), {key: d0})
with self.assertRaises(ValueError):
job.stores.test = d1
job.stores.test = {key: d1}
self.assertEqual(len(job.stores.test), 1)
self.assertEqual(dict(job.stores.test), {key: d1})
def test_copy_data(self):
key = 'get_set'
d = testdata()
job = self.open_job(test_token)
with self.open_data(job):
job.stores.test[key] = d
self.assertTrue(bool(job.stores.test))
self.assertEqual(len(job.stores.test), 1)
self.assertIn(key, job.stores.test)
self.assertEqual(job.stores.test[key], d)
self.assertEqual(job.stores.test.get(key), d)
self.assertEqual(job.stores.test.get('non-existent-key', d), d)
copy = dict(job.stores.test)
self.assertTrue(bool(copy))
self.assertEqual(len(copy), 1)
self.assertIn(key, copy)
self.assertEqual(copy[key], d)
self.assertEqual(copy.get(key), d)
self.assertEqual(copy.get('non-existent-key', d), d)
def test_update(self):
key = 'get_set'
d = testdata()
job = self.open_job(test_token)
with self.open_data(job):
job.stores.test.update({key: d})
self.assertIn(key, job.stores.test)
def test_clear_data(self):
key = 'clear'
d = testdata()
job = self.open_job(test_token)
with self.open_data(job):
job.stores.test[key] = d
self.assertIn(key, job.stores.test)
self.assertEqual(len(job.stores.test), 1)
job.stores.test.clear()
self.assertNotIn(key, job.stores.test)
self.assertEqual(len(job.stores.test), 0)
def test_reopen(self):
key = 'reopen'
d = testdata()
job = self.open_job(test_token)
with self.open_data(job):
job.stores.test[key] = d
self.assertIn(key, job.stores.test)
self.assertEqual(len(job.stores.test), 1)
job2 = self.open_job(test_token)
with self.open_data(job2):
self.assertIn(key, job2.stores.test)
self.assertEqual(len(job2.stores.test), 1)
def test_concurrency(self):
key = 'concurrent'
d = testdata()
job = self.open_job(test_token)
job2 = self.open_job(test_token)
with self.open_data(job):
with self.open_data(job2):
self.assertNotIn(key, job.stores.test)
self.assertNotIn(key, job2.stores.test)
job.stores.test[key] = d
self.assertIn(key, job.stores.test)
self.assertIn(key, job2.stores.test)
def test_remove(self):
key = 'remove'
job = self.open_job(test_token)
job.remove()
d = testdata()
with self.open_data(job):
job.stores.test[key] = d
self.assertIn(key, job.stores.test)
self.assertEqual(len(job.stores.test), 1)
fn_test = os.path.join(job.workspace(), 'test')
with open(fn_test, 'w') as file:
file.write('test')
self.assertTrue(os.path.isfile(fn_test))
job.remove()
with self.open_data(job):
self.assertNotIn(key, job.stores.test)
self.assertFalse(os.path.isfile(fn_test))
def test_clear_job(self):
    """clear() empties a job's data and files but keeps it in the project."""
    entry_key = 'clear'
    job = self.open_job(test_token)
    # Clearing an uninitialized job must be a harmless no-op.
    self.assertNotIn(job, self.project)
    job.clear()
    self.assertNotIn(job, self.project)
    job.clear()
    self.assertNotIn(job, self.project)
    # Once initialized, the job belongs to the project ...
    job.init()
    self.assertIn(job, self.project)
    # ... and repeated clears must not evict it.
    job.clear()
    self.assertIn(job, self.project)
    job.clear()
    job.clear()
    self.assertIn(job, self.project)
    payload = testdata()
    with self.open_data(job):
        job.stores.test[entry_key] = payload
        self.assertIn(job, self.project)
        self.assertIn(entry_key, job.stores.test)
        self.assertEqual(len(job.stores.test), 1)
    # clear() must wipe the store contents.
    job.clear()
    with self.open_data(job):
        self.assertEqual(len(job.stores.test), 0)
    # clear() must also delete plain files in the job directory.
    with open(job.fn('test'), 'w') as handle:
        handle.write('test')
    self.assertTrue(job.isfile('test'))
    self.assertIn(job, self.project)
    job.clear()
    self.assertFalse(job.isfile('test'))
    with self.open_data(job):
        self.assertEqual(len(job.stores.test), 0)
def test_reset(self):
    """reset() initializes an absent job and empties an existing one."""
    entry_key = 'reset'
    job = self.open_job(test_token)
    self.assertNotIn(job, self.project)
    # Resetting an uninitialized job must add it to the project.
    job.reset()
    self.assertIn(job, self.project)
    with self.open_data(job):
        self.assertEqual(len(job.stores.test), 0)
        job.stores.test[entry_key] = testdata()
        self.assertEqual(len(job.stores.test), 1)
    # Resetting a populated job must wipe its store.
    job.reset()
    self.assertIn(job, self.project)
    with self.open_data(job):
        self.assertEqual(len(job.stores.test), 0)
def test_data(self):
    """Repeated overwrites of one key must be visible through every access path."""
    entry_key = 'test_data'
    job = self.open_job(test_token)

    def check_content(key, expected):
        # Check every access path twice: the original test repeated these
        # assertions verbatim, so keep the double read (it also verifies
        # that reads are stable/idempotent) without the copy-paste.
        for _ in range(2):
            self.assertEqual(job.stores.test[key], expected)
            self.assertEqual(getattr(job.stores.test, key), expected)
            self.assertEqual(dict(job.stores.test)[key], expected)

    with self.open_data(job):
        payload = testdata()
        job.stores.test[entry_key] = payload
        check_content(entry_key, payload)
        # Overwrite via item assignment, twice.
        payload2 = testdata()
        job.stores.test[entry_key] = payload2
        check_content(entry_key, payload2)
        payload3 = testdata()
        job.stores.test[entry_key] = payload3
        check_content(entry_key, payload3)
        # Overwrite via attribute assignment.
        payload4 = testdata()
        setattr(job.stores.test, entry_key, payload4)
        check_content(entry_key, payload4)
class JobClosedCustomDataTest(JobOpenCustomDataTest):
    """Re-run the whole open-data suite without explicitly opening the store.

    Overrides ``open_data`` with a no-op context manager so every inherited
    test accesses ``job.stores.test`` while the store is closed.
    """

    @staticmethod
    @contextmanager
    def open_data(job):
        # Intentionally do nothing: exercise implicit open-on-access.
        yield
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 37.181155
| 87
| 0.580052
| 8,486
| 66,294
| 4.432006
| 0.038298
| 0.16352
| 0.096198
| 0.039085
| 0.877293
| 0.847594
| 0.820021
| 0.790827
| 0.759426
| 0.740734
| 0
| 0.022622
| 0.278532
| 66,294
| 1,782
| 88
| 37.20202
| 0.763721
| 0.004616
| 0
| 0.766708
| 0
| 0
| 0.037315
| 0.004214
| 0
| 0
| 0
| 0
| 0.436881
| 1
| 0.069307
| false
| 0.003094
| 0.012995
| 0.000619
| 0.094059
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5f9b01856461278f9cb1f05f170a37b07ec7fa76
| 41,998
|
py
|
Python
|
train_tfidf.py
|
redyandri/doc2vec-master
|
c0f7fe05dd0a683590ad6374219efe777d4e769d
|
[
"Apache-2.0"
] | null | null | null |
train_tfidf.py
|
redyandri/doc2vec-master
|
c0f7fe05dd0a683590ad6374219efe777d4e769d
|
[
"Apache-2.0"
] | null | null | null |
train_tfidf.py
|
redyandri/doc2vec-master
|
c0f7fe05dd0a683590ad6374219efe777d4e769d
|
[
"Apache-2.0"
] | null | null | null |
import pandas as pd
from sklearn.feature_extraction.text import TfidfVectorizer
import logging
from victorinox import victorinox
import pickle
from sklearn.svm import SVC
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
import numpy as np
import time
from scipy import spatial
import math
import os
from sklearn.linear_model import SGDClassifier
from sklearn.calibration import CalibratedClassifierCV
import heapq
from sklearn.neighbors import KNeighborsClassifier
from sklearn import metrics
#enable logging
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
class MyIter(object):
    """Re-iterable line stream over a UTF-8 text file.

    Unlike a raw file handle, an instance can be iterated multiple times
    (each iteration re-opens the file), which streaming consumers such as
    vectorizer ``fit`` loops require.

    Fixed: removed the redundant class-level ``path = ""`` attribute — it
    was always shadowed by the instance attribute and risked shared state.
    """

    def __init__(self, fp):
        # Path to the corpus file, one document per line.
        self.path = fp

    def __iter__(self):
        # Re-open on every iteration so the object is re-iterable; the
        # ``with`` block guarantees the handle is closed afterwards.
        with open(self.path, 'r', encoding='utf-8') as fin:
            for line in fin:
                # Lines keep their trailing newline, matching plain
                # file iteration semantics.
                yield line
# Project-local helper toolbox instance (victorinox is imported above).
tool=victorinox()
# Input corpora — presumably one row per staff member, plus a derived
# sentences file used as the vectorizer corpus; TODO confirm against the
# preprocessing scripts that produce these CSVs.
datapath_staffs=r"data/dataset_lower_clean_stem_group_staffs.csv"
datapath_sentences=r"data/dataset_lower_clean_stem_group_staffs_sentences.csv"
# Output artifacts: per-document TF-IDF vectors, the pickled vectorizer
# vocabulary, and a pickled KNN classifier model.
tfidf_vectors=r"data/tfidf_group_vectors.csv"
tfidf_model=r"data/tfidf_group_model.pkl"
knn_model=r"data/knn_group_model.pkl"
# NOTE(review): summary path is defined but not used in the visible code.
csvsummary=r"data/dataset_lower_clean_stem_staff_group_with_periods_summary.csv"
# corpus=MyIter(datapath_sentences)
# vectorizer = CountVectorizer(decode_error="replace")
# vec_train = vectorizer.fit_transform(corpus)
# pickle.dump(vectorizer.vocabulary_,open(tfidf_model,"wb"))
# print("tfidf model saved to %s"%tfidf_model)
# transformer = TfidfTransformer()
# loaded_vec = CountVectorizer(decode_error="replace",vocabulary=pickle.load(open(tfidf_model, "rb")))
# print("tfidf model loaded from %s"%tfidf_model)
# df=pd.read_csv(datapath_staffs,sep=";")
# dfpegawai=df.ID_PEGAWAI
# res=[]
# with open(datapath_sentences,"r") as f:
# lines=f.read().splitlines()
# for x in range(len(lines)):
# vec=transformer.fit_transform(loaded_vec.fit_transform(np.array([lines[x]]))).toarray()[0].tolist()#tfidf_model.transform([lines[x]]).toarray()[0].tolist()
# id=[dfpegawai.iloc[x]]
# res.append(vec+id)
# dfres=pd.DataFrame(res)
# dfres.to_csv(tfidf_vectors,sep=";",index=None,header=None)
# print("tfidf vectors saved to %s"%tfidf_vectors)
# tfidf_model=r"data/tfidf_group_model.pkl"
# tfidf_vectors=r"data/tfidf_group_vectors.csv"
# datapath_staffs=r"data/dataset_lower_clean_stem_group_staffs.csv"
# sentence0="komitmen layan kendali gratifikasi sampai salin surat edar menteri uang laksana pelihara data sumber daya manusia human resources information system seluruh pegawai lingkung menteri uang upacara bendera ingat hari ulang tahun 74 merdeka republik indonesia tahun 2019 tanggal 17 agustus 2019 laksana cuti tahun hari libur nasional dan cuti sama hari raya natal tahun 2019 sampai tetap unit kerja predikat wbk tahun 2019 tingkat menteri uang selenggara rapat kerja raker rapat koordinasi rakor bina mental asn integritas sampai instruksi menteri uang nomor 595 imk 01 2019 tanggal 29 juli 2019 selenggara rapat kerja raker rapat koordinasi rakor mohon dukung video converence keikutsertaan pegawai kptik bmn giat pmo informal meeting tahun 2019 himbauan laksana wajib masuk kerja taat jam kerja lingkung sekretariat jenderal menteri uang himbauan laksana wajib masuk kerja taat jam kerja lingkung sekretariat jenderal menteri uang kuesioner minat beasiswa kumpul berkas ppkpns dp3 lingkung pusintek se tentu tega disiplin atas langgar tentu jam kerja langkah langkah strategis laksana anggar lingkup unit eselon i sekretariat jenderal menteri uang ta 2020 himbauan kait sampai lapor harta kaya pajak pajak pribadi lalu alpha se tentu tega disiplin atas langgar tentu jam kerja upacara bendera rangka ingat hari sumpah pemuda hari oeang republik indonesia 73 tahun 2019 disposisi sekretaris jenderal sampai arah presiden menteri kabinet indonesia maju sampai salin surat edar menteri uang laksana pelihara data sumber daya manusia human resources information system seluruh pegawai lingkung menteri uang sampai surat edar menteri uang laksana pelihara data sumber daya manusia hris seluruh pegawai lingkung menteri uang sampai salin surat edar menteri uang laksana pelihara data sumber daya manusia human resources information system seluruh pegawai lingkung menteri uang surat tugas latih bppk pekan 3 bulan september 2019 tindak lanjut hasil one on one meeting kait najam anggar menteri 
uang ta 2019 ta 2020 tindak lanjut hasil one on one meeting kait najam anggar menteri uang ta 2019 ta 2020 tindak lanjut hasil one on one meeting kait najam anggar menteri uang ta 2019 ta 2020 tindak lanjut hasil one on one meeting kait najam anggar menteri uang ta 2019 ta 2020 imbau laksana isi lapor harta kaya lhk lapor pajak pajak pribadi lp2p lalu alpha tawar seleksi sesuai inpassing jabat fungsional pranata uang anggar dapat belanja negara apbn imbau laksana isi lapor harta kaya lhk lapor pajak pajak pribadi lp2p lalu alpha monitoring evaluasi efektivitas organisasi lingkung menteri uang"
# transformer = TfidfTransformer()
# loaded_vec = CountVectorizer(decode_error="replace",vocabulary=pickle.load(open(tfidf_model, "rb")))
# df=pd.read_csv(datapath_staffs,sep=";")
# sentence_ori=df.iloc[0,1]
# veccorpus0=[0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.2775396528207745,0.10674602031568248,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.06404761218940949,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,
0.0,0.0,0.12809522437881898,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04269840812627299,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04269840812627299,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0
,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04269840812627299,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.08539681625254598,0.0,0.0,0.0,0.0,0.0,0.08539681625254598,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04269840812627299,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.
0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.08539681625254598,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.
0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.10674602031568248,0.0,0.0,0.0,0.06404761218940949,0.0,0.0,0.0,0.0,0.0,0.0,0.08539681625254598,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.06404761218940949,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.06404761218940949,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04269840812627299,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.06404761218940949,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.06404761218940949,0.0,0.0,0.0,0.0,0.0,0.04269840812627299,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04269840812627299,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.08539681625254598,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.08539
681625254598,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.10674602031568248,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.06404761218940949,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.19214283656822848,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04269840812627299,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.21349204063136495,0.0,0.0,0.06404761218940949,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04269840812627299,0.0,0.04269840812627299,0.0,0.08539681625254598,0.0,0.0,0.0,0.0,0.0,0.10674602031568248,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.
0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04269840812627299,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.17079363250509197,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04269840812627299,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.08539681625254598,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04269840812627299,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.10674602031568248,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.40563487719959346,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.08539681625254598,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0
.0,0.0,0.021349204063136496,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.08539681625254598,0.17079363250509197,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.12809522437881898,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.10674602031568248,0.0,0.021349204063136496,0.0,0.0,0.0,0.08539681625254598,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0
.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.06404761218940949,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04269840812627299,0.0,0.0,0.04269840812627299,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.08539681625254598,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0
.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04269840812627299,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.06404761218940949,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.06404761218940949,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.17079363250509197,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04269840812627299,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.06404761218940949,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.04269840812627299,0.0,0.0,0.0,0.08539681625254598,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,
0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.08539681625254598,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.10674602031568248,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.06404761218940949,0.0,0.0,0.19214283656822848,0.0,0.04269840812627299,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.1494444284419555,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04269840812627299,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04269840812627299,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.08539681625254598,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0
.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.08539681625254598,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.40563487719959346,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04269840812627299,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04269840812627299,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.04269840812627299,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.021349204063136496,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]
# vec_ori=transformer.fit_transform(loaded_vec.fit_transform(np.array([sentence_ori]))).toarray()[0].tolist()
# vec=transformer.fit_transform(loaded_vec.fit_transform(np.array([sentence0]))).toarray()[0].tolist()
# print(vec_ori==vec)
# print(vec_ori==vec==veccorpus0)
# tfidf_vectors=r"data/tfidf_group_vectors.csv"
# svc_model=r"data/svc_model.pkl"
# df=pd.read_csv(tfidf_vectors,sep=";")
# print('Training classifier')
# model = SVC(kernel='linear', probability=True)
# X=df.iloc[:,:-1]
# y=df.iloc[:,-1]
# model.fit(X, y)
# with open(svc_model, 'wb') as outfile:
# pickle.dump((model, y), outfile)
# print('Saved classifier model to file "%s"' %svc_model)
# svc_model=r"data/svc_model.pkl"
# tfidf_model=r"data/tfidf_group_model.pkl"
# tfidf_vectors=r"data/tfidf_group_vectors.csv"
# datapath_staffs=r"data/dataset_lower_clean_stem_group_staffs.csv"
# sentence0="komitmen layan kendali gratifikasi sampai salin surat edar menteri uang laksana pelihara data sumber daya manusia human resources information system seluruh pegawai lingkung menteri uang upacara bendera ingat hari ulang tahun 74 merdeka republik indonesia tahun 2019 tanggal 17 agustus 2019 laksana cuti tahun hari libur nasional dan cuti sama hari raya natal tahun 2019 sampai tetap unit kerja predikat wbk tahun 2019 tingkat menteri uang selenggara rapat kerja raker rapat koordinasi rakor bina mental asn integritas sampai instruksi menteri uang nomor 595 imk 01 2019 tanggal 29 juli 2019 selenggara rapat kerja raker rapat koordinasi rakor mohon dukung video converence keikutsertaan pegawai kptik bmn giat pmo informal meeting tahun 2019 himbauan laksana wajib masuk kerja taat jam kerja lingkung sekretariat jenderal menteri uang himbauan laksana wajib masuk kerja taat jam kerja lingkung sekretariat jenderal menteri uang kuesioner minat beasiswa kumpul berkas ppkpns dp3 lingkung pusintek se tentu tega disiplin atas langgar tentu jam kerja langkah langkah strategis laksana anggar lingkup unit eselon i sekretariat jenderal menteri uang ta 2020 himbauan kait sampai lapor harta kaya pajak pajak pribadi lalu alpha se tentu tega disiplin atas langgar tentu jam kerja upacara bendera rangka ingat hari sumpah pemuda hari oeang republik indonesia 73 tahun 2019 disposisi sekretaris jenderal sampai arah presiden menteri kabinet indonesia maju sampai salin surat edar menteri uang laksana pelihara data sumber daya manusia human resources information system seluruh pegawai lingkung menteri uang sampai surat edar menteri uang laksana pelihara data sumber daya manusia hris seluruh pegawai lingkung menteri uang sampai salin surat edar menteri uang laksana pelihara data sumber daya manusia human resources information system seluruh pegawai lingkung menteri uang surat tugas latih bppk pekan 3 bulan september 2019 tindak lanjut hasil one on one meeting kait najam anggar menteri 
uang ta 2019 ta 2020 tindak lanjut hasil one on one meeting kait najam anggar menteri uang ta 2019 ta 2020 tindak lanjut hasil one on one meeting kait najam anggar menteri uang ta 2019 ta 2020 tindak lanjut hasil one on one meeting kait najam anggar menteri uang ta 2019 ta 2020 imbau laksana isi lapor harta kaya lhk lapor pajak pajak pribadi lp2p lalu alpha tawar seleksi sesuai inpassing jabat fungsional pranata uang anggar dapat belanja negara apbn imbau laksana isi lapor harta kaya lhk lapor pajak pajak pribadi lp2p lalu alpha monitoring evaluasi efektivitas organisasi lingkung menteri uang"
# sentence1="sql server"
# print('Testing classifier')
# with open(svc_model, 'rb') as infile:
# (model, class_names) = pickle.load(infile)
# print('Loaded classifier model from file "%s"' % svc_model)
# transformer = TfidfTransformer()
# loaded_vec = CountVectorizer(decode_error="replace",vocabulary=pickle.load(open(tfidf_model, "rb")))
# vec=transformer.fit_transform(loaded_vec.fit_transform([sentence0])).toarray()[0].tolist()
# dim=len(loaded_vec.vocabulary)
# vec=np.array(vec).reshape(-1, dim)
# predictions = model.predict_proba(vec)
# pred = model.predict(vec)
# best_class_indices = np.argmax(predictions, axis=1)
# best_class_probabilities = predictions[np.arange(len(best_class_indices)), best_class_indices]
# print("pred:%s"%str(pred))
# print("prediction:%s"%str(predictions))
# print("best_class_indices:%s"%str(best_class_indices))
# print("best_class_probabilities:%s"%str(best_class_probabilities))
# print("employee:%s"%str(class_names[best_class_indices[0]]))
# df=pd.read_csv(tfidf_vectors,sep=";",header=None)
# q="application software level 1 symfoni php framework knowledge update tools itsm sipelantik knowledge update itsm awareness latih data services knowledge update tools itsm sipelantik knowledge update itsm awareness latih data services seminar enterprise architecture cross platform mobile development with xamarin scrum project management workshop syncfusion framework workshop tata kelola tik bas cobit 5 international public service forum 2018 workshop mas atur presiden nomor 16 tahun 2018 ada barang jasa perintah training administrasi manajemen sdm latih uji nasional sertifikasi ahli ada barang jasa perintah"
# q2="application software level 1 symfoni php framework knowledge update tools itsm sipelantik knowledge update itsm awareness latih data services knowledge update tools itsm sipelantik knowledge update itsm awareness latih data services seminar enterprise architecture cross platform mobile development with xamarin scrum project management workshop syncfusion framework workshop tata kelola tik BERbasis cobit 5 international public service forum 2018 workshop mas PERaturAN presiden nomor 16 tahun 2018 PENGadaAN barang jasa pEMerintahAN training administrasi manajemen sdm PElatihAN ujiAN nasional sertifikasi ahli PENGadaAN barang jasa pEMerintahAN "
# q3="oracle"
# transformer = TfidfTransformer()
# loaded_vec = CountVectorizer(decode_error="replace",vocabulary=pickle.load(open(tfidf_model, "rb")))
# t1=time.time()
# qv=transformer.fit_transform(loaded_vec.fit_transform(np.array([q2]))).toarray()[0].tolist()
# scores=[]
# for idx,row in df.iterrows():
# v=row[:-1].tolist()
# score=1 - spatial.distance.cosine(qv,v)
# scores.append(score)
# print(scores)
# top5=heapq.nlargest(5, range(len(scores)), scores.__getitem__)
# res=df.iloc[top5,-1]
# elapsed=time.time()-t1
# print(res)
# print([scores[x] for x in top5])
# print("elapsed:%f"%elapsed)
# print("########################################")
# print("train KNN")
# t1=time.time()
# knn = KNeighborsClassifier(n_neighbors=len(df))
# knn.fit(df.iloc[:,:-1], df.iloc[:,-1])
# print("train KNN DONE in %f"%(time.time()-t1))
# with open(knn_model,"wb") as f:
# pickle.dump(knn,f)
# with open(knn_model,"rb") as f:
# knn_saved=pickle.load(f)
# q3=tool.preprocess_sentence(q3)
# qv=transformer.fit_transform(loaded_vec.fit_transform([q3])).toarray()[0].tolist()
# (distances, indices)=knn_saved.kneighbors([qv],n_neighbors=5)
# indices=indices.tolist()[0]
# res=df.iloc[indices,-1]
# print(res.tolist())
# print(distances.tolist())
#########################################################################################################
# datapath_all=r"data/dataset_lower_clean_stem.csv"
# datapath_staffs=r"data/dataset_lower_clean_stem_staff.csv"
# datapath_staff_sentences=r"data/dataset_lower_clean_stem_staff_sentences.csv"
# tfidf_vectors=r"data/tfidf_per_sentence_vectors.csv"
# tfidf_model=r"data/tfidf_per_sentence_model.pkl"
# leader_csv=r"data/leaders.csv"
# svc_model=r"data/svc_per_sentence_model.pkl"
# dataset_vector=r"data/tfidf_per_sentence_vectors.csv"
# dataset_vector_nip=r"data/tfidf_per_sentence_vectors_nip.csv"
# dataset_vector_idseq=r"data/tfidf_per_sentence_vectors_idseq.csv"
# staff_dictionary=r"data/staff_dictionary.pkl"
# staff_dictionary_by_sequence=r"data/staff_dictionary_by_sequence.pkl"
# staff_dictionary_by_sequence_reveresed=r"data/staff_dictionary_by_sequence_reversed.pkl"
# with open(leader_csv,"r") as f:
# leaders=f.read().splitlines()
# df=pd.read_csv(datapath_all,sep=";")
# X=[]
# y=[]
# for idx,row in df.iterrows():
# id=row["ID_PEGAWAI"]
# nip=str(id).split("_")[0]
# if nip in leaders:
# continue
# else:
# y.append(id)
# X.append(row["KOMPETENSI"])
# dct_staff={"KOMPETENSI":X,"ID_PEGAWAI":y}
# dct_staff_sentences={"KOMPETENSI":X}
# df_staff=pd.DataFrame(dct_staff)
# df_staff.to_csv(datapath_staffs,sep=";",index=None)
# print("saved %d records to %s"%(len(df_staff),datapath_staffs))
# df_staff_sentences=pd.DataFrame(dct_staff_sentences)
# df_staff_sentences.to_csv(datapath_staff_sentences,sep=";",index=None,header=None)
# print("saved %d sentences to %s"%(len(df_staff_sentences),datapath_staff_sentences))
# corpus=MyIter(datapath_staff_sentences)
# vectorizer = CountVectorizer(decode_error="replace")
# vec_train = vectorizer.fit_transform(corpus)
# pickle.dump(vectorizer.vocabulary_,open(tfidf_model,"wb"))
# print("tfidf model saved to %s"%tfidf_model)
# transformer = TfidfTransformer()
# loaded_vec = CountVectorizer(decode_error="replace",vocabulary=pickle.load(open(tfidf_model, "rb")))
# print("tfidf model loaded from %s"%tfidf_model)
# df=pd.read_csv(datapath_staffs,sep=";")
# dfpegawai=df.ID_PEGAWAI
# dfcompetence=df.KOMPETENSI
# total=len(dfcompetence)
# res=[]
# batchsize=20000
# it=0
# with open(tfidf_vectors,"a+") as f:
# for line in corpus:#dfcompetence[it:it+batchsize]:
# vec=transformer.fit_transform(loaded_vec.fit_transform([line])).toarray()[0].tolist()#tfidf_model.transform([lines[x]]).toarray()[0].tolist()
# id=[dfpegawai.iloc[it]]
# l=vec + id
# f.write(";".join([str(x) for x in l]))
# f.write("\n")
# print('\r%d/%d'%(it,total), end="", flush=True)
# it+=1
#
#
#
#
#
#
#
# with open(staff_dictionary_by_sequence,"rb") as f:
# kamus=pickle.load(f)
# ids=[int(k) for k,v in kamus.items()]
# corpus=MyIter(dataset_vector_idseq)
# print('Training classifier')
# #model = SVC(kernel='linear', probability=True)
# clf = SGDClassifier()
# batchsize=5000
# i=0
# X=[]
# y=[]
# #y_all=[]
# for line in corpus:
# if i%batchsize==0:
# if i!=0:
# clf.partial_fit(X, y, classes=ids)
# print('\r partial train on batch %d' % (math.floor(i/batchsize)), end="", flush=True)
# X=[]
# y=[]
# else:
# parts=line.split(";")
# vec=parts[0:-1]
# id=parts[-1].replace("\n","")
# X.append(vec)
# y.append(int(id))
# #y_all.append(id)
# i+=1
# clf.partial_fit(X, y, classes=ids)
# print('\r partial train on batch %d' % (math.floor(i/batchsize)), end="", flush=True)
# with open(svc_model, 'wb') as outfile:
# pickle.dump(clf, outfile)
# print('Saved classifier model to file "%s"' %svc_model)
#
#
#
#
#
# sentence0="sampai surat edar menteri uang nomor se 29 mk 1 2019 lapor dialog kerja individu periode i tahun 2020 lingkung menteri uang"
# with open(staff_dictionary_by_sequence,"rb") as f:
# kamus=pickle.load(f)
# sentence1="sql server"
# print('Testing classifier')
# with open(svc_model, 'rb') as infile:
# model = pickle.load(infile)
# print('Loaded classifier model from file "%s"' % svc_model)
# transformer = TfidfTransformer()
# loaded_vec = CountVectorizer(decode_error="replace",vocabulary=pickle.load(open(tfidf_model, "rb")))
# vec=transformer.fit_transform(loaded_vec.fit_transform([sentence0])).toarray()[0].tolist()
# dim=len(loaded_vec.vocabulary)
# vec=np.array(vec).reshape(-1, dim)
# # predictions = model.predict_proba(vec)
# pred = model.predict(vec)
# # best_class_indices = np.argmax(predictions, axis=1)
# # best_class_probabilities = predictions[np.arange(len(best_class_indices)), best_class_indices]
# print("pred:%s"%str(kamus[pred[0]]))
# # print("prediction:%s"%str(predictions))
# # print("best_class_indices:%s"%str(best_class_indices))
# # print("best_class_probabilities:%s"%str(best_class_probabilities))
#####################################################################################
# Evaluate the saved KNN model against the labelled summary CSV
# (one "id;text" record per line) and report per-record and overall accuracy.
transformer = TfidfTransformer()
loaded_vec = CountVectorizer(decode_error="replace",
                             vocabulary=pickle.load(open(tfidf_model, "rb")))
with open(knn_model, "rb") as f:
    knn_saved = pickle.load(f)

testcorpus = MyIter(csvsummary)
ytrue = []
ypred = []
truecount = 0
seen = 0
for line in testcorpus:
    parts = line.split(";")
    true_id = parts[0]  # avoid shadowing the builtin `id`
    ytrue.append(true_id)
    # Treat sentence separators as plain whitespace before vectorizing.
    text = parts[1].replace(".", " ")
    qv = transformer.fit_transform(loaded_vec.fit_transform([text])).toarray()[0].tolist()
    # predict() returns a 1-element array; keep the scalar label so that
    # ypred stays a flat 1-D sequence for accuracy_score below.
    y_pred = knn_saved.predict([qv])[0]
    ypred.append(y_pred)
    seen += 1
    is_true = true_id == y_pred
    if is_true:
        truecount += 1
    # Running accuracy over the records seen so far (previously the
    # denominator was hard-coded to 411, which is only correct for one
    # specific test file).
    print("%r, accuracy:%f" % (is_true, truecount / seen))
print(metrics.accuracy_score(ytrue, ypred))
| 125.742515
| 23,948
| 0.641459
| 13,274
| 41,998
| 2.004369
| 0.034504
| 0.779148
| 1.151808
| 1.513193
| 0.875592
| 0.868376
| 0.854431
| 0.841352
| 0.834098
| 0.815681
| 0
| 0.341008
| 0.053741
| 41,998
| 333
| 23,949
| 126.12012
| 0.328477
| 0.935902
| 0
| 0
| 0
| 0
| 0.145438
| 0.111111
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036364
| false
| 0
| 0.327273
| 0
| 0.4
| 0.036364
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 15
|
f2ea0697970fde3fd66104bfca22385b569e5bbc
| 14,725
|
py
|
Python
|
kickstarter_django/kickstarter/models.py
|
pratyaymodi/kickstarter
|
a9bacdcdc0b44482bce57bf1d69a05d8c3c926c8
|
[
"MIT"
] | null | null | null |
kickstarter_django/kickstarter/models.py
|
pratyaymodi/kickstarter
|
a9bacdcdc0b44482bce57bf1d69a05d8c3c926c8
|
[
"MIT"
] | null | null | null |
kickstarter_django/kickstarter/models.py
|
pratyaymodi/kickstarter
|
a9bacdcdc0b44482bce57bf1d69a05d8c3c926c8
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals
from django.db import models
# Create your models here.
class Kickstarter(models.Model):
    """
    Raw Kickstarter record, one field per CSV column.

    This model is not used directly in any of the tables or charts; it only
    exists so that Postgres views for the following models can be created
    on top of the imported data.  Most columns are TextField because the
    CSV import is untyped; numeric interpretation happens in the views.
    """
    status = models.TextField(blank=True, null=True)
    disable_communication = models.TextField(blank=True, null=True)
    location_type = models.TextField(blank=True, null=True)
    category_parent_id = models.IntegerField(blank=True, null=True)
    sub_category = models.TextField(blank=True, null=True)
    usd_pledged = models.TextField(blank=True, null=True)
    launched_at = models.TextField(blank=True, null=True)
    category_slug = models.TextField(blank=True, null=True)
    currency = models.TextField(blank=True, null=True)
    deadline = models.TextField(blank=True, null=True)
    spotlight = models.TextField(blank=True, null=True)
    currency_trailing_code = models.TextField(blank=True, null=True)
    displayable_name = models.TextField(blank=True, null=True)
    state_changed_at = models.TextField(blank=True, null=True)
    goal = models.TextField(blank=True, null=True)
    category = models.TextField(blank=True, null=True)
    city = models.TextField(blank=True, null=True)
    name = models.TextField(blank=True, null=True)
    creator_name = models.TextField(blank=True, null=True)
    staff_pick = models.TextField(blank=True, null=True)
    country = models.TextField(blank=True, null=True)
    pledged = models.TextField(blank=True, null=True)
    creator = models.TextField(blank=True, null=True)
    location_code = models.TextField(blank=True, null=True)
    slug = models.TextField(blank=True, null=True)
    state = models.TextField(blank=True, null=True)
    static_usd_rate = models.TextField(blank=True, null=True)
    location = models.TextField(blank=True, null=True)
    backers_count = models.TextField(blank=True, null=True)
    currency_symbol = models.TextField(blank=True, null=True)
    category_id = models.IntegerField(blank=True, null=True)
    created_at = models.TextField(blank=True, null=True)
    blurb = models.TextField(blank=True, null=True)
    category_position = models.IntegerField(blank=True, null=True)

    class Meta:
        # The table is created by the CSV import, not by Django migrations.
        managed = False
        db_table = 'kickstarter'
class Projects(models.Model):
    """
    Cleaned-up projection of the raw ``kickstarter`` table, backed by a
    Postgres view (hence ``managed = False``).  The view definition is:

    create view
    projects
    as
    select
    id,
    name,
    creator_name,
    blurb,
    backers_count,
    goal,
    pledged,
    round((pledged::decimal/goal::decimal)*100,2) as percent_of_goal,
    status,
    category,
    sub_category,
    to_timestamp(launched_at::int) as launched_at,
    to_timestamp(deadline::int) as deadline,
    to_timestamp(created_at::int) as created_at,
    location,
    country,
    state,
    city
    from kickstarter
    """
    name = models.TextField(blank=True, null=True)
    creator_name = models.TextField(blank=True, null=True)
    backers_count = models.TextField(blank=True, null=True)
    goal = models.TextField(blank=True, null=True)
    pledged = models.TextField(blank=True, null=True)
    status = models.TextField(blank=True, null=True)
    category = models.TextField(blank=True, null=True)
    sub_category = models.TextField(blank=True, null=True)
    location = models.TextField(blank=True, null=True)
    country = models.TextField(blank=True, null=True)
    state = models.TextField(blank=True, null=True)
    city = models.TextField(blank=True, null=True)

    class Meta:
        # Read-only DB view; Django must not try to create or alter it.
        managed = False
        db_table = 'projects'
class CategoryStatusCount(models.Model):
    """
    Count of projects in a category by status, with row and column totals.

    Created as a crosstab view in Postgres (hence ``managed = False``):

    create view category_status_count
    as
    SELECT row_number() over (order by category
    nulls last) as id, *
    FROM (
    SELECT *
    FROM crosstab( $$with cte AS
    (
    with cet AS (SELECT category,
    status,
    count(id) AS count
    FROM projects
    GROUP BY category,
    status) table cet
    UNION ALL
    select 'Total' as one,
    status,
    sum(count) as count
    FROM cet
    GROUP BY status,
    one
    ORDER BY 1,2
    ) TABLE cte
    UNION ALL
    SELECT category,
    'Total' as status,
    SUM(count) AS ct
    FROM cte
    GROUP BY 1
    ORDER BY 1$$,
    $$values
    ('canceled'::text),
    ('failed'::text),
    ('live'::text),
    ('successful'::text),
    ('suspended'::text),
    ('Total')$$ ) AS t
    ("category" text,
    "canceled" INT,
    "failed" INT,
    "live" INT,
    "successful" INT,
    "suspended" INT,
    "Total" INT
    )
    ) as ct
    order by category
    """
    category = models.TextField(blank=True, null=True)
    # One integer column per project status, plus the row total.
    canceled = models.IntegerField(blank=True, null=True)
    failed = models.IntegerField(blank=True, null=True)
    live = models.IntegerField(blank=True, null=True)
    successful = models.IntegerField(blank=True, null=True)
    suspended = models.IntegerField(blank=True, null=True)
    total = models.IntegerField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'category_status_count'
class SubCategoryStatusCount(models.Model):
    """
    Count of projects in a sub-category by status, with totals.

    Created as a crosstab view in Postgres (hence ``managed = False``);
    identical in shape to ``category_status_count`` but grouped by
    sub_category:

    create view sub_category_status_count
    as
    SELECT row_number() over (order by sub_category
    nulls last) as id, *
    FROM (
    SELECT *
    FROM crosstab( $$with cte AS
    (
    with cet AS (SELECT sub_category,
    status,
    count(id) AS count
    FROM projects
    GROUP BY sub_category,
    status) table cet
    UNION ALL
    select 'Total' as one,
    status,
    sum(count) as count
    FROM cet
    GROUP BY status,
    one
    ORDER BY 1,2
    ) TABLE cte
    UNION ALL
    SELECT sub_category,
    'Total' as status,
    SUM(count) AS ct
    FROM cte
    GROUP BY 1
    ORDER BY 1$$,
    $$values
    ('canceled'::text),
    ('failed'::text),
    ('live'::text),
    ('successful'::text),
    ('suspended'::text),
    ('Total')$$ ) AS t
    ("sub_category" text,
    "canceled" INT,
    "failed" INT,
    "live" INT,
    "successful" INT,
    "suspended" INT,
    "Total" INT
    )
    ) as ct
    order by sub_category
    """
    sub_category = models.TextField(blank=True, null=True)
    # One integer column per project status, plus the row total.
    canceled = models.IntegerField(blank=True, null=True)
    failed = models.IntegerField(blank=True, null=True)
    live = models.IntegerField(blank=True, null=True)
    successful = models.IntegerField(blank=True, null=True)
    suspended = models.IntegerField(blank=True, null=True)
    total = models.IntegerField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'sub_category_status_count'
class CountryStatusCount(models.Model):
    """
    Count of projects per location by status, with totals.

    Created as a crosstab view in Postgres (hence ``managed = False``).
    NOTE(review): the view groups by ``location`` but the output column is
    named ``country`` — confirm that is intentional.

    create view country_status_count
    as
    SELECT *
    FROM crosstab( $$with cte AS
    (
    with cet AS (SELECT location,
    status,
    count(id) AS count
    FROM projects
    GROUP BY location,
    status) table cet
    UNION ALL
    select 'Zo-Total' as one, -- To make sure it stays at the end
    status,
    sum(count) as count
    FROM cet
    GROUP BY status,
    one
    ORDER BY 1,2
    ) TABLE cte
    UNION ALL
    SELECT location,
    'Total' as status,
    SUM(count) AS ct
    FROM cte
    GROUP BY 1
    ORDER BY 1$$,
    $$values
    ('canceled'::text),
    ('failed'::text),
    ('live'::text),
    ('successful'::text),
    ('suspended'::text),
    ('Total')$$ ) AS t
    ("country" text,
    "canceled" INT,
    "failed" INT,
    "live" INT,
    "successful" INT,
    "suspended" INT,
    "total" INT
    )
    """
    country = models.TextField(blank=True, null=True)
    # One integer column per project status, plus the row total.
    canceled = models.IntegerField(blank=True, null=True)
    failed = models.IntegerField(blank=True, null=True)
    live = models.IntegerField(blank=True, null=True)
    successful = models.IntegerField(blank=True, null=True)
    suspended = models.IntegerField(blank=True, null=True)
    total = models.IntegerField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'country_status_count'
class MonthStatusCount(models.Model):
    """
    Count of projects created per month ('YYYY-MM') by status, with totals.

    Created as a crosstab view in Postgres (hence ``managed = False``):

    CREATE VIEW monthly_status_count AS
    SELECT row_number() over (
    ORDER BY MONTH nulls LAST) AS id,
    * FROM
    (SELECT *
    FROM crosstab($$with cte AS ( WITH cet AS
    (
    SELECT to_char(DATE(created_at),'YYYY-MM') AS month,
    status,
    count(id)
    FROM projects
    GROUP BY month,
    status
    ORDER BY month
    ) TABLE cet
    UNION ALL
    select 'Total' as one,--To make sure it stays at the end
    status,
    sum(count) as count
    FROM cet
    GROUP BY status,
    one
    ORDER BY 1,2
    ) TABLE cte
    UNION ALL
    SELECT month,
    'Total' AS status,
    SUM(count) AS ct
    FROM cte
    GROUP BY month
    ORDER BY month$$ , $$values ('canceled'::text),
    ('failed'::text),
    ('live'::text),
    ('successful'::text),
    ('suspended'::text),
    ('Total')$$) AS t
    ("month" text, "canceled" INT,
    "failed" INT, "live" INT, "successful" INT,
    "suspended" INT, "total" INT)) AS ct
    ORDER BY MONTH
    """
    month = models.TextField(blank=True, null=True)
    # One integer column per project status, plus the row total.
    canceled = models.IntegerField(blank=True, null=True)
    failed = models.IntegerField(blank=True, null=True)
    live = models.IntegerField(blank=True, null=True)
    successful = models.IntegerField(blank=True, null=True)
    suspended = models.IntegerField(blank=True, null=True)
    total = models.IntegerField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'monthly_status_count'
class CategoryStatusPercent(models.Model):
    """
    Percentage of projects per status within each category.

    Backed by a Postgres view (hence ``managed = False``):

    create view category_status_percent
    as
    select
    category,
    round((canceled::decimal/total)*100,2)||' %' as canceled,
    round((failed::decimal/total)*100,2)||' %' as failed,
    round((live::decimal/total)*100,2)||' %' as live,
    round((successful::decimal/total)*100,2)||' %' as successful,
    round((suspended::decimal/total)*100,2)||' %' as suspended
    from category_status_count
    """
    category = models.TextField(blank=True, null=True)
    # The view concatenates ' %' onto each rounded value, so every
    # percentage column is text like '12.34 %'.  The previous
    # IntegerField declarations could not represent those values.
    canceled = models.TextField(blank=True, null=True)
    failed = models.TextField(blank=True, null=True)
    live = models.TextField(blank=True, null=True)
    successful = models.TextField(blank=True, null=True)
    suspended = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'category_status_percent'
class SubCategoryStatusPercent(models.Model):
    """
    Percentage of projects per status within each sub-category.

    Backed by a Postgres view (hence ``managed = False``):

    create view sub_category_status_percent
    as
    select
    sub_category,
    round((canceled::decimal/total)*100,2)||' %' as canceled,
    round((failed::decimal/total)*100,2)||' %' as failed,
    round((live::decimal/total)*100,2)||' %' as live,
    round((successful::decimal/total)*100,2)||' %' as successful,
    round((suspended::decimal/total)*100,2)||' %' as suspended
    from sub_category_status_count
    """
    sub_category = models.TextField(blank=True, null=True)
    # The view concatenates ' %' onto each rounded value, so every
    # percentage column is text like '12.34 %'.  The previous
    # IntegerField declarations could not represent those values.
    canceled = models.TextField(blank=True, null=True)
    failed = models.TextField(blank=True, null=True)
    live = models.TextField(blank=True, null=True)
    successful = models.TextField(blank=True, null=True)
    suspended = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'sub_category_status_percent'
class CountryStatusPercent(models.Model):
    """
    Percentage of projects per status within each country.

    Backed by a Postgres view (hence ``managed = False``):

    create view country_status_percent
    as
    select
    country,
    round((canceled::decimal/total)*100,2)||' %' as canceled,
    round((failed::decimal/total)*100,2)||' %' as failed,
    round((live::decimal/total)*100,2)||' %' as live,
    round((successful::decimal/total)*100,2)||' %' as successful,
    round((suspended::decimal/total)*100,2)||' %' as suspended
    from country_status_count
    """
    country = models.TextField(blank=True, null=True)
    # The view concatenates ' %' onto each rounded value, so every
    # percentage column is text like '12.34 %'.  The previous
    # IntegerField declarations could not represent those values.
    canceled = models.TextField(blank=True, null=True)
    failed = models.TextField(blank=True, null=True)
    live = models.TextField(blank=True, null=True)
    successful = models.TextField(blank=True, null=True)
    suspended = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'country_status_percent'
class MonthStatusPercent(models.Model):
    """
    Percentage of projects per status within each month.

    Backed by a Postgres view (hence ``managed = False``):

    CREATE VIEW monthly_status_percent AS
    SELECT row_number() over (
    ORDER BY MONTH nulls LAST) AS id,*
    FROM
    (SELECT MONTH,
    round((canceled::decimal/total)*100,2)||' %' AS canceled,
    round((failed::decimal/total)*100,2)||' %' AS failed,
    round((live::decimal/total)*100,2)||' %' AS live,
    round((SUCCESSFUL::decimal/total)*100,2)||' %' AS SUCCESSFUL,
    round((suspended::decimal/total)*100,2)||' %' AS suspended
    FROM monthly_status_count) AS ct
    ORDER BY MONTH
    """
    month = models.TextField(blank=True, null=True)
    # The view concatenates ' %' onto each rounded value, so every
    # percentage column is text like '12.34 %'.  The previous
    # IntegerField declarations could not represent those values.
    canceled = models.TextField(blank=True, null=True)
    failed = models.TextField(blank=True, null=True)
    live = models.TextField(blank=True, null=True)
    successful = models.TextField(blank=True, null=True)
    suspended = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'monthly_status_percent'
| 31.941432
| 78
| 0.595722
| 1,685
| 14,725
| 5.138279
| 0.083086
| 0.101871
| 0.147147
| 0.192423
| 0.854354
| 0.845807
| 0.820166
| 0.74024
| 0.737699
| 0.731924
| 0
| 0.00944
| 0.295008
| 14,725
| 460
| 79
| 32.01087
| 0.824583
| 0.453039
| 0
| 0.685714
| 0
| 0
| 0.028641
| 0.02015
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.014286
| 0
| 0.857143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
841286ee03531d2d810345241232a4fc38f4b20b
| 3,251
|
py
|
Python
|
isi_mip/climatemodels/migrations/0093_auto_20190326_1338.py
|
ISI-MIP/isimip
|
c2a78c727337e38f3695031e00afd607da7d6dcb
|
[
"MIT"
] | 4
|
2017-07-05T08:06:18.000Z
|
2021-03-01T17:23:18.000Z
|
isi_mip/climatemodels/migrations/0093_auto_20190326_1338.py
|
ISI-MIP/isimip
|
c2a78c727337e38f3695031e00afd607da7d6dcb
|
[
"MIT"
] | 4
|
2020-01-31T09:02:57.000Z
|
2021-04-20T14:04:35.000Z
|
isi_mip/climatemodels/migrations/0093_auto_20190326_1338.py
|
ISI-MIP/isimip
|
c2a78c727337e38f3695031e00afd607da7d6dcb
|
[
"MIT"
] | 4
|
2017-10-12T01:48:55.000Z
|
2020-04-29T13:50:03.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-03-26 12:38
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
import isi_mip.climatemodels.models
class Migration(migrations.Migration):
    """
    Add an optional description field for each of the five attachment
    slots, and restrict attachment uploads to pdf/txt/csv files via a
    FileExtensionValidator.
    """

    dependencies = [
        ('climatemodels', '0092_auto_20181219_1253'),
    ]

    operations = [
        # One free-text description per attachment slot (1-5).
        migrations.AddField(
            model_name='attachment',
            name='attachment1_description',
            field=models.TextField(blank=True, null=True, verbose_name='Attachment description'),
        ),
        migrations.AddField(
            model_name='attachment',
            name='attachment2_description',
            field=models.TextField(blank=True, null=True, verbose_name='Attachment description'),
        ),
        migrations.AddField(
            model_name='attachment',
            name='attachment3_description',
            field=models.TextField(blank=True, null=True, verbose_name='Attachment description'),
        ),
        migrations.AddField(
            model_name='attachment',
            name='attachment4_description',
            field=models.TextField(blank=True, null=True, verbose_name='Attachment description'),
        ),
        migrations.AddField(
            model_name='attachment',
            name='attachment5_description',
            field=models.TextField(blank=True, null=True, verbose_name='Attachment description'),
        ),
        # Re-declare the five FileFields to add the extension validator.
        migrations.AlterField(
            model_name='attachment',
            name='attachment1',
            field=models.FileField(blank=True, null=True, upload_to=isi_mip.climatemodels.models.impact_model_path, validators=[django.core.validators.FileExtensionValidator(allowed_extensions=['pdf', 'txt', 'csv'])], verbose_name='Attachment'),
        ),
        migrations.AlterField(
            model_name='attachment',
            name='attachment2',
            field=models.FileField(blank=True, null=True, upload_to=isi_mip.climatemodels.models.impact_model_path, validators=[django.core.validators.FileExtensionValidator(allowed_extensions=['pdf', 'txt', 'csv'])], verbose_name='Attachment'),
        ),
        migrations.AlterField(
            model_name='attachment',
            name='attachment3',
            field=models.FileField(blank=True, null=True, upload_to=isi_mip.climatemodels.models.impact_model_path, validators=[django.core.validators.FileExtensionValidator(allowed_extensions=['pdf', 'txt', 'csv'])], verbose_name='Attachment'),
        ),
        migrations.AlterField(
            model_name='attachment',
            name='attachment4',
            field=models.FileField(blank=True, null=True, upload_to=isi_mip.climatemodels.models.impact_model_path, validators=[django.core.validators.FileExtensionValidator(allowed_extensions=['pdf', 'txt', 'csv'])], verbose_name='Attachment'),
        ),
        migrations.AlterField(
            model_name='attachment',
            name='attachment5',
            field=models.FileField(blank=True, null=True, upload_to=isi_mip.climatemodels.models.impact_model_path, validators=[django.core.validators.FileExtensionValidator(allowed_extensions=['pdf', 'txt', 'csv'])], verbose_name='Attachment'),
        ),
    ]
| 47.808824
| 245
| 0.665949
| 321
| 3,251
| 6.560748
| 0.199377
| 0.132953
| 0.090218
| 0.109212
| 0.878443
| 0.826211
| 0.791073
| 0.791073
| 0.791073
| 0.791073
| 0
| 0.017127
| 0.209782
| 3,251
| 67
| 246
| 48.522388
| 0.802647
| 0.021224
| 0
| 0.666667
| 1
| 0
| 0.160742
| 0.04341
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.066667
| 0
| 0.116667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
84166058443ca0ceefbc850ac278223834c0d582
| 630
|
py
|
Python
|
correct_python_programs/sieve.py
|
PatrickShaw/QuixBugs
|
5a2eb2987fdac12860b526ffa92a57e5831fd639
|
[
"MIT"
] | 22
|
2018-01-29T01:56:30.000Z
|
2022-03-21T12:25:40.000Z
|
correct_python_programs/sieve.py
|
zixifan/QuixBugs
|
5a2eb2987fdac12860b526ffa92a57e5831fd639
|
[
"MIT"
] | 31
|
2017-12-18T21:04:34.000Z
|
2022-02-21T07:38:09.000Z
|
correct_python_programs/sieve.py
|
zixifan/QuixBugs
|
5a2eb2987fdac12860b526ffa92a57e5831fd639
|
[
"MIT"
] | 19
|
2018-01-06T14:18:33.000Z
|
2022-03-21T12:25:43.000Z
|
def sieve(max):
    """Return the list of primes in [2, max] by trial division against
    the primes collected so far."""
    found = []
    candidate = 2
    while candidate <= max:
        # candidate is prime iff no earlier prime divides it.
        if not any(candidate % p == 0 for p in found):
            found.append(candidate)
        candidate += 1
    return found
"""
def sieve(max):
primes = []
for n in range(2, max + 1):
if not any(n % p == 0 for p in primes):
primes.append(n)
return primes
def sieve(max):
primes = []
for n in range(2, max + 1):
if all(n % p for p in primes):
primes.append(n)
return primes
def sieve(max):
primes = []
for n in range(2, max + 1):
if not any(n % p for p in primes):
primes.append(n)
return primes
"""
| 19.6875
| 47
| 0.501587
| 100
| 630
| 3.16
| 0.18
| 0.101266
| 0.139241
| 0.21519
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0.025189
| 0.369841
| 630
| 31
| 48
| 20.322581
| 0.770781
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ffc20fa01d287bc9e531590640ca2d9021032da4
| 12,584
|
py
|
Python
|
model.py
|
sm823zw/Natural-Language-Inference
|
0cc89173f798478439d98377b2b540ea48b54cd8
|
[
"MIT"
] | 3
|
2021-06-01T16:49:28.000Z
|
2021-09-29T19:23:11.000Z
|
model.py
|
sm823zw/Natural-Language-Inference
|
0cc89173f798478439d98377b2b540ea48b54cd8
|
[
"MIT"
] | null | null | null |
model.py
|
sm823zw/Natural-Language-Inference
|
0cc89173f798478439d98377b2b540ea48b54cd8
|
[
"MIT"
] | null | null | null |
import re
import tensorflow as tf
from attention import *
def create_LSTM_model(premise, hypothesis,
                      embed_matrix, l2,
                      EMBEDDING_DIM, MAX_SEQ_LEN,
                      attention=False, baseline=True):
    """
    Build a shared-weights BiLSTM sentence-pair classifier graph and
    return the 3-class softmax prediction tensor.

    Parameters
    ----------
    premise, hypothesis : Keras input tensors of token ids
        (assumes length MAX_SEQ_LEN — TODO confirm against caller).
    embed_matrix : pretrained embedding matrix; rows = vocabulary size.
    l2 : L2 regularization factor for dense and recurrent kernels.
    EMBEDDING_DIM : size of each embedding vector.
    MAX_SEQ_LEN : input sequence length fed to the Embedding layer.
    attention : if True the BiLSTM emits full sequences and a
        CustomAttention layer pools them; otherwise the final state is used.
    baseline : if True only concatenate the two sentence vectors;
        otherwise also append their elementwise product and difference.
    """
    lam = tf.keras.regularizers.l2(l2=l2)
    # Frozen pretrained embeddings (trainable=False).
    embedding = tf.keras.layers.Embedding(embed_matrix.shape[0],
                                          output_dim=EMBEDDING_DIM,
                                          weights=[embed_matrix],
                                          input_length=MAX_SEQ_LEN,
                                          trainable=False)
    # Per-timestep projection of embeddings into a 300-d ReLU space.
    translation = tf.keras.layers.TimeDistributed(tf.keras.layers.Dense(300, activation='relu', kernel_regularizer=lam))
    if attention:
        # Attention pooling needs the per-timestep outputs.
        BiLSTM = tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(100,
                                                                    kernel_regularizer=lam,
                                                                    recurrent_regularizer=lam,
                                                                    return_sequences=True))
    else:
        BiLSTM = tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(100,
                                                                    kernel_regularizer=lam,
                                                                    recurrent_regularizer=lam,
                                                                    return_sequences=False))
    # Both sentences are encoded by the same layers (shared weights).
    premise = embedding(premise)
    hypothesis = embedding(hypothesis)
    premise = translation(premise)
    hypothesis = translation(hypothesis)
    premise = BiLSTM(premise)
    hypothesis = BiLSTM(hypothesis)
    if attention:
        # CustomAttention returns a pair; only the pooled vector is kept
        # (first element presumably attention weights — verify in attention.py).
        _, premise = CustomAttention(return_sequences=False)(premise)
        _, hypothesis = CustomAttention(return_sequences=False)(hypothesis)
    if baseline:
        train_input = tf.keras.layers.concatenate([premise, hypothesis])
    else:
        # Richer matching features: elementwise product and difference.
        dot_product = tf.keras.layers.Multiply()([premise, hypothesis])
        difference = tf.keras.layers.Subtract()([premise, hypothesis])
        train_input = tf.keras.layers.concatenate([premise, hypothesis, dot_product, difference])
    train_input = tf.keras.layers.Dropout(0.1)(train_input)
    # Classifier head: 3 x (Dense -> BatchNorm -> ReLU -> Dropout).
    for i in range(3):
        train_input = tf.keras.layers.Dense(200, kernel_regularizer=lam)(train_input)
        train_input = tf.keras.layers.BatchNormalization()(train_input)
        train_input = tf.keras.layers.ReLU()(train_input)
        train_input = tf.keras.layers.Dropout(0.1)(train_input)
    prediction = tf.keras.layers.Dense(3, activation='softmax')(train_input)
    return prediction
def create_GRU_model(premise, hypothesis,
                     embed_matrix, l2,
                     EMBEDDING_DIM, MAX_SEQ_LEN,
                     attention=False, baseline=True):
    """
    Build a shared-weights BiGRU sentence-pair classifier graph and
    return the 3-class softmax prediction tensor.

    Parameters
    ----------
    premise, hypothesis : Keras input tensors of token ids
        (assumes length MAX_SEQ_LEN — TODO confirm against caller).
    embed_matrix : pretrained embedding matrix; rows = vocabulary size.
    l2 : L2 regularization factor for dense and recurrent kernels.
    EMBEDDING_DIM : size of each embedding vector.
    MAX_SEQ_LEN : input sequence length fed to the Embedding layer.
    attention : if True the BiGRU emits full sequences and a
        CustomAttention layer pools them; otherwise the final state is used.
    baseline : if True only concatenate the two sentence vectors;
        otherwise also append their elementwise product and difference.
    """
    lam = tf.keras.regularizers.l2(l2=l2)
    # Frozen pretrained embeddings (trainable=False).
    embedding = tf.keras.layers.Embedding(embed_matrix.shape[0],
                                          output_dim=EMBEDDING_DIM,
                                          weights=[embed_matrix],
                                          input_length=MAX_SEQ_LEN,
                                          trainable=False)
    # NOTE(review): the original created a TimeDistributed Dense(300)
    # "translation" layer here but never applied it (the LSTM variant does
    # apply it).  The unused layer has been removed; if the projection was
    # intended, apply it to both sentences after the embedding step.
    if attention:
        # Attention pooling needs the per-timestep outputs.
        BiGRU = tf.keras.layers.Bidirectional(tf.keras.layers.GRU(100,
                                                                  kernel_regularizer=lam,
                                                                  recurrent_regularizer=lam,
                                                                  return_sequences=True))
    else:
        BiGRU = tf.keras.layers.Bidirectional(tf.keras.layers.GRU(100,
                                                                  kernel_regularizer=lam,
                                                                  recurrent_regularizer=lam,
                                                                  return_sequences=False))
    # Both sentences are encoded by the same layers (shared weights).
    premise = embedding(premise)
    hypothesis = embedding(hypothesis)
    premise = BiGRU(premise)
    hypothesis = BiGRU(hypothesis)
    premise = tf.keras.layers.BatchNormalization()(premise)
    hypothesis = tf.keras.layers.BatchNormalization()(hypothesis)
    if attention:
        # CustomAttention returns a pair; only the pooled vector is kept
        # (first element presumably attention weights — verify in attention.py).
        _, premise = CustomAttention(return_sequences=False)(premise)
        _, hypothesis = CustomAttention(return_sequences=False)(hypothesis)
    if baseline:
        train_input = tf.keras.layers.concatenate([premise, hypothesis])
    else:
        # Richer matching features: elementwise product and difference.
        dot_product = tf.keras.layers.Multiply()([premise, hypothesis])
        difference = tf.keras.layers.Subtract()([premise, hypothesis])
        train_input = tf.keras.layers.concatenate([premise, hypothesis, dot_product, difference])
    train_input = tf.keras.layers.Dropout(0.1)(train_input)
    # Classifier head: 3 x (Dense -> BatchNorm -> ReLU -> Dropout).
    for i in range(3):
        train_input = tf.keras.layers.Dense(200, kernel_regularizer=lam)(train_input)
        train_input = tf.keras.layers.BatchNormalization()(train_input)
        train_input = tf.keras.layers.ReLU()(train_input)
        train_input = tf.keras.layers.Dropout(0.1)(train_input)
    prediction = tf.keras.layers.Dense(3, activation='softmax')(train_input)
    return prediction
def create_Rochtaschel_model(premise, hypothesis,
                             embed_matrix, l2,
                             EMBEDDING_DIM, MAX_SEQ_LEN,
                             two_way=False):
    """Conditional-encoding LSTM pair model with Rochtaschel-style attention.

    One LSTM reads the first sentence; its final (h, c) states initialise a
    second LSTM over the other sentence, and a RochtaschelAttention layer pools
    the two concatenated output sequences.  With two_way=True this is done in
    both directions and the pooled vectors are concatenated before the
    classifier head.  Returns the softmax prediction tensor over 3 classes.
    """
    lam = tf.keras.regularizers.l2(l2=l2)
    embedding = tf.keras.layers.Embedding(embed_matrix.shape[0],
                                          output_dim=EMBEDDING_DIM,
                                          weights=[embed_matrix],
                                          input_length=MAX_SEQ_LEN,
                                          trainable=False)
    translation = tf.keras.layers.TimeDistributed(
        tf.keras.layers.Dense(300, activation='relu', kernel_regularizer=lam))
    # First encoder exposes its final states so they can seed the second one.
    reader = tf.keras.layers.LSTM(100,
                                  kernel_regularizer=lam,
                                  recurrent_regularizer=lam,
                                  return_sequences=True,
                                  return_state=True,
                                  time_major=False)
    conditioned = tf.keras.layers.LSTM(100,
                                       kernel_regularizer=lam,
                                       recurrent_regularizer=lam,
                                       return_sequences=True,
                                       time_major=False)
    premise = translation(embedding(premise))
    hypothesis = translation(embedding(hypothesis))

    def _attend(first, second):
        # Encode `first`, condition the second LSTM on its final states, then
        # attention-pool the concatenated sequences (fresh attention per call).
        seq_a, state_h, state_c = reader(first)
        seq_b = conditioned(second, initial_state=[state_h, state_c])
        joined = tf.keras.layers.concatenate([seq_a, seq_b], axis=1)
        return RochtaschelAttention(regularizer=lam)(joined)

    if two_way:
        train_input = tf.keras.layers.concatenate(
            [_attend(premise, hypothesis), _attend(hypothesis, premise)])
    else:
        train_input = _attend(premise, hypothesis)
    train_input = tf.keras.layers.Dropout(0.25)(train_input)
    # Classifier head: three Dense -> BatchNorm -> ReLU -> Dropout stages.
    for _ in range(3):
        train_input = tf.keras.layers.Dense(100, kernel_regularizer=lam)(train_input)
        train_input = tf.keras.layers.BatchNormalization()(train_input)
        train_input = tf.keras.layers.ReLU()(train_input)
        train_input = tf.keras.layers.Dropout(0.1)(train_input)
    prediction = tf.keras.layers.Dense(3, activation='softmax')(train_input)
    return prediction
def create_Inner_Attention_model(premise, hypothesis,
                                 embed_matrix, l2,
                                 EMBEDDING_DIM, MAX_SEQ_LEN,
                                 baseline=True):
    """BiLSTM sentence encoder with InnerAttention pooling for 3-way NLI.

    Both sentences share the embedding, translation, and BiLSTM layers; each
    gets its own InnerAttention pooling layer.  With baseline=False the
    classifier additionally receives elementwise product and difference
    features.  Returns the softmax prediction tensor over 3 classes.
    """
    lam = tf.keras.regularizers.l2(l2=l2)
    embedding = tf.keras.layers.Embedding(embed_matrix.shape[0],
                                          output_dim=EMBEDDING_DIM,
                                          weights=[embed_matrix],
                                          input_length=MAX_SEQ_LEN,
                                          trainable=False)
    translation = tf.keras.layers.TimeDistributed(
        tf.keras.layers.Dense(300, activation='relu', kernel_regularizer=lam))
    encoder = tf.keras.layers.Bidirectional(
        tf.keras.layers.LSTM(100,
                             kernel_regularizer=lam,
                             recurrent_regularizer=lam,
                             return_sequences=True))

    def _sentence_vector(tokens):
        # Shared pipeline: embed -> project -> BiLSTM, then a per-sentence
        # InnerAttention layer pools the sequence into one vector.
        return InnerAttention(regularizer=lam)(encoder(translation(embedding(tokens))))

    premise_vec = _sentence_vector(premise)
    hypothesis_vec = _sentence_vector(hypothesis)
    if baseline:
        features = [premise_vec, hypothesis_vec]
    else:
        product = tf.keras.layers.Multiply()([premise_vec, hypothesis_vec])
        delta = tf.keras.layers.Subtract()([premise_vec, hypothesis_vec])
        features = [premise_vec, hypothesis_vec, product, delta]
    train_input = tf.keras.layers.concatenate(features)
    train_input = tf.keras.layers.Dropout(0.2)(train_input)
    # Classifier head: three Dense -> BatchNorm -> ReLU -> Dropout stages.
    for _ in range(3):
        train_input = tf.keras.layers.Dense(100, kernel_regularizer=lam)(train_input)
        train_input = tf.keras.layers.BatchNormalization()(train_input)
        train_input = tf.keras.layers.ReLU()(train_input)
        train_input = tf.keras.layers.Dropout(0.2)(train_input)
    prediction = tf.keras.layers.Dense(3, activation='softmax')(train_input)
    return prediction
def create_Novel_model(premise, hypothesis,
                       embed_matrix, l2,
                       EMBEDDING_DIM, MAX_SEQ_LEN,
                       baseline=True):
    """Combined model: InnerAttention pooling plus CustomAttention features.

    Args:
        premise, hypothesis: input tensors of token ids (length MAX_SEQ_LEN).
        embed_matrix: pretrained embedding weights, shape (vocab, EMBEDDING_DIM).
        l2: L2 regularization strength for dense/recurrent kernels.
        EMBEDDING_DIM: embedding dimensionality (must match embed_matrix).
        MAX_SEQ_LEN: fixed input sequence length.
        baseline: if True only the InnerAttention-pooled vectors are used;
            otherwise product/difference interactions and CustomAttention
            pooled vectors are appended.

    Returns:
        The softmax prediction tensor over 3 classes.
    """
    lam = tf.keras.regularizers.l2(l2=l2)
    embedding = tf.keras.layers.Embedding(embed_matrix.shape[0],
                                          output_dim=EMBEDDING_DIM,
                                          weights=[embed_matrix],
                                          input_length=MAX_SEQ_LEN,
                                          trainable=False)
    translation = tf.keras.layers.TimeDistributed(
        tf.keras.layers.Dense(300, activation='relu', kernel_regularizer=lam))
    BiLSTM = tf.keras.layers.Bidirectional(
        tf.keras.layers.LSTM(100,
                             kernel_regularizer=lam,
                             recurrent_regularizer=lam,
                             return_sequences=True))
    premise = BiLSTM(translation(embedding(premise)))
    hypothesis = BiLSTM(translation(embedding(hypothesis)))
    premise_1 = InnerAttention(regularizer=lam)(premise)
    hypothesis_1 = InnerAttention(regularizer=lam)(hypothesis)
    if baseline:
        train_input = tf.keras.layers.concatenate([premise_1, hypothesis_1])
    else:
        # Only the non-baseline variant consumes the CustomAttention pooled
        # vectors, so build them here: the original created these layers even
        # when baseline=True and then discarded their outputs.
        _, premise_2 = CustomAttention(return_sequences=False, regularizer=lam)(premise)
        _, hypothesis_2 = CustomAttention(return_sequences=False, regularizer=lam)(hypothesis)
        dot_product = tf.keras.layers.Multiply()([premise_1, hypothesis_1])
        difference = tf.keras.layers.Subtract()([premise_1, hypothesis_1])
        train_input = tf.keras.layers.concatenate(
            [premise_1, hypothesis_1, dot_product, difference, premise_2, hypothesis_2])
    train_input = tf.keras.layers.Dropout(0.2)(train_input)
    # 3-layer MLP classifier head.
    for _ in range(3):
        train_input = tf.keras.layers.Dense(100, kernel_regularizer=lam)(train_input)
        train_input = tf.keras.layers.BatchNormalization()(train_input)
        train_input = tf.keras.layers.ReLU()(train_input)
        train_input = tf.keras.layers.Dropout(0.2)(train_input)
    prediction = tf.keras.layers.Dense(3, activation='softmax')(train_input)
    return prediction
| 41.946667
| 126
| 0.592657
| 1,263
| 12,584
| 5.701504
| 0.072842
| 0.0836
| 0.14623
| 0.080267
| 0.923761
| 0.911262
| 0.896542
| 0.86238
| 0.836828
| 0.836828
| 0
| 0.017079
| 0.316036
| 12,584
| 299
| 127
| 42.086957
| 0.819565
| 0
| 0
| 0.824074
| 0
| 0
| 0.004371
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023148
| false
| 0
| 0.013889
| 0
| 0.060185
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ffe45e63fec6aa96671bf4f88a736c44af0d4a7b
| 51
|
py
|
Python
|
oi/uoj/P104/gen.py
|
Riteme/test
|
b511d6616a25f4ae8c3861e2029789b8ee4dcb8d
|
[
"BSD-Source-Code"
] | 3
|
2018-08-30T09:43:20.000Z
|
2019-12-03T04:53:43.000Z
|
oi/uoj/P104/gen.py
|
Riteme/test
|
b511d6616a25f4ae8c3861e2029789b8ee4dcb8d
|
[
"BSD-Source-Code"
] | null | null | null |
oi/uoj/P104/gen.py
|
Riteme/test
|
b511d6616a25f4ae8c3861e2029789b8ee4dcb8d
|
[
"BSD-Source-Code"
] | null | null | null |
# Test-data generator (Python 2 print-statement syntax): first line emits the
# pair "100000 200"; second line emits 10**5 copies of "10000" joined by
# single spaces.  Must be run with a Python 2 interpreter.
print 10**5, 200
print " ".join(["10000"] * 10**5)
| 17
| 33
| 0.54902
| 9
| 51
| 3.111111
| 0.666667
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.325581
| 0.156863
| 51
| 2
| 34
| 25.5
| 0.325581
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
0814cfe49b97b899b1bab01c4b224b56bd44b00c
| 121
|
py
|
Python
|
invoke_args/invocation.py
|
jbcurtin/bert-etl-testing
|
760c847fb9810d1568febe101bb65cbd46aa0b7a
|
[
"MIT"
] | null | null | null |
invoke_args/invocation.py
|
jbcurtin/bert-etl-testing
|
760c847fb9810d1568febe101bb65cbd46aa0b7a
|
[
"MIT"
] | null | null | null |
invoke_args/invocation.py
|
jbcurtin/bert-etl-testing
|
760c847fb9810d1568febe101bb65cbd46aa0b7a
|
[
"MIT"
] | null | null | null |
import typing
def test_one() -> typing.List[typing.Dict[str, typing.Any]]:
    """Return a one-element fixture list holding a nested payload dict."""
    payload = {'python_member': {'test': 1}}
    return [payload]
| 20.166667
| 60
| 0.652893
| 17
| 121
| 4.529412
| 0.764706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009615
| 0.140496
| 121
| 5
| 61
| 24.2
| 0.730769
| 0
| 0
| 0
| 0
| 0
| 0.141667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
4badbc2cd1237623678e391237b9b4e06b9c7064
| 48,888
|
py
|
Python
|
fhir/resources/tests/test_auditevent.py
|
mmabey/fhir.resources
|
cc73718e9762c04726cd7de240c8f2dd5313cbe1
|
[
"BSD-3-Clause"
] | null | null | null |
fhir/resources/tests/test_auditevent.py
|
mmabey/fhir.resources
|
cc73718e9762c04726cd7de240c8f2dd5313cbe1
|
[
"BSD-3-Clause"
] | null | null | null |
fhir/resources/tests/test_auditevent.py
|
mmabey/fhir.resources
|
cc73718e9762c04726cd7de240c8f2dd5313cbe1
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/AuditEvent
Release: R4
Version: 4.0.1
Build ID: 9346c8cc45
Last updated: 2019-11-01T09:29:23.356+11:00
"""
import io
import json
import os
import unittest
import pytest
from .. import auditevent
from ..fhirdate import FHIRDate
from .fixtures import force_bytes
@pytest.mark.usefixtures("base_settings")
class AuditEventTests(unittest.TestCase):
def instantiate_from(self, filename):
    """Load a JSON fixture (relative to FHIR_UNITTEST_DATADIR) as an AuditEvent."""
    datadir = os.environ.get("FHIR_UNITTEST_DATADIR") or ""
    fixture_path = os.path.join(datadir, filename)
    with io.open(fixture_path, "r", encoding="utf-8") as handle:
        parsed = json.load(handle)
    self.assertEqual("AuditEvent", parsed["resourceType"])
    return auditevent.AuditEvent(parsed)
def testAuditEvent1(self):
    """Validate the search example, round-trip it through JSON, validate again."""
    instance = self.instantiate_from("audit-event-example-search.json")
    self.assertIsNotNone(instance, "Must have instantiated a AuditEvent instance")
    self.implAuditEvent1(instance)
    serialized = instance.as_json()
    self.assertEqual("AuditEvent", serialized["resourceType"])
    self.implAuditEvent1(auditevent.AuditEvent(serialized))
def implAuditEvent1(self, inst):
    """Field-by-field assertions for the 'example-search' AuditEvent fixture."""
    self.assertEqual(force_bytes(inst.action), force_bytes("E"))
    self.assertEqual(force_bytes(inst.agent[0].altId), force_bytes("601847123"))
    self.assertEqual(force_bytes(inst.agent[0].name), force_bytes("Grahame Grieve"))
    self.assertTrue(inst.agent[0].requestor)
    self.assertEqual(
        force_bytes(inst.agent[0].type.coding[0].code), force_bytes("humanuser")
    )
    self.assertEqual(
        force_bytes(inst.agent[0].type.coding[0].display), force_bytes("human user")
    )
    self.assertEqual(
        force_bytes(inst.agent[0].type.coding[0].system),
        force_bytes(
            "http://terminology.hl7.org/CodeSystem/extra-security-role-type"
        ),
    )
    self.assertEqual(force_bytes(inst.agent[1].altId), force_bytes("6580"))
    self.assertEqual(
        force_bytes(inst.agent[1].network.address),
        force_bytes("Workstation1.ehr.familyclinic.com"),
    )
    self.assertEqual(force_bytes(inst.agent[1].network.type), force_bytes("1"))
    self.assertFalse(inst.agent[1].requestor)
    self.assertEqual(
        force_bytes(inst.agent[1].type.coding[0].code), force_bytes("110153")
    )
    self.assertEqual(
        force_bytes(inst.agent[1].type.coding[0].display),
        force_bytes("Source Role ID"),
    )
    self.assertEqual(
        force_bytes(inst.agent[1].type.coding[0].system),
        force_bytes("http://dicom.nema.org/resources/ontology/DCM"),
    )
    # entity[0].query carries the base64-encoded search URL of the audited query.
    self.assertEqual(
        force_bytes(inst.entity[0].query),
        force_bytes(
            "aHR0cDovL2ZoaXItZGV2LmhlYWx0aGludGVyc2VjdGlvbnMuY29tLmF1L29wZW4vRW5jb3VudGVyP3BhcnRpY2lwYW50PTEz"
        ),
    )
    self.assertEqual(force_bytes(inst.entity[0].role.code), force_bytes("24"))
    self.assertEqual(force_bytes(inst.entity[0].role.display), force_bytes("Query"))
    self.assertEqual(
        force_bytes(inst.entity[0].role.system),
        force_bytes("http://terminology.hl7.org/CodeSystem/object-role"),
    )
    self.assertEqual(force_bytes(inst.entity[0].type.code), force_bytes("2"))
    self.assertEqual(
        force_bytes(inst.entity[0].type.display), force_bytes("System Object")
    )
    self.assertEqual(
        force_bytes(inst.entity[0].type.system),
        force_bytes("http://terminology.hl7.org/CodeSystem/audit-entity-type"),
    )
    self.assertEqual(force_bytes(inst.id), force_bytes("example-search"))
    self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
    self.assertEqual(
        force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
    )
    self.assertEqual(
        force_bytes(inst.meta.tag[0].system),
        force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
    )
    self.assertEqual(force_bytes(inst.outcome), force_bytes("0"))
    self.assertEqual(inst.recorded.date, FHIRDate("2015-08-22T23:42:24Z").date)
    self.assertEqual(inst.recorded.as_json(), "2015-08-22T23:42:24Z")
    self.assertEqual(force_bytes(inst.source.site), force_bytes("Cloud"))
    self.assertEqual(force_bytes(inst.source.type[0].code), force_bytes("3"))
    self.assertEqual(
        force_bytes(inst.source.type[0].display), force_bytes("Web Server")
    )
    self.assertEqual(
        force_bytes(inst.source.type[0].system),
        force_bytes("http://terminology.hl7.org/CodeSystem/security-source-type"),
    )
    self.assertEqual(force_bytes(inst.subtype[0].code), force_bytes("search"))
    self.assertEqual(force_bytes(inst.subtype[0].display), force_bytes("search"))
    self.assertEqual(
        force_bytes(inst.subtype[0].system),
        force_bytes("http://hl7.org/fhir/restful-interaction"),
    )
    self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
    self.assertEqual(force_bytes(inst.type.code), force_bytes("rest"))
    self.assertEqual(
        force_bytes(inst.type.display), force_bytes("Restful Operation")
    )
    self.assertEqual(
        force_bytes(inst.type.system),
        force_bytes("http://terminology.hl7.org/CodeSystem/audit-event-type"),
    )
def testAuditEvent2(self):
    """Validate the logout example, round-trip it through JSON, validate again."""
    instance = self.instantiate_from("audit-event-example-logout.json")
    self.assertIsNotNone(instance, "Must have instantiated a AuditEvent instance")
    self.implAuditEvent2(instance)
    serialized = instance.as_json()
    self.assertEqual("AuditEvent", serialized["resourceType"])
    self.implAuditEvent2(auditevent.AuditEvent(serialized))
def implAuditEvent2(self, inst):
    """Field-by-field assertions for the 'example-logout' AuditEvent fixture."""
    self.assertEqual(force_bytes(inst.action), force_bytes("E"))
    self.assertEqual(force_bytes(inst.agent[0].altId), force_bytes("601847123"))
    self.assertEqual(force_bytes(inst.agent[0].name), force_bytes("Grahame Grieve"))
    self.assertEqual(
        force_bytes(inst.agent[0].network.address), force_bytes("127.0.0.1")
    )
    self.assertEqual(force_bytes(inst.agent[0].network.type), force_bytes("2"))
    self.assertTrue(inst.agent[0].requestor)
    self.assertEqual(
        force_bytes(inst.agent[0].type.coding[0].code), force_bytes("humanuser")
    )
    self.assertEqual(
        force_bytes(inst.agent[0].type.coding[0].display), force_bytes("human user")
    )
    self.assertEqual(
        force_bytes(inst.agent[0].type.coding[0].system),
        force_bytes(
            "http://terminology.hl7.org/CodeSystem/extra-security-role-type"
        ),
    )
    self.assertEqual(force_bytes(inst.agent[1].altId), force_bytes("6580"))
    self.assertEqual(
        force_bytes(inst.agent[1].network.address),
        force_bytes("Workstation1.ehr.familyclinic.com"),
    )
    self.assertEqual(force_bytes(inst.agent[1].network.type), force_bytes("1"))
    self.assertFalse(inst.agent[1].requestor)
    self.assertEqual(
        force_bytes(inst.agent[1].type.coding[0].code), force_bytes("110153")
    )
    self.assertEqual(
        force_bytes(inst.agent[1].type.coding[0].display),
        force_bytes("Source Role ID"),
    )
    self.assertEqual(
        force_bytes(inst.agent[1].type.coding[0].system),
        force_bytes("http://dicom.nema.org/resources/ontology/DCM"),
    )
    self.assertEqual(force_bytes(inst.id), force_bytes("example-logout"))
    self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
    self.assertEqual(
        force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
    )
    self.assertEqual(
        force_bytes(inst.meta.tag[0].system),
        force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
    )
    self.assertEqual(force_bytes(inst.outcome), force_bytes("0"))
    self.assertEqual(inst.recorded.date, FHIRDate("2013-06-20T23:46:41Z").date)
    self.assertEqual(inst.recorded.as_json(), "2013-06-20T23:46:41Z")
    self.assertEqual(force_bytes(inst.source.site), force_bytes("Cloud"))
    self.assertEqual(force_bytes(inst.source.type[0].code), force_bytes("3"))
    self.assertEqual(
        force_bytes(inst.source.type[0].display), force_bytes("Web Server")
    )
    self.assertEqual(
        force_bytes(inst.source.type[0].system),
        force_bytes("http://terminology.hl7.org/CodeSystem/security-source-type"),
    )
    self.assertEqual(force_bytes(inst.subtype[0].code), force_bytes("110123"))
    self.assertEqual(force_bytes(inst.subtype[0].display), force_bytes("Logout"))
    self.assertEqual(
        force_bytes(inst.subtype[0].system),
        force_bytes("http://dicom.nema.org/resources/ontology/DCM"),
    )
    self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
    self.assertEqual(force_bytes(inst.type.code), force_bytes("110114"))
    self.assertEqual(
        force_bytes(inst.type.display), force_bytes("User Authentication")
    )
    self.assertEqual(
        force_bytes(inst.type.system),
        force_bytes("http://dicom.nema.org/resources/ontology/DCM"),
    )
def testAuditEvent3(self):
    """Validate the vread example, round-trip it through JSON, validate again."""
    instance = self.instantiate_from("audit-event-example-vread.json")
    self.assertIsNotNone(instance, "Must have instantiated a AuditEvent instance")
    self.implAuditEvent3(instance)
    serialized = instance.as_json()
    self.assertEqual("AuditEvent", serialized["resourceType"])
    self.implAuditEvent3(auditevent.AuditEvent(serialized))
def implAuditEvent3(self, inst):
    """Field-by-field assertions for the 'example-rest' (vread) AuditEvent fixture."""
    self.assertEqual(force_bytes(inst.action), force_bytes("R"))
    self.assertEqual(force_bytes(inst.agent[0].altId), force_bytes("601847123"))
    self.assertEqual(force_bytes(inst.agent[0].name), force_bytes("Grahame Grieve"))
    self.assertTrue(inst.agent[0].requestor)
    self.assertEqual(
        force_bytes(inst.agent[0].type.coding[0].code), force_bytes("humanuser")
    )
    self.assertEqual(
        force_bytes(inst.agent[0].type.coding[0].display), force_bytes("human user")
    )
    self.assertEqual(
        force_bytes(inst.agent[0].type.coding[0].system),
        force_bytes(
            "http://terminology.hl7.org/CodeSystem/extra-security-role-type"
        ),
    )
    self.assertEqual(force_bytes(inst.agent[1].altId), force_bytes("6580"))
    self.assertEqual(
        force_bytes(inst.agent[1].network.address),
        force_bytes("Workstation1.ehr.familyclinic.com"),
    )
    self.assertEqual(force_bytes(inst.agent[1].network.type), force_bytes("1"))
    self.assertFalse(inst.agent[1].requestor)
    self.assertEqual(
        force_bytes(inst.agent[1].type.coding[0].code), force_bytes("110153")
    )
    self.assertEqual(
        force_bytes(inst.agent[1].type.coding[0].display),
        force_bytes("Source Role ID"),
    )
    self.assertEqual(
        force_bytes(inst.agent[1].type.coding[0].system),
        force_bytes("http://dicom.nema.org/resources/ontology/DCM"),
    )
    self.assertEqual(force_bytes(inst.entity[0].lifecycle.code), force_bytes("6"))
    self.assertEqual(
        force_bytes(inst.entity[0].lifecycle.display), force_bytes("Access / Use")
    )
    self.assertEqual(
        force_bytes(inst.entity[0].lifecycle.system),
        force_bytes("http://terminology.hl7.org/CodeSystem/dicom-audit-lifecycle"),
    )
    self.assertEqual(force_bytes(inst.entity[0].type.code), force_bytes("2"))
    self.assertEqual(
        force_bytes(inst.entity[0].type.display), force_bytes("System Object")
    )
    self.assertEqual(
        force_bytes(inst.entity[0].type.system),
        force_bytes("http://terminology.hl7.org/CodeSystem/audit-entity-type"),
    )
    self.assertEqual(force_bytes(inst.id), force_bytes("example-rest"))
    self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
    self.assertEqual(
        force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
    )
    self.assertEqual(
        force_bytes(inst.meta.tag[0].system),
        force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
    )
    self.assertEqual(force_bytes(inst.outcome), force_bytes("0"))
    self.assertEqual(inst.recorded.date, FHIRDate("2013-06-20T23:42:24Z").date)
    self.assertEqual(inst.recorded.as_json(), "2013-06-20T23:42:24Z")
    self.assertEqual(force_bytes(inst.source.site), force_bytes("Cloud"))
    self.assertEqual(force_bytes(inst.source.type[0].code), force_bytes("3"))
    self.assertEqual(
        force_bytes(inst.source.type[0].display), force_bytes("Web Server")
    )
    self.assertEqual(
        force_bytes(inst.source.type[0].system),
        force_bytes("http://terminology.hl7.org/CodeSystem/security-source-type"),
    )
    self.assertEqual(force_bytes(inst.subtype[0].code), force_bytes("vread"))
    self.assertEqual(force_bytes(inst.subtype[0].display), force_bytes("vread"))
    self.assertEqual(
        force_bytes(inst.subtype[0].system),
        force_bytes("http://hl7.org/fhir/restful-interaction"),
    )
    self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
    self.assertEqual(force_bytes(inst.type.code), force_bytes("rest"))
    self.assertEqual(
        force_bytes(inst.type.display), force_bytes("Restful Operation")
    )
    self.assertEqual(
        force_bytes(inst.type.system),
        force_bytes("http://terminology.hl7.org/CodeSystem/audit-event-type"),
    )
def testAuditEvent4(self):
    """Validate the media example, round-trip it through JSON, validate again."""
    instance = self.instantiate_from("audit-event-example-media.json")
    self.assertIsNotNone(instance, "Must have instantiated a AuditEvent instance")
    self.implAuditEvent4(instance)
    serialized = instance.as_json()
    self.assertEqual("AuditEvent", serialized["resourceType"])
    self.implAuditEvent4(auditevent.AuditEvent(serialized))
def implAuditEvent4(self, inst):
    """Field-by-field assertions for the 'example-media' AuditEvent fixture."""
    self.assertEqual(force_bytes(inst.action), force_bytes("R"))
    self.assertFalse(inst.agent[0].requestor)
    self.assertEqual(
        force_bytes(inst.agent[0].type.coding[0].code), force_bytes("110153")
    )
    self.assertEqual(
        force_bytes(inst.agent[0].type.coding[0].display),
        force_bytes("Source Role ID"),
    )
    self.assertEqual(
        force_bytes(inst.agent[0].type.coding[0].system),
        force_bytes("http://dicom.nema.org/resources/ontology/DCM"),
    )
    self.assertEqual(force_bytes(inst.agent[1].altId), force_bytes("601847123"))
    self.assertEqual(force_bytes(inst.agent[1].name), force_bytes("Grahame Grieve"))
    self.assertTrue(inst.agent[1].requestor)
    self.assertEqual(
        force_bytes(inst.agent[1].type.coding[0].code), force_bytes("humanuser")
    )
    self.assertEqual(
        force_bytes(inst.agent[1].type.coding[0].display), force_bytes("human user")
    )
    self.assertEqual(
        force_bytes(inst.agent[1].type.coding[0].system),
        force_bytes(
            "http://terminology.hl7.org/CodeSystem/extra-security-role-type"
        ),
    )
    # agent[2] is the destination medium (a DVD) rather than a person/system.
    self.assertEqual(force_bytes(inst.agent[2].media.code), force_bytes("110033"))
    self.assertEqual(force_bytes(inst.agent[2].media.display), force_bytes("DVD"))
    self.assertEqual(
        force_bytes(inst.agent[2].media.system),
        force_bytes("http://dicom.nema.org/resources/ontology/DCM"),
    )
    self.assertEqual(
        force_bytes(inst.agent[2].name), force_bytes("Media title: Hello World")
    )
    self.assertFalse(inst.agent[2].requestor)
    self.assertEqual(
        force_bytes(inst.agent[2].type.coding[0].code), force_bytes("110154")
    )
    self.assertEqual(
        force_bytes(inst.agent[2].type.coding[0].display),
        force_bytes("Destination Media"),
    )
    self.assertEqual(
        force_bytes(inst.agent[2].type.coding[0].system),
        force_bytes("http://dicom.nema.org/resources/ontology/DCM"),
    )
    self.assertEqual(force_bytes(inst.entity[0].role.code), force_bytes("1"))
    self.assertEqual(
        force_bytes(inst.entity[0].role.display), force_bytes("Patient")
    )
    self.assertEqual(
        force_bytes(inst.entity[0].role.system),
        force_bytes("http://terminology.hl7.org/CodeSystem/object-role"),
    )
    self.assertEqual(force_bytes(inst.entity[0].type.code), force_bytes("1"))
    self.assertEqual(
        force_bytes(inst.entity[0].type.display), force_bytes("Person")
    )
    self.assertEqual(
        force_bytes(inst.entity[0].type.system),
        force_bytes("http://terminology.hl7.org/CodeSystem/audit-entity-type"),
    )
    self.assertEqual(force_bytes(inst.entity[1].role.code), force_bytes("20"))
    self.assertEqual(force_bytes(inst.entity[1].role.display), force_bytes("Job"))
    self.assertEqual(
        force_bytes(inst.entity[1].role.system),
        force_bytes("http://terminology.hl7.org/CodeSystem/object-role"),
    )
    self.assertEqual(force_bytes(inst.entity[1].type.code), force_bytes("2"))
    self.assertEqual(
        force_bytes(inst.entity[1].type.display), force_bytes("System Object")
    )
    self.assertEqual(
        force_bytes(inst.entity[1].type.system),
        force_bytes("http://terminology.hl7.org/CodeSystem/audit-entity-type"),
    )
    self.assertEqual(force_bytes(inst.entity[2].type.code), force_bytes("2"))
    self.assertEqual(
        force_bytes(inst.entity[2].type.display), force_bytes("System Object")
    )
    self.assertEqual(
        force_bytes(inst.entity[2].type.system),
        force_bytes("http://terminology.hl7.org/CodeSystem/audit-entity-type"),
    )
    self.assertEqual(force_bytes(inst.id), force_bytes("example-media"))
    self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
    self.assertEqual(
        force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
    )
    self.assertEqual(
        force_bytes(inst.meta.tag[0].system),
        force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
    )
    self.assertEqual(force_bytes(inst.outcome), force_bytes("0"))
    self.assertEqual(inst.recorded.date, FHIRDate("2015-08-27T23:42:24Z").date)
    self.assertEqual(inst.recorded.as_json(), "2015-08-27T23:42:24Z")
    self.assertEqual(force_bytes(inst.subtype[0].code), force_bytes("ITI-32"))
    self.assertEqual(
        force_bytes(inst.subtype[0].display),
        force_bytes("Distribute Document Set on Media"),
    )
    self.assertEqual(
        force_bytes(inst.subtype[0].system),
        force_bytes("urn:oid:1.3.6.1.4.1.19376.1.2"),
    )
    self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
    self.assertEqual(force_bytes(inst.type.code), force_bytes("110106"))
    self.assertEqual(force_bytes(inst.type.display), force_bytes("Export"))
    self.assertEqual(
        force_bytes(inst.type.system),
        force_bytes("http://dicom.nema.org/resources/ontology/DCM"),
    )
def testAuditEvent5(self):
    """Validate the login example, round-trip it through JSON, validate again."""
    instance = self.instantiate_from("audit-event-example-login.json")
    self.assertIsNotNone(instance, "Must have instantiated a AuditEvent instance")
    self.implAuditEvent5(instance)
    serialized = instance.as_json()
    self.assertEqual("AuditEvent", serialized["resourceType"])
    self.implAuditEvent5(auditevent.AuditEvent(serialized))
def implAuditEvent5(self, inst):
    """Field-by-field assertions for the 'example-login' AuditEvent fixture."""
    self.assertEqual(force_bytes(inst.action), force_bytes("E"))
    self.assertEqual(force_bytes(inst.agent[0].altId), force_bytes("601847123"))
    self.assertEqual(force_bytes(inst.agent[0].name), force_bytes("Grahame Grieve"))
    self.assertEqual(
        force_bytes(inst.agent[0].network.address), force_bytes("127.0.0.1")
    )
    self.assertEqual(force_bytes(inst.agent[0].network.type), force_bytes("2"))
    self.assertTrue(inst.agent[0].requestor)
    self.assertEqual(
        force_bytes(inst.agent[0].type.coding[0].code), force_bytes("humanuser")
    )
    self.assertEqual(
        force_bytes(inst.agent[0].type.coding[0].display), force_bytes("human user")
    )
    self.assertEqual(
        force_bytes(inst.agent[0].type.coding[0].system),
        force_bytes(
            "http://terminology.hl7.org/CodeSystem/extra-security-role-type"
        ),
    )
    self.assertEqual(force_bytes(inst.agent[1].altId), force_bytes("6580"))
    self.assertEqual(
        force_bytes(inst.agent[1].network.address),
        force_bytes("Workstation1.ehr.familyclinic.com"),
    )
    self.assertEqual(force_bytes(inst.agent[1].network.type), force_bytes("1"))
    self.assertFalse(inst.agent[1].requestor)
    self.assertEqual(
        force_bytes(inst.agent[1].type.coding[0].code), force_bytes("110153")
    )
    self.assertEqual(
        force_bytes(inst.agent[1].type.coding[0].display),
        force_bytes("Source Role ID"),
    )
    self.assertEqual(
        force_bytes(inst.agent[1].type.coding[0].system),
        force_bytes("http://dicom.nema.org/resources/ontology/DCM"),
    )
    self.assertEqual(force_bytes(inst.id), force_bytes("example-login"))
    self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
    self.assertEqual(
        force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
    )
    self.assertEqual(
        force_bytes(inst.meta.tag[0].system),
        force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
    )
    self.assertEqual(force_bytes(inst.outcome), force_bytes("0"))
    self.assertEqual(inst.recorded.date, FHIRDate("2013-06-20T23:41:23Z").date)
    self.assertEqual(inst.recorded.as_json(), "2013-06-20T23:41:23Z")
    self.assertEqual(force_bytes(inst.source.site), force_bytes("Cloud"))
    self.assertEqual(force_bytes(inst.source.type[0].code), force_bytes("3"))
    self.assertEqual(
        force_bytes(inst.source.type[0].display), force_bytes("Web Server")
    )
    self.assertEqual(
        force_bytes(inst.source.type[0].system),
        force_bytes("http://terminology.hl7.org/CodeSystem/security-source-type"),
    )
    self.assertEqual(force_bytes(inst.subtype[0].code), force_bytes("110122"))
    self.assertEqual(force_bytes(inst.subtype[0].display), force_bytes("Login"))
    self.assertEqual(
        force_bytes(inst.subtype[0].system),
        force_bytes("http://dicom.nema.org/resources/ontology/DCM"),
    )
    self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
    self.assertEqual(force_bytes(inst.type.code), force_bytes("110114"))
    self.assertEqual(
        force_bytes(inst.type.display), force_bytes("User Authentication")
    )
    self.assertEqual(
        force_bytes(inst.type.system),
        force_bytes("http://dicom.nema.org/resources/ontology/DCM"),
    )
def testAuditEvent6(self):
    """Validate the pixQuery example, round-trip it through JSON, validate again."""
    instance = self.instantiate_from("audit-event-example-pixQuery.json")
    self.assertIsNotNone(instance, "Must have instantiated a AuditEvent instance")
    self.implAuditEvent6(instance)
    serialized = instance.as_json()
    self.assertEqual("AuditEvent", serialized["resourceType"])
    self.implAuditEvent6(auditevent.AuditEvent(serialized))
def implAuditEvent6(self, inst):
    """Field-by-field assertions for the 'example-pixQuery' AuditEvent fixture."""
    self.assertEqual(force_bytes(inst.action), force_bytes("E"))
    self.assertEqual(force_bytes(inst.agent[0].altId), force_bytes("6580"))
    self.assertEqual(
        force_bytes(inst.agent[0].network.address),
        force_bytes("Workstation1.ehr.familyclinic.com"),
    )
    self.assertEqual(force_bytes(inst.agent[0].network.type), force_bytes("1"))
    self.assertFalse(inst.agent[0].requestor)
    self.assertEqual(
        force_bytes(inst.agent[0].type.coding[0].code), force_bytes("110153")
    )
    self.assertEqual(
        force_bytes(inst.agent[0].type.coding[0].display),
        force_bytes("Source Role ID"),
    )
    self.assertEqual(
        force_bytes(inst.agent[0].type.coding[0].system),
        force_bytes("http://dicom.nema.org/resources/ontology/DCM"),
    )
    self.assertEqual(force_bytes(inst.agent[1].altId), force_bytes("601847123"))
    self.assertEqual(force_bytes(inst.agent[1].name), force_bytes("Grahame Grieve"))
    self.assertTrue(inst.agent[1].requestor)
    self.assertEqual(
        force_bytes(inst.agent[1].type.coding[0].code), force_bytes("humanuser")
    )
    self.assertEqual(
        force_bytes(inst.agent[1].type.coding[0].display), force_bytes("human user")
    )
    self.assertEqual(
        force_bytes(inst.agent[1].type.coding[0].system),
        force_bytes(
            "http://terminology.hl7.org/CodeSystem/extra-security-role-type"
        ),
    )
    self.assertEqual(force_bytes(inst.entity[0].role.code), force_bytes("1"))
    self.assertEqual(
        force_bytes(inst.entity[0].role.display), force_bytes("Patient")
    )
    self.assertEqual(
        force_bytes(inst.entity[0].role.system),
        force_bytes("http://terminology.hl7.org/CodeSystem/object-role"),
    )
    self.assertEqual(force_bytes(inst.entity[0].type.code), force_bytes("1"))
    self.assertEqual(
        force_bytes(inst.entity[0].type.display), force_bytes("Person")
    )
    self.assertEqual(
        force_bytes(inst.entity[0].type.system),
        force_bytes("http://terminology.hl7.org/CodeSystem/audit-entity-type"),
    )
    # entity[1].detail records the HL7 v2 message control id (MSH-10) in base64.
    self.assertEqual(
        force_bytes(inst.entity[1].detail[0].type), force_bytes("MSH-10")
    )
    self.assertEqual(
        force_bytes(inst.entity[1].detail[0].valueBase64Binary),
        force_bytes("MS4yLjg0MC4xMTQzNTAuMS4xMy4wLjEuNy4xLjE="),
    )
    self.assertEqual(force_bytes(inst.entity[1].role.code), force_bytes("24"))
    self.assertEqual(force_bytes(inst.entity[1].role.display), force_bytes("Query"))
    self.assertEqual(
        force_bytes(inst.entity[1].role.system),
        force_bytes("http://terminology.hl7.org/CodeSystem/object-role"),
    )
    self.assertEqual(force_bytes(inst.entity[1].type.code), force_bytes("2"))
    self.assertEqual(
        force_bytes(inst.entity[1].type.display), force_bytes("System Object")
    )
    self.assertEqual(
        force_bytes(inst.entity[1].type.system),
        force_bytes("http://terminology.hl7.org/CodeSystem/audit-entity-type"),
    )
    self.assertEqual(force_bytes(inst.id), force_bytes("example-pixQuery"))
    self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
    self.assertEqual(
        force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
    )
    self.assertEqual(
        force_bytes(inst.meta.tag[0].system),
        force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
    )
    self.assertEqual(force_bytes(inst.outcome), force_bytes("0"))
    self.assertEqual(inst.recorded.date, FHIRDate("2015-08-26T23:42:24Z").date)
    self.assertEqual(inst.recorded.as_json(), "2015-08-26T23:42:24Z")
    self.assertEqual(force_bytes(inst.subtype[0].code), force_bytes("ITI-9"))
    self.assertEqual(force_bytes(inst.subtype[0].display), force_bytes("PIX Query"))
    self.assertEqual(
        force_bytes(inst.subtype[0].system),
        force_bytes("urn:oid:1.3.6.1.4.1.19376.1.2"),
    )
    self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
    self.assertEqual(force_bytes(inst.type.code), force_bytes("110112"))
    self.assertEqual(force_bytes(inst.type.display), force_bytes("Query"))
    self.assertEqual(
        force_bytes(inst.type.system),
        force_bytes("http://dicom.nema.org/resources/ontology/DCM"),
    )
def testAuditEvent7(self):
    """Round-trip auditevent-example.json through JSON and re-validate."""
    original = self.instantiate_from("auditevent-example.json")
    self.assertIsNotNone(original, "Must have instantiated a AuditEvent instance")
    self.implAuditEvent7(original)
    serialized = original.as_json()
    self.assertEqual("AuditEvent", serialized["resourceType"])
    rehydrated = auditevent.AuditEvent(serialized)
    self.implAuditEvent7(rehydrated)
def implAuditEvent7(self, inst):
    """Field-level expectations for auditevent-example.json.

    Each row is (check, actual, expected): "bytes" compares via force_bytes,
    "raw" compares values directly, "false" asserts falsiness (expected
    unused). Rows run in the original assertion order.
    """
    checks = [
        ("bytes", inst.action, "E"),
        ("bytes", inst.agent[0].network.address, "127.0.0.1"),
        ("bytes", inst.agent[0].network.type, "2"),
        ("false", inst.agent[0].requestor, None),
        ("bytes", inst.agent[0].role[0].text, "Service User (Logon)"),
        ("bytes", inst.agent[0].type.coding[0].code, "humanuser"),
        ("bytes", inst.agent[0].type.coding[0].display, "human user"),
        ("bytes", inst.agent[0].type.coding[0].system, "http://terminology.hl7.org/CodeSystem/extra-security-role-type"),
        ("bytes", inst.agent[1].altId, "6580"),
        ("bytes", inst.agent[1].network.address, "Workstation1.ehr.familyclinic.com"),
        ("bytes", inst.agent[1].network.type, "1"),
        ("false", inst.agent[1].requestor, None),
        ("bytes", inst.agent[1].type.coding[0].code, "110153"),
        ("bytes", inst.agent[1].type.coding[0].display, "Source Role ID"),
        ("bytes", inst.agent[1].type.coding[0].system, "http://dicom.nema.org/resources/ontology/DCM"),
        ("bytes", inst.entity[0].lifecycle.code, "6"),
        ("bytes", inst.entity[0].lifecycle.display, "Access / Use"),
        ("bytes", inst.entity[0].lifecycle.system, "http://terminology.hl7.org/CodeSystem/dicom-audit-lifecycle"),
        ("bytes", inst.entity[0].name, "Grahame's Laptop"),
        ("bytes", inst.entity[0].role.code, "4"),
        ("bytes", inst.entity[0].role.display, "Domain Resource"),
        ("bytes", inst.entity[0].role.system, "http://terminology.hl7.org/CodeSystem/object-role"),
        ("bytes", inst.entity[0].type.code, "4"),
        ("bytes", inst.entity[0].type.display, "Other"),
        ("bytes", inst.entity[0].type.system, "http://terminology.hl7.org/CodeSystem/audit-entity-type"),
        ("bytes", inst.id, "example"),
        ("bytes", inst.meta.tag[0].code, "HTEST"),
        ("bytes", inst.meta.tag[0].display, "test health data"),
        ("bytes", inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason"),
        ("bytes", inst.outcome, "0"),
        ("raw", inst.recorded.date, FHIRDate("2012-10-25T22:04:27+11:00").date),
        ("raw", inst.recorded.as_json(), "2012-10-25T22:04:27+11:00"),
        ("bytes", inst.source.site, "Development"),
        ("bytes", inst.source.type[0].code, "110122"),
        ("bytes", inst.source.type[0].display, "Login"),
        ("bytes", inst.source.type[0].system, "http://dicom.nema.org/resources/ontology/DCM"),
        ("bytes", inst.subtype[0].code, "110120"),
        ("bytes", inst.subtype[0].display, "Application Start"),
        ("bytes", inst.subtype[0].system, "http://dicom.nema.org/resources/ontology/DCM"),
        ("bytes", inst.text.div, '<div xmlns="http://www.w3.org/1999/xhtml">Application Start for under service login "Grahame" (id: Grahame\'s Test HL7Connect)</div>'),
        ("bytes", inst.text.status, "generated"),
        ("bytes", inst.type.code, "110100"),
        ("bytes", inst.type.display, "Application Activity"),
        ("bytes", inst.type.system, "http://dicom.nema.org/resources/ontology/DCM"),
    ]
    for check, actual, expected in checks:
        if check == "false":
            self.assertFalse(actual)
        elif check == "raw":
            self.assertEqual(actual, expected)
        else:
            self.assertEqual(force_bytes(actual), force_bytes(expected))
def testAuditEvent8(self):
    """Round-trip auditevent-example-disclosure.json through JSON and re-validate."""
    original = self.instantiate_from("auditevent-example-disclosure.json")
    self.assertIsNotNone(original, "Must have instantiated a AuditEvent instance")
    self.implAuditEvent8(original)
    serialized = original.as_json()
    self.assertEqual("AuditEvent", serialized["resourceType"])
    rehydrated = auditevent.AuditEvent(serialized)
    self.implAuditEvent8(rehydrated)
def implAuditEvent8(self, inst):
    """Field-level expectations for auditevent-example-disclosure.json.

    Each row is (check, actual, expected): "bytes" compares via force_bytes,
    "raw" compares values directly, "true"/"false" assert truthiness
    (expected unused). Rows run in the original assertion order.
    """
    checks = [
        ("bytes", inst.action, "R"),
        ("bytes", inst.agent[0].altId, "notMe"),
        ("bytes", inst.agent[0].name, "That guy everyone wishes would be caught"),
        ("bytes", inst.agent[0].network.address, "custodian.net"),
        ("bytes", inst.agent[0].network.type, "1"),
        ("bytes", inst.agent[0].policy[0], "http://consent.com/yes"),
        ("true", inst.agent[0].requestor, None),
        ("bytes", inst.agent[0].type.coding[0].code, "110153"),
        ("bytes", inst.agent[0].type.coding[0].display, "Source Role ID"),
        ("bytes", inst.agent[0].type.coding[0].system, "http://dicom.nema.org/resources/ontology/DCM"),
        ("bytes", inst.agent[1].network.address, "marketing.land"),
        ("bytes", inst.agent[1].network.type, "1"),
        ("bytes", inst.agent[1].purposeOfUse[0].coding[0].code, "HMARKT"),
        ("bytes", inst.agent[1].purposeOfUse[0].coding[0].display, "healthcare marketing"),
        ("bytes", inst.agent[1].purposeOfUse[0].coding[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason"),
        ("false", inst.agent[1].requestor, None),
        ("bytes", inst.agent[1].type.coding[0].code, "110152"),
        ("bytes", inst.agent[1].type.coding[0].display, "Destination Role ID"),
        ("bytes", inst.agent[1].type.coding[0].system, "http://dicom.nema.org/resources/ontology/DCM"),
        ("bytes", inst.entity[0].role.code, "1"),
        ("bytes", inst.entity[0].role.display, "Patient"),
        ("bytes", inst.entity[0].role.system, "http://terminology.hl7.org/CodeSystem/object-role"),
        ("bytes", inst.entity[0].type.code, "1"),
        ("bytes", inst.entity[0].type.display, "Person"),
        ("bytes", inst.entity[0].type.system, "http://terminology.hl7.org/CodeSystem/audit-entity-type"),
        ("bytes", inst.entity[1].description, "data about Everthing important"),
        ("bytes", inst.entity[1].lifecycle.code, "11"),
        ("bytes", inst.entity[1].lifecycle.display, "Disclosure"),
        ("bytes", inst.entity[1].lifecycle.system, "http://terminology.hl7.org/CodeSystem/dicom-audit-lifecycle"),
        ("bytes", inst.entity[1].name, "Namne of What"),
        ("bytes", inst.entity[1].role.code, "4"),
        ("bytes", inst.entity[1].role.display, "Domain Resource"),
        ("bytes", inst.entity[1].role.system, "http://terminology.hl7.org/CodeSystem/object-role"),
        ("bytes", inst.entity[1].securityLabel[0].code, "V"),
        ("bytes", inst.entity[1].securityLabel[0].display, "very restricted"),
        ("bytes", inst.entity[1].securityLabel[0].system, "http://terminology.hl7.org/CodeSystem/v3-Confidentiality"),
        ("bytes", inst.entity[1].securityLabel[1].code, "STD"),
        ("bytes", inst.entity[1].securityLabel[1].display, "sexually transmitted disease information sensitivity"),
        ("bytes", inst.entity[1].securityLabel[1].system, "http://terminology.hl7.org/CodeSystem/v3-ActCode"),
        ("bytes", inst.entity[1].securityLabel[2].code, "DELAU"),
        ("bytes", inst.entity[1].securityLabel[2].display, "delete after use"),
        ("bytes", inst.entity[1].securityLabel[2].system, "http://terminology.hl7.org/CodeSystem/v3-ActCode"),
        ("bytes", inst.entity[1].type.code, "2"),
        ("bytes", inst.entity[1].type.display, "System Object"),
        ("bytes", inst.entity[1].type.system, "http://terminology.hl7.org/CodeSystem/audit-entity-type"),
        ("bytes", inst.id, "example-disclosure"),
        ("bytes", inst.meta.tag[0].code, "HTEST"),
        ("bytes", inst.meta.tag[0].display, "test health data"),
        ("bytes", inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason"),
        ("bytes", inst.outcome, "0"),
        ("bytes", inst.outcomeDesc, "Successful Disclosure"),
        ("bytes", inst.purposeOfEvent[0].coding[0].code, "HMARKT"),
        ("bytes", inst.purposeOfEvent[0].coding[0].display, "healthcare marketing"),
        ("bytes", inst.purposeOfEvent[0].coding[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason"),
        ("raw", inst.recorded.date, FHIRDate("2013-09-22T00:08:00Z").date),
        ("raw", inst.recorded.as_json(), "2013-09-22T00:08:00Z"),
        ("bytes", inst.source.site, "Watcher"),
        ("bytes", inst.source.type[0].code, "4"),
        ("bytes", inst.source.type[0].display, "Application Server"),
        ("bytes", inst.source.type[0].system, "http://terminology.hl7.org/CodeSystem/security-source-type"),
        ("bytes", inst.subtype[0].code, "Disclosure"),
        ("bytes", inst.subtype[0].display, "HIPAA disclosure"),
        ("bytes", inst.text.div, '<div xmlns="http://www.w3.org/1999/xhtml">Disclosure by some idiot, for marketing reasons, to places unknown, of a Poor Sap, data about Everthing important.</div>'),
        ("bytes", inst.text.status, "generated"),
        ("bytes", inst.type.code, "110106"),
        ("bytes", inst.type.display, "Export"),
        ("bytes", inst.type.system, "http://dicom.nema.org/resources/ontology/DCM"),
    ]
    for check, actual, expected in checks:
        if check == "true":
            self.assertTrue(actual)
        elif check == "false":
            self.assertFalse(actual)
        elif check == "raw":
            self.assertEqual(actual, expected)
        else:
            self.assertEqual(force_bytes(actual), force_bytes(expected))
def testAuditEvent9(self):
    """Round-trip auditevent-example-error.json through JSON and re-validate."""
    original = self.instantiate_from("auditevent-example-error.json")
    self.assertIsNotNone(original, "Must have instantiated a AuditEvent instance")
    self.implAuditEvent9(original)
    serialized = original.as_json()
    self.assertEqual("AuditEvent", serialized["resourceType"])
    rehydrated = auditevent.AuditEvent(serialized)
    self.implAuditEvent9(rehydrated)
def implAuditEvent9(self, inst):
    """Field-level expectations for auditevent-example-error.json.

    Each row is (check, actual, expected): "bytes" compares via force_bytes,
    "raw" compares values directly, "true"/"false" assert truthiness
    (expected unused). Rows run in the original assertion order.
    """
    checks = [
        ("bytes", inst.action, "C"),
        ("bytes", inst.agent[0].altId, "601847123"),
        ("bytes", inst.agent[0].name, "Grahame Grieve"),
        ("true", inst.agent[0].requestor, None),
        ("bytes", inst.agent[0].type.coding[0].code, "humanuser"),
        ("bytes", inst.agent[0].type.coding[0].display, "human user"),
        ("bytes", inst.agent[0].type.coding[0].system, "http://terminology.hl7.org/CodeSystem/extra-security-role-type"),
        ("bytes", inst.agent[1].altId, "6580"),
        ("bytes", inst.agent[1].network.address, "Workstation1.ehr.familyclinic.com"),
        ("bytes", inst.agent[1].network.type, "1"),
        ("false", inst.agent[1].requestor, None),
        ("bytes", inst.agent[1].type.coding[0].code, "110153"),
        ("bytes", inst.agent[1].type.coding[0].display, "Source Role ID"),
        ("bytes", inst.agent[1].type.coding[0].system, "http://dicom.nema.org/resources/ontology/DCM"),
        ("bytes", inst.contained[0].id, "o1"),
        ("bytes", inst.entity[0].detail[0].type, "requested transaction"),
        ("bytes", inst.entity[0].detail[0].valueString, "http POST ..... "),
        ("bytes", inst.entity[0].type.code, "2"),
        ("bytes", inst.entity[0].type.display, "System Object"),
        ("bytes", inst.entity[0].type.system, "http://terminology.hl7.org/CodeSystem/audit-entity-type"),
        ("bytes", inst.entity[1].description, "transaction failed"),
        ("bytes", inst.entity[1].type.code, "OperationOutcome"),
        ("bytes", inst.entity[1].type.display, "OperationOutcome"),
        ("bytes", inst.entity[1].type.system, "http://hl7.org/fhir/resource-types"),
        ("bytes", inst.id, "example-error"),
        ("bytes", inst.meta.tag[0].code, "HTEST"),
        ("bytes", inst.meta.tag[0].display, "test health data"),
        ("bytes", inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason"),
        ("bytes", inst.outcome, "8"),
        ("bytes", inst.outcomeDesc, "Invalid request to create an Operation resource on the Patient endpoint."),
        ("raw", inst.recorded.date, FHIRDate("2017-09-07T23:42:24Z").date),
        ("raw", inst.recorded.as_json(), "2017-09-07T23:42:24Z"),
        ("bytes", inst.source.site, "Cloud"),
        ("bytes", inst.source.type[0].code, "3"),
        ("bytes", inst.source.type[0].display, "Web Server"),
        ("bytes", inst.source.type[0].system, "http://terminology.hl7.org/CodeSystem/security-source-type"),
        ("bytes", inst.subtype[0].code, "create"),
        ("bytes", inst.subtype[0].display, "create"),
        ("bytes", inst.subtype[0].system, "http://hl7.org/fhir/restful-interaction"),
        ("bytes", inst.text.status, "generated"),
        ("bytes", inst.type.code, "rest"),
        ("bytes", inst.type.display, "Restful Operation"),
        ("bytes", inst.type.system, "http://terminology.hl7.org/CodeSystem/audit-event-type"),
    ]
    for check, actual, expected in checks:
        if check == "true":
            self.assertTrue(actual)
        elif check == "false":
            self.assertFalse(actual)
        elif check == "raw":
            self.assertEqual(actual, expected)
        else:
            self.assertEqual(force_bytes(actual), force_bytes(expected))
| 45.775281
| 180
| 0.628559
| 5,696
| 48,888
| 5.265625
| 0.057409
| 0.235055
| 0.234721
| 0.293402
| 0.922549
| 0.91568
| 0.911346
| 0.90141
| 0.880205
| 0.850765
| 0
| 0.030917
| 0.227909
| 48,888
| 1,067
| 181
| 45.818182
| 0.763684
| 0.003559
| 0
| 0.610516
| 0
| 0.005842
| 0.169486
| 0.015748
| 0
| 0
| 0
| 0
| 0.397274
| 1
| 0.0185
| false
| 0
| 0.009737
| 0
| 0.030185
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
29be42c0736b458b4e2a1b346b2a639c706267a2
| 86
|
py
|
Python
|
bmtk/utils/converters/sonata/__init__.py
|
aaberbach/bmtk
|
42aa70ce2003227a32df6ce5a95420dbf4bdfbd4
|
[
"BSD-3-Clause"
] | 1
|
2021-04-06T16:33:57.000Z
|
2021-04-06T16:33:57.000Z
|
bmtk/utils/converters/sonata/__init__.py
|
moekay/bmtk
|
6efdf6387d2a6badf276b917ee15d238daeae883
|
[
"BSD-3-Clause"
] | null | null | null |
bmtk/utils/converters/sonata/__init__.py
|
moekay/bmtk
|
6efdf6387d2a6badf276b917ee15d238daeae883
|
[
"BSD-3-Clause"
] | 1
|
2020-04-28T23:53:07.000Z
|
2020-04-28T23:53:07.000Z
|
from .edge_converters import convert_edges
from .node_converters import convert_nodes
| 28.666667
| 42
| 0.883721
| 12
| 86
| 6
| 0.666667
| 0.444444
| 0.638889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 86
| 2
| 43
| 43
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
29cde7230b5c0d4ccdcf775277026fad667f3d1e
| 12,178
|
py
|
Python
|
homework6/problem2.py
|
jojonium/CS-539-Machine-Learning
|
a1d2b07d0e092faf5580b44f8d4f01d02ea89564
|
[
"MIT"
] | null | null | null |
homework6/problem2.py
|
jojonium/CS-539-Machine-Learning
|
a1d2b07d0e092faf5580b44f8d4f01d02ea89564
|
[
"MIT"
] | null | null | null |
homework6/problem2.py
|
jojonium/CS-539-Machine-Learning
|
a1d2b07d0e092faf5580b44f8d4f01d02ea89564
|
[
"MIT"
] | null | null | null |
import numpy as np
from collections import Counter
from tree import DecisionTree
from problem1 import bootstrap
import problem1 as p1
# Note: please don't import any new package. You should solve this problem using only the package(s) above.
#-------------------------------------------------------------------------
'''
Problem 2: Random Forest (50 points)
In this problem, we will implement our second ensemble method: Random Forest
A list of all variables being used in this problem is provided at the end of this file.
'''
#----------------------------------------------------
'''
(Sampling a subset of features) Given a bootstrap sampled dataset (Xs), create a set of randomly sampled features (m features), by sampling without replacement on the features (the same feature cannot be sampled twice).
---- Inputs: --------
* Xs: the feature values of a bootstrap sample of the dataset (X), a numpy matrix of shape p by n. Xs[i,j] is the feature value of the i-th feature on the j-th data sample of the bootstrap.
* m: the number of features to be sampled for each tree in random forest, an integer scalar.
---- Outputs: --------
* Xf: the sampled feature values of the bootstrap dataset (Xs), a numpy matrix of shape m by n. Xf[i,j] is the feature value of the i-th random feature on the j-th data sample of the bootstrap.
* fid: the indices of the sampled features, a numpy vector of length m. fid[i] is the index of the i-th random feature, for example, if we sample two features (the third and the first feature) from Xs, the fid should be [2,0].
---- Hints: --------
* You could use choice() function in numpy to generate random indices with or without replacement.
* This problem can be solved using 2 line(s) of code.
'''
#---------------------
def sample_features(Xs, m):
    """Sample m distinct features (rows) from a bootstrap dataset.

    Parameters
    ----------
    Xs : numpy matrix of shape (p, n) -- feature values of a bootstrap sample;
        Xs[i, j] is feature i of sample j.
    m : int -- number of features to sample (without replacement).

    Returns
    -------
    Xf : numpy matrix of shape (m, n) -- rows of Xs at the sampled indices.
    fid : numpy int vector of length m -- the sampled feature indices.
    """
    # Sample row indices without replacement, then slice those rows out.
    fid = np.random.choice(Xs.shape[0], m, replace=False)
    Xf = Xs[fid, :]
    return Xf, fid
#-----------------
'''
TEST: Now you can test the correctness of your code above by typing the following in the terminal:
---------------------------------------------------
nosetests -v test2.py:test_sample_features
--- OR ----
python3 -m nose -v test2.py:test_sample_features
--- OR ----
python -m nose -v test2.py:test_sample_features
---------------------------------------------------
'''
#----------------------------------------------------
'''
(Build a Random Forest) Given a dataset of features (X) and labels (Y), create a random forest of multiple decision trees. In each decision tree, we first sample a bootstrap of the dataset (Xs) and a subset of features (Xf) randomly sampled from Xs, and then the decision tree is trained on Xf.
---- Inputs: --------
* X: the feature values of a dataset of samples, a numpy matrix of shape p by n. X[i,j] is the feature value of the i-th feature on the j-th data sample.
* Y: the class labels of a dataset of samples, a numpy array of length n. Y[i] is the class label of the i-th data sample, which can be an int/float/string.
* n_trees: the size of the ensemble (number of decision trees), an integer scalar.
* m: the number of features to be sampled for each tree in random forest, an integer scalar.
---- Outputs: --------
* Ts: the ensemble of random forest, each tree is trained on a bootstrap sample of the dataset with a set of random features, Ts[i] is the i-th decision tree in the random forest.
* Fs: the feature IDs of random forest, an integer matrix of shape m by p. Fs[i] is the list of feature indices (fid) sampled for the i-th decision tree in the random forest. Fs[i,j] is the index of the j-th sampled feature for the i-th decision tree.
---- Hints: --------
* (Step 1): create a bootstrap sample (Xs) from the dataset (X).
* (Step 2): sample a subset of features (Xf) on the bootstrap samples (Xs).
* (Step 3): use the sampled features Xf to train a decision tree to get the tree (t) and feature ID list (fid).
* (Step 4): add the tree (t) to the ensemble (Ts) and add feature IDs (fid) to the feature IDs (Fs).
* You could use DecisionTree() function in tree.py to create a decision tree.
* This problem can be solved using 3 line(s) of code.
'''
#---------------------
def random_forest(X, Y, n_trees, m):
    """Build a random forest of n_trees decision trees.

    Parameters: X is a (p, n) feature matrix, Y the n labels, n_trees the
    ensemble size, m the number of features sampled per tree.
    Returns (Ts, Fs): the list of trained trees and the (n_trees, m) integer
    matrix of per-tree sampled feature indices.

    NOTE(review): the loop body is an unimplemented assignment stub; per the
    hints above, each iteration should bootstrap (X, Y), call
    sample_features, fit a DecisionTree on the sampled features, and record
    the tree in Ts and its feature ids in Fs[i] -- TODO implement (the exact
    bootstrap/DecisionTree APIs live in problem1.py and tree.py).
    """
    Ts=[] # create an empty list to store the tree ensemble
    Fs= np.empty((n_trees,m),dtype=int) # create an empty matrix to store a list of feature ids for each tree
    for i in range(n_trees): # create one tree at a time
        pass # ignore this line
        #########################################
        ## INSERT YOUR CODE HERE (10 points)
        #########################################
    return Ts, Fs
#-----------------
'''
TEST: Now you can test the correctness of your code above by typing the following in the terminal:
---------------------------------------------------
nosetests -v test2.py:test_random_forest
--- OR ----
python3 -m nose -v test2.py:test_random_forest
--- OR ----
python -m nose -v test2.py:test_random_forest
---------------------------------------------------
'''
#----------------------------------------------------
'''
(Predict Label of one data sample using Random Forest) Given a random forest (Ts and Fs), predict the label (y) of one data sample (x) using majority vote by the trees.
---- Inputs: --------
* Ts: the ensemble of random forest, each tree is trained on a bootstrap sample of the dataset with a set of random features, Ts[i] is the i-th decision tree in the random forest.
* Fs: the feature IDs of random forest, an integer matrix of shape m by p. Fs[i] is the list of feature indices (fid) sampled for the i-th decision tree in the random forest. Fs[i,j] is the index of the j-th sampled feature for the i-th decision tree.
* x: the feature values of one data instance, a numpy vector of length p. Xs[i] is the feature value of the i-th feature on the data instance.
---- Outputs: --------
* y: the class labels of one data instance, a scalar of int/float/string.
---- Hints: --------
* You could use predict_1() function in each decision tree to predict the label of one data sample.
* This problem can be solved using 2 line(s) of code.
'''
#---------------------
def predict_1(Ts, Fs, x):
    """Predict the label of one data sample by majority vote of the forest.

    Parameters
    ----------
    Ts : list -- the ensemble; Ts[i] is a decision tree exposing predict_1().
    Fs : the per-tree sampled feature indices; Fs[i] selects the features
        tree i was trained on.
    x : numpy vector of length p -- feature values of one data instance.

    Returns
    -------
    y : the majority-vote class label (int/float/string).
    """
    # Each tree votes on its own feature subset; Counter picks the mode.
    votes = [t.predict_1(x[fid]) for t, fid in zip(Ts, Fs)]
    y = Counter(votes).most_common(1)[0][0]
    return y
#-----------------
'''
TEST: Now you can test the correctness of your code above by typing the following in the terminal:
---------------------------------------------------
nosetests -v test2.py:test_predict_1
--- OR ----
python3 -m nose -v test2.py:test_predict_1
--- OR ----
python -m nose -v test2.py:test_predict_1
---------------------------------------------------
'''
#----------------------------------------------------
'''
(Predict Labels of multiple data samples using Random Forest) Given a random forest (Ts and Fs), predict the labels (Y) of all the data samples in (X) using majority vote by the trees.
---- Inputs: --------
* Ts: the ensemble of random forest, each tree is trained on a bootstrap sample of the dataset with a set of random features, Ts[i] is the i-th decision tree in the random forest.
* Fs: the feature IDs of random forest, an integer matrix of shape m by p. Fs[i] is the list of feature indices (fid) sampled for the i-th decision tree in the random forest. Fs[i,j] is the index of the j-th sampled feature for the i-th decision tree.
* X: the feature values of a dataset of samples, a numpy matrix of shape p by n. X[i,j] is the feature value of the i-th feature on the j-th data sample.
---- Outputs: --------
* Y: the class labels of a dataset of samples, a numpy array of length n. Y[i] is the class label of the i-th data sample, which can be an int/float/string.
---- Hints: --------
* You could use predict_1() function in each decision tree to predict the label of one data sample.
* This problem can be solved using 1 line(s) of code.
'''
#---------------------
def predict(Ts, Fs, X):
    """Predict labels for every column (sample) of X by forest majority vote.

    Parameters
    ----------
    Ts : list -- the ensemble; Ts[i] is a decision tree exposing predict_1().
    Fs : the per-tree sampled feature indices; Fs[i] selects the features
        tree i was trained on.
    X : numpy matrix of shape (p, n) -- X[:, j] is the j-th data sample.

    Returns
    -------
    Y : numpy array of length n -- majority-vote label per sample.
    """
    # X[fid, j] == X[:, j][fid]: the tree-specific feature slice of sample j.
    Y = np.array([
        Counter(t.predict_1(X[fid, j]) for t, fid in zip(Ts, Fs)).most_common(1)[0][0]
        for j in range(X.shape[1])
    ])
    return Y
#-----------------
'''
TEST: Now you can test the correctness of your code above by typing the following in the terminal:
---------------------------------------------------
nosetests -v test2.py:test_predict
--- OR ----
python3 -m nose -v test2.py:test_predict
--- OR ----
python -m nose -v test2.py:test_predict
---------------------------------------------------
'''
#--------------------------------------------
'''
TEST problem 2:
Now you can test the correctness of all the above functions by typing the following in the terminal:
---------------------------------------------------
nosetests -v test2.py
--- OR ----
python3 -m nose -v test2.py
--- OR ----
python -m nose -v test2.py
---------------------------------------------------
If your code passed all the tests, you will see the following message in the terminal:
----------- Problem 2 (50 points in total)--------------------- ... ok
* (10 points) sample_features ... ok
* (10 points) random_forest ... ok
* (10 points) predict_1 ... ok
* (20 points) predict ... ok
----------------------------------------------------------------------
Ran 4 tests in 0.586s
OK
'''
#--------------------------------------------
#--------------------------------------------
'''
List of All Variables
* n: the number of samples in the training set, an integer scalar.
* p: the number of features in each sample, an integer scalar.
* m: the number of features to be sampled for each tree in random forest, an integer scalar.
* n_trees: the size of the ensemble (number of decision trees), an integer scalar.
* X: the feature values of a dataset of samples, a numpy matrix of shape p by n. X[i,j] is the feature value of the i-th feature on the j-th data sample.
* Y: the class labels of a dataset of samples, a numpy array of length n. Y[i] is the class label of the i-th data sample, which can be an int/float/string.
* Xs: the feature values of a bootstrap sample of the dataset (X), a numpy matrix of shape p by n. Xs[i,j] is the feature value of the i-th feature on the j-th data sample of the bootstrap.
* Xf: the sampled feature values of the bootstrap dataset (Xs), a numpy matrix of shape m by n. Xf[i,j] is the feature value of the i-th random feature on the j-th data sample of the bootstrap.
* fid: the indices of the sampled features, a numpy vector of length m. fid[i] is the index of the i-th random feature, for example, if we sample two features (the third and the first feature) from Xs, the fid should be [2,0].
* Ts: the ensemble of random forest, each tree is trained on a bootstrap sample of the dataset with a set of random features, Ts[i] is the i-th decision tree in the random forest.
* Fs: the feature IDs of random forest, an integer matrix of shape m by p. Fs[i] is the list of feature indices (fid) sampled for the i-th decision tree in the random forest. Fs[i,j] is the index of the j-th sampled feature for the i-th decision tree.
* x: the feature values of one data instance, a numpy vector of length p. Xs[i] is the feature value of the i-th feature on the data instance.
* y: the class labels of one data instance, a scalar of int/float/string.
'''
#--------------------------------------------
| 57.443396
| 299
| 0.571276
| 1,786
| 12,178
| 3.87626
| 0.106383
| 0.025278
| 0.022534
| 0.016178
| 0.744764
| 0.734364
| 0.724397
| 0.693919
| 0.671385
| 0.652607
| 0
| 0.007449
| 0.228363
| 12,178
| 212
| 300
| 57.443396
| 0.729275
| 0.082854
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018868
| 0
| 1
| 0.235294
| false
| 0.058824
| 0.294118
| 0
| 0.764706
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
d9b95fad7ca585f750a8097b7f03ca1bba8d9e98
| 198
|
py
|
Python
|
pyforms/pyforms/dialogs.py
|
GMDFr/GUI_Blob_Tracker
|
1648ef3f5b0015e828639db317c7364e4af7ccab
|
[
"MIT"
] | null | null | null |
pyforms/pyforms/dialogs.py
|
GMDFr/GUI_Blob_Tracker
|
1648ef3f5b0015e828639db317c7364e4af7ccab
|
[
"MIT"
] | null | null | null |
pyforms/pyforms/dialogs.py
|
GMDFr/GUI_Blob_Tracker
|
1648ef3f5b0015e828639db317c7364e4af7ccab
|
[
"MIT"
] | null | null | null |
from confapp import conf

# Select the CsvParserDialog implementation according to the configured
# pyforms mode: the real GUI dialog in 'GUI' mode, or an empty placeholder
# class in 'TERMINAL' mode (presumably so importers keep working without a
# display -- confirm against callers). NOTE(review): if PYFORMS_MODE is any
# other value, CsvParserDialog is left undefined and imports of it will fail.
if conf.PYFORMS_MODE=='GUI':
    from pyforms_gui.dialogs.csv_parser import CsvParserDialog
elif conf.PYFORMS_MODE=='TERMINAL':
    class CsvParserDialog(object): pass
| 22
| 60
| 0.772727
| 26
| 198
| 5.730769
| 0.653846
| 0.147651
| 0.201342
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141414
| 198
| 9
| 61
| 22
| 0.876471
| 0
| 0
| 0
| 0
| 0
| 0.057592
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
d9cadfc854308555ae9562c952ab1cf3223fd161
| 2,078
|
py
|
Python
|
src/agent/rnd_net/conv_net.py
|
abagaria/opiq
|
b7df5134d8b265972ad70f7ba92b385f1e3fe5f2
|
[
"MIT"
] | 13
|
2020-02-13T16:09:16.000Z
|
2021-07-21T16:23:39.000Z
|
src/agent/rnd_net/conv_net.py
|
abagaria/opiq
|
b7df5134d8b265972ad70f7ba92b385f1e3fe5f2
|
[
"MIT"
] | 2
|
2020-08-04T01:46:37.000Z
|
2021-06-28T10:30:53.000Z
|
src/agent/rnd_net/conv_net.py
|
abagaria/opiq
|
b7df5134d8b265972ad70f7ba92b385f1e3fe5f2
|
[
"MIT"
] | 3
|
2020-08-01T07:33:02.000Z
|
2021-11-13T20:54:55.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
# device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
class Target_RND(nn.Module):
    """Convolutional network mapping a 1-channel maze observation to a
    representation of size args.rnd_rep_size.

    Architecture: two 3x3 stride-2 convolutions (16 channels, no padding),
    a halving fully-connected layer, and a linear output head.

    Fix: the original asserted ``self.fc_size == 400``, hard-coding a single
    input size (and using assert for validation, which vanishes under -O).
    The flattened size is now derived purely from args.state_shape, so any
    square input large enough for the two convolutions works.
    """

    def __init__(self, args):
        super(Target_RND, self).__init__()
        maze_size = args.state_shape[0]
        image_size = maze_size
        stride = 2
        channels = 16
        self.conv1 = nn.Conv2d(1, channels, 3, stride=stride)
        self.conv2 = nn.Conv2d(channels, channels, 3, stride=stride)
        # Track the spatial extent through the two valid (padding 0),
        # stride-2, kernel-3 convolutions: s -> floor((s - 3) / 2) + 1.
        for _ in range(2):
            image_size = int((image_size + 2 * 0 - 3) / stride + 1)
        self.fc_size = image_size * image_size * channels
        self.fc_size_half = image_size * image_size * (channels // 2)
        self.fc1 = nn.Linear(self.fc_size, self.fc_size_half)
        self.out = nn.Linear(self.fc_size_half, args.rnd_rep_size)

    def forward(self, x):
        """Forward pass: (B, 1, H, W) -> (B, rnd_rep_size)."""
        x = F.relu(self.conv1(x))
        x = F.relu(self.conv2(x))
        # Flatten the conv feature map for the fully-connected layers.
        x = x.view(-1, self.fc_size)
        x = F.relu(self.fc1(x))
        out = self.out(x)
        return out
class Predictor_RND(nn.Module):
    """Trainable predictor network for RND; architecturally identical to
    Target_RND (two 3x3 stride-2 convs, halving FC layer, linear head)
    but with its own independently initialised parameters.

    Fix: the original asserted ``self.fc_size == 400``, hard-coding a single
    input size (and using assert for validation, which vanishes under -O).
    The flattened size is now derived purely from args.state_shape, so any
    square input large enough for the two convolutions works.
    """

    def __init__(self, args):
        super(Predictor_RND, self).__init__()
        maze_size = args.state_shape[0]
        image_size = maze_size
        stride = 2
        channels = 16
        self.conv1 = nn.Conv2d(1, channels, 3, stride=stride)
        self.conv2 = nn.Conv2d(channels, channels, 3, stride=stride)
        # Track the spatial extent through the two valid (padding 0),
        # stride-2, kernel-3 convolutions: s -> floor((s - 3) / 2) + 1.
        for _ in range(2):
            image_size = int((image_size + 2 * 0 - 3) / stride + 1)
        self.fc_size = image_size * image_size * channels
        self.fc_size_half = image_size * image_size * (channels // 2)
        self.fc1 = nn.Linear(self.fc_size, self.fc_size_half)
        self.out = nn.Linear(self.fc_size_half, args.rnd_rep_size)

    def forward(self, x):
        """Forward pass: (B, 1, H, W) -> (B, rnd_rep_size)."""
        x = F.relu(self.conv1(x))
        x = F.relu(self.conv2(x))
        # Flatten the conv feature map for the fully-connected layers.
        x = x.view(-1, self.fc_size)
        x = F.relu(self.fc1(x))
        out = self.out(x)
        return out
| 29.685714
| 71
| 0.591434
| 308
| 2,078
| 3.772727
| 0.181818
| 0.108434
| 0.120482
| 0.072289
| 0.876076
| 0.876076
| 0.876076
| 0.876076
| 0.822719
| 0.822719
| 0
| 0.03367
| 0.285371
| 2,078
| 69
| 72
| 30.115942
| 0.748822
| 0.040905
| 0
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.040816
| 1
| 0.081633
| false
| 0
| 0.061224
| 0
| 0.22449
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d9dc715806e86e38b6ee82af1b7e0c7e5d024e34
| 20,084
|
py
|
Python
|
packages/gtmcore/gtmcore/inventory/tests/test_process_sweep_status.py
|
jjwatts/gigantum-client
|
88ce0475fb6880322bdd06d987c494e29064f278
|
[
"MIT"
] | null | null | null |
packages/gtmcore/gtmcore/inventory/tests/test_process_sweep_status.py
|
jjwatts/gigantum-client
|
88ce0475fb6880322bdd06d987c494e29064f278
|
[
"MIT"
] | null | null | null |
packages/gtmcore/gtmcore/inventory/tests/test_process_sweep_status.py
|
jjwatts/gigantum-client
|
88ce0475fb6880322bdd06d987c494e29064f278
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2017 FlashX, LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import pytest
import os
import copy
from gtmcore.labbook import LabBook
from gtmcore.inventory.inventory import InventoryManager
from gtmcore.activity import ActivityType, ActivityRecord, ActivityDetailType
from gtmcore.fixtures import mock_config_file
@pytest.fixture
def mock_lb(mock_config_file):
    """Yield a freshly-created LabBook named 'sweep-test' for each test."""
    inventory = InventoryManager(mock_config_file[0])
    labbook = inventory.create_labbook('test', 'test', 'sweep-test', description='sweepin')
    yield labbook
def helper_gen_record():
    """Build a placeholder ActivityRecord; its linked commit is set later by helper_commit."""
    record = ActivityRecord(ActivityType.LABBOOK,
                            message="--overwritten--",
                            show=False,
                            importance=255,
                            linked_commit="",
                            tags=['save'])
    return record
def helper_write_file(lb: LabBook, section: str, name: str, content: str):
    """Write `content` as text to `<lb.root_dir>/<section>/<name>`."""
    target = os.path.join(lb.root_dir, section, name)
    with open(target, 'wt') as out_file:
        out_file.write(content)
def helper_commit(lb, ar):
    """Snapshot git status, commit all pending changes, and link `ar` to the new commit.

    Returns the pre-commit git status, the labbook, and the updated record.
    """
    pre_commit_status = lb.git.status()
    lb.git.add_all()
    lb.git.commit("Sweep of uncommitted changes")
    ar.linked_commit = lb.git.commit_hash
    return pre_commit_status, lb, ar
class TestShims(object):
    """Tests for LabBook.process_sweep_status over new/modified files.

    Each test writes files into one or more labbook sections ('code',
    'input', 'output'), commits via helper_commit, then checks the
    counts, record type, and per-detail type/markdown produced by
    process_sweep_status.  NOTE(review): the tests rely on the exact
    ordering of ar._detail_objects (input before code before output in
    the mixed cases) — presumably an implementation detail of
    process_sweep_status; confirm against that method before changing.
    """

    def test_process_sweep_status_new_code(self, mock_lb):
        """New files in 'code' yield CODE-typed details with 'Created' markdown."""
        helper_write_file(mock_lb, 'code', 'f1.txt', 'cat')
        git_status, lb, ar = helper_commit(mock_lb, helper_gen_record())
        ar, new_count, modified_count, deleted_count = lb.process_sweep_status(ar, git_status)
        assert new_count == 1
        assert modified_count == 0
        assert len(ar._detail_objects) == 1
        assert ar.type == ActivityType.CODE
        assert ar._detail_objects[0][1] == ActivityDetailType.CODE.value
        helper_write_file(mock_lb, 'code', 'f2.txt', 'cat')
        helper_write_file(mock_lb, 'code', 'f3.txt', 'cat')
        helper_write_file(mock_lb, 'code', 'f4.txt', 'cat')
        git_status, lb, ar = helper_commit(mock_lb, helper_gen_record())
        ar, new_count, modified_count, deleted_count = lb.process_sweep_status(ar, git_status)
        assert new_count == 3
        assert modified_count == 0
        assert len(ar._detail_objects) == 3
        assert ar.type == ActivityType.CODE
        assert ar._detail_objects[0][1] == ActivityDetailType.CODE.value
        assert ar._detail_objects[1][1] == ActivityDetailType.CODE.value
        assert ar._detail_objects[2][1] == ActivityDetailType.CODE.value
        assert "Created" in ar._detail_objects[0][3].data['text/markdown']

    def test_process_sweep_status_new_input(self, mock_lb):
        """New files in 'input' yield an INPUT_DATA-typed record and details."""
        helper_write_file(mock_lb, 'input', 'f1.txt', 'cat')
        git_status, lb, ar = helper_commit(mock_lb, helper_gen_record())
        ar, new_count, modified_count, deleted_count = lb.process_sweep_status(ar, git_status)
        assert new_count == 1
        assert modified_count == 0
        assert ar.type == ActivityType.INPUT_DATA
        assert len(ar._detail_objects) == 1
        assert ar._detail_objects[0][1] == ActivityDetailType.INPUT_DATA.value
        helper_write_file(mock_lb, 'input', 'f2.txt', 'cat')
        helper_write_file(mock_lb, 'input', 'f3.txt', 'cat')
        helper_write_file(mock_lb, 'input', 'f4.txt', 'cat')
        git_status, lb, ar = helper_commit(mock_lb, helper_gen_record())
        ar, new_count, modified_count, deleted_count = lb.process_sweep_status(ar, git_status)
        assert new_count == 3
        assert modified_count == 0
        assert len(ar._detail_objects) == 3
        assert ar.type == ActivityType.INPUT_DATA
        assert ar._detail_objects[0][1] == ActivityDetailType.INPUT_DATA.value
        assert ar._detail_objects[1][1] == ActivityDetailType.INPUT_DATA.value
        assert ar._detail_objects[2][1] == ActivityDetailType.INPUT_DATA.value

    def test_process_sweep_status_new_output(self, mock_lb):
        """New files in 'output' yield an OUTPUT_DATA-typed record and details."""
        helper_write_file(mock_lb, 'output', 'f1.txt', 'cat')
        git_status, lb, ar = helper_commit(mock_lb, helper_gen_record())
        ar, new_count, modified_count, deleted_count = lb.process_sweep_status(ar, git_status)
        assert new_count == 1
        assert modified_count == 0
        assert len(ar._detail_objects) == 1
        assert ar.type == ActivityType.OUTPUT_DATA
        assert ar._detail_objects[0][1] == ActivityDetailType.OUTPUT_DATA.value
        helper_write_file(mock_lb, 'output', 'f2.txt', 'cat')
        helper_write_file(mock_lb, 'output', 'f3.txt', 'cat')
        helper_write_file(mock_lb, 'output', 'f4.txt', 'cat')
        git_status, lb, ar = helper_commit(mock_lb, helper_gen_record())
        ar, new_count, modified_count, deleted_count = lb.process_sweep_status(ar, git_status)
        assert new_count == 3
        assert modified_count == 0
        assert len(ar._detail_objects) == 3
        assert ar.type == ActivityType.OUTPUT_DATA
        assert ar._detail_objects[0][1] == ActivityDetailType.OUTPUT_DATA.value
        assert ar._detail_objects[1][1] == ActivityDetailType.OUTPUT_DATA.value
        assert ar._detail_objects[2][1] == ActivityDetailType.OUTPUT_DATA.value

    def test_process_sweep_status_modified_code(self, mock_lb):
        """Re-writing committed 'code' files counts as modified, with 'Modified' markdown."""
        helper_write_file(mock_lb, 'code', 'f1.txt', 'cat')
        helper_commit(mock_lb, helper_gen_record())
        helper_write_file(mock_lb, 'code', 'f1.txt', 'catdog')
        git_status, lb, ar = helper_commit(mock_lb, helper_gen_record())
        ar, new_count, modified_count, deleted_count = lb.process_sweep_status(ar, git_status)
        assert new_count == 0
        assert modified_count == 1
        assert len(ar._detail_objects) == 1
        assert ar.type == ActivityType.CODE
        assert ar._detail_objects[0][1] == ActivityDetailType.CODE.value
        assert "Modified" in ar._detail_objects[0][3].data['text/markdown']
        helper_write_file(mock_lb, 'code', 'f2.txt', 'cat')
        helper_write_file(mock_lb, 'code', 'f3.txt', 'cat')
        helper_write_file(mock_lb, 'code', 'f4.txt', 'cat')
        helper_commit(mock_lb, helper_gen_record())
        helper_write_file(mock_lb, 'code', 'f2.txt', 'pupper')
        helper_write_file(mock_lb, 'code', 'f3.txt', 'pupper')
        helper_write_file(mock_lb, 'code', 'f4.txt', 'pupper')
        git_status, lb, ar = helper_commit(mock_lb, helper_gen_record())
        ar, new_count, modified_count, deleted_count = lb.process_sweep_status(ar, git_status)
        assert new_count == 0
        assert len(ar._detail_objects) == 3
        assert ar.type == ActivityType.CODE
        assert ar._detail_objects[0][1] == ActivityDetailType.CODE.value
        assert ar._detail_objects[1][1] == ActivityDetailType.CODE.value
        assert ar._detail_objects[2][1] == ActivityDetailType.CODE.value
        assert "Modified" in ar._detail_objects[0][3].data['text/markdown']
        assert "Modified" in ar._detail_objects[1][3].data['text/markdown']
        assert "Modified" in ar._detail_objects[2][3].data['text/markdown']

    def test_process_sweep_status_modified_input(self, mock_lb):
        """Re-writing committed 'input' files counts as modified."""
        helper_write_file(mock_lb, 'input', 'f1.txt', 'cat')
        helper_commit(mock_lb, helper_gen_record())
        helper_write_file(mock_lb, 'input', 'f1.txt', 'catdog')
        git_status, lb, ar = helper_commit(mock_lb, helper_gen_record())
        ar, new_count, modified_count, deleted_count = lb.process_sweep_status(ar, git_status)
        assert new_count == 0
        assert modified_count == 1
        assert len(ar._detail_objects) == 1
        assert ar.type == ActivityType.INPUT_DATA
        assert ar._detail_objects[0][1] == ActivityDetailType.INPUT_DATA.value
        assert "Modified" in ar._detail_objects[0][3].data['text/markdown']
        helper_write_file(mock_lb, 'input', 'f2.txt', 'cat')
        helper_write_file(mock_lb, 'input', 'f3.txt', 'cat')
        helper_write_file(mock_lb, 'input', 'f4.txt', 'cat')
        helper_commit(mock_lb, helper_gen_record())
        helper_write_file(mock_lb, 'input', 'f2.txt', 'pupper')
        helper_write_file(mock_lb, 'input', 'f3.txt', 'pupper')
        helper_write_file(mock_lb, 'input', 'f4.txt', 'pupper')
        git_status, lb, ar = helper_commit(mock_lb, helper_gen_record())
        ar, new_count, modified_count, deleted_count = lb.process_sweep_status(ar, git_status)
        assert new_count == 0
        assert len(ar._detail_objects) == 3
        assert ar.type == ActivityType.INPUT_DATA
        assert ar._detail_objects[0][1] == ActivityDetailType.INPUT_DATA.value
        assert ar._detail_objects[1][1] == ActivityDetailType.INPUT_DATA.value
        assert ar._detail_objects[2][1] == ActivityDetailType.INPUT_DATA.value
        assert "Modified" in ar._detail_objects[0][3].data['text/markdown']
        assert "Modified" in ar._detail_objects[1][3].data['text/markdown']
        assert "Modified" in ar._detail_objects[2][3].data['text/markdown']

    def test_process_sweep_status_modified_output(self, mock_lb):
        """Re-writing committed 'output' files counts as modified."""
        helper_write_file(mock_lb, 'output', 'f1.txt', 'cat')
        helper_commit(mock_lb, helper_gen_record())
        helper_write_file(mock_lb, 'output', 'f1.txt', 'catdog')
        git_status, lb, ar = helper_commit(mock_lb, helper_gen_record())
        ar, new_count, modified_count, deleted_count = lb.process_sweep_status(ar, git_status)
        assert new_count == 0
        assert modified_count == 1
        assert len(ar._detail_objects) == 1
        assert ar.type == ActivityType.OUTPUT_DATA
        assert ar._detail_objects[0][1] == ActivityDetailType.OUTPUT_DATA.value
        assert "Modified" in ar._detail_objects[0][3].data['text/markdown']
        helper_write_file(mock_lb, 'output', 'f2.txt', 'cat')
        helper_write_file(mock_lb, 'output', 'f3.txt', 'cat')
        helper_write_file(mock_lb, 'output', 'f4.txt', 'cat')
        helper_commit(mock_lb, helper_gen_record())
        helper_write_file(mock_lb, 'output', 'f2.txt', 'pupper')
        helper_write_file(mock_lb, 'output', 'f3.txt', 'pupper')
        helper_write_file(mock_lb, 'output', 'f4.txt', 'pupper')
        git_status, lb, ar = helper_commit(mock_lb, helper_gen_record())
        ar, new_count, modified_count, deleted_count = lb.process_sweep_status(ar, git_status)
        assert new_count == 0
        assert len(ar._detail_objects) == 3
        assert ar.type == ActivityType.OUTPUT_DATA
        assert ar._detail_objects[0][1] == ActivityDetailType.OUTPUT_DATA.value
        assert ar._detail_objects[1][1] == ActivityDetailType.OUTPUT_DATA.value
        assert ar._detail_objects[2][1] == ActivityDetailType.OUTPUT_DATA.value
        assert "Modified" in ar._detail_objects[0][3].data['text/markdown']
        assert "Modified" in ar._detail_objects[1][3].data['text/markdown']
        assert "Modified" in ar._detail_objects[2][3].data['text/markdown']

    def test_process_sweep_status_mixed_new_no_modified(self, mock_lb):
        """New files across all three sections produce a LABBOOK-typed record."""
        helper_write_file(mock_lb, 'code', 'f1.txt', 'cat')
        helper_write_file(mock_lb, 'input', 'f2.txt', 'cat')
        helper_write_file(mock_lb, 'output', 'f3.txt', 'cat')
        git_status, lb, ar = helper_commit(mock_lb, helper_gen_record())
        ar, new_count, modified_count, deleted_count = lb.process_sweep_status(ar, git_status)
        assert new_count == 3
        assert modified_count == 0
        assert ar.type == ActivityType.LABBOOK
        assert len(ar._detail_objects) == 3
        # Details arrive ordered input, code, output regardless of write order.
        assert ar._detail_objects[0][1] == ActivityDetailType.INPUT_DATA.value
        assert ar._detail_objects[1][1] == ActivityDetailType.CODE.value
        assert ar._detail_objects[2][1] == ActivityDetailType.OUTPUT_DATA.value
        assert "Created" in ar._detail_objects[0][3].data['text/markdown']
        assert "Created" in ar._detail_objects[1][3].data['text/markdown']
        assert "Created" in ar._detail_objects[2][3].data['text/markdown']

    def test_process_sweep_status_no_new_mixed_modified(self, mock_lb):
        """Modifications across all three sections produce a LABBOOK-typed record."""
        helper_write_file(mock_lb, 'code', 'f1.txt', 'cat')
        helper_write_file(mock_lb, 'input', 'f2.txt', 'cat')
        helper_write_file(mock_lb, 'output', 'f3.txt', 'cat')
        helper_commit(mock_lb, helper_gen_record())
        helper_write_file(mock_lb, 'code', 'f1.txt', 'pupper')
        helper_write_file(mock_lb, 'input', 'f2.txt', 'pupper')
        helper_write_file(mock_lb, 'output', 'f3.txt', 'pupper')
        git_status, lb, ar = helper_commit(mock_lb, helper_gen_record())
        ar, new_count, modified_count, deleted_count = lb.process_sweep_status(ar, git_status)
        assert new_count == 0
        assert modified_count == 3
        assert ar.type == ActivityType.LABBOOK
        assert len(ar._detail_objects) == 3
        assert ar._detail_objects[0][1] == ActivityDetailType.INPUT_DATA.value
        assert ar._detail_objects[1][1] == ActivityDetailType.CODE.value
        assert ar._detail_objects[2][1] == ActivityDetailType.OUTPUT_DATA.value
        assert "Modified" in ar._detail_objects[0][3].data['text/markdown']
        assert "Modified" in ar._detail_objects[1][3].data['text/markdown']
        assert "Modified" in ar._detail_objects[2][3].data['text/markdown']

    def test_process_sweep_status_mixed_new_same_modified(self, mock_lb):
        """Two new files (code+input) plus two modified code files mix into one LABBOOK record."""
        helper_write_file(mock_lb, 'code', 'f1.txt', 'cat')
        helper_write_file(mock_lb, 'code', 'f2.txt', 'cat')
        helper_commit(mock_lb, helper_gen_record())
        helper_write_file(mock_lb, 'code', 'f1.txt', 'pupper')
        helper_write_file(mock_lb, 'code', 'f2.txt', 'pupper')
        helper_write_file(mock_lb, 'code', 'f21.txt', 'dog')
        helper_write_file(mock_lb, 'input', 'f22.txt', 'dog')
        git_status, lb, ar = helper_commit(mock_lb, helper_gen_record())
        ar, new_count, modified_count, deleted_count = lb.process_sweep_status(ar, git_status)
        assert new_count == 2
        assert modified_count == 2
        assert ar.type == ActivityType.LABBOOK
        assert len(ar._detail_objects) == 4
        assert ar._detail_objects[0][1] == ActivityDetailType.INPUT_DATA.value
        assert ar._detail_objects[1][1] == ActivityDetailType.CODE.value
        assert ar._detail_objects[2][1] == ActivityDetailType.CODE.value
        assert ar._detail_objects[3][1] == ActivityDetailType.CODE.value
        assert "Created" in ar._detail_objects[0][3].data['text/markdown']
        assert "Created" in ar._detail_objects[1][3].data['text/markdown']
        assert "Modified" in ar._detail_objects[2][3].data['text/markdown']
        assert "Modified" in ar._detail_objects[3][3].data['text/markdown']

    def test_process_sweep_status_same_new_mixed_modified(self, mock_lb):
        """Two new code files plus modifications in every section yield five details."""
        helper_write_file(mock_lb, 'code', 'f1.txt', 'cat')
        helper_write_file(mock_lb, 'input', 'f2.txt', 'cat')
        helper_write_file(mock_lb, 'output', 'f3.txt', 'cat')
        helper_commit(mock_lb, helper_gen_record())
        helper_write_file(mock_lb, 'code', 'f1.txt', 'pupper')
        helper_write_file(mock_lb, 'input', 'f2.txt', 'pupper')
        helper_write_file(mock_lb, 'output', 'f3.txt', 'pupper')
        helper_write_file(mock_lb, 'code', 'f21.txt', 'dog')
        helper_write_file(mock_lb, 'code', 'f22.txt', 'dog')
        git_status, lb, ar = helper_commit(mock_lb, helper_gen_record())
        ar, new_count, modified_count, deleted_count = lb.process_sweep_status(ar, git_status)
        assert new_count == 2
        assert modified_count == 3
        assert ar.type == ActivityType.LABBOOK
        assert len(ar._detail_objects) == 5
        assert ar._detail_objects[0][1] == ActivityDetailType.INPUT_DATA.value
        assert ar._detail_objects[1][1] == ActivityDetailType.CODE.value
        assert ar._detail_objects[2][1] == ActivityDetailType.CODE.value
        assert ar._detail_objects[3][1] == ActivityDetailType.CODE.value
        assert ar._detail_objects[4][1] == ActivityDetailType.OUTPUT_DATA.value
        assert "Modified" in ar._detail_objects[0][3].data['text/markdown']
        assert "Created" in ar._detail_objects[1][3].data['text/markdown']
        assert "Created" in ar._detail_objects[2][3].data['text/markdown']
        assert "Modified" in ar._detail_objects[3][3].data['text/markdown']

    def test_process_sweep_status_mixed_new_mixed_modified(self, mock_lb):
        """New files in code+output plus modified code+input mix into one LABBOOK record."""
        helper_write_file(mock_lb, 'code', 'f1.txt', 'cat')
        helper_write_file(mock_lb, 'input', 'f2.txt', 'cat')
        helper_commit(mock_lb, helper_gen_record())
        helper_write_file(mock_lb, 'code', 'f1.txt', 'pupper')
        helper_write_file(mock_lb, 'input', 'f2.txt', 'pupper')
        helper_write_file(mock_lb, 'code', 'f21.txt', 'dog')
        helper_write_file(mock_lb, 'output', 'f22.txt', 'dog')
        git_status, lb, ar = helper_commit(mock_lb, helper_gen_record())
        ar, new_count, modified_count, deleted_count = lb.process_sweep_status(ar, git_status)
        assert new_count == 2
        assert modified_count == 2
        assert ar.type == ActivityType.LABBOOK
        assert len(ar._detail_objects) == 4
        assert ar._detail_objects[0][1] == ActivityDetailType.INPUT_DATA.value
        assert ar._detail_objects[1][1] == ActivityDetailType.CODE.value
        assert ar._detail_objects[2][1] == ActivityDetailType.CODE.value
        assert ar._detail_objects[3][1] == ActivityDetailType.OUTPUT_DATA.value
        assert "Modified" in ar._detail_objects[0][3].data['text/markdown']
        assert "Created" in ar._detail_objects[1][3].data['text/markdown']
        assert "Modified" in ar._detail_objects[2][3].data['text/markdown']
        assert "Created" in ar._detail_objects[3][3].data['text/markdown']

    def test_process_sweep_status_same_new_same_modified(self, mock_lb):
        """Two modified input files plus two new code files yield a LABBOOK record."""
        helper_write_file(mock_lb, 'input', 'f1.txt', 'cat')
        helper_write_file(mock_lb, 'input', 'f2.txt', 'cat')
        helper_commit(mock_lb, helper_gen_record())
        helper_write_file(mock_lb, 'input', 'f1.txt', 'pupper')
        helper_write_file(mock_lb, 'input', 'f2.txt', 'pupper')
        helper_write_file(mock_lb, 'code', 'f21.txt', 'dog')
        helper_write_file(mock_lb, 'code', 'f22.txt', 'dog')
        git_status, lb, ar = helper_commit(mock_lb, helper_gen_record())
        ar, new_count, modified_count, deleted_count = lb.process_sweep_status(ar, git_status)
        assert new_count == 2
        assert modified_count == 2
        assert ar.type == ActivityType.LABBOOK
        assert len(ar._detail_objects) == 4
        assert ar._detail_objects[0][1] == ActivityDetailType.INPUT_DATA.value
        assert ar._detail_objects[1][1] == ActivityDetailType.INPUT_DATA.value
        assert ar._detail_objects[2][1] == ActivityDetailType.CODE.value
        assert ar._detail_objects[3][1] == ActivityDetailType.CODE.value
        assert "Modified" in ar._detail_objects[0][3].data['text/markdown']
        assert "Modified" in ar._detail_objects[1][3].data['text/markdown']
        assert "Created" in ar._detail_objects[2][3].data['text/markdown']
        assert "Created" in ar._detail_objects[3][3].data['text/markdown']
| 50.084788
| 94
| 0.680542
| 2,780
| 20,084
| 4.619065
| 0.070144
| 0.0528
| 0.116813
| 0.105054
| 0.875399
| 0.875399
| 0.871272
| 0.866677
| 0.858422
| 0.853672
| 0
| 0.018655
| 0.193935
| 20,084
| 400
| 95
| 50.21
| 0.774538
| 0.05233
| 0
| 0.81672
| 0
| 0
| 0.097828
| 0
| 0
| 0
| 0
| 0
| 0.485531
| 1
| 0.051447
| false
| 0
| 0.025723
| 0.003215
| 0.086817
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8a1e31bedad64111ead71a995894352ff26dcc9a
| 44
|
py
|
Python
|
powderday/agn_models/__init__.py
|
smlower/powderday
|
99e7cec28bfbcba40ec1cff367fa564f51d9b62c
|
[
"BSD-3-Clause"
] | 18
|
2019-09-04T09:48:54.000Z
|
2022-02-08T20:45:19.000Z
|
powderday/agn_models/__init__.py
|
smlower/powderday
|
99e7cec28bfbcba40ec1cff367fa564f51d9b62c
|
[
"BSD-3-Clause"
] | 79
|
2019-09-05T15:09:02.000Z
|
2022-02-25T13:29:51.000Z
|
powderday/agn_models/__init__.py
|
smlower/powderday
|
99e7cec28bfbcba40ec1cff367fa564f51d9b62c
|
[
"BSD-3-Clause"
] | 14
|
2019-08-30T18:24:52.000Z
|
2021-08-05T15:33:13.000Z
|
from . import hopkins
from . import nenkova
| 14.666667
| 21
| 0.772727
| 6
| 44
| 5.666667
| 0.666667
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 44
| 2
| 22
| 22
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8a2cb9c4f7dcf3e8371fa3432ff66d7dd921d4cc
| 443,178
|
py
|
Python
|
qt_material/resources/resource_pyside2_rc.py
|
zmister2016/qt-material
|
f4d153eb530c9ab8ef4cc2eedaf599e6e46fd44c
|
[
"BSD-2-Clause"
] | null | null | null |
qt_material/resources/resource_pyside2_rc.py
|
zmister2016/qt-material
|
f4d153eb530c9ab8ef4cc2eedaf599e6e46fd44c
|
[
"BSD-2-Clause"
] | null | null | null |
qt_material/resources/resource_pyside2_rc.py
|
zmister2016/qt-material
|
f4d153eb530c9ab8ef4cc2eedaf599e6e46fd44c
|
[
"BSD-2-Clause"
] | 1
|
2021-08-14T17:13:13.000Z
|
2021-08-14T17:13:13.000Z
|
# Resource object code (Python 3)
# Created by: object code
# Created by: The Resource Compiler for Qt version 5.15.1
# WARNING! All changes made in this file will be lost!
from PySide2 import QtCore
qt_resource_data = b"\
\x00\x00\x1f\xef\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22sli\
der.svg\x22\x0a inks\
cape:export-file\
name=\x22/home/yeis\
on/Development/p\
iton/art/icon_li\
te.png\x22\x0a inksc\
ape:export-xdpi=\
\x2296\x22\x0a inkscape\
:export-ydpi=\x2296\
\x22>\x0a <defs\x0a \
id=\x22defs2\x22 />\x0a \
<sodipodi:namedv\
iew\x0a id=\x22bas\
e\x22\x0a pagecolo\
r=\x22#ffffff\x22\x0a \
bordercolor=\x22#6\
66666\x22\x0a bord\
eropacity=\x221.0\x22\x0a\
inkscape:pa\
geopacity=\x220.0\x22\x0a\
inkscape:pa\
geshadow=\x222\x22\x0a \
inkscape:zoom=\
\x2228.704913\x22\x0a \
inkscape:cx=\x228.\
5671075\x22\x0a in\
kscape:cy=\x228.802\
1939\x22\x0a inksc\
ape:document-uni\
ts=\x22px\x22\x0a ink\
scape:current-la\
yer=\x22layer1\x22\x0a \
showgrid=\x22true\
\x22\x0a inkscape:\
window-width=\x2219\
20\x22\x0a inkscap\
e:window-height=\
\x221015\x22\x0a inks\
cape:window-x=\x220\
\x22\x0a inkscape:\
window-y=\x220\x22\x0a \
inkscape:windo\
w-maximized=\x221\x22\x0a\
inkscape:sh\
owpageshadow=\x22fa\
lse\x22\x0a units=\
\x22px\x22\x0a inksca\
pe:pagecheckerbo\
ard=\x22false\x22\x0a \
showguides=\x22tru\
e\x22\x0a inkscape\
:snap-bbox=\x22true\
\x22\x0a inkscape:\
bbox-paths=\x22true\
\x22\x0a inkscape:\
bbox-nodes=\x22true\
\x22\x0a inkscape:\
snap-bbox-edge-m\
idpoints=\x22true\x22\x0a\
inkscape:sn\
ap-bbox-midpoint\
s=\x22true\x22\x0a in\
kscape:snap-node\
s=\x22true\x22\x0a in\
kscape:object-pa\
ths=\x22true\x22\x0a \
inkscape:snap-in\
tersection-paths\
=\x22true\x22\x0a ink\
scape:snap-smoot\
h-nodes=\x22true\x22\x0a \
inkscape:sna\
p-midpoints=\x22tru\
e\x22\x0a inkscape\
:snap-global=\x22tr\
ue\x22\x0a fit-mar\
gin-top=\x220\x22\x0a \
fit-margin-left\
=\x220\x22\x0a fit-ma\
rgin-right=\x220\x22\x0a \
fit-margin-b\
ottom=\x220\x22\x0a i\
nkscape:guide-bb\
ox=\x22true\x22>\x0a <\
inkscape:grid\x0a \
type=\x22xygri\
d\x22\x0a id=\x22gr\
id974\x22\x0a em\
pspacing=\x228\x22\x0a \
spacingx=\x220.\
26458332\x22\x0a \
spacingy=\x220.264\
58332\x22\x0a do\
tted=\x22false\x22\x0a \
visible=\x22tru\
e\x22\x0a enable\
d=\x22true\x22\x0a \
snapvisiblegridl\
inesonly=\x22true\x22\x0a\
originx=\x22\
0\x22\x0a origin\
y=\x220\x22 />\x0a </sod\
ipodi:namedview>\
\x0a <metadata\x0a \
id=\x22metadata5\x22\
>\x0a <rdf:RDF>\x0a\
<cc:Work\x0a \
rdf:abou\
t=\x22\x22>\x0a <d\
c:format>image/s\
vg+xml</dc:forma\
t>\x0a <dc:t\
ype\x0a r\
df:resource=\x22htt\
p://purl.org/dc/\
dcmitype/StillIm\
age\x22 />\x0a \
<dc:title />\x0a \
</cc:Work>\x0a \
</rdf:RDF>\x0a <\
/metadata>\x0a <g\x0a\
inkscape:la\
bel=\x22Layer 1\x22\x0a \
inkscape:grou\
pmode=\x22layer\x22\x0a \
id=\x22layer1\x22\x0a \
transform=\x22t\
ranslate(0,-291.\
70835)\x22>\x0a <g\x0a\
id=\x22g847\x22\
\x0a transfor\
m=\x22matrix(0.0520\
7439,0,0,0.05207\
453,-0.90125164,\
282.41203)\x22>\x0a \
<g\x0a i\
d=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transform=\
\x22matrix(1.998621\
9,0,0,1.9986185,\
17.324484,-313.5\
2314)\x22>\x0a \
<path\x0a \
inkscape:tr\
ansform-center-y\
=\x223.175\x22\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0700043\
3;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22\x0a \
d=\x22M 2\
5.399999,271.600\
02 -8.0000008e-7\
,246.20002 H 50.\
799999 Z\x22\x0a \
id=\x22path8\
83\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22\x0a s\
odipodi:nodetype\
s=\x22cccc\x22 />\x0a \
<path\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22\x0a \
inkscape:connect\
or-curvature=\x220\x22\
\x0a id\
=\x22path880\x22\x0a \
d=\x22m 25.\
399999,271.60002\
25.399999,25.4 \
H 0 Z\x22\x0a \
inkscape:tra\
nsform-center-y=\
\x22-3.1749995\x22\x0a \
style=\
\x22opacity:1;fill:\
none;fill-opacit\
y:0.49382719;str\
oke:#ffffff00;st\
roke-width:0.070\
00433;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
/>\x0a <r\
ect\x0a \
ry=\x225.0534658\x22\x0a\
y=\x22\
253.84885\x22\x0a \
x=\x227.648\
7389\x22\x0a \
height=\x2235.52\
8759\x22\x0a \
width=\x2235.528\
786\x22\x0a \
id=\x22rect870\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
6184419;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<circle\x0a \
r=\x2225.39682\
8\x22\x0a \
cy=\x22271.60001\x22\x0a \
cx=\x22\
25.4\x22\x0a \
id=\x22path872\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07635882;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<circle\x0a \
transform=\
\x22rotate(-45)\x22\x0a \
cx=\x22-\
174.08969\x22\x0a \
cy=\x22210.\
01071\x22\x0a \
r=\x2212.656071\
\x22\x0a i\
d=\x22path876\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0739\
9406;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22 \
/>\x0a <pa\
th\x0a \
inkscape:transfo\
rm-center-x=\x22-3.\
1749999\x22\x0a \
sodipodi:n\
odetypes=\x22cccc\x22\x0a\
ink\
scape:connector-\
curvature=\x220\x22\x0a \
id=\x22p\
ath904\x22\x0a \
d=\x22m 25.4,2\
71.60002 -25.400\
00040000004,25.4\
v -50.8 z\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0700\
0433;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22 \
/>\x0a <pa\
th\x0a \
inkscape:transfo\
rm-center-x=\x223.1\
75\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.07000433;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1;paint-ord\
er:stroke fill m\
arkers\x22\x0a \
d=\x22m 25.399\
999,271.60002 25\
.4,-25.4 v 50.8 \
z\x22\x0a \
id=\x22path906\x22\x0a \
inksca\
pe:connector-cur\
vature=\x220\x22\x0a \
sodipodi\
:nodetypes=\x22cccc\
\x22 />\x0a <\
rect\x0a \
ry=\x225.0514922\x22\
\x0a y=\
\x22256.39301\x22\x0a \
x=\x222.56\
63135\x22\x0a \
height=\x2230.4\
40479\x22\x0a \
width=\x2245.69\
3634\x22\x0a \
id=\x22rect837\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
0657438;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<rect\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.0657438;st\
roke-linecap:rou\
nd;stroke-linejo\
in:round;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-dashof\
fset:0;stroke-op\
acity:1;paint-or\
der:stroke fill \
markers\x22\x0a \
id=\x22rect83\
1\x22\x0a \
width=\x2245.693588\
\x22\x0a h\
eight=\x2230.44051\x22\
\x0a x=\
\x22248.76645\x22\x0a \
y=\x22-40.\
633385\x22\x0a \
ry=\x225.05149\
7\x22\x0a \
transform=\x22rotat\
e(90)\x22 />\x0a \
</g>\x0a </g\
>\x0a </g>\x0a <\
path\x0a styl\
e=\x22opacity:1;fil\
l:#ffc107;fill-o\
pacity:1;stroke:\
none;stroke-widt\
h:0.38596651;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-op\
acity:1\x22\x0a \
d=\x22m 50.206421,4\
01.67683 c 110.2\
17209,0.71279 55\
.108609,0.3564 0\
,0 z\x22\x0a id=\
\x22rect997\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22 />\x0a <path\x0a \
style=\x22opa\
city:1;fill:#ff0\
000;fill-opacity\
:1;stroke:none;s\
troke-width:3;st\
roke-linecap:rou\
nd;stroke-linejo\
in:round;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-dashof\
fset:0;stroke-op\
acity:1;paint-or\
der:stroke fill \
markers\x22\x0a \
d=\x22M 10,0.013671\
88 C 4.4846749,0\
.01360343 0.0136\
0343,4.4846749 0\
.01367188,10 0.0\
136035,15.515325\
4.484675,19.986\
396 10,19.986328\
15.515325,19.98\
6396 19.986396,1\
5.515325 19.9863\
28,10 19.986396,\
4.484675 15.5153\
25,0.0136035 10,\
0.01367188 Z\x22\x0a \
transform=\x22\
matrix(0.2645833\
2,0,0,0.26458332\
,0,291.70835)\x22\x0a \
id=\x22path82\
6\x22\x0a inksca\
pe:connector-cur\
vature=\x220\x22\x0a \
sodipodi:nodet\
ypes=\x22ccccc\x22 />\x0a\
</g>\x0a</svg>\x0a\
\x00\x00\x1e\xb7\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22upa\
rrow.svg\x22\x0a ink\
scape:export-fil\
ename=\x22/home/yei\
son/Development/\
piton/art/icon_l\
ite.png\x22\x0a inks\
cape:export-xdpi\
=\x2296\x22\x0a inkscap\
e:export-ydpi=\x229\
6\x22>\x0a <defs\x0a \
id=\x22defs2\x22 />\x0a \
<sodipodi:named\
view\x0a id=\x22ba\
se\x22\x0a pagecol\
or=\x22#ffffff\x22\x0a \
bordercolor=\x22#\
666666\x22\x0a bor\
deropacity=\x221.0\x22\
\x0a inkscape:p\
ageopacity=\x220.0\x22\
\x0a inkscape:p\
ageshadow=\x222\x22\x0a \
inkscape:zoom\
=\x2228.704913\x22\x0a \
inkscape:cx=\x224\
.6862968\x22\x0a i\
nkscape:cy=\x225.00\
26685\x22\x0a inks\
cape:document-un\
its=\x22px\x22\x0a in\
kscape:current-l\
ayer=\x22layer1\x22\x0a \
showgrid=\x22tru\
e\x22\x0a inkscape\
:window-width=\x221\
920\x22\x0a inksca\
pe:window-height\
=\x221004\x22\x0a ink\
scape:window-x=\x22\
0\x22\x0a inkscape\
:window-y=\x220\x22\x0a \
inkscape:wind\
ow-maximized=\x221\x22\
\x0a inkscape:s\
howpageshadow=\x22f\
alse\x22\x0a units\
=\x22px\x22\x0a inksc\
ape:pagecheckerb\
oard=\x22false\x22\x0a \
showguides=\x22tr\
ue\x22\x0a inkscap\
e:snap-bbox=\x22tru\
e\x22\x0a inkscape\
:bbox-paths=\x22tru\
e\x22\x0a inkscape\
:bbox-nodes=\x22tru\
e\x22\x0a inkscape\
:snap-bbox-edge-\
midpoints=\x22true\x22\
\x0a inkscape:s\
nap-bbox-midpoin\
ts=\x22true\x22\x0a i\
nkscape:snap-nod\
es=\x22true\x22\x0a i\
nkscape:object-p\
aths=\x22true\x22\x0a \
inkscape:snap-i\
ntersection-path\
s=\x22true\x22\x0a in\
kscape:snap-smoo\
th-nodes=\x22true\x22\x0a\
inkscape:sn\
ap-midpoints=\x22tr\
ue\x22\x0a inkscap\
e:snap-global=\x22t\
rue\x22\x0a fit-ma\
rgin-top=\x220\x22\x0a \
fit-margin-lef\
t=\x220\x22\x0a fit-m\
argin-right=\x220\x22\x0a\
fit-margin-\
bottom=\x220\x22\x0a \
inkscape:guide-b\
box=\x22true\x22>\x0a \
<inkscape:grid\x0a \
type=\x22xygr\
id\x22\x0a id=\x22g\
rid974\x22\x0a e\
mpspacing=\x228\x22\x0a \
spacingx=\x220\
.26458332\x22\x0a \
spacingy=\x220.26\
458332\x22\x0a d\
otted=\x22false\x22\x0a \
visible=\x22tr\
ue\x22\x0a enabl\
ed=\x22true\x22\x0a \
snapvisiblegrid\
linesonly=\x22true\x22\
\x0a originx=\
\x220\x22\x0a origi\
ny=\x220\x22 />\x0a </so\
dipodi:namedview\
>\x0a <metadata\x0a \
id=\x22metadata5\
\x22>\x0a <rdf:RDF>\
\x0a <cc:Work\x0a\
rdf:abo\
ut=\x22\x22>\x0a <\
dc:format>image/\
svg+xml</dc:form\
at>\x0a <dc:\
type\x0a \
rdf:resource=\x22ht\
tp://purl.org/dc\
/dcmitype/StillI\
mage\x22 />\x0a \
<dc:title />\x0a \
</cc:Work>\x0a \
</rdf:RDF>\x0a \
</metadata>\x0a <g\
\x0a inkscape:l\
abel=\x22Layer 1\x22\x0a \
inkscape:gro\
upmode=\x22layer\x22\x0a \
id=\x22layer1\x22\x0a\
transform=\x22\
translate(0,-291\
.70835)\x22>\x0a <g\
\x0a id=\x22g847\
\x22\x0a transfo\
rm=\x22matrix(0.052\
07439,0,0,0.0520\
7453,-0.90125164\
,282.41203)\x22>\x0a \
<g\x0a \
id=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transform\
=\x22matrix(1.99862\
19,0,0,1.9986185\
,17.324484,-313.\
52314)\x22>\x0a \
<path\x0a \
inkscape:t\
ransform-center-\
y=\x223.175\x22\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.070004\
33;stroke-lineca\
p:round;stroke-l\
inejoin:round;st\
roke-miterlimit:\
4;stroke-dasharr\
ay:none;stroke-d\
ashoffset:0;stro\
ke-opacity:1;pai\
nt-order:stroke \
fill markers\x22\x0a \
d=\x22M \
25.399999,271.60\
002 -8.0000008e-\
7,246.20002 H 50\
.799999 Z\x22\x0a \
id=\x22path\
883\x22\x0a \
inkscape:conne\
ctor-curvature=\x22\
0\x22\x0a \
sodipodi:nodetyp\
es=\x22cccc\x22 />\x0a \
<path\x0a \
sodipo\
di:nodetypes=\x22cc\
cc\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22\x0a i\
d=\x22path880\x22\x0a \
d=\x22m 25\
.399999,271.6000\
2 25.399999,25.4\
H 0 Z\x22\x0a \
inkscape:tr\
ansform-center-y\
=\x22-3.1749995\x22\x0a \
style\
=\x22opacity:1;fill\
:none;fill-opaci\
ty:0.49382719;st\
roke:#ffffff00;s\
troke-width:0.07\
000433;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
;paint-order:str\
oke fill markers\
\x22 />\x0a <\
rect\x0a \
ry=\x225.0534658\x22\
\x0a y=\
\x22253.84885\x22\x0a \
x=\x227.64\
87389\x22\x0a \
height=\x2235.5\
28759\x22\x0a \
width=\x2235.52\
8786\x22\x0a \
id=\x22rect870\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
06184419;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<circle\x0a \
r=\x2225.3968\
28\x22\x0a \
cy=\x22271.60001\x22\x0a\
cx=\
\x2225.4\x22\x0a \
id=\x22path872\x22\
\x0a st\
yle=\x22opacity:1;f\
ill:none;fill-op\
acity:0.49382719\
;stroke:#ffffff0\
0;stroke-width:0\
.07635882;stroke\
-linecap:round;s\
troke-linejoin:r\
ound;stroke-mite\
rlimit:4;stroke-\
dasharray:none;s\
troke-dashoffset\
:0;stroke-opacit\
y:1;paint-order:\
stroke fill mark\
ers\x22 />\x0a \
<circle\x0a \
transform\
=\x22rotate(-45)\x22\x0a \
cx=\x22\
-174.08969\x22\x0a \
cy=\x22210\
.01071\x22\x0a \
r=\x2212.65607\
1\x22\x0a \
id=\x22path876\x22\x0a \
style=\
\x22opacity:1;fill:\
none;fill-opacit\
y:0.49382719;str\
oke:#ffffff00;st\
roke-width:0.073\
99406;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
/>\x0a <p\
ath\x0a \
inkscape:transf\
orm-center-x=\x22-3\
.1749999\x22\x0a \
sodipodi:\
nodetypes=\x22cccc\x22\
\x0a in\
kscape:connector\
-curvature=\x220\x22\x0a \
id=\x22\
path904\x22\x0a \
d=\x22m 25.4,\
271.60002 -25.40\
000040000004,25.\
4 v -50.8 z\x22\x0a \
style=\
\x22opacity:1;fill:\
none;fill-opacit\
y:0.49382719;str\
oke:#ffffff00;st\
roke-width:0.070\
00433;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
/>\x0a <p\
ath\x0a \
inkscape:transf\
orm-center-x=\x223.\
175\x22\x0a \
style=\x22opacity\
:1;fill:none;fil\
l-opacity:0.4938\
2719;stroke:#fff\
fff00;stroke-wid\
th:0.07000433;st\
roke-linecap:rou\
nd;stroke-linejo\
in:round;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-dashof\
fset:0;stroke-op\
acity:1;paint-or\
der:stroke fill \
markers\x22\x0a \
d=\x22m 25.39\
9999,271.60002 2\
5.4,-25.4 v 50.8\
z\x22\x0a \
id=\x22path906\x22\x0a \
inksc\
ape:connector-cu\
rvature=\x220\x22\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22 />\x0a \
<rect\x0a \
ry=\x225.0514922\
\x22\x0a y\
=\x22256.39301\x22\x0a \
x=\x222.5\
663135\x22\x0a \
height=\x2230.\
440479\x22\x0a \
width=\x2245.6\
93634\x22\x0a \
id=\x22rect837\x22\
\x0a st\
yle=\x22opacity:1;f\
ill:none;fill-op\
acity:0.49382719\
;stroke:#ffffff0\
0;stroke-width:0\
.0657438;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<rect\x0a \
style=\x22opaci\
ty:1;fill:none;f\
ill-opacity:0.49\
382719;stroke:#f\
fffff00;stroke-w\
idth:0.0657438;s\
troke-linecap:ro\
und;stroke-linej\
oin:round;stroke\
-miterlimit:4;st\
roke-dasharray:n\
one;stroke-dasho\
ffset:0;stroke-o\
pacity:1;paint-o\
rder:stroke fill\
markers\x22\x0a \
id=\x22rect8\
31\x22\x0a \
width=\x2245.69358\
8\x22\x0a \
height=\x2230.44051\
\x22\x0a x\
=\x22248.76645\x22\x0a \
y=\x22-40\
.633385\x22\x0a \
ry=\x225.0514\
97\x22\x0a \
transform=\x22rota\
te(90)\x22 />\x0a \
</g>\x0a </\
g>\x0a </g>\x0a \
<path\x0a sty\
le=\x22opacity:1;fi\
ll:#ffc107;fill-\
opacity:1;stroke\
:none;stroke-wid\
th:0.38596651;st\
roke-miterlimit:\
4;stroke-dasharr\
ay:none;stroke-o\
pacity:1\x22\x0a \
d=\x22m 50.206421,\
401.67683 c 110.\
217209,0.71279 5\
5.108609,0.3564 \
0,0 z\x22\x0a id\
=\x22rect997\x22\x0a \
inkscape:conne\
ctor-curvature=\x22\
0\x22 />\x0a <path\x0a\
style=\x22fi\
ll:none;stroke:#\
ff0000;stroke-wi\
dth:0.52916664;s\
troke-linecap:bu\
tt;stroke-linejo\
in:bevel;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-opacit\
y:1\x22\x0a d=\x22m\
3.6919632,295.1\
7947 -1.04613,-1\
.65058 -1.04613,\
1.65058\x22\x0a \
id=\x22path827\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a sodi\
podi:nodetypes=\x22\
ccc\x22 />\x0a </g>\x0a<\
/svg>\x0a\
\x00\x00-\xb6\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22too\
lbar-handle-vert\
ical.svg\x22\x0a ink\
scape:export-fil\
ename=\x22/home/yei\
son/Development/\
piton/art/icon_l\
ite.png\x22\x0a inks\
cape:export-xdpi\
=\x2296\x22\x0a inkscap\
e:export-ydpi=\x229\
6\x22>\x0a <defs\x0a \
id=\x22defs2\x22 />\x0a \
<sodipodi:named\
view\x0a id=\x22ba\
se\x22\x0a pagecol\
or=\x22#ffffff\x22\x0a \
bordercolor=\x22#\
666666\x22\x0a bor\
deropacity=\x221.0\x22\
\x0a inkscape:p\
ageopacity=\x220.0\x22\
\x0a inkscape:p\
ageshadow=\x222\x22\x0a \
inkscape:zoom\
=\x2224.802598\x22\x0a \
inkscape:cx=\x22-\
4.3785546\x22\x0a \
inkscape:cy=\x2210.\
683358\x22\x0a ink\
scape:document-u\
nits=\x22px\x22\x0a i\
nkscape:current-\
layer=\x22g839\x22\x0a \
showgrid=\x22true\
\x22\x0a inkscape:\
window-width=\x2219\
20\x22\x0a inkscap\
e:window-height=\
\x221004\x22\x0a inks\
cape:window-x=\x220\
\x22\x0a inkscape:\
window-y=\x220\x22\x0a \
inkscape:windo\
w-maximized=\x221\x22\x0a\
inkscape:sh\
owpageshadow=\x22fa\
lse\x22\x0a units=\
\x22px\x22\x0a inksca\
pe:pagecheckerbo\
ard=\x22false\x22\x0a \
showguides=\x22tru\
e\x22\x0a inkscape\
:snap-bbox=\x22true\
\x22\x0a inkscape:\
bbox-paths=\x22true\
\x22\x0a inkscape:\
bbox-nodes=\x22true\
\x22\x0a inkscape:\
snap-bbox-edge-m\
idpoints=\x22true\x22\x0a\
inkscape:sn\
ap-bbox-midpoint\
s=\x22true\x22\x0a in\
kscape:snap-node\
s=\x22true\x22\x0a in\
kscape:object-pa\
ths=\x22true\x22\x0a \
inkscape:snap-in\
tersection-paths\
=\x22true\x22\x0a ink\
scape:snap-smoot\
h-nodes=\x22true\x22\x0a \
inkscape:sna\
p-midpoints=\x22tru\
e\x22\x0a inkscape\
:snap-global=\x22tr\
ue\x22\x0a fit-mar\
gin-top=\x220\x22\x0a \
fit-margin-left\
=\x220\x22\x0a fit-ma\
rgin-right=\x220\x22\x0a \
fit-margin-b\
ottom=\x220\x22\x0a i\
nkscape:guide-bb\
ox=\x22true\x22>\x0a <\
inkscape:grid\x0a \
type=\x22xygri\
d\x22\x0a id=\x22gr\
id974\x22\x0a em\
pspacing=\x228\x22\x0a \
spacingx=\x220.\
26458332\x22\x0a \
spacingy=\x220.264\
58332\x22\x0a do\
tted=\x22false\x22\x0a \
visible=\x22tru\
e\x22\x0a enable\
d=\x22true\x22\x0a \
snapvisiblegridl\
inesonly=\x22true\x22\x0a\
originx=\x22\
0\x22\x0a origin\
y=\x220\x22 />\x0a </sod\
ipodi:namedview>\
\x0a <metadata\x0a \
id=\x22metadata5\x22\
>\x0a <rdf:RDF>\x0a\
<cc:Work\x0a \
rdf:abou\
t=\x22\x22>\x0a <d\
c:format>image/s\
vg+xml</dc:forma\
t>\x0a <dc:t\
ype\x0a r\
df:resource=\x22htt\
p://purl.org/dc/\
dcmitype/StillIm\
age\x22 />\x0a \
<dc:title />\x0a \
</cc:Work>\x0a \
</rdf:RDF>\x0a <\
/metadata>\x0a <g\x0a\
inkscape:la\
bel=\x22Layer 1\x22\x0a \
inkscape:grou\
pmode=\x22layer\x22\x0a \
id=\x22layer1\x22\x0a \
transform=\x22t\
ranslate(0,-291.\
70835)\x22>\x0a <g\x0a\
id=\x22g847\x22\
\x0a transfor\
m=\x22matrix(0.0520\
7439,0,0,0.05207\
453,-0.90125164,\
282.41203)\x22>\x0a \
<g\x0a i\
d=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transform=\
\x22matrix(1.998621\
9,0,0,1.9986185,\
17.324484,-313.5\
2314)\x22>\x0a \
<path\x0a \
inkscape:tr\
ansform-center-y\
=\x223.175\x22\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0700043\
3;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22\x0a \
d=\x22M 2\
5.399999,271.600\
02 -8.0000008e-7\
,246.20002 H 50.\
799999 Z\x22\x0a \
id=\x22path8\
83\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22\x0a s\
odipodi:nodetype\
s=\x22cccc\x22 />\x0a \
<path\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22\x0a \
inkscape:connect\
or-curvature=\x220\x22\
\x0a id\
=\x22path880\x22\x0a \
d=\x22m 25.\
399999,271.60002\
25.399999,25.4 \
H 0 Z\x22\x0a \
inkscape:tra\
nsform-center-y=\
\x22-3.1749995\x22\x0a \
style=\
\x22opacity:1;fill:\
none;fill-opacit\
y:0.49382719;str\
oke:#ffffff00;st\
roke-width:0.070\
00433;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
/>\x0a <r\
ect\x0a \
ry=\x225.0534658\x22\x0a\
y=\x22\
253.84885\x22\x0a \
x=\x227.648\
7389\x22\x0a \
height=\x2235.52\
8759\x22\x0a \
width=\x2235.528\
786\x22\x0a \
id=\x22rect870\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
6184419;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<circle\x0a \
r=\x2225.39682\
8\x22\x0a \
cy=\x22271.60001\x22\x0a \
cx=\x22\
25.4\x22\x0a \
id=\x22path872\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07635882;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<circle\x0a \
transform=\
\x22rotate(-45)\x22\x0a \
cx=\x22-\
174.08969\x22\x0a \
cy=\x22210.\
01071\x22\x0a \
r=\x2212.656071\
\x22\x0a i\
d=\x22path876\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0739\
9406;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22 \
/>\x0a <pa\
th\x0a \
inkscape:transfo\
rm-center-x=\x22-3.\
1749999\x22\x0a \
sodipodi:n\
odetypes=\x22cccc\x22\x0a\
ink\
scape:connector-\
curvature=\x220\x22\x0a \
id=\x22p\
ath904\x22\x0a \
d=\x22m 25.4,2\
71.60002 -25.400\
00040000004,25.4\
v -50.8 z\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0700\
0433;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22 \
/>\x0a <pa\
th\x0a \
inkscape:transfo\
rm-center-x=\x223.1\
75\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.07000433;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1;paint-ord\
er:stroke fill m\
arkers\x22\x0a \
d=\x22m 25.399\
999,271.60002 25\
.4,-25.4 v 50.8 \
z\x22\x0a \
id=\x22path906\x22\x0a \
inksca\
pe:connector-cur\
vature=\x220\x22\x0a \
sodipodi\
:nodetypes=\x22cccc\
\x22 />\x0a <\
rect\x0a \
ry=\x225.0514922\x22\
\x0a y=\
\x22256.39301\x22\x0a \
x=\x222.56\
63135\x22\x0a \
height=\x2230.4\
40479\x22\x0a \
width=\x2245.69\
3634\x22\x0a \
id=\x22rect837\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
0657438;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<rect\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.0657438;st\
roke-linecap:rou\
nd;stroke-linejo\
in:round;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-dashof\
fset:0;stroke-op\
acity:1;paint-or\
der:stroke fill \
markers\x22\x0a \
id=\x22rect83\
1\x22\x0a \
width=\x2245.693588\
\x22\x0a h\
eight=\x2230.44051\x22\
\x0a x=\
\x22248.76645\x22\x0a \
y=\x22-40.\
633385\x22\x0a \
ry=\x225.05149\
7\x22\x0a \
transform=\x22rotat\
e(90)\x22 />\x0a \
</g>\x0a </g\
>\x0a </g>\x0a <\
path\x0a styl\
e=\x22opacity:1;fil\
l:#ffc107;fill-o\
pacity:1;stroke:\
none;stroke-widt\
h:0.38596651;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-op\
acity:1\x22\x0a \
d=\x22m 50.206421,4\
01.67683 c 110.2\
17209,0.71279 55\
.108609,0.3564 0\
,0 z\x22\x0a id=\
\x22rect997\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22 />\x0a <g\x0a \
id=\x22g839\x22>\x0a \
<g\x0a \
id=\x22g849\x22\x0a \
transform=\x22ro\
tate(90,2.645833\
2,294.35418)\x22>\x0a \
<rect\x0a \
y=\x22291.7\
0834\x22\x0a \
x=\x221.8520832\x22\x0a \
height\
=\x220.52916664\x22\x0a \
width=\x22\
0.52916664\x22\x0a \
id=\x22rect8\
30\x22\x0a s\
tyle=\x22opacity:1;\
fill:#ff0000;fil\
l-opacity:1;stro\
ke:none;stroke-w\
idth:0.52916664;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1\x22 />\x0a \
<rect\x0a \
style=\x22op\
acity:1;fill:#ff\
0000;fill-opacit\
y:1;stroke:none;\
stroke-width:0.5\
2916664;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1\x22\x0a id\
=\x22rect832\x22\x0a \
width=\x220.5\
2916664\x22\x0a \
height=\x220.52\
916664\x22\x0a \
x=\x221.8520832\x22\
\x0a y=\x222\
93.29584\x22 />\x0a \
<rect\x0a \
y=\x22294.883\
36\x22\x0a x\
=\x221.8520832\x22\x0a \
height=\x22\
0.52916664\x22\x0a \
width=\x220.\
52916664\x22\x0a \
id=\x22rect834\
\x22\x0a sty\
le=\x22opacity:1;fi\
ll:#ff0000;fill-\
opacity:1;stroke\
:none;stroke-wid\
th:0.52916664;st\
roke-linecap:rou\
nd;stroke-linejo\
in:round;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-dashof\
fset:0;stroke-op\
acity:1\x22 />\x0a \
<rect\x0a \
style=\x22opac\
ity:1;fill:#ff00\
00;fill-opacity:\
1;stroke:none;st\
roke-width:0.529\
16664;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1\x22\
\x0a id=\x22\
rect836\x22\x0a \
width=\x220.529\
16664\x22\x0a \
height=\x220.5291\
6664\x22\x0a \
x=\x221.8520832\x22\x0a \
y=\x22296\
.47086\x22 />\x0a \
<rect\x0a \
y=\x22292.50208\
\x22\x0a x=\x22\
2.3812499\x22\x0a \
height=\x220.\
52916664\x22\x0a \
width=\x220.52\
916664\x22\x0a \
id=\x22rect838\x22\x0a\
style\
=\x22opacity:1;fill\
:#ff0000;fill-op\
acity:1;stroke:n\
one;stroke-width\
:0.52916664;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1\x22 />\x0a \
<rect\x0a \
style=\x22opacit\
y:1;fill:#ff0000\
;fill-opacity:1;\
stroke:none;stro\
ke-width:0.52916\
664;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1\x22\x0a \
id=\x22re\
ct840\x22\x0a \
width=\x220.52916\
664\x22\x0a \
height=\x220.529166\
64\x22\x0a x\
=\x222.3812499\x22\x0a \
y=\x22294.0\
896\x22 />\x0a \
<rect\x0a \
y=\x22295.67709\x22\x0a \
x=\x222.3\
812499\x22\x0a \
height=\x220.529\
16664\x22\x0a \
width=\x220.52916\
664\x22\x0a \
id=\x22rect842\x22\x0a \
style=\x22o\
pacity:1;fill:#f\
f0000;fill-opaci\
ty:1;stroke:none\
;stroke-width:0.\
52916664;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1\x22 />\x0a <\
rect\x0a \
style=\x22opacity:1\
;fill:#ff0000;fi\
ll-opacity:1;str\
oke:none;stroke-\
width:0.52916664\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1\x22\x0a \
id=\x22rect8\
44\x22\x0a w\
idth=\x220.52916664\
\x22\x0a hei\
ght=\x220.52916664\x22\
\x0a x=\x222\
.9104166\x22\x0a \
y=\x22291.7083\
4\x22 />\x0a <r\
ect\x0a y\
=\x22293.29584\x22\x0a \
x=\x222.910\
4166\x22\x0a \
height=\x220.52916\
664\x22\x0a \
width=\x220.5291666\
4\x22\x0a id\
=\x22rect846\x22\x0a \
style=\x22opa\
city:1;fill:#ff0\
000;fill-opacity\
:1;stroke:none;s\
troke-width:0.52\
916664;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
\x22 />\x0a <re\
ct\x0a st\
yle=\x22opacity:1;f\
ill:#ff0000;fill\
-opacity:1;strok\
e:none;stroke-wi\
dth:0.52916664;s\
troke-linecap:ro\
und;stroke-linej\
oin:round;stroke\
-miterlimit:4;st\
roke-dasharray:n\
one;stroke-dasho\
ffset:0;stroke-o\
pacity:1\x22\x0a \
id=\x22rect848\
\x22\x0a wid\
th=\x220.52916664\x22\x0a\
heigh\
t=\x220.52916664\x22\x0a \
x=\x222.9\
104166\x22\x0a \
y=\x22294.88336\x22\
/>\x0a <rec\
t\x0a y=\x22\
296.47086\x22\x0a \
x=\x222.91041\
66\x22\x0a h\
eight=\x220.5291666\
4\x22\x0a wi\
dth=\x220.52916664\x22\
\x0a id=\x22\
rect850\x22\x0a \
style=\x22opaci\
ty:1;fill:#ff000\
0;fill-opacity:1\
;stroke:none;str\
oke-width:0.5291\
6664;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1\x22 \
/>\x0a </g>\x0a \
</g>\x0a </g>\x0a</\
svg>\x0a\
\x00\x00$\xad\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22spl\
itter-vertical.s\
vg\x22\x0a inkscape:\
export-filename=\
\x22/home/yeison/De\
velopment/piton/\
art/icon_lite.pn\
g\x22\x0a inkscape:e\
xport-xdpi=\x2296\x22\x0a\
inkscape:expo\
rt-ydpi=\x2296\x22>\x0a \
<defs\x0a id=\x22d\
efs2\x22 />\x0a <sodi\
podi:namedview\x0a \
id=\x22base\x22\x0a \
pagecolor=\x22#f\
fffff\x22\x0a bord\
ercolor=\x22#666666\
\x22\x0a borderopa\
city=\x221.0\x22\x0a \
inkscape:pageopa\
city=\x220.0\x22\x0a \
inkscape:pagesha\
dow=\x222\x22\x0a ink\
scape:zoom=\x2224.8\
02598\x22\x0a inks\
cape:cx=\x226.30230\
18\x22\x0a inkscap\
e:cy=\x228.969841\x22\x0a\
inkscape:do\
cument-units=\x22px\
\x22\x0a inkscape:\
current-layer=\x22l\
ayer1\x22\x0a show\
grid=\x22true\x22\x0a \
inkscape:window\
-width=\x221920\x22\x0a \
inkscape:wind\
ow-height=\x221004\x22\
\x0a inkscape:w\
indow-x=\x220\x22\x0a \
inkscape:window\
-y=\x220\x22\x0a inks\
cape:window-maxi\
mized=\x221\x22\x0a i\
nkscape:showpage\
shadow=\x22false\x22\x0a \
units=\x22px\x22\x0a \
inkscape:pag\
echeckerboard=\x22f\
alse\x22\x0a showg\
uides=\x22true\x22\x0a \
inkscape:snap-\
bbox=\x22true\x22\x0a \
inkscape:bbox-p\
aths=\x22true\x22\x0a \
inkscape:bbox-n\
odes=\x22true\x22\x0a \
inkscape:snap-b\
box-edge-midpoin\
ts=\x22true\x22\x0a i\
nkscape:snap-bbo\
x-midpoints=\x22tru\
e\x22\x0a inkscape\
:snap-nodes=\x22tru\
e\x22\x0a inkscape\
:object-paths=\x22t\
rue\x22\x0a inksca\
pe:snap-intersec\
tion-paths=\x22true\
\x22\x0a inkscape:\
snap-smooth-node\
s=\x22true\x22\x0a in\
kscape:snap-midp\
oints=\x22true\x22\x0a \
inkscape:snap-\
global=\x22true\x22\x0a \
fit-margin-to\
p=\x220\x22\x0a fit-m\
argin-left=\x220\x22\x0a \
fit-margin-r\
ight=\x220\x22\x0a fi\
t-margin-bottom=\
\x220\x22\x0a inkscap\
e:guide-bbox=\x22tr\
ue\x22>\x0a <inksca\
pe:grid\x0a t\
ype=\x22xygrid\x22\x0a \
id=\x22grid974\x22\
\x0a empspaci\
ng=\x228\x22\x0a sp\
acingx=\x220.264583\
32\x22\x0a spaci\
ngy=\x220.26458332\x22\
\x0a dotted=\x22\
false\x22\x0a vi\
sible=\x22true\x22\x0a \
enabled=\x22tru\
e\x22\x0a snapvi\
siblegridlineson\
ly=\x22true\x22\x0a \
originx=\x220\x22\x0a \
originy=\x220\x22 \
/>\x0a </sodipodi:\
namedview>\x0a <me\
tadata\x0a id=\x22\
metadata5\x22>\x0a \
<rdf:RDF>\x0a \
<cc:Work\x0a \
rdf:about=\x22\x22>\x0a\
<dc:form\
at>image/svg+xml\
</dc:format>\x0a \
<dc:type\x0a \
rdf:res\
ource=\x22http://pu\
rl.org/dc/dcmity\
pe/StillImage\x22 /\
>\x0a <dc:ti\
tle />\x0a </c\
c:Work>\x0a </rd\
f:RDF>\x0a </metad\
ata>\x0a <g\x0a i\
nkscape:label=\x22L\
ayer 1\x22\x0a ink\
scape:groupmode=\
\x22layer\x22\x0a id=\
\x22layer1\x22\x0a tr\
ansform=\x22transla\
te(0,-291.70835)\
\x22>\x0a <g\x0a \
id=\x22g847\x22\x0a \
transform=\x22mat\
rix(0.05207439,0\
,0,0.05207453,-0\
.90125164,282.41\
203)\x22>\x0a <g\x0a\
id=\x22g85\
1\x22>\x0a <g\x0a \
id=\x22g1\
059\x22\x0a \
transform=\x22matri\
x(1.9986219,0,0,\
1.9986185,17.324\
484,-313.52314)\x22\
>\x0a <pat\
h\x0a i\
nkscape:transfor\
m-center-y=\x223.17\
5\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.07000433;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22\x0a \
d=\x22M 25.3999\
99,271.60002 -8.\
0000008e-7,246.2\
0002 H 50.799999\
Z\x22\x0a \
id=\x22path883\x22\x0a \
inksc\
ape:connector-cu\
rvature=\x220\x22\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22 />\x0a \
<path\x0a \
sodipodi:node\
types=\x22cccc\x22\x0a \
inksca\
pe:connector-cur\
vature=\x220\x22\x0a \
id=\x22path\
880\x22\x0a \
d=\x22m 25.399999\
,271.60002 25.39\
9999,25.4 H 0 Z\x22\
\x0a in\
kscape:transform\
-center-y=\x22-3.17\
49995\x22\x0a \
style=\x22opaci\
ty:1;fill:none;f\
ill-opacity:0.49\
382719;stroke:#f\
fffff00;stroke-w\
idth:0.07000433;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1;paint-\
order:stroke fil\
l markers\x22 />\x0a \
<rect\x0a \
ry=\x225\
.0534658\x22\x0a \
y=\x22253.84\
885\x22\x0a \
x=\x227.6487389\x22\x0a\
hei\
ght=\x2235.528759\x22\x0a\
wid\
th=\x2235.528786\x22\x0a \
id=\x22\
rect870\x22\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0618441\
9;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22 />\x0a\
<circl\
e\x0a r\
=\x2225.396828\x22\x0a \
cy=\x2227\
1.60001\x22\x0a \
cx=\x2225.4\x22\x0a\
id=\
\x22path872\x22\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.076358\
82;stroke-lineca\
p:round;stroke-l\
inejoin:round;st\
roke-miterlimit:\
4;stroke-dasharr\
ay:none;stroke-d\
ashoffset:0;stro\
ke-opacity:1;pai\
nt-order:stroke \
fill markers\x22 />\
\x0a <circ\
le\x0a \
transform=\x22rotat\
e(-45)\x22\x0a \
cx=\x22-174.08\
969\x22\x0a \
cy=\x22210.01071\x22\
\x0a r=\
\x2212.656071\x22\x0a \
id=\x22pat\
h876\x22\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.07399406;s\
troke-linecap:ro\
und;stroke-linej\
oin:round;stroke\
-miterlimit:4;st\
roke-dasharray:n\
one;stroke-dasho\
ffset:0;stroke-o\
pacity:1;paint-o\
rder:stroke fill\
markers\x22 />\x0a \
<path\x0a \
inksca\
pe:transform-cen\
ter-x=\x22-3.174999\
9\x22\x0a \
sodipodi:nodetyp\
es=\x22cccc\x22\x0a \
inkscape:\
connector-curvat\
ure=\x220\x22\x0a \
id=\x22path904\
\x22\x0a d\
=\x22m 25.4,271.600\
02 -25.400000400\
00004,25.4 v -50\
.8 z\x22\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.07000433;s\
troke-linecap:ro\
und;stroke-linej\
oin:round;stroke\
-miterlimit:4;st\
roke-dasharray:n\
one;stroke-dasho\
ffset:0;stroke-o\
pacity:1;paint-o\
rder:stroke fill\
markers\x22 />\x0a \
<path\x0a \
inksca\
pe:transform-cen\
ter-x=\x223.175\x22\x0a \
style\
=\x22opacity:1;fill\
:none;fill-opaci\
ty:0.49382719;st\
roke:#ffffff00;s\
troke-width:0.07\
000433;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
;paint-order:str\
oke fill markers\
\x22\x0a d\
=\x22m 25.399999,27\
1.60002 25.4,-25\
.4 v 50.8 z\x22\x0a \
id=\x22pa\
th906\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a \
sodipodi:nodet\
ypes=\x22cccc\x22 />\x0a \
<rect\x0a \
ry=\x22\
5.0514922\x22\x0a \
y=\x22256.3\
9301\x22\x0a \
x=\x222.5663135\x22\
\x0a he\
ight=\x2230.440479\x22\
\x0a wi\
dth=\x2245.693634\x22\x0a\
id=\
\x22rect837\x22\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.065743\
8;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22 />\x0a\
<rect\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
0657438;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22\x0a \
id=\x22rect831\x22\x0a \
width=\
\x2245.693588\x22\x0a \
height=\
\x2230.44051\x22\x0a \
x=\x22248.7\
6645\x22\x0a \
y=\x22-40.633385\
\x22\x0a r\
y=\x225.051497\x22\x0a \
transf\
orm=\x22rotate(90)\x22\
/>\x0a </g>\
\x0a </g>\x0a \
</g>\x0a <path\x0a \
style=\x22opa\
city:1;fill:#ffc\
107;fill-opacity\
:1;stroke:none;s\
troke-width:0.38\
596651;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-opacity:\
1\x22\x0a d=\x22m 5\
0.206421,401.676\
83 c 110.217209,\
0.71279 55.10860\
9,0.3564 0,0 z\x22\x0a\
id=\x22rect9\
97\x22\x0a inksc\
ape:connector-cu\
rvature=\x220\x22 />\x0a \
<g\x0a id=\
\x22g839\x22\x0a tr\
ansform=\x22matrix(\
0,-1,-1,0,297.00\
002,297.00002)\x22>\
\x0a <rect\x0a \
y=\x22291.972\
93\x22\x0a x=\x22\
2.3812499\x22\x0a \
height=\x220.52\
916664\x22\x0a \
width=\x220.529166\
64\x22\x0a id=\
\x22rect827\x22\x0a \
style=\x22opacit\
y:1;fill:#ff0000\
;fill-opacity:1;\
stroke:none;stro\
ke-width:0.52916\
664;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1\x22 /\
>\x0a <rect\x0a \
style=\x22op\
acity:1;fill:#ff\
0000;fill-opacit\
y:1;stroke:none;\
stroke-width:0.5\
2916664;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1\x22\x0a id=\x22\
rect829\x22\x0a \
width=\x220.52916\
664\x22\x0a he\
ight=\x220.52916664\
\x22\x0a x=\x222.\
3812499\x22\x0a \
y=\x22296.20627\x22 \
/>\x0a <rect\x0a \
y=\x22295.1\
4792\x22\x0a x\
=\x222.3812499\x22\x0a \
height=\x220.\
52916664\x22\x0a \
width=\x220.5291\
6664\x22\x0a i\
d=\x22rect832\x22\x0a \
style=\x22opac\
ity:1;fill:#ff00\
00;fill-opacity:\
1;stroke:none;st\
roke-width:0.529\
16664;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1\x22\
/>\x0a <rect\x0a\
style=\x22\
opacity:1;fill:#\
ff0000;fill-opac\
ity:1;stroke:non\
e;stroke-width:0\
.52916664;stroke\
-linecap:round;s\
troke-linejoin:r\
ound;stroke-mite\
rlimit:4;stroke-\
dasharray:none;s\
troke-dashoffset\
:0;stroke-opacit\
y:1\x22\x0a id\
=\x22rect834\x22\x0a \
width=\x220.529\
16664\x22\x0a \
height=\x220.529166\
64\x22\x0a x=\x22\
2.3812499\x22\x0a \
y=\x22294.0896\x22\
/>\x0a <rect\x0a\
y=\x22293.\
03128\x22\x0a \
x=\x222.3812499\x22\x0a \
height=\x220\
.52916664\x22\x0a \
width=\x220.529\
16664\x22\x0a \
id=\x22rect836\x22\x0a \
style=\x22opa\
city:1;fill:#ff0\
000;fill-opacity\
:1;stroke:none;s\
troke-width:0.52\
916664;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
\x22 />\x0a </g>\x0a \
</g>\x0a</svg>\x0a\
\x00\x00\x1e\xb7\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22lef\
tarrow.svg\x22\x0a i\
nkscape:export-f\
ilename=\x22/home/y\
eison/Developmen\
t/piton/art/icon\
_lite.png\x22\x0a in\
kscape:export-xd\
pi=\x2296\x22\x0a inksc\
ape:export-ydpi=\
\x2296\x22>\x0a <defs\x0a \
id=\x22defs2\x22 />\
\x0a <sodipodi:nam\
edview\x0a id=\x22\
base\x22\x0a pagec\
olor=\x22#ffffff\x22\x0a \
bordercolor=\
\x22#666666\x22\x0a b\
orderopacity=\x221.\
0\x22\x0a inkscape\
:pageopacity=\x220.\
0\x22\x0a inkscape\
:pageshadow=\x222\x22\x0a\
inkscape:zo\
om=\x2228.704913\x22\x0a \
inkscape:cx=\
\x224.6862968\x22\x0a \
inkscape:cy=\x225.\
0026685\x22\x0a in\
kscape:document-\
units=\x22px\x22\x0a \
inkscape:current\
-layer=\x22layer1\x22\x0a\
showgrid=\x22t\
rue\x22\x0a inksca\
pe:window-width=\
\x221920\x22\x0a inks\
cape:window-heig\
ht=\x221004\x22\x0a i\
nkscape:window-x\
=\x220\x22\x0a inksca\
pe:window-y=\x220\x22\x0a\
inkscape:wi\
ndow-maximized=\x22\
1\x22\x0a inkscape\
:showpageshadow=\
\x22false\x22\x0a uni\
ts=\x22px\x22\x0a ink\
scape:pagechecke\
rboard=\x22false\x22\x0a \
showguides=\x22\
true\x22\x0a inksc\
ape:snap-bbox=\x22t\
rue\x22\x0a inksca\
pe:bbox-paths=\x22t\
rue\x22\x0a inksca\
pe:bbox-nodes=\x22t\
rue\x22\x0a inksca\
pe:snap-bbox-edg\
e-midpoints=\x22tru\
e\x22\x0a inkscape\
:snap-bbox-midpo\
ints=\x22true\x22\x0a \
inkscape:snap-n\
odes=\x22true\x22\x0a \
inkscape:object\
-paths=\x22true\x22\x0a \
inkscape:snap\
-intersection-pa\
ths=\x22true\x22\x0a \
inkscape:snap-sm\
ooth-nodes=\x22true\
\x22\x0a inkscape:\
snap-midpoints=\x22\
true\x22\x0a inksc\
ape:snap-global=\
\x22true\x22\x0a fit-\
margin-top=\x220\x22\x0a \
fit-margin-l\
eft=\x220\x22\x0a fit\
-margin-right=\x220\
\x22\x0a fit-margi\
n-bottom=\x220\x22\x0a \
inkscape:guide\
-bbox=\x22true\x22>\x0a \
<inkscape:grid\
\x0a type=\x22xy\
grid\x22\x0a id=\
\x22grid974\x22\x0a \
empspacing=\x228\x22\x0a\
spacingx=\
\x220.26458332\x22\x0a \
spacingy=\x220.\
26458332\x22\x0a \
dotted=\x22false\x22\x0a\
visible=\x22\
true\x22\x0a ena\
bled=\x22true\x22\x0a \
snapvisiblegr\
idlinesonly=\x22tru\
e\x22\x0a origin\
x=\x220\x22\x0a ori\
giny=\x220\x22 />\x0a </\
sodipodi:namedvi\
ew>\x0a <metadata\x0a\
id=\x22metadat\
a5\x22>\x0a <rdf:RD\
F>\x0a <cc:Wor\
k\x0a rdf:a\
bout=\x22\x22>\x0a \
<dc:format>imag\
e/svg+xml</dc:fo\
rmat>\x0a <d\
c:type\x0a \
rdf:resource=\x22\
http://purl.org/\
dc/dcmitype/Stil\
lImage\x22 />\x0a \
<dc:title />\x0a\
</cc:Work>\
\x0a </rdf:RDF>\x0a\
</metadata>\x0a \
<g\x0a inkscape\
:label=\x22Layer 1\x22\
\x0a inkscape:g\
roupmode=\x22layer\x22\
\x0a id=\x22layer1\
\x22\x0a transform\
=\x22translate(0,-2\
91.70835)\x22>\x0a \
<g\x0a id=\x22g8\
47\x22\x0a trans\
form=\x22matrix(0.0\
5207439,0,0,0.05\
207453,-0.901251\
64,282.41203)\x22>\x0a\
<g\x0a \
id=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transfo\
rm=\x22matrix(1.998\
6219,0,0,1.99861\
85,17.324484,-31\
3.52314)\x22>\x0a \
<path\x0a \
inkscape\
:transform-cente\
r-y=\x223.175\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0700\
0433;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22\x0a\
d=\x22\
M 25.399999,271.\
60002 -8.0000008\
e-7,246.20002 H \
50.799999 Z\x22\x0a \
id=\x22pa\
th883\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a \
sodipodi:nodet\
ypes=\x22cccc\x22 />\x0a \
<path\x0a \
sodi\
podi:nodetypes=\x22\
cccc\x22\x0a \
inkscape:conn\
ector-curvature=\
\x220\x22\x0a \
id=\x22path880\x22\x0a \
d=\x22m \
25.399999,271.60\
002 25.399999,25\
.4 H 0 Z\x22\x0a \
inkscape:\
transform-center\
-y=\x22-3.1749995\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07000433;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<rect\x0a \
ry=\x225.053465\
8\x22\x0a \
y=\x22253.84885\x22\x0a \
x=\x227.\
6487389\x22\x0a \
height=\x2235\
.528759\x22\x0a \
width=\x2235.\
528786\x22\x0a \
id=\x22rect870\
\x22\x0a s\
tyle=\x22opacity:1;\
fill:none;fill-o\
pacity:0.4938271\
9;stroke:#ffffff\
00;stroke-width:\
0.06184419;strok\
e-linecap:round;\
stroke-linejoin:\
round;stroke-mit\
erlimit:4;stroke\
-dasharray:none;\
stroke-dashoffse\
t:0;stroke-opaci\
ty:1;paint-order\
:stroke fill mar\
kers\x22 />\x0a \
<circle\x0a \
r=\x2225.39\
6828\x22\x0a \
cy=\x22271.60001\
\x22\x0a c\
x=\x2225.4\x22\x0a \
id=\x22path87\
2\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.07635882;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<circle\x0a \
transfo\
rm=\x22rotate(-45)\x22\
\x0a cx\
=\x22-174.08969\x22\x0a \
cy=\x222\
10.01071\x22\x0a \
r=\x2212.656\
071\x22\x0a \
id=\x22path876\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
7399406;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<path\x0a \
inkscape:tran\
sform-center-x=\x22\
-3.1749999\x22\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22\x0a \
inkscape:connect\
or-curvature=\x220\x22\
\x0a id\
=\x22path904\x22\x0a \
d=\x22m 25.\
4,271.60002 -25.\
40000040000004,2\
5.4 v -50.8 z\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
7000433;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<path\x0a \
inkscape:tran\
sform-center-x=\x22\
3.175\x22\x0a \
style=\x22opaci\
ty:1;fill:none;f\
ill-opacity:0.49\
382719;stroke:#f\
fffff00;stroke-w\
idth:0.07000433;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1;paint-\
order:stroke fil\
l markers\x22\x0a \
d=\x22m 25.\
399999,271.60002\
25.4,-25.4 v 50\
.8 z\x22\x0a \
id=\x22path906\x22\x0a\
ink\
scape:connector-\
curvature=\x220\x22\x0a \
sodip\
odi:nodetypes=\x22c\
ccc\x22 />\x0a \
<rect\x0a \
ry=\x225.05149\
22\x22\x0a \
y=\x22256.39301\x22\x0a \
x=\x222\
.5663135\x22\x0a \
height=\x223\
0.440479\x22\x0a \
width=\x2245\
.693634\x22\x0a \
id=\x22rect83\
7\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.0657438;strok\
e-linecap:round;\
stroke-linejoin:\
round;stroke-mit\
erlimit:4;stroke\
-dasharray:none;\
stroke-dashoffse\
t:0;stroke-opaci\
ty:1;paint-order\
:stroke fill mar\
kers\x22 />\x0a \
<rect\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0657438\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22\x0a \
id=\x22rec\
t831\x22\x0a \
width=\x2245.693\
588\x22\x0a \
height=\x2230.440\
51\x22\x0a \
x=\x22248.76645\x22\x0a \
y=\x22-\
40.633385\x22\x0a \
ry=\x225.05\
1497\x22\x0a \
transform=\x22ro\
tate(90)\x22 />\x0a \
</g>\x0a \
</g>\x0a </g>\x0a \
<path\x0a s\
tyle=\x22opacity:1;\
fill:#ffc107;fil\
l-opacity:1;stro\
ke:none;stroke-w\
idth:0.38596651;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-opacity:1\x22\x0a \
d=\x22m 50.20642\
1,401.67683 c 11\
0.217209,0.71279\
55.108609,0.356\
4 0,0 z\x22\x0a \
id=\x22rect997\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22 />\x0a <pat\
h\x0a style=\x22\
fill:none;stroke\
:#ff0000;stroke-\
width:0.52916664\
;stroke-linecap:\
butt;stroke-line\
join:bevel;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-opac\
ity:1\x22\x0a d=\
\x22m 3.4711232,293\
.30805 -1.65058,\
1.04613 1.65058,\
1.04613\x22\x0a \
id=\x22path827\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a sodi\
podi:nodetypes=\x22\
ccc\x22 />\x0a </g>\x0a<\
/svg>\x0a\
\x00\x00 q\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22flo\
at.svg\x22\x0a inksc\
ape:export-filen\
ame=\x22/home/yeiso\
n/Development/pi\
ton/art/icon_lit\
e.png\x22\x0a inksca\
pe:export-xdpi=\x22\
96\x22\x0a inkscape:\
export-ydpi=\x2296\x22\
>\x0a <defs\x0a i\
d=\x22defs2\x22 />\x0a <\
sodipodi:namedvi\
ew\x0a id=\x22base\
\x22\x0a pagecolor\
=\x22#ffffff\x22\x0a \
bordercolor=\x22#66\
6666\x22\x0a borde\
ropacity=\x221.0\x22\x0a \
inkscape:pag\
eopacity=\x220.0\x22\x0a \
inkscape:pag\
eshadow=\x222\x22\x0a \
inkscape:zoom=\x22\
23.490934\x22\x0a \
inkscape:cx=\x2212.\
388735\x22\x0a ink\
scape:cy=\x227.1323\
576\x22\x0a inksca\
pe:document-unit\
s=\x22px\x22\x0a inks\
cape:current-lay\
er=\x22layer1\x22\x0a \
showgrid=\x22true\x22\
\x0a inkscape:w\
indow-width=\x22192\
0\x22\x0a inkscape\
:window-height=\x22\
1004\x22\x0a inksc\
ape:window-x=\x220\x22\
\x0a inkscape:w\
indow-y=\x220\x22\x0a \
inkscape:window\
-maximized=\x221\x22\x0a \
inkscape:sho\
wpageshadow=\x22fal\
se\x22\x0a units=\x22\
px\x22\x0a inkscap\
e:pagecheckerboa\
rd=\x22false\x22\x0a \
showguides=\x22true\
\x22\x0a inkscape:\
snap-bbox=\x22true\x22\
\x0a inkscape:b\
box-paths=\x22true\x22\
\x0a inkscape:b\
box-nodes=\x22true\x22\
\x0a inkscape:s\
nap-bbox-edge-mi\
dpoints=\x22true\x22\x0a \
inkscape:sna\
p-bbox-midpoints\
=\x22true\x22\x0a ink\
scape:snap-nodes\
=\x22false\x22\x0a in\
kscape:object-pa\
ths=\x22true\x22\x0a \
inkscape:snap-in\
tersection-paths\
=\x22true\x22\x0a ink\
scape:snap-smoot\
h-nodes=\x22true\x22\x0a \
inkscape:sna\
p-midpoints=\x22tru\
e\x22\x0a inkscape\
:snap-global=\x22tr\
ue\x22\x0a fit-mar\
gin-top=\x220\x22\x0a \
fit-margin-left\
=\x220\x22\x0a fit-ma\
rgin-right=\x220\x22\x0a \
fit-margin-b\
ottom=\x220\x22\x0a i\
nkscape:guide-bb\
ox=\x22true\x22>\x0a <\
inkscape:grid\x0a \
type=\x22xygri\
d\x22\x0a id=\x22gr\
id974\x22\x0a em\
pspacing=\x228\x22\x0a \
spacingx=\x220.\
26458332\x22\x0a \
spacingy=\x220.264\
58332\x22\x0a do\
tted=\x22false\x22\x0a \
visible=\x22tru\
e\x22\x0a enable\
d=\x22true\x22\x0a \
snapvisiblegridl\
inesonly=\x22true\x22\x0a\
originx=\x22\
0\x22\x0a origin\
y=\x220\x22 />\x0a </sod\
ipodi:namedview>\
\x0a <metadata\x0a \
id=\x22metadata5\x22\
>\x0a <rdf:RDF>\x0a\
<cc:Work\x0a \
rdf:abou\
t=\x22\x22>\x0a <d\
c:format>image/s\
vg+xml</dc:forma\
t>\x0a <dc:t\
ype\x0a r\
df:resource=\x22htt\
p://purl.org/dc/\
dcmitype/StillIm\
age\x22 />\x0a \
<dc:title />\x0a \
</cc:Work>\x0a \
</rdf:RDF>\x0a <\
/metadata>\x0a <g\x0a\
inkscape:la\
bel=\x22Layer 1\x22\x0a \
inkscape:grou\
pmode=\x22layer\x22\x0a \
id=\x22layer1\x22\x0a \
transform=\x22t\
ranslate(0,-291.\
70835)\x22>\x0a <g\x0a\
id=\x22g847\x22\
\x0a transfor\
m=\x22matrix(0.0520\
7439,0,0,0.05207\
453,-0.90125164,\
282.41203)\x22>\x0a \
<g\x0a i\
d=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transform=\
\x22matrix(1.998621\
9,0,0,1.9986185,\
17.324484,-313.5\
2314)\x22>\x0a \
<path\x0a \
inkscape:tr\
ansform-center-y\
=\x223.175\x22\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0700043\
3;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22\x0a \
d=\x22M 2\
5.399999,271.600\
02 -8.0000008e-7\
,246.20002 H 50.\
799999 Z\x22\x0a \
id=\x22path8\
83\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22\x0a s\
odipodi:nodetype\
s=\x22cccc\x22 />\x0a \
<path\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22\x0a \
inkscape:connect\
or-curvature=\x220\x22\
\x0a id\
=\x22path880\x22\x0a \
d=\x22m 25.\
399999,271.60002\
25.399999,25.4 \
H 0 Z\x22\x0a \
inkscape:tra\
nsform-center-y=\
\x22-3.1749995\x22\x0a \
style=\
\x22opacity:1;fill:\
none;fill-opacit\
y:0.49382719;str\
oke:#ffffff00;st\
roke-width:0.070\
00433;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
/>\x0a <r\
ect\x0a \
ry=\x225.0534658\x22\x0a\
y=\x22\
253.84885\x22\x0a \
x=\x227.648\
7389\x22\x0a \
height=\x2235.52\
8759\x22\x0a \
width=\x2235.528\
786\x22\x0a \
id=\x22rect870\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
6184419;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<circle\x0a \
r=\x2225.39682\
8\x22\x0a \
cy=\x22271.60001\x22\x0a \
cx=\x22\
25.4\x22\x0a \
id=\x22path872\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07635882;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<circle\x0a \
transform=\
\x22rotate(-45)\x22\x0a \
cx=\x22-\
174.08969\x22\x0a \
cy=\x22210.\
01071\x22\x0a \
r=\x2212.656071\
\x22\x0a i\
d=\x22path876\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0739\
9406;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22 \
/>\x0a <pa\
th\x0a \
inkscape:transfo\
rm-center-x=\x22-3.\
1749999\x22\x0a \
sodipodi:n\
odetypes=\x22cccc\x22\x0a\
ink\
scape:connector-\
curvature=\x220\x22\x0a \
id=\x22p\
ath904\x22\x0a \
d=\x22m 25.4,2\
71.60002 -25.400\
00040000004,25.4\
v -50.8 z\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0700\
0433;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22 \
/>\x0a <pa\
th\x0a \
inkscape:transfo\
rm-center-x=\x223.1\
75\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.07000433;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1;paint-ord\
er:stroke fill m\
arkers\x22\x0a \
d=\x22m 25.399\
999,271.60002 25\
.4,-25.4 v 50.8 \
z\x22\x0a \
id=\x22path906\x22\x0a \
inksca\
pe:connector-cur\
vature=\x220\x22\x0a \
sodipodi\
:nodetypes=\x22cccc\
\x22 />\x0a <\
rect\x0a \
ry=\x225.0514922\x22\
\x0a y=\
\x22256.39301\x22\x0a \
x=\x222.56\
63135\x22\x0a \
height=\x2230.4\
40479\x22\x0a \
width=\x2245.69\
3634\x22\x0a \
id=\x22rect837\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
0657438;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<rect\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.0657438;st\
roke-linecap:rou\
nd;stroke-linejo\
in:round;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-dashof\
fset:0;stroke-op\
acity:1;paint-or\
der:stroke fill \
markers\x22\x0a \
id=\x22rect83\
1\x22\x0a \
width=\x2245.693588\
\x22\x0a h\
eight=\x2230.44051\x22\
\x0a x=\
\x22248.76645\x22\x0a \
y=\x22-40.\
633385\x22\x0a \
ry=\x225.05149\
7\x22\x0a \
transform=\x22rotat\
e(90)\x22 />\x0a \
</g>\x0a </g\
>\x0a </g>\x0a <\
path\x0a styl\
e=\x22opacity:1;fil\
l:#ffc107;fill-o\
pacity:1;stroke:\
none;stroke-widt\
h:0.38596651;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-op\
acity:1\x22\x0a \
d=\x22m 50.206421,4\
01.67683 c 110.2\
17209,0.71279 55\
.108609,0.3564 0\
,0 z\x22\x0a id=\
\x22rect997\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22 />\x0a <rect\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:1;\
stroke:#ff0000;s\
troke-width:0.52\
916664;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
;paint-order:str\
oke fill markers\
\x22\x0a id=\x22rec\
t829\x22\x0a wid\
th=\x222.6581812\x22\x0a \
height=\x222.\
658181\x22\x0a x\
=\x220.6522209\x22\x0a \
y=\x22293.68961\
\x22\x0a ry=\x220.2\
9536656\x22 />\x0a \
<rect\x0a ry=\
\x220.29536656\x22\x0a \
y=\x22292.3606\x22\
\x0a x=\x221.981\
2645\x22\x0a hei\
ght=\x222.658181\x22\x0a \
width=\x222.6\
581812\x22\x0a i\
d=\x22rect839\x22\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:1;str\
oke:#ff0000;stro\
ke-width:0.52916\
664;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1;pa\
int-order:stroke\
fill markers\x22 /\
>\x0a </g>\x0a</svg>\x0a\
\
\x00\x00#\x10\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22che\
ckbox_indetermin\
ate.svg\x22\x0a inks\
cape:export-file\
name=\x22/home/yeis\
on/Development/p\
iton/art/icon_li\
te.png\x22\x0a inksc\
ape:export-xdpi=\
\x2296\x22\x0a inkscape\
:export-ydpi=\x2296\
\x22>\x0a <defs\x0a \
id=\x22defs2\x22 />\x0a \
<sodipodi:namedv\
iew\x0a id=\x22bas\
e\x22\x0a pagecolo\
r=\x22#ffffff\x22\x0a \
bordercolor=\x22#6\
66666\x22\x0a bord\
eropacity=\x221.0\x22\x0a\
inkscape:pa\
geopacity=\x220.0\x22\x0a\
inkscape:pa\
geshadow=\x222\x22\x0a \
inkscape:zoom=\
\x2266.442396\x22\x0a \
inkscape:cx=\x2210\
.78082\x22\x0a ink\
scape:cy=\x229.3546\
433\x22\x0a inksca\
pe:document-unit\
s=\x22px\x22\x0a inks\
cape:current-lay\
er=\x22layer1\x22\x0a \
showgrid=\x22true\x22\
\x0a inkscape:w\
indow-width=\x22192\
0\x22\x0a inkscape\
:window-height=\x22\
1004\x22\x0a inksc\
ape:window-x=\x220\x22\
\x0a inkscape:w\
indow-y=\x220\x22\x0a \
inkscape:window\
-maximized=\x221\x22\x0a \
inkscape:sho\
wpageshadow=\x22fal\
se\x22\x0a units=\x22\
px\x22\x0a inkscap\
e:pagecheckerboa\
rd=\x22false\x22\x0a \
showguides=\x22fals\
e\x22\x0a inkscape\
:snap-bbox=\x22true\
\x22\x0a inkscape:\
bbox-paths=\x22true\
\x22\x0a inkscape:\
bbox-nodes=\x22true\
\x22\x0a inkscape:\
snap-bbox-edge-m\
idpoints=\x22true\x22\x0a\
inkscape:sn\
ap-bbox-midpoint\
s=\x22true\x22\x0a in\
kscape:snap-node\
s=\x22true\x22\x0a in\
kscape:object-pa\
ths=\x22true\x22\x0a \
inkscape:snap-in\
tersection-paths\
=\x22true\x22\x0a ink\
scape:snap-smoot\
h-nodes=\x22true\x22\x0a \
inkscape:sna\
p-midpoints=\x22tru\
e\x22\x0a inkscape\
:snap-global=\x22tr\
ue\x22\x0a fit-mar\
gin-top=\x220\x22\x0a \
fit-margin-left\
=\x220\x22\x0a fit-ma\
rgin-right=\x220\x22\x0a \
fit-margin-b\
ottom=\x220\x22\x0a i\
nkscape:guide-bb\
ox=\x22true\x22>\x0a <\
inkscape:grid\x0a \
type=\x22xygri\
d\x22\x0a id=\x22gr\
id974\x22\x0a em\
pspacing=\x228\x22\x0a \
spacingx=\x220.\
26458332\x22\x0a \
spacingy=\x220.264\
58332\x22\x0a do\
tted=\x22false\x22\x0a \
visible=\x22tru\
e\x22\x0a enable\
d=\x22true\x22\x0a \
snapvisiblegridl\
inesonly=\x22true\x22\x0a\
originx=\x22\
0\x22\x0a origin\
y=\x220\x22 />\x0a </sod\
ipodi:namedview>\
\x0a <metadata\x0a \
id=\x22metadata5\x22\
>\x0a <rdf:RDF>\x0a\
<cc:Work\x0a \
rdf:abou\
t=\x22\x22>\x0a <d\
c:format>image/s\
vg+xml</dc:forma\
t>\x0a <dc:t\
ype\x0a r\
df:resource=\x22htt\
p://purl.org/dc/\
dcmitype/StillIm\
age\x22 />\x0a \
<dc:title></dc:t\
itle>\x0a </cc\
:Work>\x0a </rdf\
:RDF>\x0a </metada\
ta>\x0a <g\x0a in\
kscape:label=\x22La\
yer 1\x22\x0a inks\
cape:groupmode=\x22\
layer\x22\x0a id=\x22\
layer1\x22\x0a tra\
nsform=\x22translat\
e(0,-291.70835)\x22\
>\x0a <g\x0a \
transform=\x22trans\
late(47.359504,-\
89.690092)\x22\x0a \
id=\x22layer1-3\x22\
\x0a inkscape\
:label=\x22Layer 1\x22\
>\x0a <path\x0a \
inkscape:\
connector-curvat\
ure=\x220\x22\x0a \
id=\x22rect1954\x22\x0a \
d=\x22m -46\
.03762,382.19219\
c -0.292672,0 -\
0.528134,0.23546\
-0.528134,0.528\
13 v 2.6479 c 0,\
0.29268 0.235462\
,0.52814 0.52813\
4,0.52814 h 2.64\
79 c 0.292673,0 \
0.528133,-0.2354\
6 0.528133,-0.52\
814 v -2.6479 c \
0,-0.29267 -0.23\
546,-0.52813 -0.\
528133,-0.52813 \
z m 10e-4,0.2645\
8 h 2.645833 c 0\
.146573,0 0.2645\
83,0.11801 0.264\
583,0.26459 v 2.\
64583 c 0,0.1465\
7 -0.11801,0.264\
58 -0.264583,0.2\
6458 h -2.645799\
c -0.146574,0 -\
0.264584,-0.1180\
1 -0.264584,-0.2\
6458 v -2.64583 \
c 0,-0.14658 0.1\
1801,-0.26459 0.\
264584,-0.26459 \
z\x22\x0a styl\
e=\x22opacity:1;fil\
l:#ff0000;fill-o\
pacity:1;stroke:\
#ff0000;stroke-w\
idth:0;stroke-li\
necap:square;str\
oke-linejoin:mit\
er;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a <pat\
h\x0a style\
=\x22opacity:1;fill\
:#ff0000;fill-op\
acity:1;stroke:#\
ff0000;stroke-wi\
dth:0;stroke-lin\
ecap:square;stro\
ke-linejoin:mite\
r;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
;paint-order:str\
oke fill markers\
\x22\x0a d=\x22m \
-45.772004,382.9\
8594 v 2.11667 h\
2.116666 v -2.1\
1667 z m 1.85208\
3,0.26459 v 1.58\
75 h -1.5875 z\x22\x0a\
id=\x22rec\
t2118\x22\x0a \
inkscape:connect\
or-curvature=\x220\x22\
\x0a sodipo\
di:nodetypes=\x22cc\
ccccccc\x22 />\x0a \
</g>\x0a <g\x0a \
id=\x22g847\x22\x0a \
transform=\x22m\
atrix(0.05207439\
,0,0,0.05207453,\
-0.90125164,282.\
41203)\x22>\x0a <\
g\x0a id=\x22g\
851\x22>\x0a <g\
\x0a id=\x22\
g1059\x22\x0a \
transform=\x22mat\
rix(1.9986219,0,\
0,1.9986185,17.3\
24484,-313.52314\
)\x22>\x0a <p\
ath\x0a \
inkscape:transf\
orm-center-y=\x223.\
175\x22\x0a \
style=\x22opacity\
:1;fill:none;fil\
l-opacity:0.4938\
2719;stroke:#fff\
fff00;stroke-wid\
th:0.07000433;st\
roke-linecap:rou\
nd;stroke-linejo\
in:round;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-dashof\
fset:0;stroke-op\
acity:1;paint-or\
der:stroke fill \
markers\x22\x0a \
d=\x22M 25.39\
9999,271.60002 -\
8.0000008e-7,246\
.20002 H 50.7999\
99 Z\x22\x0a \
id=\x22path883\x22\x0a\
ink\
scape:connector-\
curvature=\x220\x22\x0a \
sodip\
odi:nodetypes=\x22c\
ccc\x22 />\x0a \
<path\x0a \
sodipodi:no\
detypes=\x22cccc\x22\x0a \
inks\
cape:connector-c\
urvature=\x220\x22\x0a \
id=\x22pa\
th880\x22\x0a \
d=\x22m 25.3999\
99,271.60002 25.\
399999,25.4 H 0 \
Z\x22\x0a \
inkscape:transfo\
rm-center-y=\x22-3.\
1749995\x22\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0700043\
3;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22 />\x0a\
<rect\x0a\
ry=\
\x225.0534658\x22\x0a \
y=\x22253.\
84885\x22\x0a \
x=\x227.6487389\
\x22\x0a h\
eight=\x2235.528759\
\x22\x0a w\
idth=\x2235.528786\x22\
\x0a id\
=\x22rect870\x22\x0a \
style=\x22o\
pacity:1;fill:no\
ne;fill-opacity:\
0.49382719;strok\
e:#ffffff00;stro\
ke-width:0.06184\
419;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1;pa\
int-order:stroke\
fill markers\x22 /\
>\x0a <cir\
cle\x0a \
r=\x2225.396828\x22\x0a \
cy=\x22\
271.60001\x22\x0a \
cx=\x2225.4\
\x22\x0a i\
d=\x22path872\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0763\
5882;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22 \
/>\x0a <ci\
rcle\x0a \
transform=\x22rot\
ate(-45)\x22\x0a \
cx=\x22-174.\
08969\x22\x0a \
cy=\x22210.0107\
1\x22\x0a \
r=\x2212.656071\x22\x0a \
id=\x22p\
ath876\x22\x0a \
style=\x22opac\
ity:1;fill:none;\
fill-opacity:0.4\
9382719;stroke:#\
ffffff00;stroke-\
width:0.07399406\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22 />\x0a \
<path\x0a \
inks\
cape:transform-c\
enter-x=\x22-3.1749\
999\x22\x0a \
sodipodi:nodet\
ypes=\x22cccc\x22\x0a \
inkscap\
e:connector-curv\
ature=\x220\x22\x0a \
id=\x22path9\
04\x22\x0a \
d=\x22m 25.4,271.6\
0002 -25.4000004\
0000004,25.4 v -\
50.8 z\x22\x0a \
style=\x22opac\
ity:1;fill:none;\
fill-opacity:0.4\
9382719;stroke:#\
ffffff00;stroke-\
width:0.07000433\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22 />\x0a \
<path\x0a \
inks\
cape:transform-c\
enter-x=\x223.175\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07000433;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22\x0a \
d=\x22m 25.399999,\
271.60002 25.4,-\
25.4 v 50.8 z\x22\x0a \
id=\x22\
path906\x22\x0a \
inkscape:c\
onnector-curvatu\
re=\x220\x22\x0a \
sodipodi:nod\
etypes=\x22cccc\x22 />\
\x0a <rect\
\x0a ry\
=\x225.0514922\x22\x0a \
y=\x22256\
.39301\x22\x0a \
x=\x222.566313\
5\x22\x0a \
height=\x2230.44047\
9\x22\x0a \
width=\x2245.693634\
\x22\x0a i\
d=\x22rect837\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0657\
438;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1;pa\
int-order:stroke\
fill markers\x22 /\
>\x0a <rec\
t\x0a s\
tyle=\x22opacity:1;\
fill:none;fill-o\
pacity:0.4938271\
9;stroke:#ffffff\
00;stroke-width:\
0.0657438;stroke\
-linecap:round;s\
troke-linejoin:r\
ound;stroke-mite\
rlimit:4;stroke-\
dasharray:none;s\
troke-dashoffset\
:0;stroke-opacit\
y:1;paint-order:\
stroke fill mark\
ers\x22\x0a \
id=\x22rect831\x22\x0a \
widt\
h=\x2245.693588\x22\x0a \
heigh\
t=\x2230.44051\x22\x0a \
x=\x22248\
.76645\x22\x0a \
y=\x22-40.6333\
85\x22\x0a \
ry=\x225.051497\x22\x0a \
tran\
sform=\x22rotate(90\
)\x22 />\x0a </\
g>\x0a </g>\x0a \
</g>\x0a <path\
\x0a style=\x22o\
pacity:1;fill:#f\
fc107;fill-opaci\
ty:1;stroke:none\
;stroke-width:0.\
38596651;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-opacit\
y:1\x22\x0a d=\x22m\
50.206421,401.6\
7683 c 110.21720\
9,0.71279 55.108\
609,0.3564 0,0 z\
\x22\x0a id=\x22rec\
t997\x22\x0a ink\
scape:connector-\
curvature=\x220\x22 />\
\x0a </g>\x0a</svg>\x0a\
\x00\x00,\xc0\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22too\
lbar-handle-hori\
zontal.svg\x22\x0a i\
nkscape:export-f\
ilename=\x22/home/y\
eison/Developmen\
t/piton/art/icon\
_lite.png\x22\x0a in\
kscape:export-xd\
pi=\x2296\x22\x0a inksc\
ape:export-ydpi=\
\x2296\x22>\x0a <defs\x0a \
id=\x22defs2\x22 />\
\x0a <sodipodi:nam\
edview\x0a id=\x22\
base\x22\x0a pagec\
olor=\x22#ffffff\x22\x0a \
bordercolor=\
\x22#666666\x22\x0a b\
orderopacity=\x221.\
0\x22\x0a inkscape\
:pageopacity=\x220.\
0\x22\x0a inkscape\
:pageshadow=\x222\x22\x0a\
inkscape:zo\
om=\x2224.802598\x22\x0a \
inkscape:cx=\
\x223.483525\x22\x0a \
inkscape:cy=\x2210.\
683358\x22\x0a ink\
scape:document-u\
nits=\x22px\x22\x0a i\
nkscape:current-\
layer=\x22g839\x22\x0a \
showgrid=\x22true\
\x22\x0a inkscape:\
window-width=\x2219\
20\x22\x0a inkscap\
e:window-height=\
\x221004\x22\x0a inks\
cape:window-x=\x220\
\x22\x0a inkscape:\
window-y=\x220\x22\x0a \
inkscape:windo\
w-maximized=\x221\x22\x0a\
inkscape:sh\
owpageshadow=\x22fa\
lse\x22\x0a units=\
\x22px\x22\x0a inksca\
pe:pagecheckerbo\
ard=\x22false\x22\x0a \
showguides=\x22tru\
e\x22\x0a inkscape\
:snap-bbox=\x22true\
\x22\x0a inkscape:\
bbox-paths=\x22true\
\x22\x0a inkscape:\
bbox-nodes=\x22true\
\x22\x0a inkscape:\
snap-bbox-edge-m\
idpoints=\x22true\x22\x0a\
inkscape:sn\
ap-bbox-midpoint\
s=\x22true\x22\x0a in\
kscape:snap-node\
s=\x22true\x22\x0a in\
kscape:object-pa\
ths=\x22true\x22\x0a \
inkscape:snap-in\
tersection-paths\
=\x22true\x22\x0a ink\
scape:snap-smoot\
h-nodes=\x22true\x22\x0a \
inkscape:sna\
p-midpoints=\x22tru\
e\x22\x0a inkscape\
:snap-global=\x22tr\
ue\x22\x0a fit-mar\
gin-top=\x220\x22\x0a \
fit-margin-left\
=\x220\x22\x0a fit-ma\
rgin-right=\x220\x22\x0a \
fit-margin-b\
ottom=\x220\x22\x0a i\
nkscape:guide-bb\
ox=\x22true\x22>\x0a <\
inkscape:grid\x0a \
type=\x22xygri\
d\x22\x0a id=\x22gr\
id974\x22\x0a em\
pspacing=\x228\x22\x0a \
spacingx=\x220.\
26458332\x22\x0a \
spacingy=\x220.264\
58332\x22\x0a do\
tted=\x22false\x22\x0a \
visible=\x22tru\
e\x22\x0a enable\
d=\x22true\x22\x0a \
snapvisiblegridl\
inesonly=\x22true\x22\x0a\
originx=\x22\
0\x22\x0a origin\
y=\x220\x22 />\x0a </sod\
ipodi:namedview>\
\x0a <metadata\x0a \
id=\x22metadata5\x22\
>\x0a <rdf:RDF>\x0a\
<cc:Work\x0a \
rdf:abou\
t=\x22\x22>\x0a <d\
c:format>image/s\
vg+xml</dc:forma\
t>\x0a <dc:t\
ype\x0a r\
df:resource=\x22htt\
p://purl.org/dc/\
dcmitype/StillIm\
age\x22 />\x0a \
<dc:title />\x0a \
</cc:Work>\x0a \
</rdf:RDF>\x0a <\
/metadata>\x0a <g\x0a\
inkscape:la\
bel=\x22Layer 1\x22\x0a \
inkscape:grou\
pmode=\x22layer\x22\x0a \
id=\x22layer1\x22\x0a \
transform=\x22t\
ranslate(0,-291.\
70835)\x22>\x0a <g\x0a\
id=\x22g847\x22\
\x0a transfor\
m=\x22matrix(0.0520\
7439,0,0,0.05207\
453,-0.90125164,\
282.41203)\x22>\x0a \
<g\x0a i\
d=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transform=\
\x22matrix(1.998621\
9,0,0,1.9986185,\
17.324484,-313.5\
2314)\x22>\x0a \
<path\x0a \
inkscape:tr\
ansform-center-y\
=\x223.175\x22\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0700043\
3;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22\x0a \
d=\x22M 2\
5.399999,271.600\
02 -8.0000008e-7\
,246.20002 H 50.\
799999 Z\x22\x0a \
id=\x22path8\
83\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22\x0a s\
odipodi:nodetype\
s=\x22cccc\x22 />\x0a \
<path\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22\x0a \
inkscape:connect\
or-curvature=\x220\x22\
\x0a id\
=\x22path880\x22\x0a \
d=\x22m 25.\
399999,271.60002\
25.399999,25.4 \
H 0 Z\x22\x0a \
inkscape:tra\
nsform-center-y=\
\x22-3.1749995\x22\x0a \
style=\
\x22opacity:1;fill:\
none;fill-opacit\
y:0.49382719;str\
oke:#ffffff00;st\
roke-width:0.070\
00433;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
/>\x0a <r\
ect\x0a \
ry=\x225.0534658\x22\x0a\
y=\x22\
253.84885\x22\x0a \
x=\x227.648\
7389\x22\x0a \
height=\x2235.52\
8759\x22\x0a \
width=\x2235.528\
786\x22\x0a \
id=\x22rect870\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
6184419;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<circle\x0a \
r=\x2225.39682\
8\x22\x0a \
cy=\x22271.60001\x22\x0a \
cx=\x22\
25.4\x22\x0a \
id=\x22path872\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07635882;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<circle\x0a \
transform=\
\x22rotate(-45)\x22\x0a \
cx=\x22-\
174.08969\x22\x0a \
cy=\x22210.\
01071\x22\x0a \
r=\x2212.656071\
\x22\x0a i\
d=\x22path876\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0739\
9406;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22 \
/>\x0a <pa\
th\x0a \
inkscape:transfo\
rm-center-x=\x22-3.\
1749999\x22\x0a \
sodipodi:n\
odetypes=\x22cccc\x22\x0a\
ink\
scape:connector-\
curvature=\x220\x22\x0a \
id=\x22p\
ath904\x22\x0a \
d=\x22m 25.4,2\
71.60002 -25.400\
00040000004,25.4\
v -50.8 z\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0700\
0433;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22 \
/>\x0a <pa\
th\x0a \
inkscape:transfo\
rm-center-x=\x223.1\
75\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.07000433;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1;paint-ord\
er:stroke fill m\
arkers\x22\x0a \
d=\x22m 25.399\
999,271.60002 25\
.4,-25.4 v 50.8 \
z\x22\x0a \
id=\x22path906\x22\x0a \
inksca\
pe:connector-cur\
vature=\x220\x22\x0a \
sodipodi\
:nodetypes=\x22cccc\
\x22 />\x0a <\
rect\x0a \
ry=\x225.0514922\x22\
\x0a y=\
\x22256.39301\x22\x0a \
x=\x222.56\
63135\x22\x0a \
height=\x2230.4\
40479\x22\x0a \
width=\x2245.69\
3634\x22\x0a \
id=\x22rect837\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
0657438;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<rect\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.0657438;st\
roke-linecap:rou\
nd;stroke-linejo\
in:round;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-dashof\
fset:0;stroke-op\
acity:1;paint-or\
der:stroke fill \
markers\x22\x0a \
id=\x22rect83\
1\x22\x0a \
width=\x2245.693588\
\x22\x0a h\
eight=\x2230.44051\x22\
\x0a x=\
\x22248.76645\x22\x0a \
y=\x22-40.\
633385\x22\x0a \
ry=\x225.05149\
7\x22\x0a \
transform=\x22rotat\
e(90)\x22 />\x0a \
</g>\x0a </g\
>\x0a </g>\x0a <\
path\x0a styl\
e=\x22opacity:1;fil\
l:#ffc107;fill-o\
pacity:1;stroke:\
none;stroke-widt\
h:0.38596651;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-op\
acity:1\x22\x0a \
d=\x22m 50.206421,4\
01.67683 c 110.2\
17209,0.71279 55\
.108609,0.3564 0\
,0 z\x22\x0a id=\
\x22rect997\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22 />\x0a <g\x0a \
id=\x22g839\x22>\x0a \
<rect\x0a \
style=\x22opacit\
y:1;fill:#ff0000\
;fill-opacity:1;\
stroke:none;stro\
ke-width:0.52916\
664;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1\x22\x0a \
id=\x22rect\
830\x22\x0a wi\
dth=\x220.52916664\x22\
\x0a height\
=\x220.52916664\x22\x0a \
x=\x221.8520\
832\x22\x0a y=\
\x22291.70834\x22 />\x0a \
<rect\x0a \
y=\x22293.29584\
\x22\x0a x=\x221.\
8520832\x22\x0a \
height=\x220.5291\
6664\x22\x0a w\
idth=\x220.52916664\
\x22\x0a id=\x22r\
ect832\x22\x0a \
style=\x22opacity:\
1;fill:#ff0000;f\
ill-opacity:1;st\
roke:none;stroke\
-width:0.5291666\
4;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1\x22 />\x0a\
<rect\x0a \
style=\x22opac\
ity:1;fill:#ff00\
00;fill-opacity:\
1;stroke:none;st\
roke-width:0.529\
16664;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1\x22\
\x0a id=\x22re\
ct834\x22\x0a \
width=\x220.5291666\
4\x22\x0a heig\
ht=\x220.52916664\x22\x0a\
x=\x221.85\
20832\x22\x0a \
y=\x22294.88336\x22 />\
\x0a <rect\x0a \
y=\x22296.470\
86\x22\x0a x=\x22\
1.8520832\x22\x0a \
height=\x220.52\
916664\x22\x0a \
width=\x220.529166\
64\x22\x0a id=\
\x22rect836\x22\x0a \
style=\x22opacit\
y:1;fill:#ff0000\
;fill-opacity:1;\
stroke:none;stro\
ke-width:0.52916\
664;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1\x22 /\
>\x0a <rect\x0a \
style=\x22op\
acity:1;fill:#ff\
0000;fill-opacit\
y:1;stroke:none;\
stroke-width:0.5\
2916664;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1\x22\x0a id=\x22\
rect838\x22\x0a \
width=\x220.52916\
664\x22\x0a he\
ight=\x220.52916664\
\x22\x0a x=\x222.\
3812499\x22\x0a \
y=\x22292.50208\x22 \
/>\x0a <rect\x0a \
y=\x22294.0\
896\x22\x0a x=\
\x222.3812499\x22\x0a \
height=\x220.5\
2916664\x22\x0a \
width=\x220.52916\
664\x22\x0a id\
=\x22rect840\x22\x0a \
style=\x22opaci\
ty:1;fill:#ff000\
0;fill-opacity:1\
;stroke:none;str\
oke-width:0.5291\
6664;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1\x22 \
/>\x0a <rect\x0a \
style=\x22o\
pacity:1;fill:#f\
f0000;fill-opaci\
ty:1;stroke:none\
;stroke-width:0.\
52916664;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1\x22\x0a id=\
\x22rect842\x22\x0a \
width=\x220.5291\
6664\x22\x0a h\
eight=\x220.5291666\
4\x22\x0a x=\x222\
.3812499\x22\x0a \
y=\x22295.67709\x22\
/>\x0a <rect\x0a\
y=\x22291.\
70834\x22\x0a \
x=\x222.9104166\x22\x0a \
height=\x220\
.52916664\x22\x0a \
width=\x220.529\
16664\x22\x0a \
id=\x22rect844\x22\x0a \
style=\x22opa\
city:1;fill:#ff0\
000;fill-opacity\
:1;stroke:none;s\
troke-width:0.52\
916664;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
\x22 />\x0a <rect\
\x0a style=\
\x22opacity:1;fill:\
#ff0000;fill-opa\
city:1;stroke:no\
ne;stroke-width:\
0.52916664;strok\
e-linecap:round;\
stroke-linejoin:\
round;stroke-mit\
erlimit:4;stroke\
-dasharray:none;\
stroke-dashoffse\
t:0;stroke-opaci\
ty:1\x22\x0a i\
d=\x22rect846\x22\x0a \
width=\x220.52\
916664\x22\x0a \
height=\x220.52916\
664\x22\x0a x=\
\x222.9104166\x22\x0a \
y=\x22293.2958\
4\x22 />\x0a <rec\
t\x0a y=\x2229\
4.88336\x22\x0a \
x=\x222.9104166\x22\x0a\
height=\
\x220.52916664\x22\x0a \
width=\x220.5\
2916664\x22\x0a \
id=\x22rect848\x22\x0a \
style=\x22o\
pacity:1;fill:#f\
f0000;fill-opaci\
ty:1;stroke:none\
;stroke-width:0.\
52916664;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1\x22 />\x0a <re\
ct\x0a styl\
e=\x22opacity:1;fil\
l:#ff0000;fill-o\
pacity:1;stroke:\
none;stroke-widt\
h:0.52916664;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1\x22\x0a \
id=\x22rect850\x22\x0a \
width=\x220.\
52916664\x22\x0a \
height=\x220.529\
16664\x22\x0a \
x=\x222.9104166\x22\x0a \
y=\x22296.47\
086\x22 />\x0a </g>\
\x0a </g>\x0a</svg>\x0a\
\x00\x00!\x8c\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22rad\
iobutton_checked\
.svg\x22\x0a inkscap\
e:export-filenam\
e=\x22/home/yeison/\
Development/pito\
n/art/icon_lite.\
png\x22\x0a inkscape\
:export-xdpi=\x2296\
\x22\x0a inkscape:ex\
port-ydpi=\x2296\x22>\x0a\
<defs\x0a id=\
\x22defs2\x22 />\x0a <so\
dipodi:namedview\
\x0a id=\x22base\x22\x0a\
pagecolor=\x22\
#ffffff\x22\x0a bo\
rdercolor=\x22#6666\
66\x22\x0a bordero\
pacity=\x221.0\x22\x0a \
inkscape:pageo\
pacity=\x220.0\x22\x0a \
inkscape:pages\
hadow=\x222\x22\x0a i\
nkscape:zoom=\x2239\
.85\x22\x0a inksca\
pe:cx=\x228.1932246\
\x22\x0a inkscape:\
cy=\x2210\x22\x0a ink\
scape:document-u\
nits=\x22px\x22\x0a i\
nkscape:current-\
layer=\x22layer1\x22\x0a \
showgrid=\x22tr\
ue\x22\x0a inkscap\
e:window-width=\x22\
1920\x22\x0a inksc\
ape:window-heigh\
t=\x221004\x22\x0a in\
kscape:window-x=\
\x220\x22\x0a inkscap\
e:window-y=\x220\x22\x0a \
inkscape:win\
dow-maximized=\x221\
\x22\x0a inkscape:\
showpageshadow=\x22\
false\x22\x0a unit\
s=\x22px\x22\x0a inks\
cape:pagechecker\
board=\x22false\x22\x0a \
showguides=\x22t\
rue\x22\x0a inksca\
pe:snap-bbox=\x22tr\
ue\x22\x0a inkscap\
e:bbox-paths=\x22tr\
ue\x22\x0a inkscap\
e:bbox-nodes=\x22tr\
ue\x22\x0a inkscap\
e:snap-bbox-edge\
-midpoints=\x22true\
\x22\x0a inkscape:\
snap-bbox-midpoi\
nts=\x22true\x22\x0a \
inkscape:snap-no\
des=\x22true\x22\x0a \
inkscape:object-\
paths=\x22true\x22\x0a \
inkscape:snap-\
intersection-pat\
hs=\x22true\x22\x0a i\
nkscape:snap-smo\
oth-nodes=\x22true\x22\
\x0a inkscape:s\
nap-midpoints=\x22t\
rue\x22\x0a inksca\
pe:snap-global=\x22\
true\x22\x0a fit-m\
argin-top=\x220\x22\x0a \
fit-margin-le\
ft=\x220\x22\x0a fit-\
margin-right=\x220\x22\
\x0a fit-margin\
-bottom=\x220\x22\x0a \
inkscape:guide-\
bbox=\x22true\x22>\x0a \
<inkscape:grid\x0a\
type=\x22xyg\
rid\x22\x0a id=\x22\
grid974\x22\x0a \
empspacing=\x228\x22\x0a \
spacingx=\x22\
0.26458332\x22\x0a \
spacingy=\x220.2\
6458332\x22\x0a \
dotted=\x22false\x22\x0a \
visible=\x22t\
rue\x22\x0a enab\
led=\x22true\x22\x0a \
snapvisiblegri\
dlinesonly=\x22true\
\x22\x0a originx\
=\x220\x22\x0a orig\
iny=\x220\x22 />\x0a </s\
odipodi:namedvie\
w>\x0a <metadata\x0a \
id=\x22metadata\
5\x22>\x0a <rdf:RDF\
>\x0a <cc:Work\
\x0a rdf:ab\
out=\x22\x22>\x0a \
<dc:format>image\
/svg+xml</dc:for\
mat>\x0a <dc\
:type\x0a \
rdf:resource=\x22h\
ttp://purl.org/d\
c/dcmitype/Still\
Image\x22 />\x0a \
<dc:title></dc\
:title>\x0a </\
cc:Work>\x0a </r\
df:RDF>\x0a </meta\
data>\x0a <g\x0a \
inkscape:label=\x22\
Layer 1\x22\x0a in\
kscape:groupmode\
=\x22layer\x22\x0a id\
=\x22layer1\x22\x0a t\
ransform=\x22transl\
ate(0,-291.70835\
)\x22>\x0a <g\x0a \
id=\x22g847\x22\x0a \
transform=\x22ma\
trix(0.05207439,\
0,0,0.05207453,-\
0.90125164,282.4\
1203)\x22>\x0a <g\
\x0a id=\x22g8\
51\x22>\x0a <g\x0a\
id=\x22g\
1059\x22\x0a \
transform=\x22matr\
ix(1.9986219,0,0\
,1.9986185,17.32\
4484,-313.52314)\
\x22>\x0a <pa\
th\x0a \
inkscape:transfo\
rm-center-y=\x223.1\
75\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.07000433;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1;paint-ord\
er:stroke fill m\
arkers\x22\x0a \
d=\x22M 25.399\
999,271.60002 -8\
.0000008e-7,246.\
20002 H 50.79999\
9 Z\x22\x0a \
id=\x22path883\x22\x0a \
inks\
cape:connector-c\
urvature=\x220\x22\x0a \
sodipo\
di:nodetypes=\x22cc\
cc\x22 />\x0a \
<path\x0a \
sodipodi:nod\
etypes=\x22cccc\x22\x0a \
inksc\
ape:connector-cu\
rvature=\x220\x22\x0a \
id=\x22pat\
h880\x22\x0a \
d=\x22m 25.39999\
9,271.60002 25.3\
99999,25.4 H 0 Z\
\x22\x0a i\
nkscape:transfor\
m-center-y=\x22-3.1\
749995\x22\x0a \
style=\x22opac\
ity:1;fill:none;\
fill-opacity:0.4\
9382719;stroke:#\
ffffff00;stroke-\
width:0.07000433\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22 />\x0a \
<rect\x0a \
ry=\x22\
5.0534658\x22\x0a \
y=\x22253.8\
4885\x22\x0a \
x=\x227.6487389\x22\
\x0a he\
ight=\x2235.528759\x22\
\x0a wi\
dth=\x2235.528786\x22\x0a\
id=\
\x22rect870\x22\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.061844\
19;stroke-lineca\
p:round;stroke-l\
inejoin:round;st\
roke-miterlimit:\
4;stroke-dasharr\
ay:none;stroke-d\
ashoffset:0;stro\
ke-opacity:1;pai\
nt-order:stroke \
fill markers\x22 />\
\x0a <circ\
le\x0a \
r=\x2225.396828\x22\x0a \
cy=\x222\
71.60001\x22\x0a \
cx=\x2225.4\x22\
\x0a id\
=\x22path872\x22\x0a \
style=\x22o\
pacity:1;fill:no\
ne;fill-opacity:\
0.49382719;strok\
e:#ffffff00;stro\
ke-width:0.07635\
882;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1;pa\
int-order:stroke\
fill markers\x22 /\
>\x0a <cir\
cle\x0a \
transform=\x22rota\
te(-45)\x22\x0a \
cx=\x22-174.0\
8969\x22\x0a \
cy=\x22210.01071\
\x22\x0a r\
=\x2212.656071\x22\x0a \
id=\x22pa\
th876\x22\x0a \
style=\x22opaci\
ty:1;fill:none;f\
ill-opacity:0.49\
382719;stroke:#f\
fffff00;stroke-w\
idth:0.07399406;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1;paint-\
order:stroke fil\
l markers\x22 />\x0a \
<path\x0a \
inksc\
ape:transform-ce\
nter-x=\x22-3.17499\
99\x22\x0a \
sodipodi:nodety\
pes=\x22cccc\x22\x0a \
inkscape\
:connector-curva\
ture=\x220\x22\x0a \
id=\x22path90\
4\x22\x0a \
d=\x22m 25.4,271.60\
002 -25.40000040\
000004,25.4 v -5\
0.8 z\x22\x0a \
style=\x22opaci\
ty:1;fill:none;f\
ill-opacity:0.49\
382719;stroke:#f\
fffff00;stroke-w\
idth:0.07000433;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1;paint-\
order:stroke fil\
l markers\x22 />\x0a \
<path\x0a \
inksc\
ape:transform-ce\
nter-x=\x223.175\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
7000433;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22\x0a \
d=\x22m 25.399999,2\
71.60002 25.4,-2\
5.4 v 50.8 z\x22\x0a \
id=\x22p\
ath906\x22\x0a \
inkscape:co\
nnector-curvatur\
e=\x220\x22\x0a \
sodipodi:node\
types=\x22cccc\x22 />\x0a\
<rect\x0a\
ry=\
\x225.0514922\x22\x0a \
y=\x22256.\
39301\x22\x0a \
x=\x222.5663135\
\x22\x0a h\
eight=\x2230.440479\
\x22\x0a w\
idth=\x2245.693634\x22\
\x0a id\
=\x22rect837\x22\x0a \
style=\x22o\
pacity:1;fill:no\
ne;fill-opacity:\
0.49382719;strok\
e:#ffffff00;stro\
ke-width:0.06574\
38;stroke-lineca\
p:round;stroke-l\
inejoin:round;st\
roke-miterlimit:\
4;stroke-dasharr\
ay:none;stroke-d\
ashoffset:0;stro\
ke-opacity:1;pai\
nt-order:stroke \
fill markers\x22 />\
\x0a <rect\
\x0a st\
yle=\x22opacity:1;f\
ill:none;fill-op\
acity:0.49382719\
;stroke:#ffffff0\
0;stroke-width:0\
.0657438;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22\x0a \
id=\x22rect831\x22\x0a \
width\
=\x2245.693588\x22\x0a \
height\
=\x2230.44051\x22\x0a \
x=\x22248.\
76645\x22\x0a \
y=\x22-40.63338\
5\x22\x0a \
ry=\x225.051497\x22\x0a \
trans\
form=\x22rotate(90)\
\x22 />\x0a </g\
>\x0a </g>\x0a \
</g>\x0a <path\x0a\
style=\x22op\
acity:1;fill:#ff\
c107;fill-opacit\
y:1;stroke:none;\
stroke-width:0.3\
8596651;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-opacity\
:1\x22\x0a d=\x22m \
50.206421,401.67\
683 c 110.217209\
,0.71279 55.1086\
09,0.3564 0,0 z\x22\
\x0a id=\x22rect\
997\x22\x0a inks\
cape:connector-c\
urvature=\x220\x22 />\x0a\
<path\x0a \
style=\x22opacity:\
1;fill:#ff0000;f\
ill-opacity:1;st\
roke:none;stroke\
-width:2.1008215\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22\x0a \
d=\x22M 10,3.013\
6719 A 6.9930773\
,6.9930773 0 0 0\
3.0058594,10.00\
5859 6.9930773,6\
.9930773 0 0 0 1\
0,17 6.9930773,6\
.9930773 0 0 0 1\
6.992188,10.0058\
59 6.9930773,6.9\
930773 0 0 0 10,\
3.0136719 Z M 10\
,4 a 5.9999993,5\
.9999993 0 0 1 6\
,6 5.9999993,5.9\
999993 0 0 1 -6,\
6 5.9999993,5.99\
99993 0 0 1 -6,-\
6 5.9999993,5.99\
99993 0 0 1 6,-6\
z\x22\x0a trans\
form=\x22matrix(0.2\
6458332,0,0,0.26\
458332,0,291.708\
35)\x22\x0a id=\x22\
path826\x22\x0a \
inkscape:connect\
or-curvature=\x220\x22\
/>\x0a <circle\x0a\
style=\x22op\
acity:1;fill:#ff\
0000;fill-opacit\
y:1;stroke:none;\
stroke-width:0.3\
1793803;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22\x0a id=\x22ci\
rcle830\x22\x0a \
cx=\x222.6458333\x22\x0a \
cy=\x22294.35\
419\x22\x0a r=\x221\
.0583313\x22 />\x0a <\
/g>\x0a</svg>\x0a\
\x00\x00\x1e\xba\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22rig\
htarrow.svg\x22\x0a \
inkscape:export-\
filename=\x22/home/\
yeison/Developme\
nt/piton/art/ico\
n_lite.png\x22\x0a i\
nkscape:export-x\
dpi=\x2296\x22\x0a inks\
cape:export-ydpi\
=\x2296\x22>\x0a <defs\x0a \
id=\x22defs2\x22 /\
>\x0a <sodipodi:na\
medview\x0a id=\
\x22base\x22\x0a page\
color=\x22#ffffff\x22\x0a\
bordercolor\
=\x22#666666\x22\x0a \
borderopacity=\x221\
.0\x22\x0a inkscap\
e:pageopacity=\x220\
.0\x22\x0a inkscap\
e:pageshadow=\x222\x22\
\x0a inkscape:z\
oom=\x2228.704913\x22\x0a\
inkscape:cx\
=\x224.6862968\x22\x0a \
inkscape:cy=\x225\
.0026685\x22\x0a i\
nkscape:document\
-units=\x22px\x22\x0a \
inkscape:curren\
t-layer=\x22layer1\x22\
\x0a showgrid=\x22\
true\x22\x0a inksc\
ape:window-width\
=\x221920\x22\x0a ink\
scape:window-hei\
ght=\x221004\x22\x0a \
inkscape:window-\
x=\x220\x22\x0a inksc\
ape:window-y=\x220\x22\
\x0a inkscape:w\
indow-maximized=\
\x221\x22\x0a inkscap\
e:showpageshadow\
=\x22false\x22\x0a un\
its=\x22px\x22\x0a in\
kscape:pagecheck\
erboard=\x22false\x22\x0a\
showguides=\
\x22true\x22\x0a inks\
cape:snap-bbox=\x22\
true\x22\x0a inksc\
ape:bbox-paths=\x22\
true\x22\x0a inksc\
ape:bbox-nodes=\x22\
true\x22\x0a inksc\
ape:snap-bbox-ed\
ge-midpoints=\x22tr\
ue\x22\x0a inkscap\
e:snap-bbox-midp\
oints=\x22true\x22\x0a \
inkscape:snap-\
nodes=\x22true\x22\x0a \
inkscape:objec\
t-paths=\x22true\x22\x0a \
inkscape:sna\
p-intersection-p\
aths=\x22true\x22\x0a \
inkscape:snap-s\
mooth-nodes=\x22tru\
e\x22\x0a inkscape\
:snap-midpoints=\
\x22true\x22\x0a inks\
cape:snap-global\
=\x22true\x22\x0a fit\
-margin-top=\x220\x22\x0a\
fit-margin-\
left=\x220\x22\x0a fi\
t-margin-right=\x22\
0\x22\x0a fit-marg\
in-bottom=\x220\x22\x0a \
inkscape:guid\
e-bbox=\x22true\x22>\x0a \
<inkscape:gri\
d\x0a type=\x22x\
ygrid\x22\x0a id\
=\x22grid974\x22\x0a \
empspacing=\x228\x22\
\x0a spacingx\
=\x220.26458332\x22\x0a \
spacingy=\x220\
.26458332\x22\x0a \
dotted=\x22false\x22\
\x0a visible=\
\x22true\x22\x0a en\
abled=\x22true\x22\x0a \
snapvisibleg\
ridlinesonly=\x22tr\
ue\x22\x0a origi\
nx=\x220\x22\x0a or\
iginy=\x220\x22 />\x0a <\
/sodipodi:namedv\
iew>\x0a <metadata\
\x0a id=\x22metada\
ta5\x22>\x0a <rdf:R\
DF>\x0a <cc:Wo\
rk\x0a rdf:\
about=\x22\x22>\x0a \
<dc:format>ima\
ge/svg+xml</dc:f\
ormat>\x0a <\
dc:type\x0a \
rdf:resource=\
\x22http://purl.org\
/dc/dcmitype/Sti\
llImage\x22 />\x0a \
<dc:title />\
\x0a </cc:Work\
>\x0a </rdf:RDF>\
\x0a </metadata>\x0a \
<g\x0a inkscap\
e:label=\x22Layer 1\
\x22\x0a inkscape:\
groupmode=\x22layer\
\x22\x0a id=\x22layer\
1\x22\x0a transfor\
m=\x22translate(0,-\
291.70835)\x22>\x0a \
<g\x0a id=\x22g\
847\x22\x0a tran\
sform=\x22matrix(0.\
05207439,0,0,0.0\
5207453,-0.90125\
164,282.41203)\x22>\
\x0a <g\x0a \
id=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transf\
orm=\x22matrix(1.99\
86219,0,0,1.9986\
185,17.324484,-3\
13.52314)\x22>\x0a \
<path\x0a \
inkscap\
e:transform-cent\
er-y=\x223.175\x22\x0a \
style=\
\x22opacity:1;fill:\
none;fill-opacit\
y:0.49382719;str\
oke:#ffffff00;st\
roke-width:0.070\
00433;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
\x0a d=\
\x22M 25.399999,271\
.60002 -8.000000\
8e-7,246.20002 H\
50.799999 Z\x22\x0a \
id=\x22p\
ath883\x22\x0a \
inkscape:co\
nnector-curvatur\
e=\x220\x22\x0a \
sodipodi:node\
types=\x22cccc\x22 />\x0a\
<path\x0a\
sod\
ipodi:nodetypes=\
\x22cccc\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a \
id=\x22path880\x22\x0a \
d=\x22m\
25.399999,271.6\
0002 25.399999,2\
5.4 H 0 Z\x22\x0a \
inkscape\
:transform-cente\
r-y=\x22-3.1749995\x22\
\x0a st\
yle=\x22opacity:1;f\
ill:none;fill-op\
acity:0.49382719\
;stroke:#ffffff0\
0;stroke-width:0\
.07000433;stroke\
-linecap:round;s\
troke-linejoin:r\
ound;stroke-mite\
rlimit:4;stroke-\
dasharray:none;s\
troke-dashoffset\
:0;stroke-opacit\
y:1;paint-order:\
stroke fill mark\
ers\x22 />\x0a \
<rect\x0a \
ry=\x225.05346\
58\x22\x0a \
y=\x22253.84885\x22\x0a \
x=\x227\
.6487389\x22\x0a \
height=\x223\
5.528759\x22\x0a \
width=\x2235\
.528786\x22\x0a \
id=\x22rect87\
0\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.06184419;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<circle\x0a \
r=\x2225.3\
96828\x22\x0a \
cy=\x22271.6000\
1\x22\x0a \
cx=\x2225.4\x22\x0a \
id=\x22path8\
72\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.07635882;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1;paint-ord\
er:stroke fill m\
arkers\x22 />\x0a \
<circle\x0a \
transf\
orm=\x22rotate(-45)\
\x22\x0a c\
x=\x22-174.08969\x22\x0a \
cy=\x22\
210.01071\x22\x0a \
r=\x2212.65\
6071\x22\x0a \
id=\x22path876\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07399406;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<path\x0a \
inkscape:tra\
nsform-center-x=\
\x22-3.1749999\x22\x0a \
sodipo\
di:nodetypes=\x22cc\
cc\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22\x0a i\
d=\x22path904\x22\x0a \
d=\x22m 25\
.4,271.60002 -25\
.40000040000004,\
25.4 v -50.8 z\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07000433;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<path\x0a \
inkscape:tra\
nsform-center-x=\
\x223.175\x22\x0a \
style=\x22opac\
ity:1;fill:none;\
fill-opacity:0.4\
9382719;stroke:#\
ffffff00;stroke-\
width:0.07000433\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22\x0a \
d=\x22m 25\
.399999,271.6000\
2 25.4,-25.4 v 5\
0.8 z\x22\x0a \
id=\x22path906\x22\
\x0a in\
kscape:connector\
-curvature=\x220\x22\x0a \
sodi\
podi:nodetypes=\x22\
cccc\x22 />\x0a \
<rect\x0a \
ry=\x225.0514\
922\x22\x0a \
y=\x22256.39301\x22\x0a\
x=\x22\
2.5663135\x22\x0a \
height=\x22\
30.440479\x22\x0a \
width=\x224\
5.693634\x22\x0a \
id=\x22rect8\
37\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.0657438;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<rect\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.065743\
8;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22\x0a \
id=\x22re\
ct831\x22\x0a \
width=\x2245.69\
3588\x22\x0a \
height=\x2230.44\
051\x22\x0a \
x=\x22248.76645\x22\x0a\
y=\x22\
-40.633385\x22\x0a \
ry=\x225.0\
51497\x22\x0a \
transform=\x22r\
otate(90)\x22 />\x0a \
</g>\x0a \
</g>\x0a </g>\x0a \
<path\x0a \
style=\x22opacity:1\
;fill:#ffc107;fi\
ll-opacity:1;str\
oke:none;stroke-\
width:0.38596651\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-opacity:1\x22\x0a \
d=\x22m 50.2064\
21,401.67683 c 1\
10.217209,0.7127\
9 55.108609,0.35\
64 0,0 z\x22\x0a \
id=\x22rect997\x22\x0a \
inkscape:co\
nnector-curvatur\
e=\x220\x22 />\x0a <pa\
th\x0a style=\
\x22fill:none;strok\
e:#ff0000;stroke\
-width:0.5291666\
4;stroke-linecap\
:butt;stroke-lin\
ejoin:bevel;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-opa\
city:1\x22\x0a d\
=\x22m 1.8205432,29\
5.40031 1.65058,\
-1.04613 -1.6505\
8,-1.04613\x22\x0a \
id=\x22path827\x22\x0a\
inkscape:\
connector-curvat\
ure=\x220\x22\x0a s\
odipodi:nodetype\
s=\x22ccc\x22 />\x0a </g\
>\x0a</svg>\x0a\
\x00\x00 Y\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22rad\
iobutton_uncheck\
ed.svg\x22\x0a inksc\
ape:export-filen\
ame=\x22/home/yeiso\
n/Development/pi\
ton/art/icon_lit\
e.png\x22\x0a inksca\
pe:export-xdpi=\x22\
96\x22\x0a inkscape:\
export-ydpi=\x2296\x22\
>\x0a <defs\x0a i\
d=\x22defs2\x22 />\x0a <\
sodipodi:namedvi\
ew\x0a id=\x22base\
\x22\x0a pagecolor\
=\x22#ffffff\x22\x0a \
bordercolor=\x22#66\
6666\x22\x0a borde\
ropacity=\x221.0\x22\x0a \
inkscape:pag\
eopacity=\x220.0\x22\x0a \
inkscape:pag\
eshadow=\x222\x22\x0a \
inkscape:zoom=\x22\
46.98187\x22\x0a i\
nkscape:cx=\x22-0.0\
91640624\x22\x0a i\
nkscape:cy=\x229.47\
69385\x22\x0a inks\
cape:document-un\
its=\x22px\x22\x0a in\
kscape:current-l\
ayer=\x22layer1\x22\x0a \
showgrid=\x22tru\
e\x22\x0a inkscape\
:window-width=\x221\
920\x22\x0a inksca\
pe:window-height\
=\x221004\x22\x0a ink\
scape:window-x=\x22\
0\x22\x0a inkscape\
:window-y=\x220\x22\x0a \
inkscape:wind\
ow-maximized=\x221\x22\
\x0a inkscape:s\
howpageshadow=\x22f\
alse\x22\x0a units\
=\x22px\x22\x0a inksc\
ape:pagecheckerb\
oard=\x22false\x22\x0a \
showguides=\x22tr\
ue\x22\x0a inkscap\
e:snap-bbox=\x22tru\
e\x22\x0a inkscape\
:bbox-paths=\x22tru\
e\x22\x0a inkscape\
:bbox-nodes=\x22tru\
e\x22\x0a inkscape\
:snap-bbox-edge-\
midpoints=\x22true\x22\
\x0a inkscape:s\
nap-bbox-midpoin\
ts=\x22true\x22\x0a i\
nkscape:snap-nod\
es=\x22true\x22\x0a i\
nkscape:object-p\
aths=\x22true\x22\x0a \
inkscape:snap-i\
ntersection-path\
s=\x22true\x22\x0a in\
kscape:snap-smoo\
th-nodes=\x22true\x22\x0a\
inkscape:sn\
ap-midpoints=\x22tr\
ue\x22\x0a inkscap\
e:snap-global=\x22t\
rue\x22\x0a fit-ma\
rgin-top=\x220\x22\x0a \
fit-margin-lef\
t=\x220\x22\x0a fit-m\
argin-right=\x220\x22\x0a\
fit-margin-\
bottom=\x220\x22\x0a \
inkscape:guide-b\
box=\x22true\x22>\x0a \
<inkscape:grid\x0a \
type=\x22xygr\
id\x22\x0a id=\x22g\
rid974\x22\x0a e\
mpspacing=\x228\x22\x0a \
spacingx=\x220\
.26458332\x22\x0a \
spacingy=\x220.26\
458332\x22\x0a d\
otted=\x22false\x22\x0a \
visible=\x22tr\
ue\x22\x0a enabl\
ed=\x22true\x22\x0a \
snapvisiblegrid\
linesonly=\x22true\x22\
\x0a originx=\
\x220\x22\x0a origi\
ny=\x220\x22 />\x0a </so\
dipodi:namedview\
>\x0a <metadata\x0a \
id=\x22metadata5\
\x22>\x0a <rdf:RDF>\
\x0a <cc:Work\x0a\
rdf:abo\
ut=\x22\x22>\x0a <\
dc:format>image/\
svg+xml</dc:form\
at>\x0a <dc:\
type\x0a \
rdf:resource=\x22ht\
tp://purl.org/dc\
/dcmitype/StillI\
mage\x22 />\x0a \
<dc:title></dc:\
title>\x0a </c\
c:Work>\x0a </rd\
f:RDF>\x0a </metad\
ata>\x0a <g\x0a i\
nkscape:label=\x22L\
ayer 1\x22\x0a ink\
scape:groupmode=\
\x22layer\x22\x0a id=\
\x22layer1\x22\x0a tr\
ansform=\x22transla\
te(0,-291.70835)\
\x22>\x0a <g\x0a \
id=\x22g847\x22\x0a \
transform=\x22mat\
rix(0.05207439,0\
,0,0.05207453,-0\
.90125164,282.41\
203)\x22>\x0a <g\x0a\
id=\x22g85\
1\x22>\x0a <g\x0a \
id=\x22g1\
059\x22\x0a \
transform=\x22matri\
x(1.9986219,0,0,\
1.9986185,17.324\
484,-313.52314)\x22\
>\x0a <pat\
h\x0a i\
nkscape:transfor\
m-center-y=\x223.17\
5\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.07000433;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22\x0a \
d=\x22M 25.3999\
99,271.60002 -8.\
0000008e-7,246.2\
0002 H 50.799999\
Z\x22\x0a \
id=\x22path883\x22\x0a \
inksc\
ape:connector-cu\
rvature=\x220\x22\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22 />\x0a \
<path\x0a \
sodipodi:node\
types=\x22cccc\x22\x0a \
inksca\
pe:connector-cur\
vature=\x220\x22\x0a \
id=\x22path\
880\x22\x0a \
d=\x22m 25.399999\
,271.60002 25.39\
9999,25.4 H 0 Z\x22\
\x0a in\
kscape:transform\
-center-y=\x22-3.17\
49995\x22\x0a \
style=\x22opaci\
ty:1;fill:none;f\
ill-opacity:0.49\
382719;stroke:#f\
fffff00;stroke-w\
idth:0.07000433;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1;paint-\
order:stroke fil\
l markers\x22 />\x0a \
<rect\x0a \
ry=\x225\
.0534658\x22\x0a \
y=\x22253.84\
885\x22\x0a \
x=\x227.6487389\x22\x0a\
hei\
ght=\x2235.528759\x22\x0a\
wid\
th=\x2235.528786\x22\x0a \
id=\x22\
rect870\x22\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0618441\
9;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22 />\x0a\
<circl\
e\x0a r\
=\x2225.396828\x22\x0a \
cy=\x2227\
1.60001\x22\x0a \
cx=\x2225.4\x22\x0a\
id=\
\x22path872\x22\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.076358\
82;stroke-lineca\
p:round;stroke-l\
inejoin:round;st\
roke-miterlimit:\
4;stroke-dasharr\
ay:none;stroke-d\
ashoffset:0;stro\
ke-opacity:1;pai\
nt-order:stroke \
fill markers\x22 />\
\x0a <circ\
le\x0a \
transform=\x22rotat\
e(-45)\x22\x0a \
cx=\x22-174.08\
969\x22\x0a \
cy=\x22210.01071\x22\
\x0a r=\
\x2212.656071\x22\x0a \
id=\x22pat\
h876\x22\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.07399406;s\
troke-linecap:ro\
und;stroke-linej\
oin:round;stroke\
-miterlimit:4;st\
roke-dasharray:n\
one;stroke-dasho\
ffset:0;stroke-o\
pacity:1;paint-o\
rder:stroke fill\
markers\x22 />\x0a \
<path\x0a \
inksca\
pe:transform-cen\
ter-x=\x22-3.174999\
9\x22\x0a \
sodipodi:nodetyp\
es=\x22cccc\x22\x0a \
inkscape:\
connector-curvat\
ure=\x220\x22\x0a \
id=\x22path904\
\x22\x0a d\
=\x22m 25.4,271.600\
02 -25.400000400\
00004,25.4 v -50\
.8 z\x22\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.07000433;s\
troke-linecap:ro\
und;stroke-linej\
oin:round;stroke\
-miterlimit:4;st\
roke-dasharray:n\
one;stroke-dasho\
ffset:0;stroke-o\
pacity:1;paint-o\
rder:stroke fill\
markers\x22 />\x0a \
<path\x0a \
inksca\
pe:transform-cen\
ter-x=\x223.175\x22\x0a \
style\
=\x22opacity:1;fill\
:none;fill-opaci\
ty:0.49382719;st\
roke:#ffffff00;s\
troke-width:0.07\
000433;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
;paint-order:str\
oke fill markers\
\x22\x0a d\
=\x22m 25.399999,27\
1.60002 25.4,-25\
.4 v 50.8 z\x22\x0a \
id=\x22pa\
th906\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a \
sodipodi:nodet\
ypes=\x22cccc\x22 />\x0a \
<rect\x0a \
ry=\x22\
5.0514922\x22\x0a \
y=\x22256.3\
9301\x22\x0a \
x=\x222.5663135\x22\
\x0a he\
ight=\x2230.440479\x22\
\x0a wi\
dth=\x2245.693634\x22\x0a\
id=\
\x22rect837\x22\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.065743\
8;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22 />\x0a\
<rect\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
0657438;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22\x0a \
id=\x22rect831\x22\x0a \
width=\
\x2245.693588\x22\x0a \
height=\
\x2230.44051\x22\x0a \
x=\x22248.7\
6645\x22\x0a \
y=\x22-40.633385\
\x22\x0a r\
y=\x225.051497\x22\x0a \
transf\
orm=\x22rotate(90)\x22\
/>\x0a </g>\
\x0a </g>\x0a \
</g>\x0a <path\x0a \
style=\x22opa\
city:1;fill:#ffc\
107;fill-opacity\
:1;stroke:none;s\
troke-width:0.38\
596651;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-opacity:\
1\x22\x0a d=\x22m 5\
0.206421,401.676\
83 c 110.217209,\
0.71279 55.10860\
9,0.3564 0,0 z\x22\x0a\
id=\x22rect9\
97\x22\x0a inksc\
ape:connector-cu\
rvature=\x220\x22 />\x0a \
<path\x0a \
style=\x22opacity:1\
;fill:#ff0000;fi\
ll-opacity:0.352\
94119;stroke:non\
e;stroke-width:2\
.1008215;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22\x0a d=\x22M \
10.097656,3.0078\
125 A 6.9930773,\
6.9930773 0 0 0 \
3.1054688,10 6.9\
930773,6.9930773\
0 0 0 10.097656\
,16.994141 6.993\
0773,6.9930773 0\
0 0 17.091797,1\
0 6.9930773,6.99\
30773 0 0 0 10.0\
97656,3.0078125 \
Z M 10,4 a 5.999\
9844,5.9999844 0\
0 1 6,6 5.99998\
44,5.9999844 0 0\
1 -6,6 5.999984\
4,5.9999844 0 0 \
1 -6,-6 5.999984\
4,5.9999844 0 0 \
1 6,-6 z\x22\x0a \
transform=\x22matr\
ix(0.26458332,0,\
0,0.26458332,0,2\
91.70835)\x22\x0a \
id=\x22path826\x22\x0a \
inkscape:c\
onnector-curvatu\
re=\x220\x22 />\x0a </g>\
\x0a</svg>\x0a\
\x00\x00\x1f\xb4\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22tab\
_close.svg\x22\x0a i\
nkscape:export-f\
ilename=\x22/home/y\
eison/Developmen\
t/piton/art/icon\
_lite.png\x22\x0a in\
kscape:export-xd\
pi=\x2296\x22\x0a inksc\
ape:export-ydpi=\
\x2296\x22>\x0a <defs\x0a \
id=\x22defs2\x22 />\
\x0a <sodipodi:nam\
edview\x0a id=\x22\
base\x22\x0a pagec\
olor=\x22#ffffff\x22\x0a \
bordercolor=\
\x22#666666\x22\x0a b\
orderopacity=\x221.\
0\x22\x0a inkscape\
:pageopacity=\x220.\
0\x22\x0a inkscape\
:pageshadow=\x222\x22\x0a\
inkscape:zo\
om=\x2220.297438\x22\x0a \
inkscape:cx=\
\x2210.202825\x22\x0a \
inkscape:cy=\x228.\
9235955\x22\x0a in\
kscape:document-\
units=\x22px\x22\x0a \
inkscape:current\
-layer=\x22layer1\x22\x0a\
showgrid=\x22t\
rue\x22\x0a inksca\
pe:window-width=\
\x221920\x22\x0a inks\
cape:window-heig\
ht=\x221015\x22\x0a i\
nkscape:window-x\
=\x220\x22\x0a inksca\
pe:window-y=\x220\x22\x0a\
inkscape:wi\
ndow-maximized=\x22\
1\x22\x0a inkscape\
:showpageshadow=\
\x22false\x22\x0a uni\
ts=\x22px\x22\x0a ink\
scape:pagechecke\
rboard=\x22false\x22\x0a \
showguides=\x22\
false\x22\x0a inks\
cape:snap-bbox=\x22\
true\x22\x0a inksc\
ape:bbox-paths=\x22\
true\x22\x0a inksc\
ape:bbox-nodes=\x22\
true\x22\x0a inksc\
ape:snap-bbox-ed\
ge-midpoints=\x22tr\
ue\x22\x0a inkscap\
e:snap-bbox-midp\
oints=\x22true\x22\x0a \
inkscape:snap-\
nodes=\x22true\x22\x0a \
inkscape:objec\
t-paths=\x22true\x22\x0a \
inkscape:sna\
p-intersection-p\
aths=\x22true\x22\x0a \
inkscape:snap-s\
mooth-nodes=\x22tru\
e\x22\x0a inkscape\
:snap-midpoints=\
\x22true\x22\x0a inks\
cape:snap-global\
=\x22true\x22\x0a fit\
-margin-top=\x220\x22\x0a\
fit-margin-\
left=\x220\x22\x0a fi\
t-margin-right=\x22\
0\x22\x0a fit-marg\
in-bottom=\x220\x22\x0a \
inkscape:guid\
e-bbox=\x22true\x22>\x0a \
<inkscape:gri\
d\x0a type=\x22x\
ygrid\x22\x0a id\
=\x22grid974\x22\x0a \
empspacing=\x228\x22\
\x0a spacingx\
=\x220.26458332\x22\x0a \
spacingy=\x220\
.26458332\x22\x0a \
dotted=\x22false\x22\
\x0a visible=\
\x22true\x22\x0a en\
abled=\x22true\x22\x0a \
snapvisibleg\
ridlinesonly=\x22tr\
ue\x22\x0a origi\
nx=\x220\x22\x0a or\
iginy=\x220\x22 />\x0a <\
/sodipodi:namedv\
iew>\x0a <metadata\
\x0a id=\x22metada\
ta5\x22>\x0a <rdf:R\
DF>\x0a <cc:Wo\
rk\x0a rdf:\
about=\x22\x22>\x0a \
<dc:format>ima\
ge/svg+xml</dc:f\
ormat>\x0a <\
dc:type\x0a \
rdf:resource=\
\x22http://purl.org\
/dc/dcmitype/Sti\
llImage\x22 />\x0a \
<dc:title />\
\x0a </cc:Work\
>\x0a </rdf:RDF>\
\x0a </metadata>\x0a \
<g\x0a inkscap\
e:label=\x22Layer 1\
\x22\x0a inkscape:\
groupmode=\x22layer\
\x22\x0a id=\x22layer\
1\x22\x0a transfor\
m=\x22translate(0,-\
291.70835)\x22>\x0a \
<g\x0a id=\x22g\
847\x22\x0a tran\
sform=\x22matrix(0.\
05207439,0,0,0.0\
5207453,-0.90125\
164,282.41203)\x22>\
\x0a <g\x0a \
id=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transf\
orm=\x22matrix(1.99\
86219,0,0,1.9986\
185,17.324484,-3\
13.52314)\x22>\x0a \
<path\x0a \
inkscap\
e:transform-cent\
er-y=\x223.175\x22\x0a \
style=\
\x22opacity:1;fill:\
none;fill-opacit\
y:0.49382719;str\
oke:#ffffff00;st\
roke-width:0.070\
00433;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
\x0a d=\
\x22M 25.399999,271\
.60002 -8.000000\
8e-7,246.20002 H\
50.799999 Z\x22\x0a \
id=\x22p\
ath883\x22\x0a \
inkscape:co\
nnector-curvatur\
e=\x220\x22\x0a \
sodipodi:node\
types=\x22cccc\x22 />\x0a\
<path\x0a\
sod\
ipodi:nodetypes=\
\x22cccc\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a \
id=\x22path880\x22\x0a \
d=\x22m\
25.399999,271.6\
0002 25.399999,2\
5.4 H 0 Z\x22\x0a \
inkscape\
:transform-cente\
r-y=\x22-3.1749995\x22\
\x0a st\
yle=\x22opacity:1;f\
ill:none;fill-op\
acity:0.49382719\
;stroke:#ffffff0\
0;stroke-width:0\
.07000433;stroke\
-linecap:round;s\
troke-linejoin:r\
ound;stroke-mite\
rlimit:4;stroke-\
dasharray:none;s\
troke-dashoffset\
:0;stroke-opacit\
y:1;paint-order:\
stroke fill mark\
ers\x22 />\x0a \
<rect\x0a \
ry=\x225.05346\
58\x22\x0a \
y=\x22253.84885\x22\x0a \
x=\x227\
.6487389\x22\x0a \
height=\x223\
5.528759\x22\x0a \
width=\x2235\
.528786\x22\x0a \
id=\x22rect87\
0\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.06184419;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<circle\x0a \
r=\x2225.3\
96828\x22\x0a \
cy=\x22271.6000\
1\x22\x0a \
cx=\x2225.4\x22\x0a \
id=\x22path8\
72\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.07635882;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1;paint-ord\
er:stroke fill m\
arkers\x22 />\x0a \
<circle\x0a \
transf\
orm=\x22rotate(-45)\
\x22\x0a c\
x=\x22-174.08969\x22\x0a \
cy=\x22\
210.01071\x22\x0a \
r=\x2212.65\
6071\x22\x0a \
id=\x22path876\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07399406;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<path\x0a \
inkscape:tra\
nsform-center-x=\
\x22-3.1749999\x22\x0a \
sodipo\
di:nodetypes=\x22cc\
cc\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22\x0a i\
d=\x22path904\x22\x0a \
d=\x22m 25\
.4,271.60002 -25\
.40000040000004,\
25.4 v -50.8 z\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07000433;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<path\x0a \
inkscape:tra\
nsform-center-x=\
\x223.175\x22\x0a \
style=\x22opac\
ity:1;fill:none;\
fill-opacity:0.4\
9382719;stroke:#\
ffffff00;stroke-\
width:0.07000433\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22\x0a \
d=\x22m 25\
.399999,271.6000\
2 25.4,-25.4 v 5\
0.8 z\x22\x0a \
id=\x22path906\x22\
\x0a in\
kscape:connector\
-curvature=\x220\x22\x0a \
sodi\
podi:nodetypes=\x22\
cccc\x22 />\x0a \
<rect\x0a \
ry=\x225.0514\
922\x22\x0a \
y=\x22256.39301\x22\x0a\
x=\x22\
2.5663135\x22\x0a \
height=\x22\
30.440479\x22\x0a \
width=\x224\
5.693634\x22\x0a \
id=\x22rect8\
37\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.0657438;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<rect\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.065743\
8;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22\x0a \
id=\x22re\
ct831\x22\x0a \
width=\x2245.69\
3588\x22\x0a \
height=\x2230.44\
051\x22\x0a \
x=\x22248.76645\x22\x0a\
y=\x22\
-40.633385\x22\x0a \
ry=\x225.0\
51497\x22\x0a \
transform=\x22r\
otate(90)\x22 />\x0a \
</g>\x0a \
</g>\x0a </g>\x0a \
<path\x0a \
style=\x22opacity:1\
;fill:#ffc107;fi\
ll-opacity:1;str\
oke:none;stroke-\
width:0.38596651\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-opacity:1\x22\x0a \
d=\x22m 50.2064\
21,401.67683 c 1\
10.217209,0.7127\
9 55.108609,0.35\
64 0,0 z\x22\x0a \
id=\x22rect997\x22\x0a \
inkscape:co\
nnector-curvatur\
e=\x220\x22 />\x0a <pa\
th\x0a style=\
\x22fill:#ff0000;st\
roke:#ff0000;str\
oke-width:0.5291\
6664;stroke-line\
cap:butt;stroke-\
linejoin:miter;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
opacity:1\x22\x0a \
d=\x22m 0.9809021\
5,292.68924 3.32\
986205,3.32989\x22\x0a\
id=\x22path8\
26\x22\x0a inksc\
ape:connector-cu\
rvature=\x220\x22 />\x0a \
<path\x0a \
inkscape:connect\
or-curvature=\x220\x22\
\x0a id=\x22path\
842\x22\x0a d=\x22m\
4.3107782,292.6\
8925 -3.32989002\
,3.32987\x22\x0a \
style=\x22fill:#ff\
0000;stroke:#ff0\
000;stroke-width\
:0.52916664;stro\
ke-linecap:butt;\
stroke-linejoin:\
miter;stroke-mit\
erlimit:4;stroke\
-dasharray:none;\
stroke-opacity:1\
\x22 />\x0a </g>\x0a</sv\
g>\x0a\
\x00\x00)I\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22siz\
egrip.svg\x22\x0a in\
kscape:export-fi\
lename=\x22/home/ye\
ison/Development\
/piton/art/icon_\
lite.png\x22\x0a ink\
scape:export-xdp\
i=\x2296\x22\x0a inksca\
pe:export-ydpi=\x22\
96\x22>\x0a <defs\x0a \
id=\x22defs2\x22 />\x0a\
<sodipodi:name\
dview\x0a id=\x22b\
ase\x22\x0a pageco\
lor=\x22#ffffff\x22\x0a \
bordercolor=\x22\
#666666\x22\x0a bo\
rderopacity=\x221.0\
\x22\x0a inkscape:\
pageopacity=\x220.0\
\x22\x0a inkscape:\
pageshadow=\x222\x22\x0a \
inkscape:zoo\
m=\x2216\x22\x0a inks\
cape:cx=\x224.85602\
4\x22\x0a inkscape\
:cy=\x229.6877956\x22\x0a\
inkscape:do\
cument-units=\x22px\
\x22\x0a inkscape:\
current-layer=\x22l\
ayer1\x22\x0a show\
grid=\x22true\x22\x0a \
inkscape:window\
-width=\x221920\x22\x0a \
inkscape:wind\
ow-height=\x221004\x22\
\x0a inkscape:w\
indow-x=\x220\x22\x0a \
inkscape:window\
-y=\x220\x22\x0a inks\
cape:window-maxi\
mized=\x221\x22\x0a i\
nkscape:showpage\
shadow=\x22false\x22\x0a \
units=\x22px\x22\x0a \
inkscape:pag\
echeckerboard=\x22f\
alse\x22\x0a showg\
uides=\x22true\x22\x0a \
inkscape:snap-\
bbox=\x22true\x22\x0a \
inkscape:bbox-p\
aths=\x22true\x22\x0a \
inkscape:bbox-n\
odes=\x22true\x22\x0a \
inkscape:snap-b\
box-edge-midpoin\
ts=\x22true\x22\x0a i\
nkscape:snap-bbo\
x-midpoints=\x22tru\
e\x22\x0a inkscape\
:snap-nodes=\x22tru\
e\x22\x0a inkscape\
:object-paths=\x22t\
rue\x22\x0a inksca\
pe:snap-intersec\
tion-paths=\x22true\
\x22\x0a inkscape:\
snap-smooth-node\
s=\x22true\x22\x0a in\
kscape:snap-midp\
oints=\x22true\x22\x0a \
inkscape:snap-\
global=\x22true\x22\x0a \
fit-margin-to\
p=\x220\x22\x0a fit-m\
argin-left=\x220\x22\x0a \
fit-margin-r\
ight=\x220\x22\x0a fi\
t-margin-bottom=\
\x220\x22\x0a inkscap\
e:guide-bbox=\x22tr\
ue\x22>\x0a <inksca\
pe:grid\x0a t\
ype=\x22xygrid\x22\x0a \
id=\x22grid974\x22\
\x0a empspaci\
ng=\x228\x22\x0a sp\
acingx=\x220.264583\
32\x22\x0a spaci\
ngy=\x220.26458332\x22\
\x0a dotted=\x22\
false\x22\x0a vi\
sible=\x22true\x22\x0a \
enabled=\x22tru\
e\x22\x0a snapvi\
siblegridlineson\
ly=\x22true\x22\x0a \
originx=\x220\x22\x0a \
originy=\x220\x22 \
/>\x0a </sodipodi:\
namedview>\x0a <me\
tadata\x0a id=\x22\
metadata5\x22>\x0a \
<rdf:RDF>\x0a \
<cc:Work\x0a \
rdf:about=\x22\x22>\x0a\
<dc:form\
at>image/svg+xml\
</dc:format>\x0a \
<dc:type\x0a \
rdf:res\
ource=\x22http://pu\
rl.org/dc/dcmity\
pe/StillImage\x22 /\
>\x0a <dc:ti\
tle />\x0a </c\
c:Work>\x0a </rd\
f:RDF>\x0a </metad\
ata>\x0a <g\x0a i\
nkscape:label=\x22L\
ayer 1\x22\x0a ink\
scape:groupmode=\
\x22layer\x22\x0a id=\
\x22layer1\x22\x0a tr\
ansform=\x22transla\
te(0,-291.70835)\
\x22>\x0a <g\x0a \
id=\x22g847\x22\x0a \
transform=\x22mat\
rix(0.05207439,0\
,0,0.05207453,-0\
.90125164,282.41\
203)\x22>\x0a <g\x0a\
id=\x22g85\
1\x22>\x0a <g\x0a \
id=\x22g1\
059\x22\x0a \
transform=\x22matri\
x(1.9986219,0,0,\
1.9986185,17.324\
484,-313.52314)\x22\
>\x0a <pat\
h\x0a i\
nkscape:transfor\
m-center-y=\x223.17\
5\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.07000433;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22\x0a \
d=\x22M 25.3999\
99,271.60002 -8.\
0000008e-7,246.2\
0002 H 50.799999\
Z\x22\x0a \
id=\x22path883\x22\x0a \
inksc\
ape:connector-cu\
rvature=\x220\x22\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22 />\x0a \
<path\x0a \
sodipodi:node\
types=\x22cccc\x22\x0a \
inksca\
pe:connector-cur\
vature=\x220\x22\x0a \
id=\x22path\
880\x22\x0a \
d=\x22m 25.399999\
,271.60002 25.39\
9999,25.4 H 0 Z\x22\
\x0a in\
kscape:transform\
-center-y=\x22-3.17\
49995\x22\x0a \
style=\x22opaci\
ty:1;fill:none;f\
ill-opacity:0.49\
382719;stroke:#f\
fffff00;stroke-w\
idth:0.07000433;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1;paint-\
order:stroke fil\
l markers\x22 />\x0a \
<rect\x0a \
ry=\x225\
.0534658\x22\x0a \
y=\x22253.84\
885\x22\x0a \
x=\x227.6487389\x22\x0a\
hei\
ght=\x2235.528759\x22\x0a\
wid\
th=\x2235.528786\x22\x0a \
id=\x22\
rect870\x22\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0618441\
9;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22 />\x0a\
<circl\
e\x0a r\
=\x2225.396828\x22\x0a \
cy=\x2227\
1.60001\x22\x0a \
cx=\x2225.4\x22\x0a\
id=\
\x22path872\x22\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.076358\
82;stroke-lineca\
p:round;stroke-l\
inejoin:round;st\
roke-miterlimit:\
4;stroke-dasharr\
ay:none;stroke-d\
ashoffset:0;stro\
ke-opacity:1;pai\
nt-order:stroke \
fill markers\x22 />\
\x0a <circ\
le\x0a \
transform=\x22rotat\
e(-45)\x22\x0a \
cx=\x22-174.08\
969\x22\x0a \
cy=\x22210.01071\x22\
\x0a r=\
\x2212.656071\x22\x0a \
id=\x22pat\
h876\x22\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.07399406;s\
troke-linecap:ro\
und;stroke-linej\
oin:round;stroke\
-miterlimit:4;st\
roke-dasharray:n\
one;stroke-dasho\
ffset:0;stroke-o\
pacity:1;paint-o\
rder:stroke fill\
markers\x22 />\x0a \
<path\x0a \
inksca\
pe:transform-cen\
ter-x=\x22-3.174999\
9\x22\x0a \
sodipodi:nodetyp\
es=\x22cccc\x22\x0a \
inkscape:\
connector-curvat\
ure=\x220\x22\x0a \
id=\x22path904\
\x22\x0a d\
=\x22m 25.4,271.600\
02 -25.400000400\
00004,25.4 v -50\
.8 z\x22\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.07000433;s\
troke-linecap:ro\
und;stroke-linej\
oin:round;stroke\
-miterlimit:4;st\
roke-dasharray:n\
one;stroke-dasho\
ffset:0;stroke-o\
pacity:1;paint-o\
rder:stroke fill\
markers\x22 />\x0a \
<path\x0a \
inksca\
pe:transform-cen\
ter-x=\x223.175\x22\x0a \
style\
=\x22opacity:1;fill\
:none;fill-opaci\
ty:0.49382719;st\
roke:#ffffff00;s\
troke-width:0.07\
000433;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
;paint-order:str\
oke fill markers\
\x22\x0a d\
=\x22m 25.399999,27\
1.60002 25.4,-25\
.4 v 50.8 z\x22\x0a \
id=\x22pa\
th906\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a \
sodipodi:nodet\
ypes=\x22cccc\x22 />\x0a \
<rect\x0a \
ry=\x22\
5.0514922\x22\x0a \
y=\x22256.3\
9301\x22\x0a \
x=\x222.5663135\x22\
\x0a he\
ight=\x2230.440479\x22\
\x0a wi\
dth=\x2245.693634\x22\x0a\
id=\
\x22rect837\x22\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.065743\
8;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22 />\x0a\
<rect\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
0657438;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22\x0a \
id=\x22rect831\x22\x0a \
width=\
\x2245.693588\x22\x0a \
height=\
\x2230.44051\x22\x0a \
x=\x22248.7\
6645\x22\x0a \
y=\x22-40.633385\
\x22\x0a r\
y=\x225.051497\x22\x0a \
transf\
orm=\x22rotate(90)\x22\
/>\x0a </g>\
\x0a </g>\x0a \
</g>\x0a <path\x0a \
style=\x22opa\
city:1;fill:#ffc\
107;fill-opacity\
:1;stroke:none;s\
troke-width:0.38\
596651;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-opacity:\
1\x22\x0a d=\x22m 5\
0.206421,401.676\
83 c 110.217209,\
0.71279 55.10860\
9,0.3564 0,0 z\x22\x0a\
id=\x22rect9\
97\x22\x0a inksc\
ape:connector-cu\
rvature=\x220\x22 />\x0a \
<rect\x0a \
style=\x22opacity:1\
;fill:#ff0000;fi\
ll-opacity:1;str\
oke:none;stroke-\
width:1.05832505\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1\x22\x0a \
id=\x22rect832\x22\x0a\
width=\x220.\
52916664\x22\x0a \
height=\x220.52915\
841\x22\x0a x=\x224\
.2333331\x22\x0a \
y=\x22295.94168\x22 /\
>\x0a <rect\x0a \
y=\x22295.94168\x22\
\x0a x=\x221.058\
3333\x22\x0a hei\
ght=\x220.52915841\x22\
\x0a width=\x220\
.52916664\x22\x0a \
id=\x22rect836\x22\x0a \
style=\x22opa\
city:1;fill:#ff0\
000;fill-opacity\
:1;stroke:none;s\
troke-width:1.05\
832505;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
\x22 />\x0a <rect\x0a \
style=\x22opa\
city:1;fill:#ff0\
000;fill-opacity\
:1;stroke:none;s\
troke-width:1.05\
832505;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
\x22\x0a id=\x22rec\
t838\x22\x0a wid\
th=\x220.52916664\x22\x0a\
height=\x220\
.52915841\x22\x0a \
x=\x222.6458333\x22\x0a\
y=\x22295.94\
168\x22 />\x0a <rec\
t\x0a y=\x22292.\
76669\x22\x0a x=\
\x224.2333331\x22\x0a \
height=\x220.529\
15841\x22\x0a wi\
dth=\x220.52916664\x22\
\x0a id=\x22rect\
840\x22\x0a styl\
e=\x22opacity:1;fil\
l:#ff0000;fill-o\
pacity:1;stroke:\
none;stroke-widt\
h:1.05832505;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1\x22 />\x0a <\
rect\x0a styl\
e=\x22opacity:1;fil\
l:#ff0000;fill-o\
pacity:1;stroke:\
none;stroke-widt\
h:1.05832505;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1\x22\x0a i\
d=\x22rect842\x22\x0a \
width=\x220.5291\
6664\x22\x0a hei\
ght=\x220.52915841\x22\
\x0a x=\x221.852\
0832\x22\x0a y=\x22\
295.14795\x22 />\x0a \
<rect\x0a s\
tyle=\x22opacity:1;\
fill:#ff0000;fil\
l-opacity:1;stro\
ke:none;stroke-w\
idth:1.05832505;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1\x22\x0a \
id=\x22rect844\x22\x0a \
width=\x220.5\
2916664\x22\x0a \
height=\x220.529158\
41\x22\x0a x=\x223.\
4395831\x22\x0a \
y=\x22295.14795\x22 />\
\x0a <rect\x0a \
y=\x22294.35419\x22\x0a\
x=\x224.2333\
331\x22\x0a heig\
ht=\x220.52915841\x22\x0a\
width=\x220.\
52916664\x22\x0a \
id=\x22rect846\x22\x0a \
style=\x22opac\
ity:1;fill:#ff00\
00;fill-opacity:\
1;stroke:none;st\
roke-width:1.058\
32505;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1\x22\
/>\x0a <rect\x0a \
style=\x22opac\
ity:1;fill:#ff00\
00;fill-opacity:\
1;stroke:none;st\
roke-width:1.058\
32505;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1\x22\
\x0a id=\x22rect\
848\x22\x0a widt\
h=\x220.52916664\x22\x0a \
height=\x220.\
52915841\x22\x0a \
x=\x223.4395831\x22\x0a \
y=\x22293.560\
42\x22 />\x0a <rect\
\x0a style=\x22o\
pacity:1;fill:#f\
f0000;fill-opaci\
ty:1;stroke:none\
;stroke-width:1.\
05832505;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1\x22\x0a id=\x22r\
ect852\x22\x0a w\
idth=\x220.52916664\
\x22\x0a height=\
\x220.52915841\x22\x0a \
x=\x222.6458333\
\x22\x0a y=\x22294.\
35419\x22 />\x0a </g>\
\x0a</svg>\x0a\
\x00\x00$v\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22spl\
itter-horizontal\
.svg\x22\x0a inkscap\
e:export-filenam\
e=\x22/home/yeison/\
Development/pito\
n/art/icon_lite.\
png\x22\x0a inkscape\
:export-xdpi=\x2296\
\x22\x0a inkscape:ex\
port-ydpi=\x2296\x22>\x0a\
<defs\x0a id=\
\x22defs2\x22 />\x0a <so\
dipodi:namedview\
\x0a id=\x22base\x22\x0a\
pagecolor=\x22\
#ffffff\x22\x0a bo\
rdercolor=\x22#6666\
66\x22\x0a bordero\
pacity=\x221.0\x22\x0a \
inkscape:pageo\
pacity=\x220.0\x22\x0a \
inkscape:pages\
hadow=\x222\x22\x0a i\
nkscape:zoom=\x2224\
.802598\x22\x0a in\
kscape:cx=\x226.302\
3018\x22\x0a inksc\
ape:cy=\x228.969841\
\x22\x0a inkscape:\
document-units=\x22\
px\x22\x0a inkscap\
e:current-layer=\
\x22layer1\x22\x0a sh\
owgrid=\x22true\x22\x0a \
inkscape:wind\
ow-width=\x221920\x22\x0a\
inkscape:wi\
ndow-height=\x22100\
4\x22\x0a inkscape\
:window-x=\x220\x22\x0a \
inkscape:wind\
ow-y=\x220\x22\x0a in\
kscape:window-ma\
ximized=\x221\x22\x0a \
inkscape:showpa\
geshadow=\x22false\x22\
\x0a units=\x22px\x22\
\x0a inkscape:p\
agecheckerboard=\
\x22false\x22\x0a sho\
wguides=\x22true\x22\x0a \
inkscape:sna\
p-bbox=\x22true\x22\x0a \
inkscape:bbox\
-paths=\x22true\x22\x0a \
inkscape:bbox\
-nodes=\x22true\x22\x0a \
inkscape:snap\
-bbox-edge-midpo\
ints=\x22true\x22\x0a \
inkscape:snap-b\
box-midpoints=\x22t\
rue\x22\x0a inksca\
pe:snap-nodes=\x22t\
rue\x22\x0a inksca\
pe:object-paths=\
\x22true\x22\x0a inks\
cape:snap-inters\
ection-paths=\x22tr\
ue\x22\x0a inkscap\
e:snap-smooth-no\
des=\x22true\x22\x0a \
inkscape:snap-mi\
dpoints=\x22true\x22\x0a \
inkscape:sna\
p-global=\x22true\x22\x0a\
fit-margin-\
top=\x220\x22\x0a fit\
-margin-left=\x220\x22\
\x0a fit-margin\
-right=\x220\x22\x0a \
fit-margin-botto\
m=\x220\x22\x0a inksc\
ape:guide-bbox=\x22\
true\x22>\x0a <inks\
cape:grid\x0a \
type=\x22xygrid\x22\x0a \
id=\x22grid97\
4\x22\x0a empspa\
cing=\x228\x22\x0a \
spacingx=\x220.2645\
8332\x22\x0a spa\
cingy=\x220.2645833\
2\x22\x0a dotted\
=\x22false\x22\x0a \
visible=\x22true\x22\x0a \
enabled=\x22t\
rue\x22\x0a snap\
visiblegridlines\
only=\x22true\x22\x0a \
originx=\x220\x22\x0a \
originy=\x220\
\x22 />\x0a </sodipod\
i:namedview>\x0a <\
metadata\x0a id\
=\x22metadata5\x22>\x0a \
<rdf:RDF>\x0a \
<cc:Work\x0a \
rdf:about=\x22\x22\
>\x0a <dc:fo\
rmat>image/svg+x\
ml</dc:format>\x0a \
<dc:type\x0a\
rdf:r\
esource=\x22http://\
purl.org/dc/dcmi\
type/StillImage\x22\
/>\x0a <dc:\
title />\x0a <\
/cc:Work>\x0a </\
rdf:RDF>\x0a </met\
adata>\x0a <g\x0a \
inkscape:label=\
\x22Layer 1\x22\x0a i\
nkscape:groupmod\
e=\x22layer\x22\x0a i\
d=\x22layer1\x22\x0a \
transform=\x22trans\
late(0,-291.7083\
5)\x22>\x0a <g\x0a \
id=\x22g847\x22\x0a \
transform=\x22m\
atrix(0.05207439\
,0,0,0.05207453,\
-0.90125164,282.\
41203)\x22>\x0a <\
g\x0a id=\x22g\
851\x22>\x0a <g\
\x0a id=\x22\
g1059\x22\x0a \
transform=\x22mat\
rix(1.9986219,0,\
0,1.9986185,17.3\
24484,-313.52314\
)\x22>\x0a <p\
ath\x0a \
inkscape:transf\
orm-center-y=\x223.\
175\x22\x0a \
style=\x22opacity\
:1;fill:none;fil\
l-opacity:0.4938\
2719;stroke:#fff\
fff00;stroke-wid\
th:0.07000433;st\
roke-linecap:rou\
nd;stroke-linejo\
in:round;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-dashof\
fset:0;stroke-op\
acity:1;paint-or\
der:stroke fill \
markers\x22\x0a \
d=\x22M 25.39\
9999,271.60002 -\
8.0000008e-7,246\
.20002 H 50.7999\
99 Z\x22\x0a \
id=\x22path883\x22\x0a\
ink\
scape:connector-\
curvature=\x220\x22\x0a \
sodip\
odi:nodetypes=\x22c\
ccc\x22 />\x0a \
<path\x0a \
sodipodi:no\
detypes=\x22cccc\x22\x0a \
inks\
cape:connector-c\
urvature=\x220\x22\x0a \
id=\x22pa\
th880\x22\x0a \
d=\x22m 25.3999\
99,271.60002 25.\
399999,25.4 H 0 \
Z\x22\x0a \
inkscape:transfo\
rm-center-y=\x22-3.\
1749995\x22\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0700043\
3;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22 />\x0a\
<rect\x0a\
ry=\
\x225.0534658\x22\x0a \
y=\x22253.\
84885\x22\x0a \
x=\x227.6487389\
\x22\x0a h\
eight=\x2235.528759\
\x22\x0a w\
idth=\x2235.528786\x22\
\x0a id\
=\x22rect870\x22\x0a \
style=\x22o\
pacity:1;fill:no\
ne;fill-opacity:\
0.49382719;strok\
e:#ffffff00;stro\
ke-width:0.06184\
419;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1;pa\
int-order:stroke\
fill markers\x22 /\
>\x0a <cir\
cle\x0a \
r=\x2225.396828\x22\x0a \
cy=\x22\
271.60001\x22\x0a \
cx=\x2225.4\
\x22\x0a i\
d=\x22path872\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0763\
5882;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22 \
/>\x0a <ci\
rcle\x0a \
transform=\x22rot\
ate(-45)\x22\x0a \
cx=\x22-174.\
08969\x22\x0a \
cy=\x22210.0107\
1\x22\x0a \
r=\x2212.656071\x22\x0a \
id=\x22p\
ath876\x22\x0a \
style=\x22opac\
ity:1;fill:none;\
fill-opacity:0.4\
9382719;stroke:#\
ffffff00;stroke-\
width:0.07399406\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22 />\x0a \
<path\x0a \
inks\
cape:transform-c\
enter-x=\x22-3.1749\
999\x22\x0a \
sodipodi:nodet\
ypes=\x22cccc\x22\x0a \
inkscap\
e:connector-curv\
ature=\x220\x22\x0a \
id=\x22path9\
04\x22\x0a \
d=\x22m 25.4,271.6\
0002 -25.4000004\
0000004,25.4 v -\
50.8 z\x22\x0a \
style=\x22opac\
ity:1;fill:none;\
fill-opacity:0.4\
9382719;stroke:#\
ffffff00;stroke-\
width:0.07000433\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22 />\x0a \
<path\x0a \
inks\
cape:transform-c\
enter-x=\x223.175\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07000433;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22\x0a \
d=\x22m 25.399999,\
271.60002 25.4,-\
25.4 v 50.8 z\x22\x0a \
id=\x22\
path906\x22\x0a \
inkscape:c\
onnector-curvatu\
re=\x220\x22\x0a \
sodipodi:nod\
etypes=\x22cccc\x22 />\
\x0a <rect\
\x0a ry\
=\x225.0514922\x22\x0a \
y=\x22256\
.39301\x22\x0a \
x=\x222.566313\
5\x22\x0a \
height=\x2230.44047\
9\x22\x0a \
width=\x2245.693634\
\x22\x0a i\
d=\x22rect837\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0657\
438;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1;pa\
int-order:stroke\
fill markers\x22 /\
>\x0a <rec\
t\x0a s\
tyle=\x22opacity:1;\
fill:none;fill-o\
pacity:0.4938271\
9;stroke:#ffffff\
00;stroke-width:\
0.0657438;stroke\
-linecap:round;s\
troke-linejoin:r\
ound;stroke-mite\
rlimit:4;stroke-\
dasharray:none;s\
troke-dashoffset\
:0;stroke-opacit\
y:1;paint-order:\
stroke fill mark\
ers\x22\x0a \
id=\x22rect831\x22\x0a \
widt\
h=\x2245.693588\x22\x0a \
heigh\
t=\x2230.44051\x22\x0a \
x=\x22248\
.76645\x22\x0a \
y=\x22-40.6333\
85\x22\x0a \
ry=\x225.051497\x22\x0a \
tran\
sform=\x22rotate(90\
)\x22 />\x0a </\
g>\x0a </g>\x0a \
</g>\x0a <path\
\x0a style=\x22o\
pacity:1;fill:#f\
fc107;fill-opaci\
ty:1;stroke:none\
;stroke-width:0.\
38596651;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-opacit\
y:1\x22\x0a d=\x22m\
50.206421,401.6\
7683 c 110.21720\
9,0.71279 55.108\
609,0.3564 0,0 z\
\x22\x0a id=\x22rec\
t997\x22\x0a ink\
scape:connector-\
curvature=\x220\x22 />\
\x0a <g\x0a i\
d=\x22g839\x22>\x0a \
<rect\x0a y\
=\x22291.97293\x22\x0a \
x=\x222.38124\
99\x22\x0a hei\
ght=\x220.52916664\x22\
\x0a width=\
\x220.52916664\x22\x0a \
id=\x22rect82\
7\x22\x0a styl\
e=\x22opacity:1;fil\
l:#ff0000;fill-o\
pacity:1;stroke:\
none;stroke-widt\
h:0.52916664;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1\x22 />\x0a \
<rect\x0a \
style=\x22opacity:1\
;fill:#ff0000;fi\
ll-opacity:1;str\
oke:none;stroke-\
width:0.52916664\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1\x22\x0a \
id=\x22rect829\
\x22\x0a width\
=\x220.52916664\x22\x0a \
height=\x220\
.52916664\x22\x0a \
x=\x222.3812499\
\x22\x0a y=\x2229\
6.20627\x22 />\x0a \
<rect\x0a \
y=\x22295.14792\x22\x0a \
x=\x222.381\
2499\x22\x0a h\
eight=\x220.5291666\
4\x22\x0a widt\
h=\x220.52916664\x22\x0a \
id=\x22rect\
832\x22\x0a st\
yle=\x22opacity:1;f\
ill:#ff0000;fill\
-opacity:1;strok\
e:none;stroke-wi\
dth:0.52916664;s\
troke-linecap:ro\
und;stroke-linej\
oin:round;stroke\
-miterlimit:4;st\
roke-dasharray:n\
one;stroke-dasho\
ffset:0;stroke-o\
pacity:1\x22 />\x0a \
<rect\x0a \
style=\x22opacity\
:1;fill:#ff0000;\
fill-opacity:1;s\
troke:none;strok\
e-width:0.529166\
64;stroke-lineca\
p:round;stroke-l\
inejoin:round;st\
roke-miterlimit:\
4;stroke-dasharr\
ay:none;stroke-d\
ashoffset:0;stro\
ke-opacity:1\x22\x0a \
id=\x22rect8\
34\x22\x0a wid\
th=\x220.52916664\x22\x0a\
height=\
\x220.52916664\x22\x0a \
x=\x222.38124\
99\x22\x0a y=\x22\
294.0896\x22 />\x0a \
<rect\x0a \
y=\x22293.03128\x22\x0a\
x=\x222.38\
12499\x22\x0a \
height=\x220.529166\
64\x22\x0a wid\
th=\x220.52916664\x22\x0a\
id=\x22rec\
t836\x22\x0a s\
tyle=\x22opacity:1;\
fill:#ff0000;fil\
l-opacity:1;stro\
ke:none;stroke-w\
idth:0.52916664;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1\x22 />\x0a \
</g>\x0a </g>\x0a</\
svg>\x0a\
\x00\x00\x1e\xbb\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22dow\
narrow.svg\x22\x0a i\
nkscape:export-f\
ilename=\x22/home/y\
eison/Developmen\
t/piton/art/icon\
_lite.png\x22\x0a in\
kscape:export-xd\
pi=\x2296\x22\x0a inksc\
ape:export-ydpi=\
\x2296\x22>\x0a <defs\x0a \
id=\x22defs2\x22 />\
\x0a <sodipodi:nam\
edview\x0a id=\x22\
base\x22\x0a pagec\
olor=\x22#ffffff\x22\x0a \
bordercolor=\
\x22#666666\x22\x0a b\
orderopacity=\x221.\
0\x22\x0a inkscape\
:pageopacity=\x220.\
0\x22\x0a inkscape\
:pageshadow=\x222\x22\x0a\
inkscape:zo\
om=\x2228.704913\x22\x0a \
inkscape:cx=\
\x2211.479559\x22\x0a \
inkscape:cy=\x225.\
0026685\x22\x0a in\
kscape:document-\
units=\x22px\x22\x0a \
inkscape:current\
-layer=\x22layer1\x22\x0a\
showgrid=\x22t\
rue\x22\x0a inksca\
pe:window-width=\
\x221920\x22\x0a inks\
cape:window-heig\
ht=\x221004\x22\x0a i\
nkscape:window-x\
=\x220\x22\x0a inksca\
pe:window-y=\x220\x22\x0a\
inkscape:wi\
ndow-maximized=\x22\
1\x22\x0a inkscape\
:showpageshadow=\
\x22false\x22\x0a uni\
ts=\x22px\x22\x0a ink\
scape:pagechecke\
rboard=\x22false\x22\x0a \
showguides=\x22\
true\x22\x0a inksc\
ape:snap-bbox=\x22t\
rue\x22\x0a inksca\
pe:bbox-paths=\x22t\
rue\x22\x0a inksca\
pe:bbox-nodes=\x22t\
rue\x22\x0a inksca\
pe:snap-bbox-edg\
e-midpoints=\x22tru\
e\x22\x0a inkscape\
:snap-bbox-midpo\
ints=\x22true\x22\x0a \
inkscape:snap-n\
odes=\x22true\x22\x0a \
inkscape:object\
-paths=\x22true\x22\x0a \
inkscape:snap\
-intersection-pa\
ths=\x22true\x22\x0a \
inkscape:snap-sm\
ooth-nodes=\x22true\
\x22\x0a inkscape:\
snap-midpoints=\x22\
true\x22\x0a inksc\
ape:snap-global=\
\x22true\x22\x0a fit-\
margin-top=\x220\x22\x0a \
fit-margin-l\
eft=\x220\x22\x0a fit\
-margin-right=\x220\
\x22\x0a fit-margi\
n-bottom=\x220\x22\x0a \
inkscape:guide\
-bbox=\x22true\x22>\x0a \
<inkscape:grid\
\x0a type=\x22xy\
grid\x22\x0a id=\
\x22grid974\x22\x0a \
empspacing=\x228\x22\x0a\
spacingx=\
\x220.26458332\x22\x0a \
spacingy=\x220.\
26458332\x22\x0a \
dotted=\x22false\x22\x0a\
visible=\x22\
true\x22\x0a ena\
bled=\x22true\x22\x0a \
snapvisiblegr\
idlinesonly=\x22tru\
e\x22\x0a origin\
x=\x220\x22\x0a ori\
giny=\x220\x22 />\x0a </\
sodipodi:namedvi\
ew>\x0a <metadata\x0a\
id=\x22metadat\
a5\x22>\x0a <rdf:RD\
F>\x0a <cc:Wor\
k\x0a rdf:a\
bout=\x22\x22>\x0a \
<dc:format>imag\
e/svg+xml</dc:fo\
rmat>\x0a <d\
c:type\x0a \
rdf:resource=\x22\
http://purl.org/\
dc/dcmitype/Stil\
lImage\x22 />\x0a \
<dc:title />\x0a\
</cc:Work>\
\x0a </rdf:RDF>\x0a\
</metadata>\x0a \
<g\x0a inkscape\
:label=\x22Layer 1\x22\
\x0a inkscape:g\
roupmode=\x22layer\x22\
\x0a id=\x22layer1\
\x22\x0a transform\
=\x22translate(0,-2\
91.70835)\x22>\x0a \
<g\x0a id=\x22g8\
47\x22\x0a trans\
form=\x22matrix(0.0\
5207439,0,0,0.05\
207453,-0.901251\
64,282.41203)\x22>\x0a\
<g\x0a \
id=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transfo\
rm=\x22matrix(1.998\
6219,0,0,1.99861\
85,17.324484,-31\
3.52314)\x22>\x0a \
<path\x0a \
inkscape\
:transform-cente\
r-y=\x223.175\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0700\
0433;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22\x0a\
d=\x22\
M 25.399999,271.\
60002 -8.0000008\
e-7,246.20002 H \
50.799999 Z\x22\x0a \
id=\x22pa\
th883\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a \
sodipodi:nodet\
ypes=\x22cccc\x22 />\x0a \
<path\x0a \
sodi\
podi:nodetypes=\x22\
cccc\x22\x0a \
inkscape:conn\
ector-curvature=\
\x220\x22\x0a \
id=\x22path880\x22\x0a \
d=\x22m \
25.399999,271.60\
002 25.399999,25\
.4 H 0 Z\x22\x0a \
inkscape:\
transform-center\
-y=\x22-3.1749995\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07000433;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<rect\x0a \
ry=\x225.053465\
8\x22\x0a \
y=\x22253.84885\x22\x0a \
x=\x227.\
6487389\x22\x0a \
height=\x2235\
.528759\x22\x0a \
width=\x2235.\
528786\x22\x0a \
id=\x22rect870\
\x22\x0a s\
tyle=\x22opacity:1;\
fill:none;fill-o\
pacity:0.4938271\
9;stroke:#ffffff\
00;stroke-width:\
0.06184419;strok\
e-linecap:round;\
stroke-linejoin:\
round;stroke-mit\
erlimit:4;stroke\
-dasharray:none;\
stroke-dashoffse\
t:0;stroke-opaci\
ty:1;paint-order\
:stroke fill mar\
kers\x22 />\x0a \
<circle\x0a \
r=\x2225.39\
6828\x22\x0a \
cy=\x22271.60001\
\x22\x0a c\
x=\x2225.4\x22\x0a \
id=\x22path87\
2\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.07635882;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<circle\x0a \
transfo\
rm=\x22rotate(-45)\x22\
\x0a cx\
=\x22-174.08969\x22\x0a \
cy=\x222\
10.01071\x22\x0a \
r=\x2212.656\
071\x22\x0a \
id=\x22path876\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
7399406;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<path\x0a \
inkscape:tran\
sform-center-x=\x22\
-3.1749999\x22\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22\x0a \
inkscape:connect\
or-curvature=\x220\x22\
\x0a id\
=\x22path904\x22\x0a \
d=\x22m 25.\
4,271.60002 -25.\
40000040000004,2\
5.4 v -50.8 z\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
7000433;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<path\x0a \
inkscape:tran\
sform-center-x=\x22\
3.175\x22\x0a \
style=\x22opaci\
ty:1;fill:none;f\
ill-opacity:0.49\
382719;stroke:#f\
fffff00;stroke-w\
idth:0.07000433;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1;paint-\
order:stroke fil\
l markers\x22\x0a \
d=\x22m 25.\
399999,271.60002\
25.4,-25.4 v 50\
.8 z\x22\x0a \
id=\x22path906\x22\x0a\
ink\
scape:connector-\
curvature=\x220\x22\x0a \
sodip\
odi:nodetypes=\x22c\
ccc\x22 />\x0a \
<rect\x0a \
ry=\x225.05149\
22\x22\x0a \
y=\x22256.39301\x22\x0a \
x=\x222\
.5663135\x22\x0a \
height=\x223\
0.440479\x22\x0a \
width=\x2245\
.693634\x22\x0a \
id=\x22rect83\
7\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.0657438;strok\
e-linecap:round;\
stroke-linejoin:\
round;stroke-mit\
erlimit:4;stroke\
-dasharray:none;\
stroke-dashoffse\
t:0;stroke-opaci\
ty:1;paint-order\
:stroke fill mar\
kers\x22 />\x0a \
<rect\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0657438\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22\x0a \
id=\x22rec\
t831\x22\x0a \
width=\x2245.693\
588\x22\x0a \
height=\x2230.440\
51\x22\x0a \
x=\x22248.76645\x22\x0a \
y=\x22-\
40.633385\x22\x0a \
ry=\x225.05\
1497\x22\x0a \
transform=\x22ro\
tate(90)\x22 />\x0a \
</g>\x0a \
</g>\x0a </g>\x0a \
<path\x0a s\
tyle=\x22opacity:1;\
fill:#ffc107;fil\
l-opacity:1;stro\
ke:none;stroke-w\
idth:0.38596651;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-opacity:1\x22\x0a \
d=\x22m 50.20642\
1,401.67683 c 11\
0.217209,0.71279\
55.108609,0.356\
4 0,0 z\x22\x0a \
id=\x22rect997\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22 />\x0a <pat\
h\x0a style=\x22\
fill:none;stroke\
:#ff0000;stroke-\
width:0.52916664\
;stroke-linecap:\
butt;stroke-line\
join:bevel;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-opac\
ity:1\x22\x0a d=\
\x22m 1.5997022,293\
.52889 1.0461311\
,1.65058 1.04613\
09,-1.65058\x22\x0a \
id=\x22path827\x22\
\x0a inkscape\
:connector-curva\
ture=\x220\x22\x0a \
sodipodi:nodetyp\
es=\x22ccc\x22 />\x0a </\
g>\x0a</svg>\x0a\
\x00\x00\x1f\xba\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22clo\
se.svg\x22\x0a inksc\
ape:export-filen\
ame=\x22/home/yeiso\
n/Development/pi\
ton/art/icon_lit\
e.png\x22\x0a inksca\
pe:export-xdpi=\x22\
96\x22\x0a inkscape:\
export-ydpi=\x2296\x22\
>\x0a <defs\x0a i\
d=\x22defs2\x22 />\x0a <\
sodipodi:namedvi\
ew\x0a id=\x22base\
\x22\x0a pagecolor\
=\x22#ffffff\x22\x0a \
bordercolor=\x22#66\
6666\x22\x0a borde\
ropacity=\x221.0\x22\x0a \
inkscape:pag\
eopacity=\x220.0\x22\x0a \
inkscape:pag\
eshadow=\x222\x22\x0a \
inkscape:zoom=\x22\
0.44851425\x22\x0a \
inkscape:cx=\x22-4\
61.15031\x22\x0a i\
nkscape:cy=\x2268.2\
80762\x22\x0a inks\
cape:document-un\
its=\x22px\x22\x0a in\
kscape:current-l\
ayer=\x22layer1\x22\x0a \
showgrid=\x22tru\
e\x22\x0a inkscape\
:window-width=\x221\
920\x22\x0a inksca\
pe:window-height\
=\x221004\x22\x0a ink\
scape:window-x=\x22\
0\x22\x0a inkscape\
:window-y=\x220\x22\x0a \
inkscape:wind\
ow-maximized=\x221\x22\
\x0a inkscape:s\
howpageshadow=\x22f\
alse\x22\x0a units\
=\x22px\x22\x0a inksc\
ape:pagecheckerb\
oard=\x22false\x22\x0a \
showguides=\x22fa\
lse\x22\x0a inksca\
pe:snap-bbox=\x22tr\
ue\x22\x0a inkscap\
e:bbox-paths=\x22tr\
ue\x22\x0a inkscap\
e:bbox-nodes=\x22tr\
ue\x22\x0a inkscap\
e:snap-bbox-edge\
-midpoints=\x22true\
\x22\x0a inkscape:\
snap-bbox-midpoi\
nts=\x22true\x22\x0a \
inkscape:snap-no\
des=\x22true\x22\x0a \
inkscape:object-\
paths=\x22true\x22\x0a \
inkscape:snap-\
intersection-pat\
hs=\x22true\x22\x0a i\
nkscape:snap-smo\
oth-nodes=\x22true\x22\
\x0a inkscape:s\
nap-midpoints=\x22t\
rue\x22\x0a inksca\
pe:snap-global=\x22\
true\x22\x0a fit-m\
argin-top=\x220\x22\x0a \
fit-margin-le\
ft=\x220\x22\x0a fit-\
margin-right=\x220\x22\
\x0a fit-margin\
-bottom=\x220\x22\x0a \
inkscape:guide-\
bbox=\x22true\x22>\x0a \
<inkscape:grid\x0a\
type=\x22xyg\
rid\x22\x0a id=\x22\
grid974\x22\x0a \
empspacing=\x228\x22\x0a \
spacingx=\x22\
0.26458332\x22\x0a \
spacingy=\x220.2\
6458332\x22\x0a \
dotted=\x22false\x22\x0a \
visible=\x22t\
rue\x22\x0a enab\
led=\x22true\x22\x0a \
snapvisiblegri\
dlinesonly=\x22true\
\x22\x0a originx\
=\x220\x22\x0a orig\
iny=\x220\x22 />\x0a </s\
odipodi:namedvie\
w>\x0a <metadata\x0a \
id=\x22metadata\
5\x22>\x0a <rdf:RDF\
>\x0a <cc:Work\
\x0a rdf:ab\
out=\x22\x22>\x0a \
<dc:format>image\
/svg+xml</dc:for\
mat>\x0a <dc\
:type\x0a \
rdf:resource=\x22h\
ttp://purl.org/d\
c/dcmitype/Still\
Image\x22 />\x0a \
<dc:title />\x0a \
</cc:Work>\x0a\
</rdf:RDF>\x0a \
</metadata>\x0a <\
g\x0a inkscape:\
label=\x22Layer 1\x22\x0a\
inkscape:gr\
oupmode=\x22layer\x22\x0a\
id=\x22layer1\x22\
\x0a transform=\
\x22translate(0,-29\
1.70835)\x22>\x0a <\
g\x0a id=\x22g84\
7\x22\x0a transf\
orm=\x22matrix(0.05\
207439,0,0,0.052\
07453,-0.9012516\
4,282.41203)\x22>\x0a \
<g\x0a \
id=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transfor\
m=\x22matrix(1.9986\
219,0,0,1.998618\
5,17.324484,-313\
.52314)\x22>\x0a \
<path\x0a \
inkscape:\
transform-center\
-y=\x223.175\x22\x0a \
style=\x22o\
pacity:1;fill:no\
ne;fill-opacity:\
0.49382719;strok\
e:#ffffff00;stro\
ke-width:0.07000\
433;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1;pa\
int-order:stroke\
fill markers\x22\x0a \
d=\x22M\
25.399999,271.6\
0002 -8.0000008e\
-7,246.20002 H 5\
0.799999 Z\x22\x0a \
id=\x22pat\
h883\x22\x0a \
inkscape:conn\
ector-curvature=\
\x220\x22\x0a \
sodipodi:nodety\
pes=\x22cccc\x22 />\x0a \
<path\x0a \
sodip\
odi:nodetypes=\x22c\
ccc\x22\x0a \
inkscape:conne\
ctor-curvature=\x22\
0\x22\x0a \
id=\x22path880\x22\x0a \
d=\x22m 2\
5.399999,271.600\
02 25.399999,25.\
4 H 0 Z\x22\x0a \
inkscape:t\
ransform-center-\
y=\x22-3.1749995\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
7000433;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<rect\x0a \
ry=\x225.0534658\
\x22\x0a y\
=\x22253.84885\x22\x0a \
x=\x227.6\
487389\x22\x0a \
height=\x2235.\
528759\x22\x0a \
width=\x2235.5\
28786\x22\x0a \
id=\x22rect870\x22\
\x0a st\
yle=\x22opacity:1;f\
ill:none;fill-op\
acity:0.49382719\
;stroke:#ffffff0\
0;stroke-width:0\
.06184419;stroke\
-linecap:round;s\
troke-linejoin:r\
ound;stroke-mite\
rlimit:4;stroke-\
dasharray:none;s\
troke-dashoffset\
:0;stroke-opacit\
y:1;paint-order:\
stroke fill mark\
ers\x22 />\x0a \
<circle\x0a \
r=\x2225.396\
828\x22\x0a \
cy=\x22271.60001\x22\
\x0a cx\
=\x2225.4\x22\x0a \
id=\x22path872\
\x22\x0a s\
tyle=\x22opacity:1;\
fill:none;fill-o\
pacity:0.4938271\
9;stroke:#ffffff\
00;stroke-width:\
0.07635882;strok\
e-linecap:round;\
stroke-linejoin:\
round;stroke-mit\
erlimit:4;stroke\
-dasharray:none;\
stroke-dashoffse\
t:0;stroke-opaci\
ty:1;paint-order\
:stroke fill mar\
kers\x22 />\x0a \
<circle\x0a \
transfor\
m=\x22rotate(-45)\x22\x0a\
cx=\
\x22-174.08969\x22\x0a \
cy=\x2221\
0.01071\x22\x0a \
r=\x2212.6560\
71\x22\x0a \
id=\x22path876\x22\x0a \
style\
=\x22opacity:1;fill\
:none;fill-opaci\
ty:0.49382719;st\
roke:#ffffff00;s\
troke-width:0.07\
399406;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
;paint-order:str\
oke fill markers\
\x22 />\x0a <\
path\x0a \
inkscape:trans\
form-center-x=\x22-\
3.1749999\x22\x0a \
sodipodi\
:nodetypes=\x22cccc\
\x22\x0a i\
nkscape:connecto\
r-curvature=\x220\x22\x0a\
id=\
\x22path904\x22\x0a \
d=\x22m 25.4\
,271.60002 -25.4\
0000040000004,25\
.4 v -50.8 z\x22\x0a \
style\
=\x22opacity:1;fill\
:none;fill-opaci\
ty:0.49382719;st\
roke:#ffffff00;s\
troke-width:0.07\
000433;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
;paint-order:str\
oke fill markers\
\x22 />\x0a <\
path\x0a \
inkscape:trans\
form-center-x=\x223\
.175\x22\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.07000433;s\
troke-linecap:ro\
und;stroke-linej\
oin:round;stroke\
-miterlimit:4;st\
roke-dasharray:n\
one;stroke-dasho\
ffset:0;stroke-o\
pacity:1;paint-o\
rder:stroke fill\
markers\x22\x0a \
d=\x22m 25.3\
99999,271.60002 \
25.4,-25.4 v 50.\
8 z\x22\x0a \
id=\x22path906\x22\x0a \
inks\
cape:connector-c\
urvature=\x220\x22\x0a \
sodipo\
di:nodetypes=\x22cc\
cc\x22 />\x0a \
<rect\x0a \
ry=\x225.051492\
2\x22\x0a \
y=\x22256.39301\x22\x0a \
x=\x222.\
5663135\x22\x0a \
height=\x2230\
.440479\x22\x0a \
width=\x2245.\
693634\x22\x0a \
id=\x22rect837\
\x22\x0a s\
tyle=\x22opacity:1;\
fill:none;fill-o\
pacity:0.4938271\
9;stroke:#ffffff\
00;stroke-width:\
0.0657438;stroke\
-linecap:round;s\
troke-linejoin:r\
ound;stroke-mite\
rlimit:4;stroke-\
dasharray:none;s\
troke-dashoffset\
:0;stroke-opacit\
y:1;paint-order:\
stroke fill mark\
ers\x22 />\x0a \
<rect\x0a \
style=\x22opac\
ity:1;fill:none;\
fill-opacity:0.4\
9382719;stroke:#\
ffffff00;stroke-\
width:0.0657438;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1;paint-\
order:stroke fil\
l markers\x22\x0a \
id=\x22rect\
831\x22\x0a \
width=\x2245.6935\
88\x22\x0a \
height=\x2230.4405\
1\x22\x0a \
x=\x22248.76645\x22\x0a \
y=\x22-4\
0.633385\x22\x0a \
ry=\x225.051\
497\x22\x0a \
transform=\x22rot\
ate(90)\x22 />\x0a \
</g>\x0a <\
/g>\x0a </g>\x0a \
<path\x0a st\
yle=\x22fill:#ff000\
0;stroke:#ff0000\
;stroke-width:0.\
52916664;stroke-\
linecap:butt;str\
oke-linejoin:mit\
er;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-opacity:1\x22\x0a \
d=\x22m 1.319\
2054,293.02755 2\
.6532555,2.65327\
\x22\x0a id=\x22pat\
h826\x22\x0a ink\
scape:connector-\
curvature=\x220\x22 />\
\x0a <path\x0a \
inkscape:conne\
ctor-curvature=\x22\
0\x22\x0a id=\x22pa\
th842\x22\x0a d=\
\x22m 3.9724723,293\
.02756 -2.653278\
2,2.65325\x22\x0a \
style=\x22fill:#f\
f0000;stroke:#ff\
0000;stroke-widt\
h:0.52916664;str\
oke-linecap:butt\
;stroke-linejoin\
:miter;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-opacity:\
1\x22 />\x0a <circl\
e\x0a style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:1;stroke:#ff000\
0;stroke-width:0\
.5292387;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1\x22\x0a id=\x22p\
ath829\x22\x0a c\
x=\x222.6458311\x22\x0a \
cy=\x22294.354\
19\x22\x0a r=\x222.\
381216\x22 />\x0a </g\
>\x0a</svg>\x0a\
\x00\x00\x225\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22che\
ckbox_checked.sv\
g\x22\x0a inkscape:e\
xport-filename=\x22\
/home/yeison/Dev\
elopment/piton/a\
rt/icon_lite.png\
\x22\x0a inkscape:ex\
port-xdpi=\x2296\x22\x0a \
inkscape:expor\
t-ydpi=\x2296\x22>\x0a <\
defs\x0a id=\x22de\
fs2\x22 />\x0a <sodip\
odi:namedview\x0a \
id=\x22base\x22\x0a \
pagecolor=\x22#ff\
ffff\x22\x0a borde\
rcolor=\x22#666666\x22\
\x0a borderopac\
ity=\x221.0\x22\x0a i\
nkscape:pageopac\
ity=\x220.0\x22\x0a i\
nkscape:pageshad\
ow=\x222\x22\x0a inks\
cape:zoom=\x2240.59\
4876\x22\x0a inksc\
ape:cx=\x229.477075\
1\x22\x0a inkscape\
:cy=\x227.4819362\x22\x0a\
inkscape:do\
cument-units=\x22px\
\x22\x0a inkscape:\
current-layer=\x22l\
ayer1\x22\x0a show\
grid=\x22true\x22\x0a \
inkscape:window\
-width=\x221920\x22\x0a \
inkscape:wind\
ow-height=\x221004\x22\
\x0a inkscape:w\
indow-x=\x220\x22\x0a \
inkscape:window\
-y=\x220\x22\x0a inks\
cape:window-maxi\
mized=\x221\x22\x0a i\
nkscape:showpage\
shadow=\x22false\x22\x0a \
units=\x22px\x22\x0a \
inkscape:pag\
echeckerboard=\x22f\
alse\x22\x0a showg\
uides=\x22false\x22\x0a \
inkscape:snap\
-bbox=\x22true\x22\x0a \
inkscape:bbox-\
paths=\x22true\x22\x0a \
inkscape:bbox-\
nodes=\x22true\x22\x0a \
inkscape:snap-\
bbox-edge-midpoi\
nts=\x22true\x22\x0a \
inkscape:snap-bb\
ox-midpoints=\x22tr\
ue\x22\x0a inkscap\
e:snap-nodes=\x22tr\
ue\x22\x0a inkscap\
e:object-paths=\x22\
true\x22\x0a inksc\
ape:snap-interse\
ction-paths=\x22tru\
e\x22\x0a inkscape\
:snap-smooth-nod\
es=\x22true\x22\x0a i\
nkscape:snap-mid\
points=\x22true\x22\x0a \
inkscape:snap\
-global=\x22true\x22\x0a \
fit-margin-t\
op=\x220\x22\x0a fit-\
margin-left=\x220\x22\x0a\
fit-margin-\
right=\x220\x22\x0a f\
it-margin-bottom\
=\x220\x22\x0a inksca\
pe:guide-bbox=\x22t\
rue\x22>\x0a <inksc\
ape:grid\x0a \
type=\x22xygrid\x22\x0a \
id=\x22grid974\
\x22\x0a empspac\
ing=\x228\x22\x0a s\
pacingx=\x220.26458\
332\x22\x0a spac\
ingy=\x220.26458332\
\x22\x0a dotted=\
\x22false\x22\x0a v\
isible=\x22true\x22\x0a \
enabled=\x22tr\
ue\x22\x0a snapv\
isiblegridlineso\
nly=\x22true\x22\x0a \
originx=\x220\x22\x0a \
originy=\x220\x22\
/>\x0a </sodipodi\
:namedview>\x0a <m\
etadata\x0a id=\
\x22metadata5\x22>\x0a \
<rdf:RDF>\x0a \
<cc:Work\x0a \
rdf:about=\x22\x22>\
\x0a <dc:for\
mat>image/svg+xm\
l</dc:format>\x0a \
<dc:type\x0a \
rdf:re\
source=\x22http://p\
url.org/dc/dcmit\
ype/StillImage\x22 \
/>\x0a <dc:t\
itle></dc:title>\
\x0a </cc:Work\
>\x0a </rdf:RDF>\
\x0a </metadata>\x0a \
<g\x0a inkscap\
e:label=\x22Layer 1\
\x22\x0a inkscape:\
groupmode=\x22layer\
\x22\x0a id=\x22layer\
1\x22\x0a transfor\
m=\x22translate(0,-\
291.70835)\x22>\x0a \
<g\x0a id=\x22g\
847\x22\x0a tran\
sform=\x22matrix(0.\
05207439,0,0,0.0\
5207453,-0.90125\
164,282.41203)\x22>\
\x0a <g\x0a \
id=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transf\
orm=\x22matrix(1.99\
86219,0,0,1.9986\
185,17.324484,-3\
13.52314)\x22>\x0a \
<path\x0a \
inkscap\
e:transform-cent\
er-y=\x223.175\x22\x0a \
style=\
\x22opacity:1;fill:\
none;fill-opacit\
y:0.49382719;str\
oke:#ffffff00;st\
roke-width:0.070\
00433;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
\x0a d=\
\x22M 25.399999,271\
.60002 -8.000000\
8e-7,246.20002 H\
50.799999 Z\x22\x0a \
id=\x22p\
ath883\x22\x0a \
inkscape:co\
nnector-curvatur\
e=\x220\x22\x0a \
sodipodi:node\
types=\x22cccc\x22 />\x0a\
<path\x0a\
sod\
ipodi:nodetypes=\
\x22cccc\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a \
id=\x22path880\x22\x0a \
d=\x22m\
25.399999,271.6\
0002 25.399999,2\
5.4 H 0 Z\x22\x0a \
inkscape\
:transform-cente\
r-y=\x22-3.1749995\x22\
\x0a st\
yle=\x22opacity:1;f\
ill:none;fill-op\
acity:0.49382719\
;stroke:#ffffff0\
0;stroke-width:0\
.07000433;stroke\
-linecap:round;s\
troke-linejoin:r\
ound;stroke-mite\
rlimit:4;stroke-\
dasharray:none;s\
troke-dashoffset\
:0;stroke-opacit\
y:1;paint-order:\
stroke fill mark\
ers\x22 />\x0a \
<rect\x0a \
ry=\x225.05346\
58\x22\x0a \
y=\x22253.84885\x22\x0a \
x=\x227\
.6487389\x22\x0a \
height=\x223\
5.528759\x22\x0a \
width=\x2235\
.528786\x22\x0a \
id=\x22rect87\
0\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.06184419;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<circle\x0a \
r=\x2225.3\
96828\x22\x0a \
cy=\x22271.6000\
1\x22\x0a \
cx=\x2225.4\x22\x0a \
id=\x22path8\
72\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.07635882;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1;paint-ord\
er:stroke fill m\
arkers\x22 />\x0a \
<circle\x0a \
transf\
orm=\x22rotate(-45)\
\x22\x0a c\
x=\x22-174.08969\x22\x0a \
cy=\x22\
210.01071\x22\x0a \
r=\x2212.65\
6071\x22\x0a \
id=\x22path876\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07399406;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<path\x0a \
inkscape:tra\
nsform-center-x=\
\x22-3.1749999\x22\x0a \
sodipo\
di:nodetypes=\x22cc\
cc\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22\x0a i\
d=\x22path904\x22\x0a \
d=\x22m 25\
.4,271.60002 -25\
.40000040000004,\
25.4 v -50.8 z\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07000433;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<path\x0a \
inkscape:tra\
nsform-center-x=\
\x223.175\x22\x0a \
style=\x22opac\
ity:1;fill:none;\
fill-opacity:0.4\
9382719;stroke:#\
ffffff00;stroke-\
width:0.07000433\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22\x0a \
d=\x22m 25\
.399999,271.6000\
2 25.4,-25.4 v 5\
0.8 z\x22\x0a \
id=\x22path906\x22\
\x0a in\
kscape:connector\
-curvature=\x220\x22\x0a \
sodi\
podi:nodetypes=\x22\
cccc\x22 />\x0a \
<rect\x0a \
ry=\x225.0514\
922\x22\x0a \
y=\x22256.39301\x22\x0a\
x=\x22\
2.5663135\x22\x0a \
height=\x22\
30.440479\x22\x0a \
width=\x224\
5.693634\x22\x0a \
id=\x22rect8\
37\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.0657438;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<rect\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.065743\
8;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22\x0a \
id=\x22re\
ct831\x22\x0a \
width=\x2245.69\
3588\x22\x0a \
height=\x2230.44\
051\x22\x0a \
x=\x22248.76645\x22\x0a\
y=\x22\
-40.633385\x22\x0a \
ry=\x225.0\
51497\x22\x0a \
transform=\x22r\
otate(90)\x22 />\x0a \
</g>\x0a \
</g>\x0a </g>\x0a \
<path\x0a \
style=\x22opacity:1\
;fill:#ffc107;fi\
ll-opacity:1;str\
oke:none;stroke-\
width:0.38596651\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-opacity:1\x22\x0a \
d=\x22m 50.2064\
21,401.67683 c 1\
10.217209,0.7127\
9 55.108609,0.35\
64 0,0 z\x22\x0a \
id=\x22rect997\x22\x0a \
inkscape:co\
nnector-curvatur\
e=\x220\x22 />\x0a <pa\
th\x0a style=\
\x22opacity:1;fill:\
#ff0000;fill-opa\
city:1;stroke:#f\
f0000;stroke-wid\
th:0;stroke-line\
cap:square;strok\
e-linejoin:miter\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
\x0a d=\x22m 1.3\
218831,292.50314\
c -0.292672,0 -\
0.52813314,0.235\
46 -0.52813314,0\
.52813 v 2.6479 \
c 0,0.29268 0.23\
546114,0.52814 0\
.52813314,0.5281\
4 h 2.6479 c 0.2\
92673,0 0.528133\
,-0.23546 0.5281\
33,-0.52814 v -2\
.6479 c 0,-0.292\
67 -0.23546,-0.5\
2813 -0.528133,-\
0.52813 z m 0.00\
1,0.26458 h 2.64\
5833 c 0.146573,\
0 0.264583,0.118\
01 0.264583,0.26\
459 v 2.64583 c \
0,0.14657 -0.118\
01,0.26458 -0.26\
4583,0.26458 h -\
2.645799 c -0.14\
6574,0 -0.264584\
,-0.11801 -0.264\
584,-0.26458 v -\
2.64583 c 0,-0.1\
4658 0.11801,-0.\
26459 0.264584,-\
0.26459 z\x22\x0a \
id=\x22rect1954\x22\x0a\
inkscape:\
connector-curvat\
ure=\x220\x22 />\x0a <\
rect\x0a styl\
e=\x22opacity:1;fil\
l:#ff0000;fill-o\
pacity:1;stroke:\
#ff0000;stroke-w\
idth:0;stroke-li\
necap:square;str\
oke-linejoin:mit\
er;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22\x0a id=\x22re\
ct2118\x22\x0a w\
idth=\x222.1166663\x22\
\x0a height=\x22\
2.1166787\x22\x0a \
x=\x221.5874999\x22\x0a\
y=\x22293.29\
584\x22\x0a ry=\x22\
0\x22 />\x0a </g>\x0a</s\
vg>\x0a\
\x00\x00\x1dw\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22che\
ckbox_checked.sv\
g\x22\x0a inkscape:e\
xport-filename=\x22\
/home/yeison/Dev\
elopment/piton/a\
rt/icon_lite.png\
\x22\x0a inkscape:ex\
port-xdpi=\x2296\x22\x0a \
inkscape:expor\
t-ydpi=\x2296\x22>\x0a <\
defs\x0a id=\x22de\
fs2\x22 />\x0a <sodip\
odi:namedview\x0a \
id=\x22base\x22\x0a \
pagecolor=\x22#ff\
ffff\x22\x0a borde\
rcolor=\x22#666666\x22\
\x0a borderopac\
ity=\x221.0\x22\x0a i\
nkscape:pageopac\
ity=\x220.0\x22\x0a i\
nkscape:pageshad\
ow=\x222\x22\x0a inks\
cape:zoom=\x2217.53\
8085\x22\x0a inksc\
ape:cx=\x226.604083\
6\x22\x0a inkscape\
:cy=\x229.0271347\x22\x0a\
inkscape:do\
cument-units=\x22px\
\x22\x0a inkscape:\
current-layer=\x22l\
ayer1\x22\x0a show\
grid=\x22true\x22\x0a \
inkscape:window\
-width=\x221920\x22\x0a \
inkscape:wind\
ow-height=\x221015\x22\
\x0a inkscape:w\
indow-x=\x220\x22\x0a \
inkscape:window\
-y=\x220\x22\x0a inks\
cape:window-maxi\
mized=\x221\x22\x0a i\
nkscape:showpage\
shadow=\x22false\x22\x0a \
units=\x22px\x22\x0a \
inkscape:pag\
echeckerboard=\x22f\
alse\x22\x0a showg\
uides=\x22true\x22\x0a \
inkscape:snap-\
bbox=\x22true\x22\x0a \
inkscape:bbox-p\
aths=\x22true\x22\x0a \
inkscape:bbox-n\
odes=\x22true\x22\x0a \
inkscape:snap-b\
box-edge-midpoin\
ts=\x22true\x22\x0a i\
nkscape:snap-bbo\
x-midpoints=\x22tru\
e\x22\x0a inkscape\
:snap-nodes=\x22tru\
e\x22\x0a inkscape\
:object-paths=\x22t\
rue\x22\x0a inksca\
pe:snap-intersec\
tion-paths=\x22true\
\x22\x0a inkscape:\
snap-smooth-node\
s=\x22true\x22\x0a in\
kscape:snap-midp\
oints=\x22true\x22\x0a \
inkscape:snap-\
global=\x22true\x22\x0a \
fit-margin-to\
p=\x220\x22\x0a fit-m\
argin-left=\x220\x22\x0a \
fit-margin-r\
ight=\x220\x22\x0a fi\
t-margin-bottom=\
\x220\x22\x0a inkscap\
e:guide-bbox=\x22tr\
ue\x22>\x0a <inksca\
pe:grid\x0a t\
ype=\x22xygrid\x22\x0a \
id=\x22grid974\x22\
\x0a empspaci\
ng=\x228\x22\x0a sp\
acingx=\x220.264583\
32\x22\x0a spaci\
ngy=\x220.26458332\x22\
\x0a dotted=\x22\
false\x22\x0a vi\
sible=\x22true\x22\x0a \
enabled=\x22tru\
e\x22\x0a snapvi\
siblegridlineson\
ly=\x22true\x22\x0a \
originx=\x220\x22\x0a \
originy=\x220\x22 \
/>\x0a </sodipodi:\
namedview>\x0a <me\
tadata\x0a id=\x22\
metadata5\x22>\x0a \
<rdf:RDF>\x0a \
<cc:Work\x0a \
rdf:about=\x22\x22>\x0a\
<dc:form\
at>image/svg+xml\
</dc:format>\x0a \
<dc:type\x0a \
rdf:res\
ource=\x22http://pu\
rl.org/dc/dcmity\
pe/StillImage\x22 /\
>\x0a <dc:ti\
tle></dc:title>\x0a\
</cc:Work>\
\x0a </rdf:RDF>\x0a\
</metadata>\x0a \
<g\x0a inkscape\
:label=\x22Layer 1\x22\
\x0a inkscape:g\
roupmode=\x22layer\x22\
\x0a id=\x22layer1\
\x22\x0a transform\
=\x22translate(0,-2\
91.70835)\x22>\x0a \
<g\x0a id=\x22g8\
47\x22\x0a trans\
form=\x22matrix(0.0\
5207439,0,0,0.05\
207453,-0.901251\
64,282.41203)\x22>\x0a\
<g\x0a \
id=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transfo\
rm=\x22matrix(1.998\
6219,0,0,1.99861\
85,17.324484,-31\
3.52314)\x22>\x0a \
<path\x0a \
inkscape\
:transform-cente\
r-y=\x223.175\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0700\
0433;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22\x0a\
d=\x22\
M 25.399999,271.\
60002 -8.0000008\
e-7,246.20002 H \
50.799999 Z\x22\x0a \
id=\x22pa\
th883\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a \
sodipodi:nodet\
ypes=\x22cccc\x22 />\x0a \
<path\x0a \
sodi\
podi:nodetypes=\x22\
cccc\x22\x0a \
inkscape:conn\
ector-curvature=\
\x220\x22\x0a \
id=\x22path880\x22\x0a \
d=\x22m \
25.399999,271.60\
002 25.399999,25\
.4 H 0 Z\x22\x0a \
inkscape:\
transform-center\
-y=\x22-3.1749995\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07000433;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<rect\x0a \
ry=\x225.053465\
8\x22\x0a \
y=\x22253.84885\x22\x0a \
x=\x227.\
6487389\x22\x0a \
height=\x2235\
.528759\x22\x0a \
width=\x2235.\
528786\x22\x0a \
id=\x22rect870\
\x22\x0a s\
tyle=\x22opacity:1;\
fill:none;fill-o\
pacity:0.4938271\
9;stroke:#ffffff\
00;stroke-width:\
0.06184419;strok\
e-linecap:round;\
stroke-linejoin:\
round;stroke-mit\
erlimit:4;stroke\
-dasharray:none;\
stroke-dashoffse\
t:0;stroke-opaci\
ty:1;paint-order\
:stroke fill mar\
kers\x22 />\x0a \
<circle\x0a \
r=\x2225.39\
6828\x22\x0a \
cy=\x22271.60001\
\x22\x0a c\
x=\x2225.4\x22\x0a \
id=\x22path87\
2\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.07635882;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<circle\x0a \
transfo\
rm=\x22rotate(-45)\x22\
\x0a cx\
=\x22-174.08969\x22\x0a \
cy=\x222\
10.01071\x22\x0a \
r=\x2212.656\
071\x22\x0a \
id=\x22path876\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
7399406;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<path\x0a \
inkscape:tran\
sform-center-x=\x22\
-3.1749999\x22\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22\x0a \
inkscape:connect\
or-curvature=\x220\x22\
\x0a id\
=\x22path904\x22\x0a \
d=\x22m 25.\
4,271.60002 -25.\
40000040000004,2\
5.4 v -50.8 z\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
7000433;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<path\x0a \
inkscape:tran\
sform-center-x=\x22\
3.175\x22\x0a \
style=\x22opaci\
ty:1;fill:none;f\
ill-opacity:0.49\
382719;stroke:#f\
fffff00;stroke-w\
idth:0.07000433;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1;paint-\
order:stroke fil\
l markers\x22\x0a \
d=\x22m 25.\
399999,271.60002\
25.4,-25.4 v 50\
.8 z\x22\x0a \
id=\x22path906\x22\x0a\
ink\
scape:connector-\
curvature=\x220\x22\x0a \
sodip\
odi:nodetypes=\x22c\
ccc\x22 />\x0a \
<rect\x0a \
ry=\x225.05149\
22\x22\x0a \
y=\x22256.39301\x22\x0a \
x=\x222\
.5663135\x22\x0a \
height=\x223\
0.440479\x22\x0a \
width=\x2245\
.693634\x22\x0a \
id=\x22rect83\
7\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.0657438;strok\
e-linecap:round;\
stroke-linejoin:\
round;stroke-mit\
erlimit:4;stroke\
-dasharray:none;\
stroke-dashoffse\
t:0;stroke-opaci\
ty:1;paint-order\
:stroke fill mar\
kers\x22 />\x0a \
<rect\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0657438\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22\x0a \
id=\x22rec\
t831\x22\x0a \
width=\x2245.693\
588\x22\x0a \
height=\x2230.440\
51\x22\x0a \
x=\x22248.76645\x22\x0a \
y=\x22-\
40.633385\x22\x0a \
ry=\x225.05\
1497\x22\x0a \
transform=\x22ro\
tate(90)\x22 />\x0a \
</g>\x0a \
</g>\x0a </g>\x0a \
<path\x0a s\
tyle=\x22opacity:1;\
fill:#ffc107;fil\
l-opacity:1;stro\
ke:none;stroke-w\
idth:0.38596651;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-opacity:1\x22\x0a \
d=\x22m 50.20642\
1,401.67683 c 11\
0.217209,0.71279\
55.108609,0.356\
4 0,0 z\x22\x0a \
id=\x22rect997\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22 />\x0a </g>\x0a<\
/svg>\x0a\
\x00\x00!\xef\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22che\
ckbox_unchecked.\
svg\x22\x0a inkscape\
:export-filename\
=\x22/home/yeison/D\
evelopment/piton\
/art/icon_lite.p\
ng\x22\x0a inkscape:\
export-xdpi=\x2296\x22\
\x0a inkscape:exp\
ort-ydpi=\x2296\x22>\x0a \
<defs\x0a id=\x22\
defs2\x22 />\x0a <sod\
ipodi:namedview\x0a\
id=\x22base\x22\x0a \
pagecolor=\x22#\
ffffff\x22\x0a bor\
dercolor=\x22#66666\
6\x22\x0a borderop\
acity=\x221.0\x22\x0a \
inkscape:pageop\
acity=\x220.0\x22\x0a \
inkscape:pagesh\
adow=\x222\x22\x0a in\
kscape:zoom=\x2235.\
076169\x22\x0a ink\
scape:cx=\x222.2827\
573\x22\x0a inksca\
pe:cy=\x228.126074\x22\
\x0a inkscape:d\
ocument-units=\x22p\
x\x22\x0a inkscape\
:current-layer=\x22\
layer1\x22\x0a sho\
wgrid=\x22true\x22\x0a \
inkscape:windo\
w-width=\x221920\x22\x0a \
inkscape:win\
dow-height=\x221004\
\x22\x0a inkscape:\
window-x=\x220\x22\x0a \
inkscape:windo\
w-y=\x220\x22\x0a ink\
scape:window-max\
imized=\x221\x22\x0a \
inkscape:showpag\
eshadow=\x22false\x22\x0a\
units=\x22px\x22\x0a\
inkscape:pa\
gecheckerboard=\x22\
false\x22\x0a show\
guides=\x22true\x22\x0a \
inkscape:snap\
-bbox=\x22true\x22\x0a \
inkscape:bbox-\
paths=\x22true\x22\x0a \
inkscape:bbox-\
nodes=\x22true\x22\x0a \
inkscape:snap-\
bbox-edge-midpoi\
nts=\x22true\x22\x0a \
inkscape:snap-bb\
ox-midpoints=\x22tr\
ue\x22\x0a inkscap\
e:snap-nodes=\x22tr\
ue\x22\x0a inkscap\
e:object-paths=\x22\
true\x22\x0a inksc\
ape:snap-interse\
ction-paths=\x22tru\
e\x22\x0a inkscape\
:snap-smooth-nod\
es=\x22true\x22\x0a i\
nkscape:snap-mid\
points=\x22true\x22\x0a \
inkscape:snap\
-global=\x22true\x22\x0a \
fit-margin-t\
op=\x220\x22\x0a fit-\
margin-left=\x220\x22\x0a\
fit-margin-\
right=\x220\x22\x0a f\
it-margin-bottom\
=\x220\x22\x0a inksca\
pe:guide-bbox=\x22t\
rue\x22>\x0a <inksc\
ape:grid\x0a \
type=\x22xygrid\x22\x0a \
id=\x22grid974\
\x22\x0a empspac\
ing=\x228\x22\x0a s\
pacingx=\x220.26458\
332\x22\x0a spac\
ingy=\x220.26458332\
\x22\x0a dotted=\
\x22false\x22\x0a v\
isible=\x22true\x22\x0a \
enabled=\x22tr\
ue\x22\x0a snapv\
isiblegridlineso\
nly=\x22true\x22\x0a \
originx=\x220\x22\x0a \
originy=\x220\x22\
/>\x0a </sodipodi\
:namedview>\x0a <m\
etadata\x0a id=\
\x22metadata5\x22>\x0a \
<rdf:RDF>\x0a \
<cc:Work\x0a \
rdf:about=\x22\x22>\
\x0a <dc:for\
mat>image/svg+xm\
l</dc:format>\x0a \
<dc:type\x0a \
rdf:re\
source=\x22http://p\
url.org/dc/dcmit\
ype/StillImage\x22 \
/>\x0a <dc:t\
itle></dc:title>\
\x0a </cc:Work\
>\x0a </rdf:RDF>\
\x0a </metadata>\x0a \
<g\x0a inkscap\
e:label=\x22Layer 1\
\x22\x0a inkscape:\
groupmode=\x22layer\
\x22\x0a id=\x22layer\
1\x22\x0a transfor\
m=\x22translate(0,-\
291.70835)\x22>\x0a \
<g\x0a id=\x22g\
847\x22\x0a tran\
sform=\x22matrix(0.\
05207439,0,0,0.0\
5207453,-0.90125\
164,282.41203)\x22>\
\x0a <g\x0a \
id=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transf\
orm=\x22matrix(1.99\
86219,0,0,1.9986\
185,17.324484,-3\
13.52314)\x22>\x0a \
<path\x0a \
inkscap\
e:transform-cent\
er-y=\x223.175\x22\x0a \
style=\
\x22opacity:1;fill:\
none;fill-opacit\
y:0.49382719;str\
oke:#ffffff00;st\
roke-width:0.070\
00433;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
\x0a d=\
\x22M 25.399999,271\
.60002 -8.000000\
8e-7,246.20002 H\
50.799999 Z\x22\x0a \
id=\x22p\
ath883\x22\x0a \
inkscape:co\
nnector-curvatur\
e=\x220\x22\x0a \
sodipodi:node\
types=\x22cccc\x22 />\x0a\
<path\x0a\
sod\
ipodi:nodetypes=\
\x22cccc\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a \
id=\x22path880\x22\x0a \
d=\x22m\
25.399999,271.6\
0002 25.399999,2\
5.4 H 0 Z\x22\x0a \
inkscape\
:transform-cente\
r-y=\x22-3.1749995\x22\
\x0a st\
yle=\x22opacity:1;f\
ill:none;fill-op\
acity:0.49382719\
;stroke:#ffffff0\
0;stroke-width:0\
.07000433;stroke\
-linecap:round;s\
troke-linejoin:r\
ound;stroke-mite\
rlimit:4;stroke-\
dasharray:none;s\
troke-dashoffset\
:0;stroke-opacit\
y:1;paint-order:\
stroke fill mark\
ers\x22 />\x0a \
<rect\x0a \
ry=\x225.05346\
58\x22\x0a \
y=\x22253.84885\x22\x0a \
x=\x227\
.6487389\x22\x0a \
height=\x223\
5.528759\x22\x0a \
width=\x2235\
.528786\x22\x0a \
id=\x22rect87\
0\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.06184419;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<circle\x0a \
r=\x2225.3\
96828\x22\x0a \
cy=\x22271.6000\
1\x22\x0a \
cx=\x2225.4\x22\x0a \
id=\x22path8\
72\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.07635882;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1;paint-ord\
er:stroke fill m\
arkers\x22 />\x0a \
<circle\x0a \
transf\
orm=\x22rotate(-45)\
\x22\x0a c\
x=\x22-174.08969\x22\x0a \
cy=\x22\
210.01071\x22\x0a \
r=\x2212.65\
6071\x22\x0a \
id=\x22path876\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07399406;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<path\x0a \
inkscape:tra\
nsform-center-x=\
\x22-3.1749999\x22\x0a \
sodipo\
di:nodetypes=\x22cc\
cc\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22\x0a i\
d=\x22path904\x22\x0a \
d=\x22m 25\
.4,271.60002 -25\
.40000040000004,\
25.4 v -50.8 z\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07000433;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<path\x0a \
inkscape:tra\
nsform-center-x=\
\x223.175\x22\x0a \
style=\x22opac\
ity:1;fill:none;\
fill-opacity:0.4\
9382719;stroke:#\
ffffff00;stroke-\
width:0.07000433\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22\x0a \
d=\x22m 25\
.399999,271.6000\
2 25.4,-25.4 v 5\
0.8 z\x22\x0a \
id=\x22path906\x22\
\x0a in\
kscape:connector\
-curvature=\x220\x22\x0a \
sodi\
podi:nodetypes=\x22\
cccc\x22 />\x0a \
<rect\x0a \
ry=\x225.0514\
922\x22\x0a \
y=\x22256.39301\x22\x0a\
x=\x22\
2.5663135\x22\x0a \
height=\x22\
30.440479\x22\x0a \
width=\x224\
5.693634\x22\x0a \
id=\x22rect8\
37\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.0657438;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<rect\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.065743\
8;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22\x0a \
id=\x22re\
ct831\x22\x0a \
width=\x2245.69\
3588\x22\x0a \
height=\x2230.44\
051\x22\x0a \
x=\x22248.76645\x22\x0a\
y=\x22\
-40.633385\x22\x0a \
ry=\x225.0\
51497\x22\x0a \
transform=\x22r\
otate(90)\x22 />\x0a \
</g>\x0a \
</g>\x0a </g>\x0a \
<path\x0a \
style=\x22opacity:1\
;fill:#ffc107;fi\
ll-opacity:1;str\
oke:none;stroke-\
width:0.38596651\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-opacity:1\x22\x0a \
d=\x22m 50.2064\
21,401.67683 c 1\
10.217209,0.7127\
9 55.108609,0.35\
64 0,0 z\x22\x0a \
id=\x22rect997\x22\x0a \
inkscape:co\
nnector-curvatur\
e=\x220\x22 />\x0a <re\
ct\x0a style=\
\x22opacity:1;fill:\
#ffffff00;fill-o\
pacity:0;stroke:\
#ff0000;stroke-w\
idth:0;stroke-li\
necap:square;str\
oke-linejoin:mit\
er;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22\x0a id=\x22re\
ct5674\x22\x0a w\
idth=\x220.79374999\
\x22\x0a height=\
\x220.79374999\x22\x0a \
x=\x22-1.322916\
6\x22\x0a y=\x22294\
.61877\x22\x0a r\
y=\x220.39687499\x22 /\
>\x0a <path\x0a \
style=\x22opacit\
y:1;fill:#ff0000\
;fill-opacity:0.\
35294119;stroke:\
#ff0000;stroke-w\
idth:0;stroke-li\
necap:square;str\
oke-linejoin:mit\
er;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22\x0a d=\x22M 4\
.9960938,3 C 3.8\
899304,3 3,3.889\
9304 3,4.9960938\
V 15.003906 C 3\
,16.11007 3.8899\
304,17 4.9960938\
,17 H 15.003906 \
C 16.11007,17 17\
,16.11007 17,15.\
003906 V 4.99609\
38 C 17,3.889930\
4 16.11007,3 15.\
003906,3 Z M 5,4\
h 10 c 0.553979\
,0 1,0.4460206 1\
,1 v 10 c 0,0.55\
3979 -0.446021,1\
-1,1 H 5 C 4.44\
60206,16 4,15.55\
3979 4,15 V 5 C \
4,4.4460206 4.44\
60206,4 5,4 Z\x22\x0a \
transform=\
\x22matrix(0.264583\
32,0,0,0.2645833\
2,0,291.70835)\x22\x0a\
id=\x22rect1\
954\x22\x0a inks\
cape:connector-c\
urvature=\x220\x22 />\x0a\
</g>\x0a</svg>\x0a\
\x00\x00\x1f\xef\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22sli\
der.svg\x22\x0a inks\
cape:export-file\
name=\x22/home/yeis\
on/Development/p\
iton/art/icon_li\
te.png\x22\x0a inksc\
ape:export-xdpi=\
\x2296\x22\x0a inkscape\
:export-ydpi=\x2296\
\x22>\x0a <defs\x0a \
id=\x22defs2\x22 />\x0a \
<sodipodi:namedv\
iew\x0a id=\x22bas\
e\x22\x0a pagecolo\
r=\x22#ffffff\x22\x0a \
bordercolor=\x22#6\
66666\x22\x0a bord\
eropacity=\x221.0\x22\x0a\
inkscape:pa\
geopacity=\x220.0\x22\x0a\
inkscape:pa\
geshadow=\x222\x22\x0a \
inkscape:zoom=\
\x2228.704913\x22\x0a \
inkscape:cx=\x228.\
5671075\x22\x0a in\
kscape:cy=\x228.802\
1939\x22\x0a inksc\
ape:document-uni\
ts=\x22px\x22\x0a ink\
scape:current-la\
yer=\x22layer1\x22\x0a \
showgrid=\x22true\
\x22\x0a inkscape:\
window-width=\x2219\
20\x22\x0a inkscap\
e:window-height=\
\x221015\x22\x0a inks\
cape:window-x=\x220\
\x22\x0a inkscape:\
window-y=\x220\x22\x0a \
inkscape:windo\
w-maximized=\x221\x22\x0a\
inkscape:sh\
owpageshadow=\x22fa\
lse\x22\x0a units=\
\x22px\x22\x0a inksca\
pe:pagecheckerbo\
ard=\x22false\x22\x0a \
showguides=\x22tru\
e\x22\x0a inkscape\
:snap-bbox=\x22true\
\x22\x0a inkscape:\
bbox-paths=\x22true\
\x22\x0a inkscape:\
bbox-nodes=\x22true\
\x22\x0a inkscape:\
snap-bbox-edge-m\
idpoints=\x22true\x22\x0a\
inkscape:sn\
ap-bbox-midpoint\
s=\x22true\x22\x0a in\
kscape:snap-node\
s=\x22true\x22\x0a in\
kscape:object-pa\
ths=\x22true\x22\x0a \
inkscape:snap-in\
tersection-paths\
=\x22true\x22\x0a ink\
scape:snap-smoot\
h-nodes=\x22true\x22\x0a \
inkscape:sna\
p-midpoints=\x22tru\
e\x22\x0a inkscape\
:snap-global=\x22tr\
ue\x22\x0a fit-mar\
gin-top=\x220\x22\x0a \
fit-margin-left\
=\x220\x22\x0a fit-ma\
rgin-right=\x220\x22\x0a \
fit-margin-b\
ottom=\x220\x22\x0a i\
nkscape:guide-bb\
ox=\x22true\x22>\x0a <\
inkscape:grid\x0a \
type=\x22xygri\
d\x22\x0a id=\x22gr\
id974\x22\x0a em\
pspacing=\x228\x22\x0a \
spacingx=\x220.\
26458332\x22\x0a \
spacingy=\x220.264\
58332\x22\x0a do\
tted=\x22false\x22\x0a \
visible=\x22tru\
e\x22\x0a enable\
d=\x22true\x22\x0a \
snapvisiblegridl\
inesonly=\x22true\x22\x0a\
originx=\x22\
0\x22\x0a origin\
y=\x220\x22 />\x0a </sod\
ipodi:namedview>\
\x0a <metadata\x0a \
id=\x22metadata5\x22\
>\x0a <rdf:RDF>\x0a\
<cc:Work\x0a \
rdf:abou\
t=\x22\x22>\x0a <d\
c:format>image/s\
vg+xml</dc:forma\
t>\x0a <dc:t\
ype\x0a r\
df:resource=\x22htt\
p://purl.org/dc/\
dcmitype/StillIm\
age\x22 />\x0a \
<dc:title />\x0a \
</cc:Work>\x0a \
</rdf:RDF>\x0a <\
/metadata>\x0a <g\x0a\
inkscape:la\
bel=\x22Layer 1\x22\x0a \
inkscape:grou\
pmode=\x22layer\x22\x0a \
id=\x22layer1\x22\x0a \
transform=\x22t\
ranslate(0,-291.\
70835)\x22>\x0a <g\x0a\
id=\x22g847\x22\
\x0a transfor\
m=\x22matrix(0.0520\
7439,0,0,0.05207\
453,-0.90125164,\
282.41203)\x22>\x0a \
<g\x0a i\
d=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transform=\
\x22matrix(1.998621\
9,0,0,1.9986185,\
17.324484,-313.5\
2314)\x22>\x0a \
<path\x0a \
inkscape:tr\
ansform-center-y\
=\x223.175\x22\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0700043\
3;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22\x0a \
d=\x22M 2\
5.399999,271.600\
02 -8.0000008e-7\
,246.20002 H 50.\
799999 Z\x22\x0a \
id=\x22path8\
83\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22\x0a s\
odipodi:nodetype\
s=\x22cccc\x22 />\x0a \
<path\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22\x0a \
inkscape:connect\
or-curvature=\x220\x22\
\x0a id\
=\x22path880\x22\x0a \
d=\x22m 25.\
399999,271.60002\
25.399999,25.4 \
H 0 Z\x22\x0a \
inkscape:tra\
nsform-center-y=\
\x22-3.1749995\x22\x0a \
style=\
\x22opacity:1;fill:\
none;fill-opacit\
y:0.49382719;str\
oke:#ffffff00;st\
roke-width:0.070\
00433;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
/>\x0a <r\
ect\x0a \
ry=\x225.0534658\x22\x0a\
y=\x22\
253.84885\x22\x0a \
x=\x227.648\
7389\x22\x0a \
height=\x2235.52\
8759\x22\x0a \
width=\x2235.528\
786\x22\x0a \
id=\x22rect870\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
6184419;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<circle\x0a \
r=\x2225.39682\
8\x22\x0a \
cy=\x22271.60001\x22\x0a \
cx=\x22\
25.4\x22\x0a \
id=\x22path872\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07635882;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<circle\x0a \
transform=\
\x22rotate(-45)\x22\x0a \
cx=\x22-\
174.08969\x22\x0a \
cy=\x22210.\
01071\x22\x0a \
r=\x2212.656071\
\x22\x0a i\
d=\x22path876\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0739\
9406;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22 \
/>\x0a <pa\
th\x0a \
inkscape:transfo\
rm-center-x=\x22-3.\
1749999\x22\x0a \
sodipodi:n\
odetypes=\x22cccc\x22\x0a\
ink\
scape:connector-\
curvature=\x220\x22\x0a \
id=\x22p\
ath904\x22\x0a \
d=\x22m 25.4,2\
71.60002 -25.400\
00040000004,25.4\
v -50.8 z\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0700\
0433;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22 \
/>\x0a <pa\
th\x0a \
inkscape:transfo\
rm-center-x=\x223.1\
75\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.07000433;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1;paint-ord\
er:stroke fill m\
arkers\x22\x0a \
d=\x22m 25.399\
999,271.60002 25\
.4,-25.4 v 50.8 \
z\x22\x0a \
id=\x22path906\x22\x0a \
inksca\
pe:connector-cur\
vature=\x220\x22\x0a \
sodipodi\
:nodetypes=\x22cccc\
\x22 />\x0a <\
rect\x0a \
ry=\x225.0514922\x22\
\x0a y=\
\x22256.39301\x22\x0a \
x=\x222.56\
63135\x22\x0a \
height=\x2230.4\
40479\x22\x0a \
width=\x2245.69\
3634\x22\x0a \
id=\x22rect837\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
0657438;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<rect\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.0657438;st\
roke-linecap:rou\
nd;stroke-linejo\
in:round;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-dashof\
fset:0;stroke-op\
acity:1;paint-or\
der:stroke fill \
markers\x22\x0a \
id=\x22rect83\
1\x22\x0a \
width=\x2245.693588\
\x22\x0a h\
eight=\x2230.44051\x22\
\x0a x=\
\x22248.76645\x22\x0a \
y=\x22-40.\
633385\x22\x0a \
ry=\x225.05149\
7\x22\x0a \
transform=\x22rotat\
e(90)\x22 />\x0a \
</g>\x0a </g\
>\x0a </g>\x0a <\
path\x0a styl\
e=\x22opacity:1;fil\
l:#ffc107;fill-o\
pacity:1;stroke:\
none;stroke-widt\
h:0.38596651;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-op\
acity:1\x22\x0a \
d=\x22m 50.206421,4\
01.67683 c 110.2\
17209,0.71279 55\
.108609,0.3564 0\
,0 z\x22\x0a id=\
\x22rect997\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22 />\x0a <path\x0a \
style=\x22opa\
city:1;fill:#000\
0ff;fill-opacity\
:1;stroke:none;s\
troke-width:3;st\
roke-linecap:rou\
nd;stroke-linejo\
in:round;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-dashof\
fset:0;stroke-op\
acity:1;paint-or\
der:stroke fill \
markers\x22\x0a \
d=\x22M 10,0.013671\
88 C 4.4846749,0\
.01360343 0.0136\
0343,4.4846749 0\
.01367188,10 0.0\
136035,15.515325\
4.484675,19.986\
396 10,19.986328\
15.515325,19.98\
6396 19.986396,1\
5.515325 19.9863\
28,10 19.986396,\
4.484675 15.5153\
25,0.0136035 10,\
0.01367188 Z\x22\x0a \
transform=\x22\
matrix(0.2645833\
2,0,0,0.26458332\
,0,291.70835)\x22\x0a \
id=\x22path82\
6\x22\x0a inksca\
pe:connector-cur\
vature=\x220\x22\x0a \
sodipodi:nodet\
ypes=\x22ccccc\x22 />\x0a\
</g>\x0a</svg>\x0a\
\x00\x00\x1e\xb7\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22upa\
rrow.svg\x22\x0a ink\
scape:export-fil\
ename=\x22/home/yei\
son/Development/\
piton/art/icon_l\
ite.png\x22\x0a inks\
cape:export-xdpi\
=\x2296\x22\x0a inkscap\
e:export-ydpi=\x229\
6\x22>\x0a <defs\x0a \
id=\x22defs2\x22 />\x0a \
<sodipodi:named\
view\x0a id=\x22ba\
se\x22\x0a pagecol\
or=\x22#ffffff\x22\x0a \
bordercolor=\x22#\
666666\x22\x0a bor\
deropacity=\x221.0\x22\
\x0a inkscape:p\
ageopacity=\x220.0\x22\
\x0a inkscape:p\
ageshadow=\x222\x22\x0a \
inkscape:zoom\
=\x2228.704913\x22\x0a \
inkscape:cx=\x224\
.6862968\x22\x0a i\
nkscape:cy=\x225.00\
26685\x22\x0a inks\
cape:document-un\
its=\x22px\x22\x0a in\
kscape:current-l\
ayer=\x22layer1\x22\x0a \
showgrid=\x22tru\
e\x22\x0a inkscape\
:window-width=\x221\
920\x22\x0a inksca\
pe:window-height\
=\x221004\x22\x0a ink\
scape:window-x=\x22\
0\x22\x0a inkscape\
:window-y=\x220\x22\x0a \
inkscape:wind\
ow-maximized=\x221\x22\
\x0a inkscape:s\
howpageshadow=\x22f\
alse\x22\x0a units\
=\x22px\x22\x0a inksc\
ape:pagecheckerb\
oard=\x22false\x22\x0a \
showguides=\x22tr\
ue\x22\x0a inkscap\
e:snap-bbox=\x22tru\
e\x22\x0a inkscape\
:bbox-paths=\x22tru\
e\x22\x0a inkscape\
:bbox-nodes=\x22tru\
e\x22\x0a inkscape\
:snap-bbox-edge-\
midpoints=\x22true\x22\
\x0a inkscape:s\
nap-bbox-midpoin\
ts=\x22true\x22\x0a i\
nkscape:snap-nod\
es=\x22true\x22\x0a i\
nkscape:object-p\
aths=\x22true\x22\x0a \
inkscape:snap-i\
ntersection-path\
s=\x22true\x22\x0a in\
kscape:snap-smoo\
th-nodes=\x22true\x22\x0a\
inkscape:sn\
ap-midpoints=\x22tr\
ue\x22\x0a inkscap\
e:snap-global=\x22t\
rue\x22\x0a fit-ma\
rgin-top=\x220\x22\x0a \
fit-margin-lef\
t=\x220\x22\x0a fit-m\
argin-right=\x220\x22\x0a\
fit-margin-\
bottom=\x220\x22\x0a \
inkscape:guide-b\
box=\x22true\x22>\x0a \
<inkscape:grid\x0a \
type=\x22xygr\
id\x22\x0a id=\x22g\
rid974\x22\x0a e\
mpspacing=\x228\x22\x0a \
spacingx=\x220\
.26458332\x22\x0a \
spacingy=\x220.26\
458332\x22\x0a d\
otted=\x22false\x22\x0a \
visible=\x22tr\
ue\x22\x0a enabl\
ed=\x22true\x22\x0a \
snapvisiblegrid\
linesonly=\x22true\x22\
\x0a originx=\
\x220\x22\x0a origi\
ny=\x220\x22 />\x0a </so\
dipodi:namedview\
>\x0a <metadata\x0a \
id=\x22metadata5\
\x22>\x0a <rdf:RDF>\
\x0a <cc:Work\x0a\
rdf:abo\
ut=\x22\x22>\x0a <\
dc:format>image/\
svg+xml</dc:form\
at>\x0a <dc:\
type\x0a \
rdf:resource=\x22ht\
tp://purl.org/dc\
/dcmitype/StillI\
mage\x22 />\x0a \
<dc:title />\x0a \
</cc:Work>\x0a \
</rdf:RDF>\x0a \
</metadata>\x0a <g\
\x0a inkscape:l\
abel=\x22Layer 1\x22\x0a \
inkscape:gro\
upmode=\x22layer\x22\x0a \
id=\x22layer1\x22\x0a\
transform=\x22\
translate(0,-291\
.70835)\x22>\x0a <g\
\x0a id=\x22g847\
\x22\x0a transfo\
rm=\x22matrix(0.052\
07439,0,0,0.0520\
7453,-0.90125164\
,282.41203)\x22>\x0a \
<g\x0a \
id=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transform\
=\x22matrix(1.99862\
19,0,0,1.9986185\
,17.324484,-313.\
52314)\x22>\x0a \
<path\x0a \
inkscape:t\
ransform-center-\
y=\x223.175\x22\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.070004\
33;stroke-lineca\
p:round;stroke-l\
inejoin:round;st\
roke-miterlimit:\
4;stroke-dasharr\
ay:none;stroke-d\
ashoffset:0;stro\
ke-opacity:1;pai\
nt-order:stroke \
fill markers\x22\x0a \
d=\x22M \
25.399999,271.60\
002 -8.0000008e-\
7,246.20002 H 50\
.799999 Z\x22\x0a \
id=\x22path\
883\x22\x0a \
inkscape:conne\
ctor-curvature=\x22\
0\x22\x0a \
sodipodi:nodetyp\
es=\x22cccc\x22 />\x0a \
<path\x0a \
sodipo\
di:nodetypes=\x22cc\
cc\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22\x0a i\
d=\x22path880\x22\x0a \
d=\x22m 25\
.399999,271.6000\
2 25.399999,25.4\
H 0 Z\x22\x0a \
inkscape:tr\
ansform-center-y\
=\x22-3.1749995\x22\x0a \
style\
=\x22opacity:1;fill\
:none;fill-opaci\
ty:0.49382719;st\
roke:#ffffff00;s\
troke-width:0.07\
000433;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
;paint-order:str\
oke fill markers\
\x22 />\x0a <\
rect\x0a \
ry=\x225.0534658\x22\
\x0a y=\
\x22253.84885\x22\x0a \
x=\x227.64\
87389\x22\x0a \
height=\x2235.5\
28759\x22\x0a \
width=\x2235.52\
8786\x22\x0a \
id=\x22rect870\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
06184419;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<circle\x0a \
r=\x2225.3968\
28\x22\x0a \
cy=\x22271.60001\x22\x0a\
cx=\
\x2225.4\x22\x0a \
id=\x22path872\x22\
\x0a st\
yle=\x22opacity:1;f\
ill:none;fill-op\
acity:0.49382719\
;stroke:#ffffff0\
0;stroke-width:0\
.07635882;stroke\
-linecap:round;s\
troke-linejoin:r\
ound;stroke-mite\
rlimit:4;stroke-\
dasharray:none;s\
troke-dashoffset\
:0;stroke-opacit\
y:1;paint-order:\
stroke fill mark\
ers\x22 />\x0a \
<circle\x0a \
transform\
=\x22rotate(-45)\x22\x0a \
cx=\x22\
-174.08969\x22\x0a \
cy=\x22210\
.01071\x22\x0a \
r=\x2212.65607\
1\x22\x0a \
id=\x22path876\x22\x0a \
style=\
\x22opacity:1;fill:\
none;fill-opacit\
y:0.49382719;str\
oke:#ffffff00;st\
roke-width:0.073\
99406;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
/>\x0a <p\
ath\x0a \
inkscape:transf\
orm-center-x=\x22-3\
.1749999\x22\x0a \
sodipodi:\
nodetypes=\x22cccc\x22\
\x0a in\
kscape:connector\
-curvature=\x220\x22\x0a \
id=\x22\
path904\x22\x0a \
d=\x22m 25.4,\
271.60002 -25.40\
000040000004,25.\
4 v -50.8 z\x22\x0a \
style=\
\x22opacity:1;fill:\
none;fill-opacit\
y:0.49382719;str\
oke:#ffffff00;st\
roke-width:0.070\
00433;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
/>\x0a <p\
ath\x0a \
inkscape:transf\
orm-center-x=\x223.\
175\x22\x0a \
style=\x22opacity\
:1;fill:none;fil\
l-opacity:0.4938\
2719;stroke:#fff\
fff00;stroke-wid\
th:0.07000433;st\
roke-linecap:rou\
nd;stroke-linejo\
in:round;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-dashof\
fset:0;stroke-op\
acity:1;paint-or\
der:stroke fill \
markers\x22\x0a \
d=\x22m 25.39\
9999,271.60002 2\
5.4,-25.4 v 50.8\
z\x22\x0a \
id=\x22path906\x22\x0a \
inksc\
ape:connector-cu\
rvature=\x220\x22\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22 />\x0a \
<rect\x0a \
ry=\x225.0514922\
\x22\x0a y\
=\x22256.39301\x22\x0a \
x=\x222.5\
663135\x22\x0a \
height=\x2230.\
440479\x22\x0a \
width=\x2245.6\
93634\x22\x0a \
id=\x22rect837\x22\
\x0a st\
yle=\x22opacity:1;f\
ill:none;fill-op\
acity:0.49382719\
;stroke:#ffffff0\
0;stroke-width:0\
.0657438;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<rect\x0a \
style=\x22opaci\
ty:1;fill:none;f\
ill-opacity:0.49\
382719;stroke:#f\
fffff00;stroke-w\
idth:0.0657438;s\
troke-linecap:ro\
und;stroke-linej\
oin:round;stroke\
-miterlimit:4;st\
roke-dasharray:n\
one;stroke-dasho\
ffset:0;stroke-o\
pacity:1;paint-o\
rder:stroke fill\
markers\x22\x0a \
id=\x22rect8\
31\x22\x0a \
width=\x2245.69358\
8\x22\x0a \
height=\x2230.44051\
\x22\x0a x\
=\x22248.76645\x22\x0a \
y=\x22-40\
.633385\x22\x0a \
ry=\x225.0514\
97\x22\x0a \
transform=\x22rota\
te(90)\x22 />\x0a \
</g>\x0a </\
g>\x0a </g>\x0a \
<path\x0a sty\
le=\x22opacity:1;fi\
ll:#ffc107;fill-\
opacity:1;stroke\
:none;stroke-wid\
th:0.38596651;st\
roke-miterlimit:\
4;stroke-dasharr\
ay:none;stroke-o\
pacity:1\x22\x0a \
d=\x22m 50.206421,\
401.67683 c 110.\
217209,0.71279 5\
5.108609,0.3564 \
0,0 z\x22\x0a id\
=\x22rect997\x22\x0a \
inkscape:conne\
ctor-curvature=\x22\
0\x22 />\x0a <path\x0a\
style=\x22fi\
ll:none;stroke:#\
0000ff;stroke-wi\
dth:0.52916664;s\
troke-linecap:bu\
tt;stroke-linejo\
in:bevel;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-opacit\
y:1\x22\x0a d=\x22m\
3.6919632,295.1\
7947 -1.04613,-1\
.65058 -1.04613,\
1.65058\x22\x0a \
id=\x22path827\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a sodi\
podi:nodetypes=\x22\
ccc\x22 />\x0a </g>\x0a<\
/svg>\x0a\
\x00\x00-\xb6\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22too\
lbar-handle-vert\
ical.svg\x22\x0a ink\
scape:export-fil\
ename=\x22/home/yei\
son/Development/\
piton/art/icon_l\
ite.png\x22\x0a inks\
cape:export-xdpi\
=\x2296\x22\x0a inkscap\
e:export-ydpi=\x229\
6\x22>\x0a <defs\x0a \
id=\x22defs2\x22 />\x0a \
<sodipodi:named\
view\x0a id=\x22ba\
se\x22\x0a pagecol\
or=\x22#ffffff\x22\x0a \
bordercolor=\x22#\
666666\x22\x0a bor\
deropacity=\x221.0\x22\
\x0a inkscape:p\
ageopacity=\x220.0\x22\
\x0a inkscape:p\
ageshadow=\x222\x22\x0a \
inkscape:zoom\
=\x2224.802598\x22\x0a \
inkscape:cx=\x22-\
4.3785546\x22\x0a \
inkscape:cy=\x2210.\
683358\x22\x0a ink\
scape:document-u\
nits=\x22px\x22\x0a i\
nkscape:current-\
layer=\x22g839\x22\x0a \
showgrid=\x22true\
\x22\x0a inkscape:\
window-width=\x2219\
20\x22\x0a inkscap\
e:window-height=\
\x221004\x22\x0a inks\
cape:window-x=\x220\
\x22\x0a inkscape:\
window-y=\x220\x22\x0a \
inkscape:windo\
w-maximized=\x221\x22\x0a\
inkscape:sh\
owpageshadow=\x22fa\
lse\x22\x0a units=\
\x22px\x22\x0a inksca\
pe:pagecheckerbo\
ard=\x22false\x22\x0a \
showguides=\x22tru\
e\x22\x0a inkscape\
:snap-bbox=\x22true\
\x22\x0a inkscape:\
bbox-paths=\x22true\
\x22\x0a inkscape:\
bbox-nodes=\x22true\
\x22\x0a inkscape:\
snap-bbox-edge-m\
idpoints=\x22true\x22\x0a\
inkscape:sn\
ap-bbox-midpoint\
s=\x22true\x22\x0a in\
kscape:snap-node\
s=\x22true\x22\x0a in\
kscape:object-pa\
ths=\x22true\x22\x0a \
inkscape:snap-in\
tersection-paths\
=\x22true\x22\x0a ink\
scape:snap-smoot\
h-nodes=\x22true\x22\x0a \
inkscape:sna\
p-midpoints=\x22tru\
e\x22\x0a inkscape\
:snap-global=\x22tr\
ue\x22\x0a fit-mar\
gin-top=\x220\x22\x0a \
fit-margin-left\
=\x220\x22\x0a fit-ma\
rgin-right=\x220\x22\x0a \
fit-margin-b\
ottom=\x220\x22\x0a i\
nkscape:guide-bb\
ox=\x22true\x22>\x0a <\
inkscape:grid\x0a \
type=\x22xygri\
d\x22\x0a id=\x22gr\
id974\x22\x0a em\
pspacing=\x228\x22\x0a \
spacingx=\x220.\
26458332\x22\x0a \
spacingy=\x220.264\
58332\x22\x0a do\
tted=\x22false\x22\x0a \
visible=\x22tru\
e\x22\x0a enable\
d=\x22true\x22\x0a \
snapvisiblegridl\
inesonly=\x22true\x22\x0a\
originx=\x22\
0\x22\x0a origin\
y=\x220\x22 />\x0a </sod\
ipodi:namedview>\
\x0a <metadata\x0a \
id=\x22metadata5\x22\
>\x0a <rdf:RDF>\x0a\
<cc:Work\x0a \
rdf:abou\
t=\x22\x22>\x0a <d\
c:format>image/s\
vg+xml</dc:forma\
t>\x0a <dc:t\
ype\x0a r\
df:resource=\x22htt\
p://purl.org/dc/\
dcmitype/StillIm\
age\x22 />\x0a \
<dc:title />\x0a \
</cc:Work>\x0a \
</rdf:RDF>\x0a <\
/metadata>\x0a <g\x0a\
inkscape:la\
bel=\x22Layer 1\x22\x0a \
inkscape:grou\
pmode=\x22layer\x22\x0a \
id=\x22layer1\x22\x0a \
transform=\x22t\
ranslate(0,-291.\
70835)\x22>\x0a <g\x0a\
id=\x22g847\x22\
\x0a transfor\
m=\x22matrix(0.0520\
7439,0,0,0.05207\
453,-0.90125164,\
282.41203)\x22>\x0a \
<g\x0a i\
d=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transform=\
\x22matrix(1.998621\
9,0,0,1.9986185,\
17.324484,-313.5\
2314)\x22>\x0a \
<path\x0a \
inkscape:tr\
ansform-center-y\
=\x223.175\x22\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0700043\
3;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22\x0a \
d=\x22M 2\
5.399999,271.600\
02 -8.0000008e-7\
,246.20002 H 50.\
799999 Z\x22\x0a \
id=\x22path8\
83\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22\x0a s\
odipodi:nodetype\
s=\x22cccc\x22 />\x0a \
<path\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22\x0a \
inkscape:connect\
or-curvature=\x220\x22\
\x0a id\
=\x22path880\x22\x0a \
d=\x22m 25.\
399999,271.60002\
25.399999,25.4 \
H 0 Z\x22\x0a \
inkscape:tra\
nsform-center-y=\
\x22-3.1749995\x22\x0a \
style=\
\x22opacity:1;fill:\
none;fill-opacit\
y:0.49382719;str\
oke:#ffffff00;st\
roke-width:0.070\
00433;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
/>\x0a <r\
ect\x0a \
ry=\x225.0534658\x22\x0a\
y=\x22\
253.84885\x22\x0a \
x=\x227.648\
7389\x22\x0a \
height=\x2235.52\
8759\x22\x0a \
width=\x2235.528\
786\x22\x0a \
id=\x22rect870\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
6184419;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<circle\x0a \
r=\x2225.39682\
8\x22\x0a \
cy=\x22271.60001\x22\x0a \
cx=\x22\
25.4\x22\x0a \
id=\x22path872\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07635882;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<circle\x0a \
transform=\
\x22rotate(-45)\x22\x0a \
cx=\x22-\
174.08969\x22\x0a \
cy=\x22210.\
01071\x22\x0a \
r=\x2212.656071\
\x22\x0a i\
d=\x22path876\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0739\
9406;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22 \
/>\x0a <pa\
th\x0a \
inkscape:transfo\
rm-center-x=\x22-3.\
1749999\x22\x0a \
sodipodi:n\
odetypes=\x22cccc\x22\x0a\
ink\
scape:connector-\
curvature=\x220\x22\x0a \
id=\x22p\
ath904\x22\x0a \
d=\x22m 25.4,2\
71.60002 -25.400\
00040000004,25.4\
v -50.8 z\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0700\
0433;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22 \
/>\x0a <pa\
th\x0a \
inkscape:transfo\
rm-center-x=\x223.1\
75\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.07000433;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1;paint-ord\
er:stroke fill m\
arkers\x22\x0a \
d=\x22m 25.399\
999,271.60002 25\
.4,-25.4 v 50.8 \
z\x22\x0a \
id=\x22path906\x22\x0a \
inksca\
pe:connector-cur\
vature=\x220\x22\x0a \
sodipodi\
:nodetypes=\x22cccc\
\x22 />\x0a <\
rect\x0a \
ry=\x225.0514922\x22\
\x0a y=\
\x22256.39301\x22\x0a \
x=\x222.56\
63135\x22\x0a \
height=\x2230.4\
40479\x22\x0a \
width=\x2245.69\
3634\x22\x0a \
id=\x22rect837\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
0657438;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<rect\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.0657438;st\
roke-linecap:rou\
nd;stroke-linejo\
in:round;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-dashof\
fset:0;stroke-op\
acity:1;paint-or\
der:stroke fill \
markers\x22\x0a \
id=\x22rect83\
1\x22\x0a \
width=\x2245.693588\
\x22\x0a h\
eight=\x2230.44051\x22\
\x0a x=\
\x22248.76645\x22\x0a \
y=\x22-40.\
633385\x22\x0a \
ry=\x225.05149\
7\x22\x0a \
transform=\x22rotat\
e(90)\x22 />\x0a \
</g>\x0a </g\
>\x0a </g>\x0a <\
path\x0a styl\
e=\x22opacity:1;fil\
l:#ffc107;fill-o\
pacity:1;stroke:\
none;stroke-widt\
h:0.38596651;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-op\
acity:1\x22\x0a \
d=\x22m 50.206421,4\
01.67683 c 110.2\
17209,0.71279 55\
.108609,0.3564 0\
,0 z\x22\x0a id=\
\x22rect997\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22 />\x0a <g\x0a \
id=\x22g839\x22>\x0a \
<g\x0a \
id=\x22g849\x22\x0a \
transform=\x22ro\
tate(90,2.645833\
2,294.35418)\x22>\x0a \
<rect\x0a \
y=\x22291.7\
0834\x22\x0a \
x=\x221.8520832\x22\x0a \
height\
=\x220.52916664\x22\x0a \
width=\x22\
0.52916664\x22\x0a \
id=\x22rect8\
30\x22\x0a s\
tyle=\x22opacity:1;\
fill:#0000ff;fil\
l-opacity:1;stro\
ke:none;stroke-w\
idth:0.52916664;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1\x22 />\x0a \
<rect\x0a \
style=\x22op\
acity:1;fill:#00\
00ff;fill-opacit\
y:1;stroke:none;\
stroke-width:0.5\
2916664;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1\x22\x0a id\
=\x22rect832\x22\x0a \
width=\x220.5\
2916664\x22\x0a \
height=\x220.52\
916664\x22\x0a \
x=\x221.8520832\x22\
\x0a y=\x222\
93.29584\x22 />\x0a \
<rect\x0a \
y=\x22294.883\
36\x22\x0a x\
=\x221.8520832\x22\x0a \
height=\x22\
0.52916664\x22\x0a \
width=\x220.\
52916664\x22\x0a \
id=\x22rect834\
\x22\x0a sty\
le=\x22opacity:1;fi\
ll:#0000ff;fill-\
opacity:1;stroke\
:none;stroke-wid\
th:0.52916664;st\
roke-linecap:rou\
nd;stroke-linejo\
in:round;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-dashof\
fset:0;stroke-op\
acity:1\x22 />\x0a \
<rect\x0a \
style=\x22opac\
ity:1;fill:#0000\
ff;fill-opacity:\
1;stroke:none;st\
roke-width:0.529\
16664;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1\x22\
\x0a id=\x22\
rect836\x22\x0a \
width=\x220.529\
16664\x22\x0a \
height=\x220.5291\
6664\x22\x0a \
x=\x221.8520832\x22\x0a \
y=\x22296\
.47086\x22 />\x0a \
<rect\x0a \
y=\x22292.50208\
\x22\x0a x=\x22\
2.3812499\x22\x0a \
height=\x220.\
52916664\x22\x0a \
width=\x220.52\
916664\x22\x0a \
id=\x22rect838\x22\x0a\
style\
=\x22opacity:1;fill\
:#0000ff;fill-op\
acity:1;stroke:n\
one;stroke-width\
:0.52916664;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1\x22 />\x0a \
<rect\x0a \
style=\x22opacit\
y:1;fill:#0000ff\
;fill-opacity:1;\
stroke:none;stro\
ke-width:0.52916\
664;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1\x22\x0a \
id=\x22re\
ct840\x22\x0a \
width=\x220.52916\
664\x22\x0a \
height=\x220.529166\
64\x22\x0a x\
=\x222.3812499\x22\x0a \
y=\x22294.0\
896\x22 />\x0a \
<rect\x0a \
y=\x22295.67709\x22\x0a \
x=\x222.3\
812499\x22\x0a \
height=\x220.529\
16664\x22\x0a \
width=\x220.52916\
664\x22\x0a \
id=\x22rect842\x22\x0a \
style=\x22o\
pacity:1;fill:#0\
000ff;fill-opaci\
ty:1;stroke:none\
;stroke-width:0.\
52916664;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1\x22 />\x0a <\
rect\x0a \
style=\x22opacity:1\
;fill:#0000ff;fi\
ll-opacity:1;str\
oke:none;stroke-\
width:0.52916664\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1\x22\x0a \
id=\x22rect8\
44\x22\x0a w\
idth=\x220.52916664\
\x22\x0a hei\
ght=\x220.52916664\x22\
\x0a x=\x222\
.9104166\x22\x0a \
y=\x22291.7083\
4\x22 />\x0a <r\
ect\x0a y\
=\x22293.29584\x22\x0a \
x=\x222.910\
4166\x22\x0a \
height=\x220.52916\
664\x22\x0a \
width=\x220.5291666\
4\x22\x0a id\
=\x22rect846\x22\x0a \
style=\x22opa\
city:1;fill:#000\
0ff;fill-opacity\
:1;stroke:none;s\
troke-width:0.52\
916664;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
\x22 />\x0a <re\
ct\x0a st\
yle=\x22opacity:1;f\
ill:#0000ff;fill\
-opacity:1;strok\
e:none;stroke-wi\
dth:0.52916664;s\
troke-linecap:ro\
und;stroke-linej\
oin:round;stroke\
-miterlimit:4;st\
roke-dasharray:n\
one;stroke-dasho\
ffset:0;stroke-o\
pacity:1\x22\x0a \
id=\x22rect848\
\x22\x0a wid\
th=\x220.52916664\x22\x0a\
heigh\
t=\x220.52916664\x22\x0a \
x=\x222.9\
104166\x22\x0a \
y=\x22294.88336\x22\
/>\x0a <rec\
t\x0a y=\x22\
296.47086\x22\x0a \
x=\x222.91041\
66\x22\x0a h\
eight=\x220.5291666\
4\x22\x0a wi\
dth=\x220.52916664\x22\
\x0a id=\x22\
rect850\x22\x0a \
style=\x22opaci\
ty:1;fill:#0000f\
f;fill-opacity:1\
;stroke:none;str\
oke-width:0.5291\
6664;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1\x22 \
/>\x0a </g>\x0a \
</g>\x0a </g>\x0a</\
svg>\x0a\
\x00\x00$\xad\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22spl\
itter-vertical.s\
vg\x22\x0a inkscape:\
export-filename=\
\x22/home/yeison/De\
velopment/piton/\
art/icon_lite.pn\
g\x22\x0a inkscape:e\
xport-xdpi=\x2296\x22\x0a\
inkscape:expo\
rt-ydpi=\x2296\x22>\x0a \
<defs\x0a id=\x22d\
efs2\x22 />\x0a <sodi\
podi:namedview\x0a \
id=\x22base\x22\x0a \
pagecolor=\x22#f\
fffff\x22\x0a bord\
ercolor=\x22#666666\
\x22\x0a borderopa\
city=\x221.0\x22\x0a \
inkscape:pageopa\
city=\x220.0\x22\x0a \
inkscape:pagesha\
dow=\x222\x22\x0a ink\
scape:zoom=\x2224.8\
02598\x22\x0a inks\
cape:cx=\x226.30230\
18\x22\x0a inkscap\
e:cy=\x228.969841\x22\x0a\
inkscape:do\
cument-units=\x22px\
\x22\x0a inkscape:\
current-layer=\x22l\
ayer1\x22\x0a show\
grid=\x22true\x22\x0a \
inkscape:window\
-width=\x221920\x22\x0a \
inkscape:wind\
ow-height=\x221004\x22\
\x0a inkscape:w\
indow-x=\x220\x22\x0a \
inkscape:window\
-y=\x220\x22\x0a inks\
cape:window-maxi\
mized=\x221\x22\x0a i\
nkscape:showpage\
shadow=\x22false\x22\x0a \
units=\x22px\x22\x0a \
inkscape:pag\
echeckerboard=\x22f\
alse\x22\x0a showg\
uides=\x22true\x22\x0a \
inkscape:snap-\
bbox=\x22true\x22\x0a \
inkscape:bbox-p\
aths=\x22true\x22\x0a \
inkscape:bbox-n\
odes=\x22true\x22\x0a \
inkscape:snap-b\
box-edge-midpoin\
ts=\x22true\x22\x0a i\
nkscape:snap-bbo\
x-midpoints=\x22tru\
e\x22\x0a inkscape\
:snap-nodes=\x22tru\
e\x22\x0a inkscape\
:object-paths=\x22t\
rue\x22\x0a inksca\
pe:snap-intersec\
tion-paths=\x22true\
\x22\x0a inkscape:\
snap-smooth-node\
s=\x22true\x22\x0a in\
kscape:snap-midp\
oints=\x22true\x22\x0a \
inkscape:snap-\
global=\x22true\x22\x0a \
fit-margin-to\
p=\x220\x22\x0a fit-m\
argin-left=\x220\x22\x0a \
fit-margin-r\
ight=\x220\x22\x0a fi\
t-margin-bottom=\
\x220\x22\x0a inkscap\
e:guide-bbox=\x22tr\
ue\x22>\x0a <inksca\
pe:grid\x0a t\
ype=\x22xygrid\x22\x0a \
id=\x22grid974\x22\
\x0a empspaci\
ng=\x228\x22\x0a sp\
acingx=\x220.264583\
32\x22\x0a spaci\
ngy=\x220.26458332\x22\
\x0a dotted=\x22\
false\x22\x0a vi\
sible=\x22true\x22\x0a \
enabled=\x22tru\
e\x22\x0a snapvi\
siblegridlineson\
ly=\x22true\x22\x0a \
originx=\x220\x22\x0a \
originy=\x220\x22 \
/>\x0a </sodipodi:\
namedview>\x0a <me\
tadata\x0a id=\x22\
metadata5\x22>\x0a \
<rdf:RDF>\x0a \
<cc:Work\x0a \
rdf:about=\x22\x22>\x0a\
<dc:form\
at>image/svg+xml\
</dc:format>\x0a \
<dc:type\x0a \
rdf:res\
ource=\x22http://pu\
rl.org/dc/dcmity\
pe/StillImage\x22 /\
>\x0a <dc:ti\
tle />\x0a </c\
c:Work>\x0a </rd\
f:RDF>\x0a </metad\
ata>\x0a <g\x0a i\
nkscape:label=\x22L\
ayer 1\x22\x0a ink\
scape:groupmode=\
\x22layer\x22\x0a id=\
\x22layer1\x22\x0a tr\
ansform=\x22transla\
te(0,-291.70835)\
\x22>\x0a <g\x0a \
id=\x22g847\x22\x0a \
transform=\x22mat\
rix(0.05207439,0\
,0,0.05207453,-0\
.90125164,282.41\
203)\x22>\x0a <g\x0a\
id=\x22g85\
1\x22>\x0a <g\x0a \
id=\x22g1\
059\x22\x0a \
transform=\x22matri\
x(1.9986219,0,0,\
1.9986185,17.324\
484,-313.52314)\x22\
>\x0a <pat\
h\x0a i\
nkscape:transfor\
m-center-y=\x223.17\
5\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.07000433;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22\x0a \
d=\x22M 25.3999\
99,271.60002 -8.\
0000008e-7,246.2\
0002 H 50.799999\
Z\x22\x0a \
id=\x22path883\x22\x0a \
inksc\
ape:connector-cu\
rvature=\x220\x22\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22 />\x0a \
<path\x0a \
sodipodi:node\
types=\x22cccc\x22\x0a \
inksca\
pe:connector-cur\
vature=\x220\x22\x0a \
id=\x22path\
880\x22\x0a \
d=\x22m 25.399999\
,271.60002 25.39\
9999,25.4 H 0 Z\x22\
\x0a in\
kscape:transform\
-center-y=\x22-3.17\
49995\x22\x0a \
style=\x22opaci\
ty:1;fill:none;f\
ill-opacity:0.49\
382719;stroke:#f\
fffff00;stroke-w\
idth:0.07000433;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1;paint-\
order:stroke fil\
l markers\x22 />\x0a \
<rect\x0a \
ry=\x225\
.0534658\x22\x0a \
y=\x22253.84\
885\x22\x0a \
x=\x227.6487389\x22\x0a\
hei\
ght=\x2235.528759\x22\x0a\
wid\
th=\x2235.528786\x22\x0a \
id=\x22\
rect870\x22\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0618441\
9;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22 />\x0a\
<circl\
e\x0a r\
=\x2225.396828\x22\x0a \
cy=\x2227\
1.60001\x22\x0a \
cx=\x2225.4\x22\x0a\
id=\
\x22path872\x22\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.076358\
82;stroke-lineca\
p:round;stroke-l\
inejoin:round;st\
roke-miterlimit:\
4;stroke-dasharr\
ay:none;stroke-d\
ashoffset:0;stro\
ke-opacity:1;pai\
nt-order:stroke \
fill markers\x22 />\
\x0a <circ\
le\x0a \
transform=\x22rotat\
e(-45)\x22\x0a \
cx=\x22-174.08\
969\x22\x0a \
cy=\x22210.01071\x22\
\x0a r=\
\x2212.656071\x22\x0a \
id=\x22pat\
h876\x22\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.07399406;s\
troke-linecap:ro\
und;stroke-linej\
oin:round;stroke\
-miterlimit:4;st\
roke-dasharray:n\
one;stroke-dasho\
ffset:0;stroke-o\
pacity:1;paint-o\
rder:stroke fill\
markers\x22 />\x0a \
<path\x0a \
inksca\
pe:transform-cen\
ter-x=\x22-3.174999\
9\x22\x0a \
sodipodi:nodetyp\
es=\x22cccc\x22\x0a \
inkscape:\
connector-curvat\
ure=\x220\x22\x0a \
id=\x22path904\
\x22\x0a d\
=\x22m 25.4,271.600\
02 -25.400000400\
00004,25.4 v -50\
.8 z\x22\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.07000433;s\
troke-linecap:ro\
und;stroke-linej\
oin:round;stroke\
-miterlimit:4;st\
roke-dasharray:n\
one;stroke-dasho\
ffset:0;stroke-o\
pacity:1;paint-o\
rder:stroke fill\
markers\x22 />\x0a \
<path\x0a \
inksca\
pe:transform-cen\
ter-x=\x223.175\x22\x0a \
style\
=\x22opacity:1;fill\
:none;fill-opaci\
ty:0.49382719;st\
roke:#ffffff00;s\
troke-width:0.07\
000433;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
;paint-order:str\
oke fill markers\
\x22\x0a d\
=\x22m 25.399999,27\
1.60002 25.4,-25\
.4 v 50.8 z\x22\x0a \
id=\x22pa\
th906\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a \
sodipodi:nodet\
ypes=\x22cccc\x22 />\x0a \
<rect\x0a \
ry=\x22\
5.0514922\x22\x0a \
y=\x22256.3\
9301\x22\x0a \
x=\x222.5663135\x22\
\x0a he\
ight=\x2230.440479\x22\
\x0a wi\
dth=\x2245.693634\x22\x0a\
id=\
\x22rect837\x22\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.065743\
8;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22 />\x0a\
<rect\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
0657438;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22\x0a \
id=\x22rect831\x22\x0a \
width=\
\x2245.693588\x22\x0a \
height=\
\x2230.44051\x22\x0a \
x=\x22248.7\
6645\x22\x0a \
y=\x22-40.633385\
\x22\x0a r\
y=\x225.051497\x22\x0a \
transf\
orm=\x22rotate(90)\x22\
/>\x0a </g>\
\x0a </g>\x0a \
</g>\x0a <path\x0a \
style=\x22opa\
city:1;fill:#ffc\
107;fill-opacity\
:1;stroke:none;s\
troke-width:0.38\
596651;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-opacity:\
1\x22\x0a d=\x22m 5\
0.206421,401.676\
83 c 110.217209,\
0.71279 55.10860\
9,0.3564 0,0 z\x22\x0a\
id=\x22rect9\
97\x22\x0a inksc\
ape:connector-cu\
rvature=\x220\x22 />\x0a \
<g\x0a id=\
\x22g839\x22\x0a tr\
ansform=\x22matrix(\
0,-1,-1,0,297.00\
002,297.00002)\x22>\
\x0a <rect\x0a \
y=\x22291.972\
93\x22\x0a x=\x22\
2.3812499\x22\x0a \
height=\x220.52\
916664\x22\x0a \
width=\x220.529166\
64\x22\x0a id=\
\x22rect827\x22\x0a \
style=\x22opacit\
y:1;fill:#0000ff\
;fill-opacity:1;\
stroke:none;stro\
ke-width:0.52916\
664;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1\x22 /\
>\x0a <rect\x0a \
style=\x22op\
acity:1;fill:#00\
00ff;fill-opacit\
y:1;stroke:none;\
stroke-width:0.5\
2916664;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1\x22\x0a id=\x22\
rect829\x22\x0a \
width=\x220.52916\
664\x22\x0a he\
ight=\x220.52916664\
\x22\x0a x=\x222.\
3812499\x22\x0a \
y=\x22296.20627\x22 \
/>\x0a <rect\x0a \
y=\x22295.1\
4792\x22\x0a x\
=\x222.3812499\x22\x0a \
height=\x220.\
52916664\x22\x0a \
width=\x220.5291\
6664\x22\x0a i\
d=\x22rect832\x22\x0a \
style=\x22opac\
ity:1;fill:#0000\
ff;fill-opacity:\
1;stroke:none;st\
roke-width:0.529\
16664;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1\x22\
/>\x0a <rect\x0a\
style=\x22\
opacity:1;fill:#\
0000ff;fill-opac\
ity:1;stroke:non\
e;stroke-width:0\
.52916664;stroke\
-linecap:round;s\
troke-linejoin:r\
ound;stroke-mite\
rlimit:4;stroke-\
dasharray:none;s\
troke-dashoffset\
:0;stroke-opacit\
y:1\x22\x0a id\
=\x22rect834\x22\x0a \
width=\x220.529\
16664\x22\x0a \
height=\x220.529166\
64\x22\x0a x=\x22\
2.3812499\x22\x0a \
y=\x22294.0896\x22\
/>\x0a <rect\x0a\
y=\x22293.\
03128\x22\x0a \
x=\x222.3812499\x22\x0a \
height=\x220\
.52916664\x22\x0a \
width=\x220.529\
16664\x22\x0a \
id=\x22rect836\x22\x0a \
style=\x22opa\
city:1;fill:#000\
0ff;fill-opacity\
:1;stroke:none;s\
troke-width:0.52\
916664;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
\x22 />\x0a </g>\x0a \
</g>\x0a</svg>\x0a\
\x00\x00\x1e\xb7\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22lef\
tarrow.svg\x22\x0a i\
nkscape:export-f\
ilename=\x22/home/y\
eison/Developmen\
t/piton/art/icon\
_lite.png\x22\x0a in\
kscape:export-xd\
pi=\x2296\x22\x0a inksc\
ape:export-ydpi=\
\x2296\x22>\x0a <defs\x0a \
id=\x22defs2\x22 />\
\x0a <sodipodi:nam\
edview\x0a id=\x22\
base\x22\x0a pagec\
olor=\x22#ffffff\x22\x0a \
bordercolor=\
\x22#666666\x22\x0a b\
orderopacity=\x221.\
0\x22\x0a inkscape\
:pageopacity=\x220.\
0\x22\x0a inkscape\
:pageshadow=\x222\x22\x0a\
inkscape:zo\
om=\x2228.704913\x22\x0a \
inkscape:cx=\
\x224.6862968\x22\x0a \
inkscape:cy=\x225.\
0026685\x22\x0a in\
kscape:document-\
units=\x22px\x22\x0a \
inkscape:current\
-layer=\x22layer1\x22\x0a\
showgrid=\x22t\
rue\x22\x0a inksca\
pe:window-width=\
\x221920\x22\x0a inks\
cape:window-heig\
ht=\x221004\x22\x0a i\
nkscape:window-x\
=\x220\x22\x0a inksca\
pe:window-y=\x220\x22\x0a\
inkscape:wi\
ndow-maximized=\x22\
1\x22\x0a inkscape\
:showpageshadow=\
\x22false\x22\x0a uni\
ts=\x22px\x22\x0a ink\
scape:pagechecke\
rboard=\x22false\x22\x0a \
showguides=\x22\
true\x22\x0a inksc\
ape:snap-bbox=\x22t\
rue\x22\x0a inksca\
pe:bbox-paths=\x22t\
rue\x22\x0a inksca\
pe:bbox-nodes=\x22t\
rue\x22\x0a inksca\
pe:snap-bbox-edg\
e-midpoints=\x22tru\
e\x22\x0a inkscape\
:snap-bbox-midpo\
ints=\x22true\x22\x0a \
inkscape:snap-n\
odes=\x22true\x22\x0a \
inkscape:object\
-paths=\x22true\x22\x0a \
inkscape:snap\
-intersection-pa\
ths=\x22true\x22\x0a \
inkscape:snap-sm\
ooth-nodes=\x22true\
\x22\x0a inkscape:\
snap-midpoints=\x22\
true\x22\x0a inksc\
ape:snap-global=\
\x22true\x22\x0a fit-\
margin-top=\x220\x22\x0a \
fit-margin-l\
eft=\x220\x22\x0a fit\
-margin-right=\x220\
\x22\x0a fit-margi\
n-bottom=\x220\x22\x0a \
inkscape:guide\
-bbox=\x22true\x22>\x0a \
<inkscape:grid\
\x0a type=\x22xy\
grid\x22\x0a id=\
\x22grid974\x22\x0a \
empspacing=\x228\x22\x0a\
spacingx=\
\x220.26458332\x22\x0a \
spacingy=\x220.\
26458332\x22\x0a \
dotted=\x22false\x22\x0a\
visible=\x22\
true\x22\x0a ena\
bled=\x22true\x22\x0a \
snapvisiblegr\
idlinesonly=\x22tru\
e\x22\x0a origin\
x=\x220\x22\x0a ori\
giny=\x220\x22 />\x0a </\
sodipodi:namedvi\
ew>\x0a <metadata\x0a\
id=\x22metadat\
a5\x22>\x0a <rdf:RD\
F>\x0a <cc:Wor\
k\x0a rdf:a\
bout=\x22\x22>\x0a \
<dc:format>imag\
e/svg+xml</dc:fo\
rmat>\x0a <d\
c:type\x0a \
rdf:resource=\x22\
http://purl.org/\
dc/dcmitype/Stil\
lImage\x22 />\x0a \
<dc:title />\x0a\
</cc:Work>\
\x0a </rdf:RDF>\x0a\
</metadata>\x0a \
<g\x0a inkscape\
:label=\x22Layer 1\x22\
\x0a inkscape:g\
roupmode=\x22layer\x22\
\x0a id=\x22layer1\
\x22\x0a transform\
=\x22translate(0,-2\
91.70835)\x22>\x0a \
<g\x0a id=\x22g8\
47\x22\x0a trans\
form=\x22matrix(0.0\
5207439,0,0,0.05\
207453,-0.901251\
64,282.41203)\x22>\x0a\
<g\x0a \
id=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transfo\
rm=\x22matrix(1.998\
6219,0,0,1.99861\
85,17.324484,-31\
3.52314)\x22>\x0a \
<path\x0a \
inkscape\
:transform-cente\
r-y=\x223.175\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0700\
0433;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22\x0a\
d=\x22\
M 25.399999,271.\
60002 -8.0000008\
e-7,246.20002 H \
50.799999 Z\x22\x0a \
id=\x22pa\
th883\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a \
sodipodi:nodet\
ypes=\x22cccc\x22 />\x0a \
<path\x0a \
sodi\
podi:nodetypes=\x22\
cccc\x22\x0a \
inkscape:conn\
ector-curvature=\
\x220\x22\x0a \
id=\x22path880\x22\x0a \
d=\x22m \
25.399999,271.60\
002 25.399999,25\
.4 H 0 Z\x22\x0a \
inkscape:\
transform-center\
-y=\x22-3.1749995\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07000433;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<rect\x0a \
ry=\x225.053465\
8\x22\x0a \
y=\x22253.84885\x22\x0a \
x=\x227.\
6487389\x22\x0a \
height=\x2235\
.528759\x22\x0a \
width=\x2235.\
528786\x22\x0a \
id=\x22rect870\
\x22\x0a s\
tyle=\x22opacity:1;\
fill:none;fill-o\
pacity:0.4938271\
9;stroke:#ffffff\
00;stroke-width:\
0.06184419;strok\
e-linecap:round;\
stroke-linejoin:\
round;stroke-mit\
erlimit:4;stroke\
-dasharray:none;\
stroke-dashoffse\
t:0;stroke-opaci\
ty:1;paint-order\
:stroke fill mar\
kers\x22 />\x0a \
<circle\x0a \
r=\x2225.39\
6828\x22\x0a \
cy=\x22271.60001\
\x22\x0a c\
x=\x2225.4\x22\x0a \
id=\x22path87\
2\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.07635882;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<circle\x0a \
transfo\
rm=\x22rotate(-45)\x22\
\x0a cx\
=\x22-174.08969\x22\x0a \
cy=\x222\
10.01071\x22\x0a \
r=\x2212.656\
071\x22\x0a \
id=\x22path876\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
7399406;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<path\x0a \
inkscape:tran\
sform-center-x=\x22\
-3.1749999\x22\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22\x0a \
inkscape:connect\
or-curvature=\x220\x22\
\x0a id\
=\x22path904\x22\x0a \
d=\x22m 25.\
4,271.60002 -25.\
40000040000004,2\
5.4 v -50.8 z\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
7000433;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<path\x0a \
inkscape:tran\
sform-center-x=\x22\
3.175\x22\x0a \
style=\x22opaci\
ty:1;fill:none;f\
ill-opacity:0.49\
382719;stroke:#f\
fffff00;stroke-w\
idth:0.07000433;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1;paint-\
order:stroke fil\
l markers\x22\x0a \
d=\x22m 25.\
399999,271.60002\
25.4,-25.4 v 50\
.8 z\x22\x0a \
id=\x22path906\x22\x0a\
ink\
scape:connector-\
curvature=\x220\x22\x0a \
sodip\
odi:nodetypes=\x22c\
ccc\x22 />\x0a \
<rect\x0a \
ry=\x225.05149\
22\x22\x0a \
y=\x22256.39301\x22\x0a \
x=\x222\
.5663135\x22\x0a \
height=\x223\
0.440479\x22\x0a \
width=\x2245\
.693634\x22\x0a \
id=\x22rect83\
7\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.0657438;strok\
e-linecap:round;\
stroke-linejoin:\
round;stroke-mit\
erlimit:4;stroke\
-dasharray:none;\
stroke-dashoffse\
t:0;stroke-opaci\
ty:1;paint-order\
:stroke fill mar\
kers\x22 />\x0a \
<rect\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0657438\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22\x0a \
id=\x22rec\
t831\x22\x0a \
width=\x2245.693\
588\x22\x0a \
height=\x2230.440\
51\x22\x0a \
x=\x22248.76645\x22\x0a \
y=\x22-\
40.633385\x22\x0a \
ry=\x225.05\
1497\x22\x0a \
transform=\x22ro\
tate(90)\x22 />\x0a \
</g>\x0a \
</g>\x0a </g>\x0a \
<path\x0a s\
tyle=\x22opacity:1;\
fill:#ffc107;fil\
l-opacity:1;stro\
ke:none;stroke-w\
idth:0.38596651;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-opacity:1\x22\x0a \
d=\x22m 50.20642\
1,401.67683 c 11\
0.217209,0.71279\
55.108609,0.356\
4 0,0 z\x22\x0a \
id=\x22rect997\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22 />\x0a <pat\
h\x0a style=\x22\
fill:none;stroke\
:#0000ff;stroke-\
width:0.52916664\
;stroke-linecap:\
butt;stroke-line\
join:bevel;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-opac\
ity:1\x22\x0a d=\
\x22m 3.4711232,293\
.30805 -1.65058,\
1.04613 1.65058,\
1.04613\x22\x0a \
id=\x22path827\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a sodi\
podi:nodetypes=\x22\
ccc\x22 />\x0a </g>\x0a<\
/svg>\x0a\
\x00\x00 q\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22flo\
at.svg\x22\x0a inksc\
ape:export-filen\
ame=\x22/home/yeiso\
n/Development/pi\
ton/art/icon_lit\
e.png\x22\x0a inksca\
pe:export-xdpi=\x22\
96\x22\x0a inkscape:\
export-ydpi=\x2296\x22\
>\x0a <defs\x0a i\
d=\x22defs2\x22 />\x0a <\
sodipodi:namedvi\
ew\x0a id=\x22base\
\x22\x0a pagecolor\
=\x22#ffffff\x22\x0a \
bordercolor=\x22#66\
6666\x22\x0a borde\
ropacity=\x221.0\x22\x0a \
inkscape:pag\
eopacity=\x220.0\x22\x0a \
inkscape:pag\
eshadow=\x222\x22\x0a \
inkscape:zoom=\x22\
23.490934\x22\x0a \
inkscape:cx=\x2212.\
388735\x22\x0a ink\
scape:cy=\x227.1323\
576\x22\x0a inksca\
pe:document-unit\
s=\x22px\x22\x0a inks\
cape:current-lay\
er=\x22layer1\x22\x0a \
showgrid=\x22true\x22\
\x0a inkscape:w\
indow-width=\x22192\
0\x22\x0a inkscape\
:window-height=\x22\
1004\x22\x0a inksc\
ape:window-x=\x220\x22\
\x0a inkscape:w\
indow-y=\x220\x22\x0a \
inkscape:window\
-maximized=\x221\x22\x0a \
inkscape:sho\
wpageshadow=\x22fal\
se\x22\x0a units=\x22\
px\x22\x0a inkscap\
e:pagecheckerboa\
rd=\x22false\x22\x0a \
showguides=\x22true\
\x22\x0a inkscape:\
snap-bbox=\x22true\x22\
\x0a inkscape:b\
box-paths=\x22true\x22\
\x0a inkscape:b\
box-nodes=\x22true\x22\
\x0a inkscape:s\
nap-bbox-edge-mi\
dpoints=\x22true\x22\x0a \
inkscape:sna\
p-bbox-midpoints\
=\x22true\x22\x0a ink\
scape:snap-nodes\
=\x22false\x22\x0a in\
kscape:object-pa\
ths=\x22true\x22\x0a \
inkscape:snap-in\
tersection-paths\
=\x22true\x22\x0a ink\
scape:snap-smoot\
h-nodes=\x22true\x22\x0a \
inkscape:sna\
p-midpoints=\x22tru\
e\x22\x0a inkscape\
:snap-global=\x22tr\
ue\x22\x0a fit-mar\
gin-top=\x220\x22\x0a \
fit-margin-left\
=\x220\x22\x0a fit-ma\
rgin-right=\x220\x22\x0a \
fit-margin-b\
ottom=\x220\x22\x0a i\
nkscape:guide-bb\
ox=\x22true\x22>\x0a <\
inkscape:grid\x0a \
type=\x22xygri\
d\x22\x0a id=\x22gr\
id974\x22\x0a em\
pspacing=\x228\x22\x0a \
spacingx=\x220.\
26458332\x22\x0a \
spacingy=\x220.264\
58332\x22\x0a do\
tted=\x22false\x22\x0a \
visible=\x22tru\
e\x22\x0a enable\
d=\x22true\x22\x0a \
snapvisiblegridl\
inesonly=\x22true\x22\x0a\
originx=\x22\
0\x22\x0a origin\
y=\x220\x22 />\x0a </sod\
ipodi:namedview>\
\x0a <metadata\x0a \
id=\x22metadata5\x22\
>\x0a <rdf:RDF>\x0a\
<cc:Work\x0a \
rdf:abou\
t=\x22\x22>\x0a <d\
c:format>image/s\
vg+xml</dc:forma\
t>\x0a <dc:t\
ype\x0a r\
df:resource=\x22htt\
p://purl.org/dc/\
dcmitype/StillIm\
age\x22 />\x0a \
<dc:title />\x0a \
</cc:Work>\x0a \
</rdf:RDF>\x0a <\
/metadata>\x0a <g\x0a\
inkscape:la\
bel=\x22Layer 1\x22\x0a \
inkscape:grou\
pmode=\x22layer\x22\x0a \
id=\x22layer1\x22\x0a \
transform=\x22t\
ranslate(0,-291.\
70835)\x22>\x0a <g\x0a\
id=\x22g847\x22\
\x0a transfor\
m=\x22matrix(0.0520\
7439,0,0,0.05207\
453,-0.90125164,\
282.41203)\x22>\x0a \
<g\x0a i\
d=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transform=\
\x22matrix(1.998621\
9,0,0,1.9986185,\
17.324484,-313.5\
2314)\x22>\x0a \
<path\x0a \
inkscape:tr\
ansform-center-y\
=\x223.175\x22\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0700043\
3;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22\x0a \
d=\x22M 2\
5.399999,271.600\
02 -8.0000008e-7\
,246.20002 H 50.\
799999 Z\x22\x0a \
id=\x22path8\
83\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22\x0a s\
odipodi:nodetype\
s=\x22cccc\x22 />\x0a \
<path\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22\x0a \
inkscape:connect\
or-curvature=\x220\x22\
\x0a id\
=\x22path880\x22\x0a \
d=\x22m 25.\
399999,271.60002\
25.399999,25.4 \
H 0 Z\x22\x0a \
inkscape:tra\
nsform-center-y=\
\x22-3.1749995\x22\x0a \
style=\
\x22opacity:1;fill:\
none;fill-opacit\
y:0.49382719;str\
oke:#ffffff00;st\
roke-width:0.070\
00433;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
/>\x0a <r\
ect\x0a \
ry=\x225.0534658\x22\x0a\
y=\x22\
253.84885\x22\x0a \
x=\x227.648\
7389\x22\x0a \
height=\x2235.52\
8759\x22\x0a \
width=\x2235.528\
786\x22\x0a \
id=\x22rect870\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
6184419;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<circle\x0a \
r=\x2225.39682\
8\x22\x0a \
cy=\x22271.60001\x22\x0a \
cx=\x22\
25.4\x22\x0a \
id=\x22path872\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07635882;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<circle\x0a \
transform=\
\x22rotate(-45)\x22\x0a \
cx=\x22-\
174.08969\x22\x0a \
cy=\x22210.\
01071\x22\x0a \
r=\x2212.656071\
\x22\x0a i\
d=\x22path876\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0739\
9406;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22 \
/>\x0a <pa\
th\x0a \
inkscape:transfo\
rm-center-x=\x22-3.\
1749999\x22\x0a \
sodipodi:n\
odetypes=\x22cccc\x22\x0a\
ink\
scape:connector-\
curvature=\x220\x22\x0a \
id=\x22p\
ath904\x22\x0a \
d=\x22m 25.4,2\
71.60002 -25.400\
00040000004,25.4\
v -50.8 z\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0700\
0433;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22 \
/>\x0a <pa\
th\x0a \
inkscape:transfo\
rm-center-x=\x223.1\
75\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.07000433;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1;paint-ord\
er:stroke fill m\
arkers\x22\x0a \
d=\x22m 25.399\
999,271.60002 25\
.4,-25.4 v 50.8 \
z\x22\x0a \
id=\x22path906\x22\x0a \
inksca\
pe:connector-cur\
vature=\x220\x22\x0a \
sodipodi\
:nodetypes=\x22cccc\
\x22 />\x0a <\
rect\x0a \
ry=\x225.0514922\x22\
\x0a y=\
\x22256.39301\x22\x0a \
x=\x222.56\
63135\x22\x0a \
height=\x2230.4\
40479\x22\x0a \
width=\x2245.69\
3634\x22\x0a \
id=\x22rect837\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
0657438;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<rect\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.0657438;st\
roke-linecap:rou\
nd;stroke-linejo\
in:round;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-dashof\
fset:0;stroke-op\
acity:1;paint-or\
der:stroke fill \
markers\x22\x0a \
id=\x22rect83\
1\x22\x0a \
width=\x2245.693588\
\x22\x0a h\
eight=\x2230.44051\x22\
\x0a x=\
\x22248.76645\x22\x0a \
y=\x22-40.\
633385\x22\x0a \
ry=\x225.05149\
7\x22\x0a \
transform=\x22rotat\
e(90)\x22 />\x0a \
</g>\x0a </g\
>\x0a </g>\x0a <\
path\x0a styl\
e=\x22opacity:1;fil\
l:#ffc107;fill-o\
pacity:1;stroke:\
none;stroke-widt\
h:0.38596651;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-op\
acity:1\x22\x0a \
d=\x22m 50.206421,4\
01.67683 c 110.2\
17209,0.71279 55\
.108609,0.3564 0\
,0 z\x22\x0a id=\
\x22rect997\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22 />\x0a <rect\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:1;\
stroke:#0000ff;s\
troke-width:0.52\
916664;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
;paint-order:str\
oke fill markers\
\x22\x0a id=\x22rec\
t829\x22\x0a wid\
th=\x222.6581812\x22\x0a \
height=\x222.\
658181\x22\x0a x\
=\x220.6522209\x22\x0a \
y=\x22293.68961\
\x22\x0a ry=\x220.2\
9536656\x22 />\x0a \
<rect\x0a ry=\
\x220.29536656\x22\x0a \
y=\x22292.3606\x22\
\x0a x=\x221.981\
2645\x22\x0a hei\
ght=\x222.658181\x22\x0a \
width=\x222.6\
581812\x22\x0a i\
d=\x22rect839\x22\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:1;str\
oke:#0000ff;stro\
ke-width:0.52916\
664;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1;pa\
int-order:stroke\
fill markers\x22 /\
>\x0a </g>\x0a</svg>\x0a\
\
\x00\x00#\x10\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22che\
ckbox_indetermin\
ate.svg\x22\x0a inks\
cape:export-file\
name=\x22/home/yeis\
on/Development/p\
iton/art/icon_li\
te.png\x22\x0a inksc\
ape:export-xdpi=\
\x2296\x22\x0a inkscape\
:export-ydpi=\x2296\
\x22>\x0a <defs\x0a \
id=\x22defs2\x22 />\x0a \
<sodipodi:namedv\
iew\x0a id=\x22bas\
e\x22\x0a pagecolo\
r=\x22#ffffff\x22\x0a \
bordercolor=\x22#6\
66666\x22\x0a bord\
eropacity=\x221.0\x22\x0a\
inkscape:pa\
geopacity=\x220.0\x22\x0a\
inkscape:pa\
geshadow=\x222\x22\x0a \
inkscape:zoom=\
\x2266.442396\x22\x0a \
inkscape:cx=\x2210\
.78082\x22\x0a ink\
scape:cy=\x229.3546\
433\x22\x0a inksca\
pe:document-unit\
s=\x22px\x22\x0a inks\
cape:current-lay\
er=\x22layer1\x22\x0a \
showgrid=\x22true\x22\
\x0a inkscape:w\
indow-width=\x22192\
0\x22\x0a inkscape\
:window-height=\x22\
1004\x22\x0a inksc\
ape:window-x=\x220\x22\
\x0a inkscape:w\
indow-y=\x220\x22\x0a \
inkscape:window\
-maximized=\x221\x22\x0a \
inkscape:sho\
wpageshadow=\x22fal\
se\x22\x0a units=\x22\
px\x22\x0a inkscap\
e:pagecheckerboa\
rd=\x22false\x22\x0a \
showguides=\x22fals\
e\x22\x0a inkscape\
:snap-bbox=\x22true\
\x22\x0a inkscape:\
bbox-paths=\x22true\
\x22\x0a inkscape:\
bbox-nodes=\x22true\
\x22\x0a inkscape:\
snap-bbox-edge-m\
idpoints=\x22true\x22\x0a\
inkscape:sn\
ap-bbox-midpoint\
s=\x22true\x22\x0a in\
kscape:snap-node\
s=\x22true\x22\x0a in\
kscape:object-pa\
ths=\x22true\x22\x0a \
inkscape:snap-in\
tersection-paths\
=\x22true\x22\x0a ink\
scape:snap-smoot\
h-nodes=\x22true\x22\x0a \
inkscape:sna\
p-midpoints=\x22tru\
e\x22\x0a inkscape\
:snap-global=\x22tr\
ue\x22\x0a fit-mar\
gin-top=\x220\x22\x0a \
fit-margin-left\
=\x220\x22\x0a fit-ma\
rgin-right=\x220\x22\x0a \
fit-margin-b\
ottom=\x220\x22\x0a i\
nkscape:guide-bb\
ox=\x22true\x22>\x0a <\
inkscape:grid\x0a \
type=\x22xygri\
d\x22\x0a id=\x22gr\
id974\x22\x0a em\
pspacing=\x228\x22\x0a \
spacingx=\x220.\
26458332\x22\x0a \
spacingy=\x220.264\
58332\x22\x0a do\
tted=\x22false\x22\x0a \
visible=\x22tru\
e\x22\x0a enable\
d=\x22true\x22\x0a \
snapvisiblegridl\
inesonly=\x22true\x22\x0a\
originx=\x22\
0\x22\x0a origin\
y=\x220\x22 />\x0a </sod\
ipodi:namedview>\
\x0a <metadata\x0a \
id=\x22metadata5\x22\
>\x0a <rdf:RDF>\x0a\
<cc:Work\x0a \
rdf:abou\
t=\x22\x22>\x0a <d\
c:format>image/s\
vg+xml</dc:forma\
t>\x0a <dc:t\
ype\x0a r\
df:resource=\x22htt\
p://purl.org/dc/\
dcmitype/StillIm\
age\x22 />\x0a \
<dc:title></dc:t\
itle>\x0a </cc\
:Work>\x0a </rdf\
:RDF>\x0a </metada\
ta>\x0a <g\x0a in\
kscape:label=\x22La\
yer 1\x22\x0a inks\
cape:groupmode=\x22\
layer\x22\x0a id=\x22\
layer1\x22\x0a tra\
nsform=\x22translat\
e(0,-291.70835)\x22\
>\x0a <g\x0a \
transform=\x22trans\
late(47.359504,-\
89.690092)\x22\x0a \
id=\x22layer1-3\x22\
\x0a inkscape\
:label=\x22Layer 1\x22\
>\x0a <path\x0a \
inkscape:\
connector-curvat\
ure=\x220\x22\x0a \
id=\x22rect1954\x22\x0a \
d=\x22m -46\
.03762,382.19219\
c -0.292672,0 -\
0.528134,0.23546\
-0.528134,0.528\
13 v 2.6479 c 0,\
0.29268 0.235462\
,0.52814 0.52813\
4,0.52814 h 2.64\
79 c 0.292673,0 \
0.528133,-0.2354\
6 0.528133,-0.52\
814 v -2.6479 c \
0,-0.29267 -0.23\
546,-0.52813 -0.\
528133,-0.52813 \
z m 10e-4,0.2645\
8 h 2.645833 c 0\
.146573,0 0.2645\
83,0.11801 0.264\
583,0.26459 v 2.\
64583 c 0,0.1465\
7 -0.11801,0.264\
58 -0.264583,0.2\
6458 h -2.645799\
c -0.146574,0 -\
0.264584,-0.1180\
1 -0.264584,-0.2\
6458 v -2.64583 \
c 0,-0.14658 0.1\
1801,-0.26459 0.\
264584,-0.26459 \
z\x22\x0a styl\
e=\x22opacity:1;fil\
l:#0000ff;fill-o\
pacity:1;stroke:\
#0000ff;stroke-w\
idth:0;stroke-li\
necap:square;str\
oke-linejoin:mit\
er;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a <pat\
h\x0a style\
=\x22opacity:1;fill\
:#0000ff;fill-op\
acity:1;stroke:#\
0000ff;stroke-wi\
dth:0;stroke-lin\
ecap:square;stro\
ke-linejoin:mite\
r;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
;paint-order:str\
oke fill markers\
\x22\x0a d=\x22m \
-45.772004,382.9\
8594 v 2.11667 h\
2.116666 v -2.1\
1667 z m 1.85208\
3,0.26459 v 1.58\
75 h -1.5875 z\x22\x0a\
id=\x22rec\
t2118\x22\x0a \
inkscape:connect\
or-curvature=\x220\x22\
\x0a sodipo\
di:nodetypes=\x22cc\
ccccccc\x22 />\x0a \
</g>\x0a <g\x0a \
id=\x22g847\x22\x0a \
transform=\x22m\
atrix(0.05207439\
,0,0,0.05207453,\
-0.90125164,282.\
41203)\x22>\x0a <\
g\x0a id=\x22g\
851\x22>\x0a <g\
\x0a id=\x22\
g1059\x22\x0a \
transform=\x22mat\
rix(1.9986219,0,\
0,1.9986185,17.3\
24484,-313.52314\
)\x22>\x0a <p\
ath\x0a \
inkscape:transf\
orm-center-y=\x223.\
175\x22\x0a \
style=\x22opacity\
:1;fill:none;fil\
l-opacity:0.4938\
2719;stroke:#fff\
fff00;stroke-wid\
th:0.07000433;st\
roke-linecap:rou\
nd;stroke-linejo\
in:round;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-dashof\
fset:0;stroke-op\
acity:1;paint-or\
der:stroke fill \
markers\x22\x0a \
d=\x22M 25.39\
9999,271.60002 -\
8.0000008e-7,246\
.20002 H 50.7999\
99 Z\x22\x0a \
id=\x22path883\x22\x0a\
ink\
scape:connector-\
curvature=\x220\x22\x0a \
sodip\
odi:nodetypes=\x22c\
ccc\x22 />\x0a \
<path\x0a \
sodipodi:no\
detypes=\x22cccc\x22\x0a \
inks\
cape:connector-c\
urvature=\x220\x22\x0a \
id=\x22pa\
th880\x22\x0a \
d=\x22m 25.3999\
99,271.60002 25.\
399999,25.4 H 0 \
Z\x22\x0a \
inkscape:transfo\
rm-center-y=\x22-3.\
1749995\x22\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0700043\
3;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22 />\x0a\
<rect\x0a\
ry=\
\x225.0534658\x22\x0a \
y=\x22253.\
84885\x22\x0a \
x=\x227.6487389\
\x22\x0a h\
eight=\x2235.528759\
\x22\x0a w\
idth=\x2235.528786\x22\
\x0a id\
=\x22rect870\x22\x0a \
style=\x22o\
pacity:1;fill:no\
ne;fill-opacity:\
0.49382719;strok\
e:#ffffff00;stro\
ke-width:0.06184\
419;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1;pa\
int-order:stroke\
fill markers\x22 /\
>\x0a <cir\
cle\x0a \
r=\x2225.396828\x22\x0a \
cy=\x22\
271.60001\x22\x0a \
cx=\x2225.4\
\x22\x0a i\
d=\x22path872\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0763\
5882;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22 \
/>\x0a <ci\
rcle\x0a \
transform=\x22rot\
ate(-45)\x22\x0a \
cx=\x22-174.\
08969\x22\x0a \
cy=\x22210.0107\
1\x22\x0a \
r=\x2212.656071\x22\x0a \
id=\x22p\
ath876\x22\x0a \
style=\x22opac\
ity:1;fill:none;\
fill-opacity:0.4\
9382719;stroke:#\
ffffff00;stroke-\
width:0.07399406\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22 />\x0a \
<path\x0a \
inks\
cape:transform-c\
enter-x=\x22-3.1749\
999\x22\x0a \
sodipodi:nodet\
ypes=\x22cccc\x22\x0a \
inkscap\
e:connector-curv\
ature=\x220\x22\x0a \
id=\x22path9\
04\x22\x0a \
d=\x22m 25.4,271.6\
0002 -25.4000004\
0000004,25.4 v -\
50.8 z\x22\x0a \
style=\x22opac\
ity:1;fill:none;\
fill-opacity:0.4\
9382719;stroke:#\
ffffff00;stroke-\
width:0.07000433\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22 />\x0a \
<path\x0a \
inks\
cape:transform-c\
enter-x=\x223.175\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07000433;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22\x0a \
d=\x22m 25.399999,\
271.60002 25.4,-\
25.4 v 50.8 z\x22\x0a \
id=\x22\
path906\x22\x0a \
inkscape:c\
onnector-curvatu\
re=\x220\x22\x0a \
sodipodi:nod\
etypes=\x22cccc\x22 />\
\x0a <rect\
\x0a ry\
=\x225.0514922\x22\x0a \
y=\x22256\
.39301\x22\x0a \
x=\x222.566313\
5\x22\x0a \
height=\x2230.44047\
9\x22\x0a \
width=\x2245.693634\
\x22\x0a i\
d=\x22rect837\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0657\
438;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1;pa\
int-order:stroke\
fill markers\x22 /\
>\x0a <rec\
t\x0a s\
tyle=\x22opacity:1;\
fill:none;fill-o\
pacity:0.4938271\
9;stroke:#ffffff\
00;stroke-width:\
0.0657438;stroke\
-linecap:round;s\
troke-linejoin:r\
ound;stroke-mite\
rlimit:4;stroke-\
dasharray:none;s\
troke-dashoffset\
:0;stroke-opacit\
y:1;paint-order:\
stroke fill mark\
ers\x22\x0a \
id=\x22rect831\x22\x0a \
widt\
h=\x2245.693588\x22\x0a \
heigh\
t=\x2230.44051\x22\x0a \
x=\x22248\
.76645\x22\x0a \
y=\x22-40.6333\
85\x22\x0a \
ry=\x225.051497\x22\x0a \
tran\
sform=\x22rotate(90\
)\x22 />\x0a </\
g>\x0a </g>\x0a \
</g>\x0a <path\
\x0a style=\x22o\
pacity:1;fill:#f\
fc107;fill-opaci\
ty:1;stroke:none\
;stroke-width:0.\
38596651;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-opacit\
y:1\x22\x0a d=\x22m\
50.206421,401.6\
7683 c 110.21720\
9,0.71279 55.108\
609,0.3564 0,0 z\
\x22\x0a id=\x22rec\
t997\x22\x0a ink\
scape:connector-\
curvature=\x220\x22 />\
\x0a </g>\x0a</svg>\x0a\
\x00\x00,\xc0\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22too\
lbar-handle-hori\
zontal.svg\x22\x0a i\
nkscape:export-f\
ilename=\x22/home/y\
eison/Developmen\
t/piton/art/icon\
_lite.png\x22\x0a in\
kscape:export-xd\
pi=\x2296\x22\x0a inksc\
ape:export-ydpi=\
\x2296\x22>\x0a <defs\x0a \
id=\x22defs2\x22 />\
\x0a <sodipodi:nam\
edview\x0a id=\x22\
base\x22\x0a pagec\
olor=\x22#ffffff\x22\x0a \
bordercolor=\
\x22#666666\x22\x0a b\
orderopacity=\x221.\
0\x22\x0a inkscape\
:pageopacity=\x220.\
0\x22\x0a inkscape\
:pageshadow=\x222\x22\x0a\
inkscape:zo\
om=\x2224.802598\x22\x0a \
inkscape:cx=\
\x223.483525\x22\x0a \
inkscape:cy=\x2210.\
683358\x22\x0a ink\
scape:document-u\
nits=\x22px\x22\x0a i\
nkscape:current-\
layer=\x22g839\x22\x0a \
showgrid=\x22true\
\x22\x0a inkscape:\
window-width=\x2219\
20\x22\x0a inkscap\
e:window-height=\
\x221004\x22\x0a inks\
cape:window-x=\x220\
\x22\x0a inkscape:\
window-y=\x220\x22\x0a \
inkscape:windo\
w-maximized=\x221\x22\x0a\
inkscape:sh\
owpageshadow=\x22fa\
lse\x22\x0a units=\
\x22px\x22\x0a inksca\
pe:pagecheckerbo\
ard=\x22false\x22\x0a \
showguides=\x22tru\
e\x22\x0a inkscape\
:snap-bbox=\x22true\
\x22\x0a inkscape:\
bbox-paths=\x22true\
\x22\x0a inkscape:\
bbox-nodes=\x22true\
\x22\x0a inkscape:\
snap-bbox-edge-m\
idpoints=\x22true\x22\x0a\
inkscape:sn\
ap-bbox-midpoint\
s=\x22true\x22\x0a in\
kscape:snap-node\
s=\x22true\x22\x0a in\
kscape:object-pa\
ths=\x22true\x22\x0a \
inkscape:snap-in\
tersection-paths\
=\x22true\x22\x0a ink\
scape:snap-smoot\
h-nodes=\x22true\x22\x0a \
inkscape:sna\
p-midpoints=\x22tru\
e\x22\x0a inkscape\
:snap-global=\x22tr\
ue\x22\x0a fit-mar\
gin-top=\x220\x22\x0a \
fit-margin-left\
=\x220\x22\x0a fit-ma\
rgin-right=\x220\x22\x0a \
fit-margin-b\
ottom=\x220\x22\x0a i\
nkscape:guide-bb\
ox=\x22true\x22>\x0a <\
inkscape:grid\x0a \
type=\x22xygri\
d\x22\x0a id=\x22gr\
id974\x22\x0a em\
pspacing=\x228\x22\x0a \
spacingx=\x220.\
26458332\x22\x0a \
spacingy=\x220.264\
58332\x22\x0a do\
tted=\x22false\x22\x0a \
visible=\x22tru\
e\x22\x0a enable\
d=\x22true\x22\x0a \
snapvisiblegridl\
inesonly=\x22true\x22\x0a\
originx=\x22\
0\x22\x0a origin\
y=\x220\x22 />\x0a </sod\
ipodi:namedview>\
\x0a <metadata\x0a \
id=\x22metadata5\x22\
>\x0a <rdf:RDF>\x0a\
<cc:Work\x0a \
rdf:abou\
t=\x22\x22>\x0a <d\
c:format>image/s\
vg+xml</dc:forma\
t>\x0a <dc:t\
ype\x0a r\
df:resource=\x22htt\
p://purl.org/dc/\
dcmitype/StillIm\
age\x22 />\x0a \
<dc:title />\x0a \
</cc:Work>\x0a \
</rdf:RDF>\x0a <\
/metadata>\x0a <g\x0a\
inkscape:la\
bel=\x22Layer 1\x22\x0a \
inkscape:grou\
pmode=\x22layer\x22\x0a \
id=\x22layer1\x22\x0a \
transform=\x22t\
ranslate(0,-291.\
70835)\x22>\x0a <g\x0a\
id=\x22g847\x22\
\x0a transfor\
m=\x22matrix(0.0520\
7439,0,0,0.05207\
453,-0.90125164,\
282.41203)\x22>\x0a \
<g\x0a i\
d=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transform=\
\x22matrix(1.998621\
9,0,0,1.9986185,\
17.324484,-313.5\
2314)\x22>\x0a \
<path\x0a \
inkscape:tr\
ansform-center-y\
=\x223.175\x22\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0700043\
3;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22\x0a \
d=\x22M 2\
5.399999,271.600\
02 -8.0000008e-7\
,246.20002 H 50.\
799999 Z\x22\x0a \
id=\x22path8\
83\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22\x0a s\
odipodi:nodetype\
s=\x22cccc\x22 />\x0a \
<path\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22\x0a \
inkscape:connect\
or-curvature=\x220\x22\
\x0a id\
=\x22path880\x22\x0a \
d=\x22m 25.\
399999,271.60002\
25.399999,25.4 \
H 0 Z\x22\x0a \
inkscape:tra\
nsform-center-y=\
\x22-3.1749995\x22\x0a \
style=\
\x22opacity:1;fill:\
none;fill-opacit\
y:0.49382719;str\
oke:#ffffff00;st\
roke-width:0.070\
00433;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
/>\x0a <r\
ect\x0a \
ry=\x225.0534658\x22\x0a\
y=\x22\
253.84885\x22\x0a \
x=\x227.648\
7389\x22\x0a \
height=\x2235.52\
8759\x22\x0a \
width=\x2235.528\
786\x22\x0a \
id=\x22rect870\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
6184419;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<circle\x0a \
r=\x2225.39682\
8\x22\x0a \
cy=\x22271.60001\x22\x0a \
cx=\x22\
25.4\x22\x0a \
id=\x22path872\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07635882;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<circle\x0a \
transform=\
\x22rotate(-45)\x22\x0a \
cx=\x22-\
174.08969\x22\x0a \
cy=\x22210.\
01071\x22\x0a \
r=\x2212.656071\
\x22\x0a i\
d=\x22path876\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0739\
9406;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22 \
/>\x0a <pa\
th\x0a \
inkscape:transfo\
rm-center-x=\x22-3.\
1749999\x22\x0a \
sodipodi:n\
odetypes=\x22cccc\x22\x0a\
ink\
scape:connector-\
curvature=\x220\x22\x0a \
id=\x22p\
ath904\x22\x0a \
d=\x22m 25.4,2\
71.60002 -25.400\
00040000004,25.4\
v -50.8 z\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0700\
0433;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22 \
/>\x0a <pa\
th\x0a \
inkscape:transfo\
rm-center-x=\x223.1\
75\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.07000433;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1;paint-ord\
er:stroke fill m\
arkers\x22\x0a \
d=\x22m 25.399\
999,271.60002 25\
.4,-25.4 v 50.8 \
z\x22\x0a \
id=\x22path906\x22\x0a \
inksca\
pe:connector-cur\
vature=\x220\x22\x0a \
sodipodi\
:nodetypes=\x22cccc\
\x22 />\x0a <\
rect\x0a \
ry=\x225.0514922\x22\
\x0a y=\
\x22256.39301\x22\x0a \
x=\x222.56\
63135\x22\x0a \
height=\x2230.4\
40479\x22\x0a \
width=\x2245.69\
3634\x22\x0a \
id=\x22rect837\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
0657438;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<rect\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.0657438;st\
roke-linecap:rou\
nd;stroke-linejo\
in:round;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-dashof\
fset:0;stroke-op\
acity:1;paint-or\
der:stroke fill \
markers\x22\x0a \
id=\x22rect83\
1\x22\x0a \
width=\x2245.693588\
\x22\x0a h\
eight=\x2230.44051\x22\
\x0a x=\
\x22248.76645\x22\x0a \
y=\x22-40.\
633385\x22\x0a \
ry=\x225.05149\
7\x22\x0a \
transform=\x22rotat\
e(90)\x22 />\x0a \
</g>\x0a </g\
>\x0a </g>\x0a <\
path\x0a styl\
e=\x22opacity:1;fil\
l:#ffc107;fill-o\
pacity:1;stroke:\
none;stroke-widt\
h:0.38596651;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-op\
acity:1\x22\x0a \
d=\x22m 50.206421,4\
01.67683 c 110.2\
17209,0.71279 55\
.108609,0.3564 0\
,0 z\x22\x0a id=\
\x22rect997\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22 />\x0a <g\x0a \
id=\x22g839\x22>\x0a \
<rect\x0a \
style=\x22opacit\
y:1;fill:#0000ff\
;fill-opacity:1;\
stroke:none;stro\
ke-width:0.52916\
664;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1\x22\x0a \
id=\x22rect\
830\x22\x0a wi\
dth=\x220.52916664\x22\
\x0a height\
=\x220.52916664\x22\x0a \
x=\x221.8520\
832\x22\x0a y=\
\x22291.70834\x22 />\x0a \
<rect\x0a \
y=\x22293.29584\
\x22\x0a x=\x221.\
8520832\x22\x0a \
height=\x220.5291\
6664\x22\x0a w\
idth=\x220.52916664\
\x22\x0a id=\x22r\
ect832\x22\x0a \
style=\x22opacity:\
1;fill:#0000ff;f\
ill-opacity:1;st\
roke:none;stroke\
-width:0.5291666\
4;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1\x22 />\x0a\
<rect\x0a \
style=\x22opac\
ity:1;fill:#0000\
ff;fill-opacity:\
1;stroke:none;st\
roke-width:0.529\
16664;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1\x22\
\x0a id=\x22re\
ct834\x22\x0a \
width=\x220.5291666\
4\x22\x0a heig\
ht=\x220.52916664\x22\x0a\
x=\x221.85\
20832\x22\x0a \
y=\x22294.88336\x22 />\
\x0a <rect\x0a \
y=\x22296.470\
86\x22\x0a x=\x22\
1.8520832\x22\x0a \
height=\x220.52\
916664\x22\x0a \
width=\x220.529166\
64\x22\x0a id=\
\x22rect836\x22\x0a \
style=\x22opacit\
y:1;fill:#0000ff\
;fill-opacity:1;\
stroke:none;stro\
ke-width:0.52916\
664;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1\x22 /\
>\x0a <rect\x0a \
style=\x22op\
acity:1;fill:#00\
00ff;fill-opacit\
y:1;stroke:none;\
stroke-width:0.5\
2916664;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1\x22\x0a id=\x22\
rect838\x22\x0a \
width=\x220.52916\
664\x22\x0a he\
ight=\x220.52916664\
\x22\x0a x=\x222.\
3812499\x22\x0a \
y=\x22292.50208\x22 \
/>\x0a <rect\x0a \
y=\x22294.0\
896\x22\x0a x=\
\x222.3812499\x22\x0a \
height=\x220.5\
2916664\x22\x0a \
width=\x220.52916\
664\x22\x0a id\
=\x22rect840\x22\x0a \
style=\x22opaci\
ty:1;fill:#0000f\
f;fill-opacity:1\
;stroke:none;str\
oke-width:0.5291\
6664;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1\x22 \
/>\x0a <rect\x0a \
style=\x22o\
pacity:1;fill:#0\
000ff;fill-opaci\
ty:1;stroke:none\
;stroke-width:0.\
52916664;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1\x22\x0a id=\
\x22rect842\x22\x0a \
width=\x220.5291\
6664\x22\x0a h\
eight=\x220.5291666\
4\x22\x0a x=\x222\
.3812499\x22\x0a \
y=\x22295.67709\x22\
/>\x0a <rect\x0a\
y=\x22291.\
70834\x22\x0a \
x=\x222.9104166\x22\x0a \
height=\x220\
.52916664\x22\x0a \
width=\x220.529\
16664\x22\x0a \
id=\x22rect844\x22\x0a \
style=\x22opa\
city:1;fill:#000\
0ff;fill-opacity\
:1;stroke:none;s\
troke-width:0.52\
916664;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
\x22 />\x0a <rect\
\x0a style=\
\x22opacity:1;fill:\
#0000ff;fill-opa\
city:1;stroke:no\
ne;stroke-width:\
0.52916664;strok\
e-linecap:round;\
stroke-linejoin:\
round;stroke-mit\
erlimit:4;stroke\
-dasharray:none;\
stroke-dashoffse\
t:0;stroke-opaci\
ty:1\x22\x0a i\
d=\x22rect846\x22\x0a \
width=\x220.52\
916664\x22\x0a \
height=\x220.52916\
664\x22\x0a x=\
\x222.9104166\x22\x0a \
y=\x22293.2958\
4\x22 />\x0a <rec\
t\x0a y=\x2229\
4.88336\x22\x0a \
x=\x222.9104166\x22\x0a\
height=\
\x220.52916664\x22\x0a \
width=\x220.5\
2916664\x22\x0a \
id=\x22rect848\x22\x0a \
style=\x22o\
pacity:1;fill:#0\
000ff;fill-opaci\
ty:1;stroke:none\
;stroke-width:0.\
52916664;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1\x22 />\x0a <re\
ct\x0a styl\
e=\x22opacity:1;fil\
l:#0000ff;fill-o\
pacity:1;stroke:\
none;stroke-widt\
h:0.52916664;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1\x22\x0a \
id=\x22rect850\x22\x0a \
width=\x220.\
52916664\x22\x0a \
height=\x220.529\
16664\x22\x0a \
x=\x222.9104166\x22\x0a \
y=\x22296.47\
086\x22 />\x0a </g>\
\x0a </g>\x0a</svg>\x0a\
\x00\x00!\x8c\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22rad\
iobutton_checked\
.svg\x22\x0a inkscap\
e:export-filenam\
e=\x22/home/yeison/\
Development/pito\
n/art/icon_lite.\
png\x22\x0a inkscape\
:export-xdpi=\x2296\
\x22\x0a inkscape:ex\
port-ydpi=\x2296\x22>\x0a\
<defs\x0a id=\
\x22defs2\x22 />\x0a <so\
dipodi:namedview\
\x0a id=\x22base\x22\x0a\
pagecolor=\x22\
#ffffff\x22\x0a bo\
rdercolor=\x22#6666\
66\x22\x0a bordero\
pacity=\x221.0\x22\x0a \
inkscape:pageo\
pacity=\x220.0\x22\x0a \
inkscape:pages\
hadow=\x222\x22\x0a i\
nkscape:zoom=\x2239\
.85\x22\x0a inksca\
pe:cx=\x228.1932246\
\x22\x0a inkscape:\
cy=\x2210\x22\x0a ink\
scape:document-u\
nits=\x22px\x22\x0a i\
nkscape:current-\
layer=\x22layer1\x22\x0a \
showgrid=\x22tr\
ue\x22\x0a inkscap\
e:window-width=\x22\
1920\x22\x0a inksc\
ape:window-heigh\
t=\x221004\x22\x0a in\
kscape:window-x=\
\x220\x22\x0a inkscap\
e:window-y=\x220\x22\x0a \
inkscape:win\
dow-maximized=\x221\
\x22\x0a inkscape:\
showpageshadow=\x22\
false\x22\x0a unit\
s=\x22px\x22\x0a inks\
cape:pagechecker\
board=\x22false\x22\x0a \
showguides=\x22t\
rue\x22\x0a inksca\
pe:snap-bbox=\x22tr\
ue\x22\x0a inkscap\
e:bbox-paths=\x22tr\
ue\x22\x0a inkscap\
e:bbox-nodes=\x22tr\
ue\x22\x0a inkscap\
e:snap-bbox-edge\
-midpoints=\x22true\
\x22\x0a inkscape:\
snap-bbox-midpoi\
nts=\x22true\x22\x0a \
inkscape:snap-no\
des=\x22true\x22\x0a \
inkscape:object-\
paths=\x22true\x22\x0a \
inkscape:snap-\
intersection-pat\
hs=\x22true\x22\x0a i\
nkscape:snap-smo\
oth-nodes=\x22true\x22\
\x0a inkscape:s\
nap-midpoints=\x22t\
rue\x22\x0a inksca\
pe:snap-global=\x22\
true\x22\x0a fit-m\
argin-top=\x220\x22\x0a \
fit-margin-le\
ft=\x220\x22\x0a fit-\
margin-right=\x220\x22\
\x0a fit-margin\
-bottom=\x220\x22\x0a \
inkscape:guide-\
bbox=\x22true\x22>\x0a \
<inkscape:grid\x0a\
type=\x22xyg\
rid\x22\x0a id=\x22\
grid974\x22\x0a \
empspacing=\x228\x22\x0a \
spacingx=\x22\
0.26458332\x22\x0a \
spacingy=\x220.2\
6458332\x22\x0a \
dotted=\x22false\x22\x0a \
visible=\x22t\
rue\x22\x0a enab\
led=\x22true\x22\x0a \
snapvisiblegri\
dlinesonly=\x22true\
\x22\x0a originx\
=\x220\x22\x0a orig\
iny=\x220\x22 />\x0a </s\
odipodi:namedvie\
w>\x0a <metadata\x0a \
id=\x22metadata\
5\x22>\x0a <rdf:RDF\
>\x0a <cc:Work\
\x0a rdf:ab\
out=\x22\x22>\x0a \
<dc:format>image\
/svg+xml</dc:for\
mat>\x0a <dc\
:type\x0a \
rdf:resource=\x22h\
ttp://purl.org/d\
c/dcmitype/Still\
Image\x22 />\x0a \
<dc:title></dc\
:title>\x0a </\
cc:Work>\x0a </r\
df:RDF>\x0a </meta\
data>\x0a <g\x0a \
inkscape:label=\x22\
Layer 1\x22\x0a in\
kscape:groupmode\
=\x22layer\x22\x0a id\
=\x22layer1\x22\x0a t\
ransform=\x22transl\
ate(0,-291.70835\
)\x22>\x0a <g\x0a \
id=\x22g847\x22\x0a \
transform=\x22ma\
trix(0.05207439,\
0,0,0.05207453,-\
0.90125164,282.4\
1203)\x22>\x0a <g\
\x0a id=\x22g8\
51\x22>\x0a <g\x0a\
id=\x22g\
1059\x22\x0a \
transform=\x22matr\
ix(1.9986219,0,0\
,1.9986185,17.32\
4484,-313.52314)\
\x22>\x0a <pa\
th\x0a \
inkscape:transfo\
rm-center-y=\x223.1\
75\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.07000433;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1;paint-ord\
er:stroke fill m\
arkers\x22\x0a \
d=\x22M 25.399\
999,271.60002 -8\
.0000008e-7,246.\
20002 H 50.79999\
9 Z\x22\x0a \
id=\x22path883\x22\x0a \
inks\
cape:connector-c\
urvature=\x220\x22\x0a \
sodipo\
di:nodetypes=\x22cc\
cc\x22 />\x0a \
<path\x0a \
sodipodi:nod\
etypes=\x22cccc\x22\x0a \
inksc\
ape:connector-cu\
rvature=\x220\x22\x0a \
id=\x22pat\
h880\x22\x0a \
d=\x22m 25.39999\
9,271.60002 25.3\
99999,25.4 H 0 Z\
\x22\x0a i\
nkscape:transfor\
m-center-y=\x22-3.1\
749995\x22\x0a \
style=\x22opac\
ity:1;fill:none;\
fill-opacity:0.4\
9382719;stroke:#\
ffffff00;stroke-\
width:0.07000433\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22 />\x0a \
<rect\x0a \
ry=\x22\
5.0534658\x22\x0a \
y=\x22253.8\
4885\x22\x0a \
x=\x227.6487389\x22\
\x0a he\
ight=\x2235.528759\x22\
\x0a wi\
dth=\x2235.528786\x22\x0a\
id=\
\x22rect870\x22\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.061844\
19;stroke-lineca\
p:round;stroke-l\
inejoin:round;st\
roke-miterlimit:\
4;stroke-dasharr\
ay:none;stroke-d\
ashoffset:0;stro\
ke-opacity:1;pai\
nt-order:stroke \
fill markers\x22 />\
\x0a <circ\
le\x0a \
r=\x2225.396828\x22\x0a \
cy=\x222\
71.60001\x22\x0a \
cx=\x2225.4\x22\
\x0a id\
=\x22path872\x22\x0a \
style=\x22o\
pacity:1;fill:no\
ne;fill-opacity:\
0.49382719;strok\
e:#ffffff00;stro\
ke-width:0.07635\
882;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1;pa\
int-order:stroke\
fill markers\x22 /\
>\x0a <cir\
cle\x0a \
transform=\x22rota\
te(-45)\x22\x0a \
cx=\x22-174.0\
8969\x22\x0a \
cy=\x22210.01071\
\x22\x0a r\
=\x2212.656071\x22\x0a \
id=\x22pa\
th876\x22\x0a \
style=\x22opaci\
ty:1;fill:none;f\
ill-opacity:0.49\
382719;stroke:#f\
fffff00;stroke-w\
idth:0.07399406;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1;paint-\
order:stroke fil\
l markers\x22 />\x0a \
<path\x0a \
inksc\
ape:transform-ce\
nter-x=\x22-3.17499\
99\x22\x0a \
sodipodi:nodety\
pes=\x22cccc\x22\x0a \
inkscape\
:connector-curva\
ture=\x220\x22\x0a \
id=\x22path90\
4\x22\x0a \
d=\x22m 25.4,271.60\
002 -25.40000040\
000004,25.4 v -5\
0.8 z\x22\x0a \
style=\x22opaci\
ty:1;fill:none;f\
ill-opacity:0.49\
382719;stroke:#f\
fffff00;stroke-w\
idth:0.07000433;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1;paint-\
order:stroke fil\
l markers\x22 />\x0a \
<path\x0a \
inksc\
ape:transform-ce\
nter-x=\x223.175\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
7000433;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22\x0a \
d=\x22m 25.399999,2\
71.60002 25.4,-2\
5.4 v 50.8 z\x22\x0a \
id=\x22p\
ath906\x22\x0a \
inkscape:co\
nnector-curvatur\
e=\x220\x22\x0a \
sodipodi:node\
types=\x22cccc\x22 />\x0a\
<rect\x0a\
ry=\
\x225.0514922\x22\x0a \
y=\x22256.\
39301\x22\x0a \
x=\x222.5663135\
\x22\x0a h\
eight=\x2230.440479\
\x22\x0a w\
idth=\x2245.693634\x22\
\x0a id\
=\x22rect837\x22\x0a \
style=\x22o\
pacity:1;fill:no\
ne;fill-opacity:\
0.49382719;strok\
e:#ffffff00;stro\
ke-width:0.06574\
38;stroke-lineca\
p:round;stroke-l\
inejoin:round;st\
roke-miterlimit:\
4;stroke-dasharr\
ay:none;stroke-d\
ashoffset:0;stro\
ke-opacity:1;pai\
nt-order:stroke \
fill markers\x22 />\
\x0a <rect\
\x0a st\
yle=\x22opacity:1;f\
ill:none;fill-op\
acity:0.49382719\
;stroke:#ffffff0\
0;stroke-width:0\
.0657438;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22\x0a \
id=\x22rect831\x22\x0a \
width\
=\x2245.693588\x22\x0a \
height\
=\x2230.44051\x22\x0a \
x=\x22248.\
76645\x22\x0a \
y=\x22-40.63338\
5\x22\x0a \
ry=\x225.051497\x22\x0a \
trans\
form=\x22rotate(90)\
\x22 />\x0a </g\
>\x0a </g>\x0a \
</g>\x0a <path\x0a\
style=\x22op\
acity:1;fill:#ff\
c107;fill-opacit\
y:1;stroke:none;\
stroke-width:0.3\
8596651;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-opacity\
:1\x22\x0a d=\x22m \
50.206421,401.67\
683 c 110.217209\
,0.71279 55.1086\
09,0.3564 0,0 z\x22\
\x0a id=\x22rect\
997\x22\x0a inks\
cape:connector-c\
urvature=\x220\x22 />\x0a\
<path\x0a \
style=\x22opacity:\
1;fill:#0000ff;f\
ill-opacity:1;st\
roke:none;stroke\
-width:2.1008215\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22\x0a \
d=\x22M 10,3.013\
6719 A 6.9930773\
,6.9930773 0 0 0\
3.0058594,10.00\
5859 6.9930773,6\
.9930773 0 0 0 1\
0,17 6.9930773,6\
.9930773 0 0 0 1\
6.992188,10.0058\
59 6.9930773,6.9\
930773 0 0 0 10,\
3.0136719 Z M 10\
,4 a 5.9999993,5\
.9999993 0 0 1 6\
,6 5.9999993,5.9\
999993 0 0 1 -6,\
6 5.9999993,5.99\
99993 0 0 1 -6,-\
6 5.9999993,5.99\
99993 0 0 1 6,-6\
z\x22\x0a trans\
form=\x22matrix(0.2\
6458332,0,0,0.26\
458332,0,291.708\
35)\x22\x0a id=\x22\
path826\x22\x0a \
inkscape:connect\
or-curvature=\x220\x22\
/>\x0a <circle\x0a\
style=\x22op\
acity:1;fill:#00\
00ff;fill-opacit\
y:1;stroke:none;\
stroke-width:0.3\
1793803;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22\x0a id=\x22ci\
rcle830\x22\x0a \
cx=\x222.6458333\x22\x0a \
cy=\x22294.35\
419\x22\x0a r=\x221\
.0583313\x22 />\x0a <\
/g>\x0a</svg>\x0a\
\x00\x00\x1e\xba\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22rig\
htarrow.svg\x22\x0a \
inkscape:export-\
filename=\x22/home/\
yeison/Developme\
nt/piton/art/ico\
n_lite.png\x22\x0a i\
nkscape:export-x\
dpi=\x2296\x22\x0a inks\
cape:export-ydpi\
=\x2296\x22>\x0a <defs\x0a \
id=\x22defs2\x22 /\
>\x0a <sodipodi:na\
medview\x0a id=\
\x22base\x22\x0a page\
color=\x22#ffffff\x22\x0a\
bordercolor\
=\x22#666666\x22\x0a \
borderopacity=\x221\
.0\x22\x0a inkscap\
e:pageopacity=\x220\
.0\x22\x0a inkscap\
e:pageshadow=\x222\x22\
\x0a inkscape:z\
oom=\x2228.704913\x22\x0a\
inkscape:cx\
=\x224.6862968\x22\x0a \
inkscape:cy=\x225\
.0026685\x22\x0a i\
nkscape:document\
-units=\x22px\x22\x0a \
inkscape:curren\
t-layer=\x22layer1\x22\
\x0a showgrid=\x22\
true\x22\x0a inksc\
ape:window-width\
=\x221920\x22\x0a ink\
scape:window-hei\
ght=\x221004\x22\x0a \
inkscape:window-\
x=\x220\x22\x0a inksc\
ape:window-y=\x220\x22\
\x0a inkscape:w\
indow-maximized=\
\x221\x22\x0a inkscap\
e:showpageshadow\
=\x22false\x22\x0a un\
its=\x22px\x22\x0a in\
kscape:pagecheck\
erboard=\x22false\x22\x0a\
showguides=\
\x22true\x22\x0a inks\
cape:snap-bbox=\x22\
true\x22\x0a inksc\
ape:bbox-paths=\x22\
true\x22\x0a inksc\
ape:bbox-nodes=\x22\
true\x22\x0a inksc\
ape:snap-bbox-ed\
ge-midpoints=\x22tr\
ue\x22\x0a inkscap\
e:snap-bbox-midp\
oints=\x22true\x22\x0a \
inkscape:snap-\
nodes=\x22true\x22\x0a \
inkscape:objec\
t-paths=\x22true\x22\x0a \
inkscape:sna\
p-intersection-p\
aths=\x22true\x22\x0a \
inkscape:snap-s\
mooth-nodes=\x22tru\
e\x22\x0a inkscape\
:snap-midpoints=\
\x22true\x22\x0a inks\
cape:snap-global\
=\x22true\x22\x0a fit\
-margin-top=\x220\x22\x0a\
fit-margin-\
left=\x220\x22\x0a fi\
t-margin-right=\x22\
0\x22\x0a fit-marg\
in-bottom=\x220\x22\x0a \
inkscape:guid\
e-bbox=\x22true\x22>\x0a \
<inkscape:gri\
d\x0a type=\x22x\
ygrid\x22\x0a id\
=\x22grid974\x22\x0a \
empspacing=\x228\x22\
\x0a spacingx\
=\x220.26458332\x22\x0a \
spacingy=\x220\
.26458332\x22\x0a \
dotted=\x22false\x22\
\x0a visible=\
\x22true\x22\x0a en\
abled=\x22true\x22\x0a \
snapvisibleg\
ridlinesonly=\x22tr\
ue\x22\x0a origi\
nx=\x220\x22\x0a or\
iginy=\x220\x22 />\x0a <\
/sodipodi:namedv\
iew>\x0a <metadata\
\x0a id=\x22metada\
ta5\x22>\x0a <rdf:R\
DF>\x0a <cc:Wo\
rk\x0a rdf:\
about=\x22\x22>\x0a \
<dc:format>ima\
ge/svg+xml</dc:f\
ormat>\x0a <\
dc:type\x0a \
rdf:resource=\
\x22http://purl.org\
/dc/dcmitype/Sti\
llImage\x22 />\x0a \
<dc:title />\
\x0a </cc:Work\
>\x0a </rdf:RDF>\
\x0a </metadata>\x0a \
<g\x0a inkscap\
e:label=\x22Layer 1\
\x22\x0a inkscape:\
groupmode=\x22layer\
\x22\x0a id=\x22layer\
1\x22\x0a transfor\
m=\x22translate(0,-\
291.70835)\x22>\x0a \
<g\x0a id=\x22g\
847\x22\x0a tran\
sform=\x22matrix(0.\
05207439,0,0,0.0\
5207453,-0.90125\
164,282.41203)\x22>\
\x0a <g\x0a \
id=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transf\
orm=\x22matrix(1.99\
86219,0,0,1.9986\
185,17.324484,-3\
13.52314)\x22>\x0a \
<path\x0a \
inkscap\
e:transform-cent\
er-y=\x223.175\x22\x0a \
style=\
\x22opacity:1;fill:\
none;fill-opacit\
y:0.49382719;str\
oke:#ffffff00;st\
roke-width:0.070\
00433;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
\x0a d=\
\x22M 25.399999,271\
.60002 -8.000000\
8e-7,246.20002 H\
50.799999 Z\x22\x0a \
id=\x22p\
ath883\x22\x0a \
inkscape:co\
nnector-curvatur\
e=\x220\x22\x0a \
sodipodi:node\
types=\x22cccc\x22 />\x0a\
<path\x0a\
sod\
ipodi:nodetypes=\
\x22cccc\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a \
id=\x22path880\x22\x0a \
d=\x22m\
25.399999,271.6\
0002 25.399999,2\
5.4 H 0 Z\x22\x0a \
inkscape\
:transform-cente\
r-y=\x22-3.1749995\x22\
\x0a st\
yle=\x22opacity:1;f\
ill:none;fill-op\
acity:0.49382719\
;stroke:#ffffff0\
0;stroke-width:0\
.07000433;stroke\
-linecap:round;s\
troke-linejoin:r\
ound;stroke-mite\
rlimit:4;stroke-\
dasharray:none;s\
troke-dashoffset\
:0;stroke-opacit\
y:1;paint-order:\
stroke fill mark\
ers\x22 />\x0a \
<rect\x0a \
ry=\x225.05346\
58\x22\x0a \
y=\x22253.84885\x22\x0a \
x=\x227\
.6487389\x22\x0a \
height=\x223\
5.528759\x22\x0a \
width=\x2235\
.528786\x22\x0a \
id=\x22rect87\
0\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.06184419;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<circle\x0a \
r=\x2225.3\
96828\x22\x0a \
cy=\x22271.6000\
1\x22\x0a \
cx=\x2225.4\x22\x0a \
id=\x22path8\
72\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.07635882;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1;paint-ord\
er:stroke fill m\
arkers\x22 />\x0a \
<circle\x0a \
transf\
orm=\x22rotate(-45)\
\x22\x0a c\
x=\x22-174.08969\x22\x0a \
cy=\x22\
210.01071\x22\x0a \
r=\x2212.65\
6071\x22\x0a \
id=\x22path876\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07399406;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<path\x0a \
inkscape:tra\
nsform-center-x=\
\x22-3.1749999\x22\x0a \
sodipo\
di:nodetypes=\x22cc\
cc\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22\x0a i\
d=\x22path904\x22\x0a \
d=\x22m 25\
.4,271.60002 -25\
.40000040000004,\
25.4 v -50.8 z\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07000433;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<path\x0a \
inkscape:tra\
nsform-center-x=\
\x223.175\x22\x0a \
style=\x22opac\
ity:1;fill:none;\
fill-opacity:0.4\
9382719;stroke:#\
ffffff00;stroke-\
width:0.07000433\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22\x0a \
d=\x22m 25\
.399999,271.6000\
2 25.4,-25.4 v 5\
0.8 z\x22\x0a \
id=\x22path906\x22\
\x0a in\
kscape:connector\
-curvature=\x220\x22\x0a \
sodi\
podi:nodetypes=\x22\
cccc\x22 />\x0a \
<rect\x0a \
ry=\x225.0514\
922\x22\x0a \
y=\x22256.39301\x22\x0a\
x=\x22\
2.5663135\x22\x0a \
height=\x22\
30.440479\x22\x0a \
width=\x224\
5.693634\x22\x0a \
id=\x22rect8\
37\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.0657438;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<rect\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.065743\
8;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22\x0a \
id=\x22re\
ct831\x22\x0a \
width=\x2245.69\
3588\x22\x0a \
height=\x2230.44\
051\x22\x0a \
x=\x22248.76645\x22\x0a\
y=\x22\
-40.633385\x22\x0a \
ry=\x225.0\
51497\x22\x0a \
transform=\x22r\
otate(90)\x22 />\x0a \
</g>\x0a \
</g>\x0a </g>\x0a \
<path\x0a \
style=\x22opacity:1\
;fill:#ffc107;fi\
ll-opacity:1;str\
oke:none;stroke-\
width:0.38596651\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-opacity:1\x22\x0a \
d=\x22m 50.2064\
21,401.67683 c 1\
10.217209,0.7127\
9 55.108609,0.35\
64 0,0 z\x22\x0a \
id=\x22rect997\x22\x0a \
inkscape:co\
nnector-curvatur\
e=\x220\x22 />\x0a <pa\
th\x0a style=\
\x22fill:none;strok\
e:#0000ff;stroke\
-width:0.5291666\
4;stroke-linecap\
:butt;stroke-lin\
ejoin:bevel;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-opa\
city:1\x22\x0a d\
=\x22m 1.8205432,29\
5.40031 1.65058,\
-1.04613 -1.6505\
8,-1.04613\x22\x0a \
id=\x22path827\x22\x0a\
inkscape:\
connector-curvat\
ure=\x220\x22\x0a s\
odipodi:nodetype\
s=\x22ccc\x22 />\x0a </g\
>\x0a</svg>\x0a\
\x00\x00 Y\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22rad\
iobutton_uncheck\
ed.svg\x22\x0a inksc\
ape:export-filen\
ame=\x22/home/yeiso\
n/Development/pi\
ton/art/icon_lit\
e.png\x22\x0a inksca\
pe:export-xdpi=\x22\
96\x22\x0a inkscape:\
export-ydpi=\x2296\x22\
>\x0a <defs\x0a i\
d=\x22defs2\x22 />\x0a <\
sodipodi:namedvi\
ew\x0a id=\x22base\
\x22\x0a pagecolor\
=\x22#ffffff\x22\x0a \
bordercolor=\x22#66\
6666\x22\x0a borde\
ropacity=\x221.0\x22\x0a \
inkscape:pag\
eopacity=\x220.0\x22\x0a \
inkscape:pag\
eshadow=\x222\x22\x0a \
inkscape:zoom=\x22\
46.98187\x22\x0a i\
nkscape:cx=\x22-0.0\
91640624\x22\x0a i\
nkscape:cy=\x229.47\
69385\x22\x0a inks\
cape:document-un\
its=\x22px\x22\x0a in\
kscape:current-l\
ayer=\x22layer1\x22\x0a \
showgrid=\x22tru\
e\x22\x0a inkscape\
:window-width=\x221\
920\x22\x0a inksca\
pe:window-height\
=\x221004\x22\x0a ink\
scape:window-x=\x22\
0\x22\x0a inkscape\
:window-y=\x220\x22\x0a \
inkscape:wind\
ow-maximized=\x221\x22\
\x0a inkscape:s\
howpageshadow=\x22f\
alse\x22\x0a units\
=\x22px\x22\x0a inksc\
ape:pagecheckerb\
oard=\x22false\x22\x0a \
showguides=\x22tr\
ue\x22\x0a inkscap\
e:snap-bbox=\x22tru\
e\x22\x0a inkscape\
:bbox-paths=\x22tru\
e\x22\x0a inkscape\
:bbox-nodes=\x22tru\
e\x22\x0a inkscape\
:snap-bbox-edge-\
midpoints=\x22true\x22\
\x0a inkscape:s\
nap-bbox-midpoin\
ts=\x22true\x22\x0a i\
nkscape:snap-nod\
es=\x22true\x22\x0a i\
nkscape:object-p\
aths=\x22true\x22\x0a \
inkscape:snap-i\
ntersection-path\
s=\x22true\x22\x0a in\
kscape:snap-smoo\
th-nodes=\x22true\x22\x0a\
inkscape:sn\
ap-midpoints=\x22tr\
ue\x22\x0a inkscap\
e:snap-global=\x22t\
rue\x22\x0a fit-ma\
rgin-top=\x220\x22\x0a \
fit-margin-lef\
t=\x220\x22\x0a fit-m\
argin-right=\x220\x22\x0a\
fit-margin-\
bottom=\x220\x22\x0a \
inkscape:guide-b\
box=\x22true\x22>\x0a \
<inkscape:grid\x0a \
type=\x22xygr\
id\x22\x0a id=\x22g\
rid974\x22\x0a e\
mpspacing=\x228\x22\x0a \
spacingx=\x220\
.26458332\x22\x0a \
spacingy=\x220.26\
458332\x22\x0a d\
otted=\x22false\x22\x0a \
visible=\x22tr\
ue\x22\x0a enabl\
ed=\x22true\x22\x0a \
snapvisiblegrid\
linesonly=\x22true\x22\
\x0a originx=\
\x220\x22\x0a origi\
ny=\x220\x22 />\x0a </so\
dipodi:namedview\
>\x0a <metadata\x0a \
id=\x22metadata5\
\x22>\x0a <rdf:RDF>\
\x0a <cc:Work\x0a\
rdf:abo\
ut=\x22\x22>\x0a <\
dc:format>image/\
svg+xml</dc:form\
at>\x0a <dc:\
type\x0a \
rdf:resource=\x22ht\
tp://purl.org/dc\
/dcmitype/StillI\
mage\x22 />\x0a \
<dc:title></dc:\
title>\x0a </c\
c:Work>\x0a </rd\
f:RDF>\x0a </metad\
ata>\x0a <g\x0a i\
nkscape:label=\x22L\
ayer 1\x22\x0a ink\
scape:groupmode=\
\x22layer\x22\x0a id=\
\x22layer1\x22\x0a tr\
ansform=\x22transla\
te(0,-291.70835)\
\x22>\x0a <g\x0a \
id=\x22g847\x22\x0a \
transform=\x22mat\
rix(0.05207439,0\
,0,0.05207453,-0\
.90125164,282.41\
203)\x22>\x0a <g\x0a\
id=\x22g85\
1\x22>\x0a <g\x0a \
id=\x22g1\
059\x22\x0a \
transform=\x22matri\
x(1.9986219,0,0,\
1.9986185,17.324\
484,-313.52314)\x22\
>\x0a <pat\
h\x0a i\
nkscape:transfor\
m-center-y=\x223.17\
5\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.07000433;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22\x0a \
d=\x22M 25.3999\
99,271.60002 -8.\
0000008e-7,246.2\
0002 H 50.799999\
Z\x22\x0a \
id=\x22path883\x22\x0a \
inksc\
ape:connector-cu\
rvature=\x220\x22\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22 />\x0a \
<path\x0a \
sodipodi:node\
types=\x22cccc\x22\x0a \
inksca\
pe:connector-cur\
vature=\x220\x22\x0a \
id=\x22path\
880\x22\x0a \
d=\x22m 25.399999\
,271.60002 25.39\
9999,25.4 H 0 Z\x22\
\x0a in\
kscape:transform\
-center-y=\x22-3.17\
49995\x22\x0a \
style=\x22opaci\
ty:1;fill:none;f\
ill-opacity:0.49\
382719;stroke:#f\
fffff00;stroke-w\
idth:0.07000433;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1;paint-\
order:stroke fil\
l markers\x22 />\x0a \
<rect\x0a \
ry=\x225\
.0534658\x22\x0a \
y=\x22253.84\
885\x22\x0a \
x=\x227.6487389\x22\x0a\
hei\
ght=\x2235.528759\x22\x0a\
wid\
th=\x2235.528786\x22\x0a \
id=\x22\
rect870\x22\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0618441\
9;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22 />\x0a\
<circl\
e\x0a r\
=\x2225.396828\x22\x0a \
cy=\x2227\
1.60001\x22\x0a \
cx=\x2225.4\x22\x0a\
id=\
\x22path872\x22\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.076358\
82;stroke-lineca\
p:round;stroke-l\
inejoin:round;st\
roke-miterlimit:\
4;stroke-dasharr\
ay:none;stroke-d\
ashoffset:0;stro\
ke-opacity:1;pai\
nt-order:stroke \
fill markers\x22 />\
\x0a <circ\
le\x0a \
transform=\x22rotat\
e(-45)\x22\x0a \
cx=\x22-174.08\
969\x22\x0a \
cy=\x22210.01071\x22\
\x0a r=\
\x2212.656071\x22\x0a \
id=\x22pat\
h876\x22\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.07399406;s\
troke-linecap:ro\
und;stroke-linej\
oin:round;stroke\
-miterlimit:4;st\
roke-dasharray:n\
one;stroke-dasho\
ffset:0;stroke-o\
pacity:1;paint-o\
rder:stroke fill\
markers\x22 />\x0a \
<path\x0a \
inksca\
pe:transform-cen\
ter-x=\x22-3.174999\
9\x22\x0a \
sodipodi:nodetyp\
es=\x22cccc\x22\x0a \
inkscape:\
connector-curvat\
ure=\x220\x22\x0a \
id=\x22path904\
\x22\x0a d\
=\x22m 25.4,271.600\
02 -25.400000400\
00004,25.4 v -50\
.8 z\x22\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.07000433;s\
troke-linecap:ro\
und;stroke-linej\
oin:round;stroke\
-miterlimit:4;st\
roke-dasharray:n\
one;stroke-dasho\
ffset:0;stroke-o\
pacity:1;paint-o\
rder:stroke fill\
markers\x22 />\x0a \
<path\x0a \
inksca\
pe:transform-cen\
ter-x=\x223.175\x22\x0a \
style\
=\x22opacity:1;fill\
:none;fill-opaci\
ty:0.49382719;st\
roke:#ffffff00;s\
troke-width:0.07\
000433;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
;paint-order:str\
oke fill markers\
\x22\x0a d\
=\x22m 25.399999,27\
1.60002 25.4,-25\
.4 v 50.8 z\x22\x0a \
id=\x22pa\
th906\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a \
sodipodi:nodet\
ypes=\x22cccc\x22 />\x0a \
<rect\x0a \
ry=\x22\
5.0514922\x22\x0a \
y=\x22256.3\
9301\x22\x0a \
x=\x222.5663135\x22\
\x0a he\
ight=\x2230.440479\x22\
\x0a wi\
dth=\x2245.693634\x22\x0a\
id=\
\x22rect837\x22\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.065743\
8;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22 />\x0a\
<rect\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
0657438;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22\x0a \
id=\x22rect831\x22\x0a \
width=\
\x2245.693588\x22\x0a \
height=\
\x2230.44051\x22\x0a \
x=\x22248.7\
6645\x22\x0a \
y=\x22-40.633385\
\x22\x0a r\
y=\x225.051497\x22\x0a \
transf\
orm=\x22rotate(90)\x22\
/>\x0a </g>\
\x0a </g>\x0a \
</g>\x0a <path\x0a \
style=\x22opa\
city:1;fill:#ffc\
107;fill-opacity\
:1;stroke:none;s\
troke-width:0.38\
596651;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-opacity:\
1\x22\x0a d=\x22m 5\
0.206421,401.676\
83 c 110.217209,\
0.71279 55.10860\
9,0.3564 0,0 z\x22\x0a\
id=\x22rect9\
97\x22\x0a inksc\
ape:connector-cu\
rvature=\x220\x22 />\x0a \
<path\x0a \
style=\x22opacity:1\
;fill:#0000ff;fi\
ll-opacity:0.352\
94119;stroke:non\
e;stroke-width:2\
.1008215;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22\x0a d=\x22M \
10.097656,3.0078\
125 A 6.9930773,\
6.9930773 0 0 0 \
3.1054688,10 6.9\
930773,6.9930773\
0 0 0 10.097656\
,16.994141 6.993\
0773,6.9930773 0\
0 0 17.091797,1\
0 6.9930773,6.99\
30773 0 0 0 10.0\
97656,3.0078125 \
Z M 10,4 a 5.999\
9844,5.9999844 0\
0 1 6,6 5.99998\
44,5.9999844 0 0\
1 -6,6 5.999984\
4,5.9999844 0 0 \
1 -6,-6 5.999984\
4,5.9999844 0 0 \
1 6,-6 z\x22\x0a \
transform=\x22matr\
ix(0.26458332,0,\
0,0.26458332,0,2\
91.70835)\x22\x0a \
id=\x22path826\x22\x0a \
inkscape:c\
onnector-curvatu\
re=\x220\x22 />\x0a </g>\
\x0a</svg>\x0a\
\x00\x00\x1f\xb4\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22tab\
_close.svg\x22\x0a i\
nkscape:export-f\
ilename=\x22/home/y\
eison/Developmen\
t/piton/art/icon\
_lite.png\x22\x0a in\
kscape:export-xd\
pi=\x2296\x22\x0a inksc\
ape:export-ydpi=\
\x2296\x22>\x0a <defs\x0a \
id=\x22defs2\x22 />\
\x0a <sodipodi:nam\
edview\x0a id=\x22\
base\x22\x0a pagec\
olor=\x22#ffffff\x22\x0a \
bordercolor=\
\x22#666666\x22\x0a b\
orderopacity=\x221.\
0\x22\x0a inkscape\
:pageopacity=\x220.\
0\x22\x0a inkscape\
:pageshadow=\x222\x22\x0a\
inkscape:zo\
om=\x2220.297438\x22\x0a \
inkscape:cx=\
\x2210.202825\x22\x0a \
inkscape:cy=\x228.\
9235955\x22\x0a in\
kscape:document-\
units=\x22px\x22\x0a \
inkscape:current\
-layer=\x22layer1\x22\x0a\
showgrid=\x22t\
rue\x22\x0a inksca\
pe:window-width=\
\x221920\x22\x0a inks\
cape:window-heig\
ht=\x221015\x22\x0a i\
nkscape:window-x\
=\x220\x22\x0a inksca\
pe:window-y=\x220\x22\x0a\
inkscape:wi\
ndow-maximized=\x22\
1\x22\x0a inkscape\
:showpageshadow=\
\x22false\x22\x0a uni\
ts=\x22px\x22\x0a ink\
scape:pagechecke\
rboard=\x22false\x22\x0a \
showguides=\x22\
false\x22\x0a inks\
cape:snap-bbox=\x22\
true\x22\x0a inksc\
ape:bbox-paths=\x22\
true\x22\x0a inksc\
ape:bbox-nodes=\x22\
true\x22\x0a inksc\
ape:snap-bbox-ed\
ge-midpoints=\x22tr\
ue\x22\x0a inkscap\
e:snap-bbox-midp\
oints=\x22true\x22\x0a \
inkscape:snap-\
nodes=\x22true\x22\x0a \
inkscape:objec\
t-paths=\x22true\x22\x0a \
inkscape:sna\
p-intersection-p\
aths=\x22true\x22\x0a \
inkscape:snap-s\
mooth-nodes=\x22tru\
e\x22\x0a inkscape\
:snap-midpoints=\
\x22true\x22\x0a inks\
cape:snap-global\
=\x22true\x22\x0a fit\
-margin-top=\x220\x22\x0a\
fit-margin-\
left=\x220\x22\x0a fi\
t-margin-right=\x22\
0\x22\x0a fit-marg\
in-bottom=\x220\x22\x0a \
inkscape:guid\
e-bbox=\x22true\x22>\x0a \
<inkscape:gri\
d\x0a type=\x22x\
ygrid\x22\x0a id\
=\x22grid974\x22\x0a \
empspacing=\x228\x22\
\x0a spacingx\
=\x220.26458332\x22\x0a \
spacingy=\x220\
.26458332\x22\x0a \
dotted=\x22false\x22\
\x0a visible=\
\x22true\x22\x0a en\
abled=\x22true\x22\x0a \
snapvisibleg\
ridlinesonly=\x22tr\
ue\x22\x0a origi\
nx=\x220\x22\x0a or\
iginy=\x220\x22 />\x0a <\
/sodipodi:namedv\
iew>\x0a <metadata\
\x0a id=\x22metada\
ta5\x22>\x0a <rdf:R\
DF>\x0a <cc:Wo\
rk\x0a rdf:\
about=\x22\x22>\x0a \
<dc:format>ima\
ge/svg+xml</dc:f\
ormat>\x0a <\
dc:type\x0a \
rdf:resource=\
\x22http://purl.org\
/dc/dcmitype/Sti\
llImage\x22 />\x0a \
<dc:title />\
\x0a </cc:Work\
>\x0a </rdf:RDF>\
\x0a </metadata>\x0a \
<g\x0a inkscap\
e:label=\x22Layer 1\
\x22\x0a inkscape:\
groupmode=\x22layer\
\x22\x0a id=\x22layer\
1\x22\x0a transfor\
m=\x22translate(0,-\
291.70835)\x22>\x0a \
<g\x0a id=\x22g\
847\x22\x0a tran\
sform=\x22matrix(0.\
05207439,0,0,0.0\
5207453,-0.90125\
164,282.41203)\x22>\
\x0a <g\x0a \
id=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transf\
orm=\x22matrix(1.99\
86219,0,0,1.9986\
185,17.324484,-3\
13.52314)\x22>\x0a \
<path\x0a \
inkscap\
e:transform-cent\
er-y=\x223.175\x22\x0a \
style=\
\x22opacity:1;fill:\
none;fill-opacit\
y:0.49382719;str\
oke:#ffffff00;st\
roke-width:0.070\
00433;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
\x0a d=\
\x22M 25.399999,271\
.60002 -8.000000\
8e-7,246.20002 H\
50.799999 Z\x22\x0a \
id=\x22p\
ath883\x22\x0a \
inkscape:co\
nnector-curvatur\
e=\x220\x22\x0a \
sodipodi:node\
types=\x22cccc\x22 />\x0a\
<path\x0a\
sod\
ipodi:nodetypes=\
\x22cccc\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a \
id=\x22path880\x22\x0a \
d=\x22m\
25.399999,271.6\
0002 25.399999,2\
5.4 H 0 Z\x22\x0a \
inkscape\
:transform-cente\
r-y=\x22-3.1749995\x22\
\x0a st\
yle=\x22opacity:1;f\
ill:none;fill-op\
acity:0.49382719\
;stroke:#ffffff0\
0;stroke-width:0\
.07000433;stroke\
-linecap:round;s\
troke-linejoin:r\
ound;stroke-mite\
rlimit:4;stroke-\
dasharray:none;s\
troke-dashoffset\
:0;stroke-opacit\
y:1;paint-order:\
stroke fill mark\
ers\x22 />\x0a \
<rect\x0a \
ry=\x225.05346\
58\x22\x0a \
y=\x22253.84885\x22\x0a \
x=\x227\
.6487389\x22\x0a \
height=\x223\
5.528759\x22\x0a \
width=\x2235\
.528786\x22\x0a \
id=\x22rect87\
0\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.06184419;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<circle\x0a \
r=\x2225.3\
96828\x22\x0a \
cy=\x22271.6000\
1\x22\x0a \
cx=\x2225.4\x22\x0a \
id=\x22path8\
72\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.07635882;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1;paint-ord\
er:stroke fill m\
arkers\x22 />\x0a \
<circle\x0a \
transf\
orm=\x22rotate(-45)\
\x22\x0a c\
x=\x22-174.08969\x22\x0a \
cy=\x22\
210.01071\x22\x0a \
r=\x2212.65\
6071\x22\x0a \
id=\x22path876\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07399406;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<path\x0a \
inkscape:tra\
nsform-center-x=\
\x22-3.1749999\x22\x0a \
sodipo\
di:nodetypes=\x22cc\
cc\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22\x0a i\
d=\x22path904\x22\x0a \
d=\x22m 25\
.4,271.60002 -25\
.40000040000004,\
25.4 v -50.8 z\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07000433;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<path\x0a \
inkscape:tra\
nsform-center-x=\
\x223.175\x22\x0a \
style=\x22opac\
ity:1;fill:none;\
fill-opacity:0.4\
9382719;stroke:#\
ffffff00;stroke-\
width:0.07000433\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22\x0a \
d=\x22m 25\
.399999,271.6000\
2 25.4,-25.4 v 5\
0.8 z\x22\x0a \
id=\x22path906\x22\
\x0a in\
kscape:connector\
-curvature=\x220\x22\x0a \
sodi\
podi:nodetypes=\x22\
cccc\x22 />\x0a \
<rect\x0a \
ry=\x225.0514\
922\x22\x0a \
y=\x22256.39301\x22\x0a\
x=\x22\
2.5663135\x22\x0a \
height=\x22\
30.440479\x22\x0a \
width=\x224\
5.693634\x22\x0a \
id=\x22rect8\
37\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.0657438;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<rect\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.065743\
8;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22\x0a \
id=\x22re\
ct831\x22\x0a \
width=\x2245.69\
3588\x22\x0a \
height=\x2230.44\
051\x22\x0a \
x=\x22248.76645\x22\x0a\
y=\x22\
-40.633385\x22\x0a \
ry=\x225.0\
51497\x22\x0a \
transform=\x22r\
otate(90)\x22 />\x0a \
</g>\x0a \
</g>\x0a </g>\x0a \
<path\x0a \
style=\x22opacity:1\
;fill:#ffc107;fi\
ll-opacity:1;str\
oke:none;stroke-\
width:0.38596651\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-opacity:1\x22\x0a \
d=\x22m 50.2064\
21,401.67683 c 1\
10.217209,0.7127\
9 55.108609,0.35\
64 0,0 z\x22\x0a \
id=\x22rect997\x22\x0a \
inkscape:co\
nnector-curvatur\
e=\x220\x22 />\x0a <pa\
th\x0a style=\
\x22fill:#0000ff;st\
roke:#0000ff;str\
oke-width:0.5291\
6664;stroke-line\
cap:butt;stroke-\
linejoin:miter;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
opacity:1\x22\x0a \
d=\x22m 0.9809021\
5,292.68924 3.32\
986205,3.32989\x22\x0a\
id=\x22path8\
26\x22\x0a inksc\
ape:connector-cu\
rvature=\x220\x22 />\x0a \
<path\x0a \
inkscape:connect\
or-curvature=\x220\x22\
\x0a id=\x22path\
842\x22\x0a d=\x22m\
4.3107782,292.6\
8925 -3.32989002\
,3.32987\x22\x0a \
style=\x22fill:#00\
00ff;stroke:#000\
0ff;stroke-width\
:0.52916664;stro\
ke-linecap:butt;\
stroke-linejoin:\
miter;stroke-mit\
erlimit:4;stroke\
-dasharray:none;\
stroke-opacity:1\
\x22 />\x0a </g>\x0a</sv\
g>\x0a\
\x00\x00)I\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22siz\
egrip.svg\x22\x0a in\
kscape:export-fi\
lename=\x22/home/ye\
ison/Development\
/piton/art/icon_\
lite.png\x22\x0a ink\
scape:export-xdp\
i=\x2296\x22\x0a inksca\
pe:export-ydpi=\x22\
96\x22>\x0a <defs\x0a \
id=\x22defs2\x22 />\x0a\
<sodipodi:name\
dview\x0a id=\x22b\
ase\x22\x0a pageco\
lor=\x22#ffffff\x22\x0a \
bordercolor=\x22\
#666666\x22\x0a bo\
rderopacity=\x221.0\
\x22\x0a inkscape:\
pageopacity=\x220.0\
\x22\x0a inkscape:\
pageshadow=\x222\x22\x0a \
inkscape:zoo\
m=\x2216\x22\x0a inks\
cape:cx=\x224.85602\
4\x22\x0a inkscape\
:cy=\x229.6877956\x22\x0a\
inkscape:do\
cument-units=\x22px\
\x22\x0a inkscape:\
current-layer=\x22l\
ayer1\x22\x0a show\
grid=\x22true\x22\x0a \
inkscape:window\
-width=\x221920\x22\x0a \
inkscape:wind\
ow-height=\x221004\x22\
\x0a inkscape:w\
indow-x=\x220\x22\x0a \
inkscape:window\
-y=\x220\x22\x0a inks\
cape:window-maxi\
mized=\x221\x22\x0a i\
nkscape:showpage\
shadow=\x22false\x22\x0a \
units=\x22px\x22\x0a \
inkscape:pag\
echeckerboard=\x22f\
alse\x22\x0a showg\
uides=\x22true\x22\x0a \
inkscape:snap-\
bbox=\x22true\x22\x0a \
inkscape:bbox-p\
aths=\x22true\x22\x0a \
inkscape:bbox-n\
odes=\x22true\x22\x0a \
inkscape:snap-b\
box-edge-midpoin\
ts=\x22true\x22\x0a i\
nkscape:snap-bbo\
x-midpoints=\x22tru\
e\x22\x0a inkscape\
:snap-nodes=\x22tru\
e\x22\x0a inkscape\
:object-paths=\x22t\
rue\x22\x0a inksca\
pe:snap-intersec\
tion-paths=\x22true\
\x22\x0a inkscape:\
snap-smooth-node\
s=\x22true\x22\x0a in\
kscape:snap-midp\
oints=\x22true\x22\x0a \
inkscape:snap-\
global=\x22true\x22\x0a \
fit-margin-to\
p=\x220\x22\x0a fit-m\
argin-left=\x220\x22\x0a \
fit-margin-r\
ight=\x220\x22\x0a fi\
t-margin-bottom=\
\x220\x22\x0a inkscap\
e:guide-bbox=\x22tr\
ue\x22>\x0a <inksca\
pe:grid\x0a t\
ype=\x22xygrid\x22\x0a \
id=\x22grid974\x22\
\x0a empspaci\
ng=\x228\x22\x0a sp\
acingx=\x220.264583\
32\x22\x0a spaci\
ngy=\x220.26458332\x22\
\x0a dotted=\x22\
false\x22\x0a vi\
sible=\x22true\x22\x0a \
enabled=\x22tru\
e\x22\x0a snapvi\
siblegridlineson\
ly=\x22true\x22\x0a \
originx=\x220\x22\x0a \
originy=\x220\x22 \
/>\x0a </sodipodi:\
namedview>\x0a <me\
tadata\x0a id=\x22\
metadata5\x22>\x0a \
<rdf:RDF>\x0a \
<cc:Work\x0a \
rdf:about=\x22\x22>\x0a\
<dc:form\
at>image/svg+xml\
</dc:format>\x0a \
<dc:type\x0a \
rdf:res\
ource=\x22http://pu\
rl.org/dc/dcmity\
pe/StillImage\x22 /\
>\x0a <dc:ti\
tle />\x0a </c\
c:Work>\x0a </rd\
f:RDF>\x0a </metad\
ata>\x0a <g\x0a i\
nkscape:label=\x22L\
ayer 1\x22\x0a ink\
scape:groupmode=\
\x22layer\x22\x0a id=\
\x22layer1\x22\x0a tr\
ansform=\x22transla\
te(0,-291.70835)\
\x22>\x0a <g\x0a \
id=\x22g847\x22\x0a \
transform=\x22mat\
rix(0.05207439,0\
,0,0.05207453,-0\
.90125164,282.41\
203)\x22>\x0a <g\x0a\
id=\x22g85\
1\x22>\x0a <g\x0a \
id=\x22g1\
059\x22\x0a \
transform=\x22matri\
x(1.9986219,0,0,\
1.9986185,17.324\
484,-313.52314)\x22\
>\x0a <pat\
h\x0a i\
nkscape:transfor\
m-center-y=\x223.17\
5\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.07000433;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22\x0a \
d=\x22M 25.3999\
99,271.60002 -8.\
0000008e-7,246.2\
0002 H 50.799999\
Z\x22\x0a \
id=\x22path883\x22\x0a \
inksc\
ape:connector-cu\
rvature=\x220\x22\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22 />\x0a \
<path\x0a \
sodipodi:node\
types=\x22cccc\x22\x0a \
inksca\
pe:connector-cur\
vature=\x220\x22\x0a \
id=\x22path\
880\x22\x0a \
d=\x22m 25.399999\
,271.60002 25.39\
9999,25.4 H 0 Z\x22\
\x0a in\
kscape:transform\
-center-y=\x22-3.17\
49995\x22\x0a \
style=\x22opaci\
ty:1;fill:none;f\
ill-opacity:0.49\
382719;stroke:#f\
fffff00;stroke-w\
idth:0.07000433;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1;paint-\
order:stroke fil\
l markers\x22 />\x0a \
<rect\x0a \
ry=\x225\
.0534658\x22\x0a \
y=\x22253.84\
885\x22\x0a \
x=\x227.6487389\x22\x0a\
hei\
ght=\x2235.528759\x22\x0a\
wid\
th=\x2235.528786\x22\x0a \
id=\x22\
rect870\x22\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0618441\
9;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22 />\x0a\
<circl\
e\x0a r\
=\x2225.396828\x22\x0a \
cy=\x2227\
1.60001\x22\x0a \
cx=\x2225.4\x22\x0a\
id=\
\x22path872\x22\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.076358\
82;stroke-lineca\
p:round;stroke-l\
inejoin:round;st\
roke-miterlimit:\
4;stroke-dasharr\
ay:none;stroke-d\
ashoffset:0;stro\
ke-opacity:1;pai\
nt-order:stroke \
fill markers\x22 />\
\x0a <circ\
le\x0a \
transform=\x22rotat\
e(-45)\x22\x0a \
cx=\x22-174.08\
969\x22\x0a \
cy=\x22210.01071\x22\
\x0a r=\
\x2212.656071\x22\x0a \
id=\x22pat\
h876\x22\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.07399406;s\
troke-linecap:ro\
und;stroke-linej\
oin:round;stroke\
-miterlimit:4;st\
roke-dasharray:n\
one;stroke-dasho\
ffset:0;stroke-o\
pacity:1;paint-o\
rder:stroke fill\
markers\x22 />\x0a \
<path\x0a \
inksca\
pe:transform-cen\
ter-x=\x22-3.174999\
9\x22\x0a \
sodipodi:nodetyp\
es=\x22cccc\x22\x0a \
inkscape:\
connector-curvat\
ure=\x220\x22\x0a \
id=\x22path904\
\x22\x0a d\
=\x22m 25.4,271.600\
02 -25.400000400\
00004,25.4 v -50\
.8 z\x22\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.07000433;s\
troke-linecap:ro\
und;stroke-linej\
oin:round;stroke\
-miterlimit:4;st\
roke-dasharray:n\
one;stroke-dasho\
ffset:0;stroke-o\
pacity:1;paint-o\
rder:stroke fill\
markers\x22 />\x0a \
<path\x0a \
inksca\
pe:transform-cen\
ter-x=\x223.175\x22\x0a \
style\
=\x22opacity:1;fill\
:none;fill-opaci\
ty:0.49382719;st\
roke:#ffffff00;s\
troke-width:0.07\
000433;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
;paint-order:str\
oke fill markers\
\x22\x0a d\
=\x22m 25.399999,27\
1.60002 25.4,-25\
.4 v 50.8 z\x22\x0a \
id=\x22pa\
th906\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a \
sodipodi:nodet\
ypes=\x22cccc\x22 />\x0a \
<rect\x0a \
ry=\x22\
5.0514922\x22\x0a \
y=\x22256.3\
9301\x22\x0a \
x=\x222.5663135\x22\
\x0a he\
ight=\x2230.440479\x22\
\x0a wi\
dth=\x2245.693634\x22\x0a\
id=\
\x22rect837\x22\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.065743\
8;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22 />\x0a\
<rect\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
0657438;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22\x0a \
id=\x22rect831\x22\x0a \
width=\
\x2245.693588\x22\x0a \
height=\
\x2230.44051\x22\x0a \
x=\x22248.7\
6645\x22\x0a \
y=\x22-40.633385\
\x22\x0a r\
y=\x225.051497\x22\x0a \
transf\
orm=\x22rotate(90)\x22\
/>\x0a </g>\
\x0a </g>\x0a \
</g>\x0a <path\x0a \
style=\x22opa\
city:1;fill:#ffc\
107;fill-opacity\
:1;stroke:none;s\
troke-width:0.38\
596651;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-opacity:\
1\x22\x0a d=\x22m 5\
0.206421,401.676\
83 c 110.217209,\
0.71279 55.10860\
9,0.3564 0,0 z\x22\x0a\
id=\x22rect9\
97\x22\x0a inksc\
ape:connector-cu\
rvature=\x220\x22 />\x0a \
<rect\x0a \
style=\x22opacity:1\
;fill:#0000ff;fi\
ll-opacity:1;str\
oke:none;stroke-\
width:1.05832505\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1\x22\x0a \
id=\x22rect832\x22\x0a\
width=\x220.\
52916664\x22\x0a \
height=\x220.52915\
841\x22\x0a x=\x224\
.2333331\x22\x0a \
y=\x22295.94168\x22 /\
>\x0a <rect\x0a \
y=\x22295.94168\x22\
\x0a x=\x221.058\
3333\x22\x0a hei\
ght=\x220.52915841\x22\
\x0a width=\x220\
.52916664\x22\x0a \
id=\x22rect836\x22\x0a \
style=\x22opa\
city:1;fill:#000\
0ff;fill-opacity\
:1;stroke:none;s\
troke-width:1.05\
832505;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
\x22 />\x0a <rect\x0a \
style=\x22opa\
city:1;fill:#000\
0ff;fill-opacity\
:1;stroke:none;s\
troke-width:1.05\
832505;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
\x22\x0a id=\x22rec\
t838\x22\x0a wid\
th=\x220.52916664\x22\x0a\
height=\x220\
.52915841\x22\x0a \
x=\x222.6458333\x22\x0a\
y=\x22295.94\
168\x22 />\x0a <rec\
t\x0a y=\x22292.\
76669\x22\x0a x=\
\x224.2333331\x22\x0a \
height=\x220.529\
15841\x22\x0a wi\
dth=\x220.52916664\x22\
\x0a id=\x22rect\
840\x22\x0a styl\
e=\x22opacity:1;fil\
l:#0000ff;fill-o\
pacity:1;stroke:\
none;stroke-widt\
h:1.05832505;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1\x22 />\x0a <\
rect\x0a styl\
e=\x22opacity:1;fil\
l:#0000ff;fill-o\
pacity:1;stroke:\
none;stroke-widt\
h:1.05832505;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1\x22\x0a i\
d=\x22rect842\x22\x0a \
width=\x220.5291\
6664\x22\x0a hei\
ght=\x220.52915841\x22\
\x0a x=\x221.852\
0832\x22\x0a y=\x22\
295.14795\x22 />\x0a \
<rect\x0a s\
tyle=\x22opacity:1;\
fill:#0000ff;fil\
l-opacity:1;stro\
ke:none;stroke-w\
idth:1.05832505;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1\x22\x0a \
id=\x22rect844\x22\x0a \
width=\x220.5\
2916664\x22\x0a \
height=\x220.529158\
41\x22\x0a x=\x223.\
4395831\x22\x0a \
y=\x22295.14795\x22 />\
\x0a <rect\x0a \
y=\x22294.35419\x22\x0a\
x=\x224.2333\
331\x22\x0a heig\
ht=\x220.52915841\x22\x0a\
width=\x220.\
52916664\x22\x0a \
id=\x22rect846\x22\x0a \
style=\x22opac\
ity:1;fill:#0000\
ff;fill-opacity:\
1;stroke:none;st\
roke-width:1.058\
32505;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1\x22\
/>\x0a <rect\x0a \
style=\x22opac\
ity:1;fill:#0000\
ff;fill-opacity:\
1;stroke:none;st\
roke-width:1.058\
32505;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1\x22\
\x0a id=\x22rect\
848\x22\x0a widt\
h=\x220.52916664\x22\x0a \
height=\x220.\
52915841\x22\x0a \
x=\x223.4395831\x22\x0a \
y=\x22293.560\
42\x22 />\x0a <rect\
\x0a style=\x22o\
pacity:1;fill:#0\
000ff;fill-opaci\
ty:1;stroke:none\
;stroke-width:1.\
05832505;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1\x22\x0a id=\x22r\
ect852\x22\x0a w\
idth=\x220.52916664\
\x22\x0a height=\
\x220.52915841\x22\x0a \
x=\x222.6458333\
\x22\x0a y=\x22294.\
35419\x22 />\x0a </g>\
\x0a</svg>\x0a\
\x00\x00$v\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22spl\
itter-horizontal\
.svg\x22\x0a inkscap\
e:export-filenam\
e=\x22/home/yeison/\
Development/pito\
n/art/icon_lite.\
png\x22\x0a inkscape\
:export-xdpi=\x2296\
\x22\x0a inkscape:ex\
port-ydpi=\x2296\x22>\x0a\
<defs\x0a id=\
\x22defs2\x22 />\x0a <so\
dipodi:namedview\
\x0a id=\x22base\x22\x0a\
pagecolor=\x22\
#ffffff\x22\x0a bo\
rdercolor=\x22#6666\
66\x22\x0a bordero\
pacity=\x221.0\x22\x0a \
inkscape:pageo\
pacity=\x220.0\x22\x0a \
inkscape:pages\
hadow=\x222\x22\x0a i\
nkscape:zoom=\x2224\
.802598\x22\x0a in\
kscape:cx=\x226.302\
3018\x22\x0a inksc\
ape:cy=\x228.969841\
\x22\x0a inkscape:\
document-units=\x22\
px\x22\x0a inkscap\
e:current-layer=\
\x22layer1\x22\x0a sh\
owgrid=\x22true\x22\x0a \
inkscape:wind\
ow-width=\x221920\x22\x0a\
inkscape:wi\
ndow-height=\x22100\
4\x22\x0a inkscape\
:window-x=\x220\x22\x0a \
inkscape:wind\
ow-y=\x220\x22\x0a in\
kscape:window-ma\
ximized=\x221\x22\x0a \
inkscape:showpa\
geshadow=\x22false\x22\
\x0a units=\x22px\x22\
\x0a inkscape:p\
agecheckerboard=\
\x22false\x22\x0a sho\
wguides=\x22true\x22\x0a \
inkscape:sna\
p-bbox=\x22true\x22\x0a \
inkscape:bbox\
-paths=\x22true\x22\x0a \
inkscape:bbox\
-nodes=\x22true\x22\x0a \
inkscape:snap\
-bbox-edge-midpo\
ints=\x22true\x22\x0a \
inkscape:snap-b\
box-midpoints=\x22t\
rue\x22\x0a inksca\
pe:snap-nodes=\x22t\
rue\x22\x0a inksca\
pe:object-paths=\
\x22true\x22\x0a inks\
cape:snap-inters\
ection-paths=\x22tr\
ue\x22\x0a inkscap\
e:snap-smooth-no\
des=\x22true\x22\x0a \
inkscape:snap-mi\
dpoints=\x22true\x22\x0a \
inkscape:sna\
p-global=\x22true\x22\x0a\
fit-margin-\
top=\x220\x22\x0a fit\
-margin-left=\x220\x22\
\x0a fit-margin\
-right=\x220\x22\x0a \
fit-margin-botto\
m=\x220\x22\x0a inksc\
ape:guide-bbox=\x22\
true\x22>\x0a <inks\
cape:grid\x0a \
type=\x22xygrid\x22\x0a \
id=\x22grid97\
4\x22\x0a empspa\
cing=\x228\x22\x0a \
spacingx=\x220.2645\
8332\x22\x0a spa\
cingy=\x220.2645833\
2\x22\x0a dotted\
=\x22false\x22\x0a \
visible=\x22true\x22\x0a \
enabled=\x22t\
rue\x22\x0a snap\
visiblegridlines\
only=\x22true\x22\x0a \
originx=\x220\x22\x0a \
originy=\x220\
\x22 />\x0a </sodipod\
i:namedview>\x0a <\
metadata\x0a id\
=\x22metadata5\x22>\x0a \
<rdf:RDF>\x0a \
<cc:Work\x0a \
rdf:about=\x22\x22\
>\x0a <dc:fo\
rmat>image/svg+x\
ml</dc:format>\x0a \
<dc:type\x0a\
rdf:r\
esource=\x22http://\
purl.org/dc/dcmi\
type/StillImage\x22\
/>\x0a <dc:\
title />\x0a <\
/cc:Work>\x0a </\
rdf:RDF>\x0a </met\
adata>\x0a <g\x0a \
inkscape:label=\
\x22Layer 1\x22\x0a i\
nkscape:groupmod\
e=\x22layer\x22\x0a i\
d=\x22layer1\x22\x0a \
transform=\x22trans\
late(0,-291.7083\
5)\x22>\x0a <g\x0a \
id=\x22g847\x22\x0a \
transform=\x22m\
atrix(0.05207439\
,0,0,0.05207453,\
-0.90125164,282.\
41203)\x22>\x0a <\
g\x0a id=\x22g\
851\x22>\x0a <g\
\x0a id=\x22\
g1059\x22\x0a \
transform=\x22mat\
rix(1.9986219,0,\
0,1.9986185,17.3\
24484,-313.52314\
)\x22>\x0a <p\
ath\x0a \
inkscape:transf\
orm-center-y=\x223.\
175\x22\x0a \
style=\x22opacity\
:1;fill:none;fil\
l-opacity:0.4938\
2719;stroke:#fff\
fff00;stroke-wid\
th:0.07000433;st\
roke-linecap:rou\
nd;stroke-linejo\
in:round;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-dashof\
fset:0;stroke-op\
acity:1;paint-or\
der:stroke fill \
markers\x22\x0a \
d=\x22M 25.39\
9999,271.60002 -\
8.0000008e-7,246\
.20002 H 50.7999\
99 Z\x22\x0a \
id=\x22path883\x22\x0a\
ink\
scape:connector-\
curvature=\x220\x22\x0a \
sodip\
odi:nodetypes=\x22c\
ccc\x22 />\x0a \
<path\x0a \
sodipodi:no\
detypes=\x22cccc\x22\x0a \
inks\
cape:connector-c\
urvature=\x220\x22\x0a \
id=\x22pa\
th880\x22\x0a \
d=\x22m 25.3999\
99,271.60002 25.\
399999,25.4 H 0 \
Z\x22\x0a \
inkscape:transfo\
rm-center-y=\x22-3.\
1749995\x22\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0700043\
3;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22 />\x0a\
<rect\x0a\
ry=\
\x225.0534658\x22\x0a \
y=\x22253.\
84885\x22\x0a \
x=\x227.6487389\
\x22\x0a h\
eight=\x2235.528759\
\x22\x0a w\
idth=\x2235.528786\x22\
\x0a id\
=\x22rect870\x22\x0a \
style=\x22o\
pacity:1;fill:no\
ne;fill-opacity:\
0.49382719;strok\
e:#ffffff00;stro\
ke-width:0.06184\
419;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1;pa\
int-order:stroke\
fill markers\x22 /\
>\x0a <cir\
cle\x0a \
r=\x2225.396828\x22\x0a \
cy=\x22\
271.60001\x22\x0a \
cx=\x2225.4\
\x22\x0a i\
d=\x22path872\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0763\
5882;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22 \
/>\x0a <ci\
rcle\x0a \
transform=\x22rot\
ate(-45)\x22\x0a \
cx=\x22-174.\
08969\x22\x0a \
cy=\x22210.0107\
1\x22\x0a \
r=\x2212.656071\x22\x0a \
id=\x22p\
ath876\x22\x0a \
style=\x22opac\
ity:1;fill:none;\
fill-opacity:0.4\
9382719;stroke:#\
ffffff00;stroke-\
width:0.07399406\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22 />\x0a \
<path\x0a \
inks\
cape:transform-c\
enter-x=\x22-3.1749\
999\x22\x0a \
sodipodi:nodet\
ypes=\x22cccc\x22\x0a \
inkscap\
e:connector-curv\
ature=\x220\x22\x0a \
id=\x22path9\
04\x22\x0a \
d=\x22m 25.4,271.6\
0002 -25.4000004\
0000004,25.4 v -\
50.8 z\x22\x0a \
style=\x22opac\
ity:1;fill:none;\
fill-opacity:0.4\
9382719;stroke:#\
ffffff00;stroke-\
width:0.07000433\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22 />\x0a \
<path\x0a \
inks\
cape:transform-c\
enter-x=\x223.175\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07000433;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22\x0a \
d=\x22m 25.399999,\
271.60002 25.4,-\
25.4 v 50.8 z\x22\x0a \
id=\x22\
path906\x22\x0a \
inkscape:c\
onnector-curvatu\
re=\x220\x22\x0a \
sodipodi:nod\
etypes=\x22cccc\x22 />\
\x0a <rect\
\x0a ry\
=\x225.0514922\x22\x0a \
y=\x22256\
.39301\x22\x0a \
x=\x222.566313\
5\x22\x0a \
height=\x2230.44047\
9\x22\x0a \
width=\x2245.693634\
\x22\x0a i\
d=\x22rect837\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0657\
438;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1;pa\
int-order:stroke\
fill markers\x22 /\
>\x0a <rec\
t\x0a s\
tyle=\x22opacity:1;\
fill:none;fill-o\
pacity:0.4938271\
9;stroke:#ffffff\
00;stroke-width:\
0.0657438;stroke\
-linecap:round;s\
troke-linejoin:r\
ound;stroke-mite\
rlimit:4;stroke-\
dasharray:none;s\
troke-dashoffset\
:0;stroke-opacit\
y:1;paint-order:\
stroke fill mark\
ers\x22\x0a \
id=\x22rect831\x22\x0a \
widt\
h=\x2245.693588\x22\x0a \
heigh\
t=\x2230.44051\x22\x0a \
x=\x22248\
.76645\x22\x0a \
y=\x22-40.6333\
85\x22\x0a \
ry=\x225.051497\x22\x0a \
tran\
sform=\x22rotate(90\
)\x22 />\x0a </\
g>\x0a </g>\x0a \
</g>\x0a <path\
\x0a style=\x22o\
pacity:1;fill:#f\
fc107;fill-opaci\
ty:1;stroke:none\
;stroke-width:0.\
38596651;stroke-\
miterlimit:4;str\
oke-dasharray:no\
ne;stroke-opacit\
y:1\x22\x0a d=\x22m\
50.206421,401.6\
7683 c 110.21720\
9,0.71279 55.108\
609,0.3564 0,0 z\
\x22\x0a id=\x22rec\
t997\x22\x0a ink\
scape:connector-\
curvature=\x220\x22 />\
\x0a <g\x0a i\
d=\x22g839\x22>\x0a \
<rect\x0a y\
=\x22291.97293\x22\x0a \
x=\x222.38124\
99\x22\x0a hei\
ght=\x220.52916664\x22\
\x0a width=\
\x220.52916664\x22\x0a \
id=\x22rect82\
7\x22\x0a styl\
e=\x22opacity:1;fil\
l:#0000ff;fill-o\
pacity:1;stroke:\
none;stroke-widt\
h:0.52916664;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1\x22 />\x0a \
<rect\x0a \
style=\x22opacity:1\
;fill:#0000ff;fi\
ll-opacity:1;str\
oke:none;stroke-\
width:0.52916664\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1\x22\x0a \
id=\x22rect829\
\x22\x0a width\
=\x220.52916664\x22\x0a \
height=\x220\
.52916664\x22\x0a \
x=\x222.3812499\
\x22\x0a y=\x2229\
6.20627\x22 />\x0a \
<rect\x0a \
y=\x22295.14792\x22\x0a \
x=\x222.381\
2499\x22\x0a h\
eight=\x220.5291666\
4\x22\x0a widt\
h=\x220.52916664\x22\x0a \
id=\x22rect\
832\x22\x0a st\
yle=\x22opacity:1;f\
ill:#0000ff;fill\
-opacity:1;strok\
e:none;stroke-wi\
dth:0.52916664;s\
troke-linecap:ro\
und;stroke-linej\
oin:round;stroke\
-miterlimit:4;st\
roke-dasharray:n\
one;stroke-dasho\
ffset:0;stroke-o\
pacity:1\x22 />\x0a \
<rect\x0a \
style=\x22opacity\
:1;fill:#0000ff;\
fill-opacity:1;s\
troke:none;strok\
e-width:0.529166\
64;stroke-lineca\
p:round;stroke-l\
inejoin:round;st\
roke-miterlimit:\
4;stroke-dasharr\
ay:none;stroke-d\
ashoffset:0;stro\
ke-opacity:1\x22\x0a \
id=\x22rect8\
34\x22\x0a wid\
th=\x220.52916664\x22\x0a\
height=\
\x220.52916664\x22\x0a \
x=\x222.38124\
99\x22\x0a y=\x22\
294.0896\x22 />\x0a \
<rect\x0a \
y=\x22293.03128\x22\x0a\
x=\x222.38\
12499\x22\x0a \
height=\x220.529166\
64\x22\x0a wid\
th=\x220.52916664\x22\x0a\
id=\x22rec\
t836\x22\x0a s\
tyle=\x22opacity:1;\
fill:#0000ff;fil\
l-opacity:1;stro\
ke:none;stroke-w\
idth:0.52916664;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1\x22 />\x0a \
</g>\x0a </g>\x0a</\
svg>\x0a\
\x00\x00\x1e\xbb\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22dow\
narrow.svg\x22\x0a i\
nkscape:export-f\
ilename=\x22/home/y\
eison/Developmen\
t/piton/art/icon\
_lite.png\x22\x0a in\
kscape:export-xd\
pi=\x2296\x22\x0a inksc\
ape:export-ydpi=\
\x2296\x22>\x0a <defs\x0a \
id=\x22defs2\x22 />\
\x0a <sodipodi:nam\
edview\x0a id=\x22\
base\x22\x0a pagec\
olor=\x22#ffffff\x22\x0a \
bordercolor=\
\x22#666666\x22\x0a b\
orderopacity=\x221.\
0\x22\x0a inkscape\
:pageopacity=\x220.\
0\x22\x0a inkscape\
:pageshadow=\x222\x22\x0a\
inkscape:zo\
om=\x2228.704913\x22\x0a \
inkscape:cx=\
\x2211.479559\x22\x0a \
inkscape:cy=\x225.\
0026685\x22\x0a in\
kscape:document-\
units=\x22px\x22\x0a \
inkscape:current\
-layer=\x22layer1\x22\x0a\
showgrid=\x22t\
rue\x22\x0a inksca\
pe:window-width=\
\x221920\x22\x0a inks\
cape:window-heig\
ht=\x221004\x22\x0a i\
nkscape:window-x\
=\x220\x22\x0a inksca\
pe:window-y=\x220\x22\x0a\
inkscape:wi\
ndow-maximized=\x22\
1\x22\x0a inkscape\
:showpageshadow=\
\x22false\x22\x0a uni\
ts=\x22px\x22\x0a ink\
scape:pagechecke\
rboard=\x22false\x22\x0a \
showguides=\x22\
true\x22\x0a inksc\
ape:snap-bbox=\x22t\
rue\x22\x0a inksca\
pe:bbox-paths=\x22t\
rue\x22\x0a inksca\
pe:bbox-nodes=\x22t\
rue\x22\x0a inksca\
pe:snap-bbox-edg\
e-midpoints=\x22tru\
e\x22\x0a inkscape\
:snap-bbox-midpo\
ints=\x22true\x22\x0a \
inkscape:snap-n\
odes=\x22true\x22\x0a \
inkscape:object\
-paths=\x22true\x22\x0a \
inkscape:snap\
-intersection-pa\
ths=\x22true\x22\x0a \
inkscape:snap-sm\
ooth-nodes=\x22true\
\x22\x0a inkscape:\
snap-midpoints=\x22\
true\x22\x0a inksc\
ape:snap-global=\
\x22true\x22\x0a fit-\
margin-top=\x220\x22\x0a \
fit-margin-l\
eft=\x220\x22\x0a fit\
-margin-right=\x220\
\x22\x0a fit-margi\
n-bottom=\x220\x22\x0a \
inkscape:guide\
-bbox=\x22true\x22>\x0a \
<inkscape:grid\
\x0a type=\x22xy\
grid\x22\x0a id=\
\x22grid974\x22\x0a \
empspacing=\x228\x22\x0a\
spacingx=\
\x220.26458332\x22\x0a \
spacingy=\x220.\
26458332\x22\x0a \
dotted=\x22false\x22\x0a\
visible=\x22\
true\x22\x0a ena\
bled=\x22true\x22\x0a \
snapvisiblegr\
idlinesonly=\x22tru\
e\x22\x0a origin\
x=\x220\x22\x0a ori\
giny=\x220\x22 />\x0a </\
sodipodi:namedvi\
ew>\x0a <metadata\x0a\
id=\x22metadat\
a5\x22>\x0a <rdf:RD\
F>\x0a <cc:Wor\
k\x0a rdf:a\
bout=\x22\x22>\x0a \
<dc:format>imag\
e/svg+xml</dc:fo\
rmat>\x0a <d\
c:type\x0a \
rdf:resource=\x22\
http://purl.org/\
dc/dcmitype/Stil\
lImage\x22 />\x0a \
<dc:title />\x0a\
</cc:Work>\
\x0a </rdf:RDF>\x0a\
</metadata>\x0a \
<g\x0a inkscape\
:label=\x22Layer 1\x22\
\x0a inkscape:g\
roupmode=\x22layer\x22\
\x0a id=\x22layer1\
\x22\x0a transform\
=\x22translate(0,-2\
91.70835)\x22>\x0a \
<g\x0a id=\x22g8\
47\x22\x0a trans\
form=\x22matrix(0.0\
5207439,0,0,0.05\
207453,-0.901251\
64,282.41203)\x22>\x0a\
<g\x0a \
id=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transfo\
rm=\x22matrix(1.998\
6219,0,0,1.99861\
85,17.324484,-31\
3.52314)\x22>\x0a \
<path\x0a \
inkscape\
:transform-cente\
r-y=\x223.175\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0700\
0433;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22\x0a\
d=\x22\
M 25.399999,271.\
60002 -8.0000008\
e-7,246.20002 H \
50.799999 Z\x22\x0a \
id=\x22pa\
th883\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a \
sodipodi:nodet\
ypes=\x22cccc\x22 />\x0a \
<path\x0a \
sodi\
podi:nodetypes=\x22\
cccc\x22\x0a \
inkscape:conn\
ector-curvature=\
\x220\x22\x0a \
id=\x22path880\x22\x0a \
d=\x22m \
25.399999,271.60\
002 25.399999,25\
.4 H 0 Z\x22\x0a \
inkscape:\
transform-center\
-y=\x22-3.1749995\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07000433;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<rect\x0a \
ry=\x225.053465\
8\x22\x0a \
y=\x22253.84885\x22\x0a \
x=\x227.\
6487389\x22\x0a \
height=\x2235\
.528759\x22\x0a \
width=\x2235.\
528786\x22\x0a \
id=\x22rect870\
\x22\x0a s\
tyle=\x22opacity:1;\
fill:none;fill-o\
pacity:0.4938271\
9;stroke:#ffffff\
00;stroke-width:\
0.06184419;strok\
e-linecap:round;\
stroke-linejoin:\
round;stroke-mit\
erlimit:4;stroke\
-dasharray:none;\
stroke-dashoffse\
t:0;stroke-opaci\
ty:1;paint-order\
:stroke fill mar\
kers\x22 />\x0a \
<circle\x0a \
r=\x2225.39\
6828\x22\x0a \
cy=\x22271.60001\
\x22\x0a c\
x=\x2225.4\x22\x0a \
id=\x22path87\
2\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.07635882;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<circle\x0a \
transfo\
rm=\x22rotate(-45)\x22\
\x0a cx\
=\x22-174.08969\x22\x0a \
cy=\x222\
10.01071\x22\x0a \
r=\x2212.656\
071\x22\x0a \
id=\x22path876\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
7399406;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<path\x0a \
inkscape:tran\
sform-center-x=\x22\
-3.1749999\x22\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22\x0a \
inkscape:connect\
or-curvature=\x220\x22\
\x0a id\
=\x22path904\x22\x0a \
d=\x22m 25.\
4,271.60002 -25.\
40000040000004,2\
5.4 v -50.8 z\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
7000433;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<path\x0a \
inkscape:tran\
sform-center-x=\x22\
3.175\x22\x0a \
style=\x22opaci\
ty:1;fill:none;f\
ill-opacity:0.49\
382719;stroke:#f\
fffff00;stroke-w\
idth:0.07000433;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1;paint-\
order:stroke fil\
l markers\x22\x0a \
d=\x22m 25.\
399999,271.60002\
25.4,-25.4 v 50\
.8 z\x22\x0a \
id=\x22path906\x22\x0a\
ink\
scape:connector-\
curvature=\x220\x22\x0a \
sodip\
odi:nodetypes=\x22c\
ccc\x22 />\x0a \
<rect\x0a \
ry=\x225.05149\
22\x22\x0a \
y=\x22256.39301\x22\x0a \
x=\x222\
.5663135\x22\x0a \
height=\x223\
0.440479\x22\x0a \
width=\x2245\
.693634\x22\x0a \
id=\x22rect83\
7\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.0657438;strok\
e-linecap:round;\
stroke-linejoin:\
round;stroke-mit\
erlimit:4;stroke\
-dasharray:none;\
stroke-dashoffse\
t:0;stroke-opaci\
ty:1;paint-order\
:stroke fill mar\
kers\x22 />\x0a \
<rect\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0657438\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22\x0a \
id=\x22rec\
t831\x22\x0a \
width=\x2245.693\
588\x22\x0a \
height=\x2230.440\
51\x22\x0a \
x=\x22248.76645\x22\x0a \
y=\x22-\
40.633385\x22\x0a \
ry=\x225.05\
1497\x22\x0a \
transform=\x22ro\
tate(90)\x22 />\x0a \
</g>\x0a \
</g>\x0a </g>\x0a \
<path\x0a s\
tyle=\x22opacity:1;\
fill:#ffc107;fil\
l-opacity:1;stro\
ke:none;stroke-w\
idth:0.38596651;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-opacity:1\x22\x0a \
d=\x22m 50.20642\
1,401.67683 c 11\
0.217209,0.71279\
55.108609,0.356\
4 0,0 z\x22\x0a \
id=\x22rect997\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22 />\x0a <pat\
h\x0a style=\x22\
fill:none;stroke\
:#0000ff;stroke-\
width:0.52916664\
;stroke-linecap:\
butt;stroke-line\
join:bevel;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-opac\
ity:1\x22\x0a d=\
\x22m 1.5997022,293\
.52889 1.0461311\
,1.65058 1.04613\
09,-1.65058\x22\x0a \
id=\x22path827\x22\
\x0a inkscape\
:connector-curva\
ture=\x220\x22\x0a \
sodipodi:nodetyp\
es=\x22ccc\x22 />\x0a </\
g>\x0a</svg>\x0a\
\x00\x00\x1f\xba\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22clo\
se.svg\x22\x0a inksc\
ape:export-filen\
ame=\x22/home/yeiso\
n/Development/pi\
ton/art/icon_lit\
e.png\x22\x0a inksca\
pe:export-xdpi=\x22\
96\x22\x0a inkscape:\
export-ydpi=\x2296\x22\
>\x0a <defs\x0a i\
d=\x22defs2\x22 />\x0a <\
sodipodi:namedvi\
ew\x0a id=\x22base\
\x22\x0a pagecolor\
=\x22#ffffff\x22\x0a \
bordercolor=\x22#66\
6666\x22\x0a borde\
ropacity=\x221.0\x22\x0a \
inkscape:pag\
eopacity=\x220.0\x22\x0a \
inkscape:pag\
eshadow=\x222\x22\x0a \
inkscape:zoom=\x22\
0.44851425\x22\x0a \
inkscape:cx=\x22-4\
61.15031\x22\x0a i\
nkscape:cy=\x2268.2\
80762\x22\x0a inks\
cape:document-un\
its=\x22px\x22\x0a in\
kscape:current-l\
ayer=\x22layer1\x22\x0a \
showgrid=\x22tru\
e\x22\x0a inkscape\
:window-width=\x221\
920\x22\x0a inksca\
pe:window-height\
=\x221004\x22\x0a ink\
scape:window-x=\x22\
0\x22\x0a inkscape\
:window-y=\x220\x22\x0a \
inkscape:wind\
ow-maximized=\x221\x22\
\x0a inkscape:s\
howpageshadow=\x22f\
alse\x22\x0a units\
=\x22px\x22\x0a inksc\
ape:pagecheckerb\
oard=\x22false\x22\x0a \
showguides=\x22fa\
lse\x22\x0a inksca\
pe:snap-bbox=\x22tr\
ue\x22\x0a inkscap\
e:bbox-paths=\x22tr\
ue\x22\x0a inkscap\
e:bbox-nodes=\x22tr\
ue\x22\x0a inkscap\
e:snap-bbox-edge\
-midpoints=\x22true\
\x22\x0a inkscape:\
snap-bbox-midpoi\
nts=\x22true\x22\x0a \
inkscape:snap-no\
des=\x22true\x22\x0a \
inkscape:object-\
paths=\x22true\x22\x0a \
inkscape:snap-\
intersection-pat\
hs=\x22true\x22\x0a i\
nkscape:snap-smo\
oth-nodes=\x22true\x22\
\x0a inkscape:s\
nap-midpoints=\x22t\
rue\x22\x0a inksca\
pe:snap-global=\x22\
true\x22\x0a fit-m\
argin-top=\x220\x22\x0a \
fit-margin-le\
ft=\x220\x22\x0a fit-\
margin-right=\x220\x22\
\x0a fit-margin\
-bottom=\x220\x22\x0a \
inkscape:guide-\
bbox=\x22true\x22>\x0a \
<inkscape:grid\x0a\
type=\x22xyg\
rid\x22\x0a id=\x22\
grid974\x22\x0a \
empspacing=\x228\x22\x0a \
spacingx=\x22\
0.26458332\x22\x0a \
spacingy=\x220.2\
6458332\x22\x0a \
dotted=\x22false\x22\x0a \
visible=\x22t\
rue\x22\x0a enab\
led=\x22true\x22\x0a \
snapvisiblegri\
dlinesonly=\x22true\
\x22\x0a originx\
=\x220\x22\x0a orig\
iny=\x220\x22 />\x0a </s\
odipodi:namedvie\
w>\x0a <metadata\x0a \
id=\x22metadata\
5\x22>\x0a <rdf:RDF\
>\x0a <cc:Work\
\x0a rdf:ab\
out=\x22\x22>\x0a \
<dc:format>image\
/svg+xml</dc:for\
mat>\x0a <dc\
:type\x0a \
rdf:resource=\x22h\
ttp://purl.org/d\
c/dcmitype/Still\
Image\x22 />\x0a \
<dc:title />\x0a \
</cc:Work>\x0a\
</rdf:RDF>\x0a \
</metadata>\x0a <\
g\x0a inkscape:\
label=\x22Layer 1\x22\x0a\
inkscape:gr\
oupmode=\x22layer\x22\x0a\
id=\x22layer1\x22\
\x0a transform=\
\x22translate(0,-29\
1.70835)\x22>\x0a <\
g\x0a id=\x22g84\
7\x22\x0a transf\
orm=\x22matrix(0.05\
207439,0,0,0.052\
07453,-0.9012516\
4,282.41203)\x22>\x0a \
<g\x0a \
id=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transfor\
m=\x22matrix(1.9986\
219,0,0,1.998618\
5,17.324484,-313\
.52314)\x22>\x0a \
<path\x0a \
inkscape:\
transform-center\
-y=\x223.175\x22\x0a \
style=\x22o\
pacity:1;fill:no\
ne;fill-opacity:\
0.49382719;strok\
e:#ffffff00;stro\
ke-width:0.07000\
433;stroke-linec\
ap:round;stroke-\
linejoin:round;s\
troke-miterlimit\
:4;stroke-dashar\
ray:none;stroke-\
dashoffset:0;str\
oke-opacity:1;pa\
int-order:stroke\
fill markers\x22\x0a \
d=\x22M\
25.399999,271.6\
0002 -8.0000008e\
-7,246.20002 H 5\
0.799999 Z\x22\x0a \
id=\x22pat\
h883\x22\x0a \
inkscape:conn\
ector-curvature=\
\x220\x22\x0a \
sodipodi:nodety\
pes=\x22cccc\x22 />\x0a \
<path\x0a \
sodip\
odi:nodetypes=\x22c\
ccc\x22\x0a \
inkscape:conne\
ctor-curvature=\x22\
0\x22\x0a \
id=\x22path880\x22\x0a \
d=\x22m 2\
5.399999,271.600\
02 25.399999,25.\
4 H 0 Z\x22\x0a \
inkscape:t\
ransform-center-\
y=\x22-3.1749995\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
7000433;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<rect\x0a \
ry=\x225.0534658\
\x22\x0a y\
=\x22253.84885\x22\x0a \
x=\x227.6\
487389\x22\x0a \
height=\x2235.\
528759\x22\x0a \
width=\x2235.5\
28786\x22\x0a \
id=\x22rect870\x22\
\x0a st\
yle=\x22opacity:1;f\
ill:none;fill-op\
acity:0.49382719\
;stroke:#ffffff0\
0;stroke-width:0\
.06184419;stroke\
-linecap:round;s\
troke-linejoin:r\
ound;stroke-mite\
rlimit:4;stroke-\
dasharray:none;s\
troke-dashoffset\
:0;stroke-opacit\
y:1;paint-order:\
stroke fill mark\
ers\x22 />\x0a \
<circle\x0a \
r=\x2225.396\
828\x22\x0a \
cy=\x22271.60001\x22\
\x0a cx\
=\x2225.4\x22\x0a \
id=\x22path872\
\x22\x0a s\
tyle=\x22opacity:1;\
fill:none;fill-o\
pacity:0.4938271\
9;stroke:#ffffff\
00;stroke-width:\
0.07635882;strok\
e-linecap:round;\
stroke-linejoin:\
round;stroke-mit\
erlimit:4;stroke\
-dasharray:none;\
stroke-dashoffse\
t:0;stroke-opaci\
ty:1;paint-order\
:stroke fill mar\
kers\x22 />\x0a \
<circle\x0a \
transfor\
m=\x22rotate(-45)\x22\x0a\
cx=\
\x22-174.08969\x22\x0a \
cy=\x2221\
0.01071\x22\x0a \
r=\x2212.6560\
71\x22\x0a \
id=\x22path876\x22\x0a \
style\
=\x22opacity:1;fill\
:none;fill-opaci\
ty:0.49382719;st\
roke:#ffffff00;s\
troke-width:0.07\
399406;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
;paint-order:str\
oke fill markers\
\x22 />\x0a <\
path\x0a \
inkscape:trans\
form-center-x=\x22-\
3.1749999\x22\x0a \
sodipodi\
:nodetypes=\x22cccc\
\x22\x0a i\
nkscape:connecto\
r-curvature=\x220\x22\x0a\
id=\
\x22path904\x22\x0a \
d=\x22m 25.4\
,271.60002 -25.4\
0000040000004,25\
.4 v -50.8 z\x22\x0a \
style\
=\x22opacity:1;fill\
:none;fill-opaci\
ty:0.49382719;st\
roke:#ffffff00;s\
troke-width:0.07\
000433;stroke-li\
necap:round;stro\
ke-linejoin:roun\
d;stroke-miterli\
mit:4;stroke-das\
harray:none;stro\
ke-dashoffset:0;\
stroke-opacity:1\
;paint-order:str\
oke fill markers\
\x22 />\x0a <\
path\x0a \
inkscape:trans\
form-center-x=\x223\
.175\x22\x0a \
style=\x22opacit\
y:1;fill:none;fi\
ll-opacity:0.493\
82719;stroke:#ff\
ffff00;stroke-wi\
dth:0.07000433;s\
troke-linecap:ro\
und;stroke-linej\
oin:round;stroke\
-miterlimit:4;st\
roke-dasharray:n\
one;stroke-dasho\
ffset:0;stroke-o\
pacity:1;paint-o\
rder:stroke fill\
markers\x22\x0a \
d=\x22m 25.3\
99999,271.60002 \
25.4,-25.4 v 50.\
8 z\x22\x0a \
id=\x22path906\x22\x0a \
inks\
cape:connector-c\
urvature=\x220\x22\x0a \
sodipo\
di:nodetypes=\x22cc\
cc\x22 />\x0a \
<rect\x0a \
ry=\x225.051492\
2\x22\x0a \
y=\x22256.39301\x22\x0a \
x=\x222.\
5663135\x22\x0a \
height=\x2230\
.440479\x22\x0a \
width=\x2245.\
693634\x22\x0a \
id=\x22rect837\
\x22\x0a s\
tyle=\x22opacity:1;\
fill:none;fill-o\
pacity:0.4938271\
9;stroke:#ffffff\
00;stroke-width:\
0.0657438;stroke\
-linecap:round;s\
troke-linejoin:r\
ound;stroke-mite\
rlimit:4;stroke-\
dasharray:none;s\
troke-dashoffset\
:0;stroke-opacit\
y:1;paint-order:\
stroke fill mark\
ers\x22 />\x0a \
<rect\x0a \
style=\x22opac\
ity:1;fill:none;\
fill-opacity:0.4\
9382719;stroke:#\
ffffff00;stroke-\
width:0.0657438;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1;paint-\
order:stroke fil\
l markers\x22\x0a \
id=\x22rect\
831\x22\x0a \
width=\x2245.6935\
88\x22\x0a \
height=\x2230.4405\
1\x22\x0a \
x=\x22248.76645\x22\x0a \
y=\x22-4\
0.633385\x22\x0a \
ry=\x225.051\
497\x22\x0a \
transform=\x22rot\
ate(90)\x22 />\x0a \
</g>\x0a <\
/g>\x0a </g>\x0a \
<path\x0a st\
yle=\x22fill:#0000f\
f;stroke:#0000ff\
;stroke-width:0.\
52916664;stroke-\
linecap:butt;str\
oke-linejoin:mit\
er;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-opacity:1\x22\x0a \
d=\x22m 1.319\
2054,293.02755 2\
.6532555,2.65327\
\x22\x0a id=\x22pat\
h826\x22\x0a ink\
scape:connector-\
curvature=\x220\x22 />\
\x0a <path\x0a \
inkscape:conne\
ctor-curvature=\x22\
0\x22\x0a id=\x22pa\
th842\x22\x0a d=\
\x22m 3.9724723,293\
.02756 -2.653278\
2,2.65325\x22\x0a \
style=\x22fill:#0\
000ff;stroke:#00\
00ff;stroke-widt\
h:0.52916664;str\
oke-linecap:butt\
;stroke-linejoin\
:miter;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-opacity:\
1\x22 />\x0a <circl\
e\x0a style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:1;stroke:#0000f\
f;stroke-width:0\
.5292387;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1\x22\x0a id=\x22p\
ath829\x22\x0a c\
x=\x222.6458311\x22\x0a \
cy=\x22294.354\
19\x22\x0a r=\x222.\
381216\x22 />\x0a </g\
>\x0a</svg>\x0a\
\x00\x00\x225\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22che\
ckbox_checked.sv\
g\x22\x0a inkscape:e\
xport-filename=\x22\
/home/yeison/Dev\
elopment/piton/a\
rt/icon_lite.png\
\x22\x0a inkscape:ex\
port-xdpi=\x2296\x22\x0a \
inkscape:expor\
t-ydpi=\x2296\x22>\x0a <\
defs\x0a id=\x22de\
fs2\x22 />\x0a <sodip\
odi:namedview\x0a \
id=\x22base\x22\x0a \
pagecolor=\x22#ff\
ffff\x22\x0a borde\
rcolor=\x22#666666\x22\
\x0a borderopac\
ity=\x221.0\x22\x0a i\
nkscape:pageopac\
ity=\x220.0\x22\x0a i\
nkscape:pageshad\
ow=\x222\x22\x0a inks\
cape:zoom=\x2240.59\
4876\x22\x0a inksc\
ape:cx=\x229.477075\
1\x22\x0a inkscape\
:cy=\x227.4819362\x22\x0a\
inkscape:do\
cument-units=\x22px\
\x22\x0a inkscape:\
current-layer=\x22l\
ayer1\x22\x0a show\
grid=\x22true\x22\x0a \
inkscape:window\
-width=\x221920\x22\x0a \
inkscape:wind\
ow-height=\x221004\x22\
\x0a inkscape:w\
indow-x=\x220\x22\x0a \
inkscape:window\
-y=\x220\x22\x0a inks\
cape:window-maxi\
mized=\x221\x22\x0a i\
nkscape:showpage\
shadow=\x22false\x22\x0a \
units=\x22px\x22\x0a \
inkscape:pag\
echeckerboard=\x22f\
alse\x22\x0a showg\
uides=\x22false\x22\x0a \
inkscape:snap\
-bbox=\x22true\x22\x0a \
inkscape:bbox-\
paths=\x22true\x22\x0a \
inkscape:bbox-\
nodes=\x22true\x22\x0a \
inkscape:snap-\
bbox-edge-midpoi\
nts=\x22true\x22\x0a \
inkscape:snap-bb\
ox-midpoints=\x22tr\
ue\x22\x0a inkscap\
e:snap-nodes=\x22tr\
ue\x22\x0a inkscap\
e:object-paths=\x22\
true\x22\x0a inksc\
ape:snap-interse\
ction-paths=\x22tru\
e\x22\x0a inkscape\
:snap-smooth-nod\
es=\x22true\x22\x0a i\
nkscape:snap-mid\
points=\x22true\x22\x0a \
inkscape:snap\
-global=\x22true\x22\x0a \
fit-margin-t\
op=\x220\x22\x0a fit-\
margin-left=\x220\x22\x0a\
fit-margin-\
right=\x220\x22\x0a f\
it-margin-bottom\
=\x220\x22\x0a inksca\
pe:guide-bbox=\x22t\
rue\x22>\x0a <inksc\
ape:grid\x0a \
type=\x22xygrid\x22\x0a \
id=\x22grid974\
\x22\x0a empspac\
ing=\x228\x22\x0a s\
pacingx=\x220.26458\
332\x22\x0a spac\
ingy=\x220.26458332\
\x22\x0a dotted=\
\x22false\x22\x0a v\
isible=\x22true\x22\x0a \
enabled=\x22tr\
ue\x22\x0a snapv\
isiblegridlineso\
nly=\x22true\x22\x0a \
originx=\x220\x22\x0a \
originy=\x220\x22\
/>\x0a </sodipodi\
:namedview>\x0a <m\
etadata\x0a id=\
\x22metadata5\x22>\x0a \
<rdf:RDF>\x0a \
<cc:Work\x0a \
rdf:about=\x22\x22>\
\x0a <dc:for\
mat>image/svg+xm\
l</dc:format>\x0a \
<dc:type\x0a \
rdf:re\
source=\x22http://p\
url.org/dc/dcmit\
ype/StillImage\x22 \
/>\x0a <dc:t\
itle></dc:title>\
\x0a </cc:Work\
>\x0a </rdf:RDF>\
\x0a </metadata>\x0a \
<g\x0a inkscap\
e:label=\x22Layer 1\
\x22\x0a inkscape:\
groupmode=\x22layer\
\x22\x0a id=\x22layer\
1\x22\x0a transfor\
m=\x22translate(0,-\
291.70835)\x22>\x0a \
<g\x0a id=\x22g\
847\x22\x0a tran\
sform=\x22matrix(0.\
05207439,0,0,0.0\
5207453,-0.90125\
164,282.41203)\x22>\
\x0a <g\x0a \
id=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transf\
orm=\x22matrix(1.99\
86219,0,0,1.9986\
185,17.324484,-3\
13.52314)\x22>\x0a \
<path\x0a \
inkscap\
e:transform-cent\
er-y=\x223.175\x22\x0a \
style=\
\x22opacity:1;fill:\
none;fill-opacit\
y:0.49382719;str\
oke:#ffffff00;st\
roke-width:0.070\
00433;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
\x0a d=\
\x22M 25.399999,271\
.60002 -8.000000\
8e-7,246.20002 H\
50.799999 Z\x22\x0a \
id=\x22p\
ath883\x22\x0a \
inkscape:co\
nnector-curvatur\
e=\x220\x22\x0a \
sodipodi:node\
types=\x22cccc\x22 />\x0a\
<path\x0a\
sod\
ipodi:nodetypes=\
\x22cccc\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a \
id=\x22path880\x22\x0a \
d=\x22m\
25.399999,271.6\
0002 25.399999,2\
5.4 H 0 Z\x22\x0a \
inkscape\
:transform-cente\
r-y=\x22-3.1749995\x22\
\x0a st\
yle=\x22opacity:1;f\
ill:none;fill-op\
acity:0.49382719\
;stroke:#ffffff0\
0;stroke-width:0\
.07000433;stroke\
-linecap:round;s\
troke-linejoin:r\
ound;stroke-mite\
rlimit:4;stroke-\
dasharray:none;s\
troke-dashoffset\
:0;stroke-opacit\
y:1;paint-order:\
stroke fill mark\
ers\x22 />\x0a \
<rect\x0a \
ry=\x225.05346\
58\x22\x0a \
y=\x22253.84885\x22\x0a \
x=\x227\
.6487389\x22\x0a \
height=\x223\
5.528759\x22\x0a \
width=\x2235\
.528786\x22\x0a \
id=\x22rect87\
0\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.06184419;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<circle\x0a \
r=\x2225.3\
96828\x22\x0a \
cy=\x22271.6000\
1\x22\x0a \
cx=\x2225.4\x22\x0a \
id=\x22path8\
72\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.07635882;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1;paint-ord\
er:stroke fill m\
arkers\x22 />\x0a \
<circle\x0a \
transf\
orm=\x22rotate(-45)\
\x22\x0a c\
x=\x22-174.08969\x22\x0a \
cy=\x22\
210.01071\x22\x0a \
r=\x2212.65\
6071\x22\x0a \
id=\x22path876\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07399406;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<path\x0a \
inkscape:tra\
nsform-center-x=\
\x22-3.1749999\x22\x0a \
sodipo\
di:nodetypes=\x22cc\
cc\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22\x0a i\
d=\x22path904\x22\x0a \
d=\x22m 25\
.4,271.60002 -25\
.40000040000004,\
25.4 v -50.8 z\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07000433;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<path\x0a \
inkscape:tra\
nsform-center-x=\
\x223.175\x22\x0a \
style=\x22opac\
ity:1;fill:none;\
fill-opacity:0.4\
9382719;stroke:#\
ffffff00;stroke-\
width:0.07000433\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22\x0a \
d=\x22m 25\
.399999,271.6000\
2 25.4,-25.4 v 5\
0.8 z\x22\x0a \
id=\x22path906\x22\
\x0a in\
kscape:connector\
-curvature=\x220\x22\x0a \
sodi\
podi:nodetypes=\x22\
cccc\x22 />\x0a \
<rect\x0a \
ry=\x225.0514\
922\x22\x0a \
y=\x22256.39301\x22\x0a\
x=\x22\
2.5663135\x22\x0a \
height=\x22\
30.440479\x22\x0a \
width=\x224\
5.693634\x22\x0a \
id=\x22rect8\
37\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.0657438;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<rect\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.065743\
8;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22\x0a \
id=\x22re\
ct831\x22\x0a \
width=\x2245.69\
3588\x22\x0a \
height=\x2230.44\
051\x22\x0a \
x=\x22248.76645\x22\x0a\
y=\x22\
-40.633385\x22\x0a \
ry=\x225.0\
51497\x22\x0a \
transform=\x22r\
otate(90)\x22 />\x0a \
</g>\x0a \
</g>\x0a </g>\x0a \
<path\x0a \
style=\x22opacity:1\
;fill:#ffc107;fi\
ll-opacity:1;str\
oke:none;stroke-\
width:0.38596651\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-opacity:1\x22\x0a \
d=\x22m 50.2064\
21,401.67683 c 1\
10.217209,0.7127\
9 55.108609,0.35\
64 0,0 z\x22\x0a \
id=\x22rect997\x22\x0a \
inkscape:co\
nnector-curvatur\
e=\x220\x22 />\x0a <pa\
th\x0a style=\
\x22opacity:1;fill:\
#0000ff;fill-opa\
city:1;stroke:#0\
000ff;stroke-wid\
th:0;stroke-line\
cap:square;strok\
e-linejoin:miter\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
\x0a d=\x22m 1.3\
218831,292.50314\
c -0.292672,0 -\
0.52813314,0.235\
46 -0.52813314,0\
.52813 v 2.6479 \
c 0,0.29268 0.23\
546114,0.52814 0\
.52813314,0.5281\
4 h 2.6479 c 0.2\
92673,0 0.528133\
,-0.23546 0.5281\
33,-0.52814 v -2\
.6479 c 0,-0.292\
67 -0.23546,-0.5\
2813 -0.528133,-\
0.52813 z m 0.00\
1,0.26458 h 2.64\
5833 c 0.146573,\
0 0.264583,0.118\
01 0.264583,0.26\
459 v 2.64583 c \
0,0.14657 -0.118\
01,0.26458 -0.26\
4583,0.26458 h -\
2.645799 c -0.14\
6574,0 -0.264584\
,-0.11801 -0.264\
584,-0.26458 v -\
2.64583 c 0,-0.1\
4658 0.11801,-0.\
26459 0.264584,-\
0.26459 z\x22\x0a \
id=\x22rect1954\x22\x0a\
inkscape:\
connector-curvat\
ure=\x220\x22 />\x0a <\
rect\x0a styl\
e=\x22opacity:1;fil\
l:#0000ff;fill-o\
pacity:1;stroke:\
#0000ff;stroke-w\
idth:0;stroke-li\
necap:square;str\
oke-linejoin:mit\
er;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22\x0a id=\x22re\
ct2118\x22\x0a w\
idth=\x222.1166663\x22\
\x0a height=\x22\
2.1166787\x22\x0a \
x=\x221.5874999\x22\x0a\
y=\x22293.29\
584\x22\x0a ry=\x22\
0\x22 />\x0a </g>\x0a</s\
vg>\x0a\
\x00\x00\x1dw\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22che\
ckbox_checked.sv\
g\x22\x0a inkscape:e\
xport-filename=\x22\
/home/yeison/Dev\
elopment/piton/a\
rt/icon_lite.png\
\x22\x0a inkscape:ex\
port-xdpi=\x2296\x22\x0a \
inkscape:expor\
t-ydpi=\x2296\x22>\x0a <\
defs\x0a id=\x22de\
fs2\x22 />\x0a <sodip\
odi:namedview\x0a \
id=\x22base\x22\x0a \
pagecolor=\x22#ff\
ffff\x22\x0a borde\
rcolor=\x22#666666\x22\
\x0a borderopac\
ity=\x221.0\x22\x0a i\
nkscape:pageopac\
ity=\x220.0\x22\x0a i\
nkscape:pageshad\
ow=\x222\x22\x0a inks\
cape:zoom=\x2217.53\
8085\x22\x0a inksc\
ape:cx=\x226.604083\
6\x22\x0a inkscape\
:cy=\x229.0271347\x22\x0a\
inkscape:do\
cument-units=\x22px\
\x22\x0a inkscape:\
current-layer=\x22l\
ayer1\x22\x0a show\
grid=\x22true\x22\x0a \
inkscape:window\
-width=\x221920\x22\x0a \
inkscape:wind\
ow-height=\x221015\x22\
\x0a inkscape:w\
indow-x=\x220\x22\x0a \
inkscape:window\
-y=\x220\x22\x0a inks\
cape:window-maxi\
mized=\x221\x22\x0a i\
nkscape:showpage\
shadow=\x22false\x22\x0a \
units=\x22px\x22\x0a \
inkscape:pag\
echeckerboard=\x22f\
alse\x22\x0a showg\
uides=\x22true\x22\x0a \
inkscape:snap-\
bbox=\x22true\x22\x0a \
inkscape:bbox-p\
aths=\x22true\x22\x0a \
inkscape:bbox-n\
odes=\x22true\x22\x0a \
inkscape:snap-b\
box-edge-midpoin\
ts=\x22true\x22\x0a i\
nkscape:snap-bbo\
x-midpoints=\x22tru\
e\x22\x0a inkscape\
:snap-nodes=\x22tru\
e\x22\x0a inkscape\
:object-paths=\x22t\
rue\x22\x0a inksca\
pe:snap-intersec\
tion-paths=\x22true\
\x22\x0a inkscape:\
snap-smooth-node\
s=\x22true\x22\x0a in\
kscape:snap-midp\
oints=\x22true\x22\x0a \
inkscape:snap-\
global=\x22true\x22\x0a \
fit-margin-to\
p=\x220\x22\x0a fit-m\
argin-left=\x220\x22\x0a \
fit-margin-r\
ight=\x220\x22\x0a fi\
t-margin-bottom=\
\x220\x22\x0a inkscap\
e:guide-bbox=\x22tr\
ue\x22>\x0a <inksca\
pe:grid\x0a t\
ype=\x22xygrid\x22\x0a \
id=\x22grid974\x22\
\x0a empspaci\
ng=\x228\x22\x0a sp\
acingx=\x220.264583\
32\x22\x0a spaci\
ngy=\x220.26458332\x22\
\x0a dotted=\x22\
false\x22\x0a vi\
sible=\x22true\x22\x0a \
enabled=\x22tru\
e\x22\x0a snapvi\
siblegridlineson\
ly=\x22true\x22\x0a \
originx=\x220\x22\x0a \
originy=\x220\x22 \
/>\x0a </sodipodi:\
namedview>\x0a <me\
tadata\x0a id=\x22\
metadata5\x22>\x0a \
<rdf:RDF>\x0a \
<cc:Work\x0a \
rdf:about=\x22\x22>\x0a\
<dc:form\
at>image/svg+xml\
</dc:format>\x0a \
<dc:type\x0a \
rdf:res\
ource=\x22http://pu\
rl.org/dc/dcmity\
pe/StillImage\x22 /\
>\x0a <dc:ti\
tle></dc:title>\x0a\
</cc:Work>\
\x0a </rdf:RDF>\x0a\
</metadata>\x0a \
<g\x0a inkscape\
:label=\x22Layer 1\x22\
\x0a inkscape:g\
roupmode=\x22layer\x22\
\x0a id=\x22layer1\
\x22\x0a transform\
=\x22translate(0,-2\
91.70835)\x22>\x0a \
<g\x0a id=\x22g8\
47\x22\x0a trans\
form=\x22matrix(0.0\
5207439,0,0,0.05\
207453,-0.901251\
64,282.41203)\x22>\x0a\
<g\x0a \
id=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transfo\
rm=\x22matrix(1.998\
6219,0,0,1.99861\
85,17.324484,-31\
3.52314)\x22>\x0a \
<path\x0a \
inkscape\
:transform-cente\
r-y=\x223.175\x22\x0a \
style=\x22\
opacity:1;fill:n\
one;fill-opacity\
:0.49382719;stro\
ke:#ffffff00;str\
oke-width:0.0700\
0433;stroke-line\
cap:round;stroke\
-linejoin:round;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-dashoffset:0;st\
roke-opacity:1;p\
aint-order:strok\
e fill markers\x22\x0a\
d=\x22\
M 25.399999,271.\
60002 -8.0000008\
e-7,246.20002 H \
50.799999 Z\x22\x0a \
id=\x22pa\
th883\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a \
sodipodi:nodet\
ypes=\x22cccc\x22 />\x0a \
<path\x0a \
sodi\
podi:nodetypes=\x22\
cccc\x22\x0a \
inkscape:conn\
ector-curvature=\
\x220\x22\x0a \
id=\x22path880\x22\x0a \
d=\x22m \
25.399999,271.60\
002 25.399999,25\
.4 H 0 Z\x22\x0a \
inkscape:\
transform-center\
-y=\x22-3.1749995\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07000433;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<rect\x0a \
ry=\x225.053465\
8\x22\x0a \
y=\x22253.84885\x22\x0a \
x=\x227.\
6487389\x22\x0a \
height=\x2235\
.528759\x22\x0a \
width=\x2235.\
528786\x22\x0a \
id=\x22rect870\
\x22\x0a s\
tyle=\x22opacity:1;\
fill:none;fill-o\
pacity:0.4938271\
9;stroke:#ffffff\
00;stroke-width:\
0.06184419;strok\
e-linecap:round;\
stroke-linejoin:\
round;stroke-mit\
erlimit:4;stroke\
-dasharray:none;\
stroke-dashoffse\
t:0;stroke-opaci\
ty:1;paint-order\
:stroke fill mar\
kers\x22 />\x0a \
<circle\x0a \
r=\x2225.39\
6828\x22\x0a \
cy=\x22271.60001\
\x22\x0a c\
x=\x2225.4\x22\x0a \
id=\x22path87\
2\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.07635882;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<circle\x0a \
transfo\
rm=\x22rotate(-45)\x22\
\x0a cx\
=\x22-174.08969\x22\x0a \
cy=\x222\
10.01071\x22\x0a \
r=\x2212.656\
071\x22\x0a \
id=\x22path876\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
7399406;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<path\x0a \
inkscape:tran\
sform-center-x=\x22\
-3.1749999\x22\x0a \
sodipod\
i:nodetypes=\x22ccc\
c\x22\x0a \
inkscape:connect\
or-curvature=\x220\x22\
\x0a id\
=\x22path904\x22\x0a \
d=\x22m 25.\
4,271.60002 -25.\
40000040000004,2\
5.4 v -50.8 z\x22\x0a \
styl\
e=\x22opacity:1;fil\
l:none;fill-opac\
ity:0.49382719;s\
troke:#ffffff00;\
stroke-width:0.0\
7000433;stroke-l\
inecap:round;str\
oke-linejoin:rou\
nd;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22 />\x0a \
<path\x0a \
inkscape:tran\
sform-center-x=\x22\
3.175\x22\x0a \
style=\x22opaci\
ty:1;fill:none;f\
ill-opacity:0.49\
382719;stroke:#f\
fffff00;stroke-w\
idth:0.07000433;\
stroke-linecap:r\
ound;stroke-line\
join:round;strok\
e-miterlimit:4;s\
troke-dasharray:\
none;stroke-dash\
offset:0;stroke-\
opacity:1;paint-\
order:stroke fil\
l markers\x22\x0a \
d=\x22m 25.\
399999,271.60002\
25.4,-25.4 v 50\
.8 z\x22\x0a \
id=\x22path906\x22\x0a\
ink\
scape:connector-\
curvature=\x220\x22\x0a \
sodip\
odi:nodetypes=\x22c\
ccc\x22 />\x0a \
<rect\x0a \
ry=\x225.05149\
22\x22\x0a \
y=\x22256.39301\x22\x0a \
x=\x222\
.5663135\x22\x0a \
height=\x223\
0.440479\x22\x0a \
width=\x2245\
.693634\x22\x0a \
id=\x22rect83\
7\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.0657438;strok\
e-linecap:round;\
stroke-linejoin:\
round;stroke-mit\
erlimit:4;stroke\
-dasharray:none;\
stroke-dashoffse\
t:0;stroke-opaci\
ty:1;paint-order\
:stroke fill mar\
kers\x22 />\x0a \
<rect\x0a \
style=\x22opa\
city:1;fill:none\
;fill-opacity:0.\
49382719;stroke:\
#ffffff00;stroke\
-width:0.0657438\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22\x0a \
id=\x22rec\
t831\x22\x0a \
width=\x2245.693\
588\x22\x0a \
height=\x2230.440\
51\x22\x0a \
x=\x22248.76645\x22\x0a \
y=\x22-\
40.633385\x22\x0a \
ry=\x225.05\
1497\x22\x0a \
transform=\x22ro\
tate(90)\x22 />\x0a \
</g>\x0a \
</g>\x0a </g>\x0a \
<path\x0a s\
tyle=\x22opacity:1;\
fill:#ffc107;fil\
l-opacity:1;stro\
ke:none;stroke-w\
idth:0.38596651;\
stroke-miterlimi\
t:4;stroke-dasha\
rray:none;stroke\
-opacity:1\x22\x0a \
d=\x22m 50.20642\
1,401.67683 c 11\
0.217209,0.71279\
55.108609,0.356\
4 0,0 z\x22\x0a \
id=\x22rect997\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22 />\x0a </g>\x0a<\
/svg>\x0a\
\x00\x00!\xef\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<!-- Creat\
ed with Inkscape\
(http://www.ink\
scape.org/) -->\x0a\
\x0a<svg\x0a xmlns:d\
c=\x22http://purl.o\
rg/dc/elements/1\
.1/\x22\x0a xmlns:cc\
=\x22http://creativ\
ecommons.org/ns#\
\x22\x0a xmlns:rdf=\x22\
http://www.w3.or\
g/1999/02/22-rdf\
-syntax-ns#\x22\x0a \
xmlns:svg=\x22http:\
//www.w3.org/200\
0/svg\x22\x0a xmlns=\
\x22http://www.w3.o\
rg/2000/svg\x22\x0a \
xmlns:sodipodi=\x22\
http://sodipodi.\
sourceforge.net/\
DTD/sodipodi-0.d\
td\x22\x0a xmlns:ink\
scape=\x22http://ww\
w.inkscape.org/n\
amespaces/inksca\
pe\x22\x0a width=\x2220\
\x22\x0a height=\x2220\x22\
\x0a viewBox=\x220 0\
5.2916664 5.291\
6664\x22\x0a version\
=\x221.1\x22\x0a id=\x22sv\
g8\x22\x0a inkscape:\
version=\x220.92.4 \
5da689c313, 2019\
-01-14\x22\x0a sodip\
odi:docname=\x22che\
ckbox_unchecked.\
svg\x22\x0a inkscape\
:export-filename\
=\x22/home/yeison/D\
evelopment/piton\
/art/icon_lite.p\
ng\x22\x0a inkscape:\
export-xdpi=\x2296\x22\
\x0a inkscape:exp\
ort-ydpi=\x2296\x22>\x0a \
<defs\x0a id=\x22\
defs2\x22 />\x0a <sod\
ipodi:namedview\x0a\
id=\x22base\x22\x0a \
pagecolor=\x22#\
ffffff\x22\x0a bor\
dercolor=\x22#66666\
6\x22\x0a borderop\
acity=\x221.0\x22\x0a \
inkscape:pageop\
acity=\x220.0\x22\x0a \
inkscape:pagesh\
adow=\x222\x22\x0a in\
kscape:zoom=\x2235.\
076169\x22\x0a ink\
scape:cx=\x222.2827\
573\x22\x0a inksca\
pe:cy=\x228.126074\x22\
\x0a inkscape:d\
ocument-units=\x22p\
x\x22\x0a inkscape\
:current-layer=\x22\
layer1\x22\x0a sho\
wgrid=\x22true\x22\x0a \
inkscape:windo\
w-width=\x221920\x22\x0a \
inkscape:win\
dow-height=\x221004\
\x22\x0a inkscape:\
window-x=\x220\x22\x0a \
inkscape:windo\
w-y=\x220\x22\x0a ink\
scape:window-max\
imized=\x221\x22\x0a \
inkscape:showpag\
eshadow=\x22false\x22\x0a\
units=\x22px\x22\x0a\
inkscape:pa\
gecheckerboard=\x22\
false\x22\x0a show\
guides=\x22true\x22\x0a \
inkscape:snap\
-bbox=\x22true\x22\x0a \
inkscape:bbox-\
paths=\x22true\x22\x0a \
inkscape:bbox-\
nodes=\x22true\x22\x0a \
inkscape:snap-\
bbox-edge-midpoi\
nts=\x22true\x22\x0a \
inkscape:snap-bb\
ox-midpoints=\x22tr\
ue\x22\x0a inkscap\
e:snap-nodes=\x22tr\
ue\x22\x0a inkscap\
e:object-paths=\x22\
true\x22\x0a inksc\
ape:snap-interse\
ction-paths=\x22tru\
e\x22\x0a inkscape\
:snap-smooth-nod\
es=\x22true\x22\x0a i\
nkscape:snap-mid\
points=\x22true\x22\x0a \
inkscape:snap\
-global=\x22true\x22\x0a \
fit-margin-t\
op=\x220\x22\x0a fit-\
margin-left=\x220\x22\x0a\
fit-margin-\
right=\x220\x22\x0a f\
it-margin-bottom\
=\x220\x22\x0a inksca\
pe:guide-bbox=\x22t\
rue\x22>\x0a <inksc\
ape:grid\x0a \
type=\x22xygrid\x22\x0a \
id=\x22grid974\
\x22\x0a empspac\
ing=\x228\x22\x0a s\
pacingx=\x220.26458\
332\x22\x0a spac\
ingy=\x220.26458332\
\x22\x0a dotted=\
\x22false\x22\x0a v\
isible=\x22true\x22\x0a \
enabled=\x22tr\
ue\x22\x0a snapv\
isiblegridlineso\
nly=\x22true\x22\x0a \
originx=\x220\x22\x0a \
originy=\x220\x22\
/>\x0a </sodipodi\
:namedview>\x0a <m\
etadata\x0a id=\
\x22metadata5\x22>\x0a \
<rdf:RDF>\x0a \
<cc:Work\x0a \
rdf:about=\x22\x22>\
\x0a <dc:for\
mat>image/svg+xm\
l</dc:format>\x0a \
<dc:type\x0a \
rdf:re\
source=\x22http://p\
url.org/dc/dcmit\
ype/StillImage\x22 \
/>\x0a <dc:t\
itle></dc:title>\
\x0a </cc:Work\
>\x0a </rdf:RDF>\
\x0a </metadata>\x0a \
<g\x0a inkscap\
e:label=\x22Layer 1\
\x22\x0a inkscape:\
groupmode=\x22layer\
\x22\x0a id=\x22layer\
1\x22\x0a transfor\
m=\x22translate(0,-\
291.70835)\x22>\x0a \
<g\x0a id=\x22g\
847\x22\x0a tran\
sform=\x22matrix(0.\
05207439,0,0,0.0\
5207453,-0.90125\
164,282.41203)\x22>\
\x0a <g\x0a \
id=\x22g851\x22>\x0a \
<g\x0a \
id=\x22g1059\x22\x0a \
transf\
orm=\x22matrix(1.99\
86219,0,0,1.9986\
185,17.324484,-3\
13.52314)\x22>\x0a \
<path\x0a \
inkscap\
e:transform-cent\
er-y=\x223.175\x22\x0a \
style=\
\x22opacity:1;fill:\
none;fill-opacit\
y:0.49382719;str\
oke:#ffffff00;st\
roke-width:0.070\
00433;stroke-lin\
ecap:round;strok\
e-linejoin:round\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-dashoffset:0;s\
troke-opacity:1;\
paint-order:stro\
ke fill markers\x22\
\x0a d=\
\x22M 25.399999,271\
.60002 -8.000000\
8e-7,246.20002 H\
50.799999 Z\x22\x0a \
id=\x22p\
ath883\x22\x0a \
inkscape:co\
nnector-curvatur\
e=\x220\x22\x0a \
sodipodi:node\
types=\x22cccc\x22 />\x0a\
<path\x0a\
sod\
ipodi:nodetypes=\
\x22cccc\x22\x0a \
inkscape:con\
nector-curvature\
=\x220\x22\x0a \
id=\x22path880\x22\x0a \
d=\x22m\
25.399999,271.6\
0002 25.399999,2\
5.4 H 0 Z\x22\x0a \
inkscape\
:transform-cente\
r-y=\x22-3.1749995\x22\
\x0a st\
yle=\x22opacity:1;f\
ill:none;fill-op\
acity:0.49382719\
;stroke:#ffffff0\
0;stroke-width:0\
.07000433;stroke\
-linecap:round;s\
troke-linejoin:r\
ound;stroke-mite\
rlimit:4;stroke-\
dasharray:none;s\
troke-dashoffset\
:0;stroke-opacit\
y:1;paint-order:\
stroke fill mark\
ers\x22 />\x0a \
<rect\x0a \
ry=\x225.05346\
58\x22\x0a \
y=\x22253.84885\x22\x0a \
x=\x227\
.6487389\x22\x0a \
height=\x223\
5.528759\x22\x0a \
width=\x2235\
.528786\x22\x0a \
id=\x22rect87\
0\x22\x0a \
style=\x22opacity:1\
;fill:none;fill-\
opacity:0.493827\
19;stroke:#fffff\
f00;stroke-width\
:0.06184419;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<circle\x0a \
r=\x2225.3\
96828\x22\x0a \
cy=\x22271.6000\
1\x22\x0a \
cx=\x2225.4\x22\x0a \
id=\x22path8\
72\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.07635882;str\
oke-linecap:roun\
d;stroke-linejoi\
n:round;stroke-m\
iterlimit:4;stro\
ke-dasharray:non\
e;stroke-dashoff\
set:0;stroke-opa\
city:1;paint-ord\
er:stroke fill m\
arkers\x22 />\x0a \
<circle\x0a \
transf\
orm=\x22rotate(-45)\
\x22\x0a c\
x=\x22-174.08969\x22\x0a \
cy=\x22\
210.01071\x22\x0a \
r=\x2212.65\
6071\x22\x0a \
id=\x22path876\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07399406;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<path\x0a \
inkscape:tra\
nsform-center-x=\
\x22-3.1749999\x22\x0a \
sodipo\
di:nodetypes=\x22cc\
cc\x22\x0a \
inkscape:connec\
tor-curvature=\x220\
\x22\x0a i\
d=\x22path904\x22\x0a \
d=\x22m 25\
.4,271.60002 -25\
.40000040000004,\
25.4 v -50.8 z\x22\x0a\
sty\
le=\x22opacity:1;fi\
ll:none;fill-opa\
city:0.49382719;\
stroke:#ffffff00\
;stroke-width:0.\
07000433;stroke-\
linecap:round;st\
roke-linejoin:ro\
und;stroke-miter\
limit:4;stroke-d\
asharray:none;st\
roke-dashoffset:\
0;stroke-opacity\
:1;paint-order:s\
troke fill marke\
rs\x22 />\x0a \
<path\x0a \
inkscape:tra\
nsform-center-x=\
\x223.175\x22\x0a \
style=\x22opac\
ity:1;fill:none;\
fill-opacity:0.4\
9382719;stroke:#\
ffffff00;stroke-\
width:0.07000433\
;stroke-linecap:\
round;stroke-lin\
ejoin:round;stro\
ke-miterlimit:4;\
stroke-dasharray\
:none;stroke-das\
hoffset:0;stroke\
-opacity:1;paint\
-order:stroke fi\
ll markers\x22\x0a \
d=\x22m 25\
.399999,271.6000\
2 25.4,-25.4 v 5\
0.8 z\x22\x0a \
id=\x22path906\x22\
\x0a in\
kscape:connector\
-curvature=\x220\x22\x0a \
sodi\
podi:nodetypes=\x22\
cccc\x22 />\x0a \
<rect\x0a \
ry=\x225.0514\
922\x22\x0a \
y=\x22256.39301\x22\x0a\
x=\x22\
2.5663135\x22\x0a \
height=\x22\
30.440479\x22\x0a \
width=\x224\
5.693634\x22\x0a \
id=\x22rect8\
37\x22\x0a \
style=\x22opacity:\
1;fill:none;fill\
-opacity:0.49382\
719;stroke:#ffff\
ff00;stroke-widt\
h:0.0657438;stro\
ke-linecap:round\
;stroke-linejoin\
:round;stroke-mi\
terlimit:4;strok\
e-dasharray:none\
;stroke-dashoffs\
et:0;stroke-opac\
ity:1;paint-orde\
r:stroke fill ma\
rkers\x22 />\x0a \
<rect\x0a \
style=\x22op\
acity:1;fill:non\
e;fill-opacity:0\
.49382719;stroke\
:#ffffff00;strok\
e-width:0.065743\
8;stroke-linecap\
:round;stroke-li\
nejoin:round;str\
oke-miterlimit:4\
;stroke-dasharra\
y:none;stroke-da\
shoffset:0;strok\
e-opacity:1;pain\
t-order:stroke f\
ill markers\x22\x0a \
id=\x22re\
ct831\x22\x0a \
width=\x2245.69\
3588\x22\x0a \
height=\x2230.44\
051\x22\x0a \
x=\x22248.76645\x22\x0a\
y=\x22\
-40.633385\x22\x0a \
ry=\x225.0\
51497\x22\x0a \
transform=\x22r\
otate(90)\x22 />\x0a \
</g>\x0a \
</g>\x0a </g>\x0a \
<path\x0a \
style=\x22opacity:1\
;fill:#ffc107;fi\
ll-opacity:1;str\
oke:none;stroke-\
width:0.38596651\
;stroke-miterlim\
it:4;stroke-dash\
array:none;strok\
e-opacity:1\x22\x0a \
d=\x22m 50.2064\
21,401.67683 c 1\
10.217209,0.7127\
9 55.108609,0.35\
64 0,0 z\x22\x0a \
id=\x22rect997\x22\x0a \
inkscape:co\
nnector-curvatur\
e=\x220\x22 />\x0a <re\
ct\x0a style=\
\x22opacity:1;fill:\
#ffffff00;fill-o\
pacity:0;stroke:\
#0000ff;stroke-w\
idth:0;stroke-li\
necap:square;str\
oke-linejoin:mit\
er;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22\x0a id=\x22re\
ct5674\x22\x0a w\
idth=\x220.79374999\
\x22\x0a height=\
\x220.79374999\x22\x0a \
x=\x22-1.322916\
6\x22\x0a y=\x22294\
.61877\x22\x0a r\
y=\x220.39687499\x22 /\
>\x0a <path\x0a \
style=\x22opacit\
y:1;fill:#0000ff\
;fill-opacity:0.\
35294119;stroke:\
#0000ff;stroke-w\
idth:0;stroke-li\
necap:square;str\
oke-linejoin:mit\
er;stroke-miterl\
imit:4;stroke-da\
sharray:none;str\
oke-dashoffset:0\
;stroke-opacity:\
1;paint-order:st\
roke fill marker\
s\x22\x0a d=\x22M 4\
.9960938,3 C 3.8\
899304,3 3,3.889\
9304 3,4.9960938\
V 15.003906 C 3\
,16.11007 3.8899\
304,17 4.9960938\
,17 H 15.003906 \
C 16.11007,17 17\
,16.11007 17,15.\
003906 V 4.99609\
38 C 17,3.889930\
4 16.11007,3 15.\
003906,3 Z M 5,4\
h 10 c 0.553979\
,0 1,0.4460206 1\
,1 v 10 c 0,0.55\
3979 -0.446021,1\
-1,1 H 5 C 4.44\
60206,16 4,15.55\
3979 4,15 V 5 C \
4,4.4460206 4.44\
60206,4 5,4 Z\x22\x0a \
transform=\
\x22matrix(0.264583\
32,0,0,0.2645833\
2,0,291.70835)\x22\x0a\
id=\x22rect1\
954\x22\x0a inks\
cape:connector-c\
urvature=\x220\x22 />\x0a\
</g>\x0a</svg>\x0a\
"
qt_resource_name = b"\
\x00\x04\
\x00\x06\xfa^\
\x00i\
\x00c\x00o\x00n\
\x00\x07\
\x07\x908y\
\x00p\
\x00r\x00i\x00m\x00a\x00r\x00y\
\x00\x08\
\x00\x97\x9f\xd4\
\x00d\
\x00i\x00s\x00a\x00b\x00l\x00e\x00d\
\x00\x0a\
\x0a\xca\x1f'\
\x00s\
\x00l\x00i\x00d\x00e\x00r\x00.\x00s\x00v\x00g\
\x00\x0b\
\x09\x92\x8b\xe7\
\x00u\
\x00p\x00a\x00r\x00r\x00o\x00w\x00.\x00s\x00v\x00g\
\x00\x1b\
\x09\xb79\xa7\
\x00t\
\x00o\x00o\x00l\x00b\x00a\x00r\x00-\x00h\x00a\x00n\x00d\x00l\x00e\x00-\x00v\x00e\
\x00r\x00t\x00i\x00c\x00a\x00l\x00.\x00s\x00v\x00g\
\x00\x15\
\x0c\xcb\x0d\xe7\
\x00s\
\x00p\x00l\x00i\x00t\x00t\x00e\x00r\x00-\x00v\x00e\x00r\x00t\x00i\x00c\x00a\x00l\
\x00.\x00s\x00v\x00g\
\x00\x0d\
\x0c\x15\x0a'\
\x00l\
\x00e\x00f\x00t\x00a\x00r\x00r\x00o\x00w\x00.\x00s\x00v\x00g\
\x00\x09\
\x05\x87\x80\xa7\
\x00f\
\x00l\x00o\x00a\x00t\x00.\x00s\x00v\x00g\
\x00\x1a\
\x01\x87\xa3\xe7\
\x00c\
\x00h\x00e\x00c\x00k\x00b\x00o\x00x\x00_\x00i\x00n\x00d\x00e\x00t\x00e\x00r\x00m\
\x00i\x00n\x00a\x00t\x00e\x00.\x00s\x00v\x00g\
\x00\x1d\
\x06\x0b\xe3\x07\
\x00t\
\x00o\x00o\x00l\x00b\x00a\x00r\x00-\x00h\x00a\x00n\x00d\x00l\x00e\x00-\x00h\x00o\
\x00r\x00i\x00z\x00o\x00n\x00t\x00a\x00l\x00.\x00s\x00v\x00g\
\x00\x17\
\x04\xb5\xb8'\
\x00r\
\x00a\x00d\x00i\x00o\x00b\x00u\x00t\x00t\x00o\x00n\x00_\x00c\x00h\x00e\x00c\x00k\
\x00e\x00d\x00.\x00s\x00v\x00g\
\x00\x0e\
\x06\xd9\x15\xc7\
\x00r\
\x00i\x00g\x00h\x00t\x00a\x00r\x00r\x00o\x00w\x00.\x00s\x00v\x00g\
\x00\x19\
\x08\x11$\xc7\
\x00r\
\x00a\x00d\x00i\x00o\x00b\x00u\x00t\x00t\x00o\x00n\x00_\x00u\x00n\x00c\x00h\x00e\
\x00c\x00k\x00e\x00d\x00.\x00s\x00v\x00g\
\x00\x0d\
\x02h\xefG\
\x00t\
\x00a\x00b\x00_\x00c\x00l\x00o\x00s\x00e\x00.\x00s\x00v\x00g\
\x00\x0c\
\x06AM\x07\
\x00s\
\x00i\x00z\x00e\x00g\x00r\x00i\x00p\x00.\x00s\x00v\x00g\
\x00\x17\
\x0a>Y\xe7\
\x00s\
\x00p\x00l\x00i\x00t\x00t\x00e\x00r\x00-\x00h\x00o\x00r\x00i\x00z\x00o\x00n\x00t\
\x00a\x00l\x00.\x00s\x00v\x00g\
\x00\x0d\
\x0f\xb6\xcaG\
\x00d\
\x00o\x00w\x00n\x00a\x00r\x00r\x00o\x00w\x00.\x00s\x00v\x00g\
\x00\x09\
\x06\x98\x8e\xa7\
\x00c\
\x00l\x00o\x00s\x00e\x00.\x00s\x00v\x00g\
\x00\x14\
\x07\xec\xdcG\
\x00c\
\x00h\x00e\x00c\x00k\x00b\x00o\x00x\x00_\x00c\x00h\x00e\x00c\x00k\x00e\x00d\x00.\
\x00s\x00v\x00g\
\x00\x08\
\x08\x98W\xc7\
\x00b\
\x00a\x00s\x00e\x00.\x00s\x00v\x00g\
\x00\x16\
\x01u\xc1\x07\
\x00c\
\x00h\x00e\x00c\x00k\x00b\x00o\x00x\x00_\x00u\x00n\x00c\x00h\x00e\x00c\x00k\x00e\
\x00d\x00.\x00s\x00v\x00g\
"
qt_resource_struct = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x22\x00\x02\x00\x00\x00\x13\x00\x00\x00\x17\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x0e\x00\x02\x00\x00\x00\x13\x00\x00\x00\x04\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x03\x08\x00\x00\x00\x00\x00\x01\x00\x04\xfa\xeb\
\x00\x00\x01vR\xa7\x01\xc2\
\x00\x00\x01\x12\x00\x00\x00\x00\x00\x01\x00\x03^\xb8\
\x00\x00\x01vR\xa7\x01\xbc\
\x00\x00\x02\x1a\x00\x00\x00\x00\x00\x01\x00\x04\x0f;\
\x00\x00\x01vR\xa7\x01\xbc\
\x00\x00\x01\x8c\x00\x00\x00\x00\x00\x01\x00\x03\xae\x90\
\x00\x00\x01vR\xa7\x01\xc2\
\x00\x00\x00\xfa\x00\x00\x00\x00\x00\x01\x00\x03>C\
\x00\x00\x01vR\xa7\x01\xbc\
\x00\x00\x01L\x00\x00\x00\x00\x00\x01\x00\x03\x81\xcc\
\x00\x00\x01vR\xa7\x01\xbf\
\x00\x00\x02:\x00\x00\x00\x00\x00\x01\x00\x04.\xf3\
\x00\x00\x01vR\xa7\x01\xc2\
\x00\x00\x02\xac\x00\x00\x00\x00\x00\x01\x00\x04\x9by\
\x00\x00\x01vR\xa7\x01\xbc\
\x00\x00\x01\xc0\x00\x00\x00\x00\x00\x01\x00\x03\xd0 \
\x00\x00\x01vR\xa7\x01\xc2\
\x00\x00\x02\xc4\x00\x00\x00\x00\x00\x01\x00\x04\xbb7\
\x00\x00\x01vR\xa7\x01\xc2\
\x00\x00\x01\xe2\x00\x00\x00\x00\x00\x01\x00\x03\xee\xde\
\x00\x00\x01vR\xa7\x01\xbf\
\x00\x00\x02\xf2\x00\x00\x00\x00\x00\x01\x00\x04\xddp\
\x00\x00\x01vR\xa7\x01\xbc\
\x00\x00\x00R\x00\x00\x00\x00\x00\x01\x00\x02\xaeb\
\x00\x00\x01vR\xa7\x01\xbf\
\x00\x00\x00n\x00\x00\x00\x00\x00\x01\x00\x02\xcd\x1d\
\x00\x00\x01vR\xa7\x01\xc2\
\x00\x00\x02X\x00\x00\x00\x00\x00\x01\x00\x04X@\
\x00\x00\x01vR\xa7\x01\xbf\
\x00\x00\x008\x00\x00\x00\x00\x00\x01\x00\x02\x8eo\
\x00\x00\x01vR\xa7\x01\xc2\
\x00\x00\x00\xda\x00\x00\x00\x00\x00\x01\x00\x03\x1f\x88\
\x00\x00\x01vR\xa7\x01\xbf\
\x00\x00\x00\xaa\x00\x00\x00\x00\x00\x01\x00\x02\xfa\xd7\
\x00\x00\x01vR\xa7\x01\xbf\
\x00\x00\x02\x8c\x00\x00\x00\x00\x00\x01\x00\x04|\xba\
\x00\x00\x01vR\xa7\x01\xbc\
\x00\x00\x03\x08\x00\x00\x00\x00\x00\x01\x00\x02l|\
\x00\x00\x01vR\xa7\x01\xc2\
\x00\x00\x01\x12\x00\x00\x00\x00\x00\x01\x00\x00\xd0I\
\x00\x00\x01vR\xa7\x01\xbc\
\x00\x00\x02\x1a\x00\x00\x00\x00\x00\x01\x00\x01\x80\xcc\
\x00\x00\x01vR\xa7\x01\xbc\
\x00\x00\x01\x8c\x00\x00\x00\x00\x00\x01\x00\x01 !\
\x00\x00\x01vR\xa7\x01\xc2\
\x00\x00\x00\xfa\x00\x00\x00\x00\x00\x01\x00\x00\xaf\xd4\
\x00\x00\x01vR\xa7\x01\xbc\
\x00\x00\x01L\x00\x00\x00\x00\x00\x01\x00\x00\xf3]\
\x00\x00\x01vR\xa7\x01\xbf\
\x00\x00\x02:\x00\x00\x00\x00\x00\x01\x00\x01\xa0\x84\
\x00\x00\x01vR\xa7\x01\xc2\
\x00\x00\x02\xac\x00\x00\x00\x00\x00\x01\x00\x02\x0d\x0a\
\x00\x00\x01vR\xa7\x01\xbc\
\x00\x00\x01\xc0\x00\x00\x00\x00\x00\x01\x00\x01A\xb1\
\x00\x00\x01vR\xa7\x01\xc2\
\x00\x00\x02\xc4\x00\x00\x00\x00\x00\x01\x00\x02,\xc8\
\x00\x00\x01vR\xa7\x01\xc2\
\x00\x00\x01\xe2\x00\x00\x00\x00\x00\x01\x00\x01`o\
\x00\x00\x01vR\xa7\x01\xbf\
\x00\x00\x02\xf2\x00\x00\x00\x00\x00\x01\x00\x02O\x01\
\x00\x00\x01vR\xa7\x01\xbc\
\x00\x00\x00R\x00\x00\x00\x00\x00\x01\x00\x00\x1f\xf3\
\x00\x00\x01vR\xa7\x01\xbf\
\x00\x00\x00n\x00\x00\x00\x00\x00\x01\x00\x00>\xae\
\x00\x00\x01vR\xa7\x01\xbf\
\x00\x00\x02X\x00\x00\x00\x00\x00\x01\x00\x01\xc9\xd1\
\x00\x00\x01vR\xa7\x01\xbf\
\x00\x00\x008\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01vR\xa7\x01\xc2\
\x00\x00\x00\xda\x00\x00\x00\x00\x00\x01\x00\x00\x91\x19\
\x00\x00\x01vR\xa7\x01\xbf\
\x00\x00\x00\xaa\x00\x00\x00\x00\x00\x01\x00\x00lh\
\x00\x00\x01vR\xa7\x01\xbf\
\x00\x00\x02\x8c\x00\x00\x00\x00\x00\x01\x00\x01\xeeK\
\x00\x00\x01vR\xa7\x01\xbc\
"
def qInitResources():
    # Register the embedded resource tables (directory structure, names,
    # payload bytes) with Qt's resource system.  0x03 is the resource-format
    # version constant emitted with this generated data.
    QtCore.qRegisterResourceData(0x03, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
    # Mirror of qInitResources: unregister the same resource tables from Qt.
    QtCore.qUnregisterResourceData(0x03, qt_resource_struct, qt_resource_name, qt_resource_data)
# Resources are registered as an import-time side effect, the standard
# behavior of pyrcc-generated modules.  NOTE(review): QtCore is presumably
# imported near the top of the file — not visible in this chunk.
qInitResources()
| 20.876066
| 96
| 0.623652
| 70,456
| 443,178
| 3.921781
| 0.026314
| 0.131156
| 0.044587
| 0.019
| 0.995639
| 0.995444
| 0.99502
| 0.994915
| 0.994351
| 0.993446
| 0
| 0.214855
| 0.188807
| 443,178
| 21,228
| 97
| 20.877049
| 0.553742
| 0.00037
| 0
| 0.992129
| 0
| 0.002781
| 0
| 0
| 0
| 0
| 0.000018
| 0
| 0
| 1
| 0.000094
| false
| 0
| 0.000047
| 0
| 0.000141
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8a8bc71eaf4121f378dddeb5f632816aa7c464a7
| 2,444
|
py
|
Python
|
Clean.py
|
parrenin/PaleoChrono
|
90237b551c569e55ee6e6696c4b176234da4d94f
|
[
"MIT"
] | null | null | null |
Clean.py
|
parrenin/PaleoChrono
|
90237b551c569e55ee6e6696c4b176234da4d94f
|
[
"MIT"
] | null | null | null |
Clean.py
|
parrenin/PaleoChrono
|
90237b551c569e55ee6e6696c4b176234da4d94f
|
[
"MIT"
] | null | null | null |
import os,re
import sys
dir=sys.argv[1]
inclusive=True
pattern='.pdf'
regexObj = re.compile(pattern)
for root, dirs, files in os.walk(dir, topdown=False):
for name in files:
path = os.path.join(root, name)
if bool(regexObj.search(path)) == bool(inclusive):
os.remove(path)
for name in dirs:
path = os.path.join(root, name)
if len(os.listdir(path)) == 0:
os.rmdir(path)
pattern='restart.'
regexObj = re.compile(pattern)
for root, dirs, files in os.walk(dir, topdown=False):
for name in files:
path = os.path.join(root, name)
if bool(regexObj.search(path)) == bool(inclusive):
os.remove(path)
for name in dirs:
path = os.path.join(root, name)
if len(os.listdir(path)) == 0:
os.rmdir(path)
pattern='~'
regexObj = re.compile(pattern)
for root, dirs, files in os.walk(dir, topdown=False):
for name in files:
path = os.path.join(root, name)
if bool(regexObj.search(path)) == bool(inclusive):
os.remove(path)
for name in dirs:
path = os.path.join(root, name)
if len(os.listdir(path)) == 0:
os.rmdir(path)
pattern='.bak'
regexObj = re.compile(pattern)
for root, dirs, files in os.walk(dir, topdown=False):
for name in files:
path = os.path.join(root, name)
if bool(regexObj.search(path)) == bool(inclusive):
os.remove(path)
for name in dirs:
path = os.path.join(root, name)
if len(os.listdir(path)) == 0:
os.rmdir(path)
pattern='output.txt'
regexObj = re.compile(pattern)
for root, dirs, files in os.walk(dir, topdown=False):
for name in files:
path = os.path.join(root, name)
if bool(regexObj.search(path)) == bool(inclusive):
os.remove(path)
for name in dirs:
path = os.path.join(root, name)
if len(os.listdir(path)) == 0:
os.rmdir(path)
pattern='.pyc'
regexObj = re.compile(pattern)
for root, dirs, files in os.walk(dir, topdown=False):
for name in files:
path = os.path.join(root, name)
if bool(regexObj.search(path)) == bool(inclusive):
os.remove(path)
for name in dirs:
path = os.path.join(root, name)
if len(os.listdir(path)) == 0:
os.rmdir(path)
| 30.17284
| 59
| 0.567921
| 341
| 2,444
| 4.070381
| 0.108504
| 0.060519
| 0.07781
| 0.121037
| 0.945965
| 0.945965
| 0.945965
| 0.945965
| 0.945965
| 0.945965
| 0
| 0.004075
| 0.297054
| 2,444
| 80
| 60
| 30.55
| 0.803842
| 0
| 0
| 0.857143
| 0
| 0
| 0.013113
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.028571
| 0
| 0.028571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
76d29dd5509d70f5876ccbcce31d1a868e9c77e2
| 23,008
|
py
|
Python
|
tests/test_managedblockchain/test_managedblockchain_members.py
|
nourishcare/moto
|
8d3d43da90be101216d16330aeacaf7bd1fff6f4
|
[
"Apache-2.0"
] | null | null | null |
tests/test_managedblockchain/test_managedblockchain_members.py
|
nourishcare/moto
|
8d3d43da90be101216d16330aeacaf7bd1fff6f4
|
[
"Apache-2.0"
] | null | null | null |
tests/test_managedblockchain/test_managedblockchain_members.py
|
nourishcare/moto
|
8d3d43da90be101216d16330aeacaf7bd1fff6f4
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import unicode_literals
import boto3
import sure # noqa
from moto.managedblockchain.exceptions import BadRequestException
from moto import mock_managedblockchain
from . import helpers
@mock_managedblockchain
def test_create_another_member():
    """Full lifecycle: propose, vote, invite, then create a second member."""
    client = boto3.client("managedblockchain", region_name="us-east-1")

    # Stand up a network; the creator becomes the first member.
    network = client.create_network(
        Name="testnetwork1",
        Description="Test Network 1",
        Framework="HYPERLEDGER_FABRIC",
        FrameworkVersion="1.2",
        FrameworkConfiguration=helpers.default_frameworkconfiguration,
        VotingPolicy=helpers.default_votingpolicy,
        MemberConfiguration=helpers.default_memberconfiguration,
    )
    network_id = network["NetworkId"]
    member_id = network["MemberId"]

    # Propose inviting another member and confirm the proposal is open.
    proposal_id = client.create_proposal(
        NetworkId=network_id,
        MemberId=member_id,
        Actions=helpers.default_policy_actions,
    )["ProposalId"]
    proposal = client.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
    proposal["Proposal"]["NetworkId"].should.equal(network_id)
    proposal["Proposal"]["Status"].should.equal("IN_PROGRESS")

    # Approving the proposal produces a pending invitation.
    client.vote_on_proposal(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId=member_id,
        Vote="YES",
    )
    invitations = client.list_invitations()
    invitations["Invitations"][0]["NetworkSummary"]["Id"].should.equal(network_id)
    invitations["Invitations"][0]["Status"].should.equal("PENDING")
    invitation_id = invitations["Invitations"][0]["InvitationId"]

    # Accept the invitation by creating the second member.
    member_id2 = client.create_member(
        InvitationId=invitation_id,
        NetworkId=network_id,
        MemberConfiguration=helpers.create_member_configuration(
            "testmember2", "admin", "Admin12345", False
        ),
    )["MemberId"]

    # The invitation should now read as accepted.
    invitations = client.list_invitations()
    invitations["Invitations"][0]["InvitationId"].should.equal(invitation_id)
    invitations["Invitations"][0]["Status"].should.equal("ACCEPTED")

    # Both members appear in the network's member list.
    members = client.list_members(NetworkId=network_id)["Members"]
    members.should.have.length_of(2)
    helpers.member_id_exist_in_list(members, member_id2).should.equal(True)

    # Second member's details are retrievable by id.
    detail = client.get_member(NetworkId=network_id, MemberId=member_id2)
    detail["Member"]["Name"].should.equal("testmember2")

    # Flip the CA CloudWatch logging flag and verify it round-trips.
    toggled = not helpers.default_memberconfiguration[
        "LogPublishingConfiguration"
    ]["Fabric"]["CaLogs"]["Cloudwatch"]["Enabled"]
    client.update_member(
        NetworkId=network_id,
        MemberId=member_id2,
        LogPublishingConfiguration={
            "Fabric": {"CaLogs": {"Cloudwatch": {"Enabled": toggled}}}
        },
    )
    detail = client.get_member(NetworkId=network_id, MemberId=member_id2)
    detail["Member"]["LogPublishingConfiguration"]["Fabric"]["CaLogs"]["Cloudwatch"][
        "Enabled"
    ].should.equal(toggled)
@mock_managedblockchain
def test_create_another_member_withopts():
    """Member lifecycle with the optional description set.

    Creates a network, invites and creates a second member (with a
    description), verifies the invitation cannot be reused, then deletes
    both members and checks the network and invitation wind down.
    """
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    # Create network
    response = conn.create_network(
        Name="testnetwork1",
        Framework="HYPERLEDGER_FABRIC",
        FrameworkVersion="1.2",
        FrameworkConfiguration=helpers.default_frameworkconfiguration,
        VotingPolicy=helpers.default_votingpolicy,
        MemberConfiguration=helpers.default_memberconfiguration,
    )
    network_id = response["NetworkId"]
    member_id = response["MemberId"]
    # Create proposal to invite another member
    response = conn.create_proposal(
        NetworkId=network_id,
        MemberId=member_id,
        Actions=helpers.default_policy_actions,
    )
    proposal_id = response["ProposalId"]
    # Get proposal details
    response = conn.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
    response["Proposal"]["NetworkId"].should.equal(network_id)
    response["Proposal"]["Status"].should.equal("IN_PROGRESS")
    # Vote yes; the response carries nothing we need, so don't bind it
    conn.vote_on_proposal(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId=member_id,
        Vote="YES",
    )
    # Get the invitation created by the approved proposal
    response = conn.list_invitations()
    response["Invitations"][0]["NetworkSummary"]["Id"].should.equal(network_id)
    response["Invitations"][0]["Status"].should.equal("PENDING")
    invitation_id = response["Invitations"][0]["InvitationId"]
    # Create the member, passing the optional description argument
    response = conn.create_member(
        InvitationId=invitation_id,
        NetworkId=network_id,
        MemberConfiguration=helpers.create_member_configuration(
            "testmember2", "admin", "Admin12345", False, "Test Member 2"
        ),
    )
    member_id2 = response["MemberId"]
    # Check the invitation status flipped to ACCEPTED
    response = conn.list_invitations()
    response["Invitations"][0]["InvitationId"].should.equal(invitation_id)
    response["Invitations"][0]["Status"].should.equal("ACCEPTED")
    # Find member in full list
    response = conn.list_members(NetworkId=network_id)
    members = response["Members"]
    members.should.have.length_of(2)
    helpers.member_id_exist_in_list(members, member_id2).should.equal(True)
    # Get member 2 details and confirm the description was stored
    response = conn.get_member(NetworkId=network_id, MemberId=member_id2)
    response["Member"]["Description"].should.equal("Test Member 2")
    # Reusing a consumed invitation must fail; the sure assertion chain
    # returns nothing useful, so its result is intentionally not bound
    conn.create_member.when.called_with(
        InvitationId=invitation_id,
        NetworkId=network_id,
        MemberConfiguration=helpers.create_member_configuration(
            "testmember2", "admin", "Admin12345", False, "Test Member 2 Duplicate"
        ),
    ).should.throw(Exception, "Invitation {0} not valid".format(invitation_id))
    # Delete member 2
    conn.delete_member(NetworkId=network_id, MemberId=member_id2)
    # Member is still in the list (soft delete) ...
    response = conn.list_members(NetworkId=network_id)
    members = response["Members"]
    members.should.have.length_of(2)
    # ... but can no longer be fetched individually
    conn.get_member.when.called_with(
        NetworkId=network_id, MemberId=member_id2,
    ).should.throw(Exception, "Member {0} not found".format(member_id2))
    # Deleting the last (owning) member removes the network entirely
    conn.delete_member(NetworkId=network_id, MemberId=member_id)
    response = conn.list_networks()
    mbcnetworks = response["Networks"]
    mbcnetworks.should.have.length_of(0)
    # Verify the invitation's network summary now reports DELETED
    response = conn.list_invitations()
    response["Invitations"].should.have.length_of(1)
    response["Invitations"][0]["NetworkSummary"]["Id"].should.equal(network_id)
    response["Invitations"][0]["NetworkSummary"]["Status"].should.equal("DELETED")
@mock_managedblockchain
def test_create_and_delete_member():
    """A proposal may both invite a principal and remove a member.

    After the removal is approved, the removed member remains visible in
    list_members but with Status DELETED.
    """
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    # Create network
    response = conn.create_network(
        Name="testnetwork1",
        Framework="HYPERLEDGER_FABRIC",
        FrameworkVersion="1.2",
        FrameworkConfiguration=helpers.default_frameworkconfiguration,
        VotingPolicy=helpers.default_votingpolicy,
        MemberConfiguration=helpers.default_memberconfiguration,
    )
    network_id = response["NetworkId"]
    member_id = response["MemberId"]
    # Create proposal (create additional member)
    response = conn.create_proposal(
        NetworkId=network_id,
        MemberId=member_id,
        Actions=helpers.default_policy_actions,
    )
    proposal_id = response["ProposalId"]
    # Vote yes; the response is not needed
    conn.vote_on_proposal(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId=member_id,
        Vote="YES",
    )
    # Get the invitation
    response = conn.list_invitations()
    invitation_id = response["Invitations"][0]["InvitationId"]
    # Create the member
    response = conn.create_member(
        InvitationId=invitation_id,
        NetworkId=network_id,
        MemberConfiguration=helpers.create_member_configuration(
            "testmember2", "admin", "Admin12345", False, "Test Member 2"
        ),
    )
    member_id2 = response["MemberId"]
    # One proposal carrying both an invitation and a removal action
    both_policy_actions = {
        "Invitations": [{"Principal": "123456789012"}],
        "Removals": [{"MemberId": member_id2}],
    }
    # Create proposal (invite and remove member)
    response = conn.create_proposal(
        NetworkId=network_id, MemberId=member_id, Actions=both_policy_actions,
    )
    proposal_id2 = response["ProposalId"]
    # Get proposal details
    response = conn.get_proposal(NetworkId=network_id, ProposalId=proposal_id2)
    response["Proposal"]["NetworkId"].should.equal(network_id)
    response["Proposal"]["Status"].should.equal("IN_PROGRESS")
    # Vote yes on the combined proposal
    conn.vote_on_proposal(
        NetworkId=network_id,
        ProposalId=proposal_id2,
        VoterMemberId=member_id,
        Vote="YES",
    )
    # The invitation action produced exactly one new PENDING invitation
    response = conn.list_invitations()
    invitations = helpers.select_invitation_id_for_network(
        response["Invitations"], network_id, "PENDING"
    )
    invitations.should.have.length_of(1)
    # The removed member is still listed, but flagged DELETED
    response = conn.list_members(NetworkId=network_id)
    members = response["Members"]
    members.should.have.length_of(2)
    foundmember2 = any(
        member["Id"] == member_id2 and member["Status"] == "DELETED"
        for member in members
    )
    foundmember2.should.equal(True)
@mock_managedblockchain
def test_create_too_many_members():
    """STARTER-edition networks allow at most 5 members.

    Creates members 2-5 from approved invitations, then verifies the
    sixth create_member call is rejected with the edition-limit error.
    """
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    # Create network
    response = conn.create_network(
        Name="testnetwork1",
        Framework="HYPERLEDGER_FABRIC",
        FrameworkVersion="1.2",
        FrameworkConfiguration=helpers.default_frameworkconfiguration,
        VotingPolicy=helpers.default_votingpolicy,
        MemberConfiguration=helpers.default_memberconfiguration,
    )
    network_id = response["NetworkId"]
    member_id = response["MemberId"]
    # Approve 5 proposals so there are invitations for would-be members 2-6
    for _ in range(2, 7):
        # Create proposal
        response = conn.create_proposal(
            NetworkId=network_id,
            MemberId=member_id,
            Actions=helpers.default_policy_actions,
        )
        proposal_id = response["ProposalId"]
        # Vote yes; the response is not needed
        conn.vote_on_proposal(
            NetworkId=network_id,
            ProposalId=proposal_id,
            VoterMemberId=member_id,
            Vote="YES",
        )
    # Consume four invitations, bringing the member count to the cap of 5
    for counter in range(2, 6):
        # Get a pending invitation for this network
        response = conn.list_invitations()
        invitation_id = helpers.select_invitation_id_for_network(
            response["Invitations"], network_id, "PENDING"
        )[0]
        # Create the member
        response = conn.create_member(
            InvitationId=invitation_id,
            NetworkId=network_id,
            MemberConfiguration=helpers.create_member_configuration(
                "testmember" + str(counter),
                "admin",
                "Admin12345",
                False,
                "Test Member " + str(counter),
            ),
        )
        member_id = response["MemberId"]
        # Find member in full list
        response = conn.list_members(NetworkId=network_id)
        members = response["Members"]
        members.should.have.length_of(counter)
        helpers.member_id_exist_in_list(members, member_id).should.equal(True)
        # Get member details
        response = conn.get_member(NetworkId=network_id, MemberId=member_id)
        response["Member"]["Description"].should.equal("Test Member " + str(counter))
    # One PENDING invitation remains for the would-be sixth member
    response = conn.list_invitations()
    invitation_id = helpers.select_invitation_id_for_network(
        response["Invitations"], network_id, "PENDING"
    )[0]
    # The sixth member must be rejected by the STARTER edition limit
    conn.create_member.when.called_with(
        InvitationId=invitation_id,
        NetworkId=network_id,
        MemberConfiguration=helpers.create_member_configuration(
            "testmember6", "admin", "Admin12345", False, "Test Member 6"
        ),
    ).should.throw(
        Exception,
        "5 is the maximum number of members allowed in a STARTER Edition network",
    )
@mock_managedblockchain
def test_create_another_member_alreadyhave():
    """Creating a member with a name already used in the network fails."""
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    # Create network (the default member configuration names "testmember1")
    response = conn.create_network(
        Name="testnetwork1",
        Description="Test Network 1",
        Framework="HYPERLEDGER_FABRIC",
        FrameworkVersion="1.2",
        FrameworkConfiguration=helpers.default_frameworkconfiguration,
        VotingPolicy=helpers.default_votingpolicy,
        MemberConfiguration=helpers.default_memberconfiguration,
    )
    network_id = response["NetworkId"]
    member_id = response["MemberId"]
    # Create proposal
    response = conn.create_proposal(
        NetworkId=network_id,
        MemberId=member_id,
        Actions=helpers.default_policy_actions,
    )
    proposal_id = response["ProposalId"]
    # Vote yes; the response is not needed
    conn.vote_on_proposal(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId=member_id,
        Vote="YES",
    )
    # Get the invitation
    response = conn.list_invitations()
    invitation_id = response["Invitations"][0]["InvitationId"]
    # Should fail trying to create with same name
    conn.create_member.when.called_with(
        NetworkId=network_id,
        InvitationId=invitation_id,
        MemberConfiguration=helpers.create_member_configuration(
            "testmember1", "admin", "Admin12345", False
        ),
    ).should.throw(
        Exception,
        "Member name {0} already exists in network {1}".format(
            "testmember1", network_id
        ),
    )
@mock_managedblockchain
def test_create_another_member_badnetwork():
    """create_member against a nonexistent network raises not-found."""
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    # The sure assertion chain returns nothing useful; don't bind it
    conn.create_member.when.called_with(
        NetworkId="n-ABCDEFGHIJKLMNOP0123456789",
        InvitationId="id-ABCDEFGHIJKLMNOP0123456789",
        MemberConfiguration=helpers.create_member_configuration(
            "testmember2", "admin", "Admin12345", False
        ),
    ).should.throw(Exception, "Network n-ABCDEFGHIJKLMNOP0123456789 not found")
@mock_managedblockchain
def test_create_another_member_badinvitation():
    """create_member with an unknown invitation id is rejected."""
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    # Create network - need a good network
    response = conn.create_network(
        Name="testnetwork1",
        Framework="HYPERLEDGER_FABRIC",
        FrameworkVersion="1.2",
        FrameworkConfiguration=helpers.default_frameworkconfiguration,
        VotingPolicy=helpers.default_votingpolicy,
        MemberConfiguration=helpers.default_memberconfiguration,
    )
    network_id = response["NetworkId"]
    conn.create_member.when.called_with(
        NetworkId=network_id,
        InvitationId="in-ABCDEFGHIJKLMNOP0123456789",
        MemberConfiguration=helpers.create_member_configuration(
            "testmember2", "admin", "Admin12345", False
        ),
    ).should.throw(Exception, "Invitation in-ABCDEFGHIJKLMNOP0123456789 not valid")
@mock_managedblockchain
def test_create_another_member_adminpassword():
    """Admin-password policy checks on create_member.

    Verifies that passwords that are too short, lack required character
    classes, or contain a forbidden character are all rejected.
    """
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    # Create network - need a good network
    response = conn.create_network(
        Name="testnetwork1",
        Framework="HYPERLEDGER_FABRIC",
        FrameworkVersion="1.2",
        FrameworkConfiguration=helpers.default_frameworkconfiguration,
        VotingPolicy=helpers.default_votingpolicy,
        MemberConfiguration=helpers.default_memberconfiguration,
    )
    network_id = response["NetworkId"]
    member_id = response["MemberId"]
    # Create proposal
    response = conn.create_proposal(
        NetworkId=network_id,
        MemberId=member_id,
        Actions=helpers.default_policy_actions,
    )
    proposal_id = response["ProposalId"]
    # Get proposal details
    response = conn.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
    response["Proposal"]["NetworkId"].should.equal(network_id)
    response["Proposal"]["Status"].should.equal("IN_PROGRESS")
    # Vote yes; the response is not needed
    conn.vote_on_proposal(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId=member_id,
        Vote="YES",
    )
    # Get the invitation
    response = conn.list_invitations()
    invitation_id = response["Invitations"][0]["InvitationId"]
    # Start from a valid configuration, then mutate only the password
    badadminpassmemberconf = helpers.create_member_configuration(
        "testmember2", "admin", "Admin12345", False
    )

    def _expect_rejected(password, message):
        # Set the admin password in place and assert create_member throws.
        badadminpassmemberconf["FrameworkConfiguration"]["Fabric"][
            "AdminPassword"
        ] = password
        conn.create_member.when.called_with(
            NetworkId=network_id,
            InvitationId=invitation_id,
            MemberConfiguration=badadminpassmemberconf,
        ).should.throw(Exception, message)

    # Too short
    _expect_rejected(
        "badap",
        "Invalid length for parameter MemberConfiguration.FrameworkConfiguration.Fabric.AdminPassword",
    )
    # No uppercase or numbers
    _expect_rejected("badadminpwd", "Invalid request body")
    # No lowercase or numbers
    _expect_rejected("BADADMINPWD", "Invalid request body")
    # No numbers
    _expect_rejected("badAdminpwd", "Invalid request body")
    # Invalid character
    _expect_rejected("badAdmin@pwd1", "Invalid request body")
@mock_managedblockchain
def test_list_members_badnetwork():
    """list_members against a nonexistent network raises not-found."""
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    conn.list_members.when.called_with(
        NetworkId="n-ABCDEFGHIJKLMNOP0123456789",
    ).should.throw(Exception, "Network n-ABCDEFGHIJKLMNOP0123456789 not found")
@mock_managedblockchain
def test_get_member_badnetwork():
    """get_member against a nonexistent network raises not-found."""
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    conn.get_member.when.called_with(
        NetworkId="n-ABCDEFGHIJKLMNOP0123456789",
        MemberId="m-ABCDEFGHIJKLMNOP0123456789",
    ).should.throw(Exception, "Network n-ABCDEFGHIJKLMNOP0123456789 not found")
@mock_managedblockchain
def test_get_member_badmember():
    """get_member with an unknown member id on a real network fails."""
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    # Create network - need a good network
    response = conn.create_network(
        Name="testnetwork1",
        Framework="HYPERLEDGER_FABRIC",
        FrameworkVersion="1.2",
        FrameworkConfiguration=helpers.default_frameworkconfiguration,
        VotingPolicy=helpers.default_votingpolicy,
        MemberConfiguration=helpers.default_memberconfiguration,
    )
    network_id = response["NetworkId"]
    conn.get_member.when.called_with(
        NetworkId=network_id, MemberId="m-ABCDEFGHIJKLMNOP0123456789",
    ).should.throw(Exception, "Member m-ABCDEFGHIJKLMNOP0123456789 not found")
@mock_managedblockchain
def test_delete_member_badnetwork():
    """delete_member against a nonexistent network raises not-found."""
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    conn.delete_member.when.called_with(
        NetworkId="n-ABCDEFGHIJKLMNOP0123456789",
        MemberId="m-ABCDEFGHIJKLMNOP0123456789",
    ).should.throw(Exception, "Network n-ABCDEFGHIJKLMNOP0123456789 not found")
@mock_managedblockchain
def test_delete_member_badmember():
    """delete_member with an unknown member id on a real network fails."""
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    # Create network - need a good network
    response = conn.create_network(
        Name="testnetwork1",
        Framework="HYPERLEDGER_FABRIC",
        FrameworkVersion="1.2",
        FrameworkConfiguration=helpers.default_frameworkconfiguration,
        VotingPolicy=helpers.default_votingpolicy,
        MemberConfiguration=helpers.default_memberconfiguration,
    )
    network_id = response["NetworkId"]
    conn.delete_member.when.called_with(
        NetworkId=network_id, MemberId="m-ABCDEFGHIJKLMNOP0123456789",
    ).should.throw(Exception, "Member m-ABCDEFGHIJKLMNOP0123456789 not found")
@mock_managedblockchain
def test_update_member_badnetwork():
    """update_member against a nonexistent network raises not-found."""
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    conn.update_member.when.called_with(
        NetworkId="n-ABCDEFGHIJKLMNOP0123456789",
        MemberId="m-ABCDEFGHIJKLMNOP0123456789",
        LogPublishingConfiguration=helpers.default_memberconfiguration[
            "LogPublishingConfiguration"
        ],
    ).should.throw(Exception, "Network n-ABCDEFGHIJKLMNOP0123456789 not found")
@mock_managedblockchain
def test_update_member_badmember():
    """update_member with an unknown member id on a real network fails."""
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    # Create network - need a good network
    response = conn.create_network(
        Name="testnetwork1",
        Framework="HYPERLEDGER_FABRIC",
        FrameworkVersion="1.2",
        FrameworkConfiguration=helpers.default_frameworkconfiguration,
        VotingPolicy=helpers.default_votingpolicy,
        MemberConfiguration=helpers.default_memberconfiguration,
    )
    network_id = response["NetworkId"]
    conn.update_member.when.called_with(
        NetworkId=network_id,
        MemberId="m-ABCDEFGHIJKLMNOP0123456789",
        LogPublishingConfiguration=helpers.default_memberconfiguration[
            "LogPublishingConfiguration"
        ],
    ).should.throw(Exception, "Member m-ABCDEFGHIJKLMNOP0123456789 not found")
| 34.340299
| 103
| 0.696323
| 2,266
| 23,008
| 6.872462
| 0.07767
| 0.05471
| 0.054325
| 0.030052
| 0.884672
| 0.863353
| 0.853464
| 0.827715
| 0.805111
| 0.798562
| 0
| 0.022965
| 0.201321
| 23,008
| 669
| 104
| 34.391629
| 0.824499
| 0.065847
| 0
| 0.755061
| 0
| 0
| 0.17345
| 0.041822
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030364
| false
| 0.036437
| 0.012146
| 0
| 0.04251
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
76db04944b5837e0e83f4504e61545d02bdb34e5
| 34,009
|
py
|
Python
|
tests/components/elkm1/test_config_flow.py
|
khmelevskikh/core
|
f30f6234f9500965c68aeea710c5bd18db55ecb0
|
[
"Apache-2.0"
] | null | null | null |
tests/components/elkm1/test_config_flow.py
|
khmelevskikh/core
|
f30f6234f9500965c68aeea710c5bd18db55ecb0
|
[
"Apache-2.0"
] | 9
|
2022-01-26T06:25:43.000Z
|
2022-03-31T07:15:42.000Z
|
tests/components/elkm1/test_config_flow.py
|
khmelevskikh/core
|
f30f6234f9500965c68aeea710c5bd18db55ecb0
|
[
"Apache-2.0"
] | 1
|
2022-01-03T06:44:44.000Z
|
2022-01-03T06:44:44.000Z
|
"""Test the Elk-M1 Control config flow."""
from dataclasses import asdict
from unittest.mock import patch
import pytest
from homeassistant import config_entries
from homeassistant.components import dhcp
from homeassistant.components.elkm1.const import DOMAIN
from homeassistant.const import CONF_HOST, CONF_PASSWORD
from homeassistant.data_entry_flow import RESULT_TYPE_ABORT, RESULT_TYPE_FORM
from . import (
ELK_DISCOVERY,
ELK_NON_SECURE_DISCOVERY,
MOCK_IP_ADDRESS,
MOCK_MAC,
_patch_discovery,
_patch_elk,
mock_elk,
)
from tests.common import MockConfigEntry
# DHCP discovery payload built from the mocked IP/MAC fixtures (empty hostname).
DHCP_DISCOVERY = dhcp.DhcpServiceInfo(MOCK_IP_ADDRESS, "", MOCK_MAC)
# Plain-dict form of the discovery dataclass fixture, for flows that take dicts.
ELK_DISCOVERY_INFO = asdict(ELK_DISCOVERY)
# Dotted base path used when patching the integration's entry points.
MODULE = "homeassistant.components.elkm1"
async def test_form_user_with_secure_elk_no_discovery(hass):
    """Test we can setup a secure elk."""
    # With discovery finding nothing, the flow skips the device picker and
    # goes straight to the manual-connection form.
    with _patch_discovery(no_device=True):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_USER}
        )
        await hass.async_block_till_done()
    assert result["type"] == "form"
    assert result["errors"] == {}
    assert result["step_id"] == "manual_connection"
    # Mock an Elk that authenticates and completes sync successfully.
    mocked_elk = mock_elk(invalid_auth=False, sync_complete=True)
    with _patch_discovery(no_device=True), _patch_elk(elk=mocked_elk), patch(
        "homeassistant.components.elkm1.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.elkm1.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "protocol": "secure",
                "address": "1.2.3.4",
                "username": "test-username",
                "password": "test-password",
                "prefix": "",
            },
        )
        await hass.async_block_till_done()
    # "secure" protocol maps to an elks:// host URL in the stored data.
    assert result2["type"] == "create_entry"
    assert result2["title"] == "ElkM1"
    assert result2["data"] == {
        "auto_configure": True,
        "host": "elks://1.2.3.4",
        "password": "test-password",
        "prefix": "",
        "username": "test-username",
    }
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_user_with_secure_elk_no_discovery_ip_already_configured(hass):
    """Test we abort when we try to configure the same ip."""
    # Pre-register an entry whose host already points at the mocked IP.
    config_entry = MockConfigEntry(
        domain=DOMAIN,
        data={CONF_HOST: f"elks://{MOCK_IP_ADDRESS}"},
        unique_id="cc:cc:cc:cc:cc:cc",
    )
    config_entry.add_to_hass(hass)
    with _patch_discovery(no_device=True):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_USER}
        )
        await hass.async_block_till_done()
    assert result["type"] == "form"
    assert result["errors"] == {}
    assert result["step_id"] == "manual_connection"
    mocked_elk = mock_elk(invalid_auth=False, sync_complete=True)
    with _patch_discovery(no_device=True), _patch_elk(elk=mocked_elk):
        # 127.0.0.1 resolves to the already-configured entry's address.
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "protocol": "secure",
                "address": "127.0.0.1",
                "username": "test-username",
                "password": "test-password",
                "prefix": "",
            },
        )
        await hass.async_block_till_done()
    assert result2["type"] == RESULT_TYPE_ABORT
    assert result2["reason"] == "address_already_configured"
async def test_form_user_with_secure_elk_with_discovery(hass):
    """Test we can setup a secure elk."""
    # With a discovered device present, the flow starts at the picker step.
    with _patch_discovery():
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_USER}
        )
        await hass.async_block_till_done()
    assert result["type"] == "form"
    assert result["errors"] is None
    assert result["step_id"] == "user"
    mocked_elk = mock_elk(invalid_auth=False, sync_complete=True)
    with _patch_elk(elk=mocked_elk):
        # Pick the discovered device by its MAC address.
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {"device": MOCK_MAC},
        )
        await hass.async_block_till_done()
    with _patch_discovery(), _patch_elk(elk=mocked_elk), patch(
        "homeassistant.components.elkm1.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.elkm1.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        # Only credentials are asked for; host/port come from discovery.
        result3 = await hass.config_entries.flow.async_configure(
            result2["flow_id"],
            {
                "username": "test-username",
                "password": "test-password",
            },
        )
        await hass.async_block_till_done()
    assert result3["type"] == "create_entry"
    assert result3["title"] == "ElkM1 ddeeff"
    assert result3["data"] == {
        "auto_configure": True,
        "host": "elks://127.0.0.1:2601",
        "password": "test-password",
        "prefix": "",
        "username": "test-username",
    }
    assert result3["result"].unique_id == "aa:bb:cc:dd:ee:ff"
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_user_with_secure_elk_with_discovery_pick_manual(hass):
    """Test we can setup a secure elk with discovery but user picks manual and directed discovery fails."""
    with _patch_discovery():
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_USER}
        )
        await hass.async_block_till_done()
    assert result["type"] == "form"
    assert result["errors"] is None
    assert result["step_id"] == "user"
    mocked_elk = mock_elk(invalid_auth=False, sync_complete=True)
    with _patch_elk(elk=mocked_elk):
        # device=None means "manual entry" in the picker step.
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {"device": None},
        )
        await hass.async_block_till_done()
    with _patch_discovery(), _patch_elk(elk=mocked_elk), patch(
        "homeassistant.components.elkm1.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.elkm1.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        # 1.2.3.4 does not match the discovered device, so directed
        # discovery cannot resolve a MAC for it.
        result3 = await hass.config_entries.flow.async_configure(
            result2["flow_id"],
            {
                "protocol": "secure",
                "address": "1.2.3.4",
                "username": "test-username",
                "password": "test-password",
                "prefix": "",
            },
        )
        await hass.async_block_till_done()
    assert result3["type"] == "create_entry"
    assert result3["title"] == "ElkM1"
    assert result3["data"] == {
        "auto_configure": True,
        "host": "elks://1.2.3.4",
        "password": "test-password",
        "prefix": "",
        "username": "test-username",
    }
    # No MAC could be resolved, so the entry has no unique_id.
    assert result3["result"].unique_id is None
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_user_with_secure_elk_with_discovery_pick_manual_direct_discovery(
    hass,
):
    """Test we can setup a secure elk with discovery but user picks manual and directed discovery succeeds."""
    with _patch_discovery():
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_USER}
        )
        await hass.async_block_till_done()
    assert result["type"] == "form"
    assert result["errors"] is None
    assert result["step_id"] == "user"
    mocked_elk = mock_elk(invalid_auth=False, sync_complete=True)
    with _patch_elk(elk=mocked_elk):
        # device=None means "manual entry" in the picker step.
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {"device": None},
        )
        await hass.async_block_till_done()
    with _patch_discovery(), _patch_elk(elk=mocked_elk), patch(
        "homeassistant.components.elkm1.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.elkm1.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        # 127.0.0.1 matches the discovered device, so directed discovery
        # resolves the MAC and the entry gets a unique_id.
        result3 = await hass.config_entries.flow.async_configure(
            result2["flow_id"],
            {
                "protocol": "secure",
                "address": "127.0.0.1",
                "username": "test-username",
                "password": "test-password",
                "prefix": "",
            },
        )
        await hass.async_block_till_done()
    assert result3["type"] == "create_entry"
    assert result3["title"] == "ElkM1 ddeeff"
    assert result3["data"] == {
        "auto_configure": True,
        "host": "elks://127.0.0.1:2601",
        "password": "test-password",
        "prefix": "",
        "username": "test-username",
    }
    assert result3["result"].unique_id == MOCK_MAC
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_user_with_tls_elk_no_discovery(hass):
    """Test we can setup a secure elk."""
    with _patch_discovery(no_device=True):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_USER}
        )
        await hass.async_block_till_done()
    assert result["type"] == "form"
    assert result["errors"] == {}
    assert result["step_id"] == "manual_connection"
    mocked_elk = mock_elk(invalid_auth=False, sync_complete=True)
    with _patch_discovery(no_device=True), _patch_elk(elk=mocked_elk), patch(
        "homeassistant.components.elkm1.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.elkm1.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "protocol": "TLS 1.2",
                "address": "1.2.3.4",
                "username": "test-username",
                "password": "test-password",
                "prefix": "",
            },
        )
        await hass.async_block_till_done()
    # The "TLS 1.2" protocol choice maps to an elksv1_2:// host URL.
    assert result2["type"] == "create_entry"
    assert result2["title"] == "ElkM1"
    assert result2["data"] == {
        "auto_configure": True,
        "host": "elksv1_2://1.2.3.4",
        "password": "test-password",
        "prefix": "",
        "username": "test-username",
    }
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_user_with_non_secure_elk_no_discovery(hass):
    """Test we can setup a non-secure elk."""
    with _patch_discovery(no_device=True):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_USER}
        )
        await hass.async_block_till_done()
    assert result["type"] == "form"
    assert result["errors"] == {}
    assert result["step_id"] == "manual_connection"
    # invalid_auth=None: the mock never performs authentication at all.
    mocked_elk = mock_elk(invalid_auth=None, sync_complete=True)
    with _patch_discovery(no_device=True), _patch_elk(elk=mocked_elk), patch(
        "homeassistant.components.elkm1.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.elkm1.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        # Non-secure connections take no username/password.
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "protocol": "non-secure",
                "address": "1.2.3.4",
                "prefix": "guest_house",
            },
        )
        await hass.async_block_till_done()
    # A non-empty prefix becomes the entry title; credentials stay empty.
    assert result2["type"] == "create_entry"
    assert result2["title"] == "guest_house"
    assert result2["data"] == {
        "auto_configure": True,
        "host": "elk://1.2.3.4",
        "prefix": "guest_house",
        "username": "",
        "password": "",
    }
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_user_with_serial_elk_no_discovery(hass):
    """Test we can setup a serial elk."""
    with _patch_discovery(no_device=True):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_USER}
        )
        await hass.async_block_till_done()
    assert result["type"] == "form"
    assert result["errors"] == {}
    assert result["step_id"] == "manual_connection"
    mocked_elk = mock_elk(invalid_auth=None, sync_complete=True)
    with _patch_discovery(no_device=True), _patch_elk(elk=mocked_elk), patch(
        "homeassistant.components.elkm1.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.elkm1.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        # Serial connections use a device path with baud rate as "address".
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "protocol": "serial",
                "address": "/dev/ttyS0:115200",
                "prefix": "",
            },
        )
        await hass.async_block_till_done()
    assert result2["type"] == "create_entry"
    assert result2["title"] == "ElkM1"
    assert result2["data"] == {
        "auto_configure": True,
        "host": "serial:///dev/ttyS0:115200",
        "prefix": "",
        "username": "",
        "password": "",
    }
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_cannot_connect(hass):
    """Test we handle cannot connect error."""
    with _patch_discovery(no_device=True):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_USER}
        )
    # sync_complete=None: the mocked Elk never finishes syncing, so with the
    # timeouts patched to 0 the validation times out immediately.
    mocked_elk = mock_elk(invalid_auth=None, sync_complete=None)
    with _patch_discovery(no_device=True), _patch_elk(elk=mocked_elk), patch(
        "homeassistant.components.elkm1.config_flow.VALIDATE_TIMEOUT",
        0,
    ), patch(
        "homeassistant.components.elkm1.config_flow.LOGIN_TIMEOUT",
        0,
    ):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "protocol": "secure",
                "address": "1.2.3.4",
                "username": "test-username",
                "password": "test-password",
                "prefix": "",
            },
        )
    # The error is attached to the host field, and the form is shown again.
    assert result2["type"] == "form"
    assert result2["errors"] == {CONF_HOST: "cannot_connect"}
async def test_unknown_exception(hass):
    """Test we handle an unknown exception during connecting."""
    with _patch_discovery(no_device=True):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_USER}
        )
    # The mocked Elk raises OSError during connect to simulate an
    # unexpected failure.
    mocked_elk = mock_elk(invalid_auth=None, sync_complete=None, exception=OSError)
    with _patch_discovery(no_device=True), _patch_elk(elk=mocked_elk), patch(
        "homeassistant.components.elkm1.config_flow.VALIDATE_TIMEOUT",
        0,
    ), patch(
        "homeassistant.components.elkm1.config_flow.LOGIN_TIMEOUT",
        0,
    ):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "protocol": "secure",
                "address": "1.2.3.4",
                "username": "test-username",
                "password": "test-password",
                "prefix": "",
            },
        )
    # Unexpected exceptions surface as a generic "unknown" base error.
    assert result2["type"] == "form"
    assert result2["errors"] == {"base": "unknown"}
async def test_form_invalid_auth(hass):
    """Test we handle invalid auth error."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    # The mocked Elk reports an authentication failure despite syncing.
    mocked_elk = mock_elk(invalid_auth=True, sync_complete=True)
    with patch(
        "homeassistant.components.elkm1.config_flow.elkm1.Elk",
        return_value=mocked_elk,
    ):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "protocol": "secure",
                "address": "1.2.3.4",
                "username": "test-username",
                "password": "test-password",
                "prefix": "",
            },
        )
    # The error is attached to the password field.
    assert result2["type"] == "form"
    assert result2["errors"] == {CONF_PASSWORD: "invalid_auth"}
async def test_form_invalid_auth_no_password(hass):
    """Test we handle invalid auth error when no password is provided."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    mocked_elk = mock_elk(invalid_auth=True, sync_complete=True)
    with patch(
        "homeassistant.components.elkm1.config_flow.elkm1.Elk",
        return_value=mocked_elk,
    ):
        # Empty password on a secure connection must also map to invalid_auth.
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "protocol": "secure",
                "address": "1.2.3.4",
                "username": "test-username",
                "password": "",
                "prefix": "",
            },
        )
    assert result2["type"] == "form"
    assert result2["errors"] == {CONF_PASSWORD: "invalid_auth"}
async def test_form_import(hass):
    """Test we get the form with import source."""
    mocked_elk = mock_elk(invalid_auth=False, sync_complete=True)
    # No discovered device: the import path must still create the entry
    # purely from the YAML-style payload below.
    with _patch_discovery(no_device=True), _patch_elk(elk=mocked_elk), patch(
        "homeassistant.components.elkm1.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.elkm1.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data={
                "host": "elks://1.2.3.4",
                "username": "friend",
                "password": "love",
                "temperature_unit": "C",
                "auto_configure": False,
                "keypad": {
                    "enabled": True,
                    "exclude": [],
                    "include": [[1, 1], [2, 2], [3, 3]],
                },
                "output": {"enabled": False, "exclude": [], "include": []},
                "counter": {"enabled": False, "exclude": [], "include": []},
                "plc": {"enabled": False, "exclude": [], "include": []},
                "prefix": "ohana",
                "setting": {"enabled": False, "exclude": [], "include": []},
                "area": {"enabled": False, "exclude": [], "include": []},
                "task": {"enabled": False, "exclude": [], "include": []},
                "thermostat": {"enabled": False, "exclude": [], "include": []},
                "zone": {
                    "enabled": True,
                    "exclude": [[15, 15], [28, 208]],
                    "include": [],
                },
            },
        )
        await hass.async_block_till_done()

    assert result["type"] == "create_entry"
    assert result["title"] == "ohana"
    # The entry data must round-trip the imported configuration unchanged.
    assert result["data"] == {
        "auto_configure": False,
        "host": "elks://1.2.3.4",
        "keypad": {"enabled": True, "exclude": [], "include": [[1, 1], [2, 2], [3, 3]]},
        "output": {"enabled": False, "exclude": [], "include": []},
        "password": "love",
        "plc": {"enabled": False, "exclude": [], "include": []},
        "prefix": "ohana",
        "setting": {"enabled": False, "exclude": [], "include": []},
        "area": {"enabled": False, "exclude": [], "include": []},
        "counter": {"enabled": False, "exclude": [], "include": []},
        "task": {"enabled": False, "exclude": [], "include": []},
        "temperature_unit": "C",
        "thermostat": {"enabled": False, "exclude": [], "include": []},
        "username": "friend",
        "zone": {"enabled": True, "exclude": [[15, 15], [28, 208]], "include": []},
    }
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_import_device_discovered(hass):
    """Test we can import with discovery."""
    mocked_elk = mock_elk(invalid_auth=False, sync_complete=True)
    # Discovery succeeds here (default _patch_discovery), so the created
    # entry additionally gets the discovered MAC as its unique id.
    with _patch_discovery(), _patch_elk(elk=mocked_elk), patch(
        "homeassistant.components.elkm1.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.elkm1.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data={
                "host": "elks://127.0.0.1",
                "username": "friend",
                "password": "love",
                "temperature_unit": "C",
                "auto_configure": False,
                "keypad": {
                    "enabled": True,
                    "exclude": [],
                    "include": [[1, 1], [2, 2], [3, 3]],
                },
                "output": {"enabled": False, "exclude": [], "include": []},
                "counter": {"enabled": False, "exclude": [], "include": []},
                "plc": {"enabled": False, "exclude": [], "include": []},
                "prefix": "ohana",
                "setting": {"enabled": False, "exclude": [], "include": []},
                "area": {"enabled": False, "exclude": [], "include": []},
                "task": {"enabled": False, "exclude": [], "include": []},
                "thermostat": {"enabled": False, "exclude": [], "include": []},
                "zone": {
                    "enabled": True,
                    "exclude": [[15, 15], [28, 208]],
                    "include": [],
                },
            },
        )
        await hass.async_block_till_done()

    assert result["type"] == "create_entry"
    assert result["title"] == "ohana"
    # Discovered MAC becomes the config entry unique id.
    assert result["result"].unique_id == MOCK_MAC
    assert result["data"] == {
        "auto_configure": False,
        "host": "elks://127.0.0.1",
        "keypad": {"enabled": True, "exclude": [], "include": [[1, 1], [2, 2], [3, 3]]},
        "output": {"enabled": False, "exclude": [], "include": []},
        "password": "love",
        "plc": {"enabled": False, "exclude": [], "include": []},
        "prefix": "ohana",
        "setting": {"enabled": False, "exclude": [], "include": []},
        "area": {"enabled": False, "exclude": [], "include": []},
        "counter": {"enabled": False, "exclude": [], "include": []},
        "task": {"enabled": False, "exclude": [], "include": []},
        "temperature_unit": "C",
        "thermostat": {"enabled": False, "exclude": [], "include": []},
        "username": "friend",
        "zone": {"enabled": True, "exclude": [[15, 15], [28, 208]], "include": []},
    }
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
@pytest.mark.parametrize(
    "source, data",
    [
        (config_entries.SOURCE_DHCP, DHCP_DISCOVERY),
        (config_entries.SOURCE_DISCOVERY, ELK_DISCOVERY_INFO),
    ],
)
async def test_discovered_by_dhcp_or_discovery_mac_address_mismatch_host_already_configured(
    hass, source, data
):
    """Test we abort if the host is already configured but the mac does not match."""
    # Existing entry for the same host but a different MAC than discovery reports.
    config_entry = MockConfigEntry(
        domain=DOMAIN,
        data={CONF_HOST: f"elks://{MOCK_IP_ADDRESS}"},
        unique_id="cc:cc:cc:cc:cc:cc",
    )
    config_entry.add_to_hass(hass)

    with _patch_discovery(), _patch_elk():
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": source}, data=data
        )
        await hass.async_block_till_done()

    assert result["type"] == RESULT_TYPE_ABORT
    assert result["reason"] == "already_configured"
    # The mismatching discovery must NOT overwrite the entry's unique id.
    assert config_entry.unique_id == "cc:cc:cc:cc:cc:cc"
@pytest.mark.parametrize(
    "source, data",
    [
        (config_entries.SOURCE_DHCP, DHCP_DISCOVERY),
        (config_entries.SOURCE_DISCOVERY, ELK_DISCOVERY_INFO),
    ],
)
async def test_discovered_by_dhcp_or_discovery_adds_missing_unique_id(
    hass, source, data
):
    """Test we add a missing unique id to the config entry."""
    # Existing entry for the discovered host, created without a unique id.
    config_entry = MockConfigEntry(
        domain=DOMAIN,
        data={CONF_HOST: f"elks://{MOCK_IP_ADDRESS}"},
    )
    config_entry.add_to_hass(hass)

    with _patch_discovery(), _patch_elk():
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": source}, data=data
        )
        await hass.async_block_till_done()

    assert result["type"] == RESULT_TYPE_ABORT
    assert result["reason"] == "already_configured"
    # The flow aborts but back-fills the discovered MAC onto the entry.
    assert config_entry.unique_id == MOCK_MAC
async def test_discovered_by_discovery_and_dhcp(hass):
    """Test we get the form with discovery and abort for dhcp source when we get both."""
    # First discovery starts a normal flow and shows the form.
    with _patch_discovery(), _patch_elk():
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_DISCOVERY},
            data=ELK_DISCOVERY_INFO,
        )
        await hass.async_block_till_done()
    assert result["type"] == RESULT_TYPE_FORM
    assert result["errors"] == {}

    # A DHCP discovery for the same device must abort: a flow is in progress.
    with _patch_discovery(), _patch_elk():
        result2 = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_DHCP},
            data=DHCP_DISCOVERY,
        )
        await hass.async_block_till_done()
    assert result2["type"] == RESULT_TYPE_ABORT
    assert result2["reason"] == "already_in_progress"

    # Same IP but a different MAC also aborts — matching is presumably done
    # by host here; TODO confirm against the config_flow implementation.
    with _patch_discovery(), _patch_elk():
        result3 = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_DHCP},
            data=dhcp.DhcpServiceInfo(
                hostname="any",
                ip=MOCK_IP_ADDRESS,
                macaddress="00:00:00:00:00:00",
            ),
        )
        await hass.async_block_till_done()
    assert result3["type"] == RESULT_TYPE_ABORT
    assert result3["reason"] == "already_in_progress"
async def test_discovered_by_discovery(hass):
    """Test we can setup when discovered from discovery."""
    with _patch_discovery(), _patch_elk():
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_DISCOVERY},
            data=ELK_DISCOVERY_INFO,
        )
        await hass.async_block_till_done()

    # Discovery lands on the dedicated "discovered_connection" step where
    # only credentials are requested (host comes from discovery).
    assert result["type"] == RESULT_TYPE_FORM
    assert result["step_id"] == "discovered_connection"
    assert result["errors"] == {}

    mocked_elk = mock_elk(invalid_auth=False, sync_complete=True)
    with _patch_discovery(), _patch_elk(elk=mocked_elk), patch(
        "homeassistant.components.elkm1.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.elkm1.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "username": "test-username",
                "password": "test-password",
            },
        )
        await hass.async_block_till_done()

    assert result2["type"] == "create_entry"
    assert result2["title"] == "ElkM1 ddeeff"
    # Host is filled in from discovery (secure port 2601), not user input.
    assert result2["data"] == {
        "auto_configure": True,
        "host": "elks://127.0.0.1:2601",
        "password": "test-password",
        "prefix": "",
        "username": "test-username",
    }
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_discovered_by_discovery_url_already_configured(hass):
    """Test we abort when we discover a device that is already setup."""
    # An entry for the discovered host already exists (different unique id).
    entry = MockConfigEntry(
        domain=DOMAIN,
        data={CONF_HOST: f"elks://{MOCK_IP_ADDRESS}"},
        unique_id="cc:cc:cc:cc:cc:cc",
    )
    entry.add_to_hass(hass)

    with _patch_discovery(), _patch_elk():
        flow_result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_DISCOVERY},
            data=ELK_DISCOVERY_INFO,
        )
        await hass.async_block_till_done()

    assert flow_result["type"] == RESULT_TYPE_ABORT
    assert flow_result["reason"] == "already_configured"
async def test_discovered_by_dhcp_udp_responds(hass):
    """Test we can setup when discovered from dhcp but with udp response."""
    with _patch_discovery(), _patch_elk():
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=DHCP_DISCOVERY
        )
        await hass.async_block_till_done()

    # UDP probe succeeded, so DHCP discovery proceeds to the credentials step.
    assert result["type"] == RESULT_TYPE_FORM
    assert result["step_id"] == "discovered_connection"
    assert result["errors"] == {}

    mocked_elk = mock_elk(invalid_auth=False, sync_complete=True)
    with _patch_discovery(), _patch_elk(elk=mocked_elk), patch(
        "homeassistant.components.elkm1.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.elkm1.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "username": "test-username",
                "password": "test-password",
            },
        )
        await hass.async_block_till_done()

    assert result2["type"] == "create_entry"
    assert result2["title"] == "ElkM1 ddeeff"
    # Secure (elks://) host on port 2601 comes from the UDP discovery payload.
    assert result2["data"] == {
        "auto_configure": True,
        "host": "elks://127.0.0.1:2601",
        "password": "test-password",
        "prefix": "",
        "username": "test-username",
    }
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_discovered_by_dhcp_udp_responds_with_nonsecure_port(hass):
    """Test we can setup when discovered from dhcp but with udp response using the non-secure port."""
    with _patch_discovery(device=ELK_NON_SECURE_DISCOVERY), _patch_elk():
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=DHCP_DISCOVERY
        )
        await hass.async_block_till_done()

    assert result["type"] == RESULT_TYPE_FORM
    assert result["step_id"] == "discovered_connection"
    assert result["errors"] == {}

    mocked_elk = mock_elk(invalid_auth=False, sync_complete=True)
    with _patch_discovery(device=ELK_NON_SECURE_DISCOVERY), _patch_elk(
        elk=mocked_elk
    ), patch(
        "homeassistant.components.elkm1.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.elkm1.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "username": "test-username",
                "password": "test-password",
            },
        )
        await hass.async_block_till_done()

    assert result2["type"] == "create_entry"
    assert result2["title"] == "ElkM1 ddeeff"
    # Non-secure discovery yields a plain elk:// URL on port 2101 (vs the
    # elks://...:2601 secure variant asserted in the sibling test).
    assert result2["data"] == {
        "auto_configure": True,
        "host": "elk://127.0.0.1:2101",
        "password": "test-password",
        "prefix": "",
        "username": "test-username",
    }
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_discovered_by_dhcp_udp_responds_existing_config_entry(hass):
    """Test we can setup when discovered from dhcp but with udp response with an existing config entry."""
    # An unrelated entry (different host) already exists in the registry.
    config_entry = MockConfigEntry(
        domain=DOMAIN,
        data={CONF_HOST: "elks://6.6.6.6"},
        unique_id="cc:cc:cc:cc:cc:cc",
    )
    config_entry.add_to_hass(hass)

    with _patch_discovery(), _patch_elk():
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=DHCP_DISCOVERY
        )
        await hass.async_block_till_done()

    assert result["type"] == RESULT_TYPE_FORM
    assert result["step_id"] == "discovered_connection"
    assert result["errors"] == {}

    mocked_elk = mock_elk(invalid_auth=False, sync_complete=True)
    with _patch_discovery(), _patch_elk(elk=mocked_elk), patch(
        "homeassistant.components.elkm1.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.elkm1.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "username": "test-username",
                "password": "test-password",
            },
        )
        await hass.async_block_till_done()

    assert result2["type"] == "create_entry"
    assert result2["title"] == "ElkM1 ddeeff"
    # Prefix "ddeeff" is assigned here (the default "" is taken by the
    # pre-existing entry), unlike the no-existing-entry variant of this test.
    assert result2["data"] == {
        "auto_configure": True,
        "host": "elks://127.0.0.1:2601",
        "password": "test-password",
        "prefix": "ddeeff",
        "username": "test-username",
    }
    assert len(mock_setup.mock_calls) == 1
    # NOTE(review): two calls — presumably the pre-existing entry is set up
    # alongside the new one; confirm against async_setup_entry behavior.
    assert len(mock_setup_entry.mock_calls) == 2
async def test_discovered_by_dhcp_no_udp_response(hass):
    """Test we can setup when discovered from dhcp but no udp response."""
    # With no device answering the UDP probe, the DHCP flow cannot proceed.
    with _patch_discovery(no_device=True), _patch_elk():
        flow_result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=DHCP_DISCOVERY
        )
        await hass.async_block_till_done()

    assert flow_result["type"] == RESULT_TYPE_ABORT
    assert flow_result["reason"] == "cannot_connect"
| 35.206004
| 110
| 0.601341
| 3,819
| 34,009
| 5.083268
| 0.05237
| 0.037089
| 0.033998
| 0.049863
| 0.926235
| 0.91583
| 0.905115
| 0.896925
| 0.879823
| 0.875856
| 0
| 0.015306
| 0.260372
| 34,009
| 965
| 111
| 35.242487
| 0.75646
| 0.001059
| 0
| 0.760351
| 0
| 0
| 0.199072
| 0.057523
| 0
| 0
| 0
| 0
| 0.164366
| 1
| 0
| false
| 0.041405
| 0.017566
| 0
| 0.017566
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
76de0791af00471585d4762002be733dc1b2ae65
| 114
|
py
|
Python
|
astropy/visualization/wcsaxes/tests/setup_package.py
|
REMeyer/astropy
|
28c49fb618538a01812e586cd07bccdf0591a6c6
|
[
"BSD-3-Clause"
] | 8
|
2019-04-27T01:19:45.000Z
|
2020-09-21T03:31:01.000Z
|
astropy/visualization/wcsaxes/tests/setup_package.py
|
REMeyer/astropy
|
28c49fb618538a01812e586cd07bccdf0591a6c6
|
[
"BSD-3-Clause"
] | 11
|
2017-12-18T16:27:29.000Z
|
2018-08-29T14:54:22.000Z
|
astropy/visualization/wcsaxes/tests/setup_package.py
|
REMeyer/astropy
|
28c49fb618538a01812e586cd07bccdf0591a6c6
|
[
"BSD-3-Clause"
] | 5
|
2019-04-27T01:19:47.000Z
|
2020-09-20T15:15:19.000Z
|
def get_package_data():
    """Return the package-data mapping for the wcsaxes test suite.

    Maps the test package name to the glob patterns for baseline
    comparison images and auxiliary data files bundled with the tests.
    """
    tests_package = 'astropy.visualization.wcsaxes.tests'
    data_patterns = ['baseline_images/*/*.png', 'data/*']
    return {tests_package: data_patterns}
| 38
| 89
| 0.692982
| 13
| 114
| 5.846154
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087719
| 114
| 2
| 90
| 57
| 0.730769
| 0
| 0
| 0
| 0
| 0
| 0.561404
| 0.508772
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
0a059c3d4cebb1ffcdd929eb3981086d8398ebbb
| 13,780
|
py
|
Python
|
easytransfer/layers/mask.py
|
mczhuge/Kaleido-BERT
|
50579660fb8dc1e250c7cc40e0f10294c54532e3
|
[
"MIT"
] | 109
|
2021-04-14T04:15:53.000Z
|
2022-03-24T05:24:43.000Z
|
easytransfer/layers/mask.py
|
NoLoPhe/Kaleido-BERT
|
1b14073e3ad3490c50bbd1e7e94846830671b332
|
[
"MIT"
] | 12
|
2021-04-18T13:21:07.000Z
|
2022-01-27T09:42:51.000Z
|
easytransfer/layers/mask.py
|
NoLoPhe/Kaleido-BERT
|
1b14073e3ad3490c50bbd1e7e94846830671b332
|
[
"MIT"
] | 12
|
2021-04-25T08:40:09.000Z
|
2022-03-24T08:56:29.000Z
|
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
# Copyright (c) 2019 Alibaba PAI team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow as tf
from .utils import get_shape_list, get_shape_list_imagebert
def get_attn_mask_xlnet(inputs):
    """Build an XLNet-style 4-D attention mask from a 2-D input mask.

    Args:
        inputs: float mask of shape [batch_size, target_len]; positive
            entries mark positions to be masked out of attention.

    Returns:
        float32 tensor of shape [target_len, target_len, batch_size, 1]
        with 1.0 at masked positions.
    """
    input_mask = inputs
    batch_size = tf.shape(input_mask)[0]
    target_len = tf.shape(input_mask)[1]
    # Transpose to [target_len, batch_size] and add a leading query axis.
    input_mask_trans = tf.transpose(input_mask)
    data_mask = input_mask_trans[None]
    # Memory segment is empty here (second dim 0), so the concat is a no-op
    # placeholder for the usual XLNet mems mechanism.
    mems_mask = tf.zeros([tf.shape(data_mask)[0], 0, batch_size], dtype=tf.float32)
    data_mask = tf.concat([mems_mask, data_mask], 1)
    attn_mask = data_mask[:, :, :, None]
    # Binarize: any positive value means "masked".
    attn_mask = tf.cast(attn_mask > 0, dtype=tf.float32)
    # -eye un-masks each token's attention to itself (non-target mask).
    non_tgt_mask = -tf.eye(target_len, dtype=tf.float32)
    # Zero-width concat: placeholder for prepending a mems block, no-op here.
    non_tgt_mask = tf.concat([tf.zeros([target_len, 0], dtype=tf.float32), non_tgt_mask], axis=-1)
    attn_mask = tf.cast((attn_mask + non_tgt_mask[:, :, None, None]) > 0, dtype=tf.float32)
    return attn_mask
def get_attn_mask_bert(from_tensor, to_mask):
    """Create 3D attention mask from a 2D tensor mask.

    Args:
        from_tensor: 2D or 3D Tensor of shape [batch_size, from_seq_length, ...].
        to_mask: int32 Tensor of shape [batch_size, to_seq_length].

    Returns:
        float Tensor of shape [batch_size, from_seq_length, to_seq_length].
    """
    from_shape = get_shape_list(from_tensor)
    batch_size = from_shape[0]
    from_seq_length = from_shape[1]

    to_shape = get_shape_list(to_mask)
    to_seq_length = to_shape[1]

    # [batch_size, 1, to_seq_length] so it broadcasts over query positions.
    to_mask = tf.cast(
        tf.reshape(to_mask, [batch_size, 1, to_seq_length]), tf.float32)

    # We don't assume that `from_tensor` is a mask (although it could be). We
    # don't actually care if we attend *from* padding tokens (only *to* padding)
    # tokens so we create a tensor of all ones.
    #
    # `broadcast_ones` = [batch_size, from_seq_length, 1]
    broadcast_ones = tf.ones(
        shape=[batch_size, from_seq_length, 1], dtype=tf.float32)

    # Here we broadcast along two dimensions to create the mask.
    mask = broadcast_ones * to_mask
    return mask
def get_attn_mask_kaleidobert(input_ids, input_mask,
                              img_feature_convert_rotation, img_feature_rotation_mask,
                              img_feature_convert_jigsaw, img_feature_jigsaw_mask,
                              img_feature_convert_camouflage, img_feature_camouflage_mask,
                              img_feature_convert_greymask, img_feature_greymask_mask,
                              img_feature_convert_blankmask, img_feature_blankmask_mask):
    """Build a BERT-style [batch, from_len, to_len] attention mask for
    KaleidoBERT's concatenated text + five image-patch streams (rotation,
    jigsaw, camouflage, greymask, blankmask).

    The per-stream masks are concatenated along the sequence axis in the same
    order the streams are concatenated in the model input; the `*_convert_*`
    feature tensors are used only to obtain the from-side sequence lengths.
    """
    # From-side (query) lengths come from the feature tensors.
    from_text_shape = get_shape_list_imagebert(input_ids, expected_rank=[2, 3])
    batch_size = from_text_shape[0]
    from_text_length = from_text_shape[1]
    from_rotation_shape = get_shape_list_imagebert(img_feature_convert_rotation, expected_rank=[2, 3])
    from_rotation_length = from_rotation_shape[1]
    from_jigsaw_shape = get_shape_list_imagebert(img_feature_convert_jigsaw, expected_rank=[2, 3])
    from_jigsaw_length = from_jigsaw_shape[1]
    from_camouflage_shape = get_shape_list_imagebert(img_feature_convert_camouflage, expected_rank=[2, 3])
    from_camouflage_length = from_camouflage_shape[1]
    from_greymask_shape = get_shape_list_imagebert(img_feature_convert_greymask, expected_rank=[2, 3])
    from_greymask_length = from_greymask_shape[1]
    from_blankmask_shape = get_shape_list_imagebert(img_feature_convert_blankmask, expected_rank=[2, 3])
    from_blankmask_length = from_blankmask_shape[1]

    # To-side (key) lengths come from the mask tensors.
    to_text_shape = get_shape_list_imagebert(input_mask, expected_rank=2)
    to_text_length = to_text_shape[1]
    to_rotation_shape = get_shape_list_imagebert(img_feature_rotation_mask, expected_rank=2)
    to_rotation_length = to_rotation_shape[1]
    to_jigsaw_shape = get_shape_list_imagebert(img_feature_jigsaw_mask, expected_rank=2)
    to_jigsaw_length = to_jigsaw_shape[1]
    to_camouflage_shape = get_shape_list_imagebert(img_feature_camouflage_mask, expected_rank=2)
    to_camouflage_length = to_camouflage_shape[1]
    to_greymask_shape = get_shape_list_imagebert(img_feature_greymask_mask, expected_rank=2)
    to_greymask_length = to_greymask_shape[1]
    to_blankmask_shape = get_shape_list_imagebert(img_feature_blankmask_mask, expected_rank=2)
    to_blankmask_length = to_blankmask_shape[1]

    # Normalize all masks to int32 before concatenating.
    input_mask = tf.cast(input_mask, tf.int32)
    to_rotation_mask = tf.cast(img_feature_rotation_mask, tf.int32)
    to_jigsaw_mask = tf.cast(img_feature_jigsaw_mask, tf.int32)
    to_camouflage_mask = tf.cast(img_feature_camouflage_mask, tf.int32)
    to_greymask_mask = tf.cast(img_feature_greymask_mask, tf.int32)
    to_blankmask_mask = tf.cast(img_feature_blankmask_mask, tf.int32)

    # One combined key mask over text followed by the five image streams.
    to_mask = tf.concat([input_mask, to_rotation_mask,
                         to_jigsaw_mask, to_camouflage_mask,
                         to_greymask_mask, to_blankmask_mask], axis=1)
    to_seq_length = to_text_length + to_rotation_length + \
                    to_jigsaw_length + to_camouflage_length + \
                    to_greymask_length + to_blankmask_length
    from_seq_length = from_text_length + from_rotation_length + \
                      from_jigsaw_length + from_camouflage_length + \
                      from_greymask_length + from_blankmask_length

    # Same broadcast trick as get_attn_mask_bert:
    # [B, 1, to_len] * [B, from_len, 1] -> [B, from_len, to_len].
    to_mask = tf.cast(
        tf.reshape(to_mask, [batch_size, 1, to_seq_length]), tf.float32)
    broadcast_ones = tf.ones(
        shape=[batch_size, from_seq_length, 1], dtype=tf.float32)
    mask = broadcast_ones * to_mask
    return mask
def get_attn_mask_fashionbert2(input_ids, input_mask,
                               img_feature_convert_rotation, img_feature_rotation_mask,
                               img_feature_convert_jigsaw, img_feature_jigsaw_mask,
                               img_feature_convert_camouflage, img_feature_camouflage_mask,
                               img_feature_convert_greymask, img_feature_greymask_mask,
                               img_feature_convert_blankmask, img_feature_blankmask_mask):
    """Build the FashionBERT-v2 attention mask.

    The mask construction is identical to KaleidoBERT's (concatenated text +
    five image-patch streams broadcast into a [batch, from_len, to_len]
    float mask), so this delegates to :func:`get_attn_mask_kaleidobert`
    instead of duplicating its 50-line body.

    Returns:
        float32 Tensor of shape [batch_size, from_seq_length, to_seq_length].
    """
    # Original body was a verbatim copy of get_attn_mask_kaleidobert;
    # delegating keeps the two models' masking logic in one place.
    return get_attn_mask_kaleidobert(
        input_ids, input_mask,
        img_feature_convert_rotation, img_feature_rotation_mask,
        img_feature_convert_jigsaw, img_feature_jigsaw_mask,
        img_feature_convert_camouflage, img_feature_camouflage_mask,
        img_feature_convert_greymask, img_feature_greymask_mask,
        img_feature_convert_blankmask, img_feature_blankmask_mask)
def get_attn_mask_imagebert(from_text_ids,
                            to_text_mask, from_image_feature, to_image_mask):
    """Build a [batch, from_len, to_len] attention mask for ImageBERT's
    concatenated text + image input.

    Args:
        from_text_ids: 2D/3D tensor giving the text query length.
        to_text_mask: [batch, text_len] key mask for the text stream.
        from_image_feature: 2D/3D tensor giving the image query length.
        to_image_mask: [batch, image_len] key mask for the image stream.

    Returns:
        float32 Tensor of shape [batch, text_len + image_len, text_len + image_len].
    """
    from_text_shape = get_shape_list_imagebert(from_text_ids, expected_rank=[2, 3])
    batch_size = from_text_shape[0]
    from_text_seq_length = from_text_shape[1]
    # print("Create FB mask - from_text_shape: ", from_text_shape)

    from_image_shape = get_shape_list_imagebert(from_image_feature, expected_rank=[2, 3])
    from_image_seq_length = from_image_shape[1]
    # print("Create FB mask - from_image_shape: ", from_image_shape)

    to_text_shape = get_shape_list_imagebert(to_text_mask, expected_rank=2)
    to_text_seq_length = to_text_shape[1]
    # print("Create FB mask - to_text_shape: ", to_text_shape)

    to_image_shape = get_shape_list_imagebert(to_image_mask, expected_rank=2)
    to_image_seq_length = to_image_shape[1]
    # print("Create FB mask - to_image_shape: ", to_image_shape)

    # Concatenate the two key masks: text first, then image.
    to_image_mask = tf.cast(to_image_mask, tf.int32)
    to_text_mask = tf.cast(to_text_mask, tf.int32)
    to_mask = tf.concat([to_text_mask, to_image_mask], axis=1)
    to_seq_length = to_text_seq_length + to_image_seq_length
    from_seq_length = from_text_seq_length + from_image_seq_length

    # Broadcast [B, 1, to_len] against [B, from_len, 1], as in get_attn_mask_bert.
    to_mask = tf.cast(
        tf.reshape(to_mask, [batch_size, 1, to_seq_length]), tf.float32)
    # print("Create FB mask - to_mask_shape: ", to_mask.shape)
    broadcast_ones = tf.ones(
        shape=[batch_size, from_seq_length, 1], dtype=tf.float32)
    mask = broadcast_ones * to_mask
    # print("Create FB mask - mask_shape: ", mask.shape)
    return mask
def get_attn_mask_videobert(from_text_ids,
                            to_text_mask, from_image_feature, to_image_mask):
    """Build the VideoBERT attention mask for concatenated text + frame input.

    The construction is identical to ImageBERT's (text and image key masks
    concatenated, then broadcast into a [batch, from_len, to_len] float
    mask), so this delegates to :func:`get_attn_mask_imagebert` instead of
    duplicating its body.

    Returns:
        float32 Tensor of shape [batch, from_seq_length, to_seq_length].
    """
    # Original body was a verbatim copy of get_attn_mask_imagebert;
    # delegating keeps the masking logic in one place.
    return get_attn_mask_imagebert(
        from_text_ids, to_text_mask, from_image_feature, to_image_mask)
def create_look_ahead_mask(from_tensor):
    """Build a causal (look-ahead) attention mask.

    Args:
        from_tensor: 2D or 3D Tensor whose first two dims give
            [batch_size, from_seq_length].

    Returns:
        float32 Tensor of shape [batch_size, from_seq_length, from_seq_length]
        that is lower-triangular in its last two dims (1.0 where position i
        may attend to position j <= i).
    """
    from_shape = get_shape_list(from_tensor, expected_rank=[2, 3])
    batch_size = from_shape[0]
    from_seq_length = from_shape[1]
    # band_part(ones, -1, 0) keeps the lower triangle including the diagonal.
    mask = tf.linalg.band_part(tf.ones((from_seq_length, from_seq_length)), -1, 0)
    mask = tf.cast(
        tf.reshape(mask, [1, from_seq_length, from_seq_length]), tf.float32)
    # Tile across the batch via broadcasting.
    broadcast_ones = tf.ones(
        shape=[batch_size, from_seq_length, 1], dtype=tf.float32)
    mask = broadcast_ones * mask
    return mask
def create_padding_mask(from_tensor):
    """Build a padding attention mask from token ids.

    Args:
        from_tensor: 2D or 3D Tensor whose first two dims give
            [batch_size, from_seq_length]; id 0 is treated as padding.

    Returns:
        float32 Tensor of shape [batch_size, from_seq_length, from_seq_length]
        with 1.0 in every column corresponding to a non-padding key position.
    """
    from_shape = get_shape_list(from_tensor, expected_rank=[2, 3])
    batch_size = from_shape[0]
    from_seq_length = from_shape[1]
    # 1.0 for real tokens, 0.0 where the id equals the padding id 0.
    mask = 1 - tf.cast(tf.math.equal(from_tensor, 0), tf.float32)
    mask = tf.cast(
        tf.reshape(mask, [batch_size, 1, from_seq_length]), tf.float32)
    # Broadcast over query positions to get the square mask.
    broadcast_ones = tf.ones(
        shape=[batch_size, from_seq_length, 1], dtype=tf.float32)
    mask = broadcast_ones * mask
    return mask
| 47.517241
| 106
| 0.725762
| 2,034
| 13,780
| 4.459685
| 0.083579
| 0.033072
| 0.05027
| 0.067468
| 0.843898
| 0.839488
| 0.821519
| 0.809503
| 0.809503
| 0.801345
| 0
| 0.019662
| 0.191727
| 13,780
| 289
| 107
| 47.681661
| 0.794757
| 0.137228
| 0
| 0.848485
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040404
| false
| 0
| 0.010101
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a14242c2dc403287304f08400f1506967a20729
| 96,753
|
py
|
Python
|
tensorflow_data_validation/statistics/generators/basic_stats_generator_test.py
|
Mikehem/tfx
|
e803ea6778d8550ec77dcc92bc8172f1a3a90f38
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_data_validation/statistics/generators/basic_stats_generator_test.py
|
Mikehem/tfx
|
e803ea6778d8550ec77dcc92bc8172f1a3a90f38
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_data_validation/statistics/generators/basic_stats_generator_test.py
|
Mikehem/tfx
|
e803ea6778d8550ec77dcc92bc8172f1a3a90f38
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for basic statistics generator."""
import sys
sys.path.append("/home/MD00560695/workdir/tfx")
from absl.testing import absltest
from absl.testing import parameterized
import numpy as np
import pyarrow as pa
from tensorflow_data_validation import types
from tensorflow_data_validation.statistics.generators import basic_stats_generator
from tensorflow_data_validation.utils import test_util
from tensorflow_data_validation.utils.example_weight_map import ExampleWeightMap
from google.protobuf import text_format
from tensorflow_metadata.proto.v0 import schema_pb2
from tensorflow_metadata.proto.v0 import statistics_pb2
class BasicStatsGeneratorTest(test_util.CombinerStatsGeneratorTest):
  def test_single_feature(self):
    """Golden-value check of basic stats for one FLOAT feature."""
    # input with two batches: first batch has two examples and second batch
    # has a single example.
    b1 = pa.RecordBatch.from_arrays([pa.array([[1.0, 2.0], [3.0, 4.0, 5.0]])],
                                    ['a'])
    b2 = pa.RecordBatch.from_arrays([pa.array([[1.0]])], ['a'])
    batches = [b1, b2]
    # Expected proto: common stats over 3 value lists (6 values total), a
    # 4-bucket num-values quantiles histogram, a 3-bucket standard histogram
    # and a 4-bucket quantiles histogram — matching the generator's
    # bucket-count arguments below.
    expected_result = {
        types.FeaturePath(['a']): text_format.Parse(
            """
            path {
              step: 'a'
            }
            type: FLOAT
            num_stats {
              common_stats {
                num_non_missing: 3
                min_num_values: 1
                max_num_values: 3
                avg_num_values: 2.0
                tot_num_values: 6
                num_values_histogram {
                  buckets {
                    low_value: 1.0
                    high_value: 1.0
                    sample_count: 0.75
                  }
                  buckets {
                    low_value: 1.0
                    high_value: 2.0
                    sample_count: 0.75
                  }
                  buckets {
                    low_value: 2.0
                    high_value: 3.0
                    sample_count: 0.75
                  }
                  buckets {
                    low_value: 3.0
                    high_value: 3.0
                    sample_count: 0.75
                  }
                  type: QUANTILES
                }
              }
              mean: 2.66666666
              std_dev: 1.49071198
              num_zeros: 0
              min: 1.0
              max: 5.0
              median: 3.0
              histograms {
                buckets {
                  low_value: 1.0
                  high_value: 2.3333333
                  sample_count: 2.9866667
                }
                buckets {
                  low_value: 2.3333333
                  high_value: 3.6666667
                  sample_count: 1.0066667
                }
                buckets {
                  low_value: 3.6666667
                  high_value: 5.0
                  sample_count: 2.0066667
                }
                type: STANDARD
              }
              histograms {
                buckets {
                  low_value: 1.0
                  high_value: 1.0
                  sample_count: 1.5
                }
                buckets {
                  low_value: 1.0
                  high_value: 3.0
                  sample_count: 1.5
                }
                buckets {
                  low_value: 3.0
                  high_value: 4.0
                  sample_count: 1.5
                }
                buckets {
                  low_value: 4.0
                  high_value: 5.0
                  sample_count: 1.5
                }
                type: QUANTILES
              }
            }
            """, statistics_pb2.FeatureNameStatistics())}
    generator = basic_stats_generator.BasicStatsGenerator(
        num_values_histogram_buckets=4, num_histogram_buckets=3,
        num_quantiles_histogram_buckets=4)
    self.assertCombinerOutputEqual(batches, generator, expected_result)
def test_infinity(self):
  """Tests numeric stats when the feature contains +/- infinity values."""
  # input with two batches: first batch has two examples and second batch
  # has a single example.
  b1 = pa.RecordBatch.from_arrays([
      pa.array([[1.0, 2.0, np.inf, np.inf, -np.inf], [3.0, 4.0, 5.0, -np.inf]
               ])
  ], ['a'])
  b2 = pa.RecordBatch.from_arrays([pa.array([[1.0, np.inf, -np.inf]])], ['a'])
  batches = [b1, b2]
  # With mixed +inf/-inf in the values, the mean is NaN and std_dev is not
  # populated; min/max become -inf/inf and histogram edge buckets extend to
  # the infinities.
  expected_result = {
      types.FeaturePath(['a']): text_format.Parse(
          """
          path {
            step: 'a'
          }
          type: FLOAT
          num_stats {
            common_stats {
              num_non_missing: 3
              min_num_values: 3
              max_num_values: 5
              avg_num_values: 4.0
              tot_num_values: 12
              num_values_histogram {
                buckets {
                  low_value: 3.0
                  high_value: 3.0
                  sample_count: 0.75
                }
                buckets {
                  low_value: 3.0
                  high_value: 4.0
                  sample_count: 0.75
                }
                buckets {
                  low_value: 4.0
                  high_value: 5.0
                  sample_count: 0.75
                }
                buckets {
                  low_value: 5.0
                  high_value: 5.0
                  sample_count: 0.75
                }
                type: QUANTILES
              }
            }
            mean: nan
            num_zeros: 0
            min: -inf
            max: inf
            median: 3.0
            histograms {
              buckets {
                low_value: -inf
                high_value: 2.0
                sample_count: 4.5
              }
              buckets {
                low_value: 2.0
                high_value: 3.0
                sample_count: 1.02
              }
              buckets {
                low_value: 3.0
                high_value: 4.0
                sample_count: 0.99
              }
              buckets {
                low_value: 4.0
                high_value: inf
                sample_count: 5.49
              }
              type: STANDARD
            }
            histograms {
              buckets {
                low_value: -inf
                high_value: -inf
                sample_count: 3.0
              }
              buckets {
                low_value: -inf
                high_value: 3.0
                sample_count: 3.0
              }
              buckets {
                low_value: 3.0
                high_value: inf
                sample_count: 3.0
              }
              buckets {
                low_value: inf
                high_value: inf
                sample_count: 3.0
              }
              type: QUANTILES
            }
          }
          """, statistics_pb2.FeatureNameStatistics())}
  generator = basic_stats_generator.BasicStatsGenerator(
      num_values_histogram_buckets=4, num_histogram_buckets=4,
      num_quantiles_histogram_buckets=4)
  self.assertCombinerOutputEqual(batches, generator, expected_result)
def test_no_runtime_warnings_close_to_max_int(self):
  """Ensures no numpy overflow error is raised near the int64 maximum.

  The generator accumulates values that are slightly smaller than the
  maximum int64 value; with ``over='raise'`` any floating-point overflow
  during accumulation or merging would surface as a FloatingPointError and
  fail the test.
  """
  less_than_max_int_value = np.iinfo(np.int64).max - 1
  batches = ([
      pa.RecordBatch.from_arrays([pa.array([[less_than_max_int_value]])],
                                 ['a'])
  ] * 2)
  generator = basic_stats_generator.BasicStatsGenerator()
  # np.errstate restores the previous numpy error settings even if the
  # generator raises, unlike a manual geterr()/seterr() pair which would
  # leak the 'raise' setting into subsequent tests on failure.
  with np.errstate(over='raise'):
    accumulators = [
        generator.add_input(generator.create_accumulator(), batch)
        for batch in batches
    ]
    generator.merge_accumulators(accumulators)
def test_handle_null_column(self):
  """Tests that pa.null()-typed columns merge correctly with typed columns."""
  # Feature 'a' covers null coming before non-null.
  # Feature 'b' covers null coming after non-null.
  b1 = pa.RecordBatch.from_arrays([
      pa.array([None, None, None], type=pa.null()),
      pa.array([[1.0, 2.0, 3.0], [4.0], [5.0]]),
  ], ['a', 'b'])
  b2 = pa.RecordBatch.from_arrays([
      pa.array([[1, 2], None], type=pa.list_(pa.int64())),
      pa.array([None, None], type=pa.null()),
  ], ['a', 'b'])
  batches = [b1, b2]
  expected_result = {
      # 'a' only has values in the second batch, so its stats are computed
      # from the single non-null [1, 2] list.
      types.FeaturePath(['a']): text_format.Parse(
          """
          path {
            step: "a"
          }
          num_stats {
            common_stats {
              num_non_missing: 1
              min_num_values: 2
              max_num_values: 2
              avg_num_values: 2.0
              num_values_histogram {
                buckets {
                  low_value: 2.0
                  high_value: 2.0
                  sample_count: 0.25
                }
                buckets {
                  low_value: 2.0
                  high_value: 2.0
                  sample_count: 0.25
                }
                buckets {
                  low_value: 2.0
                  high_value: 2.0
                  sample_count: 0.25
                }
                buckets {
                  low_value: 2.0
                  high_value: 2.0
                  sample_count: 0.25
                }
                type: QUANTILES
              }
              tot_num_values: 2
            }
            mean: 1.5
            std_dev: 0.5
            min: 1.0
            median: 2.0
            max: 2.0
            histograms {
              buckets {
                low_value: 1.0
                high_value: 1.3333333
                sample_count: 0.9955556
              }
              buckets {
                low_value: 1.3333333
                high_value: 1.6666667
                sample_count: 0.0022222
              }
              buckets {
                low_value: 1.6666667
                high_value: 2.0
                sample_count: 1.0022222
              }
            }
            histograms {
              buckets {
                low_value: 1.0
                high_value: 1.0
                sample_count: 0.5
              }
              buckets {
                low_value: 1.0
                high_value: 2.0
                sample_count: 0.5
              }
              buckets {
                low_value: 2.0
                high_value: 2.0
                sample_count: 0.5
              }
              buckets {
                low_value: 2.0
                high_value: 2.0
                sample_count: 0.5
              }
              type: QUANTILES
            }
          }
          """, statistics_pb2.FeatureNameStatistics()),
      # 'b' only has values in the first batch; the second batch's null
      # column contributes nothing.
      types.FeaturePath(['b']): text_format.Parse(
          """
          path {
            step: 'b'
          }
          type: FLOAT
          num_stats {
            common_stats {
              num_non_missing: 3
              min_num_values: 1
              max_num_values: 3
              avg_num_values: 1.66666698456
              num_values_histogram {
                buckets {
                  low_value: 1.0
                  high_value: 1.0
                  sample_count: 0.75
                }
                buckets {
                  low_value: 1.0
                  high_value: 1.0
                  sample_count: 0.75
                }
                buckets {
                  low_value: 1.0
                  high_value: 3.0
                  sample_count: 0.75
                }
                buckets {
                  low_value: 3.0
                  high_value: 3.0
                  sample_count: 0.75
                }
                type: QUANTILES
              }
              tot_num_values: 5
            }
            mean: 3.0
            std_dev: 1.4142136
            min: 1.0
            median: 3.0
            max: 5.0
            histograms {
              buckets {
                low_value: 1.0
                high_value: 2.3333333
                sample_count: 1.9888889
              }
              buckets {
                low_value: 2.3333333
                high_value: 3.6666667
                sample_count: 1.0055556
              }
              buckets {
                low_value: 3.6666667
                high_value: 5.0
                sample_count: 2.0055556
              }
            }
            histograms {
              buckets {
                low_value: 1.0
                high_value: 2.0
                sample_count: 1.25
              }
              buckets {
                low_value: 2.0
                high_value: 3.0
                sample_count: 1.25
              }
              buckets {
                low_value: 3.0
                high_value: 4.0
                sample_count: 1.25
              }
              buckets {
                low_value: 4.0
                high_value: 5.0
                sample_count: 1.25
              }
              type: QUANTILES
            }
          }
          """, statistics_pb2.FeatureNameStatistics()),
  }
  generator = basic_stats_generator.BasicStatsGenerator(
      num_values_histogram_buckets=4,
      num_histogram_buckets=3,
      num_quantiles_histogram_buckets=4)
  self.assertCombinerOutputEqual(batches, generator, expected_result)
def test_pure_null_column(self):
  """Tests a column that is null in every batch (with a weight feature)."""
  batches = [
      pa.RecordBatch.from_arrays([
          pa.array([None, None], type=pa.null()),
          pa.array([[1.0], [1.0]]),
      ], ['a', 'w']),
      pa.RecordBatch.from_arrays([
          pa.array([None], type=pa.null()),
          pa.array([[1.0]]),
      ], ['a', 'w']),
  ]
  # A pure-null column defaults to STRING type and produces empty
  # common_stats; only the 'a' feature is matched (see
  # only_match_expected_feature_stats below).
  expected_result = {
      types.FeaturePath(['a']):
          text_format.Parse("""
          type: STRING
          string_stats {
            common_stats {
              weighted_common_stats {
              }
            }
          }
          path {
            step: "a"
          }
          """, statistics_pb2.FeatureNameStatistics()),
  }
  generator = basic_stats_generator.BasicStatsGenerator(
      example_weight_map=ExampleWeightMap(weight_feature='w'),
      num_values_histogram_buckets=4, num_histogram_buckets=3,
      num_quantiles_histogram_buckets=4)
  self.assertCombinerOutputEqual(
      batches, generator, expected_result,
      only_match_expected_feature_stats=True)
def test_with_weight_feature(self):
  """Tests weighted stats using a single global weight feature 'w'.

  Verifies that weighted_common_stats and weighted_numeric_stats are
  emitted alongside the unweighted stats for every feature, including the
  weight feature itself.
  """
  # input with two batches: first batch has two examples and second batch
  # has a single example.
  # NOTE: np.nan is the canonical spelling; the np.NaN alias was removed in
  # NumPy 2.0.
  b1 = pa.RecordBatch.from_arrays([
      pa.array([[1.0, 2.0], [3.0, 4.0, 5.0]]),
      pa.array([[1, 2], [3, 4, 5]]),
      pa.array([[1.0], [2.0]])
  ], ['a', 'b', 'w'])
  b2 = pa.RecordBatch.from_arrays([
      pa.array([[1.0, np.nan, np.nan, np.nan], None]),
      pa.array([[1], None]),
      pa.array([[3.0], [2.0]])
  ], ['a', 'b', 'w'])
  batches = [b1, b2]
  expected_result = {
      # 'a' contains 3 NaNs, which are excluded from moments but counted in
      # the histograms' num_nan.
      types.FeaturePath(['a']):
          text_format.Parse(
              """
              path {
                step: 'a'
              }
              type: FLOAT
              num_stats {
                common_stats {
                  num_non_missing: 3
                  min_num_values: 2
                  max_num_values: 4
                  avg_num_values: 3.0
                  tot_num_values: 9
                  num_values_histogram {
                    buckets {
                      low_value: 2.0
                      high_value: 2.0
                      sample_count: 0.75
                    }
                    buckets {
                      low_value: 2.0
                      high_value: 3.0
                      sample_count: 0.75
                    }
                    buckets {
                      low_value: 3.0
                      high_value: 4.0
                      sample_count: 0.75
                    }
                    buckets {
                      low_value: 4.0
                      high_value: 4.0
                      sample_count: 0.75
                    }
                    type: QUANTILES
                  }
                  weighted_common_stats {
                    num_non_missing: 6.0
                    avg_num_values: 3.33333333
                    tot_num_values: 20.0
                  }
                }
                mean: 2.66666666
                std_dev: 1.49071198
                num_zeros: 0
                min: 1.0
                max: 5.0
                median: 3.0
                histograms {
                  num_nan: 3
                  buckets {
                    low_value: 1.0
                    high_value: 2.3333333
                    sample_count: 2.9866667
                  }
                  buckets {
                    low_value: 2.3333333
                    high_value: 3.6666667
                    sample_count: 1.0066667
                  }
                  buckets {
                    low_value: 3.6666667
                    high_value: 5.0
                    sample_count: 2.0066667
                  }
                  type: STANDARD
                }
                histograms {
                  num_nan: 3
                  buckets {
                    low_value: 1.0
                    high_value: 1.0
                    sample_count: 1.5
                  }
                  buckets {
                    low_value: 1.0
                    high_value: 3.0
                    sample_count: 1.5
                  }
                  buckets {
                    low_value: 3.0
                    high_value: 4.0
                    sample_count: 1.5
                  }
                  buckets {
                    low_value: 4.0
                    high_value: 5.0
                    sample_count: 1.5
                  }
                  type: QUANTILES
                }
                weighted_numeric_stats {
                  mean: 2.7272727
                  std_dev: 1.5427784
                  median: 3.0
                  histograms {
                    num_nan: 3
                    buckets {
                      low_value: 1.0
                      high_value: 2.3333333
                      sample_count: 4.9988889
                    }
                    buckets {
                      low_value: 2.3333333
                      high_value: 3.6666667
                      sample_count: 1.9922222
                    }
                    buckets {
                      low_value: 3.6666667
                      high_value: 5.0
                      sample_count: 4.0088889
                    }
                  }
                  histograms {
                    num_nan: 3
                    buckets {
                      low_value: 1.0
                      high_value: 1.0
                      sample_count: 2.75
                    }
                    buckets {
                      low_value: 1.0
                      high_value: 3.0
                      sample_count: 2.75
                    }
                    buckets {
                      low_value: 3.0
                      high_value: 4.0
                      sample_count: 2.75
                    }
                    buckets {
                      low_value: 4.0
                      high_value: 5.0
                      sample_count: 2.75
                    }
                    type: QUANTILES
                  }
                }
              }
              """, statistics_pb2.FeatureNameStatistics()),
      types.FeaturePath(['b']):
          text_format.Parse(
              """
              path {
                step: 'b'
              }
              type: INT
              num_stats {
                common_stats {
                  num_non_missing: 3
                  min_num_values: 1
                  max_num_values: 3
                  avg_num_values: 2.0
                  tot_num_values: 6
                  num_values_histogram {
                    buckets {
                      low_value: 1.0
                      high_value: 1.0
                      sample_count: 0.75
                    }
                    buckets {
                      low_value: 1.0
                      high_value: 2.0
                      sample_count: 0.75
                    }
                    buckets {
                      low_value: 2.0
                      high_value: 3.0
                      sample_count: 0.75
                    }
                    buckets {
                      low_value: 3.0
                      high_value: 3.0
                      sample_count: 0.75
                    }
                    type: QUANTILES
                  }
                  weighted_common_stats {
                    num_non_missing: 6.0
                    avg_num_values: 1.83333333
                    tot_num_values: 11.0
                  }
                }
                mean: 2.66666666
                std_dev: 1.49071198
                num_zeros: 0
                min: 1.0
                max: 5.0
                median: 3.0
                histograms {
                  buckets {
                    low_value: 1.0
                    high_value: 2.3333333
                    sample_count: 2.9866667
                  }
                  buckets {
                    low_value: 2.3333333
                    high_value: 3.6666667
                    sample_count: 1.0066667
                  }
                  buckets {
                    low_value: 3.6666667
                    high_value: 5.0
                    sample_count: 2.0066667
                  }
                  type: STANDARD
                }
                histograms {
                  buckets {
                    low_value: 1.0
                    high_value: 1.0
                    sample_count: 1.5
                  }
                  buckets {
                    low_value: 1.0
                    high_value: 3.0
                    sample_count: 1.5
                  }
                  buckets {
                    low_value: 3.0
                    high_value: 4.0
                    sample_count: 1.5
                  }
                  buckets {
                    low_value: 4.0
                    high_value: 5.0
                    sample_count: 1.5
                  }
                  type: QUANTILES
                }
                weighted_numeric_stats {
                  mean: 2.7272727
                  std_dev: 1.5427784
                  median: 3.0
                  histograms {
                    buckets {
                      low_value: 1.0
                      high_value: 2.3333333
                      sample_count: 4.9988889
                    }
                    buckets {
                      low_value: 2.3333333
                      high_value: 3.6666667
                      sample_count: 1.9922222
                    }
                    buckets {
                      low_value: 3.6666667
                      high_value: 5.0
                      sample_count: 4.0088889
                    }
                  }
                  histograms {
                    buckets {
                      low_value: 1.0
                      high_value: 1.0
                      sample_count: 2.75
                    }
                    buckets {
                      low_value: 1.0
                      high_value: 3.0
                      sample_count: 2.75
                    }
                    buckets {
                      low_value: 3.0
                      high_value: 4.0
                      sample_count: 2.75
                    }
                    buckets {
                      low_value: 4.0
                      high_value: 5.0
                      sample_count: 2.75
                    }
                    type: QUANTILES
                  }
                }
              }
              """, statistics_pb2.FeatureNameStatistics()),
      # The weight feature itself also gets weighted stats (weighted by
      # itself).
      types.FeaturePath(['w']):
          text_format.Parse(
              """
              path {
                step: 'w'
              }
              type: FLOAT
              num_stats {
                common_stats {
                  num_non_missing: 4
                  min_num_values: 1
                  max_num_values: 1
                  avg_num_values: 1.0
                  tot_num_values: 4
                  num_values_histogram {
                    buckets {
                      low_value: 1.0
                      high_value: 1.0
                      sample_count: 1.0
                    }
                    buckets {
                      low_value: 1.0
                      high_value: 1.0
                      sample_count: 1.0
                    }
                    buckets {
                      low_value: 1.0
                      high_value: 1.0
                      sample_count: 1.0
                    }
                    buckets {
                      low_value: 1.0
                      high_value: 1.0
                      sample_count: 1.0
                    }
                    type: QUANTILES
                  }
                  weighted_common_stats {
                    num_non_missing: 8.0
                    avg_num_values: 1.0
                    tot_num_values: 8.0
                  }
                }
                mean: 2.0
                std_dev: 0.7071068
                num_zeros: 0
                min: 1.0
                max: 3.0
                median: 2.0
                histograms {
                  buckets {
                    low_value: 1.0
                    high_value: 1.6666667
                    sample_count: 0.9955556
                  }
                  buckets {
                    low_value: 1.6666667
                    high_value: 2.3333333
                    sample_count: 1.9955556
                  }
                  buckets {
                    low_value: 2.3333333
                    high_value: 3.0
                    sample_count: 1.0088889
                  }
                  type: STANDARD
                }
                histograms {
                  buckets {
                    low_value: 1.0
                    high_value: 2.0
                    sample_count: 1.0
                  }
                  buckets {
                    low_value: 2.0
                    high_value: 2.0
                    sample_count: 1.0
                  }
                  buckets {
                    low_value: 2.0
                    high_value: 3.0
                    sample_count: 1.0
                  }
                  buckets {
                    low_value: 3.0
                    high_value: 3.0
                    sample_count: 1.0
                  }
                  type: QUANTILES
                }
                weighted_numeric_stats {
                  mean: 2.25
                  std_dev: 0.6614378
                  median: 2.0
                  histograms {
                    buckets {
                      low_value: 1.0
                      high_value: 1.6666667
                      sample_count: 1.0044444
                    }
                    buckets {
                      low_value: 1.6666667
                      high_value: 2.3333333
                      sample_count: 3.9911111
                    }
                    buckets {
                      low_value: 2.3333333
                      high_value: 3.0
                      sample_count: 3.0044444
                    }
                  }
                  histograms {
                    buckets {
                      low_value: 1.0
                      high_value: 2.0
                      sample_count: 2.0
                    }
                    buckets {
                      low_value: 2.0
                      high_value: 2.0
                      sample_count: 2.0
                    }
                    buckets {
                      low_value: 2.0
                      high_value: 3.0
                      sample_count: 2.0
                    }
                    buckets {
                      low_value: 3.0
                      high_value: 3.0
                      sample_count: 2.0
                    }
                    type: QUANTILES
                  }
                }
              }
              """, statistics_pb2.FeatureNameStatistics())
  }
  generator = basic_stats_generator.BasicStatsGenerator(
      example_weight_map=ExampleWeightMap(weight_feature='w'),
      num_values_histogram_buckets=4, num_histogram_buckets=3,
      num_quantiles_histogram_buckets=4)
  self.assertCombinerOutputEqual(batches, generator, expected_result)
def test_with_per_feature_weight(self):
  """Tests weighted stats with a per-feature weight override.

  Feature 'a' is weighted by 'w_a' (the default weight) while feature 'b'
  is weighted by 'w_b' via ExampleWeightMap.per_feature_override.
  """
  # input with two batches: first batch has two examples and second batch
  # has a single example.
  # NOTE: np.nan is the canonical spelling; the np.NaN alias was removed in
  # NumPy 2.0.
  b1 = pa.RecordBatch.from_arrays([
      pa.array([[1.0, 2.0], [3.0, 4.0, 5.0]]),
      pa.array([[1, 2], [3, 4, 5]]),
      pa.array([[1.0], [2.0]]),
      pa.array([[2.0], [1.0]]),
  ], ['a', 'b', 'w_a', 'w_b'])
  b2 = pa.RecordBatch.from_arrays([
      pa.array([[1.0, np.nan, np.nan, np.nan], None]),
      pa.array([[1], None]),
      pa.array([[3.0], [2.0]]),
      pa.array([[2.0], [3.0]]),
  ], ['a', 'b', 'w_a', 'w_b'])
  batches = [b1, b2]
  expected_result = {
      types.FeaturePath(['a']):
          text_format.Parse(
              """
              path {
                step: 'a'
              }
              type: FLOAT
              num_stats {
                common_stats {
                  num_non_missing: 3
                  min_num_values: 2
                  max_num_values: 4
                  avg_num_values: 3.0
                  tot_num_values: 9
                  num_values_histogram {
                    buckets {
                      low_value: 2.0
                      high_value: 2.0
                      sample_count: 0.75
                    }
                    buckets {
                      low_value: 2.0
                      high_value: 3.0
                      sample_count: 0.75
                    }
                    buckets {
                      low_value: 3.0
                      high_value: 4.0
                      sample_count: 0.75
                    }
                    buckets {
                      low_value: 4.0
                      high_value: 4.0
                      sample_count: 0.75
                    }
                    type: QUANTILES
                  }
                  weighted_common_stats {
                    num_non_missing: 6.0
                    avg_num_values: 3.33333333
                    tot_num_values: 20.0
                  }
                }
                mean: 2.66666666
                std_dev: 1.49071198
                num_zeros: 0
                min: 1.0
                max: 5.0
                median: 3.0
                histograms {
                  num_nan: 3
                  buckets {
                    low_value: 1.0
                    high_value: 2.3333333
                    sample_count: 2.9866667
                  }
                  buckets {
                    low_value: 2.3333333
                    high_value: 3.6666667
                    sample_count: 1.0066667
                  }
                  buckets {
                    low_value: 3.6666667
                    high_value: 5.0
                    sample_count: 2.0066667
                  }
                  type: STANDARD
                }
                histograms {
                  num_nan: 3
                  buckets {
                    low_value: 1.0
                    high_value: 1.0
                    sample_count: 1.5
                  }
                  buckets {
                    low_value: 1.0
                    high_value: 3.0
                    sample_count: 1.5
                  }
                  buckets {
                    low_value: 3.0
                    high_value: 4.0
                    sample_count: 1.5
                  }
                  buckets {
                    low_value: 4.0
                    high_value: 5.0
                    sample_count: 1.5
                  }
                  type: QUANTILES
                }
                weighted_numeric_stats {
                  mean: 2.7272727
                  std_dev: 1.5427784
                  median: 3.0
                  histograms {
                    num_nan: 3
                    buckets {
                      low_value: 1.0
                      high_value: 2.3333333
                      sample_count: 4.9988889
                    }
                    buckets {
                      low_value: 2.3333333
                      high_value: 3.6666667
                      sample_count: 1.9922222
                    }
                    buckets {
                      low_value: 3.6666667
                      high_value: 5.0
                      sample_count: 4.0088889
                    }
                  }
                  histograms {
                    num_nan: 3
                    buckets {
                      low_value: 1.0
                      high_value: 1.0
                      sample_count: 2.75
                    }
                    buckets {
                      low_value: 1.0
                      high_value: 3.0
                      sample_count: 2.75
                    }
                    buckets {
                      low_value: 3.0
                      high_value: 4.0
                      sample_count: 2.75
                    }
                    buckets {
                      low_value: 4.0
                      high_value: 5.0
                      sample_count: 2.75
                    }
                    type: QUANTILES
                  }
                }
              }
              """, statistics_pb2.FeatureNameStatistics()),
      # 'b' uses the 'w_b' override, so its weighted stats differ from what
      # 'w_a' would produce.
      types.FeaturePath(['b']):
          text_format.Parse(
              """
              num_stats {
                common_stats {
                  num_non_missing: 3
                  min_num_values: 1
                  max_num_values: 3
                  avg_num_values: 2.0
                  num_values_histogram {
                    buckets {
                      low_value: 1.0
                      high_value: 1.0
                      sample_count: 0.75
                    }
                    buckets {
                      low_value: 1.0
                      high_value: 2.0
                      sample_count: 0.75
                    }
                    buckets {
                      low_value: 2.0
                      high_value: 3.0
                      sample_count: 0.75
                    }
                    buckets {
                      low_value: 3.0
                      high_value: 3.0
                      sample_count: 0.75
                    }
                    type: QUANTILES
                  }
                  weighted_common_stats {
                    num_non_missing: 5.0
                    avg_num_values: 1.8
                    tot_num_values: 9.0
                  }
                  tot_num_values: 6
                }
                mean: 2.6666667
                std_dev: 1.490712
                min: 1.0
                median: 3.0
                max: 5.0
                histograms {
                  buckets {
                    low_value: 1.0
                    high_value: 2.3333333
                    sample_count: 2.9866667
                  }
                  buckets {
                    low_value: 2.3333333
                    high_value: 3.6666667
                    sample_count: 1.0066667
                  }
                  buckets {
                    low_value: 3.6666667
                    high_value: 5.0
                    sample_count: 2.0066667
                  }
                }
                histograms {
                  buckets {
                    low_value: 1.0
                    high_value: 1.0
                    sample_count: 1.5
                  }
                  buckets {
                    low_value: 1.0
                    high_value: 3.0
                    sample_count: 1.5
                  }
                  buckets {
                    low_value: 3.0
                    high_value: 4.0
                    sample_count: 1.5
                  }
                  buckets {
                    low_value: 4.0
                    high_value: 5.0
                    sample_count: 1.5
                  }
                  type: QUANTILES
                }
                weighted_numeric_stats {
                  mean: 2.2222222
                  std_dev: 1.396645
                  median: 2.0
                  histograms {
                    buckets {
                      low_value: 1.0
                      high_value: 2.3333333
                      sample_count: 5.98
                    }
                    buckets {
                      low_value: 2.3333333
                      high_value: 3.6666667
                      sample_count: 1.03
                    }
                    buckets {
                      low_value: 3.6666667
                      high_value: 5.0
                      sample_count: 1.99
                    }
                  }
                  histograms {
                    buckets {
                      low_value: 1.0
                      high_value: 1.0
                      sample_count: 2.25
                    }
                    buckets {
                      low_value: 1.0
                      high_value: 2.0
                      sample_count: 2.25
                    }
                    buckets {
                      low_value: 2.0
                      high_value: 3.0
                      sample_count: 2.25
                    }
                    buckets {
                      low_value: 3.0
                      high_value: 5.0
                      sample_count: 2.25
                    }
                    type: QUANTILES
                  }
                }
              }
              path {
                step: "b"
              }
              """, statistics_pb2.FeatureNameStatistics()),
  }
  generator = basic_stats_generator.BasicStatsGenerator(
      example_weight_map=ExampleWeightMap(
          weight_feature='w_a',
          per_feature_override={types.FeaturePath(['b']): 'w_b'}),
      num_values_histogram_buckets=4,
      num_histogram_buckets=3,
      num_quantiles_histogram_buckets=4)
  self.assertCombinerOutputEqual(batches, generator, expected_result,
                                 only_match_expected_feature_stats=True)
def test_with_entire_feature_value_list_missing(self):
  """Tests stats when some examples have the whole value list missing."""
  # input with two batches: first batch has three examples and second batch
  # has two examples.
  b1 = pa.RecordBatch.from_arrays([
      pa.array([[1.0, 2.0], None, [3.0, 4.0, 5.0]]),
      pa.array([['x', 'y', 'z', 'w'], None, ['qwe', 'abc']]),
  ], ['a', 'b'])
  b2 = pa.RecordBatch.from_arrays(
      [pa.array([[1.0], None]),
       pa.array([None, ['qwe']])], ['a', 'b'])
  batches = [b1, b2]
  # None entries are missing examples: both features end up with 3
  # non-missing examples out of 5.
  expected_result = {
      types.FeaturePath(['a']): text_format.Parse(
          """
          path {
            step: 'a'
          }
          type: FLOAT
          num_stats {
            common_stats {
              num_non_missing: 3
              min_num_values: 1
              max_num_values: 3
              avg_num_values: 2.0
              tot_num_values: 6
              num_values_histogram {
                buckets {
                  low_value: 1.0
                  high_value: 2.0
                  sample_count: 1.0
                }
                buckets {
                  low_value: 2.0
                  high_value: 3.0
                  sample_count: 1.0
                }
                buckets {
                  low_value: 3.0
                  high_value: 3.0
                  sample_count: 1.0
                }
                type: QUANTILES
              }
            }
            mean: 2.66666666
            std_dev: 1.49071198
            num_zeros: 0
            min: 1.0
            max: 5.0
            median: 3.0
            histograms {
              buckets {
                low_value: 1.0
                high_value: 2.3333333
                sample_count: 2.9866667
              }
              buckets {
                low_value: 2.3333333
                high_value: 3.6666667
                sample_count: 1.0066667
              }
              buckets {
                low_value: 3.6666667
                high_value: 5.0
                sample_count: 2.0066667
              }
              type: STANDARD
            }
            histograms {
              buckets {
                low_value: 1.0
                high_value: 1.0
                sample_count: 1.5
              }
              buckets {
                low_value: 1.0
                high_value: 3.0
                sample_count: 1.5
              }
              buckets {
                low_value: 3.0
                high_value: 4.0
                sample_count: 1.5
              }
              buckets {
                low_value: 4.0
                high_value: 5.0
                sample_count: 1.5
              }
              type: QUANTILES
            }
          }
          """, statistics_pb2.FeatureNameStatistics()),
      types.FeaturePath(['b']): text_format.Parse(
          """
          path {
            step: 'b'
          }
          type: STRING
          string_stats {
            common_stats {
              num_non_missing: 3
              min_num_values: 1
              max_num_values: 4
              avg_num_values: 2.33333333
              tot_num_values: 7
              num_values_histogram {
                buckets {
                  low_value: 1.0
                  high_value: 2.0
                  sample_count: 1.0
                }
                buckets {
                  low_value: 2.0
                  high_value: 4.0
                  sample_count: 1.0
                }
                buckets {
                  low_value: 4.0
                  high_value: 4.0
                  sample_count: 1.0
                }
                type: QUANTILES
              }
            }
            avg_length: 1.85714285
          }
          """, statistics_pb2.FeatureNameStatistics())}
  generator = basic_stats_generator.BasicStatsGenerator(
      num_values_histogram_buckets=3, num_histogram_buckets=3,
      num_quantiles_histogram_buckets=4)
  self.assertCombinerOutputEqual(batches, generator, expected_result)
def test_with_individual_feature_value_missing(self):
  """Tests stats when individual values within a list are NaN.

  NaN values are excluded from the moments (mean/std_dev/min/max) but are
  counted in the histograms' num_nan field.
  """
  # input with two batches: first batch has two examples and second batch
  # has a single example.
  # NOTE: np.nan is the canonical spelling; the np.NaN alias was removed in
  # NumPy 2.0.
  b1 = pa.RecordBatch.from_arrays(
      [pa.array([[1.0, 2.0], [3.0, 4.0, np.nan, 5.0]])], ['a'])
  b2 = pa.RecordBatch.from_arrays([pa.array([[np.nan, 1.0]])], ['a'])
  batches = [b1, b2]
  expected_result = {
      types.FeaturePath(['a']): text_format.Parse(
          """
          path {
            step: 'a'
          }
          type: FLOAT
          num_stats {
            common_stats {
              num_non_missing: 3
              min_num_values: 2
              max_num_values: 4
              avg_num_values: 2.66666666
              tot_num_values: 8
              num_values_histogram {
                buckets {
                  low_value: 2.0
                  high_value: 2.0
                  sample_count: 1.0
                }
                buckets {
                  low_value: 2.0
                  high_value: 4.0
                  sample_count: 1.0
                }
                buckets {
                  low_value: 4.0
                  high_value: 4.0
                  sample_count: 1.0
                }
                type: QUANTILES
              }
            }
            mean: 2.66666666
            std_dev: 1.49071198
            num_zeros: 0
            min: 1.0
            max: 5.0
            median: 3.0
            histograms {
              num_nan: 2
              buckets {
                low_value: 1.0
                high_value: 2.3333333
                sample_count: 2.9866667
              }
              buckets {
                low_value: 2.3333333
                high_value: 3.6666667
                sample_count: 1.0066667
              }
              buckets {
                low_value: 3.6666667
                high_value: 5.0
                sample_count: 2.0066667
              }
              type: STANDARD
            }
            histograms {
              num_nan: 2
              buckets {
                low_value: 1.0
                high_value: 1.0
                sample_count: 1.5
              }
              buckets {
                low_value: 1.0
                high_value: 3.0
                sample_count: 1.5
              }
              buckets {
                low_value: 3.0
                high_value: 4.0
                sample_count: 1.5
              }
              buckets {
                low_value: 4.0
                high_value: 5.0
                sample_count: 1.5
              }
              type: QUANTILES
            }
          }
          """, statistics_pb2.FeatureNameStatistics())}
  generator = basic_stats_generator.BasicStatsGenerator(
      num_values_histogram_buckets=3, num_histogram_buckets=3,
      num_quantiles_histogram_buckets=4)
  self.assertCombinerOutputEqual(batches, generator, expected_result)
def test_with_multiple_features(self):
  """Tests several features with varied arrow types in the same batches."""
  # Test that columns of ListArray, LargeListArray can be handled. Also test
  # that columns whose values are LargeBinaryArray can be handled.
  b1 = pa.RecordBatch.from_arrays([
      pa.array([[1.0, 2.0], [3.0, 4.0, 5.0]],
               type=pa.large_list(pa.float32())),
      pa.array([[b'x', b'y', b'z', b'w'], [b'qwe', b'abc']],
               type=pa.list_(pa.large_binary())),
      pa.array([
          np.linspace(1, 1000, 1000, dtype=np.int32),
          np.linspace(1001, 2000, 1000, dtype=np.int32)
      ],
               type=pa.list_(pa.int32())),
  ], ['a', 'b', 'c'])
  b2 = pa.RecordBatch.from_arrays([
      pa.array([[1.0]], type=pa.large_list(pa.float32())),
      pa.array([[b'ab']], type=pa.list_(pa.large_binary())),
      pa.array([np.linspace(2001, 3000, 1000, dtype=np.int32)],
               type=pa.list_(pa.int32())),
  ], ['a', 'b', 'c'])
  batches = [b1, b2]
  expected_result = {
      types.FeaturePath(['a']): text_format.Parse(
          """
          path {
            step: 'a'
          }
          type: FLOAT
          num_stats {
            common_stats {
              num_non_missing: 3
              min_num_values: 1
              max_num_values: 3
              avg_num_values: 2.0
              tot_num_values: 6
              num_values_histogram {
                buckets {
                  low_value: 1.0
                  high_value: 2.0
                  sample_count: 1.0
                }
                buckets {
                  low_value: 2.0
                  high_value: 3.0
                  sample_count: 1.0
                }
                buckets {
                  low_value: 3.0
                  high_value: 3.0
                  sample_count: 1.0
                }
                type: QUANTILES
              }
            }
            mean: 2.66666666
            std_dev: 1.49071198
            num_zeros: 0
            min: 1.0
            max: 5.0
            median: 3.0
            histograms {
              buckets {
                low_value: 1.0
                high_value: 2.3333333
                sample_count: 2.9866667
              }
              buckets {
                low_value: 2.3333333
                high_value: 3.6666667
                sample_count: 1.0066667
              }
              buckets {
                low_value: 3.6666667
                high_value: 5.0
                sample_count: 2.0066667
              }
              type: STANDARD
            }
            histograms {
              buckets {
                low_value: 1.0
                high_value: 1.0
                sample_count: 1.5
              }
              buckets {
                low_value: 1.0
                high_value: 3.0
                sample_count: 1.5
              }
              buckets {
                low_value: 3.0
                high_value: 4.0
                sample_count: 1.5
              }
              buckets {
                low_value: 4.0
                high_value: 5.0
                sample_count: 1.5
              }
              type: QUANTILES
            }
          }
          """, statistics_pb2.FeatureNameStatistics()),
      types.FeaturePath(['b']): text_format.Parse(
          """
          path {
            step: 'b'
          }
          type: STRING
          string_stats {
            common_stats {
              num_non_missing: 3
              min_num_values: 1
              max_num_values: 4
              avg_num_values: 2.33333333
              tot_num_values: 7
              num_values_histogram {
                buckets {
                  low_value: 1.0
                  high_value: 2.0
                  sample_count: 1.0
                }
                buckets {
                  low_value: 2.0
                  high_value: 4.0
                  sample_count: 1.0
                }
                buckets {
                  low_value: 4.0
                  high_value: 4.0
                  sample_count: 1.0
                }
                type: QUANTILES
              }
            }
            avg_length: 1.71428571
          }
          """, statistics_pb2.FeatureNameStatistics()),
      types.FeaturePath(['c']): text_format.Parse(
          """
          path {
            step: 'c'
          }
          type: INT
          num_stats {
            common_stats {
              num_non_missing: 3
              min_num_values: 1000
              max_num_values: 1000
              avg_num_values: 1000.0
              tot_num_values: 3000
              num_values_histogram {
                buckets {
                  low_value: 1000.0
                  high_value: 1000.0
                  sample_count: 1.0
                }
                buckets {
                  low_value: 1000.0
                  high_value: 1000.0
                  sample_count: 1.0
                }
                buckets {
                  low_value: 1000.0
                  high_value: 1000.0
                  sample_count: 1.0
                }
                type: QUANTILES
              }
            }
            mean: 1500.5
            std_dev: 866.025355672
            min: 1.0
            max: 3000.0
            median: 1501.0
            histograms {
              buckets {
                low_value: 1.0
                high_value: 1000.66666667
                sample_count: 999.666666667
              }
              buckets {
                low_value: 1000.66666667
                high_value: 2000.33333333
                sample_count: 999.666666667
              }
              buckets {
                low_value: 2000.33333333
                high_value: 3000.0
                sample_count: 1000.66666667
              }
              type: STANDARD
            }
            histograms {
              buckets {
                low_value: 1.0
                high_value: 751.0
                sample_count: 750.0
              }
              buckets {
                low_value: 751.0
                high_value: 1501.0
                sample_count: 750.0
              }
              buckets {
                low_value: 1501.0
                high_value: 2251.0
                sample_count: 750.0
              }
              buckets {
                low_value: 2251.0
                high_value: 3000.0
                sample_count: 750.0
              }
              type: QUANTILES
            }
          }
          """, statistics_pb2.FeatureNameStatistics())}
  # epsilon tightens the quantile sketch accuracy for the 3000-value
  # feature 'c'.
  generator = basic_stats_generator.BasicStatsGenerator(
      num_values_histogram_buckets=3, num_histogram_buckets=3,
      num_quantiles_histogram_buckets=4, epsilon=0.001)
  self.assertCombinerOutputEqual(batches, generator, expected_result)
def test_with_bytes_features(self):
  """Tests bytes_stats for a feature whose schema declares an image_domain."""
  b1 = pa.RecordBatch.from_arrays([
      pa.array([[b'x', b'y', b'z', b'w'], [b'qwe', b'abc']]),], ['b'])
  b2 = pa.RecordBatch.from_arrays([pa.array([[b'ab']]),], ['b'])
  batches = [b1, b2]
  # The image_domain in the schema causes the feature to be treated as raw
  # BYTES (bytes_stats) rather than STRING.
  schema = text_format.Parse(
      """
      feature {
        name: "b"
        type: BYTES
        image_domain { }
      }
      """, schema_pb2.Schema())
  expected_result = {
      types.FeaturePath(['b']): text_format.Parse(
          """
          path {
            step: 'b'
          }
          type: BYTES
          bytes_stats {
            common_stats {
              num_non_missing: 3
              min_num_values: 1
              max_num_values: 4
              avg_num_values: 2.33333333
              tot_num_values: 7
              num_values_histogram {
                buckets {
                  low_value: 1.0
                  high_value: 2.0
                  sample_count: 1.0
                }
                buckets {
                  low_value: 2.0
                  high_value: 4.0
                  sample_count: 1.0
                }
                buckets {
                  low_value: 4.0
                  high_value: 4.0
                  sample_count: 1.0
                }
                type: QUANTILES
              }
            }
            avg_num_bytes: 1.71428571
            min_num_bytes: 1
            max_num_bytes: 3
          }
          """, statistics_pb2.FeatureNameStatistics()),
  }
  generator = basic_stats_generator.BasicStatsGenerator(
      schema=schema,
      num_values_histogram_buckets=3, num_histogram_buckets=3,
      num_quantiles_histogram_buckets=4, epsilon=0.001)
  self.assertCombinerOutputEqual(batches, generator, expected_result)
def test_categorical_feature(self):
  """Tests that a categorical INT feature produces string_stats."""
  batches = [
      pa.RecordBatch.from_arrays([pa.array([[1, 5, 10], [0]])], ['c']),
      pa.RecordBatch.from_arrays([pa.array([[1, 1, 1, 5, 15], [-1]])], ['c']),
      pa.RecordBatch.from_arrays([pa.array([None, None], type=pa.null())],
                                 ['c'])
  ]
  # int_domain.is_categorical makes the generator treat the ints as strings;
  # avg_length is computed over the decimal representations.
  expected_result = {
      types.FeaturePath(['c']): text_format.Parse(
          """
          path {
            step: 'c'
          }
          string_stats {
            common_stats {
              num_non_missing: 4
              min_num_values: 1
              max_num_values: 5
              avg_num_values: 2.5
              num_values_histogram {
                buckets {
                  low_value: 1.0
                  high_value: 1.0
                  sample_count: 1.3333333
                }
                buckets {
                  low_value: 1.0
                  high_value: 3.0
                  sample_count: 1.3333333
                }
                buckets {
                  low_value: 3.0
                  high_value: 5.0
                  sample_count: 1.3333333
                }
                type: QUANTILES
              }
              tot_num_values: 10
            }
            avg_length: 1.29999995232
          }
          """, statistics_pb2.FeatureNameStatistics())}
  schema = text_format.Parse(
      """
      feature {
        name: "c"
        type: INT
        int_domain {
          is_categorical: true
        }
      }
      """, schema_pb2.Schema())
  generator = basic_stats_generator.BasicStatsGenerator(
      schema=schema,
      num_values_histogram_buckets=3, num_histogram_buckets=3,
      num_quantiles_histogram_buckets=4)
  self.assertCombinerOutputEqual(batches, generator, expected_result)
def test_empty_batch(self):
  """A batch with zero rows yields empty common_stats for the feature."""
  empty_binary_list = pa.array([], type=pa.list_(pa.binary()))
  batches = [pa.RecordBatch.from_arrays([empty_binary_list], ['a'])]
  expected_stats = text_format.Parse(
      """
      path {
        step: 'a'
      }
      type: STRING
      string_stats {
        common_stats {
          num_non_missing: 0
          tot_num_values: 0
        }
      }
      """, statistics_pb2.FeatureNameStatistics())
  expected_result = {types.FeaturePath(['a']): expected_stats}
  generator = basic_stats_generator.BasicStatsGenerator()
  self.assertCombinerOutputEqual(batches, generator, expected_result)
def test_no_value_in_batch(self):
  """Tests examples that are present but contain empty value lists."""
  batches = [
      pa.RecordBatch.from_arrays([
          pa.array([[], [], []], type=pa.list_(pa.int64()))], ['a'])]
  # All three examples are non-missing but contribute zero values, so the
  # num_values_histogram has the default 10 buckets of all-zero bounds.
  expected_result = {
      types.FeaturePath(['a']): text_format.Parse(
          """
          path {
            step: 'a'
          }
          num_stats {
            common_stats {
              num_non_missing: 3
              num_values_histogram {
                buckets {
                  sample_count: 0.3
                }
                buckets {
                  sample_count: 0.3
                }
                buckets {
                  sample_count: 0.3
                }
                buckets {
                  sample_count: 0.3
                }
                buckets {
                  sample_count: 0.3
                }
                buckets {
                  sample_count: 0.3
                }
                buckets {
                  sample_count: 0.3
                }
                buckets {
                  sample_count: 0.3
                }
                buckets {
                  sample_count: 0.3
                }
                buckets {
                  sample_count: 0.3
                }
                type: QUANTILES
              }
            }
          }""", statistics_pb2.FeatureNameStatistics())}
  generator = basic_stats_generator.BasicStatsGenerator()
  self.assertCombinerOutputEqual(batches, generator, expected_result)
def test_only_nan(self):
  """Tests a feature whose only value is NaN.

  The histograms carry num_nan but no buckets, and no moments are emitted.
  """
  # NOTE: np.nan is the canonical spelling; the np.NaN alias was removed in
  # NumPy 2.0.
  b1 = pa.RecordBatch.from_arrays(
      [pa.array([[np.nan]], type=pa.list_(pa.float32()))], ['a'])
  batches = [b1]
  expected_result = {
      types.FeaturePath(['a']): text_format.Parse(
          """
          path {
            step: 'a'
          }
          type: FLOAT
          num_stats {
            common_stats {
              num_non_missing: 1
              min_num_values: 1
              max_num_values: 1
              avg_num_values: 1.0
              tot_num_values: 1
              num_values_histogram {
                buckets {
                  low_value: 1.0
                  high_value: 1.0
                  sample_count: 0.5
                }
                buckets {
                  low_value: 1.0
                  high_value: 1.0
                  sample_count: 0.5
                }
                type: QUANTILES
              }
            }
            histograms {
              num_nan: 1
              type: STANDARD
            }
            histograms {
              num_nan: 1
              type: QUANTILES
            }
          }
          """, statistics_pb2.FeatureNameStatistics())}
  generator = basic_stats_generator.BasicStatsGenerator(
      num_values_histogram_buckets=2, num_histogram_buckets=3,
      num_quantiles_histogram_buckets=4)
  self.assertCombinerOutputEqual(batches, generator, expected_result)
def test_schema_claims_bytes_but_actually_int(self):
  """The observed arrow type (INT) wins over the schema's BYTES claim."""
  int_column = pa.array([], type=pa.list_(pa.int64()))
  batches = [pa.RecordBatch.from_arrays([int_column], ['a'])]
  schema = text_format.Parse("""
      feature {
        name: "a"
        type: BYTES
        image_domain { }
      }""", schema_pb2.Schema())
  expected_stats = text_format.Parse("""
      type: INT
      num_stats {
        common_stats {
        }
      }
      path {
        step: "a"
      }
      """, statistics_pb2.FeatureNameStatistics())
  generator = basic_stats_generator.BasicStatsGenerator(
      schema=schema,
      num_values_histogram_buckets=2, num_histogram_buckets=3,
      num_quantiles_histogram_buckets=4)
  self.assertCombinerOutputEqual(
      batches, generator, {types.FeaturePath(['a']): expected_stats})
def test_schema_claims_categorical_but_actually_float(self):
  """A categorical-INT schema claim is ignored for FLOAT-valued data."""
  float_column = pa.array([], type=pa.list_(pa.float32()))
  batches = [pa.RecordBatch.from_arrays([float_column], ['a'])]
  schema = text_format.Parse("""
      feature {
        name: "a"
        type: INT
        int_domain { is_categorical: true }
      }""", schema_pb2.Schema())
  expected_stats = text_format.Parse("""
      type: FLOAT
      num_stats {
        common_stats {
        }
      }
      path {
        step: "a"
      }
      """, statistics_pb2.FeatureNameStatistics())
  generator = basic_stats_generator.BasicStatsGenerator(
      schema=schema,
      num_values_histogram_buckets=2, num_histogram_buckets=3,
      num_quantiles_histogram_buckets=4)
  self.assertCombinerOutputEqual(
      batches, generator, {types.FeaturePath(['a']): expected_stats})
def test_column_not_list(self):
  """A flat (non-list) column must be rejected with a TypeError."""
  flat_column = pa.array([1, 2, 3])
  batches = [pa.RecordBatch.from_arrays([flat_column], ['a'])]
  generator = basic_stats_generator.BasicStatsGenerator()
  with self.assertRaisesRegex(  # pylint: disable=g-error-prone-assert-raises
      TypeError, r'Expected feature column to be a \(Large\)List'):
    self.assertCombinerOutputEqual(batches, generator, None)
def test_invalid_value_numpy_dtype(self):
  """An unsupported arrow value type (date32) must raise a TypeError."""
  unsupported_column = pa.array([[]], type=pa.list_(pa.date32()))
  batches = [pa.RecordBatch.from_arrays([unsupported_column], ['a'])]
  generator = basic_stats_generator.BasicStatsGenerator()
  with self.assertRaisesRegex(  # pylint: disable=g-error-prone-assert-raises
      TypeError, 'Feature a has unsupported arrow type'):
    self.assertCombinerOutputEqual(batches, generator, None)
def test_feature_with_inconsistent_types(self):
batches = [
pa.RecordBatch.from_arrays([pa.array([[1.0, 2.0], [3.0, 4.0, 5.0]])],
['a']),
pa.RecordBatch.from_arrays([pa.array([[1]])], ['a']),
]
generator = basic_stats_generator.BasicStatsGenerator()
with self.assertRaisesRegex( # pylint: disable=g-error-prone-assert-raises
TypeError, 'Cannot determine the type'):
self.assertCombinerOutputEqual(batches, generator, None)
_STRUCT_TEST_CASES = [
dict(
testcase_name='deep_struct',
struct_column_as_list_dicts=[[{
'l2': [
{
'l3': [1, 2, 3]
},
{
'l3': [4, 5]
},
],
}, {
'l2': [{}],
}, {
'l2': [{
'l3': None
}],
}], None],
expected_result_text_protos={
('c',):
"""
type: STRUCT
struct_stats {
common_stats {
num_non_missing: 1
min_num_values: 3
max_num_values: 3
avg_num_values: 3.0
num_values_histogram {
buckets {
low_value: 3.0
high_value: 3.0
sample_count: 0.5
}
buckets {
low_value: 3.0
high_value: 3.0
sample_count: 0.5
}
type: QUANTILES
}
tot_num_values: 3
}
}""",
('c', 'l2'):
"""
type: STRUCT
struct_stats {
common_stats {
num_non_missing: 3
min_num_values: 1
max_num_values: 2
avg_num_values: 1.333333
num_values_histogram {
buckets {
low_value: 1.0
high_value: 1.0
sample_count: 1.5
}
buckets {
low_value: 1.0
high_value: 2.0
sample_count: 1.5
}
type: QUANTILES
}
tot_num_values: 4
}
}""",
('c', 'l2', 'l3'):
"""
type: INT
num_stats {
common_stats {
num_non_missing: 2
num_missing: 2
min_num_values: 2
max_num_values: 3
avg_num_values: 2.5
num_values_histogram {
buckets {
low_value: 2.0
high_value: 3.0
sample_count: 1.0
}
buckets {
low_value: 3.0
high_value: 3.0
sample_count: 1.0
}
type: QUANTILES
}
tot_num_values: 5
}
mean: 3.0
std_dev: 1.4142136
min: 1.0
median: 3.0
max: 5.0
histograms {
buckets {
low_value: 1.0
high_value: 2.3333333
sample_count: 1.9888889
}
buckets {
low_value: 2.3333333
high_value: 3.6666667
sample_count: 1.0055556
}
buckets {
low_value: 3.6666667
high_value: 5.0
sample_count: 2.0055556
}
}
histograms {
buckets {
low_value: 1.0
high_value: 2.0
sample_count: 1.25
}
buckets {
low_value: 2.0
high_value: 3.0
sample_count: 1.25
}
buckets {
low_value: 3.0
high_value: 4.0
sample_count: 1.25
}
buckets {
low_value: 4.0
high_value: 5.0
sample_count: 1.25
}
type: QUANTILES
}
}""",
}),
dict(
testcase_name='leaf_is_categorical',
struct_column_as_list_dicts=[
[{
'f1': [1, 2, 3],
'f2': ['b']
}],
[{
'f1': [3, 1],
'f2': ['a']
}, {
'f1': [2]
}],
],
struct_column_schema="""
name: "f1"
type: INT
int_domain {
is_categorical: true
}
""",
expected_result_text_protos={
('c',):
"""
type: STRUCT
struct_stats {
common_stats {
num_non_missing: 2
min_num_values: 1
max_num_values: 2
avg_num_values: 1.5
num_values_histogram {
buckets {
low_value: 1.0
high_value: 2.0
sample_count: 1.0
}
buckets {
low_value: 2.0
high_value: 2.0
sample_count: 1.0
}
type: QUANTILES
}
tot_num_values: 3
}
}""",
('c', 'f1'):
"""
string_stats {
common_stats {
num_non_missing: 3
min_num_values: 1
max_num_values: 3
avg_num_values: 2.0
num_values_histogram {
buckets {
low_value: 1.0
high_value: 2.0
sample_count: 1.5
}
buckets {
low_value: 2.0
high_value: 3.0
sample_count: 1.5
}
type: QUANTILES
}
tot_num_values: 6
}
avg_length: 1.0
}""",
('c', 'f2'):
"""
type: STRING
string_stats {
common_stats {
num_non_missing: 2
num_missing: 1
min_num_values: 1
max_num_values: 1
avg_num_values: 1.0
num_values_histogram {
buckets {
low_value: 1.0
high_value: 1.0
sample_count: 1.0
}
buckets {
low_value: 1.0
high_value: 1.0
sample_count: 1.0
}
type: QUANTILES
}
tot_num_values: 2
}
avg_length: 1.0
}""",
}),
dict(
testcase_name='nulls',
struct_column_as_list_dicts=[
[ # first element of 'c'
{
'f1': [1.0],
# f2 is missing.
},
{
# f1, f2 are missing.
}
],
None, # second element of 'c' -- missing/null.
[ # third element of 'c' -- a list<struct> of length 2.
{
'f2': [2.0],
# f1 is missing
},
None, # f1, f2 are missing
],
[ # fourth element of 'c'
None, # f1, f2 are missing
],
[], # fifth element of 'c'; note this is not counted as missing.
],
expected_result_text_protos={
('c',): """
type: STRUCT
struct_stats {
common_stats {
num_non_missing: 4
max_num_values: 2
avg_num_values: 1.25
num_values_histogram {
buckets {
high_value: 2.0
sample_count: 2.0
}
buckets {
low_value: 2.0
high_value: 2.0
sample_count: 2.0
}
type: QUANTILES
}
tot_num_values: 5
}
}
""",
('c', 'f1'): """
type: FLOAT
num_stats {
common_stats {
num_non_missing: 1
num_missing: 4
min_num_values: 1
max_num_values: 1
avg_num_values: 1.0
num_values_histogram {
buckets {
low_value: 1.0
high_value: 1.0
sample_count: 0.5
}
buckets {
low_value: 1.0
high_value: 1.0
sample_count: 0.5
}
type: QUANTILES
}
tot_num_values: 1
}
mean: 1.0
min: 1.0
median: 1.0
max: 1.0
histograms {
buckets {
low_value: 1.0
high_value: 1.0
sample_count: 1.0
}
}
histograms {
buckets {
low_value: 1.0
high_value: 1.0
sample_count: 0.25
}
buckets {
low_value: 1.0
high_value: 1.0
sample_count: 0.25
}
buckets {
low_value: 1.0
high_value: 1.0
sample_count: 0.25
}
buckets {
low_value: 1.0
high_value: 1.0
sample_count: 0.25
}
type: QUANTILES
}
}""",
('c', 'f2'): """
type: FLOAT
num_stats {
common_stats {
num_non_missing: 1
num_missing: 4
min_num_values: 1
max_num_values: 1
avg_num_values: 1.0
num_values_histogram {
buckets {
low_value: 1.0
high_value: 1.0
sample_count: 0.5
}
buckets {
low_value: 1.0
high_value: 1.0
sample_count: 0.5
}
type: QUANTILES
}
tot_num_values: 1
}
mean: 2.0
min: 2.0
median: 2.0
max: 2.0
histograms {
buckets {
low_value: 2.0
high_value: 2.0
sample_count: 1.0
}
}
histograms {
buckets {
low_value: 2.0
high_value: 2.0
sample_count: 0.25
}
buckets {
low_value: 2.0
high_value: 2.0
sample_count: 0.25
}
buckets {
low_value: 2.0
high_value: 2.0
sample_count: 0.25
}
buckets {
low_value: 2.0
high_value: 2.0
sample_count: 0.25
}
type: QUANTILES
}
}""",
}),
dict(
testcase_name='struct_not_nested_in_list',
struct_column_as_list_dicts=[
{'a': [b'meow', b'nyan']},
{'b': [b'foo']},
],
expected_result_text_protos={
('c',): """
type: STRUCT
struct_stats {
common_stats {
num_non_missing: 2
min_num_values: 1
max_num_values: 1
avg_num_values: 1.0
num_values_histogram {
buckets {
low_value: 1.0
high_value: 1.0
sample_count: 1.0
}
buckets {
low_value: 1.0
high_value: 1.0
sample_count: 1.0
}
type: QUANTILES
}
tot_num_values: 2
}
}""",
('c', 'a'): """
type: STRING
string_stats {
common_stats {
num_non_missing: 1
num_missing: 1
min_num_values: 2
max_num_values: 2
avg_num_values: 2.0
num_values_histogram {
buckets {
low_value: 2.0
high_value: 2.0
sample_count: 0.5
}
buckets {
low_value: 2.0
high_value: 2.0
sample_count: 0.5
}
type: QUANTILES
}
tot_num_values: 2
}
avg_length: 4.0
}""",
('c', 'b'): """
type: STRING
string_stats {
common_stats {
num_non_missing: 1
num_missing: 1
min_num_values: 1
max_num_values: 1
avg_num_values: 1.0
num_values_histogram {
buckets {
low_value: 1.0
high_value: 1.0
sample_count: 0.5
}
buckets {
low_value: 1.0
high_value: 1.0
sample_count: 0.5
}
type: QUANTILES
}
tot_num_values: 1
}
avg_length: 3.0
}""",
}
),
]
class BasicStatsGeneratorStructStatsTest(test_util.CombinerStatsGeneratorTest,
                                         parameterized.TestCase):
  """Tests BasicStatsGenerator over struct-typed ('c') columns."""

  @parameterized.named_parameters(*_STRUCT_TEST_CASES)
  def test_struct(self, struct_column_as_list_dicts,
                  expected_result_text_protos, struct_column_schema=None):
    """Runs one _STRUCT_TEST_CASES entry, split across two record batches."""
    mid = len(struct_column_as_list_dicts) // 2
    # Also test merging multiple batches.
    batches = [
        pa.RecordBatch.from_arrays(
            [pa.array(struct_column_as_list_dicts[:mid])], ['c']),
        pa.RecordBatch.from_arrays(
            [pa.array(struct_column_as_list_dicts[mid:])], ['c']),
    ]
    # Parse each expected pbtxt and attach the feature path to the proto,
    # since the case dicts omit the `path` field.
    expected_result = {}
    for k, v in expected_result_text_protos.items():
      feature_stats = text_format.Parse(
          v, statistics_pb2.FeatureNameStatistics())
      feature_path = types.FeaturePath(k)
      feature_stats.path.CopyFrom(feature_path.to_proto())
      expected_result[types.FeaturePath(k)] = feature_stats
    # Optionally build a Schema with 'c' as a STRUCT feature containing the
    # provided sub-feature pbtxt.
    schema = None
    if struct_column_schema is not None:
      schema = text_format.Parse("""
          feature {
            name: "c"
            type: STRUCT
            struct_domain {
            }
          }""", schema_pb2.Schema())
      schema.feature[0].struct_domain.feature.add().CopyFrom(text_format.Parse(
          struct_column_schema, schema_pb2.Feature()))
    generator = basic_stats_generator.BasicStatsGenerator(
        schema=schema,
        num_values_histogram_buckets=2, num_histogram_buckets=3,
        num_quantiles_histogram_buckets=4)
    self.assertCombinerOutputEqual(batches, generator, expected_result)

  def test_with_weights(self):
    """Weighted stats for a struct column, weight feature being 'w'."""
    batches = [
        pa.RecordBatch.from_arrays([
            pa.array([[1.0], [2.0]]),
            pa.array([[{
                'f1': [{
                    'f2': [1, 2]
                }, {
                    'f2': [0]
                }]
            }], [{
                'f1': [{
                    'f2': [3, 3]
                }]
            }]])
        ], ['w', 'c'])
    ]
    expected_result = {
        types.FeaturePath(['c']):
            text_format.Parse(
                """
        type: STRUCT
        struct_stats {
          common_stats {
            num_non_missing: 2
            min_num_values: 1
            max_num_values: 1
            avg_num_values: 1.0
            num_values_histogram {
              buckets {
                low_value: 1.0
                high_value: 1.0
                sample_count: 1.0
              }
              buckets {
                low_value: 1.0
                high_value: 1.0
                sample_count: 1.0
              }
              type: QUANTILES
            }
            weighted_common_stats {
              num_non_missing: 3.0
              avg_num_values: 1.0
              tot_num_values: 3.0
            }
            tot_num_values: 2
          }
        }
        path {
          step: "c"
        }""", statistics_pb2.FeatureNameStatistics()),
        types.FeaturePath(['c', 'f1']):
            text_format.Parse(
                """
        type: STRUCT
        struct_stats {
          common_stats {
            num_non_missing: 2
            min_num_values: 1
            max_num_values: 2
            avg_num_values: 1.5
            num_values_histogram {
              buckets {
                low_value: 1.0
                high_value: 2.0
                sample_count: 1.0
              }
              buckets {
                low_value: 2.0
                high_value: 2.0
                sample_count: 1.0
              }
              type: QUANTILES
            }
            weighted_common_stats {
              num_non_missing: 3.0
              avg_num_values: 1.3333333
              tot_num_values: 4.0
            }
            tot_num_values: 3
          }
        }
        path {
          step: "c"
          step: "f1"
        }""", statistics_pb2.FeatureNameStatistics()),
        types.FeaturePath(['c', 'f1', 'f2']):
            text_format.Parse(
                """
        num_stats {
          common_stats {
            num_non_missing: 3
            min_num_values: 1
            max_num_values: 2
            avg_num_values: 1.666667
            num_values_histogram {
              buckets {
                low_value: 1.0
                high_value: 2.0
                sample_count: 1.5
              }
              buckets {
                low_value: 2.0
                high_value: 2.0
                sample_count: 1.5
              }
              type: QUANTILES
            }
            weighted_common_stats {
              num_non_missing: 4.0
              avg_num_values: 1.75
              tot_num_values: 7.0
            }
            tot_num_values: 5
          }
          mean: 1.8
          std_dev: 1.1661904
          num_zeros: 1
          median: 2.0
          max: 3.0
          histograms {
            buckets {
              high_value: 1.0
              sample_count: 1.0
            }
            buckets {
              low_value: 1.0
              high_value: 2.0
              sample_count: 1.0
            }
            buckets {
              low_value: 2.0
              high_value: 3.0
              sample_count: 3.0
            }
          }
          histograms {
            buckets {
              high_value: 1.0
              sample_count: 1.25
            }
            buckets {
              low_value: 1.0
              high_value: 2.0
              sample_count: 1.25
            }
            buckets {
              low_value: 2.0
              high_value: 3.0
              sample_count: 1.25
            }
            buckets {
              low_value: 3.0
              high_value: 3.0
              sample_count: 1.25
            }
            type: QUANTILES
          }
          weighted_numeric_stats {
            mean: 2.1428571
            std_dev: 1.1248583
            median: 3.0
            histograms {
              buckets {
                high_value: 1.0
                sample_count: 1.0033333
              }
              buckets {
                low_value: 1.0
                high_value: 2.0
                sample_count: 1.0033333
              }
              buckets {
                low_value: 2.0
                high_value: 3.0
                sample_count: 4.9933333
              }
            }
            histograms {
              buckets {
                high_value: 1.0
                sample_count: 1.75
              }
              buckets {
                low_value: 1.0
                high_value: 3.0
                sample_count: 1.75
              }
              buckets {
                low_value: 3.0
                high_value: 3.0
                sample_count: 1.75
              }
              buckets {
                low_value: 3.0
                high_value: 3.0
                sample_count: 1.75
              }
              type: QUANTILES
            }
          }
        }
        path {
          step: "c"
          step: "f1"
          step: "f2"
        }""", statistics_pb2.FeatureNameStatistics()),
        types.FeaturePath(['w']):
            text_format.Parse(
                """
        type: FLOAT
        num_stats {
          common_stats {
            num_non_missing: 2
            min_num_values: 1
            max_num_values: 1
            avg_num_values: 1.0
            num_values_histogram {
              buckets {
                low_value: 1.0
                high_value: 1.0
                sample_count: 1.0
              }
              buckets {
                low_value: 1.0
                high_value: 1.0
                sample_count: 1.0
              }
              type: QUANTILES
            }
            weighted_common_stats {
              num_non_missing: 3.0
              avg_num_values: 1.0
              tot_num_values: 3.0
            }
            tot_num_values: 2
          }
          mean: 1.5
          std_dev: 0.5
          min: 1.0
          median: 2.0
          max: 2.0
          histograms {
            buckets {
              low_value: 1.0
              high_value: 1.33333333333
              sample_count: 0.995555555556
            }
            buckets {
              low_value: 1.33333333333
              high_value: 1.66666666667
              sample_count: 0.00222222222222
            }
            buckets {
              low_value: 1.66666666667
              high_value: 2.0
              sample_count: 1.00222222222
            }
          }
          histograms {
            buckets {
              low_value: 1.0
              high_value: 1.0
              sample_count: 0.5
            }
            buckets {
              low_value: 1.0
              high_value: 2.0
              sample_count: 0.5
            }
            buckets {
              low_value: 2.0
              high_value: 2.0
              sample_count: 0.5
            }
            buckets {
              low_value: 2.0
              high_value: 2.0
              sample_count: 0.5
            }
            type: QUANTILES
          }
          weighted_numeric_stats {
            mean: 1.66666666667
            std_dev: 0.471404520791
            median: 2.0
            histograms {
              buckets {
                low_value: 1.0
                high_value: 1.33333333333
                sample_count: 0.993333333333
              }
              buckets {
                low_value: 1.33333333333
                high_value: 1.66666666667
                sample_count: 0.00333333333333
              }
              buckets {
                low_value: 1.66666666667
                high_value: 2.0
                sample_count: 2.00333333333
              }
            }
            histograms {
              buckets {
                low_value: 1.0
                high_value: 1.0
                sample_count: 0.75
              }
              buckets {
                low_value: 1.0
                high_value: 2.0
                sample_count: 0.75
              }
              buckets {
                low_value: 2.0
                high_value: 2.0
                sample_count: 0.75
              }
              buckets {
                low_value: 2.0
                high_value: 2.0
                sample_count: 0.75
              }
              type: QUANTILES
            }
          }
        }
        path {
          step: "w"
        }
        """, statistics_pb2.FeatureNameStatistics()),
    }
    generator = basic_stats_generator.BasicStatsGenerator(
        example_weight_map=ExampleWeightMap(weight_feature='w'),
        num_values_histogram_buckets=2, num_histogram_buckets=3,
        num_quantiles_histogram_buckets=4)
    self.assertCombinerOutputEqual(batches, generator, expected_result)
_NESTED_TEST_CASES = [
dict(
testcase_name='nested',
batches=[
pa.RecordBatch.from_arrays([
pa.array([None, None],
type=pa.large_list(
pa.large_list(pa.list_(pa.large_binary())))),
pa.array([[1.0], [1.0]]),
], ['a', 'w']),
pa.RecordBatch.from_arrays([
pa.array([
[[[b'a', b'a'], [b'a'], None], None, []],
[[[b'a', b'a']], [[b'a']]],
]),
pa.array([[1.0], [1.0]]),
], ['a', 'w']),
# in this batch, 'a' has the same nestedness, but its type is
# unknown. Note that here pa.null() means pa.list_(<unknown_type>).
pa.RecordBatch.from_arrays([
pa.array([
[[None, None], None, []],
], type=pa.list_(
pa.list_(pa.null()))),
pa.array([[1.0]])
], ['a', 'w'])
],
weight_column='w',
expected_result={
types.FeaturePath(['a']):
"""
type: STRING
string_stats {
common_stats {
num_non_missing: 3
min_num_values: 2
max_num_values: 3
avg_num_values: 2.666667
num_values_histogram {
buckets {
low_value: 2.0
high_value: 3.0
sample_count: 1.5
}
buckets {
low_value: 3.0
high_value: 3.0
sample_count: 1.5
}
type: QUANTILES
}
weighted_common_stats {
num_non_missing: 3.0
avg_num_values: 2.6666667
tot_num_values: 8.0
}
tot_num_values: 8
presence_and_valency_stats {
num_non_missing: 3
min_num_values: 2
max_num_values: 3
tot_num_values: 8
}
presence_and_valency_stats {
num_non_missing: 6
num_missing: 2
max_num_values: 3
tot_num_values: 7
}
presence_and_valency_stats {
num_non_missing: 4
num_missing: 3
min_num_values: 1
max_num_values: 2
tot_num_values: 6
}
weighted_presence_and_valency_stats {
num_non_missing: 3.0
avg_num_values: 2.6666667
tot_num_values: 8.0
}
weighted_presence_and_valency_stats {
num_non_missing: 6.0
num_missing: 2.0
avg_num_values: 1.1666667
tot_num_values: 7.0
}
weighted_presence_and_valency_stats {
num_non_missing: 4.0
num_missing: 3.0
avg_num_values: 1.5
tot_num_values: 6.0
}
}
avg_length: 1.0
}
custom_stats {
name: "level_2_value_list_length"
histogram {
buckets {
high_value: 1.0
sample_count: 1.5
}
buckets {
low_value: 1.0
high_value: 3.0
sample_count: 1.5
}
type: QUANTILES
}
}
custom_stats {
name: "level_3_value_list_length"
histogram {
buckets {
low_value: 1.0
high_value: 2.0
sample_count: 3.0
}
buckets {
low_value: 2.0
high_value: 2.0
sample_count: 3.0
}
type: QUANTILES
}
}
path {
step: "a"
}"""}),
dict(
testcase_name='nested_null',
batches=[
pa.RecordBatch.from_arrays([
pa.array([[None, None], None, []],
type=pa.large_list(pa.null()))
], ['a']),
],
expected_result={types.FeaturePath(['a']): """
type: STRING
string_stats {
common_stats {
num_non_missing: 2
max_num_values: 2
avg_num_values: 1.0
num_values_histogram {
buckets {
high_value: 2.0
sample_count: 1.0
}
buckets {
low_value: 2.0
high_value: 2.0
sample_count: 1.0
}
type: QUANTILES
}
tot_num_values: 2
presence_and_valency_stats {
num_non_missing: 2
max_num_values: 2
tot_num_values: 2
}
presence_and_valency_stats {
num_missing: 2
}
}
}
path {
step: "a"
}"""}),
]
class BasicStatsGeneratorNestedListTest(
    test_util.CombinerStatsGeneratorTest, parameterized.TestCase):
  """Tests for basic stats over nested (list-of-list) features."""

  # pylint: disable=g-error-prone-assert-raises
  @parameterized.named_parameters(*_NESTED_TEST_CASES)
  def test_nested_list(self, batches, expected_result, weight_column=None):
    stats_generator = basic_stats_generator.BasicStatsGenerator(
        num_values_histogram_buckets=2, num_histogram_buckets=3,
        num_quantiles_histogram_buckets=4,
        example_weight_map=ExampleWeightMap(weight_feature=weight_column))
    # Parse the expected pbtxt strings into FeatureNameStatistics protos.
    parsed_expected = {}
    for feature_path, pbtxt in expected_result.items():
      parsed_expected[feature_path] = text_format.Parse(
          pbtxt, statistics_pb2.FeatureNameStatistics())
    self.assertCombinerOutputEqual(
        batches, stats_generator, parsed_expected,
        only_match_expected_feature_stats=True)

  def test_basic_stats_generator_different_nest_levels(self):
    """Merging batches whose 'a' columns differ in nest depth must fail."""
    shallow = pa.RecordBatch.from_arrays([pa.array([[1]])], ['a'])
    deep = pa.RecordBatch.from_arrays([pa.array([[[1]]])], ['a'])
    stats_generator = basic_stats_generator.BasicStatsGenerator()
    with self.assertRaisesRegex(
        ValueError, 'Unable to merge common stats with different nest levels'):
      self.assertCombinerOutputEqual([shallow, deep], stats_generator, None)
# Standard absl test entry point.
if __name__ == '__main__':
  absltest.main()
| 31.454161
| 82
| 0.391264
| 8,607
| 96,753
| 4.140351
| 0.046241
| 0.087664
| 0.112386
| 0.042682
| 0.874986
| 0.847963
| 0.834577
| 0.806263
| 0.786564
| 0.758334
| 0
| 0.091718
| 0.534257
| 96,753
| 3,075
| 83
| 31.46439
| 0.699103
| 0.021436
| 0
| 0.615752
| 0
| 0
| 0.292374
| 0.004073
| 0
| 0
| 0
| 0
| 0.03222
| 1
| 0.02864
| false
| 0
| 0.01432
| 0
| 0.046539
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a1d7e76fca05857307c56a1a2c4a152c060b108
| 3,508
|
py
|
Python
|
src/apps/climsoft/schemas/station_schema.py
|
opencdms/opencdms-api
|
f1ed6e1d883025a8658746fe457e0c975718c7be
|
[
"MIT"
] | 3
|
2020-12-01T09:25:18.000Z
|
2022-02-14T23:57:34.000Z
|
src/apps/climsoft/schemas/station_schema.py
|
opencdms/opencdms-api
|
f1ed6e1d883025a8658746fe457e0c975718c7be
|
[
"MIT"
] | 11
|
2021-12-05T10:09:00.000Z
|
2022-02-17T08:11:22.000Z
|
src/apps/climsoft/schemas/station_schema.py
|
opencdms/opencdms-api
|
f1ed6e1d883025a8658746fe457e0c975718c7be
|
[
"MIT"
] | 2
|
2021-03-10T19:03:05.000Z
|
2021-12-11T08:36:04.000Z
|
import datetime
from typing import List, Optional
from pydantic import BaseModel, constr
class CreateStation(BaseModel):
    """Request body for creating a climsoft Station record.

    API field names are camelCase; the Config.fields mapping aliases them to
    the snake_case attribute names of the backing ORM model.
    """

    stationId: constr(max_length=255)
    stationName: constr(max_length=255)
    wmoid: Optional[constr(max_length=20)]   # WMO station identifier
    icaoid: Optional[constr(max_length=20)]  # ICAO station identifier
    latitude: float
    qualifier: Optional[constr(max_length=20)]
    longitude: float
    elevation: constr(max_length=255)
    geoLocationMethod: Optional[constr(max_length=255)]
    geoLocationAccuracy: Optional[float]
    # NOTE(review): openingDatetime is optional while closingDatetime is
    # required -- looks inverted for a create operation; confirm intended.
    openingDatetime: Optional[str]
    closingDatetime: str
    country: constr(max_length=50)
    authority: Optional[constr(max_length=255)]
    adminRegion: Optional[constr(max_length=255)]
    drainageBasin: Optional[constr(max_length=255)]
    wacaSelection: bool
    cptSelection: bool
    stationOperational: bool

    class Config:
        # Maps API (camelCase) field names to ORM attribute names.
        fields = {
            "stationId": "station_id",
            "stationName": "station_name",
            "geoLocationMethod": "geolocation_method",
            "geoLocationAccuracy": "geolocation_accuracy",
            "openingDatetime": "opening_datetime",
            "closingDatetime": "closing_datetime",
            "adminRegion": "admin_region",
            "drainageBasin": "drainage_basin",
            "wacaSelection": "waca_selection",
            "cptSelection": "cpt_selection",
            "stationOperational": "station_operational"
        }
class UpdateStation(BaseModel):
    """Request body for updating a Station.

    Same shape as CreateStation minus the immutable stationId (the id comes
    from the URL path in the update endpoint -- presumably; confirm against
    the router).
    """

    stationName: constr(max_length=255)
    wmoid: Optional[constr(max_length=20)]   # WMO station identifier
    icaoid: Optional[constr(max_length=20)]  # ICAO station identifier
    latitude: float
    qualifier: Optional[constr(max_length=20)]
    longitude: float
    elevation: constr(max_length=255)
    geoLocationMethod: Optional[constr(max_length=255)]
    geoLocationAccuracy: Optional[float]
    openingDatetime: Optional[str]
    closingDatetime: str
    country: constr(max_length=50)
    authority: Optional[constr(max_length=255)]
    adminRegion: Optional[constr(max_length=255)]
    drainageBasin: Optional[constr(max_length=255)]
    wacaSelection: bool
    cptSelection: bool
    stationOperational: bool

    class Config:
        # Maps API (camelCase) field names to ORM attribute names.
        fields = {
            "stationName": "station_name",
            "geoLocationMethod": "geolocation_method",
            "geoLocationAccuracy": "geolocation_accuracy",
            "openingDatetime": "opening_datetime",
            "closingDatetime": "closing_datetime",
            "adminRegion": "admin_region",
            "drainageBasin": "drainage_basin",
            "wacaSelection": "waca_selection",
            "cptSelection": "cpt_selection",
            "stationOperational": "station_operational"
        }
class Station(CreateStation):
    """Station as returned by the API (ORM-backed response model)."""

    # Re-declared with the same types as CreateStation; the declarations are
    # redundant but harmless.
    openingDatetime: Optional[str]
    closingDatetime: str

    class Config:
        orm_mode = True  # allow construction from ORM row objects
        allow_population_by_field_name = True
        # Maps API (camelCase) field names to ORM attribute names.
        fields = {
            "stationId": "station_id",
            "stationName": "station_name",
            "geoLocationMethod": "geolocation_method",
            "geoLocationAccuracy": "geolocation_accuracy",
            "openingDatetime": "opening_datetime",
            "closingDatetime": "closing_datetime",
            "adminRegion": "admin_region",
            "drainageBasin": "drainage_basin",
            "wacaSelection": "waca_selection",
            "cptSelection": "cpt_selection",
            "stationOperational": "station_operational"
        }
class StationResponse(BaseModel):
    """Envelope for station endpoints: result list plus status message."""

    result: List[Station]
    message: str
    status: str
| 33.409524
| 58
| 0.659635
| 305
| 3,508
| 7.396721
| 0.222951
| 0.083777
| 0.139628
| 0.142731
| 0.874557
| 0.855053
| 0.855053
| 0.855053
| 0.855053
| 0.855053
| 0
| 0.020553
| 0.237172
| 3,508
| 104
| 59
| 33.730769
| 0.822496
| 0
| 0
| 0.826087
| 0
| 0
| 0.265754
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.032609
| 0
| 0.565217
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
0a4ec326be8a8b90a230b15c917c3f31d977f8d5
| 6,734
|
py
|
Python
|
L1Trigger/TrackFindingTracklet/python/Tracklet_cfi.py
|
thesps/cmssw
|
ad5315934948ce96699b29cc1d5b03a59f99634f
|
[
"Apache-2.0"
] | null | null | null |
L1Trigger/TrackFindingTracklet/python/Tracklet_cfi.py
|
thesps/cmssw
|
ad5315934948ce96699b29cc1d5b03a59f99634f
|
[
"Apache-2.0"
] | null | null | null |
L1Trigger/TrackFindingTracklet/python/Tracklet_cfi.py
|
thesps/cmssw
|
ad5315934948ce96699b29cc1d5b03a59f99634f
|
[
"Apache-2.0"
] | null | null | null |
import FWCore.ParameterSet.Config as cms
# Floating-point tracklet track producer (non-emulated reference version).
TTTracksFromTracklet = cms.EDProducer("L1TrackProducer",
    SimTrackSource = cms.InputTag("g4SimHits"),
    SimVertexSource = cms.InputTag("g4SimHits"),
    TTStubSource = cms.InputTag("TTStubsFromPhase2TrackerDigis","StubAccepted"),
    MCTruthClusterInputTag = cms.InputTag("TTClusterAssociatorFromPixelDigis", "ClusterAccepted"),
    MCTruthStubInputTag = cms.InputTag("TTStubAssociatorFromPixelDigis", "StubAccepted"),
    TrackingParticleInputTag = cms.InputTag("mix", "MergedTrackTruth"),
    TrackingVertexInputTag = cms.InputTag("mix", "MergedTrackTruth"),
    BeamSpotSource = cms.InputTag("offlineBeamSpot"),
    asciiFileName = cms.untracked.string(""),
    failscenario = cms.untracked.int32(0),
    trackerGeometryType = cms.untracked.string("") #tilted barrel is assumed, use "flat" if running on flat
)
# FPGA-emulation tracklet track producer (standard, non-extended tracking:
# Extended=False, 4-parameter fit).
TTTracksFromTrackletEmulation = cms.EDProducer("L1FPGATrackProducer",
    # general L1 tracking inputs
    SimTrackSource = cms.InputTag("g4SimHits"),
    SimVertexSource = cms.InputTag("g4SimHits"),
    TTStubSource = cms.InputTag("TTStubsFromPhase2TrackerDigis","StubAccepted"),
    MCTruthClusterInputTag = cms.InputTag("TTClusterAssociatorFromPixelDigis", "ClusterAccepted"),
    MCTruthStubInputTag = cms.InputTag("TTStubAssociatorFromPixelDigis", "StubAccepted"),
    TrackingParticleInputTag = cms.InputTag("mix", "MergedTrackTruth"),
    TrackingVertexInputTag = cms.InputTag("mix", "MergedTrackTruth"),
    BeamSpotSource = cms.InputTag("offlineBeamSpot"),
    asciiFileName = cms.untracked.string(""),
    failscenario = cms.untracked.int32(0),
    trackerGeometryType = cms.untracked.string(""), #tilted barrel is assumed, use "flat" if running on flat
    # specific emulation inputs
    # (if running on CRAB use "../../fitpattern.txt" etc instead)
    Extended=cms.untracked.bool(False),
    Hnpar=cms.untracked.int32(4),
    fitPatternFile = cms.FileInPath('L1Trigger/TrackFindingTracklet/data/fitpattern.txt'),
    memoryModulesFile = cms.FileInPath('L1Trigger/TrackFindingTracklet/data/memorymodules_hourglass.dat'), #change it to extended if running on it
    processingModulesFile = cms.FileInPath('L1Trigger/TrackFindingTracklet/data/processingmodules_hourglass.dat'), #change it to extended if running on it
    wiresFile = cms.FileInPath('L1Trigger/TrackFindingTracklet/data/wires_hourglass.dat'), #change it to extended if running on it
    DTCLinkFile = cms.FileInPath('L1Trigger/TrackFindingTracklet/data/calcNumDTCLinks.txt'),
    moduleCablingFile = cms.FileInPath('L1Trigger/TrackFindingTracklet/data/modules_T5v3_27SP_nonant_tracklet.dat')
)
# FPGA-emulation tracklet track producer, extended-tracking variant: differs
# from TTTracksFromTrackletEmulation only in Extended=True, Hnpar=5, and the
# *_hourglassExtended.dat configuration files.
TTTracksFromExtendedTrackletEmulation = cms.EDProducer("L1FPGATrackProducer",
    # general L1 tracking inputs
    SimTrackSource = cms.InputTag("g4SimHits"),
    SimVertexSource = cms.InputTag("g4SimHits"),
    TTStubSource = cms.InputTag("TTStubsFromPhase2TrackerDigis","StubAccepted"),
    MCTruthClusterInputTag = cms.InputTag("TTClusterAssociatorFromPixelDigis", "ClusterAccepted"),
    MCTruthStubInputTag = cms.InputTag("TTStubAssociatorFromPixelDigis", "StubAccepted"),
    TrackingParticleInputTag = cms.InputTag("mix", "MergedTrackTruth"),
    TrackingVertexInputTag = cms.InputTag("mix", "MergedTrackTruth"),
    BeamSpotSource = cms.InputTag("offlineBeamSpot"),
    asciiFileName = cms.untracked.string(""),
    failscenario = cms.untracked.int32(0),
    trackerGeometryType = cms.untracked.string(""), #tilted barrel is assumed, use "flat" if running on flat
    # specific emulation inputs
    # (if running on CRAB use "../../fitpattern.txt" etc instead)
    Extended=cms.untracked.bool(True),
    Hnpar=cms.untracked.int32(5),
    fitPatternFile = cms.FileInPath('L1Trigger/TrackFindingTracklet/data/fitpattern.txt'),
    memoryModulesFile = cms.FileInPath('L1Trigger/TrackFindingTracklet/data/memorymodules_hourglassExtended.dat'), #change it to extended if running on it
    processingModulesFile = cms.FileInPath('L1Trigger/TrackFindingTracklet/data/processingmodules_hourglassExtended.dat'), #change it to extended if running on it
    wiresFile = cms.FileInPath('L1Trigger/TrackFindingTracklet/data/wires_hourglassExtended.dat'), #change it to extended if running on it
    DTCLinkFile = cms.FileInPath('L1Trigger/TrackFindingTracklet/data/calcNumDTCLinks.txt'),
    moduleCablingFile = cms.FileInPath('L1Trigger/TrackFindingTracklet/data/modules_T5v3_27SP_nonant_tracklet.dat')
)
| 102.030303
| 206
| 0.509504
| 408
| 6,734
| 8.375
| 0.218137
| 0.077261
| 0.077261
| 0.147498
| 0.940299
| 0.940299
| 0.940299
| 0.940299
| 0.940299
| 0.940299
| 0
| 0.012462
| 0.416097
| 6,734
| 65
| 207
| 103.6
| 0.856562
| 0.092219
| 0
| 0.678571
| 0
| 0
| 0.231059
| 0.168252
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.017857
| 0
| 0.017857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a539b08e0cee9480760350df317f0304497c381
| 284,546
|
py
|
Python
|
src/genie/libs/parser/junos/tests/test_show_system.py
|
danielgraziano/genieparser
|
74d5e1ded9794561af1ac3284307c58365617673
|
[
"Apache-2.0"
] | 1
|
2020-12-01T00:45:34.000Z
|
2020-12-01T00:45:34.000Z
|
src/genie/libs/parser/junos/tests/test_show_system.py
|
dalwar23/genieparser
|
a9df45d3ee23f107bfb55915068e90782f92fc99
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/junos/tests/test_show_system.py
|
dalwar23/genieparser
|
a9df45d3ee23f107bfb55915068e90782f92fc99
|
[
"Apache-2.0"
] | 2
|
2021-02-12T21:42:30.000Z
|
2021-02-12T21:47:51.000Z
|
# Python
import unittest
from unittest.mock import Mock
# ATS
from pyats.topology import Device
# Metaparset
from genie.metaparser.util.exceptions import (
SchemaEmptyParserError,
SchemaMissingKeyError,
)
# Parser
from genie.libs.parser.junos.show_system import (
ShowSystemUptime, ShowSystemUptimeNoForwarding, ShowSystemBuffers,
ShowSystemCommit, ShowSystemQueues, ShowSystemQueuesNoForwarding,
ShowSystemUsers, ShowSystemBuffersNoForwarding, ShowSystemUsers,
ShowSystemStorage, ShowSystemCoreDumps, ShowSystemCoreDumpsNoForwarding,
ShowSystemStorageNoForwarding, ShowSystemStatistics,
ShowSystemStatisticsNoForwarding, ShowSystemInformation, ShowSystemConnections)
# =========================================================
# Unit test for show system information
# =========================================================
class TestShowSystemInformation(unittest.TestCase):
    """Unit tests for the ShowSystemInformation parser."""

    device = Device(name="aDevice")
    maxDiff = None

    empty_output = {"execute.return_value": ""}

    golden_parsed_output_1 = {
        "system-information": {
            "hardware-model": "vmx",
            "host-name": "P4",
            "os-name": "junos",
            "os-version": "19.2R1.8"
        }
    }

    golden_output_1 = {
        "execute.return_value":
        """
        show system information
        Model: vmx
        Family: junos
        Junos: 19.2R1.8
        Hostname: P4
    """
    }

    def test_empty(self):
        """Empty device output must raise SchemaEmptyParserError."""
        self.device = Mock(**self.empty_output)
        obj = ShowSystemInformation(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            # Fix: drop the unused `parsed_output =` binding (F841); only the
            # raised exception matters here.
            obj.parse()

    def test_golden_1(self):
        """Golden CLI output parses into the expected structure."""
        self.device = Mock(**self.golden_output_1)
        obj = ShowSystemInformation(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_1)
# =========================================================
# Unit test for show system buffers
# =========================================================
class TestShowSystemBuffers(unittest.TestCase):
    """Unit tests for the ShowSystemBuffers parser."""

    device = Device(name="aDevice")
    maxDiff = None

    empty_output = {"execute.return_value": ""}

    golden_parsed_output_1 = {
        "memory-statistics": {
            "cached-bytes": "1971",
            "cached-jumbo-clusters-16k": "0",
            "cached-jumbo-clusters-4k": "2",
            "cached-jumbo-clusters-9k": "0",
            "cached-mbuf-clusters": "714",
            "cached-mbufs": "2142",
            "cluster-failures": "0",
            "current-bytes-in-use": "1179",
            "current-jumbo-clusters-16k": "0",
            "current-jumbo-clusters-4k": "0",
            "current-jumbo-clusters-9k": "0",
            "current-mbuf-clusters": "516",
            "current-mbufs": "588",
            "io-initiated": "0",
            "jumbo-cluster-failures-16k": "0",
            "jumbo-cluster-failures-4k": "0",
            "jumbo-cluster-failures-9k": "0",
            "max-jumbo-clusters-16k": "10396",
            "max-jumbo-clusters-4k": "62377",
            "max-jumbo-clusters-9k": "18482",
            "max-mbuf-clusters": "124756",
            "mbuf-failures": "0",
            "packet-count": "513",
            "packet-failures": "0",
            "packet-free": "499",
            "sfbuf-requests-delayed": "0",
            "sfbuf-requests-denied": "0",
            "total-bytes": "3150",
            "total-jumbo-clusters-16k": "0",
            "total-jumbo-clusters-4k": "2",
            "total-jumbo-clusters-9k": "0",
            "total-mbuf-clusters": "1230",
            "total-mbufs": "2730",
        }
    }

    golden_output_1 = {
        "execute.return_value":
        """
        show system buffers
        588/2142/2730 mbufs in use (current/cache/total)
        516/714/1230/124756 mbuf clusters in use (current/cache/total/max)
        513/499 mbuf+clusters out of packet secondary zone in use (current/cache)
        0/2/2/62377 4k (page size) jumbo clusters in use (current/cache/total/max)
        0/0/0/18482 9k (page size) jumbo clusters in use (current/cache/total/max)
        0/0/0/10396 16k (page size) jumbo clusters in use (current/cache/total/max)
        1179K/1971K/3150K bytes allocated to network (current/cache/total)
        0/0/0 requests for mbufs denied (mbufs/clusters/mbuf+clusters)
        0/0/0 requests for jumbo clusters denied (4k/9k/16k)
        0 requests for sfbufs denied
        0 requests for sfbufs delayed
        0 requests for I/O initiated by sendfile
    """
    }

    def test_empty(self):
        """Empty device output must raise SchemaEmptyParserError."""
        self.device = Mock(**self.empty_output)
        obj = ShowSystemBuffers(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            # Fix: drop the unused `parsed_output =` binding (F841); only the
            # raised exception matters here.
            obj.parse()

    def test_golden_1(self):
        """Golden CLI output parses into the expected structure."""
        self.device = Mock(**self.golden_output_1)
        obj = ShowSystemBuffers(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_1)
# =========================================================
# Unit test for show system Users
# =========================================================
class TestShowSystemUsers(unittest.TestCase):
    """Unit tests for the ShowSystemUsers parser ('show system users').

    Naming fix: the original class had golden_output_* and
    golden_parsed_output_* swapped relative to the convention used by every
    other test class in this file. Here, as elsewhere, golden_output_* holds
    the raw CLI text fed to the mocked device and golden_parsed_output_*
    holds the expected parser result. Behavior is unchanged.
    """

    device = Device(name="aDevice")
    maxDiff = None

    empty_output = {"execute.return_value": ""}

    # Three interactive users, each with a login time and idle time.
    golden_output_1 = {
        "execute.return_value":
        """
show system users
9:38AM up 209 days, 37 mins, 3 users, load averages: 0.28, 0.39, 0.37
USER TTY FROM LOGIN@ IDLE WHAT
cisco pts/0 10.1.0.1 2:35AM - -cl
cisco pts/1 10.1.0.1 8:31AM 56 -cl
cisco pts/2 10.1.0.1 7:45AM -cl
"""
    }

    golden_parsed_output_1 = {
        "system-users-information": {
            "uptime-information": {
                "active-user-count": {
                    "#text": "3"
                },
                "date-time": {
                    "#text": "9:38AM"
                },
                "load-average-1": "0.28",
                "load-average-15": "0.39",
                "load-average-5": "0.37",
                "up-time": {
                    "#text": "209 days, 37 mins"
                },
                "user-table": {
                    "user-entry": [
                        {
                            "command": "-cl",
                            "from": "10.1.0.1",
                            "idle-time": {
                                "#text": "-"
                            },
                            "login-time": {
                                "#text": "2:35AM"
                            },
                            "tty": "pts/0",
                            "user": "cisco",
                        },
                        {
                            "command": "-cl",
                            "from": "10.1.0.1",
                            "idle-time": {
                                "#text": "56"
                            },
                            "login-time": {
                                "#text": "8:31AM"
                            },
                            "tty": "pts/1",
                            "user": "cisco",
                        },
                        {
                            "command": "-cl",
                            "from": "10.1.0.1",
                            "idle-time": {
                                "#text": "3"
                            },
                            "login-time": {
                                "#text": "7:45AM"
                            },
                            "tty": "pts/2",
                            "user": "cisco",
                        },
                    ]
                },
            }
        }
    }

    # Single user logged in on the console (no source address).
    golden_output_2 = {
        "execute.return_value":
        """
11:31PM up 2 days, 1:04, 1 user, load averages: 0.04, 0.03, 0.01
USER TTY FROM LOGIN@ IDLE WHAT
cisco d0 - Mon10PM - -cli (cli)
"""
    }

    golden_parsed_output_2 = {
        "system-users-information": {
            "uptime-information": {
                "active-user-count": {
                    "#text": "1"
                },
                "date-time": {
                    "#text": "11:31PM"
                },
                "load-average-1": "0.04",
                "load-average-15": "0.03",
                "load-average-5": "0.01",
                "up-time": {
                    "#text": "2 days, 1:04"
                },
                "user-table": {
                    "user-entry": [{
                        "command": "-cli (cli)",
                        "from": "-",
                        "idle-time": {
                            "#text": "-"
                        },
                        "login-time": {
                            "#text": "Mon10PM"
                        },
                        "tty": "d0",
                        "user": "cisco",
                    }]
                },
            }
        }
    }

    # Two users: one idle for days on console, one fresh remote login.
    golden_output_3 = {
        "execute.return_value":
        """
11:36PM up 2 days, 5 hrs, 2 users, load averages: 0.00, 0.00, 0.00
USER TTY FROM LOGIN@ IDLE WHAT
cisco d0 - Mon10PM 2days -cli (cli)
cisco p0 255.255.255.255 11:36PM - -cli (cli)
"""
    }

    golden_parsed_output_3 = {
        "system-users-information": {
            "uptime-information": {
                "active-user-count": {
                    "#text": "2"
                },
                "date-time": {
                    "#text": "11:36PM"
                },
                "load-average-1": "0.00",
                "load-average-15": "0.00",
                "load-average-5": "0.00",
                "up-time": {
                    "#text": "2 days, 5 hrs"
                },
                "user-table": {
                    "user-entry": [{
                        "command": "-cli (cli)",
                        "from": "-",
                        "idle-time": {
                            "#text": "2days"
                        },
                        "login-time": {
                            "#text": "Mon10PM"
                        },
                        "tty": "d0",
                        "user": "cisco",
                    }, {
                        "command": "-cli (cli)",
                        "from": "255.255.255.255",
                        "idle-time": {
                            "#text": "-"
                        },
                        "login-time": {
                            "#text": "11:36PM"
                        },
                        "tty": "p0",
                        "user": "cisco",
                    }]
                },
            }
        }
    }

    def test_empty(self):
        """Empty device output must raise SchemaEmptyParserError."""
        self.device = Mock(**self.empty_output)
        obj = ShowSystemUsers(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            obj.parse()

    def test_golden_1(self):
        """Three-user output parses into the expected structure."""
        self.device = Mock(**self.golden_output_1)
        obj = ShowSystemUsers(device=self.device)
        self.assertEqual(obj.parse(), self.golden_parsed_output_1)

    def test_golden_2(self):
        """Single console user parses into the expected structure."""
        self.device = Mock(**self.golden_output_2)
        obj = ShowSystemUsers(device=self.device)
        self.assertEqual(obj.parse(), self.golden_parsed_output_2)

    def test_golden_3(self):
        """Console + remote user output parses into the expected structure."""
        self.device = Mock(**self.golden_output_3)
        obj = ShowSystemUsers(device=self.device)
        self.assertEqual(obj.parse(), self.golden_parsed_output_3)
# =========================================================
# Unit test for show system commit
# =========================================================
class TestShowSystemCommit(unittest.TestCase):
    """Unit tests for the ShowSystemCommit parser ('show system commit')."""

    device = Device(name="aDevice")
    empty_output = {"execute.return_value": ""}
    maxDiff = None

    # Commit timestamps in CLI order; entry N of the commit history carries
    # sequence-number str(N) and date-time "<timestamp> UTC".
    _COMMIT_STAMPS = [
        "2020-03-05 16:04:34",
        "2020-03-05 16:01:49",
        "2020-03-05 15:53:03",
        "2020-03-05 15:51:16",
        "2020-03-05 15:02:37",
        "2020-03-05 15:00:57",
        "2020-03-05 14:58:06",
        "2020-03-05 14:49:36",
        "2020-03-05 14:47:49",
        "2020-03-05 00:07:34",
        "2020-03-05 00:04:48",
        "2020-03-04 23:58:42",
        "2020-03-04 21:58:30",
        "2020-03-04 02:27:13",
        "2020-03-04 02:11:40",
        "2020-03-04 01:50:35",
        "2020-03-04 01:06:08",
        "2020-03-04 00:23:13",
        "2020-03-03 23:15:16",
        "2020-03-03 18:32:59",
        "2020-03-03 18:30:05",
        "2020-03-03 18:24:06",
        "2020-03-03 15:58:04",
        "2020-03-03 15:46:09",
        "2020-03-03 15:26:19",
        "2020-03-03 15:07:59",
        "2020-03-03 14:48:07",
        "2020-03-03 14:22:09",
        "2020-03-03 14:20:28",
        "2020-03-03 14:17:33",
        "2020-03-03 14:15:45",
        "2020-03-03 11:10:33",
        "2020-03-03 11:08:14",
        "2020-03-03 08:41:29",
        "2020-03-03 08:25:57",
        "2020-03-03 08:09:34",
        "2020-03-03 07:49:00",
        "2020-03-03 07:39:35",
        "2020-03-03 07:23:14",
        "2020-03-03 05:41:34",
        "2020-03-03 04:23:30",
        "2020-03-02 19:05:48",
        "2020-03-02 19:02:29",
        "2020-03-02 16:34:53",
        "2020-03-02 16:26:08",
        "2020-03-02 16:10:44",
        "2020-03-02 16:04:23",
        "2020-03-02 15:45:11",
        "2020-03-02 09:28:52",
        "2020-03-02 08:42:26",
    ]

    # Expected parse result: one commit-history entry per timestamp above.
    golden_parsed_output_1 = {
        "commit-information": {
            "commit-history": [
                {
                    "client": "cli",
                    "date-time": {
                        "#text": "{} UTC".format(stamp),
                    },
                    "sequence-number": str(seq),
                    "user": "cisco",
                }
                for seq, stamp in enumerate(_COMMIT_STAMPS)
            ]
        }
    }

    # Raw CLI text rebuilt from the same timestamp table, so the raw output
    # and the expected dict can never drift apart.
    golden_output_1 = {
        "execute.return_value": "\n".join(
            ["", "show system commit"]
            + [
                "{} {} UTC by cisco via cli".format(seq, stamp)
                for seq, stamp in enumerate(_COMMIT_STAMPS)
            ]
            + [""]
        )
    }

    def test_empty(self):
        """Empty device output must raise SchemaEmptyParserError."""
        self.device = Mock(**self.empty_output)
        obj = ShowSystemCommit(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            obj.parse()

    def test_golden_1(self):
        """Fifty-entry commit history parses into the expected structure."""
        self.device = Mock(**self.golden_output_1)
        obj = ShowSystemCommit(device=self.device)
        self.assertEqual(obj.parse(), self.golden_parsed_output_1)
# =========================================================
# Unit test for show system queues
# =========================================================
class TestShowSystemQueues(unittest.TestCase):
    """Unit tests for the ShowSystemQueues parser ('show system queues')."""

    maxDiff = None
    device = Device(name="aDevice")
    empty_output = {"execute.return_value": ""}

    # One (name, max-octets, max-packets, drops) tuple per output-interface
    # queue row; the currently-queued octets/packets are always "0".
    _IFACE_ROWS = [
        ("lsi", "12500", "41", "0"),
        ("dsc", "0", "0", "0"),
        ("lo0", "0", "0", "0"),
        ("gre", "12500", "41", "0"),
        ("ipip", "12500", "41", "0"),
        ("tap", "0", "0", "0"),
        ("pime", "12500", "41", "0"),
        ("pimd", "12500", "41", "0"),
        ("fxp0", "12500000", "41666", "0"),
        ("em1", "12500000", "41666", "0"),
        ("mtun", "12500", "41", "0"),
        ("demux0", "0", "0", "0"),
        ("cbp0", "12500000", "41666", "0"),
        ("pip0", "12500000", "41666", "0"),
        ("pp0", "125000", "416", "0"),
        ("irb", "12500000", "41666", "0"),
        ("vtep", "12500000", "41666", "0"),
        ("esi", "12500000", "41666", "0"),
        ("rbeb", "12500000", "41666", "0"),
        ("fti0", "0", "0", "0"),
        ("fti1", "0", "0", "0"),
        ("fti2", "0", "0", "0"),
        ("fti3", "0", "0", "0"),
        ("fti4", "0", "0", "0"),
        ("fti5", "0", "0", "0"),
        ("fti6", "0", "0", "0"),
        ("fti7", "0", "0", "0"),
        ("jsrv", "12500000", "41666", "0"),
        ("lc-0/0/0", "0", "0", "0"),
        ("pfh-0/0/0", "0", "0", "0"),
        ("pfe-0/0/0", "0", "0", "0"),
        ("ge-0/0/0", "1250000", "4166", "3"),
        ("ge-0/0/1", "1250000", "4166", "3"),
        ("ge-0/0/2", "1250000", "4166", "132"),
        ("ge-0/0/3", "1250000", "4166", "0"),
        ("ge-0/0/4", "1250000", "4166", "0"),
        ("ge-0/0/5", "1250000", "4166", "0"),
        ("ge-0/0/6", "1250000", "4166", "0"),
        ("ge-0/0/7", "1250000", "4166", "0"),
        ("ge-0/0/8", "1250000", "4166", "0"),
        ("ge-0/0/9", "1250000", "4166", "0"),
    ]

    # Same layout for the input-protocol queue rows.
    _PROTO_ROWS = [
        ("splfwdq", "1000000", "1000", "0"),
        ("splnetq", "1000000", "1000", "0"),
        ("optionq", "1000000", "1000", "0"),
        ("icmpq", "50000", "50", "0"),
        ("frlmiq", "0", "0", "0"),
        ("spppintrq", "25000", "1000", "0"),
        ("atmctlpktq", "0", "0", "0"),
        ("atmoamq", "0", "0", "0"),
        ("tnpintrq", "1250000", "4166", "0"),
        ("tagintrq", "200000", "200", "0"),
        ("tagfragq", "200000", "200", "0"),
    ]

    # Expected parse result, generated row-by-row from the tables above.
    golden_parsed_output_1 = {
        "queues-statistics": {
            "interface-queues-statistics": {
                "interface-queue": [
                    {
                        "max-octets-allowed": octets_max,
                        "max-packets-allowed": packets_max,
                        "name": queue_name,
                        "number-of-queue-drops": drops,
                        "octets-in-queue": "0",
                        "packets-in-queue": "0",
                    }
                    for queue_name, octets_max, packets_max, drops in _IFACE_ROWS
                ]
            },
            "protocol-queues-statistics": {
                "protocol-queue": [
                    {
                        "max-octets-allowed": octets_max,
                        "max-packets-allowed": packets_max,
                        "name": queue_name,
                        "number-of-queue-drops": drops,
                        "octets-in-queue": "0",
                        "packets-in-queue": "0",
                    }
                    for queue_name, octets_max, packets_max, drops in _PROTO_ROWS
                ]
            },
        }
    }

    # Raw CLI text rebuilt from the same row tables so the raw output and
    # the expected dict can never drift apart.
    golden_output_1 = {
        "execute.return_value": "\n".join(
            ["", "show system queues",
             "output interface bytes max packets max drops"]
            + ["{} 0 {} 0 {} {}".format(n, o, p, d) for n, o, p, d in _IFACE_ROWS]
            + ["input protocol bytes max packets max drops"]
            + ["{} 0 {} 0 {} {}".format(n, o, p, d) for n, o, p, d in _PROTO_ROWS]
            + [""]
        )
    }

    def test_empty(self):
        """Empty device output must raise SchemaEmptyParserError."""
        self.device = Mock(**self.empty_output)
        obj = ShowSystemQueues(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            obj.parse()

    def test_golden_1(self):
        """Full queue listing parses into the expected structure."""
        self.maxDiff = None
        self.device = Mock(**self.golden_output_1)
        obj = ShowSystemQueues(device=self.device)
        self.assertEqual(obj.parse(), self.golden_parsed_output_1)
# =========================================================
# Unit test for show system queues no-forwarding
# =========================================================
class TestShowSystemQueuesNoForwarding(unittest.TestCase):
    """Unit tests for the ShowSystemQueuesNoForwarding parser
    ('show system queues no-forwarding')."""

    maxDiff = None
    device = Device(name="aDevice")
    empty_output = {"execute.return_value": ""}

    # One (name, max-octets, max-packets, drops) tuple per output-interface
    # queue row; the currently-queued octets/packets are always "0".
    _IFACE_ROWS = [
        ("lsi", "12500", "41", "0"),
        ("dsc", "0", "0", "0"),
        ("lo0", "0", "0", "0"),
        ("gre", "12500", "41", "0"),
        ("ipip", "12500", "41", "0"),
        ("tap", "0", "0", "0"),
        ("pime", "12500", "41", "0"),
        ("pimd", "12500", "41", "0"),
        ("fxp0", "12500000", "41666", "0"),
        ("em1", "12500000", "41666", "0"),
        ("mtun", "12500", "41", "0"),
        ("demux0", "0", "0", "0"),
        ("cbp0", "12500000", "41666", "0"),
        ("pip0", "12500000", "41666", "0"),
        ("pp0", "125000", "416", "0"),
        ("irb", "12500000", "41666", "0"),
        ("vtep", "12500000", "41666", "0"),
        ("esi", "12500000", "41666", "0"),
        ("rbeb", "12500000", "41666", "0"),
        ("fti0", "0", "0", "0"),
        ("fti1", "0", "0", "0"),
        ("fti2", "0", "0", "0"),
        ("fti3", "0", "0", "0"),
        ("fti4", "0", "0", "0"),
        ("fti5", "0", "0", "0"),
        ("fti6", "0", "0", "0"),
        ("fti7", "0", "0", "0"),
        ("jsrv", "12500000", "41666", "0"),
        ("lc-0/0/0", "0", "0", "0"),
        ("pfh-0/0/0", "0", "0", "0"),
        ("pfe-0/0/0", "0", "0", "0"),
        ("ge-0/0/0", "1250000", "4166", "3"),
        ("ge-0/0/1", "1250000", "4166", "3"),
        ("ge-0/0/2", "1250000", "4166", "132"),
        ("ge-0/0/3", "1250000", "4166", "0"),
        ("ge-0/0/4", "1250000", "4166", "0"),
        ("ge-0/0/5", "1250000", "4166", "0"),
        ("ge-0/0/6", "1250000", "4166", "0"),
        ("ge-0/0/7", "1250000", "4166", "0"),
        ("ge-0/0/8", "1250000", "4166", "0"),
        ("ge-0/0/9", "1250000", "4166", "0"),
    ]

    # Same layout for the input-protocol queue rows.
    _PROTO_ROWS = [
        ("splfwdq", "1000000", "1000", "0"),
        ("splnetq", "1000000", "1000", "0"),
        ("optionq", "1000000", "1000", "0"),
        ("icmpq", "50000", "50", "0"),
        ("frlmiq", "0", "0", "0"),
        ("spppintrq", "25000", "1000", "0"),
        ("atmctlpktq", "0", "0", "0"),
        ("atmoamq", "0", "0", "0"),
        ("tnpintrq", "1250000", "4166", "0"),
        ("tagintrq", "200000", "200", "0"),
        ("tagfragq", "200000", "200", "0"),
    ]

    # Expected parse result, generated row-by-row from the tables above.
    golden_parsed_output_1 = {
        "queues-statistics": {
            "interface-queues-statistics": {
                "interface-queue": [
                    {
                        "max-octets-allowed": octets_max,
                        "max-packets-allowed": packets_max,
                        "name": queue_name,
                        "number-of-queue-drops": drops,
                        "octets-in-queue": "0",
                        "packets-in-queue": "0",
                    }
                    for queue_name, octets_max, packets_max, drops in _IFACE_ROWS
                ]
            },
            "protocol-queues-statistics": {
                "protocol-queue": [
                    {
                        "max-octets-allowed": octets_max,
                        "max-packets-allowed": packets_max,
                        "name": queue_name,
                        "number-of-queue-drops": drops,
                        "octets-in-queue": "0",
                        "packets-in-queue": "0",
                    }
                    for queue_name, octets_max, packets_max, drops in _PROTO_ROWS
                ]
            },
        }
    }

    # Raw CLI text rebuilt from the same row tables so the raw output and
    # the expected dict can never drift apart.
    golden_output_1 = {
        "execute.return_value": "\n".join(
            ["", "show system queues",
             "output interface bytes max packets max drops"]
            + ["{} 0 {} 0 {} {}".format(n, o, p, d) for n, o, p, d in _IFACE_ROWS]
            + ["input protocol bytes max packets max drops"]
            + ["{} 0 {} 0 {} {}".format(n, o, p, d) for n, o, p, d in _PROTO_ROWS]
            + [""]
        )
    }

    def test_empty(self):
        """Empty device output must raise SchemaEmptyParserError."""
        self.device = Mock(**self.empty_output)
        obj = ShowSystemQueuesNoForwarding(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            obj.parse()

    def test_golden_1(self):
        """Full queue listing parses into the expected structure."""
        self.maxDiff = None
        self.device = Mock(**self.golden_output_1)
        obj = ShowSystemQueuesNoForwarding(device=self.device)
        self.assertEqual(obj.parse(), self.golden_parsed_output_1)
# =========================================================
# Unit test for show system storage
# =========================================================
class TestShowSystemStorage(unittest.TestCase):
maxDiff = None
device = Device(name="aDevice")
empty_output = {"execute.return_value": ""}
golden_parsed_output_1 = {
"system-storage-information": {
"filesystem": [
{
"available-blocks": {
"junos:format": "17G"
},
"filesystem-name": "/dev/gpt/junos",
"mounted-on": "/.mount",
"total-blocks": {
"junos:format": "20G"
},
"used-blocks": {
"junos:format": "1.2G"
},
"used-percent": "7%",
},
{
"available-blocks": {
"junos:format": "730M"
},
"filesystem-name": "/dev/gpt/config",
"mounted-on": "/.mount/config",
"total-blocks": {
"junos:format": "793M"
},
"used-blocks": {
"junos:format": "60K"
},
"used-percent": "0%",
},
{
"available-blocks": {
"junos:format": "6.3G"
},
"filesystem-name": "/dev/gpt/var",
"mounted-on": "/.mount/var",
"total-blocks": {
"junos:format": "7.0G"
},
"used-blocks": {
"junos:format": "117M"
},
"used-percent": "2%",
},
{
"available-blocks": {
"junos:format": "3.2G"
},
"filesystem-name": "tmpfs",
"mounted-on": "/.mount/tmp",
"total-blocks": {
"junos:format": "3.2G"
},
"used-blocks": {
"junos:format": "196K"
},
"used-percent": "0%",
},
{
"available-blocks": {
"junos:format": "333M"
},
"filesystem-name": "tmpfs",
"mounted-on": "/.mount/mfs",
"total-blocks": {
"junos:format": "334M"
},
"used-blocks": {
"junos:format": "748K"
},
"used-percent": "0%",
},
]
}
}
golden_output_1 = {
"execute.return_value":
"""
show system storage | no-more
Filesystem Size Used Avail Capacity Mounted on
/dev/gpt/junos 20G 1.2G 17G 7% /.mount
/dev/gpt/config 793M 60K 730M 0% /.mount/config
/dev/gpt/var 7.0G 117M 6.3G 2% /.mount/var
tmpfs 3.2G 196K 3.2G 0% /.mount/tmp
tmpfs 334M 748K 333M 0% /.mount/mfs
"""
}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowSystemStorage(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden_1(self):
self.device = Mock(**self.golden_output_1)
obj = ShowSystemStorage(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_1)
# =========================================================
# Unit test for show system storage no-forwarding
# =========================================================
class TestShowSystemStorageNoForwarding(unittest.TestCase):
maxDiff = None
device = Device(name="aDevice")
empty_output = {"execute.return_value": ""}
golden_parsed_output_1 = {
"system-storage-information": {
"filesystem": [
{
"available-blocks": {
"junos:format": "17G"
},
"filesystem-name": "/dev/gpt/junos",
"mounted-on": "/.mount",
"total-blocks": {
"junos:format": "20G"
},
"used-blocks": {
"junos:format": "1.2G"
},
"used-percent": "7%",
},
{
"available-blocks": {
"junos:format": "730M"
},
"filesystem-name": "/dev/gpt/config",
"mounted-on": "/.mount/config",
"total-blocks": {
"junos:format": "793M"
},
"used-blocks": {
"junos:format": "60K"
},
"used-percent": "0%",
},
{
"available-blocks": {
"junos:format": "6.3G"
},
"filesystem-name": "/dev/gpt/var",
"mounted-on": "/.mount/var",
"total-blocks": {
"junos:format": "7.0G"
},
"used-blocks": {
"junos:format": "117M"
},
"used-percent": "2%",
},
{
"available-blocks": {
"junos:format": "3.2G"
},
"filesystem-name": "tmpfs",
"mounted-on": "/.mount/tmp",
"total-blocks": {
"junos:format": "3.2G"
},
"used-blocks": {
"junos:format": "196K"
},
"used-percent": "0%",
},
{
"available-blocks": {
"junos:format": "333M"
},
"filesystem-name": "tmpfs",
"mounted-on": "/.mount/mfs",
"total-blocks": {
"junos:format": "334M"
},
"used-blocks": {
"junos:format": "748K"
},
"used-percent": "0%",
},
]
}
}
golden_output_1 = {
"execute.return_value":
"""
show system storage no-forwarding
Filesystem Size Used Avail Capacity Mounted on
/dev/gpt/junos 20G 1.2G 17G 7% /.mount
/dev/gpt/config 793M 60K 730M 0% /.mount/config
/dev/gpt/var 7.0G 117M 6.3G 2% /.mount/var
tmpfs 3.2G 196K 3.2G 0% /.mount/tmp
tmpfs 334M 748K 333M 0% /.mount/mfs
"""
}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowSystemStorageNoForwarding(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden_1(self):
self.device = Mock(**self.golden_output_1)
obj = ShowSystemStorageNoForwarding(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_1)
# =========================================================
# Unit test for show system buffers no-forwarding
# =========================================================
class TestShowSystemBufferNoForwarding(unittest.TestCase):
device = Device(name="aDevice")
maxDiff = None
empty_output = {"execute.return_value": ""}
golden_parsed_output_1 = {
"memory-statistics": {
"cached-bytes": "1975",
"cached-jumbo-clusters-16k": "0",
"cached-jumbo-clusters-4k": "3",
"cached-jumbo-clusters-9k": "0",
"cached-mbuf-clusters": "714",
"cached-mbufs": "2142",
"cluster-failures": "0",
"current-bytes-in-use": "1179",
"current-jumbo-clusters-16k": "0",
"current-jumbo-clusters-4k": "0",
"current-jumbo-clusters-9k": "0",
"current-mbuf-clusters": "516",
"current-mbufs": "588",
"io-initiated": "0",
"jumbo-cluster-failures-16k": "0",
"jumbo-cluster-failures-4k": "0",
"jumbo-cluster-failures-9k": "0",
"max-jumbo-clusters-16k": "10396",
"max-jumbo-clusters-4k": "62377",
"max-jumbo-clusters-9k": "18482",
"max-mbuf-clusters": "124756",
"mbuf-failures": "0",
"packet-count": "513",
"packet-failures": "0",
"packet-free": "499",
"sfbuf-requests-delayed": "0",
"sfbuf-requests-denied": "0",
"total-bytes": "3154",
"total-jumbo-clusters-16k": "0",
"total-jumbo-clusters-4k": "3",
"total-jumbo-clusters-9k": "0",
"total-mbuf-clusters": "1230",
"total-mbufs": "2730",
}
}
golden_output_1 = {
"execute.return_value":
"""
show system buffers no-forwarding
588/2142/2730 mbufs in use (current/cache/total)
516/714/1230/124756 mbuf clusters in use (current/cache/total/max)
513/499 mbuf+clusters out of packet secondary zone in use (current/cache)
0/3/3/62377 4k (page size) jumbo clusters in use (current/cache/total/max)
0/0/0/18482 9k (page size) jumbo clusters in use (current/cache/total/max)
0/0/0/10396 16k (page size) jumbo clusters in use (current/cache/total/max)
1179K/1975K/3154K bytes allocated to network (current/cache/total)
0/0/0 requests for mbufs denied (mbufs/clusters/mbuf+clusters)
0/0/0 requests for jumbo clusters denied (4k/9k/16k)
0 requests for sfbufs denied
0 requests for sfbufs delayed
0 requests for I/O initiated by sendfile
"""
}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowSystemBuffersNoForwarding(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden_1(self):
self.device = Mock(**self.golden_output_1)
obj = ShowSystemBuffersNoForwarding(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_1)
# =========================================================
# Unit test for show system core-dumps
# =========================================================
class TestShowSystemCoreDumps(unittest.TestCase):
device = Device(name="aDevice")
maxDiff = None
empty_output = {"execute.return_value": ""}
golden_parsed_output_1 = {
"directory-list": {
"directory": {
"file-information": [
{
"file-date": {
"@junos:format": "Aug 8 2019"
},
"file-group": "wheel",
"file-links": "1",
"file-name":
"/var/crash/core.riot.mpc0.1565307741.1716.gz",
"file-owner": "root",
"file-permissions": {
"@junos:format": "-rw-r--r--"
},
"file-size": "1252383",
},
{
"file-date": {
"@junos:format": "Aug 8 2019"
},
"file-group": "wheel",
"file-links": "1",
"file-name":
"/var/crash/core.vmxt.mpc0.1565307747.1791.gz",
"file-owner": "root",
"file-permissions": {
"@junos:format": "-rw-r--r--"
},
"file-size": "4576464",
},
{
"file-date": {
"@junos:format": "Aug 15 2019"
},
"file-group": "wheel",
"file-links": "1",
"file-name":
"/var/crash/core.vmxt.mpc0.1565841060.1528.gz",
"file-owner": "root",
"file-permissions": {
"@junos:format": "-rw-r--r--"
},
"file-size": "1139316",
},
{
"file-date": {
"@junos:format": "Aug 15 2019"
},
"file-group": "wheel",
"file-links": "1",
"file-name":
"/var/crash/core.vmxt.mpc0.1565841991.4312.gz",
"file-owner": "root",
"file-permissions": {
"@junos:format": "-rw-r--r--"
},
"file-size": "1139249",
},
{
"file-date": {
"@junos:format": "Aug 15 2019"
},
"file-group": "wheel",
"file-links": "1",
"file-name":
"/var/crash/core.vmxt.mpc0.1565842608.6212.gz",
"file-owner": "root",
"file-permissions": {
"@junos:format": "-rw-r--r--"
},
"file-size": "1139299",
},
{
"file-date": {
"@junos:format": "Aug 15 2019"
},
"file-group": "wheel",
"file-links": "1",
"file-name":
"/var/crash/core.vmxt.mpc0.1565892564.3392.gz",
"file-owner": "root",
"file-permissions": {
"@junos:format": "-rw-r--r--"
},
"file-size": "1139321",
},
],
"output": [
"/var/tmp/*core*: No such file or directory",
"/var/tmp/pics/*core*: No such file or directory",
"/var/crash/kernel.*: No such file or directory",
"/var/jails/rest-api/tmp/*core*: No such file or directory",
"/tftpboot/corefiles/*core*: No such file or directory",
],
"total-files":
"6",
}
}
}
golden_output_1 = {
"execute.return_value":
"""
show system core-dumps
-rw-r--r-- 1 root wheel 1252383 Aug 8 2019 /var/crash/core.riot.mpc0.1565307741.1716.gz
-rw-r--r-- 1 root wheel 4576464 Aug 8 2019 /var/crash/core.vmxt.mpc0.1565307747.1791.gz
-rw-r--r-- 1 root wheel 1139316 Aug 15 2019 /var/crash/core.vmxt.mpc0.1565841060.1528.gz
-rw-r--r-- 1 root wheel 1139249 Aug 15 2019 /var/crash/core.vmxt.mpc0.1565841991.4312.gz
-rw-r--r-- 1 root wheel 1139299 Aug 15 2019 /var/crash/core.vmxt.mpc0.1565842608.6212.gz
-rw-r--r-- 1 root wheel 1139321 Aug 15 2019 /var/crash/core.vmxt.mpc0.1565892564.3392.gz
/var/tmp/*core*: No such file or directory
/var/tmp/pics/*core*: No such file or directory
/var/crash/kernel.*: No such file or directory
/var/jails/rest-api/tmp/*core*: No such file or directory
/tftpboot/corefiles/*core*: No such file or directory
total files: 6
"""
}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowSystemCoreDumps(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden_1(self):
self.device = Mock(**self.golden_output_1)
obj = ShowSystemCoreDumps(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_1)
# =========================================================
# Unit test for show system core-dumps no-forwarding
# =========================================================
class TestShowSystemCoreDumpsNoForwarding(unittest.TestCase):
device = Device(name="aDevice")
maxDiff = None
empty_output = {"execute.return_value": ""}
golden_parsed_output_1 = {
"directory-list": {
"directory": {
"file-information": [
{
"file-date": {
"@junos:format": "Aug 8 2019"
},
"file-group": "wheel",
"file-links": "1",
"file-name":
"/var/crash/core.riot.mpc0.1565307741.1716.gz",
"file-owner": "root",
"file-permissions": {
"@junos:format": "-rw-r--r--"
},
"file-size": "1252383",
},
{
"file-date": {
"@junos:format": "Aug 8 2019"
},
"file-group": "wheel",
"file-links": "1",
"file-name":
"/var/crash/core.vmxt.mpc0.1565307747.1791.gz",
"file-owner": "root",
"file-permissions": {
"@junos:format": "-rw-r--r--"
},
"file-size": "4576464",
},
{
"file-date": {
"@junos:format": "Aug 15 2019"
},
"file-group": "wheel",
"file-links": "1",
"file-name":
"/var/crash/core.vmxt.mpc0.1565841060.1528.gz",
"file-owner": "root",
"file-permissions": {
"@junos:format": "-rw-r--r--"
},
"file-size": "1139316",
},
{
"file-date": {
"@junos:format": "Aug 15 2019"
},
"file-group": "wheel",
"file-links": "1",
"file-name":
"/var/crash/core.vmxt.mpc0.1565841991.4312.gz",
"file-owner": "root",
"file-permissions": {
"@junos:format": "-rw-r--r--"
},
"file-size": "1139249",
},
{
"file-date": {
"@junos:format": "Aug 15 2019"
},
"file-group": "wheel",
"file-links": "1",
"file-name":
"/var/crash/core.vmxt.mpc0.1565842608.6212.gz",
"file-owner": "root",
"file-permissions": {
"@junos:format": "-rw-r--r--"
},
"file-size": "1139299",
},
{
"file-date": {
"@junos:format": "Aug 15 2019"
},
"file-group": "wheel",
"file-links": "1",
"file-name":
"/var/crash/core.vmxt.mpc0.1565892564.3392.gz",
"file-owner": "root",
"file-permissions": {
"@junos:format": "-rw-r--r--"
},
"file-size": "1139321",
},
],
"output": [
"/var/tmp/*core*: No such file or directory",
"/var/tmp/pics/*core*: No such file or directory",
"/var/crash/kernel.*: No such file or directory",
"/var/jails/rest-api/tmp/*core*: No such file or directory",
"/tftpboot/corefiles/*core*: No such file or directory",
],
"total-files":
"6",
}
}
}
golden_output_1 = {
"execute.return_value":
"""
show system core-dumps no-forwarding
-rw-r--r-- 1 root wheel 1252383 Aug 8 2019 /var/crash/core.riot.mpc0.1565307741.1716.gz
-rw-r--r-- 1 root wheel 4576464 Aug 8 2019 /var/crash/core.vmxt.mpc0.1565307747.1791.gz
-rw-r--r-- 1 root wheel 1139316 Aug 15 2019 /var/crash/core.vmxt.mpc0.1565841060.1528.gz
-rw-r--r-- 1 root wheel 1139249 Aug 15 2019 /var/crash/core.vmxt.mpc0.1565841991.4312.gz
-rw-r--r-- 1 root wheel 1139299 Aug 15 2019 /var/crash/core.vmxt.mpc0.1565842608.6212.gz
-rw-r--r-- 1 root wheel 1139321 Aug 15 2019 /var/crash/core.vmxt.mpc0.1565892564.3392.gz
/var/tmp/*core*: No such file or directory
/var/tmp/pics/*core*: No such file or directory
/var/crash/kernel.*: No such file or directory
/var/jails/rest-api/tmp/*core*: No such file or directory
/tftpboot/corefiles/*core*: No such file or directory
total files: 6
"""
}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowSystemCoreDumpsNoForwarding(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden_1(self):
self.device = Mock(**self.golden_output_1)
obj = ShowSystemCoreDumpsNoForwarding(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_1)
# =========================================================
# Unit test for show system uptime
# =========================================================
class TestShowSystemUptime(unittest.TestCase):
device = Device(name="aDevice")
maxDiff = None
empty_output = {"execute.return_value": ""}
golden_parsed_output_1 = {
"execute.return_value":
"""
show system uptime
Current time: 2020-03-26 08:16:41 UTC
Time Source: LOCAL CLOCK
System booted: 2019-08-29 09:02:22 UTC (29w6d 23:14 ago)
Protocols started: 2019-08-29 09:03:25 UTC (29w6d 23:13 ago)
Last configured: 2020-03-05 16:04:34 UTC (2w6d 16:12 ago) by cisco
8:16AM up 209 days, 23:14, 5 users, load averages: 0.43, 0.43, 0.42
"""
}
golden_output_1 = {
"system-uptime-information": {
"current-time": {
"date-time": {
"#text": "2020-03-26 08:16:41 UTC"
}
},
"last-configured-time": {
"date-time": {
"#text": "2020-03-05 16:04:34 UTC "
},
"time-length": {
"#text": "2w6d 16:12"
},
"user": "cisco",
},
"protocols-started-time": {
"date-time": {
"#text": "2019-08-29 09:03:25 UTC"
},
"time-length": {
"#text": "29w6d 23:13"
},
},
"system-booted-time": {
"date-time": {
"#text": "2019-08-29 09:02:22 UTC"
},
"time-length": {
"#text": "29w6d 23:14"
},
},
"time-source": "LOCAL CLOCK",
"uptime-information": {
"active-user-count": {
"#text": "5"
},
"date-time": {
"#text": "8:16AM"
},
"load-average-1": "0.43",
"load-average-15": "0.43",
"load-average-5": "0.42",
"up-time": {
"#text": "209 days, 23:14 mins,",
'@junos:seconds': '18141240'
},
},
}
}
golden_output_2 = {'execute.return_value':'''
show system uptime
Current time: 2020-08-13 14:08:16 UTC
Time Source: LOCAL CLOCK
System booted: 2020-08-13 03:05:11 UTC (11:03:05 ago)
Protocols started: 2020-08-13 13:37:06 UTC (00:31:10 ago)
Last configured: 2020-08-13 14:08:16 UTC (00:00:00 ago) by genie
2:08PM up 11:03, 1 users, load averages: 0.31, 0.48, 0.50
'''
}
golden_parsed_output_2 = {
'system-uptime-information': {
'current-time': {
'date-time': {
'#text': '2020-08-13 14:08:16 UTC',
},
},
'last-configured-time': {
'date-time': {
'#text': '2020-08-13 14:08:16 UTC ',
},
'time-length': {
'#text': '00:00:00',
},
'user': 'genie',
},
'protocols-started-time': {
'date-time': {
'#text': '2020-08-13 13:37:06 UTC',
},
'time-length': {
'#text': '00:31:10',
},
},
'system-booted-time': {
'date-time': {
'#text': '2020-08-13 03:05:11 UTC',
},
'time-length': {
'#text': '11:03:05',
},
},
'time-source': 'LOCAL CLOCK',
'uptime-information': {
'active-user-count': {
'#text': '1',
},
'date-time': {
'#text': '2:08PM',
},
'load-average-1': '0.31',
'load-average-15': '0.48',
'load-average-5': '0.50',
'up-time': {
'#text': '11:03 mins,',
'@junos:seconds': '39780',
},
},
},
}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowSystemUptime(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden_1(self):
self.device = Mock(**self.golden_parsed_output_1)
obj = ShowSystemUptime(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_output_1)
def test_golden_2(self):
self.device = Mock(**self.golden_output_2)
obj = ShowSystemUptime(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_2)
class TestShowSystemUptimeNoForwarding(unittest.TestCase):
device = Device(name="aDevice")
maxDiff = None
empty_output = {"execute.return_value": ""}
golden_parsed_output_1 = {
"execute.return_value":
"""
show system uptime no-forwarding
Current time: 2020-03-25 09:38:14 UTC
Time Source: LOCAL CLOCK
System booted: 2019-08-29 09:02:22 UTC (29w6d 00:35 ago)
Protocols started: 2019-08-29 09:03:25 UTC (29w6d 00:34 ago)
Last configured: 2020-03-05 16:04:34 UTC (2w5d 17:33 ago) by cisco
9:38AM up 209 days, 36 mins, 3 users, load averages: 0.29, 0.41, 0.38
"""
}
golden_output_1 = {
"system-uptime-information": {
"current-time": {
"date-time": {
"#text": "2020-03-25 09:38:14 UTC"
}
},
"last-configured-time": {
"date-time": {
"#text": "2020-03-05 16:04:34 UTC "
},
"time-length": {
"#text": "2w5d 17:33"
},
"user": "cisco",
},
"protocols-started-time": {
"date-time": {
"#text": "2019-08-29 09:03:25 UTC"
},
"time-length": {
"#text": "29w6d 00:34"
},
},
"system-booted-time": {
"date-time": {
"#text": "2019-08-29 09:02:22 UTC"
},
"time-length": {
"#text": "29w6d 00:35"
},
},
"time-source": "LOCAL CLOCK",
"uptime-information": {
"active-user-count": {
"#text": "3"
},
"date-time": {
"#text": "9:38AM"
},
"load-average-1": "0.29",
"load-average-15": "0.41",
"load-average-5": "0.38",
"up-time": {
"#text": "209 days, 36 mins,",
'@junos:seconds': '18187200'
},
},
}
}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowSystemUptimeNoForwarding(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden_1(self):
self.device = Mock(**self.golden_parsed_output_1)
obj = ShowSystemUptimeNoForwarding(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_output_1)
# =========================================================
# Unit test for show system statistics
# =========================================================
class TestShowSystemStatistics(unittest.TestCase):
device = Device(name="aDevice")
maxDiff = None
empty_output = {"execute.return_value": ""}
golden_parsed_output_1 = {
"statistics": [
{
"ah": {
"bytes-in": "0",
"bytes-out": "0",
"crypto-processing-failure": "0",
"packets-blocked-due-to-policy": "0",
"packets-dropped-as-bad-authentication-detected": "0",
"packets-dropped-as-larger-than-ip-maxpacket": "0",
"packets-dropped-as-protocol-unsupported": "0",
"packets-dropped-due-to-bad-authentication-length": "0",
"packets-dropped-due-to-bad-kcr": "0",
"packets-dropped-due-to-invalid-tdb": "0",
"packets-dropped-due-to-no-tdb": "0",
"packets-dropped-due-to-no-transform": "0",
"packets-dropped-due-to-queue-full": "0",
"packets-in": "0",
"packets-out": "0",
"packets-shorter-than-header-shows": "0",
"possible-replay-packets-detected": "0",
"replay-counter-wrap": "0",
"tunnel-sanity-check-failures": "0",
},
"arp": {
"arp-iri-cnt":
"1",
"arp-iri-drop":
"0",
"arp-iri-max":
"200",
"arp-mgt-cnt":
"2",
"arp-mgt-drop":
"0",
"arp-mgt-max":
"14960",
"arp-packets-are-dropped-as-driver-call-failed":
"0",
"arp-packets-are-dropped-as-nexthop-allocation-failed":
"0",
"arp-packets-are-dropped-as-source-is-not-validated":
"0",
"arp-packets-are-dropped-from-peer-vrrp":
"0",
"arp-packets-are-rejected-as-target-ip-arp-resolve-is-in-progress":
"0",
"arp-packets-received-from-peer-vrrp-router-and-discarded":
"0",
"arp-packets-rejected-as-family-is-configured-with-deny-arp":
"0",
"arp-probe-for-proxy-address-reachable-from-the-incoming-interface":
"0",
"arp-public-cnt":
"4",
"arp-public-drop":
"0",
"arp-public-max":
"59840",
"arp-replies-are-rejected-as-source-and-destination-is-same":
"0",
"arp-replies-received":
"54355",
"arp-replies-sent":
"39895",
"arp-request-discarded-for-vrrp-source-address":
"0",
"arp-requests-received":
"39895",
"arp-requests-sent":
"55086",
"arp-response-packets-are-rejected-on-mace-icl-interface":
"0",
"arp-system-drop":
"0",
"arp-system-max":
"75000",
"datagrams-for-an-address-not-on-the-interface":
"0",
"datagrams-for-non-ip-protocol":
"0",
"datagrams-received":
"200794",
"datagrams-which-were-not-for-me":
"106457",
"datagrams-with-a-broadcast-source-address":
"0",
"datagrams-with-bad-hardware-address-length":
"0",
"datagrams-with-bad-protocol-address-length":
"0",
"datagrams-with-bogus-interface":
"0",
"datagrams-with-incorrect-length":
"0",
"datagrams-with-multicast-source-address":
"0",
"datagrams-with-multicast-target-address":
"87",
"datagrams-with-my-own-hardware-address":
"0",
"datagrams-with-source-address-duplicate-to-mine":
"0",
"datagrams-with-unsupported-opcode":
"0",
"grat-arp-packets-are-ignored-as-mac-address-is-not-changed":
"0",
"new-requests-on-unnumbered-interfaces":
"0",
"packets-discarded-waiting-for-resolution":
"7",
"packets-sent-after-waiting-for-resolution":
"15",
"proxy-arp-request-discarded-as-source-ip-is-a-proxy-target":
"0",
"proxy-requests-not-proxied":
"0",
"received-proxy-requests":
"0",
"replies-from-unnumbered-interface-with-non-subnetted-donor":
"0",
"replies-from-unnumbered-interfaces":
"0",
"requests-dropped-due-to-interface-deletion":
"0",
"requests-dropped-during-retry":
"0",
"requests-dropped-on-entry":
"0",
"requests-for-memory-denied":
"0",
"requests-on-unnumbered-interface-with-non-subnetted-donor":
"0",
"requests-on-unnumbered-interfaces":
"0",
"resolution-request-dropped":
"0",
"resolution-request-received":
"109",
"restricted-proxy-requests":
"0",
"restricted-proxy-requests-not-proxied":
"0",
"self-arp-request-packet-received-on-irb-interface":
"0",
"unrestricted-proxy-requests":
"0",
},
"clnl": {
"address-fields-were-not-reasonable": "0",
"bad-version-packets": "0",
"er-pdu-generation-failure": "0",
"error-pdu-rate-drops": "0",
"forwarded-packets": "0",
"fragmentation-prohibited": "0",
"fragments-discarded": "0",
"fragments-sent": "0",
"fragments-timed-out": "0",
"mcopy-failure": "0",
"no-free-memory-in-socket-buffer": "0",
"non-forwarded-packets": "0",
"output-packets-discarded": "0",
"packets-delivered": "0",
"packets-destined-to-dead-nexthop": "0",
"packets-discarded-due-to-no-route": "0",
"packets-fragmented": "0",
"packets-reconstructed": "0",
"packets-with-bad-checksum": "0",
"packets-with-bad-header-length": "0",
"packets-with-bogus-sdl-size": "0",
"sbappend-failure": "0",
"segment-information-forgotten": "0",
"send-packets-discarded": "0",
"too-small-packets": "0",
"total-clnl-packets-received": "0",
"total-packets-sent": "0",
"unknown-or-unsupported-protocol-packets": "0",
},
"esis": {
"iso-family-not-configured": "0",
"mcopy-failure": "0",
"no-free-memory-in-socket-buffer": "0",
"pdus-received-with-bad-checksum": "0",
"pdus-received-with-bad-type-field": "0",
"pdus-received-with-bad-version-number": "0",
"pdus-with-bad-header-length": "0",
"pdus-with-bogus-sdl-size": "0",
"pdus-with-unknown-or-unsupport-protocol": "0",
"sbappend-failure": "0",
"send-packets-discarded": "0",
"short-pdus-received": "0",
"total-esis-packets-received": "0",
"total-packets-consumed-by-protocol": "0",
},
"esp": {
"esp-bytes-in": "0",
"esp-bytes-out": "0",
"esp-crypto-processing-failure": "0",
"esp-packets-blocked-due-to-policy": "0",
"esp-packets-dropped-as-bad-authentication-detected": "0",
"esp-packets-dropped-as-bad-encryption-detected": "0",
"esp-packets-dropped-as-bad-ilen": "0",
"esp-packets-dropped-as-invalid-tdb": "0",
"esp-packets-dropped-as-larger-than-ip-maxpacket": "0",
"esp-packets-dropped-as-protocol-not-supported": "0",
"esp-packets-dropped-due-to-bad-kcr": "0",
"esp-packets-dropped-due-to-no-tdb": "0",
"esp-packets-dropped-due-to-no-transform": "0",
"esp-packets-dropped-due-to-queue-full": "0",
"esp-packets-in": "0",
"esp-packets-out": "0",
"esp-packets-shorter-than-header-shows": "0",
"esp-possible-replay-packets-detected": "0",
"esp-replay-counter-wrap": "0",
"esp-tunnel-sanity-check-failures": "0",
},
"ethoamcfm": {
"flood-requests-dropped": "0",
"flood-requests-forwarded-to-pfe": "0",
"input-packets-drop-bad-interface-state": "0",
"output-packets-drop-bad-interface-state": "0",
"packets-sent": "0",
"received-packets-forwarded": "0",
"total-packets-received": "0",
"total-packets-transmitted": "0",
},
"ethoamlfm": {
"input-packets-drop-bad-interface-state": "0",
"output-packets-drop-bad-interface-state": "0",
"packets-sent": "0",
"received-packets-forwarded": "0",
"total-packets-received": "0",
"total-packets-transmitted": "0",
},
"icmp": {
"calls-to-icmp-error":
"17647",
"drops-due-to-rate-limit":
"0",
"echo-drops-with-broadcast-or-multicast-destinaton-address":
"0",
"errors-not-generated-because-old-message-was-icmp":
"115",
"histogram": [
{
"destination-unreachable": "13553",
"icmp-echo": "15",
"icmp-echo-reply": "18108704",
"time-exceeded": "4094",
"type-of-histogram": "Output "
"Histogram",
},
{
"destination-unreachable": "7376316",
"icmp-echo": "18108704",
"icmp-echo-reply": "15",
"time-exceeded": "11308300",
"type-of-histogram": "Input "
"Histogram",
},
],
"message-responses-generated":
"18108704",
"messages-less-than-the-minimum-length":
"0",
"messages-with-bad-checksum":
"0",
"messages-with-bad-code-fields":
"0",
"messages-with-bad-length":
"0",
"messages-with-bad-source-address":
"0",
"timestamp-drops-with-broadcast-or-multicast-destination-address":
"0",
},
"icmp6": {
"address-unreachable":
"31",
"administratively-prohibited":
"0",
"bad-checksums":
"0",
"beyond-scope":
"0",
"calls-to-icmp6-error":
"31",
"erroneous-header-field":
"0",
"errors-not-generated-because-old-message-was-icmp-error":
"0",
"errors-not-generated-because-rate-limitation":
"0",
"histogram-of-error-messages-to-be-generated":
"Histogram "
"of "
"error "
"messages "
"to "
"be "
"generated:",
"icmp6-message-responses-generated":
"0",
"icmp6-messages-with-bad-code-fields":
"0",
"icmp6-messages-with-bad-length":
"0",
"input-histogram": {
"histogram-type": "Input "
"histogram:",
"neighbor-advertisement": "543766",
"neighbor-solicitation": "544587",
"router-advertisement-icmp6-packets": "168",
"router-solicitation-icmp6-packets": "8",
"time-exceeded-icmp6-packets": "6773206",
"unreachable-icmp6-packets": "319",
},
"messages-less-than-minimum-length":
"0",
"messages-with-too-many-nd-options":
"0",
"nd-iri-cnt":
"1",
"nd-iri-drop":
"0",
"nd-iri-max":
"200",
"nd-mgt-cnt":
"0",
"nd-mgt-drop":
"0",
"nd-mgt-max":
"14960",
"nd-public-cnt":
"3",
"nd-public-drop":
"0",
"nd-public-max":
"59840",
"nd-system-drop":
"0",
"nd-system-max":
"75000",
"nd6-dad-proxy-conflicts":
"0",
"nd6-dad-proxy-eqmac-drop":
"0",
"nd6-dad-proxy-nomac-drop":
"543766",
"nd6-dad-proxy-requests":
"0",
"nd6-dad-proxy-resolve-cnt":
"0",
"nd6-dup-proxy-responses":
"0",
"nd6-ndp-proxy-requests":
"0",
"nd6-ndp-proxy-resolve-cnt":
"0",
"nd6-ndp-proxy-responses":
"0",
"nd6-requests-dropped-during-retry":
"0",
"nd6-requests-dropped-on-entry":
"0",
"no-route":
"0",
"output-histogram": {
"histogram-type": "Output "
"histogram:",
"neighbor-advertisement": "544593",
"neighbor-solicitation": "544914",
"unreachable-icmp6-packets": "31",
},
"port-unreachable":
"0",
"protocol-name":
"icmp6:",
"time-exceed-reassembly":
"0",
"time-exceed-transit":
"0",
"unknown":
"0",
"unrecognized-next-header":
"0",
"unrecognized-option":
"0",
},
"igmp": {
"membership-queries-received": "308",
"membership-queries-received-with-invalid-fields": "0",
"membership-reports-received": "0",
"membership-reports-received-for-groups-to-which-we-belong":
"0",
"membership-reports-received-with-invalid-fields": "0",
"membership-reports-sent": "943",
"messages-received": "310",
"messages-received-with-bad-checksum": "0",
"messages-received-with-too-few-bytes": "0",
},
"ip": {
"bad-header-checksums": "0",
"datagrams-that-can-not-be-fragmented": "0",
"fragments-created": "458290",
"fragments-dropped-after-timeout": "2330",
"fragments-dropped-due-to-outofspace-or-dup": "0",
"fragments-dropped-due-to-queueoverflow": "0",
"fragments-received": "7776172",
"incoming-rawip-packets-dropped-no-socket-buffer": "46",
"incoming-ttpoip-packets-dropped": "0",
"incoming-ttpoip-packets-received": "184307157",
"incoming-virtual-node-packets-delivered": "0",
"loose-source-and-record-route-options": "0",
"multicast-packets-dropped": "0",
"option-packets-dropped-due-to-rate-limit": "0",
"outgoing-ttpoip-packets-dropped": "0",
"outgoing-ttpoip-packets-sent": "185307601",
"output-datagrams-fragmented": "189762",
"output-packets-discarded-due-to-no-route": "221",
"output-packets-dropped-due-to-no-bufs": "0",
"packets-destined-to-dead-next-hop": "0",
"packets-dropped": "0",
"packets-for-this-host": "820964812",
"packets-for-unknown-or-unsupported-protocol": "311",
"packets-forwarded": "0",
"packets-not-forwardable": "0",
"packets-reassembled-ok": "3840557",
"packets-received": "791039285",
"packets-sent-from-this-host": "894567482",
"packets-sent-with-fabricated-ip-header": "10684334",
"packets-used-first-nexthop-in-ecmp-unilist": "0",
"packets-with-bad-options": "0",
"packets-with-data-length-less-than-headerlength": "0",
"packets-with-data-size-less-than-datalength": "0",
"packets-with-header-length-less-than-data-size": "0",
"packets-with-incorrect-version-number": "0",
"packets-with-options-handled-without-error": "310",
"packets-with-size-smaller-than-minimum": "0",
"record-route-options": "0",
"redirects-sent": "0",
"router-alert-options": "310",
"strict-source-and-record-route-options": "0",
"timestamp-and-address-options": "0",
"timestamp-and-prespecified-address-options": "0",
"timestamp-options": "0",
"transit-re-packets-dropped-on-mgmt-interface": "0",
},
"ip6": {
"duplicate-or-out-of-space-fragments-dropped":
"0",
"failures-of-source-address-selection":
"0",
"forward-cache-hit":
"0",
"forward-cache-miss":
"0",
"fragments-that-exceeded-limit":
"0",
"header-type": [
{
"globals":
"557",
"header-for-source-address-selection":
"source "
"addresses "
"on "
"an "
"outgoing "
"I/F",
"link-locals":
"1088799",
},
{
"globals":
"556",
"header-for-source-address-selection":
"source "
"addresses "
"of "
"same "
"scope",
"link-locals":
"1088799",
},
{
"globals":
"1",
"header-for-source-address-selection":
"source "
"addresses "
"of "
"a "
"different "
"scope",
},
],
"histogram":
"Input histogram:",
"ip6-datagrams-that-can-not-be-fragmented":
"0",
"ip6-fragments-created":
"0",
"ip6-fragments-dropped-after-timeout":
"0",
"ip6-fragments-received":
"0",
"ip6-option-packets-dropped-due-to-rate-limit":
"0",
"ip6-output-datagrams-fragmented":
"0",
"ip6-output-packets-discarded-due-to-no-route":
"1026",
"ip6-output-packets-dropped-due-to-no-bufs":
"0",
"ip6-packets-destined-to-dead-next-hop":
"0",
"ip6-packets-dropped":
"0",
"ip6-packets-for-this-host":
"100720272",
"ip6-packets-forwarded":
"0",
"ip6-packets-not-forwardable":
"0",
"ip6-packets-reassembled-ok":
"0",
"ip6-packets-sent-from-this-host":
"101649920",
"ip6-packets-sent-with-fabricated-ip-header":
"4506372",
"ip6-packets-with-bad-options":
"0",
"ip6-packets-with-incorrect-version-number":
"0",
"ip6-packets-with-size-smaller-than-minimum":
"0",
"ip6-redirects-sent":
"0",
"ip6nh-icmp6":
"7862032",
"ip6nh-ospf":
"4501665",
"ip6nh-tcp":
"5981247",
"ip6nh-udp":
"82375306",
"multicast-packets-which-we-do-not-join":
"0",
"packets-discarded-due-to-too-may-headers":
"0",
"packets-dropped-due-to-bad-protocol":
"0",
"packets-that-violated-scope-rules":
"0",
"packets-whose-headers-are-not-continuous":
"0",
"packets-with-datasize-less-than-data-length":
"0",
"total-packets-received":
"100720281",
"transit-re-packet-dropped-on-mgmt-interface":
"0",
"tunneling-packets-that-can-not-find-gif":
"0",
},
"ipcomp": {
"ipcomp-bytes-in": "0",
"ipcomp-bytes-out": "0",
"ipcomp-crypto-processing-failure": "0",
"ipcomp-packets-blocked-due-to-policy": "0",
"ipcomp-packets-dropped-as-invalid-tdb": "0",
"ipcomp-packets-dropped-as-larger-than-ip-maxpacket": "0",
"ipcomp-packets-dropped-as-protocol-not-supported": "0",
"ipcomp-packets-dropped-due-to-bad-kcr": "0",
"ipcomp-packets-dropped-due-to-no-tdb": "0",
"ipcomp-packets-dropped-due-to-no-transform": "0",
"ipcomp-packets-dropped-due-to-queue-full": "0",
"ipcomp-packets-in": "0",
"ipcomp-packets-out": "0",
"ipcomp-packets-shorter-than-header-shows": "0",
"ipcomp-replay-counter-wrap": "0",
"packets-sent-uncompressed-threshold": "0",
"packets-sent-uncompressed-useless": "0",
},
"ipsec": {
"cluster-coalesced-during-clone": "0",
"cluster-copied-during-clone": "0",
"inbound-packets-violated-process-security-policy": "0",
"invalid-outbound-packets": "0",
"mbuf-coalesced-during-clone": "0",
"mbuf-inserted-during-makespace": "0",
"outbound-packets-failed-due-to-insufficient-memory": "0",
"outbound-packets-violated-process-security-policy": "0",
"outbound-packets-with-bundled-sa": "0",
"outbound-packets-with-no-route": "0",
"outbound-packets-with-no-sa-available": "0",
},
"ipsec6": {
"cluster-coalesced-during-clone": "0",
"cluster-copied-during-clone": "0",
"inbound-packets-violated-process-security-policy": "0",
"invalid-outbound-packets": "0",
"mbuf-coalesced-during-clone": "0",
"mbuf-inserted-during-makespace": "0",
"outbound-packets-failed-due-to-insufficient-memory": "0",
"outbound-packets-violated-process-security-policy": "0",
"outbound-packets-with-bundled-sa": "0",
"outbound-packets-with-no-route": "0",
"outbound-packets-with-no-sa-available": "0",
},
"mpls": {
"after-tagging-packets-can-not-fit-link-mtu": "0",
"lsp-ping-packets": "5",
"packets-discarded-due-to-no-route": "0",
"packets-dropped": "0",
"packets-dropped-at-mpls-socket-send": "0",
"packets-dropped-at-p2mp-cnh-output": "0",
"packets-dropped-due-to-ifl-down": "0",
"packets-forwarded": "6118",
"packets-forwarded-at-mpls-socket-send": "0",
"packets-used-first-nexthop-in-ecmp-unilist": "0",
"packets-with-header-too-small": "0",
"packets-with-ipv4-explicit-null-checksum-errors": "0",
"packets-with-ipv4-explicit-null-tag": "0",
"packets-with-router-alert-tag": "0",
"packets-with-tag-encoding-error": "0",
"packets-with-ttl-expired": "4209",
"total-mpls-packets-received": "4214",
},
"pfkey": {
"bytes-sent-from-userland": "69304",
"bytes-sent-to-userland": "3189032",
"incoming-messages-with-memory-allocation-failure": "0",
"input-histogram": {
"add": "17",
"dump": "10626",
"histogram": "histogram by "
"message type:",
"reserved": "626",
},
"messages-too-short": "0",
"messages-toward-all-sockets": "0",
"messages-toward-registered-sockets": "0",
"messages-toward-single-socket": "22500",
"messages-with-duplicate-extension": "0",
"messages-with-invalid-address-extension": "0",
"messages-with-invalid-extension-type": "0",
"messages-with-invalid-length-field": "0",
"messages-with-invalid-message-type-field": "0",
"messages-with-invalid-sa-type": "0",
"messages-with-invalid-version-field": "0",
"outgoing-messages-with-memory-allocation-failure": "0",
"output-histogram": {
"add": "17",
"dump": "626",
"histogram": "histogram by "
"message type:",
"reserved": "626",
},
"requests-sent-from-userland": "1269",
"requests-sent-to-userland": "11269",
},
"raw-interface": {
"dialer-packets-received": "0",
"dialer-packets-transmitted": "0",
"faboam-packets-dropped": "0",
"faboam-packets-received": "0",
"faboam-packets-transmitted": "0",
"fibre-channel-packets-dropped": "0",
"fibre-channel-packets-received": "0",
"fibre-channel-packets-transmitted": "0",
"fip-packets-dropped": "0",
"fip-packets-received": "0",
"fip-packets-transmitted": "0",
"igmpl2-packets-received": "0",
"igmpl2-packets-transmitted": "0",
"input-drops-due-to-bogus-protocol": "0",
"input-drops-due-to-no-mbufs-available": "0",
"input-drops-due-to-no-socket": "0",
"input-drops-due-to-no-space-in-socket": "0",
"isdn-packets-received": "0",
"isdn-packets-transmitted": "0",
"lacp-packets-dropped": "0",
"lacp-packets-received": "0",
"lacp-packets-transmitted": "0",
"mldl2-packets-received": "0",
"mldl2-packets-transmitted": "0",
"mpu-packets-received": "0",
"mpu-packets-transmitted": "0",
"output-drops-due-to-transmit-error": "0",
"ppoe-packets-transmitted": "0",
"ppp-packets-received-from-jppd": "0",
"ppp-packets-received-from-pppd": "0",
"ppp-packets-transmitted-to-jppd": "0",
"ppp-packets-transmitted-to-pppd": "0",
"pppoe-packets-received": "0",
"raw-packets-transmitted": "0",
"stp-packets-dropped": "0",
"stp-packets-received": "0",
"stp-packets-transmitted": "0",
"vccp-packets-dropped": "0",
"vccp-packets-received": "0",
"vccp-packets-transmitted": "0",
},
"rdp": {
"acks-received": "0",
"acks-sent": "0",
"closes": "0",
"connects": "0",
"input-packets": "0",
"keepalives-received": "0",
"keepalives-sent": "0",
"output-packets": "0",
"packets-discarded-due-to-bad-sequence-number": "0",
"packets-discarded-for-bad-checksum": "0",
"packets-dropped-due-to-full-socket-buffers": "0",
"packets-dropped-full-repl-sock-buf": "0",
"refused-connections": "0",
"retransmits": "0",
},
"tcp": {
"aborted": "0",
"ack-header-predictions": "7954887",
"acks-bytes": "50912129",
"acks-sent-in-response-but-not-exact-rsts": "0",
"acks-sent-in-response-to-syns-on-established-connections":
"0",
"attempts": "48561265",
"bad-connection-attempts": "445",
"badack": "0",
"bucket-overflow": "0",
"byte-retransmits": "72",
"bytes": "589372",
"cache-overflow": "0",
"completed": "1258",
"connection-accepts": "1258",
"connection-requests": "12181850",
"connections-closed": "12185111",
"connections-dropped-by-persist-timeout": "0",
"connections-dropped-by-retransmit-timeout": "162",
"connections-established": "1921",
"connections-updated-rtt-on-close": "1295",
"connections-updated-ssthresh-on-close": "360",
"connections-updated-variance-on-close": "1295",
"cookies-received": "0",
"cookies-sent": "0",
"data-packet-header-predictions": "50195470",
"data-packets-bytes": "49634888",
"dropped": "22",
"drops": "438",
"duplicate-in-bytes": "724472",
"dupsyn": "66",
"embryonic-connections-dropped": "12177708",
"icmp-packets-ignored": "1",
"in-sequence-bytes": "285528163",
"keepalive-connections-dropped": "981871",
"keepalive-probes-sent": "206620576",
"keepalive-timeouts": "207602447",
"listen-queue-overflows": "0",
"out-of-order-in-bytes": "58516475",
"out-of-sequence-segment-drops": "0",
"outgoing-segments-dropped": "0",
"packets-received": "568914028",
"packets-received-after-close": "300",
"packets-received-in-sequence": "66028460",
"packets-sent": "265063785",
"persist-timeouts": "20",
"rcv-packets-dropped": "0",
"rcv-packets-dropped-due-to-bad-address": "0",
"received-acks": "40875092",
"received-acks-for-unsent-data": "0",
"received-completely-duplicate-packet": "133612660",
"received-discarded-because-packet-too-short": "0",
"received-discarded-for-bad-checksum": "1054",
"received-discarded-for-bad-header-offset": "0",
"received-duplicate-acks": "286370388",
"received-old-duplicate-packets": "0",
"received-out-of-order-packets": "124832",
"received-packets-of-data-after-window": "1207",
"received-packets-with-some-dupliacte-data": "463",
"received-window-probes": "13",
"received-window-update-packets": "2896764",
"reset": "10",
"retransmit-timeouts": "7925644",
"retransmitted": "193",
"retransmitted-bytes": "49356338",
"rst-packets": "179222038",
"sack-opitions-sent": "112",
"sack-options-received": "4488",
"sack-recovery-episodes": "820",
"sack-scoreboard-overflow": "0",
"segment-retransmits": "7",
"segments-updated-rtt": "38162864",
"send-packets-dropped": "0",
"sent-ack-only-packets": "196250492",
"sent-control-packets": "191405194",
"sent-data-packets": "52538606",
"sent-data-packets-retransmitted": "106366",
"sent-packets-delayed": "48858785",
"sent-resends-by-mtu-discovery": "0",
"sent-urg-only-packets": "0",
"sent-window-probe-packets": "0",
"sent-window-update-packets": "3986235",
"some-duplicate-in-bytes": "79013",
"stale": "15",
"syncache-entries-added": "1283",
"unreach": "0",
"zone-failures": "0",
},
"tnp": {
"broadcast-packets-received": "18139196",
"broadcast-packets-sent": "18140767",
"control-packets-received": "0",
"control-packets-sent": "0",
"fragment-reassembly-queue-flushes": "0",
"fragmented-packets-received": "0",
"fragmented-packets-sent": "0",
"hello-packets-received": "18139196",
"hello-packets-sent": "18140767",
"input-packets-discarded-with-no-protocol": "0",
"packets-of-version-unspecified-received": "0",
"packets-of-version-unspecified-sent": "0",
"packets-of-version1-received": "0",
"packets-of-version1-sent": "0",
"packets-of-version2-received": "0",
"packets-of-version2-sent": "0",
"packets-of-version3-received": "18139196",
"packets-of-version3-sent": "18140767",
"packets-sent-with-unknown-protocol": "0",
"packets-with-tnp-src-address-collision-received": "0",
"rdp-packets-received": "0",
"rdp-packets-sent": "0",
"received-fragments-dropped": "0",
"received-hello-packets-dropped": "0",
"sent-fragments-dropped": "0",
"sent-hello-packets-dropped": "0",
"tunnel-packets-received": "0",
"tunnel-packets-sent": "0",
"udp-packets-received": "0",
"udp-packets-sent": "0",
"unicast-packets-received": "0",
"unicast-packets-sent": "0",
},
"ttp": {
"arp-l3-packets-received": "0",
"clnp-l3-packets-received": "0",
"cyclotron-cycle-l3-packets-received": "0",
"cyclotron-send-l3-packets-received": "0",
"input-packets-could-not-get-buffer": "0",
"input-packets-for-which-route-lookup-is-bypassed": "0",
"input-packets-tlv-dropped": "0",
"input-packets-with-bad-af": "0",
"input-packets-with-bad-tlv-header": "0",
"input-packets-with-bad-tlv-type": "0",
"input-packets-with-bad-type": "0",
"input-packets-with-discard-type": "0",
"input-packets-with-too-many-tlvs": "0",
"input-packets-with-ttp-tlv-p2mp-nbr-nhid-type": "0",
"input-packets-with-unknown-p2mp-nbr-nhid": "0",
"input-packets-with-vxlan-bfd-pkts": "0",
"ipv4-l3-packets-received": "83525851",
"ipv4-to-mpls-l3-packets-received": "4214",
"ipv6-l3-packets-received": "100720250",
"l2-packets-received": "56842",
"l3-packets-dropped": "0",
"l3-packets-sent-could-not-get-buffer": "0",
"mpls-l3-packets-received": "0",
"mpls-to-ipv4-l3-packets-received": "0",
"null-l3-packets-received": "0",
"openflow-packets-received": "0",
"packets-received-from-unknown-ifl": "0",
"packets-received-while-unconnected": "0",
"packets-sent-could-not-find-neighbor": "0",
"packets-sent-could-not-get-buffer": "0",
"packets-sent-when-host_unreachable": "0",
"packets-sent-when-transmit-disabled": "0",
"packets-sent-while-interface-down": "0",
"packets-sent-while-unconnected": "0",
"packets-sent-with-bad-af": "0",
"packets-sent-with-bad-ifl": "0",
"tnp-l3-packets-received": "0",
"ttp-packets-sent": "185307601",
"unknown-l3-packets-received": "0",
"vpls-l3-packets-received": "0",
},
"tudp": {
"broadcast-or-multicast-datagrams-dropped-due-to-no-socket":
"0",
"datagrams-dropped-due-to-full-socket-buffers": "0",
"datagrams-dropped-due-to-no-socket": "0",
"datagrams-output": "1",
"datagrams-received": "0",
"datagrams-with-bad-checksum": "0",
"datagrams-with-bad-data-length-field": "0",
"datagrams-with-incomplete-header": "0",
"delivered": "0",
},
"udp": {
"broadcast-or-multicast-datagrams-dropped-due-to-no-socket":
"0",
"datagrams-delivered": "86615318",
"datagrams-dropped-due-to-full-socket-buffers": "26",
"datagrams-dropped-due-to-no-socket": "13553",
"datagrams-not-for-hashed-pcb": "0",
"datagrams-output": "98245187",
"datagrams-received": "86628897",
"datagrams-with-bad-checksum": "0",
"datagrams-with-bad-datalength-field": "0",
"datagrams-with-incomplete-header": "0",
},
},
{
"bridge": {
"aging-acks-from-pfe": "0",
"aging-non-acks-from-pfe": "0",
"aging-requests-over-max-rate": "0",
"aging-requests-timed-out-waiting-on-fes": "0",
"bogus-address-in-aging-requests": "0",
"errors-finding-peer-fes": "0",
"learning-requests-over-capacity": "0",
"learning-requests-while-learning-disabled-on-interface":
"0",
"mac-route-aging-requests": "0",
"mac-route-learning-requests": "0",
"mac-routes-aged": "0",
"mac-routes-learned": "0",
"mac-routes-moved": "0",
"packets-dropped-due-to-no-l3-route-table": "0",
"packets-dropped-due-to-no-local-ifl": "0",
"packets-dropped-due-to-no-socket": "0",
"packets-for-this-host": "0",
"packets-punted": "0",
"packets-received": "0",
"packets-with-incorrect-version-number": "0",
"packets-with-no-auxiliary-table": "0",
"packets-with-no-ce-facing-entry": "0",
"packets-with-no-core-facing-entry": "0",
"packets-with-no-family": "0",
"packets-with-no-logical-interface": "0",
"packets-with-no-route-table": "0",
"packets-with-size-smaller-than-minimum": "0",
"requests-involving-multiple-peer-fes": "0",
"requests-to-age-static-route": "0",
"requests-to-learn-an-existing-route": "0",
"requests-to-move-static-route": "0",
"requests-to-re-ageout-aged-route": "0",
"unsupported-platform": "0",
},
"vpls": {
"aging-acks-from-pfe": "0",
"aging-non-acks-from-pfe": "0",
"aging-requests-over-max-rate": "0",
"aging-requests-timed-out-waiting-on-fes": "0",
"bogus-address-in-aging-requests": "0",
"errors-finding-peer-fes": "0",
"learning-requests-over-capacity": "0",
"learning-requests-while-learning-disabled-on-interface":
"0",
"mac-route-aging-requests": "0",
"mac-route-learning-requests": "0",
"mac-routes-aged": "0",
"mac-routes-learned": "0",
"mac-routes-moved": "0",
"packets-dropped-due-to-no-l3-route-table": "0",
"packets-dropped-due-to-no-local-ifl": "0",
"packets-dropped-due-to-no-socket": "0",
"packets-for-this-host": "0",
"packets-punted": "0",
"packets-received": "0",
"packets-with-incorrect-version-number": "0",
"packets-with-no-auxiliary-table": "0",
"packets-with-no-ce-facing-entry": "0",
"packets-with-no-core-facing-entry": "0",
"packets-with-no-family": "0",
"packets-with-no-logical-interface": "0",
"packets-with-no-route-table": "0",
"packets-with-size-smaller-than-minimum": "0",
"requests-involving-multiple-peer-fes": "0",
"requests-to-age-static-route": "0",
"requests-to-learn-an-existing-route": "0",
"requests-to-move-static-route": "0",
"requests-to-re-ageout-aged-route": "0",
"unsupported-platform": "0",
},
},
]
}
golden_output_1 = {
"execute.return_value":
"""
show system statistics
Tcp:
265063785 packets sent
52538606 data packets (49634888 bytes)
106366 data packets retransmitted (49356338 bytes)
0 resends initiated by MTU discovery
196250492 ack only packets (48858785 packets delayed)
0 URG only packets
0 window probe packets
3986235 window update packets
191405194 control packets
568914028 packets received
40875092 acks(for 50912129 bytes)
286370388 duplicate acks
0 acks for unsent data
66028460 packets received in-sequence(285528163 bytes)
133612660 completely duplicate packets(724472 bytes)
0 old duplicate packets
463 packets with some duplicate data(79013 bytes duped)
124832 out-of-order packets(58516475 bytes)
1207 packets of data after window(589372 bytes)
13 window probes
2896764 window update packets
300 packets received after close
1054 discarded for bad checksums
0 discarded for bad header offset fields
0 discarded because packet too short
12181850 connection requests
1258 connection accepts
445 bad connection attempts
0 listen queue overflows
1921 connections established (including accepts)
12185111 connections closed (including 438 drops)
1295 connections updated cached RTT on close
1295 connections updated cached RTT variance on close
360 connections updated cached ssthresh on close
12177708 embryonic connections dropped
38162864 segments updated rtt(of 48561265 attempts)
7925644 retransmit timeouts
162 connections dropped by retransmit timeout
20 persist timeouts
0 connections dropped by persist timeout
207602447 keepalive timeouts
206620576 keepalive probes sent
981871 connections dropped by keepalive
7954887 correct ACK header predictions
50195470 correct data packet header predictions
1283 syncache entries added
193 retransmitted
66 dupsyn
22 dropped
1258 completed
0 bucket overflow
0 cache overflow
10 reset
15 stale
0 aborted
0 badack
0 unreach
0 zone failures
0 cookies sent
0 cookies received
820 SACK recovery episodes
7 segment retransmits in SACK recovery episodes
72 byte retransmits in SACK recovery episodes
4488 SACK options (SACK blocks) received
112 SACK options (SACK blocks) sent
0 SACK scoreboard overflow
0 ACKs sent in response to in-window but not exact RSTs
0 ACKs sent in response to in-window SYNs on established connections
0 rcv packets dropped by TCP due to bad address
0 out-of-sequence segment drops due to insufficient memory
179222038 RST packets
1 ICMP packets ignored by TCP
0 send packets dropped by TCP due to auth errors
0 rcv packets dropped by TCP due to auth errors
0 outgoing segments dropped due to policing
udp:
86628897 datagrams received
0 with incomplete header
0 with bad data length field
0 with bad checksum
13553 dropped due to no socket
0 broadcast/multicast datagrams dropped due to no socket
26 dropped due to full socket buffers
0 not for hashed pcb
86615318 delivered
98245187 datagrams output
ip:
791039285 total packets received
0 bad header checksums
0 with size smaller than minimum
0 with data size < data length
0 with header length < data size
0 with data length < header length
0 with incorrect version number
0 packets destined to dead next hop
7776172 fragments received
0 fragments dropped (dup or out of space)
0 fragment sessions dropped (queue overflow)
2330 fragments dropped after timeout
3840557 packets reassembled ok
820964812 packets for this host
311 packets for unknown/unsupported protocol
0 packets forwarded
0 packets not forwardable
0 redirects sent
894567482 packets sent from this host
10684334 packets sent with fabricated ip header
0 output packets dropped due to no bufs
221 output packets discarded due to no route
189762 output datagrams fragmented
458290 fragments created
0 datagrams that can't be fragmented
0 packets with bad options
310 packets with options handled without error
0 strict source and record route options
0 loose source and record route options
0 record route options
0 timestamp options
0 timestamp and address options
0 timestamp and prespecified address options
0 option packets dropped due to rate limit
310 router alert options
0 multicast packets dropped (no iflist)
0 packets dropped (src and int don't match)
0 transit re packets dropped on mgmt i/f
0 packets used first nexthop in ecmp unilist
184307157 incoming ttpoip packets received
0 incoming ttpoip packets dropped
185307601 outgoing TTPoIP packets sent
0 outgoing TTPoIP packets dropped
46 raw packets dropped. no space in socket recv buffer
0 packets consumed by virtual-node processing
icmp:
0 drops due to rate limit
17647 calls to icmp_error
115 errors not generated because old message was icmp
Output Histogram
18108704 echo reply
13553 destination unreachable
15 echo
4094 time exceeded
0 messages with bad code fields
0 messages less than the minimum length
0 messages with bad checksum
0 messages with bad source address
0 messages with bad length
0 echo drops with broadcast or multicast destinaton address
0 timestamp drops with broadcast or multicast destination address
Input Histogram
15 echo reply
7376316 destination unreachable
18108704 echo
11308300 time exceeded
18108704 message responses generated
igmp:
310 messages received
0 messages received with too few bytes
0 messages received with bad checksum
308 membership queries received
0 membership queries received with invalid fields
0 membership reports received
0 membership reports received with invalid fields
0 membership reports received for groups to which we belong
943 Membership reports sent
ipsec:
0 inbound packets violated process security policy
0 Outbound packets violated process security policy
0 outbound packets with no SA available
0 outbound packets failed due to insufficient memory
0 outbound packets with no route
0 invalid outbound packets
0 Outbound packets with bundles SAs
0 mbuf coleasced during clone
0 Cluster coalesced during clone
0 Cluster copied during clone
0 mbuf inserted during makespace
ah:
0 packets shorter than header shows
0 packets dropped protocol unsupported
0 packets dropped no TDB
0 packets dropped bad KCR
0 packets dropped queue full
0 packets dropped no transform
0 replay counter wrap
0 packets dropped bad authentication detected
0 packets dropped bad authentication length
0 possible replay packets detected
0 packets in
0 packets out
0 packets dropped invalid TDB
0 bytes in
0 bytes out
0 packets dropped larger than maxpacket
0 packets blocked due to policy
0 crypto processing failure
0 tunnel sanity check failures
esp:
0 packets shorter than header shows
0 packets dropped protocol not supported
0 packets dropped no TDB
0 packets dropped bad KCR
0 packets dropped queue full
0 packets dropped no transform
0 packets dropped bad ilen
0 replay counter wrap
0 packets dropped bad encryption detected
0 packets dropped bad authentication detected
0 possible replay packets detected
0 packets in
0 packets out
0 packets dropped invalid TDB
0 bytes in
0 bytes out
0 packets dropped larger than maxpacket
0 packets blocked due to policy
0 crypto processing failure
0 tunnel sanity check failures
ipcomp:
0 packets shorter than header shows
0 packets dropped protocol not supported
0 packets dropped no TDB
0 packets dropped bad KCR
0 packets dropped queue full
0 packets dropped no transform
0 replay counter wrap
0 packets in
0 packets out
0 packets dropped invalid TDB
0 bytes in
0 bytes out
0 packets dropped larger than maxpacket
0 packets blocked due to policy
0 crypto processing failure
0 packets sent uncompressed threshold
0 packets sent uncompressed useless
raw_if:
0 RAW packets transmitted
0 PPPOE packets transmitted
0 ISDN packets transmitted
0 DIALER packets transmitted
0 PPP packets transmitted to pppd
0 PPP packets transmitted to jppd
0 IGMPL2 packets transmitted
0 MLDL2 packets transmitted
0 Fibre Channel packets transmitted
0 FIP packets transmitted
0 STP packets transmitted
0 LACP packets transmitted
0 VCCP packets transmitted
0 Fabric OAM packets transmitted
0 output drops due to tx error
0 MPU packets transmitted
0 PPPOE packets received
0 ISDN packets received
0 DIALER packets received
0 PPP packets received from pppd
0 MPU packets received
0 PPP packets received from jppd
0 IGMPL2 packets received
0 MLDL2 packets received
0 Fibre Channel packets received
0 FIP packets received
0 STP packets received
0 LACP packets received
0 VCCP packets received
0 Fabric OAM packets received
0 Fibre Channel packets dropped
0 FIP packets dropped
0 STP packets dropped
0 LACP packets dropped
0 Fabric OAM packets dropped
0 VCCP packets dropped
0 Input drops due to bogus protocol
0 input drops due to no mbufs available
0 input drops due to no space in socket
0 input drops due to no socket
arp:
200794 datagrams received
39895 ARP requests received
54355 ARP replies received
109 resolution request received
0 resolution request dropped
0 unrestricted proxy requests
0 restricted proxy requests
0 received proxy requests
0 unrestricted proxy requests not proxied
0 restricted proxy requests not proxied
0 datagrams with bogus interface
0 datagrams with incorrect length
0 datagrams for non-IP protocol
0 datagrams with unsupported op code
0 datagrams with bad protocol address length
0 datagrams with bad hardware address length
0 datagrams with multicast source address
87 datagrams with multicast target address
0 datagrams with my own hardware address
0 datagrams for an address not on the interface
0 datagrams with a broadcast source address
0 datagrams with source address duplicate to mine
106457 datagrams which were not for me
7 packets discarded waiting for resolution
15 packets sent after waiting for resolution
55086 ARP requests sent
39895 ARP replies sent
0 requests for memory denied
0 requests dropped on entry
0 requests dropped during retry
0 requests dropped due to interface deletion
0 requests on unnumbered interfaces
0 new requests on unnumbered interfaces
0 replies for from unnumbered interfaces
0 requests on unnumbered interface with non-subnetted donor
0 replies from unnumbered interface with non-subnetted donor
0 arp packets rejected as family is configured with deny arp
0 arp response packets are rejected on mace icl interface
0 arp replies are rejected as source and destination is same
0 arp probe for proxy address reachable from the incoming interface
0 arp request discarded for vrrp source address
0 self arp request packet received on irb interface
0 proxy arp request discarded as source ip is a proxy target
0 arp packets are dropped as nexthop allocation failed
0 arp packets received from peer vrrp rotuer and discarded
0 arp packets are rejected as target ip arp resolve is in progress
0 grat arp packets are ignored as mac address is not changed
0 arp packets are dropped from peer vrrp
0 arp packets are dropped as driver call failed
0 arp packets are dropped as source is not validated
75000 Max System ARP nh cache limit
59840 Max Public ARP nh cache limit
200 Max IRI ARP nh cache limit
14960 Max Management intf ARP nh cache limit
4 Current Public ARP nexthops present
1 Current IRI ARP nexthops present
2 Current Management ARP nexthops present
0 Total ARP nexthops creation failed as limit reached
0 Public ARP nexthops creation failed as public limit reached
0 IRI ARP nexthops creation failed as iri limit reached
0 Management ARP nexthops creation failed as mgt limit reached
ip6:
100720281 total packets received
0 packets with size smaller than minimum
0 packets with data size < data length
0 packets with bad options
0 packets with incorrect version number
0 fragments received
0 fragments dropped (dup or out of space)
0 fragments dropped after timeout
0 fragment sessions dropped (queue overflow)
0 packets reassembled ok
100720272 packets for this host
0 packets forwarded
0 packets not forwardable
0 redirects sent
101649920 packets sent from this host
4506372 packets sent with fabricated ip header
0 output packets dropped due to no bufs, etc.
1026 output packets discarded due to no route
0 output datagrams fragmented
0 fragments created
0 datagrams that can't be fragmented
0 packets that violated scope rules
0 multicast packets which we don't join
Input histogram:
5981247 TCP
82375306 UDP
7862032 ICMP6
4501665 OSPF
0 packets whose headers are not continuous
0 tunneling packets that can't find gif
0 packets discarded due to too may headers
0 failures of source address selection
source addresses on an outgoing I/F
1088799 link-locals
557 globals
source addresses of same scope
1088799 link-locals
556 globals
source addresses of a different scope
1 globals
0 forward cache hit
0 forward cache miss
0 Packets destined to dead next hop
0 option packets dropped due to rate limit
0 Packets dropped (src and int don't match)
0 packets dropped due to bad protocol
0 transit re packet(null) dropped on mgmt i/f
icmp6:
31 Calls to icmp_error
0 Errors not generated because old message was icmp error
0 Errors not generated because rate limitation
Output histogram:
31 unreach
544914 neighbor solicitation
544593 neighbor advertisement
0 Messages with bad code fields
0 Messages < minimum length
0 Bad checksums
0 Messages with bad length
Input histogram:
319 unreach
6773206 time exceeded
8 router solicitation
168 router advertisment
544587 neighbor solicitation
543766 neighbor advertisement
Histogram of error messages to be generated:
0 No route
0 Administratively prohibited
0 Beyond scope
31 Address unreachable
0 Port unreachable
0 Time exceed transit
0 Time exceed reassembly
0 Erroneous header field
0 Unrecognized next header
0 Unrecognized option
0 Unknown
0 Message responses generated
0 Messages with too many ND options
75000 Max System ND nh cache limit
59840 Max Public ND nh cache limit
200 Max IRI ND nh cache limit
14960 Max Management intf ND nh cache limit
3 Current Public ND nexthops present
1 Current IRI ND nexthops present
0 Current Management ND nexthops present
0 Total ND nexthops creation failed as limit reached
0 Public ND nexthops creation failed as public limit reached
0 IRI ND nexthops creation failed as iri limit reached
0 Management ND nexthops creation failed as mgt limit reached
0 interface-restricted ndp proxy requests
0 interface-restricted dad proxy requests
0 interface-restricted ndp proxy responses
0 interface-restricted dad proxy conflicts
0 interface-restricted dad proxy duplicates
0 interface-restricted ndp proxy resolve requests
0 interface-restricted dad proxy resolve requests
0 interface-restricted dad packets from same node dropped
543766 interface-restricted proxy packets dropped with nomac
0 ND hold nexthops dropped on entry by RED mark
0 ND hold nexthops dropped on timer expire by RED mark
ipsec6:
0 Inbound packets violated process security policy
0 Outbound packets violated process security policy
0 Outbound packets with no SA available
0 Outbound packets failed due to insufficient memory
0 Outbound packets with no route
0 Invalid outbound packets
0 Outbound packets with bundles SAs
0 mbuf coleasced during clone
0 Cluster coalesced during clone
0 Cluster copied during clone
0 mbuf inserted during makespace
pfkey:
1269 Requests sent from userland
69304 Bytes sent from userland
histogram by message type:
626 reserved
17 add
626 dump
pfkey:
0 Messages with invalid length field
0 Messages with invalid version field
0 Messages with invalid message type field
0 Messages too short
0 Messages with memory allocation failure
0 Messages with duplicate extension
0 Messages with invalid extension type
0 Messages with invalid sa type
0 Messages with invalid address extension
11269 Requests sent to userland
3189032 Bytes sent to userland
histogram by message type:
626 reserved
17 add
10626 dump
pfkey:
22500 Messages toward single socket
0 Messages toward all sockets
0 Messages toward registered sockets
0 Messages with memory allocation failure
clnl:
0 Total packets received
0 Packets delivered
0 Too small packets
0 Packets with bad header length
0 Packets with bad checksum
0 Bad version packets
0 Unknown or unsupported protocol packets
0 Packets with bogus sdl size
0 No free memory in socket buffer
0 Send packets discarded
0 Sbappend failure
0 Mcopy failure
0 Address fields were not reasonable
0 Segment information forgotten
0 Forwarded packets
0 Total packets sent
0 Output packets discarded
0 Non-forwarded packets
0 Packets fragmented
0 Fragments sent
0 Fragments discarded
0 Fragments timed out
0 Fragmentation prohibited
0 Packets reconstructed
0 Packets destined to dead nexthop
0 Packets discarded due to no route
0 Error pdu rate drops
0 ER pdu generation failure
esis:
0 Total pkts received
0 Total packets consumed by protocol
0 Pdus received with bad checksum
0 Pdus received with bad version number
0 Pdus received with bad type field
0 Short pdus received
0 Pdus withbogus sdl size
0 Pdus with bad header length
0 Pdus with unknown or unsupport protocol
0 No free memory in socket buffer
0 Send packets discarded
0 Sbappend failure
0 Mcopy failure
0 ISO family not configured
tnp:
0 Unicast packets received
18139196 Broadcast packets received
0 Fragmented packets received
0 Hello packets dropped
0 Fragments dropped
0 Fragment reassembly queue flushes
0 Packets with tnp src address collision received
18139196 Hello packets received
0 Control packets received
0 Rdp packets received
0 Udp packets received
0 Tunnel packets received
0 Input packets discarded with no protocol
0 Packets of version unspecified received
0 Packets of version 1 received
0 Packets of version 2 received
18139196 Packets of version 3 received
0 Unicast packets sent
18140767 Broadcast packets sent
0 Fragmented packets sent
0 Hello packets dropped
0 Fragments dropped
18140767 Hello packets sent
0 Control packets sent
0 Rdp packets sent
0 Udp packets sent
0 Tunnel packets sent
0 Packets sent with unknown protocol
0 Packets of version unspecified sent
0 Packets of version 1 sent
0 Packets of version 2 sent
18140767 Packets of version 3 sent
rdp:
0 Input packets
0 Packets discarded for bad checksum
0 Packets discarded due to bad sequence number
0 Refused connections
0 Acks received
0 Packets dropped due to full socket buffers
0 Retransmits
0 Output packets
0 Acks sent
0 Connects
0 Closes
0 Keepalives received
0 Keepalives sent
tudp:
0 Datagrams received
0 Datagrams with incomplete header
0 Datagrams with bad data length field
0 Datagrams with bad checksum
0 Datagrams dropped due to no socket
0 Broadcast/multicast datagrams dropped due to no socket
0 Datagrams dropped due to full socket buffers
0 Delivered
1 Datagrams output
ttp:
185307601 Packets sent
0 Packets sent while unconnected
0 Packets sent while interface down
0 Packets sent couldn't get buffer
0 Packets sent couldn't find neighbor
0 Packets sent when transmit is disable
0 Packets sent when host unreachable
0 L3 Packets sent could not get buffer
0 L3 Packets dropped
0 Packets sent with bad logical interface
0 Packets sent with bad address family
56842 L2 packets received
0 Unknown L3 packets received
83525851 IPv4 L3 packets received
0 MPLS L3 packets received
0 MPLS->IPV4 L3 packets received
4214 IPv4->MPLS L3 packets received
0 VPLS L3 packets received
100720250 IPv6 L3 packets received
0 ARP L3 packets received
0 CLNP L3 packets received
0 TNP L3 packets received
0 NULL L3 packets received
0 Cyclotron cycle L3 packets received
0 Cyclotron send L3 packets received
0 Openflow packets received
0 Packets received while unconnected
0 Packets received from unknown ifl
0 Input packets couldn't get buffer
0 Input packets with bad type
0 Input packets with discard type
0 Input packets with too many tlvs
0 Input packets with bad tlv header
0 Input packets with bad tlv type
0 Input packets dropped based on tlv result
0 Input packets with bad address family
0 Input packets for which rt lookup is bypassed
0 Input packets with ttp tlv of type TTP_TLV_P2MP_NBR_NHID
0 Input packets with unknown p2mp_nbr_nhid value
0 Input packets of type vxlan bfd
mpls:
4214 Total MPLS packets received
6118 Packets forwarded
0 Packets dropped
0 Packets with header too small
0 After tagging, packets can't fit link MTU
0 Packets with IPv4 explicit NULL tag
0 Packets with IPv4 explicit NULL cksum errors
0 Packets with router alert tag
5 LSP ping packets (ttl-expired/router alert)
4209 Packets with ttl expired
0 Packets with tag encoding error
0 Packets discarded due to no route
0 Packets used first nexthop in ecmp unilist
0 Packets dropped due to ifl down
0 Packets dropped at mpls socket send op
0 Packets forwarded at mpls socket send op
0 Packets dropped, over p2mp composite nexthop
ethoamlfm:
0 total received packets
0 input drops due to bad interface state
0 received packets forwarded
0 total transmitted packets
0 sent packets
0 output drops due to bad interface state
ethoamcfm:
0 total received packets
0 input drops due to bad interface state
0 received packets forwarded
0 total transmitted packets
0 sent packets
0 output drops due to bad interface state
0 flood requests forwarded to PFE
0 flood requests dropped
vpls:
0 Total packets received
0 Packets with size smaller than minimum
0 Packets with incorrect version number
0 Packets for this host
0 Packets with no logical interface
0 Packets with no family
0 Packets with no route table
0 Packets with no auxiliary table
0 Packets with no core-facing entry
0 packets with no CE-facing entry
0 MAC route learning requests
0 MAC routes learnt
0 Requests to learn an existing route
0 Learning requests while learning disabled on interface
0 Learning requests over capacity
0 MAC routes moved
0 Requests to move static route
0 MAC route aging requests
0 MAC routes aged
0 Bogus address in aging requests
0 Requests to age static route
0 Requests to re-ageout aged route
0 Requests involving multiple peer FEs
0 Aging acks from PFE
0 Aging non-acks from PFE
0 Aging requests timed out waiting on FEs
0 Aging requests over max-rate
0 Errors finding peer FEs
0 Unsupported platform
0 Packets dropped due to no l3 route table
0 Packets dropped due to no local ifl
0 Packets punted
0 Packets dropped due to no socket
bridge:
0 Total packets received
0 Packets with size smaller than minimum
0 Packets with incorrect version number
0 Packets for this host
0 Packets with no logical interface
0 Packets with no family
0 Packets with no route table
0 Packets with no auxiliary table
0 Packets with no core-facing entry
0 packets with no CE-facing entry
0 MAC route learning requests
0 MAC routes learnt
0 Requests to learn an existing route
0 Learning requests while learning disabled on interface
0 Learning requests over capacity
0 MAC routes moved
0 Requests to move static route
0 MAC route aging requests
0 MAC routes aged
0 Bogus address in aging requests
0 Requests to age static route
0 Requests to re-ageout aged route
0 Requests involving multiple peer FEs
0 Aging acks from PFE
0 Aging non-acks from PFE
0 Aging requests timed out waiting on FEs
0 Aging requests over max-rate
0 Errors finding peer FEs
0 Unsupported platform
0 Packets dropped due to no l3 route table
0 Packets dropped due to no local ifl
0 Packets punted
0 Packets dropped due to no socket
"""
}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowSystemStatistics(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden_1(self):
self.device = Mock(**self.golden_output_1)
obj = ShowSystemStatistics(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_1)
# =========================================================
# Unit test for show system statistics no-forwarding
# =========================================================
class TestShowSystemStatisticsNoForwarding(unittest.TestCase):
device = Device(name="aDevice")
maxDiff = None
empty_output = {"execute.return_value": ""}
golden_parsed_output_1 = {
"statistics": [
{
"ah": {
"bytes-in": "0",
"bytes-out": "0",
"crypto-processing-failure": "0",
"packets-blocked-due-to-policy": "0",
"packets-dropped-as-bad-authentication-detected": "0",
"packets-dropped-as-larger-than-ip-maxpacket": "0",
"packets-dropped-as-protocol-unsupported": "0",
"packets-dropped-due-to-bad-authentication-length": "0",
"packets-dropped-due-to-bad-kcr": "0",
"packets-dropped-due-to-invalid-tdb": "0",
"packets-dropped-due-to-no-tdb": "0",
"packets-dropped-due-to-no-transform": "0",
"packets-dropped-due-to-queue-full": "0",
"packets-in": "0",
"packets-out": "0",
"packets-shorter-than-header-shows": "0",
"possible-replay-packets-detected": "0",
"replay-counter-wrap": "0",
"tunnel-sanity-check-failures": "0",
},
"arp": {
"arp-iri-cnt":
"1",
"arp-iri-drop":
"0",
"arp-iri-max":
"200",
"arp-mgt-cnt":
"2",
"arp-mgt-drop":
"0",
"arp-mgt-max":
"14960",
"arp-packets-are-dropped-as-driver-call-failed":
"0",
"arp-packets-are-dropped-as-nexthop-allocation-failed":
"0",
"arp-packets-are-dropped-as-source-is-not-validated":
"0",
"arp-packets-are-dropped-from-peer-vrrp":
"0",
"arp-packets-are-rejected-as-target-ip-arp-resolve-is-in-progress":
"0",
"arp-packets-received-from-peer-vrrp-router-and-discarded":
"0",
"arp-packets-rejected-as-family-is-configured-with-deny-arp":
"0",
"arp-probe-for-proxy-address-reachable-from-the-incoming-interface":
"0",
"arp-public-cnt":
"4",
"arp-public-drop":
"0",
"arp-public-max":
"59840",
"arp-replies-are-rejected-as-source-and-destination-is-same":
"0",
"arp-replies-received":
"54355",
"arp-replies-sent":
"39895",
"arp-request-discarded-for-vrrp-source-address":
"0",
"arp-requests-received":
"39895",
"arp-requests-sent":
"55086",
"arp-response-packets-are-rejected-on-mace-icl-interface":
"0",
"arp-system-drop":
"0",
"arp-system-max":
"75000",
"datagrams-for-an-address-not-on-the-interface":
"0",
"datagrams-for-non-ip-protocol":
"0",
"datagrams-received":
"200794",
"datagrams-which-were-not-for-me":
"106457",
"datagrams-with-a-broadcast-source-address":
"0",
"datagrams-with-bad-hardware-address-length":
"0",
"datagrams-with-bad-protocol-address-length":
"0",
"datagrams-with-bogus-interface":
"0",
"datagrams-with-incorrect-length":
"0",
"datagrams-with-multicast-source-address":
"0",
"datagrams-with-multicast-target-address":
"87",
"datagrams-with-my-own-hardware-address":
"0",
"datagrams-with-source-address-duplicate-to-mine":
"0",
"datagrams-with-unsupported-opcode":
"0",
"grat-arp-packets-are-ignored-as-mac-address-is-not-changed":
"0",
"new-requests-on-unnumbered-interfaces":
"0",
"packets-discarded-waiting-for-resolution":
"7",
"packets-sent-after-waiting-for-resolution":
"15",
"proxy-arp-request-discarded-as-source-ip-is-a-proxy-target":
"0",
"proxy-requests-not-proxied":
"0",
"received-proxy-requests":
"0",
"replies-from-unnumbered-interface-with-non-subnetted-donor":
"0",
"replies-from-unnumbered-interfaces":
"0",
"requests-dropped-due-to-interface-deletion":
"0",
"requests-dropped-during-retry":
"0",
"requests-dropped-on-entry":
"0",
"requests-for-memory-denied":
"0",
"requests-on-unnumbered-interface-with-non-subnetted-donor":
"0",
"requests-on-unnumbered-interfaces":
"0",
"resolution-request-dropped":
"0",
"resolution-request-received":
"109",
"restricted-proxy-requests":
"0",
"restricted-proxy-requests-not-proxied":
"0",
"self-arp-request-packet-received-on-irb-interface":
"0",
"unrestricted-proxy-requests":
"0",
},
"clnl": {
"address-fields-were-not-reasonable": "0",
"bad-version-packets": "0",
"er-pdu-generation-failure": "0",
"error-pdu-rate-drops": "0",
"forwarded-packets": "0",
"fragmentation-prohibited": "0",
"fragments-discarded": "0",
"fragments-sent": "0",
"fragments-timed-out": "0",
"mcopy-failure": "0",
"no-free-memory-in-socket-buffer": "0",
"non-forwarded-packets": "0",
"output-packets-discarded": "0",
"packets-delivered": "0",
"packets-destined-to-dead-nexthop": "0",
"packets-discarded-due-to-no-route": "0",
"packets-fragmented": "0",
"packets-reconstructed": "0",
"packets-with-bad-checksum": "0",
"packets-with-bad-header-length": "0",
"packets-with-bogus-sdl-size": "0",
"sbappend-failure": "0",
"segment-information-forgotten": "0",
"send-packets-discarded": "0",
"too-small-packets": "0",
"total-clnl-packets-received": "0",
"total-packets-sent": "0",
"unknown-or-unsupported-protocol-packets": "0",
},
"esis": {
"iso-family-not-configured": "0",
"mcopy-failure": "0",
"no-free-memory-in-socket-buffer": "0",
"pdus-received-with-bad-checksum": "0",
"pdus-received-with-bad-type-field": "0",
"pdus-received-with-bad-version-number": "0",
"pdus-with-bad-header-length": "0",
"pdus-with-bogus-sdl-size": "0",
"pdus-with-unknown-or-unsupport-protocol": "0",
"sbappend-failure": "0",
"send-packets-discarded": "0",
"short-pdus-received": "0",
"total-esis-packets-received": "0",
"total-packets-consumed-by-protocol": "0",
},
"esp": {
"esp-bytes-in": "0",
"esp-bytes-out": "0",
"esp-crypto-processing-failure": "0",
"esp-packets-blocked-due-to-policy": "0",
"esp-packets-dropped-as-bad-authentication-detected": "0",
"esp-packets-dropped-as-bad-encryption-detected": "0",
"esp-packets-dropped-as-bad-ilen": "0",
"esp-packets-dropped-as-invalid-tdb": "0",
"esp-packets-dropped-as-larger-than-ip-maxpacket": "0",
"esp-packets-dropped-as-protocol-not-supported": "0",
"esp-packets-dropped-due-to-bad-kcr": "0",
"esp-packets-dropped-due-to-no-tdb": "0",
"esp-packets-dropped-due-to-no-transform": "0",
"esp-packets-dropped-due-to-queue-full": "0",
"esp-packets-in": "0",
"esp-packets-out": "0",
"esp-packets-shorter-than-header-shows": "0",
"esp-possible-replay-packets-detected": "0",
"esp-replay-counter-wrap": "0",
"esp-tunnel-sanity-check-failures": "0",
},
"ethoamcfm": {
"flood-requests-dropped": "0",
"flood-requests-forwarded-to-pfe": "0",
"input-packets-drop-bad-interface-state": "0",
"output-packets-drop-bad-interface-state": "0",
"packets-sent": "0",
"received-packets-forwarded": "0",
"total-packets-received": "0",
"total-packets-transmitted": "0",
},
"ethoamlfm": {
"input-packets-drop-bad-interface-state": "0",
"output-packets-drop-bad-interface-state": "0",
"packets-sent": "0",
"received-packets-forwarded": "0",
"total-packets-received": "0",
"total-packets-transmitted": "0",
},
"icmp": {
"calls-to-icmp-error":
"17648",
"drops-due-to-rate-limit":
"0",
"echo-drops-with-broadcast-or-multicast-destinaton-address":
"0",
"errors-not-generated-because-old-message-was-icmp":
"115",
"histogram": [
{
"destination-unreachable": "13554",
"icmp-echo": "15",
"icmp-echo-reply": "18108802",
"time-exceeded": "4094",
"type-of-histogram": "Output "
"Histogram",
},
{
"destination-unreachable": "7376365",
"icmp-echo": "18108802",
"icmp-echo-reply": "15",
"time-exceeded": "11308301",
"type-of-histogram": "Input "
"Histogram",
},
],
"message-responses-generated":
"18108802",
"messages-less-than-the-minimum-length":
"0",
"messages-with-bad-checksum":
"0",
"messages-with-bad-code-fields":
"0",
"messages-with-bad-length":
"0",
"messages-with-bad-source-address":
"0",
"timestamp-drops-with-broadcast-or-multicast-destination-address":
"0",
},
"icmp6": {
"address-unreachable":
"31",
"administratively-prohibited":
"0",
"bad-checksums":
"0",
"beyond-scope":
"0",
"calls-to-icmp6-error":
"31",
"erroneous-header-field":
"0",
"errors-not-generated-because-old-message-was-icmp-error":
"0",
"errors-not-generated-because-rate-limitation":
"0",
"histogram-of-error-messages-to-be-generated":
"Histogram "
"of "
"error "
"messages "
"to "
"be "
"generated:",
"icmp6-message-responses-generated":
"0",
"icmp6-messages-with-bad-code-fields":
"0",
"icmp6-messages-with-bad-length":
"0",
"input-histogram": {
"histogram-type": "Input "
"histogram:",
"neighbor-advertisement": "543769",
"neighbor-solicitation": "544589",
"router-advertisement-icmp6-packets": "168",
"router-solicitation-icmp6-packets": "8",
"time-exceeded-icmp6-packets": "6773211",
"unreachable-icmp6-packets": "319",
},
"messages-less-than-minimum-length":
"0",
"messages-with-too-many-nd-options":
"0",
"nd-iri-cnt":
"1",
"nd-iri-drop":
"0",
"nd-iri-max":
"200",
"nd-mgt-cnt":
"0",
"nd-mgt-drop":
"0",
"nd-mgt-max":
"14960",
"nd-public-cnt":
"3",
"nd-public-drop":
"0",
"nd-public-max":
"59840",
"nd-system-drop":
"0",
"nd-system-max":
"75000",
"nd6-dad-proxy-conflicts":
"0",
"nd6-dad-proxy-eqmac-drop":
"0",
"nd6-dad-proxy-nomac-drop":
"543769",
"nd6-dad-proxy-requests":
"0",
"nd6-dad-proxy-resolve-cnt":
"0",
"nd6-dup-proxy-responses":
"0",
"nd6-ndp-proxy-requests":
"0",
"nd6-ndp-proxy-resolve-cnt":
"0",
"nd6-ndp-proxy-responses":
"0",
"nd6-requests-dropped-during-retry":
"0",
"nd6-requests-dropped-on-entry":
"0",
"no-route":
"0",
"output-histogram": {
"histogram-type": "Output "
"histogram:",
"neighbor-advertisement": "544595",
"neighbor-solicitation": "544917",
"unreachable-icmp6-packets": "31",
},
"port-unreachable":
"0",
"protocol-name":
"icmp6:",
"time-exceed-reassembly":
"0",
"time-exceed-transit":
"0",
"unknown":
"0",
"unrecognized-next-header":
"0",
"unrecognized-option":
"0",
},
"igmp": {
"membership-queries-received": "308",
"membership-queries-received-with-invalid-fields": "0",
"membership-reports-received": "0",
"membership-reports-received-for-groups-to-which-we-belong":
"0",
"membership-reports-received-with-invalid-fields": "0",
"membership-reports-sent": "943",
"messages-received": "310",
"messages-received-with-bad-checksum": "0",
"messages-received-with-too-few-bytes": "0",
},
"ip": {
"bad-header-checksums": "0",
"datagrams-that-can-not-be-fragmented": "0",
"fragments-created": "458290",
"fragments-dropped-after-timeout": "2330",
"fragments-dropped-due-to-outofspace-or-dup": "0",
"fragments-dropped-due-to-queueoverflow": "0",
"fragments-received": "7776172",
"incoming-rawip-packets-dropped-no-socket-buffer": "46",
"incoming-ttpoip-packets-dropped": "0",
"incoming-ttpoip-packets-received": "184307870",
"incoming-virtual-node-packets-delivered": "0",
"loose-source-and-record-route-options": "0",
"multicast-packets-dropped": "0",
"option-packets-dropped-due-to-rate-limit": "0",
"outgoing-ttpoip-packets-dropped": "0",
"outgoing-ttpoip-packets-sent": "185308407",
"output-datagrams-fragmented": "189762",
"output-packets-discarded-due-to-no-route": "221",
"output-packets-dropped-due-to-no-bufs": "0",
"packets-destined-to-dead-next-hop": "0",
"packets-dropped": "0",
"packets-for-this-host": "820969848",
"packets-for-unknown-or-unsupported-protocol": "311",
"packets-forwarded": "0",
"packets-not-forwardable": "0",
"packets-reassembled-ok": "3840557",
"packets-received": "791044982",
"packets-sent-from-this-host": "894573985",
"packets-sent-with-fabricated-ip-header": "10684398",
"packets-used-first-nexthop-in-ecmp-unilist": "0",
"packets-with-bad-options": "0",
"packets-with-data-length-less-than-headerlength": "0",
"packets-with-data-size-less-than-datalength": "0",
"packets-with-header-length-less-than-data-size": "0",
"packets-with-incorrect-version-number": "0",
"packets-with-options-handled-without-error": "310",
"packets-with-size-smaller-than-minimum": "0",
"record-route-options": "0",
"redirects-sent": "0",
"router-alert-options": "310",
"strict-source-and-record-route-options": "0",
"timestamp-and-address-options": "0",
"timestamp-and-prespecified-address-options": "0",
"timestamp-options": "0",
"transit-re-packets-dropped-on-mgmt-interface": "0",
},
"ip6": {
"duplicate-or-out-of-space-fragments-dropped":
"0",
"failures-of-source-address-selection":
"0",
"forward-cache-hit":
"0",
"forward-cache-miss":
"0",
"fragments-that-exceeded-limit":
"0",
"header-type": [
{
"globals":
"557",
"header-for-source-address-selection":
"source "
"addresses "
"on "
"an "
"outgoing "
"I/F",
"link-locals":
"1088804",
},
{
"globals":
"556",
"header-for-source-address-selection":
"source "
"addresses "
"of "
"same "
"scope",
"link-locals":
"1088804",
},
{
"globals":
"1",
"header-for-source-address-selection":
"source "
"addresses "
"of "
"a "
"different "
"scope",
},
],
"histogram":
"Input histogram:",
"ip6-datagrams-that-can-not-be-fragmented":
"0",
"ip6-fragments-created":
"0",
"ip6-fragments-dropped-after-timeout":
"0",
"ip6-fragments-received":
"0",
"ip6-option-packets-dropped-due-to-rate-limit":
"0",
"ip6-output-datagrams-fragmented":
"0",
"ip6-output-packets-discarded-due-to-no-route":
"1026",
"ip6-output-packets-dropped-due-to-no-bufs":
"0",
"ip6-packets-destined-to-dead-next-hop":
"0",
"ip6-packets-dropped":
"0",
"ip6-packets-for-this-host":
"100720775",
"ip6-packets-forwarded":
"0",
"ip6-packets-not-forwardable":
"0",
"ip6-packets-reassembled-ok":
"0",
"ip6-packets-sent-from-this-host":
"101650461",
"ip6-packets-sent-with-fabricated-ip-header":
"4506397",
"ip6-packets-with-bad-options":
"0",
"ip6-packets-with-incorrect-version-number":
"0",
"ip6-packets-with-size-smaller-than-minimum":
"0",
"ip6-redirects-sent":
"0",
"ip6nh-icmp6":
"7862042",
"ip6nh-ospf":
"4501689",
"ip6nh-tcp":
"5981275",
"ip6nh-udp":
"82375747",
"multicast-packets-which-we-do-not-join":
"0",
"packets-discarded-due-to-too-may-headers":
"0",
"packets-dropped-due-to-bad-protocol":
"0",
"packets-that-violated-scope-rules":
"0",
"packets-whose-headers-are-not-continuous":
"0",
"packets-with-datasize-less-than-data-length":
"0",
"total-packets-received":
"100720784",
"transit-re-packet-dropped-on-mgmt-interface":
"0",
"tunneling-packets-that-can-not-find-gif":
"0",
},
"ipcomp": {
"ipcomp-bytes-in": "0",
"ipcomp-bytes-out": "0",
"ipcomp-crypto-processing-failure": "0",
"ipcomp-packets-blocked-due-to-policy": "0",
"ipcomp-packets-dropped-as-invalid-tdb": "0",
"ipcomp-packets-dropped-as-larger-than-ip-maxpacket": "0",
"ipcomp-packets-dropped-as-protocol-not-supported": "0",
"ipcomp-packets-dropped-due-to-bad-kcr": "0",
"ipcomp-packets-dropped-due-to-no-tdb": "0",
"ipcomp-packets-dropped-due-to-no-transform": "0",
"ipcomp-packets-dropped-due-to-queue-full": "0",
"ipcomp-packets-in": "0",
"ipcomp-packets-out": "0",
"ipcomp-packets-shorter-than-header-shows": "0",
"ipcomp-replay-counter-wrap": "0",
"packets-sent-uncompressed-threshold": "0",
"packets-sent-uncompressed-useless": "0",
},
"ipsec": {
"cluster-coalesced-during-clone": "0",
"cluster-copied-during-clone": "0",
"inbound-packets-violated-process-security-policy": "0",
"invalid-outbound-packets": "0",
"mbuf-coalesced-during-clone": "0",
"mbuf-inserted-during-makespace": "0",
"outbound-packets-failed-due-to-insufficient-memory": "0",
"outbound-packets-violated-process-security-policy": "0",
"outbound-packets-with-bundled-sa": "0",
"outbound-packets-with-no-route": "0",
"outbound-packets-with-no-sa-available": "0",
},
"ipsec6": {
"cluster-coalesced-during-clone": "0",
"cluster-copied-during-clone": "0",
"inbound-packets-violated-process-security-policy": "0",
"invalid-outbound-packets": "0",
"mbuf-coalesced-during-clone": "0",
"mbuf-inserted-during-makespace": "0",
"outbound-packets-failed-due-to-insufficient-memory": "0",
"outbound-packets-violated-process-security-policy": "0",
"outbound-packets-with-bundled-sa": "0",
"outbound-packets-with-no-route": "0",
"outbound-packets-with-no-sa-available": "0",
},
"mpls": {
"after-tagging-packets-can-not-fit-link-mtu": "0",
"lsp-ping-packets": "5",
"packets-discarded-due-to-no-route": "0",
"packets-dropped": "0",
"packets-dropped-at-mpls-socket-send": "0",
"packets-dropped-at-p2mp-cnh-output": "0",
"packets-dropped-due-to-ifl-down": "0",
"packets-forwarded": "6118",
"packets-forwarded-at-mpls-socket-send": "0",
"packets-used-first-nexthop-in-ecmp-unilist": "0",
"packets-with-header-too-small": "0",
"packets-with-ipv4-explicit-null-checksum-errors": "0",
"packets-with-ipv4-explicit-null-tag": "0",
"packets-with-router-alert-tag": "0",
"packets-with-tag-encoding-error": "0",
"packets-with-ttl-expired": "4209",
"total-mpls-packets-received": "4214",
},
"pfkey": {
"bytes-sent-from-userland": "69304",
"bytes-sent-to-userland": "3189032",
"incoming-messages-with-memory-allocation-failure": "0",
"input-histogram": {
"add": "17",
"dump": "10626",
"histogram": "histogram by "
"message type:",
"reserved": "626",
},
"messages-too-short": "0",
"messages-toward-all-sockets": "0",
"messages-toward-registered-sockets": "0",
"messages-toward-single-socket": "22500",
"messages-with-duplicate-extension": "0",
"messages-with-invalid-address-extension": "0",
"messages-with-invalid-extension-type": "0",
"messages-with-invalid-length-field": "0",
"messages-with-invalid-message-type-field": "0",
"messages-with-invalid-sa-type": "0",
"messages-with-invalid-version-field": "0",
"outgoing-messages-with-memory-allocation-failure": "0",
"output-histogram": {
"add": "17",
"dump": "626",
"histogram": "histogram by "
"message type:",
"reserved": "626",
},
"requests-sent-from-userland": "1269",
"requests-sent-to-userland": "11269",
},
"raw-interface": {
"dialer-packets-received": "0",
"dialer-packets-transmitted": "0",
"faboam-packets-dropped": "0",
"faboam-packets-received": "0",
"faboam-packets-transmitted": "0",
"fibre-channel-packets-dropped": "0",
"fibre-channel-packets-received": "0",
"fibre-channel-packets-transmitted": "0",
"fip-packets-dropped": "0",
"fip-packets-received": "0",
"fip-packets-transmitted": "0",
"igmpl2-packets-received": "0",
"igmpl2-packets-transmitted": "0",
"input-drops-due-to-bogus-protocol": "0",
"input-drops-due-to-no-mbufs-available": "0",
"input-drops-due-to-no-socket": "0",
"input-drops-due-to-no-space-in-socket": "0",
"isdn-packets-received": "0",
"isdn-packets-transmitted": "0",
"lacp-packets-dropped": "0",
"lacp-packets-received": "0",
"lacp-packets-transmitted": "0",
"mldl2-packets-received": "0",
"mldl2-packets-transmitted": "0",
"mpu-packets-received": "0",
"mpu-packets-transmitted": "0",
"output-drops-due-to-transmit-error": "0",
"ppoe-packets-transmitted": "0",
"ppp-packets-received-from-jppd": "0",
"ppp-packets-received-from-pppd": "0",
"ppp-packets-transmitted-to-jppd": "0",
"ppp-packets-transmitted-to-pppd": "0",
"pppoe-packets-received": "0",
"raw-packets-transmitted": "0",
"stp-packets-dropped": "0",
"stp-packets-received": "0",
"stp-packets-transmitted": "0",
"vccp-packets-dropped": "0",
"vccp-packets-received": "0",
"vccp-packets-transmitted": "0",
},
"rdp": {
"acks-received": "0",
"acks-sent": "0",
"closes": "0",
"connects": "0",
"input-packets": "0",
"keepalives-received": "0",
"keepalives-sent": "0",
"output-packets": "0",
"packets-discarded-due-to-bad-sequence-number": "0",
"packets-discarded-for-bad-checksum": "0",
"packets-dropped-due-to-full-socket-buffers": "0",
"packets-dropped-full-repl-sock-buf": "0",
"refused-connections": "0",
"retransmits": "0",
},
"tcp": {
"aborted": "0",
"ack-header-predictions": "7954923",
"acks-bytes": "51344691",
"acks-sent-in-response-but-not-exact-rsts": "0",
"acks-sent-in-response-to-syns-on-established-connections":
"0",
"attempts": "48562532",
"bad-connection-attempts": "445",
"badack": "0",
"bucket-overflow": "0",
"byte-retransmits": "72",
"bytes": "589372",
"cache-overflow": "0",
"completed": "1258",
"connection-accepts": "1258",
"connection-requests": "12181907",
"connections-closed": "12185170",
"connections-dropped-by-persist-timeout": "0",
"connections-dropped-by-retransmit-timeout": "162",
"connections-established": "1921",
"connections-updated-rtt-on-close": "1295",
"connections-updated-ssthresh-on-close": "360",
"connections-updated-variance-on-close": "1295",
"cookies-received": "0",
"cookies-sent": "0",
"data-packet-header-predictions": "50195730",
"data-packets-bytes": "50070222",
"dropped": "22",
"drops": "438",
"duplicate-in-bytes": "724472",
"dupsyn": "66",
"embryonic-connections-dropped": "12177767",
"icmp-packets-ignored": "1",
"in-sequence-bytes": "285830455",
"keepalive-connections-dropped": "981880",
"keepalive-probes-sent": "206621688",
"keepalive-timeouts": "207603568",
"listen-queue-overflows": "0",
"out-of-order-in-bytes": "58516475",
"out-of-sequence-segment-drops": "0",
"outgoing-segments-dropped": "0",
"packets-received": "568918236",
"packets-received-after-close": "300",
"packets-received-in-sequence": "66028796",
"packets-sent": "265067267",
"persist-timeouts": "20",
"rcv-packets-dropped": "0",
"rcv-packets-dropped-due-to-bad-address": "0",
"received-acks": "40876471",
"received-acks-for-unsent-data": "0",
"received-completely-duplicate-packet": "133613375",
"received-discarded-because-packet-too-short": "0",
"received-discarded-for-bad-checksum": "1054",
"received-discarded-for-bad-header-offset": "0",
"received-duplicate-acks": "286371924",
"received-old-duplicate-packets": "0",
"received-out-of-order-packets": "124832",
"received-packets-of-data-after-window": "1207",
"received-packets-with-some-dupliacte-data": "463",
"received-window-probes": "13",
"received-window-update-packets": "2896764",
"reset": "10",
"retransmit-timeouts": "7925703",
"retransmitted": "193",
"retransmitted-bytes": "49357786",
"rst-packets": "179223021",
"sack-opitions-sent": "112",
"sack-options-received": "4489",
"sack-recovery-episodes": "820",
"sack-scoreboard-overflow": "0",
"segment-retransmits": "7",
"segments-updated-rtt": "38164243",
"send-packets-dropped": "0",
"sent-ack-only-packets": "196251531",
"sent-control-packets": "191406234",
"sent-data-packets": "52540975",
"sent-data-packets-retransmitted": "106367",
"sent-packets-delayed": "48859032",
"sent-resends-by-mtu-discovery": "0",
"sent-urg-only-packets": "0",
"sent-window-probe-packets": "0",
"sent-window-update-packets": "3986251",
"some-duplicate-in-bytes": "79013",
"stale": "15",
"syncache-entries-added": "1283",
"unreach": "0",
"zone-failures": "0",
},
"tnp": {
"broadcast-packets-received": "18139293",
"broadcast-packets-sent": "18140864",
"control-packets-received": "0",
"control-packets-sent": "0",
"fragment-reassembly-queue-flushes": "0",
"fragmented-packets-received": "0",
"fragmented-packets-sent": "0",
"hello-packets-received": "18139293",
"hello-packets-sent": "18140864",
"input-packets-discarded-with-no-protocol": "0",
"packets-of-version-unspecified-received": "0",
"packets-of-version-unspecified-sent": "0",
"packets-of-version1-received": "0",
"packets-of-version1-sent": "0",
"packets-of-version2-received": "0",
"packets-of-version2-sent": "0",
"packets-of-version3-received": "18139293",
"packets-of-version3-sent": "18140864",
"packets-sent-with-unknown-protocol": "0",
"packets-with-tnp-src-address-collision-received": "0",
"rdp-packets-received": "0",
"rdp-packets-sent": "0",
"received-fragments-dropped": "0",
"received-hello-packets-dropped": "0",
"sent-fragments-dropped": "0",
"sent-hello-packets-dropped": "0",
"tunnel-packets-received": "0",
"tunnel-packets-sent": "0",
"udp-packets-received": "0",
"udp-packets-sent": "0",
"unicast-packets-received": "0",
"unicast-packets-sent": "0",
},
"ttp": {
"arp-l3-packets-received": "0",
"clnp-l3-packets-received": "0",
"cyclotron-cycle-l3-packets-received": "0",
"cyclotron-send-l3-packets-received": "0",
"input-packets-could-not-get-buffer": "0",
"input-packets-for-which-route-lookup-is-bypassed": "0",
"input-packets-tlv-dropped": "0",
"input-packets-with-bad-af": "0",
"input-packets-with-bad-tlv-header": "0",
"input-packets-with-bad-tlv-type": "0",
"input-packets-with-bad-type": "0",
"input-packets-with-discard-type": "0",
"input-packets-with-too-many-tlvs": "0",
"input-packets-with-ttp-tlv-p2mp-nbr-nhid-type": "0",
"input-packets-with-unknown-p2mp-nbr-nhid": "0",
"input-packets-with-vxlan-bfd-pkts": "0",
"ipv4-l3-packets-received": "83526062",
"ipv4-to-mpls-l3-packets-received": "4214",
"ipv6-l3-packets-received": "100720753",
"l2-packets-received": "56842",
"l3-packets-dropped": "0",
"l3-packets-sent-could-not-get-buffer": "0",
"mpls-l3-packets-received": "0",
"mpls-to-ipv4-l3-packets-received": "0",
"null-l3-packets-received": "0",
"openflow-packets-received": "0",
"packets-received-from-unknown-ifl": "0",
"packets-received-while-unconnected": "0",
"packets-sent-could-not-find-neighbor": "0",
"packets-sent-could-not-get-buffer": "0",
"packets-sent-when-host_unreachable": "0",
"packets-sent-when-transmit-disabled": "0",
"packets-sent-while-interface-down": "0",
"packets-sent-while-unconnected": "0",
"packets-sent-with-bad-af": "0",
"packets-sent-with-bad-ifl": "0",
"tnp-l3-packets-received": "0",
"ttp-packets-sent": "185308407",
"unknown-l3-packets-received": "0",
"vpls-l3-packets-received": "0",
},
"tudp": {
"broadcast-or-multicast-datagrams-dropped-due-to-no-socket":
"0",
"datagrams-dropped-due-to-full-socket-buffers": "0",
"datagrams-dropped-due-to-no-socket": "0",
"datagrams-output": "1",
"datagrams-received": "0",
"datagrams-with-bad-checksum": "0",
"datagrams-with-bad-data-length-field": "0",
"datagrams-with-incomplete-header": "0",
"delivered": "0",
},
"udp": {
"broadcast-or-multicast-datagrams-dropped-due-to-no-socket":
"0",
"datagrams-delivered": "86615785",
"datagrams-dropped-due-to-full-socket-buffers": "26",
"datagrams-dropped-due-to-no-socket": "13554",
"datagrams-not-for-hashed-pcb": "0",
"datagrams-output": "98245707",
"datagrams-received": "86629365",
"datagrams-with-bad-checksum": "0",
"datagrams-with-bad-datalength-field": "0",
"datagrams-with-incomplete-header": "0",
},
},
{
"bridge": {
"aging-acks-from-pfe": "0",
"aging-non-acks-from-pfe": "0",
"aging-requests-over-max-rate": "0",
"aging-requests-timed-out-waiting-on-fes": "0",
"bogus-address-in-aging-requests": "0",
"errors-finding-peer-fes": "0",
"learning-requests-over-capacity": "0",
"learning-requests-while-learning-disabled-on-interface":
"0",
"mac-route-aging-requests": "0",
"mac-route-learning-requests": "0",
"mac-routes-aged": "0",
"mac-routes-learned": "0",
"mac-routes-moved": "0",
"packets-dropped-due-to-no-l3-route-table": "0",
"packets-dropped-due-to-no-local-ifl": "0",
"packets-dropped-due-to-no-socket": "0",
"packets-for-this-host": "0",
"packets-punted": "0",
"packets-received": "0",
"packets-with-incorrect-version-number": "0",
"packets-with-no-auxiliary-table": "0",
"packets-with-no-ce-facing-entry": "0",
"packets-with-no-core-facing-entry": "0",
"packets-with-no-family": "0",
"packets-with-no-logical-interface": "0",
"packets-with-no-route-table": "0",
"packets-with-size-smaller-than-minimum": "0",
"requests-involving-multiple-peer-fes": "0",
"requests-to-age-static-route": "0",
"requests-to-learn-an-existing-route": "0",
"requests-to-move-static-route": "0",
"requests-to-re-ageout-aged-route": "0",
"unsupported-platform": "0",
},
"vpls": {
"aging-acks-from-pfe": "0",
"aging-non-acks-from-pfe": "0",
"aging-requests-over-max-rate": "0",
"aging-requests-timed-out-waiting-on-fes": "0",
"bogus-address-in-aging-requests": "0",
"errors-finding-peer-fes": "0",
"learning-requests-over-capacity": "0",
"learning-requests-while-learning-disabled-on-interface":
"0",
"mac-route-aging-requests": "0",
"mac-route-learning-requests": "0",
"mac-routes-aged": "0",
"mac-routes-learned": "0",
"mac-routes-moved": "0",
"packets-dropped-due-to-no-l3-route-table": "0",
"packets-dropped-due-to-no-local-ifl": "0",
"packets-dropped-due-to-no-socket": "0",
"packets-for-this-host": "0",
"packets-punted": "0",
"packets-received": "0",
"packets-with-incorrect-version-number": "0",
"packets-with-no-auxiliary-table": "0",
"packets-with-no-ce-facing-entry": "0",
"packets-with-no-core-facing-entry": "0",
"packets-with-no-family": "0",
"packets-with-no-logical-interface": "0",
"packets-with-no-route-table": "0",
"packets-with-size-smaller-than-minimum": "0",
"requests-involving-multiple-peer-fes": "0",
"requests-to-age-static-route": "0",
"requests-to-learn-an-existing-route": "0",
"requests-to-move-static-route": "0",
"requests-to-re-ageout-aged-route": "0",
"unsupported-platform": "0",
},
},
]
}
golden_output_1 = {
"execute.return_value":
"""
show system statistics no-forwarding
Tcp:
265067267 packets sent
52540975 data packets (50070222 bytes)
106367 data packets retransmitted (49357786 bytes)
0 resends initiated by MTU discovery
196251531 ack only packets (48859032 packets delayed)
0 URG only packets
0 window probe packets
3986251 window update packets
191406234 control packets
568918236 packets received
40876471 acks(for 51344691 bytes)
286371924 duplicate acks
0 acks for unsent data
66028796 packets received in-sequence(285830455 bytes)
133613375 completely duplicate packets(724472 bytes)
0 old duplicate packets
463 packets with some duplicate data(79013 bytes duped)
124832 out-of-order packets(58516475 bytes)
1207 packets of data after window(589372 bytes)
13 window probes
2896764 window update packets
300 packets received after close
1054 discarded for bad checksums
0 discarded for bad header offset fields
0 discarded because packet too short
12181907 connection requests
1258 connection accepts
445 bad connection attempts
0 listen queue overflows
1921 connections established (including accepts)
12185170 connections closed (including 438 drops)
1295 connections updated cached RTT on close
1295 connections updated cached RTT variance on close
360 connections updated cached ssthresh on close
12177767 embryonic connections dropped
38164243 segments updated rtt(of 48562532 attempts)
7925703 retransmit timeouts
162 connections dropped by retransmit timeout
20 persist timeouts
0 connections dropped by persist timeout
207603568 keepalive timeouts
206621688 keepalive probes sent
981880 connections dropped by keepalive
7954923 correct ACK header predictions
50195730 correct data packet header predictions
1283 syncache entries added
193 retransmitted
66 dupsyn
22 dropped
1258 completed
0 bucket overflow
0 cache overflow
10 reset
15 stale
0 aborted
0 badack
0 unreach
0 zone failures
0 cookies sent
0 cookies received
820 SACK recovery episodes
7 segment retransmits in SACK recovery episodes
72 byte retransmits in SACK recovery episodes
4489 SACK options (SACK blocks) received
112 SACK options (SACK blocks) sent
0 SACK scoreboard overflow
0 ACKs sent in response to in-window but not exact RSTs
0 ACKs sent in response to in-window SYNs on established connections
0 rcv packets dropped by TCP due to bad address
0 out-of-sequence segment drops due to insufficient memory
179223021 RST packets
1 ICMP packets ignored by TCP
0 send packets dropped by TCP due to auth errors
0 rcv packets dropped by TCP due to auth errors
0 outgoing segments dropped due to policing
udp:
86629365 datagrams received
0 with incomplete header
0 with bad data length field
0 with bad checksum
13554 dropped due to no socket
0 broadcast/multicast datagrams dropped due to no socket
26 dropped due to full socket buffers
0 not for hashed pcb
86615785 delivered
98245707 datagrams output
ip:
791044982 total packets received
0 bad header checksums
0 with size smaller than minimum
0 with data size < data length
0 with header length < data size
0 with data length < header length
0 with incorrect version number
0 packets destined to dead next hop
7776172 fragments received
0 fragments dropped (dup or out of space)
0 fragment sessions dropped (queue overflow)
2330 fragments dropped after timeout
3840557 packets reassembled ok
820969848 packets for this host
311 packets for unknown/unsupported protocol
0 packets forwarded
0 packets not forwardable
0 redirects sent
894573985 packets sent from this host
10684398 packets sent with fabricated ip header
0 output packets dropped due to no bufs
221 output packets discarded due to no route
189762 output datagrams fragmented
458290 fragments created
0 datagrams that can't be fragmented
0 packets with bad options
310 packets with options handled without error
0 strict source and record route options
0 loose source and record route options
0 record route options
0 timestamp options
0 timestamp and address options
0 timestamp and prespecified address options
0 option packets dropped due to rate limit
310 router alert options
0 multicast packets dropped (no iflist)
0 packets dropped (src and int don't match)
0 transit re packets dropped on mgmt i/f
0 packets used first nexthop in ecmp unilist
184307870 incoming ttpoip packets received
0 incoming ttpoip packets dropped
185308407 outgoing TTPoIP packets sent
0 outgoing TTPoIP packets dropped
46 raw packets dropped. no space in socket recv buffer
0 packets consumed by virtual-node processing
icmp:
0 drops due to rate limit
17648 calls to icmp_error
115 errors not generated because old message was icmp
Output Histogram
18108802 echo reply
13554 destination unreachable
15 echo
4094 time exceeded
0 messages with bad code fields
0 messages less than the minimum length
0 messages with bad checksum
0 messages with bad source address
0 messages with bad length
0 echo drops with broadcast or multicast destinaton address
0 timestamp drops with broadcast or multicast destination address
Input Histogram
15 echo reply
7376365 destination unreachable
18108802 echo
11308301 time exceeded
18108802 message responses generated
igmp:
310 messages received
0 messages received with too few bytes
0 messages received with bad checksum
308 membership queries received
0 membership queries received with invalid fields
0 membership reports received
0 membership reports received with invalid fields
0 membership reports received for groups to which we belong
943 Membership reports sent
ipsec:
0 inbound packets violated process security policy
0 Outbound packets violated process security policy
0 outbound packets with no SA available
0 outbound packets failed due to insufficient memory
0 outbound packets with no route
0 invalid outbound packets
0 Outbound packets with bundles SAs
0 mbuf coleasced during clone
0 Cluster coalesced during clone
0 Cluster copied during clone
0 mbuf inserted during makespace
ah:
0 packets shorter than header shows
0 packets dropped protocol unsupported
0 packets dropped no TDB
0 packets dropped bad KCR
0 packets dropped queue full
0 packets dropped no transform
0 replay counter wrap
0 packets dropped bad authentication detected
0 packets dropped bad authentication length
0 possible replay packets detected
0 packets in
0 packets out
0 packets dropped invalid TDB
0 bytes in
0 bytes out
0 packets dropped larger than maxpacket
0 packets blocked due to policy
0 crypto processing failure
0 tunnel sanity check failures
esp:
0 packets shorter than header shows
0 packets dropped protocol not supported
0 packets dropped no TDB
0 packets dropped bad KCR
0 packets dropped queue full
0 packets dropped no transform
0 packets dropped bad ilen
0 replay counter wrap
0 packets dropped bad encryption detected
0 packets dropped bad authentication detected
0 possible replay packets detected
0 packets in
0 packets out
0 packets dropped invalid TDB
0 bytes in
0 bytes out
0 packets dropped larger than maxpacket
0 packets blocked due to policy
0 crypto processing failure
0 tunnel sanity check failures
ipcomp:
0 packets shorter than header shows
0 packets dropped protocol not supported
0 packets dropped no TDB
0 packets dropped bad KCR
0 packets dropped queue full
0 packets dropped no transform
0 replay counter wrap
0 packets in
0 packets out
0 packets dropped invalid TDB
0 bytes in
0 bytes out
0 packets dropped larger than maxpacket
0 packets blocked due to policy
0 crypto processing failure
0 packets sent uncompressed threshold
0 packets sent uncompressed useless
raw_if:
0 RAW packets transmitted
0 PPPOE packets transmitted
0 ISDN packets transmitted
0 DIALER packets transmitted
0 PPP packets transmitted to pppd
0 PPP packets transmitted to jppd
0 IGMPL2 packets transmitted
0 MLDL2 packets transmitted
0 Fibre Channel packets transmitted
0 FIP packets transmitted
0 STP packets transmitted
0 LACP packets transmitted
0 VCCP packets transmitted
0 Fabric OAM packets transmitted
0 output drops due to tx error
0 MPU packets transmitted
0 PPPOE packets received
0 ISDN packets received
0 DIALER packets received
0 PPP packets received from pppd
0 MPU packets received
0 PPP packets received from jppd
0 IGMPL2 packets received
0 MLDL2 packets received
0 Fibre Channel packets received
0 FIP packets received
0 STP packets received
0 LACP packets received
0 VCCP packets received
0 Fabric OAM packets received
0 Fibre Channel packets dropped
0 FIP packets dropped
0 STP packets dropped
0 LACP packets dropped
0 Fabric OAM packets dropped
0 VCCP packets dropped
0 Input drops due to bogus protocol
0 input drops due to no mbufs available
0 input drops due to no space in socket
0 input drops due to no socket
arp:
200794 datagrams received
39895 ARP requests received
54355 ARP replies received
109 resolution request received
0 resolution request dropped
0 unrestricted proxy requests
0 restricted proxy requests
0 received proxy requests
0 unrestricted proxy requests not proxied
0 restricted proxy requests not proxied
0 datagrams with bogus interface
0 datagrams with incorrect length
0 datagrams for non-IP protocol
0 datagrams with unsupported op code
0 datagrams with bad protocol address length
0 datagrams with bad hardware address length
0 datagrams with multicast source address
87 datagrams with multicast target address
0 datagrams with my own hardware address
0 datagrams for an address not on the interface
0 datagrams with a broadcast source address
0 datagrams with source address duplicate to mine
106457 datagrams which were not for me
7 packets discarded waiting for resolution
15 packets sent after waiting for resolution
55086 ARP requests sent
39895 ARP replies sent
0 requests for memory denied
0 requests dropped on entry
0 requests dropped during retry
0 requests dropped due to interface deletion
0 requests on unnumbered interfaces
0 new requests on unnumbered interfaces
0 replies for from unnumbered interfaces
0 requests on unnumbered interface with non-subnetted donor
0 replies from unnumbered interface with non-subnetted donor
0 arp packets rejected as family is configured with deny arp
0 arp response packets are rejected on mace icl interface
0 arp replies are rejected as source and destination is same
0 arp probe for proxy address reachable from the incoming interface
0 arp request discarded for vrrp source address
0 self arp request packet received on irb interface
0 proxy arp request discarded as source ip is a proxy target
0 arp packets are dropped as nexthop allocation failed
0 arp packets received from peer vrrp rotuer and discarded
0 arp packets are rejected as target ip arp resolve is in progress
0 grat arp packets are ignored as mac address is not changed
0 arp packets are dropped from peer vrrp
0 arp packets are dropped as driver call failed
0 arp packets are dropped as source is not validated
75000 Max System ARP nh cache limit
59840 Max Public ARP nh cache limit
200 Max IRI ARP nh cache limit
14960 Max Management intf ARP nh cache limit
4 Current Public ARP nexthops present
1 Current IRI ARP nexthops present
2 Current Management ARP nexthops present
0 Total ARP nexthops creation failed as limit reached
0 Public ARP nexthops creation failed as public limit reached
0 IRI ARP nexthops creation failed as iri limit reached
0 Management ARP nexthops creation failed as mgt limit reached
ip6:
100720784 total packets received
0 packets with size smaller than minimum
0 packets with data size < data length
0 packets with bad options
0 packets with incorrect version number
0 fragments received
0 fragments dropped (dup or out of space)
0 fragments dropped after timeout
0 fragment sessions dropped (queue overflow)
0 packets reassembled ok
100720775 packets for this host
0 packets forwarded
0 packets not forwardable
0 redirects sent
101650461 packets sent from this host
4506397 packets sent with fabricated ip header
0 output packets dropped due to no bufs, etc.
1026 output packets discarded due to no route
0 output datagrams fragmented
0 fragments created
0 datagrams that can't be fragmented
0 packets that violated scope rules
0 multicast packets which we don't join
Input histogram:
5981275 TCP
82375747 UDP
7862042 ICMP6
4501689 OSPF
0 packets whose headers are not continuous
0 tunneling packets that can't find gif
0 packets discarded due to too may headers
0 failures of source address selection
source addresses on an outgoing I/F
1088804 link-locals
557 globals
source addresses of same scope
1088804 link-locals
556 globals
source addresses of a different scope
1 globals
0 forward cache hit
0 forward cache miss
0 Packets destined to dead next hop
0 option packets dropped due to rate limit
0 Packets dropped (src and int don't match)
0 packets dropped due to bad protocol
0 transit re packet(null) dropped on mgmt i/f
icmp6:
31 Calls to icmp_error
0 Errors not generated because old message was icmp error
0 Errors not generated because rate limitation
Output histogram:
31 unreach
544917 neighbor solicitation
544595 neighbor advertisement
0 Messages with bad code fields
0 Messages < minimum length
0 Bad checksums
0 Messages with bad length
Input histogram:
319 unreach
6773211 time exceeded
8 router solicitation
168 router advertisment
544589 neighbor solicitation
543769 neighbor advertisement
Histogram of error messages to be generated:
0 No route
0 Administratively prohibited
0 Beyond scope
31 Address unreachable
0 Port unreachable
0 Time exceed transit
0 Time exceed reassembly
0 Erroneous header field
0 Unrecognized next header
0 Unrecognized option
0 Unknown
0 Message responses generated
0 Messages with too many ND options
75000 Max System ND nh cache limit
59840 Max Public ND nh cache limit
200 Max IRI ND nh cache limit
14960 Max Management intf ND nh cache limit
3 Current Public ND nexthops present
1 Current IRI ND nexthops present
0 Current Management ND nexthops present
0 Total ND nexthops creation failed as limit reached
0 Public ND nexthops creation failed as public limit reached
0 IRI ND nexthops creation failed as iri limit reached
0 Management ND nexthops creation failed as mgt limit reached
0 interface-restricted ndp proxy requests
0 interface-restricted dad proxy requests
0 interface-restricted ndp proxy responses
0 interface-restricted dad proxy conflicts
0 interface-restricted dad proxy duplicates
0 interface-restricted ndp proxy resolve requests
0 interface-restricted dad proxy resolve requests
0 interface-restricted dad packets from same node dropped
543769 interface-restricted proxy packets dropped with nomac
0 ND hold nexthops dropped on entry by RED mark
0 ND hold nexthops dropped on timer expire by RED mark
ipsec6:
0 Inbound packets violated process security policy
0 Outbound packets violated process security policy
0 Outbound packets with no SA available
0 Outbound packets failed due to insufficient memory
0 Outbound packets with no route
0 Invalid outbound packets
0 Outbound packets with bundles SAs
0 mbuf coleasced during clone
0 Cluster coalesced during clone
0 Cluster copied during clone
0 mbuf inserted during makespace
pfkey:
1269 Requests sent from userland
69304 Bytes sent from userland
histogram by message type:
626 reserved
17 add
626 dump
pfkey:
0 Messages with invalid length field
0 Messages with invalid version field
0 Messages with invalid message type field
0 Messages too short
0 Messages with memory allocation failure
0 Messages with duplicate extension
0 Messages with invalid extension type
0 Messages with invalid sa type
0 Messages with invalid address extension
11269 Requests sent to userland
3189032 Bytes sent to userland
histogram by message type:
626 reserved
17 add
10626 dump
pfkey:
22500 Messages toward single socket
0 Messages toward all sockets
0 Messages toward registered sockets
0 Messages with memory allocation failure
clnl:
0 Total packets received
0 Packets delivered
0 Too small packets
0 Packets with bad header length
0 Packets with bad checksum
0 Bad version packets
0 Unknown or unsupported protocol packets
0 Packets with bogus sdl size
0 No free memory in socket buffer
0 Send packets discarded
0 Sbappend failure
0 Mcopy failure
0 Address fields were not reasonable
0 Segment information forgotten
0 Forwarded packets
0 Total packets sent
0 Output packets discarded
0 Non-forwarded packets
0 Packets fragmented
0 Fragments sent
0 Fragments discarded
0 Fragments timed out
0 Fragmentation prohibited
0 Packets reconstructed
0 Packets destined to dead nexthop
0 Packets discarded due to no route
0 Error pdu rate drops
0 ER pdu generation failure
esis:
0 Total pkts received
0 Total packets consumed by protocol
0 Pdus received with bad checksum
0 Pdus received with bad version number
0 Pdus received with bad type field
0 Short pdus received
0 Pdus withbogus sdl size
0 Pdus with bad header length
0 Pdus with unknown or unsupport protocol
0 No free memory in socket buffer
0 Send packets discarded
0 Sbappend failure
0 Mcopy failure
0 ISO family not configured
tnp:
0 Unicast packets received
18139293 Broadcast packets received
0 Fragmented packets received
0 Hello packets dropped
0 Fragments dropped
0 Fragment reassembly queue flushes
0 Packets with tnp src address collision received
18139293 Hello packets received
0 Control packets received
0 Rdp packets received
0 Udp packets received
0 Tunnel packets received
0 Input packets discarded with no protocol
0 Packets of version unspecified received
0 Packets of version 1 received
0 Packets of version 2 received
18139293 Packets of version 3 received
0 Unicast packets sent
18140864 Broadcast packets sent
0 Fragmented packets sent
0 Hello packets dropped
0 Fragments dropped
18140864 Hello packets sent
0 Control packets sent
0 Rdp packets sent
0 Udp packets sent
0 Tunnel packets sent
0 Packets sent with unknown protocol
0 Packets of version unspecified sent
0 Packets of version 1 sent
0 Packets of version 2 sent
18140864 Packets of version 3 sent
rdp:
0 Input packets
0 Packets discarded for bad checksum
0 Packets discarded due to bad sequence number
0 Refused connections
0 Acks received
0 Packets dropped due to full socket buffers
0 Retransmits
0 Output packets
0 Acks sent
0 Connects
0 Closes
0 Keepalives received
0 Keepalives sent
tudp:
0 Datagrams received
0 Datagrams with incomplete header
0 Datagrams with bad data length field
0 Datagrams with bad checksum
0 Datagrams dropped due to no socket
0 Broadcast/multicast datagrams dropped due to no socket
0 Datagrams dropped due to full socket buffers
0 Delivered
1 Datagrams output
ttp:
185308407 Packets sent
0 Packets sent while unconnected
0 Packets sent while interface down
0 Packets sent couldn't get buffer
0 Packets sent couldn't find neighbor
0 Packets sent when transmit is disable
0 Packets sent when host unreachable
0 L3 Packets sent could not get buffer
0 L3 Packets dropped
0 Packets sent with bad logical interface
0 Packets sent with bad address family
56842 L2 packets received
0 Unknown L3 packets received
83526062 IPv4 L3 packets received
0 MPLS L3 packets received
0 MPLS->IPV4 L3 packets received
4214 IPv4->MPLS L3 packets received
0 VPLS L3 packets received
100720753 IPv6 L3 packets received
0 ARP L3 packets received
0 CLNP L3 packets received
0 TNP L3 packets received
0 NULL L3 packets received
0 Cyclotron cycle L3 packets received
0 Cyclotron send L3 packets received
0 Openflow packets received
0 Packets received while unconnected
0 Packets received from unknown ifl
0 Input packets couldn't get buffer
0 Input packets with bad type
0 Input packets with discard type
0 Input packets with too many tlvs
0 Input packets with bad tlv header
0 Input packets with bad tlv type
0 Input packets dropped based on tlv result
0 Input packets with bad address family
0 Input packets for which rt lookup is bypassed
0 Input packets with ttp tlv of type TTP_TLV_P2MP_NBR_NHID
0 Input packets with unknown p2mp_nbr_nhid value
0 Input packets of type vxlan bfd
mpls:
4214 Total MPLS packets received
6118 Packets forwarded
0 Packets dropped
0 Packets with header too small
0 After tagging, packets can't fit link MTU
0 Packets with IPv4 explicit NULL tag
0 Packets with IPv4 explicit NULL cksum errors
0 Packets with router alert tag
5 LSP ping packets (ttl-expired/router alert)
4209 Packets with ttl expired
0 Packets with tag encoding error
0 Packets discarded due to no route
0 Packets used first nexthop in ecmp unilist
0 Packets dropped due to ifl down
0 Packets dropped at mpls socket send op
0 Packets forwarded at mpls socket send op
0 Packets dropped, over p2mp composite nexthop
ethoamlfm:
0 total received packets
0 input drops due to bad interface state
0 received packets forwarded
0 total transmitted packets
0 sent packets
0 output drops due to bad interface state
ethoamcfm:
0 total received packets
0 input drops due to bad interface state
0 received packets forwarded
0 total transmitted packets
0 sent packets
0 output drops due to bad interface state
0 flood requests forwarded to PFE
0 flood requests dropped
vpls:
0 Total packets received
0 Packets with size smaller than minimum
0 Packets with incorrect version number
0 Packets for this host
0 Packets with no logical interface
0 Packets with no family
0 Packets with no route table
0 Packets with no auxiliary table
0 Packets with no core-facing entry
0 packets with no CE-facing entry
0 MAC route learning requests
0 MAC routes learnt
0 Requests to learn an existing route
0 Learning requests while learning disabled on interface
0 Learning requests over capacity
0 MAC routes moved
0 Requests to move static route
0 MAC route aging requests
0 MAC routes aged
0 Bogus address in aging requests
0 Requests to age static route
0 Requests to re-ageout aged route
0 Requests involving multiple peer FEs
0 Aging acks from PFE
0 Aging non-acks from PFE
0 Aging requests timed out waiting on FEs
0 Aging requests over max-rate
0 Errors finding peer FEs
0 Unsupported platform
0 Packets dropped due to no l3 route table
0 Packets dropped due to no local ifl
0 Packets punted
0 Packets dropped due to no socket
bridge:
0 Total packets received
0 Packets with size smaller than minimum
0 Packets with incorrect version number
0 Packets for this host
0 Packets with no logical interface
0 Packets with no family
0 Packets with no route table
0 Packets with no auxiliary table
0 Packets with no core-facing entry
0 packets with no CE-facing entry
0 MAC route learning requests
0 MAC routes learnt
0 Requests to learn an existing route
0 Learning requests while learning disabled on interface
0 Learning requests over capacity
0 MAC routes moved
0 Requests to move static route
0 MAC route aging requests
0 MAC routes aged
0 Bogus address in aging requests
0 Requests to age static route
0 Requests to re-ageout aged route
0 Requests involving multiple peer FEs
0 Aging acks from PFE
0 Aging non-acks from PFE
0 Aging requests timed out waiting on FEs
0 Aging requests over max-rate
0 Errors finding peer FEs
0 Unsupported platform
0 Packets dropped due to no l3 route table
0 Packets dropped due to no local ifl
0 Packets punted
0 Packets dropped due to no socket
"""
}
def test_empty(self):
    """Parsing empty device output must raise SchemaEmptyParserError."""
    self.device = Mock(**self.empty_output)
    parser = ShowSystemStatisticsNoForwarding(device=self.device)
    with self.assertRaises(SchemaEmptyParserError):
        parser.parse()
def test_golden_1(self):
    """Golden CLI output parses into the expected structure."""
    self.device = Mock(**self.golden_output_1)
    parser = ShowSystemStatisticsNoForwarding(device=self.device)
    self.assertEqual(parser.parse(), self.golden_parsed_output_1)
class TestShowSystemConnections(unittest.TestCase):
    """Unit tests for the Junos ShowSystemConnections parser.

    NOTE(review): the two golden attributes were previously swapped —
    ``golden_parsed_output_1`` held the raw ``execute.return_value`` CLI
    text while ``golden_output_1`` held the expected parsed dict, which
    is the opposite of the convention used by the other test classes in
    this module. The names are corrected here; test semantics unchanged.
    """

    device = Device(name="aDevice")
    # Show the full diff on assertion failure for these large structures.
    maxDiff = None

    empty_output = {"execute.return_value": ""}

    # Raw CLI output fed to the mocked device.
    golden_output_1 = {
        "execute.return_value":
        """
show system connections
Active Internet connections (including servers)
Proto Recv-Q Send-Q Local Address Foreign Address (state)
tcp4 0 0 10.1.0.192.22 10.1.0.1.56714 ESTABLISHED
tcp4 0 0 10.1.0.192.22 10.1.0.1.56708 ESTABLISHED
tcp4 0 0 *.33081 *.* LISTEN
tcp4 0 0 172.16.64.1.6988 172.16.64.16.43116 ESTABLISHED
        """
    }

    # Structure the parser is expected to produce from the output above.
    golden_parsed_output_1 = {
        "output": {
            "connections-table": [
                {
                    "proto": "tcp4",
                    "recv-q": "0",
                    "send-q": "0",
                    "local-address": "10.1.0.192.22",
                    "foreign-address": "10.1.0.1.56714",
                    "state": "ESTABLISHED",
                },
                {
                    "proto": "tcp4",
                    "recv-q": "0",
                    "send-q": "0",
                    "local-address": "10.1.0.192.22",
                    "foreign-address": "10.1.0.1.56708",
                    "state": "ESTABLISHED",
                },
                {
                    "proto": "tcp4",
                    "recv-q": "0",
                    "send-q": "0",
                    "local-address": "*.33081",
                    "foreign-address": "*.*",
                    "state": "LISTEN",
                },
                {
                    "proto": "tcp4",
                    "recv-q": "0",
                    "send-q": "0",
                    "local-address": "172.16.64.1.6988",
                    "foreign-address": "172.16.64.16.43116",
                    "state": "ESTABLISHED",
                },
            ]
        }
    }

    def test_empty(self):
        """Parsing empty device output must raise SchemaEmptyParserError."""
        self.device = Mock(**self.empty_output)
        obj = ShowSystemConnections(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            obj.parse()

    def test_golden_1(self):
        """Golden CLI output parses into the expected structure."""
        self.device = Mock(**self.golden_output_1)
        obj = ShowSystemConnections(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_1)
# Allow this test module to be executed directly (python this_file.py).
if __name__ == "__main__":
    unittest.main()
| 45.309873
| 136
| 0.402933
| 24,373
| 284,546
| 4.687933
| 0.047265
| 0.039769
| 0.014563
| 0.016384
| 0.935506
| 0.921862
| 0.915368
| 0.89978
| 0.894144
| 0.886188
| 0
| 0.085139
| 0.49327
| 284,546
| 6,279
| 137
| 45.317089
| 0.707292
| 0.00776
| 0
| 0.748842
| 0
| 0.002317
| 0.365996
| 0.200254
| 0
| 0
| 0
| 0
| 0.008109
| 1
| 0.008109
| false
| 0.000463
| 0.001158
| 0
| 0.032901
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a69c5dd69e8fd54ec0ef69594654fd185324e59
| 41,642
|
py
|
Python
|
sdk/python/pulumi_aws/ec2/vpc.py
|
wgarcia79/pulumi-aws
|
c63c224734f1d72ba84986a33f36413c9f9cbe27
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-11-10T16:33:40.000Z
|
2021-11-10T16:33:40.000Z
|
sdk/python/pulumi_aws/ec2/vpc.py
|
wgarcia79/pulumi-aws
|
c63c224734f1d72ba84986a33f36413c9f9cbe27
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/ec2/vpc.py
|
wgarcia79/pulumi-aws
|
c63c224734f1d72ba84986a33f36413c9f9cbe27
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['VpcArgs', 'Vpc']
@pulumi.input_type
class VpcArgs:
def __init__(__self__, *,
             cidr_block: pulumi.Input[str],
             assign_generated_ipv6_cidr_block: Optional[pulumi.Input[bool]] = None,
             enable_classiclink: Optional[pulumi.Input[bool]] = None,
             enable_classiclink_dns_support: Optional[pulumi.Input[bool]] = None,
             enable_dns_hostnames: Optional[pulumi.Input[bool]] = None,
             enable_dns_support: Optional[pulumi.Input[bool]] = None,
             instance_tenancy: Optional[pulumi.Input[str]] = None,
             tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
    """
    The set of arguments for constructing a Vpc resource.
    :param pulumi.Input[str] cidr_block: The CIDR block for the VPC.
    :param pulumi.Input[bool] assign_generated_ipv6_cidr_block: Requests an Amazon-provided IPv6 CIDR
           block with a /56 prefix length for the VPC. You cannot specify the range of IP addresses, or
           the size of the CIDR block. Default is `false`.
    :param pulumi.Input[bool] enable_classiclink: A boolean flag to enable/disable ClassicLink
           for the VPC. Only valid in regions and accounts that support EC2 Classic.
           See the [ClassicLink documentation](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/vpc-classiclink.html) for more information. Defaults false.
    :param pulumi.Input[bool] enable_classiclink_dns_support: A boolean flag to enable/disable ClassicLink DNS Support for the VPC.
           Only valid in regions and accounts that support EC2 Classic.
    :param pulumi.Input[bool] enable_dns_hostnames: A boolean flag to enable/disable DNS hostnames in the VPC. Defaults false.
    :param pulumi.Input[bool] enable_dns_support: A boolean flag to enable/disable DNS support in the VPC. Defaults true.
    :param pulumi.Input[str] instance_tenancy: A tenancy option for instances launched into the VPC. Default is `default`, which
           makes your instances shared on the host. Using either of the other options (`dedicated` or `host`) costs at least $2/hr.
    :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    """
    # cidr_block is the only required argument and is always recorded.
    pulumi.set(__self__, "cidr_block", cidr_block)
    # Optional arguments are recorded only when explicitly supplied;
    # values left as None are not written into the input map at all.
    if assign_generated_ipv6_cidr_block is not None:
        pulumi.set(__self__, "assign_generated_ipv6_cidr_block", assign_generated_ipv6_cidr_block)
    if enable_classiclink is not None:
        pulumi.set(__self__, "enable_classiclink", enable_classiclink)
    if enable_classiclink_dns_support is not None:
        pulumi.set(__self__, "enable_classiclink_dns_support", enable_classiclink_dns_support)
    if enable_dns_hostnames is not None:
        pulumi.set(__self__, "enable_dns_hostnames", enable_dns_hostnames)
    if enable_dns_support is not None:
        pulumi.set(__self__, "enable_dns_support", enable_dns_support)
    if instance_tenancy is not None:
        pulumi.set(__self__, "instance_tenancy", instance_tenancy)
    if tags is not None:
        pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="cidrBlock")
def cidr_block(self) -> pulumi.Input[str]:
    """
    The CIDR block for the VPC.
    """
    return pulumi.get(self, "cidr_block")

@cidr_block.setter
def cidr_block(self, value: pulumi.Input[str]):
    pulumi.set(self, "cidr_block", value)
@property
@pulumi.getter(name="assignGeneratedIpv6CidrBlock")
def assign_generated_ipv6_cidr_block(self) -> Optional[pulumi.Input[bool]]:
    """
    Requests an Amazon-provided IPv6 CIDR
    block with a /56 prefix length for the VPC. You cannot specify the range of IP addresses, or
    the size of the CIDR block. Default is `false`.
    """
    return pulumi.get(self, "assign_generated_ipv6_cidr_block")

@assign_generated_ipv6_cidr_block.setter
def assign_generated_ipv6_cidr_block(self, value: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "assign_generated_ipv6_cidr_block", value)
@property
@pulumi.getter(name="enableClassiclink")
def enable_classiclink(self) -> Optional[pulumi.Input[bool]]:
    """
    A boolean flag to enable/disable ClassicLink
    for the VPC. Only valid in regions and accounts that support EC2 Classic.
    See the [ClassicLink documentation](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/vpc-classiclink.html) for more information. Defaults false.
    """
    return pulumi.get(self, "enable_classiclink")

@enable_classiclink.setter
def enable_classiclink(self, value: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "enable_classiclink", value)
@property
@pulumi.getter(name="enableClassiclinkDnsSupport")
def enable_classiclink_dns_support(self) -> Optional[pulumi.Input[bool]]:
    """
    A boolean flag to enable/disable ClassicLink DNS Support for the VPC.
    Only valid in regions and accounts that support EC2 Classic.
    """
    return pulumi.get(self, "enable_classiclink_dns_support")

@enable_classiclink_dns_support.setter
def enable_classiclink_dns_support(self, value: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "enable_classiclink_dns_support", value)
@property
@pulumi.getter(name="enableDnsHostnames")
def enable_dns_hostnames(self) -> Optional[pulumi.Input[bool]]:
    """
    A boolean flag to enable/disable DNS hostnames in the VPC. Defaults false.
    """
    return pulumi.get(self, "enable_dns_hostnames")

@enable_dns_hostnames.setter
def enable_dns_hostnames(self, value: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "enable_dns_hostnames", value)
@property
@pulumi.getter(name="enableDnsSupport")
def enable_dns_support(self) -> Optional[pulumi.Input[bool]]:
    """
    A boolean flag to enable/disable DNS support in the VPC. Defaults true.
    """
    return pulumi.get(self, "enable_dns_support")

@enable_dns_support.setter
def enable_dns_support(self, value: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "enable_dns_support", value)
@property
@pulumi.getter(name="instanceTenancy")
def instance_tenancy(self) -> Optional[pulumi.Input[str]]:
    """
    A tenancy option for instances launched into the VPC. Default is `default`, which
    makes your instances shared on the host. Using either of the other options (`dedicated` or `host`) costs at least $2/hr.
    """
    return pulumi.get(self, "instance_tenancy")

@instance_tenancy.setter
def instance_tenancy(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "instance_tenancy", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
    """
    A map of tags to assign to the resource. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    """
    return pulumi.get(self, "tags")

@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
    pulumi.set(self, "tags", value)
@pulumi.input_type
class _VpcState:
    # Internal input type holding every attribute of an existing Vpc resource;
    # used by `Vpc.get` to look up / filter state. All fields are optional because
    # state lookups may be partial.
    def __init__(__self__, *,
                 arn: Optional[pulumi.Input[str]] = None,
                 assign_generated_ipv6_cidr_block: Optional[pulumi.Input[bool]] = None,
                 cidr_block: Optional[pulumi.Input[str]] = None,
                 default_network_acl_id: Optional[pulumi.Input[str]] = None,
                 default_route_table_id: Optional[pulumi.Input[str]] = None,
                 default_security_group_id: Optional[pulumi.Input[str]] = None,
                 dhcp_options_id: Optional[pulumi.Input[str]] = None,
                 enable_classiclink: Optional[pulumi.Input[bool]] = None,
                 enable_classiclink_dns_support: Optional[pulumi.Input[bool]] = None,
                 enable_dns_hostnames: Optional[pulumi.Input[bool]] = None,
                 enable_dns_support: Optional[pulumi.Input[bool]] = None,
                 instance_tenancy: Optional[pulumi.Input[str]] = None,
                 ipv6_association_id: Optional[pulumi.Input[str]] = None,
                 ipv6_cidr_block: Optional[pulumi.Input[str]] = None,
                 main_route_table_id: Optional[pulumi.Input[str]] = None,
                 owner_id: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        Input properties used for looking up and filtering Vpc resources.

        :param pulumi.Input[str] arn: Amazon Resource Name (ARN) of VPC
        :param pulumi.Input[bool] assign_generated_ipv6_cidr_block: Requests an Amazon-provided IPv6 CIDR
               block with a /56 prefix length for the VPC. You cannot specify the range of IP addresses, or
               the size of the CIDR block. Default is `false`.
        :param pulumi.Input[str] cidr_block: The CIDR block for the VPC.
        :param pulumi.Input[str] default_network_acl_id: The ID of the network ACL created by default on VPC creation
        :param pulumi.Input[str] default_route_table_id: The ID of the route table created by default on VPC creation
        :param pulumi.Input[str] default_security_group_id: The ID of the security group created by default on VPC creation
        :param pulumi.Input[str] dhcp_options_id: presumably the ID of the DHCP options set associated
               with the VPC — not documented upstream; inferred from the name, confirm against the provider.
        :param pulumi.Input[bool] enable_classiclink: A boolean flag to enable/disable ClassicLink
               for the VPC. Only valid in regions and accounts that support EC2 Classic.
               See the [ClassicLink documentation](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/vpc-classiclink.html) for more information. Defaults false.
        :param pulumi.Input[bool] enable_classiclink_dns_support: A boolean flag to enable/disable ClassicLink DNS Support for the VPC.
               Only valid in regions and accounts that support EC2 Classic.
        :param pulumi.Input[bool] enable_dns_hostnames: A boolean flag to enable/disable DNS hostnames in the VPC. Defaults false.
        :param pulumi.Input[bool] enable_dns_support: A boolean flag to enable/disable DNS support in the VPC. Defaults true.
        :param pulumi.Input[str] instance_tenancy: A tenancy option for instances launched into the VPC. Default is `default`, which
               makes your instances shared on the host. Using either of the other options (`dedicated` or `host`) costs at least $2/hr.
        :param pulumi.Input[str] ipv6_association_id: The association ID for the IPv6 CIDR block.
        :param pulumi.Input[str] ipv6_cidr_block: The IPv6 CIDR block.
        :param pulumi.Input[str] main_route_table_id: The ID of the main route table associated with
               this VPC. Note that you can change a VPC's main route table by using an
               `ec2.MainRouteTableAssociation`.
        :param pulumi.Input[str] owner_id: The ID of the AWS account that owns the VPC.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider.
        """
        # Only set keys that were actually supplied so that unset state fields
        # stay absent rather than becoming explicit Nones.
        if arn is not None:
            pulumi.set(__self__, "arn", arn)
        if assign_generated_ipv6_cidr_block is not None:
            pulumi.set(__self__, "assign_generated_ipv6_cidr_block", assign_generated_ipv6_cidr_block)
        if cidr_block is not None:
            pulumi.set(__self__, "cidr_block", cidr_block)
        if default_network_acl_id is not None:
            pulumi.set(__self__, "default_network_acl_id", default_network_acl_id)
        if default_route_table_id is not None:
            pulumi.set(__self__, "default_route_table_id", default_route_table_id)
        if default_security_group_id is not None:
            pulumi.set(__self__, "default_security_group_id", default_security_group_id)
        if dhcp_options_id is not None:
            pulumi.set(__self__, "dhcp_options_id", dhcp_options_id)
        if enable_classiclink is not None:
            pulumi.set(__self__, "enable_classiclink", enable_classiclink)
        if enable_classiclink_dns_support is not None:
            pulumi.set(__self__, "enable_classiclink_dns_support", enable_classiclink_dns_support)
        if enable_dns_hostnames is not None:
            pulumi.set(__self__, "enable_dns_hostnames", enable_dns_hostnames)
        if enable_dns_support is not None:
            pulumi.set(__self__, "enable_dns_support", enable_dns_support)
        if instance_tenancy is not None:
            pulumi.set(__self__, "instance_tenancy", instance_tenancy)
        if ipv6_association_id is not None:
            pulumi.set(__self__, "ipv6_association_id", ipv6_association_id)
        if ipv6_cidr_block is not None:
            pulumi.set(__self__, "ipv6_cidr_block", ipv6_cidr_block)
        if main_route_table_id is not None:
            pulumi.set(__self__, "main_route_table_id", main_route_table_id)
        if owner_id is not None:
            pulumi.set(__self__, "owner_id", owner_id)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if tags_all is not None:
            pulumi.set(__self__, "tags_all", tags_all)

    @property
    @pulumi.getter
    def arn(self) -> Optional[pulumi.Input[str]]:
        """
        Amazon Resource Name (ARN) of VPC
        """
        return pulumi.get(self, "arn")

    @arn.setter
    def arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "arn", value)

    @property
    @pulumi.getter(name="assignGeneratedIpv6CidrBlock")
    def assign_generated_ipv6_cidr_block(self) -> Optional[pulumi.Input[bool]]:
        """
        Requests an Amazon-provided IPv6 CIDR
        block with a /56 prefix length for the VPC. You cannot specify the range of IP addresses, or
        the size of the CIDR block. Default is `false`.
        """
        return pulumi.get(self, "assign_generated_ipv6_cidr_block")

    @assign_generated_ipv6_cidr_block.setter
    def assign_generated_ipv6_cidr_block(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "assign_generated_ipv6_cidr_block", value)

    @property
    @pulumi.getter(name="cidrBlock")
    def cidr_block(self) -> Optional[pulumi.Input[str]]:
        """
        The CIDR block for the VPC.
        """
        return pulumi.get(self, "cidr_block")

    @cidr_block.setter
    def cidr_block(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cidr_block", value)

    @property
    @pulumi.getter(name="defaultNetworkAclId")
    def default_network_acl_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the network ACL created by default on VPC creation
        """
        return pulumi.get(self, "default_network_acl_id")

    @default_network_acl_id.setter
    def default_network_acl_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "default_network_acl_id", value)

    @property
    @pulumi.getter(name="defaultRouteTableId")
    def default_route_table_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the route table created by default on VPC creation
        """
        return pulumi.get(self, "default_route_table_id")

    @default_route_table_id.setter
    def default_route_table_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "default_route_table_id", value)

    @property
    @pulumi.getter(name="defaultSecurityGroupId")
    def default_security_group_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the security group created by default on VPC creation
        """
        return pulumi.get(self, "default_security_group_id")

    @default_security_group_id.setter
    def default_security_group_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "default_security_group_id", value)

    @property
    @pulumi.getter(name="dhcpOptionsId")
    def dhcp_options_id(self) -> Optional[pulumi.Input[str]]:
        # NOTE(review): undocumented upstream; presumably the ID of the DHCP
        # options set associated with the VPC — confirm against the provider docs.
        return pulumi.get(self, "dhcp_options_id")

    @dhcp_options_id.setter
    def dhcp_options_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "dhcp_options_id", value)

    @property
    @pulumi.getter(name="enableClassiclink")
    def enable_classiclink(self) -> Optional[pulumi.Input[bool]]:
        """
        A boolean flag to enable/disable ClassicLink
        for the VPC. Only valid in regions and accounts that support EC2 Classic.
        See the [ClassicLink documentation](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/vpc-classiclink.html) for more information. Defaults false.
        """
        return pulumi.get(self, "enable_classiclink")

    @enable_classiclink.setter
    def enable_classiclink(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable_classiclink", value)

    @property
    @pulumi.getter(name="enableClassiclinkDnsSupport")
    def enable_classiclink_dns_support(self) -> Optional[pulumi.Input[bool]]:
        """
        A boolean flag to enable/disable ClassicLink DNS Support for the VPC.
        Only valid in regions and accounts that support EC2 Classic.
        """
        return pulumi.get(self, "enable_classiclink_dns_support")

    @enable_classiclink_dns_support.setter
    def enable_classiclink_dns_support(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable_classiclink_dns_support", value)

    @property
    @pulumi.getter(name="enableDnsHostnames")
    def enable_dns_hostnames(self) -> Optional[pulumi.Input[bool]]:
        """
        A boolean flag to enable/disable DNS hostnames in the VPC. Defaults false.
        """
        return pulumi.get(self, "enable_dns_hostnames")

    @enable_dns_hostnames.setter
    def enable_dns_hostnames(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable_dns_hostnames", value)

    @property
    @pulumi.getter(name="enableDnsSupport")
    def enable_dns_support(self) -> Optional[pulumi.Input[bool]]:
        """
        A boolean flag to enable/disable DNS support in the VPC. Defaults true.
        """
        return pulumi.get(self, "enable_dns_support")

    @enable_dns_support.setter
    def enable_dns_support(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable_dns_support", value)

    @property
    @pulumi.getter(name="instanceTenancy")
    def instance_tenancy(self) -> Optional[pulumi.Input[str]]:
        """
        A tenancy option for instances launched into the VPC. Default is `default`, which
        makes your instances shared on the host. Using either of the other options (`dedicated` or `host`) costs at least $2/hr.
        """
        return pulumi.get(self, "instance_tenancy")

    @instance_tenancy.setter
    def instance_tenancy(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "instance_tenancy", value)

    @property
    @pulumi.getter(name="ipv6AssociationId")
    def ipv6_association_id(self) -> Optional[pulumi.Input[str]]:
        """
        The association ID for the IPv6 CIDR block.
        """
        return pulumi.get(self, "ipv6_association_id")

    @ipv6_association_id.setter
    def ipv6_association_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ipv6_association_id", value)

    @property
    @pulumi.getter(name="ipv6CidrBlock")
    def ipv6_cidr_block(self) -> Optional[pulumi.Input[str]]:
        """
        The IPv6 CIDR block.
        """
        return pulumi.get(self, "ipv6_cidr_block")

    @ipv6_cidr_block.setter
    def ipv6_cidr_block(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ipv6_cidr_block", value)

    @property
    @pulumi.getter(name="mainRouteTableId")
    def main_route_table_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the main route table associated with
        this VPC. Note that you can change a VPC's main route table by using an
        `ec2.MainRouteTableAssociation`.
        """
        return pulumi.get(self, "main_route_table_id")

    @main_route_table_id.setter
    def main_route_table_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "main_route_table_id", value)

    @property
    @pulumi.getter(name="ownerId")
    def owner_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the AWS account that owns the VPC.
        """
        return pulumi.get(self, "owner_id")

    @owner_id.setter
    def owner_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "owner_id", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A map of tags to assign to the resource. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="tagsAll")
    def tags_all(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A map of tags assigned to the resource, including those inherited from the provider.
        """
        return pulumi.get(self, "tags_all")

    @tags_all.setter
    def tags_all(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags_all", value)
class Vpc(pulumi.CustomResource):
    # AWS EC2 VPC resource. Two constructor overloads are provided: keyword
    # arguments, or a single `VpcArgs` bundle; both dispatch through
    # `_internal_init`.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 assign_generated_ipv6_cidr_block: Optional[pulumi.Input[bool]] = None,
                 cidr_block: Optional[pulumi.Input[str]] = None,
                 enable_classiclink: Optional[pulumi.Input[bool]] = None,
                 enable_classiclink_dns_support: Optional[pulumi.Input[bool]] = None,
                 enable_dns_hostnames: Optional[pulumi.Input[bool]] = None,
                 enable_dns_support: Optional[pulumi.Input[bool]] = None,
                 instance_tenancy: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 __props__=None):
        """
        Provides a VPC resource.

        ## Example Usage

        Basic usage:

        ```python
        import pulumi
        import pulumi_aws as aws

        main = aws.ec2.Vpc("main", cidr_block="10.0.0.0/16")
        ```

        Basic usage with tags:

        ```python
        import pulumi
        import pulumi_aws as aws

        main = aws.ec2.Vpc("main",
            cidr_block="10.0.0.0/16",
            instance_tenancy="default",
            tags={
                "Name": "main",
            })
        ```

        ## Import

        VPCs can be imported using the `vpc id`, e.g.,

        ```sh
         $ pulumi import aws:ec2/vpc:Vpc test_vpc vpc-a01106c2
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] assign_generated_ipv6_cidr_block: Requests an Amazon-provided IPv6 CIDR
               block with a /56 prefix length for the VPC. You cannot specify the range of IP addresses, or
               the size of the CIDR block. Default is `false`.
        :param pulumi.Input[str] cidr_block: The CIDR block for the VPC.
        :param pulumi.Input[bool] enable_classiclink: A boolean flag to enable/disable ClassicLink
               for the VPC. Only valid in regions and accounts that support EC2 Classic.
               See the [ClassicLink documentation](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/vpc-classiclink.html) for more information. Defaults false.
        :param pulumi.Input[bool] enable_classiclink_dns_support: A boolean flag to enable/disable ClassicLink DNS Support for the VPC.
               Only valid in regions and accounts that support EC2 Classic.
        :param pulumi.Input[bool] enable_dns_hostnames: A boolean flag to enable/disable DNS hostnames in the VPC. Defaults false.
        :param pulumi.Input[bool] enable_dns_support: A boolean flag to enable/disable DNS support in the VPC. Defaults true.
        :param pulumi.Input[str] instance_tenancy: A tenancy option for instances launched into the VPC. Default is `default`, which
               makes your instances shared on the host. Using either of the other options (`dedicated` or `host`) costs at least $2/hr.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: VpcArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a VPC resource.

        ## Example Usage

        Basic usage:

        ```python
        import pulumi
        import pulumi_aws as aws

        main = aws.ec2.Vpc("main", cidr_block="10.0.0.0/16")
        ```

        Basic usage with tags:

        ```python
        import pulumi
        import pulumi_aws as aws

        main = aws.ec2.Vpc("main",
            cidr_block="10.0.0.0/16",
            instance_tenancy="default",
            tags={
                "Name": "main",
            })
        ```

        ## Import

        VPCs can be imported using the `vpc id`, e.g.,

        ```sh
         $ pulumi import aws:ec2/vpc:Vpc test_vpc vpc-a01106c2
        ```

        :param str resource_name: The name of the resource.
        :param VpcArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two overloads above: if the caller passed a
        # VpcArgs bundle, unpack it into keyword arguments for _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(VpcArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       assign_generated_ipv6_cidr_block: Optional[pulumi.Input[bool]] = None,
                       cidr_block: Optional[pulumi.Input[str]] = None,
                       enable_classiclink: Optional[pulumi.Input[bool]] = None,
                       enable_classiclink_dns_support: Optional[pulumi.Input[bool]] = None,
                       enable_dns_hostnames: Optional[pulumi.Input[bool]] = None,
                       enable_dns_support: Optional[pulumi.Input[bool]] = None,
                       instance_tenancy: Optional[pulumi.Input[str]] = None,
                       tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                       __props__=None):
        """
        Validate options, build the property bag, and register the resource
        with the Pulumi engine. `__props__` may only be supplied together with
        a valid `opts.id` (i.e. when rehydrating an existing resource).
        """
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: construct the property bag from the
            # supplied arguments.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = VpcArgs.__new__(VpcArgs)
            __props__.__dict__["assign_generated_ipv6_cidr_block"] = assign_generated_ipv6_cidr_block
            # cidr_block is required unless we are adopting an existing resource by URN.
            if cidr_block is None and not opts.urn:
                raise TypeError("Missing required property 'cidr_block'")
            __props__.__dict__["cidr_block"] = cidr_block
            __props__.__dict__["enable_classiclink"] = enable_classiclink
            __props__.__dict__["enable_classiclink_dns_support"] = enable_classiclink_dns_support
            __props__.__dict__["enable_dns_hostnames"] = enable_dns_hostnames
            __props__.__dict__["enable_dns_support"] = enable_dns_support
            __props__.__dict__["instance_tenancy"] = instance_tenancy
            __props__.__dict__["tags"] = tags
            # Output-only properties start as None and are populated by the provider.
            __props__.__dict__["arn"] = None
            __props__.__dict__["default_network_acl_id"] = None
            __props__.__dict__["default_route_table_id"] = None
            __props__.__dict__["default_security_group_id"] = None
            __props__.__dict__["dhcp_options_id"] = None
            __props__.__dict__["ipv6_association_id"] = None
            __props__.__dict__["ipv6_cidr_block"] = None
            __props__.__dict__["main_route_table_id"] = None
            __props__.__dict__["owner_id"] = None
            __props__.__dict__["tags_all"] = None
        super(Vpc, __self__).__init__(
            'aws:ec2/vpc:Vpc',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            arn: Optional[pulumi.Input[str]] = None,
            assign_generated_ipv6_cidr_block: Optional[pulumi.Input[bool]] = None,
            cidr_block: Optional[pulumi.Input[str]] = None,
            default_network_acl_id: Optional[pulumi.Input[str]] = None,
            default_route_table_id: Optional[pulumi.Input[str]] = None,
            default_security_group_id: Optional[pulumi.Input[str]] = None,
            dhcp_options_id: Optional[pulumi.Input[str]] = None,
            enable_classiclink: Optional[pulumi.Input[bool]] = None,
            enable_classiclink_dns_support: Optional[pulumi.Input[bool]] = None,
            enable_dns_hostnames: Optional[pulumi.Input[bool]] = None,
            enable_dns_support: Optional[pulumi.Input[bool]] = None,
            instance_tenancy: Optional[pulumi.Input[str]] = None,
            ipv6_association_id: Optional[pulumi.Input[str]] = None,
            ipv6_cidr_block: Optional[pulumi.Input[str]] = None,
            main_route_table_id: Optional[pulumi.Input[str]] = None,
            owner_id: Optional[pulumi.Input[str]] = None,
            tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'Vpc':
        """
        Get an existing Vpc resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] arn: Amazon Resource Name (ARN) of VPC
        :param pulumi.Input[bool] assign_generated_ipv6_cidr_block: Requests an Amazon-provided IPv6 CIDR
               block with a /56 prefix length for the VPC. You cannot specify the range of IP addresses, or
               the size of the CIDR block. Default is `false`.
        :param pulumi.Input[str] cidr_block: The CIDR block for the VPC.
        :param pulumi.Input[str] default_network_acl_id: The ID of the network ACL created by default on VPC creation
        :param pulumi.Input[str] default_route_table_id: The ID of the route table created by default on VPC creation
        :param pulumi.Input[str] default_security_group_id: The ID of the security group created by default on VPC creation
        :param pulumi.Input[bool] enable_classiclink: A boolean flag to enable/disable ClassicLink
               for the VPC. Only valid in regions and accounts that support EC2 Classic.
               See the [ClassicLink documentation](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/vpc-classiclink.html) for more information. Defaults false.
        :param pulumi.Input[bool] enable_classiclink_dns_support: A boolean flag to enable/disable ClassicLink DNS Support for the VPC.
               Only valid in regions and accounts that support EC2 Classic.
        :param pulumi.Input[bool] enable_dns_hostnames: A boolean flag to enable/disable DNS hostnames in the VPC. Defaults false.
        :param pulumi.Input[bool] enable_dns_support: A boolean flag to enable/disable DNS support in the VPC. Defaults true.
        :param pulumi.Input[str] instance_tenancy: A tenancy option for instances launched into the VPC. Default is `default`, which
               makes your instances shared on the host. Using either of the other options (`dedicated` or `host`) costs at least $2/hr.
        :param pulumi.Input[str] ipv6_association_id: The association ID for the IPv6 CIDR block.
        :param pulumi.Input[str] ipv6_cidr_block: The IPv6 CIDR block.
        :param pulumi.Input[str] main_route_table_id: The ID of the main route table associated with
               this VPC. Note that you can change a VPC's main route table by using an
               `ec2.MainRouteTableAssociation`.
        :param pulumi.Input[str] owner_id: The ID of the AWS account that owns the VPC.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _VpcState.__new__(_VpcState)

        __props__.__dict__["arn"] = arn
        __props__.__dict__["assign_generated_ipv6_cidr_block"] = assign_generated_ipv6_cidr_block
        __props__.__dict__["cidr_block"] = cidr_block
        __props__.__dict__["default_network_acl_id"] = default_network_acl_id
        __props__.__dict__["default_route_table_id"] = default_route_table_id
        __props__.__dict__["default_security_group_id"] = default_security_group_id
        __props__.__dict__["dhcp_options_id"] = dhcp_options_id
        __props__.__dict__["enable_classiclink"] = enable_classiclink
        __props__.__dict__["enable_classiclink_dns_support"] = enable_classiclink_dns_support
        __props__.__dict__["enable_dns_hostnames"] = enable_dns_hostnames
        __props__.__dict__["enable_dns_support"] = enable_dns_support
        __props__.__dict__["instance_tenancy"] = instance_tenancy
        __props__.__dict__["ipv6_association_id"] = ipv6_association_id
        __props__.__dict__["ipv6_cidr_block"] = ipv6_cidr_block
        __props__.__dict__["main_route_table_id"] = main_route_table_id
        __props__.__dict__["owner_id"] = owner_id
        __props__.__dict__["tags"] = tags
        __props__.__dict__["tags_all"] = tags_all
        return Vpc(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def arn(self) -> pulumi.Output[str]:
        """
        Amazon Resource Name (ARN) of VPC
        """
        return pulumi.get(self, "arn")

    @property
    @pulumi.getter(name="assignGeneratedIpv6CidrBlock")
    def assign_generated_ipv6_cidr_block(self) -> pulumi.Output[Optional[bool]]:
        """
        Requests an Amazon-provided IPv6 CIDR
        block with a /56 prefix length for the VPC. You cannot specify the range of IP addresses, or
        the size of the CIDR block. Default is `false`.
        """
        return pulumi.get(self, "assign_generated_ipv6_cidr_block")

    @property
    @pulumi.getter(name="cidrBlock")
    def cidr_block(self) -> pulumi.Output[str]:
        """
        The CIDR block for the VPC.
        """
        return pulumi.get(self, "cidr_block")

    @property
    @pulumi.getter(name="defaultNetworkAclId")
    def default_network_acl_id(self) -> pulumi.Output[str]:
        """
        The ID of the network ACL created by default on VPC creation
        """
        return pulumi.get(self, "default_network_acl_id")

    @property
    @pulumi.getter(name="defaultRouteTableId")
    def default_route_table_id(self) -> pulumi.Output[str]:
        """
        The ID of the route table created by default on VPC creation
        """
        return pulumi.get(self, "default_route_table_id")

    @property
    @pulumi.getter(name="defaultSecurityGroupId")
    def default_security_group_id(self) -> pulumi.Output[str]:
        """
        The ID of the security group created by default on VPC creation
        """
        return pulumi.get(self, "default_security_group_id")

    @property
    @pulumi.getter(name="dhcpOptionsId")
    def dhcp_options_id(self) -> pulumi.Output[str]:
        # NOTE(review): undocumented upstream; presumably the ID of the DHCP
        # options set associated with the VPC — confirm against the provider docs.
        return pulumi.get(self, "dhcp_options_id")

    @property
    @pulumi.getter(name="enableClassiclink")
    def enable_classiclink(self) -> pulumi.Output[bool]:
        """
        A boolean flag to enable/disable ClassicLink
        for the VPC. Only valid in regions and accounts that support EC2 Classic.
        See the [ClassicLink documentation](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/vpc-classiclink.html) for more information. Defaults false.
        """
        return pulumi.get(self, "enable_classiclink")

    @property
    @pulumi.getter(name="enableClassiclinkDnsSupport")
    def enable_classiclink_dns_support(self) -> pulumi.Output[bool]:
        """
        A boolean flag to enable/disable ClassicLink DNS Support for the VPC.
        Only valid in regions and accounts that support EC2 Classic.
        """
        return pulumi.get(self, "enable_classiclink_dns_support")

    @property
    @pulumi.getter(name="enableDnsHostnames")
    def enable_dns_hostnames(self) -> pulumi.Output[bool]:
        """
        A boolean flag to enable/disable DNS hostnames in the VPC. Defaults false.
        """
        return pulumi.get(self, "enable_dns_hostnames")

    @property
    @pulumi.getter(name="enableDnsSupport")
    def enable_dns_support(self) -> pulumi.Output[Optional[bool]]:
        """
        A boolean flag to enable/disable DNS support in the VPC. Defaults true.
        """
        return pulumi.get(self, "enable_dns_support")

    @property
    @pulumi.getter(name="instanceTenancy")
    def instance_tenancy(self) -> pulumi.Output[Optional[str]]:
        """
        A tenancy option for instances launched into the VPC. Default is `default`, which
        makes your instances shared on the host. Using either of the other options (`dedicated` or `host`) costs at least $2/hr.
        """
        return pulumi.get(self, "instance_tenancy")

    @property
    @pulumi.getter(name="ipv6AssociationId")
    def ipv6_association_id(self) -> pulumi.Output[str]:
        """
        The association ID for the IPv6 CIDR block.
        """
        return pulumi.get(self, "ipv6_association_id")

    @property
    @pulumi.getter(name="ipv6CidrBlock")
    def ipv6_cidr_block(self) -> pulumi.Output[str]:
        """
        The IPv6 CIDR block.
        """
        return pulumi.get(self, "ipv6_cidr_block")

    @property
    @pulumi.getter(name="mainRouteTableId")
    def main_route_table_id(self) -> pulumi.Output[str]:
        """
        The ID of the main route table associated with
        this VPC. Note that you can change a VPC's main route table by using an
        `ec2.MainRouteTableAssociation`.
        """
        return pulumi.get(self, "main_route_table_id")

    @property
    @pulumi.getter(name="ownerId")
    def owner_id(self) -> pulumi.Output[str]:
        """
        The ID of the AWS account that owns the VPC.
        """
        return pulumi.get(self, "owner_id")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        A map of tags to assign to the resource. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter(name="tagsAll")
    def tags_all(self) -> pulumi.Output[Mapping[str, str]]:
        """
        A map of tags assigned to the resource, including those inherited from the provider.
        """
        return pulumi.get(self, "tags_all")
| 48.08545
| 258
| 0.669348
| 5,302
| 41,642
| 5.00679
| 0.048284
| 0.076659
| 0.078016
| 0.042266
| 0.931666
| 0.915995
| 0.894071
| 0.879229
| 0.861787
| 0.834062
| 0
| 0.005687
| 0.235676
| 41,642
| 865
| 259
| 48.14104
| 0.828359
| 0.356611
| 0
| 0.678261
| 1
| 0
| 0.128622
| 0.048269
| 0
| 0
| 0
| 0
| 0
| 1
| 0.167391
| false
| 0.002174
| 0.01087
| 0.004348
| 0.282609
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a6efc5f668a7df19d44fa51c2878b6563983d46
| 42,333
|
py
|
Python
|
tests/expected_normalized_results.py
|
msteijaert/descriptastorus
|
d10fb973eff6cb841694a191cd296e7008259831
|
[
"BSD-3-Clause"
] | null | null | null |
tests/expected_normalized_results.py
|
msteijaert/descriptastorus
|
d10fb973eff6cb841694a191cd296e7008259831
|
[
"BSD-3-Clause"
] | null | null | null |
tests/expected_normalized_results.py
|
msteijaert/descriptastorus
|
d10fb973eff6cb841694a191cd296e7008259831
|
[
"BSD-3-Clause"
] | null | null | null |
expected = [(True, 0.9749367759562906, 0.0017697497084894365, 0.0001226379059079931, 0.00016402878019849648, 0.00012344246793706903, 0.00031922002005261777, 0.0005362138683486838, 0.0006850762300778668, 0.0004942567639564529, 0.00018708933024663262, 0.0006994743745435789, 0.00040398057664498856, 0.0012545958406222754, 0.0006995602759455857, 4.074887748553275e-10, 2.1752345554537253e-08, 6.893568743986756e-07, 0.10021777036808066, 0.019674268425336955, 2.509947805151569e-11, 9.258416945554019e-11, 5.856109102466708e-17, 0.67748413597752, 2.3914504100228754e-11, 7.522455917366477e-13, 7.344415091328211e-05, 0.0014396455494463365, 0.0012053384296394612, 0.0008866174046595724, 2.1555330316300734e-14, 0.9594279052184913, 0.0002074703959143888, 7.55481640150461e-05, 0.9999989976196331, 1.207923229416487e-05, 0.00027583827601894315, 0.0698270416107376, 0.00018453781411443678, 0.009863389098877339, 0.022123722654315392, 0.0098633890988774, 0.004592843796987434, 0.9998351236451024, 0.013951132732773557, 0.925405890765279, 0.9755569060880589, 0.14830565476181345, 0.0004521609628475594, 7.534120729066002e-05, 0.06131633436401962, 0.033330412715665114, 9.938586887440123e-22, 0.14249225481045538, 0.06296313323086282, 0.3582069667640977, 4.829929905968561e-15, 0.08375773817691123, 0.018975839977920975, 0.055286669276326375, 5.2299730302677424e-05, 5.695163496355349e-08, 0.0021522983926278924, 0.0, 1.142426579532403e-21, 2.4024551296122493e-23, 0.020326964585520954, 0.008721533485877977, 5.761429172990433e-21, 3.608752519347532e-15, 0.14598011853624576, 1.7355671790267907e-22, 1.1809375699556313e-10, 0.05998337856769929, 9.05498589442999e-08, 4.6097836658987035e-10, 0.1570723761512004, 0.6391707315526944, 0.023724068249895314, 0.08076015142621962, 0.02750088405103144, 0.026103288321088658, 0.12445962953209416, 0.07318164957419467, 0.16709687358966352, 0.07558100887358327, 8.786222326940566e-24, 0.01985439680376681, 0.030449466764316042, 0.21657492863434677, 5.301380942484303e-05, 
1.426575645079419e-16, 0.037316039578400684, 6.952720170237173e-13, 0.0, 9.796908728381612e-13, 0.0002642813530468513, 1.204930599755324e-11, 2.863050056034425e-09, 0.008253922509688207, 0.35655697923220714, 2.9948775774335523e-06, 0.2776390123534972, 5.301380942484303e-05, 0.006171383087998217, 5.301380942484303e-05, 0.5, 0.3847104505818606, 5.301380942484303e-05, 5.301380942484303e-05, 0.16466451519846614, 5.301380942484303e-05, 0.9986534460857486, 0.39982063322767364, 0.03618098130844001, 5.708678464956527e-19, 3.323628039453297e-10, 9.641976432207614e-10, 7.105427357601002e-15, 5.837075859767282e-13, 1.1988064218527426e-20, 0.16507954777521547, 0.16704063109164155, 0.16649833422826882, 0.16648681554273437, 0.20286466138783588, 0.06936588086433818, 7.105427357601002e-15, 0.16834648002367886, 0.16798293243083, 6.871898605709248e-10, 0.0011815729111892548, 0.16433263444740043, 0.0008377769174453542, 0.16632573432045866, 0.1630341420539464, 0.16507954777521783, 9.569704921590424e-08, 3.4970892216197174e-08, 0.16820617469834753, 0.16580685767199677, 0.1673465945690054, 7.13964618592744e-07, 2.64115098167329e-12, 0.09991279113071351, 2.868092430219349e-10, 0.37773784802857724, 0.004506167784075659, 0.13325025138007268, 0.35822936499573854, 1.6148291592799069e-09, 1.875173153042101e-18, 2.0941053890943806e-07, 7.105427357601002e-15, 0.49926428135900186, 0.16492940199937894, 1.3174450829220094e-17, 2.1116435458495668e-16, 1.1681587528983658e-09, 3.2592359986208345e-22, 6.246014200977821e-10, 0.16814918247297933, 0.16545072935045158, 1.1711026222995016e-13, 0.0, 0.16466886776470133, 0.1669247276390117, 0.0, 5.100713274794273e-08, 7.105427357601002e-15, 0.1546541084142275, 2.794209383371835e-22, 0.0, 0.16763973281840122, 6.314992660308343e-25, 0.16818612974609914, 0.009088502666434949, 0.16836320220350243, 8.265423131146944e-11, 0.15634635438381983, 0.0, 0.0, 0.021135423572131908, 0.021135423572131908, 2.388155749350249e-20, 0.0, 8.336724500779247e-25, 
5.301380942484303e-05, 0.1569510658025109, 4.034345030660423e-08, 1.552591960229748e-23, 1.5930611717254445e-17, 5.766100772128885e-14, 2.957989408969297e-11, 0.16837836924209137, 0.167380186090632, 1.4815146472600273e-18, 2.3241499374144116e-16, 4.7035980879892365e-08, 0.16663339666347546, 0.28139584922525007),
(True, 0.9759402110326365, 0.003859470808797959, 0.000365102267782451, 0.0007398589702052536, 0.000485497209632223, 0.000610736173920557, 0.0013191430838316181, 0.0012554726344615142, 0.001882471392918716, 0.0008014864991408377, 0.0021797740913533695, 0.0011754538798584655, 0.0029068132849091356, 0.0016100571186500735, 4.074887748553275e-10, 2.1752345554537253e-08, 6.893568743986756e-07, 0.10021777036808066, 0.019674268425336955, 2.509947805151569e-11, 0.628584800417054, 5.856109102466708e-17, 0.5855782913710358, 0.5639311333572717, 7.522455917366477e-13, 0.00019175865208437016, 0.2949063498433872, 0.15421138805901285, 0.04201667450090395, 0.1674686336627659, 0.9594279052184913, 0.00048091646393783017, 0.0001822718129598278, 0.9999989976196331, 9.959236613847391e-05, 0.0004905768654966154, 0.13221528111928338, 0.0004543011968957863, 0.009964082422744462, 0.02212145365983826, 0.009964082422744523, 0.004592843796987434, 0.9986661307738313, 0.009344685767824776, 0.9211560286991001, 0.9755593892703659, 0.19089489586703817, 0.001100203819200652, 0.000195353938538486, 0.06131633436401962, 0.033330412715665114, 9.938586887440123e-22, 0.14249225481045538, 0.06296313323086282, 0.3582069667640977, 4.829929905968561e-15, 0.08375773817691123, 0.018975839977920975, 0.055286669276326375, 5.2299730302677424e-05, 5.695163496355349e-08, 0.0021522983926278924, 0.0, 1.142426579532403e-21, 2.4024551296122493e-23, 0.023154605826211493, 0.008721533485877977, 5.761429172990433e-21, 3.608752519347532e-15, 0.14598011853624576, 1.7355671790267907e-22, 1.1809375699556313e-10, 0.05998337856769929, 9.05498589442999e-08, 4.6097836658987035e-10, 0.1570723761512004, 0.632758701730094, 0.04357690733133318, 0.08076015142621962, 0.02750088405103144, 0.026103288321088658, 0.12445962953209416, 0.07318164957419467, 0.16709687358966352, 0.07558100887358327, 8.786222326940566e-24, 0.12338336359658242, 0.030449466764316042, 0.21106179193952035, 5.301380942484303e-05, 1.426575645079419e-16, 
0.037316039578400684, 6.952720170237173e-13, 0.0, 9.796908728381612e-13, 0.0002642813530468513, 1.204930599755324e-11, 0.9472168082942851, 0.07252908115375234, 0.2561373864373436, 2.9948775774335523e-06, 0.2776390123534972, 5.301380942484303e-05, 0.006171383087998217, 5.301380942484303e-05, 0.5, 0.3847104505818606, 5.301380942484303e-05, 5.301380942484303e-05, 0.16466451519846614, 5.301380942484303e-05, 0.9986534460857486, 0.39982063322767364, 0.039526361699395315, 5.708678464956527e-19, 3.323628039453297e-10, 9.641976432207614e-10, 7.105427357601002e-15, 5.837075859767282e-13, 1.1988064218527426e-20, 0.16507954777521547, 0.16704063109164155, 0.16649833422826882, 0.16648681554273437, 0.20286466138783588, 0.06936588086433818, 7.105427357601002e-15, 0.16834648002367886, 0.16798293243083, 6.871898605709248e-10, 0.0011815729111892548, 0.16433263444740043, 0.0008377769174453542, 0.16632573432045866, 0.1630341420539464, 0.16507954777521783, 9.569704921590424e-08, 3.4970892216197174e-08, 0.16820617469834753, 0.16580685767199677, 0.1673465945690054, 7.13964618592744e-07, 2.64115098167329e-12, 0.09991279113071351, 0.9995291135114167, 0.37773784802857724, 0.004506167784075659, 0.13325025138007268, 0.35822936499573854, 1.6148291592799069e-09, 1.875173153042101e-18, 2.0941053890943806e-07, 7.105427357601002e-15, 0.49926428135900186, 0.16492940199937894, 1.3174450829220094e-17, 2.1116435458495668e-16, 1.1681587528983658e-09, 3.2592359986208345e-22, 6.246014200977821e-10, 0.16814918247297933, 0.16545072935045158, 1.1711026222995016e-13, 0.0, 0.16466886776470133, 0.1669247276390117, 0.0, 5.100713274794273e-08, 7.105427357601002e-15, 0.1546541084142275, 2.794209383371835e-22, 0.0, 0.16763973281840122, 6.314992660308343e-25, 0.16818612974609914, 0.009088502666434949, 0.16836320220350243, 8.265423131146944e-11, 0.15634635438381983, 0.0, 0.0, 0.021135423572131908, 0.021135423572131908, 2.388155749350249e-20, 0.0, 8.336724500779247e-25, 5.301380942484303e-05, 0.1569510658025109, 
4.034345030660423e-08, 1.552591960229748e-23, 1.5930611717254445e-17, 5.766100772128885e-14, 2.957989408969297e-11, 0.16837836924209137, 0.167380186090632, 1.4815146472600273e-18, 2.3241499374144116e-16, 4.7035980879892365e-08, 0.16663339666347546, 0.301803221006645),
(True, 0.9650930535627298, 0.004446882645999163, 0.000779830744354495, 0.0018291583684619643, 0.001156848194089809, 0.0013267424086233672, 0.0036299952076220402, 0.0027191890481778553, 0.0028542417683358585, 0.0012582815526366896, 0.005804597598461859, 0.0030752616287224883, 0.0064325637046985865, 0.0035802530886988467, 4.074887748553275e-10, 2.1752345554537253e-08, 6.893568743986756e-07, 0.10021777036808066, 0.019674268425336955, 0.1702061839526885, 0.628584800417054, 5.856109102466708e-17, 0.46476604902502333, 0.7772487345236945, 7.522455917366477e-13, 0.00043870678760216363, 0.5620371665742858, 0.35802358026758546, 0.11810551140986231, 0.31694305740570716, 0.9594279052184913, 0.000997592586663745, 0.0003918935200591895, 0.9999989976196331, 0.00044549328223300307, 0.002994722061278165, 0.1397420296811136, 0.0009922143825485518, 0.010060854870684866, 0.022121413589501402, 0.010060854870684931, 0.005169288695657137, 0.9974188774918888, 0.00795596400914535, 0.9123139305661856, 0.9755594331232822, 0.2331496431301822, 0.002330710525648013, 0.0004439356527548031, 0.06131633436401962, 0.033330412715665114, 9.938586887440123e-22, 0.14249225481045538, 0.06296313323086282, 0.3582069667640977, 4.829929905968561e-15, 0.08375773817691123, 0.018975839977920975, 0.055286669276326375, 5.2299730302677424e-05, 5.695163496355349e-08, 0.016145845513504845, 0.0, 1.142426579532403e-21, 2.4024551296122493e-23, 0.02650960597748037, 0.008721533485877977, 5.761429172990433e-21, 3.608752519347532e-15, 0.14598011853624576, 1.7355671790267907e-22, 1.1809375699556313e-10, 0.05998337856769929, 9.05498589442999e-08, 4.6097836658987035e-10, 0.1570723761512004, 0.6499506456961532, 0.06740843302484462, 0.08076015142621962, 0.02750088405103144, 0.026103288321088658, 0.12445962953209416, 0.07318164957419467, 0.16709687358966352, 0.07558100887358327, 8.786222326940566e-24, 0.2657390751934702, 0.030449466764316042, 0.21106179193952035, 5.301380942484303e-05, 1.426575645079419e-16, 
0.037316039578400684, 6.952720170237173e-13, 0.0, 9.796908728381612e-13, 0.0002642813530468513, 0.32334960795567924, 2.863050056034425e-09, 0.20731642102779674, 0.2561373864373436, 2.9948775774335523e-06, 0.2776390123534972, 5.301380942484303e-05, 0.006171383087998217, 5.301380942484303e-05, 0.5, 0.3847104505818606, 5.301380942484303e-05, 5.301380942484303e-05, 0.16466451519846614, 5.301380942484303e-05, 0.9986534460857486, 0.39982063322767364, 0.04287931331076668, 5.708678464956527e-19, 3.323628039453297e-10, 9.641976432207614e-10, 7.105427357601002e-15, 5.837075859767282e-13, 1.1988064218527426e-20, 0.16507954777521547, 0.16704063109164155, 0.16649833422826882, 0.16648681554273437, 0.20286466138783588, 0.06936588086433818, 7.105427357601002e-15, 0.16834648002367886, 0.16798293243083, 6.871898605709248e-10, 0.0011815729111892548, 0.16433263444740043, 0.0008377769174453542, 0.16632573432045866, 0.1630341420539464, 0.16507954777521783, 9.569704921590424e-08, 3.4970892216197174e-08, 0.16820617469834753, 0.16580685767199677, 0.1673465945690054, 7.13964618592744e-07, 2.64115098167329e-12, 0.09991279113071351, 0.9995291135114167, 0.37773784802857724, 0.004506167784075659, 0.13325025138007268, 0.35822936499573854, 1.6148291592799069e-09, 1.875173153042101e-18, 2.0941053890943806e-07, 7.105427357601002e-15, 0.49926428135900186, 0.16492940199937894, 1.3174450829220094e-17, 2.1116435458495668e-16, 1.1681587528983658e-09, 3.2592359986208345e-22, 6.246014200977821e-10, 0.16814918247297933, 0.16545072935045158, 1.1711026222995016e-13, 0.0, 0.16466886776470133, 0.1669247276390117, 0.0, 5.100713274794273e-08, 7.105427357601002e-15, 0.1546541084142275, 2.794209383371835e-22, 0.0, 0.16763973281840122, 6.314992660308343e-25, 0.16818612974609914, 0.009088502666434949, 0.16836320220350243, 8.265423131146944e-11, 0.15634635438381983, 0.0, 0.0, 0.021135423572131908, 0.021135423572131908, 2.388155749350249e-20, 0.0, 8.336724500779247e-25, 5.301380942484303e-05, 0.1569510658025109, 
4.034345030660423e-08, 1.552591960229748e-23, 1.5930611717254445e-17, 5.766100772128885e-14, 2.957989408969297e-11, 0.16837836924209137, 0.167380186090632, 1.4815146472600273e-18, 2.3241499374144116e-16, 4.7035980879892365e-08, 0.16663339666347546, 0.3773393505694206),
(True, 0.9446546136628688, 0.005119310675270623, 0.0015276758928106622, 0.00392465836781208, 0.0024628949384377947, 0.002496863087055844, 0.007719283133850986, 0.005161120533665423, 0.006306326014843984, 0.0029659593695567903, 0.008208755191274326, 0.0043563268699832215, 0.013937089127215597, 0.007862971477146348, 4.074887748553275e-10, 2.1752345554537253e-08, 6.893568743986756e-07, 0.10021777036808066, 0.019674268425336955, 2.509947805151569e-11, 0.8489495578328278, 5.856109102466708e-17, 1.0887143007578768e-06, 0.8435295694744586, 7.522455917366477e-13, 0.0009079339332145865, 0.5312223976455256, 0.4024482806188474, 0.2641612329547135, 0.456266555546848, 0.9594279052184913, 0.0019002202202663212, 0.0007711290852290978, 0.9999989976196331, 0.0014224289790193776, 0.011837839257462677, 0.24596355246958002, 0.0019768413472982995, 0.010109863367934919, 0.022452497772528456, 0.010109863367934986, 0.005376156708420288, 0.9980055594610482, 0.00759661067151978, 0.915906194109299, 0.9751971384408027, 0.3109742480528484, 0.004494865540960488, 0.0009129182903651183, 0.06131633436401962, 0.033330412715665114, 9.938586887440123e-22, 0.14249225481045538, 0.06296313323086282, 0.3582069667640977, 4.829929905968561e-15, 0.08375773817691123, 0.018975839977920975, 0.055286669276326375, 5.2299730302677424e-05, 5.695163496355349e-08, 0.05205690448505847, 0.0, 1.142426579532403e-21, 2.4024551296122493e-23, 0.03051334349281324, 0.008721533485877977, 5.761429172990433e-21, 3.608752519347532e-15, 0.14598011853624576, 1.7355671790267907e-22, 1.1809375699556313e-10, 0.05998337856769929, 9.05498589442999e-08, 4.6097836658987035e-10, 0.1570723761512004, 0.7245796787382166, 0.06740843302484462, 0.08076015142621962, 0.02750088405103144, 0.026103288321088658, 0.12445962953209416, 0.07318164957419467, 0.16709687358966352, 0.07558100887358327, 8.786222326940566e-24, 0.40937506641082566, 0.030449466764316042, 0.21106179193952035, 5.301380942484303e-05, 1.426575645079419e-16, 0.037316039578400684, 
6.952720170237173e-13, 0.0, 9.796908728381612e-13, 0.0002642813530468513, 0.32334960795567924, 2.863050056034425e-09, 0.35071358560365845, 0.2561373864373436, 2.9948775774335523e-06, 0.2776390123534972, 5.301380942484303e-05, 0.006171383087998217, 5.301380942484303e-05, 0.5, 0.3847104505818606, 5.301380942484303e-05, 5.301380942484303e-05, 0.16466451519846614, 5.301380942484303e-05, 0.9986534460857486, 0.39982063322767364, 0.04659909357942778, 5.708678464956527e-19, 3.323628039453297e-10, 9.641976432207614e-10, 7.105427357601002e-15, 5.837075859767282e-13, 1.1988064218527426e-20, 0.16507954777521547, 0.16704063109164155, 0.16649833422826882, 0.16648681554273437, 0.20286466138783588, 0.06936588086433818, 7.105427357601002e-15, 0.16834648002367886, 0.16798293243083, 6.871898605709248e-10, 0.0011815729111892548, 0.16433263444740043, 0.0008377769174453542, 0.16632573432045866, 0.1630341420539464, 0.16507954777521783, 9.569704921590424e-08, 3.4970892216197174e-08, 0.16820617469834753, 0.16580685767199677, 0.1673465945690054, 7.13964618592744e-07, 2.64115098167329e-12, 0.09991279113071351, 0.9995291135114167, 0.37773784802857724, 0.004506167784075659, 0.13325025138007268, 0.35822936499573854, 1.6148291592799069e-09, 1.875173153042101e-18, 2.0941053890943806e-07, 7.105427357601002e-15, 0.49926428135900186, 0.16492940199937894, 1.3174450829220094e-17, 2.1116435458495668e-16, 1.1681587528983658e-09, 3.2592359986208345e-22, 6.246014200977821e-10, 0.16814918247297933, 0.16545072935045158, 1.1711026222995016e-13, 0.0, 0.16466886776470133, 0.1669247276390117, 0.0, 5.100713274794273e-08, 7.105427357601002e-15, 0.1546541084142275, 2.794209383371835e-22, 0.0, 0.16763973281840122, 6.314992660308343e-25, 0.16818612974609914, 0.009088502666434949, 0.16836320220350243, 8.265423131146944e-11, 0.15634635438381983, 0.0, 0.0, 0.021135423572131908, 0.021135423572131908, 2.388155749350249e-20, 0.0, 8.336724500779247e-25, 5.301380942484303e-05, 0.1569510658025109, 4.034345030660423e-08, 
1.552591960229748e-23, 1.5930611717254445e-17, 5.766100772128885e-14, 2.957989408969297e-11, 0.16837836924209137, 0.167380186090632, 1.4815146472600273e-18, 2.3241499374144116e-16, 4.7035980879892365e-08, 0.16663339666347546, 0.44752037155154656),
(True, 0.9148570430415872, 0.005878709943021506, 0.002792905542185768, 0.0075894368538410445, 0.004804630733613945, 0.004393849106506193, 0.014820685016774457, 0.00938744609118805, 0.011649786913846162, 0.005756131370649765, 0.015760775469750833, 0.008475895607575506, 0.018412094949817548, 0.01045916251868392, 4.074887748553275e-10, 2.1752345554537253e-08, 6.893568743986756e-07, 0.10021777036808066, 0.019674268425336955, 2.509947805151569e-11, 0.8982272708212705, 5.856109102466708e-17, 1.0887143007578768e-06, 0.8435295694744586, 7.522455917366477e-13, 0.0017366618823920316, 0.5063937185064782, 0.44008412539154557, 0.33731163450536594, 0.5722741725809378, 0.9594279052184913, 0.003382784376313171, 0.0014142827979925852, 0.9999989976196331, 0.0036644810806478172, 0.03518713434611009, 0.2900462801767014, 0.0036616728049894826, 0.010139645140688136, 0.022488799892004533, 0.010139645140688205, 0.005396729877744144, 0.9981743932734319, 0.00756251339989579, 0.9170964928541225, 0.9751574194747613, 0.40266237630266705, 0.008074137264439831, 0.001735836076723291, 0.06131633436401962, 0.033330412715665114, 9.938586887440123e-22, 0.14249225481045538, 0.06296313323086282, 0.3582069667640977, 4.829929905968561e-15, 0.08375773817691123, 0.018975839977920975, 0.055286669276326375, 5.2299730302677424e-05, 5.695163496355349e-08, 0.11744762389331218, 0.0, 1.142426579532403e-21, 2.4024551296122493e-23, 0.03531944981668431, 0.008721533485877977, 5.761429172990433e-21, 3.608752519347532e-15, 0.14598011853624576, 1.7355671790267907e-22, 1.1809375699556313e-10, 0.05998337856769929, 9.05498589442999e-08, 4.6097836658987035e-10, 0.1570723761512004, 0.7245796787382166, 0.1156448000491021, 0.08076015142621962, 0.02750088405103144, 0.026103288321088658, 0.12445962953209416, 0.07318164957419467, 0.16709687358966352, 0.07558100887358327, 8.786222326940566e-24, 0.5346551102427208, 0.030449466764316042, 0.21106179193952035, 5.301380942484303e-05, 1.426575645079419e-16, 0.037316039578400684, 
6.952720170237173e-13, 0.0, 9.796908728381612e-13, 0.0002642813530468513, 0.32334960795567924, 2.863050056034425e-09, 0.4878677353402995, 0.2561373864373436, 2.9948775774335523e-06, 0.2776390123534972, 5.301380942484303e-05, 0.006171383087998217, 5.301380942484303e-05, 0.5, 0.3847104505818606, 5.301380942484303e-05, 5.301380942484303e-05, 0.16466451519846614, 5.301380942484303e-05, 0.9986534460857486, 0.39982063322767364, 0.05073274651199229, 5.708678464956527e-19, 3.323628039453297e-10, 9.641976432207614e-10, 7.105427357601002e-15, 5.837075859767282e-13, 1.1988064218527426e-20, 0.16507954777521547, 0.16704063109164155, 0.16649833422826882, 0.16648681554273437, 0.20286466138783588, 0.06936588086433818, 7.105427357601002e-15, 0.16834648002367886, 0.16798293243083, 6.871898605709248e-10, 0.0011815729111892548, 0.16433263444740043, 0.0008377769174453542, 0.16632573432045866, 0.1630341420539464, 0.16507954777521783, 9.569704921590424e-08, 3.4970892216197174e-08, 0.16820617469834753, 0.16580685767199677, 0.1673465945690054, 7.13964618592744e-07, 2.64115098167329e-12, 0.09991279113071351, 0.9995291135114167, 0.37773784802857724, 0.004506167784075659, 0.13325025138007268, 0.35822936499573854, 1.6148291592799069e-09, 1.875173153042101e-18, 2.0941053890943806e-07, 7.105427357601002e-15, 0.49926428135900186, 0.16492940199937894, 1.3174450829220094e-17, 2.1116435458495668e-16, 1.1681587528983658e-09, 3.2592359986208345e-22, 6.246014200977821e-10, 0.16814918247297933, 0.16545072935045158, 1.1711026222995016e-13, 0.0, 0.16466886776470133, 0.1669247276390117, 0.0, 5.100713274794273e-08, 7.105427357601002e-15, 0.1546541084142275, 2.794209383371835e-22, 0.0, 0.16763973281840122, 6.314992660308343e-25, 0.16818612974609914, 0.009088502666434949, 0.16836320220350243, 8.265423131146944e-11, 0.15634635438381983, 0.0, 0.0, 0.021135423572131908, 0.021135423572131908, 2.388155749350249e-20, 0.0, 8.336724500779247e-25, 5.301380942484303e-05, 0.1569510658025109, 4.034345030660423e-08, 
1.552591960229748e-23, 1.5930611717254445e-17, 5.766100772128885e-14, 2.957989408969297e-11, 0.16837836924209137, 0.167380186090632, 1.4815146472600273e-18, 2.3241499374144116e-16, 4.7035980879892365e-08, 0.16663339666347546, 0.4992269885725577),
(True, 0.8773900422564633, 0.006737683689354504, 0.0048247619101781805, 0.013548449813255039, 0.00873744943462956, 0.007321229981810169, 0.02627235571148601, 0.01639147296275744, 0.02007666541207044, 0.010365829219755766, 0.025961087618306386, 0.014222212896890334, 0.03127417315766773, 0.018052437271065293, 4.074887748553275e-10, 2.1752345554537253e-08, 6.893568743986756e-07, 0.10021777036808066, 0.019674268425336955, 2.509947805151569e-11, 0.9307858067310805, 5.856109102466708e-17, 1.0887143007578768e-06, 0.8435295694744586, 7.522455917366477e-13, 0.003116819077375695, 0.2949063498433872, 0.32411464409514057, 0.29561449420793584, 0.663568247880646, 0.9594279052184913, 0.005699067374558093, 0.0024494762664879067, 0.9999989976196331, 0.008128639474665522, 0.08444739472522397, 0.4164401045970738, 0.006390743128115311, 0.0101597023577282, 0.02249012253280331, 0.01015970235772827, 0.005397537673694719, 0.9982470873588286, 0.0075611803707368655, 0.9176363519133461, 0.9751559723632889, 0.5031213401534023, 0.013686771604148678, 0.0030982888101555874, 0.06131633436401962, 0.033330412715665114, 9.938586887440123e-22, 0.14249225481045538, 0.06296313323086282, 0.3582069667640977, 4.829929905968561e-15, 0.08375773817691123, 0.018975839977920975, 0.055286669276326375, 5.2299730302677424e-05, 5.695163496355349e-08, 0.2142975169478418, 0.0, 1.142426579532403e-21, 2.4024551296122493e-23, 0.0411229744187351, 0.008721533485877977, 5.761429172990433e-21, 3.608752519347532e-15, 0.14598011853624576, 1.7355671790267907e-22, 1.1809375699556313e-10, 0.05998337856769929, 9.05498589442999e-08, 4.6097836658987035e-10, 0.1570723761512004, 0.7884317853270039, 0.1156448000491021, 0.08076015142621962, 0.02750088405103144, 0.026103288321088658, 0.12445962953209416, 0.07318164957419467, 0.16709687358966352, 0.07558100887358327, 8.786222326940566e-24, 0.6367707804864168, 0.030449466764316042, 0.21106179193952035, 5.301380942484303e-05, 1.426575645079419e-16, 0.037316039578400684, 
6.952720170237173e-13, 0.0, 9.796908728381612e-13, 0.0002642813530468513, 0.32334960795567924, 2.863050056034425e-09, 0.6070824318152669, 0.2561373864373436, 2.9948775774335523e-06, 0.2776390123534972, 5.301380942484303e-05, 0.006171383087998217, 5.301380942484303e-05, 0.5, 0.3847104505818606, 5.301380942484303e-05, 5.301380942484303e-05, 0.16466451519846614, 5.301380942484303e-05, 0.9986534460857486, 0.39982063322767364, 0.055333770115485736, 5.708678464956527e-19, 3.323628039453297e-10, 9.641976432207614e-10, 7.105427357601002e-15, 5.837075859767282e-13, 1.1988064218527426e-20, 0.16507954777521547, 0.16704063109164155, 0.16649833422826882, 0.16648681554273437, 0.20286466138783588, 0.06936588086433818, 7.105427357601002e-15, 0.16834648002367886, 0.16798293243083, 6.871898605709248e-10, 0.0011815729111892548, 0.16433263444740043, 0.0008377769174453542, 0.16632573432045866, 0.1630341420539464, 0.16507954777521783, 9.569704921590424e-08, 3.4970892216197174e-08, 0.16820617469834753, 0.16580685767199677, 0.1673465945690054, 7.13964618592744e-07, 2.64115098167329e-12, 0.09991279113071351, 0.9995291135114167, 0.37773784802857724, 0.004506167784075659, 0.13325025138007268, 0.35822936499573854, 1.6148291592799069e-09, 1.875173153042101e-18, 2.0941053890943806e-07, 7.105427357601002e-15, 0.49926428135900186, 0.16492940199937894, 1.3174450829220094e-17, 2.1116435458495668e-16, 1.1681587528983658e-09, 3.2592359986208345e-22, 6.246014200977821e-10, 0.16814918247297933, 0.16545072935045158, 1.1711026222995016e-13, 0.0, 0.16466886776470133, 0.1669247276390117, 0.0, 5.100713274794273e-08, 7.105427357601002e-15, 0.1546541084142275, 2.794209383371835e-22, 0.0, 0.16763973281840122, 6.314992660308343e-25, 0.16818612974609914, 0.009088502666434949, 0.16836320220350243, 8.265423131146944e-11, 0.15634635438381983, 0.0, 0.0, 0.021135423572131908, 0.021135423572131908, 2.388155749350249e-20, 0.0, 8.336724500779247e-25, 5.301380942484303e-05, 0.1569510658025109, 4.034345030660423e-08, 
1.552591960229748e-23, 1.5930611717254445e-17, 5.766100772128885e-14, 2.957989408969297e-11, 0.16837836924209137, 0.167380186090632, 1.4815146472600273e-18, 2.3241499374144116e-16, 0.960706684375328, 0.16663339666347546, 0.4648987049382678),
(True, 0.8351433470765426, 0.007706099388131821, 0.007947956066516883, 0.022686947292310716, 0.014991735875121545, 0.01165676885872922, 0.04363307088403046, 0.02749911604495756, 0.032662911515606415, 0.017559065834968416, 0.04026960261153979, 0.02255342469024842, 0.047093618061642833, 0.027604880004625645, 4.074887748553275e-10, 2.1752345554537253e-08, 6.893568743986756e-07, 0.10021777036808066, 0.019674268425336955, 2.509947805151569e-11, 0.9527866535679592, 5.856109102466708e-17, 1.0887143007578768e-06, 0.8435295694744586, 7.522455917366477e-13, 0.005306407287245952, 0.1643182689263654, 0.2452637246445762, 0.2641612329547135, 0.7336695581715281, 0.9594279052184913, 0.009170044543122991, 0.004045051693865415, 0.9999989976196331, 0.016128763770813574, 0.16933619262867025, 0.45804363114686764, 0.010612373006048977, 0.010174143427333127, 0.02249014468472591, 0.010174143427333195, 0.0053975516451804095, 0.9982852169376433, 0.007561157318746206, 0.9179267374547876, 0.9751559481267165, 0.6045870173147739, 0.022099416719337316, 0.005248596585887298, 0.06131633436401962, 0.033330412715665114, 9.938586887440123e-22, 0.14249225481045538, 0.06296313323086282, 0.3582069667640977, 4.829929905968561e-15, 0.08375773817691123, 0.018975839977920975, 0.055286669276326375, 5.2299730302677424e-05, 5.695163496355349e-08, 0.3356692195895122, 0.0, 1.142426579532403e-21, 2.4024551296122493e-23, 0.04817195965121822, 0.008721533485877977, 5.761429172990433e-21, 3.608752519347532e-15, 0.14598011853624576, 1.7355671790267907e-22, 1.1809375699556313e-10, 0.05998337856769929, 9.05498589442999e-08, 4.6097836658987035e-10, 0.1570723761512004, 0.8417287418088772, 0.1156448000491021, 0.08076015142621962, 0.02750088405103144, 0.026103288321088658, 0.12445962953209416, 0.07318164957419467, 0.16709687358966352, 0.07558100887358327, 8.786222326940566e-24, 0.7172944132100186, 0.030449466764316042, 0.21106179193952035, 5.301380942484303e-05, 1.426575645079419e-16, 0.037316039578400684, 
6.952720170237173e-13, 0.0, 9.796908728381612e-13, 0.0002642813530468513, 0.32334960795567924, 2.863050056034425e-09, 0.7046095919319575, 0.2561373864373436, 2.9948775774335523e-06, 0.2776390123534972, 5.301380942484303e-05, 0.006171383087998217, 5.301380942484303e-05, 0.5, 0.3847104505818606, 5.301380942484303e-05, 5.301380942484303e-05, 0.16466451519846614, 5.301380942484303e-05, 0.9986534460857486, 0.39982063322767364, 0.060462928433970345, 5.708678464956527e-19, 3.323628039453297e-10, 9.641976432207614e-10, 7.105427357601002e-15, 5.837075859767282e-13, 1.1988064218527426e-20, 0.16507954777521547, 0.16704063109164155, 0.16649833422826882, 0.16648681554273437, 0.20286466138783588, 0.06936588086433818, 7.105427357601002e-15, 0.16834648002367886, 0.16798293243083, 6.871898605709248e-10, 0.0011815729111892548, 0.16433263444740043, 0.0008377769174453542, 0.16632573432045866, 0.1630341420539464, 0.16507954777521783, 9.569704921590424e-08, 3.4970892216197174e-08, 0.16820617469834753, 0.16580685767199677, 0.1673465945690054, 7.13964618592744e-07, 2.64115098167329e-12, 0.09991279113071351, 0.9995291135114167, 0.37773784802857724, 0.004506167784075659, 0.13325025138007268, 0.35822936499573854, 1.6148291592799069e-09, 1.875173153042101e-18, 2.0941053890943806e-07, 7.105427357601002e-15, 0.49926428135900186, 0.16492940199937894, 1.3174450829220094e-17, 2.1116435458495668e-16, 1.1681587528983658e-09, 3.2592359986208345e-22, 6.246014200977821e-10, 0.16814918247297933, 0.16545072935045158, 1.1711026222995016e-13, 0.0, 0.16466886776470133, 0.1669247276390117, 0.0, 5.100713274794273e-08, 7.105427357601002e-15, 0.1546541084142275, 2.794209383371835e-22, 0.0, 0.16763973281840122, 6.314992660308343e-25, 0.16818612974609914, 0.009088502666434949, 0.16836320220350243, 8.265423131146944e-11, 0.15634635438381983, 0.0, 0.0, 0.021135423572131908, 0.021135423572131908, 2.388155749350249e-20, 0.0, 8.336724500779247e-25, 5.301380942484303e-05, 0.1569510658025109, 4.034345030660423e-08, 
1.552591960229748e-23, 1.5930611717254445e-17, 5.766100772128885e-14, 2.957989408969297e-11, 0.16837836924209137, 0.167380186090632, 1.4815146472600273e-18, 2.3241499374144116e-16, 0.999199216932124, 0.16663339666347546, 0.47051853675791894),
(True, 0.7911998190455012, 0.008800649616294641, 0.012570648480435946, 0.0360263772579401, 0.024478322176278763, 0.0178555885816629, 0.06855090610604317, 0.044329234858940546, 0.050578490811933365, 0.028246449024710487, 0.05949049780401471, 0.034139907925263516, 0.06764352461984338, 0.04030307802044964, 4.074887748553275e-10, 2.1752345554537253e-08, 6.893568743986756e-07, 0.10021777036808066, 0.019674268425336955, 2.509947805151569e-11, 0.9678060848252985, 5.856109102466708e-17, 1.0887143007578768e-06, 0.8435295694744586, 7.522455917366477e-13, 0.008640006016889879, 0.0936889372333889, 0.1307941996207988, 0.17847498111695453, 0.7870936438338828, 0.9594279052184913, 0.014188842444193574, 0.006415745073166158, 0.9999989976196331, 0.029313240791841205, 0.2891738918265846, 0.586653594130873, 0.016887993395513938, 0.010185042847651588, 0.022490144859944602, 0.010185042847651654, 0.005397551757332697, 0.9983077598749016, 0.00756115713370315, 0.9181008652246858, 0.9751559479350059, 0.6987423700245083, 0.03421776780713893, 0.008507855659691374, 0.06131633436401962, 0.033330412715665114, 9.938586887440123e-22, 0.14249225481045538, 0.06296313323086282, 0.3582069667640977, 4.829929905968561e-15, 0.08375773817691123, 0.018975839977920975, 0.055286669276326375, 5.2299730302677424e-05, 5.695163496355349e-08, 0.4663102638320793, 0.0, 1.142426579532403e-21, 2.4024551296122493e-23, 0.056781709571613034, 0.008721533485877977, 5.761429172990433e-21, 3.608752519347532e-15, 0.14598011853624576, 1.7355671790267907e-22, 1.1809375699556313e-10, 0.05998337856769929, 9.05498589442999e-08, 4.6097836658987035e-10, 0.1570723761512004, 0.8850064479703426, 0.1156448000491021, 0.08076015142621962, 0.02750088405103144, 0.026103288321088658, 0.12445962953209416, 0.07318164957419467, 0.16709687358966352, 0.07558100887358327, 8.786222326940566e-24, 0.7797881455384017, 0.030449466764316042, 0.21106179193952035, 5.301380942484303e-05, 1.426575645079419e-16, 0.037316039578400684, 6.952720170237173e-13, 
0.0, 9.796908728381612e-13, 0.0002642813530468513, 0.32334960795567924, 2.863050056034425e-09, 0.781215519801578, 0.2561373864373436, 2.9948775774335523e-06, 0.2776390123534972, 5.301380942484303e-05, 0.006171383087998217, 5.301380942484303e-05, 0.5, 0.3847104505818606, 5.301380942484303e-05, 5.301380942484303e-05, 0.16466451519846614, 5.301380942484303e-05, 0.9986534460857486, 0.39982063322767364, 0.06618910789702505, 5.708678464956527e-19, 3.323628039453297e-10, 9.641976432207614e-10, 7.105427357601002e-15, 5.837075859767282e-13, 1.1988064218527426e-20, 0.16507954777521547, 0.16704063109164155, 0.16649833422826882, 0.16648681554273437, 0.20286466138783588, 0.06936588086433818, 7.105427357601002e-15, 0.16834648002367886, 0.16798293243083, 6.871898605709248e-10, 0.0011815729111892548, 0.16433263444740043, 0.0008377769174453542, 0.16632573432045866, 0.1630341420539464, 0.16507954777521783, 9.569704921590424e-08, 3.4970892216197174e-08, 0.16820617469834753, 0.16580685767199677, 0.1673465945690054, 7.13964618592744e-07, 2.64115098167329e-12, 0.09991279113071351, 0.9995291135114167, 0.37773784802857724, 0.004506167784075659, 0.13325025138007268, 0.35822936499573854, 1.6148291592799069e-09, 1.875173153042101e-18, 2.0941053890943806e-07, 7.105427357601002e-15, 0.49926428135900186, 0.16492940199937894, 1.3174450829220094e-17, 2.1116435458495668e-16, 1.1681587528983658e-09, 3.2592359986208345e-22, 6.246014200977821e-10, 0.16814918247297933, 0.16545072935045158, 1.1711026222995016e-13, 0.0, 0.16466886776470133, 0.1669247276390117, 0.0, 5.100713274794273e-08, 7.105427357601002e-15, 0.1546541084142275, 2.794209383371835e-22, 0.0, 0.16763973281840122, 6.314992660308343e-25, 0.16818612974609914, 0.009088502666434949, 0.16836320220350243, 8.265423131146944e-11, 0.15634635438381983, 0.0, 0.0, 0.021135423572131908, 0.021135423572131908, 2.388155749350249e-20, 0.0, 8.336724500779247e-25, 5.301380942484303e-05, 0.1569510658025109, 4.034345030660423e-08, 1.552591960229748e-23, 
1.5930611717254445e-17, 5.766100772128885e-14, 2.957989408969297e-11, 0.16837836924209137, 0.167380186090632, 1.4815146472600273e-18, 2.3241499374144116e-16, 0.999984030798407, 0.16663339666347546, 0.4586610020915609),
(True, 0.7480715976509739, 0.010020123249078111, 0.01918745294320575, 0.05466672261311227, 0.038266648952003934, 0.02644755790287131, 0.10252981512083055, 0.06863664111326576, 0.07497062698983498, 0.04344292663361637, 0.08435276828703127, 0.04968226279965248, 0.09341788667508033, 0.05663748902514149, 4.074887748553275e-10, 2.1752345554537253e-08, 6.893568743986756e-07, 0.10021777036808066, 0.019674268425336955, 2.509947805151569e-11, 0.9780980123097015, 5.856109102466708e-17, 1.0887143007578768e-06, 0.8435295694744586, 7.522455917366477e-13, 0.01353667324995541, 0.05626872891117488, 0.07521129726842302, 0.12665389351705464, 0.8278732746082242, 0.9594279052184913, 0.021221392591123968, 0.009828000700507902, 0.9999989976196331, 0.04954819370276316, 0.42841740464384837, 0.6215317554945545, 0.025891577012630264, 0.010193563530303111, 0.022490144860609185, 0.010193563530303177, 0.005397551757761576, 0.9983222139676136, 0.0075611571329955215, 0.9182134947177039, 0.975155947934278, 0.7792571695927133, 0.051050972845539444, 0.013277956653669779, 0.06131633436401962, 0.033330412715665114, 9.938586887440123e-22, 0.14249225481045538, 0.06296313323086282, 0.3582069667640977, 4.829929905968561e-15, 0.08375773817691123, 0.018975839977920975, 0.055286669276326375, 5.2299730302677424e-05, 5.695163496355349e-08, 0.5891256641404885, 0.0, 1.142426579532403e-21, 2.4024551296122493e-23, 0.06735166756092135, 0.008721533485877977, 5.761429172990433e-21, 3.608752519347532e-15, 0.14598011853624576, 1.7355671790267907e-22, 1.1809375699556313e-10, 0.05998337856769929, 9.05498589442999e-08, 4.6097836658987035e-10, 0.1570723761512004, 0.9190928996143982, 0.1156448000491021, 0.08076015142621962, 0.02750088405103144, 0.026103288321088658, 0.12445962953209416, 0.07318164957419467, 0.16709687358966352, 0.07558100887358327, 8.786222326940566e-24, 0.8279632700575962, 0.030449466764316042, 0.21106179193952035, 5.301380942484303e-05, 1.426575645079419e-16, 0.037316039578400684, 6.952720170237173e-13, 
0.0, 9.796908728381612e-13, 0.0002642813530468513, 0.32334960795567924, 2.863050056034425e-09, 0.8397176204588883, 0.2561373864373436, 2.9948775774335523e-06, 0.2776390123534972, 5.301380942484303e-05, 0.006171383087998217, 5.301380942484303e-05, 0.5, 0.3847104505818606, 5.301380942484303e-05, 5.301380942484303e-05, 0.16466451519846614, 5.301380942484303e-05, 0.9986534460857486, 0.39982063322767364, 0.07259018957341272, 5.708678464956527e-19, 3.323628039453297e-10, 9.641976432207614e-10, 7.105427357601002e-15, 5.837075859767282e-13, 1.1988064218527426e-20, 0.16507954777521547, 0.16704063109164155, 0.16649833422826882, 0.16648681554273437, 0.20286466138783588, 0.06936588086433818, 7.105427357601002e-15, 0.16834648002367886, 0.16798293243083, 6.871898605709248e-10, 0.0011815729111892548, 0.16433263444740043, 0.0008377769174453542, 0.16632573432045866, 0.1630341420539464, 0.16507954777521783, 9.569704921590424e-08, 3.4970892216197174e-08, 0.16820617469834753, 0.16580685767199677, 0.1673465945690054, 7.13964618592744e-07, 2.64115098167329e-12, 0.09991279113071351, 0.9995291135114167, 0.37773784802857724, 0.004506167784075659, 0.13325025138007268, 0.35822936499573854, 1.6148291592799069e-09, 1.875173153042101e-18, 2.0941053890943806e-07, 7.105427357601002e-15, 0.49926428135900186, 0.16492940199937894, 1.3174450829220094e-17, 2.1116435458495668e-16, 1.1681587528983658e-09, 3.2592359986208345e-22, 6.246014200977821e-10, 0.16814918247297933, 0.16545072935045158, 1.1711026222995016e-13, 0.0, 0.16466886776470133, 0.1669247276390117, 0.0, 5.100713274794273e-08, 7.105427357601002e-15, 0.1546541084142275, 2.794209383371835e-22, 0.0, 0.16763973281840122, 6.314992660308343e-25, 0.16818612974609914, 0.009088502666434949, 0.16836320220350243, 8.265423131146944e-11, 0.15634635438381983, 0.0, 0.0, 0.021135423572131908, 0.021135423572131908, 2.388155749350249e-20, 0.0, 8.336724500779247e-25, 5.301380942484303e-05, 0.1569510658025109, 4.034345030660423e-08, 1.552591960229748e-23, 
1.5930611717254445e-17, 5.766100772128885e-14, 2.957989408969297e-11, 0.16837836924209137, 0.167380186090632, 1.4815146472600273e-18, 2.3241499374144116e-16, 0.9999996825114967, 0.16663339666347546, 0.43104757445818576),
(True, 0.7074277156045184, 0.011389350278344522, 0.028374169579839063, 0.07968495304501298, 0.05752115635026047, 0.03802595393886921, 0.14659756816753064, 0.10201305959998892, 0.10679379407623839, 0.06418588013195548, 0.11541923233364575, 0.06986317260754552, 0.12473537821991337, 0.0770536102709045, 4.074887748553275e-10, 2.1752345554537253e-08, 6.893568743986756e-07, 0.10021777036808066, 0.019674268425336955, 2.509947805151569e-11, 0.9851507964891669, 5.856109102466708e-17, 1.0887143007578768e-06, 0.8435295694744586, 7.522455917366477e-13, 0.020502691481613995, 0.03583189105440813, 0.04670544957106467, 0.07255557607247386, 0.8592031241241455, 0.9594279052184913, 0.030800301501078944, 0.014603503516862845, 0.9999989976196331, 0.07867146842174601, 0.5643305618956167, 0.7358722993322504, 0.03839377129635346, 0.010200408718714183, 0.022490144860609185, 0.010200408718714254, 0.005397551757762353, 0.9983320439869565, 0.007561157132994219, 0.9182905382437727, 0.9751559479342766, 0.8432672673074932, 0.07363903705016071, 0.020045399762039697, 0.06131633436401962, 0.033330412715665114, 9.938586887440123e-22, 0.14249225481045538, 0.06296313323086282, 0.3582069667640977, 4.829929905968561e-15, 0.08375773817691123, 0.018975839977920975, 0.055286669276326375, 5.2299730302677424e-05, 5.695163496355349e-08, 0.6926334011962839, 0.0, 1.142426579532403e-21, 2.4024551296122493e-23, 0.08038395565370364, 0.008721533485877977, 5.761429172990433e-21, 3.608752519347532e-15, 0.14598011853624576, 1.7355671790267907e-22, 1.1809375699556313e-10, 0.05998337856769929, 9.05498589442999e-08, 4.6097836658987035e-10, 0.1570723761512004, 0.9450522527061462, 0.1156448000491021, 0.08076015142621962, 0.02750088405103144, 0.026103288321088658, 0.12445962953209416, 0.07318164957419467, 0.16709687358966352, 0.07558100887358327, 8.786222326940566e-24, 0.8650419623325789, 0.030449466764316042, 0.21106179193952035, 5.301380942484303e-05, 1.426575645079419e-16, 0.037316039578400684, 6.952720170237173e-13, 
0.0, 9.796908728381612e-13, 0.0002642813530468513, 0.32334960795567924, 2.863050056034425e-09, 0.8835152300577735, 0.2561373864373436, 2.9948775774335523e-06, 0.2776390123534972, 5.301380942484303e-05, 0.006171383087998217, 5.301380942484303e-05, 0.5, 0.3847104505818606, 5.301380942484303e-05, 5.301380942484303e-05, 0.16466451519846614, 5.301380942484303e-05, 0.9986534460857486, 0.39982063322767364, 0.07975389133191409, 5.708678464956527e-19, 3.323628039453297e-10, 9.641976432207614e-10, 7.105427357601002e-15, 5.837075859767282e-13, 1.1988064218527426e-20, 0.16507954777521547, 0.16704063109164155, 0.16649833422826882, 0.16648681554273437, 0.20286466138783588, 0.06936588086433818, 7.105427357601002e-15, 0.16834648002367886, 0.16798293243083, 6.871898605709248e-10, 0.0011815729111892548, 0.16433263444740043, 0.0008377769174453542, 0.16632573432045866, 0.1630341420539464, 0.16507954777521783, 9.569704921590424e-08, 3.4970892216197174e-08, 0.16820617469834753, 0.16580685767199677, 0.1673465945690054, 7.13964618592744e-07, 2.64115098167329e-12, 0.09991279113071351, 0.9995291135114167, 0.37773784802857724, 0.004506167784075659, 0.13325025138007268, 0.35822936499573854, 1.6148291592799069e-09, 1.875173153042101e-18, 2.0941053890943806e-07, 7.105427357601002e-15, 0.49926428135900186, 0.16492940199937894, 1.3174450829220094e-17, 2.1116435458495668e-16, 1.1681587528983658e-09, 3.2592359986208345e-22, 6.246014200977821e-10, 0.16814918247297933, 0.16545072935045158, 1.1711026222995016e-13, 0.0, 0.16466886776470133, 0.1669247276390117, 0.0, 5.100713274794273e-08, 7.105427357601002e-15, 0.1546541084142275, 2.794209383371835e-22, 0.0, 0.16763973281840122, 6.314992660308343e-25, 0.16818612974609914, 0.009088502666434949, 0.16836320220350243, 8.265423131146944e-11, 0.15634635438381983, 0.0, 0.0, 0.021135423572131908, 0.021135423572131908, 2.388155749350249e-20, 0.0, 8.336724500779247e-25, 5.301380942484303e-05, 0.1569510658025109, 4.034345030660423e-08, 1.552591960229748e-23, 
1.5930611717254445e-17, 5.766100772128885e-14, 2.957989408969297e-11, 0.16837836924209137, 0.167380186090632, 1.4815146472600273e-18, 2.3241499374144116e-16, 0.9999999937047724, 0.16663339666347546, 0.39028355156239286),]
| 3,848.454545
| 4,294
| 0.840881
| 4,702
| 42,333
| 7.570608
| 0.132497
| 0.006742
| 0.037363
| 0.028092
| 0.752901
| 0.752901
| 0.750906
| 0.750906
| 0.750906
| 0.741467
| 0
| 0.864512
| 0.047528
| 42,333
| 10
| 4,295
| 4,233.3
| 0.018328
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6a97b4c2b7ec433eaf00f217083e56999c7243d8
| 10,666
|
py
|
Python
|
wallee/api/legal_organization_form_service_api.py
|
bluedynamics/wallee-python-sdk
|
7f20df96d2c3dba3b1ca5236e8deca578819eea2
|
[
"Apache-2.0"
] | 2
|
2020-01-16T13:24:06.000Z
|
2020-11-21T17:40:17.000Z
|
wallee/api/legal_organization_form_service_api.py
|
bluedynamics/wallee-python-sdk
|
7f20df96d2c3dba3b1ca5236e8deca578819eea2
|
[
"Apache-2.0"
] | 4
|
2019-10-14T17:33:23.000Z
|
2021-10-01T14:49:11.000Z
|
wallee/api/legal_organization_form_service_api.py
|
bluedynamics/wallee-python-sdk
|
7f20df96d2c3dba3b1ca5236e8deca578819eea2
|
[
"Apache-2.0"
] | 2
|
2019-10-15T14:17:10.000Z
|
2021-09-17T13:07:09.000Z
|
# coding: utf-8
from __future__ import absolute_import
import six
from wallee.api_client import ApiClient
class LegalOrganizationFormServiceApi:
    """Client for the ``/legal-organization-form`` service endpoints.

    Every endpoint is exposed as a pair of methods: a convenience wrapper
    (``all``, ``country``, ``read``) that returns only the deserialized
    response data, and a ``*_with_http_info`` companion that performs the
    actual HTTP call through the shared ``ApiClient``.
    """

    def __init__(self, configuration):
        # One ApiClient instance, shared by every request issued here.
        self.api_client = ApiClient(configuration=configuration)

    def all(self, **kwargs):
        """All

        This operation returns all entities which are available.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.all(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: list[LegalOrganizationForm]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Callers of this wrapper want the data only, never the
        # (data, status, headers) triple.
        kwargs['_return_http_data_only'] = True
        # With async_req=True the call returns the request thread, otherwise
        # the deserialized data; either way it is handed straight back.
        return self.all_with_http_info(**kwargs)

    def all_with_http_info(self, **kwargs):
        """All

        This operation returns all entities which are available.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.all_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: list[LegalOrganizationForm]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        accepted = [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
        ]
        params = locals()
        for name, value in six.iteritems(params['kwargs']):
            if name not in accepted:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method all" % name
                )
            params[name] = value
        del params['kwargs']

        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {
            # HTTP header `Accept`
            'Accept': self.api_client.select_header_accept(
                ['application/json;charset=utf-8']),
            # HTTP header `Content-Type`
            'Content-Type': self.api_client.select_header_content_type(
                ['*/*']),
        }
        form_params = []
        local_var_files = {}
        body_params = None
        # Authentication setting: this endpoint needs none.
        auth_settings = []

        return self.api_client.call_api(
            '/legal-organization-form/all', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[LegalOrganizationForm]',
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def country(self, code, **kwargs):
        """Find by Country

        This operation returns all legal organization forms for a given country.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.country(code, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str code: The country in ISO 3166-1 alpha-2 format, for which all legal organization forms should be returned. (required)
        :return: list[LegalOrganizationForm]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Data-only wrapper; thread (async) or data (sync) comes straight
        # from the *_with_http_info implementation.
        kwargs['_return_http_data_only'] = True
        return self.country_with_http_info(code, **kwargs)

    def country_with_http_info(self, code, **kwargs):
        """Find by Country

        This operation returns all legal organization forms for a given country.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.country_with_http_info(code, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str code: The country in ISO 3166-1 alpha-2 format, for which all legal organization forms should be returned. (required)
        :return: list[LegalOrganizationForm]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        accepted = [
            'code',
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
        ]
        params = locals()
        for name, value in six.iteritems(params['kwargs']):
            if name not in accepted:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method country" % name
                )
            params[name] = value
        del params['kwargs']

        # verify the required parameter 'code' is set
        if params.get('code') is None:
            raise ValueError("Missing the required parameter `code` when calling `country`")

        collection_formats = {}
        path_params = {}
        query_params = []
        if 'code' in params:
            query_params.append(('code', params['code']))
        header_params = {
            # HTTP header `Accept`
            'Accept': self.api_client.select_header_accept(
                ['application/json;charset=utf-8']),
            # HTTP header `Content-Type`
            'Content-Type': self.api_client.select_header_content_type(
                ['*/*']),
        }
        form_params = []
        local_var_files = {}
        body_params = None
        # Authentication setting: this endpoint needs none.
        auth_settings = []

        return self.api_client.call_api(
            '/legal-organization-form/country', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[LegalOrganizationForm]',
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def read(self, id, **kwargs):
        """Read

        Reads the entity with the given 'id' and returns it.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.read(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: The id of the legal organization form which should be returned. (required)
        :return: LegalOrganizationForm
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Data-only wrapper; thread (async) or data (sync) comes straight
        # from the *_with_http_info implementation.
        kwargs['_return_http_data_only'] = True
        return self.read_with_http_info(id, **kwargs)

    def read_with_http_info(self, id, **kwargs):
        """Read

        Reads the entity with the given 'id' and returns it.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.read_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: The id of the legal organization form which should be returned. (required)
        :return: LegalOrganizationForm
                 If the method is called asynchronously,
                 returns the request thread.
        """
        accepted = [
            'id',
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
        ]
        params = locals()
        for name, value in six.iteritems(params['kwargs']):
            if name not in accepted:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method read" % name
                )
            params[name] = value
        del params['kwargs']

        # verify the required parameter 'id' is set
        if params.get('id') is None:
            raise ValueError("Missing the required parameter `id` when calling `read`")

        collection_formats = {}
        path_params = {}
        query_params = []
        if 'id' in params:
            query_params.append(('id', params['id']))
        header_params = {
            # HTTP header `Accept`
            'Accept': self.api_client.select_header_accept(
                ['application/json;charset=utf-8']),
            # HTTP header `Content-Type`
            'Content-Type': self.api_client.select_header_content_type(
                ['*/*']),
        }
        form_params = []
        local_var_files = {}
        body_params = None
        # Authentication setting: this endpoint needs none.
        auth_settings = []

        return self.api_client.call_api(
            '/legal-organization-form/read', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='LegalOrganizationForm',
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 34.62987
| 136
| 0.599287
| 1,194
| 10,666
| 5.118928
| 0.117253
| 0.039267
| 0.02356
| 0.03534
| 0.923429
| 0.905105
| 0.891361
| 0.854712
| 0.839005
| 0.839005
| 0
| 0.002176
| 0.310613
| 10,666
| 307
| 137
| 34.742671
| 0.829049
| 0.302081
| 0
| 0.748466
| 0
| 0
| 0.166764
| 0.06566
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042945
| false
| 0
| 0.018405
| 0
| 0.122699
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6ab8142f876ebe43b29eb38bbd13694043420d3d
| 1,747
|
py
|
Python
|
tests/simple_test.py
|
bravomikekilo/servent
|
bc93227e82fecf81abcf4387aa673163f2a24bf3
|
[
"Apache-2.0"
] | null | null | null |
tests/simple_test.py
|
bravomikekilo/servent
|
bc93227e82fecf81abcf4387aa673163f2a24bf3
|
[
"Apache-2.0"
] | null | null | null |
tests/simple_test.py
|
bravomikekilo/servent
|
bc93227e82fecf81abcf4387aa673163f2a24bf3
|
[
"Apache-2.0"
] | null | null | null |
import h5py as h5
import servent
import numpy as np
def integrity_test():
    """Verify that rows logged before a crash survive in the archive.

    Logs 768 entries of shape ``(10,)`` through a ``servent.Archive``,
    aborts the run with a deliberate exception, then re-opens the HDF5
    file and asserts that exactly the rows logged before the abort were
    persisted with their original values.
    """
    try:
        with servent.Archive('test') as archive:
            label = archive.getLogger('label', (10,))
            for i in range(1000):
                if i == 768:
                    # Simulate a crash mid-run; the archive must still
                    # flush everything logged so far.
                    raise ValueError('intended')
                label.log(i * np.ones((10,)))
    except Exception as e:
        if e.args[0] != 'intended':
            # Not our deliberate abort: re-raise preserving the original
            # traceback (bare `raise`, not `raise e`).
            raise
    with h5.File('test.hdf5', 'r') as f:
        label = f['default/label']
        assert label.shape == (768, 10)
        for i in range(768):
            assert np.all(label[i] == i * np.ones(10,))
def scaler_test():
    """Verify crash-recovery for scalar log entries.

    Uses ``getLogger('label', None)`` (scalar logger), logs 768 plain
    integers, aborts, then checks the persisted dataset is 1-D with the
    expected values.
    """
    try:
        with servent.Archive('test') as archive:
            label = archive.getLogger('label', None)
            for i in range(1000):
                if i == 768:
                    # Deliberate abort to exercise the flush-on-error path.
                    raise ValueError('intended')
                label.log(i)
    except Exception as e:
        if e.args[0] != 'intended':
            # Unexpected failure: re-raise with the original traceback.
            raise
    with h5.File('test.hdf5', 'r') as f:
        label = f['default/label']
        assert label.shape == (768,)
        for i in range(768):
            assert label[i] == i, 'content error'
def multi_dimension_test():
    """Verify crash-recovery for multi-dimensional (2-D) log entries.

    Bug fix: this test was a verbatim copy of the scalar test and never
    logged multi-dimensional data. It now logs entries of shape
    ``(10, 5)`` and checks the persisted dataset shape and contents.
    """
    try:
        with servent.Archive('test') as archive:
            # 2-D logger, analogous to the (10,) shape used elsewhere.
            label = archive.getLogger('label', (10, 5))
            for i in range(1000):
                if i == 768:
                    # Deliberate abort to exercise the flush-on-error path.
                    raise ValueError('intended')
                label.log(i * np.ones((10, 5)))
    except Exception as e:
        if e.args[0] != 'intended':
            # Unexpected failure: re-raise with the original traceback.
            raise
    with h5.File('test.hdf5', 'r') as f:
        label = f['default/label']
        assert label.shape == (768, 10, 5)
        for i in range(768):
            assert np.all(label[i] == i * np.ones((10, 5))), 'content error'
| 24.957143
| 55
| 0.497997
| 219
| 1,747
| 3.954338
| 0.214612
| 0.027714
| 0.04157
| 0.076212
| 0.875289
| 0.87067
| 0.847575
| 0.847575
| 0.847575
| 0.847575
| 0
| 0.052823
| 0.371494
| 1,747
| 69
| 56
| 25.318841
| 0.735883
| 0
| 0
| 0.803922
| 0
| 0
| 0.097365
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 1
| 0.058824
| false
| 0
| 0.058824
| 0
| 0.117647
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6ac01bf2a58a0cd5baee5601bca4d6522542cd01
| 13,956
|
py
|
Python
|
pytpp/api/websdk/endpoints/permissions.py
|
Venafi/pytpp
|
42af655b2403b8c9447c86962abd4aaa0201f646
|
[
"MIT"
] | 4
|
2022-02-04T23:58:55.000Z
|
2022-02-15T18:53:08.000Z
|
pytpp/api/websdk/endpoints/permissions.py
|
Venafi/pytpp
|
42af655b2403b8c9447c86962abd4aaa0201f646
|
[
"MIT"
] | null | null | null |
pytpp/api/websdk/endpoints/permissions.py
|
Venafi/pytpp
|
42af655b2403b8c9447c86962abd4aaa0201f646
|
[
"MIT"
] | null | null | null |
from typing import List
from pytpp.api.api_base import API, APIResponse, api_response_property
from pytpp.properties.response_objects.permissions import Permissions
class _Permissions:
    """Root of the ``/Permissions`` WebSDK endpoint tree.

    ``Object`` addresses permissions on a single configuration object by
    GUID; ``Refresh`` maps to ``/Permissions/Refresh``.
    """
    def __init__(self, api_obj):
        self.Object = self._Object(api_obj=api_obj)
        self.Refresh = self._Refresh(api_obj=api_obj)

    class _Object:
        # Factory level: holds the API connection and binds it to a
        # specific object GUID on demand via Guid().
        def __init__(self, api_obj):
            self._api_obj = api_obj

        def Guid(self, guid):
            # Build the endpoint ``/Permissions/Object/{guid}``.
            return self._Guid(guid=guid, api_obj=self._api_obj)

        class _Guid(API):
            # Endpoint: ``/Permissions/Object/{guid}``.
            def __init__(self, guid: str, api_obj):
                super().__init__(api_obj=api_obj, url=f'/Permissions/Object/{guid}')
                self._guid = guid

            def get(self):
                """GET this endpoint; the response body is a list of principals."""
                class _Response(APIResponse):
                    def __init__(self, response):
                        super().__init__(response=response)

                    @property
                    @api_response_property()
                    def principals(self) -> List[str]:
                        # Whole JSON body (no key) is the list of principals.
                        return self._from_json()
                return _Response(response=self._get())

            def Ptype(self, ptype='Local'):
                # Append the provider-type path segment; defaults to 'Local'.
                return self._Ptype(guid=self._guid, ptype=ptype, api_obj=self._api_obj)

            class _Ptype:
                # Path-segment holder for ``.../{ptype}``; not itself an
                # endpoint — only carries state for Pname()/Principal().
                def __init__(self, guid: str, ptype: str, api_obj):
                    self._guid = guid
                    self._ptype = ptype
                    self._api_obj = api_obj

                def Pname(self, pname):
                    # Address a principal via ``.../{ptype}/{pname}/{principal}``.
                    return self._Pname(guid=self._guid, ptype=self._ptype, pname=pname, api_obj=self._api_obj)

                def Principal(self, uuid: str):
                    # Address a principal directly via ``.../{ptype}/{uuid}``.
                    return self._Principal(guid=self._guid, ptype=self._ptype, uuid=uuid, api_obj=self._api_obj)

                class _Pname:
                    # Path-segment holder for ``.../{ptype}/{pname}``.
                    def __init__(self, guid: str, ptype: str, pname: str, api_obj):
                        self._guid = guid
                        self._ptype = ptype
                        self._pname = pname
                        self._api_obj = api_obj

                    def Principal(self, principal: str):
                        return self._Principal(guid=self._guid, ptype=self._ptype, pname=self._pname,
                                               principal=principal, api_obj=self._api_obj)

                    class _Principal(API):
                        # Endpoint:
                        # ``/Permissions/Object/{guid}/{ptype}/{pname}/{principal}``.
                        def __init__(self, guid: str, ptype: str, pname: str, principal: str, api_obj):
                            super().__init__(
                                api_obj=api_obj,
                                url=f'/Permissions/Object/{guid}/{ptype}/{pname}/{principal}'
                            )
                            # Companion ``.../Effective`` endpoint, pre-bound
                            # to the same path parameters.
                            self.Effective = self._Effective(guid=guid, ptype=ptype, pname=pname, principal=principal, api_obj=api_obj)

                        def delete(self):
                            """Send DELETE to this permissions endpoint."""
                            return APIResponse(response=self._delete())

                        def get(self):
                            """GET; response exposes explicit and implicit permissions."""
                            class _Response(APIResponse):
                                def __init__(self, response):
                                    super().__init__(response=response)

                                @property
                                @api_response_property()
                                def explicit_permissions(self):
                                    # JSON key 'ExplicitPermissions'.
                                    return Permissions.Permissions(self._from_json(key='ExplicitPermissions'))

                                @property
                                @api_response_property()
                                def implicit_permissions(self):
                                    # JSON key 'ImplicitPermissions'.
                                    return Permissions.Permissions(self._from_json(key='ImplicitPermissions'))
                            return _Response(response=self._get())

                        def post(self, is_associate_allowed: bool = None, is_create_allowed: bool = None, is_delete_allowed: bool = None,
                                 is_manage_permissions_allowed: bool = None, is_policy_write_allowed: bool = None,
                                 is_private_key_read_allowed: bool = None, is_private_key_write_allowed: bool = None, is_read_allowed: bool = None,
                                 is_rename_allowed: bool = None, is_revoke_allowed: bool = None, is_view_allowed: bool = None,
                                 is_write_allowed: bool = None):
                            """POST the given permission flags to this endpoint.

                            Flags left as ``None`` are still included in the
                            request body as-is.
                            """
                            body = {
                                'IsAssociateAllowed': is_associate_allowed,
                                'IsCreateAllowed': is_create_allowed,
                                'IsDeleteAllowed': is_delete_allowed,
                                'IsManagePermissionsAllowed': is_manage_permissions_allowed,
                                'IsPolicyWriteAllowed': is_policy_write_allowed,
                                'IsPrivateKeyReadAllowed': is_private_key_read_allowed,
                                'IsPrivateKeyWriteAllowed': is_private_key_write_allowed,
                                'IsReadAllowed': is_read_allowed,
                                'IsRenameAllowed': is_rename_allowed,
                                'IsRevokeAllowed': is_revoke_allowed,
                                'IsViewAllowed': is_view_allowed,
                                'IsWriteAllowed': is_write_allowed
                            }
                            return APIResponse(response=self._post(data=body))

                        def put(self, is_associate_allowed: bool = None, is_create_allowed: bool = None, is_delete_allowed: bool = None,
                                is_manage_permissions_allowed: bool = None, is_policy_write_allowed: bool = None,
                                is_private_key_read_allowed: bool = None, is_private_key_write_allowed: bool = None, is_read_allowed: bool = None,
                                is_rename_allowed: bool = None, is_revoke_allowed: bool = None, is_view_allowed: bool = None,
                                is_write_allowed: bool = None):
                            """PUT the given permission flags to this endpoint.

                            Same body layout as :meth:`post`.
                            """
                            body = {
                                'IsAssociateAllowed': is_associate_allowed,
                                'IsCreateAllowed': is_create_allowed,
                                'IsDeleteAllowed': is_delete_allowed,
                                'IsManagePermissionsAllowed': is_manage_permissions_allowed,
                                'IsPolicyWriteAllowed': is_policy_write_allowed,
                                'IsPrivateKeyReadAllowed': is_private_key_read_allowed,
                                'IsPrivateKeyWriteAllowed': is_private_key_write_allowed,
                                'IsReadAllowed': is_read_allowed,
                                'IsRenameAllowed': is_rename_allowed,
                                'IsRevokeAllowed': is_revoke_allowed,
                                'IsViewAllowed': is_view_allowed,
                                'IsWriteAllowed': is_write_allowed
                            }
                            return APIResponse(response=self._put(data=body))

                        class _Effective(API):
                            # Endpoint: ``.../{pname}/{principal}/Effective``.
                            def __init__(self, guid: str, ptype: str, pname: str, principal: str, api_obj):
                                super().__init__(
                                    api_obj=api_obj,
                                    url=f'/Permissions/Object/{guid}/{ptype}/{pname}/{principal}/Effective'
                                )

                            def get(self):
                                """GET; response exposes the effective permissions."""
                                class _Response(APIResponse):
                                    def __init__(self, response):
                                        super().__init__(response=response)

                                    @property
                                    @api_response_property()
                                    def effective_permissions(self):
                                        # JSON key 'EffectivePermissions'.
                                        return Permissions.Permissions(self._from_json('EffectivePermissions'))
                                return _Response(response=self._get())

                class _Principal(API):
                    # Endpoint: ``/Permissions/Object/{guid}/{ptype}/{uuid}``
                    # (principal addressed directly by UUID).
                    def __init__(self, guid: str, ptype: str, uuid: str, api_obj):
                        super().__init__(
                            api_obj=api_obj,
                            url=f'/Permissions/Object/{guid}/{ptype}/{uuid}'
                        )
                        # Note the companion Effective endpoint hard-codes the
                        # 'Local' provider segment regardless of ``ptype``.
                        self.Effective = self._Effective(guid=guid, uuid=uuid, api_obj=api_obj)

                    def delete(self):
                        """Send DELETE to this permissions endpoint."""
                        return APIResponse(response=self._delete())

                    def get(self):
                        """GET; response exposes explicit and implicit permissions."""
                        class _Response(APIResponse):
                            def __init__(self, response):
                                super().__init__(response=response)

                            @property
                            @api_response_property()
                            def explicit_permissions(self):
                                # JSON key 'ExplicitPermissions'.
                                return Permissions.Permissions(self._from_json('ExplicitPermissions'))

                            @property
                            @api_response_property()
                            def implicit_permissions(self):
                                # JSON key 'ImplicitPermissions'.
                                return Permissions.Permissions(self._from_json('ImplicitPermissions'))
                        return _Response(response=self._get())

                    def post(self, is_associate_allowed: bool = None, is_create_allowed: bool = None, is_delete_allowed: bool = None,
                             is_manage_permissions_allowed: bool = None, is_policy_write_allowed: bool = None,
                             is_private_key_read_allowed: bool = None, is_private_key_write_allowed: bool = None, is_read_allowed: bool = None,
                             is_rename_allowed: bool = None, is_revoke_allowed: bool = None, is_view_allowed: bool = None,
                             is_write_allowed: bool = None):
                        """POST the given permission flags to this endpoint.

                        Flags left as ``None`` are still included in the
                        request body as-is.
                        """
                        body = {
                            'IsAssociateAllowed': is_associate_allowed,
                            'IsCreateAllowed': is_create_allowed,
                            'IsDeleteAllowed': is_delete_allowed,
                            'IsManagePermissionsAllowed': is_manage_permissions_allowed,
                            'IsPolicyWriteAllowed': is_policy_write_allowed,
                            'IsPrivateKeyReadAllowed': is_private_key_read_allowed,
                            'IsPrivateKeyWriteAllowed': is_private_key_write_allowed,
                            'IsReadAllowed': is_read_allowed,
                            'IsRenameAllowed': is_rename_allowed,
                            'IsRevokeAllowed': is_revoke_allowed,
                            'IsViewAllowed': is_view_allowed,
                            'IsWriteAllowed': is_write_allowed
                        }
                        return APIResponse(response=self._post(data=body))

                    def put(self, is_associate_allowed: bool = None, is_create_allowed: bool = None, is_delete_allowed: bool = None,
                            is_manage_permissions_allowed: bool = None, is_policy_write_allowed: bool = None,
                            is_private_key_read_allowed: bool = None, is_private_key_write_allowed: bool = None, is_read_allowed: bool = None,
                            is_rename_allowed: bool = None, is_revoke_allowed: bool = None, is_view_allowed: bool = None,
                            is_write_allowed: bool = None):
                        """PUT the given permission flags to this endpoint.

                        Same body layout as :meth:`post`.
                        """
                        body = {
                            'IsAssociateAllowed': is_associate_allowed,
                            'IsCreateAllowed': is_create_allowed,
                            'IsDeleteAllowed': is_delete_allowed,
                            'IsManagePermissionsAllowed': is_manage_permissions_allowed,
                            'IsPolicyWriteAllowed': is_policy_write_allowed,
                            'IsPrivateKeyReadAllowed': is_private_key_read_allowed,
                            'IsPrivateKeyWriteAllowed': is_private_key_write_allowed,
                            'IsReadAllowed': is_read_allowed,
                            'IsRenameAllowed': is_rename_allowed,
                            'IsRevokeAllowed': is_revoke_allowed,
                            'IsViewAllowed': is_view_allowed,
                            'IsWriteAllowed': is_write_allowed
                        }
                        return APIResponse(response=self._put(data=body))

                    class _Effective(API):
                        # Endpoint:
                        # ``/Permissions/Object/{guid}/Local/{uuid}/Effective``.
                        def __init__(self, guid: str, uuid: str, api_obj):
                            super().__init__(
                                api_obj=api_obj,
                                url=f'/Permissions/Object/{guid}/Local/{uuid}/Effective'
                            )

                        def get(self):
                            """GET; response exposes the effective permissions."""
                            class _Response(APIResponse):
                                def __init__(self, response):
                                    super().__init__(response=response)

                                @property
                                @api_response_property()
                                def effective_permissions(self):
                                    # JSON key 'EffectivePermissions'.
                                    return Permissions.Permissions(self._from_json('EffectivePermissions'))
                            return _Response(response=self._get())

    class _Refresh(API):
        # Endpoint: ``/Permissions/Refresh``.
        def __init__(self, api_obj):
            super().__init__(api_obj=api_obj, url='/Permissions/Refresh')

        def get(self):
            """GET; response exposes the integer 'Result' field."""
            class _Response(APIResponse):
                def __init__(self, response):
                    super().__init__(response=response)

                @property
                @api_response_property()
                def result(self) -> int:
                    # JSON key 'Result'.
                    return self._from_json('Result')
            return _Response(response=self._get())
| 54.093023
| 147
| 0.494483
| 1,159
| 13,956
| 5.51855
| 0.063848
| 0.082552
| 0.11257
| 0.116948
| 0.913383
| 0.892902
| 0.847874
| 0.838649
| 0.838649
| 0.828174
| 0
| 0
| 0.432717
| 13,956
| 257
| 148
| 54.303502
| 0.807882
| 0
| 0
| 0.753555
| 0
| 0
| 0.087776
| 0.03769
| 0
| 0
| 0
| 0
| 0
| 1
| 0.194313
| false
| 0
| 0.014218
| 0.07109
| 0.402844
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c1037a3ae6eda4eae2f2fbad40aefc58ee347a2
| 840
|
py
|
Python
|
klasy/tic-tac-class.py
|
kobe81venum/tic-tac
|
c34841f31655bbfa85b3ae3ec548e99d0555589a
|
[
"MIT"
] | null | null | null |
klasy/tic-tac-class.py
|
kobe81venum/tic-tac
|
c34841f31655bbfa85b3ae3ec548e99d0555589a
|
[
"MIT"
] | null | null | null |
klasy/tic-tac-class.py
|
kobe81venum/tic-tac
|
c34841f31655bbfa85b3ae3ec548e99d0555589a
|
[
"MIT"
] | null | null | null |
#%%
# class moja_klasa:
# def wyswietl(x):
# return 'Witaj swiecie'
# x = moja_klasa()
# print(x.wyswietl())
# %%
class prostopadloscian:
    """Cuboid (rectangular box) with base sides ``podstawa_a``/``podstawa_b``
    and height ``wysokosc_h``.

    The class name and attribute names are Polish ("prostopadloscian" =
    cuboid) and are kept unchanged for backward compatibility.
    """

    def __init__(self, podstawa_a=0, podstawa_b=0, wysokosc_h=0):
        # Generalized: dimensions may now be passed at construction time.
        # Defaults of 0 preserve the original zero-initialised behaviour,
        # so existing `prostopadloscian()` callers are unaffected.
        self.podstawa_a = podstawa_a  # base side a
        self.podstawa_b = podstawa_b  # base side b
        self.wysokosc_h = wysokosc_h  # height h

    def objetosc(self):
        """Return the volume: a * b * h."""
        return self.podstawa_a * self.podstawa_b * self.wysokosc_h
# Demonstrate the volume computation on a 100 x 200 x 400 cuboid.
wtc = prostopadloscian()
for attr, value in (('podstawa_a', 100), ('podstawa_b', 200), ('wysokosc_h', 400)):
    setattr(wtc, attr, value)
print(wtc.objetosc())
# %%
class prostopadloscian:
    """Cuboid (rectangular box) with base sides ``podstawa_a``/``podstawa_b``
    and height ``wysokosc_h``.

    NOTE(review): this re-definition shadows the identically-named class
    above; the name is kept unchanged for backward compatibility.
    """

    def __init__(self, podstawa_a=0, podstawa_b=0, wysokosc_h=0):
        # Generalized: dimensions may now be passed at construction time.
        # Defaults of 0 preserve the original zero-initialised behaviour.
        self.podstawa_a = podstawa_a  # base side a
        self.podstawa_b = podstawa_b  # base side b
        self.wysokosc_h = wysokosc_h  # height h

    def objetosc(self):
        """Return the volume: a * b * h."""
        return self.podstawa_a * self.podstawa_b * self.wysokosc_h
| 24
| 67
| 0.672619
| 110
| 840
| 4.909091
| 0.3
| 0.177778
| 0.096296
| 0.103704
| 0.725926
| 0.725926
| 0.725926
| 0.725926
| 0.725926
| 0.725926
| 0
| 0.023184
| 0.229762
| 840
| 35
| 68
| 24
| 0.811437
| 0.30119
| 0
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.210526
| false
| 0
| 0
| 0.105263
| 0.421053
| 0.052632
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 8
|
7c23cda41f6873deaa07312d8c6f120b6b4fd4ce
| 15,393
|
py
|
Python
|
senlin/tests/unit/engine/actions/test_recover.py
|
openstack/senlin
|
390779ca1e08f819683e79993696f945f1c0393e
|
[
"Apache-2.0"
] | 45
|
2015-10-18T02:56:50.000Z
|
2022-03-01T15:28:02.000Z
|
senlin/tests/unit/engine/actions/test_recover.py
|
openstack/senlin
|
390779ca1e08f819683e79993696f945f1c0393e
|
[
"Apache-2.0"
] | 2
|
2019-04-26T10:44:47.000Z
|
2020-12-16T19:45:34.000Z
|
senlin/tests/unit/engine/actions/test_recover.py
|
openstack/senlin
|
390779ca1e08f819683e79993696f945f1c0393e
|
[
"Apache-2.0"
] | 45
|
2015-10-19T02:35:57.000Z
|
2021-09-28T09:01:42.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from senlin.common import consts
from senlin.common import scaleutils as su
from senlin.engine.actions import base as ab
from senlin.engine.actions import cluster_action as ca
from senlin.engine import cluster as cm
from senlin.engine import dispatcher
from senlin.engine import node as nm
from senlin.objects import action as ao
from senlin.objects import dependency as dobj
from senlin.objects import node as no
from senlin.tests.unit.common import base
from senlin.tests.unit.common import utils
@mock.patch.object(cm.Cluster, 'load')
class ClusterRecoverTest(base.SenlinTestCase):
    """Unit tests for ClusterAction.do_recover() and _check_capacity().

    The class-level patch replaces cm.Cluster.load, so every test method
    receives ``mock_load`` as its final mock argument (method-level patch
    decorators supply the earlier arguments, innermost first).
    """

    def setUp(self):
        super(ClusterRecoverTest, self).setUp()
        # Dummy request context shared by all tests.
        self.ctx = utils.dummy_context()

    @mock.patch.object(ao.Action, 'update')
    @mock.patch.object(ab.Action, 'create')
    @mock.patch.object(dobj.Dependency, 'create')
    @mock.patch.object(dispatcher, 'start_action')
    @mock.patch.object(ca.ClusterAction, '_wait_for_dependents')
    def test_do_recover(self, mock_wait, mock_start, mock_dep,
                        mock_action, mock_update, mock_load):
        """One ACTIVE and one ERROR node: only the ERROR node gets a
        derived NODE_RECOVER action, and the recovery succeeds."""
        node1 = mock.Mock(id='NODE_1', cluster_id='FAKE_ID', status='ACTIVE')
        node2 = mock.Mock(id='NODE_2', cluster_id='FAKE_ID', status='ERROR')
        cluster = mock.Mock(id='FAKE_ID', RECOVERING='RECOVERING',
                            desired_capacity=2)
        cluster.do_recover.return_value = True
        mock_load.return_value = cluster
        cluster.nodes = [node1, node2]

        action = ca.ClusterAction(cluster.id, 'CLUSTER_RECOVER', self.ctx)
        action.id = 'CLUSTER_ACTION_ID'
        action.data = {}
        mock_action.return_value = 'NODE_RECOVER_ID'
        mock_wait.return_value = (action.RES_OK, 'Everything is Okay')

        # do it
        res_code, res_msg = action.do_recover()

        # assertions
        self.assertEqual(action.RES_OK, res_code)
        self.assertEqual('Cluster recovery succeeded.', res_msg)
        cluster.do_recover.assert_called_once_with(action.context)
        # Only the ERROR node (NODE_2) should spawn a recover action.
        mock_action.assert_called_once_with(
            action.context, 'NODE_2', 'NODE_RECOVER',
            name='node_recover_NODE_2',
            cause=consts.CAUSE_DERIVED,
            inputs={'operation': None, 'operation_params': None}
        )
        mock_dep.assert_called_once_with(action.context, ['NODE_RECOVER_ID'],
                                         'CLUSTER_ACTION_ID')
        mock_update.assert_called_once_with(action.context, 'NODE_RECOVER_ID',
                                            {'status': 'READY'})
        mock_start.assert_called_once_with()
        mock_wait.assert_called_once_with()
        cluster.eval_status.assert_called_once_with(
            action.context, consts.CLUSTER_RECOVER)

    @mock.patch.object(ao.Action, 'update')
    @mock.patch.object(ab.Action, 'create')
    @mock.patch.object(dobj.Dependency, 'create')
    @mock.patch.object(dispatcher, 'start_action')
    @mock.patch.object(ca.ClusterAction, '_wait_for_dependents')
    @mock.patch.object(ca.ClusterAction, '_check_capacity')
    def test_do_recover_with_input(self, mock_check, mock_wait, mock_start,
                                   mock_dep, mock_action, mock_update,
                                   mock_load):
        """Inputs (operation / check / check_capacity) are forwarded to the
        derived node action; check_capacity=True triggers _check_capacity."""
        node1 = mock.Mock(id='NODE_1', cluster_id='FAKE_ID', status='ERROR')
        cluster = mock.Mock(id='FAKE_ID', RECOVERING='RECOVERING',
                            desired_capacity=2)
        cluster.nodes = [node1]
        cluster.do_recover.return_value = True
        mock_load.return_value = cluster

        action = ca.ClusterAction(cluster.id, 'CLUSTER_RECOVER', self.ctx)
        action.id = 'CLUSTER_ACTION_ID'
        action.inputs = {
            'operation': consts.RECOVER_REBOOT,
            'check': False,
            'check_capacity': True
        }
        mock_action.return_value = 'NODE_RECOVER_ID'
        mock_wait.return_value = (action.RES_OK, 'Everything is Okay')

        # do it
        res_code, res_msg = action.do_recover()

        # assertions
        self.assertEqual(action.RES_OK, res_code)
        self.assertEqual('Cluster recovery succeeded.', res_msg)
        cluster.do_recover.assert_called_once_with(action.context)
        # The requested REBOOT operation is passed through to the node action.
        mock_action.assert_called_once_with(
            action.context, 'NODE_1', 'NODE_RECOVER',
            name='node_recover_NODE_1',
            cause=consts.CAUSE_DERIVED,
            inputs={
                'operation': consts.RECOVER_REBOOT,
                'operation_params': None
            }
        )
        mock_dep.assert_called_once_with(action.context, ['NODE_RECOVER_ID'],
                                         'CLUSTER_ACTION_ID')
        mock_update.assert_called_once_with(action.context, 'NODE_RECOVER_ID',
                                            {'status': 'READY'})
        mock_start.assert_called_once_with()
        mock_wait.assert_called_once_with()
        cluster.eval_status.assert_called_once_with(
            action.context, consts.CLUSTER_RECOVER)
        mock_check.assert_called_once_with()

    def test_do_recover_all_nodes_active(self, mock_load):
        """With no ERROR nodes, recovery succeeds without spawning any
        derived node actions."""
        cluster = mock.Mock(id='FAKE_ID', desired_capacity=2)
        cluster.do_recover.return_value = True
        mock_load.return_value = cluster

        node1 = mock.Mock(id='NODE_1', cluster_id='FAKE_ID', status='ACTIVE')
        node2 = mock.Mock(id='NODE_2', cluster_id='FAKE_ID', status='ACTIVE')
        cluster.nodes = [node1, node2]

        action = ca.ClusterAction(cluster.id, 'CLUSTER_RECOVER', self.ctx)

        # do it
        res_code, res_msg = action.do_recover()

        self.assertEqual(action.RES_OK, res_code)
        self.assertEqual('Cluster recovery succeeded.', res_msg)
        cluster.do_recover.assert_called_once_with(self.ctx)
        cluster.eval_status.assert_called_once_with(
            action.context, consts.CLUSTER_RECOVER)

    @mock.patch.object(ao.Action, 'update')
    @mock.patch.object(ab.Action, 'create')
    @mock.patch.object(dobj.Dependency, 'create')
    @mock.patch.object(dispatcher, 'start_action')
    @mock.patch.object(ca.ClusterAction, '_wait_for_dependents')
    @mock.patch.object(ca.ClusterAction, '_check_capacity')
    def test_do_recover_failed_waiting(self, mock_check, mock_wait,
                                       mock_start, mock_dep, mock_action,
                                       mock_update, mock_load):
        """A timeout while waiting for dependents propagates RES_TIMEOUT;
        check_capacity=False means _check_capacity is never called."""
        node = mock.Mock(id='NODE_1', cluster_id='CID', status='ERROR')
        cluster = mock.Mock(id='CID', desired_capacity=2)
        cluster.do_recover.return_value = True
        cluster.nodes = [node]
        mock_load.return_value = cluster
        mock_action.return_value = 'NODE_ACTION_ID'

        action = ca.ClusterAction('FAKE_CLUSTER', 'CLUSTER_RECOVER', self.ctx)
        action.id = 'CLUSTER_ACTION_ID'
        action.inputs = {
            'operation': consts.RECOVER_RECREATE,
            'check': False,
            'check_capacity': False
        }
        mock_wait.return_value = (action.RES_TIMEOUT, 'Timeout!')

        res_code, res_msg = action.do_recover()

        self.assertEqual(action.RES_TIMEOUT, res_code)
        self.assertEqual('Timeout!', res_msg)
        mock_load.assert_called_once_with(self.ctx, 'FAKE_CLUSTER')
        cluster.do_recover.assert_called_once_with(action.context)
        mock_action.assert_called_once_with(
            action.context, 'NODE_1', 'NODE_RECOVER',
            name='node_recover_NODE_1',
            cause=consts.CAUSE_DERIVED,
            inputs={
                'operation': consts.RECOVER_RECREATE,
                'operation_params': None
            }
        )
        mock_dep.assert_called_once_with(action.context, ['NODE_ACTION_ID'],
                                         'CLUSTER_ACTION_ID')
        mock_update.assert_called_once_with(action.context, 'NODE_ACTION_ID',
                                            {'status': 'READY'})
        mock_start.assert_called_once_with()
        mock_wait.assert_called_once_with()
        cluster.eval_status.assert_called_once_with(
            action.context, consts.CLUSTER_RECOVER)
        self.assertFalse(mock_check.called)

    @mock.patch.object(ca.ClusterAction, '_check_capacity')
    @mock.patch.object(nm.Node, 'load')
    def test_do_recover_with_check_active(self, mock_node, mock_desired,
                                          mock_load):
        """check=True: do_check() turns the ERROR node ACTIVE again, so no
        recover action is needed and _check_capacity is not called."""
        cluster = mock.Mock(id='FAKE_ID', desired_capacity=2)
        cluster.do_recover.return_value = True
        mock_load.return_value = cluster

        node1 = mock.Mock(id='NODE_1', cluster_id='FAKE_ID', status='ACTIVE')
        node2 = mock.Mock(id='NODE_2', cluster_id='FAKE_ID', status='ERROR')
        cluster.nodes = [node1, node2]

        eng_node1 = mock.Mock(id='NODE_1', cluster_id='FAKE_ID',
                              status='ACTIVE')
        eng_node2 = mock.Mock(id='NODE_2', cluster_id='FAKE_ID',
                              status='ERROR')
        mock_node.side_effect = [eng_node1, eng_node2]

        def set_status(*args, **kwargs):
            # Simulate a health check that flips the node back to ACTIVE.
            eng_node2.status = 'ACTIVE'
        mock_check = self.patchobject(nm.Node, 'do_check')
        mock_check.side_effect = set_status
        eng_node2.do_check = mock_check

        action = ca.ClusterAction(cluster.id, 'CLUSTER_RECOVER', self.ctx)
        action.inputs = {'check': True}

        # do it
        res_code, res_msg = action.do_recover()

        self.assertEqual(action.RES_OK, res_code)
        self.assertEqual('Cluster recovery succeeded.', res_msg)
        node_calls = [
            mock.call(self.ctx, node_id='NODE_1'),
            mock.call(self.ctx, node_id='NODE_2')
        ]
        mock_node.assert_has_calls(node_calls)
        eng_node1.do_check.assert_called_once_with(self.ctx)
        eng_node2.do_check.assert_called_once_with(self.ctx)
        cluster.do_recover.assert_called_once_with(self.ctx)
        cluster.eval_status.assert_called_once_with(
            action.context, consts.CLUSTER_RECOVER)
        self.assertFalse(mock_desired.called)

    @mock.patch.object(ao.Action, 'update')
    @mock.patch.object(ab.Action, 'create')
    @mock.patch.object(dobj.Dependency, 'create')
    @mock.patch.object(dispatcher, 'start_action')
    @mock.patch.object(ca.ClusterAction, '_wait_for_dependents')
    @mock.patch.object(ca.ClusterAction, '_check_capacity')
    @mock.patch.object(nm.Node, 'load')
    def test_do_recover_with_check_error(self, mock_node, mock_desired,
                                         mock_wait, mock_start, mock_dep,
                                         mock_action, mock_update, mock_load):
        """check=True: do_check() flips a node to ERROR, so a derived recover
        action is created for it and _check_capacity runs afterwards."""
        node1 = mock.Mock(id='NODE_1', cluster_id='FAKE_ID', status='ACTIVE')
        node2 = mock.Mock(id='NODE_2', cluster_id='FAKE_ID', status='ACTIVE')
        cluster = mock.Mock(id='FAKE_ID', RECOVERING='RECOVERING',
                            desired_capacity=2)
        cluster.do_recover.return_value = True
        mock_load.return_value = cluster
        cluster.nodes = [node1, node2]

        eng_node1 = mock.Mock(id='NODE_1', cluster_id='FAKE_ID',
                              status='ACTIVE')
        eng_node2 = mock.Mock(id='NODE_2', cluster_id='FAKE_ID',
                              status='ACTIVE')
        mock_node.side_effect = [eng_node1, eng_node2]

        action = ca.ClusterAction(cluster.id, 'CLUSTER_RECOVER', self.ctx)
        action.id = 'CLUSTER_ACTION_ID'
        action.inputs = {'check': True,
                         'check_capacity': True}
        mock_action.return_value = 'NODE_RECOVER_ID'
        mock_wait.return_value = (action.RES_OK, 'Everything is Okay')

        def set_status(*args, **kwargs):
            # Simulate a health check that detects a failure on NODE_2.
            eng_node2.status = 'ERROR'
        mock_check = self.patchobject(nm.Node, 'do_check')
        mock_check.side_effect = set_status
        eng_node2.do_check = mock_check

        # do it
        res_code, res_msg = action.do_recover()

        # assertions
        self.assertEqual(action.RES_OK, res_code)
        self.assertEqual('Cluster recovery succeeded.', res_msg)
        cluster.do_recover.assert_called_once_with(action.context)
        mock_action.assert_called_once_with(
            action.context, 'NODE_2', 'NODE_RECOVER',
            name='node_recover_NODE_2',
            cause=consts.CAUSE_DERIVED,
            inputs={'operation': None,
                    'operation_params': None}
        )
        node_calls = [
            mock.call(self.ctx, node_id='NODE_1'),
            mock.call(self.ctx, node_id='NODE_2')
        ]
        mock_node.assert_has_calls(node_calls)
        eng_node1.do_check.assert_called_once_with(self.ctx)
        eng_node2.do_check.assert_called_once_with(self.ctx)
        mock_dep.assert_called_once_with(action.context, ['NODE_RECOVER_ID'],
                                         'CLUSTER_ACTION_ID')
        mock_update.assert_called_once_with(action.context, 'NODE_RECOVER_ID',
                                            {'status': 'READY'})
        mock_start.assert_called_once_with()
        mock_wait.assert_called_once_with()
        cluster.eval_status.assert_called_once_with(
            action.context, consts.CLUSTER_RECOVER)
        mock_desired.assert_called_once_with()

    @mock.patch.object(ca.ClusterAction, '_create_nodes')
    def test_check_capacity_create(self, mock_create, mock_load):
        """Cluster below desired_capacity (1 < 2): one node is created."""
        node1 = mock.Mock(id='NODE_1', cluster_id='FAKE_ID', status='ACTIVE')
        cluster = mock.Mock(id='FAKE_ID', RECOVERING='RECOVERING',
                            desired_capacity=2)
        mock_load.return_value = cluster
        cluster.nodes = [node1]

        action = ca.ClusterAction(cluster.id, 'CLUSTER_RECOVER', self.ctx)
        action._check_capacity()

        mock_create.assert_called_once_with(1)

    @mock.patch.object(su, 'nodes_by_random')
    @mock.patch.object(no.Node, 'get_all_by_cluster')
    @mock.patch.object(ca.ClusterAction, '_delete_nodes')
    def test_check_capacity_delete(self, mock_delete, mock_get,
                                   mock_su, mock_load):
        """Cluster above desired_capacity (2 > 1): the victim chosen by
        scaleutils.nodes_by_random is deleted."""
        node1 = mock.Mock(id='NODE_1', cluster_id='FAKE_ID', status='ACTIVE')
        node2 = mock.Mock(id='NODE_2', cluster_id='FAKE_ID', status='ERROR')
        cluster = mock.Mock(id='FAKE_ID', RECOVERING='RECOVERING',
                            desired_capacity=1)
        mock_load.return_value = cluster
        cluster.nodes = [node1, node2]
        mock_get.return_value = [node1, node2]
        mock_su.return_value = [node2.id]

        action = ca.ClusterAction(cluster.id, 'CLUSTER_RECOVER', self.ctx)
        action._check_capacity()

        mock_get.assert_called_once_with(action.context, cluster.id)
        mock_su.assert_called_once_with([node1, node2], 1)
        mock_delete.assert_called_once_with(['NODE_2'])
| 42.522099
| 78
| 0.638732
| 1,889
| 15,393
| 4.8973
| 0.091583
| 0.055778
| 0.07437
| 0.092963
| 0.836991
| 0.804994
| 0.78078
| 0.778186
| 0.758945
| 0.747379
| 0
| 0.007761
| 0.255051
| 15,393
| 361
| 79
| 42.639889
| 0.798988
| 0.038199
| 0
| 0.719298
| 0
| 0
| 0.124248
| 0
| 0
| 0
| 0
| 0
| 0.207018
| 1
| 0.038596
| false
| 0
| 0.045614
| 0
| 0.087719
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c2de286adc34414e5c05a1f067756907f13fe22
| 8,130
|
py
|
Python
|
test/hummingbot/connector/exchange/coinbase_pro/test_coinbase_pro_in_flight_order.py
|
BGTCapital/hummingbot
|
2c50f50d67cedccf0ef4d8e3f4c8cdce3dc87242
|
[
"Apache-2.0"
] | 3,027
|
2019-04-04T18:52:17.000Z
|
2022-03-30T09:38:34.000Z
|
test/hummingbot/connector/exchange/coinbase_pro/test_coinbase_pro_in_flight_order.py
|
BGTCapital/hummingbot
|
2c50f50d67cedccf0ef4d8e3f4c8cdce3dc87242
|
[
"Apache-2.0"
] | 4,080
|
2019-04-04T19:51:11.000Z
|
2022-03-31T23:45:21.000Z
|
test/hummingbot/connector/exchange/coinbase_pro/test_coinbase_pro_in_flight_order.py
|
BGTCapital/hummingbot
|
2c50f50d67cedccf0ef4d8e3f4c8cdce3dc87242
|
[
"Apache-2.0"
] | 1,342
|
2019-04-04T20:50:53.000Z
|
2022-03-31T15:22:36.000Z
|
from decimal import Decimal
from unittest import TestCase
from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_in_flight_order import CoinbaseProInFlightOrder
from hummingbot.core.event.events import OrderType, TradeType
class CoinbaseProInFlightOrderTests(TestCase):
    """Unit tests for CoinbaseProInFlightOrder.update_with_trade_update()."""

    def setUp(self):
        super().setUp()
        # Fixture trading pair: BTC-USDT.
        self.base_token = "BTC"
        self.quote_token = "USDT"
        self.trading_pair = f"{self.base_token}-{self.quote_token}"

    def test_update_with_partial_trade_event(self):
        """A partial fill (0.1 of 1) updates executed amounts and fee but
        leaves the order open."""
        order = CoinbaseProInFlightOrder(
            client_order_id="OID1",
            exchange_order_id="EOID1",
            trading_pair=self.trading_pair,
            order_type=OrderType.LIMIT,
            trade_type=TradeType.BUY,
            price=Decimal(10000),
            amount=Decimal(1)
        )

        # Coinbase Pro websocket "match" message for a partial fill.
        trade_event_info = {
            "type": "match",
            "trade_id": 1,
            "sequence": 50,
            "maker_order_id": "EOID1",
            "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1",
            "time": "2014-11-07T08:19:27.028459Z",
            "product_id": "BTC-USDT",
            "size": "0.1",
            "price": "10050.0",
            "side": "buy",
            "taker_user_id": "5844eceecf7e803e259d0365",
            "user_id": "5844eceecf7e803e259d0365",
            "taker_profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352",
            "profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352",
            "taker_fee_rate": "0.005"
        }

        update_result = order.update_with_trade_update(trade_event_info)

        self.assertTrue(update_result)
        self.assertFalse(order.is_done)
        self.assertEqual("open", order.last_state)
        self.assertEqual(Decimal(str(trade_event_info["size"])), order.executed_amount_base)
        # Quote amount = size * price; fee = taker_fee_rate * quote amount.
        expected_executed_quote_amount = Decimal(str(trade_event_info["size"])) * Decimal(str(trade_event_info["price"]))
        self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote)
        self.assertEqual(Decimal(trade_event_info["taker_fee_rate"]) * expected_executed_quote_amount, order.fee_paid)
        self.assertEqual(order.quote_asset, order.fee_asset)

    def test_update_with_full_fill_trade_event(self):
        """Two fills (0.1 then 0.9) accumulate executed amounts and fees;
        the order is still not marked done (that needs the 'done' event)."""
        order = CoinbaseProInFlightOrder(
            client_order_id="OID1",
            exchange_order_id="EOID1",
            trading_pair=self.trading_pair,
            order_type=OrderType.LIMIT,
            trade_type=TradeType.BUY,
            price=Decimal(10000),
            amount=Decimal(1)
        )

        # First partial fill: 0.1 at 10050.0, fee rate 0.005.
        trade_event_info = {
            "type": "match",
            "trade_id": 1,
            "sequence": 50,
            "maker_order_id": "EOID1",
            "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1",
            "time": "2014-11-07T08:19:27.028459Z",
            "product_id": "BTC-USDT",
            "size": "0.1",
            "price": "10050.0",
            "side": "buy",
            "taker_user_id": "5844eceecf7e803e259d0365",
            "user_id": "5844eceecf7e803e259d0365",
            "taker_profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352",
            "profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352",
            "taker_fee_rate": "0.005"
        }

        update_result = order.update_with_trade_update(trade_event_info)

        self.assertTrue(update_result)
        self.assertFalse(order.is_done)
        self.assertEqual("open", order.last_state)
        self.assertEqual(Decimal(str(trade_event_info["size"])), order.executed_amount_base)
        expected_executed_quote_amount = Decimal(str(trade_event_info["size"])) * Decimal(
            str(trade_event_info["price"]))
        self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote)
        expected_partial_event_fee = (Decimal(trade_event_info["taker_fee_rate"]) *
                                      expected_executed_quote_amount)
        self.assertEqual(expected_partial_event_fee, order.fee_paid)

        # Completing fill: remaining 0.9 at 10050.0, different fee rate.
        complete_event_info = {
            "type": "match",
            "trade_id": 2,
            "sequence": 50,
            "maker_order_id": "EOID1",
            "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1",
            "time": "2014-11-07T08:19:27.028459Z",
            "product_id": "BTC-USDT",
            "size": "0.9",
            "price": "10050.0",
            "side": "buy",
            "taker_user_id": "5844eceecf7e803e259d0365",
            "user_id": "5844eceecf7e803e259d0365",
            "taker_profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352",
            "profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352",
            "taker_fee_rate": "0.001"
        }

        update_result = order.update_with_trade_update(complete_event_info)

        self.assertTrue(update_result)
        # orders are marked as done with the done event
        self.assertFalse(order.is_done)
        self.assertEqual("open", order.last_state)
        self.assertEqual(order.amount, order.executed_amount_base)
        expected_executed_quote_amount += Decimal(str(complete_event_info["size"])) * Decimal(
            str(complete_event_info["price"]))
        self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote)
        # Fees accumulate across fills, each at its own taker_fee_rate.
        expected_complete_event_fee = (Decimal(complete_event_info["taker_fee_rate"]) *
                                       Decimal(str(complete_event_info["size"])) *
                                       Decimal(str(complete_event_info["price"])))
        self.assertEqual(expected_partial_event_fee + expected_complete_event_fee, order.fee_paid)

    def test_update_with_repeated_trade_id_is_ignored(self):
        """A second event reusing trade_id 1 is rejected (returns False) and
        leaves all order state from the first event untouched."""
        order = CoinbaseProInFlightOrder(
            client_order_id="OID1",
            exchange_order_id="EOID1",
            trading_pair=self.trading_pair,
            order_type=OrderType.LIMIT,
            trade_type=TradeType.BUY,
            price=Decimal(10000),
            amount=Decimal(1)
        )

        trade_event_info = {
            "type": "match",
            "trade_id": 1,
            "sequence": 50,
            "maker_order_id": "EOID1",
            "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1",
            "time": "2014-11-07T08:19:27.028459Z",
            "product_id": "BTC-USDT",
            "size": "0.1",
            "price": "10050.0",
            "side": "buy",
            "taker_user_id": "5844eceecf7e803e259d0365",
            "user_id": "5844eceecf7e803e259d0365",
            "taker_profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352",
            "profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352",
            "taker_fee_rate": "0.005"
        }

        update_result = order.update_with_trade_update(trade_event_info)
        self.assertTrue(update_result)

        # Duplicate: same trade_id as above, different size/fee.
        complete_event_info = {
            "type": "match",
            "trade_id": 1,
            "sequence": 50,
            "maker_order_id": "EOID1",
            "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1",
            "time": "2014-11-07T08:19:27.028459Z",
            "product_id": "BTC-USDT",
            "size": "0.9",
            "price": "10050.0",
            "side": "buy",
            "taker_user_id": "5844eceecf7e803e259d0365",
            "user_id": "5844eceecf7e803e259d0365",
            "taker_profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352",
            "profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352",
            "taker_fee_rate": "0.001"
        }

        update_result = order.update_with_trade_update(complete_event_info)

        self.assertFalse(update_result)
        self.assertFalse(order.is_done)
        self.assertEqual("open", order.last_state)
        self.assertEqual(Decimal(str(trade_event_info["size"])), order.executed_amount_base)
        expected_executed_quote_amount = Decimal(str(trade_event_info["size"])) * Decimal(
            str(trade_event_info["price"]))
        self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote)
        self.assertEqual(Decimal(trade_event_info["taker_fee_rate"]) * expected_executed_quote_amount, order.fee_paid)
| 42.34375
| 121
| 0.615744
| 880
| 8,130
| 5.3625
| 0.125
| 0.051494
| 0.053401
| 0.062937
| 0.873914
| 0.862683
| 0.84679
| 0.838101
| 0.838101
| 0.838101
| 0
| 0.11364
| 0.265068
| 8,130
| 191
| 122
| 42.565445
| 0.676151
| 0.005535
| 0
| 0.791667
| 0
| 0
| 0.243598
| 0.117654
| 0
| 0
| 0
| 0
| 0.154762
| 1
| 0.02381
| false
| 0
| 0.02381
| 0
| 0.053571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c88ed93577608884e91b634de8cb34a5d7f63f6
| 83,872
|
py
|
Python
|
tests/client_test.py
|
alexgolec/td-ameritrade-api
|
e791616c27c983292f7ab5acc7b07cbabad25746
|
[
"MIT"
] | null | null | null |
tests/client_test.py
|
alexgolec/td-ameritrade-api
|
e791616c27c983292f7ab5acc7b07cbabad25746
|
[
"MIT"
] | null | null | null |
tests/client_test.py
|
alexgolec/td-ameritrade-api
|
e791616c27c983292f7ab5acc7b07cbabad25746
|
[
"MIT"
] | null | null | null |
import asyncio
import datetime
import logging
import os
import pytest
import pytz
import unittest
from unittest.mock import ANY, MagicMock, Mock, patch
from tda.client import AsyncClient, Client
from tda.orders.generic import OrderBuilder
from .utils import AsyncMagicMock, ResyncProxy, no_duplicates
# Constants
# Fixture identifiers substituted into request URLs by make_url().
API_KEY = '1234567890'
ACCOUNT_ID = 100000
ORDER_ID = 200000
SAVED_ORDER_ID = 300000
CUSIP = '000919239'
MARKET = 'EQUITY'
INDEX = '$SPX.X'
SYMBOL = 'AAPL'
TRANSACTION_ID = 400000
WATCHLIST_ID = 5000000

# Earliest datetime used as a default lower bound, with its ISO-8601
# rendering and millisecond epoch timestamp.
MIN_DATETIME = datetime.datetime(year=1971, month=1, day=1)
MIN_ISO = '1971-01-01T00:00:00+0000'
MIN_TIMESTAMP_MILLIS = int(MIN_DATETIME.timestamp()) * 1000

# Fixed "current" moment (see mockdatetime below) plus its string forms.
NOW_DATETIME = datetime.datetime(2020, 1, 2, 3, 4, 5)
NOW_DATE = datetime.date(2020, 1, 2)
NOW_DATETIME_ISO = '2020-01-02T03:04:05+0000'
NOW_DATE_ISO = '2020-01-02'
NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS = \
    int((NOW_DATETIME + datetime.timedelta(days=7)).timestamp()) * 1000
class mockdatetime(datetime.datetime):
    """datetime.datetime stand-in whose utcnow() is pinned to NOW_DATETIME.

    Patched in place of ``tda.client.base.datetime.datetime`` so tests can
    assert exact timestamp parameters.
    """

    @classmethod
    def utcnow(cls):
        # Deterministic "now" for reproducible assertions.
        return NOW_DATETIME
# A fixed earlier moment with an attached pytz timezone.
# NOTE(review): passing a pytz zone via tzinfo= yields the zone's LMT
# offset (-04:56 here, matching EARLIER_ISO) rather than standard EST —
# presumably intentional for these fixtures; confirm against the tests.
EARLIER_DATETIME = datetime.datetime(2001, 1, 2, 3, 4, 5,
                                     tzinfo=pytz.timezone('America/New_York'))
EARLIER_ISO = '2001-01-02T03:04:05-0456'
EARLIER_MILLIS = 978422405000
EARLIER_DATE_STR = '2001-01-02'
class _TestClient:
"""
Test suite used for both Client and AsyncClient
"""
def setUp(self):
self.mock_session = self.magicmock_class()
self.client = self.client_class(API_KEY, self.mock_session)
# Set the logging level to DEBUG to force all lazily-evaluated messages
# to be evaluated
self.client.logger.setLevel('DEBUG')
def make_url(self, path):
path = path.format(
accountId=ACCOUNT_ID,
orderId=ORDER_ID,
savedOrderId=SAVED_ORDER_ID,
cusip=CUSIP,
market=MARKET,
index=INDEX,
symbol=SYMBOL,
transactionId=TRANSACTION_ID,
watchlistId=WATCHLIST_ID)
return 'https://api.tdameritrade.com' + path
# Generic functionality
def test_set_timeout(self):
timeout = 'dummy'
self.client.set_timeout(timeout)
self.assertEqual(timeout, self.client.session.timeout)
# get_order
def test_get_order(self):
self.client.get_order(ORDER_ID, ACCOUNT_ID)
self.mock_session.get.assert_called_once_with(
self.make_url('/v1/accounts/{accountId}/orders/{orderId}'),
params={})
def test_get_order_str(self):
self.client.get_order(str(ORDER_ID), str(ACCOUNT_ID))
self.mock_session.get.assert_called_once_with(
self.make_url('/v1/accounts/{accountId}/orders/{orderId}'),
params={})
# cancel_order
def test_cancel_order(self):
self.client.cancel_order(ORDER_ID, ACCOUNT_ID)
self.mock_session.delete.assert_called_once_with(
self.make_url('/v1/accounts/{accountId}/orders/{orderId}'))
def test_cancel_order_str(self):
self.client.cancel_order(str(ORDER_ID), str(ACCOUNT_ID))
self.mock_session.delete.assert_called_once_with(
self.make_url('/v1/accounts/{accountId}/orders/{orderId}'))
# get_orders_by_path
@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_path_vanilla(self):
self.client.get_orders_by_path(ACCOUNT_ID)
self.mock_session.get.assert_called_once_with(
self.make_url('/v1/accounts/{accountId}/orders'), params={
'fromEnteredTime': MIN_ISO,
'toEnteredTime': NOW_DATETIME_ISO
})
@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_path_vanilla_str(self):
self.client.get_orders_by_path(str(ACCOUNT_ID))
self.mock_session.get.assert_called_once_with(
self.make_url('/v1/accounts/{accountId}/orders'), params={
'fromEnteredTime': MIN_ISO,
'toEnteredTime': NOW_DATETIME_ISO
})
@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_path_from_not_datetime(self):
with self.assertRaises(ValueError) as cm:
self.client.get_orders_by_path(
ACCOUNT_ID, from_entered_datetime='2020-01-01')
self.assertEqual(str(cm.exception),
"expected type 'datetime.datetime' for " +
"from_entered_datetime, got 'builtins.str'")
@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_path_to_not_datetime(self):
with self.assertRaises(ValueError) as cm:
self.client.get_orders_by_path(
ACCOUNT_ID, to_entered_datetime='2020-01-01')
self.assertEqual(str(cm.exception),
"expected type 'datetime.datetime' for " +
"to_entered_datetime, got 'builtins.str'")
@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_path_max_results(self):
self.client.get_orders_by_path(ACCOUNT_ID, max_results=100)
self.mock_session.get.assert_called_once_with(
self.make_url('/v1/accounts/{accountId}/orders'), params={
'fromEnteredTime': MIN_ISO,
'toEnteredTime': NOW_DATETIME_ISO,
'maxResults': 100,
})
@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_path_from_entered_datetime(self):
self.client.get_orders_by_path(
ACCOUNT_ID, from_entered_datetime=EARLIER_DATETIME)
self.mock_session.get.assert_called_once_with(
self.make_url('/v1/accounts/{accountId}/orders'), params={
'fromEnteredTime': EARLIER_ISO,
'toEnteredTime': NOW_DATETIME_ISO,
})
@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_path_to_entered_datetime(self):
self.client.get_orders_by_path(
ACCOUNT_ID, to_entered_datetime=EARLIER_DATETIME)
self.mock_session.get.assert_called_once_with(
self.make_url('/v1/accounts/{accountId}/orders'), params={
'fromEnteredTime': MIN_ISO,
'toEnteredTime': EARLIER_ISO,
})
@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_path_status_and_statuses(self):
self.assertRaises(ValueError, lambda: self.client.get_orders_by_path(
ACCOUNT_ID, to_entered_datetime=EARLIER_DATETIME,
status='EXPIRED', statuses=[self.client_class.Order.Status.FILLED]))
@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_path_status(self):
self.client.get_orders_by_path(
ACCOUNT_ID, status=self.client_class.Order.Status.FILLED)
self.mock_session.get.assert_called_once_with(
self.make_url('/v1/accounts/{accountId}/orders'), params={
'fromEnteredTime': MIN_ISO,
'toEnteredTime': NOW_DATETIME_ISO,
'status': 'FILLED'
})
@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_path_status_unchecked(self):
self.client.set_enforce_enums(False)
self.client.get_orders_by_path(ACCOUNT_ID, status='FILLED')
self.mock_session.get.assert_called_once_with(
self.make_url('/v1/accounts/{accountId}/orders'), params={
'fromEnteredTime': MIN_ISO,
'toEnteredTime': NOW_DATETIME_ISO,
'status': 'FILLED'
})
@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_path_statuses(self):
self.client.get_orders_by_path(
ACCOUNT_ID, statuses=[
self.client_class.Order.Status.FILLED,
self.client_class.Order.Status.EXPIRED])
self.mock_session.get.assert_called_once_with(
self.make_url('/v1/accounts/{accountId}/orders'), params={
'fromEnteredTime': MIN_ISO,
'toEnteredTime': NOW_DATETIME_ISO,
'status': 'FILLED,EXPIRED'
})
@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_path_statuses_scalar(self):
self.client.get_orders_by_path(
ACCOUNT_ID, statuses=self.client_class.Order.Status.FILLED)
self.mock_session.get.assert_called_once_with(
self.make_url('/v1/accounts/{accountId}/orders'), params={
'fromEnteredTime': MIN_ISO,
'toEnteredTime': NOW_DATETIME_ISO,
'status': 'FILLED'
})
@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_path_statuses_unchecked(self):
self.client.set_enforce_enums(False)
self.client.get_orders_by_path(
ACCOUNT_ID, statuses=['FILLED', 'EXPIRED'])
self.mock_session.get.assert_called_once_with(
self.make_url('/v1/accounts/{accountId}/orders'), params={
'fromEnteredTime': MIN_ISO,
'toEnteredTime': NOW_DATETIME_ISO,
'status': 'FILLED,EXPIRED'
})
# get_orders_by_query

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_query_vanilla(self):
    # Defaults: full datetime range, no status filter, no maxResults.
    self.client.get_orders_by_query()
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/orders'), params={
            'fromEnteredTime': MIN_ISO,
            'toEnteredTime': NOW_DATETIME_ISO
        })

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_query_max_results(self):
    # max_results is forwarded as the 'maxResults' query parameter.
    self.client.get_orders_by_query(max_results=100)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/orders'), params={
            'fromEnteredTime': MIN_ISO,
            'toEnteredTime': NOW_DATETIME_ISO,
            'maxResults': 100,
        })

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_query_from_entered_datetime(self):
    # A caller-supplied start datetime replaces the MIN_ISO default.
    self.client.get_orders_by_query(from_entered_datetime=EARLIER_DATETIME)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/orders'), params={
            'fromEnteredTime': EARLIER_ISO,
            'toEnteredTime': NOW_DATETIME_ISO,
        })

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_query_to_entered_datetime(self):
    # A caller-supplied end datetime replaces the "now" default.
    self.client.get_orders_by_query(to_entered_datetime=EARLIER_DATETIME)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/orders'), params={
            'fromEnteredTime': MIN_ISO,
            'toEnteredTime': EARLIER_ISO,
        })

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_query_status_and_statuses(self):
    # Passing both status and statuses is an error.
    with self.assertRaises(
            ValueError, msg='at most one of status or statuses may be set'):
        self.client.get_orders_by_query(
            to_entered_datetime=EARLIER_DATETIME,
            status='EXPIRED', statuses=[
                self.client_class.Order.Status.FILLED,
                self.client_class.Order.Status.EXPIRED])

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_query_status(self):
    # A Status enum serializes to its name in the query string.
    self.client.get_orders_by_query(
        status=self.client_class.Order.Status.FILLED)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/orders'), params={
            'fromEnteredTime': MIN_ISO,
            'toEnteredTime': NOW_DATETIME_ISO,
            'status': 'FILLED'
        })

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_query_status_unchecked(self):
    # A raw status string passes through when enum checking is disabled.
    self.client.set_enforce_enums(False)
    self.client.get_orders_by_query(status='FILLED')
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/orders'), params={
            'fromEnteredTime': MIN_ISO,
            'toEnteredTime': NOW_DATETIME_ISO,
            'status': 'FILLED'
        })

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_query_statuses(self):
    # A list of Status enums serializes to a comma-joined value.
    self.client.get_orders_by_query(statuses=[
        self.client_class.Order.Status.FILLED,
        self.client_class.Order.Status.EXPIRED])
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/orders'), params={
            'fromEnteredTime': MIN_ISO,
            'toEnteredTime': NOW_DATETIME_ISO,
            'status': 'FILLED,EXPIRED'
        })

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_query_statuses_scalar(self):
    # A bare Status enum is accepted without wrapping in a list.
    self.client.get_orders_by_query(
        statuses=self.client_class.Order.Status.FILLED)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/orders'), params={
            'fromEnteredTime': MIN_ISO,
            'toEnteredTime': NOW_DATETIME_ISO,
            'status': 'FILLED'
        })

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_orders_by_query_statuses_unchecked(self):
    # Raw strings in the statuses list pass through when enums are unenforced.
    self.client.set_enforce_enums(False)
    self.client.get_orders_by_query(statuses=['FILLED', 'EXPIRED'])
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/orders'), params={
            'fromEnteredTime': MIN_ISO,
            'toEnteredTime': NOW_DATETIME_ISO,
            'status': 'FILLED,EXPIRED'
        })
# place_order

def test_place_order(self):
    """A plain dict order spec is POSTed to the orders endpoint unmodified."""
    spec = {'order': 'spec'}
    self.client.place_order(ACCOUNT_ID, spec)
    self.mock_session.post.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/orders'), json=spec)

def test_place_order_order_builder(self):
    """An OrderBuilder is flattened to its dict form before POSTing."""
    builder = OrderBuilder(enforce_enums=False).set_order_type('LIMIT')
    self.client.place_order(ACCOUNT_ID, builder)
    self.mock_session.post.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/orders'),
        json={'orderType': 'LIMIT'})

def test_place_order_str(self):
    """String account IDs are accepted just like numeric ones."""
    spec = {'order': 'spec'}
    self.client.place_order(str(ACCOUNT_ID), spec)
    self.mock_session.post.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/orders'), json=spec)
# replace_order

def test_replace_order(self):
    # A dict order spec is PUT to the per-order endpoint unmodified.
    order_spec = {'order': 'spec'}
    self.client.replace_order(ACCOUNT_ID, ORDER_ID, order_spec)
    self.mock_session.put.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/orders/{orderId}'),
        json=order_spec)

def test_replace_order_order_builder(self):
    # An OrderBuilder is flattened to its dict form before PUTting.
    order_spec = OrderBuilder(enforce_enums=False).set_order_type('LIMIT')
    expected_spec = {'orderType': 'LIMIT'}
    self.client.replace_order(ACCOUNT_ID, ORDER_ID, order_spec)
    self.mock_session.put.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/orders/{orderId}'),
        json=expected_spec)

def test_replace_order_str(self):
    # String account and order IDs are accepted like numeric ones.
    order_spec = {'order': 'spec'}
    self.client.replace_order(str(ACCOUNT_ID), str(ORDER_ID), order_spec)
    self.mock_session.put.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/orders/{orderId}'),
        json=order_spec)
# create_saved_order

def test_create_saved_order(self):
    # A dict order spec is POSTed to the savedorders endpoint unmodified.
    order_spec = {'order': 'spec'}
    self.client.create_saved_order(ACCOUNT_ID, order_spec)
    self.mock_session.post.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/savedorders'),
        json=order_spec)

def test_create_saved_order_order_builder(self):
    # An OrderBuilder is flattened to its dict form before POSTing.
    order_spec = OrderBuilder(enforce_enums=False).set_order_type('LIMIT')
    expected_spec = {'orderType': 'LIMIT'}
    self.client.create_saved_order(ACCOUNT_ID, order_spec)
    self.mock_session.post.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/savedorders'),
        json=expected_spec)

def test_create_saved_order_str(self):
    # String account IDs are accepted like numeric ones.
    order_spec = {'order': 'spec'}
    self.client.create_saved_order(str(ACCOUNT_ID), order_spec)
    self.mock_session.post.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/savedorders'),
        json=order_spec)
# delete_saved_order

def test_delete_saved_order(self):
    # DELETE is issued against the per-saved-order endpoint.
    self.client.delete_saved_order(ACCOUNT_ID, SAVED_ORDER_ID)
    self.mock_session.delete.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/savedorders/{savedOrderId}'))

def test_delete_saved_order_str(self):
    # String account and saved-order IDs are accepted like numeric ones.
    self.client.delete_saved_order(str(ACCOUNT_ID), str(SAVED_ORDER_ID))
    self.mock_session.delete.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/savedorders/{savedOrderId}'))
# get_saved_order
def test_get_saved_order(self):
    # GET against the per-saved-order endpoint with no query params.
    self.client.get_saved_order(ACCOUNT_ID, SAVED_ORDER_ID)
    self.mock_session.get.assert_called_once_with(
        self.make_url(
            '/v1/accounts/{accountId}/savedorders/{savedOrderId}'),
        params={})

def test_get_saved_order_str(self):
    # String account and saved-order IDs are accepted like numeric ones.
    self.client.get_saved_order(str(ACCOUNT_ID), str(SAVED_ORDER_ID))
    self.mock_session.get.assert_called_once_with(
        self.make_url(
            '/v1/accounts/{accountId}/savedorders/{savedOrderId}'),
        params={})
# get_saved_orders_by_path

def test_get_saved_orders_by_path(self):
    # GET against the savedorders collection with no query params.
    self.client.get_saved_orders_by_path(ACCOUNT_ID)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/savedorders'), params={})

def test_get_saved_orders_by_path_str(self):
    # String account IDs are accepted like numeric ones.
    self.client.get_saved_orders_by_path(str(ACCOUNT_ID))
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/savedorders'), params={})
# replace_saved_order

def test_replace_saved_order(self):
    # A dict order spec is PUT to the per-saved-order endpoint unmodified.
    order_spec = {'order': 'spec'}
    self.client.replace_saved_order(ACCOUNT_ID, SAVED_ORDER_ID, order_spec)
    self.mock_session.put.assert_called_once_with(
        self.make_url(
            '/v1/accounts/{accountId}/savedorders/{savedOrderId}'),
        json=order_spec)

def test_replace_saved_order_order_builder(self):
    # An OrderBuilder is flattened to its dict form before PUTting.
    order_spec = OrderBuilder(enforce_enums=False).set_order_type('LIMIT')
    expected_spec = {'orderType': 'LIMIT'}
    self.client.replace_saved_order(ACCOUNT_ID, SAVED_ORDER_ID, order_spec)
    self.mock_session.put.assert_called_once_with(
        self.make_url(
            '/v1/accounts/{accountId}/savedorders/{savedOrderId}'),
        json=expected_spec)

def test_replace_saved_order_str(self):
    # String account and saved-order IDs are accepted like numeric ones.
    order_spec = {'order': 'spec'}
    self.client.replace_saved_order(
        str(ACCOUNT_ID), str(SAVED_ORDER_ID), order_spec)
    self.mock_session.put.assert_called_once_with(
        self.make_url(
            '/v1/accounts/{accountId}/savedorders/{savedOrderId}'),
        json=order_spec)
# get_account

def test_get_account(self):
    # No fields => empty query params.
    self.client.get_account(ACCOUNT_ID)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}'), params={})

def test_get_account_str(self):
    # String account IDs are accepted like numeric ones.
    self.client.get_account(str(ACCOUNT_ID))
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}'), params={})

def test_get_account_fields(self):
    # A list of Fields enums serializes to a comma-joined lowercase value.
    self.client.get_account(ACCOUNT_ID, fields=[
        self.client_class.Account.Fields.POSITIONS,
        self.client_class.Account.Fields.ORDERS])
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}'),
        params={'fields': 'positions,orders'})

def test_get_account_fields_scalar(self):
    # A bare Fields enum is accepted without wrapping in a list.
    self.client.get_account(
        ACCOUNT_ID, fields=self.client_class.Account.Fields.POSITIONS)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}'),
        params={'fields': 'positions'})

def test_get_account_fields_unchecked(self):
    # Raw field strings pass through when enum checking is disabled.
    self.client.set_enforce_enums(False)
    self.client.get_account(ACCOUNT_ID, fields=['positions', 'orders'])
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}'),
        params={'fields': 'positions,orders'})
# get_accounts

def test_get_accounts(self):
    """No fields => empty query params."""
    self.client.get_accounts()
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts'), params={})

def test_get_accounts_fields(self):
    """A list of Fields enums serializes to a comma-joined lowercase value."""
    requested = [
        self.client_class.Account.Fields.POSITIONS,
        self.client_class.Account.Fields.ORDERS]
    self.client.get_accounts(fields=requested)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts'),
        params={'fields': 'positions,orders'})

def test_get_accounts_fields_scalar(self):
    """A bare Fields enum is accepted without wrapping in a list."""
    self.client.get_accounts(
        fields=self.client_class.Account.Fields.POSITIONS)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts'),
        params={'fields': 'positions'})

def test_get_accounts_fields_unchecked(self):
    """Raw field strings pass through when enum checking is disabled."""
    self.client.set_enforce_enums(False)
    self.client.get_accounts(fields=['positions', 'orders'])
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts'),
        params={'fields': 'positions,orders'})
# search_instruments

def test_search_instruments(self):
    # Multiple symbols serialize to a comma-joined 'symbol' value.
    self.client.search_instruments(
        ['AAPL', 'MSFT'], self.client_class.Instrument.Projection.FUNDAMENTAL)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/instruments'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL,MSFT',
            'projection': 'fundamental'})

def test_search_instruments_one_instrument(self):
    # A bare symbol string is accepted without wrapping in a list.
    self.client.search_instruments(
        'AAPL', self.client_class.Instrument.Projection.FUNDAMENTAL)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/instruments'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'projection': 'fundamental'})

def test_search_instruments_unchecked(self):
    # A raw projection string passes through when enums are unenforced.
    self.client.set_enforce_enums(False)
    self.client.search_instruments(['AAPL', 'MSFT'], 'fundamental')
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/instruments'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL,MSFT',
            'projection': 'fundamental'})
# get_instrument

def test_get_instrument(self):
    # GET against the per-CUSIP endpoint carries only the API key.
    self.client.get_instrument(CUSIP)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/instruments/{cusip}'),
        params={'apikey': API_KEY})

def test_get_instrument_cusip_must_be_string(self):
    # Integer CUSIPs are rejected (leading zeroes would be lost).
    msg = 'CUSIPs must be passed as strings to preserve leading zeroes'
    with self.assertRaises(ValueError, msg=msg):
        self.client.get_instrument(123456)
# get_hours_for_multiple_markets

def test_get_hours_for_multiple_markets_datetime(self):
    # Markets enums join to 'EQUITY,BOND'; datetime collapses to a date.
    self.client.get_hours_for_multiple_markets([
        self.client_class.Markets.EQUITY,
        self.client_class.Markets.BOND], NOW_DATETIME)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/hours'), params={
            'apikey': API_KEY,
            'markets': 'EQUITY,BOND',
            'date': NOW_DATE_ISO})

def test_get_hours_for_multiple_markets_single_market(self):
    # A bare Markets enum is accepted without wrapping in a list.
    self.client.get_hours_for_multiple_markets(
        self.client_class.Markets.EQUITY, NOW_DATETIME)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/hours'), params={
            'apikey': API_KEY,
            'markets': 'EQUITY',
            'date': NOW_DATE_ISO})

def test_get_hours_for_multiple_markets_date(self):
    # A datetime.date is accepted in place of a datetime.
    self.client.get_hours_for_multiple_markets([
        self.client_class.Markets.EQUITY,
        self.client_class.Markets.BOND], NOW_DATE)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/hours'), params={
            'apikey': API_KEY,
            'markets': 'EQUITY,BOND',
            'date': NOW_DATE_ISO})

def test_get_hours_for_multiple_markets_str(self):
    # A string date is rejected with a descriptive type error message.
    with self.assertRaises(ValueError) as cm:
        self.client.get_hours_for_multiple_markets([
            self.client_class.Markets.EQUITY,
            self.client_class.Markets.BOND], '2020-01-01')
    self.assertEqual(str(cm.exception),
                     "expected type in (datetime.date, datetime.datetime) "
                     "for date, got 'builtins.str'")

def test_get_hours_for_multiple_markets_unchecked(self):
    # Raw market strings pass through when enum checking is disabled.
    self.client.set_enforce_enums(False)
    self.client.get_hours_for_multiple_markets(
        ['EQUITY', 'BOND'], NOW_DATETIME)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/hours'), params={
            'apikey': API_KEY,
            'markets': 'EQUITY,BOND',
            'date': NOW_DATE_ISO})
# get_hours_for_single_market

def test_get_hours_for_single_market_datetime(self):
    # The market is part of the URL; the datetime collapses to a date param.
    self.client.get_hours_for_single_market(
        self.client_class.Markets.EQUITY, NOW_DATETIME)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/{market}/hours'), params={
            'apikey': API_KEY,
            'date': NOW_DATE_ISO})

def test_get_hours_for_single_market_date(self):
    # A datetime.date is accepted in place of a datetime.
    self.client.get_hours_for_single_market(
        self.client_class.Markets.EQUITY, NOW_DATE)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/{market}/hours'), params={
            'apikey': API_KEY,
            'date': NOW_DATE_ISO})

def test_get_hours_for_single_market_str(self):
    # A string date is rejected with a descriptive type error message.
    with self.assertRaises(ValueError) as cm:
        self.client.get_hours_for_single_market(
            self.client_class.Markets.EQUITY, '2020-01-01')
    self.assertEqual(str(cm.exception),
                     "expected type in (datetime.date, datetime.datetime) for " +
                     "date, got 'builtins.str'")

def test_get_hours_for_single_market_unchecked(self):
    # A raw market string passes through when enum checking is disabled.
    self.client.set_enforce_enums(False)
    self.client.get_hours_for_single_market('EQUITY', NOW_DATETIME)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/{market}/hours'), params={
            'apikey': API_KEY,
            'date': NOW_DATE_ISO})
# get_movers

def test_get_movers(self):
    """Direction and Change enums map to lowercase query values."""
    self.client.get_movers(
        INDEX,
        self.client_class.Movers.Direction.UP,
        self.client_class.Movers.Change.PERCENT)
    expected = {
        'apikey': API_KEY,
        'direction': 'up',
        'change': 'percent'}
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/{index}/movers'), params=expected)

def test_get_movers_unchecked(self):
    """Raw strings pass through when enum enforcement is off."""
    self.client.set_enforce_enums(False)
    self.client.get_movers(INDEX, 'up', 'percent')
    expected = {
        'apikey': API_KEY,
        'direction': 'up',
        'change': 'percent'}
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/{index}/movers'), params=expected)
# get_option_chain
# Each test below exercises exactly one optional parameter of
# get_option_chain and asserts the resulting query-string key/value.

def test_get_option_chain_vanilla(self):
    # Only the API key and symbol are sent by default.
    self.client.get_option_chain('AAPL')
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL'})

def test_get_option_chain_contract_type(self):
    # ContractType enum -> 'contractType' param.
    self.client.get_option_chain(
        'AAPL', contract_type=self.client_class.Options.ContractType.PUT)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'contractType': 'PUT'})

def test_get_option_chain_contract_type_unchecked(self):
    # Raw string accepted with enum enforcement off.
    self.client.set_enforce_enums(False)
    self.client.get_option_chain('AAPL', contract_type='PUT')
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'contractType': 'PUT'})

def test_get_option_chain_strike_count(self):
    # strike_count -> 'strikeCount' param.
    self.client.get_option_chain('AAPL', strike_count=100)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'strikeCount': 100})

def test_get_option_chain_include_quotes(self):
    # include_quotes -> 'includeQuotes' param.
    self.client.get_option_chain('AAPL', include_quotes=True)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'includeQuotes': True})

def test_get_option_chain_strategy(self):
    # Strategy enum -> 'strategy' param.
    self.client.get_option_chain(
        'AAPL', strategy=self.client_class.Options.Strategy.STRANGLE)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'strategy': 'STRANGLE'})

def test_get_option_chain_strategy_unchecked(self):
    # Raw string accepted with enum enforcement off.
    self.client.set_enforce_enums(False)
    self.client.get_option_chain('AAPL', strategy='STRANGLE')
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'strategy': 'STRANGLE'})

def test_get_option_chain_interval(self):
    # interval -> 'interval' param.
    self.client.get_option_chain('AAPL', interval=10.0)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'interval': 10.0})

def test_get_option_chain_strike(self):
    # strike -> 'strike' param.
    self.client.get_option_chain('AAPL', strike=123)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'strike': 123})

def test_get_option_chain_strike_range(self):
    # StrikeRange enum serializes to its short code ('ITM') as 'range'.
    self.client.get_option_chain(
        'AAPL', strike_range=self.client_class.Options.StrikeRange.IN_THE_MONEY)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'range': 'ITM'})

def test_get_option_chain_strike_range_unchecked(self):
    # Raw short code accepted with enum enforcement off.
    self.client.set_enforce_enums(False)
    self.client.get_option_chain('AAPL', strike_range='ITM')
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'range': 'ITM'})

def test_get_option_chain_from_date_datetime(self):
    # A datetime collapses to its date for 'fromDate'.
    self.client.get_option_chain(
        'AAPL', from_date=NOW_DATETIME)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'fromDate': NOW_DATE_ISO})

def test_get_option_chain_from_date_date(self):
    # A datetime.date is accepted directly for 'fromDate'.
    self.client.get_option_chain('AAPL', from_date=NOW_DATE)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'fromDate': NOW_DATE_ISO})

def test_get_option_chain_from_date_str(self):
    # A string from_date is rejected with a descriptive type error.
    with self.assertRaises(ValueError) as cm:
        self.client.get_option_chain('AAPL', from_date='2020-01-01')
    self.assertEqual(str(cm.exception),
                     "expected type in (datetime.date, datetime.datetime) for " +
                     "from_date, got 'builtins.str'")

def test_get_option_chain_to_date_datetime(self):
    # A datetime collapses to its date for 'toDate'.
    self.client.get_option_chain('AAPL', to_date=NOW_DATETIME)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'toDate': NOW_DATE_ISO})

def test_get_option_chain_to_date_date(self):
    # A datetime.date is accepted directly for 'toDate'.
    self.client.get_option_chain('AAPL', to_date=NOW_DATE)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'toDate': NOW_DATE_ISO})

def test_get_option_chain_to_date_str(self):
    # A string to_date is rejected with a descriptive type error.
    with self.assertRaises(ValueError) as cm:
        self.client.get_option_chain('AAPL', to_date='2020-01-01')
    self.assertEqual(str(cm.exception),
                     "expected type in (datetime.date, datetime.datetime) for " +
                     "to_date, got 'builtins.str'")

def test_get_option_chain_volatility(self):
    # volatility -> 'volatility' param.
    self.client.get_option_chain('AAPL', volatility=40.0)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'volatility': 40.0})

def test_get_option_chain_underlying_price(self):
    # underlying_price -> 'underlyingPrice' param.
    self.client.get_option_chain('AAPL', underlying_price=234.0)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'underlyingPrice': 234.0})

def test_get_option_chain_interest_rate(self):
    # interest_rate -> 'interestRate' param.
    self.client.get_option_chain('AAPL', interest_rate=0.07)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'interestRate': 0.07})

def test_get_option_chain_days_to_expiration(self):
    # days_to_expiration -> 'daysToExpiration' param.
    self.client.get_option_chain('AAPL', days_to_expiration=12)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'daysToExpiration': 12})

def test_get_option_chain_exp_month(self):
    # ExpirationMonth enum serializes to its short code ('JAN').
    self.client.get_option_chain(
        'AAPL', exp_month=self.client_class.Options.ExpirationMonth.JANUARY)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'expMonth': 'JAN'})

def test_get_option_chain_exp_month_unchecked(self):
    # Raw short code accepted with enum enforcement off.
    self.client.set_enforce_enums(False)
    self.client.get_option_chain('AAPL', exp_month='JAN')
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'expMonth': 'JAN'})

def test_get_option_chain_option_type(self):
    # Options.Type enum serializes to its short code ('S' for STANDARD).
    self.client.get_option_chain(
        'AAPL', option_type=self.client_class.Options.Type.STANDARD)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'optionType': 'S'})

def test_get_option_chain_option_type_unchecked(self):
    # Raw short code accepted with enum enforcement off.
    self.client.set_enforce_enums(False)
    self.client.get_option_chain('AAPL', option_type='S')
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/chains'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL',
            'optionType': 'S'})
# get_price_history
# Each test exercises one optional parameter of get_price_history and
# asserts the resulting query-string key/value.

def test_get_price_history_vanilla(self):
    # Only the API key is sent by default; the symbol is in the URL.
    self.client.get_price_history(SYMBOL)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/{symbol}/pricehistory'), params={
            'apikey': API_KEY})

def test_get_price_history_period_type(self):
    # PeriodType enum -> lowercase 'periodType' value.
    self.client.get_price_history(
        SYMBOL, period_type=self.client_class.PriceHistory.PeriodType.MONTH)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/{symbol}/pricehistory'), params={
            'apikey': API_KEY,
            'periodType': 'month'})

def test_get_price_history_period_type_unchecked(self):
    # Raw string accepted with enum enforcement off.
    self.client.set_enforce_enums(False)
    self.client.get_price_history(SYMBOL, period_type='month')
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/{symbol}/pricehistory'), params={
            'apikey': API_KEY,
            'periodType': 'month'})

def test_get_price_history_num_periods(self):
    # Period enum serializes to its numeric value.
    self.client.get_price_history(
        SYMBOL, period=self.client_class.PriceHistory.Period.TEN_DAYS)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/{symbol}/pricehistory'), params={
            'apikey': API_KEY,
            'period': 10})

def test_get_price_history_num_periods_unchecked(self):
    # Raw integer accepted with enum enforcement off.
    self.client.set_enforce_enums(False)
    self.client.get_price_history(SYMBOL, period=10)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/{symbol}/pricehistory'), params={
            'apikey': API_KEY,
            'period': 10})

def test_get_price_history_frequency_type(self):
    # FrequencyType enum -> lowercase 'frequencyType' value.
    self.client.get_price_history(
        SYMBOL,
        frequency_type=self.client_class.PriceHistory.FrequencyType.DAILY)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/{symbol}/pricehistory'), params={
            'apikey': API_KEY,
            'frequencyType': 'daily'})

def test_get_price_history_frequency_type_unchecked(self):
    # Raw string accepted with enum enforcement off.
    self.client.set_enforce_enums(False)
    self.client.get_price_history(SYMBOL, frequency_type='daily')
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/{symbol}/pricehistory'), params={
            'apikey': API_KEY,
            'frequencyType': 'daily'})

def test_get_price_history_frequency(self):
    # Frequency enum serializes to its numeric value.
    self.client.get_price_history(
        SYMBOL,
        frequency=self.client_class.PriceHistory.Frequency.EVERY_FIVE_MINUTES)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/{symbol}/pricehistory'), params={
            'apikey': API_KEY,
            'frequency': 5})

def test_get_price_history_frequency_unchecked(self):
    # Raw integer accepted with enum enforcement off.
    self.client.set_enforce_enums(False)
    self.client.get_price_history(SYMBOL, frequency=5)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/{symbol}/pricehistory'), params={
            'apikey': API_KEY,
            'frequency': 5})

def test_get_price_history_start_datetime(self):
    # start_datetime serializes to epoch milliseconds as 'startDate'.
    self.client.get_price_history(
        SYMBOL, start_datetime=EARLIER_DATETIME)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/{symbol}/pricehistory'), params={
            'apikey': API_KEY,
            'startDate': EARLIER_MILLIS})

def test_get_price_history_start_datetime_str(self):
    # A string start_datetime is rejected with a descriptive type error.
    with self.assertRaises(ValueError) as cm:
        self.client.get_price_history(SYMBOL, start_datetime='2020-01-01')
    self.assertEqual(str(cm.exception),
                     "expected type 'datetime.datetime' for " +
                     "start_datetime, got 'builtins.str'")

def test_get_price_history_end_datetime(self):
    # end_datetime serializes to epoch milliseconds as 'endDate'.
    self.client.get_price_history(SYMBOL, end_datetime=EARLIER_DATETIME)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/{symbol}/pricehistory'), params={
            'apikey': API_KEY,
            'endDate': EARLIER_MILLIS})

def test_get_price_history_end_datetime_str(self):
    # A string end_datetime is rejected with a descriptive type error.
    with self.assertRaises(ValueError) as cm:
        self.client.get_price_history(SYMBOL, end_datetime='2020-01-01')
    self.assertEqual(str(cm.exception),
                     "expected type 'datetime.datetime' for " +
                     "end_datetime, got 'builtins.str'")

def test_get_price_history_need_extended_hours_data(self):
    # need_extended_hours_data -> 'needExtendedHoursData' param.
    self.client.get_price_history(SYMBOL, need_extended_hours_data=True)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/{symbol}/pricehistory'), params={
            'apikey': API_KEY,
            'needExtendedHoursData': True})
# get_price_history_every_minute
# These wrappers pin periodType/period/frequencyType/frequency and default
# the date range to [epoch min, now + 7 days] in epoch milliseconds.

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_minute_vanilla(self):
    # Default window: MIN_TIMESTAMP_MILLIS through now + 7 days.
    self.client.get_price_history_every_minute('AAPL')
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_MINUTE
        'frequency': 1,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_minute_start_datetime(self):
    # Explicit start_datetime replaces the minimum-timestamp default.
    self.client.get_price_history_every_minute(
        'AAPL', start_datetime=EARLIER_DATETIME)
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_MINUTE
        'frequency': 1,
        'startDate': EARLIER_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_minute_end_datetime(self):
    # Explicit end_datetime replaces the now-plus-seven-days default.
    self.client.get_price_history_every_minute(
        'AAPL', end_datetime=EARLIER_DATETIME)
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_MINUTE
        'frequency': 1,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': EARLIER_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_minute_empty_extendedhours(self):
    # need_extended_hours_data=None omits the param entirely.
    self.client.get_price_history_every_minute(
        'AAPL', need_extended_hours_data=None)
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_MINUTE
        'frequency': 1,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_minute_extendedhours(self):
    # need_extended_hours_data=True adds 'needExtendedHoursData'.
    self.client.get_price_history_every_minute(
        'AAPL', need_extended_hours_data=True)
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_MINUTE
        'frequency': 1,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
        'needExtendedHoursData': True,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)
# get_price_history_every_five_minutes
# Same structure as the every-minute tests, with frequency pinned to 5.

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_five_minutes_vanilla(self):
    # Default window: MIN_TIMESTAMP_MILLIS through now + 7 days.
    self.client.get_price_history_every_five_minutes('AAPL')
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_FIVE_MINUTES
        'frequency': 5,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_five_minutes_start_datetime(self):
    # Explicit start_datetime replaces the minimum-timestamp default.
    self.client.get_price_history_every_five_minutes(
        'AAPL', start_datetime=EARLIER_DATETIME)
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_FIVE_MINUTES
        'frequency': 5,
        'startDate': EARLIER_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_five_minutes_end_datetime(self):
    # Explicit end_datetime replaces the now-plus-seven-days default.
    self.client.get_price_history_every_five_minutes(
        'AAPL', end_datetime=EARLIER_DATETIME)
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_FIVE_MINUTES
        'frequency': 5,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': EARLIER_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_five_minutes_empty_extendedhours(self):
    # need_extended_hours_data=None omits the param entirely.
    self.client.get_price_history_every_five_minutes(
        'AAPL', need_extended_hours_data=None)
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_FIVE_MINUTES
        'frequency': 5,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_five_minutes_extendedhours(self):
    # need_extended_hours_data=True adds 'needExtendedHoursData'.
    self.client.get_price_history_every_five_minutes(
        'AAPL', need_extended_hours_data=True)
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_FIVE_MINUTES
        'frequency': 5,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
        'needExtendedHoursData': True,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)
# get_price_history_every_ten_minutes
# Same structure as the every-minute tests, with frequency pinned to 10.

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_ten_minutes_vanilla(self):
    # Default window: MIN_TIMESTAMP_MILLIS through now + 7 days.
    self.client.get_price_history_every_ten_minutes('AAPL')
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_TEN_MINUTES
        'frequency': 10,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_ten_minutes_start_datetime(self):
    # Explicit start_datetime replaces the minimum-timestamp default.
    self.client.get_price_history_every_ten_minutes(
        'AAPL', start_datetime=EARLIER_DATETIME)
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_TEN_MINUTES
        'frequency': 10,
        'startDate': EARLIER_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_ten_minutes_end_datetime(self):
    # Explicit end_datetime replaces the now-plus-seven-days default.
    self.client.get_price_history_every_ten_minutes(
        'AAPL', end_datetime=EARLIER_DATETIME)
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_TEN_MINUTES
        'frequency': 10,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': EARLIER_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_ten_minutes_empty_extendedhours(self):
    # need_extended_hours_data=None omits the param entirely.
    self.client.get_price_history_every_ten_minutes(
        'AAPL', need_extended_hours_data=None)
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_TEN_MINUTES
        'frequency': 10,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)
@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_ten_minutes_extendedhours(self):
self.client.get_price_history_every_ten_minutes(
'AAPL', need_extended_hours_data=True)
params = {
'apikey': API_KEY,
'periodType': 'day',
# ONE_DAY
'period': 1,
'frequencyType': 'minute',
# EVERY_TEN_MINUTES
'frequency': 10,
'startDate': MIN_TIMESTAMP_MILLIS,
'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
'needExtendedHoursData': True,
}
self.mock_session.get.assert_called_once_with(
self.make_url('/v1/marketdata/AAPL/pricehistory'),
params=params)
# get_price_history_every_fifteen_minutes
# Same matrix as the other minute-granularity wrappers, frequency 15.
@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_fifteen_minutes_vanilla(self):
    """Default call: full history window, no extended-hours param."""
    self.client.get_price_history_every_fifteen_minutes('AAPL')
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_FIFTEEN_MINUTES
        'frequency': 15,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_fifteen_minutes_start_datetime(self):
    """start_datetime overrides the default start timestamp."""
    self.client.get_price_history_every_fifteen_minutes(
        'AAPL', start_datetime=EARLIER_DATETIME)
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_FIFTEEN_MINUTES
        'frequency': 15,
        'startDate': EARLIER_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_fifteen_minutes_end_datetime(self):
    """end_datetime overrides the default end timestamp."""
    self.client.get_price_history_every_fifteen_minutes(
        'AAPL', end_datetime=EARLIER_DATETIME)
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_FIFTEEN_MINUTES
        'frequency': 15,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': EARLIER_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_fifteen_minutes_empty_extendedhours(self):
    """need_extended_hours_data=None must omit the param entirely."""
    self.client.get_price_history_every_fifteen_minutes(
        'AAPL', need_extended_hours_data=None)
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_FIFTEEN_MINUTES
        'frequency': 15,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_fifteen_minutes_extendedhours(self):
    """need_extended_hours_data=True is forwarded as a query param."""
    self.client.get_price_history_every_fifteen_minutes(
        'AAPL', need_extended_hours_data=True)
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_FIFTEEN_MINUTES
        'frequency': 15,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
        'needExtendedHoursData': True,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)
# get_price_history_every_thirty_minutes
# Same matrix as the other minute-granularity wrappers, frequency 30.
@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_thirty_minutes_vanilla(self):
    """Default call: full history window, no extended-hours param."""
    self.client.get_price_history_every_thirty_minutes('AAPL')
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_THIRTY_MINUTES
        'frequency': 30,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_thirty_minutes_start_datetime(self):
    """start_datetime overrides the default start timestamp."""
    self.client.get_price_history_every_thirty_minutes(
        'AAPL', start_datetime=EARLIER_DATETIME)
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_THIRTY_MINUTES
        'frequency': 30,
        'startDate': EARLIER_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_thirty_minutes_end_datetime(self):
    """end_datetime overrides the default end timestamp."""
    self.client.get_price_history_every_thirty_minutes(
        'AAPL', end_datetime=EARLIER_DATETIME)
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_THIRTY_MINUTES
        'frequency': 30,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': EARLIER_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_thirty_minutes_empty_extendedhours(self):
    """need_extended_hours_data=None must omit the param entirely."""
    self.client.get_price_history_every_thirty_minutes(
        'AAPL', need_extended_hours_data=None)
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_THIRTY_MINUTES
        'frequency': 30,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_thirty_minutes_extendedhours(self):
    """need_extended_hours_data=True is forwarded as a query param."""
    self.client.get_price_history_every_thirty_minutes(
        'AAPL', need_extended_hours_data=True)
    params = {
        'apikey': API_KEY,
        'periodType': 'day',
        # ONE_DAY
        'period': 1,
        'frequencyType': 'minute',
        # EVERY_THIRTY_MINUTES
        'frequency': 30,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
        'needExtendedHoursData': True,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)
# get_price_history_every_day
# Daily candles span year/TWENTY_YEARS instead of day/ONE_DAY.
@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_day_vanilla(self):
    """Default call: full history window, no extended-hours param."""
    self.client.get_price_history_every_day('AAPL')
    params = {
        'apikey': API_KEY,
        'periodType': 'year',
        # TWENTY_YEARS
        'period': 20,
        'frequencyType': 'daily',
        # DAILY
        'frequency': 1,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_day_start_datetime(self):
    """start_datetime overrides the default start timestamp."""
    self.client.get_price_history_every_day(
        'AAPL', start_datetime=EARLIER_DATETIME)
    params = {
        'apikey': API_KEY,
        'periodType': 'year',
        # TWENTY_YEARS
        'period': 20,
        'frequencyType': 'daily',
        # DAILY
        'frequency': 1,
        'startDate': EARLIER_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_day_end_datetime(self):
    """end_datetime overrides the default end timestamp."""
    self.client.get_price_history_every_day(
        'AAPL', end_datetime=EARLIER_DATETIME)
    params = {
        'apikey': API_KEY,
        'periodType': 'year',
        # TWENTY_YEARS
        'period': 20,
        'frequencyType': 'daily',
        # DAILY
        'frequency': 1,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': EARLIER_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_day_empty_extendedhours(self):
    """need_extended_hours_data=None must omit the param entirely."""
    self.client.get_price_history_every_day(
        'AAPL', need_extended_hours_data=None)
    params = {
        'apikey': API_KEY,
        'periodType': 'year',
        # TWENTY_YEARS
        'period': 20,
        'frequencyType': 'daily',
        # DAILY
        'frequency': 1,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_day_extendedhours(self):
    """need_extended_hours_data=True is forwarded as a query param."""
    self.client.get_price_history_every_day(
        'AAPL', need_extended_hours_data=True)
    params = {
        'apikey': API_KEY,
        'periodType': 'year',
        # TWENTY_YEARS
        'period': 20,
        'frequencyType': 'daily',
        # DAILY
        'frequency': 1,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
        'needExtendedHoursData': True,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)
# get_price_history_every_week
# Weekly candles; frequencyType is 'weekly' with enum value 1.
# NOTE(review): the original '# DAILY' comments here looked like
# copy-paste from the every_day tests; corrected to WEEKLY below.
@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_week_vanilla(self):
    """Default call: full history window, no extended-hours param."""
    self.client.get_price_history_every_week('AAPL')
    params = {
        'apikey': API_KEY,
        'periodType': 'year',
        # TWENTY_YEARS
        'period': 20,
        'frequencyType': 'weekly',
        # WEEKLY
        'frequency': 1,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_week_start_datetime(self):
    """start_datetime overrides the default start timestamp."""
    self.client.get_price_history_every_week(
        'AAPL', start_datetime=EARLIER_DATETIME)
    params = {
        'apikey': API_KEY,
        'periodType': 'year',
        # TWENTY_YEARS
        'period': 20,
        'frequencyType': 'weekly',
        # WEEKLY
        'frequency': 1,
        'startDate': EARLIER_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_week_end_datetime(self):
    """end_datetime overrides the default end timestamp."""
    self.client.get_price_history_every_week(
        'AAPL', end_datetime=EARLIER_DATETIME)
    params = {
        'apikey': API_KEY,
        'periodType': 'year',
        # TWENTY_YEARS
        'period': 20,
        'frequencyType': 'weekly',
        # WEEKLY
        'frequency': 1,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': EARLIER_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_week_empty_extendedhours(self):
    """need_extended_hours_data=None must omit the param entirely."""
    self.client.get_price_history_every_week(
        'AAPL', need_extended_hours_data=None)
    params = {
        'apikey': API_KEY,
        'periodType': 'year',
        # TWENTY_YEARS
        'period': 20,
        'frequencyType': 'weekly',
        # WEEKLY
        'frequency': 1,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)

@patch('tda.client.base.datetime.datetime', mockdatetime)
def test_get_price_history_every_week_extendedhours(self):
    """need_extended_hours_data=True is forwarded as a query param."""
    self.client.get_price_history_every_week(
        'AAPL', need_extended_hours_data=True)
    params = {
        'apikey': API_KEY,
        'periodType': 'year',
        # TWENTY_YEARS
        'period': 20,
        'frequencyType': 'weekly',
        # WEEKLY
        'frequency': 1,
        'startDate': MIN_TIMESTAMP_MILLIS,
        'endDate': NOW_DATETIME_PLUS_SEVEN_DAYS_TIMESTAMP_MILLIS,
        'needExtendedHoursData': True,
    }
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/AAPL/pricehistory'),
        params=params)
# get_quote
def test_get_quote(self):
    """Single-symbol quote hits the per-symbol quotes endpoint."""
    self.client.get_quote(SYMBOL)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/{symbol}/quotes'), params={
            'apikey': API_KEY})

# get_quotes
def test_get_quotes(self):
    """A list of symbols is joined with commas for the bulk endpoint."""
    self.client.get_quotes(['AAPL', 'MSFT'])
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/quotes'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL,MSFT'})

def test_get_quotes_single_symbol(self):
    """A bare string is accepted and not split into characters."""
    self.client.get_quotes('AAPL')
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/marketdata/quotes'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL'})
# get_transaction
def test_get_transaction(self):
    """Fetch one transaction by account and transaction ID."""
    self.client.get_transaction(ACCOUNT_ID, TRANSACTION_ID)
    self.mock_session.get.assert_called_once_with(
        self.make_url(
            '/v1/accounts/{accountId}/transactions/{transactionId}'),
        params={'apikey': API_KEY})

def test_get_transaction_str(self):
    """String IDs behave identically to ints."""
    self.client.get_transaction(str(ACCOUNT_ID), str(TRANSACTION_ID))
    self.mock_session.get.assert_called_once_with(
        self.make_url(
            '/v1/accounts/{accountId}/transactions/{transactionId}'),
        params={'apikey': API_KEY})

# get_transactions
def test_get_transactions(self):
    """Listing with no filters sends only the apikey."""
    self.client.get_transactions(ACCOUNT_ID)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/transactions'), params={
            'apikey': API_KEY})

def test_get_transactions_str(self):
    """String account ID behaves identically to int."""
    self.client.get_transactions(str(ACCOUNT_ID))
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/transactions'), params={
            'apikey': API_KEY})

def test_get_transactions_type(self):
    """Enum transaction_type is serialized to its string value."""
    self.client.get_transactions(
        ACCOUNT_ID,
        transaction_type=self.client_class.Transactions.TransactionType.DIVIDEND)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/transactions'), params={
            'apikey': API_KEY,
            'type': 'DIVIDEND'})

def test_get_transactions_type_unchecked(self):
    """With enum enforcement off, a raw string is passed through."""
    self.client.set_enforce_enums(False)
    self.client.get_transactions(ACCOUNT_ID, transaction_type='DIVIDEND')
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/transactions'), params={
            'apikey': API_KEY,
            'type': 'DIVIDEND'})

def test_get_transactions_symbol(self):
    """Symbol filter is forwarded verbatim."""
    self.client.get_transactions(ACCOUNT_ID, symbol='AAPL')
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/transactions'), params={
            'apikey': API_KEY,
            'symbol': 'AAPL'})

def test_get_transactions_start_date_datetime(self):
    """datetime start_date is serialized to an ISO date string."""
    self.client.get_transactions(ACCOUNT_ID, start_date=NOW_DATETIME)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/transactions'), params={
            'apikey': API_KEY,
            'startDate': NOW_DATE_ISO})

def test_get_transactions_start_date_date(self):
    """date start_date is serialized to an ISO date string."""
    self.client.get_transactions(ACCOUNT_ID, start_date=NOW_DATE)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/transactions'), params={
            'apikey': API_KEY,
            'startDate': NOW_DATE_ISO})

def test_get_transactions_start_date_str(self):
    """A string start_date must be rejected with a descriptive error."""
    with self.assertRaises(ValueError) as cm:
        self.client.get_transactions(ACCOUNT_ID, start_date='2020-01-01')
    self.assertEqual(str(cm.exception),
                     "expected type in (datetime.date, datetime.datetime) for " +
                     "start_date, got 'builtins.str'")

def test_get_transactions_end_date(self):
    # NOTE(review): this is identical to test_get_transactions_end_date_datetime
    # below — likely a duplicate worth consolidating.
    self.client.get_transactions(ACCOUNT_ID, end_date=NOW_DATETIME)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/transactions'), params={
            'apikey': API_KEY,
            'endDate': NOW_DATE_ISO})

def test_get_transactions_end_date_datetime(self):
    """datetime end_date is serialized to an ISO date string."""
    self.client.get_transactions(ACCOUNT_ID, end_date=NOW_DATETIME)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/transactions'), params={
            'apikey': API_KEY,
            'endDate': NOW_DATE_ISO})

def test_get_transactions_end_date_str(self):
    """A string end_date must be rejected with a descriptive error."""
    with self.assertRaises(ValueError) as cm:
        self.client.get_transactions(ACCOUNT_ID, end_date='2020-01-01')
    self.assertEqual(str(cm.exception),
                     "expected type in (datetime.date, datetime.datetime) for " +
                     "end_date, got 'builtins.str'")
# get_preferences
def test_get_preferences(self):
    """GET account preferences with only the apikey param."""
    self.client.get_preferences(ACCOUNT_ID)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/preferences'), params={
            'apikey': API_KEY})

def test_get_preferences_str(self):
    """String account ID behaves identically to int."""
    self.client.get_preferences(str(ACCOUNT_ID))
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/preferences'), params={
            'apikey': API_KEY})
# get_streamer_subscription_keys
def test_get_streamer_subscription_keys(self):
    """Multiple account IDs are joined with commas."""
    self.client.get_streamer_subscription_keys([1000, 2000, 3000])
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/userprincipals/streamersubscriptionkeys'),
        params={
            'apikey': API_KEY,
            'accountIds': '1000,2000,3000'})

def test_get_streamer_subscription_keys_one_account_id(self):
    """A bare int is accepted and stringified."""
    self.client.get_streamer_subscription_keys(1000)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/userprincipals/streamersubscriptionkeys'),
        params={
            'apikey': API_KEY,
            'accountIds': '1000'})

def test_get_streamer_subscription_keys_str(self):
    """String IDs join identically to ints."""
    self.client.get_streamer_subscription_keys(['1000', '2000', '3000'])
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/userprincipals/streamersubscriptionkeys'),
        params={
            'apikey': API_KEY,
            'accountIds': '1000,2000,3000'})
# get_user_principals
def test_get_user_principals_vanilla(self):
    """No fields requested: only the apikey is sent."""
    self.client.get_user_principals()
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/userprincipals'), params={
            'apikey': API_KEY})

def test_get_user_principals_fields(self):
    """A list of field enums is serialized to a comma-joined string."""
    self.client.get_user_principals(
        fields=[
            self.client_class.UserPrincipals.Fields.STREAMER_SUBSCRIPTION_KEYS,
            self.client_class.UserPrincipals.Fields.PREFERENCES])
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/userprincipals'), params={
            'apikey': API_KEY,
            'fields': 'streamerSubscriptionKeys,preferences'})

def test_get_user_principals_one_field(self):
    """A single enum (not wrapped in a list) is accepted."""
    self.client.get_user_principals(
        fields=self.client_class.UserPrincipals.Fields.PREFERENCES)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/userprincipals'), params={
            'apikey': API_KEY,
            'fields': 'preferences'})

def test_get_user_principals_fields_unchecked(self):
    """With enum enforcement off, raw field strings pass through."""
    self.client.set_enforce_enums(False)
    self.client.get_user_principals(
        fields=['streamerSubscriptionKeys', 'preferences'])
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/userprincipals'), params={
            'apikey': API_KEY,
            'fields': 'streamerSubscriptionKeys,preferences'})
# update_preferences
def test_update_preferences(self):
    """Preferences dict is PUT as the JSON body, no query params."""
    preferences = {'wantMoney': True}
    self.client.update_preferences(ACCOUNT_ID, preferences)
    self.mock_session.put.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/preferences'),
        json=preferences)

def test_update_preferences_str(self):
    """String account ID behaves identically to int."""
    preferences = {'wantMoney': True}
    self.client.update_preferences(str(ACCOUNT_ID), preferences)
    self.mock_session.put.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/preferences'),
        json=preferences)
# create_watchlist
def test_create_watchlist(self):
    """Watchlist spec is POSTed as the JSON body."""
    watchlist = {'AAPL': True}
    self.client.create_watchlist(ACCOUNT_ID, watchlist)
    self.mock_session.post.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/watchlists'),
        json=watchlist)

def test_create_watchlist_str(self):
    """String account ID behaves identically to int."""
    watchlist = {'AAPL': True}
    self.client.create_watchlist(str(ACCOUNT_ID), watchlist)
    self.mock_session.post.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/watchlists'),
        json=watchlist)
# delete_watchlist
def test_delete_watchlist(self):
    """DELETE hits the per-watchlist URL with no params or body."""
    # Fix: the original declared an unused local `watchlist` dict in
    # both of these tests (copy-paste from the create tests); deleting
    # a watchlist sends no payload, so the locals are removed.
    self.client.delete_watchlist(ACCOUNT_ID, WATCHLIST_ID)
    self.mock_session.delete.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/watchlists/{watchlistId}'))

def test_delete_watchlist_str(self):
    """String account/watchlist IDs behave identically to ints."""
    self.client.delete_watchlist(str(ACCOUNT_ID), str(WATCHLIST_ID))
    self.mock_session.delete.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/watchlists/{watchlistId}'))
# get_watchlist
def test_get_watchlist(self):
    """GET one watchlist; params dict is present but empty."""
    self.client.get_watchlist(ACCOUNT_ID, WATCHLIST_ID)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/watchlists/{watchlistId}'),
        params={})

def test_get_watchlist_str(self):
    """String IDs behave identically to ints."""
    self.client.get_watchlist(str(ACCOUNT_ID), str(WATCHLIST_ID))
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/watchlists/{watchlistId}'),
        params={})

# get_watchlists_for_multiple_accounts
def test_get_watchlists_for_multiple_accounts(self):
    """Account-agnostic listing endpoint, empty params."""
    self.client.get_watchlists_for_multiple_accounts()
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/watchlists'), params={})

# get_watchlists_for_single_account
def test_get_watchlists_for_single_account(self):
    """Per-account listing endpoint, empty params."""
    self.client.get_watchlists_for_single_account(ACCOUNT_ID)
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/watchlists'), params={})

def test_get_watchlists_for_single_account_str(self):
    """String account ID behaves identically to int."""
    self.client.get_watchlists_for_single_account(str(ACCOUNT_ID))
    self.mock_session.get.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/watchlists'), params={})

# replace_watchlist
def test_replace_watchlist(self):
    """Replacement spec is PUT as the JSON body."""
    watchlist = {'AAPL': True}
    self.client.replace_watchlist(ACCOUNT_ID, WATCHLIST_ID, watchlist)
    self.mock_session.put.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/watchlists/{watchlistId}'),
        json=watchlist)

def test_replace_watchlist_str(self):
    """String IDs behave identically to ints."""
    watchlist = {'AAPL': True}
    self.client.replace_watchlist(
        str(ACCOUNT_ID), str(WATCHLIST_ID), watchlist)
    self.mock_session.put.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/watchlists/{watchlistId}'),
        json=watchlist)

# update_watchlist
def test_update_watchlist(self):
    """Partial update uses PATCH with the JSON body."""
    watchlist = {'AAPL': True}
    self.client.update_watchlist(ACCOUNT_ID, WATCHLIST_ID, watchlist)
    self.mock_session.patch.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/watchlists/{watchlistId}'),
        json=watchlist)

def test_update_watchlist_str(self):
    """String IDs behave identically to ints."""
    watchlist = {'AAPL': True}
    self.client.update_watchlist(
        str(ACCOUNT_ID), str(WATCHLIST_ID), watchlist)
    self.mock_session.patch.assert_called_once_with(
        self.make_url('/v1/accounts/{accountId}/watchlists/{watchlistId}'),
        json=watchlist)
class ClientTest(_TestClient, unittest.TestCase):
    """
    Subclass set to use Client and MagicMock
    """
    # Concrete synchronous client under test and the mock flavor used
    # for its session; consumed by _TestClient's setUp machinery.
    client_class = Client
    magicmock_class = MagicMock
class AsyncClientTest(_TestClient, unittest.TestCase):
    """
    Subclass set to resync AsyncClient and use AsyncMagicMock
    """
    # ResyncProxy adapts the async client so the shared synchronous
    # test bodies in _TestClient can drive it unchanged.
    client_class = ResyncProxy(AsyncClient)
    magicmock_class = AsyncMagicMock

    def test_async_close(self):
        # Smoke test only: verifies close_async_session() can be called
        # without raising; no assertion on the mock is made here.
        self.client.close_async_session()
| 38.002719
| 98
| 0.617167
| 9,197
| 83,872
| 5.269109
| 0.03599
| 0.052621
| 0.053549
| 0.070574
| 0.92596
| 0.899009
| 0.876744
| 0.850598
| 0.820078
| 0.804333
| 0
| 0.010584
| 0.276767
| 83,872
| 2,206
| 99
| 38.019946
| 0.788308
| 0.023941
| 0
| 0.709854
| 0
| 0
| 0.170275
| 0.087647
| 0
| 0
| 0
| 0
| 0.118613
| 1
| 0.114964
| false
| 0.000608
| 0.006691
| 0.000608
| 0.127737
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ce6c7065a2fad0332a49f541db7265c96a70146
| 11,648
|
py
|
Python
|
tests/oserror_aliases_test.py
|
durin42/pyupgrade
|
0e8c31e03e15a13526a6c03b559fdd22dd253a20
|
[
"MIT"
] | null | null | null |
tests/oserror_aliases_test.py
|
durin42/pyupgrade
|
0e8c31e03e15a13526a6c03b559fdd22dd253a20
|
[
"MIT"
] | null | null | null |
tests/oserror_aliases_test.py
|
durin42/pyupgrade
|
0e8c31e03e15a13526a6c03b559fdd22dd253a20
|
[
"MIT"
] | null | null | null |
import pytest
from pyupgrade import _fix_py3_plus
from pyupgrade import FindPy3Plus
# Every OSError alias (IOError, WindowsError, socket.error, ...) in an
# `except` clause should be rewritten to OSError, including when it
# appears inside a tuple alongside other exception types.
@pytest.mark.parametrize('alias', FindPy3Plus.OS_ERROR_ALIASES)
@pytest.mark.parametrize(
    ('tpl', 'expected'),
    (
        # bare alias
        (
            'try:\n'
            ' pass\n'
            'except {alias}:\n'
            ' pass\n',
            'try:\n'
            ' pass\n'
            'except OSError:\n'
            ' pass\n',
        ),
        # one-element tuple collapses to a bare name
        (
            'try:\n'
            ' pass\n'
            'except ({alias},):\n'
            ' pass\n',
            'try:\n'
            ' pass\n'
            'except OSError:\n'
            ' pass\n',
        ),
        # alias deduplicated against an existing OSError in the tuple
        (
            'try:\n'
            ' pass\n'
            'except ({alias}, KeyError, OSError):\n'
            ' pass\n',
            'try:\n'
            ' pass\n'
            'except (OSError, KeyError):\n'
            ' pass\n',
        ),
        # all members collapse to a single OSError
        (
            'try:\n'
            ' pass\n'
            'except ({alias}, OSError, IOError):\n'
            ' pass\n',
            'try:\n'
            ' pass\n'
            'except OSError:\n'
            ' pass\n',
        ),
        # no space between `except` and the tuple
        (
            'try:\n'
            ' pass\n'
            'except({alias}, OSError, IOError):\n'
            ' pass\n',
            'try:\n'
            ' pass\n'
            'except OSError:\n'
            ' pass\n',
        ),
    ),
)
def test_fix_oserror_aliases_try(alias, tpl, expected):
    """Aliases in except clauses are rewritten to OSError."""
    s = tpl.format(alias=alias)
    assert _fix_py3_plus(s) == expected
# Inputs that contain no rewritable alias must pass through unchanged.
@pytest.mark.parametrize(
    's',
    (
        # empty try-except
        'try:\n'
        ' pass\n'
        'except:\n'
        ' pass\n',
        # no exception to rewrite
        'try:\n'
        ' pass\n'
        'except AssertionError:\n'
        ' pass\n',
        # no exception to rewrite
        'try:\n'
        ' pass\n'
        'except ('
        ' AssertionError,'
        '):\n'
        ' pass\n',
        # already correct
        'try:\n'
        ' pass\n'
        'except OSError:\n'
        ' pass\n',
        # already correct
        'try:\n'
        ' pass\n'
        'except (OSError, KeyError):\n'
        ' pass\n',
    ),
)
def test_fix_oserror_aliases_noop(s):
    """Source without aliases is returned byte-for-byte."""
    assert _fix_py3_plus(s) == s
# Module-qualified aliases (e.g. socket.error) must NOT be rewritten
# when they appear outside of an except/raise context.
@pytest.mark.parametrize('imp', FindPy3Plus.OS_ERROR_ALIAS_MODULES)
@pytest.mark.parametrize(
    'tpl',
    (
        # if the error isn't in a try or except it shouldn't be rewritten
        # to avoid false positives
        # Fix: the original had a stray trailing comma after the
        # `error = 3` line, which split this single template into two
        # parametrize cases and left `return error` as a standalone,
        # syntactically invalid template.
        'from {imp} import error\n\n'
        'def foo():\n'
        ' error = 3\n'
        ' return error\n',
        # renaming things for weird reasons
        'from {imp} import error as the_roof\n'
        'raise the_roof()\n',
    ),
)
def test_fix_oserror_aliases_noop_tpl(imp, tpl):
    """Templates with shadowed/renamed aliases are left unchanged."""
    s = tpl.format(imp=imp)
    assert _fix_py3_plus(s) == s
# Module-qualified (`mod.error`) and from-imported (`error`) aliases in
# except clauses, for every module in OS_ERROR_ALIAS_MODULES.
@pytest.mark.parametrize('imp', FindPy3Plus.OS_ERROR_ALIAS_MODULES)
@pytest.mark.parametrize(
    ('tpl', 'expected_tpl'),
    (
        # --- `import mod` / `mod.error` forms ---
        (
            'import {imp}\n\n'
            'try:\n'
            ' pass\n'
            'except {imp}.error:\n'
            ' pass\n',
            'import {imp}\n\n'
            'try:\n'
            ' pass\n'
            'except OSError:\n'
            ' pass\n',
        ),
        (
            'import {imp}\n\n'
            'try:\n'
            ' pass\n'
            'except ({imp}.error,):\n'
            ' pass\n',
            'import {imp}\n\n'
            'try:\n'
            ' pass\n'
            'except OSError:\n'
            ' pass\n',
        ),
        (
            'import {imp}\n\n'
            'try:\n'
            ' pass\n'
            'except ({imp}.error, KeyError, OSError):\n'
            ' pass\n',
            'import {imp}\n\n'
            'try:\n'
            ' pass\n'
            'except (OSError, KeyError):\n'
            ' pass\n',
        ),
        (
            'import {imp}\n\n'
            'try:\n'
            ' pass\n'
            'except ({imp}.error, OSError, IOError):\n'
            ' pass\n',
            'import {imp}\n\n'
            'try:\n'
            ' pass\n'
            'except OSError:\n'
            ' pass\n',
        ),
        (
            'import {imp}\n\n'
            'try:\n'
            ' pass\n'
            'except (OSError, {imp}.error, IOError):\n'
            ' pass\n',
            'import {imp}\n\n'
            'try:\n'
            ' pass\n'
            'except OSError:\n'
            ' pass\n',
        ),
        # multiple except clauses rewritten independently
        (
            'import {imp}\n\n'
            'try:\n'
            ' pass\n'
            'except (OSError, {imp}.error, IOError):\n'
            ' pass\n'
            'except (OSError, {imp}.error, KeyError):\n'
            ' pass\n',
            'import {imp}\n\n'
            'try:\n'
            ' pass\n'
            'except OSError:\n'
            ' pass\n'
            'except (OSError, KeyError):\n'
            ' pass\n',
        ),
        # no space between `except` and the tuple
        (
            'import {imp}\n\n'
            'try:\n'
            ' pass\n'
            'except({imp}.error, OSError, IOError):\n'
            ' pass\n',
            'import {imp}\n\n'
            'try:\n'
            ' pass\n'
            'except OSError:\n'
            ' pass\n',
        ),
        # tuple split across source lines
        (
            'import {imp}\n\n'
            'try:\n'
            ' pass\n'
            'except('
            ' {imp}.error,'
            ' OSError,'
            ' IOError,'
            '):\n'
            ' pass\n',
            'import {imp}\n\n'
            'try:\n'
            ' pass\n'
            'except OSError:\n'
            ' pass\n',
        ),
        # --- `from mod import error` forms ---
        (
            'from {imp} import error\n\n'
            'try:\n'
            ' pass\n'
            'except error:\n'
            ' pass\n',
            'from {imp} import error\n\n'
            'try:\n'
            ' pass\n'
            'except OSError:\n'
            ' pass\n',
        ),
        (
            'from {imp} import error\n\n'
            'try:\n'
            ' pass\n'
            'except (error,):\n'
            ' pass\n',
            'from {imp} import error\n\n'
            'try:\n'
            ' pass\n'
            'except OSError:\n'
            ' pass\n',
        ),
        (
            'from {imp} import error\n\n'
            'try:\n'
            ' pass\n'
            'except (error, KeyError, OSError):\n'
            ' pass\n',
            'from {imp} import error\n\n'
            'try:\n'
            ' pass\n'
            'except (OSError, KeyError):\n'
            ' pass\n',
        ),
        (
            'from {imp} import error\n\n'
            'try:\n'
            ' pass\n'
            'except (error, OSError, IOError):\n'
            ' pass\n',
            'from {imp} import error\n\n'
            'try:\n'
            ' pass\n'
            'except OSError:\n'
            ' pass\n',
        ),
        (
            'from {imp} import error\n\n'
            'try:\n'
            ' pass\n'
            'except (OSError, error, OSError):\n'
            ' pass\n',
            'from {imp} import error\n\n'
            'try:\n'
            ' pass\n'
            'except OSError:\n'
            ' pass\n',
        ),
        (
            'from {imp} import error\n\n'
            'try:\n'
            ' pass\n'
            'except (OSError, error, OSError):\n'
            ' pass\n'
            'except (OSError, error, KeyError):\n'
            ' pass\n',
            'from {imp} import error\n\n'
            'try:\n'
            ' pass\n'
            'except OSError:\n'
            ' pass\n'
            'except (OSError, KeyError):\n'
            ' pass\n',
        ),
        (
            'from {imp} import error\n\n'
            'try:\n'
            ' pass\n'
            'except(error, OSError, IOError):\n'
            ' pass\n',
            'from {imp} import error\n\n'
            'try:\n'
            ' pass\n'
            'except OSError:\n'
            ' pass\n',
        ),
        (
            'from {imp} import error\n\n'
            'try:\n'
            ' pass\n'
            'except('
            ' error,'
            ' OSError,'
            ' IOError,'
            '):\n'
            ' pass\n',
            'from {imp} import error\n\n'
            'try:\n'
            ' pass\n'
            'except OSError:\n'
            ' pass\n',
        ),
    ),
)
def test_fix_oserror_complex_aliases_try(imp, tpl, expected_tpl):
    """Qualified and imported aliases in except clauses become OSError."""
    s, expected = tpl.format(imp=imp), expected_tpl.format(imp=imp)
    assert _fix_py3_plus(s) == expected
# Aliases in `raise` statements — bare, called, with args, and with the
# call split across multiple lines.
@pytest.mark.parametrize('alias', FindPy3Plus.OS_ERROR_ALIASES)
@pytest.mark.parametrize(
    ('tpl', 'expected'),
    (
        ('raise {alias}', 'raise OSError'),
        ('raise {alias}()', 'raise OSError()'),
        ('raise {alias}(1)', 'raise OSError(1)'),
        ('raise {alias}(1, 2)', 'raise OSError(1, 2)'),
        (
            'raise {alias}(\n'
            ' 1,\n'
            ' 2,\n'
            ')',
            'raise OSError(\n'
            ' 1,\n'
            ' 2,\n'
            ')',
        ),
    ),
)
def test_fix_oserror_aliases_raise(alias, tpl, expected):
    """Aliases in raise statements are rewritten to OSError."""
    s = tpl.format(alias=alias)
    assert _fix_py3_plus(s) == expected
# Module-qualified and from-imported aliases in `raise` statements.
@pytest.mark.parametrize('imp', FindPy3Plus.OS_ERROR_ALIAS_MODULES)
@pytest.mark.parametrize(
    ('tpl', 'expected_tpl'),
    (
        # --- `import mod` / `raise mod.error` forms ---
        (
            'import {imp}\n\n'
            'raise {imp}.error\n',
            'import {imp}\n\n'
            'raise OSError\n',
        ),
        (
            'import {imp}\n\n'
            'raise {imp}.error()\n',
            'import {imp}\n\n'
            'raise OSError()\n',
        ),
        (
            'import {imp}\n\n'
            'raise {imp}.error(1)\n',
            'import {imp}\n\n'
            'raise OSError(1)\n',
        ),
        (
            'import {imp}\n\n'
            'raise {imp}.error(1, 2)\n',
            'import {imp}\n\n'
            'raise OSError(1, 2)\n',
        ),
        # call split across lines
        (
            'import {imp}\n\n'
            'raise {imp}.error(\n'
            ' 1,\n'
            ' 2,\n'
            ')',
            'import {imp}\n\n'
            'raise OSError(\n'
            ' 1,\n'
            ' 2,\n'
            ')',
        ),
        # --- `from mod import error` / `raise error` forms ---
        (
            'from {imp} import error\n\n'
            'raise error\n',
            'from {imp} import error\n\n'
            'raise OSError\n',
        ),
        (
            'from {imp} import error\n\n'
            'raise error()\n',
            'from {imp} import error\n\n'
            'raise OSError()\n',
        ),
        (
            'from {imp} import error\n\n'
            'raise error(1)\n',
            'from {imp} import error\n\n'
            'raise OSError(1)\n',
        ),
        (
            'from {imp} import error\n\n'
            'raise error(1, 2)\n',
            'from {imp} import error\n\n'
            'raise OSError(1, 2)\n',
        ),
        (
            'from {imp} import error\n\n'
            'raise error(\n'
            ' 1,\n'
            ' 2,\n'
            ')',
            'from {imp} import error\n\n'
            'raise OSError(\n'
            ' 1,\n'
            ' 2,\n'
            ')',
        ),
    ),
)
def test_fix_oserror_complex_aliases_raise(imp, tpl, expected_tpl):
    """Qualified and imported aliases in raise statements become OSError."""
    s, expected = tpl.format(imp=imp), expected_tpl.format(imp=imp)
    assert _fix_py3_plus(s) == expected
| 24.266667
| 73
| 0.367617
| 1,160
| 11,648
| 3.631897
| 0.057759
| 0.116307
| 0.139568
| 0.145265
| 0.922146
| 0.915737
| 0.887966
| 0.868502
| 0.84619
| 0.823878
| 0
| 0.007212
| 0.476219
| 11,648
| 479
| 74
| 24.317328
| 0.683331
| 0.018802
| 0
| 0.784543
| 0
| 0
| 0.397198
| 0
| 0
| 0
| 0
| 0
| 0.018735
| 1
| 0.014052
| false
| 0.229508
| 0.133489
| 0
| 0.147541
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
7ce742c318a7a51e6dd216cef81849894a80e64a
| 48
|
py
|
Python
|
data/test.py
|
vo3xel/python-occ
|
44141db80db307c55fb241cfa3c56f4376ba54fc
|
[
"MIT"
] | null | null | null |
data/test.py
|
vo3xel/python-occ
|
44141db80db307c55fb241cfa3c56f4376ba54fc
|
[
"MIT"
] | null | null | null |
data/test.py
|
vo3xel/python-occ
|
44141db80db307c55fb241cfa3c56f4376ba54fc
|
[
"MIT"
] | null | null | null |
import OCC
print('OCC version:'+ OCC.VERSION)
| 16
| 34
| 0.708333
| 7
| 48
| 4.857143
| 0.571429
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145833
| 48
| 3
| 34
| 16
| 0.829268
| 0
| 0
| 0
| 0
| 0
| 0.255319
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
7cfd9b5b79977eedbc54762b28750275df469afb
| 48
|
py
|
Python
|
Python/S10718.py
|
irostub/Beakjoon-Problem-Solving
|
3a230cbd16ade4ed7cc1da7f36085853d69d673d
|
[
"Beerware"
] | null | null | null |
Python/S10718.py
|
irostub/Beakjoon-Problem-Solving
|
3a230cbd16ade4ed7cc1da7f36085853d69d673d
|
[
"Beerware"
] | null | null | null |
Python/S10718.py
|
irostub/Beakjoon-Problem-Solving
|
3a230cbd16ade4ed7cc1da7f36085853d69d673d
|
[
"Beerware"
] | null | null | null |
print("강한친구 대한육군", sep="\n")
print("강한친구 대한육군")
| 16
| 28
| 0.625
| 8
| 48
| 3.75
| 0.625
| 0.6
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104167
| 48
| 2
| 29
| 24
| 0.697674
| 0
| 0
| 0
| 0
| 0
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
6b12a6f33418faacdb7d559a5eabe770dc84cf86
| 1,151
|
py
|
Python
|
tests/test_create_engine.py
|
lantunes/dnbpy
|
23536c70678575afd19000b69d047ee4b7a2bb40
|
[
"Apache-2.0"
] | 2
|
2018-04-23T22:59:15.000Z
|
2020-08-17T09:32:26.000Z
|
tests/test_create_engine.py
|
lantunes/dnbpy
|
23536c70678575afd19000b69d047ee4b7a2bb40
|
[
"Apache-2.0"
] | null | null | null |
tests/test_create_engine.py
|
lantunes/dnbpy
|
23536c70678575afd19000b69d047ee4b7a2bb40
|
[
"Apache-2.0"
] | null | null | null |
import unittest
import dnbpy
class TestCreateEngine(unittest.TestCase):
def test_create_engine_checks_args(self):
with self.assertRaises(Exception):
dnbpy.Game(1, ['player1', 'player2'])
with self.assertRaises(Exception):
dnbpy.Game((1, 2, 3), ['player1', 'player2'])
with self.assertRaises(Exception):
dnbpy.Game((0, 1), ['player1', 'player2'])
with self.assertRaises(Exception):
dnbpy.Game((1, 0), ['player1', 'player2'])
with self.assertRaises(Exception):
dnbpy.Game((-1, 1), ['player1', 'player2'])
with self.assertRaises(Exception):
dnbpy.Game((1, -1), ['player1', 'player2'])
with self.assertRaises(Exception):
dnbpy.Game((1, 1.2), ['player1', 'player2'])
with self.assertRaises(Exception):
dnbpy.Game((1, 1), [])
with self.assertRaises(Exception):
dnbpy.Game((1, 1), ['player1'])
with self.assertRaises(Exception):
dnbpy.Game((1, 1), None)
with self.assertRaises(Exception):
dnbpy.Game(None, ['player1', 'player2'])
| 37.129032
| 57
| 0.577758
| 121
| 1,151
| 5.46281
| 0.198347
| 0.133132
| 0.332829
| 0.482602
| 0.815431
| 0.815431
| 0.757943
| 0.700454
| 0.561271
| 0.490166
| 0
| 0.044549
| 0.258905
| 1,151
| 30
| 58
| 38.366667
| 0.730363
| 0
| 0
| 0.423077
| 0
| 0
| 0.103388
| 0
| 0
| 0
| 0
| 0
| 0.423077
| 1
| 0.038462
| false
| 0
| 0.076923
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
865fa539a7c9877328748dc27837419ab50355a1
| 427
|
py
|
Python
|
scanless/__init__.py
|
neelgandhi108/scanless
|
3b69b1c35726452d866e7ab62c43dd39d24d5326
|
[
"Unlicense"
] | 1
|
2020-03-06T08:53:05.000Z
|
2020-03-06T08:53:05.000Z
|
scanless/__init__.py
|
Warlockk/scanless
|
0f5fe0c16943d02f7fab66584283193531f6909d
|
[
"Unlicense"
] | null | null | null |
scanless/__init__.py
|
Warlockk/scanless
|
0f5fe0c16943d02f7fab66584283193531f6909d
|
[
"Unlicense"
] | 1
|
2020-05-14T23:40:04.000Z
|
2020-05-14T23:40:04.000Z
|
# _____ __ ____ ____ _ ___ _____ _____
# / ___/ / ] / || \ | | / _]/ ___// ___/
# ( \_ / / | o || _ || | / [_( \_( \_
# \__ |/ / | || | || |___ | _]\__ |\__ |
# / \ / \_ | _ || | || || [_ / \ |/ \ |
# \ \ || | || | || || |\ |\ |
# \___|\____||__|__||__|__||_____||_____| \___| \___|
from scanless.core import Scanless
| 47.444444
| 55
| 0.278689
| 6
| 427
| 5.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.461358
| 427
| 9
| 56
| 47.444444
| 0.134783
| 0.87822
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
86619b93346632af868251148d494812c365f5d3
| 151
|
py
|
Python
|
sadedegel/dataset/movie_sentiment/__init__.py
|
GlobalMaksimum/sadedegel
|
8e28dbeabc3bf0d6f2222089ac5e3a849f9d3a6b
|
[
"MIT"
] | 100
|
2020-07-06T05:50:49.000Z
|
2022-03-21T21:56:55.000Z
|
sadedegel/dataset/movie_sentiment/__init__.py
|
LyotardPostmodernizm/sadedegel
|
8e28dbeabc3bf0d6f2222089ac5e3a849f9d3a6b
|
[
"MIT"
] | 244
|
2020-07-06T06:31:01.000Z
|
2022-02-26T10:40:17.000Z
|
sadedegel/dataset/movie_sentiment/__init__.py
|
LyotardPostmodernizm/sadedegel
|
8e28dbeabc3bf0d6f2222089ac5e3a849f9d3a6b
|
[
"MIT"
] | 23
|
2020-07-27T16:32:48.000Z
|
2022-03-18T11:13:07.000Z
|
from ._core import load_movie_sentiment_train, load_movie_sentiment_test, load_movie_sentiment_test_label
from ._core import CORPUS_SIZE, CLASS_VALUES
| 50.333333
| 105
| 0.89404
| 23
| 151
| 5.26087
| 0.565217
| 0.223141
| 0.446281
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072848
| 151
| 2
| 106
| 75.5
| 0.864286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
868ca585161efc1769bde5038c8ee0f1b95d5d7d
| 154
|
py
|
Python
|
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_0/_pkg1_1_0_1/_pkg1_1_0_1_0/__init__.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_0/_pkg1_1_0_1/_pkg1_1_0_1_0/__init__.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_0/_pkg1_1_0_1/_pkg1_1_0_1_0/__init__.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
from ._mod1_1_0_1_0_0 import *
from ._mod1_1_0_1_0_1 import *
from ._mod1_1_0_1_0_2 import *
from ._mod1_1_0_1_0_3 import *
from ._mod1_1_0_1_0_4 import *
| 30.8
| 30
| 0.811688
| 40
| 154
| 2.375
| 0.2
| 0.210526
| 0.189474
| 0.526316
| 0.884211
| 0.884211
| 0.757895
| 0
| 0
| 0
| 0
| 0.222222
| 0.123377
| 154
| 5
| 31
| 30.8
| 0.481481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 12
|
86a3efd675a85a3197420d8fcc402074069225be
| 5,238
|
py
|
Python
|
src/compiler/Restler.Compiler.Test/baselines/dependencyTests/header_deps_grammar.py
|
Nauscar/restler-fuzzer
|
f370810726b9a44613eafd923008fc2f030fdf23
|
[
"MIT"
] | 1,539
|
2020-11-16T19:20:55.000Z
|
2022-03-30T16:36:49.000Z
|
src/compiler/Restler.Compiler.Test/baselines/dependencyTests/header_deps_grammar.py
|
Nauscar/restler-fuzzer
|
f370810726b9a44613eafd923008fc2f030fdf23
|
[
"MIT"
] | 282
|
2020-11-17T04:53:38.000Z
|
2022-03-31T13:16:25.000Z
|
src/compiler/Restler.Compiler.Test/baselines/dependencyTests/header_deps_grammar.py
|
Nauscar/restler-fuzzer
|
f370810726b9a44613eafd923008fc2f030fdf23
|
[
"MIT"
] | 171
|
2020-11-16T21:55:59.000Z
|
2022-03-28T12:56:26.000Z
|
""" THIS IS AN AUTOMATICALLY GENERATED FILE!"""
from __future__ import print_function
import json
from engine import primitives
from engine.core import requests
from engine.errors import ResponseParsingException
from engine import dependencies
_service_user_post_user_id_header = dependencies.DynamicVariable("_service_user_post_user_id_header")
def parse_serviceuserpost(data, **kwargs):
""" Automatically generated response parser """
# Declare response variables
temp_7262 = None
if 'headers' in kwargs:
headers = kwargs['headers']
# Parse body if needed
if data:
pass
# Try to extract each dynamic object
if headers:
# Try to extract dynamic objects from headers
try:
temp_7262 = str(headers["user-id"])
except Exception as error:
# This is not an error, since some properties are not always returned
pass
pass
# If no dynamic objects were extracted, throw.
if not (temp_7262):
raise ResponseParsingException("Error: all of the expected dynamic objects were not present in the response.")
# Set dynamic variables
if temp_7262:
dependencies.set_variable("_service_user_post_user_id_header", temp_7262)
req_collection = requests.RequestCollection([])
# Endpoint: /service/user, method: Post
request = requests.Request([
primitives.restler_static_string("POST "),
primitives.restler_static_string("/"),
primitives.restler_static_string("api"),
primitives.restler_static_string("/"),
primitives.restler_static_string("service"),
primitives.restler_static_string("/"),
primitives.restler_static_string("user"),
primitives.restler_static_string(" HTTP/1.1\r\n"),
primitives.restler_static_string("Accept: application/json\r\n"),
primitives.restler_static_string("Host: localhost:8888\r\n"),
primitives.restler_refreshable_authentication_token("authentication_token_tag"),
primitives.restler_static_string("\r\n"),
{
'post_send':
{
'parser': parse_serviceuserpost,
'dependencies':
[
_service_user_post_user_id_header.writer()
]
}
},
],
requestId="/service/user"
)
req_collection.add_request(request)
# Endpoint: /service/user, method: Get
request = requests.Request([
primitives.restler_static_string("GET "),
primitives.restler_static_string("/"),
primitives.restler_static_string("api"),
primitives.restler_static_string("/"),
primitives.restler_static_string("service"),
primitives.restler_static_string("/"),
primitives.restler_static_string("user"),
primitives.restler_static_string(" HTTP/1.1\r\n"),
primitives.restler_static_string("Accept: application/json\r\n"),
primitives.restler_static_string("Host: localhost:8888\r\n"),
primitives.restler_static_string("user-id: "),
primitives.restler_static_string(_service_user_post_user_id_header.reader(), quoted=False),
primitives.restler_static_string("\r\n"),
primitives.restler_refreshable_authentication_token("authentication_token_tag"),
primitives.restler_static_string("\r\n"),
],
requestId="/service/user"
)
req_collection.add_request(request)
# Endpoint: /service/user, method: Put
request = requests.Request([
primitives.restler_static_string("PUT "),
primitives.restler_static_string("/"),
primitives.restler_static_string("api"),
primitives.restler_static_string("/"),
primitives.restler_static_string("service"),
primitives.restler_static_string("/"),
primitives.restler_static_string("user"),
primitives.restler_static_string(" HTTP/1.1\r\n"),
primitives.restler_static_string("Accept: application/json\r\n"),
primitives.restler_static_string("Host: localhost:8888\r\n"),
primitives.restler_static_string("user-id: "),
primitives.restler_static_string(_service_user_post_user_id_header.reader(), quoted=False),
primitives.restler_static_string("\r\n"),
primitives.restler_refreshable_authentication_token("authentication_token_tag"),
primitives.restler_static_string("\r\n"),
],
requestId="/service/user"
)
req_collection.add_request(request)
# Endpoint: /service/user, method: Delete
request = requests.Request([
primitives.restler_static_string("DELETE "),
primitives.restler_static_string("/"),
primitives.restler_static_string("api"),
primitives.restler_static_string("/"),
primitives.restler_static_string("service"),
primitives.restler_static_string("/"),
primitives.restler_static_string("user"),
primitives.restler_static_string(" HTTP/1.1\r\n"),
primitives.restler_static_string("Accept: application/json\r\n"),
primitives.restler_static_string("Host: localhost:8888\r\n"),
primitives.restler_static_string("user-id: "),
primitives.restler_static_string(_service_user_post_user_id_header.reader(), quoted=False),
primitives.restler_static_string("\r\n"),
primitives.restler_refreshable_authentication_token("authentication_token_tag"),
primitives.restler_static_string("\r\n"),
],
requestId="/service/user"
)
req_collection.add_request(request)
| 35.154362
| 118
| 0.730431
| 605
| 5,238
| 6.008264
| 0.17686
| 0.266575
| 0.335351
| 0.422834
| 0.750481
| 0.750481
| 0.735626
| 0.658047
| 0.658047
| 0.658047
| 0
| 0.009908
| 0.152157
| 5,238
| 148
| 119
| 35.391892
| 0.808602
| 0.094693
| 0
| 0.648649
| 1
| 0
| 0.157025
| 0.034329
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009009
| false
| 0.027027
| 0.054054
| 0
| 0.063063
| 0.009009
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86c69a45f72b481968e7937e112e92137f543764
| 53,829
|
py
|
Python
|
dataloader/dataset.py
|
XiaoJake/DS-Net
|
8400da1bd7c7b1ccf4d5c6782b86372957e79a6b
|
[
"MIT"
] | null | null | null |
dataloader/dataset.py
|
XiaoJake/DS-Net
|
8400da1bd7c7b1ccf4d5c6782b86372957e79a6b
|
[
"MIT"
] | null | null | null |
dataloader/dataset.py
|
XiaoJake/DS-Net
|
8400da1bd7c7b1ccf4d5c6782b86372957e79a6b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
SemKITTI dataloader
"""
import os
import numpy as np
import torch
import random
import time
import numba as nb
import yaml
import pickle
from torch.utils import data
from tqdm import tqdm
from scipy import stats as s
from os.path import join
# load Semantic KITTI class info
with open("semantic-kitti.yaml", 'r') as stream:
semkittiyaml = yaml.safe_load(stream)
SemKITTI_label_name = dict()
for i in sorted(list(semkittiyaml['learning_map'].keys()))[::-1]:
SemKITTI_label_name[semkittiyaml['learning_map'][i]] = semkittiyaml['labels'][i]
# things = ['car', 'truck', 'bicycle', 'motorcycle', 'bus', 'person', 'bicyclist', 'motorcyclist']
# stuff = ['road', 'sidewalk', 'parking', 'other-ground', 'building', 'vegetation', 'trunk', 'terrain', 'fence', 'pole', 'traffic-sign']
# things_ids = []
# for i in sorted(list(semkittiyaml['labels'].keys())):
# if SemKITTI_label_name[semkittiyaml['learning_map'][i]] in things:
# things_ids.append(i)
# print(things_ids)
class SemKITTI(data.Dataset):
def __init__(self, data_path, imageset = 'train', return_ref = False, return_ins = False):
self.return_ref = return_ref
self.return_ins = return_ins
with open("semantic-kitti.yaml", 'r') as stream:
semkittiyaml = yaml.safe_load(stream)
self.learning_map = semkittiyaml['learning_map']
self.imageset = imageset
if imageset == 'train':
split = semkittiyaml['split']['train']
elif imageset == 'val':
split = semkittiyaml['split']['valid']
elif imageset == 'test':
split = semkittiyaml['split']['test']
else:
raise Exception('Split must be train/val/test')
self.sequences = sorted(split)
self.data_path = data_path
self.im_idx = []
for i_folder in split:
self.im_idx += absoluteFilePaths('/'.join([data_path,str(i_folder).zfill(2),'velodyne']))
self.im_idx.sort()
self.load_calib_poses()
self.im_idx_ind = []
for im in self.im_idx:
frame_path = im.split('/')
frame_id = im.split('/')[-1].split('.')[0]
assert len(frame_id) == 6
frame_id = int(frame_id)
seq = frame_path[-3]
seq_ind = self.seq2ind[seq]
self.im_idx_ind.append((seq_ind, frame_id))
self.things = ['car', 'truck', 'bicycle', 'motorcycle', 'bus', 'person', 'bicyclist', 'motorcyclist']
self.stuff = ['road', 'sidewalk', 'parking', 'other-ground', 'building', 'vegetation', 'trunk', 'terrain', 'fence', 'pole', 'traffic-sign']
self.things_ids = []
for i in sorted(list(semkittiyaml['labels'].keys())):
if SemKITTI_label_name[semkittiyaml['learning_map'][i]] in self.things:
self.things_ids.append(i)
def load_calib_poses(self):
"""
load calib poses and times.
"""
###########
# Load data
###########
self.calibrations = []
self.times = []
self.poses = []
self.seq2ind = {}
for i, seq in enumerate(self.sequences):
self.seq2ind[str(seq).zfill(2)] = i
seq_folder = join(self.data_path, str(seq).zfill(2))
# Read Calib
self.calibrations.append(self.parse_calibration(join(seq_folder, "calib.txt")))
# Read times
self.times.append(np.loadtxt(join(seq_folder, 'times.txt'), dtype=np.float32))
# Read poses
poses_f64 = self.parse_poses(join(seq_folder, 'poses.txt'), self.calibrations[-1])
self.poses.append([pose.astype(np.float32) for pose in poses_f64])
def parse_calibration(self, filename):
""" read calibration file with given filename
Returns
-------
dict
Calibration matrices as 4x4 numpy arrays.
"""
calib = {}
calib_file = open(filename)
for line in calib_file:
key, content = line.strip().split(":")
values = [float(v) for v in content.strip().split()]
pose = np.zeros((4, 4))
pose[0, 0:4] = values[0:4]
pose[1, 0:4] = values[4:8]
pose[2, 0:4] = values[8:12]
pose[3, 3] = 1.0
calib[key] = pose
calib_file.close()
return calib
def parse_poses(self, filename, calibration):
""" read poses file with per-scan poses from given filename
Returns
-------
list
list of poses as 4x4 numpy arrays.
"""
file = open(filename)
poses = []
Tr = calibration["Tr"]
Tr_inv = np.linalg.inv(Tr)
for line in file:
values = [float(v) for v in line.strip().split()]
pose = np.zeros((4, 4))
pose[0, 0:4] = values[0:4]
pose[1, 0:4] = values[4:8]
pose[2, 0:4] = values[8:12]
pose[3, 3] = 1.0
poses.append(np.matmul(Tr_inv, np.matmul(pose, Tr)))
return poses
def __len__(self):
'Denotes the total number of samples'
return len(self.im_idx)
def __getitem__(self, index):
raw_data = np.fromfile(self.im_idx[index], dtype=np.float32).reshape((-1, 4))
# print("loading {}, shape {}".format(self.im_idx[index], raw_data.shape))
if self.imageset == 'test':
annotated_data = np.expand_dims(np.zeros_like(raw_data[:,0],dtype=int),axis=1)
sem_labels = annotated_data
ins_labels = annotated_data
valid = annotated_data
else:
annotated_data = np.fromfile(self.im_idx[index].replace('velodyne','labels')[:-3]+'label', dtype=np.int32).reshape((-1,1))
sem_labels = annotated_data & 0xFFFF #delete high 16 digits binary
# ins_labels = (annotated_data & 0xFFFF0000) >> 16 # different classes could use same ins ids
ins_labels = annotated_data
# valid = (((ins_labels & 0xFFFF0000) >> 16) != 0).reshape(-1) # TODO: maybe this is not ok
valid = np.isin(sem_labels, self.things_ids).reshape(-1) # use 0 to filter out valid indexes is enough
# print(np.sum(valid) - np.sum((((ins_labels & 0xFFFF0000) >> 16) != 0)))
sem_labels = np.vectorize(self.learning_map.__getitem__)(sem_labels)
data_tuple = (raw_data[:,:3], sem_labels.astype(np.uint8))
if self.return_ref:
data_tuple += (raw_data[:,3],)
if self.return_ins:
data_tuple += (ins_labels, valid)
data_tuple += (self.im_idx[index], self.poses[self.im_idx_ind[index][0]][self.im_idx_ind[index][1]])
return data_tuple
def count_ins(self):
pbar = tqdm(total=len(self.im_idx), dynamic_ncols=True)
counter = np.zeros([9], dtype=np.int32)
min_valid_pn = 10000086
max_valid_pn = -1
for i in range(len(self.im_idx)):
# raw_data = np.fromfile(self.im_idx[i], dtype=np.float32).reshape((-1, 4))
annotated_data = np.fromfile(self.im_idx[i].replace('velodyne','labels')[:-3]+'label', dtype=np.int32).reshape((-1,1))
_sem_labels = annotated_data & 0xFFFF #delete high 16 digits binary
ins_labels = annotated_data
sem_labels = np.vectorize(self.learning_map.__getitem__)(_sem_labels)
for j in range(1,9):
j_ind = (sem_labels == j)
j_ins_labels = ins_labels[j_ind]
counter[j] += np.unique(j_ins_labels).reshape(-1).shape[0]
pbar.update(1)
valid_pn = np.sum(np.isin(_sem_labels, self.things_ids).reshape(-1))
if valid_pn > max_valid_pn:
max_valid_pn = valid_pn
if valid_pn < min_valid_pn:
min_valid_pn = valid_pn
print(valid_pn, sem_labels.shape[0])
pbar.close()
counter = counter[1:]
print("Counting results: ")
print(counter)
counter = counter.astype(np.float32)
counter /= (np.min(counter) if np.min(counter) != 0 else 1.0)
print("Weights: ")
print(counter)
print("max_valid_pn: {}".format(max_valid_pn))
print("min_valid_pn: {}".format(min_valid_pn))
def count_box_size(self):
pbar = tqdm(total=len(self.im_idx), dynamic_ncols=True)
counter = np.zeros([9], dtype=np.float32)
mean_size = np.zeros([9, 2], dtype=np.float32)
max_size = np.zeros([9, 2], dtype=np.float32)
min_size = np.zeros([9, 2], dtype=np.float32) + 10086
for i in range(len(self.im_idx)):
#if i % 10 != 0:
# pbar.update(1)
# continue
raw_data = np.fromfile(self.im_idx[i], dtype=np.float32).reshape((-1, 4))
annotated_data = np.fromfile(self.im_idx[i].replace('velodyne','labels')[:-3]+'label', dtype=np.int32).reshape((-1,1))
_sem_labels = annotated_data & 0xFFFF #delete high 16 digits binary
ins_labels = annotated_data
sem_labels = np.vectorize(self.learning_map.__getitem__)(_sem_labels)
pbar.update(1)
for j in range(1, 9):
j_ind = (sem_labels == j)
j_ins_labels = ins_labels[j_ind]
for j_ins_lab in np.unique(j_ins_labels):
j_pcd = raw_data[(ins_labels == j_ins_lab).reshape(-1)]
if j_pcd.shape[0] < 50:
continue
x = j_pcd[:, 0].max() - j_pcd[:, 0].min()
y = j_pcd[:, 1].max() - j_pcd[:, 1].min()
if x < y:
tmp = x
x = y
y = tmp
mean_size[j, 0] += x
mean_size[j, 1] += y
counter[j] += 1
if x > max_size[j, 0]:
max_size[j, 0] = x
if y > max_size[j, 1]:
max_size[j, 1] = y
if x < min_size[j, 0]:
min_size[j, 0] = x
if y < min_size[j, 1]:
min_size[j, 1] = y
pbar.close()
counter[0] = 1
print("Mean Size: {}".format(mean_size / counter.reshape(-1, 1)))
print("Max Size: {}".format(max_size))
print("Min Size: {}".format(min_size))
class SemKITTI_tracking(data.Dataset):
def __init__(self, data_path, imageset = 'train', return_ref = False, return_ins = False):
self.return_ref = return_ref
self.return_ins = return_ins
with open("semantic-kitti.yaml", 'r') as stream:
semkittiyaml = yaml.safe_load(stream)
self.learning_map = semkittiyaml['learning_map']
self.imageset = imageset
if imageset == 'train':
split = semkittiyaml['split']['train']
elif imageset == 'val':
split = semkittiyaml['split']['valid']
elif imageset == 'test':
split = semkittiyaml['split']['test']
else:
raise Exception('Split must be train/val/test')
self.sequences = sorted(split)
self.data_path = data_path
self.im_idx = []
for i_folder in split:
self.im_idx += absoluteFilePaths('/'.join([data_path,str(i_folder).zfill(2),'velodyne']))
self.im_idx.sort()
self.im_pair = []
self.im_pair_ind = []
self.findNext()
self.things = ['car', 'truck', 'bicycle', 'motorcycle', 'bus', 'person', 'bicyclist', 'motorcyclist']
self.stuff = ['road', 'sidewalk', 'parking', 'other-ground', 'building', 'vegetation', 'trunk', 'terrain', 'fence', 'pole', 'traffic-sign']
self.things_ids = []
for i in sorted(list(semkittiyaml['labels'].keys())):
if SemKITTI_label_name[semkittiyaml['learning_map'][i]] in self.things:
self.things_ids.append(i)
self.load_calib_poses()
def load_calib_poses(self):
"""
load calib poses and times.
"""
###########
# Load data
###########
self.calibrations = []
self.times = []
self.poses = []
self.seq2ind = {}
for i, seq in enumerate(self.sequences):
self.seq2ind[str(seq).zfill(2)] = i
seq_folder = join(self.data_path, str(seq).zfill(2))
# Read Calib
self.calibrations.append(self.parse_calibration(join(seq_folder, "calib.txt")))
# Read times
self.times.append(np.loadtxt(join(seq_folder, 'times.txt'), dtype=np.float32))
# Read poses
poses_f64 = self.parse_poses(join(seq_folder, 'poses.txt'), self.calibrations[-1])
self.poses.append([pose.astype(np.float32) for pose in poses_f64])
def parse_calibration(self, filename):
""" read calibration file with given filename
Returns
-------
dict
Calibration matrices as 4x4 numpy arrays.
"""
calib = {}
calib_file = open(filename)
for line in calib_file:
key, content = line.strip().split(":")
values = [float(v) for v in content.strip().split()]
pose = np.zeros((4, 4))
pose[0, 0:4] = values[0:4]
pose[1, 0:4] = values[4:8]
pose[2, 0:4] = values[8:12]
pose[3, 3] = 1.0
calib[key] = pose
calib_file.close()
return calib
def parse_poses(self, filename, calibration):
""" read poses file with per-scan poses from given filename
Returns
-------
list
list of poses as 4x4 numpy arrays.
"""
file = open(filename)
poses = []
Tr = calibration["Tr"]
Tr_inv = np.linalg.inv(Tr)
for line in file:
values = [float(v) for v in line.strip().split()]
pose = np.zeros((4, 4))
pose[0, 0:4] = values[0:4]
pose[1, 0:4] = values[4:8]
pose[2, 0:4] = values[8:12]
pose[3, 3] = 1.0
poses.append(np.matmul(Tr_inv, np.matmul(pose, Tr)))
return poses
def __len__(self):
'Denotes the total number of samples'
# return len(self.im_idx)
return len(self.im_pair)
def findNext(self):
for i in self.im_idx:
frame_path = i.split('/')
frame_id = i.split('/')[-1].split('.')[0]
assert len(frame_id) == 6
frame_id = int(frame_id)
im_list = [i]
seq = frame_path[-3]
seq_ind = self.seq2ind[seq]
frame_ind = frame_id
next_frame = str(frame_id + 1).zfill(6) + '.bin'
frame_path[-1] = next_frame
next_frame_path = '/'.join(frame_path)
if os.path.exists(next_frame_path):
self.im_pair.append((i, next_frame_path))
self.im_pair_ind.append((seq_ind, frame_ind, frame_ind + 1))
def __getitem__(self, index):
raw_data = np.fromfile(self.im_pair[index][0], dtype=np.float32).reshape((-1, 4))
next_raw_data = np.fromfile(self.im_pair[index][1], dtype=np.float32).reshape((-1, 4))
if self.imageset == 'test':
raise NotImplementedError
else:
annotated_data = np.fromfile(self.im_pair[index][0].replace('velodyne','labels')[:-3]+'label', dtype=np.int32).reshape((-1,1))
sem_labels = annotated_data & 0xFFFF #delete high 16 digits binary
# ins_labels = (annotated_data & 0xFFFF0000) >> 16 # different classes could use same ins ids
ins_labels = annotated_data
valid = np.isin(sem_labels, self.things_ids).reshape(-1)
sem_labels = np.vectorize(self.learning_map.__getitem__)(sem_labels)
next_annotated_data = np.fromfile(self.im_pair[index][1].replace('velodyne','labels')[:-3]+'label', dtype=np.int32).reshape((-1,1))
next_sem_labels = next_annotated_data & 0xFFFF
next_ins_labels = next_annotated_data
next_valid = np.isin(next_sem_labels, self.things_ids).reshape(-1)
next_sem_labels = np.vectorize(self.learning_map.__getitem__)(next_sem_labels)
data_tuple = (raw_data[:,:3], sem_labels.astype(np.uint8))
next_data_tuple = (next_raw_data[:,:3], next_sem_labels.astype(np.uint8))
if self.return_ref:
data_tuple += (raw_data[:,3],)
next_data_tuple += (next_raw_data[:,3],)
if self.return_ins:
data_tuple += (ins_labels, valid)
next_data_tuple += (next_ins_labels, next_valid)
data_tuple += (self.im_pair[index][0], self.poses[self.im_pair[index][0]][self.im_pair[index][1]])
next_data_tuple += (self.im_pair[index][1], self.poses[self.im_pair[index][0]][self.im_pair[index][2]])
return (next_data_tuple, data_tuple)
class SemKITTI_multi_frames(data.Dataset):
def __init__(self, data_path, imageset = 'train', return_ref = False, return_ins = False, n_frames = 3):
self.return_ref = return_ref
self.return_ins = return_ins
with open("semantic-kitti.yaml", 'r') as stream:
semkittiyaml = yaml.safe_load(stream)
self.learning_map = semkittiyaml['learning_map']
self.imageset = imageset
if imageset == 'train':
split = semkittiyaml['split']['train']
elif imageset == 'val':
split = semkittiyaml['split']['valid']
elif imageset == 'test':
split = semkittiyaml['split']['test']
else:
raise Exception('Split must be train/val/test')
self.sequences = sorted(split)
self.data_path = data_path
self.im_idx = []
for i_folder in split:
self.im_idx += absoluteFilePaths('/'.join([data_path,str(i_folder).zfill(2),'velodyne']))
self.im_idx.sort()
self.things = ['car', 'truck', 'bicycle', 'motorcycle', 'bus', 'person', 'bicyclist', 'motorcyclist']
self.stuff = ['road', 'sidewalk', 'parking', 'other-ground', 'building', 'vegetation', 'trunk', 'terrain', 'fence', 'pole', 'traffic-sign']
self.things_ids = []
for i in sorted(list(semkittiyaml['labels'].keys())):
if SemKITTI_label_name[semkittiyaml['learning_map'][i]] in self.things:
self.things_ids.append(i)
self.load_calib_poses()
self.n_frames = n_frames
self.multi_im_list = []
self.multi_im_list_ind = []
self.findNFrames()
def load_calib_poses(self):
"""
load calib poses and times.
"""
###########
# Load data
###########
self.calibrations = []
self.times = []
self.poses = []
self.seq2ind = {}
for i, seq in enumerate(self.sequences):
self.seq2ind[str(seq).zfill(2)] = i
seq_folder = join(self.data_path, str(seq).zfill(2))
# Read Calib
self.calibrations.append(self.parse_calibration(join(seq_folder, "calib.txt")))
# Read times
self.times.append(np.loadtxt(join(seq_folder, 'times.txt'), dtype=np.float32))
# Read poses
poses_f64 = self.parse_poses(join(seq_folder, 'poses.txt'), self.calibrations[-1])
self.poses.append([pose.astype(np.float32) for pose in poses_f64])
def parse_calibration(self, filename):
""" read calibration file with given filename
Returns
-------
dict
Calibration matrices as 4x4 numpy arrays.
"""
calib = {}
calib_file = open(filename)
for line in calib_file:
key, content = line.strip().split(":")
values = [float(v) for v in content.strip().split()]
pose = np.zeros((4, 4))
pose[0, 0:4] = values[0:4]
pose[1, 0:4] = values[4:8]
pose[2, 0:4] = values[8:12]
pose[3, 3] = 1.0
calib[key] = pose
calib_file.close()
return calib
def parse_poses(self, filename, calibration):
""" read poses file with per-scan poses from given filename
Returns
-------
list
list of poses as 4x4 numpy arrays.
"""
file = open(filename)
poses = []
Tr = calibration["Tr"]
Tr_inv = np.linalg.inv(Tr)
for line in file:
values = [float(v) for v in line.strip().split()]
pose = np.zeros((4, 4))
pose[0, 0:4] = values[0:4]
pose[1, 0:4] = values[4:8]
pose[2, 0:4] = values[8:12]
pose[3, 3] = 1.0
poses.append(np.matmul(Tr_inv, np.matmul(pose, Tr)))
file.close()
return poses
def findNFrames(self):
# looking past self.n_frames frames
# if not enough existing self.n_frames frames, then just find as much as possible
# e.g. the first frame will only contain one frame
for i in self.im_idx:
frame_path = i.split('/')
frame_id = i.split('/')[-1].split('.')[0]
assert len(frame_id) == 6
frame_id = int(frame_id)
im_list = [i]
seq = frame_path[-3]
seq_ind = self.seq2ind[seq]
frame_ind = frame_id
im_ind_list = [(seq_ind, frame_ind)]
for j in range(self.n_frames - 1):
if frame_id - j - 1 >= 0:
cur_frame = str(frame_id - j - 1).zfill(6) + '.bin'
frame_path[-1] = cur_frame
cur_frame_path = '/'.join(frame_path)
im_list.append(cur_frame_path)
frame_ind -= 1
im_ind_list.append((seq_ind, frame_ind))
else:
break
self.multi_im_list.append(im_list)
self.multi_im_list_ind.append(im_ind_list)
def __len__(self):
    # One sample per (current scan + preceding frames) group built by findNFrames.
    return len(self.multi_im_list)
def __getitem__(self, index):
    """Load scan *index* plus its preceding frames, merged into one point
    cloud expressed in the coordinate frame of the first (current) scan.

    Returns
    -------
    tuple
        (merged_pts Nx3 float32, merged_sem Nx1, merged_ref Nx1 float32,
        merged_ins Nx1, merged_valid Nx1, merged_mask Nx1 source-frame index,
        merged_fnames list of .bin paths).
    """
    cur_im_list = self.multi_im_list[index]
    cur_im_ind = self.multi_im_list_ind[index]
    # Empty accumulators; each frame's arrays are vstacked onto these below.
    merged_pts = np.zeros([0, 3], dtype=np.float32)
    merged_ref = np.zeros([0, 1], dtype=np.float32)
    merged_sem = np.zeros([0, 1], dtype=np.uint8)
    merged_ins = np.zeros([0, 1], dtype=np.int32)
    merged_valid = np.zeros([0, 1], dtype=np.int32)
    merged_mask = np.zeros([0, 1], dtype=np.uint8)
    merged_fnames = []
    for i, im in enumerate(cur_im_list):
        # KITTI .bin scans store (x, y, z, reflectance) as float32
        raw_data = np.fromfile(im, dtype=np.float32).reshape((-1, 4))
        if self.imageset == 'test':
            # no annotations at test time: all-zero placeholders
            annotated_data = np.expand_dims(np.zeros_like(raw_data[:,0],dtype=int),axis=1)
            sem_labels = annotated_data
            ins_labels = annotated_data
            valid = annotated_data
        else:
            annotated_data = np.fromfile(im.replace('velodyne','labels')[:-3]+'label', dtype=np.int32).reshape((-1,1))
            sem_labels = annotated_data & 0xFFFF #delete high 16 digits binary
            ins_labels = annotated_data
            # 'valid' flags thing-class points; computed on RAW ids, BEFORE learning_map remapping
            valid = np.isin(sem_labels, self.things_ids).reshape(-1) # use 0 to filter out valid indexes is enough
            sem_labels = np.vectorize(self.learning_map.__getitem__)(sem_labels)
        seq_ind, frame_ind = cur_im_ind[i]
        cur_pose = self.poses[seq_ind][frame_ind]
        if i == 0:
            # first frame defines the reference coordinate system
            p_origin = np.zeros((1, 4))
            p_origin[0, 3] = 1
            pose0 = cur_pose
            p0 = p_origin.dot(pose0.T)[:, :3]
            p0 = np.squeeze(p0)
            points = raw_data[:, :3]
        else:
            # to global coor (homogeneous points times cur_pose)
            hpoints = np.hstack((raw_data[:, :3], np.ones_like(raw_data[:, :1])))
            new_points = np.sum(np.expand_dims(hpoints, 2) * cur_pose.T, axis=1)[:, :3]
            # to first frame coor (subtract translation, rotate by pose0's rotation)
            new_coords = new_points - pose0[:3, 3]
            new_coords = np.sum(np.expand_dims(new_coords, 2) * pose0[:3, :3], axis=1)
            points = new_coords
        merged_pts = np.vstack((merged_pts, points))
        merged_ref = np.vstack((merged_ref, raw_data[:, 3].reshape(-1, 1)))
        merged_sem = np.vstack((merged_sem, sem_labels))
        merged_ins = np.vstack((merged_ins, ins_labels))
        merged_valid = np.vstack((merged_valid, valid.reshape(-1, 1)))
        # per-point index of the source frame (0 = current scan)
        merged_mask = np.vstack((merged_mask, np.zeros_like(sem_labels) + i))
        merged_fnames.append(im)
    return (
        merged_pts,
        merged_sem,
        merged_ref,
        merged_ins,
        merged_valid,
        merged_mask,
        merged_fnames,
    )
def absoluteFilePaths(directory):
    """Yield the absolute path of every file below *directory* (recursively)."""
    for root, _, names in os.walk(directory):
        for name in names:
            yield os.path.abspath(os.path.join(root, name))
class voxel_dataset(data.Dataset):
    """Voxelize point-cloud samples from *in_dataset* on a regular Cartesian grid.

    ``__getitem__`` yields ``(voxel_position, processed_label, grid_ind,
    labels, return_fea[, index])`` where ``processed_label`` is the per-voxel
    majority-vote semantic label and ``return_fea`` holds point-wise features
    (voxel-relative xyz + raw xyz [+ reflectance]).
    """

    def __init__(self, in_dataset, grid_size, rotate_aug=False, flip_aug=False,
                 ignore_label=255, return_test=False, fixed_volume_space=False,
                 max_volume_space=[50, 50, 1.5], min_volume_space=[-50, -50, -3]):
        'Initialization'
        self.point_cloud_dataset = in_dataset
        self.grid_size = np.asarray(grid_size)
        self.rotate_aug = rotate_aug
        self.ignore_label = ignore_label
        self.return_test = return_test
        self.flip_aug = flip_aug
        self.fixed_volume_space = fixed_volume_space
        self.max_volume_space = max_volume_space
        self.min_volume_space = min_volume_space

    def __len__(self):
        'Denotes the total number of samples'
        return len(self.point_cloud_dataset)

    def __getitem__(self, index):
        'Generates one sample of data'
        # local renamed from 'data' so it no longer shadows the torch.utils 'data' module
        sample = self.point_cloud_dataset[index]
        if len(sample) == 2:
            xyz, labels = sample
        elif len(sample) == 3:
            xyz, labels, sig = sample
            if len(sig.shape) == 2: sig = np.squeeze(sig)
        elif len(sample) == 4:
            raise Exception('Not implement instance label for voxel_dataset')
        else:
            raise Exception('Return invalid data tuple')
        # random data augmentation by rotation around the z axis
        if self.rotate_aug:
            rotate_rad = np.deg2rad(np.random.random() * 360)
            c, s = np.cos(rotate_rad), np.sin(rotate_rad)
            j = np.matrix([[c, s], [-s, c]])
            xyz[:, :2] = np.dot(xyz[:, :2], j)
        # random data augmentation by flipping x, y or both
        if self.flip_aug:
            flip_type = np.random.choice(4, 1)
            if flip_type == 1:
                xyz[:, 0] = -xyz[:, 0]
            elif flip_type == 2:
                xyz[:, 1] = -xyz[:, 1]
            elif flip_type == 3:
                xyz[:, :2] = -xyz[:, :2]
        # data-driven bounds (percentile 100/0 == per-axis max/min)
        max_bound = np.percentile(xyz, 100, axis=0)
        min_bound = np.percentile(xyz, 0, axis=0)
        if self.fixed_volume_space:
            max_bound = np.asarray(self.max_volume_space)
            min_bound = np.asarray(self.min_volume_space)
        # get grid index; (size-1) maps the bounds onto indices 0..size-1
        crop_range = max_bound - min_bound
        cur_grid_size = self.grid_size
        intervals = crop_range / (cur_grid_size - 1)
        if (intervals == 0).any(): print("Zero interval!")
        # BUGFIX: np.int alias was removed in NumPy >= 1.24; builtin int is equivalent
        grid_ind = (np.floor((np.clip(xyz, min_bound, max_bound) - min_bound) / intervals)).astype(int)
        # voxel center coordinates, broadcast over the grid (dead pre-allocation removed)
        dim_array = np.ones(len(self.grid_size) + 1, int)
        dim_array[0] = -1
        voxel_position = np.indices(self.grid_size) * intervals.reshape(dim_array) + min_bound.reshape(dim_array)
        # per-voxel majority-vote semantic label
        processed_label = np.ones(self.grid_size, dtype=np.uint8) * self.ignore_label
        label_voxel_pair = np.concatenate([grid_ind, labels], axis=1)
        label_voxel_pair = label_voxel_pair[np.lexsort((grid_ind[:, 0], grid_ind[:, 1], grid_ind[:, 2])), :]
        processed_label = nb_process_label(np.copy(processed_label), label_voxel_pair)
        data_tuple = (voxel_position, processed_label)
        # center data on each voxel for PTnet
        voxel_centers = (grid_ind.astype(np.float32) + 0.5) * intervals + min_bound
        return_xyz = xyz - voxel_centers
        return_xyz = np.concatenate((return_xyz, xyz), axis=1)
        if len(sample) == 2:
            return_fea = return_xyz
        elif len(sample) == 3:
            return_fea = np.concatenate((return_xyz, sig[..., np.newaxis]), axis=1)
        if self.return_test:
            data_tuple += (grid_ind, labels, return_fea, index)
        else:
            data_tuple += (grid_ind, labels, return_fea)
        return data_tuple
# transformation between Cartesian coordinates and polar coordinates
def cart2polar(input_xyz):
    """Convert Nx3 Cartesian points to cylindrical (rho, phi, z) coordinates."""
    x = input_xyz[:, 0]
    y = input_xyz[:, 1]
    rho = np.sqrt(x ** 2 + y ** 2)
    phi = np.arctan2(y, x)
    return np.stack((rho, phi, input_xyz[:, 2]), axis=1)
def polar2cat(input_xyz_polar):
    """Convert cylindrical coordinates (indexed rho/phi/z along axis 0) back to Cartesian."""
    rho = input_xyz_polar[0]
    phi = input_xyz_polar[1]
    return np.stack((rho * np.cos(phi), rho * np.sin(phi), input_xyz_polar[2]), axis=0)
class spherical_dataset(data.Dataset):
    """Voxelize point-cloud samples on a cylindrical (rho, phi, z) grid.

    Wraps *in_dataset*, applies optional augmentations (rotation, flip,
    scale, translation) and returns grid-wise voxel positions/labels plus
    point-wise grid indices, labels, features and — for 6/7-tuple samples —
    instance labels and per-point center offsets.
    """

    def __init__(self, in_dataset, grid_size, rotate_aug=False, flip_aug=False,
                 scale_aug=False, transform_aug=False, trans_std=[0.1, 0.1, 0.1],
                 min_rad=-np.pi / 4, max_rad=np.pi / 4, ignore_label=255,
                 return_test=False, fixed_volume_space=False,
                 max_volume_space=[50, np.pi, 1.5], min_volume_space=[3, -np.pi, -3],
                 center_type='Axis_center'):
        'Initialization'
        self.point_cloud_dataset = in_dataset
        self.grid_size = np.asarray(grid_size)
        self.rotate_aug = rotate_aug
        self.flip_aug = flip_aug
        self.ignore_label = ignore_label
        self.return_test = return_test
        self.fixed_volume_space = fixed_volume_space
        self.max_volume_space = max_volume_space
        self.min_volume_space = min_volume_space
        self.scale_aug = scale_aug
        self.transform = transform_aug
        self.trans_std = trans_std
        # NOTE(review): sampled once at construction and never read below —
        # rotation augmentation draws a fresh angle per sample instead.
        self.noise_rotation = np.random.uniform(min_rad, max_rad)
        assert center_type in ['Axis_center', 'Mass_center']
        self.center_type = center_type

    def __len__(self):
        'Denotes the total number of samples'
        return len(self.point_cloud_dataset)

    def __getitem__(self, index):
        'Generates one sample of data'
        data = self.point_cloud_dataset[index]
        if len(data) == 2:
            xyz, labels = data
        elif len(data) == 3:
            xyz, labels, sig = data
            if len(sig.shape) == 2: sig = np.squeeze(sig)
        elif len(data) == 6:
            xyz, labels, sig, ins_labels, valid, pcd_fname = data
            if len(sig.shape) == 2: sig = np.squeeze(sig)
        elif len(data) == 7:
            xyz, labels, sig, ins_labels, valid, pcd_fname, pose = data
            if len(sig.shape) == 2: sig = np.squeeze(sig)
        else:
            raise Exception('Return invalid data tuple')
        # random data augmentation by rotation around the z axis
        if self.rotate_aug:
            rotate_rad = np.deg2rad(np.random.random() * 360)
            c, s = np.cos(rotate_rad), np.sin(rotate_rad)
            j = np.matrix([[c, s], [-s, c]])
            xyz[:, :2] = np.dot(xyz[:, :2], j)
        # random data augmentation by flipping x, y or both
        if self.flip_aug:
            flip_type = np.random.choice(4, 1)
            if flip_type == 1:
                xyz[:, 0] = -xyz[:, 0]
            elif flip_type == 2:
                xyz[:, 1] = -xyz[:, 1]
            elif flip_type == 3:
                xyz[:, :2] = -xyz[:, :2]
        if self.scale_aug:
            noise_scale = np.random.uniform(0.95, 1.05)
            xyz[:, 0] = noise_scale * xyz[:, 0]
            xyz[:, 1] = noise_scale * xyz[:, 1]
        if self.transform:
            noise_translate = np.array([np.random.normal(0, self.trans_std[0], 1),
                                        np.random.normal(0, self.trans_std[1], 1),
                                        np.random.normal(0, self.trans_std[2], 1)]).T
            xyz[:, 0:3] += noise_translate
        # convert coordinates into cylindrical coordinates
        xyz_pol = cart2polar(xyz)
        max_bound_r = np.percentile(xyz_pol[:, 0], 100, axis=0)
        min_bound_r = np.percentile(xyz_pol[:, 0], 0, axis=0)
        max_bound = np.max(xyz_pol[:, 1:], axis=0)
        min_bound = np.min(xyz_pol[:, 1:], axis=0)
        max_bound = np.concatenate(([max_bound_r], max_bound))
        min_bound = np.concatenate(([min_bound_r], min_bound))
        if self.fixed_volume_space:
            max_bound = np.asarray(self.max_volume_space)
            min_bound = np.asarray(self.min_volume_space)
        # get grid index; (size-1) maps the bounds onto indices 0..size-1
        crop_range = max_bound - min_bound
        cur_grid_size = self.grid_size
        intervals = crop_range / (cur_grid_size - 1)
        if (intervals == 0).any(): print("Zero interval!")
        # point-wise grid index. BUGFIX: np.int alias was removed in NumPy >= 1.24
        grid_ind = (np.floor((np.clip(xyz_pol, min_bound, max_bound) - min_bound) / intervals)).astype(int)
        # voxel center coordinates (Cartesian), broadcast over the grid
        dim_array = np.ones(len(self.grid_size) + 1, int)
        dim_array[0] = -1
        voxel_position = np.indices(self.grid_size) * intervals.reshape(dim_array) + min_bound.reshape(dim_array)
        voxel_position = polar2cat(voxel_position)
        # per-voxel majority-vote semantic label
        processed_label = np.ones(self.grid_size, dtype=np.uint8) * self.ignore_label
        label_voxel_pair = np.concatenate([grid_ind, labels], axis=1)
        label_voxel_pair = label_voxel_pair[np.lexsort((grid_ind[:, 0], grid_ind[:, 1], grid_ind[:, 2])), :]
        processed_label = nb_process_label(np.copy(processed_label), label_voxel_pair)
        data_tuple = (voxel_position, processed_label)
        # center data on each voxel for PTnet
        voxel_centers = (grid_ind.astype(np.float32) + 0.5) * intervals + min_bound
        return_xyz = xyz_pol - voxel_centers  # TODO: calculate relative coordinate using polar system?
        return_xyz = np.concatenate((return_xyz, xyz_pol, xyz[:, :2]), axis=1)
        if len(data) == 2:
            return_fea = return_xyz
        elif len(data) >= 3:
            return_fea = np.concatenate((return_xyz, sig[..., np.newaxis]), axis=1)
        if self.return_test:
            data_tuple += (grid_ind, labels, return_fea, index)
        else:
            data_tuple += (grid_ind, labels, return_fea)
        if len(data) == 6:
            offsets = np.zeros([xyz.shape[0], 3], dtype=np.float32)
            offsets = nb_aggregate_pointwise_center_offset(offsets, xyz, ins_labels, self.center_type)
            data_tuple += (ins_labels, offsets, valid, xyz, pcd_fname)
        if len(data) == 7:
            offsets = np.zeros([xyz.shape[0], 3], dtype=np.float32)
            offsets = nb_aggregate_pointwise_center_offset(offsets, xyz, ins_labels, self.center_type)
            data_tuple += (ins_labels, offsets, valid, xyz, pcd_fname, pose)
        return data_tuple
class spherical_dataset_tracking(data.Dataset):
    """Cylindrical-grid voxelization for tracking pairs.

    The wrapped dataset returns a (current, previous) pair of 7-tuples; both
    frames are voxelized with IDENTICAL augmentation parameters so their
    geometry stays consistent, and the concatenated tuples are returned.
    """

    def __init__(self, in_dataset, grid_size, rotate_aug=False, flip_aug=False,
                 scale_aug=False, transform_aug=False, trans_std=[0.1, 0.1, 0.1],
                 min_rad=-np.pi / 4, max_rad=np.pi / 4, ignore_label=255,
                 return_test=False, fixed_volume_space=False,
                 max_volume_space=[50, np.pi, 1.5], min_volume_space=[3, -np.pi, -3],
                 center_type='Axis_center'):
        'Initialization'
        self.point_cloud_dataset = in_dataset
        self.grid_size = np.asarray(grid_size)
        self.rotate_aug = rotate_aug
        self.flip_aug = flip_aug
        self.ignore_label = ignore_label
        self.return_test = return_test
        self.fixed_volume_space = fixed_volume_space
        self.max_volume_space = max_volume_space
        self.min_volume_space = min_volume_space
        self.scale_aug = scale_aug
        self.transform = transform_aug
        self.trans_std = trans_std
        # NOTE(review): sampled once at construction and never read below
        self.noise_rotation = np.random.uniform(min_rad, max_rad)
        assert center_type in ['Axis_center', 'Mass_center']
        self.center_type = center_type

    def __len__(self):
        'Denotes the total number of samples'
        return len(self.point_cloud_dataset)

    def __getitem__(self, index):
        'Generates one sample of data'
        data, before_data = self.point_cloud_dataset[index]
        xyz, labels, sig, ins_labels, valid, pcd_fname, pose = data
        before_xyz, before_labels, before_sig, before_ins_labels, before_valid, before_pcd_fname, before_pose = before_data
        if len(sig.shape) == 2: sig = np.squeeze(sig)
        if len(before_sig.shape) == 2: before_sig = np.squeeze(before_sig)
        # Sample augmentation parameters ONCE; process_one_frame applies the
        # same parameters to both frames of the pair.
        aug_info = {}
        if self.rotate_aug:
            rotate_rad = np.deg2rad(np.random.random() * 360)
            c, s = np.cos(rotate_rad), np.sin(rotate_rad)
            aug_info['j'] = np.matrix([[c, s], [-s, c]])
        if self.flip_aug:
            aug_info['flip_type'] = np.random.choice(4, 1)
        if self.scale_aug:
            aug_info['noise_scale'] = np.random.uniform(0.95, 1.05)
        if self.transform:
            aug_info['noise_translate'] = np.array([np.random.normal(0, self.trans_std[0], 1),
                                                    np.random.normal(0, self.trans_std[1], 1),
                                                    np.random.normal(0, self.trans_std[2], 1)]).T
        data_tuple = self.process_one_frame(xyz, labels, sig, ins_labels, valid, pcd_fname, aug_info, pose)
        before_data_tuple = self.process_one_frame(before_xyz, before_labels, before_sig, before_ins_labels, before_valid, before_pcd_fname, aug_info, before_pose)
        return data_tuple + before_data_tuple

    def process_one_frame(self, xyz, labels, sig, ins_labels, valid, pcd_fname, aug_info, pose):
        """Voxelize a single frame using the pre-sampled augmentation parameters."""
        # random data augmentation by rotation around the z axis
        if self.rotate_aug:
            xyz[:, :2] = np.dot(xyz[:, :2], aug_info['j'])
        # random data augmentation by flipping x, y or both
        if self.flip_aug:
            if aug_info['flip_type'] == 1:
                xyz[:, 0] = -xyz[:, 0]
            elif aug_info['flip_type'] == 2:
                xyz[:, 1] = -xyz[:, 1]
            elif aug_info['flip_type'] == 3:
                xyz[:, :2] = -xyz[:, :2]
        if self.scale_aug:
            xyz[:, 0] = aug_info['noise_scale'] * xyz[:, 0]
            xyz[:, 1] = aug_info['noise_scale'] * xyz[:, 1]
        if self.transform:
            xyz[:, 0:3] += aug_info['noise_translate']
        # convert coordinates into cylindrical coordinates
        xyz_pol = cart2polar(xyz)
        max_bound_r = np.percentile(xyz_pol[:, 0], 100, axis=0)
        min_bound_r = np.percentile(xyz_pol[:, 0], 0, axis=0)
        max_bound = np.max(xyz_pol[:, 1:], axis=0)
        min_bound = np.min(xyz_pol[:, 1:], axis=0)
        max_bound = np.concatenate(([max_bound_r], max_bound))
        min_bound = np.concatenate(([min_bound_r], min_bound))
        if self.fixed_volume_space:
            max_bound = np.asarray(self.max_volume_space)
            min_bound = np.asarray(self.min_volume_space)
        # get grid index; (size-1) maps the bounds onto indices 0..size-1
        crop_range = max_bound - min_bound
        cur_grid_size = self.grid_size
        intervals = crop_range / (cur_grid_size - 1)
        if (intervals == 0).any(): print("Zero interval!")
        # point-wise grid index. BUGFIX: np.int alias was removed in NumPy >= 1.24
        grid_ind = (np.floor((np.clip(xyz_pol, min_bound, max_bound) - min_bound) / intervals)).astype(int)
        # voxel center coordinates (Cartesian), broadcast over the grid
        dim_array = np.ones(len(self.grid_size) + 1, int)
        dim_array[0] = -1
        voxel_position = np.indices(self.grid_size) * intervals.reshape(dim_array) + min_bound.reshape(dim_array)
        voxel_position = polar2cat(voxel_position)
        # per-voxel majority-vote semantic label
        processed_label = np.ones(self.grid_size, dtype=np.uint8) * self.ignore_label
        label_voxel_pair = np.concatenate([grid_ind, labels], axis=1)
        label_voxel_pair = label_voxel_pair[np.lexsort((grid_ind[:, 0], grid_ind[:, 1], grid_ind[:, 2])), :]
        processed_label = nb_process_label(np.copy(processed_label), label_voxel_pair)
        data_tuple = (voxel_position, processed_label)
        # center data on each voxel for PTnet
        voxel_centers = (grid_ind.astype(np.float32) + 0.5) * intervals + min_bound
        return_xyz = xyz_pol - voxel_centers  # TODO: calculate relative coordinate using polar system?
        return_xyz = np.concatenate((return_xyz, xyz_pol, xyz[:, :2]), axis=1)
        return_fea = np.concatenate((return_xyz, sig[..., np.newaxis]), axis=1)
        # (grid-wise coor, grid-wise sem label, point-wise grid index, point-wise
        # sem label, [relative polar coor(3), polar coor(3), cart coor(2), ref(1)])
        data_tuple += (grid_ind, labels, return_fea)
        offsets = np.zeros([xyz.shape[0], 3], dtype=np.float32)
        offsets = nb_aggregate_pointwise_center_offset(offsets, xyz, ins_labels, self.center_type)
        # plus (point-wise instance label, point-wise center offset, ...)
        data_tuple += (ins_labels, offsets, valid, xyz, pcd_fname, pose)
        return data_tuple
class spherical_dataset_multi_frames(data.Dataset):
    """Cylindrical-grid voxelization for merged multi-frame samples.

    The wrapped dataset returns 7-tuples that include a per-point source-frame
    mask (see the multi-frame ``__getitem__`` producing them).
    """

    def __init__(self, in_dataset, grid_size, rotate_aug=False, flip_aug=False,
                 scale_aug=False, transform_aug=False, trans_std=[0.1, 0.1, 0.1],
                 min_rad=-np.pi / 4, max_rad=np.pi / 4, ignore_label=255,
                 return_test=False, fixed_volume_space=False,
                 max_volume_space=[50, np.pi, 1.5], min_volume_space=[3, -np.pi, -3],
                 center_type='Axis_center'):
        'Initialization'
        self.point_cloud_dataset = in_dataset
        self.grid_size = np.asarray(grid_size)
        self.rotate_aug = rotate_aug
        self.flip_aug = flip_aug
        self.ignore_label = ignore_label
        self.return_test = return_test
        self.fixed_volume_space = fixed_volume_space
        self.max_volume_space = max_volume_space
        self.min_volume_space = min_volume_space
        self.scale_aug = scale_aug
        self.transform = transform_aug
        self.trans_std = trans_std
        # NOTE(review): sampled once at construction and never read below
        self.noise_rotation = np.random.uniform(min_rad, max_rad)
        assert center_type in ['Axis_center', 'Mass_center']
        self.center_type = center_type

    def __len__(self):
        'Denotes the total number of samples'
        return len(self.point_cloud_dataset)

    def __getitem__(self, index):
        'Generates one sample of data'
        data = self.point_cloud_dataset[index]
        assert len(data) == 7
        xyz, labels, sig, ins_labels, valid, mask, pcd_fname = data
        if len(sig.shape) == 2: sig = np.squeeze(sig)
        # random data augmentation by rotation around the z axis
        if self.rotate_aug:
            rotate_rad = np.deg2rad(np.random.random() * 360)
            c, s = np.cos(rotate_rad), np.sin(rotate_rad)
            j = np.matrix([[c, s], [-s, c]])
            xyz[:, :2] = np.dot(xyz[:, :2], j)
        # random data augmentation by flipping x, y or both
        if self.flip_aug:
            flip_type = np.random.choice(4, 1)
            if flip_type == 1:
                xyz[:, 0] = -xyz[:, 0]
            elif flip_type == 2:
                xyz[:, 1] = -xyz[:, 1]
            elif flip_type == 3:
                xyz[:, :2] = -xyz[:, :2]
        if self.scale_aug:
            noise_scale = np.random.uniform(0.95, 1.05)
            xyz[:, 0] = noise_scale * xyz[:, 0]
            xyz[:, 1] = noise_scale * xyz[:, 1]
        if self.transform:
            noise_translate = np.array([np.random.normal(0, self.trans_std[0], 1),
                                        np.random.normal(0, self.trans_std[1], 1),
                                        np.random.normal(0, self.trans_std[2], 1)]).T
            xyz[:, 0:3] += noise_translate
        # convert coordinates into cylindrical coordinates
        xyz_pol = cart2polar(xyz)
        max_bound_r = np.percentile(xyz_pol[:, 0], 100, axis=0)
        min_bound_r = np.percentile(xyz_pol[:, 0], 0, axis=0)
        max_bound = np.max(xyz_pol[:, 1:], axis=0)
        min_bound = np.min(xyz_pol[:, 1:], axis=0)
        max_bound = np.concatenate(([max_bound_r], max_bound))
        min_bound = np.concatenate(([min_bound_r], min_bound))
        if self.fixed_volume_space:
            max_bound = np.asarray(self.max_volume_space)
            min_bound = np.asarray(self.min_volume_space)
        # get grid index; (size-1) maps the bounds onto indices 0..size-1
        crop_range = max_bound - min_bound
        cur_grid_size = self.grid_size
        intervals = crop_range / (cur_grid_size - 1)
        if (intervals == 0).any(): print("Zero interval!")
        # point-wise grid index. BUGFIX: np.int alias was removed in NumPy >= 1.24
        grid_ind = (np.floor((np.clip(xyz_pol, min_bound, max_bound) - min_bound) / intervals)).astype(int)
        # voxel center coordinates (Cartesian), broadcast over the grid
        dim_array = np.ones(len(self.grid_size) + 1, int)
        dim_array[0] = -1
        voxel_position = np.indices(self.grid_size) * intervals.reshape(dim_array) + min_bound.reshape(dim_array)
        voxel_position = polar2cat(voxel_position)
        # per-voxel majority-vote semantic label
        processed_label = np.ones(self.grid_size, dtype=np.uint8) * self.ignore_label
        label_voxel_pair = np.concatenate([grid_ind, labels], axis=1)
        label_voxel_pair = label_voxel_pair[np.lexsort((grid_ind[:, 0], grid_ind[:, 1], grid_ind[:, 2])), :]
        processed_label = nb_process_label(np.copy(processed_label), label_voxel_pair)
        data_tuple = (voxel_position, processed_label)
        # center data on each voxel for PTnet
        voxel_centers = (grid_ind.astype(np.float32) + 0.5) * intervals + min_bound
        return_xyz = xyz_pol - voxel_centers
        return_xyz = np.concatenate((return_xyz, xyz_pol, xyz[:, :2]), axis=1)
        # samples always carry 7 fields (asserted above), so the dead
        # len(data)==2 branch of the original was removed: reflectance is
        # always appended to the features.
        return_fea = np.concatenate((return_xyz, sig[..., np.newaxis]), axis=1)
        if self.return_test:
            data_tuple += (grid_ind, labels, return_fea, index)
        else:
            data_tuple += (grid_ind, labels, return_fea)
        offsets = np.zeros([xyz.shape[0], 3], dtype=np.float32)
        offsets = nb_aggregate_pointwise_center_offset(offsets, xyz, ins_labels, self.center_type)
        # plus (point-wise instance label, point-wise center offset, ...)
        data_tuple += (ins_labels, offsets, valid, xyz, mask, pcd_fname)
        return data_tuple
def calc_xyz_middle(xyz):
    """Return the axis-aligned bounding-box center of an Nx3 point array as float32."""
    lo = np.min(xyz, axis=0)
    hi = np.max(xyz, axis=0)
    return ((hi[:3] + lo[:3]) / 2.0).astype(np.float32)
# Raw SemanticKITTI class ids treated as instance-capable "things"
# (252-259 are the moving variants of the corresponding static classes).
things_ids = set([10, 11, 13, 15, 16, 18, 20, 30, 31, 32, 252, 253, 254, 255, 256, 257, 258, 259])

# @nb.jit #TODO: why jit would lead to offsets all zero?
def nb_aggregate_pointwise_center_offset(offsets, xyz, ins_labels, center_type):
    """For every thing instance, write (instance center - point) into *offsets*
    for its member points; points of stuff classes keep their zero offset."""
    for inst_id in np.unique(ins_labels):
        # lower 16 bits of the instance label hold the semantic class id
        if (inst_id & 0xFFFF) not in things_ids:
            continue
        member_mask = (ins_labels == inst_id).reshape(-1)
        member_xyz = xyz[member_mask]
        if member_xyz.shape[0] <= 0:
            continue
        if center_type == 'Axis_center':
            center = calc_xyz_middle(member_xyz)
        elif center_type == 'Mass_center':
            center = np.mean(member_xyz, axis=0)
        else:
            raise NotImplementedError
        offsets[member_mask] = center - member_xyz
    return offsets
@nb.jit('u1[:,:,:](u1[:,:,:],i8[:,:])',nopython=True,cache=True,parallel = False)
def nb_process_label(processed_label,sorted_label_voxel_pair):
    # Majority-vote label per voxel. Rows of sorted_label_voxel_pair are
    # (x, y, z, label), pre-sorted so all points of one voxel are contiguous;
    # the most frequent label of each run is written into processed_label.
    label_size = 256  # labels fit in uint8, so at most 256 distinct values
    counter = np.zeros((label_size,),dtype = np.uint16)
    counter[sorted_label_voxel_pair[0,3]] = 1
    cur_sear_ind = sorted_label_voxel_pair[0,:3]  # voxel currently being counted
    for i in range(1,sorted_label_voxel_pair.shape[0]):
        cur_ind = sorted_label_voxel_pair[i,:3]
        if not np.all(np.equal(cur_ind,cur_sear_ind)):
            # voxel changed: commit the argmax of the finished run, reset counts
            processed_label[cur_sear_ind[0],cur_sear_ind[1],cur_sear_ind[2]] = np.argmax(counter)
            counter = np.zeros((label_size,),dtype = np.uint16)
            cur_sear_ind = cur_ind
        counter[sorted_label_voxel_pair[i,3]] += 1
    # commit the final run
    processed_label[cur_sear_ind[0],cur_sear_ind[1],cur_sear_ind[2]] = np.argmax(counter)
    return processed_label
def collate_fn_BEV(data):
    """Collate per-sample tuples into a batch dict.

    Voxel-wise arrays are stacked along a new batch dimension; point-wise
    fields (variable length per sample) are kept as per-sample lists.
    """
    vox_coor = np.stack([sample[0] for sample in data]).astype(np.float32)
    vox_label = np.stack([sample[1] for sample in data])
    return {
        'vox_coor': torch.from_numpy(vox_coor),
        'vox_label': torch.from_numpy(vox_label),
        'grid': [sample[2] for sample in data],            # point-wise grid index
        'pt_labs': [sample[3] for sample in data],         # point-wise sem label
        'pt_fea': [sample[4] for sample in data],          # point-wise features
        'pt_ins_labels': [sample[5] for sample in data],   # point-wise instance label
        'pt_offsets': [sample[6] for sample in data],      # point-wise center offset
        'pt_valid': [sample[7] for sample in data],        # foreground indicator
        'pt_cart_xyz': [sample[8] for sample in data],     # point-wise cart coords
        'pcd_fname': [sample[9] for sample in data],
        # pose is present only for 11-element samples
        'pose': [sample[10] for sample in data] if len(data[0]) > 10 else None,
    }
def collate_fn_BEV_multi_frames(data):
    """Collate merged multi-frame samples into a batch dict.

    Like collate_fn_BEV, but samples additionally carry a per-point
    source-frame mask (index 9) and a LIST of scan file names (index 10).
    """
    vox_coor = np.stack([sample[0] for sample in data]).astype(np.float32)
    vox_label = np.stack([sample[1] for sample in data])
    frame_mask = np.stack([sample[9] for sample in data]).astype(np.uint8)
    return {
        'vox_coor': torch.from_numpy(vox_coor),
        'vox_label': torch.from_numpy(vox_label),
        'grid': [sample[2] for sample in data],            # point-wise grid index
        'pt_labs': [sample[3] for sample in data],         # point-wise sem label
        'pt_fea': [sample[4] for sample in data],          # point-wise features
        'pt_ins_labels': [sample[5] for sample in data],   # point-wise instance label
        'pt_offsets': [sample[6] for sample in data],      # point-wise center offset
        'pt_valid': [sample[7] for sample in data],        # foreground indicator
        'pt_cart_xyz': [sample[8] for sample in data],     # point-wise cart coords
        'pcd_fname': [sample[10][0] for sample in data],   # current (first) scan of each group
        'pcd_list_fname': [sample[10] for sample in data], # full list of merged scans
        'mask': torch.from_numpy(frame_mask),
        'mask_np': frame_mask,
    }
def collate_fn_BEV_test(data):
    """Collate for test mode: returns a tuple (not a dict), with the
    per-sample dataset indices appended last."""
    stacked_coor = np.stack([item[0] for item in data]).astype(np.float32)
    stacked_label = np.stack([item[1] for item in data])
    grids = [item[2] for item in data]
    labels = [item[3] for item in data]
    feats = [item[4] for item in data]
    indices = [item[5] for item in data]
    return torch.from_numpy(stacked_coor), torch.from_numpy(stacked_label), grids, labels, feats, indices
def collate_fn_BEV_tracking(_data):  # stack alone batch dimension
    """Collate paired (current, previous) frames for tracking.

    Each raw sample concatenates the current frame's 11 fields with the
    previous frame's fields. Both halves are collated independently with
    collate_fn_BEV, and the previous-frame entries are merged into the
    result under 'before_'-prefixed keys.
    """
    current = collate_fn_BEV([sample[:11] for sample in _data])
    previous = collate_fn_BEV([sample[11:] for sample in _data])
    current.update({'before_' + key: value for key, value in previous.items()})
    return current
if __name__ == '__main__':
    # Smoke test when run as a script: build the training split from the
    # local './sequences' directory and run the box-size statistics pass.
    # SemKITTI and count_box_size are defined earlier in this file.
    dataset = SemKITTI('./sequences', 'train')
    dataset.count_box_size()
| 41.598918
| 208
| 0.582734
| 7,309
| 53,829
| 4.066904
| 0.063073
| 0.014836
| 0.006257
| 0.010429
| 0.840168
| 0.816922
| 0.797611
| 0.786611
| 0.767939
| 0.757544
| 0
| 0.0272
| 0.286946
| 53,829
| 1,293
| 209
| 41.63109
| 0.747232
| 0.107396
| 0
| 0.717277
| 0
| 0
| 0.046056
| 0.000586
| 0
| 0
| 0.000879
| 0.001547
| 0.00733
| 1
| 0.04712
| false
| 0
| 0.012565
| 0.002094
| 0.098429
| 0.01466
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86f1363f9c8e4b00a6ee3901d1f3b3c709f5d4a1
| 115,865
|
py
|
Python
|
wsltools/utils/UAS.py
|
Symbo1/wsltools
|
0b6e536fc85c707a1c81f0296c4e91ca835396a1
|
[
"MIT"
] | 412
|
2020-04-16T08:11:58.000Z
|
2022-02-02T19:49:53.000Z
|
wsltools/utils/UAS.py
|
Symbo1/wsltools
|
0b6e536fc85c707a1c81f0296c4e91ca835396a1
|
[
"MIT"
] | 1
|
2020-04-16T14:03:46.000Z
|
2020-04-17T03:41:18.000Z
|
wsltools/utils/UAS.py
|
Symbo1/wsltools
|
0b6e536fc85c707a1c81f0296c4e91ca835396a1
|
[
"MIT"
] | 33
|
2020-04-16T08:48:53.000Z
|
2021-10-20T04:39:29.000Z
|
__author__ = 'CongRong <tr3jer@gmail.com>'
USER_AGENTS = {
"Mobile": [
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_0 like Mac OS X; brx-IN) AppleWebKit/535.37.5 (KHTML, like Gecko) Version/4.0.5 Mobile/8B111 Safari/6535.37.5",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_0 like Mac OS X; si-LK) AppleWebKit/534.16.2 (KHTML, like Gecko) Version/4.0.5 Mobile/8B117 Safari/6534.16.2",
"Mozilla/5.0 (iPad; CPU iPad OS 4_2_1 like Mac OS X) AppleWebKit/535.0 (KHTML, like Gecko) FxiOS/12.1y4709.0 Mobile/86N466 Safari/535.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 4_2_1 like Mac OS X) AppleWebKit/533.0 (KHTML, like Gecko) CriOS/54.0.818.0 Mobile/53E945 Safari/533.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; fil-PH) AppleWebKit/533.12.6 (KHTML, like Gecko) Version/4.0.5 Mobile/8B115 Safari/6533.12.6",
"Mozilla/5.0 (iPad; CPU iPad OS 7_1_2 like Mac OS X) AppleWebKit/532.1 (KHTML, like Gecko) CriOS/22.0.867.0 Mobile/73P967 Safari/532.1",
"Mozilla/5.0 (Android 1.1; Mobile; rv:50.0) Gecko/50.0 Firefox/50.0",
"Mozilla/5.0 (iPad; CPU iPad OS 4_2_1 like Mac OS X) AppleWebKit/534.0 (KHTML, like Gecko) CriOS/24.0.808.0 Mobile/32W083 Safari/534.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3 like Mac OS X; mn-MN) AppleWebKit/535.15.7 (KHTML, like Gecko) Version/4.0.5 Mobile/8B114 Safari/6535.15.7",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; zh-HK) AppleWebKit/534.3.5 (KHTML, like Gecko) Version/3.0.5 Mobile/8B118 Safari/6534.3.5",
"Mozilla/5.0 (Android 4.4.3; Mobile; rv:66.0) Gecko/66.0 Firefox/66.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 9_3_6 like Mac OS X) AppleWebKit/533.0 (KHTML, like Gecko) CriOS/60.0.837.0 Mobile/82P736 Safari/533.0",
"Mozilla/5.0 (Android 8.1.0; Mobile; rv:67.0) Gecko/67.0 Firefox/67.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; sk-SK) AppleWebKit/533.14.5 (KHTML, like Gecko) Version/4.0.5 Mobile/8B113 Safari/6533.14.5",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; ln-CD) AppleWebKit/534.16.3 (KHTML, like Gecko) Version/3.0.5 Mobile/8B112 Safari/6534.16.3",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3 like Mac OS X; pt-BR) AppleWebKit/531.32.4 (KHTML, like Gecko) Version/3.0.5 Mobile/8B118 Safari/6531.32.4",
"Mozilla/5.0 (Linux; Android 2.0) AppleWebKit/534.0 (KHTML, like Gecko) Chrome/26.0.823.0 Safari/534.0",
"Mozilla/5.0 (Linux; Android 1.0) AppleWebKit/533.0 (KHTML, like Gecko) Chrome/16.0.894.0 Safari/533.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 6_1_6 like Mac OS X) AppleWebKit/536.2 (KHTML, like Gecko) CriOS/36.0.849.0 Mobile/13M170 Safari/536.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_0 like Mac OS X; hy-AM) AppleWebKit/535.6.4 (KHTML, like Gecko) Version/3.0.5 Mobile/8B117 Safari/6535.6.4",
"Mozilla/5.0 (iPad; CPU iPad OS 7_1_2 like Mac OS X) AppleWebKit/533.2 (KHTML, like Gecko) CriOS/24.0.845.0 Mobile/42L182 Safari/533.2",
"Mozilla/5.0 (Linux; Android 3.2.6) AppleWebKit/533.1 (KHTML, like Gecko) Chrome/30.0.821.0 Safari/533.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; en-DK) AppleWebKit/534.17.2 (KHTML, like Gecko) Version/4.0.5 Mobile/8B112 Safari/6534.17.2",
"Mozilla/5.0 (Android 8.1.0; Mobile; rv:15.0) Gecko/15.0 Firefox/15.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3 like Mac OS X; an-ES) AppleWebKit/532.45.6 (KHTML, like Gecko) Version/3.0.5 Mobile/8B114 Safari/6532.45.6",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; wal-ET) AppleWebKit/535.50.2 (KHTML, like Gecko) Version/4.0.5 Mobile/8B118 Safari/6535.50.2",
"Mozilla/5.0 (iPad; CPU iPad OS 10_3_4 like Mac OS X) AppleWebKit/531.1 (KHTML, like Gecko) CriOS/60.0.817.0 Mobile/89K143 Safari/531.1",
"Mozilla/5.0 (iPhone; CPU iPhone OS 12_4 like Mac OS X) AppleWebKit/535.1 (KHTML, like Gecko) FxiOS/16.3s8139.0 Mobile/46F230 Safari/535.1",
"Mozilla/5.0 (iPhone; CPU iPhone OS 6_1_6 like Mac OS X) AppleWebKit/536.1 (KHTML, like Gecko) FxiOS/16.1v0996.0 Mobile/90T082 Safari/536.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; te-IN) AppleWebKit/534.43.6 (KHTML, like Gecko) Version/3.0.5 Mobile/8B111 Safari/6534.43.6",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; gl-ES) AppleWebKit/534.26.1 (KHTML, like Gecko) Version/4.0.5 Mobile/8B116 Safari/6534.26.1",
"Mozilla/5.0 (Android 2.0; Mobile; rv:25.0) Gecko/25.0 Firefox/25.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 6_1_6 like Mac OS X) AppleWebKit/536.1 (KHTML, like Gecko) FxiOS/11.9z5261.0 Mobile/48D648 Safari/536.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_0 like Mac OS X; tk-TM) AppleWebKit/531.12.4 (KHTML, like Gecko) Version/4.0.5 Mobile/8B116 Safari/6531.12.4",
"Mozilla/5.0 (iPad; CPU iPad OS 5_1_1 like Mac OS X) AppleWebKit/534.1 (KHTML, like Gecko) CriOS/41.0.830.0 Mobile/94X484 Safari/534.1",
"Mozilla/5.0 (iPhone; CPU iPhone OS 7_1_2 like Mac OS X) AppleWebKit/533.2 (KHTML, like Gecko) FxiOS/12.7r1765.0 Mobile/96Y954 Safari/533.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; ak-GH) AppleWebKit/535.16.5 (KHTML, like Gecko) Version/4.0.5 Mobile/8B114 Safari/6535.16.5",
"Mozilla/5.0 (iPhone; CPU iPhone OS 9_3_6 like Mac OS X) AppleWebKit/531.2 (KHTML, like Gecko) FxiOS/14.0k9170.0 Mobile/85P921 Safari/531.2",
"Mozilla/5.0 (Android 5.0.2; Mobile; rv:7.0) Gecko/7.0 Firefox/7.0",
"Mozilla/5.0 (Linux; Android 4.0) AppleWebKit/531.1 (KHTML, like Gecko) Chrome/46.0.894.0 Safari/531.1",
"Mozilla/5.0 (iPad; CPU iPad OS 6_1_6 like Mac OS X) AppleWebKit/535.0 (KHTML, like Gecko) FxiOS/11.6v7290.0 Mobile/76L433 Safari/535.0",
"Mozilla/5.0 (iPad; CPU iPad OS 12_4 like Mac OS X) AppleWebKit/534.0 (KHTML, like Gecko) FxiOS/16.2x7629.0 Mobile/54W476 Safari/534.0",
"Mozilla/5.0 (Linux; Android 2.0) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/61.0.867.0 Safari/532.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; fa-IR) AppleWebKit/532.2.7 (KHTML, like Gecko) Version/4.0.5 Mobile/8B111 Safari/6532.2.7",
"Mozilla/5.0 (iPhone; CPU iPhone OS 3_1_3 like Mac OS X) AppleWebKit/532.0 (KHTML, like Gecko) CriOS/57.0.847.0 Mobile/83O886 Safari/532.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 9_3_6 like Mac OS X) AppleWebKit/532.1 (KHTML, like Gecko) CriOS/27.0.807.0 Mobile/63N973 Safari/532.1",
"Mozilla/5.0 (iPad; CPU iPad OS 7_1_2 like Mac OS X) AppleWebKit/534.0 (KHTML, like Gecko) FxiOS/17.1b4524.0 Mobile/49P918 Safari/534.0",
"Mozilla/5.0 (Android 2.0; Mobile; rv:48.0) Gecko/48.0 Firefox/48.0",
"Mozilla/5.0 (iPad; CPU iPad OS 5_1_1 like Mac OS X) AppleWebKit/535.0 (KHTML, like Gecko) CriOS/14.0.811.0 Mobile/12U127 Safari/535.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; ro-RO) AppleWebKit/535.12.4 (KHTML, like Gecko) Version/3.0.5 Mobile/8B119 Safari/6535.12.4",
"Mozilla/5.0 (Linux; Android 5.0.1) AppleWebKit/531.1 (KHTML, like Gecko) Chrome/22.0.874.0 Safari/531.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; se-NO) AppleWebKit/534.13.5 (KHTML, like Gecko) Version/4.0.5 Mobile/8B114 Safari/6534.13.5",
"Mozilla/5.0 (Linux; Android 3.2.6) AppleWebKit/531.1 (KHTML, like Gecko) Chrome/61.0.849.0 Safari/531.1",
"Mozilla/5.0 (Linux; Android 5.1) AppleWebKit/536.0 (KHTML, like Gecko) Chrome/25.0.894.0 Safari/536.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; he-IL) AppleWebKit/534.20.2 (KHTML, like Gecko) Version/4.0.5 Mobile/8B115 Safari/6534.20.2",
"Mozilla/5.0 (Android 3.2.1; Mobile; rv:36.0) Gecko/36.0 Firefox/36.0",
"Mozilla/5.0 (Android 7.1.2; Mobile; rv:35.0) Gecko/35.0 Firefox/35.0",
"Mozilla/5.0 (iPad; CPU iPad OS 5_1_1 like Mac OS X) AppleWebKit/536.0 (KHTML, like Gecko) CriOS/17.0.898.0 Mobile/93X843 Safari/536.0",
"Mozilla/5.0 (iPad; CPU iPad OS 4_2_1 like Mac OS X) AppleWebKit/532.2 (KHTML, like Gecko) CriOS/20.0.885.0 Mobile/85J608 Safari/532.2",
"Mozilla/5.0 (Linux; Android 3.2.3) AppleWebKit/534.2 (KHTML, like Gecko) Chrome/27.0.877.0 Safari/534.2",
"Mozilla/5.0 (iPad; CPU iPad OS 9_3_5 like Mac OS X) AppleWebKit/533.0 (KHTML, like Gecko) CriOS/28.0.893.0 Mobile/97X360 Safari/533.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_3 like Mac OS X) AppleWebKit/534.2 (KHTML, like Gecko) CriOS/53.0.874.0 Mobile/96G764 Safari/534.2",
"Mozilla/5.0 (iPhone; CPU iPhone OS 6_1_6 like Mac OS X) AppleWebKit/535.2 (KHTML, like Gecko) CriOS/39.0.871.0 Mobile/16N056 Safari/535.2",
"Mozilla/5.0 (Linux; Android 2.2.2) AppleWebKit/534.2 (KHTML, like Gecko) Chrome/47.0.874.0 Safari/534.2",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_3 like Mac OS X) AppleWebKit/533.1 (KHTML, like Gecko) FxiOS/18.9x3113.0 Mobile/22Z647 Safari/533.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; si-LK) AppleWebKit/535.16.4 (KHTML, like Gecko) Version/3.0.5 Mobile/8B117 Safari/6535.16.4",
"Mozilla/5.0 (Linux; Android 2.0) AppleWebKit/534.1 (KHTML, like Gecko) Chrome/45.0.819.0 Safari/534.1",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_3 like Mac OS X) AppleWebKit/531.2 (KHTML, like Gecko) CriOS/53.0.870.0 Mobile/28W489 Safari/531.2",
"Mozilla/5.0 (Linux; Android 3.2.4) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/20.0.875.0 Safari/535.1",
"Mozilla/5.0 (Linux; Android 4.4.4) AppleWebKit/534.1 (KHTML, like Gecko) Chrome/49.0.814.0 Safari/534.1",
"Mozilla/5.0 (iPhone; CPU iPhone OS 9_3_5 like Mac OS X) AppleWebKit/534.0 (KHTML, like Gecko) CriOS/26.0.871.0 Mobile/16W604 Safari/534.0",
"Mozilla/5.0 (iPad; CPU iPad OS 10_3_3 like Mac OS X) AppleWebKit/533.2 (KHTML, like Gecko) CriOS/18.0.834.0 Mobile/84Y303 Safari/533.2",
"Mozilla/5.0 (iPhone; CPU iPhone OS 7_1_2 like Mac OS X) AppleWebKit/531.2 (KHTML, like Gecko) FxiOS/10.8l4815.0 Mobile/11W157 Safari/531.2",
"Mozilla/5.0 (iPad; CPU iPad OS 6_1_6 like Mac OS X) AppleWebKit/535.1 (KHTML, like Gecko) FxiOS/15.8k5607.0 Mobile/62O872 Safari/535.1",
"Mozilla/5.0 (iPhone; CPU iPhone OS 4_2_1 like Mac OS X) AppleWebKit/531.1 (KHTML, like Gecko) FxiOS/15.9k1692.0 Mobile/56K836 Safari/531.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; lo-LA) AppleWebKit/531.37.2 (KHTML, like Gecko) Version/4.0.5 Mobile/8B113 Safari/6531.37.2",
"Mozilla/5.0 (Android 2.3.6; Mobile; rv:24.0) Gecko/24.0 Firefox/24.0",
"Mozilla/5.0 (Android 2.3.2; Mobile; rv:45.0) Gecko/45.0 Firefox/45.0",
"Mozilla/5.0 (Linux; Android 4.4.4) AppleWebKit/534.2 (KHTML, like Gecko) Chrome/51.0.884.0 Safari/534.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; xh-ZA) AppleWebKit/535.20.2 (KHTML, like Gecko) Version/4.0.5 Mobile/8B111 Safari/6535.20.2",
"Mozilla/5.0 (iPhone; CPU iPhone OS 9_3_5 like Mac OS X) AppleWebKit/534.0 (KHTML, like Gecko) CriOS/44.0.839.0 Mobile/11K622 Safari/534.0",
"Mozilla/5.0 (Linux; Android 9) AppleWebKit/531.1 (KHTML, like Gecko) Chrome/25.0.853.0 Safari/531.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3 like Mac OS X; ce-RU) AppleWebKit/533.7.5 (KHTML, like Gecko) Version/3.0.5 Mobile/8B118 Safari/6533.7.5",
"Mozilla/5.0 (Android 2.1; Mobile; rv:57.0) Gecko/57.0 Firefox/57.0",
"Mozilla/5.0 (Android 3.1; Mobile; rv:11.0) Gecko/11.0 Firefox/11.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3 like Mac OS X; csb-PL) AppleWebKit/533.4.5 (KHTML, like Gecko) Version/3.0.5 Mobile/8B113 Safari/6533.4.5",
"Mozilla/5.0 (Android 8.0.0; Mobile; rv:37.0) Gecko/37.0 Firefox/37.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3 like Mac OS X; hr-HR) AppleWebKit/532.7.4 (KHTML, like Gecko) Version/4.0.5 Mobile/8B119 Safari/6532.7.4",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_3 like Mac OS X) AppleWebKit/535.2 (KHTML, like Gecko) FxiOS/17.1w9073.0 Mobile/85H486 Safari/535.2",
"Mozilla/5.0 (Linux; Android 2.1) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/57.0.851.0 Safari/535.1",
"Mozilla/5.0 (iPhone; CPU iPhone OS 9_3_6 like Mac OS X) AppleWebKit/534.1 (KHTML, like Gecko) CriOS/54.0.841.0 Mobile/36Y545 Safari/534.1",
"Mozilla/5.0 (Linux; Android 4.0.2) AppleWebKit/533.1 (KHTML, like Gecko) Chrome/13.0.811.0 Safari/533.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; ts-ZA) AppleWebKit/533.2.2 (KHTML, like Gecko) Version/3.0.5 Mobile/8B115 Safari/6533.2.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; my-MM) AppleWebKit/534.31.3 (KHTML, like Gecko) Version/4.0.5 Mobile/8B113 Safari/6534.31.3",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_0 like Mac OS X; nr-ZA) AppleWebKit/535.2.3 (KHTML, like Gecko) Version/3.0.5 Mobile/8B118 Safari/6535.2.3",
"Mozilla/5.0 (Android 3.2; Mobile; rv:15.0) Gecko/15.0 Firefox/15.0",
"Mozilla/5.0 (Linux; Android 9) AppleWebKit/533.2 (KHTML, like Gecko) Chrome/43.0.872.0 Safari/533.2",
"Mozilla/5.0 (Android 4.4.1; Mobile; rv:5.0) Gecko/5.0 Firefox/5.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 7_1_2 like Mac OS X) AppleWebKit/532.2 (KHTML, like Gecko) CriOS/27.0.855.0 Mobile/82E331 Safari/532.2",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_3 like Mac OS X) AppleWebKit/533.1 (KHTML, like Gecko) FxiOS/17.0t3871.0 Mobile/44P803 Safari/533.1",
"Mozilla/5.0 (iPad; CPU iPad OS 7_1_2 like Mac OS X) AppleWebKit/533.1 (KHTML, like Gecko) FxiOS/9.8p5255.0 Mobile/34F648 Safari/533.1",
"Mozilla/5.0 (Linux; Android 2.3) AppleWebKit/536.1 (KHTML, like Gecko) Chrome/55.0.838.0 Safari/536.1",
"Mozilla/5.0 (Linux; Android 7.1) AppleWebKit/534.0 (KHTML, like Gecko) Chrome/22.0.897.0 Safari/534.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 6_1_6 like Mac OS X) AppleWebKit/536.2 (KHTML, like Gecko) CriOS/25.0.867.0 Mobile/15H432 Safari/536.2",
"Mozilla/5.0 (Linux; Android 2.3.7) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/51.0.887.0 Safari/532.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; br-FR) AppleWebKit/532.22.6 (KHTML, like Gecko) Version/3.0.5 Mobile/8B111 Safari/6532.22.6",
"Mozilla/5.0 (Android 2.3.3; Mobile; rv:31.0) Gecko/31.0 Firefox/31.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; rw-RW) AppleWebKit/531.47.5 (KHTML, like Gecko) Version/3.0.5 Mobile/8B113 Safari/6531.47.5",
"Mozilla/5.0 (iPad; CPU iPad OS 5_1_1 like Mac OS X) AppleWebKit/536.0 (KHTML, like Gecko) FxiOS/10.4t8709.0 Mobile/74G449 Safari/536.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_3 like Mac OS X; lg-UG) AppleWebKit/534.46.7 (KHTML, like Gecko) Version/3.0.5 Mobile/8B114 Safari/6534.46.7",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; cy-GB) AppleWebKit/533.11.5 (KHTML, like Gecko) Version/3.0.5 Mobile/8B111 Safari/6533.11.5",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; nl-AW) AppleWebKit/533.11.7 (KHTML, like Gecko) Version/4.0.5 Mobile/8B111 Safari/6533.11.7",
"Mozilla/5.0 (Android 1.6; Mobile; rv:33.0) Gecko/33.0 Firefox/33.0",
"Mozilla/5.0 (Linux; Android 4.2.2) AppleWebKit/533.2 (KHTML, like Gecko) Chrome/18.0.829.0 Safari/533.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; raj-IN) AppleWebKit/532.25.4 (KHTML, like Gecko) Version/4.0.5 Mobile/8B115 Safari/6532.25.4",
"Mozilla/5.0 (iPhone; CPU iPhone OS 5_1_1 like Mac OS X) AppleWebKit/534.1 (KHTML, like Gecko) FxiOS/14.4s4444.0 Mobile/54Q019 Safari/534.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_0 like Mac OS X; fo-FO) AppleWebKit/531.7.7 (KHTML, like Gecko) Version/4.0.5 Mobile/8B119 Safari/6531.7.7",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_3 like Mac OS X; br-FR) AppleWebKit/531.24.5 (KHTML, like Gecko) Version/3.0.5 Mobile/8B112 Safari/6531.24.5",
"Mozilla/5.0 (Android 7.0; Mobile; rv:60.0) Gecko/60.0 Firefox/60.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; om-ET) AppleWebKit/532.20.3 (KHTML, like Gecko) Version/3.0.5 Mobile/8B112 Safari/6532.20.3",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_0 like Mac OS X; fy-NL) AppleWebKit/535.47.1 (KHTML, like Gecko) Version/3.0.5 Mobile/8B115 Safari/6535.47.1",
"Mozilla/5.0 (Android 2.3.4; Mobile; rv:13.0) Gecko/13.0 Firefox/13.0",
"Mozilla/5.0 (iPad; CPU iPad OS 3_1_3 like Mac OS X) AppleWebKit/533.0 (KHTML, like Gecko) CriOS/55.0.896.0 Mobile/29N578 Safari/533.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_4 like Mac OS X) AppleWebKit/534.0 (KHTML, like Gecko) CriOS/35.0.835.0 Mobile/57S907 Safari/534.0",
"Mozilla/5.0 (iPad; CPU iPad OS 9_3_5 like Mac OS X) AppleWebKit/531.1 (KHTML, like Gecko) FxiOS/17.6p1211.0 Mobile/74X067 Safari/531.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_0 like Mac OS X; doi-IN) AppleWebKit/532.20.3 (KHTML, like Gecko) Version/3.0.5 Mobile/8B112 Safari/6532.20.3",
"Mozilla/5.0 (Android 4.3.1; Mobile; rv:59.0) Gecko/59.0 Firefox/59.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; unm-US) AppleWebKit/535.45.5 (KHTML, like Gecko) Version/4.0.5 Mobile/8B116 Safari/6535.45.5",
"Mozilla/5.0 (Linux; Android 4.4) AppleWebKit/531.0 (KHTML, like Gecko) Chrome/41.0.873.0 Safari/531.0",
"Mozilla/5.0 (Android 3.1; Mobile; rv:56.0) Gecko/56.0 Firefox/56.0",
"Mozilla/5.0 (iPad; CPU iPad OS 9_3_5 like Mac OS X) AppleWebKit/532.2 (KHTML, like Gecko) FxiOS/14.3w9918.0 Mobile/43C360 Safari/532.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3 like Mac OS X; th-TH) AppleWebKit/533.42.2 (KHTML, like Gecko) Version/3.0.5 Mobile/8B116 Safari/6533.42.2",
"Mozilla/5.0 (Linux; Android 2.2) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/53.0.849.0 Safari/532.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 6_1_6 like Mac OS X) AppleWebKit/531.0 (KHTML, like Gecko) FxiOS/16.4h4047.0 Mobile/19Y667 Safari/531.0",
"Mozilla/5.0 (iPad; CPU iPad OS 7_1_2 like Mac OS X) AppleWebKit/531.2 (KHTML, like Gecko) CriOS/26.0.838.0 Mobile/05M938 Safari/531.2",
"Mozilla/5.0 (iPhone; CPU iPhone OS 7_1_2 like Mac OS X) AppleWebKit/536.2 (KHTML, like Gecko) FxiOS/14.3s5369.0 Mobile/07W116 Safari/536.2",
"Mozilla/5.0 (iPhone; CPU iPhone OS 6_1_6 like Mac OS X) AppleWebKit/536.0 (KHTML, like Gecko) FxiOS/10.3d1690.0 Mobile/48C955 Safari/536.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; ms-MY) AppleWebKit/531.45.7 (KHTML, like Gecko) Version/3.0.5 Mobile/8B114 Safari/6531.45.7",
"Mozilla/5.0 (Android 5.0; Mobile; rv:45.0) Gecko/45.0 Firefox/45.0",
"Mozilla/5.0 (iPad; CPU iPad OS 12_4 like Mac OS X) AppleWebKit/535.0 (KHTML, like Gecko) FxiOS/12.5v4287.0 Mobile/35Q146 Safari/535.0",
"Mozilla/5.0 (Linux; Android 7.1) AppleWebKit/531.0 (KHTML, like Gecko) Chrome/56.0.824.0 Safari/531.0",
"Mozilla/5.0 (Linux; Android 8.0.0) AppleWebKit/535.2 (KHTML, like Gecko) Chrome/43.0.898.0 Safari/535.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; as-IN) AppleWebKit/534.18.3 (KHTML, like Gecko) Version/4.0.5 Mobile/8B114 Safari/6534.18.3",
"Mozilla/5.0 (iPad; CPU iPad OS 7_1_2 like Mac OS X) AppleWebKit/534.0 (KHTML, like Gecko) FxiOS/9.2o3851.0 Mobile/76F652 Safari/534.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3 like Mac OS X; rw-RW) AppleWebKit/535.42.1 (KHTML, like Gecko) Version/3.0.5 Mobile/8B117 Safari/6535.42.1",
"Mozilla/5.0 (Android 3.0; Mobile; rv:10.0) Gecko/10.0 Firefox/10.0",
"Mozilla/5.0 (Android 3.1; Mobile; rv:65.0) Gecko/65.0 Firefox/65.0",
"Mozilla/5.0 (iPad; CPU iPad OS 7_1_2 like Mac OS X) AppleWebKit/531.2 (KHTML, like Gecko) FxiOS/13.8u8794.0 Mobile/81M314 Safari/531.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; bho-IN) AppleWebKit/535.40.7 (KHTML, like Gecko) Version/4.0.5 Mobile/8B112 Safari/6535.40.7",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; pa-PK) AppleWebKit/535.40.6 (KHTML, like Gecko) Version/3.0.5 Mobile/8B119 Safari/6535.40.6",
"Mozilla/5.0 (Linux; Android 4.1) AppleWebKit/536.0 (KHTML, like Gecko) Chrome/56.0.847.0 Safari/536.0",
"Mozilla/5.0 (Android 1.5; Mobile; rv:19.0) Gecko/19.0 Firefox/19.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; iu-CA) AppleWebKit/534.19.7 (KHTML, like Gecko) Version/4.0.5 Mobile/8B116 Safari/6534.19.7",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3 like Mac OS X; ks-IN) AppleWebKit/531.14.1 (KHTML, like Gecko) Version/4.0.5 Mobile/8B115 Safari/6531.14.1",
"Mozilla/5.0 (Linux; Android 4.1) AppleWebKit/536.1 (KHTML, like Gecko) Chrome/62.0.828.0 Safari/536.1",
"Mozilla/5.0 (Android 2.3.7; Mobile; rv:20.0) Gecko/20.0 Firefox/20.0",
"Mozilla/5.0 (Linux; Android 2.3) AppleWebKit/536.2 (KHTML, like Gecko) Chrome/30.0.845.0 Safari/536.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3 like Mac OS X; da-DK) AppleWebKit/531.50.2 (KHTML, like Gecko) Version/4.0.5 Mobile/8B112 Safari/6531.50.2",
"Mozilla/5.0 (Android 3.1; Mobile; rv:32.0) Gecko/32.0 Firefox/32.0",
"Mozilla/5.0 (Android 2.3.2; Mobile; rv:20.0) Gecko/20.0 Firefox/20.0",
"Mozilla/5.0 (iPad; CPU iPad OS 6_1_6 like Mac OS X) AppleWebKit/532.0 (KHTML, like Gecko) CriOS/53.0.876.0 Mobile/81Y149 Safari/532.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; bho-IN) AppleWebKit/532.30.3 (KHTML, like Gecko) Version/3.0.5 Mobile/8B111 Safari/6532.30.3",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; tn-ZA) AppleWebKit/532.7.5 (KHTML, like Gecko) Version/3.0.5 Mobile/8B117 Safari/6532.7.5",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_0 like Mac OS X; hsb-DE) AppleWebKit/533.35.1 (KHTML, like Gecko) Version/4.0.5 Mobile/8B119 Safari/6533.35.1",
"Mozilla/5.0 (iPhone; CPU iPhone OS 9_3_5 like Mac OS X) AppleWebKit/533.2 (KHTML, like Gecko) CriOS/62.0.869.0 Mobile/73V511 Safari/533.2",
"Mozilla/5.0 (Android 4.1.1; Mobile; rv:27.0) Gecko/27.0 Firefox/27.0",
"Mozilla/5.0 (iPad; CPU iPad OS 12_4 like Mac OS X) AppleWebKit/532.2 (KHTML, like Gecko) FxiOS/18.3n1124.0 Mobile/97U989 Safari/532.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_0 like Mac OS X; gv-GB) AppleWebKit/535.40.7 (KHTML, like Gecko) Version/3.0.5 Mobile/8B114 Safari/6535.40.7",
"Mozilla/5.0 (iPad; CPU iPad OS 10_3_3 like Mac OS X) AppleWebKit/536.0 (KHTML, like Gecko) FxiOS/17.7z5037.0 Mobile/27K450 Safari/536.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_3 like Mac OS X; yi-US) AppleWebKit/532.27.2 (KHTML, like Gecko) Version/3.0.5 Mobile/8B118 Safari/6532.27.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; raj-IN) AppleWebKit/532.13.1 (KHTML, like Gecko) Version/4.0.5 Mobile/8B112 Safari/6532.13.1",
"Mozilla/5.0 (iPad; CPU iPad OS 12_4 like Mac OS X) AppleWebKit/531.0 (KHTML, like Gecko) FxiOS/12.7x2018.0 Mobile/44W064 Safari/531.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 12_4 like Mac OS X) AppleWebKit/532.0 (KHTML, like Gecko) CriOS/48.0.871.0 Mobile/98K825 Safari/532.0",
"Mozilla/5.0 (iPad; CPU iPad OS 3_1_3 like Mac OS X) AppleWebKit/532.1 (KHTML, like Gecko) FxiOS/9.6x4263.0 Mobile/97I548 Safari/532.1",
"Mozilla/5.0 (iPhone; CPU iPhone OS 3_1_3 like Mac OS X) AppleWebKit/535.2 (KHTML, like Gecko) CriOS/20.0.888.0 Mobile/42X415 Safari/535.2",
"Mozilla/5.0 (Linux; Android 4.3) AppleWebKit/531.1 (KHTML, like Gecko) Chrome/23.0.822.0 Safari/531.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_0 like Mac OS X; be-BY) AppleWebKit/533.50.2 (KHTML, like Gecko) Version/3.0.5 Mobile/8B114 Safari/6533.50.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; ber-MA) AppleWebKit/533.14.2 (KHTML, like Gecko) Version/4.0.5 Mobile/8B111 Safari/6533.14.2",
"Mozilla/5.0 (iPad; CPU iPad OS 7_1_2 like Mac OS X) AppleWebKit/535.2 (KHTML, like Gecko) CriOS/42.0.896.0 Mobile/84X476 Safari/535.2",
"Mozilla/5.0 (iPad; CPU iPad OS 7_1_2 like Mac OS X) AppleWebKit/536.0 (KHTML, like Gecko) CriOS/40.0.809.0 Mobile/56A197 Safari/536.0",
"Mozilla/5.0 (Linux; Android 4.1.1) AppleWebKit/533.0 (KHTML, like Gecko) Chrome/37.0.822.0 Safari/533.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_4 like Mac OS X) AppleWebKit/535.0 (KHTML, like Gecko) CriOS/42.0.850.0 Mobile/11Y792 Safari/535.0",
"Mozilla/5.0 (iPad; CPU iPad OS 10_3_4 like Mac OS X) AppleWebKit/535.0 (KHTML, like Gecko) CriOS/24.0.888.0 Mobile/51P333 Safari/535.0",
"Mozilla/5.0 (Android 5.1.1; Mobile; rv:21.0) Gecko/21.0 Firefox/21.0",
"Mozilla/5.0 (Linux; Android 6.0.1) AppleWebKit/535.0 (KHTML, like Gecko) Chrome/62.0.837.0 Safari/535.0",
"Mozilla/5.0 (Android 4.0.2; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0",
"Mozilla/5.0 (Linux; Android 7.1.2) AppleWebKit/534.2 (KHTML, like Gecko) Chrome/30.0.817.0 Safari/534.2",
"Mozilla/5.0 (iPad; CPU iPad OS 12_4 like Mac OS X) AppleWebKit/536.1 (KHTML, like Gecko) CriOS/38.0.854.0 Mobile/81R825 Safari/536.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_3 like Mac OS X; zh-TW) AppleWebKit/535.33.2 (KHTML, like Gecko) Version/4.0.5 Mobile/8B119 Safari/6535.33.2",
"Mozilla/5.0 (Android 7.1.1; Mobile; rv:40.0) Gecko/40.0 Firefox/40.0",
"Mozilla/5.0 (Android 5.0; Mobile; rv:9.0) Gecko/9.0 Firefox/9.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; ti-ER) AppleWebKit/533.37.7 (KHTML, like Gecko) Version/4.0.5 Mobile/8B117 Safari/6533.37.7",
"Mozilla/5.0 (iPhone; CPU iPhone OS 12_4 like Mac OS X) AppleWebKit/535.0 (KHTML, like Gecko) CriOS/43.0.898.0 Mobile/69V020 Safari/535.0",
"Mozilla/5.0 (Android 4.3; Mobile; rv:11.0) Gecko/11.0 Firefox/11.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_0 like Mac OS X; ts-ZA) AppleWebKit/535.35.5 (KHTML, like Gecko) Version/3.0.5 Mobile/8B117 Safari/6535.35.5",
"Mozilla/5.0 (iPad; CPU iPad OS 12_4 like Mac OS X) AppleWebKit/536.0 (KHTML, like Gecko) FxiOS/14.3c8165.0 Mobile/03V121 Safari/536.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_3 like Mac OS X; lb-LU) AppleWebKit/531.28.6 (KHTML, like Gecko) Version/3.0.5 Mobile/8B116 Safari/6531.28.6",
"Mozilla/5.0 (iPad; CPU iPad OS 4_2_1 like Mac OS X) AppleWebKit/531.2 (KHTML, like Gecko) FxiOS/11.7l1968.0 Mobile/07U666 Safari/531.2",
"Mozilla/5.0 (iPhone; CPU iPhone OS 6_1_6 like Mac OS X) AppleWebKit/535.2 (KHTML, like Gecko) FxiOS/17.5t3623.0 Mobile/62W812 Safari/535.2",
"Mozilla/5.0 (iPad; CPU iPad OS 4_2_1 like Mac OS X) AppleWebKit/535.0 (KHTML, like Gecko) FxiOS/17.0p1769.0 Mobile/00Z053 Safari/535.0",
"Mozilla/5.0 (Linux; Android 7.1.1) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/41.0.872.0 Safari/532.1",
"Mozilla/5.0 (Linux; Android 2.1) AppleWebKit/534.1 (KHTML, like Gecko) Chrome/24.0.899.0 Safari/534.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_0 like Mac OS X; my-MM) AppleWebKit/532.16.5 (KHTML, like Gecko) Version/4.0.5 Mobile/8B115 Safari/6532.16.5",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_4 like Mac OS X) AppleWebKit/534.2 (KHTML, like Gecko) CriOS/16.0.861.0 Mobile/72F048 Safari/534.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; hsb-DE) AppleWebKit/531.45.6 (KHTML, like Gecko) Version/3.0.5 Mobile/8B116 Safari/6531.45.6",
"Mozilla/5.0 (Linux; Android 2.3.2) AppleWebKit/536.1 (KHTML, like Gecko) Chrome/28.0.815.0 Safari/536.1",
"Mozilla/5.0 (iPhone; CPU iPhone OS 4_2_1 like Mac OS X) AppleWebKit/536.1 (KHTML, like Gecko) FxiOS/9.2q6557.0 Mobile/33E910 Safari/536.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; pa-IN) AppleWebKit/532.5.2 (KHTML, like Gecko) Version/4.0.5 Mobile/8B119 Safari/6532.5.2",
"Mozilla/5.0 (Android 4.1.1; Mobile; rv:6.0) Gecko/6.0 Firefox/6.0",
"Mozilla/5.0 (Linux; Android 7.0) AppleWebKit/535.2 (KHTML, like Gecko) Chrome/15.0.845.0 Safari/535.2",
"Mozilla/5.0 (iPad; CPU iPad OS 10_3_3 like Mac OS X) AppleWebKit/536.2 (KHTML, like Gecko) FxiOS/14.0q8061.0 Mobile/93F259 Safari/536.2",
"Mozilla/5.0 (iPad; CPU iPad OS 9_3_5 like Mac OS X) AppleWebKit/536.2 (KHTML, like Gecko) FxiOS/12.3u9606.0 Mobile/31Q469 Safari/536.2",
"Mozilla/5.0 (iPad; CPU iPad OS 6_1_6 like Mac OS X) AppleWebKit/535.0 (KHTML, like Gecko) FxiOS/16.4g4700.0 Mobile/70H876 Safari/535.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 4_2_1 like Mac OS X) AppleWebKit/533.1 (KHTML, like Gecko) FxiOS/16.0l8028.0 Mobile/43C192 Safari/533.1",
"Mozilla/5.0 (Android 5.0; Mobile; rv:26.0) Gecko/26.0 Firefox/26.0",
"Mozilla/5.0 (Android 4.0.1; Mobile; rv:27.0) Gecko/27.0 Firefox/27.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 7_1_2 like Mac OS X) AppleWebKit/535.1 (KHTML, like Gecko) CriOS/38.0.803.0 Mobile/10L482 Safari/535.1",
"Mozilla/5.0 (iPad; CPU iPad OS 10_3_4 like Mac OS X) AppleWebKit/536.0 (KHTML, like Gecko) CriOS/19.0.842.0 Mobile/85X774 Safari/536.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_3 like Mac OS X; rw-RW) AppleWebKit/532.19.1 (KHTML, like Gecko) Version/3.0.5 Mobile/8B111 Safari/6532.19.1",
"Mozilla/5.0 (Android 4.4; Mobile; rv:51.0) Gecko/51.0 Firefox/51.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_0 like Mac OS X; lij-IT) AppleWebKit/535.22.7 (KHTML, like Gecko) Version/3.0.5 Mobile/8B119 Safari/6535.22.7",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3 like Mac OS X; nb-NO) AppleWebKit/535.19.5 (KHTML, like Gecko) Version/4.0.5 Mobile/8B112 Safari/6535.19.5",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; wal-ET) AppleWebKit/531.20.6 (KHTML, like Gecko) Version/4.0.5 Mobile/8B115 Safari/6531.20.6",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_4 like Mac OS X) AppleWebKit/535.1 (KHTML, like Gecko) FxiOS/10.9u1098.0 Mobile/57B474 Safari/535.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_0 like Mac OS X; lb-LU) AppleWebKit/533.39.1 (KHTML, like Gecko) Version/3.0.5 Mobile/8B117 Safari/6533.39.1",
"Mozilla/5.0 (iPhone; CPU iPhone OS 4_2_1 like Mac OS X) AppleWebKit/533.0 (KHTML, like Gecko) CriOS/50.0.875.0 Mobile/07Z474 Safari/533.0",
"Mozilla/5.0 (iPad; CPU iPad OS 5_1_1 like Mac OS X) AppleWebKit/532.0 (KHTML, like Gecko) CriOS/41.0.878.0 Mobile/78L406 Safari/532.0",
"Mozilla/5.0 (Linux; Android 3.2.4) AppleWebKit/532.2 (KHTML, like Gecko) Chrome/54.0.896.0 Safari/532.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; sd-IN) AppleWebKit/531.29.5 (KHTML, like Gecko) Version/4.0.5 Mobile/8B111 Safari/6531.29.5",
"Mozilla/5.0 (Android 4.4.1; Mobile; rv:25.0) Gecko/25.0 Firefox/25.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 9_3_5 like Mac OS X) AppleWebKit/532.2 (KHTML, like Gecko) CriOS/26.0.882.0 Mobile/57B108 Safari/532.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; bs-BA) AppleWebKit/534.29.5 (KHTML, like Gecko) Version/4.0.5 Mobile/8B115 Safari/6534.29.5",
"Mozilla/5.0 (Android 4.0; Mobile; rv:9.0) Gecko/9.0 Firefox/9.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 9_3_6 like Mac OS X) AppleWebKit/533.0 (KHTML, like Gecko) FxiOS/18.9k9772.0 Mobile/95I420 Safari/533.0",
"Mozilla/5.0 (Android 2.3.4; Mobile; rv:14.0) Gecko/14.0 Firefox/14.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 6_1_6 like Mac OS X) AppleWebKit/532.0 (KHTML, like Gecko) FxiOS/17.1s0442.0 Mobile/60W887 Safari/532.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; nn-NO) AppleWebKit/534.25.5 (KHTML, like Gecko) Version/3.0.5 Mobile/8B118 Safari/6534.25.5",
"Mozilla/5.0 (Android 4.2.2; Mobile; rv:67.0) Gecko/67.0 Firefox/67.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; vi-VN) AppleWebKit/533.15.5 (KHTML, like Gecko) Version/4.0.5 Mobile/8B111 Safari/6533.15.5",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; fr-FR) AppleWebKit/533.45.1 (KHTML, like Gecko) Version/3.0.5 Mobile/8B114 Safari/6533.45.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_3 like Mac OS X; ky-KG) AppleWebKit/533.15.4 (KHTML, like Gecko) Version/3.0.5 Mobile/8B111 Safari/6533.15.4",
"Mozilla/5.0 (Linux; Android 2.2.3) AppleWebKit/531.1 (KHTML, like Gecko) Chrome/14.0.832.0 Safari/531.1",
"Mozilla/5.0 (Linux; Android 4.4.4) AppleWebKit/534.0 (KHTML, like Gecko) Chrome/20.0.819.0 Safari/534.0",
"Mozilla/5.0 (iPad; CPU iPad OS 4_2_1 like Mac OS X) AppleWebKit/532.2 (KHTML, like Gecko) CriOS/37.0.891.0 Mobile/95G872 Safari/532.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; be-BY) AppleWebKit/534.2.5 (KHTML, like Gecko) Version/4.0.5 Mobile/8B119 Safari/6534.2.5",
"Mozilla/5.0 (Linux; Android 1.0) AppleWebKit/534.0 (KHTML, like Gecko) Chrome/22.0.864.0 Safari/534.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 3_1_3 like Mac OS X) AppleWebKit/531.0 (KHTML, like Gecko) FxiOS/14.5q6961.0 Mobile/74F053 Safari/531.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; mr-IN) AppleWebKit/531.14.2 (KHTML, like Gecko) Version/3.0.5 Mobile/8B111 Safari/6531.14.2",
"Mozilla/5.0 (Linux; Android 4.3) AppleWebKit/536.2 (KHTML, like Gecko) Chrome/21.0.875.0 Safari/536.2",
"Mozilla/5.0 (Android 3.2.1; Mobile; rv:23.0) Gecko/23.0 Firefox/23.0",
"Mozilla/5.0 (iPad; CPU iPad OS 4_2_1 like Mac OS X) AppleWebKit/534.0 (KHTML, like Gecko) FxiOS/10.6z1261.0 Mobile/38Q768 Safari/534.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; cy-GB) AppleWebKit/532.41.1 (KHTML, like Gecko) Version/4.0.5 Mobile/8B115 Safari/6532.41.1",
"Mozilla/5.0 (iPad; CPU iPad OS 6_1_6 like Mac OS X) AppleWebKit/532.0 (KHTML, like Gecko) CriOS/15.0.865.0 Mobile/70D987 Safari/532.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; ka-GE) AppleWebKit/534.35.7 (KHTML, like Gecko) Version/3.0.5 Mobile/8B119 Safari/6534.35.7",
"Mozilla/5.0 (Android 4.2.1; Mobile; rv:19.0) Gecko/19.0 Firefox/19.0",
"Mozilla/5.0 (Linux; Android 7.0) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/41.0.838.0 Safari/532.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 4_2_1 like Mac OS X) AppleWebKit/532.1 (KHTML, like Gecko) CriOS/38.0.891.0 Mobile/24M395 Safari/532.1",
"Mozilla/5.0 (iPhone; CPU iPhone OS 5_1_1 like Mac OS X) AppleWebKit/534.2 (KHTML, like Gecko) FxiOS/12.1c8245.0 Mobile/43W482 Safari/534.2",
"Mozilla/5.0 (iPhone; CPU iPhone OS 3_1_3 like Mac OS X) AppleWebKit/536.0 (KHTML, like Gecko) CriOS/15.0.847.0 Mobile/90S021 Safari/536.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_3 like Mac OS X; et-EE) AppleWebKit/535.28.6 (KHTML, like Gecko) Version/4.0.5 Mobile/8B119 Safari/6535.28.6",
"Mozilla/5.0 (Android 2.2; Mobile; rv:40.0) Gecko/40.0 Firefox/40.0",
"Mozilla/5.0 (iPad; CPU iPad OS 9_3_5 like Mac OS X) AppleWebKit/531.1 (KHTML, like Gecko) CriOS/33.0.819.0 Mobile/15Y310 Safari/531.1",
"Mozilla/5.0 (Android 2.3.4; Mobile; rv:6.0) Gecko/6.0 Firefox/6.0",
"Mozilla/5.0 (Linux; Android 4.2.1) AppleWebKit/531.2 (KHTML, like Gecko) Chrome/27.0.801.0 Safari/531.2",
"Mozilla/5.0 (Android 3.1; Mobile; rv:36.0) Gecko/36.0 Firefox/36.0",
"Mozilla/5.0 (Linux; Android 5.1.1) AppleWebKit/533.1 (KHTML, like Gecko) Chrome/25.0.845.0 Safari/533.1",
"Mozilla/5.0 (Linux; Android 2.0.1) AppleWebKit/534.1 (KHTML, like Gecko) Chrome/60.0.840.0 Safari/534.1",
"Mozilla/5.0 (iPhone; CPU iPhone OS 9_3_5 like Mac OS X) AppleWebKit/535.2 (KHTML, like Gecko) CriOS/25.0.827.0 Mobile/69A826 Safari/535.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; uz-UZ) AppleWebKit/533.41.1 (KHTML, like Gecko) Version/4.0.5 Mobile/8B111 Safari/6533.41.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_0 like Mac OS X; fa-IR) AppleWebKit/532.42.7 (KHTML, like Gecko) Version/4.0.5 Mobile/8B115 Safari/6532.42.7",
"Mozilla/5.0 (Android 5.0.1; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0",
"Mozilla/5.0 (Android 7.1.2; Mobile; rv:21.0) Gecko/21.0 Firefox/21.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 7_1_2 like Mac OS X) AppleWebKit/536.1 (KHTML, like Gecko) FxiOS/15.2o7027.0 Mobile/64P316 Safari/536.1",
"Mozilla/5.0 (Android 6.0; Mobile; rv:10.0) Gecko/10.0 Firefox/10.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; brx-IN) AppleWebKit/534.5.2 (KHTML, like Gecko) Version/4.0.5 Mobile/8B115 Safari/6534.5.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_0 like Mac OS X; ro-RO) AppleWebKit/532.4.5 (KHTML, like Gecko) Version/3.0.5 Mobile/8B115 Safari/6532.4.5",
"Mozilla/5.0 (iPad; CPU iPad OS 7_1_2 like Mac OS X) AppleWebKit/531.2 (KHTML, like Gecko) FxiOS/11.6p9610.0 Mobile/51P846 Safari/531.2",
"Mozilla/5.0 (iPhone; CPU iPhone OS 5_1_1 like Mac OS X) AppleWebKit/534.1 (KHTML, like Gecko) FxiOS/15.7w9984.0 Mobile/74D247 Safari/534.1",
"Mozilla/5.0 (iPhone; CPU iPhone OS 5_1_1 like Mac OS X) AppleWebKit/533.2 (KHTML, like Gecko) FxiOS/12.5l9084.0 Mobile/53Z556 Safari/533.2",
"Mozilla/5.0 (iPhone; CPU iPhone OS 4_2_1 like Mac OS X) AppleWebKit/532.1 (KHTML, like Gecko) CriOS/20.0.807.0 Mobile/06W979 Safari/532.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_0 like Mac OS X; sv-FI) AppleWebKit/535.42.2 (KHTML, like Gecko) Version/3.0.5 Mobile/8B116 Safari/6535.42.2",
"Mozilla/5.0 (Linux; Android 2.0) AppleWebKit/535.2 (KHTML, like Gecko) Chrome/32.0.826.0 Safari/535.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; mag-IN) AppleWebKit/535.19.1 (KHTML, like Gecko) Version/4.0.5 Mobile/8B115 Safari/6535.19.1",
"Mozilla/5.0 (iPhone; CPU iPhone OS 3_1_3 like Mac OS X) AppleWebKit/531.2 (KHTML, like Gecko) FxiOS/12.4t2801.0 Mobile/78R810 Safari/531.2",
"Mozilla/5.0 (Linux; Android 6.0.1) AppleWebKit/534.0 (KHTML, like Gecko) Chrome/56.0.841.0 Safari/534.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; wal-ET) AppleWebKit/534.36.6 (KHTML, like Gecko) Version/4.0.5 Mobile/8B118 Safari/6534.36.6",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; et-EE) AppleWebKit/534.37.5 (KHTML, like Gecko) Version/4.0.5 Mobile/8B113 Safari/6534.37.5",
"Mozilla/5.0 (Android 1.0; Mobile; rv:63.0) Gecko/63.0 Firefox/63.0",
"Mozilla/5.0 (Android 3.0; Mobile; rv:45.0) Gecko/45.0 Firefox/45.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_0 like Mac OS X; ks-IN) AppleWebKit/533.19.7 (KHTML, like Gecko) Version/3.0.5 Mobile/8B112 Safari/6533.19.7",
"Mozilla/5.0 (Android 2.3.5; Mobile; rv:11.0) Gecko/11.0 Firefox/11.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; iu-CA) AppleWebKit/535.49.4 (KHTML, like Gecko) Version/3.0.5 Mobile/8B117 Safari/6535.49.4",
"Mozilla/5.0 (Android 7.1.2; Mobile; rv:7.0) Gecko/7.0 Firefox/7.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_0 like Mac OS X; bg-BG) AppleWebKit/534.24.3 (KHTML, like Gecko) Version/3.0.5 Mobile/8B116 Safari/6534.24.3",
"Mozilla/5.0 (Linux; Android 2.0.1) AppleWebKit/534.0 (KHTML, like Gecko) Chrome/41.0.806.0 Safari/534.0",
"Mozilla/5.0 (Android 3.2.4; Mobile; rv:23.0) Gecko/23.0 Firefox/23.0",
"Mozilla/5.0 (Linux; Android 2.2.2) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/57.0.857.0 Safari/535.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; ln-CD) AppleWebKit/531.47.3 (KHTML, like Gecko) Version/4.0.5 Mobile/8B113 Safari/6531.47.3",
"Mozilla/5.0 (Android 4.3; Mobile; rv:41.0) Gecko/41.0 Firefox/41.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; iu-CA) AppleWebKit/531.42.2 (KHTML, like Gecko) Version/3.0.5 Mobile/8B113 Safari/6531.42.2",
"Mozilla/5.0 (Linux; Android 2.2) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/53.0.804.0 Safari/532.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 3_1_3 like Mac OS X) AppleWebKit/534.0 (KHTML, like Gecko) CriOS/43.0.809.0 Mobile/34T698 Safari/534.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_4 like Mac OS X) AppleWebKit/535.0 (KHTML, like Gecko) FxiOS/11.8u5040.0 Mobile/19U691 Safari/535.0",
"Mozilla/5.0 (Linux; Android 4.2.1) AppleWebKit/535.0 (KHTML, like Gecko) Chrome/61.0.837.0 Safari/535.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 12_4 like Mac OS X) AppleWebKit/533.0 (KHTML, like Gecko) CriOS/21.0.808.0 Mobile/23Y027 Safari/533.0",
"Mozilla/5.0 (Android 2.3.5; Mobile; rv:22.0) Gecko/22.0 Firefox/22.0",
"Mozilla/5.0 (iPad; CPU iPad OS 10_3_3 like Mac OS X) AppleWebKit/535.0 (KHTML, like Gecko) CriOS/34.0.802.0 Mobile/60J650 Safari/535.0",
"Mozilla/5.0 (iPad; CPU iPad OS 10_3_3 like Mac OS X) AppleWebKit/535.1 (KHTML, like Gecko) FxiOS/17.8q5168.0 Mobile/02S074 Safari/535.1",
"Mozilla/5.0 (iPad; CPU iPad OS 12_4 like Mac OS X) AppleWebKit/533.0 (KHTML, like Gecko) FxiOS/14.9z1027.0 Mobile/44M403 Safari/533.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; sq-ML) AppleWebKit/535.27.7 (KHTML, like Gecko) Version/3.0.5 Mobile/8B116 Safari/6535.27.7",
"Mozilla/5.0 (iPad; CPU iPad OS 10_3_3 like Mac OS X) AppleWebKit/532.2 (KHTML, like Gecko) CriOS/41.0.802.0 Mobile/00E292 Safari/532.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; ms-MY) AppleWebKit/535.42.7 (KHTML, like Gecko) Version/3.0.5 Mobile/8B116 Safari/6535.42.7",
"Mozilla/5.0 (Linux; Android 3.2.3) AppleWebKit/534.2 (KHTML, like Gecko) Chrome/35.0.890.0 Safari/534.2",
"Mozilla/5.0 (iPad; CPU iPad OS 10_3_4 like Mac OS X) AppleWebKit/533.2 (KHTML, like Gecko) CriOS/59.0.818.0 Mobile/42M102 Safari/533.2",
"Mozilla/5.0 (iPad; CPU iPad OS 3_1_3 like Mac OS X) AppleWebKit/534.0 (KHTML, like Gecko) FxiOS/12.3i8634.0 Mobile/27U669 Safari/534.0",
"Mozilla/5.0 (Android 1.0; Mobile; rv:33.0) Gecko/33.0 Firefox/33.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_0 like Mac OS X; bs-BA) AppleWebKit/534.18.6 (KHTML, like Gecko) Version/4.0.5 Mobile/8B111 Safari/6534.18.6",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3 like Mac OS X; ca-ES) AppleWebKit/534.43.4 (KHTML, like Gecko) Version/4.0.5 Mobile/8B114 Safari/6534.43.4",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; mt-MT) AppleWebKit/532.49.1 (KHTML, like Gecko) Version/4.0.5 Mobile/8B113 Safari/6532.49.1",
"Mozilla/5.0 (Android 4.4.1; Mobile; rv:40.0) Gecko/40.0 Firefox/40.0",
"Mozilla/5.0 (Android 4.4.4; Mobile; rv:52.0) Gecko/52.0 Firefox/52.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 5_1_1 like Mac OS X) AppleWebKit/532.1 (KHTML, like Gecko) FxiOS/16.1n7164.0 Mobile/28T007 Safari/532.1",
"Mozilla/5.0 (Linux; Android 2.1) AppleWebKit/536.0 (KHTML, like Gecko) Chrome/47.0.818.0 Safari/536.0",
"Mozilla/5.0 (iPad; CPU iPad OS 5_1_1 like Mac OS X) AppleWebKit/535.2 (KHTML, like Gecko) CriOS/25.0.808.0 Mobile/57L596 Safari/535.2",
"Mozilla/5.0 (iPad; CPU iPad OS 12_4 like Mac OS X) AppleWebKit/533.0 (KHTML, like Gecko) FxiOS/12.6p5598.0 Mobile/70P528 Safari/533.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_4 like Mac OS X) AppleWebKit/533.0 (KHTML, like Gecko) CriOS/22.0.851.0 Mobile/07D387 Safari/533.0",
"Mozilla/5.0 (Android 5.1; Mobile; rv:63.0) Gecko/63.0 Firefox/63.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; it-IT) AppleWebKit/531.47.3 (KHTML, like Gecko) Version/3.0.5 Mobile/8B114 Safari/6531.47.3",
"Mozilla/5.0 (Android 4.0; Mobile; rv:32.0) Gecko/32.0 Firefox/32.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; yi-US) AppleWebKit/531.12.3 (KHTML, like Gecko) Version/4.0.5 Mobile/8B112 Safari/6531.12.3",
"Mozilla/5.0 (Linux; Android 7.1.1) AppleWebKit/536.2 (KHTML, like Gecko) Chrome/30.0.809.0 Safari/536.2",
"Mozilla/5.0 (Linux; Android 3.2.2) AppleWebKit/534.0 (KHTML, like Gecko) Chrome/42.0.815.0 Safari/534.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; mi-NZ) AppleWebKit/532.6.6 (KHTML, like Gecko) Version/3.0.5 Mobile/8B113 Safari/6532.6.6",
"Mozilla/5.0 (iPad; CPU iPad OS 4_2_1 like Mac OS X) AppleWebKit/536.1 (KHTML, like Gecko) FxiOS/13.0u3436.0 Mobile/77L123 Safari/536.1",
"Mozilla/5.0 (Android 2.2.2; Mobile; rv:52.0) Gecko/52.0 Firefox/52.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 5_1_1 like Mac OS X) AppleWebKit/531.1 (KHTML, like Gecko) FxiOS/12.9p4625.0 Mobile/93S753 Safari/531.1",
"Mozilla/5.0 (iPhone; CPU iPhone OS 4_2_1 like Mac OS X) AppleWebKit/532.0 (KHTML, like Gecko) CriOS/24.0.885.0 Mobile/20J300 Safari/532.0",
"Mozilla/5.0 (iPad; CPU iPad OS 5_1_1 like Mac OS X) AppleWebKit/532.2 (KHTML, like Gecko) CriOS/38.0.898.0 Mobile/80Z387 Safari/532.2",
"Mozilla/5.0 (iPad; CPU iPad OS 12_4 like Mac OS X) AppleWebKit/534.2 (KHTML, like Gecko) CriOS/56.0.805.0 Mobile/34U453 Safari/534.2",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_4 like Mac OS X) AppleWebKit/531.1 (KHTML, like Gecko) CriOS/48.0.841.0 Mobile/82T312 Safari/531.1",
"Mozilla/5.0 (Android 2.2.2; Mobile; rv:57.0) Gecko/57.0 Firefox/57.0",
"Mozilla/5.0 (Android 2.3.5; Mobile; rv:16.0) Gecko/16.0 Firefox/16.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; so-DJ) AppleWebKit/533.9.5 (KHTML, like Gecko) Version/4.0.5 Mobile/8B116 Safari/6533.9.5",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; te-IN) AppleWebKit/534.42.1 (KHTML, like Gecko) Version/3.0.5 Mobile/8B116 Safari/6534.42.1",
"Mozilla/5.0 (iPhone; CPU iPhone OS 3_1_3 like Mac OS X) AppleWebKit/532.1 (KHTML, like Gecko) FxiOS/17.4k9744.0 Mobile/35H153 Safari/532.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_0 like Mac OS X; pl-PL) AppleWebKit/535.11.6 (KHTML, like Gecko) Version/3.0.5 Mobile/8B116 Safari/6535.11.6",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; nb-NO) AppleWebKit/535.33.2 (KHTML, like Gecko) Version/4.0.5 Mobile/8B114 Safari/6535.33.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; ln-CD) AppleWebKit/533.39.1 (KHTML, like Gecko) Version/4.0.5 Mobile/8B114 Safari/6533.39.1",
"Mozilla/5.0 (iPad; CPU iPad OS 3_1_3 like Mac OS X) AppleWebKit/533.1 (KHTML, like Gecko) FxiOS/18.7w6337.0 Mobile/11D182 Safari/533.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_0 like Mac OS X; fur-IT) AppleWebKit/533.1.6 (KHTML, like Gecko) Version/3.0.5 Mobile/8B114 Safari/6533.1.6",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; ckb-IQ) AppleWebKit/534.36.2 (KHTML, like Gecko) Version/4.0.5 Mobile/8B112 Safari/6534.36.2",
"Mozilla/5.0 (Linux; Android 2.3.4) AppleWebKit/534.1 (KHTML, like Gecko) Chrome/19.0.821.0 Safari/534.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; sw-TZ) AppleWebKit/533.7.7 (KHTML, like Gecko) Version/3.0.5 Mobile/8B118 Safari/6533.7.7",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3 like Mac OS X; mn-MN) AppleWebKit/533.14.1 (KHTML, like Gecko) Version/4.0.5 Mobile/8B112 Safari/6533.14.1",
"Mozilla/5.0 (iPad; CPU iPad OS 4_2_1 like Mac OS X) AppleWebKit/536.1 (KHTML, like Gecko) CriOS/48.0.882.0 Mobile/73T728 Safari/536.1",
"Mozilla/5.0 (Android 2.2.2; Mobile; rv:39.0) Gecko/39.0 Firefox/39.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 9_3_6 like Mac OS X) AppleWebKit/531.2 (KHTML, like Gecko) FxiOS/15.4m6916.0 Mobile/41G361 Safari/531.2",
"Mozilla/5.0 (iPad; CPU iPad OS 10_3_3 like Mac OS X) AppleWebKit/536.0 (KHTML, like Gecko) CriOS/47.0.882.0 Mobile/00P236 Safari/536.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_3 like Mac OS X) AppleWebKit/536.2 (KHTML, like Gecko) FxiOS/12.3i5937.0 Mobile/98U506 Safari/536.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; be-BY) AppleWebKit/531.45.6 (KHTML, like Gecko) Version/3.0.5 Mobile/8B116 Safari/6531.45.6",
"Mozilla/5.0 (iPhone; CPU iPhone OS 6_1_6 like Mac OS X) AppleWebKit/536.2 (KHTML, like Gecko) FxiOS/12.9i6620.0 Mobile/22N221 Safari/536.2",
"Mozilla/5.0 (iPhone; CPU iPhone OS 12_4 like Mac OS X) AppleWebKit/532.2 (KHTML, like Gecko) FxiOS/12.3r0309.0 Mobile/04D294 Safari/532.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; dz-BT) AppleWebKit/533.32.3 (KHTML, like Gecko) Version/4.0.5 Mobile/8B117 Safari/6533.32.3",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; is-IS) AppleWebKit/533.9.5 (KHTML, like Gecko) Version/4.0.5 Mobile/8B119 Safari/6533.9.5",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_3 like Mac OS X) AppleWebKit/535.0 (KHTML, like Gecko) CriOS/27.0.809.0 Mobile/03H639 Safari/535.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_0 like Mac OS X; ff-SN) AppleWebKit/533.26.4 (KHTML, like Gecko) Version/3.0.5 Mobile/8B117 Safari/6533.26.4",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; an-ES) AppleWebKit/533.20.4 (KHTML, like Gecko) Version/3.0.5 Mobile/8B113 Safari/6533.20.4",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_0 like Mac OS X; yue-HK) AppleWebKit/535.39.2 (KHTML, like Gecko) Version/4.0.5 Mobile/8B116 Safari/6535.39.2",
"Mozilla/5.0 (iPhone; CPU iPhone OS 4_2_1 like Mac OS X) AppleWebKit/534.1 (KHTML, like Gecko) CriOS/54.0.848.0 Mobile/94E530 Safari/534.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_0 like Mac OS X; li-BE) AppleWebKit/532.49.2 (KHTML, like Gecko) Version/3.0.5 Mobile/8B117 Safari/6532.49.2",
"Mozilla/5.0 (Android 4.0; Mobile; rv:29.0) Gecko/29.0 Firefox/29.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_4 like Mac OS X) AppleWebKit/532.0 (KHTML, like Gecko) CriOS/36.0.834.0 Mobile/42H169 Safari/532.0",
"Mozilla/5.0 (Android 4.2.2; Mobile; rv:37.0) Gecko/37.0 Firefox/37.0",
"Mozilla/5.0 (iPad; CPU iPad OS 6_1_6 like Mac OS X) AppleWebKit/535.0 (KHTML, like Gecko) CriOS/33.0.801.0 Mobile/64J543 Safari/535.0",
"Mozilla/5.0 (Linux; Android 7.0) AppleWebKit/531.0 (KHTML, like Gecko) Chrome/20.0.886.0 Safari/531.0",
"Mozilla/5.0 (iPad; CPU iPad OS 6_1_6 like Mac OS X) AppleWebKit/533.2 (KHTML, like Gecko) CriOS/17.0.812.0 Mobile/53T178 Safari/533.2",
"Mozilla/5.0 (iPad; CPU iPad OS 12_4 like Mac OS X) AppleWebKit/532.0 (KHTML, like Gecko) CriOS/38.0.897.0 Mobile/07F384 Safari/532.0",
"Mozilla/5.0 (Android 2.2.3; Mobile; rv:13.0) Gecko/13.0 Firefox/13.0",
"Mozilla/5.0 (Linux; Android 2.2.3) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/42.0.807.0 Safari/532.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_0 like Mac OS X; wae-CH) AppleWebKit/533.4.1 (KHTML, like Gecko) Version/4.0.5 Mobile/8B118 Safari/6533.4.1",
"Mozilla/5.0 (iPad; CPU iPad OS 7_1_2 like Mac OS X) AppleWebKit/535.1 (KHTML, like Gecko) CriOS/24.0.818.0 Mobile/08R777 Safari/535.1",
"Mozilla/5.0 (Android 3.0; Mobile; rv:56.0) Gecko/56.0 Firefox/56.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_0 like Mac OS X; lij-IT) AppleWebKit/531.22.7 (KHTML, like Gecko) Version/4.0.5 Mobile/8B118 Safari/6531.22.7",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; vi-VN) AppleWebKit/534.47.1 (KHTML, like Gecko) Version/4.0.5 Mobile/8B119 Safari/6534.47.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_0 like Mac OS X; bs-BA) AppleWebKit/533.17.7 (KHTML, like Gecko) Version/4.0.5 Mobile/8B119 Safari/6533.17.7",
"Mozilla/5.0 (Linux; Android 4.4.4) AppleWebKit/533.2 (KHTML, like Gecko) Chrome/17.0.861.0 Safari/533.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; kn-IN) AppleWebKit/532.36.6 (KHTML, like Gecko) Version/4.0.5 Mobile/8B114 Safari/6532.36.6",
"Mozilla/5.0 (Android 5.1; Mobile; rv:9.0) Gecko/9.0 Firefox/9.0",
"Mozilla/5.0 (Linux; Android 1.1) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/57.0.884.0 Safari/535.1",
"Mozilla/5.0 (Android 3.2.3; Mobile; rv:14.0) Gecko/14.0 Firefox/14.0",
"Mozilla/5.0 (Linux; Android 2.2.1) AppleWebKit/533.2 (KHTML, like Gecko) Chrome/18.0.820.0 Safari/533.2",
"Mozilla/5.0 (iPad; CPU iPad OS 4_2_1 like Mac OS X) AppleWebKit/534.0 (KHTML, like Gecko) CriOS/43.0.805.0 Mobile/67U093 Safari/534.0",
"Mozilla/5.0 (Android 1.0; Mobile; rv:14.0) Gecko/14.0 Firefox/14.0",
"Mozilla/5.0 (Android 1.1; Mobile; rv:36.0) Gecko/36.0 Firefox/36.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; hi-IN) AppleWebKit/531.1.3 (KHTML, like Gecko) Version/4.0.5 Mobile/8B114 Safari/6531.1.3",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; raj-IN) AppleWebKit/531.27.5 (KHTML, like Gecko) Version/3.0.5 Mobile/8B114 Safari/6531.27.5",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_4 like Mac OS X) AppleWebKit/533.0 (KHTML, like Gecko) CriOS/31.0.870.0 Mobile/02D918 Safari/533.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; mn-MN) AppleWebKit/532.41.2 (KHTML, like Gecko) Version/4.0.5 Mobile/8B119 Safari/6532.41.2",
"Mozilla/5.0 (Android 4.0.1; Mobile; rv:12.0) Gecko/12.0 Firefox/12.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; es-HN) AppleWebKit/535.38.3 (KHTML, like Gecko) Version/4.0.5 Mobile/8B114 Safari/6535.38.3",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3 like Mac OS X; mr-IN) AppleWebKit/533.9.4 (KHTML, like Gecko) Version/3.0.5 Mobile/8B111 Safari/6533.9.4",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; nb-NO) AppleWebKit/535.30.5 (KHTML, like Gecko) Version/4.0.5 Mobile/8B115 Safari/6535.30.5",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3 like Mac OS X; ga-IE) AppleWebKit/535.3.1 (KHTML, like Gecko) Version/4.0.5 Mobile/8B118 Safari/6535.3.1",
"Mozilla/5.0 (Android 2.0.1; Mobile; rv:51.0) Gecko/51.0 Firefox/51.0",
"Mozilla/5.0 (Android 2.3; Mobile; rv:6.0) Gecko/6.0 Firefox/6.0",
"Mozilla/5.0 (Android 4.2.1; Mobile; rv:49.0) Gecko/49.0 Firefox/49.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; mag-IN) AppleWebKit/531.14.1 (KHTML, like Gecko) Version/4.0.5 Mobile/8B114 Safari/6531.14.1",
"Mozilla/5.0 (Linux; Android 7.0) AppleWebKit/536.1 (KHTML, like Gecko) Chrome/33.0.834.0 Safari/536.1",
"Mozilla/5.0 (Android 3.2.5; Mobile; rv:29.0) Gecko/29.0 Firefox/29.0",
"Mozilla/5.0 (Linux; Android 5.0.2) AppleWebKit/535.0 (KHTML, like Gecko) Chrome/57.0.886.0 Safari/535.0",
"Mozilla/5.0 (Linux; Android 2.3.3) AppleWebKit/533.1 (KHTML, like Gecko) Chrome/58.0.866.0 Safari/533.1",
"Mozilla/5.0 (iPhone; CPU iPhone OS 3_1_3 like Mac OS X) AppleWebKit/533.1 (KHTML, like Gecko) CriOS/26.0.813.0 Mobile/28M844 Safari/533.1",
"Mozilla/5.0 (Linux; Android 2.3.2) AppleWebKit/534.1 (KHTML, like Gecko) Chrome/31.0.855.0 Safari/534.1",
"Mozilla/5.0 (Android 1.0; Mobile; rv:56.0) Gecko/56.0 Firefox/56.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 9_3_5 like Mac OS X) AppleWebKit/535.1 (KHTML, like Gecko) FxiOS/9.7a3704.0 Mobile/87X107 Safari/535.1",
"Mozilla/5.0 (Linux; Android 4.4) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/29.0.856.0 Safari/535.1",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_3 like Mac OS X) AppleWebKit/532.1 (KHTML, like Gecko) CriOS/18.0.811.0 Mobile/58F813 Safari/532.1",
"Mozilla/5.0 (Linux; Android 4.0.4) AppleWebKit/536.2 (KHTML, like Gecko) Chrome/52.0.855.0 Safari/536.2",
"Mozilla/5.0 (Android 2.3.2; Mobile; rv:28.0) Gecko/28.0 Firefox/28.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; ro-RO) AppleWebKit/535.2.4 (KHTML, like Gecko) Version/3.0.5 Mobile/8B116 Safari/6535.2.4",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; id-ID) AppleWebKit/533.34.2 (KHTML, like Gecko) Version/4.0.5 Mobile/8B113 Safari/6533.34.2",
"Mozilla/5.0 (Linux; Android 4.4.2) AppleWebKit/535.2 (KHTML, like Gecko) Chrome/50.0.846.0 Safari/535.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; lg-UG) AppleWebKit/535.5.4 (KHTML, like Gecko) Version/4.0.5 Mobile/8B117 Safari/6535.5.4",
"Mozilla/5.0 (Linux; Android 3.2.6) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/35.0.881.0 Safari/532.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; yi-US) AppleWebKit/534.26.7 (KHTML, like Gecko) Version/4.0.5 Mobile/8B117 Safari/6534.26.7",
"Mozilla/5.0 (iPad; CPU iPad OS 4_2_1 like Mac OS X) AppleWebKit/535.2 (KHTML, like Gecko) FxiOS/9.7o1690.0 Mobile/63V273 Safari/535.2",
"Mozilla/5.0 (Linux; Android 4.4.1) AppleWebKit/534.2 (KHTML, like Gecko) Chrome/25.0.839.0 Safari/534.2",
"Mozilla/5.0 (Linux; Android 5.0.2) AppleWebKit/534.2 (KHTML, like Gecko) Chrome/55.0.895.0 Safari/534.2",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_4 like Mac OS X) AppleWebKit/533.2 (KHTML, like Gecko) CriOS/15.0.876.0 Mobile/23L068 Safari/533.2",
"Mozilla/5.0 (iPhone; CPU iPhone OS 5_1_1 like Mac OS X) AppleWebKit/536.0 (KHTML, like Gecko) CriOS/17.0.848.0 Mobile/01D961 Safari/536.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 7_1_2 like Mac OS X) AppleWebKit/536.0 (KHTML, like Gecko) CriOS/31.0.881.0 Mobile/64F142 Safari/536.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_3 like Mac OS X; mni-IN) AppleWebKit/533.16.2 (KHTML, like Gecko) Version/3.0.5 Mobile/8B117 Safari/6533.16.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; ayc-PE) AppleWebKit/532.26.4 (KHTML, like Gecko) Version/4.0.5 Mobile/8B116 Safari/6532.26.4",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_4 like Mac OS X) AppleWebKit/534.0 (KHTML, like Gecko) FxiOS/17.9w0111.0 Mobile/03Q334 Safari/534.0",
"Mozilla/5.0 (iPad; CPU iPad OS 10_3_3 like Mac OS X) AppleWebKit/534.2 (KHTML, like Gecko) CriOS/42.0.853.0 Mobile/71U535 Safari/534.2",
"Mozilla/5.0 (Android 4.0.4; Mobile; rv:18.0) Gecko/18.0 Firefox/18.0",
"Mozilla/5.0 (Linux; Android 4.0) AppleWebKit/531.0 (KHTML, like Gecko) Chrome/22.0.820.0 Safari/531.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_4 like Mac OS X) AppleWebKit/531.0 (KHTML, like Gecko) CriOS/38.0.888.0 Mobile/70F657 Safari/531.0",
"Mozilla/5.0 (iPad; CPU iPad OS 3_1_3 like Mac OS X) AppleWebKit/532.2 (KHTML, like Gecko) FxiOS/12.4d2131.0 Mobile/79X776 Safari/532.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_0 like Mac OS X; tt-RU) AppleWebKit/531.1.6 (KHTML, like Gecko) Version/4.0.5 Mobile/8B116 Safari/6531.1.6",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_0 like Mac OS X; fur-IT) AppleWebKit/534.33.7 (KHTML, like Gecko) Version/3.0.5 Mobile/8B117 Safari/6534.33.7",
"Mozilla/5.0 (iPad; CPU iPad OS 6_1_6 like Mac OS X) AppleWebKit/533.0 (KHTML, like Gecko) FxiOS/12.8i1526.0 Mobile/78M578 Safari/533.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; lij-IT) AppleWebKit/533.6.2 (KHTML, like Gecko) Version/3.0.5 Mobile/8B114 Safari/6533.6.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; tcy-IN) AppleWebKit/533.32.7 (KHTML, like Gecko) Version/3.0.5 Mobile/8B116 Safari/6533.32.7",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; bs-BA) AppleWebKit/531.24.1 (KHTML, like Gecko) Version/3.0.5 Mobile/8B117 Safari/6531.24.1",
"Mozilla/5.0 (Linux; Android 4.2) AppleWebKit/536.1 (KHTML, like Gecko) Chrome/61.0.886.0 Safari/536.1",
"Mozilla/5.0 (iPad; CPU iPad OS 7_1_2 like Mac OS X) AppleWebKit/531.1 (KHTML, like Gecko) FxiOS/9.5d1878.0 Mobile/96Q533 Safari/531.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; nan-TW) AppleWebKit/532.43.5 (KHTML, like Gecko) Version/3.0.5 Mobile/8B115 Safari/6532.43.5",
"Mozilla/5.0 (Linux; Android 4.1.1) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/31.0.858.0 Safari/532.1",
"Mozilla/5.0 (Linux; Android 4.0.2) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/33.0.808.0 Safari/535.1",
"Mozilla/5.0 (Android 4.4.4; Mobile; rv:66.0) Gecko/66.0 Firefox/66.0",
"Mozilla/5.0 (Android 1.5; Mobile; rv:24.0) Gecko/24.0 Firefox/24.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_0 like Mac OS X; th-TH) AppleWebKit/534.47.3 (KHTML, like Gecko) Version/3.0.5 Mobile/8B116 Safari/6534.47.3",
"Mozilla/5.0 (Android 1.6; Mobile; rv:6.0) Gecko/6.0 Firefox/6.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; sw-TZ) AppleWebKit/533.33.4 (KHTML, like Gecko) Version/3.0.5 Mobile/8B118 Safari/6533.33.4",
"Mozilla/5.0 (Android 2.1; Mobile; rv:16.0) Gecko/16.0 Firefox/16.0",
"Mozilla/5.0 (iPad; CPU iPad OS 4_2_1 like Mac OS X) AppleWebKit/535.0 (KHTML, like Gecko) FxiOS/16.6i0408.0 Mobile/56O429 Safari/535.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 9_3_6 like Mac OS X) AppleWebKit/534.2 (KHTML, like Gecko) FxiOS/14.3j7943.0 Mobile/62H627 Safari/534.2",
"Mozilla/5.0 (Linux; Android 4.1) AppleWebKit/534.0 (KHTML, like Gecko) Chrome/38.0.835.0 Safari/534.0",
"Mozilla/5.0 (Linux; Android 4.2) AppleWebKit/534.0 (KHTML, like Gecko) Chrome/59.0.853.0 Safari/534.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; sr-ME) AppleWebKit/535.20.2 (KHTML, like Gecko) Version/3.0.5 Mobile/8B119 Safari/6535.20.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3 like Mac OS X; szl-PL) AppleWebKit/532.45.5 (KHTML, like Gecko) Version/4.0.5 Mobile/8B113 Safari/6532.45.5",
"Mozilla/5.0 (Android 4.4; Mobile; rv:52.0) Gecko/52.0 Firefox/52.0",
"Mozilla/5.0 (iPad; CPU iPad OS 10_3_4 like Mac OS X) AppleWebKit/536.1 (KHTML, like Gecko) FxiOS/15.5j6216.0 Mobile/66C978 Safari/536.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_0 like Mac OS X; eo-US) AppleWebKit/535.14.1 (KHTML, like Gecko) Version/3.0.5 Mobile/8B118 Safari/6535.14.1",
"Mozilla/5.0 (Android 3.2.1; Mobile; rv:6.0) Gecko/6.0 Firefox/6.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_1 like Mac OS X; ne-NP) AppleWebKit/534.7.4 (KHTML, like Gecko) Version/3.0.5 Mobile/8B113 Safari/6534.7.4",
"Mozilla/5.0 (Linux; Android 8.0.0) AppleWebKit/531.1 (KHTML, like Gecko) Chrome/35.0.881.0 Safari/531.1",
"Mozilla/5.0 (Android 3.2.3; Mobile; rv:63.0) Gecko/63.0 Firefox/63.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; dz-BT) AppleWebKit/535.32.1 (KHTML, like Gecko) Version/3.0.5 Mobile/8B113 Safari/6535.32.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_3 like Mac OS X; st-ZA) AppleWebKit/532.32.3 (KHTML, like Gecko) Version/3.0.5 Mobile/8B112 Safari/6532.32.3",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; bhb-IN) AppleWebKit/534.9.7 (KHTML, like Gecko) Version/3.0.5 Mobile/8B112 Safari/6534.9.7",
"Mozilla/5.0 (iPad; CPU iPad OS 10_3_4 like Mac OS X) AppleWebKit/532.0 (KHTML, like Gecko) FxiOS/9.9m2403.0 Mobile/10T305 Safari/532.0",
"Mozilla/5.0 (Linux; Android 2.3) AppleWebKit/532.2 (KHTML, like Gecko) Chrome/63.0.885.0 Safari/532.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; nb-NO) AppleWebKit/535.40.4 (KHTML, like Gecko) Version/4.0.5 Mobile/8B115 Safari/6535.40.4",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_1 like Mac OS X; nr-ZA) AppleWebKit/534.22.5 (KHTML, like Gecko) Version/3.0.5 Mobile/8B115 Safari/6534.22.5",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; ml-IN) AppleWebKit/531.9.4 (KHTML, like Gecko) Version/3.0.5 Mobile/8B114 Safari/6531.9.4",
"Mozilla/5.0 (Linux; Android 4.2) AppleWebKit/531.2 (KHTML, like Gecko) Chrome/55.0.807.0 Safari/531.2",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_4 like Mac OS X) AppleWebKit/535.1 (KHTML, like Gecko) CriOS/53.0.859.0 Mobile/82S736 Safari/535.1",
"Mozilla/5.0 (Android 7.1.2; Mobile; rv:15.0) Gecko/15.0 Firefox/15.0",
"Mozilla/5.0 (iPad; CPU iPad OS 9_3_5 like Mac OS X) AppleWebKit/533.2 (KHTML, like Gecko) CriOS/58.0.844.0 Mobile/23B408 Safari/533.2",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_3 like Mac OS X; hr-HR) AppleWebKit/533.32.3 (KHTML, like Gecko) Version/3.0.5 Mobile/8B119 Safari/6533.32.3",
"Mozilla/5.0 (Android 6.0.1; Mobile; rv:14.0) Gecko/14.0 Firefox/14.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_2 like Mac OS X; sl-SI) AppleWebKit/533.6.7 (KHTML, like Gecko) Version/3.0.5 Mobile/8B119 Safari/6533.6.7",
"Mozilla/5.0 (Android 3.0; Mobile; rv:40.0) Gecko/40.0 Firefox/40.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_3 like Mac OS X) AppleWebKit/532.1 (KHTML, like Gecko) FxiOS/17.0h9848.0 Mobile/02P413 Safari/532.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_3 like Mac OS X; pa-PK) AppleWebKit/531.42.3 (KHTML, like Gecko) Version/3.0.5 Mobile/8B119 Safari/6531.42.3",
"Mozilla/5.0 (iPhone; CPU iPhone OS 5_1_1 like Mac OS X) AppleWebKit/533.0 (KHTML, like Gecko) CriOS/53.0.893.0 Mobile/96L834 Safari/533.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 6_1_6 like Mac OS X) AppleWebKit/533.0 (KHTML, like Gecko) CriOS/46.0.848.0 Mobile/13A861 Safari/533.0",
"Mozilla/5.0 (iPhone; CPU iPhone OS 3_1_3 like Mac OS X) AppleWebKit/531.1 (KHTML, like Gecko) CriOS/30.0.808.0 Mobile/53Q222 Safari/531.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_3 like Mac OS X; wo-SN) AppleWebKit/533.3.7 (KHTML, like Gecko) Version/3.0.5 Mobile/8B114 Safari/6533.3.7",
"Mozilla/5.0 (Linux; Android 2.3) AppleWebKit/533.2 (KHTML, like Gecko) Chrome/23.0.844.0 Safari/533.2",
"Mozilla/5.0 (Linux; Android 3.2.1) AppleWebKit/534.0 (KHTML, like Gecko) Chrome/33.0.871.0 Safari/534.0",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; ps-AF) AppleWebKit/535.32.7 (KHTML, like Gecko) Version/3.0.5 Mobile/8B116 Safari/6535.32.7",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2 like Mac OS X; sd-PK) AppleWebKit/534.30.3 (KHTML, like Gecko) Version/4.0.5 Mobile/8B111 Safari/6534.30.3",
"Mozilla/5.0 (Linux; Android 2.0.1) AppleWebKit/533.1 (KHTML, like Gecko) Chrome/46.0.812.0 Safari/533.1",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 3_3 like Mac OS X; shs-CA) AppleWebKit/535.32.3 (KHTML, like Gecko) Version/3.0.5 Mobile/8B115 Safari/6535.32.3",
"Mozilla/5.0 (iPad; CPU iPad OS 3_1_3 like Mac OS X) AppleWebKit/536.2 (KHTML, like Gecko) CriOS/42.0.800.0 Mobile/17M747 Safari/536.2",
"Mozilla/5.0 (iPad; CPU iPad OS 9_3_6 like Mac OS X) AppleWebKit/532.0 (KHTML, like Gecko) FxiOS/13.7w7436.0 Mobile/48R429 Safari/532.0",
"Mozilla/5.0 (Linux; Android 4.2) AppleWebKit/536.2 (KHTML, like Gecko) Chrome/15.0.852.0 Safari/536.2"
],
"Desktop": [
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_9_2 rv:6.0; ik-CA) AppleWebKit/535.5.2 (KHTML, like Gecko) Version/4.1 Safari/535.5.2",
"Mozilla/5.0 (X11; Linux i686; rv:1.9.7.20) Gecko/2017-08-27 14:20:34 Firefox/15.0",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_8_2; rv:1.9.2.20) Gecko/2014-11-10 23:56:29 Firefox/3.8",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_7; rv:1.9.5.20) Gecko/2011-05-15 01:58:03 Firefox/3.6.3",
"Mozilla/5.0 (X11; Linux i686) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/31.0.808.0 Safari/532.1",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_11_8; rv:1.9.6.20) Gecko/2016-12-07 04:42:24 Firefox/3.6.9",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/5.1)",
"Opera/9.31.(Windows NT 5.01; lij-IT) Presto/2.9.163 Version/11.00",
"Opera/8.97.(Windows NT 5.01; is-IS) Presto/2.9.175 Version/10.00",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_12_3 rv:6.0; csb-PL) AppleWebKit/534.22.6 (KHTML, like Gecko) Version/5.1 Safari/534.22.6",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_12_5) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/50.0.868.0 Safari/535.1",
"Mozilla/5.0 (Windows; U; Windows NT 4.0) AppleWebKit/533.47.1 (KHTML, like Gecko) Version/5.0.1 Safari/533.47.1",
"Mozilla/5.0 (Windows NT 6.1; ca-IT; rv:1.9.1.20) Gecko/2011-08-10 03:57:17 Firefox/12.0",
"Mozilla/5.0 (Windows NT 6.1; nb-NO; rv:1.9.2.20) Gecko/2017-04-21 23:06:51 Firefox/3.6.15",
"Opera/8.52.(Windows NT 6.2; yo-NG) Presto/2.9.179 Version/11.00",
"Opera/8.50.(Windows NT 6.2; tl-PH) Presto/2.9.162 Version/11.00",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.6.20) Gecko/2016-09-08 12:09:06 Firefox/3.8",
"Mozilla/5.0 (Windows; U; Windows NT 5.01) AppleWebKit/531.49.5 (KHTML, like Gecko) Version/5.1 Safari/531.49.5",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.2; Trident/5.0)",
"Opera/9.10.(Windows CE; mai-IN) Presto/2.9.178 Version/10.00",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.5.20) Gecko/2017-08-09 05:24:40 Firefox/3.8",
"Opera/9.91.(X11; Linux i686; ts-ZA) Presto/2.9.172 Version/10.00",
"Mozilla/5.0 (compatible; MSIE 6.0; Windows 98; Win 9x 4.90; Trident/4.0)",
"Opera/9.89.(X11; Linux i686; ff-SN) Presto/2.9.163 Version/12.00",
"Mozilla/5.0 (Windows NT 6.0) AppleWebKit/534.2 (KHTML, like Gecko) Chrome/52.0.850.0 Safari/534.2",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/3.1)",
"Opera/9.24.(X11; Linux i686; es-PR) Presto/2.9.188 Version/10.00",
"Mozilla/5.0 (Windows 95) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/60.0.830.0 Safari/535.1",
"Opera/9.57.(Windows NT 4.0; af-ZA) Presto/2.9.174 Version/10.00",
"Opera/8.42.(X11; Linux i686; ti-ET) Presto/2.9.167 Version/10.00",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_10_3; rv:1.9.5.20) Gecko/2014-03-14 15:13:36 Firefox/3.6.12",
"Opera/9.14.(X11; Linux x86_64; sid-ET) Presto/2.9.178 Version/12.00",
"Mozilla/5.0 (X11; Linux i686) AppleWebKit/534.2 (KHTML, like Gecko) Chrome/25.0.883.0 Safari/534.2",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/62.0.835.0 Safari/535.1",
"Mozilla/5.0 (compatible; MSIE 5.0; Windows 98; Trident/4.0)",
"Mozilla/5.0 (compatible; MSIE 6.0; Windows CE; Trident/4.0)",
"Opera/9.34.(X11; Linux i686; be-BY) Presto/2.9.173 Version/10.00",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_4 rv:6.0; ti-ER) AppleWebKit/534.6.7 (KHTML, like Gecko) Version/5.0 Safari/534.6.7",
"Opera/8.97.(X11; Linux x86_64; bn-IN) Presto/2.9.178 Version/10.00",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/533.0 (KHTML, like Gecko) Chrome/39.0.894.0 Safari/533.0",
"Mozilla/5.0 (X11; Linux i686) AppleWebKit/534.1 (KHTML, like Gecko) Chrome/55.0.872.0 Safari/534.1",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_10_4 rv:2.0; vi-VN) AppleWebKit/533.45.3 (KHTML, like Gecko) Version/4.0.2 Safari/533.45.3",
"Opera/9.60.(X11; Linux i686; tg-TJ) Presto/2.9.174 Version/11.00",
"Opera/8.15.(X11; Linux i686; ko-KR) Presto/2.9.190 Version/11.00",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_12_7) AppleWebKit/536.1 (KHTML, like Gecko) Chrome/55.0.858.0 Safari/536.1",
"Mozilla/5.0 (Windows 95) AppleWebKit/533.2 (KHTML, like Gecko) Chrome/32.0.867.0 Safari/533.2",
"Opera/8.35.(X11; Linux i686; si-LK) Presto/2.9.176 Version/11.00",
"Mozilla/5.0 (compatible; MSIE 5.0; Windows 95; Trident/3.1)",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.2; Trident/3.0)",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_9_3; rv:1.9.6.20) Gecko/2019-11-09 02:34:33 Firefox/3.6.3",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5 rv:4.0; ts-ZA) AppleWebKit/535.12.1 (KHTML, like Gecko) Version/5.0 Safari/535.12.1",
"Mozilla/5.0 (Windows; U; Windows NT 5.2) AppleWebKit/535.1.2 (KHTML, like Gecko) Version/4.0.5 Safari/535.1.2",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_5_5 rv:6.0; sk-SK) AppleWebKit/532.38.7 (KHTML, like Gecko) Version/4.0.5 Safari/532.38.7",
"Opera/9.71.(Windows NT 5.01; rw-RW) Presto/2.9.160 Version/10.00",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)",
"Opera/8.22.(Windows NT 5.01; dz-BT) Presto/2.9.190 Version/12.00",
"Opera/8.36.(X11; Linux x86_64; ku-TR) Presto/2.9.167 Version/10.00",
"Opera/9.12.(X11; Linux i686; bem-ZM) Presto/2.9.179 Version/12.00",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_5_9 rv:6.0; mt-MT) AppleWebKit/534.50.1 (KHTML, like Gecko) Version/4.0 Safari/534.50.1",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.5.20) Gecko/2013-02-05 11:12:33 Firefox/3.8",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows 98; Win 9x 4.90; Trident/3.0)",
"Mozilla/5.0 (Windows NT 6.0) AppleWebKit/535.0 (KHTML, like Gecko) Chrome/14.0.874.0 Safari/535.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_9 rv:6.0; ug-CN) AppleWebKit/535.16.4 (KHTML, like Gecko) Version/4.0.1 Safari/535.16.4",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0)",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.2 (KHTML, like Gecko) Chrome/56.0.883.0 Safari/535.2",
"Mozilla/5.0 (Windows; U; Windows CE) AppleWebKit/534.24.3 (KHTML, like Gecko) Version/5.0 Safari/534.24.3",
"Mozilla/5.0 (X11; Linux i686) AppleWebKit/531.2 (KHTML, like Gecko) Chrome/33.0.836.0 Safari/531.2",
"Opera/9.19.(X11; Linux i686; ta-IN) Presto/2.9.178 Version/10.00",
"Mozilla/5.0 (Windows NT 6.0; tig-ER; rv:1.9.1.20) Gecko/2012-03-24 04:22:38 Firefox/15.0",
"Mozilla/5.0 (compatible; MSIE 6.0; Windows 98; Win 9x 4.90; Trident/5.0)",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.6.20) Gecko/2013-07-03 07:55:18 Firefox/3.6.13",
"Mozilla/5.0 (X11; Linux i686; rv:1.9.6.20) Gecko/2015-06-22 22:03:34 Firefox/3.6.4",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.0 (KHTML, like Gecko) Chrome/51.0.804.0 Safari/534.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_9; rv:1.9.6.20) Gecko/2011-02-25 01:44:52 Firefox/15.0",
"Opera/8.46.(Windows NT 6.0; so-DJ) Presto/2.9.162 Version/11.00",
"Opera/8.16.(X11; Linux x86_64; tl-PH) Presto/2.9.184 Version/11.00",
"Opera/9.94.(X11; Linux x86_64; th-TH) Presto/2.9.190 Version/10.00",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.5.20) Gecko/2014-10-12 22:26:37 Firefox/3.8",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0; rv:1.9.4.20) Gecko/2019-02-09 07:08:44 Firefox/3.6.4",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3 rv:2.0; nb-NO) AppleWebKit/534.31.7 (KHTML, like Gecko) Version/5.0.2 Safari/534.31.7",
"Opera/8.92.(Windows 95; ln-CD) Presto/2.9.183 Version/12.00",
"Mozilla/5.0 (X11; Linux i686; rv:1.9.7.20) Gecko/2013-11-11 03:42:26 Firefox/10.0",
"Opera/8.62.(X11; Linux i686; cy-GB) Presto/2.9.165 Version/12.00",
"Opera/8.35.(Windows CE; af-ZA) Presto/2.9.165 Version/11.00",
"Mozilla/5.0 (compatible; MSIE 5.0; Windows 95; Trident/5.0)",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_12_2) AppleWebKit/536.2 (KHTML, like Gecko) Chrome/61.0.838.0 Safari/536.2",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.6.20) Gecko/2017-10-31 10:21:28 Firefox/3.6.17",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_10_7 rv:6.0; wa-BE) AppleWebKit/534.10.2 (KHTML, like Gecko) Version/4.0.2 Safari/534.10.2",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_7_6) AppleWebKit/536.1 (KHTML, like Gecko) Chrome/43.0.867.0 Safari/536.1",
"Opera/8.86.(Windows NT 6.1; ko-KR) Presto/2.9.162 Version/11.00",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.1)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_6 rv:6.0; lb-LU) AppleWebKit/533.14.5 (KHTML, like Gecko) Version/5.1 Safari/533.14.5",
"Mozilla/5.0 (Windows 98; Win 9x 4.90) AppleWebKit/534.2 (KHTML, like Gecko) Chrome/59.0.866.0 Safari/534.2",
"Mozilla/5.0 (Windows; U; Windows NT 5.2) AppleWebKit/533.42.6 (KHTML, like Gecko) Version/4.0.4 Safari/533.42.6",
"Opera/9.39.(X11; Linux x86_64; csb-PL) Presto/2.9.179 Version/11.00",
"Mozilla/5.0 (X11; Linux i686) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/38.0.889.0 Safari/532.0",
"Opera/8.54.(X11; Linux i686; ts-ZA) Presto/2.9.172 Version/12.00",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/43.0.877.0 Safari/535.1",
"Mozilla/5.0 (X11; Linux i686) AppleWebKit/531.1 (KHTML, like Gecko) Chrome/62.0.800.0 Safari/531.1",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 4.0; Trident/5.0)",
"Opera/9.67.(X11; Linux x86_64; ti-ET) Presto/2.9.181 Version/11.00",
"Mozilla/5.0 (compatible; MSIE 6.0; Windows NT 5.01; Trident/4.0)",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_6_9 rv:5.0; si-LK) AppleWebKit/535.1.3 (KHTML, like Gecko) Version/4.1 Safari/535.1.3",
"Opera/9.69.(Windows NT 5.2; ky-KG) Presto/2.9.171 Version/11.00",
"Mozilla/5.0 (compatible; MSIE 7.0; Windows CE; Trident/3.1)",
"Mozilla/5.0 (Windows 98; Win 9x 4.90; apn-IN; rv:1.9.1.20) Gecko/2016-05-28 21:14:12 Firefox/12.0",
"Mozilla/5.0 (Windows 98; Win 9x 4.90) AppleWebKit/533.2 (KHTML, like Gecko) Chrome/35.0.814.0 Safari/533.2",
"Opera/9.81.(X11; Linux i686; fur-IT) Presto/2.9.187 Version/10.00",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_7_5; rv:1.9.4.20) Gecko/2018-11-11 06:13:59 Firefox/11.0",
"Opera/8.67.(Windows NT 6.2; nso-ZA) Presto/2.9.184 Version/11.00",
"Opera/9.85.(Windows 98; lo-LA) Presto/2.9.180 Version/10.00",
"Opera/9.65.(Windows NT 5.01; nso-ZA) Presto/2.9.167 Version/12.00",
"Opera/8.62.(X11; Linux i686; yo-NG) Presto/2.9.163 Version/12.00",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_5_5) AppleWebKit/532.2 (KHTML, like Gecko) Chrome/35.0.876.0 Safari/532.2",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_6; rv:1.9.6.20) Gecko/2015-09-26 18:26:15 Firefox/3.8",
"Opera/9.83.(X11; Linux x86_64; dz-BT) Presto/2.9.160 Version/10.00",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_5; rv:1.9.3.20) Gecko/2019-02-14 08:57:44 Firefox/11.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_5; rv:1.9.3.20) Gecko/2015-06-28 17:15:41 Firefox/3.8",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 4.0; Trident/5.1)",
"Opera/8.45.(X11; Linux i686; ur-PK) Presto/2.9.182 Version/12.00",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0; Trident/4.0)",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows 98; Trident/5.1)",
"Opera/9.35.(X11; Linux i686; pt-PT) Presto/2.9.188 Version/10.00",
"Opera/8.48.(Windows NT 5.1; st-ZA) Presto/2.9.187 Version/12.00",
"Mozilla/5.0 (Windows 98; ro-RO; rv:1.9.2.20) Gecko/2018-12-24 13:08:51 Firefox/3.8",
"Opera/8.41.(X11; Linux i686; ga-IE) Presto/2.9.162 Version/11.00",
"Opera/9.95.(X11; Linux x86_64; niu-NU) Presto/2.9.172 Version/11.00",
"Mozilla/5.0 (Windows NT 5.01; niu-NU; rv:1.9.1.20) Gecko/2011-02-23 16:39:50 Firefox/3.6.17",
"Mozilla/5.0 (Windows; U; Windows NT 6.0) AppleWebKit/531.14.2 (KHTML, like Gecko) Version/5.0.3 Safari/531.14.2",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_7 rv:5.0; sq-ML) AppleWebKit/533.28.5 (KHTML, like Gecko) Version/5.0.2 Safari/533.28.5",
"Mozilla/5.0 (compatible; MSIE 6.0; Windows NT 5.01; Trident/5.0)",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_9_0) AppleWebKit/533.1 (KHTML, like Gecko) Chrome/16.0.842.0 Safari/533.1",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.2; Trident/3.0)",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 4.0; Trident/3.1)",
"Mozilla/5.0 (compatible; MSIE 6.0; Windows NT 5.2; Trident/4.0)",
"Mozilla/5.0 (compatible; MSIE 5.0; Windows 98; Win 9x 4.90; Trident/4.0)",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_11_7 rv:5.0; ks-IN) AppleWebKit/535.27.3 (KHTML, like Gecko) Version/5.0.3 Safari/535.27.3",
"Mozilla/5.0 (compatible; MSIE 7.0; Windows CE; Trident/5.1)",
"Mozilla/5.0 (X11; Linux i686; rv:1.9.6.20) Gecko/2017-12-13 11:42:08 Firefox/9.0",
"Mozilla/5.0 (X11; Linux i686) AppleWebKit/534.2 (KHTML, like Gecko) Chrome/57.0.801.0 Safari/534.2",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.5.20) Gecko/2011-11-26 13:13:23 Firefox/3.6.12",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.5.20) Gecko/2019-07-26 03:46:15 Firefox/3.8",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.6.20) Gecko/2015-02-19 11:05:12 Firefox/4.0",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.6.20) Gecko/2017-02-11 08:56:46 Firefox/15.0",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.0 (KHTML, like Gecko) Chrome/14.0.863.0 Safari/536.0",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.2 (KHTML, like Gecko) Chrome/55.0.850.0 Safari/536.2",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_6_9) AppleWebKit/534.1 (KHTML, like Gecko) Chrome/38.0.871.0 Safari/534.1",
"Mozilla/5.0 (Windows NT 4.0; yi-US; rv:1.9.1.20) Gecko/2018-10-12 03:32:32 Firefox/3.6.16",
"Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/55.0.815.0 Safari/535.1",
"Opera/9.91.(Windows NT 4.0; gv-GB) Presto/2.9.182 Version/11.00",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_5_7) AppleWebKit/533.2 (KHTML, like Gecko) Chrome/18.0.883.0 Safari/533.2",
"Mozilla/5.0 (Windows NT 6.0) AppleWebKit/534.0 (KHTML, like Gecko) Chrome/58.0.816.0 Safari/534.0",
"Opera/8.11.(Windows NT 4.0; ru-UA) Presto/2.9.170 Version/12.00",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_10_6; rv:1.9.2.20) Gecko/2019-08-30 04:11:34 Firefox/3.8",
"Opera/8.92.(X11; Linux x86_64; ar-PS) Presto/2.9.177 Version/11.00",
"Mozilla/5.0 (Windows; U; Windows 98; Win 9x 4.90) AppleWebKit/535.32.3 (KHTML, like Gecko) Version/5.0.2 Safari/535.32.3",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/531.0 (KHTML, like Gecko) Chrome/54.0.818.0 Safari/531.0",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_7_2) AppleWebKit/535.2 (KHTML, like Gecko) Chrome/24.0.854.0 Safari/535.2",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_10_8) AppleWebKit/532.2 (KHTML, like Gecko) Chrome/47.0.832.0 Safari/532.2",
"Opera/8.25.(X11; Linux i686; fy-NL) Presto/2.9.187 Version/12.00",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_5_1) AppleWebKit/536.1 (KHTML, like Gecko) Chrome/27.0.830.0 Safari/536.1",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_0 rv:4.0; dv-MV) AppleWebKit/532.36.5 (KHTML, like Gecko) Version/5.0.4 Safari/532.36.5",
"Opera/9.30.(X11; Linux i686; sr-ME) Presto/2.9.188 Version/10.00",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/532.2 (KHTML, like Gecko) Chrome/45.0.875.0 Safari/532.2",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_12_1) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/24.0.856.0 Safari/532.0",
"Opera/9.37.(X11; Linux x86_64; crh-UA) Presto/2.9.163 Version/11.00",
"Opera/8.76.(X11; Linux i686; ts-ZA) Presto/2.9.166 Version/12.00",
"Mozilla/5.0 (Windows; U; Windows NT 5.2) AppleWebKit/531.30.7 (KHTML, like Gecko) Version/5.0.4 Safari/531.30.7",
"Mozilla/5.0 (Windows NT 6.2; cy-GB; rv:1.9.1.20) Gecko/2014-07-06 12:55:04 Firefox/3.8",
"Mozilla/5.0 (Windows NT 5.0) AppleWebKit/533.0 (KHTML, like Gecko) Chrome/39.0.824.0 Safari/533.0",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.6.20) Gecko/2013-06-19 17:15:53 Firefox/5.0",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.0; Trident/3.1)",
"Opera/8.54.(X11; Linux x86_64; hr-HR) Presto/2.9.162 Version/11.00",
"Mozilla/5.0 (compatible; MSIE 5.0; Windows 95; Trident/3.0)",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.2 (KHTML, like Gecko) Chrome/33.0.875.0 Safari/536.2",
"Mozilla/5.0 (Windows 98; Win 9x 4.90; bho-IN; rv:1.9.2.20) Gecko/2015-08-16 17:42:30 Firefox/13.0",
"Opera/9.18.(X11; Linux x86_64; sk-SK) Presto/2.9.188 Version/11.00",
"Mozilla/5.0 (Windows NT 5.0) AppleWebKit/531.2 (KHTML, like Gecko) Chrome/44.0.839.0 Safari/531.2",
"Mozilla/5.0 (Windows 95) AppleWebKit/534.0 (KHTML, like Gecko) Chrome/60.0.817.0 Safari/534.0",
"Opera/9.72.(Windows NT 6.2; sl-SI) Presto/2.9.179 Version/12.00",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_11_1) AppleWebKit/535.0 (KHTML, like Gecko) Chrome/38.0.876.0 Safari/535.0",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_11_0 rv:5.0; iu-CA) AppleWebKit/532.25.1 (KHTML, like Gecko) Version/5.0.3 Safari/532.25.1",
"Mozilla/5.0 (Windows 98; Win 9x 4.90; li-BE; rv:1.9.0.20) Gecko/2014-07-22 20:06:36 Firefox/3.8",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_12_4) AppleWebKit/531.2 (KHTML, like Gecko) Chrome/35.0.842.0 Safari/531.2",
"Mozilla/5.0 (compatible; MSIE 7.0; Windows 98; Win 9x 4.90; Trident/3.1)",
"Mozilla/5.0 (Windows; U; Windows 98) AppleWebKit/531.31.2 (KHTML, like Gecko) Version/4.0.3 Safari/531.31.2",
"Opera/9.47.(Windows CE; ps-AF) Presto/2.9.164 Version/11.00",
"Opera/8.97.(Windows NT 5.0; hi-IN) Presto/2.9.178 Version/11.00",
"Opera/9.36.(X11; Linux i686; ht-HT) Presto/2.9.163 Version/10.00",
"Mozilla/5.0 (Windows NT 5.01; wo-SN; rv:1.9.0.20) Gecko/2014-10-15 12:43:47 Firefox/3.8",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows CE; Trident/3.0)",
"Mozilla/5.0 (Windows; U; Windows NT 5.1) AppleWebKit/534.3.5 (KHTML, like Gecko) Version/4.0.2 Safari/534.3.5",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.1)",
"Opera/9.26.(Windows NT 5.01; ln-CD) Presto/2.9.177 Version/11.00",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_11_0 rv:3.0; nso-ZA) AppleWebKit/533.5.6 (KHTML, like Gecko) Version/4.0.3 Safari/533.5.6",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5 rv:6.0; ru-RU) AppleWebKit/532.31.2 (KHTML, like Gecko) Version/5.0 Safari/532.31.2",
"Mozilla/5.0 (Windows 98) AppleWebKit/534.2 (KHTML, like Gecko) Chrome/40.0.834.0 Safari/534.2",
"Mozilla/5.0 (compatible; MSIE 5.0; Windows NT 5.2; Trident/3.0)",
"Mozilla/5.0 (compatible; MSIE 6.0; Windows CE; Trident/5.0)",
"Opera/8.93.(Windows NT 5.0; os-RU) Presto/2.9.189 Version/11.00",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.2; Trident/3.1)",
"Opera/9.65.(Windows 95; lt-LT) Presto/2.9.170 Version/12.00",
"Opera/9.29.(X11; Linux i686; shs-CA) Presto/2.9.160 Version/12.00",
"Opera/9.21.(X11; Linux x86_64; ast-ES) Presto/2.9.178 Version/10.00",
"Opera/9.10.(X11; Linux i686; zu-ZA) Presto/2.9.181 Version/10.00",
"Opera/9.42.(X11; Linux x86_64; tn-ZA) Presto/2.9.173 Version/12.00",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_10_2) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/54.0.836.0 Safari/532.1",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3 rv:6.0; tl-PH) AppleWebKit/533.45.6 (KHTML, like Gecko) Version/5.0.1 Safari/533.45.6",
"Mozilla/5.0 (X11; Linux i686) AppleWebKit/534.2 (KHTML, like Gecko) Chrome/22.0.899.0 Safari/534.2",
"Mozilla/5.0 (Windows NT 6.2; ps-AF; rv:1.9.0.20) Gecko/2017-06-25 17:45:36 Firefox/15.0",
"Mozilla/5.0 (compatible; MSIE 7.0; Windows 98; Win 9x 4.90; Trident/5.1)",
"Opera/9.81.(Windows NT 5.01; tl-PH) Presto/2.9.166 Version/11.00",
"Opera/9.10.(Windows NT 5.0; tg-TJ) Presto/2.9.166 Version/12.00",
"Opera/9.32.(X11; Linux i686; be-BY) Presto/2.9.189 Version/10.00",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_12_8) AppleWebKit/531.0 (KHTML, like Gecko) Chrome/57.0.888.0 Safari/531.0",
"Opera/8.84.(X11; Linux i686; tl-PH) Presto/2.9.179 Version/12.00",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_3 rv:4.0; cs-CZ) AppleWebKit/533.14.5 (KHTML, like Gecko) Version/5.1 Safari/533.14.5",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_9_1 rv:3.0; hak-TW) AppleWebKit/531.11.5 (KHTML, like Gecko) Version/5.1 Safari/531.11.5",
"Opera/8.30.(X11; Linux x86_64; bn-IN) Presto/2.9.177 Version/10.00",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_8_2 rv:4.0; ha-NG) AppleWebKit/532.29.7 (KHTML, like Gecko) Version/5.0 Safari/532.29.7",
"Opera/9.47.(X11; Linux i686; csb-PL) Presto/2.9.173 Version/11.00",
"Opera/8.26.(Windows NT 5.1; te-IN) Presto/2.9.171 Version/11.00",
"Mozilla/5.0 (Windows; U; Windows NT 5.01) AppleWebKit/533.2.1 (KHTML, like Gecko) Version/4.0 Safari/533.2.1",
"Mozilla/5.0 (Windows CE; mhr-RU; rv:1.9.1.20) Gecko/2013-09-04 03:51:11 Firefox/3.6.17",
"Mozilla/5.0 (Windows CE; ayc-PE; rv:1.9.1.20) Gecko/2012-03-22 06:17:46 Firefox/7.0",
"Mozilla/5.0 (Windows NT 6.2; am-ET; rv:1.9.0.20) Gecko/2012-02-10 20:01:54 Firefox/3.8",
"Mozilla/5.0 (compatible; MSIE 6.0; Windows NT 6.0; Trident/5.0)",
"Opera/8.98.(Windows NT 6.2; ts-ZA) Presto/2.9.166 Version/11.00",
"Mozilla/5.0 (Windows; U; Windows NT 5.01) AppleWebKit/534.10.4 (KHTML, like Gecko) Version/4.0.4 Safari/534.10.4",
"Opera/8.34.(Windows NT 6.1; zu-ZA) Presto/2.9.181 Version/10.00",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.5.20) Gecko/2010-08-13 01:23:48 Firefox/3.8",
"Mozilla/5.0 (compatible; MSIE 5.0; Windows NT 6.2; Trident/5.0)",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_12_9; rv:1.9.4.20) Gecko/2016-11-27 04:58:01 Firefox/3.8",
"Mozilla/5.0 (compatible; MSIE 7.0; Windows CE; Trident/3.0)",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_7) AppleWebKit/534.2 (KHTML, like Gecko) Chrome/42.0.805.0 Safari/534.2",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_6; rv:1.9.5.20) Gecko/2017-10-20 06:41:53 Firefox/3.6.14",
"Opera/9.61.(X11; Linux i686; yo-NG) Presto/2.9.162 Version/12.00",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_6_4 rv:6.0; gez-ET) AppleWebKit/533.9.6 (KHTML, like Gecko) Version/5.1 Safari/533.9.6",
"Opera/9.81.(X11; Linux x86_64; tg-TJ) Presto/2.9.168 Version/11.00",
"Opera/8.50.(X11; Linux i686; bho-IN) Presto/2.9.169 Version/10.00",
"Mozilla/5.0 (compatible; MSIE 5.0; Windows 98; Win 9x 4.90; Trident/4.1)",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.2; Trident/4.1)",
"Opera/9.72.(X11; Linux i686; kok-IN) Presto/2.9.164 Version/10.00",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_9_0) AppleWebKit/532.2 (KHTML, like Gecko) Chrome/42.0.897.0 Safari/532.2",
"Mozilla/5.0 (compatible; MSIE 7.0; Windows NT 5.0; Trident/5.1)",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/533.0 (KHTML, like Gecko) Chrome/20.0.814.0 Safari/533.0",
"Mozilla/5.0 (Windows 95) AppleWebKit/533.2 (KHTML, like Gecko) Chrome/52.0.856.0 Safari/533.2",
"Mozilla/5.0 (Windows CE) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/61.0.889.0 Safari/532.0",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0; Trident/4.1)",
"Opera/8.41.(Windows 98; sd-IN) Presto/2.9.166 Version/12.00",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_6_2; rv:1.9.4.20) Gecko/2011-08-04 23:05:29 Firefox/3.8",
"Opera/8.77.(X11; Linux x86_64; so-DJ) Presto/2.9.165 Version/10.00",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/531.1 (KHTML, like Gecko) Chrome/32.0.810.0 Safari/531.1",
"Opera/8.61.(X11; Linux i686; ne-NP) Presto/2.9.180 Version/10.00",
"Mozilla/5.0 (Windows NT 5.0; ko-KR; rv:1.9.2.20) Gecko/2012-08-01 11:58:20 Firefox/7.0",
"Mozilla/5.0 (X11; Linux i686) AppleWebKit/532.2 (KHTML, like Gecko) Chrome/63.0.858.0 Safari/532.2",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 4.0; Trident/3.0)",
"Mozilla/5.0 (compatible; MSIE 6.0; Windows NT 5.2; Trident/4.1)",
"Opera/8.99.(X11; Linux i686; ayc-PE) Presto/2.9.179 Version/11.00",
"Mozilla/5.0 (Windows; U; Windows NT 5.01) AppleWebKit/534.4.1 (KHTML, like Gecko) Version/5.1 Safari/534.4.1",
"Mozilla/5.0 (Windows NT 6.0) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/34.0.899.0 Safari/535.1",
"Opera/9.14.(X11; Linux x86_64; fa-IR) Presto/2.9.173 Version/12.00",
"Mozilla/5.0 (Windows 98; Win 9x 4.90; an-ES; rv:1.9.1.20) Gecko/2011-05-13 00:00:22 Firefox/3.8",
"Mozilla/5.0 (compatible; MSIE 7.0; Windows CE; Trident/5.0)",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_7_6 rv:4.0; ha-NG) AppleWebKit/534.20.5 (KHTML, like Gecko) Version/5.1 Safari/534.20.5",
"Mozilla/5.0 (Windows; U; Windows NT 6.1) AppleWebKit/534.26.4 (KHTML, like Gecko) Version/4.0.3 Safari/534.26.4",
"Mozilla/5.0 (Windows; U; Windows NT 5.1) AppleWebKit/534.18.4 (KHTML, like Gecko) Version/4.0.5 Safari/534.18.4",
"Mozilla/5.0 (Windows 98; Win 9x 4.90; af-ZA; rv:1.9.2.20) Gecko/2012-01-22 03:17:41 Firefox/3.8",
"Opera/8.35.(X11; Linux i686; tcy-IN) Presto/2.9.174 Version/12.00",
"Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/49.0.854.0 Safari/535.1",
"Mozilla/5.0 (Windows NT 4.0) AppleWebKit/536.1 (KHTML, like Gecko) Chrome/34.0.813.0 Safari/536.1",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/533.0 (KHTML, like Gecko) Chrome/15.0.895.0 Safari/533.0",
"Mozilla/5.0 (X11; Linux i686; rv:1.9.7.20) Gecko/2010-01-09 05:22:25 Firefox/3.8",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_9_1) AppleWebKit/535.2 (KHTML, like Gecko) Chrome/30.0.881.0 Safari/535.2",
"Opera/9.48.(X11; Linux i686; uk-UA) Presto/2.9.169 Version/12.00",
"Opera/9.53.(X11; Linux i686; ta-IN) Presto/2.9.189 Version/11.00",
"Opera/9.68.(Windows NT 4.0; ber-DZ) Presto/2.9.182 Version/10.00",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_12_9 rv:4.0; pl-PL) AppleWebKit/533.39.4 (KHTML, like Gecko) Version/4.1 Safari/533.39.4",
"Mozilla/5.0 (Windows 98; Win 9x 4.90) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/32.0.811.0 Safari/535.1",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/531.0 (KHTML, like Gecko) Chrome/32.0.830.0 Safari/531.0",
"Mozilla/5.0 (Windows; U; Windows CE) AppleWebKit/533.35.2 (KHTML, like Gecko) Version/4.1 Safari/533.35.2",
"Mozilla/5.0 (X11; Linux i686) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/26.0.881.0 Safari/532.0",
"Opera/9.92.(Windows NT 6.0; bhb-IN) Presto/2.9.186 Version/10.00",
"Opera/8.76.(X11; Linux i686; dz-BT) Presto/2.9.169 Version/10.00",
"Opera/8.42.(Windows CE; an-ES) Presto/2.9.179 Version/10.00",
"Mozilla/5.0 (compatible; MSIE 7.0; Windows 98; Win 9x 4.90; Trident/3.0)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_9) AppleWebKit/536.2 (KHTML, like Gecko) Chrome/51.0.898.0 Safari/536.2",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/15.0.867.0 Safari/532.1",
"Mozilla/5.0 (Windows; U; Windows 95) AppleWebKit/535.13.7 (KHTML, like Gecko) Version/4.0.5 Safari/535.13.7",
"Mozilla/5.0 (Windows 95; te-IN; rv:1.9.0.20) Gecko/2010-07-17 18:06:58 Firefox/3.8",
"Opera/8.32.(X11; Linux i686; mi-NZ) Presto/2.9.161 Version/11.00",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/533.2 (KHTML, like Gecko) Chrome/24.0.868.0 Safari/533.2",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_7 rv:6.0; bho-IN) AppleWebKit/531.15.1 (KHTML, like Gecko) Version/4.0.3 Safari/531.15.1",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_8_4) AppleWebKit/536.2 (KHTML, like Gecko) Chrome/46.0.883.0 Safari/536.2",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_11_4 rv:4.0; zu-ZA) AppleWebKit/533.42.7 (KHTML, like Gecko) Version/5.0.1 Safari/533.42.7",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_5_6) AppleWebKit/531.2 (KHTML, like Gecko) Chrome/15.0.899.0 Safari/531.2",
"Mozilla/5.0 (Windows 98; Win 9x 4.90) AppleWebKit/534.2 (KHTML, like Gecko) Chrome/30.0.858.0 Safari/534.2",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.0; Trident/3.0)",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_9_0) AppleWebKit/536.0 (KHTML, like Gecko) Chrome/22.0.810.0 Safari/536.0",
"Mozilla/5.0 (Windows 98) AppleWebKit/532.2 (KHTML, like Gecko) Chrome/44.0.815.0 Safari/532.2",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_10_0 rv:6.0; ur-PK) AppleWebKit/533.40.5 (KHTML, like Gecko) Version/4.0 Safari/533.40.5",
"Mozilla/5.0 (Windows; U; Windows NT 4.0) AppleWebKit/531.31.1 (KHTML, like Gecko) Version/4.0.2 Safari/531.31.1",
"Opera/9.50.(X11; Linux x86_64; csb-PL) Presto/2.9.168 Version/10.00",
"Opera/8.90.(Windows 98; Win 9x 4.90; so-KE) Presto/2.9.167 Version/10.00",
"Mozilla/5.0 (X11; Linux i686; rv:1.9.6.20) Gecko/2017-09-29 00:08:59 Firefox/3.8",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.7.20) Gecko/2013-06-09 05:53:52 Firefox/3.6.13",
"Mozilla/5.0 (X11; Linux i686; rv:1.9.6.20) Gecko/2010-04-22 21:13:07 Firefox/3.6.9",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/44.0.886.0 Safari/532.0",
"Mozilla/5.0 (Windows 98) AppleWebKit/534.0 (KHTML, like Gecko) Chrome/50.0.811.0 Safari/534.0",
"Mozilla/5.0 (Windows CE) AppleWebKit/534.1 (KHTML, like Gecko) Chrome/45.0.805.0 Safari/534.1",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_8_5 rv:5.0; kw-GB) AppleWebKit/535.25.6 (KHTML, like Gecko) Version/4.0 Safari/535.25.6",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_10_9) AppleWebKit/531.0 (KHTML, like Gecko) Chrome/58.0.818.0 Safari/531.0",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_5_3; rv:1.9.4.20) Gecko/2017-02-12 21:13:10 Firefox/3.8",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.01; Trident/3.0)",
"Mozilla/5.0 (Windows; U; Windows NT 6.1) AppleWebKit/532.20.4 (KHTML, like Gecko) Version/4.1 Safari/532.20.4",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/21.0.890.0 Safari/532.1",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.5.20) Gecko/2015-04-23 23:02:42 Firefox/3.6.15",
"Mozilla/5.0 (Windows NT 5.2; ha-NG; rv:1.9.0.20) Gecko/2018-11-07 13:41:33 Firefox/6.0",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_11_2) AppleWebKit/533.1 (KHTML, like Gecko) Chrome/17.0.889.0 Safari/533.1",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_8_8) AppleWebKit/536.1 (KHTML, like Gecko) Chrome/61.0.855.0 Safari/536.1",
"Mozilla/5.0 (compatible; MSIE 7.0; Windows NT 5.01; Trident/5.1)",
"Mozilla/5.0 (compatible; MSIE 6.0; Windows CE; Trident/5.1)",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.6.20) Gecko/2010-11-22 15:01:53 Firefox/3.8",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_7_5; rv:1.9.3.20) Gecko/2013-04-23 08:46:49 Firefox/3.6.19",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.0; Trident/5.0)",
"Mozilla/5.0 (X11; Linux i686; rv:1.9.6.20) Gecko/2015-04-14 10:37:38 Firefox/3.6.16",
"Mozilla/5.0 (compatible; MSIE 5.0; Windows NT 5.2; Trident/3.1)",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.5.20) Gecko/2012-07-11 19:59:19 Firefox/3.6.12",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4 rv:2.0; ks-IN) AppleWebKit/535.3.5 (KHTML, like Gecko) Version/4.0.5 Safari/535.3.5",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_6_8 rv:5.0; kok-IN) AppleWebKit/535.44.6 (KHTML, like Gecko) Version/5.1 Safari/535.44.6",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_11_5) AppleWebKit/536.1 (KHTML, like Gecko) Chrome/16.0.890.0 Safari/536.1",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/533.0 (KHTML, like Gecko) Chrome/47.0.835.0 Safari/533.0",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.7.20) Gecko/2017-08-19 10:14:52 Firefox/8.0",
"Opera/9.99.(Windows NT 6.0; kl-GL) Presto/2.9.172 Version/10.00",
"Mozilla/5.0 (Windows; U; Windows 98; Win 9x 4.90) AppleWebKit/535.41.3 (KHTML, like Gecko) Version/4.1 Safari/535.41.3",
"Mozilla/5.0 (Windows CE; tk-TM; rv:1.9.1.20) Gecko/2014-12-31 10:19:31 Firefox/3.6.1",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_9 rv:4.0; ha-NG) AppleWebKit/532.1.5 (KHTML, like Gecko) Version/5.0 Safari/532.1.5",
"Mozilla/5.0 (Windows; U; Windows CE) AppleWebKit/534.19.1 (KHTML, like Gecko) Version/4.1 Safari/534.19.1",
"Mozilla/5.0 (Windows NT 6.1; ss-ZA; rv:1.9.0.20) Gecko/2010-09-19 14:52:39 Firefox/3.6.7",
"Mozilla/5.0 (compatible; MSIE 6.0; Windows NT 6.0; Trident/5.1)",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_8_8 rv:4.0; mai-IN) AppleWebKit/534.11.5 (KHTML, like Gecko) Version/5.0 Safari/534.11.5",
"Opera/9.99.(X11; Linux i686; wo-SN) Presto/2.9.179 Version/10.00",
"Opera/8.63.(Windows NT 6.1; crh-UA) Presto/2.9.173 Version/10.00",
"Mozilla/5.0 (Windows NT 6.1; mhr-RU; rv:1.9.0.20) Gecko/2014-12-15 14:10:16 Firefox/3.8",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2 rv:3.0; wa-BE) AppleWebKit/534.10.1 (KHTML, like Gecko) Version/4.0 Safari/534.10.1",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/60.0.843.0 Safari/532.1",
"Mozilla/5.0 (compatible; MSIE 6.0; Windows NT 6.0; Trident/3.1)",
"Opera/9.32.(X11; Linux x86_64; ar-IQ) Presto/2.9.170 Version/11.00",
"Mozilla/5.0 (Windows NT 6.0) AppleWebKit/534.1 (KHTML, like Gecko) Chrome/60.0.829.0 Safari/534.1",
"Mozilla/5.0 (X11; Linux i686; rv:1.9.5.20) Gecko/2015-04-14 04:15:53 Firefox/3.6.10",
"Mozilla/5.0 (Windows 98) AppleWebKit/532.2 (KHTML, like Gecko) Chrome/47.0.848.0 Safari/532.2",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_9_0) AppleWebKit/536.1 (KHTML, like Gecko) Chrome/55.0.882.0 Safari/536.1",
"Mozilla/5.0 (Windows NT 5.2; hu-HU; rv:1.9.0.20) Gecko/2012-09-03 23:00:45 Firefox/3.6.5",
"Mozilla/5.0 (Windows NT 6.0; zh-HK; rv:1.9.0.20) Gecko/2014-11-15 01:43:43 Firefox/3.8",
"Opera/9.52.(Windows NT 5.01; cy-GB) Presto/2.9.181 Version/11.00",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_7_6 rv:3.0; ka-GE) AppleWebKit/531.8.3 (KHTML, like Gecko) Version/4.0.1 Safari/531.8.3",
"Opera/9.14.(Windows NT 5.0; so-KE) Presto/2.9.190 Version/11.00",
"Opera/9.43.(Windows NT 5.2; mn-MN) Presto/2.9.166 Version/10.00",
"Mozilla/5.0 (Windows NT 6.1; wal-ET; rv:1.9.1.20) Gecko/2015-04-27 02:20:25 Firefox/3.8",
"Mozilla/5.0 (Windows; U; Windows NT 6.0) AppleWebKit/535.39.5 (KHTML, like Gecko) Version/5.0.5 Safari/535.39.5",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_12_6 rv:2.0; lt-LT) AppleWebKit/533.11.6 (KHTML, like Gecko) Version/5.0 Safari/533.11.6",
"Opera/8.70.(Windows NT 5.0; ar-DJ) Presto/2.9.172 Version/12.00",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_7_8 rv:2.0; lv-LV) AppleWebKit/533.32.1 (KHTML, like Gecko) Version/5.0.3 Safari/533.32.1",
"Mozilla/5.0 (Windows 95; as-IN; rv:1.9.2.20) Gecko/2018-02-15 06:25:35 Firefox/3.6.10",
"Opera/8.64.(Windows NT 5.0; fo-FO) Presto/2.9.161 Version/10.00",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_9_1) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/20.0.815.0 Safari/532.1",
"Mozilla/5.0 (X11; Linux i686; rv:1.9.7.20) Gecko/2015-06-25 05:03:28 Firefox/11.0",
"Mozilla/5.0 (X11; Linux i686) AppleWebKit/532.2 (KHTML, like Gecko) Chrome/60.0.808.0 Safari/532.2",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_9 rv:5.0; ml-IN) AppleWebKit/534.27.6 (KHTML, like Gecko) Version/4.0.2 Safari/534.27.6",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_12_2 rv:5.0; ak-GH) AppleWebKit/535.46.1 (KHTML, like Gecko) Version/4.0.2 Safari/535.46.1",
"Opera/9.86.(X11; Linux i686; mn-MN) Presto/2.9.188 Version/11.00",
"Opera/9.75.(X11; Linux i686; ve-ZA) Presto/2.9.188 Version/12.00",
"Mozilla/5.0 (compatible; MSIE 5.0; Windows NT 5.01; Trident/4.0)",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/37.0.888.0 Safari/535.1",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_7 rv:5.0; wo-SN) AppleWebKit/534.36.6 (KHTML, like Gecko) Version/5.1 Safari/534.36.6",
"Opera/9.48.(Windows 95; nso-ZA) Presto/2.9.168 Version/12.00",
"Mozilla/5.0 (X11; Linux i686; rv:1.9.5.20) Gecko/2017-03-14 20:15:49 Firefox/7.0",
"Opera/8.86.(Windows 98; tig-ER) Presto/2.9.182 Version/11.00",
"Opera/8.70.(X11; Linux i686; tt-RU) Presto/2.9.190 Version/11.00",
"Opera/8.68.(X11; Linux i686; crh-UA) Presto/2.9.166 Version/11.00",
"Opera/8.19.(Windows NT 5.01; kok-IN) Presto/2.9.179 Version/10.00",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_5_1) AppleWebKit/536.1 (KHTML, like Gecko) Chrome/62.0.884.0 Safari/536.1",
"Mozilla/5.0 (compatible; MSIE 5.0; Windows NT 5.0; Trident/3.0)",
"Opera/9.71.(X11; Linux i686; sw-KE) Presto/2.9.178 Version/12.00",
"Opera/8.78.(X11; Linux x86_64; my-MM) Presto/2.9.169 Version/11.00",
"Opera/9.40.(Windows CE; mt-MT) Presto/2.9.176 Version/11.00",
"Opera/9.96.(X11; Linux i686; en-PH) Presto/2.9.169 Version/11.00",
"Opera/8.78.(X11; Linux i686; es-UY) Presto/2.9.165 Version/10.00",
"Mozilla/5.0 (compatible; MSIE 5.0; Windows NT 6.0; Trident/3.1)",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/533.0 (KHTML, like Gecko) Chrome/62.0.806.0 Safari/533.0",
"Opera/9.10.(Windows 98; sk-SK) Presto/2.9.163 Version/12.00",
"Mozilla/5.0 (compatible; MSIE 6.0; Windows CE; Trident/3.1)",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_8_7) AppleWebKit/535.2 (KHTML, like Gecko) Chrome/62.0.884.0 Safari/535.2",
"Mozilla/5.0 (Windows; U; Windows NT 5.2) AppleWebKit/531.14.2 (KHTML, like Gecko) Version/5.1 Safari/531.14.2",
"Opera/9.28.(X11; Linux x86_64; fur-IT) Presto/2.9.176 Version/12.00",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_7_2 rv:6.0; ve-ZA) AppleWebKit/533.49.7 (KHTML, like Gecko) Version/4.0 Safari/533.49.7",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/532.2 (KHTML, like Gecko) Chrome/38.0.851.0 Safari/532.2",
"Mozilla/5.0 (Windows; U; Windows 95) AppleWebKit/533.23.4 (KHTML, like Gecko) Version/4.0.4 Safari/533.23.4",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/534.0 (KHTML, like Gecko) Chrome/57.0.859.0 Safari/534.0",
"Mozilla/5.0 (Windows 98; Win 9x 4.90) AppleWebKit/536.2 (KHTML, like Gecko) Chrome/39.0.821.0 Safari/536.2",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_8_6; rv:1.9.4.20) Gecko/2019-07-10 01:16:59 Firefox/3.6.6",
"Opera/8.27.(Windows 95; niu-NZ) Presto/2.9.162 Version/10.00",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_7; rv:1.9.3.20) Gecko/2016-10-13 00:08:47 Firefox/12.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_5_2; rv:1.9.5.20) Gecko/2010-04-23 15:08:06 Firefox/3.8",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_7) AppleWebKit/531.1 (KHTML, like Gecko) Chrome/25.0.844.0 Safari/531.1",
"Opera/8.68.(Windows NT 6.0; ce-RU) Presto/2.9.187 Version/10.00",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_9 rv:5.0; sc-IT) AppleWebKit/534.17.7 (KHTML, like Gecko) Version/4.0 Safari/534.17.7",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_1 rv:5.0; yi-US) AppleWebKit/534.29.3 (KHTML, like Gecko) Version/5.0.4 Safari/534.29.3",
"Mozilla/5.0 (compatible; MSIE 7.0; Windows NT 5.01; Trident/3.1)",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_0 rv:4.0; lo-LA) AppleWebKit/532.26.1 (KHTML, like Gecko) Version/5.0.4 Safari/532.26.1",
"Mozilla/5.0 (Windows CE; ms-MY; rv:1.9.0.20) Gecko/2011-02-13 03:23:41 Firefox/6.0",
"Opera/9.63.(Windows 95; wal-ET) Presto/2.9.164 Version/10.00",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows CE; Trident/5.1)",
"Mozilla/5.0 (Windows 95; quz-PE; rv:1.9.0.20) Gecko/2018-12-12 23:40:15 Firefox/5.0",
"Mozilla/5.0 (X11; Linux i686) AppleWebKit/531.0 (KHTML, like Gecko) Chrome/63.0.864.0 Safari/531.0",
"Opera/9.49.(X11; Linux x86_64; gl-ES) Presto/2.9.179 Version/10.00",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.5.20) Gecko/2013-02-14 20:52:42 Firefox/3.8",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_12_9) AppleWebKit/535.2 (KHTML, like Gecko) Chrome/38.0.815.0 Safari/535.2",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_8_0; rv:1.9.5.20) Gecko/2012-01-28 07:55:45 Firefox/5.0",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.5.20) Gecko/2017-10-16 01:01:03 Firefox/3.8",
"Mozilla/5.0 (Windows; U; Windows NT 5.01) AppleWebKit/532.20.2 (KHTML, like Gecko) Version/5.1 Safari/532.20.2",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_10_0 rv:6.0; ca-IT) AppleWebKit/535.18.4 (KHTML, like Gecko) Version/5.0.2 Safari/535.18.4",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows CE; Trident/3.1)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_0; rv:1.9.2.20) Gecko/2014-04-11 03:14:43 Firefox/3.6.12",
"Mozilla/5.0 (compatible; MSIE 5.0; Windows NT 6.1; Trident/5.0)",
"Opera/8.30.(X11; Linux i686; hsb-DE) Presto/2.9.181 Version/10.00",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_2) AppleWebKit/535.2 (KHTML, like Gecko) Chrome/45.0.818.0 Safari/535.2",
"Mozilla/5.0 (compatible; MSIE 7.0; Windows NT 4.0; Trident/3.1)",
"Mozilla/5.0 (Windows 98; Win 9x 4.90; ig-NG; rv:1.9.2.20) Gecko/2012-10-28 12:02:44 Firefox/3.6.18",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.2 (KHTML, like Gecko) Chrome/32.0.844.0 Safari/536.2",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_10_6) AppleWebKit/536.0 (KHTML, like Gecko) Chrome/58.0.801.0 Safari/536.0",
"Mozilla/5.0 (X11; Linux i686; rv:1.9.6.20) Gecko/2018-01-10 13:33:19 Firefox/3.8",
"Opera/8.27.(X11; Linux x86_64; sd-PK) Presto/2.9.167 Version/10.00",
"Mozilla/5.0 (Windows; U; Windows NT 6.0) AppleWebKit/531.33.6 (KHTML, like Gecko) Version/5.0.4 Safari/531.33.6",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_9_4; rv:1.9.2.20) Gecko/2019-07-19 11:10:35 Firefox/4.0",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.6.20) Gecko/2016-01-10 11:34:34 Firefox/3.6.12",
"Mozilla/5.0 (Windows; U; Windows NT 5.2) AppleWebKit/532.34.4 (KHTML, like Gecko) Version/4.0 Safari/532.34.4",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_6_8 rv:3.0; ku-TR) AppleWebKit/534.8.6 (KHTML, like Gecko) Version/5.0.3 Safari/534.8.6",
"Mozilla/5.0 (Windows 98) AppleWebKit/534.1 (KHTML, like Gecko) Chrome/53.0.812.0 Safari/534.1",
"Opera/9.80.(X11; Linux i686; mn-MN) Presto/2.9.161 Version/12.00",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_5_1) AppleWebKit/535.0 (KHTML, like Gecko) Chrome/43.0.891.0 Safari/535.0",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_10_4; rv:1.9.5.20) Gecko/2013-09-08 08:46:17 Firefox/3.6.4",
"Opera/9.71.(Windows 95; ig-NG) Presto/2.9.185 Version/12.00",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.01; Trident/3.1)",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.6.20) Gecko/2013-07-01 12:01:27 Firefox/3.6.12",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_8_8 rv:4.0; ik-CA) AppleWebKit/535.8.3 (KHTML, like Gecko) Version/5.1 Safari/535.8.3",
"Mozilla/5.0 (Windows; U; Windows NT 4.0) AppleWebKit/535.24.3 (KHTML, like Gecko) Version/5.0.4 Safari/535.24.3",
"Mozilla/5.0 (compatible; MSIE 7.0; Windows NT 5.01; Trident/5.0)",
"Opera/8.24.(X11; Linux i686; br-FR) Presto/2.9.166 Version/12.00",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_7_8 rv:3.0; fa-IR) AppleWebKit/534.45.4 (KHTML, like Gecko) Version/4.0 Safari/534.45.4",
"Opera/8.66.(X11; Linux x86_64; bn-BD) Presto/2.9.188 Version/10.00",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_7_0) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/57.0.845.0 Safari/535.1",
"Opera/8.47.(Windows CE; kok-IN) Presto/2.9.173 Version/10.00",
"Opera/9.22.(Windows NT 4.0; ia-FR) Presto/2.9.186 Version/11.00",
"Opera/8.10.(X11; Linux i686; hu-HU) Presto/2.9.184 Version/10.00",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows CE; Trident/4.0)",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.2 (KHTML, like Gecko) Chrome/26.0.849.0 Safari/534.2",
"Mozilla/5.0 (compatible; MSIE 5.0; Windows NT 5.01; Trident/4.1)",
"Opera/8.11.(Windows NT 5.0; brx-IN) Presto/2.9.164 Version/12.00",
"Mozilla/5.0 (Windows; U; Windows NT 6.2) AppleWebKit/533.40.2 (KHTML, like Gecko) Version/5.0.5 Safari/533.40.2",
"Mozilla/5.0 (Windows 95) AppleWebKit/531.2 (KHTML, like Gecko) Chrome/20.0.879.0 Safari/531.2",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_11_6 rv:6.0; gl-ES) AppleWebKit/532.48.5 (KHTML, like Gecko) Version/5.0.5 Safari/532.48.5",
"Opera/8.32.(Windows 95; nan-TW) Presto/2.9.161 Version/10.00",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_12_3 rv:5.0; bhb-IN) AppleWebKit/535.41.6 (KHTML, like Gecko) Version/4.1 Safari/535.41.6",
"Opera/8.61.(Windows NT 5.0; et-EE) Presto/2.9.183 Version/10.00",
"Opera/9.84.(Windows 98; lij-IT) Presto/2.9.170 Version/10.00",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows 98; Win 9x 4.90; Trident/4.1)",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/5.0)",
"Opera/8.16.(X11; Linux x86_64; mni-IN) Presto/2.9.187 Version/11.00",
"Mozilla/5.0 (compatible; MSIE 6.0; Windows NT 5.1; Trident/3.1)",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_12_6 rv:5.0; kn-IN) AppleWebKit/532.24.7 (KHTML, like Gecko) Version/4.0.5 Safari/532.24.7",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_5_5 rv:4.0; mni-IN) AppleWebKit/532.2.3 (KHTML, like Gecko) Version/5.0.4 Safari/532.2.3",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.5.20) Gecko/2018-07-15 07:44:19 Firefox/9.0",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_11_0) AppleWebKit/536.0 (KHTML, like Gecko) Chrome/14.0.836.0 Safari/536.0",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/533.0 (KHTML, like Gecko) Chrome/46.0.811.0 Safari/533.0",
"Mozilla/5.0 (X11; Linux x86_64; rv:1.9.7.20) Gecko/2011-08-03 19:36:23 Firefox/7.0",
"Opera/8.82.(Windows CE; cy-GB) Presto/2.9.173 Version/12.00",
"Mozilla/5.0 (Windows NT 4.0) AppleWebKit/531.1 (KHTML, like Gecko) Chrome/61.0.852.0 Safari/531.1",
"Opera/9.88.(Windows NT 6.0; lij-IT) Presto/2.9.187 Version/11.00",
"Opera/9.25.(Windows NT 5.1; mt-MT) Presto/2.9.184 Version/10.00",
"Opera/9.82.(Windows NT 6.2; bho-IN) Presto/2.9.183 Version/12.00",
"Mozilla/5.0 (compatible; MSIE 6.0; Windows 95; Trident/5.1)",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.2 (KHTML, like Gecko) Chrome/56.0.880.0 Safari/536.2",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/533.2 (KHTML, like Gecko) Chrome/25.0.863.0 Safari/533.2",
"Opera/8.90.(X11; Linux x86_64; gd-GB) Presto/2.9.181 Version/11.00",
"Opera/9.46.(X11; Linux i686; pl-PL) Presto/2.9.166 Version/10.00",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2) AppleWebKit/533.2 (KHTML, like Gecko) Chrome/49.0.818.0 Safari/533.2",
"Mozilla/5.0 (compatible; MSIE 6.0; Windows NT 6.0; Trident/4.1)",
"Opera/9.52.(Windows 98; Win 9x 4.90; lij-IT) Presto/2.9.171 Version/12.00",
"Opera/9.52.(Windows NT 5.01; az-AZ) Presto/2.9.178 Version/11.00",
"Mozilla/5.0 (Macintosh; PPC Mac OS X 10_11_2) AppleWebKit/532.2 (KHTML, like Gecko) Chrome/39.0.807.0 Safari/532.2",
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_9_7; rv:1.9.5.20) Gecko/2012-12-13 11:13:39 Firefox/6.0",
"Mozilla/5.0 (Windows CE) AppleWebKit/532.2 (KHTML, like Gecko) Chrome/62.0.897.0 Safari/532.2",
"Mozilla/5.0 (Windows; U; Windows NT 5.01) AppleWebKit/535.9.4 (KHTML, like Gecko) Version/5.0.4 Safari/535.9.4",
"Opera/8.15.(Windows 98; quz-PE) Presto/2.9.185 Version/12.00",
"Mozilla/5.0 (Windows CE; hi-IN; rv:1.9.2.20) Gecko/2016-04-17 03:12:35 Firefox/3.8",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 5.2; Trident/5.1)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_0 rv:2.0; si-LK) AppleWebKit/535.47.3 (KHTML, like Gecko) Version/4.0.2 Safari/535.47.3",
"Opera/9.50.(X11; Linux x86_64; yi-US) Presto/2.9.185 Version/11.00",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows CE; Trident/5.0)"
]
}
| 114.831516
| 158
| 0.658465
| 23,635
| 115,865
| 3.195007
| 0.03186
| 0.025452
| 0.102259
| 0.042641
| 0.871679
| 0.846479
| 0.826602
| 0.754271
| 0.676431
| 0.600366
| 0
| 0.211664
| 0.172908
| 115,865
| 1,009
| 159
| 114.831516
| 0.576327
| 0
| 0
| 0
| 0
| 0.993049
| 0.895793
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
86f683dd3611e8418a468b5b2b962586adbbb6ae
| 7,924
|
py
|
Python
|
tools/benchmark_result_analysis.py
|
conquerhuang/FDSiamFC
|
d43a254b0fa1e309d6625132b25950803bf34b0d
|
[
"MIT"
] | 2
|
2022-03-18T02:14:36.000Z
|
2022-03-18T02:14:39.000Z
|
tools/benchmark_result_analysis.py
|
scott-mao/FDSiamFC
|
d43a254b0fa1e309d6625132b25950803bf34b0d
|
[
"MIT"
] | null | null | null |
tools/benchmark_result_analysis.py
|
scott-mao/FDSiamFC
|
d43a254b0fa1e309d6625132b25950803bf34b0d
|
[
"MIT"
] | 1
|
2022-02-28T05:15:45.000Z
|
2022-02-28T05:15:45.000Z
|
from matplotlib import pyplot as plt
import os
from glob import glob
import json
import numpy as np
def result_analysis_otb2015(result_dir):
    """Plot a grouped bar chart comparing tracker results on OTB2015.

    Reads ``performance.json`` from every ``result_dir/OTB2015/<tracker>/``
    folder whose name contains ``'SiamFC_'``, ranks the trackers by a
    weighted combination of their metrics, and shows success score,
    precision score and success rate as grouped bars (best tracker first).

    Args:
        result_dir: Root directory of the got10k-style report folders.
    """
    # Collect result folders for OTB2015 and keep only the SiamFC_ variants.
    tracker_names = os.listdir(os.path.join(result_dir, 'OTB2015'))
    tracker_names = [x for x in tracker_names if 'SiamFC_' in x]
    tracker_results = []
    for tracker_name in tracker_names:
        tracker_dir = os.path.join(result_dir, 'OTB2015', tracker_name, 'performance.json')
        with open(tracker_dir) as fp:
            tracker_results.append(json.load(fp))
    # Each report has a single top-level key (the tracker entry); its
    # 'overall' section holds success_score, precision_score, success_rate.
    success_scores = []
    precision_scores = []
    success_rates = []
    for result in tracker_results:
        overall = result[next(iter(result))]['overall']
        success_scores.append(overall['success_score'])
        precision_scores.append(overall['precision_score'])
        success_rates.append(overall['success_rate'])
    # Rank trackers by a fixed weighted score, descending.
    scores = [0.4 * a + 0.4 * b + 0.2 * c
              for a, b, c in zip(success_scores, precision_scores, success_rates)]
    # np.float was removed in NumPy 1.20; use the builtin float dtype instead.
    order = np.asarray(scores, dtype=float).argsort()[::-1]
    success_scores = [success_scores[i] for i in order]
    precision_scores = [precision_scores[i] for i in order]
    success_rates = [success_rates[i] for i in order]
    tracker_names = [tracker_names[i] for i in order]
    # Draw the grouped bars.
    x = np.arange(1, len(success_scores) + 1)
    bar_width = 0.2
    p1 = plt.bar(x, success_scores, bar_width, label='success score', tick_label=tracker_names)
    p2 = plt.bar(x + bar_width, precision_scores, bar_width, label='precision score')
    p3 = plt.bar(x + 2 * bar_width, success_rates, bar_width, label='success rate')
    plt.xticks(rotation=90)
    plt.legend([p1, p2, p3], ['success score', 'precision score', 'success rate'])
    plt.show()
def result_analysis_otb2013(result_dir):
    """Plot a grouped bar chart comparing tracker results on OTB2013.

    Reads ``performance.json`` from every ``result_dir/OTB2013/<tracker>/``
    folder whose name contains ``'SiamFC_'``, ranks the trackers by a
    weighted combination of their metrics, and shows success score,
    precision score and success rate as grouped bars (best tracker first).

    Args:
        result_dir: Root directory of the got10k-style report folders.
    """
    # Collect result folders for OTB2013 and keep only the SiamFC_ variants.
    tracker_names = os.listdir(os.path.join(result_dir, 'OTB2013'))
    tracker_names = [x for x in tracker_names if 'SiamFC_' in x]
    tracker_results = []
    for tracker_name in tracker_names:
        tracker_dir = os.path.join(result_dir, 'OTB2013', tracker_name, 'performance.json')
        with open(tracker_dir) as fp:
            tracker_results.append(json.load(fp))
    # Each report has a single top-level key (the tracker entry); its
    # 'overall' section holds success_score, precision_score, success_rate.
    success_scores = []
    precision_scores = []
    success_rates = []
    for result in tracker_results:
        overall = result[next(iter(result))]['overall']
        success_scores.append(overall['success_score'])
        precision_scores.append(overall['precision_score'])
        success_rates.append(overall['success_rate'])
    # Rank trackers by a fixed weighted score, descending.
    scores = [0.4 * a + 0.4 * b + 0.2 * c
              for a, b, c in zip(success_scores, precision_scores, success_rates)]
    # np.float was removed in NumPy 1.20; use the builtin float dtype instead.
    order = np.asarray(scores, dtype=float).argsort()[::-1]
    success_scores = [success_scores[i] for i in order]
    precision_scores = [precision_scores[i] for i in order]
    success_rates = [success_rates[i] for i in order]
    tracker_names = [tracker_names[i] for i in order]
    # Draw the grouped bars.
    x = np.arange(1, len(success_scores) + 1)
    bar_width = 0.2
    p1 = plt.bar(x, success_scores, bar_width, label='success score', tick_label=tracker_names)
    p2 = plt.bar(x + bar_width, precision_scores, bar_width, label='precision score')
    p3 = plt.bar(x + 2 * bar_width, success_rates, bar_width, label='success rate')
    plt.xticks(rotation=90)
    plt.legend([p1, p2, p3], ['success score', 'precision score', 'success rate'])
    plt.show()
def result_analysis_tb50(result_dir):
    """Plot a grouped bar chart comparing tracker results on OTB tb50.

    Reads ``performance.json`` from every ``result_dir/OTBtb50/<tracker>/``
    folder (no name filtering, unlike the OTB2013/OTB2015 variants), ranks
    the trackers by a weighted combination of their metrics, and shows
    success score, precision score and success rate as grouped bars.

    Args:
        result_dir: Root directory of the got10k-style report folders.
    """
    tracker_names = os.listdir(os.path.join(result_dir, 'OTBtb50'))
    tracker_results = []
    for tracker_name in tracker_names:
        tracker_dir = os.path.join(result_dir, 'OTBtb50', tracker_name, 'performance.json')
        with open(tracker_dir) as fp:
            tracker_results.append(json.load(fp))
    # Each report has a single top-level key (the tracker entry); its
    # 'overall' section holds success_score, precision_score, success_rate.
    success_scores = []
    precision_scores = []
    success_rates = []
    for result in tracker_results:
        overall = result[next(iter(result))]['overall']
        success_scores.append(overall['success_score'])
        precision_scores.append(overall['precision_score'])
        success_rates.append(overall['success_rate'])
    # Rank trackers by a fixed weighted score, descending.
    scores = [0.4 * a + 0.4 * b + 0.2 * c
              for a, b, c in zip(success_scores, precision_scores, success_rates)]
    # np.float was removed in NumPy 1.20; use the builtin float dtype instead.
    order = np.asarray(scores, dtype=float).argsort()[::-1]
    success_scores = [success_scores[i] for i in order]
    precision_scores = [precision_scores[i] for i in order]
    success_rates = [success_rates[i] for i in order]
    tracker_names = [tracker_names[i] for i in order]
    # Draw the grouped bars.
    x = np.arange(1, len(success_scores) + 1)
    bar_width = 0.2
    p1 = plt.bar(x, success_scores, bar_width, label='success score', tick_label=tracker_names)
    p2 = plt.bar(x + bar_width, precision_scores, bar_width, label='precision score')
    p3 = plt.bar(x + 2 * bar_width, success_rates, bar_width, label='success rate')
    plt.xticks(rotation=90)
    plt.legend([p1, p2, p3], ['success score', 'precision score', 'success rate'])
    plt.show()
def result_analysis_vot2016(result_dir):
    """Plot a grouped bar chart comparing tracker results on VOT2016.

    Reads ``performance.json`` from every ``result_dir/VOT2016/<tracker>/``
    folder, ranks trackers by the mean of accuracy and scaled robustness,
    and shows both metrics as grouped bars (best tracker first).

    NOTE(review): the bar/legend labels say 'success score' and
    'precision score(1/50)' although the plotted values are VOT accuracy
    and robustness — kept as-is to preserve output, but likely a
    copy-paste label; confirm with the author.

    Args:
        result_dir: Root directory of the got10k-style report folders.
    """
    tracker_names = os.listdir(os.path.join(result_dir, 'VOT2016'))
    tracker_results = []
    for tracker_name in tracker_names:
        tracker_dir = os.path.join(result_dir, 'VOT2016', tracker_name, 'performance.json')
        with open(tracker_dir) as fp:
            tracker_results.append(json.load(fp))
    # Each report has a single top-level key (the tracker entry) holding
    # 'accuracy' and 'robustness'.
    accuracy = []
    robustness = []
    for result in tracker_results:
        entry = result[next(iter(result))]
        accuracy.append(entry['accuracy'])
        # Robustness is divided by 50 so it fits on the same axis as accuracy.
        robustness.append(entry['robustness'] / 50.)
    # Rank trackers by the mean of the two metrics, descending.
    scores = [0.5 * a + 0.5 * b for a, b in zip(accuracy, robustness)]
    # np.float was removed in NumPy 1.20; use the builtin float dtype instead.
    order = np.asarray(scores, dtype=float).argsort()[::-1]
    accuracy = [accuracy[i] for i in order]
    robustness = [robustness[i] for i in order]
    tracker_names = [tracker_names[i] for i in order]
    # Draw the grouped bars (the redundant `p1 = ax1 = ...` double
    # assignment from the original is dropped; ax1 was never used).
    x = np.arange(1, len(accuracy) + 1)
    bar_width = 0.2
    p1 = plt.bar(x, accuracy, bar_width, label='success score', tick_label=tracker_names)
    p2 = plt.bar(x + bar_width, robustness, bar_width, label='precision score(1/50)')
    plt.xticks(rotation=90)
    plt.legend([p1, p2], ['success score', 'precision score(1/50)'])
    plt.show()
def main():
    """Entry point: analyse benchmark reports found under ``./reports``.

    Only the OTB2013 analysis is currently enabled; the other benchmark
    analyses are kept commented out for quick switching.
    """
    reports_root = r'./reports'  # got10k-style report directory
    # result_analysis_otb2015(reports_root)
    result_analysis_otb2013(reports_root)
    # result_analysis_tb50(reports_root)
    # result_analysis_vot2016(reports_root)


if __name__ == '__main__':
    main()
| 43.300546
| 97
| 0.680086
| 1,025
| 7,924
| 5.07122
| 0.097561
| 0.06464
| 0.018276
| 0.025587
| 0.933051
| 0.885725
| 0.86187
| 0.857445
| 0.851289
| 0.851289
| 0
| 0.0285
| 0.194094
| 7,924
| 182
| 98
| 43.538462
| 0.785468
| 0.122539
| 0
| 0.742188
| 0
| 0
| 0.097923
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039063
| false
| 0.03125
| 0.039063
| 0
| 0.078125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d4b10a62691f03cb6add21b467186a66a747f74d
| 12,164
|
py
|
Python
|
tools/accuracy_checker/tests/test_model_evaluator.py
|
apankratovantonp/open_model_zoo
|
e372d4173e50741a6828cda415d55c37320f89cd
|
[
"Apache-2.0"
] | 5
|
2020-03-09T07:39:04.000Z
|
2021-08-16T07:17:28.000Z
|
tools/accuracy_checker/tests/test_model_evaluator.py
|
ananda89/open_model_zoo
|
e372d4173e50741a6828cda415d55c37320f89cd
|
[
"Apache-2.0"
] | 6
|
2020-09-26T01:24:39.000Z
|
2022-02-10T02:16:03.000Z
|
tools/accuracy_checker/tests/test_model_evaluator.py
|
ananda89/open_model_zoo
|
e372d4173e50741a6828cda415d55c37320f89cd
|
[
"Apache-2.0"
] | 3
|
2020-07-06T08:45:26.000Z
|
2020-11-12T10:14:45.000Z
|
"""
Copyright (c) 2019 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from unittest.mock import Mock, MagicMock
from accuracy_checker.evaluators import ModelEvaluator
class TestModelEvaluator:
    """Interaction tests for ModelEvaluator in synchronous mode.

    Every collaborator (launcher, input feeder, adapter, data reader,
    preprocessor, postprocessor, dataset, metric) is a Mock/MagicMock, so
    these tests assert only the call pattern: which collaborators are
    invoked (and how often) for each combination of storing/loading
    predictions and dataset-level postprocessors.
    """

    def setup_method(self):
        """Build a ModelEvaluator wired entirely with mocks (sync mode)."""
        self.launcher = Mock()
        # Synchronous path: predict() is expected to be called directly.
        self.launcher.predict.return_value = []
        # Single stand-in data item shared by reader and preprocessor.
        data = MagicMock(data=MagicMock(), metadata=MagicMock(), identifier=0)
        self.preprocessor = Mock()
        self.preprocessor.process = Mock(return_value=data)
        self.postprocessor = Mock()
        self.adapter = MagicMock(return_value=[])
        self.input_feeder = Mock()
        self.data_reader = Mock(return_value=data)
        self.data_reader.data_source = 'source'
        # Two annotations in two containers -> dataset iterates two batches.
        annotation_0 = MagicMock()
        annotation_0.identifier = 0
        annotation_0.metadata = {'data_source': MagicMock()}
        annotation_1 = MagicMock()
        annotation_1.identifier = 1
        annotation_1.metadata = {'data_source': MagicMock()}
        annotation_container_0 = MagicMock()
        annotation_container_0.values = MagicMock(return_value=[annotation_0])
        annotation_container_1 = MagicMock()
        annotation_container_1.values = MagicMock(return_value=([annotation_1]))
        self.annotations = [[annotation_container_0], [annotation_container_1]]
        self.dataset = MagicMock()
        self.dataset.__iter__.return_value = self.annotations
        # process_batch is consumed once per batch (two queued results);
        # process_dataset / full_process return both batches in one call.
        self.postprocessor.process_batch = Mock(side_effect=[
            ([annotation_container_0], [annotation_container_0]), ([annotation_container_1], [annotation_container_1])
        ])
        self.postprocessor.process_dataset = Mock(return_value=(
            ([annotation_container_0], [annotation_container_0]), ([annotation_container_1], [annotation_container_1])
        ))
        self.postprocessor.full_process = Mock(return_value=(
            ([annotation_container_0], [annotation_container_0]), ([annotation_container_1], [annotation_container_1])
        ))
        self.metric = Mock()
        self.metric.update_metrics_on_batch = Mock()
        self.evaluator = ModelEvaluator(
            self.launcher,
            self.input_feeder,
            self.adapter,
            self.data_reader,
            self.preprocessor,
            self.postprocessor,
            self.dataset,
            self.metric,
            False  # NOTE(review): presumably the async flag (the async tests pass True) — confirm parameter name
        )
        # Stub out persistence so tests only observe whether it was called.
        self.evaluator.store_predictions = Mock()
        self.evaluator.load = Mock(return_value=(
            ([annotation_container_0], [annotation_container_0]), ([annotation_container_1], [annotation_container_1])
        ))

    def test_process_dataset_without_storing_predictions_and_dataset_processors(self):
        """No store path, no dataset processors: metrics update per batch."""
        self.postprocessor.has_dataset_processors = False
        self.evaluator.dataset_processor(None, None)
        assert not self.evaluator.store_predictions.called
        assert not self.evaluator.load.called
        assert self.launcher.predict.called
        assert self.postprocessor.process_batch.called
        assert self.metric.update_metrics_on_batch.call_count == len(self.annotations)
        assert self.postprocessor.process_dataset.called
        assert not self.postprocessor.full_process.called

    def test_process_dataset_without_storing_predictions_and_with_dataset_processors(self):
        """Dataset processors present: metrics update once, at dataset level."""
        self.postprocessor.has_dataset_processors = True
        self.evaluator.dataset_processor(None, None)
        assert not self.evaluator.store_predictions.called
        assert not self.evaluator.load.called
        assert self.launcher.predict.called
        assert self.postprocessor.process_batch.called
        assert self.metric.update_metrics_on_batch.call_count == 1
        assert self.postprocessor.process_dataset.called
        assert not self.postprocessor.full_process.called

    def test_process_dataset_with_storing_predictions_and_without_dataset_processors(self):
        """Store path given: predictions are stored; metrics per batch."""
        self.postprocessor.has_dataset_processors = False
        self.evaluator.dataset_processor('path', None)
        assert self.evaluator.store_predictions.called
        assert not self.evaluator.load.called
        assert self.launcher.predict.called
        assert self.postprocessor.process_batch.called
        assert self.metric.update_metrics_on_batch.call_count == len(self.annotations)
        assert self.postprocessor.process_dataset.called
        assert not self.postprocessor.full_process.called

    def test_process_dataset_with_storing_predictions_and_with_dataset_processors(self):
        """Store path + dataset processors: stored, metrics once."""
        self.postprocessor.has_dataset_processors = True
        self.evaluator.dataset_processor('path', None)
        assert self.evaluator.store_predictions.called
        assert not self.evaluator.load.called
        assert self.launcher.predict.called
        assert self.postprocessor.process_batch.called
        assert self.metric.update_metrics_on_batch.call_count == 1
        assert self.postprocessor.process_dataset.called
        assert not self.postprocessor.full_process.called

    def test_process_dataset_with_loading_predictions_and_without_dataset_processors(self, mocker):
        """Stored predictions are loaded: launcher bypassed, full_process used.

        `mocker` is the pytest-mock fixture; get_path is patched so the
        stored-predictions path check passes without touching the filesystem.
        NOTE(review): this test calls process_dataset() while the others call
        dataset_processor() — confirm the difference is intentional.
        """
        mocker.patch('accuracy_checker.evaluators.model_evaluator.get_path')
        self.postprocessor.has_dataset_processors = False
        self.evaluator.process_dataset('path', None)
        assert self.evaluator.load.called
        assert not self.launcher.predict.called
        assert not self.postprocessor.process_batch.called
        assert self.metric.update_metrics_on_batch.call_count == 1
        assert not self.postprocessor.process_dataset.called
        assert self.postprocessor.full_process.called

    def test_process_dataset_with_loading_predictions_and_with_dataset_processors(self, mocker):
        """Loaded predictions + dataset processors: full_process, metrics once."""
        mocker.patch('accuracy_checker.evaluators.model_evaluator.get_path')
        self.postprocessor.has_dataset_processors = True
        self.evaluator.dataset_processor('path', None)
        assert not self.evaluator.store_predictions.called
        assert self.evaluator.load.called
        assert not self.launcher.predict.called
        assert not self.postprocessor.process_batch.called
        assert self.metric.update_metrics_on_batch.call_count == 1
        assert not self.postprocessor.process_dataset.called
        assert self.postprocessor.full_process.called
class TestModelEvaluatorAsync:
def setup_method(self):
self.launcher = MagicMock()
infer_request = MagicMock()
infer_request.wait = Mock(return_value=0)
infer_request.outputs = Mock()
self.launcher.infer_requests = [infer_request]
data = MagicMock(data=MagicMock(), metadata=MagicMock(), identifier=0)
self.preprocessor = Mock()
self.preprocessor.process = Mock(return_value=data)
self.postprocessor = Mock()
self.adapter = MagicMock(return_value=[])
self.input_feeder = Mock()
self.data_reader = Mock(return_value=data)
self.data_reader.data_source = 'source'
annotation_0 = MagicMock()
annotation_0.identifier = 0
annotation_0.metadata = {'data_source': MagicMock()}
annotation_1 = MagicMock()
annotation_1.identifier = 1
annotation_1.metadata = {'data_source': MagicMock()}
annotation_container_0 = MagicMock()
annotation_container_0.values = MagicMock(return_value=[annotation_0])
annotation_container_1 = MagicMock()
annotation_container_1.values = MagicMock(return_value=([annotation_1]))
self.annotations = [[annotation_container_0], [annotation_container_1]]
self.dataset = MagicMock()
self.dataset.__iter__.return_value = self.annotations
self.postprocessor.process_batch = Mock(side_effect=[
([annotation_container_0], [annotation_container_0]), ([annotation_container_1], [annotation_container_1])
])
self.postprocessor.process_dataset = Mock(return_value=(
([annotation_container_0], [annotation_container_0]), ([annotation_container_1], [annotation_container_1])
))
self.postprocessor.full_process = Mock(return_value=(
([annotation_container_0], [annotation_container_0]), ([annotation_container_1], [annotation_container_1])
))
self.metric = Mock()
self.metric.update_metrics_on_batch = Mock()
self.evaluator = ModelEvaluator(
self.launcher,
self.input_feeder,
self.adapter,
self.data_reader,
self.preprocessor,
self.postprocessor,
self.dataset,
self.metric,
True
)
self.evaluator.store_predictions = Mock()
self.evaluator.load = Mock(return_value=(
([annotation_container_0], [annotation_container_0]), ([annotation_container_1], [annotation_container_1])
))
def test_process_dataset_without_storing_predictions_and_dataset_processors(self):
self.postprocessor.has_dataset_processors = False
self.evaluator.dataset_processor(None, None)
assert not self.evaluator.store_predictions.called
assert not self.evaluator.load.called
assert not self.launcher.predict.called
assert self.launcher.predict_async.called
assert self.metric.update_metrics_on_batch.call_count == len(self.annotations)
def test_process_dataset_without_storing_predictions_and_with_dataset_processors(self):
self.postprocessor.has_dataset_processors = True
self.evaluator.dataset_processor(None, None)
assert not self.evaluator.store_predictions.called
assert not self.evaluator.load.called
assert not self.launcher.predict.called
assert self.launcher.predict_async.called
assert self.metric.update_metrics_on_batch.call_count == 1
def test_process_dataset_with_storing_predictions_and_without_dataset_processors(self):
self.postprocessor.has_dataset_processors = False
self.evaluator.dataset_processor('path', None)
assert self.evaluator.store_predictions.called
assert not self.evaluator.load.called
assert not self.launcher.predict.called
assert self.launcher.predict_async.called
assert self.postprocessor.process_batch.called
assert self.metric.update_metrics_on_batch.call_count == len(self.annotations)
def test_process_dataset_with_storing_predictions_and_with_dataset_processors(self):
self.postprocessor.has_dataset_processors = True
self.evaluator.dataset_processor('path', None)
assert self.evaluator.store_predictions.called
assert not self.evaluator.load.called
assert not self.launcher.predict.called
assert self.launcher.predict_async.called
assert self.postprocessor.process_batch.called
assert self.metric.update_metrics_on_batch.call_count == 1
assert self.postprocessor.process_dataset.called
assert not self.postprocessor.full_process.called
def test_process_dataset_with_loading_predictions_and_without_dataset_processors(self):
    """When stored predictions exist, process_dataset must load them instead of
    running any inference, and hand everything to full_process in one go."""
    # get_path would touch the filesystem; stub it out
    mocker.patch('accuracy_checker.evaluators.model_evaluator.get_path')
    self.postprocessor.has_dataset_processors = False
    self.evaluator.process_dataset('path', None)
    assert self.evaluator.load.called
    # no inference of either kind when predictions are loaded from disk
    assert not self.launcher.predict.called
    assert not self.launcher.predict_async.called
    assert not self.postprocessor.process_batch.called
    assert self.metric.update_metrics_on_batch.call_count == 1
    assert not self.postprocessor.process_dataset.called
    assert self.postprocessor.full_process.called
| 43.442857
| 118
| 0.719336
| 1,382
| 12,164
| 6.04631
| 0.099855
| 0.076113
| 0.049785
| 0.054572
| 0.910723
| 0.910723
| 0.90067
| 0.90067
| 0.90067
| 0.896123
| 0
| 0.008438
| 0.201085
| 12,164
| 279
| 119
| 43.598566
| 0.85141
| 0.046284
| 0
| 0.92093
| 0
| 0
| 0.020702
| 0.013456
| 0
| 0
| 0
| 0
| 0.334884
| 1
| 0.060465
| false
| 0
| 0.009302
| 0
| 0.07907
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d4d49a40da4b8d902977cb568fb61f75fff21259
| 8,907
|
py
|
Python
|
nas_lib/algos_nas/predictor_unsupervised.py
|
auroua/SSNENAS
|
65bdece174f0da2f9a3c716b86859abba077d279
|
[
"MIT"
] | 2
|
2020-12-29T06:33:22.000Z
|
2022-02-19T22:21:05.000Z
|
nas_lib/algos_nas/predictor_unsupervised.py
|
auroua/SSNENAS
|
65bdece174f0da2f9a3c716b86859abba077d279
|
[
"MIT"
] | null | null | null |
nas_lib/algos_nas/predictor_unsupervised.py
|
auroua/SSNENAS
|
65bdece174f0da2f9a3c716b86859abba077d279
|
[
"MIT"
] | null | null | null |
# Copyright (c) Xidian University and Xi'an University of Posts & Telecommunications. All Rights Reserved
import torch
import numpy as np
from nas_lib.utils.utils_data import nasbench2graph_101, nasbench2graph_201
from nas_lib.eigen.trainer_predictor import NasBenchGinPredictorTrainer
import copy
def gin_unsupervised_predictor(search_space,
                               model_dir=None,
                               num_init=10,
                               k=10,
                               total_queries=150,
                               acq_opt_type='mutation',
                               allow_isomorphisms=False,
                               verbose=1,
                               agent=None,
                               logger=None,
                               gpu='0',
                               lr=0.01,
                               candidate_nums=100,
                               epochs=1000,
                               predictor_type=None,
                               algo_name=None,
                               benchmark=None,
                               rate=10):
    """NAS search loop driven by an unsupervised GIN-based predictor.

    Starting from ``num_init`` random architectures, repeatedly (re)trains a
    ``NasBenchGinPredictorTrainer`` on everything queried so far, scores
    ``candidate_nums`` fresh candidates and queries the ``k`` candidates with
    the lowest predicted value (the target ``d[4]`` is logged as a loss, so
    lower is better) until ``total_queries`` architectures have been queried.

    Args:
        search_space: project search-space object providing
            ``generate_random_dataset``/``get_candidates``/``query_arch``.
        benchmark: required; 'nasbench_101' or 'nasbench_201'.
        gpu: CUDA device index, int or str.
        (remaining arguments are forwarded to the trainer unchanged)

    Returns:
        list: all queried architecture tuples, in query order.
    """
    assert benchmark is not None, 'The benchmark have to be nasbench_101 or nasbench_201.'
    # BUGFIX: gpu defaults to the *string* '0' and '%d' % '0' raises
    # TypeError. Cast explicitly so both int and str device ids work.
    device = torch.device('cuda:{}'.format(int(gpu)))
    data = search_space.generate_random_dataset(num=num_init,
                                                allow_isomorphisms=allow_isomorphisms,
                                                deterministic_loss=True)
    query = num_init + k
    search_agent = agent
    # nasbench_101 graphs carry 6 node-feature dims, nasbench_201 carries 8
    input_dim = 6 if benchmark == 'nasbench_101' else 8
    batch_size = 10 if len(data) <= 10 else 16
    while query <= total_queries:
        arch_data = [d[0] for d in data]
        # a fresh trainer is built every round so the predictor is retrained
        # from scratch on the full query history
        agent = NasBenchGinPredictorTrainer(search_agent, lr=lr, device=device, epochs=epochs,
                                            train_images=len(data), batch_size=batch_size,
                                            input_dim=input_dim, model_dir=model_dir,
                                            predictor_type=predictor_type, logger=logger,
                                            algo_name=algo_name, rate=rate)
        val_accuracy = np.array([d[4] for d in data])
        arch_data_edge_idx_list = []
        arch_data_node_f_list = []
        for arch in arch_data:
            edge_index, node_f = nasbench2graph_101(arch) if benchmark == 'nasbench_101' else nasbench2graph_201(arch)
            arch_data_edge_idx_list.append(edge_index)
            arch_data_node_f_list.append(node_f)
        candidates = search_space.get_candidates(data,
                                                 num=candidate_nums,
                                                 allow_isomorphisms=allow_isomorphisms)
        candiate_edge_list = []
        candiate_node_list = []
        for cand in candidates:
            edge_index, node_f = nasbench2graph_101(cand[0]) if benchmark == 'nasbench_101' else nasbench2graph_201(cand[0])
            candiate_edge_list.append(edge_index)
            candiate_node_list.append(node_f)
        agent.fit(arch_data_edge_idx_list, arch_data_node_f_list, val_accuracy, logger=None)
        acc_train = agent.pred(arch_data_edge_idx_list, arch_data_node_f_list)
        acc_pred = agent.pred(candiate_edge_list, candiate_node_list)
        candidate_np = acc_pred.cpu().numpy()
        # ascending argsort: take the k candidates with the lowest prediction
        sorted_indices = np.argsort(candidate_np)
        for i in sorted_indices[:k]:
            if benchmark == 'nasbench_101':
                archtuple = search_space.query_arch(matrix=candidates[i][1],
                                                    ops=candidates[i][2])
            elif benchmark == 'nasbench_201':
                archtuple = candidates[i]
            else:
                raise NotImplementedError()
            data.append(archtuple)
        if verbose:
            top_5_loss = sorted([d[4] for d in data])[:min(5, len(data))]
            logger.info('Query {}, training mean loss is {}'.format(
                query, np.mean(np.abs(acc_train.cpu().numpy() - val_accuracy))))
            logger.info('Query {}, top 5 val losses {}'.format(query, top_5_loss))
        query += k
    return data
def gin_unsupervised_predictor_fix_num(search_space,
                                       num_init=10,
                                       model_dir=None,
                                       k=10,
                                       total_queries=150,
                                       acq_opt_type='mutation',
                                       allow_isomorphisms=False,
                                       verbose=1,
                                       agent=None,
                                       logger=None,
                                       gpu='0',
                                       lr=0.01,
                                       candidate_nums=100,
                                       epochs=1000,
                                       predictor_type=None,
                                       algo_name=None,
                                       training_nums=50,
                                       benchmark=None,
                                       rate=10):
    """Variant of ``gin_unsupervised_predictor`` with a capped training set.

    The predictor is only retrained while fewer than ``training_nums``
    architectures have been collected; afterwards the last trained predictor
    keeps scoring new candidates without further fitting.

    Returns:
        list: all queried architecture tuples, in query order.
    """
    assert benchmark is not None, 'The benchmark have to be nasbench_101 or nasbench_201.'
    # BUGFIX: gpu defaults to the *string* '0' and '%d' % '0' raises
    # TypeError. Cast explicitly so both int and str device ids work.
    device = torch.device('cuda:{}'.format(int(gpu)))
    data = search_space.generate_random_dataset(num=num_init,
                                                allow_isomorphisms=allow_isomorphisms,
                                                deterministic_loss=True)
    query = num_init + k
    search_agent = agent
    train_data = []
    train_flag = False
    input_dim = 6 if benchmark == 'nasbench_101' else 8
    while query <= total_queries:
        if len(train_data) < training_nums:
            # snapshot the current history as the (frozen) training set
            train_data = copy.deepcopy(data)
            train_flag = True
        batch_size = 10 if len(train_data) <= 10 else 16
        candidates = search_space.get_candidates(data,
                                                 num=candidate_nums,
                                                 allow_isomorphisms=allow_isomorphisms)
        candiate_edge_list = []
        candiate_node_list = []
        for cand in candidates:
            edge_index, node_f = nasbench2graph_101(cand[0]) if benchmark == 'nasbench_101' else nasbench2graph_201(cand[0])
            candiate_edge_list.append(edge_index)
            candiate_node_list.append(node_f)
        if train_flag:
            agent = NasBenchGinPredictorTrainer(search_agent, lr=lr, device=device, epochs=epochs,
                                                train_images=len(train_data), batch_size=batch_size,
                                                input_dim=input_dim, model_dir=model_dir,
                                                predictor_type=predictor_type, logger=logger,
                                                algo_name=algo_name, rate=rate)
            arch_data = [d[0] for d in train_data]
            val_accuracy = np.array([d[4] for d in train_data])
            arch_data_edge_idx_list = []
            arch_data_node_f_list = []
            for arch in arch_data:
                edge_index, node_f = nasbench2graph_101(arch) if benchmark == 'nasbench_101' else nasbench2graph_201(arch)
                arch_data_edge_idx_list.append(edge_index)
                arch_data_node_f_list.append(node_f)
            agent.fit(arch_data_edge_idx_list, arch_data_node_f_list, val_accuracy, logger=None)
            # training error is only recomputed after a retrain; on later
            # rounds the values from the last retrain are reused for logging
            acc_train = agent.pred(arch_data_edge_idx_list, arch_data_node_f_list)
        acc_pred = agent.pred(candiate_edge_list, candiate_node_list)
        candidate_np = acc_pred.cpu().numpy()
        # ascending argsort: take the k candidates with the lowest prediction
        sorted_indices = np.argsort(candidate_np)
        for i in sorted_indices[:k]:
            if benchmark == 'nasbench_101':
                archtuple = search_space.query_arch(matrix=candidates[i][1],
                                                    ops=candidates[i][2])
            elif benchmark == 'nasbench_201':
                archtuple = candidates[i]
            else:
                raise NotImplementedError()
            data.append(archtuple)
        if verbose:
            top_5_loss = sorted([d[4] for d in data])[:min(5, len(data))]
            logger.info('Query {}, training mean loss is {}'.format(
                query, np.mean(np.abs(acc_train.cpu().numpy() - val_accuracy))))
            logger.info('Query {}, top 5 val losses {}'.format(query, top_5_loss))
        query += k
        train_flag = False
    return data
| 52.087719
| 124
| 0.511957
| 949
| 8,907
| 4.522655
| 0.159115
| 0.037279
| 0.027959
| 0.041007
| 0.855079
| 0.851817
| 0.851817
| 0.844362
| 0.844362
| 0.815471
| 0
| 0.035694
| 0.408667
| 8,907
| 171
| 125
| 52.087719
| 0.779191
| 0.040754
| 0
| 0.852564
| 0
| 0
| 0.04546
| 0
| 0
| 0
| 0
| 0
| 0.012821
| 1
| 0.012821
| false
| 0
| 0.032051
| 0
| 0.057692
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d4e90e52794d352081680e96622bdd424c94f451
| 156
|
py
|
Python
|
demopy/notebooks/common_imports.py
|
omars-lab/demo-py
|
08d656968ee330e607b100e58727b4503a5cde33
|
[
"MIT"
] | null | null | null |
demopy/notebooks/common_imports.py
|
omars-lab/demo-py
|
08d656968ee330e607b100e58727b4503a5cde33
|
[
"MIT"
] | null | null | null |
demopy/notebooks/common_imports.py
|
omars-lab/demo-py
|
08d656968ee330e607b100e58727b4503a5cde33
|
[
"MIT"
] | null | null | null |
from demopy.notebooks.code import *
from demopy.notebooks.charts import *
from demopy.notebooks.widgets import *
def debug(x):
    """Print *x* and pass it through unchanged — a tap for notebook pipelines."""
    print(x)
    return x
| 15.6
| 38
| 0.730769
| 22
| 156
| 5.181818
| 0.545455
| 0.263158
| 0.5
| 0.438596
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.179487
| 156
| 9
| 39
| 17.333333
| 0.890625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.5
| 0
| 0.833333
| 0.166667
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d4f917061385cadd4b6857fc418912236fc6a6e8
| 2,238
|
py
|
Python
|
benchbuild/projects/benchbuild/lulesh.py
|
sturmianseq/benchbuild
|
e3cc1a24e877261e90baf781aa67a9d6f6528dac
|
[
"MIT"
] | 11
|
2017-10-05T08:59:35.000Z
|
2021-05-29T01:43:07.000Z
|
benchbuild/projects/benchbuild/lulesh.py
|
sturmianseq/benchbuild
|
e3cc1a24e877261e90baf781aa67a9d6f6528dac
|
[
"MIT"
] | 326
|
2016-07-12T08:11:43.000Z
|
2022-03-28T07:10:11.000Z
|
benchbuild/projects/benchbuild/lulesh.py
|
sturmianseq/benchbuild
|
e3cc1a24e877261e90baf781aa67a9d6f6528dac
|
[
"MIT"
] | 13
|
2016-06-17T12:13:35.000Z
|
2022-01-04T16:09:12.000Z
|
from plumbum import local
import benchbuild as bb
from benchbuild.environments.domain.declarative import ContainerImage
from benchbuild.source import Git
class Lulesh(bb.Project):
    """Serial build of the LULESH benchmark."""
    NAME = 'lulesh'
    DOMAIN = 'scientific'
    GROUP = 'benchbuild'
    SOURCE = [
        Git(
            remote='https://github.com/LLNL/LULESH/',
            local='lulesh.git',
            limit=5,
            refspec='HEAD'
        )
    ]
    CONTAINER = ContainerImage().from_('benchbuild:alpine')

    def compile(self):
        """Compile each LULESH translation unit, then link the binary."""
        repo = local.path(self.source_of('lulesh.git'))
        # disable MPI for the serial configuration
        self.cflags += ["-DUSE_MPI=0"]
        sources = local.cwd / repo // "*.cc"
        compiler = bb.compiler.cxx(self)
        with local.cwd(repo):
            for source in sources:
                compiler("-c", "-o", source + '.o', source)
        objects = local.cwd / repo // "*.cc.o"
        with local.cwd(repo):
            compiler(objects, "-lm", "-o", "../lulesh")

    def run_tests(self):
        """Run the wrapped binary with iteration counts 1 through 14."""
        binary = bb.wrap("lulesh", self)
        run_binary = bb.watch(binary)
        for iterations in range(1, 15):
            run_binary("-i", iterations)
class LuleshOMP(bb.Project):
    """OpenMP build of the LULESH benchmark."""
    NAME = 'lulesh-omp'
    DOMAIN = 'scientific'
    GROUP = 'benchbuild'
    SOURCE = [
        Git(
            remote='https://github.com/LLNL/LULESH/',
            local='lulesh.git',
            limit=5,
            refspec='HEAD'
        )
    ]
    CONTAINER = ContainerImage().from_('benchbuild:alpine')

    def compile(self):
        """Compile each LULESH translation unit with OpenMP, then link."""
        repo = local.path(self.source_of('lulesh.git'))
        # note: flags are *replaced* here (not appended, unlike Lulesh)
        self.cflags = ['-DUSE_MPI=0', '-fopenmp']
        sources = local.cwd / repo // "*.cc"
        compiler = bb.compiler.cxx(self)
        with local.cwd(repo):
            for source in sources:
                compiler("-c", "-o", source + '.o', source)
        objects = local.cwd / repo // "*.cc.o"
        with local.cwd(repo):
            compiler(objects, "-lm", "-o", "../lulesh")

    def run_tests(self):
        """Run the wrapped binary with iteration counts 1 through 14."""
        binary = bb.wrap("lulesh", self)
        run_binary = bb.watch(binary)
        for iterations in range(1, 15):
            run_binary("-i", iterations)
| 27.62963
| 69
| 0.549151
| 264
| 2,238
| 4.518939
| 0.257576
| 0.083822
| 0.093881
| 0.120704
| 0.80637
| 0.80637
| 0.80637
| 0.80637
| 0.80637
| 0.80637
| 0
| 0.006382
| 0.299821
| 2,238
| 80
| 70
| 27.975
| 0.754946
| 0.013405
| 0
| 0.774194
| 0
| 0
| 0.139471
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064516
| false
| 0
| 0.064516
| 0
| 0.322581
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be0725ba2d98c65b3c23a437ba3f6b4b2118cb35
| 11,360
|
py
|
Python
|
torchreid/data/datasets/image/make_csv_dataset.py
|
liangmuxue/deep-person-reid
|
e566f557a2dce8fde919aba7b718edb9cc135d24
|
[
"MIT"
] | null | null | null |
torchreid/data/datasets/image/make_csv_dataset.py
|
liangmuxue/deep-person-reid
|
e566f557a2dce8fde919aba7b718edb9cc135d24
|
[
"MIT"
] | null | null | null |
torchreid/data/datasets/image/make_csv_dataset.py
|
liangmuxue/deep-person-reid
|
e566f557a2dce8fde919aba7b718edb9cc135d24
|
[
"MIT"
] | null | null | null |
import os
from torchreid.data.datasets.db_accessor import DbAccessor
import csv
from PIL import ImageDraw,ImageFont,Image
def read_data_from_sql(csv_path):
    """Append every active ss_cluster_data row to *csv_path* as
    [image_path, pid, device_no, row_id]."""
    accessor = DbAccessor({})
    sql = "select url, device_no, cluster_id,id from ss_cluster_data where state = 1"
    for row in accessor.do_query(sql):
        record = [
            '/home/bavon/model/datasets/test_20{}'.format(row[0]),
            int(row[2]) + 610000,  # shift cluster ids into a dedicated pid range
            row[1],
            row[3],
        ]
        print(record)
        with open(csv_path, "a+") as out_file:
            csv.writer(out_file).writerow(record)
def markcsv_tr_gq_type(csv_path, csv_path_wr):
    """Split rows of *csv_path* into train and test(gallery/query) rows.

    The first half of the file (by row position) is appended to *csv_path_wr*
    tagged 'train'; the rest is grouped by pid and written with
    'gallery'/'query' tags. Query rows get their cid overwritten with the
    sentinel camera id 20210123 so gallery and query never share a camera.
    """
    print('read data from CSVPATH {}'.format(csv_path))
    print('write data to CSVPATH {}'.format(csv_path_wr))
    # first pass: count the rows (only used for the informational split print)
    csv_file = open(csv_path)
    csv_reader_lines = csv.reader(csv_file)
    # print(len(csv_reader_lines))
    num = 0
    for one_line in csv_reader_lines:
        num += 1
    print('total data num is {}'.format(num))
    num_train = int(num / 2)
    num_test = num - num_train
    print('num train is {} num test is {}'.format(num_train, num_test))
    sp_num = 0
    label = {}
    # second pass: first rows become train, the remainder is grouped by pid
    csv_file = open(csv_path)
    csv_reader_lines = csv.reader(csv_file)
    for one_line in csv_reader_lines:
        # print(one_line)
        # NOTE(review): `<=` writes num_train + 1 train rows, one more than
        # the printed split — confirm whether the off-by-one is intended.
        if sp_num <= num_train:
            img_item_path = one_line[0]
            pid = int(one_line[1])
            cid = int(one_line[2])
            index = one_line[3]
            tr_type = 'train'
            tang = [img_item_path, pid, cid, index, tr_type]
            #print(tang)
            with open(csv_path_wr, "a+") as csvfile:
                writer = csv.writer(csvfile)
                writer.writerow(tang)
            sp_num += 1
        else:
            img_item_path = one_line[0]
            pid = int(one_line[1])
            cid = int(one_line[2])
            index = one_line[3]
            tr_type = 'test'
            dada_line = [img_item_path, pid, cid, index, tr_type]
            try:
                label['{}'.format(pid)].append(dada_line)
            except:
                label['{}'.format(pid)] = []
                label['{}'.format(pid)].append(dada_line)
            sp_num += 1
    # write test groups: 1 row -> gallery only; 2-3 rows -> row 1 becomes the
    # query; >=5 rows -> 70/30 gallery/query split.
    # NOTE(review): groups of exactly 4 rows match no branch below and are
    # silently dropped — verify this is intentional.
    for key in label:
        if len(label[key]) == 1:
            label[key][0].append('gallery')
            with open(csv_path_wr, "a+") as csvfile:
                writer = csv.writer(csvfile)
                writer.writerow(label[key][0])
        if len(label[key]) == 2:
            label[key][0].append('gallery')
            label[key][1].append('query')
            label[key][1][2] = 20210123
            for i in range(0, len(label[key])):
                with open(csv_path_wr, "a+") as csvfile:
                    writer = csv.writer(csvfile)
                    writer.writerow(label[key][i])
        if len(label[key]) == 3:
            label[key][0].append('gallery')
            label[key][1].append('query')
            label[key][1][2] = 20210123
            label[key][2].append('gallery')
            for i in range(0, len(label[key])):
                with open(csv_path_wr, "a+") as csvfile:
                    writer = csv.writer(csvfile)
                    writer.writerow(label[key][i])
        if len(label[key]) >= 5:
            num_gallery = int(len(label[key]) * 0.7)
            num_query = len(label[key]) - num_gallery
            for i in range(0, len(label[key])):
                if i <= num_gallery:
                    label[key][i].append('gallery')
                    with open(csv_path_wr, "a+") as csvfile:
                        writer = csv.writer(csvfile)
                        writer.writerow(label[key][i])
                else:
                    label[key][i].append('query')
                    label[key][i][2] = 20210123
                    with open(csv_path_wr, "a+") as csvfile:
                        writer = csv.writer(csvfile)
                        writer.writerow(label[key][i])
    print('============create csv datasets success================')
def mark_train_follow_epoch(csv_path, csv_path_wr):
    """Distribute rows of *csv_path* into 50 train buckets, then a test set.

    Rows have the form [img_path, pid, cid, index] and are grouped by pid.
    Each pid with >= 2 rows seeds one of 50 train buckets with a pair; the
    buckets are then topped up to 32 rows each from whatever remains. All
    bucket rows are appended to *csv_path_wr* tagged 'train'. Rows left over
    after bucket filling become the test set, tagged 'test' plus
    'gallery'/'query' per pid (query rows get sentinel cid 20210123).
    """
    print('read data from CSVPATH {}'.format(csv_path))
    print('write data to CSVPATH {}'.format(csv_path_wr))
    # first pass: count rows (informational print only); use a context
    # manager so the handle is not leaked
    with open(csv_path) as csv_file:
        num = 0
        for one_line in csv.reader(csv_file):
            num += 1
    print('total data num is {}'.format(num))
    num_train = int(num / 2)
    num_test = num - num_train
    print('num train is {} num test is {}'.format(num_train, num_test))
    # second pass: group rows by pid, preserving file order within a group
    label = {}
    with open(csv_path) as csv_file:
        for one_line in csv.reader(csv_file):
            img_item_path = one_line[0]
            pid = int(one_line[1])
            cid = int(one_line[2])
            index = one_line[3]
            dada_line = [img_item_path, pid, cid, index]
            label.setdefault('{}'.format(pid), []).append(dada_line)
    # seed up to 50 train buckets with one same-pid pair each
    train = {}
    for i in range(0, 50):
        train['{}'.format(i)] = []
    jj = 0
    for key in label:
        if len(label[key]) >= 2:
            label[key][0].append('train')
            label[key][1].append('train')
            try:
                train['{}'.format(jj)].append(label[key][0])
                train['{}'.format(jj)].append(label[key][1])
            except KeyError:
                # more than 50 pairs: every bucket is seeded, stop seeding
                break
            del label[key][0]
            del label[key][0]
            jj += 1
    # top every bucket up to 32 rows from the remaining groups, in order
    for keys in train:
        for key in label:
            for i in range(0, len(label[key])):
                if len(label[key]) == 0:
                    continue
                if len(train[keys]) < 32:
                    print('{} len is {}'.format(keys, len(train[keys])))
                    label[key][0].append('train')
                    train[keys].append(label[key][0])
                    del label[key][0]
    for key in train:
        for i in range(0, len(train[key])):
            with open(csv_path_wr, "a+") as csvfile:
                writer = csv.writer(csvfile)
                writer.writerow(train[key][i])
    # whatever is left becomes the test set (groups of 4 rows intentionally
    # mirror markcsv_tr_gq_type and match no branch)
    for key in label:
        if len(label[key]) == 1:
            label[key][0].append('test')
            label[key][0].append('gallery')
            with open(csv_path_wr, "a+") as csvfile:
                writer = csv.writer(csvfile)
                writer.writerow(label[key][0])
        if len(label[key]) == 2:
            label[key][0].append('test')
            label[key][0].append('gallery')
            label[key][1].append('test')
            label[key][1].append('query')
            label[key][1][2] = 20210123
            for i in range(0, len(label[key])):
                with open(csv_path_wr, "a+") as csvfile:
                    writer = csv.writer(csvfile)
                    writer.writerow(label[key][i])
        if len(label[key]) == 3:
            label[key][0].append('test')
            label[key][0].append('gallery')
            label[key][1].append('test')
            label[key][1].append('query')
            label[key][1][2] = 20210123
            # BUGFIX: this used to be label[key][0].append('test'), tagging
            # row 0 twice and leaving row 2 without a 'test' marker, which
            # produced one malformed 5-field CSV row per 3-row group.
            label[key][2].append('test')
            label[key][2].append('gallery')
            for i in range(0, len(label[key])):
                with open(csv_path_wr, "a+") as csvfile:
                    writer = csv.writer(csvfile)
                    writer.writerow(label[key][i])
        if len(label[key]) >= 5:
            # roughly 70% gallery / 30% query within the group
            num_gallery = int(len(label[key]) * 0.7)
            for i in range(0, len(label[key])):
                if i <= num_gallery:
                    label[key][i].append('test')
                    label[key][i].append('gallery')
                    with open(csv_path_wr, "a+") as csvfile:
                        writer = csv.writer(csvfile)
                        writer.writerow(label[key][i])
                else:
                    label[key][i].append('test')
                    label[key][i].append('query')
                    label[key][i][2] = 20210123
                    with open(csv_path_wr, "a+") as csvfile:
                        writer = csv.writer(csvfile)
                        writer.writerow(label[key][i])
    print('============create csv datasets success================')
def hard_num_csv_read(csv_path='/home/bavon/model/datasets/batch_no5.csv',
                      csv_path_wr='/home/bavon/model/datasets/batch_no_af5-2040.csv'):
    """Copy the first (up to) 40 rows of every pid that has at least 25 rows.

    Rows of *csv_path* have the form [img_path, pid, cid, index]; qualifying
    rows are appended to *csv_path_wr* in their original order.
    """
    # group rows by pid; use a context manager so the input handle is closed
    # (the previous version leaked it and used a bare except for grouping)
    label = {}
    with open(csv_path) as csv_file:
        for one_line in csv.reader(csv_file):
            dada_line = [one_line[0], int(one_line[1]), int(one_line[2]), one_line[3]]
            label.setdefault('{}'.format(dada_line[1]), []).append(dada_line)
    top = 40  # cap on rows copied per pid
    for key in label:
        if len(label[key]) >= 25:
            for i in range(0, len(label[key])):
                if i < top:
                    with open(csv_path_wr, "a+") as csvfile:
                        writer = csv.writer(csvfile)
                        writer.writerow(label[key][i])
def make_test_gq(data_dir='/home/bavon/model/datasets/test_mul_act',
                 csv_path_wr='/home/bavon/project/deep-person-reid/torchreid/data/datasets/image/test_96.csv'):
    """Build a gallery/query test CSV from an identity-per-folder image tree.

    Walks *data_dir*; within each folder the first three files (enumeration
    order) become gallery rows with cid 20210123, the rest query rows with cid
    20210131. Each folder name must be an integer pid. Rows of the form
    [img_path, pid, cid, index, 'test', gq_type] are appended to *csv_path_wr*.

    Both arguments default to the previously hard-coded paths, so existing
    callers (``make_test_gq()``) are unaffected.
    """
    for root, dirs, files in os.walk(data_dir):
        for index, name in enumerate(files):
            print(root)
            print(name)
            print(os.path.join(root, name))
            img_item_path = os.path.join(root, name)
            # assumes POSIX separators; the folder name is the identity id
            pid = int(root.split('/')[-1])
            if index > 2:
                cid = 20210131
                gq_type = 'query'
            else:
                cid = 20210123
                gq_type = 'gallery'
            tr_type = 'test'
            dada_line = [img_item_path, pid, cid, index, tr_type, gq_type]
            with open(csv_path_wr, "a+") as csvfile:
                writer = csv.writer(csvfile)
                writer.writerow(dada_line)
if __name__ == '__main__':
    # Manual entry point: uncomment exactly the preprocessing step to run.
    #read_data_from_sql(csv_path='/home/bavon/model/datasets/1221.csv')
    markcsv_tr_gq_type(csv_path='/home/bavon/model/datasets/1221.csv',
                       csv_path_wr='/home/bavon/model/datasets/1221-af.csv')
    # mark_train_follow_epoch(csv_path='/home/bavon/model/datasets/batch_no4.csv',
    #                         csv_path_wr='/home/bavon/model/datasets/batch_no_af.csv')
    # hard_num_csv_read(csv_path='/home/bavon/model/datasets/batch_no5.csv',
    #                   csv_path_wr='/home/bavon/model/datasets/batch_no_af5-2040.csv')
    #make_test_gq()
| 39.581882
| 121
| 0.513908
| 1,462
| 11,360
| 3.822161
| 0.093023
| 0.105941
| 0.047244
| 0.042949
| 0.847351
| 0.826056
| 0.780422
| 0.755548
| 0.713851
| 0.699714
| 0
| 0.02846
| 0.344278
| 11,360
| 286
| 122
| 39.72028
| 0.721708
| 0.061708
| 0
| 0.744094
| 0
| 0.003937
| 0.089175
| 0.033828
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019685
| false
| 0
| 0.015748
| 0
| 0.035433
| 0.059055
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be29f3ef7c6027725c460b362f5499f21327bcb4
| 141,914
|
py
|
Python
|
tests/test_fundamental.py
|
quantrocket-llc/quantrocket-client
|
1877a9a0f990f6abb0d43c4ebd2e039276b4f778
|
[
"Apache-2.0"
] | 19
|
2017-08-01T15:13:34.000Z
|
2021-11-12T11:16:54.000Z
|
tests/test_fundamental.py
|
quantrocket-llc/quantrocket-client
|
1877a9a0f990f6abb0d43c4ebd2e039276b4f778
|
[
"Apache-2.0"
] | null | null | null |
tests/test_fundamental.py
|
quantrocket-llc/quantrocket-client
|
1877a9a0f990f6abb0d43c4ebd2e039276b4f778
|
[
"Apache-2.0"
] | 12
|
2018-01-25T14:24:48.000Z
|
2022-01-07T05:44:20.000Z
|
# Copyright 2018 QuantRocket LLC - All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# To run: python -m unittest discover -s tests/ -p test*.py -t .
import unittest
try:
from unittest.mock import patch
except ImportError:
# py27
from mock import patch
import pandas as pd
import pytz
import numpy as np
from quantrocket.fundamental import (
get_reuters_estimates_reindexed_like,
get_reuters_financials_reindexed_like,
get_alpaca_etb_reindexed_like,
get_ibkr_borrow_fees_reindexed_like,
get_ibkr_shortable_shares_reindexed_like,
get_sharadar_fundamentals_reindexed_like,
get_sharadar_institutions_reindexed_like,
get_sharadar_sec8_reindexed_like,
get_sharadar_sp500_reindexed_like,
get_wsh_earnings_dates_reindexed_like
)
from quantrocket.exceptions import ParameterError, MissingData, NoFundamentalData
class ReutersEstimatesReindexedLikeTestCase(unittest.TestCase):
def test_complain_if_time_level_in_index(self):
    """
    A reindex_like frame whose MultiIndex carries an intraday 'Time' level
    must be rejected with a ParameterError.
    """
    intraday_index = pd.MultiIndex.from_product(
        (pd.date_range(start="2018-01-01", periods=3, freq="D"),
         ["15:00:00", "15:15:00"]),
        names=["Date", "Time"])
    closes = pd.DataFrame(np.random.rand(6, 2),
                          columns=["FI12345", "FI23456"],
                          index=intraday_index)
    with self.assertRaises(ParameterError) as ctx:
        get_reuters_estimates_reindexed_like(closes, codes="BVPS")
    self.assertIn("reindex_like should not have 'Time' in index", str(ctx.exception))
def test_complain_if_date_level_not_in_index(self):
    """
    A DatetimeIndex lacking the name 'Date' must be rejected with a
    ParameterError naming the missing level.
    """
    unnamed_dates = pd.date_range(start="2018-01-01", periods=3, freq="D")
    closes = pd.DataFrame(np.random.rand(3, 2),
                          columns=["FI12345", "FI23456"],
                          index=unnamed_dates)
    with self.assertRaises(ParameterError) as ctx:
        get_reuters_estimates_reindexed_like(closes, codes="BVPS")
    self.assertIn("reindex_like must have index called 'Date'", str(ctx.exception))
def test_complain_if_not_datetime_index(self):
    """
    An index that is named 'Date' but is not a DatetimeIndex must be
    rejected with a ParameterError.
    """
    string_index = pd.Index(["foo", "bar", "bat"], name="Date")
    closes = pd.DataFrame(np.random.rand(3, 2),
                          columns=["FI12345", "FI23456"],
                          index=string_index)
    with self.assertRaises(ParameterError) as ctx:
        get_reuters_estimates_reindexed_like(closes, codes="BVPS")
    self.assertIn("reindex_like must have a DatetimeIndex", str(ctx.exception))
@patch("quantrocket.fundamental.download_reuters_estimates")
@patch("quantrocket.fundamental.download_master_file")
def test_pass_args_correctly(self,
                             mock_download_master_file,
                             mock_download_reuters_estimates):
    """
    Tests that sids, date ranges, and other args are correctly
    passed to download_reuters_estimates.
    """
    closes = pd.DataFrame(
        np.random.rand(6,2),
        columns=["FI12345","FI23456"],
        index=pd.date_range(start="2018-03-01", periods=6, freq="MS", name="Date"))

    def _mock_download_reuters_estimates(codes, f, *args, **kwargs):
        # canned CSV payload: two sids reporting BVPS/EPS for Q1 and Q2 2018
        estimates = pd.DataFrame(
            dict(
                FiscalPeriodEndDate=[
                    "2018-03-31",
                    "2018-03-31",
                    "2018-03-31",
                    "2018-03-31",
                    "2018-06-30",
                    "2018-06-30"
                ],
                UpdatedDate=[
                    "2018-04-06T10:00:00",
                    "2018-04-06T10:00:00",
                    "2018-04-23T13:00:00",
                    "2018-04-23T13:00:00",
                    "2018-07-23T13:00:00",
                    "2018-07-23T13:00:00",
                ],
                Sid=[
                    "FI12345",
                    "FI12345",
                    "FI23456",
                    "FI23456",
                    "FI12345",
                    "FI12345",
                ],
                Indicator=[
                    "BVPS",
                    "EPS",
                    "BVPS",
                    "EPS",
                    "BVPS",
                    "EPS"
                ],
                Actual=[
                    20,
                    9.56,
                    50,
                    63.22,
                    24.5,
                    11.35
                ]))
        estimates.to_csv(f, index=False)
        f.seek(0)

    def _mock_download_master_file(f, *args, **kwargs):
        securities = pd.DataFrame(dict(Sid=["FI12345","FI23456"],
                                       Timezone=["Japan","Japan"]))
        securities.to_csv(f, index=False)
        f.seek(0)

    mock_download_master_file.side_effect = _mock_download_master_file
    mock_download_reuters_estimates.side_effect = _mock_download_reuters_estimates

    # first call: explicit fields and quarterly periods
    get_reuters_estimates_reindexed_like(
        closes, ["BVPS","EPS"], fields=["Actual", "FiscalPeriodEndDate"],
        period_types=["Q"], max_lag="500D")

    reuters_estimates_call = mock_download_reuters_estimates.mock_calls[0]
    _, args, kwargs = reuters_estimates_call
    self.assertListEqual(args[0], ["BVPS", "EPS"])
    self.assertListEqual(kwargs["sids"], ["FI12345","FI23456"])
    self.assertEqual(kwargs["start_date"], "2016-09-02") # 365+180 days before reindex_like min date
    self.assertEqual(kwargs["end_date"], "2018-08-01")
    # UpdatedDate must be requested implicitly even when not asked for
    self.assertEqual(kwargs["fields"], ["Actual", "FiscalPeriodEndDate", "UpdatedDate"])
    self.assertEqual(kwargs["period_types"], ["Q"])
    master_call = mock_download_master_file.mock_calls[0]
    _, args, kwargs = master_call
    self.assertEqual(kwargs["sids"], ["FI12345","FI23456"])

    # second call: extra code, different fields and annual/semi-annual periods
    get_reuters_estimates_reindexed_like(
        closes, ["BVPS", "EPS", "ROA"], fields=["Actual", "Mean"],
        period_types=["A","S"], max_lag="500D")

    reuters_estimates_call = mock_download_reuters_estimates.mock_calls[1]
    _, args, kwargs = reuters_estimates_call
    self.assertListEqual(args[0], ["BVPS", "EPS", "ROA"])
    self.assertListEqual(kwargs["sids"], ["FI12345","FI23456"])
    self.assertEqual(kwargs["start_date"], "2016-09-02") # 365+180 days before reindex_like min date
    self.assertEqual(kwargs["end_date"], "2018-08-01")
    self.assertEqual(kwargs["fields"], ["Actual", "Mean","UpdatedDate"])
    self.assertEqual(kwargs["period_types"], ["A","S"])
    master_call = mock_download_master_file.mock_calls[1]
    _, args, kwargs = master_call
    self.assertEqual(kwargs["sids"], ["FI12345","FI23456"])
def test_dedupe_announce_date(self):
    """
    Tests that duplicate UpdatedDates (resulting from reporting several
    fiscal periods at once) are deduped by keeping the latest record.
    """
    closes = pd.DataFrame(
        np.random.rand(6,1),
        columns=["FI12345"],
        index=pd.date_range(start="2018-03-01", periods=6, freq="MS", name="Date"))

    def mock_download_reuters_estimates(codes, f, *args, **kwargs):
        # both fiscal periods share one UpdatedDate (announced together)
        estimates = pd.DataFrame(
            dict(
                FiscalPeriodEndDate=[
                    "2018-03-31",
                    "2018-06-30",
                ],
                UpdatedDate=[
                    "2018-07-23T10:00:00",
                    "2018-07-23T10:00:00",
                ],
                Sid=[
                    "FI12345",
                    "FI12345",
                ],
                Indicator=[
                    "EPS",
                    "EPS"
                ],
                Actual=[
                    9.56,
                    11.35
                ]))
        estimates.to_csv(f, index=False)
        f.seek(0)

    def mock_download_master_file(f, *args, **kwargs):
        securities = pd.DataFrame(dict(Sid=["FI12345"],
                                       Timezone=["Japan"]))
        securities.to_csv(f, index=False)
        f.seek(0)

    with patch('quantrocket.fundamental.download_reuters_estimates', new=mock_download_reuters_estimates):
        with patch('quantrocket.fundamental.download_master_file', new=mock_download_master_file):
            estimates = get_reuters_estimates_reindexed_like(
                closes, "EPS", period_types="Q")

    self.assertSetEqual(set(estimates.index.get_level_values("Indicator")), {"EPS"})
    self.assertSetEqual(set(estimates.index.get_level_values("Field")), {"Actual"})
    eps = estimates.loc["EPS"].loc["Actual"]
    self.assertListEqual(list(eps.index), list(closes.index))
    self.assertListEqual(list(eps.columns), list(closes.columns))
    # only the later fiscal period's Actual (11.35) survives the dedupe
    self.assertEqual(eps["FI12345"].loc["2018-08-01"], 11.35)
def test_ffill_no_lookahead_bias(self):
    """
    Tests that indicators are ffilled and are shifted forward 1 period to
    avoid lookahead bias.
    """
    closes = pd.DataFrame(
        np.random.rand(6,1),
        columns=["FI12345"],
        index=pd.date_range(start="2018-07-20", periods=6, freq="D", name="Date"))

    def mock_download_reuters_estimates(codes, f, *args, **kwargs):
        # two announcements: one well in the past, one mid-window (07-23)
        estimates = pd.DataFrame(
            dict(
                FiscalPeriodEndDate=[
                    "2018-03-30",
                    "2018-06-30"
                ],
                UpdatedDate=[
                    "2018-04-23T10:00:00",
                    "2018-07-23T10:00:00",
                ],
                Sid=[
                    "FI12345",
                    "FI12345",
                ],
                Indicator=[
                    "EPS",
                    "EPS",
                ],
                Actual=[
                    13.45,
                    16.34
                ]))
        estimates.to_csv(f, index=False)
        f.seek(0)

    def mock_download_master_file(f, *args, **kwargs):
        securities = pd.DataFrame(dict(Sid=["FI12345"],
                                       Timezone=["America/New_York"]))
        securities.to_csv(f, index=False)
        f.seek(0)

    with patch('quantrocket.fundamental.download_reuters_estimates', new=mock_download_reuters_estimates):
        with patch('quantrocket.fundamental.download_master_file', new=mock_download_master_file):
            estimates = get_reuters_estimates_reindexed_like(
                closes, ["EPS"])

    self.assertSetEqual(set(estimates.index.get_level_values("Indicator")), {"EPS"})
    self.assertSetEqual(set(estimates.index.get_level_values("Field")), {"Actual"})
    eps = estimates.loc["EPS"].loc["Actual"]
    self.assertListEqual(list(eps.index), list(closes.index))
    self.assertListEqual(list(eps.columns), list(closes.columns))
    # the 07-23 announcement becomes visible only on 07-24 (shifted 1 day)
    self.assertEqual(eps["FI12345"].loc["2018-07-23"], 13.45)
    self.assertEqual(eps["FI12345"].loc["2018-07-24"], 16.34)
def test_no_shift(self):
    """
    Tests that indicators are not shifted forward 1 period if shift=False.
    """
    closes = pd.DataFrame(
        np.random.rand(6,1),
        columns=["FI12345"],
        index=pd.date_range(start="2018-07-20", periods=6, freq="D", name="Date"))

    def mock_download_reuters_estimates(codes, f, *args, **kwargs):
        # same fixture as test_ffill_no_lookahead_bias; only shift differs
        estimates = pd.DataFrame(
            dict(
                FiscalPeriodEndDate=[
                    "2018-03-30",
                    "2018-06-30"
                ],
                UpdatedDate=[
                    "2018-04-23T10:00:00",
                    "2018-07-23T10:00:00",
                ],
                Sid=[
                    "FI12345",
                    "FI12345",
                ],
                Indicator=[
                    "EPS",
                    "EPS",
                ],
                Actual=[
                    13.45,
                    16.34
                ]))
        estimates.to_csv(f, index=False)
        f.seek(0)

    def mock_download_master_file(f, *args, **kwargs):
        securities = pd.DataFrame(dict(Sid=["FI12345"],
                                       Timezone=["America/New_York"]))
        securities.to_csv(f, index=False)
        f.seek(0)

    with patch('quantrocket.fundamental.download_reuters_estimates', new=mock_download_reuters_estimates):
        with patch('quantrocket.fundamental.download_master_file', new=mock_download_master_file):
            estimates = get_reuters_estimates_reindexed_like(
                closes, ["EPS"], shift=False)

    self.assertSetEqual(set(estimates.index.get_level_values("Indicator")), {"EPS"})
    self.assertSetEqual(set(estimates.index.get_level_values("Field")), {"Actual"})
    eps = estimates.loc["EPS"].loc["Actual"]
    self.assertListEqual(list(eps.index), list(closes.index))
    self.assertListEqual(list(eps.columns), list(closes.columns))
    # without the shift, the 07-23 announcement is visible on 07-23 itself
    self.assertEqual(eps["FI12345"].loc["2018-07-22"], 13.45)
    self.assertEqual(eps["FI12345"].loc["2018-07-23"], 16.34)
    def test_no_ffill(self):
        """
        Tests that indicators are not forward-filled if ffill=False.
        """
        # note: index includes a weekend gap (7-24 -> 7-27) so that
        # announcements falling on missing dates are dropped when not ffilled
        closes = pd.DataFrame(
            np.random.rand(6,3),
            columns=["FI12345", "FI23456", "FI34567"],
            index=pd.DatetimeIndex(["2018-07-22", "2018-07-23","2018-07-24",
                                    "2018-07-27","2018-07-28","2018-07-29"], name="Date"))

        def mock_download_reuters_estimates(codes, f, *args, **kwargs):
            estimates = pd.DataFrame(
                dict(
                    FiscalPeriodEndDate=[
                        "2018-03-30",
                        "2018-06-30",
                        "2018-03-30",
                        "2018-06-30",
                        "2018-06-30",
                        "2018-06-30",
                        "2018-06-30"
                    ],
                    UpdatedDate=[
                        "2018-04-23T10:00:00",
                        "2018-07-23T10:00:00",
                        "2018-04-25T10:00:00",
                        "2018-07-25T10:00:00", # in the unlikely event of an announcement on the weekend, it will be dropped if no ffill
                        "2018-07-27T10:00:00",
                        "2018-07-27T10:00:00",
                        "2018-07-28T10:00:00",
                    ],
                    Sid=[
                        "FI12345",
                        "FI12345",
                        "FI23456",
                        "FI23456",
                        "FI34567",
                        "FI12345",
                        "FI23456"
                    ],
                    Indicator=[
                        "EPS",
                        "EPS",
                        "EPS",
                        "EPS",
                        "EPS",
                        "BVPS",
                        "BVPS",
                    ],
                    Mean=[
                        13.50,
                        15.67,
                        None,
                        10.03,
                        1.00,
                        42.34,
                        24.56
                    ],
                    Actual=[
                        13.45,
                        16.34,
                        9.45,
                        10.04,
                        0.56,
                        45.34,
                        21.34
                    ]))
            estimates.to_csv(f, index=False)
            f.seek(0)

        def mock_download_master_file(f, *args, **kwargs):
            securities = pd.DataFrame(dict(Sid=["FI12345","FI23456","FI34567"],
                                           Timezone=["America/New_York","America/New_York",
                                                     "America/New_York"]))
            securities.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_reuters_estimates', new=mock_download_reuters_estimates):
            with patch('quantrocket.fundamental.download_master_file', new=mock_download_master_file):

                estimates = get_reuters_estimates_reindexed_like(
                    closes, ["EPS", "BVPS"], fields=["Mean","Actual"],
                    ffill=False, shift=False)

        self.assertSetEqual(set(estimates.index.get_level_values("Indicator")), {"EPS", "BVPS"})
        self.assertSetEqual(set(estimates.index.get_level_values("Field")), {"Actual", "Mean"})
        eps_actuals = estimates.loc["EPS"].loc["Actual"]
        self.assertListEqual(list(eps_actuals.index), list(closes.index))
        self.assertListEqual(list(eps_actuals.columns), list(closes.columns))
        # replace Nan with "nan" to allow equality comparisons
        eps_actuals = eps_actuals.fillna("nan")
        self.maxDiff = None
        # values appear only on their announcement dates; all other dates are NaN
        self.assertDictEqual(
            eps_actuals.to_dict(),
            {"FI12345": {
                pd.Timestamp('2018-07-22 00:00:00'): "nan",
                pd.Timestamp('2018-07-23 00:00:00'): 16.34,
                pd.Timestamp('2018-07-24 00:00:00'): "nan",
                pd.Timestamp('2018-07-27 00:00:00'): "nan",
                pd.Timestamp('2018-07-28 00:00:00'): "nan",
                pd.Timestamp('2018-07-29 00:00:00'): "nan"
                },
             "FI23456": {
                pd.Timestamp('2018-07-22 00:00:00'): "nan",
                pd.Timestamp('2018-07-23 00:00:00'): "nan",
                pd.Timestamp('2018-07-24 00:00:00'): "nan",
                pd.Timestamp('2018-07-27 00:00:00'): "nan",
                pd.Timestamp('2018-07-28 00:00:00'): "nan",
                pd.Timestamp('2018-07-29 00:00:00'): "nan"
                },
             "FI34567": {
                pd.Timestamp('2018-07-22 00:00:00'): "nan",
                pd.Timestamp('2018-07-23 00:00:00'): "nan",
                pd.Timestamp('2018-07-24 00:00:00'): "nan",
                pd.Timestamp('2018-07-27 00:00:00'): 0.56,
                pd.Timestamp('2018-07-28 00:00:00'): "nan",
                pd.Timestamp('2018-07-29 00:00:00'): "nan"}
             })

        eps_estimates = estimates.loc["EPS"].loc["Mean"]
        # replace Nan with "nan" to allow equality comparisons
        eps_estimates = eps_estimates.fillna("nan")
        self.assertDictEqual(
            eps_estimates.to_dict(),
            {"FI12345": {
                pd.Timestamp('2018-07-22 00:00:00'): "nan",
                pd.Timestamp('2018-07-23 00:00:00'): 15.67,
                pd.Timestamp('2018-07-24 00:00:00'): "nan",
                pd.Timestamp('2018-07-27 00:00:00'): "nan",
                pd.Timestamp('2018-07-28 00:00:00'): "nan",
                pd.Timestamp('2018-07-29 00:00:00'): "nan"
                },
             "FI23456": {
                pd.Timestamp('2018-07-22 00:00:00'): "nan",
                pd.Timestamp('2018-07-23 00:00:00'): "nan",
                pd.Timestamp('2018-07-24 00:00:00'): "nan",
                pd.Timestamp('2018-07-27 00:00:00'): "nan",
                pd.Timestamp('2018-07-28 00:00:00'): "nan",
                pd.Timestamp('2018-07-29 00:00:00'): "nan"
                },
             "FI34567": {
                pd.Timestamp('2018-07-22 00:00:00'): "nan",
                pd.Timestamp('2018-07-23 00:00:00'): "nan",
                pd.Timestamp('2018-07-24 00:00:00'): "nan",
                pd.Timestamp('2018-07-27 00:00:00'): 1.00,
                pd.Timestamp('2018-07-28 00:00:00'): "nan",
                pd.Timestamp('2018-07-29 00:00:00'): "nan"}
             })

        bvps_actuals = estimates.loc["BVPS"].loc["Actual"]
        # replace Nan with "nan" to allow equality comparisons
        bvps_actuals = bvps_actuals.fillna("nan")
        self.assertDictEqual(
            bvps_actuals.to_dict(),
            {"FI12345": {
                pd.Timestamp('2018-07-22 00:00:00'): "nan",
                pd.Timestamp('2018-07-23 00:00:00'): "nan",
                pd.Timestamp('2018-07-24 00:00:00'): "nan",
                pd.Timestamp('2018-07-27 00:00:00'): 45.34,
                pd.Timestamp('2018-07-28 00:00:00'): "nan",
                pd.Timestamp('2018-07-29 00:00:00'): "nan"
                },
             "FI23456": {
                pd.Timestamp('2018-07-22 00:00:00'): "nan",
                pd.Timestamp('2018-07-23 00:00:00'): "nan",
                pd.Timestamp('2018-07-24 00:00:00'): "nan",
                pd.Timestamp('2018-07-27 00:00:00'): "nan",
                pd.Timestamp('2018-07-28 00:00:00'): 21.34,
                pd.Timestamp('2018-07-29 00:00:00'): "nan"
                },
             "FI34567": {
                pd.Timestamp('2018-07-22 00:00:00'): "nan",
                pd.Timestamp('2018-07-23 00:00:00'): "nan",
                pd.Timestamp('2018-07-24 00:00:00'): "nan",
                pd.Timestamp('2018-07-27 00:00:00'): "nan",
                pd.Timestamp('2018-07-28 00:00:00'): "nan",
                pd.Timestamp('2018-07-29 00:00:00'): "nan"}
             })

        bvps_estimates = estimates.loc["BVPS"].loc["Mean"]
        # replace Nan with "nan" to allow equality comparisons
        bvps_estimates = bvps_estimates.fillna("nan")
        self.assertDictEqual(
            bvps_estimates.to_dict(),
            {"FI12345": {
                pd.Timestamp('2018-07-22 00:00:00'): "nan",
                pd.Timestamp('2018-07-23 00:00:00'): "nan",
                pd.Timestamp('2018-07-24 00:00:00'): "nan",
                pd.Timestamp('2018-07-27 00:00:00'): 42.34,
                pd.Timestamp('2018-07-28 00:00:00'): "nan",
                pd.Timestamp('2018-07-29 00:00:00'): "nan"
                },
             "FI23456": {
                pd.Timestamp('2018-07-22 00:00:00'): "nan",
                pd.Timestamp('2018-07-23 00:00:00'): "nan",
                pd.Timestamp('2018-07-24 00:00:00'): "nan",
                pd.Timestamp('2018-07-27 00:00:00'): "nan",
                pd.Timestamp('2018-07-28 00:00:00'): 24.56,
                pd.Timestamp('2018-07-29 00:00:00'): "nan"
                },
             "FI34567": {
                pd.Timestamp('2018-07-22 00:00:00'): "nan",
                pd.Timestamp('2018-07-23 00:00:00'): "nan",
                pd.Timestamp('2018-07-24 00:00:00'): "nan",
                pd.Timestamp('2018-07-27 00:00:00'): "nan",
                pd.Timestamp('2018-07-28 00:00:00'): "nan",
                pd.Timestamp('2018-07-29 00:00:00'): "nan"}
             })
    def test_max_lag(self):
        """
        Tests that max_lag works as expected.
        """
        closes = pd.DataFrame(
            np.random.rand(6,1),
            columns=["FI12345"],
            index=pd.date_range(start="2018-07-20", periods=6, freq="D", name="Date"))

        def mock_download_reuters_estimates(codes, f, *args, **kwargs):
            # single BVPS actual for the 2018-06-30 fiscal period
            estimates = pd.DataFrame(
                dict(
                    FiscalPeriodEndDate=[
                        "2018-06-30"
                    ],
                    UpdatedDate=[
                        "2018-07-06T18:00:35",
                    ],
                    Sid=[
                        "FI12345",
                    ],
                    Indicator=[
                        "BVPS",
                    ],
                    Actual=[
                        45
                    ]))
            estimates.to_csv(f, index=False)
            f.seek(0)

        def mock_download_master_file(f, *args, **kwargs):
            securities = pd.DataFrame(dict(Sid=["FI12345"],
                                           Timezone=["America/New_York"]))
            securities.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_reuters_estimates', new=mock_download_reuters_estimates):
            with patch('quantrocket.fundamental.download_master_file', new=mock_download_master_file):

                # request without max_lag
                estimates = get_reuters_estimates_reindexed_like(
                    closes, "BVPS")

        self.assertSetEqual(set(estimates.index.get_level_values("Indicator")), {"BVPS"})
        self.assertSetEqual(set(estimates.index.get_level_values("Field")), {"Actual"})
        bvps = estimates.loc["BVPS"].loc["Actual"]
        self.assertListEqual(list(bvps.index), list(closes.index))
        self.assertListEqual(list(bvps.columns), list(closes.columns))

        # Data is ffilled to end of frame
        self.assertTrue((bvps["FI12345"] == 45).all())

        with patch('quantrocket.fundamental.download_reuters_estimates', new=mock_download_reuters_estimates):
            with patch('quantrocket.fundamental.download_master_file', new=mock_download_master_file):

                # request with max_lag
                estimates = get_reuters_estimates_reindexed_like(
                    closes, ["BVPS"], max_lag="23D")

        bvps = estimates.loc["BVPS"].loc["Actual"]["FI12345"]

        # Data is only ffilled to 2018-07-23 (2018-06-30 + 23D)
        self.assertTrue((bvps.loc[bvps.index <= "2018-07-23"] == 45).all())
        self.assertTrue((bvps.loc[bvps.index > "2018-07-23"].isnull()).all())
    def test_tz_aware_index(self):
        """
        Tests that reindex_like.index can be tz-naive or tz-aware.
        """
        def mock_download_reuters_estimates(codes, f, *args, **kwargs):
            # second ROA actual announced 2018-07-06T17:34:00 UTC, i.e.
            # 2018-07-06 in America/New_York; with the default shift it
            # first appears on 2018-07-07
            estimates = pd.DataFrame(
                dict(
                    FiscalPeriodEndDate=[
                        "2018-03-31",
                        "2018-06-30"
                    ],
                    UpdatedDate=[
                        "2018-04-23T14:00:00",
                        "2018-07-06T17:34:00",
                    ],
                    Sid=[
                        "FI12345",
                        "FI12345"
                    ],
                    Indicator=[
                        "ROA",
                        "ROA"
                    ],
                    Actual=[
                        35,
                        23
                    ]))
            estimates.to_csv(f, index=False)
            f.seek(0)

        def mock_download_master_file(f, *args, **kwargs):
            securities = pd.DataFrame(dict(Sid=["FI12345"],
                                           Timezone=["America/New_York"]))
            securities.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_reuters_estimates', new=mock_download_reuters_estimates):
            with patch('quantrocket.fundamental.download_master_file', new=mock_download_master_file):

                # request with tz_naive
                closes = pd.DataFrame(
                    np.random.rand(4,1),
                    columns=["FI12345"],
                    index=pd.date_range(start="2018-07-05", periods=4, freq="D", name="Date"))

                estimates = get_reuters_estimates_reindexed_like(
                    closes, "ROA", fields="Actual")

        self.assertSetEqual(set(estimates.index.get_level_values("Indicator")), {"ROA"})
        self.assertSetEqual(set(estimates.index.get_level_values("Field")), {"Actual"})
        roas = estimates.loc["ROA"].loc["Actual"]
        self.assertListEqual(list(roas.index), list(closes.index))
        self.assertListEqual(list(roas.columns), list(closes.columns))
        roas = roas.reset_index()
        roas.loc[:, "Date"] = roas.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            roas.to_dict(orient="records"),
            [{'Date': '2018-07-05T00:00:00', "FI12345": 35.0},
             {'Date': '2018-07-06T00:00:00', "FI12345": 35.0},
             {'Date': '2018-07-07T00:00:00', "FI12345": 23.0},
             {'Date': '2018-07-08T00:00:00', "FI12345": 23.0}]
        )

        with patch('quantrocket.fundamental.download_reuters_estimates', new=mock_download_reuters_estimates):
            with patch('quantrocket.fundamental.download_master_file', new=mock_download_master_file):

                # request with tz-aware
                closes = pd.DataFrame(
                    np.random.rand(4,1),
                    columns=["FI12345"],
                    index=pd.date_range(start="2018-07-05", periods=4, freq="D",
                                        tz="America/New_York", name="Date"))

                estimates = get_reuters_estimates_reindexed_like(
                    closes, ["ROA"])

        roas = estimates.loc["ROA"].loc["Actual"]
        roas = roas.reset_index()
        roas.loc[:, "Date"] = roas.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        # same values as before, but the index retains the input's timezone
        self.assertListEqual(
            roas.to_dict(orient="records"),
            [{'Date': '2018-07-05T00:00:00-0400', "FI12345": 35.0},
             {'Date': '2018-07-06T00:00:00-0400', "FI12345": 35.0},
             {'Date': '2018-07-07T00:00:00-0400', "FI12345": 23.0},
             {'Date': '2018-07-08T00:00:00-0400', "FI12345": 23.0}]
        )
    def test_complain_if_missing_securities(self):
        """
        Tests error handling when a security is missing from the securities
        master.
        """
        closes = pd.DataFrame(
            np.random.rand(6,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-03-01", periods=6, freq="MS", name="Date"))

        def mock_download_reuters_estimates(codes, f, *args, **kwargs):
            # estimates cover both sids...
            estimates = pd.DataFrame(
                dict(
                    FiscalPeriodEndDate=[
                        "2018-03-31",
                        "2018-03-31",
                        "2018-03-31",
                        "2018-03-31",
                        "2018-06-30",
                        "2018-06-30"
                    ],
                    UpdatedDate=[
                        "2018-04-06T10:00:00",
                        "2018-04-06T10:00:00",
                        "2018-04-23T13:00:00",
                        "2018-04-23T13:00:00",
                        "2018-07-23T13:00:00",
                        "2018-07-23T13:00:00",
                    ],
                    Sid=[
                        "FI12345",
                        "FI12345",
                        "FI23456",
                        "FI23456",
                        "FI12345",
                        "FI12345",
                    ],
                    Indicator=[
                        "BVPS",
                        "EPS",
                        "BVPS",
                        "EPS",
                        "BVPS",
                        "EPS"
                    ],
                    Actual=[
                        20,
                        9.56,
                        50,
                        63.22,
                        24.5,
                        11.35
                    ]))
            estimates.to_csv(f, index=False)
            f.seek(0)

        def mock_download_master_file(f, *args, **kwargs):
            # ...but the securities master omits FI23456, so its timezone
            # is unknown and the function should raise MissingData
            securities = pd.DataFrame(dict(Sid=["FI12345"],
                                           Timezone=["Japan"]))
            securities.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_reuters_estimates', new=mock_download_reuters_estimates):
            with patch('quantrocket.fundamental.download_master_file', new=mock_download_master_file):

                with self.assertRaises(MissingData) as cm:
                    get_reuters_estimates_reindexed_like(
                        closes, ["BVPS","EPS"])

        self.assertIn((
            "timezones are missing for some sids so cannot convert UTC "
            "estimates to timezone of security (sids missing timezone: FI23456)"), str(cm.exception))
    def test_convert_utc_to_security_timezone(self):
        """
        Tests that estimate UpdatedDates are converted from UTC to the
        security timezone for the purpose of date alignment.
        """
        closes = pd.DataFrame(
            np.random.rand(4,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-07-22", periods=4, freq="D", name="Date"))

        def mock_download_reuters_estimates(codes, f, *args, **kwargs):
            # both sids announce at the same UTC instant, but that instant
            # falls on different local dates in their respective timezones
            estimates = pd.DataFrame(
                dict(
                    FiscalPeriodEndDate=[
                        "2018-03-31",
                        "2018-03-31",
                        "2018-06-30",
                        "2018-06-30",
                    ],
                    UpdatedDate=[
                        "2018-04-06T08:00:00",
                        "2018-04-07T09:35:00",
                        "2018-07-23T17:00:00", # = 2018-07-23 America/New_York
                        "2018-07-23T17:00:00", # = 2018-07-24 Japan
                    ],
                    Sid=[
                        "FI12345",
                        "FI23456",
                        "FI12345",
                        "FI23456"
                    ],
                    Indicator=[
                        "EPS",
                        "EPS",
                        "EPS",
                        "EPS"
                    ],
                    Actual=[
                        24.5,
                        11.35,
                        26.7,
                        15.4
                    ]))
            estimates.to_csv(f, index=False)
            f.seek(0)

        def mock_download_master_file(f, *args, **kwargs):
            securities = pd.DataFrame(dict(Sid=["FI12345","FI23456"],
                                           Timezone=["America/New_York", "Japan"]))
            securities.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_reuters_estimates', new=mock_download_reuters_estimates):
            with patch('quantrocket.fundamental.download_master_file', new=mock_download_master_file):

                estimates = get_reuters_estimates_reindexed_like(
                    closes, ["EPS"])

        eps = estimates.loc["EPS"].loc["Actual"]
        eps = eps.reset_index()
        eps.loc[:, "Date"] = eps.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        # FI12345's new value appears (shifted 1 day) on 7-24; FI23456's on 7-25
        self.assertListEqual(eps.to_dict(orient="records"),
            [{'Date': '2018-07-22T00:00:00', "FI12345": 24.5, "FI23456": 11.35},
             {'Date': '2018-07-23T00:00:00', "FI12345": 24.5, "FI23456": 11.35},
             {'Date': '2018-07-24T00:00:00', "FI12345": 26.7, "FI23456": 11.35},
             {'Date': '2018-07-25T00:00:00', "FI12345": 26.7, "FI23456": 15.4}]
        )
    def test_ignore_no_actuals(self):
        """
        Tests that estimates with no actuals are ignored.
        """
        closes = pd.DataFrame(
            np.random.rand(4,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-07-05", periods=4, freq="D", name="Date"))

        def mock_download_reuters_estimates(codes, f, *args, **kwargs):
            # each sid has one record with a null Actual, which should be dropped
            estimates = pd.DataFrame(
                dict(
                    FiscalPeriodEndDate=[
                        "2018-03-31",
                        "2018-06-30",
                        "2018-03-31",
                        "2018-06-30",
                    ],
                    UpdatedDate=[
                        "2018-04-23T14:00:00",
                        "2018-07-06T17:34:00",
                        "2018-04-23T14:00:00",
                        "2018-07-06T17:34:00",
                    ],
                    Sid=[
                        "FI12345",
                        "FI12345",
                        "FI23456",
                        "FI23456",
                    ],
                    Indicator=[
                        "ROA",
                        "ROA",
                        "ROA",
                        "ROA"
                    ],
                    Actual=[
                        35,
                        None,
                        None,
                        46.7
                    ]))
            estimates.to_csv(f, index=False)
            f.seek(0)

        def mock_download_master_file(f, *args, **kwargs):
            securities = pd.DataFrame(dict(Sid=["FI12345", "FI23456"],
                                           Timezone=["America/New_York", "America/New_York"]))
            securities.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_reuters_estimates', new=mock_download_reuters_estimates):
            with patch('quantrocket.fundamental.download_master_file', new=mock_download_master_file):

                estimates = get_reuters_estimates_reindexed_like(
                    closes, ["ROA"])

        self.assertSetEqual(set(estimates.index.get_level_values("Indicator")), {"ROA"})
        self.assertSetEqual(set(estimates.index.get_level_values("Field")), {"Actual"})
        roas = estimates.loc["ROA"].loc["Actual"]
        self.assertListEqual(list(roas.index), list(closes.index))
        self.assertListEqual(list(roas.columns), list(closes.columns))
        # replace nan with "nan" to allow equality comparisons
        roas = roas.where(roas.notnull(), "nan")
        roas = roas.reset_index()
        roas.loc[:, "Date"] = roas.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        # FI12345 keeps its earlier actual (null record ignored); FI23456 has
        # no value until its non-null actual becomes available
        self.assertListEqual(
            roas.to_dict(orient="records"),
            [{'Date': '2018-07-05T00:00:00', "FI12345": 35.0, "FI23456": "nan"},
             {'Date': '2018-07-06T00:00:00', "FI12345": 35.0, "FI23456": "nan"},
             {'Date': '2018-07-07T00:00:00', "FI12345": 35.0, "FI23456": 46.7},
             {'Date': '2018-07-08T00:00:00', "FI12345": 35.0, "FI23456": 46.7}]
        )
class ReutersFinancialsReindexedLikeTestCase(unittest.TestCase):
    """
    Tests for get_reuters_financials_reindexed_like: input validation,
    argument pass-through to download_reuters_financials, deduping,
    shift/ffill behavior, max_lag, and timezone handling.
    """

    def test_complain_if_time_level_in_index(self):
        """
        Tests error handling when reindex_like has a Time level in the index.
        """
        closes = pd.DataFrame(
            np.random.rand(6,2),
            columns=["FI12345","FI23456"],
            index=pd.MultiIndex.from_product((
                pd.date_range(start="2018-01-01", periods=3, freq="D"),
                ["15:00:00","15:15:00"]), names=["Date", "Time"]))

        with self.assertRaises(ParameterError) as cm:
            get_reuters_financials_reindexed_like(closes, "ATOT")

        self.assertIn("reindex_like should not have 'Time' in index", str(cm.exception))

    def test_complain_if_date_level_not_in_index(self):
        """
        Tests error handling when reindex_like doesn't have an index named
        Date.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-01-01", periods=3, freq="D"))

        with self.assertRaises(ParameterError) as cm:
            get_reuters_financials_reindexed_like(closes, "ATOT")

        self.assertIn("reindex_like must have index called 'Date'", str(cm.exception))

    def test_complain_if_not_datetime_index(self):
        """
        Tests error handling when the reindex_like index is named Date but is
        not a DatetimeIndex.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.Index(["foo","bar","bat"], name="Date"))

        with self.assertRaises(ParameterError) as cm:
            get_reuters_financials_reindexed_like(closes, "ATOT")

        self.assertIn("reindex_like must have a DatetimeIndex", str(cm.exception))

    @patch("quantrocket.fundamental.download_reuters_financials")
    def test_pass_args_correctly(self,
                                 mock_download_reuters_financials):
        """
        Tests that sids, date ranges, and other args are correctly
        passed to download_reuters_financials.
        """
        closes = pd.DataFrame(
            np.random.rand(6,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-03-01", periods=6, freq="MS", name="Date"))

        def _mock_download_reuters_financials(coa_codes, f, *args, **kwargs):
            financials = pd.DataFrame(
                dict(
                    FiscalPeriodEndDate=[
                        "2018-03-31",
                        "2018-03-31",
                        "2018-03-31",
                        "2018-03-31",
                        "2018-06-30",
                        "2018-06-30"
                    ],
                    SourceDate=[
                        "2018-04-06",
                        "2018-04-06",
                        "2018-04-23",
                        "2018-04-23",
                        "2018-07-23",
                        "2018-07-23",
                    ],
                    Sid=[
                        "FI12345",
                        "FI12345",
                        "FI23456",
                        "FI23456",
                        "FI12345",
                        "FI12345",
                    ],
                    CoaCode=[
                        "ATOT",
                        "QTCO",
                        "ATOT",
                        "QTCO",
                        "ATOT",
                        "QTCO"
                    ],
                    Amount=[
                        565,
                        89,
                        235,
                        73,
                        580,
                        92
                    ]))
            financials.to_csv(f, index=False)
            f.seek(0)

        mock_download_reuters_financials.side_effect = _mock_download_reuters_financials

        get_reuters_financials_reindexed_like(
            closes, ["ATOT","QTCO"], fields=["Amount", "FiscalPeriodEndDate"],
            interim=True, exclude_restatements=False, max_lag="500D")

        reuters_financials_call = mock_download_reuters_financials.mock_calls[0]
        _, args, kwargs = reuters_financials_call
        self.assertListEqual(args[0], ["ATOT", "QTCO"])
        self.assertListEqual(kwargs["sids"], ["FI12345","FI23456"])
        self.assertEqual(kwargs["start_date"], "2016-09-02") # 365+180 days before reindex_like min date
        self.assertEqual(kwargs["end_date"], "2018-08-01")
        self.assertEqual(kwargs["fields"], ["Amount", "FiscalPeriodEndDate"])
        self.assertTrue(kwargs["interim"])
        self.assertFalse(kwargs["exclude_restatements"])

        # repeat with flipped interim/exclude_restatements to verify both paths
        get_reuters_financials_reindexed_like(
            closes, ["ATOT", "QTCO", "LTLL"], fields=["Amount", "Source"],
            interim=False, exclude_restatements=True, max_lag="500D")

        reuters_financials_call = mock_download_reuters_financials.mock_calls[1]
        _, args, kwargs = reuters_financials_call
        self.assertListEqual(args[0], ["ATOT", "QTCO", "LTLL"])
        self.assertListEqual(kwargs["sids"], ["FI12345","FI23456"])
        self.assertEqual(kwargs["start_date"], "2016-09-02") # 365+180 days before reindex_like min date
        self.assertEqual(kwargs["end_date"], "2018-08-01")
        self.assertEqual(kwargs["fields"], ["Amount", "Source"])
        self.assertFalse(kwargs["interim"])
        self.assertTrue(kwargs["exclude_restatements"])

    def test_dedupe_source_date(self):
        """
        Tests that duplicate SourceDates (resulting from reporting several
        fiscal periods at once) are deduped by keeping the latest record.
        """
        closes = pd.DataFrame(
            np.random.rand(6,1),
            columns=["FI12345"],
            index=pd.date_range(start="2018-03-01", periods=6, freq="MS", name="Date"))

        def mock_download_reuters_financials(coa_codes, f, *args, **kwargs):
            # two fiscal periods reported on the same SourceDate; the later
            # fiscal period (amount 580) should win
            financials = pd.DataFrame(
                dict(
                    FiscalPeriodEndDate=[
                        "2018-03-30",
                        "2018-06-30"
                    ],
                    SourceDate=[
                        "2018-07-23",
                        "2018-07-23",
                    ],
                    Sid=[
                        "FI12345",
                        "FI12345",
                    ],
                    CoaCode=[
                        "ATOT",
                        "ATOT",
                    ],
                    Amount=[
                        565,
                        580
                    ]))
            financials.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_reuters_financials', new=mock_download_reuters_financials):

            financials = get_reuters_financials_reindexed_like(
                closes, "ATOT", interim=True)

        self.assertSetEqual(set(financials.index.get_level_values("CoaCode")), {"ATOT"})
        self.assertSetEqual(set(financials.index.get_level_values("Field")), {"Amount"})
        atots = financials.loc["ATOT"].loc["Amount"]
        self.assertListEqual(list(atots.index), list(closes.index))
        self.assertListEqual(list(atots.columns), list(closes.columns))
        self.assertEqual(atots["FI12345"].loc["2018-08-01"], 580)

    def test_ffill_no_lookahead_bias(self):
        """
        Tests that financial statement metrics are ffilled and are shifted
        forward 1 period to avoid lookahead bias.
        """
        closes = pd.DataFrame(
            np.random.rand(6,1),
            columns=["FI12345"],
            index=pd.date_range(start="2018-07-20", periods=6, freq="D", name="Date"))

        def mock_download_reuters_financials(coa_codes, f, *args, **kwargs):
            financials = pd.DataFrame(
                dict(
                    FiscalPeriodEndDate=[
                        "2018-03-30",
                        "2018-06-30"
                    ],
                    SourceDate=[
                        "2018-04-23",
                        "2018-07-23",
                    ],
                    Sid=[
                        "FI12345",
                        "FI12345",
                    ],
                    CoaCode=[
                        "ATOT",
                        "ATOT",
                    ],
                    Amount=[
                        565,
                        580
                    ]))
            financials.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_reuters_financials', new=mock_download_reuters_financials):

            financials = get_reuters_financials_reindexed_like(
                closes, ["ATOT"], interim=True)

        self.assertSetEqual(set(financials.index.get_level_values("CoaCode")), {"ATOT"})
        self.assertSetEqual(set(financials.index.get_level_values("Field")), {"Amount"})
        atots = financials.loc["ATOT"].loc["Amount"]
        self.assertListEqual(list(atots.index), list(closes.index))
        self.assertListEqual(list(atots.columns), list(closes.columns))
        # 580 was sourced 2018-07-23 but must not appear until 2018-07-24
        self.assertEqual(atots["FI12345"].loc["2018-07-23"], 565)
        self.assertEqual(atots["FI12345"].loc["2018-07-24"], 580)

    def test_max_lag(self):
        """
        Tests that max_lag works as expected.
        """
        closes = pd.DataFrame(
            np.random.rand(6,1),
            columns=["FI12345"],
            index=pd.date_range(start="2018-07-20", periods=6, freq="D", name="Date"))

        def mock_download_reuters_financials(coa_codes, f, *args, **kwargs):
            financials = pd.DataFrame(
                dict(
                    FiscalPeriodEndDate=[
                        "2018-06-30"
                    ],
                    SourceDate=[
                        "2018-07-06",
                    ],
                    Sid=[
                        "FI12345",
                    ],
                    CoaCode=[
                        "ATOT",
                    ],
                    Amount=[
                        580
                    ]))
            financials.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_reuters_financials', new=mock_download_reuters_financials):

            # request without max_lag
            financials = get_reuters_financials_reindexed_like(
                closes, ["ATOT"], interim=True)

        self.assertSetEqual(set(financials.index.get_level_values("CoaCode")), {"ATOT"})
        self.assertSetEqual(set(financials.index.get_level_values("Field")), {"Amount"})
        atots = financials.loc["ATOT"].loc["Amount"]
        self.assertListEqual(list(atots.index), list(closes.index))
        self.assertListEqual(list(atots.columns), list(closes.columns))

        # Data is ffilled to end of frame
        self.assertTrue((atots["FI12345"] == 580).all())

        with patch('quantrocket.fundamental.download_reuters_financials', new=mock_download_reuters_financials):

            # request with max_lag
            financials = get_reuters_financials_reindexed_like(
                closes, ["ATOT"], interim=True, max_lag="23D")

        atots = financials.loc["ATOT"].loc["Amount"]["FI12345"]

        # Data is only ffilled to 2018-07-23 (2018-06-30 + 23D)
        self.assertTrue((atots.loc[atots.index <= "2018-07-23"] == 580).all())
        self.assertTrue((atots.loc[atots.index > "2018-07-23"].isnull()).all())

    def test_tz_aware_index(self):
        """
        Tests that reindex_like.index can be tz-naive or tz-aware.
        """
        def mock_download_reuters_financials(coa_codes, f, *args, **kwargs):
            financials = pd.DataFrame(
                dict(
                    FiscalPeriodEndDate=[
                        "2018-03-31",
                        "2018-06-30"
                    ],
                    SourceDate=[
                        "2018-04-23",
                        "2018-07-06",
                    ],
                    Sid=[
                        "FI12345",
                        "FI12345"
                    ],
                    CoaCode=[
                        "ATOT",
                        "ATOT"
                    ],
                    Amount=[
                        580,
                        542
                    ]))
            financials.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_reuters_financials', new=mock_download_reuters_financials):

            # request with tz_naive
            closes = pd.DataFrame(
                np.random.rand(4,1),
                columns=["FI12345"],
                index=pd.date_range(start="2018-07-05", periods=4, freq="D", name="Date"))

            financials = get_reuters_financials_reindexed_like(
                closes, "ATOT", fields="Amount", interim=True)

        self.assertSetEqual(set(financials.index.get_level_values("CoaCode")), {"ATOT"})
        self.assertSetEqual(set(financials.index.get_level_values("Field")), {"Amount"})
        atots = financials.loc["ATOT"].loc["Amount"]
        self.assertListEqual(list(atots.index), list(closes.index))
        self.assertListEqual(list(atots.columns), list(closes.columns))
        atots = atots.reset_index()
        atots.loc[:, "Date"] = atots.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            atots.to_dict(orient="records"),
            [{'Date': '2018-07-05T00:00:00', "FI12345": 580.0},
             {'Date': '2018-07-06T00:00:00', "FI12345": 580.0},
             {'Date': '2018-07-07T00:00:00', "FI12345": 542.0},
             {'Date': '2018-07-08T00:00:00', "FI12345": 542.0}]
        )

        with patch('quantrocket.fundamental.download_reuters_financials', new=mock_download_reuters_financials):

            # request with tz-aware
            closes = pd.DataFrame(
                np.random.rand(4,1),
                columns=["FI12345"],
                index=pd.date_range(start="2018-07-05", periods=4, freq="D", tz="America/New_York", name="Date"))

            financials = get_reuters_financials_reindexed_like(
                closes, ["ATOT"], interim=True)

        atots = financials.loc["ATOT"].loc["Amount"]["FI12345"]
        atots = atots.reset_index()
        atots.loc[:, "Date"] = atots.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        # same values, index timezone preserved from input
        self.assertListEqual(
            atots.to_dict(orient="records"),
            [{'Date': '2018-07-05T00:00:00-0400', "FI12345": 580.0},
             {'Date': '2018-07-06T00:00:00-0400', "FI12345": 580.0},
             {'Date': '2018-07-07T00:00:00-0400', "FI12345": 542.0},
             {'Date': '2018-07-08T00:00:00-0400', "FI12345": 542.0}]
        )
class WSHEarningsDatesReindexedLikeTestCase(unittest.TestCase):
    """
    Tests for get_wsh_earnings_dates_reindexed_like: input validation,
    argument pass-through, LastUpdated-based deduping, and timezone handling.
    """

    def test_complain_if_time_level_in_index(self):
        """
        Tests error handling when reindex_like has a Time level in the index.
        """
        closes = pd.DataFrame(
            np.random.rand(6,2),
            columns=["FI12345","FI23456"],
            index=pd.MultiIndex.from_product((
                pd.date_range(start="2018-01-01", periods=3, freq="D"),
                ["15:00:00","15:15:00"]), names=["Date", "Time"]))

        with self.assertRaises(ParameterError) as cm:
            get_wsh_earnings_dates_reindexed_like(closes)

        self.assertIn("reindex_like should not have 'Time' in index", str(cm.exception))

    def test_complain_if_date_level_not_in_index(self):
        """
        Tests error handling when reindex_like doesn't have an index named
        Date.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-01-01", periods=3, freq="D"))

        with self.assertRaises(ParameterError) as cm:
            get_wsh_earnings_dates_reindexed_like(closes)

        self.assertIn("reindex_like must have index called 'Date'", str(cm.exception))

    def test_complain_if_not_datetime_index(self):
        """
        Tests error handling when the reindex_like index is named Date but is
        not a DatetimeIndex.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.Index(["foo","bar","bat"], name="Date"))

        with self.assertRaises(ParameterError) as cm:
            get_wsh_earnings_dates_reindexed_like(closes)

        self.assertIn("reindex_like must have a DatetimeIndex", str(cm.exception))

    @patch("quantrocket.fundamental.download_wsh_earnings_dates")
    def test_pass_sids_and_dates_based_on_reindex_like(self,
                                                       mock_download_wsh_earnings_dates):
        """
        Tests that sids and date ranges are correctly passed to the
        download_wsh_earnings_dates function based on reindex_like.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-05-01", periods=3, freq="D", name="Date"))

        def _mock_download_wsh_earnings_dates(f, *args, **kwargs):
            announcements = pd.DataFrame(
                dict(Date=["2018-05-01",
                           "2018-05-02"],
                     Sid=["FI12345",
                          "FI23456"],
                     Time=["Before Market",
                           "After Market"],
                     Status=["Unconfirmed",
                             "Unconfirmed"],
                     LastUpdated=["2018-04-11T07:48:20",
                                  "2018-04-09T07:48:20"]
                     ))
            announcements.to_csv(f, index=False)
            f.seek(0)

        mock_download_wsh_earnings_dates.side_effect = _mock_download_wsh_earnings_dates

        get_wsh_earnings_dates_reindexed_like(closes, fields=["Time","Status"], statuses="Unconfirmed")

        wsh_call = mock_download_wsh_earnings_dates.mock_calls[0]
        _, args, kwargs = wsh_call
        self.assertListEqual(kwargs["sids"], ["FI12345","FI23456"])
        self.assertEqual(kwargs["start_date"], "2018-05-01")
        self.assertEqual(kwargs["end_date"], "2018-05-03")
        # LastUpdated is always requested internally (needed for deduping)
        self.assertListEqual(kwargs["fields"], ["Time","Status","LastUpdated"])
        self.assertListEqual(kwargs["statuses"], ["Unconfirmed"])

    @patch("quantrocket.fundamental.download_wsh_earnings_dates")
    def test_dedupe(self, mock_download_wsh_earnings_dates):
        """
        Tests that the resulting DataFrame is correct when deduping on
        LastUpdated.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-05-01", periods=3, freq="D", name="Date"))

        def _mock_download_wsh_earnings_dates(f, *args, **kwargs):
            # each sid has two records per date; the one with the later
            # LastUpdated should be kept
            announcements = pd.DataFrame(
                dict(Date=["2018-05-01",
                           "2018-05-01",
                           "2018-05-02",
                           "2018-05-02"],
                     Sid=["FI12345",
                          "FI12345",
                          "FI23456",
                          "FI23456"],
                     Time=["Before Market",
                           "After Market",
                           "After Market",
                           "Unspecified"],
                     Status=["Unconfirmed",
                             "Confirmed",
                             "Confirmed",
                             "Confirmed"],
                     LastUpdated=["2018-03-11T07:48:20",
                                  "2018-04-09T07:48:20",
                                  "2018-04-11T07:48:20",
                                  "2018-04-09T07:48:20"]))
            announcements.to_csv(f, index=False)
            f.seek(0)

        mock_download_wsh_earnings_dates.side_effect = _mock_download_wsh_earnings_dates

        announcements = get_wsh_earnings_dates_reindexed_like(closes,
                                                              statuses=["Confirmed","Unconfirmed"])

        wsh_call = mock_download_wsh_earnings_dates.mock_calls[0]
        _, args, kwargs = wsh_call
        self.assertListEqual(kwargs["sids"], ["FI12345","FI23456"])
        self.assertEqual(kwargs["start_date"], "2018-05-01")
        self.assertEqual(kwargs["end_date"], "2018-05-03")
        self.assertListEqual(kwargs["fields"], ["Time", "LastUpdated"])
        self.assertListEqual(kwargs["statuses"], ["Confirmed", "Unconfirmed"])

        # but only Time is returned, as requested
        self.assertSetEqual(set(announcements.index.get_level_values("Field").unique()), {"Time"})

        announce_times = announcements.loc["Time"]
        announce_times = announce_times.reset_index()
        announce_times.loc[:, "Date"] = announce_times.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        announce_times = announce_times.fillna("nan")
        self.assertListEqual(
            announce_times.to_dict(orient="records"),
            [
                {'Date': '2018-05-01T00:00:00',
                 "FI12345": 'After Market',
                 "FI23456": 'nan'},
                {'Date': '2018-05-02T00:00:00',
                 "FI12345": 'nan',
                 "FI23456": 'After Market'},
                {'Date': '2018-05-03T00:00:00',
                 "FI12345": 'nan',
                 "FI23456": 'nan'}]
        )

        # Repeat but request Status field so we can check the output of that too
        announcements = get_wsh_earnings_dates_reindexed_like(closes,
                                                              fields=["Time","Status"],
                                                              statuses=["Confirmed","Unconfirmed"])

        announce_statuses = announcements.loc["Status"]
        announce_statuses = announce_statuses.reset_index()
        announce_statuses.loc[:, "Date"] = announce_statuses.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        announce_statuses = announce_statuses.fillna("nan")
        self.assertListEqual(
            announce_statuses.to_dict(orient="records"),
            [
                {'Date': '2018-05-01T00:00:00',
                 "FI12345": 'Confirmed',
                 "FI23456": 'nan'},
                {'Date': '2018-05-02T00:00:00',
                 "FI12345": 'nan',
                 "FI23456": 'Confirmed'},
                {'Date': '2018-05-03T00:00:00',
                 "FI12345": 'nan',
                 "FI23456": 'nan'}]
        )

    def test_tz_aware_index(self):
        """
        Tests that a tz-aware index in the input DataFrame can be handled.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-05-01", periods=3, freq="D", tz="America/New_York",
                                name="Date"))

        def mock_download_wsh_earnings_dates(f, *args, **kwargs):
            announcements = pd.DataFrame(
                dict(Date=["2018-05-01",
                           "2018-05-02"],
                     Sid=["FI12345",
                          "FI23456"],
                     Time=["Before Market",
                           "After Market"],
                     LastUpdated=["2018-04-11T07:48:20",
                                  "2018-04-09T07:48:20"]))
            announcements.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_wsh_earnings_dates', new=mock_download_wsh_earnings_dates):
            announcements = get_wsh_earnings_dates_reindexed_like(closes)

        self.assertSetEqual(set(announcements.index.get_level_values("Field").unique()), {"Time"})

        announce_times = announcements.loc["Time"]
        # output index should retain the input's timezone
        self.assertEqual(announce_times.index.tz.zone, "America/New_York")
        announce_times = announce_times.reset_index()
        announce_times.loc[:, "Date"] = announce_times.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        announce_times = announce_times.fillna("nan")
        self.assertListEqual(
            announce_times.to_dict(orient="records"),
            [
                {'Date': '2018-05-01T00:00:00-0400',
                 "FI12345": 'Before Market',
                 "FI23456": 'nan'},
                {'Date': '2018-05-02T00:00:00-0400',
                 "FI12345": 'nan',
                 "FI23456": 'After Market'},
                {'Date': '2018-05-03T00:00:00-0400',
                 "FI12345": 'nan',
                 "FI23456": 'nan'}]
        )
class StockloanDataReindexedLikeTestCase(unittest.TestCase):
    """
    Tests for get_ibkr_shortable_shares_reindexed_like,
    get_ibkr_borrow_fees_reindexed_like, and get_alpaca_etb_reindexed_like,
    which share a base implementation.
    """

    def test_complain_if_time_level_in_index(self):
        """
        Tests error handling when reindex_like has a Time level in the index.
        """
        closes = pd.DataFrame(
            np.random.rand(6,2),
            columns=["FI12345","FI23456"],
            index=pd.MultiIndex.from_product((
                pd.date_range(start="2018-01-01", periods=3, freq="D"),
                ["15:00:00","15:15:00"]), names=["Date", "Time"]))

        with self.assertRaises(ParameterError) as cm:
            get_ibkr_shortable_shares_reindexed_like(closes)

        self.assertIn("reindex_like should not have 'Time' in index", str(cm.exception))

    def test_complain_if_date_level_not_in_index(self):
        """
        Tests error handling when reindex_like doesn't have an index named
        Date.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-01-01", periods=3, freq="D"))

        with self.assertRaises(ParameterError) as cm:
            get_ibkr_shortable_shares_reindexed_like(closes)

        self.assertIn("reindex_like must have index called 'Date'", str(cm.exception))

    def test_complain_if_not_datetime_index(self):
        """
        Tests error handling when the reindex_like index is named Date but is
        not a DatetimeIndex.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.Index(["foo","bar","bat"], name="Date"))

        with self.assertRaises(ParameterError) as cm:
            get_ibkr_shortable_shares_reindexed_like(closes)

        self.assertIn("reindex_like must have a DatetimeIndex", str(cm.exception))

    @patch("quantrocket.fundamental.download_ibkr_borrow_fees")
    @patch("quantrocket.fundamental.download_ibkr_shortable_shares")
    def test_pass_sids_and_dates_based_on_reindex_like(self,
                                                       mock_download_ibkr_shortable_shares,
                                                       mock_download_ibkr_borrow_fees):
        """
        Tests that sids and date ranges are correctly passed to the
        download_* functions based on reindex_like.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-05-01", periods=3, freq="D", name="Date"))

        def _mock_download_ibkr_shortable_shares(f, *args, **kwargs):
            shortable_shares = pd.DataFrame(
                dict(Date=["2018-05-01T21:45:02",
                           "2018-05-01T22:00:03",
                           "2018-05-01T21:45:02"],
                     Sid=["FI12345",
                          "FI12345",
                          "FI23456"],
                     Quantity=[10000,
                               9000,
                               80000]))
            shortable_shares.to_csv(f, index=False)
            f.seek(0)

        mock_download_ibkr_shortable_shares.side_effect = _mock_download_ibkr_shortable_shares

        get_ibkr_shortable_shares_reindexed_like(closes, time="00:00:00 America/New_York")

        shortable_shares_call = mock_download_ibkr_shortable_shares.mock_calls[0]
        _, args, kwargs = shortable_shares_call
        self.assertListEqual(kwargs["sids"], ["FI12345","FI23456"])
        self.assertEqual(kwargs["start_date"], "2018-03-17") # 45 days before reindex_like min date
        self.assertEqual(kwargs["end_date"], "2018-05-03")

        def _mock_download_ibkr_borrow_fees(f, *args, **kwargs):
            borrow_fees = pd.DataFrame(
                dict(Date=["2018-05-01T21:45:02",
                           "2018-05-01T22:00:03",
                           "2018-05-01T21:45:02"],
                     Sid=["FI12345",
                          "FI12345",
                          "FI23456"],
                     FeeRate=[1.75,
                              1.79,
                              0.35]))
            borrow_fees.to_csv(f, index=False)
            f.seek(0)

        mock_download_ibkr_borrow_fees.side_effect = _mock_download_ibkr_borrow_fees

        get_ibkr_borrow_fees_reindexed_like(closes, time="00:00:00 America/Toronto")

        borrow_fees_call = mock_download_ibkr_borrow_fees.mock_calls[0]
        _, args, kwargs = borrow_fees_call
        self.assertListEqual(kwargs["sids"], ["FI12345","FI23456"])
        self.assertEqual(kwargs["start_date"], "2018-03-17") # 45 days before reindex_like min date
        self.assertEqual(kwargs["end_date"], "2018-05-03")

    def test_complain_if_passed_timezone_not_match_reindex_like_timezone(self):
        """
        Tests error handling when a timezone is passed and reindex_like
        timezone is set and they do not match.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-05-01",
                                periods=3,
                                freq="D",
                                tz="America/New_York",
                                name="Date"))

        def mock_download_ibkr_shortable_shares(f, *args, **kwargs):
            shortable_shares = pd.DataFrame(
                dict(Date=["2018-05-01T21:45:02",
                           "2018-05-01T23:15:02",
                           "2018-05-03T00:30:03",
                           "2018-05-01T21:45:02",
                           "2018-05-02T23:15:02",
                           "2018-05-03T00:30:03",
                           ],
                     Sid=["FI12345",
                          "FI12345",
                          "FI12345",
                          "FI23456",
                          "FI23456",
                          "FI23456"],
                     Quantity=[10000,
                               9000,
                               80000,
                               3500,
                               3600,
                               3800
                               ]))
            shortable_shares.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_ibkr_shortable_shares', new=mock_download_ibkr_shortable_shares):
            with self.assertRaises(ParameterError) as cm:
                get_ibkr_shortable_shares_reindexed_like(closes, time="09:30:00 Europe/London")

        self.assertIn((
            "cannot use timezone Europe/London because reindex_like timezone is America/New_York, "
            "these must match"), str(cm.exception))

    def test_pass_timezone(self):
        """
        Tests that the UTC timestamps of the shortable shares data are
        correctly interpreted based on the requested timezone.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-05-01",
                                periods=3,
                                freq="D",
                                name="Date"))

        def mock_download_ibkr_shortable_shares(f, *args, **kwargs):
            shortable_shares = pd.DataFrame(
                dict(Date=["2018-04-20T21:45:02",
                           "2018-05-01T13:45:02",
                           "2018-05-02T12:30:03",
                           "2018-04-20T21:45:02",
                           "2018-05-01T14:15:02",
                           "2018-05-02T14:30:03",
                           "2018-05-03T08:30:00",
                           ],
                     Sid=["FI12345",
                          "FI12345",
                          "FI12345",
                          "FI23456",
                          "FI23456",
                          "FI23456",
                          "FI23456"],
                     Quantity=[10000,
                               9000,
                               80000,
                               3500,
                               3600,
                               3800,
                               3100
                               ]))
            shortable_shares.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_ibkr_shortable_shares', new=mock_download_ibkr_shortable_shares):
            shortable_shares = get_ibkr_shortable_shares_reindexed_like(
                closes,
                time="09:30:00 America/New_York")

        shortable_shares = shortable_shares.reset_index()
        shortable_shares.loc[:, "Date"] = shortable_shares.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            shortable_shares.to_dict(orient="records"),
            [{'Date': '2018-05-01T00:00:00', "FI12345": 10000.0, "FI23456": 3500.0},
             {'Date': '2018-05-02T00:00:00', "FI12345": 80000.0, "FI23456": 3600.0},
             {'Date': '2018-05-03T00:00:00', "FI12345": 80000.0, "FI23456": 3100.0}]
        )

        with patch('quantrocket.fundamental.download_ibkr_shortable_shares', new=mock_download_ibkr_shortable_shares):
            shortable_shares = get_ibkr_shortable_shares_reindexed_like(
                closes,
                time="09:30:00 Europe/London")

        shortable_shares = shortable_shares.reset_index()
        shortable_shares.loc[:, "Date"] = shortable_shares.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            shortable_shares.to_dict(orient="records"),
            [{'Date': '2018-05-01T00:00:00', "FI12345": 10000.0, "FI23456": 3500.0},
             {'Date': '2018-05-02T00:00:00', "FI12345": 9000.0, "FI23456": 3600.0},
             {'Date': '2018-05-03T00:00:00', "FI12345": 80000.0, "FI23456": 3100.0}]
        )

        with patch('quantrocket.fundamental.download_ibkr_shortable_shares', new=mock_download_ibkr_shortable_shares):
            shortable_shares = get_ibkr_shortable_shares_reindexed_like(
                closes,
                time="09:30:00 Japan")

        shortable_shares = shortable_shares.reset_index()
        shortable_shares.loc[:, "Date"] = shortable_shares.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            shortable_shares.to_dict(orient="records"),
            [{'Date': '2018-05-01T00:00:00', "FI12345": 10000.0, "FI23456": 3500.0},
             {'Date': '2018-05-02T00:00:00', "FI12345": 9000.0, "FI23456": 3600.0},
             {'Date': '2018-05-03T00:00:00', "FI12345": 80000.0, "FI23456": 3800.0}]
        )

    def test_use_reindex_like_timezone(self):
        """
        Tests that, when a timezone is not passed but reindex_like timezone
        is set, the latter is used.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-05-01",
                                periods=3,
                                freq="D",
                                tz="America/New_York",
                                name="Date"))

        def mock_download_ibkr_shortable_shares(f, *args, **kwargs):
            shortable_shares = pd.DataFrame(
                dict(Date=["2018-04-20T21:45:02",
                           "2018-05-01T13:45:02",
                           "2018-05-02T12:30:03",
                           "2018-04-20T21:45:02",
                           "2018-05-01T14:15:02",
                           "2018-05-02T14:30:03",
                           "2018-05-03T08:30:00",
                           ],
                     Sid=["FI12345",
                          "FI12345",
                          "FI12345",
                          "FI23456",
                          "FI23456",
                          "FI23456",
                          "FI23456"],
                     Quantity=[10000,
                               9000,
                               80000,
                               3500,
                               3600,
                               3800,
                               3100
                               ]))
            shortable_shares.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_ibkr_shortable_shares', new=mock_download_ibkr_shortable_shares):
            shortable_shares = get_ibkr_shortable_shares_reindexed_like(
                closes,
                time="09:30:00")

        shortable_shares = shortable_shares.reset_index()
        shortable_shares.loc[:, "Date"] = shortable_shares.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            shortable_shares.to_dict(orient="records"),
            [{'Date': '2018-05-01T00:00:00-0400', "FI12345": 10000.0, "FI23456": 3500.0},
             {'Date': '2018-05-02T00:00:00-0400', "FI12345": 80000.0, "FI23456": 3600.0},
             {'Date': '2018-05-03T00:00:00-0400', "FI12345": 80000.0, "FI23456": 3100.0}]
        )

    @patch("quantrocket.fundamental.download_master_file")
    def test_infer_timezone_from_securities(self, mock_download_master_file):
        """
        Tests that, when timezone is not passed and reindex_like timezone is
        not set, the timezone is inferred from the component securities.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-05-01",
                                periods=3,
                                freq="D",
                                name="Date"))

        def _mock_download_master_file(f, *args, **kwargs):
            securities = pd.DataFrame(dict(Sid=["FI12345","FI23456"],
                                           Timezone=["Japan","Japan"]))
            securities.to_csv(f, index=False)
            f.seek(0)

        mock_download_master_file.side_effect = _mock_download_master_file

        def mock_download_ibkr_shortable_shares(f, *args, **kwargs):
            shortable_shares = pd.DataFrame(
                dict(Date=["2018-04-20T21:45:02",
                           "2018-05-01T13:45:02",
                           "2018-05-02T12:30:03",
                           "2018-04-20T21:45:02",
                           "2018-05-01T14:15:02",
                           "2018-05-02T14:30:03",
                           "2018-05-03T08:30:00",
                           ],
                     Sid=["FI12345",
                          "FI12345",
                          "FI12345",
                          "FI23456",
                          "FI23456",
                          "FI23456",
                          "FI23456"],
                     Quantity=[10000,
                               9000,
                               80000,
                               3500,
                               3600,
                               3800,
                               3100
                               ]))
            shortable_shares.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_ibkr_shortable_shares', new=mock_download_ibkr_shortable_shares):
            shortable_shares = get_ibkr_shortable_shares_reindexed_like(
                closes,
                time="09:30:00")

        shortable_shares = shortable_shares.reset_index()
        shortable_shares.loc[:, "Date"] = shortable_shares.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        # expected values reflect 09:30:00 Japan time, since Japan was
        # inferred from the mocked securities master
        self.assertListEqual(
            shortable_shares.to_dict(orient="records"),
            [{'Date': '2018-05-01T00:00:00', "FI12345": 10000.0, "FI23456": 3500.0},
             {'Date': '2018-05-02T00:00:00', "FI12345": 9000.0, "FI23456": 3600.0},
             {'Date': '2018-05-03T00:00:00', "FI12345": 80000.0, "FI23456": 3800.0}]
        )

    def test_complain_if_cannot_infer_timezone(self):
        """
        Tests error handling when a timezone is not passed, reindex_like
        timezone is not set, and the timezone cannot be inferred from the
        securities master because there are multiple timezones among the
        component securities.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-05-01", periods=3, freq="D", name="Date"))

        def mock_download_master_file(f, *args, **kwargs):
            securities = pd.DataFrame(dict(Sid=["FI12345","FI23456"],
                                           Timezone=["America/New_York","Japan"]))
            securities.to_csv(f, index=False)
            f.seek(0)

        def mock_download_ibkr_shortable_shares(f, *args, **kwargs):
            shortable_shares = pd.DataFrame(
                dict(Date=["2018-05-01T21:45:02",
                           "2018-05-01T22:00:03",
                           "2018-05-01T21:45:02"],
                     Sid=["FI12345",
                          "FI12345",
                          "FI23456"],
                     Quantity=[10000,
                               9000,
                               80000]))
            shortable_shares.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_ibkr_shortable_shares', new=mock_download_ibkr_shortable_shares):
            with patch("quantrocket.fundamental.download_master_file", new=mock_download_master_file):
                with self.assertRaises(ParameterError) as cm:
                    get_ibkr_shortable_shares_reindexed_like(closes)

        self.assertIn((
            "no timezone specified and cannot infer because multiple timezones are "
            "present in data, please specify timezone (timezones in data: America/New_York, Japan)"
            ), str(cm.exception))

    def test_invalid_timezone(self):
        """
        Tests error handling when an invalid timezone is passed.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-05-01", periods=3, freq="D",
                                name="Date"))

        def mock_download_ibkr_shortable_shares(f, *args, **kwargs):
            shortable_shares = pd.DataFrame(
                dict(Date=["2018-05-01T21:45:02",
                           "2018-05-01T22:00:03",
                           "2018-05-01T21:45:02"],
                     Sid=["FI12345",
                          "FI12345",
                          "FI23456"],
                     Quantity=[10000,
                               9000,
                               80000]))
            shortable_shares.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_ibkr_shortable_shares', new=mock_download_ibkr_shortable_shares):
            with self.assertRaises(pytz.exceptions.UnknownTimeZoneError) as cm:
                get_ibkr_shortable_shares_reindexed_like(closes, time="09:30:00 Mars")

        # repr() of an exception instance does not include the module path
        # (it is "UnknownTimeZoneError('Mars')"), so match on the zone name
        # rather than the fully qualified exception name
        self.assertIn("'Mars'", repr(cm.exception))

    def test_invalid_time(self):
        """
        Tests error handling when an invalid time is passed.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-05-01", periods=3, freq="D", tz="America/New_York",
                                name="Date"))

        def mock_download_ibkr_shortable_shares(f, *args, **kwargs):
            shortable_shares = pd.DataFrame(
                dict(Date=["2018-05-01T21:45:02",
                           "2018-05-01T22:00:03",
                           "2018-05-01T21:45:02"],
                     Sid=["FI12345",
                          "FI12345",
                          "FI23456"],
                     Quantity=[10000,
                               9000,
                               80000]))
            shortable_shares.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_ibkr_shortable_shares', new=mock_download_ibkr_shortable_shares):
            with self.assertRaises(ParameterError) as cm:
                get_ibkr_shortable_shares_reindexed_like(closes, time="foo")

        self.assertIn("could not parse time 'foo': could not convert string to Timestamp", str(cm.exception))

    def test_pass_time(self):
        """
        Tests that, when a time arg is passed, it is used.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-05-01",
                                periods=3,
                                freq="D",
                                tz="America/New_York",
                                name="Date"))

        def mock_download_ibkr_shortable_shares(f, *args, **kwargs):
            shortable_shares = pd.DataFrame(
                dict(Date=["2018-04-20T21:45:02",
                           "2018-05-01T13:45:02",
                           "2018-05-02T12:30:03",
                           "2018-04-20T21:45:02",
                           "2018-05-01T14:15:02",
                           "2018-05-02T14:30:03",
                           "2018-05-03T08:30:00",
                           ],
                     Sid=["FI12345",
                          "FI12345",
                          "FI12345",
                          "FI23456",
                          "FI23456",
                          "FI23456",
                          "FI23456"],
                     Quantity=[10000,
                               9000,
                               80000,
                               3500,
                               3600,
                               3800,
                               3100
                               ]))
            shortable_shares.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_ibkr_shortable_shares', new=mock_download_ibkr_shortable_shares):
            shortable_shares = get_ibkr_shortable_shares_reindexed_like(
                closes,
                time="09:30:00")

        shortable_shares = shortable_shares.reset_index()
        shortable_shares.loc[:, "Date"] = shortable_shares.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            shortable_shares.to_dict(orient="records"),
            [{'Date': '2018-05-01T00:00:00-0400', "FI12345": 10000.0, "FI23456": 3500.0},
             {'Date': '2018-05-02T00:00:00-0400', "FI12345": 80000.0, "FI23456": 3600.0},
             {'Date': '2018-05-03T00:00:00-0400', "FI12345": 80000.0, "FI23456": 3100.0}]
        )

    def test_no_pass_time(self):
        """
        Tests that, when no time arg is passed, the reindex_like times are
        used, which for a date index are 00:00:00.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-05-01",
                                periods=3,
                                freq="D",
                                tz="America/New_York",
                                name="Date"))

        def mock_download_ibkr_shortable_shares(f, *args, **kwargs):
            shortable_shares = pd.DataFrame(
                dict(Date=["2018-04-20T21:45:02",
                           "2018-05-01T13:45:02",
                           "2018-05-02T12:30:03",
                           "2018-04-20T21:45:02",
                           "2018-05-01T14:15:02",
                           "2018-05-02T14:30:03",
                           "2018-05-03T08:30:00",
                           ],
                     Sid=["FI12345",
                          "FI12345",
                          "FI12345",
                          "FI23456",
                          "FI23456",
                          "FI23456",
                          "FI23456"],
                     Quantity=[10000,
                               9000,
                               80000,
                               3500,
                               3600,
                               3800,
                               3100
                               ]))
            shortable_shares.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_ibkr_shortable_shares', new=mock_download_ibkr_shortable_shares):
            shortable_shares = get_ibkr_shortable_shares_reindexed_like(closes)

        shortable_shares = shortable_shares.reset_index()
        shortable_shares.loc[:, "Date"] = shortable_shares.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            shortable_shares.to_dict(orient="records"),
            [{'Date': '2018-05-01T00:00:00-0400', "FI12345": 10000.0, "FI23456": 3500.0},
             {'Date': '2018-05-02T00:00:00-0400', "FI12345": 9000.0, "FI23456": 3600.0},
             {'Date': '2018-05-03T00:00:00-0400', "FI12345": 80000.0, "FI23456": 3800.0}]
        )

    def test_fillna_0_after_start_date(self):
        """
        Tests that NaN data after 2018-04-15 is converted to 0 but NaN data
        before is not.
        """
        closes = pd.DataFrame(
            np.random.rand(5,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-04-13",
                                periods=5,
                                freq="D",
                                tz="America/New_York",
                                name="Date"))

        def mock_download_ibkr_shortable_shares(f, *args, **kwargs):
            shortable_shares = pd.DataFrame(
                dict(Date=["2018-04-15T21:45:02",
                           "2018-04-16T13:45:02",
                           "2018-04-17T12:30:03",
                           ],
                     Sid=["FI12345",
                          "FI12345",
                          "FI12345",
                          ],
                     Quantity=[10000,
                               9000,
                               80000,
                               ]))
            shortable_shares.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_ibkr_shortable_shares', new=mock_download_ibkr_shortable_shares):
            shortable_shares = get_ibkr_shortable_shares_reindexed_like(closes)

        # replace nan with "nan" to allow equality comparisons
        shortable_shares = shortable_shares.where(shortable_shares.notnull(), "nan")
        shortable_shares = shortable_shares.reset_index()
        shortable_shares.loc[:, "Date"] = shortable_shares.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            shortable_shares.to_dict(orient="records"),
            [{'Date': '2018-04-13T00:00:00-0400', "FI12345": "nan", "FI23456": "nan"},
             {'Date': '2018-04-14T00:00:00-0400', "FI12345": "nan", "FI23456": "nan"},
             {'Date': '2018-04-15T00:00:00-0400', "FI12345": "nan", "FI23456": "nan"},
             {'Date': '2018-04-16T00:00:00-0400', "FI12345": 10000.0, "FI23456": 0.0},
             {'Date': '2018-04-17T00:00:00-0400', "FI12345": 9000.0, "FI23456": 0.0}]
        )

    def test_borrow_fees(self):
        """
        Tests get_ibkr_borrow_fees_reindexed_like. (get_ibkr_borrow_fees_reindexed_like
        and get_ibkr_shortable_shares_reindexed_like share a base function so for
        the most part testing one tests both.)
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-05-01",
                                periods=3,
                                freq="D",
                                tz="America/New_York",
                                name="Date"))

        def mock_download_ibkr_borrow_fees(f, *args, **kwargs):
            borrow_fees = pd.DataFrame(
                dict(Date=["2018-04-20T21:45:02",
                           "2018-05-01T13:45:02",
                           "2018-05-02T12:30:03",
                           "2018-04-20T21:45:02",
                           "2018-05-01T14:15:02",
                           "2018-05-02T14:30:03",
                           "2018-05-03T08:30:00",
                           ],
                     Sid=["FI12345",
                          "FI12345",
                          "FI12345",
                          "FI23456",
                          "FI23456",
                          "FI23456",
                          "FI23456"],
                     FeeRate=[1.5,
                              1.65,
                              1.7,
                              0.35,
                              0.40,
                              0.44,
                              0.23
                              ]))
            borrow_fees.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_ibkr_borrow_fees', new=mock_download_ibkr_borrow_fees):
            borrow_fees = get_ibkr_borrow_fees_reindexed_like(
                closes,
                time="09:30:00")

        borrow_fees = borrow_fees.reset_index()
        borrow_fees.loc[:, "Date"] = borrow_fees.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            borrow_fees.to_dict(orient="records"),
            [{'Date': '2018-05-01T00:00:00-0400', "FI12345": 1.5, "FI23456": 0.35},
             {'Date': '2018-05-02T00:00:00-0400', "FI12345": 1.7, "FI23456": 0.40},
             {'Date': '2018-05-03T00:00:00-0400', "FI12345": 1.7, "FI23456": 0.23}]
        )

    def test_alpaca_etb(self):
        """
        Tests get_alpaca_etb_reindexed_like.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2019-05-01",
                                periods=3,
                                freq="D",
                                tz="America/New_York",
                                name="Date"))

        def mock_download_alpaca_etb(f, *args, **kwargs):
            etb = pd.DataFrame(
                dict(Date=["2019-05-01",
                           "2019-05-02",
                           "2019-05-03",
                           "2019-05-01",
                           "2019-05-02",
                           "2019-05-03",
                           ],
                     Sid=["FI12345",
                          "FI12345",
                          "FI12345",
                          "FI23456",
                          "FI23456",
                          "FI23456"],
                     EasyToBorrow=[1,
                                   0,
                                   1,
                                   0,
                                   0,
                                   1,
                                   ]))
            etb.to_csv(f, index=False)
            f.seek(0)

        with patch('quantrocket.fundamental.download_alpaca_etb', new=mock_download_alpaca_etb):
            etb = get_alpaca_etb_reindexed_like(closes)

        etb = etb.reset_index()
        etb.loc[:, "Date"] = etb.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            etb.to_dict(orient="records"),
            [{'Date': '2019-05-01T00:00:00-0400', "FI12345": True, "FI23456": False},
             {'Date': '2019-05-02T00:00:00-0400', "FI12345": False, "FI23456": False},
             {'Date': '2019-05-03T00:00:00-0400', "FI12345": True, "FI23456": True}]
        )
class SharadarFundamentalsReindexedLikeTestCase(unittest.TestCase):
    """
    Tests for get_sharadar_fundamentals_reindexed_like.
    """

    def test_complain_if_time_level_in_index(self):
        """
        Tests error handling when reindex_like has a Time level in the index.
        """
        closes = pd.DataFrame(
            np.random.rand(6,2),
            columns=["FI12345","FI23456"],
            index=pd.MultiIndex.from_product((
                pd.date_range(start="2018-01-01", periods=3, freq="D"),
                ["15:00:00","15:15:00"]), names=["Date", "Time"]))

        with self.assertRaises(ParameterError) as cm:
            get_sharadar_fundamentals_reindexed_like(closes)

        self.assertIn("reindex_like should not have 'Time' in index", str(cm.exception))

    def test_complain_if_date_level_not_in_index(self):
        """
        Tests error handling when reindex_like doesn't have an index named
        Date.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-01-01", periods=3, freq="D"))

        with self.assertRaises(ParameterError) as cm:
            get_sharadar_fundamentals_reindexed_like(closes)

        self.assertIn("reindex_like must have index called 'Date'", str(cm.exception))

    def test_complain_if_not_datetime_index(self):
        """
        Tests error handling when the reindex_like index is named Date but is
        not a DatetimeIndex.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.Index(["foo","bar","bat"], name="Date"))

        with self.assertRaises(ParameterError) as cm:
            get_sharadar_fundamentals_reindexed_like(closes)

        self.assertIn("reindex_like must have a DatetimeIndex", str(cm.exception))

    @patch("quantrocket.fundamental.download_sharadar_fundamentals")
    def test_pass_args_correctly(self,
                                 mock_download_sharadar_fundamentals):
        """
        Tests that sids, date ranges, and other args are correctly
        passed to download_sharadar_fundamentals.
        """
        closes = pd.DataFrame(
            np.random.rand(6,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-03-01", periods=6, freq="MS", name="Date"))

        def _mock_download_sharadar_fundamentals(filepath_or_buffer, *args, **kwargs):
            fundamentals = pd.DataFrame(
                dict(
                    DATEKEY=[
                        "2018-03-31",
                        "2018-03-31",
                        "2018-03-31",
                        "2018-03-31",
                        "2018-06-30",
                        "2018-06-30"
                        ],
                    REPORTPERIOD=[
                        "2018-03-31",
                        "2018-03-31",
                        "2018-03-31",
                        "2018-03-31",
                        "2018-06-30",
                        "2018-06-30"
                        ],
                    Sid=[
                        "FI12345",
                        "FI12345",
                        "FI23456",
                        "FI23456",
                        "FI12345",
                        "FI12345",
                        ],
                    EPS=[
                        565,
                        89,
                        235,
                        73,
                        580,
                        92
                        ]))
            fundamentals.to_csv(filepath_or_buffer, index=False)
            filepath_or_buffer.seek(0)

        mock_download_sharadar_fundamentals.side_effect = _mock_download_sharadar_fundamentals

        get_sharadar_fundamentals_reindexed_like(
            closes, fields=["EPS", "DATEKEY"], dimension="ARQ")

        sharadar_fundamentals_call = mock_download_sharadar_fundamentals.mock_calls[0]
        _, args, kwargs = sharadar_fundamentals_call
        self.assertEqual(kwargs["start_date"], "2016-09-02") # 365+180 days before reindex_like min date
        self.assertEqual(kwargs["end_date"], "2018-08-01")
        self.assertEqual(kwargs["fields"], ["EPS", "DATEKEY"])
        self.assertEqual(kwargs["dimensions"], "ARQ")

    def test_dedupe_datekey(self):
        """
        Tests that duplicate DATEKEYS (resulting from reporting several
        fiscal periods at once) are deduped by keeping the latest record.
        """
        closes = pd.DataFrame(
            np.random.rand(6,1),
            columns=["FI12345"],
            index=pd.date_range(start="2018-03-01", periods=6, freq="MS", name="Date"))

        def mock_download_sharadar_fundamentals(filepath_or_buffer, *args, **kwargs):
            fundamentals = pd.DataFrame(
                dict(
                    REPORTPERIOD=[
                        "2018-03-30",
                        "2018-06-30"
                        ],
                    DATEKEY=[
                        "2018-07-23",
                        "2018-07-23",
                        ],
                    Sid=[
                        "FI12345",
                        "FI12345",
                        ],
                    EPS=[
                        565,
                        580
                        ]))
            fundamentals.to_csv(filepath_or_buffer, index=False)
            filepath_or_buffer.seek(0)

        with patch('quantrocket.fundamental.download_sharadar_fundamentals', new=mock_download_sharadar_fundamentals):
            fundamentals = get_sharadar_fundamentals_reindexed_like(
                closes, fields="EPS")

        self.assertSetEqual(set(fundamentals.index.get_level_values("Field")), {"EPS"})
        eps = fundamentals.loc["EPS"]
        self.assertListEqual(list(eps.index), list(closes.index))
        # compare against closes.columns (not eps.columns, which would be a
        # tautology) to verify the output is shaped like reindex_like
        self.assertListEqual(list(eps.columns), list(closes.columns))
        # the later fiscal period's EPS should win for the shared DATEKEY
        self.assertEqual(eps["FI12345"].loc["2018-08-01"], 580)

    def test_ffill_no_lookahead_bias(self):
        """
        Tests that financial statement metrics are ffilled and are shifted
        forward 1 period to avoid lookahead bias.
        """
        closes = pd.DataFrame(
            np.random.rand(6,1),
            columns=["FI12345"],
            index=pd.date_range(start="2018-07-20", periods=6, freq="D", name="Date"))

        def mock_download_sharadar_fundamentals(filepath_or_buffer, *args, **kwargs):
            fundamentals = pd.DataFrame(
                dict(
                    REPORTPERIOD=[
                        "2018-03-30",
                        "2018-06-30"
                        ],
                    DATEKEY=[
                        "2018-04-23",
                        "2018-07-23",
                        ],
                    Sid=[
                        "FI12345",
                        "FI12345",
                        ],
                    EPS=[
                        565,
                        580
                        ]))
            fundamentals.to_csv(filepath_or_buffer, index=False)
            filepath_or_buffer.seek(0)

        with patch('quantrocket.fundamental.download_sharadar_fundamentals', new=mock_download_sharadar_fundamentals):
            fundamentals = get_sharadar_fundamentals_reindexed_like(
                closes, fields=["EPS"])

        self.assertSetEqual(set(fundamentals.index.get_level_values("Field")), {"EPS"})
        eps = fundamentals.loc["EPS"]
        # compare against closes (not eps itself, which would be a tautology)
        # to verify the output is shaped like reindex_like
        self.assertListEqual(list(eps.index), list(closes.index))
        self.assertListEqual(list(eps.columns), list(closes.columns))
        # on the DATEKEY itself the prior period's value still applies...
        self.assertEqual(eps["FI12345"].loc["2018-07-23"], 565)
        # ...and the newly reported value takes effect the next day
        self.assertEqual(eps["FI12345"].loc["2018-07-24"], 580)

    def test_tz_aware_index(self):
        """
        Tests that reindex_like.index can be tz-naive or tz-aware.
        """
        def mock_download_sharadar_fundamentals(filepath_or_buffer, *args, **kwargs):
            fundamentals = pd.DataFrame(
                dict(
                    REPORTPERIOD=[
                        "2018-03-31",
                        "2018-06-30"
                        ],
                    DATEKEY=[
                        "2018-04-23",
                        "2018-07-06",
                        ],
                    Sid=[
                        "FI12345",
                        "FI12345",
                        ],
                    REVENUE=[
                        580,
                        542
                        ]))
            fundamentals.to_csv(filepath_or_buffer, index=False)
            filepath_or_buffer.seek(0)

        with patch('quantrocket.fundamental.download_sharadar_fundamentals', new=mock_download_sharadar_fundamentals):
            # request with tz_naive
            closes = pd.DataFrame(
                np.random.rand(4,1),
                columns=["FI12345"],
                index=pd.date_range(start="2018-07-05", periods=4, freq="D", name="Date"))

            fundamentals = get_sharadar_fundamentals_reindexed_like(
                closes, fields="REVENUE")

        self.assertSetEqual(set(fundamentals.index.get_level_values("Field")), {"REVENUE"})
        revenues = fundamentals.loc["REVENUE"]
        # compare against closes (not revenues itself, which would be a
        # tautology) to verify the output is shaped like reindex_like
        self.assertListEqual(list(revenues.index), list(closes.index))
        self.assertListEqual(list(revenues.columns), list(closes.columns))
        revenues = revenues.reset_index()
        revenues.loc[:, "Date"] = revenues.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            revenues.to_dict(orient="records"),
            [{'Date': '2018-07-05T00:00:00', "FI12345": 580.0},
             {'Date': '2018-07-06T00:00:00', "FI12345": 580.0},
             {'Date': '2018-07-07T00:00:00', "FI12345": 542.0},
             {'Date': '2018-07-08T00:00:00', "FI12345": 542.0}]
        )

        with patch('quantrocket.fundamental.download_sharadar_fundamentals', new=mock_download_sharadar_fundamentals):
            # request with tz-aware
            closes = pd.DataFrame(
                np.random.rand(4,1),
                columns=["FI12345"],
                index=pd.date_range(start="2018-07-05", periods=4, freq="D", tz="America/New_York", name="Date"))

            fundamentals = get_sharadar_fundamentals_reindexed_like(
                closes, fields="REVENUE")

        self.assertSetEqual(set(fundamentals.index.get_level_values("Field")), {"REVENUE"})
        revenues = fundamentals.loc["REVENUE"]
        # as above, compare shape against the tz-aware closes
        self.assertListEqual(list(revenues.index), list(closes.index))
        self.assertListEqual(list(revenues.columns), list(closes.columns))
        revenues = revenues.reset_index()
        revenues.loc[:, "Date"] = revenues.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            revenues.to_dict(orient="records"),
            [{'Date': '2018-07-05T00:00:00-0400', "FI12345": 580.0},
             {'Date': '2018-07-06T00:00:00-0400', "FI12345": 580.0},
             {'Date': '2018-07-07T00:00:00-0400', "FI12345": 542.0},
             {'Date': '2018-07-08T00:00:00-0400', "FI12345": 542.0}]
        )
class SharadarInstitutionsReindexedLikeTestCase(unittest.TestCase):
def test_complain_if_time_level_in_index(self):
"""
Tests error handling when reindex_like has a Time level in the index.
"""
closes = pd.DataFrame(
np.random.rand(6,2),
columns=["FI12345","FI23456"],
index=pd.MultiIndex.from_product((
pd.date_range(start="2018-01-01", periods=3, freq="D"),
["15:00:00","15:15:00"]), names=["Date", "Time"]))
with self.assertRaises(ParameterError) as cm:
get_sharadar_institutions_reindexed_like(closes)
self.assertIn("reindex_like should not have 'Time' in index", str(cm.exception))
def test_complain_if_date_level_not_in_index(self):
"""
Tests error handling when reindex_like doesn't have an index named
Date.
"""
closes = pd.DataFrame(
np.random.rand(3,2),
columns=["FI12345","FI23456"],
index=pd.date_range(start="2018-01-01", periods=3, freq="D"))
with self.assertRaises(ParameterError) as cm:
get_sharadar_institutions_reindexed_like(closes)
self.assertIn("reindex_like must have index called 'Date'", str(cm.exception))
def test_complain_if_not_datetime_index(self):
"""
Tests error handling when the reindex_like index is named Date but is
not a DatetimeIndex.
"""
closes = pd.DataFrame(
np.random.rand(3,2),
columns=["FI12345","FI23456"],
index=pd.Index(["foo","bar","bat"], name="Date"))
with self.assertRaises(ParameterError) as cm:
get_sharadar_institutions_reindexed_like(closes)
self.assertIn("reindex_like must have a DatetimeIndex", str(cm.exception))
@patch("quantrocket.fundamental.download_sharadar_institutions")
def test_pass_args_correctly(self,
mock_download_sharadar_institutions):
"""
Tests that sids, date ranges, and and other args are correctly
passed to download_sharadar_institutions.
"""
closes = pd.DataFrame(
np.random.rand(6,2),
columns=["FI12345","FI23456"],
index=pd.date_range(start="2018-08-13", periods=6, freq="D", name="Date"))
def _mock_download_sharadar_institutions(filepath_or_buffer, *args, **kwargs):
institutions = pd.DataFrame(
dict(
CALENDARDATE=[
"2018-03-31",
"2018-06-30",
"2018-03-31",
"2018-06-30"
],
Sid=[
"FI12345",
"FI12345",
"FI23456",
"FI23456",
],
SHRVALUE=[
500000,
600000,
700000,
800000,
],
TOTALVALUE=[
1500000,
1600000,
1700000,
1800000,
]))
institutions.to_csv(filepath_or_buffer, index=False)
filepath_or_buffer.seek(0)
mock_download_sharadar_institutions.side_effect = _mock_download_sharadar_institutions
get_sharadar_institutions_reindexed_like(
closes, fields=["SHRVALUE", "TOTALVALUE"])
sharadar_institutions_call = mock_download_sharadar_institutions.mock_calls[0]
_, args, kwargs = sharadar_institutions_call
self.assertEqual(kwargs["start_date"], "2017-02-14") # 365+180 days before reindex_like min date
self.assertEqual(kwargs["end_date"], "2018-08-18")
self.assertEqual(kwargs["fields"], ["SHRVALUE", "TOTALVALUE"])
def test_ffill_and_shift(self):
    """
    Tests that metrics are ffilled and are shifted forward to avoid
    lookahead bias.
    """
    closes = pd.DataFrame(
        np.random.rand(6,2),
        columns=["FI12345","FI23456"],
        index=pd.date_range(start="2018-08-11", periods=6, freq="D", name="Date"))

    def mock_download_sharadar_institutions(filepath_or_buffer, *args, **kwargs):
        # Two quarterly observations (Q1 and Q2 2018) per sid.
        institutions = pd.DataFrame(
            dict(
                CALENDARDATE=[
                    "2018-03-31",
                    "2018-06-30",
                    "2018-03-31",
                    "2018-06-30"
                ],
                Sid=[
                    "FI12345",
                    "FI12345",
                    "FI23456",
                    "FI23456",
                ],
                SHRVALUE=[
                    500000,
                    600000,
                    700000,
                    800000,
                ],
                TOTALVALUE=[
                    1500000,
                    1600000,
                    1700000,
                    1800000,
                ]))
        institutions.to_csv(filepath_or_buffer, index=False)
        filepath_or_buffer.seek(0)

    with patch("quantrocket.fundamental.download_sharadar_institutions", new=mock_download_sharadar_institutions):
        institutions = get_sharadar_institutions_reindexed_like(
            closes, fields=["SHRVALUE", "TOTALVALUE"])

    self.assertSetEqual(set(institutions.index.get_level_values("Field")), {"SHRVALUE", "TOTALVALUE"})
    # With the default shift, Q2 values first appear on 2018-08-14.
    sharevalues = institutions.loc["SHRVALUE"]
    sharevalues = sharevalues.reset_index()
    sharevalues.loc[:, "Date"] = sharevalues.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
    self.assertListEqual(
        sharevalues.to_dict(orient="records"),
        [{'Date': '2018-08-11T00:00:00', 'FI12345': 500000.0, 'FI23456': 700000.0},
         {'Date': '2018-08-12T00:00:00', 'FI12345': 500000.0, 'FI23456': 700000.0},
         {'Date': '2018-08-13T00:00:00', 'FI12345': 500000.0, 'FI23456': 700000.0},
         {'Date': '2018-08-14T00:00:00', 'FI12345': 600000.0, 'FI23456': 800000.0},
         {'Date': '2018-08-15T00:00:00', 'FI12345': 600000.0, 'FI23456': 800000.0},
         {'Date': '2018-08-16T00:00:00', 'FI12345': 600000.0, 'FI23456': 800000.0}]
    )
    totalvalues = institutions.loc["TOTALVALUE"]
    totalvalues = totalvalues.reset_index()
    totalvalues.loc[:, "Date"] = totalvalues.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
    self.assertListEqual(
        totalvalues.to_dict(orient="records"),
        [{'Date': '2018-08-11T00:00:00', 'FI12345': 1500000.0, 'FI23456': 1700000.0},
         {'Date': '2018-08-12T00:00:00', 'FI12345': 1500000.0, 'FI23456': 1700000.0},
         {'Date': '2018-08-13T00:00:00', 'FI12345': 1500000.0, 'FI23456': 1700000.0},
         {'Date': '2018-08-14T00:00:00', 'FI12345': 1600000.0, 'FI23456': 1800000.0},
         {'Date': '2018-08-15T00:00:00', 'FI12345': 1600000.0, 'FI23456': 1800000.0},
         {'Date': '2018-08-16T00:00:00', 'FI12345': 1600000.0, 'FI23456': 1800000.0}]
    )

    # Repeat with a custom shift; with shift=47 the Q2 values only
    # appear on the final date in the window.
    with patch("quantrocket.fundamental.download_sharadar_institutions", new=mock_download_sharadar_institutions):
        institutions = get_sharadar_institutions_reindexed_like(
            closes, fields=["SHRVALUE", "TOTALVALUE"], shift=47)

    self.assertSetEqual(set(institutions.index.get_level_values("Field")), {"SHRVALUE", "TOTALVALUE"})
    sharevalues = institutions.loc["SHRVALUE"]
    sharevalues = sharevalues.reset_index()
    sharevalues.loc[:, "Date"] = sharevalues.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
    self.assertListEqual(
        sharevalues.to_dict(orient="records"),
        [{'Date': '2018-08-11T00:00:00', 'FI12345': 500000.0, 'FI23456': 700000.0},
         {'Date': '2018-08-12T00:00:00', 'FI12345': 500000.0, 'FI23456': 700000.0},
         {'Date': '2018-08-13T00:00:00', 'FI12345': 500000.0, 'FI23456': 700000.0},
         {'Date': '2018-08-14T00:00:00', 'FI12345': 500000.0, 'FI23456': 700000.0},
         {'Date': '2018-08-15T00:00:00', 'FI12345': 500000.0, 'FI23456': 700000.0},
         {'Date': '2018-08-16T00:00:00', 'FI12345': 600000.0, 'FI23456': 800000.0}]
    )
def test_tz_aware_index(self):
    """
    Tests that reindex_like.index can be tz-naive or tz-aware.
    """
    def mock_download_sharadar_institutions(filepath_or_buffer, *args, **kwargs):
        # Canned quarterly data, identical for both requests below.
        institutions = pd.DataFrame(
            dict(
                CALENDARDATE=[
                    "2018-03-31",
                    "2018-06-30",
                    "2018-03-31",
                    "2018-06-30"
                ],
                Sid=[
                    "FI12345",
                    "FI12345",
                    "FI23456",
                    "FI23456",
                ],
                SHRVALUE=[
                    500000,
                    600000,
                    700000,
                    800000,
                ],
                TOTALVALUE=[
                    1500000,
                    1600000,
                    1700000,
                    1800000,
                ]))
        institutions.to_csv(filepath_or_buffer, index=False)
        filepath_or_buffer.seek(0)

    # request with tz_naive
    with patch("quantrocket.fundamental.download_sharadar_institutions", new=mock_download_sharadar_institutions):
        closes = pd.DataFrame(
            np.random.rand(6,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-08-11", periods=6, freq="D", name="Date"))

        institutions = get_sharadar_institutions_reindexed_like(
            closes, fields=["SHRVALUE", "TOTALVALUE"])

    self.assertSetEqual(set(institutions.index.get_level_values("Field")), {"SHRVALUE", "TOTALVALUE"})
    sharevalues = institutions.loc["SHRVALUE"]
    sharevalues = sharevalues.reset_index()
    sharevalues.loc[:, "Date"] = sharevalues.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
    self.assertListEqual(
        sharevalues.to_dict(orient="records"),
        [{'Date': '2018-08-11T00:00:00', 'FI12345': 500000.0, 'FI23456': 700000.0},
         {'Date': '2018-08-12T00:00:00', 'FI12345': 500000.0, 'FI23456': 700000.0},
         {'Date': '2018-08-13T00:00:00', 'FI12345': 500000.0, 'FI23456': 700000.0},
         {'Date': '2018-08-14T00:00:00', 'FI12345': 600000.0, 'FI23456': 800000.0},
         {'Date': '2018-08-15T00:00:00', 'FI12345': 600000.0, 'FI23456': 800000.0},
         {'Date': '2018-08-16T00:00:00', 'FI12345': 600000.0, 'FI23456': 800000.0}]
    )

    # request with tz aware; the output index preserves the -0400 offset
    with patch("quantrocket.fundamental.download_sharadar_institutions", new=mock_download_sharadar_institutions):
        closes = pd.DataFrame(
            np.random.rand(6,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-08-11", periods=6, tz='America/New_York', freq="D", name="Date"))

        institutions = get_sharadar_institutions_reindexed_like(
            closes, fields=["SHRVALUE", "TOTALVALUE"])

    self.assertSetEqual(set(institutions.index.get_level_values("Field")), {"SHRVALUE", "TOTALVALUE"})
    sharevalues = institutions.loc["SHRVALUE"]
    sharevalues = sharevalues.reset_index()
    sharevalues.loc[:, "Date"] = sharevalues.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
    self.assertListEqual(
        sharevalues.to_dict(orient="records"),
        [{'Date': '2018-08-11T00:00:00-0400', 'FI12345': 500000.0, 'FI23456': 700000.0},
         {'Date': '2018-08-12T00:00:00-0400', 'FI12345': 500000.0, 'FI23456': 700000.0},
         {'Date': '2018-08-13T00:00:00-0400', 'FI12345': 500000.0, 'FI23456': 700000.0},
         {'Date': '2018-08-14T00:00:00-0400', 'FI12345': 600000.0, 'FI23456': 800000.0},
         {'Date': '2018-08-15T00:00:00-0400', 'FI12345': 600000.0, 'FI23456': 800000.0},
         {'Date': '2018-08-16T00:00:00-0400', 'FI12345': 600000.0, 'FI23456': 800000.0}]
    )
class SharadarSEC8ReindexedLikeTestCase(unittest.TestCase):
    """Tests for get_sharadar_sec8_reindexed_like (SEC Form 8-K events)."""

    def test_complain_if_time_level_in_index(self):
        """
        Tests error handling when reindex_like has a Time level in the index.
        """
        closes = pd.DataFrame(
            np.random.rand(6,2),
            columns=["FI12345","FI23456"],
            index=pd.MultiIndex.from_product((
                pd.date_range(start="2018-01-01", periods=3, freq="D"),
                ["15:00:00","15:15:00"]), names=["Date", "Time"]))

        with self.assertRaises(ParameterError) as cm:
            get_sharadar_sec8_reindexed_like(closes)

        self.assertIn("reindex_like should not have 'Time' in index", str(cm.exception))

    def test_complain_if_date_level_not_in_index(self):
        """
        Tests error handling when reindex_like doesn't have an index named
        Date.
        """
        # The date_range here has no name= kwarg, so the index is unnamed.
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-01-01", periods=3, freq="D"))

        with self.assertRaises(ParameterError) as cm:
            get_sharadar_sec8_reindexed_like(closes)

        self.assertIn("reindex_like must have index called 'Date'", str(cm.exception))

    def test_complain_if_not_datetime_index(self):
        """
        Tests error handling when the reindex_like index is named Date but is
        not a DatetimeIndex.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.Index(["foo","bar","bat"], name="Date"))

        with self.assertRaises(ParameterError) as cm:
            get_sharadar_sec8_reindexed_like(closes)

        self.assertIn("reindex_like must have a DatetimeIndex", str(cm.exception))

    @patch("quantrocket.fundamental.download_sharadar_sec8")
    def test_pass_args_correctly(self,
                                 mock_download_sharadar_sec8):
        """
        Tests that sids, date ranges, and event codes are correctly
        passed to download_sharadar_sec8.
        """
        closes = pd.DataFrame(
            np.random.rand(6,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-08-13", periods=6, freq="D", name="Date"))

        def _mock_download_sharadar_sec8(filepath_or_buffer, *args, **kwargs):
            # Write a canned 8-K events CSV to the buffer and rewind it.
            sec8 = pd.DataFrame(
                dict(
                    DATE=[
                        "2018-08-15",
                        "2018-08-16"
                    ],
                    Sid=[
                        "FI12345",
                        "FI23456",
                    ],
                    EVENTCODE=[
                        13,
                        13
                    ],
                ))
            sec8.to_csv(filepath_or_buffer, index=False)
            filepath_or_buffer.seek(0)

        mock_download_sharadar_sec8.side_effect = _mock_download_sharadar_sec8

        get_sharadar_sec8_reindexed_like(
            closes, event_codes=[13])

        # Verify the derived query parameters of the first call.
        sharadar_sec8_call = mock_download_sharadar_sec8.mock_calls[0]
        _, args, kwargs = sharadar_sec8_call
        self.assertEqual(kwargs["start_date"], "2018-08-13")
        self.assertEqual(kwargs["end_date"], "2018-08-18")
        self.assertEqual(kwargs["event_codes"], [13])
        self.assertEqual(kwargs["fields"], ["Sid","DATE","EVENTCODE"])

    def test_single_code(self):
        """
        Tests requesting a single event code.
        """
        closes = pd.DataFrame(
            np.random.rand(6,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-08-13", periods=6, freq="D", name="Date"))

        def mock_download_sharadar_sec8(filepath_or_buffer, *args, **kwargs):
            sec8 = pd.DataFrame(
                dict(
                    DATE=[
                        "2018-08-15",
                        "2018-08-16"
                    ],
                    Sid=[
                        "FI12345",
                        "FI23456",
                    ],
                    EVENTCODE=[
                        13,
                        13
                    ],
                ))
            sec8.to_csv(filepath_or_buffer, index=False)
            filepath_or_buffer.seek(0)

        with patch("quantrocket.fundamental.download_sharadar_sec8", new=mock_download_sharadar_sec8):
            have_events = get_sharadar_sec8_reindexed_like(closes, event_codes=[13])

        # True only on the exact event date for the affected sid.
        have_events = have_events.reset_index()
        have_events.loc[:, "Date"] = have_events.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            have_events.to_dict(orient="records"),
            [{'Date': '2018-08-13T00:00:00', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-14T00:00:00', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-15T00:00:00', 'FI12345': True, 'FI23456': False},
             {'Date': '2018-08-16T00:00:00', 'FI12345': False, 'FI23456': True},
             {'Date': '2018-08-17T00:00:00', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-18T00:00:00', 'FI12345': False, 'FI23456': False}]
        )

    def test_multiple_codes(self):
        """
        Tests requesting multiple event codes.
        """
        closes = pd.DataFrame(
            np.random.rand(6,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-08-13", periods=6, freq="D", name="Date"))

        def mock_download_sharadar_sec8(filepath_or_buffer, *args, **kwargs):
            # One event per sid, with different event codes (13 and 14).
            sec8 = pd.DataFrame(
                dict(
                    DATE=[
                        "2018-08-15",
                        "2018-08-17"
                    ],
                    Sid=[
                        "FI12345",
                        "FI23456",
                    ],
                    EVENTCODE=[
                        13,
                        14
                    ],
                ))
            sec8.to_csv(filepath_or_buffer, index=False)
            filepath_or_buffer.seek(0)

        with patch("quantrocket.fundamental.download_sharadar_sec8", new=mock_download_sharadar_sec8):
            have_events = get_sharadar_sec8_reindexed_like(closes, event_codes=[13, 14])

        have_events = have_events.reset_index()
        have_events.loc[:, "Date"] = have_events.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            have_events.to_dict(orient="records"),
            [{'Date': '2018-08-13T00:00:00', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-14T00:00:00', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-15T00:00:00', 'FI12345': True, 'FI23456': False},
             {'Date': '2018-08-16T00:00:00', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-17T00:00:00', 'FI12345': False, 'FI23456': True},
             {'Date': '2018-08-18T00:00:00', 'FI12345': False, 'FI23456': False}]
        )

    def test_no_matching_events(self):
        """
        Tests that False is return (not an exception) when there are no matching events.
        """
        closes = pd.DataFrame(
            np.random.rand(6,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-08-13", periods=6, freq="D", name="Date"))

        def mock_download_sharadar_sec8(filepath_or_buffer, *args, **kwargs):
            # Simulate the no-data error path of the downloader.
            raise NoFundamentalData("no sec8 data matches the query parameters")

        with patch("quantrocket.fundamental.download_sharadar_sec8", new=mock_download_sharadar_sec8):
            have_events = get_sharadar_sec8_reindexed_like(closes, event_codes=[13])

        have_events = have_events.reset_index()
        have_events.loc[:, "Date"] = have_events.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            have_events.to_dict(orient="records"),
            [{'Date': '2018-08-13T00:00:00', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-14T00:00:00', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-15T00:00:00', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-16T00:00:00', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-17T00:00:00', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-18T00:00:00', 'FI12345': False, 'FI23456': False}]
        )

    def test_tz_aware_index(self):
        """
        Tests that reindex_like.index can be tz-naive or tz-aware.
        """
        def mock_download_sharadar_sec8(filepath_or_buffer, *args, **kwargs):
            sec8 = pd.DataFrame(
                dict(
                    DATE=[
                        "2018-08-15",
                        "2018-08-16"
                    ],
                    Sid=[
                        "FI12345",
                        "FI23456",
                    ],
                    EVENTCODE=[
                        13,
                        13
                    ],
                ))
            sec8.to_csv(filepath_or_buffer, index=False)
            filepath_or_buffer.seek(0)

        # request with tz_naive
        with patch("quantrocket.fundamental.download_sharadar_sec8", new=mock_download_sharadar_sec8):
            closes = pd.DataFrame(
                np.random.rand(6,2),
                columns=["FI12345","FI23456"],
                index=pd.date_range(start="2018-08-13", periods=6, freq="D", name="Date"))

            have_events = get_sharadar_sec8_reindexed_like(closes, event_codes=[13])

        have_events = have_events.reset_index()
        have_events.loc[:, "Date"] = have_events.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            have_events.to_dict(orient="records"),
            [{'Date': '2018-08-13T00:00:00', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-14T00:00:00', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-15T00:00:00', 'FI12345': True, 'FI23456': False},
             {'Date': '2018-08-16T00:00:00', 'FI12345': False, 'FI23456': True},
             {'Date': '2018-08-17T00:00:00', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-18T00:00:00', 'FI12345': False, 'FI23456': False}]
        )

        # request with tz aware; output dates carry the -0400 offset
        with patch("quantrocket.fundamental.download_sharadar_sec8", new=mock_download_sharadar_sec8):
            closes = pd.DataFrame(
                np.random.rand(6,2),
                columns=["FI12345","FI23456"],
                index=pd.date_range(start="2018-08-13", periods=6, tz='America/New_York', freq="D", name="Date"))

            have_events = get_sharadar_sec8_reindexed_like(closes, event_codes=[13])

        have_events = have_events.reset_index()
        have_events.loc[:, "Date"] = have_events.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            have_events.to_dict(orient="records"),
            [{'Date': '2018-08-13T00:00:00-0400', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-14T00:00:00-0400', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-15T00:00:00-0400', 'FI12345': True, 'FI23456': False},
             {'Date': '2018-08-16T00:00:00-0400', 'FI12345': False, 'FI23456': True},
             {'Date': '2018-08-17T00:00:00-0400', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-18T00:00:00-0400', 'FI12345': False, 'FI23456': False}]
        )
class SharadarSP500ReindexedLikeTestCase(unittest.TestCase):
    """Tests for get_sharadar_sp500_reindexed_like (S&P 500 membership)."""

    def test_complain_if_time_level_in_index(self):
        """
        Tests error handling when reindex_like has a Time level in the index.
        """
        closes = pd.DataFrame(
            np.random.rand(6,2),
            columns=["FI12345","FI23456"],
            index=pd.MultiIndex.from_product((
                pd.date_range(start="2018-01-01", periods=3, freq="D"),
                ["15:00:00","15:15:00"]), names=["Date", "Time"]))

        with self.assertRaises(ParameterError) as cm:
            get_sharadar_sp500_reindexed_like(closes)

        self.assertIn("reindex_like should not have 'Time' in index", str(cm.exception))

    def test_complain_if_date_level_not_in_index(self):
        """
        Tests error handling when reindex_like doesn't have an index named
        Date.
        """
        # The date_range has no name= kwarg, so the index is unnamed.
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-01-01", periods=3, freq="D"))

        with self.assertRaises(ParameterError) as cm:
            get_sharadar_sp500_reindexed_like(closes)

        self.assertIn("reindex_like must have index called 'Date'", str(cm.exception))

    def test_complain_if_not_datetime_index(self):
        """
        Tests error handling when the reindex_like index is named Date but is
        not a DatetimeIndex.
        """
        closes = pd.DataFrame(
            np.random.rand(3,2),
            columns=["FI12345","FI23456"],
            index=pd.Index(["foo","bar","bat"], name="Date"))

        with self.assertRaises(ParameterError) as cm:
            get_sharadar_sp500_reindexed_like(closes)

        self.assertIn("reindex_like must have a DatetimeIndex", str(cm.exception))

    @patch("quantrocket.fundamental.download_sharadar_sp500")
    def test_pass_args_correctly(self,
                                 mock_download_sharadar_sp500):
        """
        Tests that sids, date ranges, and fields are correctly
        passed to download_sharadar_sp500.
        """
        closes = pd.DataFrame(
            np.random.rand(6,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-08-13", periods=6, freq="D", name="Date"))

        def _mock_download_sharadar_sp500(filepath_or_buffer, *args, **kwargs):
            # Canned add/remove history, written as CSV then rewound.
            sp500 = pd.DataFrame(
                dict(
                    DATE=[
                        "1970-08-15",
                        "2018-08-16",
                        "2018-08-14"
                    ],
                    Sid=[
                        "FI12345",
                        "FI12345",
                        "FI23456",
                    ],
                    ACTION=[
                        "added",
                        "removed",
                        "added"
                    ],
                ))
            sp500.to_csv(filepath_or_buffer, index=False)
            filepath_or_buffer.seek(0)

        mock_download_sharadar_sp500.side_effect = _mock_download_sharadar_sp500

        get_sharadar_sp500_reindexed_like(closes)

        sharadar_sp500_call = mock_download_sharadar_sp500.mock_calls[0]
        _, args, kwargs = sharadar_sp500_call
        self.assertNotIn("start_date", kwargs) # not called with start_date
        self.assertEqual(kwargs["end_date"], "2018-08-18")
        self.assertEqual(kwargs["sids"], ["FI12345", "FI23456"])
        self.assertEqual(kwargs["fields"], ["Sid","DATE","ACTION"])

    def test_in_sp500(self):
        """
        Tests requesting securities that were in the S&P500
        """
        closes = pd.DataFrame(
            np.random.rand(6,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-08-13", periods=6, freq="D", name="Date"))

        def mock_download_sharadar_sp500(filepath_or_buffer, *args, **kwargs):
            # FI12345: added 1970, removed 2018-08-16;
            # FI23456: added 2018-08-14.
            sp500 = pd.DataFrame(
                dict(
                    DATE=[
                        "1970-08-15",
                        "2018-08-16",
                        "2018-08-14"
                    ],
                    Sid=[
                        "FI12345",
                        "FI12345",
                        "FI23456",
                    ],
                    ACTION=[
                        "added",
                        "removed",
                        "added"
                    ],
                ))
            sp500.to_csv(filepath_or_buffer, index=False)
            filepath_or_buffer.seek(0)

        with patch("quantrocket.fundamental.download_sharadar_sp500", new=mock_download_sharadar_sp500):
            in_sp500 = get_sharadar_sp500_reindexed_like(closes)

        in_sp500 = in_sp500.reset_index()
        in_sp500.loc[:, "Date"] = in_sp500.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            in_sp500.to_dict(orient="records"),
            [{'Date': '2018-08-13T00:00:00', 'FI12345': True, 'FI23456': False},
             {'Date': '2018-08-14T00:00:00', 'FI12345': True, 'FI23456': True},
             {'Date': '2018-08-15T00:00:00', 'FI12345': True, 'FI23456': True},
             {'Date': '2018-08-16T00:00:00', 'FI12345': False, 'FI23456': True},
             {'Date': '2018-08-17T00:00:00', 'FI12345': False, 'FI23456': True},
             {'Date': '2018-08-18T00:00:00', 'FI12345': False, 'FI23456': True}]
        )

    def test_no_matching_events(self):
        """
        Tests that False is returned (not an exception) when the securities were never
        in the S&P 500.
        """
        closes = pd.DataFrame(
            np.random.rand(6,2),
            columns=["FI12345","FI23456"],
            index=pd.date_range(start="2018-08-13", periods=6, freq="D", name="Date"))

        def mock_download_sharadar_sp500(filepath_or_buffer, *args, **kwargs):
            # Simulate the no-data error path of the downloader.
            raise NoFundamentalData("no sp500 data matches the query parameters")

        with patch("quantrocket.fundamental.download_sharadar_sp500", new=mock_download_sharadar_sp500):
            in_sp500 = get_sharadar_sp500_reindexed_like(closes)

        in_sp500 = in_sp500.reset_index()
        in_sp500.loc[:, "Date"] = in_sp500.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            in_sp500.to_dict(orient="records"),
            [{'Date': '2018-08-13T00:00:00', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-14T00:00:00', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-15T00:00:00', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-16T00:00:00', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-17T00:00:00', 'FI12345': False, 'FI23456': False},
             {'Date': '2018-08-18T00:00:00', 'FI12345': False, 'FI23456': False}]
        )

    def test_tz_aware_index(self):
        """
        Tests that reindex_like.index can be tz-naive or tz-aware.
        """
        def mock_download_sharadar_sp500(filepath_or_buffer, *args, **kwargs):
            sp500 = pd.DataFrame(
                dict(
                    DATE=[
                        "1970-08-15",
                        "2018-08-16",
                        "2018-08-14"
                    ],
                    Sid=[
                        "FI12345",
                        "FI12345",
                        "FI23456",
                    ],
                    ACTION=[
                        "added",
                        "removed",
                        "added"
                    ],
                ))
            sp500.to_csv(filepath_or_buffer, index=False)
            filepath_or_buffer.seek(0)

        # request with tz_naive
        with patch("quantrocket.fundamental.download_sharadar_sp500", new=mock_download_sharadar_sp500):
            closes = pd.DataFrame(
                np.random.rand(6,2),
                columns=["FI12345","FI23456"],
                index=pd.date_range(start="2018-08-13", periods=6, freq="D", name="Date"))

            in_sp500 = get_sharadar_sp500_reindexed_like(closes)

        in_sp500 = in_sp500.reset_index()
        in_sp500.loc[:, "Date"] = in_sp500.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            in_sp500.to_dict(orient="records"),
            [{'Date': '2018-08-13T00:00:00', 'FI12345': True, 'FI23456': False},
             {'Date': '2018-08-14T00:00:00', 'FI12345': True, 'FI23456': True},
             {'Date': '2018-08-15T00:00:00', 'FI12345': True, 'FI23456': True},
             {'Date': '2018-08-16T00:00:00', 'FI12345': False, 'FI23456': True},
             {'Date': '2018-08-17T00:00:00', 'FI12345': False, 'FI23456': True},
             {'Date': '2018-08-18T00:00:00', 'FI12345': False, 'FI23456': True}]
        )

        # request with tz aware; output dates carry the -0400 offset
        with patch("quantrocket.fundamental.download_sharadar_sp500", new=mock_download_sharadar_sp500):
            closes = pd.DataFrame(
                np.random.rand(6,2),
                columns=["FI12345","FI23456"],
                index=pd.date_range(start="2018-08-13", periods=6, tz='America/New_York', freq="D", name="Date"))

            in_sp500 = get_sharadar_sp500_reindexed_like(closes)

        in_sp500 = in_sp500.reset_index()
        in_sp500.loc[:, "Date"] = in_sp500.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
        self.assertListEqual(
            in_sp500.to_dict(orient="records"),
            [{'Date': '2018-08-13T00:00:00-0400', 'FI12345': True, 'FI23456': False},
             {'Date': '2018-08-14T00:00:00-0400', 'FI12345': True, 'FI23456': True},
             {'Date': '2018-08-15T00:00:00-0400', 'FI12345': True, 'FI23456': True},
             {'Date': '2018-08-16T00:00:00-0400', 'FI12345': False, 'FI23456': True},
             {'Date': '2018-08-17T00:00:00-0400', 'FI12345': False, 'FI23456': True},
             {'Date': '2018-08-18T00:00:00-0400', 'FI12345': False, 'FI23456': True}]
        )
| 41.986391
| 136
| 0.497393
| 14,473
| 141,914
| 4.720238
| 0.038624
| 0.020259
| 0.016746
| 0.021415
| 0.909494
| 0.889601
| 0.868303
| 0.854836
| 0.832689
| 0.813587
| 0
| 0.142018
| 0.372838
| 141,914
| 3,379
| 137
| 41.998816
| 0.625552
| 0.057056
| 0
| 0.829472
| 0
| 0
| 0.18451
| 0.036589
| 0
| 0
| 0
| 0
| 0.089632
| 1
| 0.049753
| false
| 0.004558
| 0.003418
| 0
| 0.05621
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
07785da4cb8594be8c44fd0baf76434b0cdbc194
| 22,503
|
py
|
Python
|
katastrophe/latest.py
|
alyakhtar/Katastrophe
|
3f917b50b80ea6b8060cef6f167e9dcce1931251
|
[
"MIT"
] | 116
|
2016-05-29T12:54:57.000Z
|
2021-12-10T12:31:03.000Z
|
katastrophe/latest.py
|
MrStfnz/Katastrophe
|
f5b78f9d59271f51a91156c1df70e1e7d1cbe9e5
|
[
"MIT"
] | 14
|
2016-05-29T19:05:24.000Z
|
2017-01-31T00:22:10.000Z
|
katastrophe/latest.py
|
MrStfnz/Katastrophe
|
f5b78f9d59271f51a91156c1df70e1e7d1cbe9e5
|
[
"MIT"
] | 25
|
2016-05-31T09:19:41.000Z
|
2021-11-18T07:54:15.000Z
|
import requests
from bs4 import BeautifulSoup
from tabulate import tabulate
from sys import platform
import subprocess
import os,time
from run import download
import getpass
# Python 2/3 compatibility shims: on Python 3 the Python 2 builtins
# raw_input/xrange are undefined, so fall back to input/range.
try:
    raw_input_ = raw_input
except NameError:
    raw_input_ = input

try:
    xrange_ = xrange
except NameError:
    xrange_ = range
def download_torrent(link, name, ssl):
    """
    Scrape a torrent detail page and start the download.

    Prefers handing the magnet link to an installed torrent client;
    falls back to fetching the .torrent file and passing it to
    run.download().

    Parameters
    ----------
    link : str
        URL of the torrent detail page to scrape.
    name : str
        Display name of the torrent; whitespace is removed to build the
        local file name.
    ssl : bool
        Passed to requests.get(verify=...) to enable/disable TLS
        certificate verification.
    """
    file_name = "".join(name.split())
    source_code = requests.get(link, verify=ssl)
    plain_text = source_code.text.encode('utf-8')
    soup = BeautifulSoup(plain_text, "lxml")
    magnet = soup.find('a', {'title': 'Magnet link'})
    magnet_link = magnet.get('href')
    torr = soup.find('a', {'title': 'Download verified torrent file'})
    torr_file = torr.get('href')
    user = getpass.getuser()
    directory = 'Torrents'
    if platform == "linux" or platform == "linux2" or platform == "darwin":
        directory = '/home/' + user + '/Torrents'
        try:
            # Hand the magnet link to the desktop's default handler.
            subprocess.Popen(['xdg-open', magnet_link],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        except Exception:
            # No magnet handler: fetch the gzipped .torrent directly.
            if not os.path.exists(directory):
                os.makedirs(directory)
            # BUG FIX: torr_file is a single URL string; the original
            # indexed it with an undefined name `torrent` (NameError).
            os.system('wget -O %s/%s.gz %s' % (directory, file_name, torr_file))
            os.system('gunzip %s/%s.gz' % (directory, file_name))
            download(file_name)
            print('\n\nDownload Complete\n')
    elif platform == "win32":
        # BUG FIX: the original built 'C:\Users' + user + '\Torrents'
        # with no path separators ('\U' is also an invalid escape on
        # Python 3); join the components properly instead.
        directory = os.path.join('C:\\Users', user, 'Torrents')
        procs = []
        client = ''
        # List running process names to find an installed torrent client.
        cmd = 'WMIC PROCESS get Caption'
        proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
        for line in proc.stdout:
            procs.append(line.strip())
        clients = ['BitTorrent.exe',
                   'uTorrent.exe',
                   'deluge.exe']
        for c in clients:
            if c in procs:
                client = c
                break
        if client:
            # A known client is running: open the magnet link with its exe.
            cmd = 'wmic process where "name=\'{}\'" get ExecutablePath'.format(client)
            proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
            loc = proc.stdout.readlines()
            exe = loc[1].strip()
            subprocess.Popen([exe.decode(), magnet_link])
        else:
            # BUG FIX: the format args were (directory, torr_file,
            # file_name), so PowerShell's wget was given the directory
            # name as the URL; the URL must come first.
            pwrshell = subprocess.Popen([r'C:\WINDOWS\system32\WindowsPowerShell\v1.0\powershell.exe',
                                         '-ExecutionPolicy',
                                         'Unrestricted',
                                         'wget %s -Outfile %s/%s.torrent' % (torr_file, directory, file_name)],
                                        cwd=os.getcwd())
            pwrshell.wait()
            print('\n')
            download(file_name + '.torrent')
            print('\n\nDownload Complete\n')
def fetch(ssl):
    """
    Scrape the kickasstorrents front page and return a list of
    (serial_no, name, size, seeds, leechers) tuples across all
    front-page category tables.

    Side effect: (re)populates the module-global `torrent_href` list,
    which the per-category *_torrent() functions index into when
    downloading.
    """
    link = 'https://kickasstorrents.to/full/'
    source_code = requests.get(link,verify = ssl)
    plain_text = source_code.text.encode('utf-8')
    soup = BeautifulSoup(plain_text, "lxml")
    global torrent_href
    torrent_name = []
    torrent_seeds = []
    torrent_href = []
    torrent_size = []
    torrent_leechers = []
    # NOTE(review): these per-category lists are never used or returned
    # in this function; they appear to be dead code.
    movie = []
    tv = []
    music = []
    games = []
    applications = []
    anime = []
    books = []
    losslessmusic = []
    sno = []
    for i in soup.findAll('table', {'class': 'data frontPageWidget'}):
        for j in i('a', {'class': 'cellMainLink'}):
            # Strip non-ASCII characters from the torrent name.
            torrent_name.append(
                ''.join([k if ord(k) < 128 else '' for k in j.get_text()]))
            torrent_href.append(j.get('href'))
        for j in i('td', {'class': 'nobr center'}):
            torrent_size.append(j.get_text())
        for j in i('td', {'class': 'green center'}):
            torrent_seeds.append(j.get_text())
        for j in i('td', {'class': 'red lasttd center'}):
            torrent_leechers.append(j.get_text())
    # Serial numbers restart at 1 for each of 8 categories of 15 rows;
    # zip() truncates to the shortest input list if fewer rows scraped.
    for i in xrange_(8):
        for j in xrange_(15):
            sno.append(j+1)
    combine = zip(sno,torrent_name, torrent_size, torrent_seeds, torrent_leechers)
    return combine
def movies_torrent(ssl):
    """
    Show the 15 latest movie torrents (rows 0-14 of fetch()) and prompt
    for serial number(s) to download.

    Input forms: a single number; "a,b" (range a..b); "a," (a..15);
    ",b" (1..b); three or more comma-separated numbers (individual
    picks); or 'e'/'E' to exit.
    """
    torrents = fetch(ssl)
    movies = []
    headers = ['SNO.', 'NAME', 'SIZE', 'SEEDS', 'LEECHERS']
    for i in xrange_(15):
        movies.append(torrents[i])
    print '\nLATEST MOVIE TORRENTS\n'
    print(tabulate(movies, headers, tablefmt='psql', numalign="center"))
    print('Enter torrent No.(s) to download or e to exit : '),
    serial = raw_input_()
    if serial == 'e' or serial == 'E':
        exit()
    else:
        if ',' in serial:
            numbs = serial.split(',')
            if len(numbs) < 3:
                # Two comma-separated values: treated as a start,end range.
                if numbs[0] != '' and numbs[1] != '' :
                    start = int(numbs[0])
                    end = int(numbs[1])
                    if start < end:
                        if end < 16 and start > 0:
                            for i in xrange_(start,end+1):
                                download_torrent('https://kickasstorrents.to' + torrent_href[i - 1],movies[i - 1][1], ssl)
                elif numbs[0] != '' and numbs[1] == '' :
                    start = int(numbs[0])
                    if start > 0 and start < 16:
                        for i in xrange_(start,16):
                            download_torrent('https://kickasstorrents.to' + torrent_href[i - 1],movies[i - 1][1], ssl)
                else:
                    end = int(numbs[1])
                    if end > 0 and end < 16:
                        for i in xrange_(1,end+1):
                            download_torrent('https://kickasstorrents.to' + torrent_href[i - 1],movies[i - 1][1], ssl)
            else:
                # Three or more values: each is an individual serial number.
                for sn in numbs:
                    i = int(sn)
                    if i > 0 and i < 16:
                        download_torrent('https://kickasstorrents.to' + torrent_href[i - 1],movies[i - 1][1], ssl)
                    else:
                        print "\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n"
        else:
            if int(serial) <= 15 and int(serial) >= 1:
                download_torrent('https://kickasstorrents.to' + torrent_href[int(serial) - 1],movies[int(serial) - 1][1], ssl)
            else:
                print "\n\n\tINCORRECT SERIAL, TORRRENT DOES NOT EXIST...TRY AGAIN!!\n\n"
def tv_torrent(ssl):
    """
    Show the 15 latest TV torrents (rows 15-29 of fetch(); hrefs are
    offset by +15 into the global torrent_href) and prompt for serial
    number(s) to download. Same input forms as movies_torrent().
    """
    torrents = fetch(ssl)
    tv = []
    headers = ['SNO.', 'NAME', 'SIZE', 'SEEDS', 'LEECHERS']
    for i in xrange_(15,30):
        tv.append(torrents[i])
    print '\nLATEST TV TORRENTS\n'
    print(tabulate(tv, headers, tablefmt='psql', numalign="center"))
    print('Enter torrent No.(s) to download or e to exit : '),
    serial = raw_input_()
    if serial == 'e' or serial == 'E':
        exit()
    else:
        if ',' in serial:
            numbs = serial.split(',')
            if len(numbs) < 3:
                # Two comma-separated values: treated as a start,end range.
                if numbs[0] != '' and numbs[1] != '' :
                    start = int(numbs[0])
                    end = int(numbs[1])
                    if start < end:
                        if end < 16 and start > 0:
                            for i in xrange_(start,end+1):
                                download_torrent('https://kickasstorrents.to' + torrent_href[(i+15) - 1],tv[i - 1][1], ssl)
                elif numbs[0] != '' and numbs[1] == '' :
                    start = int(numbs[0])
                    if start > 0 and start < 16:
                        for i in xrange_(start,16):
                            download_torrent('https://kickasstorrents.to' + torrent_href[(i+15) - 1],tv[i - 1][1], ssl)
                else:
                    end = int(numbs[1])
                    if end > 0 and end < 16:
                        for i in xrange_(1,end+1):
                            download_torrent('https://kickasstorrents.to' + torrent_href[(i+15) - 1],tv[i - 1][1], ssl)
            else:
                # Three or more values: each is an individual serial number.
                for sn in numbs:
                    i = int(sn)
                    if i > 0 and i < 16:
                        download_torrent('https://kickasstorrents.to' + torrent_href[(i+15) - 1],tv[i - 1][1], ssl)
                    else:
                        print "\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n"
        else:
            if int(serial) <= 15 and int(serial) >= 1:
                download_torrent('https://kickasstorrents.to' + torrent_href[(int(serial) + 15) - 1],tv[int(serial) - 1][1], ssl)
            else:
                print "\n\n\tINCORRECT SERIAL, TORRRENT DOES NOT EXIST...TRY AGAIN!!\n\n"
def music_torrent(ssl):
    """
    Show the 15 latest music torrents (rows 30-44 of fetch(); hrefs are
    offset by +30 into the global torrent_href) and prompt for serial
    number(s) to download. Same input forms as movies_torrent().
    """
    torrents = fetch(ssl)
    music = []
    headers = ['SNO.', 'NAME', 'SIZE', 'SEEDS', 'LEECHERS']
    for i in xrange_(30,45):
        music.append(torrents[i])
    print '\nLATEST MUSIC TORRENTS\n'
    print(tabulate(music, headers, tablefmt='psql', numalign="center"))
    print('Enter torrent No.(s) to download or e to exit : '),
    serial = raw_input_()
    if serial == 'e' or serial == 'E':
        exit()
    else:
        if ',' in serial:
            numbs = serial.split(',')
            if len(numbs) < 3:
                # Two comma-separated values: treated as a start,end range.
                if numbs[0] != '' and numbs[1] != '' :
                    start = int(numbs[0])
                    end = int(numbs[1])
                    if start < end:
                        if end < 16 and start > 0:
                            for i in xrange_(start,end+1):
                                download_torrent('https://kickasstorrents.to' + torrent_href[(i+30) - 1],music[i - 1][1], ssl)
                elif numbs[0] != '' and numbs[1] == '' :
                    start = int(numbs[0])
                    if start > 0 and start < 16:
                        for i in xrange_(start,16):
                            download_torrent('https://kickasstorrents.to' + torrent_href[(i+30) - 1],music[i - 1][1], ssl)
                else:
                    end = int(numbs[1])
                    if end > 0 and end < 16:
                        for i in xrange_(1,end+1):
                            download_torrent('https://kickasstorrents.to' + torrent_href[(i+30) - 1],music[i - 1][1], ssl)
            else:
                # Three or more values: each is an individual serial number.
                for sn in numbs:
                    i = int(sn)
                    if i > 0 and i < 16:
                        download_torrent('https://kickasstorrents.to' + torrent_href[(i+30) - 1],music[i - 1][1], ssl)
                    else:
                        print "\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n"
        else:
            if int(serial) <= 15 and int(serial) >= 1:
                download_torrent('https://kickasstorrents.to' + torrent_href[(int(serial) + 30) - 1],music[int(serial) - 1][1], ssl)
            else:
                print "\n\n\tINCORRECT SERIAL, TORRRENT DOES NOT EXIST...TRY AGAIN!!\n\n"
def games_torrent(ssl):
    """
    Show the 15 latest game torrents (rows 45-59 of fetch(); hrefs are
    offset by +45 into the global torrent_href) and prompt for serial
    number(s) to download. Same input forms as movies_torrent().
    """
    torrents = fetch(ssl)
    games = []
    headers = ['SNO.', 'NAME', 'SIZE', 'SEEDS', 'LEECHERS']
    for i in xrange_(45,60):
        games.append(torrents[i])
    print '\nLATEST GAME TORRENTS\n'
    print(tabulate(games, headers, tablefmt='psql', numalign="center"))
    print('Enter torrent No.(s) to download or e to exit : '),
    serial = raw_input_()
    if serial == 'e' or serial == 'E':
        exit()
    else:
        if ',' in serial:
            numbs = serial.split(',')
            if len(numbs) < 3:
                # Two comma-separated values: treated as a start,end range.
                if numbs[0] != '' and numbs[1] != '' :
                    start = int(numbs[0])
                    end = int(numbs[1])
                    if start < end:
                        if end < 16 and start > 0:
                            for i in xrange_(start,end+1):
                                download_torrent('https://kickasstorrents.to' + torrent_href[(i+45) - 1],games[i - 1][1], ssl)
                elif numbs[0] != '' and numbs[1] == '' :
                    start = int(numbs[0])
                    if start > 0 and start < 16:
                        for i in xrange_(start,16):
                            download_torrent('https://kickasstorrents.to' + torrent_href[(i+45) - 1],games[i - 1][1], ssl)
                else:
                    end = int(numbs[1])
                    if end > 0 and end < 16:
                        for i in xrange_(1,end+1):
                            download_torrent('https://kickasstorrents.to' + torrent_href[(i+45) - 1],games[i - 1][1], ssl)
            else:
                # Three or more values: each is an individual serial number.
                for sn in numbs:
                    i = int(sn)
                    if i > 0 and i < 16:
                        download_torrent('https://kickasstorrents.to' + torrent_href[(i+45) - 1],games[i - 1][1], ssl)
                    else:
                        print "\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n"
        else:
            if int(serial) <= 15 and int(serial) >= 1:
                download_torrent('https://kickasstorrents.to' + torrent_href[(int(serial) + 45) - 1],games[int(serial) - 1][1], ssl)
            else:
                print "\n\n\tINCORRECT SERIAL, TORRRENT DOES NOT EXIST...TRY AGAIN!!\n\n"
def applications_torrent(ssl):
    """Show the 15 latest application torrents and download the user's picks.

    Accepts a single serial (1-15), a range ("a,b", "a," or ",b"), a list of
    three or more comma-separated serials, or 'e'/'E' to exit.  ``ssl`` is
    forwarded to fetch()/download_torrent(); the module-level ``torrent_href``
    list (populated by fetch()) supplies the per-torrent links.
    """
    torrents = fetch(ssl)
    applications = []
    headers = ['SNO.', 'NAME', 'SIZE', 'SEEDS', 'LEECHERS']
    # Application torrents occupy rows 60-74 of the combined listing from fetch().
    for i in xrange_(60, 75):
        applications.append(torrents[i])
    print('\nLATEST APPLICATION TORRENTS\n')
    print(tabulate(applications, headers, tablefmt='psql', numalign="center"))
    print('Enter torrent No.(s) to download or e to exit : '),
    serial = raw_input_()
    if serial == 'e' or serial == 'E':
        exit()
    try:
        if ',' in serial:
            numbs = serial.split(',')
            if len(numbs) < 3:
                if numbs[0] != '' and numbs[1] != '':
                    # "a,b" range form (1-based, inclusive).
                    start = int(numbs[0])
                    end = int(numbs[1])
                    if start < end and start > 0 and end < 16:
                        for i in xrange_(start, end + 1):
                            download_torrent('https://kickasstorrents.to' + torrent_href[(i + 60) - 1], applications[i - 1][1], ssl)
                    else:
                        # BUG FIX: invalid ranges previously failed silently.
                        print("\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n")
                elif numbs[0] != '' and numbs[1] == '':
                    # "a," open-ended form: a through 15.
                    start = int(numbs[0])
                    if start > 0 and start < 16:
                        for i in xrange_(start, 16):
                            download_torrent('https://kickasstorrents.to' + torrent_href[(i + 60) - 1], applications[i - 1][1], ssl)
                    else:
                        # BUG FIX: out-of-range start previously failed silently.
                        print("\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n")
                else:
                    # ",b" open-ended form: 1 through b.
                    end = int(numbs[1])
                    if end > 0 and end < 16:
                        for i in xrange_(1, end + 1):
                            download_torrent('https://kickasstorrents.to' + torrent_href[(i + 60) - 1], applications[i - 1][1], ssl)
                    else:
                        # BUG FIX: out-of-range end previously failed silently.
                        print("\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n")
            else:
                # Explicit comma-separated list of serials.
                for sn in numbs:
                    i = int(sn)
                    if i > 0 and i < 16:
                        download_torrent('https://kickasstorrents.to' + torrent_href[(i + 60) - 1], applications[i - 1][1], ssl)
                    else:
                        print("\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n")
        else:
            if int(serial) <= 15 and int(serial) >= 1:
                download_torrent('https://kickasstorrents.to' + torrent_href[(int(serial) + 60) - 1], applications[int(serial) - 1][1], ssl)
            else:
                print("\n\n\tINCORRECT SERIAL, TORRRENT DOES NOT EXIST...TRY AGAIN!!\n\n")
    except ValueError:
        # BUG FIX: non-numeric input previously crashed with an unhandled ValueError.
        print("\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n")
def anime_torrent(ssl):
    """Show the 15 latest anime torrents and download the user's picks.

    Accepts a single serial (1-15), a range ("a,b", "a," or ",b"), a list of
    three or more comma-separated serials, or 'e'/'E' to exit.  ``ssl`` is
    forwarded to fetch()/download_torrent(); the module-level ``torrent_href``
    list (populated by fetch()) supplies the per-torrent links.
    """
    torrents = fetch(ssl)
    anime = []
    headers = ['SNO.', 'NAME', 'SIZE', 'SEEDS', 'LEECHERS']
    # Anime torrents occupy rows 75-89 of the combined listing from fetch().
    for i in xrange_(75, 90):
        anime.append(torrents[i])
    print('\nLATEST ANIME TORRENTS\n')
    print(tabulate(anime, headers, tablefmt='psql', numalign="center"))
    print('Enter torrent No.(s) to download or e to exit : '),
    serial = raw_input_()
    if serial == 'e' or serial == 'E':
        exit()
    try:
        if ',' in serial:
            numbs = serial.split(',')
            if len(numbs) < 3:
                if numbs[0] != '' and numbs[1] != '':
                    # "a,b" range form (1-based, inclusive).
                    start = int(numbs[0])
                    end = int(numbs[1])
                    if start < end and start > 0 and end < 16:
                        for i in xrange_(start, end + 1):
                            download_torrent('https://kickasstorrents.to' + torrent_href[(i + 75) - 1], anime[i - 1][1], ssl)
                    else:
                        # BUG FIX: invalid ranges previously failed silently.
                        print("\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n")
                elif numbs[0] != '' and numbs[1] == '':
                    # "a," open-ended form: a through 15.
                    start = int(numbs[0])
                    if start > 0 and start < 16:
                        for i in xrange_(start, 16):
                            download_torrent('https://kickasstorrents.to' + torrent_href[(i + 75) - 1], anime[i - 1][1], ssl)
                    else:
                        # BUG FIX: out-of-range start previously failed silently.
                        print("\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n")
                else:
                    # ",b" open-ended form: 1 through b.
                    end = int(numbs[1])
                    if end > 0 and end < 16:
                        for i in xrange_(1, end + 1):
                            download_torrent('https://kickasstorrents.to' + torrent_href[(i + 75) - 1], anime[i - 1][1], ssl)
                    else:
                        # BUG FIX: out-of-range end previously failed silently.
                        print("\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n")
            else:
                # Explicit comma-separated list of serials.
                for sn in numbs:
                    i = int(sn)
                    if i > 0 and i < 16:
                        download_torrent('https://kickasstorrents.to' + torrent_href[(i + 75) - 1], anime[i - 1][1], ssl)
                    else:
                        print("\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n")
        else:
            if int(serial) <= 15 and int(serial) >= 1:
                download_torrent('https://kickasstorrents.to' + torrent_href[(int(serial) + 75) - 1], anime[int(serial) - 1][1], ssl)
            else:
                print("\n\n\tINCORRECT SERIAL, TORRRENT DOES NOT EXIST...TRY AGAIN!!\n\n")
    except ValueError:
        # BUG FIX: non-numeric input previously crashed with an unhandled ValueError.
        print("\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n")
def books_torrent(ssl):
    """Show the 15 latest book torrents and download the user's picks.

    Accepts a single serial (1-15), a range ("a,b", "a," or ",b"), a list of
    three or more comma-separated serials, or 'e'/'E' to exit.  ``ssl`` is
    forwarded to fetch()/download_torrent(); the module-level ``torrent_href``
    list (populated by fetch()) supplies the per-torrent links.
    """
    torrents = fetch(ssl)
    books = []
    headers = ['SNO.', 'NAME', 'SIZE', 'SEEDS', 'LEECHERS']
    # Book torrents occupy rows 90-104 of the combined listing from fetch().
    for i in xrange_(90, 105):
        books.append(torrents[i])
    print('\nLATEST BOOK TORRENTS\n')
    print(tabulate(books, headers, tablefmt='psql', numalign="center"))
    print('Enter torrent No.(s) to download or e to exit : '),
    serial = raw_input_()
    if serial == 'e' or serial == 'E':
        exit()
    try:
        if ',' in serial:
            numbs = serial.split(',')
            if len(numbs) < 3:
                if numbs[0] != '' and numbs[1] != '':
                    # "a,b" range form (1-based, inclusive).
                    start = int(numbs[0])
                    end = int(numbs[1])
                    if start < end and start > 0 and end < 16:
                        for i in xrange_(start, end + 1):
                            download_torrent('https://kickasstorrents.to' + torrent_href[(i + 90) - 1], books[i - 1][1], ssl)
                    else:
                        # BUG FIX: invalid ranges previously failed silently.
                        print("\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n")
                elif numbs[0] != '' and numbs[1] == '':
                    # "a," open-ended form: a through 15.
                    start = int(numbs[0])
                    if start > 0 and start < 16:
                        for i in xrange_(start, 16):
                            download_torrent('https://kickasstorrents.to' + torrent_href[(i + 90) - 1], books[i - 1][1], ssl)
                    else:
                        # BUG FIX: out-of-range start previously failed silently.
                        print("\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n")
                else:
                    # ",b" open-ended form: 1 through b.
                    end = int(numbs[1])
                    if end > 0 and end < 16:
                        for i in xrange_(1, end + 1):
                            download_torrent('https://kickasstorrents.to' + torrent_href[(i + 90) - 1], books[i - 1][1], ssl)
                    else:
                        # BUG FIX: out-of-range end previously failed silently.
                        print("\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n")
            else:
                # Explicit comma-separated list of serials.
                for sn in numbs:
                    i = int(sn)
                    if i > 0 and i < 16:
                        download_torrent('https://kickasstorrents.to' + torrent_href[(i + 90) - 1], books[i - 1][1], ssl)
                    else:
                        print("\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n")
        else:
            if int(serial) <= 15 and int(serial) >= 1:
                download_torrent('https://kickasstorrents.to' + torrent_href[(int(serial) + 90) - 1], books[int(serial) - 1][1], ssl)
            else:
                print("\n\n\tINCORRECT SERIAL, TORRRENT DOES NOT EXIST...TRY AGAIN!!\n\n")
    except ValueError:
        # BUG FIX: non-numeric input previously crashed with an unhandled ValueError.
        print("\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n")
def losslessmusic_torrent(ssl):
    """Show the 15 latest lossless-music torrents and download the user's picks.

    Accepts a single serial (1-15), a range ("a,b", "a," or ",b"), a list of
    three or more comma-separated serials, or 'e'/'E' to exit.  ``ssl`` is
    forwarded to fetch()/download_torrent(); the module-level ``torrent_href``
    list (populated by fetch()) supplies the per-torrent links.
    """
    torrents = fetch(ssl)
    losslessmusic = []
    headers = ['SNO.', 'NAME', 'SIZE', 'SEEDS', 'LEECHERS']
    # Lossless-music torrents occupy rows 105-119 of the combined listing from fetch().
    for i in xrange_(105, 120):
        losslessmusic.append(torrents[i])
    print('\nLATEST LOSSLESS MUSIC TORRENTS\n')
    print(tabulate(losslessmusic, headers, tablefmt='psql', numalign="center"))
    print('Enter torrent No.(s) to download or e to exit : '),
    serial = raw_input_()
    if serial == 'e' or serial == 'E':
        exit()
    try:
        if ',' in serial:
            numbs = serial.split(',')
            if len(numbs) < 3:
                if numbs[0] != '' and numbs[1] != '':
                    # "a,b" range form (1-based, inclusive).
                    start = int(numbs[0])
                    end = int(numbs[1])
                    if start < end and start > 0 and end < 16:
                        for i in xrange_(start, end + 1):
                            download_torrent('https://kickasstorrents.to' + torrent_href[(i + 105) - 1], losslessmusic[i - 1][1], ssl)
                    else:
                        # BUG FIX: invalid ranges previously failed silently.
                        print("\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n")
                elif numbs[0] != '' and numbs[1] == '':
                    # "a," open-ended form: a through 15.
                    start = int(numbs[0])
                    if start > 0 and start < 16:
                        for i in xrange_(start, 16):
                            download_torrent('https://kickasstorrents.to' + torrent_href[(i + 105) - 1], losslessmusic[i - 1][1], ssl)
                    else:
                        # BUG FIX: out-of-range start previously failed silently.
                        print("\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n")
                else:
                    # ",b" open-ended form: 1 through b.
                    end = int(numbs[1])
                    if end > 0 and end < 16:
                        for i in xrange_(1, end + 1):
                            download_torrent('https://kickasstorrents.to' + torrent_href[(i + 105) - 1], losslessmusic[i - 1][1], ssl)
                    else:
                        # BUG FIX: out-of-range end previously failed silently.
                        print("\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n")
            else:
                # Explicit comma-separated list of serials.
                for sn in numbs:
                    i = int(sn)
                    if i > 0 and i < 16:
                        download_torrent('https://kickasstorrents.to' + torrent_href[(i + 105) - 1], losslessmusic[i - 1][1], ssl)
                    else:
                        print("\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n")
        else:
            if int(serial) <= 15 and int(serial) >= 1:
                download_torrent('https://kickasstorrents.to' + torrent_href[(int(serial) + 105) - 1], losslessmusic[int(serial) - 1][1], ssl)
            else:
                print("\n\n\tINCORRECT SERIAL, TORRRENT DOES NOT EXIST...TRY AGAIN!!\n\n")
    except ValueError:
        # BUG FIX: non-numeric input previously crashed with an unhandled ValueError.
        print("\n\n\tINCORRECT SERIAL NUMBERS....TRY AGAIN!!\n\n")
| 42.619318
| 142
| 0.467182
| 2,567
| 22,503
| 4.023374
| 0.078302
| 0.045798
| 0.087335
| 0.135554
| 0.780596
| 0.728021
| 0.726665
| 0.726665
| 0.726665
| 0.716789
| 0
| 0.035211
| 0.394214
| 22,503
| 528
| 143
| 42.619318
| 0.722418
| 0
| 0
| 0.715203
| 0
| 0
| 0.157574
| 0.002594
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.004283
| 0.017131
| null | null | 0.092077
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
077daff5d4e0241faf3062d3433725f2ad0a6495
| 3,126
|
py
|
Python
|
cart_venv/Lib/site-packages/tensorflow_core/_api/v1/compat/v1/errors/__init__.py
|
juice1000/Synchronous-vs-Asynchronous-Learning-Tensorflow-
|
654be60f7986ac9bb7ce1d080ddee377c3389f93
|
[
"MIT"
] | 2
|
2019-08-04T20:28:14.000Z
|
2019-10-27T23:26:42.000Z
|
cart_venv/Lib/site-packages/tensorflow_core/_api/v1/compat/v1/errors/__init__.py
|
juice1000/Synchronous-vs-Asynchronous-Learning-Tensorflow-
|
654be60f7986ac9bb7ce1d080ddee377c3389f93
|
[
"MIT"
] | null | null | null |
cart_venv/Lib/site-packages/tensorflow_core/_api/v1/compat/v1/errors/__init__.py
|
juice1000/Synchronous-vs-Asynchronous-Learning-Tensorflow-
|
654be60f7986ac9bb7ce1d080ddee377c3389f93
|
[
"MIT"
] | 1
|
2020-11-04T03:16:29.000Z
|
2020-11-04T03:16:29.000Z
|
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Exception types for TensorFlow errors.
"""

from __future__ import print_function as _print_function

import sys as _sys

# Re-export the public error codes and exception classes from the private
# implementation module so they are reachable under tf.compat.v1.errors.
from tensorflow.python.framework.errors_impl import ABORTED
from tensorflow.python.framework.errors_impl import ALREADY_EXISTS
from tensorflow.python.framework.errors_impl import AbortedError
from tensorflow.python.framework.errors_impl import AlreadyExistsError
from tensorflow.python.framework.errors_impl import CANCELLED
from tensorflow.python.framework.errors_impl import CancelledError
from tensorflow.python.framework.errors_impl import DATA_LOSS
from tensorflow.python.framework.errors_impl import DEADLINE_EXCEEDED
from tensorflow.python.framework.errors_impl import DataLossError
from tensorflow.python.framework.errors_impl import DeadlineExceededError
from tensorflow.python.framework.errors_impl import FAILED_PRECONDITION
from tensorflow.python.framework.errors_impl import FailedPreconditionError
from tensorflow.python.framework.errors_impl import INTERNAL
from tensorflow.python.framework.errors_impl import INVALID_ARGUMENT
from tensorflow.python.framework.errors_impl import InternalError
from tensorflow.python.framework.errors_impl import InvalidArgumentError
from tensorflow.python.framework.errors_impl import NOT_FOUND
from tensorflow.python.framework.errors_impl import NotFoundError
from tensorflow.python.framework.errors_impl import OK
from tensorflow.python.framework.errors_impl import OUT_OF_RANGE
from tensorflow.python.framework.errors_impl import OpError
from tensorflow.python.framework.errors_impl import OutOfRangeError
from tensorflow.python.framework.errors_impl import PERMISSION_DENIED
from tensorflow.python.framework.errors_impl import PermissionDeniedError
from tensorflow.python.framework.errors_impl import RESOURCE_EXHAUSTED
from tensorflow.python.framework.errors_impl import ResourceExhaustedError
from tensorflow.python.framework.errors_impl import UNAUTHENTICATED
from tensorflow.python.framework.errors_impl import UNAVAILABLE
from tensorflow.python.framework.errors_impl import UNIMPLEMENTED
from tensorflow.python.framework.errors_impl import UNKNOWN
from tensorflow.python.framework.errors_impl import UnauthenticatedError
from tensorflow.python.framework.errors_impl import UnavailableError
from tensorflow.python.framework.errors_impl import UnimplementedError
from tensorflow.python.framework.errors_impl import UnknownError
from tensorflow.python.framework.errors_impl import error_code_from_exception_type
from tensorflow.python.framework.errors_impl import exception_type_from_error_code
from tensorflow.python.framework.errors_impl import raise_exception_on_not_ok_status

# Keep the helper alias out of the module's public namespace.
del _print_function

from tensorflow.python.util import module_wrapper as _module_wrapper

# Replace this module object in sys.modules with a TFModuleWrapper, unless a
# wrapper is already installed.  NOTE(review): the wrapper's exact behavior
# (e.g. deprecation warnings on attribute access) is defined in
# tensorflow.python.util.module_wrapper — not visible here.
if not isinstance(_sys.modules[__name__], _module_wrapper.TFModuleWrapper):
    _sys.modules[__name__] = _module_wrapper.TFModuleWrapper(
        _sys.modules[__name__], "compat.v1.errors", public_apis=None, deprecation=False,
        has_lite=False)
| 55.821429
| 86
| 0.878759
| 398
| 3,126
| 6.660804
| 0.243719
| 0.235383
| 0.286684
| 0.404753
| 0.665032
| 0.665032
| 0.665032
| 0.036967
| 0.036967
| 0
| 0
| 0.000344
| 0.069738
| 3,126
| 55
| 87
| 56.836364
| 0.911279
| 0.052783
| 0
| 0
| 1
| 0
| 0.005418
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.888889
| 0
| 0.888889
| 0.044444
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
079f9ae8086b019bc9836e87213c153e9a74bd30
| 38
|
py
|
Python
|
catkin_ws/devel/lib/python2.7/dist-packages/pr2_gazebo_plugins/srv/__init__.py
|
Sinchiguano/Robotic-Arm-Imitation-by-Human-Gesture
|
28a6a855581881d3c9197619a318a5094294e5b6
|
[
"BSD-2-Clause"
] | null | null | null |
catkin_ws/devel/lib/python2.7/dist-packages/pr2_gazebo_plugins/srv/__init__.py
|
Sinchiguano/Robotic-Arm-Imitation-by-Human-Gesture
|
28a6a855581881d3c9197619a318a5094294e5b6
|
[
"BSD-2-Clause"
] | null | null | null |
catkin_ws/devel/lib/python2.7/dist-packages/pr2_gazebo_plugins/srv/__init__.py
|
Sinchiguano/Robotic-Arm-Imitation-by-Human-Gesture
|
28a6a855581881d3c9197619a318a5094294e5b6
|
[
"BSD-2-Clause"
] | null | null | null |
from ._SetModelsJointsStates import *
| 19
| 37
| 0.842105
| 3
| 38
| 10.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 38
| 1
| 38
| 38
| 0.911765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6af95572f1555d6aa3567f19474bfbc2eb388adf
| 10,725
|
py
|
Python
|
nova/tests/unit/scheduler/filters/test_image_props_filters.py
|
tbreeds/nova
|
3f8c69b2ef3eef886e36c0b7f397b83a36a7beb8
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/scheduler/filters/test_image_props_filters.py
|
tbreeds/nova
|
3f8c69b2ef3eef886e36c0b7f397b83a36a7beb8
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/scheduler/filters/test_image_props_filters.py
|
tbreeds/nova
|
3f8c69b2ef3eef886e36c0b7f397b83a36a7beb8
|
[
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import versionutils
from nova.compute import arch
from nova.compute import hv_type
from nova.compute import vm_mode
from nova.scheduler.filters import image_props_filter
from nova import test
from nova.tests.unit.scheduler import fakes
class TestImagePropsFilter(test.NoDBTestCase):
    """Unit tests for the scheduler's ImagePropertiesFilter.

    Each test builds image properties (architecture / hypervisor type /
    vm mode / required hypervisor version), a fake host with matching or
    mismatching 'supported_instances' capabilities, and asserts whether
    host_passes() accepts the host.
    """

    def setUp(self):
        super(TestImagePropsFilter, self).setUp()
        self.filt_cls = image_props_filter.ImagePropertiesFilter()

    def test_image_properties_filter_passes_same_inst_props_and_version(self):
        # FIX: the key was '_architecture' (stray leading underscore), which the
        # filter would ignore — the test silently skipped architecture matching.
        img_props = {'properties': {'architecture': arch.X86_64,
                                    'hypervisor_type': hv_type.KVM,
                                    'vm_mode': vm_mode.HVM,
                                    'hypervisor_version_requires': '>=6.0,<6.2'
                                    }}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = versionutils.convert_version_to_int('6.0.0')
        capabilities = {'supported_instances':
                        [(arch.X86_64, hv_type.KVM, vm_mode.HVM)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertTrue(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_fails_different_inst_props(self):
        img_props = {'properties': {'architecture': arch.ARMV7,
                                    'hypervisor_type': hv_type.QEMU,
                                    'vm_mode': vm_mode.HVM}}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = versionutils.convert_version_to_int('6.0.0')
        capabilities = {'supported_instances':
                        [(arch.X86_64, hv_type.KVM, vm_mode.HVM)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertFalse(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_fails_different_hyper_version(self):
        img_props = {'properties': {'architecture': arch.X86_64,
                                    'hypervisor_type': hv_type.KVM,
                                    'vm_mode': vm_mode.HVM,
                                    'hypervisor_version_requires': '>=6.2'}}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = versionutils.convert_version_to_int('6.0.0')
        capabilities = {'enabled': True,
                        'supported_instances':
                        [(arch.X86_64, hv_type.KVM, vm_mode.HVM)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertFalse(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_passes_partial_inst_props(self):
        img_props = {'properties': {'architecture': arch.X86_64,
                                    'vm_mode': vm_mode.HVM}}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = versionutils.convert_version_to_int('6.0.0')
        capabilities = {'supported_instances':
                        [(arch.X86_64, hv_type.KVM, vm_mode.HVM)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertTrue(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_fails_partial_inst_props(self):
        img_props = {'properties': {'architecture': arch.X86_64,
                                    'vm_mode': vm_mode.HVM}}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = versionutils.convert_version_to_int('6.0.0')
        capabilities = {'supported_instances':
                        [(arch.X86_64, hv_type.XEN, vm_mode.XEN)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertFalse(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_passes_without_inst_props(self):
        filter_properties = {'request_spec': {}}
        hypervisor_version = versionutils.convert_version_to_int('6.0.0')
        capabilities = {'supported_instances':
                        [(arch.X86_64, hv_type.KVM, vm_mode.HVM)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertTrue(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_fails_without_host_props(self):
        img_props = {'properties': {'architecture': arch.X86_64,
                                    'hypervisor_type': hv_type.KVM,
                                    'vm_mode': vm_mode.HVM}}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = versionutils.convert_version_to_int('6.0.0')
        capabilities = {'enabled': True,
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertFalse(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_passes_without_hyper_version(self):
        img_props = {'properties': {'architecture': arch.X86_64,
                                    'hypervisor_type': hv_type.KVM,
                                    'vm_mode': vm_mode.HVM,
                                    'hypervisor_version_requires': '>=6.0'}}
        filter_properties = {'request_spec': {'image': img_props}}
        capabilities = {'enabled': True,
                        'supported_instances':
                        [(arch.X86_64, hv_type.KVM, vm_mode.HVM)]}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertTrue(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_fails_with_unsupported_hyper_ver(self):
        img_props = {'properties': {'architecture': arch.X86_64,
                                    'hypervisor_type': hv_type.KVM,
                                    'vm_mode': vm_mode.HVM,
                                    'hypervisor_version_requires': '>=6.0'}}
        filter_properties = {'request_spec': {'image': img_props}}
        capabilities = {'enabled': True,
                        'supported_instances':
                        [(arch.X86_64, hv_type.KVM, vm_mode.HVM)],
                        'hypervisor_version': 5000}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertFalse(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_pv_mode_compat(self):
        # if an old image has 'pv' for a vm_mode it should be treated as xen
        img_props = {'properties': {'vm_mode': 'pv'}}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = versionutils.convert_version_to_int('6.0.0')
        capabilities = {'supported_instances':
                        [(arch.X86_64, hv_type.XEN, vm_mode.XEN)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertTrue(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_hvm_mode_compat(self):
        # FIX(comment): 'hv' should be treated as hvm (the host capabilities
        # below expect HVM), not xen as the original copy-pasted comment said.
        img_props = {'properties': {'vm_mode': 'hv'}}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = versionutils.convert_version_to_int('6.0.0')
        capabilities = {'supported_instances':
                        [(arch.X86_64, hv_type.KVM, vm_mode.HVM)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertTrue(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_xen_arch_compat(self):
        # if an old image has 'x86_32' for arch it should be treated as i686
        img_props = {'properties': {'architecture': 'x86_32'}}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = versionutils.convert_version_to_int('6.0.0')
        capabilities = {'supported_instances':
                        [(arch.I686, hv_type.KVM, vm_mode.HVM)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertTrue(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_xen_hv_type_compat(self):
        # if an old image has 'xapi' for hv_type it should be treated as xen
        img_props = {'properties': {'hypervisor_type': 'xapi'}}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = versionutils.convert_version_to_int('6.0.0')
        capabilities = {'supported_instances':
                        [(arch.I686, hv_type.XEN, vm_mode.HVM)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertTrue(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_baremetal_vmmode_compat(self):
        # if an old image has 'baremetal' for vmmode it should be
        # treated as hvm
        img_props = {'properties': {'vm_mode': 'baremetal'}}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = versionutils.convert_version_to_int('6.0.0')
        capabilities = {'supported_instances':
                        [(arch.I686, hv_type.BAREMETAL, vm_mode.HVM)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
| 56.151832
| 79
| 0.628998
| 1,189
| 10,725
| 5.357443
| 0.119428
| 0.109419
| 0.026845
| 0.048352
| 0.836107
| 0.825903
| 0.820565
| 0.808791
| 0.792308
| 0.785243
| 0
| 0.021648
| 0.263497
| 10,725
| 190
| 80
| 56.447368
| 0.784783
| 0.082517
| 0
| 0.727273
| 0
| 0
| 0.1547
| 0.010999
| 0
| 0
| 0
| 0
| 0.090909
| 1
| 0.097403
| false
| 0.116883
| 0.045455
| 0
| 0.149351
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
6afada116d0f9018dc761b5b4ea8cc18f9581927
| 100,272
|
py
|
Python
|
OmniDB/OmniDB_app/views/tree_postgresql.py
|
bylee5/OmniDB-AgensManager
|
04da397694b2c7c47e0a9f43c049fab98b4e5fc6
|
[
"MIT"
] | null | null | null |
OmniDB/OmniDB_app/views/tree_postgresql.py
|
bylee5/OmniDB-AgensManager
|
04da397694b2c7c47e0a9f43c049fab98b4e5fc6
|
[
"MIT"
] | null | null | null |
OmniDB/OmniDB_app/views/tree_postgresql.py
|
bylee5/OmniDB-AgensManager
|
04da397694b2c7c47e0a9f43c049fab98b4e5fc6
|
[
"MIT"
] | null | null | null |
from django.http import HttpResponse
from django.template import loader
from django.http import JsonResponse
from django.core import serializers
import json
import sys
import OmniDB_app.include.Spartacus as Spartacus
import OmniDB_app.include.Spartacus.Database as Database
import OmniDB_app.include.Spartacus.Utils as Utils
from OmniDB_app.include.Session import Session
from datetime import datetime
def get_tree_info(request):
    """Return database identity plus every SQL template used by the tree view.

    Expects a POST field 'data' containing JSON with 'p_database_index' and
    'p_tab_id'.  Responds with a JSON object {'v_data', 'v_error',
    'v_error_id'}; on success v_data carries the database name/version and the
    full set of CREATE/ALTER/DROP template strings for the PostgreSQL tree.
    """
    v_return = {}
    v_return['v_data'] = ''
    v_return['v_error'] = False
    v_return['v_error_id'] = -1
    #Invalid session
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_tab_id = json_object['p_tab_id']
    # Connection for this browser tab, kept on the server-side session.
    v_database = v_session.v_tab_connections[v_tab_id]
    #Check database prompt timeout
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        # Each Template*() call returns an object whose .v_text is the raw SQL
        # template shown in the client-side editor for that tree action.
        v_return['v_data'] = {
            'v_mode': 'database',
            'v_database_return': {
                'v_database': v_database.GetName(),
                'version': v_database.GetVersion(),
                #'superuser': v_database.GetUserSuper(),
                'create_role': v_database.TemplateCreateRole().v_text,
                'alter_role': v_database.TemplateAlterRole().v_text,
                'drop_role': v_database.TemplateDropRole().v_text,
                'create_tablespace': v_database.TemplateCreateTablespace().v_text,
                'alter_tablespace': v_database.TemplateAlterTablespace().v_text,
                'drop_tablespace': v_database.TemplateDropTablespace().v_text,
                'create_database': v_database.TemplateCreateDatabase().v_text,
                'alter_database': v_database.TemplateAlterDatabase().v_text,
                'drop_database': v_database.TemplateDropDatabase().v_text,
                'create_extension': v_database.TemplateCreateExtension().v_text,
                'alter_extension': v_database.TemplateAlterExtension().v_text,
                'drop_extension': v_database.TemplateDropExtension().v_text,
                'create_schema': v_database.TemplateCreateSchema().v_text,
                'alter_schema': v_database.TemplateAlterSchema().v_text,
                'drop_schema': v_database.TemplateDropSchema().v_text,
                'create_sequence': v_database.TemplateCreateSequence().v_text,
                'alter_sequence': v_database.TemplateAlterSequence().v_text,
                'drop_sequence': v_database.TemplateDropSequence().v_text,
                'create_function': v_database.TemplateCreateFunction().v_text,
                'drop_function': v_database.TemplateDropFunction().v_text,
                'create_procedure': v_database.TemplateCreateProcedure().v_text,
                'drop_procedure': v_database.TemplateDropProcedure().v_text,
                'create_triggerfunction': v_database.TemplateCreateTriggerFunction().v_text,
                'drop_triggerfunction': v_database.TemplateDropTriggerFunction().v_text,
                'create_view': v_database.TemplateCreateView().v_text,
                'drop_view': v_database.TemplateDropView().v_text,
                'create_mview': v_database.TemplateCreateMaterializedView().v_text,
                'refresh_mview': v_database.TemplateRefreshMaterializedView().v_text,
                'drop_mview': v_database.TemplateDropMaterializedView().v_text,
                'create_table': v_database.TemplateCreateTable().v_text,
                'alter_table': v_database.TemplateAlterTable().v_text,
                'drop_table': v_database.TemplateDropTable().v_text,
                'create_column': v_database.TemplateCreateColumn().v_text,
                'alter_column': v_database.TemplateAlterColumn().v_text,
                'drop_column': v_database.TemplateDropColumn().v_text,
                'create_primarykey': v_database.TemplateCreatePrimaryKey().v_text,
                'drop_primarykey': v_database.TemplateDropPrimaryKey().v_text,
                'create_unique': v_database.TemplateCreateUnique().v_text,
                'drop_unique': v_database.TemplateDropUnique().v_text,
                'create_foreignkey': v_database.TemplateCreateForeignKey().v_text,
                'drop_foreignkey': v_database.TemplateDropForeignKey().v_text,
                'create_index': v_database.TemplateCreateIndex().v_text,
                'alter_index': v_database.TemplateAlterIndex().v_text,
                'drop_index': v_database.TemplateDropIndex().v_text,
                'create_check': v_database.TemplateCreateCheck().v_text,
                'drop_check': v_database.TemplateDropCheck().v_text,
                'create_exclude': v_database.TemplateCreateExclude().v_text,
                'drop_exclude': v_database.TemplateDropExclude().v_text,
                'create_rule': v_database.TemplateCreateRule().v_text,
                'alter_rule': v_database.TemplateAlterRule().v_text,
                'drop_rule': v_database.TemplateDropRule().v_text,
                'create_trigger': v_database.TemplateCreateTrigger().v_text,
                'create_view_trigger': v_database.TemplateCreateViewTrigger().v_text,
                'alter_trigger': v_database.TemplateAlterTrigger().v_text,
                'enable_trigger': v_database.TemplateEnableTrigger().v_text,
                'disable_trigger': v_database.TemplateDisableTrigger().v_text,
                'drop_trigger': v_database.TemplateDropTrigger().v_text,
                'create_inherited': v_database.TemplateCreateInherited().v_text,
                'noinherit_partition': v_database.TemplateNoInheritPartition().v_text,
                'create_partition': v_database.TemplateCreatePartition().v_text,
                'detach_partition': v_database.TemplateDetachPartition().v_text,
                'drop_partition': v_database.TemplateDropPartition().v_text,
                'vacuum': v_database.TemplateVacuum().v_text,
                'vacuum_table': v_database.TemplateVacuumTable().v_text,
                'analyze': v_database.TemplateAnalyze().v_text,
                'analyze_table': v_database.TemplateAnalyzeTable().v_text,
                'delete': v_database.TemplateDelete().v_text,
                'truncate': v_database.TemplateTruncate().v_text,
                'create_physicalreplicationslot': v_database.TemplateCreatePhysicalReplicationSlot().v_text,
                'drop_physicalreplicationslot': v_database.TemplateDropPhysicalReplicationSlot().v_text,
                'create_logicalreplicationslot': v_database.TemplateCreateLogicalReplicationSlot().v_text,
                'drop_logicalreplicationslot': v_database.TemplateDropLogicalReplicationSlot().v_text,
                'create_publication': v_database.TemplateCreatePublication().v_text,
                'alter_publication': v_database.TemplateAlterPublication().v_text,
                'drop_publication': v_database.TemplateDropPublication().v_text,
                'add_pubtable': v_database.TemplateAddPublicationTable().v_text,
                'drop_pubtable': v_database.TemplateDropPublicationTable().v_text,
                'create_subscription': v_database.TemplateCreateSubscription().v_text,
                'alter_subscription': v_database.TemplateAlterSubscription().v_text,
                'drop_subscription': v_database.TemplateDropSubscription().v_text,
                'create_fdw': v_database.TemplateCreateForeignDataWrapper().v_text,
                'alter_fdw': v_database.TemplateAlterForeignDataWrapper().v_text,
                'drop_fdw': v_database.TemplateDropForeignDataWrapper().v_text,
                'create_foreign_server': v_database.TemplateCreateForeignServer().v_text,
                'alter_foreign_server': v_database.TemplateAlterForeignServer().v_text,
                'import_foreign_schema': v_database.TemplateImportForeignSchema().v_text,
                'drop_foreign_server': v_database.TemplateDropForeignServer().v_text,
                'create_foreign_table': v_database.TemplateCreateForeignTable().v_text,
                'alter_foreign_table': v_database.TemplateAlterForeignTable().v_text,
                'drop_foreign_table': v_database.TemplateDropForeignTable().v_text,
                'create_foreign_column': v_database.TemplateCreateForeignColumn().v_text,
                'alter_foreign_column': v_database.TemplateAlterForeignColumn().v_text,
                'drop_foreign_column': v_database.TemplateDropForeignColumn().v_text,
                'create_user_mapping': v_database.TemplateCreateUserMapping().v_text,
                'alter_user_mapping': v_database.TemplateAlterUserMapping().v_text,
                'drop_user_mapping': v_database.TemplateDropUserMapping().v_text,
                'create_type': v_database.TemplateCreateType().v_text,
                'alter_type': v_database.TemplateAlterType().v_text,
                'drop_type': v_database.TemplateDropType().v_text,
                'create_domain': v_database.TemplateCreateDomain().v_text,
                'alter_domain': v_database.TemplateAlterDomain().v_text,
                'drop_domain': v_database.TemplateDropDomain().v_text,
            }
        }
    except Exception as exc:
        # NOTE(review): any failure here is reported to the client with the
        # 'password_timeout' flag set — presumably so the UI re-prompts for
        # credentials; confirm this is intended for non-auth errors too.
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_database_objects(request):
    """Stub tree endpoint: after the usual session and password-timeout
    checks it returns an empty v_data payload as JSON."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    v_session = request.session.get('omnidb_session')
    # Reject requests arriving without a live OmniDB session.
    if not v_session:
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # A stale password prompt forces the client to re-authenticate.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_return['v_data'] = {}
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_properties(request):
    """Fetch the property grid rows and the DDL text for one tree node.

    POST 'data' JSON must carry 'p_database_index', 'p_tab_id' and 'p_data'
    (with 'p_schema', 'p_table', 'p_object', 'p_type').  Responds with
    {'v_data': {'properties': [[name, value], ...], 'ddl': str}, ...}.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    v_session = request.session.get('omnidb_session')
    # Reject requests arriving without a live OmniDB session.
    if not v_session:
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_data = v_request_data['p_data']
    v_database = v_session.v_tab_connections[v_tab_id]
    # A stale password prompt forces the client to re-authenticate.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_properties = v_database.GetProperties(v_data['p_schema'], v_data['p_table'], v_data['p_object'], v_data['p_type'])
        v_list_properties = [[v_row['Property'], v_row['Value']] for v_row in v_properties.Rows]
        v_ddl = v_database.GetDDL(v_data['p_schema'], v_data['p_table'], v_data['p_object'], v_data['p_type'])
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
        return JsonResponse(v_return)
    v_return['v_data'] = {
        'properties': v_list_properties,
        'ddl': v_ddl
    }
    return JsonResponse(v_return)
def get_tables(request):
    """Endpoint: list the tables of a schema with tree-view metadata.

    Expects POST 'data' JSON with p_database_index, p_tab_id and p_schema.
    Each entry in v_data carries the table name, a partitioning-aware icon
    name and the connection's v_has_* capability flags.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    # Icon keyed by the (is_partition, is_partitioned) string flag pair; any
    # other combination falls back to the combined partition icon.
    v_icon_map = {
        ('False', 'False'): 'table',
        ('False', 'True'): 'table_partitioned',
        ('True', 'False'): 'table_partition'
    }
    v_list_tables = []
    try:
        v_tables = v_database.QueryTables(False,v_schema)
        for v_row in v_tables.Rows:
            v_icon = v_icon_map.get(
                (v_row['is_partition'], v_row['is_partitioned']),
                'table_partitioned_partition'
            )
            v_list_tables.append({
                'v_name': v_row['table_name'],
                'v_icon': v_icon,
                'v_has_primary_keys': v_database.v_has_primary_keys,
                'v_has_foreign_keys': v_database.v_has_foreign_keys,
                'v_has_uniques': v_database.v_has_uniques,
                'v_has_indexes': v_database.v_has_indexes,
                'v_has_checks': v_database.v_has_checks,
                'v_has_excludes': v_database.v_has_excludes,
                'v_has_rules': v_database.v_has_rules,
                'v_has_triggers': v_database.v_has_triggers,
                'v_has_partitions': v_database.v_has_partitions
            })
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_tables
    return JsonResponse(v_return)
def get_columns(request):
    """Endpoint: describe the columns of a table.

    Expects POST 'data' JSON with p_database_index, p_tab_id, p_table and
    p_schema.  On success v_data is a list of dicts with v_column_name,
    v_data_type, v_data_length and v_nullable.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_table = v_post_data['p_table']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_columns = v_database.QueryTablesFields(v_table,False,v_schema)
        v_return['v_data'] = [
            {
                'v_column_name': v_row['column_name'],
                'v_data_type': v_row['data_type'],
                'v_data_length': v_row['data_length'],
                'v_nullable': v_row['nullable']
            }
            for v_row in v_columns.Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_pk(request):
    """Endpoint: list the primary-key constraint names of a table.

    Expects POST 'data' JSON with p_database_index, p_tab_id, p_table and
    p_schema.  On success v_data is [[constraint_name], ...].
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_table = v_post_data['p_table']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_pks = v_database.QueryTablesPrimaryKeys(v_table, False, v_schema)
        v_return['v_data'] = [[v_row['constraint_name']] for v_row in v_pks.Rows]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_pk_columns(request):
    """Endpoint: list the columns of one primary-key constraint.

    Expects POST 'data' JSON with p_database_index, p_tab_id, p_key (the
    constraint name), p_table and p_schema.  On success v_data is
    [[column_name], ...].
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_pkey = v_post_data['p_key']
    v_table = v_post_data['p_table']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_pks = v_database.QueryTablesPrimaryKeysColumns(v_pkey, v_table, False, v_schema)
        v_return['v_data'] = [[v_row['column_name']] for v_row in v_pks.Rows]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_fks(request):
    """Endpoint: list the foreign keys of a table.

    Expects POST 'data' JSON with p_database_index, p_tab_id, p_table and
    p_schema.  On success v_data is a list of
    [constraint_name, referenced_table, delete_rule, update_rule] rows.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_table = v_post_data['p_table']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_fks = v_database.QueryTablesForeignKeys(v_table, False, v_schema)
        v_return['v_data'] = [
            [
                v_row['constraint_name'],
                v_row['r_table_name'],
                v_row['delete_rule'],
                v_row['update_rule']
            ]
            for v_row in v_fks.Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_fks_columns(request):
    """Endpoint: list the column pairs of one foreign-key constraint.

    Expects POST 'data' JSON with p_database_index, p_tab_id, p_fkey,
    p_table and p_schema.  On success v_data rows are
    [referenced_table, delete_rule, update_rule, column, referenced_column].
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_fkey = v_post_data['p_fkey']
    v_table = v_post_data['p_table']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_fks = v_database.QueryTablesForeignKeysColumns(v_fkey, v_table, False, v_schema)
        v_return['v_data'] = [
            [
                v_row['r_table_name'],
                v_row['delete_rule'],
                v_row['update_rule'],
                v_row['column_name'],
                v_row['r_column_name']
            ]
            for v_row in v_fks.Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_uniques(request):
    """Endpoint: list the unique-constraint names of a table.

    Expects POST 'data' JSON with p_database_index, p_tab_id, p_table and
    p_schema.  On success v_data is [[constraint_name], ...].
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_table = v_post_data['p_table']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_uniques = v_database.QueryTablesUniques(v_table, False, v_schema)
        v_return['v_data'] = [[v_row['constraint_name']] for v_row in v_uniques.Rows]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_uniques_columns(request):
    """Endpoint: list the columns of one unique constraint of a table.

    Expects POST 'data' JSON with p_database_index, p_tab_id, p_unique
    (the constraint name), p_table and p_schema.  On success v_data is
    [[column_name], ...].

    Fix: the row-loop variable used to be named v_unique, shadowing the
    constraint-name parameter read from the request above; it is renamed
    to v_row so the two values cannot be confused.
    """
    v_return = {}
    v_return['v_data'] = ''
    v_return['v_error'] = False
    v_return['v_error_id'] = -1
    #Invalid session
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_tab_id = json_object['p_tab_id']
    v_unique = json_object['p_unique']
    v_table = json_object['p_table']
    v_schema = json_object['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    #Check database prompt timeout
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_list_uniques = []
    try:
        v_uniques = v_database.QueryTablesUniquesColumns(v_unique, v_table, False, v_schema)
        for v_row in v_uniques.Rows:
            # One single-element list per column, matching the client grid.
            v_list_uniques.append([v_row['column_name']])
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_uniques
    return JsonResponse(v_return)
def get_indexes(request):
    """Endpoint: list the indexes of a table.

    Expects POST 'data' JSON with p_database_index, p_tab_id, p_table and
    p_schema.  On success v_data is [[index_name, uniqueness], ...].
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_table = v_post_data['p_table']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_indexes = v_database.QueryTablesIndexes(v_table, False, v_schema)
        v_return['v_data'] = [
            [v_row['index_name'], v_row['uniqueness']]
            for v_row in v_indexes.Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_indexes_columns(request):
    """Endpoint: list the columns of one index of a table.

    Expects POST 'data' JSON with p_database_index, p_tab_id, p_index (the
    index name), p_table and p_schema.  On success v_data is
    [[column_name], ...].

    Fix: the row-loop variable used to be named v_index, shadowing the
    index-name parameter read from the request above; it is renamed to
    v_row so the two values cannot be confused.
    """
    v_return = {}
    v_return['v_data'] = ''
    v_return['v_error'] = False
    v_return['v_error_id'] = -1
    #Invalid session
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_tab_id = json_object['p_tab_id']
    v_index = json_object['p_index']
    v_table = json_object['p_table']
    v_schema = json_object['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    #Check database prompt timeout
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_list_indexes = []
    try:
        v_indexes = v_database.QueryTablesIndexesColumns(v_index, v_table, False, v_schema)
        for v_row in v_indexes.Rows:
            # One single-element list per column, matching the client grid.
            v_list_indexes.append([v_row['column_name']])
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_indexes
    return JsonResponse(v_return)
def get_checks(request):
    """Endpoint: list the check constraints of a table.

    Expects POST 'data' JSON with p_database_index, p_tab_id, p_table and
    p_schema.  On success v_data is
    [[constraint_name, constraint_source], ...].
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_table = v_post_data['p_table']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_checks = v_database.QueryTablesChecks(v_table,False,v_schema)
        v_return['v_data'] = [
            [v_row['constraint_name'], v_row['constraint_source']]
            for v_row in v_checks.Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_excludes(request):
    """Endpoint: list the exclusion constraints of a table.

    Expects POST 'data' JSON with p_database_index, p_tab_id, p_table and
    p_schema.  On success v_data is
    [[constraint_name, attributes, operations], ...].
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_table = v_post_data['p_table']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_excludes = v_database.QueryTablesExcludes(v_table,False,v_schema)
        v_return['v_data'] = [
            [v_row['constraint_name'], v_row['attributes'], v_row['operations']]
            for v_row in v_excludes.Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_rules(request):
    """Endpoint: list the rewrite-rule names of a table.

    Expects POST 'data' JSON with p_database_index, p_tab_id, p_table and
    p_schema.  On success v_data is [[rule_name], ...].
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_table = v_post_data['p_table']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_rules = v_database.QueryTablesRules(v_table,False,v_schema)
        v_return['v_data'] = [[v_row['rule_name']] for v_row in v_rules.Rows]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_rule_definition(request):
    """Endpoint: fetch the SQL definition of one rewrite rule.

    Expects POST 'data' JSON with p_database_index, p_tab_id, p_rule,
    p_table and p_schema.  On success v_data is the rule's definition text.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_rule = v_post_data['p_rule']
    v_table = v_post_data['p_table']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_return['v_data'] = v_database.GetRuleDefinition(v_rule, v_table, v_schema)
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_triggers(request):
    """Endpoint: list the triggers of a table.

    Expects POST 'data' JSON with p_database_index, p_tab_id, p_table and
    p_schema.  On success v_data is a list of dicts with v_name, v_enabled,
    v_function and v_id.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_table = v_post_data['p_table']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_triggers = v_database.QueryTablesTriggers(v_table,False,v_schema)
        v_return['v_data'] = [
            {
                'v_name': v_row['trigger_name'],
                'v_enabled': v_row['trigger_enabled'],
                'v_function': v_row['trigger_function'],
                'v_id': v_row['id']
            }
            for v_row in v_triggers.Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_inheriteds(request):
    """Endpoint: list the child tables that inherit from a table.

    Expects POST 'data' JSON with p_database_index, p_tab_id, p_table and
    p_schema.  On success v_data is [['child_schema.child_table'], ...].
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_table = v_post_data['p_table']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_children = v_database.QueryTablesInheriteds(v_table,False,v_schema)
        # Qualified name per child, one single-element list per grid row.
        v_return['v_data'] = [
            [v_row['child_schema'] + '.' + v_row['child_table']]
            for v_row in v_children.Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_partitions(request):
    """Endpoint: list the partitions of a partitioned table.

    Expects POST 'data' JSON with p_database_index, p_tab_id, p_table and
    p_schema.  On success v_data is [['child_schema.child_table'], ...].
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_table = v_post_data['p_table']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_children = v_database.QueryTablesPartitions(v_table,False,v_schema)
        # Qualified name per partition, one single-element list per grid row.
        v_return['v_data'] = [
            [v_row['child_schema'] + '.' + v_row['child_table']]
            for v_row in v_children.Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_views(request):
    """Endpoint: list the views of a schema.

    Expects POST 'data' JSON with p_database_index, p_tab_id and p_schema.
    Each entry in v_data carries the view name plus the connection's
    v_has_rules / v_has_triggers capability flags.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_views = v_database.QueryViews(False,v_schema)
        v_return['v_data'] = [
            {
                'v_name': v_row['table_name'],
                'v_has_rules': v_database.v_has_rules,
                'v_has_triggers': v_database.v_has_triggers,
            }
            for v_row in v_views.Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_views_columns(request):
    """Endpoint: describe the columns of a view.

    Expects POST 'data' JSON with p_database_index, p_tab_id, p_table and
    p_schema.  On success v_data is a list of dicts with v_column_name,
    v_data_type and v_data_length.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_table = v_post_data['p_table']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_columns = v_database.QueryViewFields(v_table,False,v_schema)
        v_return['v_data'] = [
            {
                'v_column_name': v_row['column_name'],
                'v_data_type': v_row['data_type'],
                'v_data_length': v_row['data_length'],
            }
            for v_row in v_columns.Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_view_definition(request):
    """Endpoint: fetch the SQL definition of one view.

    Expects POST 'data' JSON with p_database_index, p_tab_id, p_view and
    p_schema.  On success v_data is the view's definition text.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_view = v_post_data['p_view']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_return['v_data'] = v_database.GetViewDefinition(v_view, v_schema)
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_mviews(request):
    """Endpoint: list the materialized views of a schema.

    Expects POST 'data' JSON with p_database_index, p_tab_id and p_schema.
    Each entry in v_data carries the view name plus the connection's
    v_has_indexes capability flag.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_views = v_database.QueryMaterializedViews(False,v_schema)
        v_return['v_data'] = [
            {
                'v_name': v_row['table_name'],
                'v_has_indexes': v_database.v_has_indexes
            }
            for v_row in v_views.Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_mviews_columns(request):
    """Endpoint: describe the columns of a materialized view.

    Expects POST 'data' JSON with p_database_index, p_tab_id, p_table and
    p_schema.  On success v_data is a list of dicts with v_column_name,
    v_data_type and v_data_length.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_table = v_post_data['p_table']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_columns = v_database.QueryMaterializedViewFields(v_table,False,v_schema)
        v_return['v_data'] = [
            {
                'v_column_name': v_row['column_name'],
                'v_data_type': v_row['data_type'],
                'v_data_length': v_row['data_length'],
            }
            for v_row in v_columns.Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_mview_definition(request):
    """Endpoint: fetch the SQL definition of one materialized view.

    Expects POST 'data' JSON with p_database_index, p_tab_id, p_view and
    p_schema.  On success v_data is the materialized view's definition text.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_view = v_post_data['p_view']
    v_schema = v_post_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_return['v_data'] = v_database.GetMaterializedViewDefinition(v_view, v_schema)
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_schemas(request):
    """Endpoint: list the schemas of the connected database.

    Expects POST 'data' JSON with p_database_index and p_tab_id.  On
    success v_data is a list of dicts with v_name set to each schema name.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject calls made without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_post_data = json.loads(request.POST.get('data', None))
    v_database_index = v_post_data['p_database_index']
    v_tab_id = v_post_data['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Force a fresh password prompt when the stored credential timed out.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_schemas = v_database.QuerySchemas()
        v_return['v_data'] = [
            {'v_name': v_row['schema_name']}
            for v_row in v_schemas.Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_databases(request):
    """Return the databases visible to the current connection as a JSON response.

    Fix: the original iterated with ``for v_database in v_databases.Rows``,
    shadowing the ``v_database`` connection object taken from
    ``v_session.v_tab_connections`` — harmless only because the connection is
    not used after the loop. A distinct loop variable removes the hazard.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_databases = v_database.QueryDatabases()
        # Distinct loop variable: do not clobber the connection object.
        v_return['v_data'] = [{'v_name': v_row['database_name']} for v_row in v_databases.Rows]
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    return JsonResponse(v_return)
def get_tablespaces(request):
    """Return the tablespaces of the connected server as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_tablespaces = v_database.QueryTablespaces()
        v_return['v_data'] = [{'v_name': v_row['tablespace_name']} for v_row in v_tablespaces.Rows]
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    return JsonResponse(v_return)
def get_roles(request):
    """Return the roles of the connected server as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_roles = v_database.QueryRoles()
        v_return['v_data'] = [{'v_name': v_row['role_name']} for v_row in v_roles.Rows]
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    return JsonResponse(v_return)
def get_functions(request):
    """Return the functions of a schema as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_schema = v_request_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_functions = v_database.QueryFunctions(False, v_schema)
        v_return['v_data'] = [
            {'v_name': v_row['name'], 'v_id': v_row['id']}
            for v_row in v_functions.Rows
        ]
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    return JsonResponse(v_return)
def get_function_fields(request):
    """Return the parameter fields of a function as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_function = v_request_data['p_function']
    v_schema = v_request_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_fields = v_database.QueryFunctionFields(v_function, v_schema)
        v_return['v_data'] = [
            {'v_name': v_row['name'], 'v_type': v_row['type']}
            for v_row in v_fields.Rows
        ]
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    return JsonResponse(v_return)
def get_function_definition(request):
    """Return the DDL definition of a function as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_function = v_request_data['p_function']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_definition = v_database.GetFunctionDefinition(v_function)
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    else:
        v_return['v_data'] = v_definition
    return JsonResponse(v_return)
def get_function_debug(request):
    """Return the debug-oriented source of a function as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_function = v_request_data['p_function']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_debug_source = v_database.GetFunctionDebug(v_function)
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    else:
        v_return['v_data'] = v_debug_source
    return JsonResponse(v_return)
def get_procedures(request):
    """Return the procedures of a schema as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_schema = v_request_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_procedures = v_database.QueryProcedures(False, v_schema)
        v_return['v_data'] = [
            {'v_name': v_row['name'], 'v_id': v_row['id']}
            for v_row in v_procedures.Rows
        ]
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    return JsonResponse(v_return)
def get_procedure_fields(request):
    """Return the parameter fields of a procedure as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_procedure = v_request_data['p_procedure']
    v_schema = v_request_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_fields = v_database.QueryProcedureFields(v_procedure, v_schema)
        v_return['v_data'] = [
            {'v_name': v_row['name'], 'v_type': v_row['type']}
            for v_row in v_fields.Rows
        ]
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    return JsonResponse(v_return)
def get_procedure_definition(request):
    """Return the DDL definition of a procedure as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_procedure = v_request_data['p_procedure']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_definition = v_database.GetProcedureDefinition(v_procedure)
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    else:
        v_return['v_data'] = v_definition
    return JsonResponse(v_return)
def get_procedure_debug(request):
    """Return the debug-oriented source of a procedure as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_procedure = v_request_data['p_procedure']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_debug_source = v_database.GetProcedureDebug(v_procedure)
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    else:
        v_return['v_data'] = v_debug_source
    return JsonResponse(v_return)
def get_triggerfunctions(request):
    """Return the trigger functions of a schema as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_schema = v_request_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_functions = v_database.QueryTriggerFunctions(False, v_schema)
        v_return['v_data'] = [
            {'v_name': v_row['name'], 'v_id': v_row['id']}
            for v_row in v_functions.Rows
        ]
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    return JsonResponse(v_return)
def get_triggerfunction_definition(request):
    """Return the DDL definition of a trigger function as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_function = v_request_data['p_function']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_definition = v_database.GetTriggerFunctionDefinition(v_function)
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    else:
        v_return['v_data'] = v_definition
    return JsonResponse(v_return)
def get_sequences(request):
    """Return the sequences of a schema as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_schema = v_request_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_sequences = v_database.QuerySequences(False, v_schema)
        v_return['v_data'] = [
            {'v_sequence_name': v_row['sequence_name']}
            for v_row in v_sequences.Rows
        ]
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    return JsonResponse(v_return)
def get_extensions(request):
    """Return the installed extensions of the connected database as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_extensions = v_database.QueryExtensions()
        v_return['v_data'] = [{'v_name': v_row['extension_name']} for v_row in v_extensions.Rows]
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    return JsonResponse(v_return)
def get_physicalreplicationslots(request):
    """Return the physical replication slots of the server as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_slots = v_database.QueryPhysicalReplicationSlots()
        v_return['v_data'] = [{'v_name': v_row['slot_name']} for v_row in v_slots.Rows]
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    return JsonResponse(v_return)
def get_logicalreplicationslots(request):
    """Return the logical replication slots of the server as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_slots = v_database.QueryLogicalReplicationSlots()
        v_return['v_data'] = [{'v_name': v_row['slot_name']} for v_row in v_slots.Rows]
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    return JsonResponse(v_return)
def get_publications(request):
    """Return the logical replication publications of the database as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_publications = v_database.QueryPublications()
        v_return['v_data'] = [
            {
                'v_name': v_row['pubname'],
                'v_alltables': v_row['puballtables'],
                'v_insert': v_row['pubinsert'],
                'v_update': v_row['pubupdate'],
                'v_delete': v_row['pubdelete'],
                'v_truncate': v_row['pubtruncate']
            }
            for v_row in v_publications.Rows
        ]
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    return JsonResponse(v_return)
def get_publication_tables(request):
    """Return the tables attached to a publication as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_publication = v_request_data['p_pub']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_tables = v_database.QueryPublicationTables(v_publication)
        v_return['v_data'] = [{'v_name': v_row['table_name']} for v_row in v_tables.Rows]
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    return JsonResponse(v_return)
def get_subscriptions(request):
    """Return the logical replication subscriptions of the database as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_subscriptions = v_database.QuerySubscriptions()
        v_return['v_data'] = [
            {
                'v_name': v_row['subname'],
                'v_enabled': v_row['subenabled'],
                'v_conninfo': v_row['subconninfo'],
                'v_publications': v_row['subpublications']
            }
            for v_row in v_subscriptions.Rows
        ]
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    return JsonResponse(v_return)
def get_subscription_tables(request):
    """Return the tables attached to a subscription as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_subscription = v_request_data['p_sub']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_tables = v_database.QuerySubscriptionTables(v_subscription)
        v_return['v_data'] = [{'v_name': v_row['table_name']} for v_row in v_tables.Rows]
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    return JsonResponse(v_return)
def get_foreign_data_wrappers(request):
    """Return the foreign data wrappers of the database as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_wrappers = v_database.QueryForeignDataWrappers()
        v_return['v_data'] = [{'v_name': v_row['fdwname']} for v_row in v_wrappers.Rows]
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    return JsonResponse(v_return)
def get_foreign_servers(request):
    """Return the foreign servers of a foreign data wrapper as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_fdw = v_request_data['p_fdw']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_servers = v_database.QueryForeignServers(v_fdw)
        v_return['v_data'] = [
            {
                'v_name': v_row['srvname'],
                'v_type': v_row['srvtype'],
                'v_version': v_row['srvversion'],
                'v_options': v_row['srvoptions']
            }
            for v_row in v_servers.Rows
        ]
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    return JsonResponse(v_return)
def get_user_mappings(request):
    """Return the user mappings of a foreign server as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_foreign_server = v_request_data['p_foreign_server']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    try:
        v_mappings = v_database.QueryUserMappings(v_foreign_server)
        v_return['v_data'] = [
            {'v_name': v_row['rolname'], 'v_options': v_row['umoptions']}
            for v_row in v_mappings.Rows
        ]
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
    return JsonResponse(v_return)
def get_foreign_tables(request):
    """Return the foreign tables of a schema, with a partition-aware icon, as a JSON response."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that carry no valid OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_schema = v_request_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Require the password again when the database prompt timeout has elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        return JsonResponse(v_return)
    # Icon keyed on (is_partition, is_partitioned); any other combination
    # falls back to the same icon the original if/elif chain's else produced.
    v_icon_by_flags = {
        ('False', 'False'): 'table',
        ('False', 'True'): 'table_partitioned',
        ('True', 'False'): 'table_partition',
    }
    v_list_tables = []
    try:
        v_tables = v_database.QueryForeignTables(False, v_schema)
        for v_row in v_tables.Rows:
            v_flags = (v_row['is_partition'], v_row['is_partitioned'])
            v_list_tables.append({
                'v_name': v_row['table_name'],
                'v_icon': v_icon_by_flags.get(v_flags, 'table_partitioned_partition')
            })
    except Exception as exc:
        v_return['v_error'] = True
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_tables
    return JsonResponse(v_return)
def get_foreign_columns(request):
    """List the columns of a foreign table, including FDW metadata.

    Expects a POST field 'data' with JSON keys 'p_database_index',
    'p_tab_id', 'p_table' and 'p_schema'. On success 'v_data' is a list of
    per-column dicts (name, type, length, nullability, FDW options/server).
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    v_session = request.session.get('omnidb_session')
    # No server-side session: signal error id 1 so the client re-authenticates.
    if not v_session:
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_tab_id = json_object['p_tab_id']
    v_table = json_object['p_table']
    v_schema = json_object['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Re-prompt for the database password once the configured timeout elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_list_columns = [
            {
                'v_column_name': v_column['column_name'],
                'v_data_type': v_column['data_type'],
                'v_data_length': v_column['data_length'],
                'v_nullable': v_column['nullable'],
                'v_options': v_column['attfdwoptions'],
                'v_tableoptions': v_column['ftoptions'],
                'v_server': v_column['srvname'],
                'v_fdw': v_column['fdwname'],
            }
            for v_column in v_database.QueryForeignTablesFields(v_table, False, v_schema).Rows
        ]
    except Exception as exc:
        # NOTE(review): generic failures reuse the 'password_timeout' payload,
        # mirroring the surrounding handlers; the frontend appears to rely on it.
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_columns
    return JsonResponse(v_return)
def get_types(request):
    """List the user-defined types of a schema.

    Expects a POST field 'data' with JSON keys 'p_database_index',
    'p_tab_id' and 'p_schema'. On success 'v_data' is a list of
    {'v_type_name': <name>} dicts.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    v_session = request.session.get('omnidb_session')
    # No server-side session: signal error id 1 so the client re-authenticates.
    if not v_session:
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_tab_id = json_object['p_tab_id']
    v_schema = json_object['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Re-prompt for the database password once the configured timeout elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_list_types = [
            {'v_type_name': v_type['type_name']}
            for v_type in v_database.QueryTypes(False, v_schema).Rows
        ]
    except Exception as exc:
        # NOTE(review): generic failures reuse the 'password_timeout' payload,
        # mirroring the surrounding handlers; the frontend appears to rely on it.
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_types
    return JsonResponse(v_return)
def get_domains(request):
    """List the domains of a schema.

    Expects a POST field 'data' with JSON keys 'p_database_index',
    'p_tab_id' and 'p_schema'. On success 'v_data' is a list of
    {'v_domain_name': <name>} dicts.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    v_session = request.session.get('omnidb_session')
    # No server-side session: signal error id 1 so the client re-authenticates.
    if not v_session:
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_tab_id = json_object['p_tab_id']
    v_schema = json_object['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Re-prompt for the database password once the configured timeout elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_list_domains = [
            {'v_domain_name': v_domain['domain_name']}
            for v_domain in v_database.QueryDomains(False, v_schema).Rows
        ]
    except Exception as exc:
        # NOTE(review): generic failures reuse the 'password_timeout' payload,
        # mirroring the surrounding handlers; the frontend appears to rely on it.
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_domains
    return JsonResponse(v_return)
def kill_backend(request):
    """Terminate a database backend process identified by its pid.

    Expects a POST field 'data' with JSON keys 'p_database_index',
    'p_tab_id' and 'p_pid'. Returns an empty-success or error payload.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    v_session = request.session.get('omnidb_session')
    # No server-side session: signal error id 1 so the client re-authenticates.
    if not v_session:
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_tab_id = json_object['p_tab_id']
    v_pid = json_object['p_pid']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Re-prompt for the database password once the configured timeout elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_database.v_connection.Terminate(v_pid)
    except Exception as exc:
        # NOTE(review): generic failures reuse the 'password_timeout' payload,
        # mirroring the surrounding handlers; the frontend appears to rely on it.
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def template_select(request):
    """Build a SELECT SQL template for a table or view.

    Expects a POST field 'data' with JSON keys 'p_database_index',
    'p_tab_id', 'p_table', 'p_schema' and 'p_kind'. On success 'v_data'
    carries {'v_template': <sql text>}.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    v_session = request.session.get('omnidb_session')
    # No server-side session: signal error id 1 so the client re-authenticates.
    if not v_session:
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_tab_id = json_object['p_tab_id']
    v_table = json_object['p_table']
    v_schema = json_object['p_schema']
    v_kind = json_object['p_kind']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Re-prompt for the database password once the configured timeout elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_template = v_database.TemplateSelect(v_schema, v_table, v_kind).v_text
    except Exception as exc:
        # NOTE(review): generic failures reuse the 'password_timeout' payload,
        # mirroring the surrounding handlers; the frontend appears to rely on it.
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = {'v_template': v_template}
    return JsonResponse(v_return)
def template_insert(request):
    """Build an INSERT SQL template for a table.

    Expects a POST field 'data' with JSON keys 'p_database_index',
    'p_tab_id', 'p_table' and 'p_schema'. On success 'v_data' carries
    {'v_template': <sql text>}.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    v_session = request.session.get('omnidb_session')
    # No server-side session: signal error id 1 so the client re-authenticates.
    if not v_session:
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_tab_id = json_object['p_tab_id']
    v_table = json_object['p_table']
    v_schema = json_object['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Re-prompt for the database password once the configured timeout elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_template = v_database.TemplateInsert(v_schema, v_table).v_text
    except Exception as exc:
        # NOTE(review): generic failures reuse the 'password_timeout' payload,
        # mirroring the surrounding handlers; the frontend appears to rely on it.
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = {'v_template': v_template}
    return JsonResponse(v_return)
def template_update(request):
    """Build an UPDATE SQL template for a table.

    Expects a POST field 'data' with JSON keys 'p_database_index',
    'p_tab_id', 'p_table' and 'p_schema'. On success 'v_data' carries
    {'v_template': <sql text>}.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    v_session = request.session.get('omnidb_session')
    # No server-side session: signal error id 1 so the client re-authenticates.
    if not v_session:
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_tab_id = json_object['p_tab_id']
    v_table = json_object['p_table']
    v_schema = json_object['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Re-prompt for the database password once the configured timeout elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_template = v_database.TemplateUpdate(v_schema, v_table).v_text
    except Exception as exc:
        # NOTE(review): generic failures reuse the 'password_timeout' payload,
        # mirroring the surrounding handlers; the frontend appears to rely on it.
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = {'v_template': v_template}
    return JsonResponse(v_return)
def template_select_function(request):
    """Build a SELECT SQL template for calling a function.

    Expects a POST field 'data' with JSON keys 'p_database_index',
    'p_tab_id', 'p_function', 'p_functionid' and 'p_schema'. On success
    'v_data' carries {'v_template': <sql text>}.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    v_session = request.session.get('omnidb_session')
    # No server-side session: signal error id 1 so the client re-authenticates.
    if not v_session:
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_tab_id = json_object['p_tab_id']
    v_function = json_object['p_function']
    v_functionid = json_object['p_functionid']
    v_schema = json_object['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Re-prompt for the database password once the configured timeout elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_template = v_database.TemplateSelectFunction(v_schema, v_function, v_functionid).v_text
    except Exception as exc:
        # NOTE(review): generic failures reuse the 'password_timeout' payload,
        # mirroring the surrounding handlers; the frontend appears to rely on it.
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = {'v_template': v_template}
    return JsonResponse(v_return)
def template_call_procedure(request):
    """Build a CALL SQL template for a procedure.

    Expects a POST field 'data' with JSON keys 'p_database_index',
    'p_tab_id', 'p_procedure', 'p_procedureid' and 'p_schema'. On success
    'v_data' carries {'v_template': <sql text>}.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    v_session = request.session.get('omnidb_session')
    # No server-side session: signal error id 1 so the client re-authenticates.
    if not v_session:
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_tab_id = json_object['p_tab_id']
    v_procedure = json_object['p_procedure']
    v_procedureid = json_object['p_procedureid']
    v_schema = json_object['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Re-prompt for the database password once the configured timeout elapsed.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_template = v_database.TemplateCallProcedure(v_schema, v_procedure, v_procedureid).v_text
    except Exception as exc:
        # NOTE(review): generic failures reuse the 'password_timeout' payload,
        # mirroring the surrounding handlers; the frontend appears to rely on it.
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = {'v_template': v_template}
    return JsonResponse(v_return)
| 33.887124
| 121
| 0.656564
| 13,222
| 100,272
| 4.587581
| 0.029043
| 0.105478
| 0.10894
| 0.078441
| 0.832715
| 0.828066
| 0.818751
| 0.812586
| 0.812173
| 0.811201
| 0
| 0.00157
| 0.224908
| 100,272
| 2,958
| 122
| 33.89858
| 0.778886
| 0.027156
| 0
| 0.796245
| 0
| 0
| 0.161671
| 0.002597
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02793
| false
| 0.083791
| 0.005495
| 0
| 0.145147
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
ed0f8edde1e77b8f687ea44a6dce64703fad361d
| 15,986
|
py
|
Python
|
hwtLib/abstract/frame_utils/join/test.py
|
optical-o/hwtLib
|
edad621f5ad4cdbea20a5751ff4468979afe2f77
|
[
"MIT"
] | null | null | null |
hwtLib/abstract/frame_utils/join/test.py
|
optical-o/hwtLib
|
edad621f5ad4cdbea20a5751ff4468979afe2f77
|
[
"MIT"
] | null | null | null |
hwtLib/abstract/frame_utils/join/test.py
|
optical-o/hwtLib
|
edad621f5ad4cdbea20a5751ff4468979afe2f77
|
[
"MIT"
] | null | null | null |
from math import inf
import unittest
from hwt.hdl.types.bits import Bits
from hwt.hdl.types.stream import HStream
from hwtLib.abstract.frame_utils.alignment_utils import FrameAlignmentUtils
from hwtLib.abstract.frame_utils.join.fsm import input_B_dst_to_fsm
from hwtLib.abstract.frame_utils.join.state_trans_item import StateTransItem
class FrameJoinUtilsTC(unittest.TestCase):
    """Checks the FSM transition tables produced by input_B_dst_to_fsm().

    Each test builds HStream input specifications, resolves the per-byte
    input-to-output destinations with FrameAlignmentUtils, generates the
    join FSM, and compares its state transitions against a hand-written
    reference table of StateTransItem descriptions. The reference dicts
    are exact expected data — do not reformat their values.
    """

    def test_fsm0(self):
        """Single fixed-size 1 B frame at start offset 1 on a 2 B word."""
        word_bytes = 2
        f_len = (1, 1)
        streams = [
            HStream(Bits(8), frame_len=f_len, start_offsets=[1]),
        ]
        out_offset = 0
        sju = FrameAlignmentUtils(word_bytes, out_offset)
        input_B_dst = sju.resolve_input_bytes_destinations(streams)
        tt = input_B_dst_to_fsm(word_bytes, len(streams), input_B_dst)

        def st(d):
            # Shorthand: build a StateTransItem from its dict description.
            return StateTransItem.from_dict(tt, d)

        ref = [
            [st({'st': '0->0', 'in': [[{'keep': [0, 1], 'relict': 0, 'last': 1}]],
                 'in.keep_mask':[[[0, 0]]], 'in.rd':[1],
                 'out.keep':[1, 0], 'out.mux':[(0, 0, 1), None], 'out.last':1}
                )],
        ]
        self.assertSequenceEqual(tt.state_trans, ref)

    def test_fsm0_arbitrary_len(self):
        """Aligned frame of unbounded length (1..inf B) on a 2 B word."""
        word_bytes = 2
        f_len = (1, inf)
        streams = [
            HStream(Bits(8), frame_len=f_len),
        ]
        out_offset = 0
        sju = FrameAlignmentUtils(word_bytes, out_offset)
        input_B_dst = sju.resolve_input_bytes_destinations(streams)
        tt = input_B_dst_to_fsm(word_bytes, len(streams), input_B_dst)

        def st(d):
            # Shorthand: build a StateTransItem from its dict description.
            return StateTransItem.from_dict(tt, d)

        ref = [[
            st({'st': '0->0', 'in': [[{'keep': [1, 0], 'relict': 0, 'last': 1}]],
                'in.keep_mask':[[[0, 0]]], 'in.rd':[1], 'out.keep':[1, 0],
                'out.mux':[(0, 0, 0), None], 'out.last':1}),
            st({'st': '0->0', 'in': [[{'keep': [1, 1], 'relict': 0, 'last': 1}]],
                'in.keep_mask':[[[0, 0]]], 'in.rd':[1], 'out.keep':[1, 1],
                'out.mux':[(0, 0, 0), (0, 0, 1)], 'out.last':1}),
            st({'st': '0->0', 'in': [[{'keep': [1, 1], 'relict':'X', 'last': 0}]],
                'in.keep_mask':[[[0, 0]]], 'in.rd':[1], 'out.keep':[1, 1],
                'out.mux':[(0, 0, 0), (0, 0, 1)], 'out.last':0}),
        ]]
        self.assertSequenceEqual(tt.state_trans, ref)

    def test_fsm0_arbitrary_len_unaligned(self):
        """Unbounded-length frame starting at offset 1 on a 2 B word."""
        word_bytes = 2
        f_len = (1, inf)
        streams = [
            HStream(Bits(8), frame_len=f_len, start_offsets=[1]),
        ]
        out_offset = 0
        sju = FrameAlignmentUtils(word_bytes, out_offset)
        input_B_dst = sju.resolve_input_bytes_destinations(streams)
        tt = input_B_dst_to_fsm(word_bytes, len(streams), input_B_dst)

        def st(d):
            # Shorthand: build a StateTransItem from its dict description.
            return StateTransItem.from_dict(tt, d)

        ref = [[
            st({'st': '0->0', 'in': [[{'keep': [0, 1], 'relict': 0, 'last': 1},
                                      {'keep': ['X', 'X'], 'relict':'X', 'last':'X'}]],
                'in.keep_mask':[[[0, 0], [1, 1]]], 'in.rd':[1],
                'out.keep':[1, 0], 'out.mux':[(0, 0, 1), None], 'out.last':1}),
            st({'st': '0->0', 'in': [[{'keep': [0, 1], 'relict': 1, 'last': 1},
                                      {'keep': ['X', 'X'], 'relict':'X', 'last':'X'}]],
                'in.keep_mask':[[[0, 0], [1, 1]]], 'in.rd':[1],
                'out.keep':[1, 0], 'out.mux':[(0, 0, 1), None], 'out.last':1}),
            st({'st': '0->0', 'in': [[{'keep': [0, 1], 'relict':'X', 'last': 0},
                                      {'keep': [1, 0], 'relict':'X', 'last': 1}]],
                'in.keep_mask':[[[0, 0], [0, 0]]], 'in.rd':[1],
                'out.keep':[1, 1], 'out.mux':[(0, 0, 1), (0, 1, 0)], 'out.last':1}),
            st({'st': '0->0', 'in': [[{'keep': [0, 1], 'relict':'X', 'last': 0},
                                      {'keep': [1, 1], 'relict':'X', 'last': 0}]],
                'in.keep_mask':[[[0, 0], [0, 1]]], 'in.rd':[1],
                'out.keep':[1, 1], 'out.mux':[(0, 0, 1), (0, 1, 0)], 'out.last':0}),
            st({'st': '0->0', 'in': [[{'keep': [0, 1], 'relict':'X', 'last': 0},
                                      {'keep': [1, 1], 'relict':'X', 'last': 1}]],
                'in.keep_mask':[[[0, 0], [0, 1]]], 'in.rd':[1],
                'out.keep':[1, 1], 'out.mux':[(0, 0, 1), (0, 1, 0)], 'out.last':0}),
        ]]
        self.assertSequenceEqual(tt.state_trans, ref)

    def test_fsm1(self):
        """Fixed 2 B frame at start offset 1 on a 2 B word (spans two words)."""
        word_bytes = 2
        f_len = (2, 2)
        streams = [
            HStream(Bits(8), frame_len=f_len, start_offsets=[1]),
        ]
        out_offset = 0
        sju = FrameAlignmentUtils(word_bytes, out_offset)
        input_B_dst = sju.resolve_input_bytes_destinations(streams)
        tt = input_B_dst_to_fsm(word_bytes, len(streams), input_B_dst)

        def st(d):
            # Shorthand: build a StateTransItem from its dict description.
            return StateTransItem.from_dict(tt, d)

        ref = [[
            st({'st': '0->0',
                'in': [[{'keep': [0, 1], 'relict':'X', 'last': 0},
                        {'keep': [1, 0], 'relict':'X', 'last': 1}]],
                'in.keep_mask':[[[0, 0], [0, 0]]], 'in.rd':[1], 'out.keep':[1, 1],
                'out.mux':[(0, 0, 1), (0, 1, 0)], 'out.last':1}
               )
        ]]
        self.assertSequenceEqual(tt.state_trans, ref)

    def test_fsm_2x1B_on_2B(self):
        """Two independent 1-2 B frames joined onto a 2 B output word."""
        word_bytes = 2
        streams = [
            HStream(Bits(8 * 1), (1, 2), [0]),
            HStream(Bits(8 * 1), (1, 2), [0]),
        ]
        out_offset = 0
        sju = FrameAlignmentUtils(word_bytes, out_offset)
        input_B_dst = sju.resolve_input_bytes_destinations(streams)
        tt = input_B_dst_to_fsm(word_bytes, len(streams), input_B_dst)

        def st(d):
            # Shorthand: build a StateTransItem from its dict description.
            return StateTransItem.from_dict(tt, d)

        ref = [[
            st({'st': '0->0', 'in': [[{'keep': [1, 0], 'relict': 0, 'last': 1}],
                                     [{'keep': [1, 0], 'relict':'X', 'last': 1}]],
                'in.keep_mask':[[[0, 0]], [[0, 0]]], 'in.rd':[1, 1],
                'out.keep':[1, 1], 'out.mux':[(0, 0, 0), (1, 0, 0)], 'out.last':1}),
            st({'st': '0->1', 'in': [[{'keep': [1, 0], 'relict': 0, 'last': 1}],
                                     [{'keep': [1, 1], 'relict':'X', 'last': 1}]],
                'in.keep_mask':[[[0, 0]], [[0, 1]]], 'in.rd':[1, 1],
                'out.keep':[1, 1], 'out.mux':[(0, 0, 0), (1, 0, 0)], 'out.last':0}),
            st({'st': '0->1', 'in': [[{'keep': [1, 1], 'relict': 0, 'last': 1}],
                                     [{'keep': ['X', 'X'], 'relict':'X', 'last':'X'}]],
                'in.keep_mask':[[[0, 0]], [[1, 1]]], 'in.rd':[1, 0],
                'out.keep':[1, 1], 'out.mux':[(0, 0, 0), (0, 0, 1)], 'out.last':0})
        ], [
            st({'st': '1->0', 'in': [[{'keep': ['X', 'X'], 'relict':'X', 'last':'X'}],
                                     [{'keep': [0, 1], 'relict': 1, 'last': 1}]],
                'in.keep_mask':[[[1, 1]], [[0, 0]]], 'in.rd':[0, 1],
                'out.keep':[1, 0], 'out.mux':[(1, 0, 1), None], 'out.last':1}),
            st({'st': '1->0', 'in': [[{'keep': ['X', 'X'], 'relict':'X', 'last':'X'}],
                                     [{'keep': [1, 0], 'relict': 0, 'last': 1}]],
                'in.keep_mask':[[[1, 1]], [[0, 0]]], 'in.rd':[0, 1],
                'out.keep':[1, 0], 'out.mux':[(1, 0, 0), None], 'out.last':1}),
            st({'st': '1->0', 'in': [[{'keep': ['X', 'X'], 'relict':'X', 'last':'X'}],
                                     [{'keep': [1, 1], 'relict': 0, 'last': 1}]],
                'in.keep_mask':[[[1, 1]], [[0, 0]]], 'in.rd':[0, 1],
                'out.keep':[1, 1], 'out.mux':[(1, 0, 0), (1, 0, 1)], 'out.last':1})
        ]]
        self.assertSequenceEqual(tt.state_trans, ref)

    def test_fsm_2x1B_on_3B(self):
        """Two independent 1-3 B frames joined onto a 3 B output word."""
        word_bytes = 3
        streams = [
            HStream(Bits(8 * 1), (1, 3), [0]),
            HStream(Bits(8 * 1), (1, 3), [0]),
        ]
        out_offset = 0
        sju = FrameAlignmentUtils(word_bytes, out_offset)
        input_B_dst = sju.resolve_input_bytes_destinations(streams)
        tt = input_B_dst_to_fsm(word_bytes, len(streams), input_B_dst)

        def st(d):
            # Shorthand: build a StateTransItem from its dict description.
            return StateTransItem.from_dict(tt, d)

        ref = [[st({'st': '0->0', 'in': [[{'keep': [1, 0, 'X'], 'relict': 0, 'last': 1}],
                                         [{'keep': [1, 0, 'X'], 'relict':'X', 'last': 1}]],
                    'in.keep_mask':[[[0, 0, 1]], [[0, 0, 1]]], 'in.rd':[1, 1],
                    'out.keep':[1, 1, 0], 'out.mux':[(0, 0, 0), (1, 0, 0), None], 'out.last':1}),
                st({'st': '0->0', 'in': [[{'keep': [1, 0, 'X'], 'relict': 0, 'last': 1}],
                                         [{'keep': [1, 1, 0], 'relict':'X', 'last': 1}]],
                    'in.keep_mask':[[[0, 0, 1]], [[0, 0, 0]]], 'in.rd':[1, 1],
                    'out.keep':[1, 1, 1], 'out.mux':[(0, 0, 0), (1, 0, 0), (1, 0, 1)], 'out.last':1}),
                st({'st': '0->0', 'in': [[{'keep': [1, 1, 0], 'relict': 0, 'last': 1}],
                                         [{'keep': [1, 0, 'X'], 'relict':'X', 'last': 1}]],
                    'in.keep_mask':[[[0, 0, 0]], [[0, 0, 1]]], 'in.rd':[1, 1],
                    'out.keep':[1, 1, 1], 'out.mux':[(0, 0, 0), (0, 0, 1), (1, 0, 0)], 'out.last':1}),
                st({'st': '0->1', 'in': [[{'keep': [1, 0, 'X'], 'relict': 0, 'last': 1}],
                                         [{'keep': [1, 1, 1], 'relict':'X', 'last': 1}]],
                    'in.keep_mask':[[[0, 0, 1]], [[0, 0, 1]]], 'in.rd':[1, 1],
                    'out.keep':[1, 1, 1], 'out.mux':[(0, 0, 0), (1, 0, 0), (1, 0, 1)], 'out.last':0}),
                st({'st': '0->1', 'in': [[{'keep': [1, 1, 0], 'relict': 0, 'last': 1}],
                                         [{'keep': [1, 1, 'X'], 'relict':'X', 'last': 1}]],
                    'in.keep_mask':[[[0, 0, 0]], [[0, 1, 1]]], 'in.rd':[1, 1],
                    'out.keep':[1, 1, 1], 'out.mux':[(0, 0, 0), (0, 0, 1), (1, 0, 0)], 'out.last':0}),
                st({'st': '0->1', 'in': [[{'keep': [1, 1, 1], 'relict': 0, 'last': 1}],
                                         [{'keep': ['X', 'X', 'X'], 'relict':'X', 'last':'X'}]],
                    'in.keep_mask':[[[0, 0, 0]], [[1, 1, 1]]], 'in.rd':[1, 0],
                    'out.keep':[1, 1, 1], 'out.mux':[(0, 0, 0), (0, 0, 1), (0, 0, 2)], 'out.last':0})],
               [st({'st': '1->0', 'in': [[{'keep': ['X', 'X', 'X'], 'relict':'X', 'last':'X'}],
                                         [{'keep': [0, 0, 1], 'relict': 1, 'last': 1}]],
                    'in.keep_mask':[[[1, 1, 1]], [[0, 0, 0]]], 'in.rd':[0, 1],
                    'out.keep':[1, 0, 0], 'out.mux':[(1, 0, 2), None, None], 'out.last':1}),
                st({'st': '1->0', 'in': [[{'keep': ['X', 'X', 'X'], 'relict':'X', 'last':'X'}],
                                         [{'keep': [0, 1, 0], 'relict': 1, 'last': 1}]],
                    'in.keep_mask':[[[1, 1, 1]], [[0, 0, 0]]], 'in.rd':[0, 1],
                    'out.keep':[1, 0, 0], 'out.mux':[(1, 0, 1), None, None], 'out.last':1}),
                st({'st': '1->0', 'in': [[{'keep': ['X', 'X', 'X'], 'relict':'X', 'last':'X'}],
                                         [{'keep': [0, 1, 1], 'relict': 1, 'last': 1}]],
                    'in.keep_mask':[[[1, 1, 1]], [[0, 0, 0]]], 'in.rd':[0, 1],
                    'out.keep':[1, 1, 0], 'out.mux':[(1, 0, 1), (1, 0, 2), None], 'out.last':1}),
                st({'st': '1->0', 'in': [[{'keep': ['X', 'X', 'X'], 'relict':'X', 'last':'X'}],
                                         [{'keep': [1, 0, 'X'], 'relict': 0, 'last': 1}]],
                    'in.keep_mask':[[[1, 1, 1]], [[0, 0, 1]]], 'in.rd':[0, 1],
                    'out.keep':[1, 0, 0], 'out.mux':[(1, 0, 0), None, None], 'out.last':1}),
                st({'st': '1->0', 'in': [[{'keep': ['X', 'X', 'X'], 'relict':'X', 'last':'X'}],
                                         [{'keep': [1, 1, 0], 'relict': 0, 'last': 1}]],
                    'in.keep_mask':[[[1, 1, 1]], [[0, 0, 0]]], 'in.rd':[0, 1],
                    'out.keep':[1, 1, 0], 'out.mux':[(1, 0, 0), (1, 0, 1), None], 'out.last':1}),
                st({'st': '1->0', 'in': [[{'keep': ['X', 'X', 'X'], 'relict':'X', 'last':'X'}],
                                         [{'keep': [1, 1, 1], 'relict': 0, 'last': 1}]],
                    'in.keep_mask':[[[1, 1, 1]], [[0, 0, 0]]], 'in.rd':[0, 1],
                    'out.keep':[1, 1, 1], 'out.mux':[(1, 0, 0), (1, 0, 1), (1, 0, 2)], 'out.last':1})]]
        self.assertSequenceEqual(tt.state_trans, ref)

    def test_fsm_1x3B_on_2B_offset_1(self):
        """One fixed 3 B frame at offset 1 crossing 2 B word boundaries."""
        word_bytes = 2
        streams = [
            HStream(Bits(8 * 3), (1, 1), [1]),
        ]
        out_offset = 0
        sju = FrameAlignmentUtils(word_bytes, out_offset)
        input_B_dst = sju.resolve_input_bytes_destinations(streams)
        tt = input_B_dst_to_fsm(word_bytes, len(streams), input_B_dst)

        def st(d):
            # Shorthand: build a StateTransItem from its dict description.
            return StateTransItem.from_dict(tt, d)

        ref = [[
            st({'st':'0->0', 'in':[[{'keep':[ 0 , 1 ], 'relict': 1 , 'last': 1 },
                                    {'keep':['X', 'X'], 'relict':'X', 'last':'X'}]],
                'in.keep_mask':[[[0, 0], [1, 1]]], 'in.rd':[1],
                'out.keep':[1, 0], 'out.mux':[(0, 0, 1), None], 'out.last':1}),
            st({'st':'0->0', 'in':[[{'keep':[ 0 , 1 ], 'relict':'X', 'last': 0 },
                                    {'keep':[ 1 , 1 ], 'relict':'X', 'last': 1 }]],
                'in.keep_mask':[[[0, 0], [0, 1]]], 'in.rd':[1],
                'out.keep':[1, 1], 'out.mux':[(0, 0, 1), (0, 1, 0)], 'out.last':0})
        ]]
        self.assertSequenceEqual(tt.state_trans, ref)

    def test_fsm_1x3B_on_2B_offset_0_1(self):
        """One fixed 3 B frame that may start at offset 0 or 1 on a 2 B word."""
        word_bytes = 2
        streams = [
            HStream(Bits(8 * 3), (1, 1), [0, 1]),
        ]
        out_offset = 0
        sju = FrameAlignmentUtils(word_bytes, out_offset)
        input_B_dst = sju.resolve_input_bytes_destinations(streams)
        tt = input_B_dst_to_fsm(word_bytes, len(streams), input_B_dst)

        def st(d):
            # Shorthand: build a StateTransItem from its dict description.
            return StateTransItem.from_dict(tt, d)

        ref = [[
            st({'st':'0->0', 'in':[[{'keep':[ 0 , 1 ], 'relict': 1 , 'last': 1 },  # w 1, off 0
                                    {'keep':['X', 'X'], 'relict':'X', 'last':'X'}]],
                'in.keep_mask':[[[0, 0], [1, 1]]], 'in.rd':[1],
                'out.keep':[1, 0], 'out.mux':[(0, 0, 1), None], 'out.last':1}),
            st({'st':'0->0', 'in':[[{'keep':[ 0 , 1 ], 'relict':'X', 'last': 0 },  # w 0, off 1
                                    {'keep':[ 1 , 1 ], 'relict':'X', 'last': 1 }]],  # w 1, off 1
                'in.keep_mask':[[[0, 0], [0, 1]]], 'in.rd':[1],
                'out.keep':[1, 1], 'out.mux':[(0, 0, 1), (0, 1, 0)], 'out.last':0}),
            st({'st':'0->0', 'in':[[{'keep':[ 1 , 0 ], 'relict': 0 , 'last': 1 },  # w 1
                                    {'keep':['X', 'X'], 'relict':'X', 'last':'X'}]],
                'in.keep_mask':[[[0, 0], [1, 1]]], 'in.rd':[1],
                'out.keep':[1, 0], 'out.mux':[(0, 0, 0), None], 'out.last':1}),
            st({'st':'0->0', 'in':[[{'keep':[ 1 , 1 ], 'relict':'X', 'last': 0 },
                                    {'keep':[ 1 , 'X'], 'relict':'X', 'last':'X'}]],
                'in.keep_mask':[[[0, 0], [1, 1]]], 'in.rd':[1],
                'out.keep':[1, 1], 'out.mux':[(0, 0, 0), (0, 0, 1)], 'out.last':0})
        ]]
        self.assertSequenceEqual(tt.state_trans, ref)
if __name__ == "__main__":
    # unittest.makeSuite() was deprecated in Python 3.11 and removed in 3.13;
    # build the suite through a TestLoader instead (same behavior).
    loader = unittest.TestLoader()
    suite = unittest.TestSuite()
    # suite.addTest(FrameJoinUtilsTC('test_fsm_1x3B_on_2B_offset_1'))
    for tc in [FrameJoinUtilsTC, ]:
        suite.addTest(loader.loadTestsFromTestCase(tc))
    runner = unittest.TextTestRunner(verbosity=3)
    runner.run(suite)
| 52.933775
| 103
| 0.39447
| 2,200
| 15,986
| 2.754091
| 0.040909
| 0.049513
| 0.025252
| 0.047533
| 0.928206
| 0.919459
| 0.903284
| 0.88645
| 0.875887
| 0.872256
| 0
| 0.07896
| 0.33373
| 15,986
| 301
| 104
| 53.109635
| 0.489907
| 0.006255
| 0
| 0.567669
| 0
| 0
| 0.166121
| 0
| 0
| 0
| 0
| 0
| 0.030075
| 1
| 0.06015
| false
| 0
| 0.026316
| 0.030075
| 0.120301
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ed18b6fb9c01acaf0356bae3dcf8376fe933976d
| 2,461
|
py
|
Python
|
wechat_model/_generated_message.py
|
Cologler/wechat-model-python
|
8d67fbf5db9d3d27428100246011c1113f418971
|
[
"MIT"
] | 1
|
2017-09-10T07:44:31.000Z
|
2017-09-10T07:44:31.000Z
|
wechat_model/_generated_message.py
|
Cologler/wechat-model-python
|
8d67fbf5db9d3d27428100246011c1113f418971
|
[
"MIT"
] | null | null | null |
wechat_model/_generated_message.py
|
Cologler/wechat-model-python
|
8d67fbf5db9d3d27428100246011c1113f418971
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017~2999 - cologler <skyoflw@gmail.com>
# ----------
#
# ----------
from ._base import _BaseModel
class _Generated(_BaseModel):
    """Read-only accessors over the raw WeChat message payload.

    Each property proxies ``self._get(<PayloadKey>)``, exposing the
    payload's CamelCase keys under snake_case attribute names.
    """
    forward_flag = property(lambda self: self._get('ForwardFlag'))
    create_time = property(lambda self: self._get('CreateTime'))
    media_id = property(lambda self: self._get('MediaId'))
    msg_id = property(lambda self: self._get('MsgId'))
    status_notify_user_name = property(lambda self: self._get('StatusNotifyUserName'))
    file_size = property(lambda self: self._get('FileSize'))
    url = property(lambda self: self._get('Url'))
    img_height = property(lambda self: self._get('ImgHeight'))
    voice_length = property(lambda self: self._get('VoiceLength'))
    play_length = property(lambda self: self._get('PlayLength'))
    img_width = property(lambda self: self._get('ImgWidth'))
    status = property(lambda self: self._get('Status'))
    app_msg_type = property(lambda self: self._get('AppMsgType'))
    recommend_info = property(lambda self: self._get('RecommendInfo'))
    app_info = property(lambda self: self._get('AppInfo'))
    has_product_id = property(lambda self: self._get('HasProductId'))
    msg_type = property(lambda self: self._get('MsgType'))
    new_msg_id = property(lambda self: self._get('NewMsgId'))
    to_user_name = property(lambda self: self._get('ToUserName'))
    ori_content = property(lambda self: self._get('OriContent'))
    ticket = property(lambda self: self._get('Ticket'))
    type = property(lambda self: self._get('Type'))
    file_name = property(lambda self: self._get('FileName'))
    text = property(lambda self: self._get('Text'))
    sub_msg_type = property(lambda self: self._get('SubMsgType'))
    status_notify_code = property(lambda self: self._get('StatusNotifyCode'))
    img_status = property(lambda self: self._get('ImgStatus'))
    content = property(lambda self: self._get('Content'))
    from_user_name = property(lambda self: self._get('FromUserName'))
| 24.61
| 56
| 0.613978
| 286
| 2,461
| 5.055944
| 0.29021
| 0.220609
| 0.280775
| 0.340941
| 0.313278
| 0.132089
| 0
| 0
| 0
| 0
| 0
| 0.004937
| 0.259244
| 2,461
| 99
| 57
| 24.858586
| 0.788261
| 0.048354
| 0
| 0.325843
| 1
| 0
| 0.111777
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.325843
| false
| 0
| 0.011236
| 0.325843
| 0.674157
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
ed50289174fdf61f6bf201e6c7757fc27878b219
| 1,264
|
py
|
Python
|
estimators/Templates.py
|
tum-msv/mimo-cnn-est
|
8915a918c08c5ae61dc2208352ebb9676395b3c8
|
[
"Unlicense"
] | 2
|
2021-04-28T17:33:07.000Z
|
2021-09-22T19:35:05.000Z
|
estimators/Templates.py
|
tum-msv/mimo-cnn-est
|
8915a918c08c5ae61dc2208352ebb9676395b3c8
|
[
"Unlicense"
] | null | null | null |
estimators/Templates.py
|
tum-msv/mimo-cnn-est
|
8915a918c08c5ae61dc2208352ebb9676395b3c8
|
[
"Unlicense"
] | null | null | null |
class Estimator(object):
    """Interface template for channel estimators; subclasses override both methods."""

    def valid(self, channel_config, snr, n_coherences, n_antennas):
        """Report whether this estimator applies to the given scenario."""
        # Template implementation opts out of every scenario.
        return False

    def estimate(self, y, n_pilots):
        """Estimate the channel from observations ``y`` (no-op in the template)."""
class GenieEstimator(object):
    """Interface template for genie-aided estimators (see the true channel ``h``)."""

    def valid(self, channel_config, snr, n_coherences, n_antennas):
        """Report whether this estimator applies to the given scenario."""
        # Template implementation opts out of every scenario.
        return False

    def estimate(self, h, t, y):
        """Estimate given the true channel ``h`` (no-op in the template)."""
class Descriptor(object):
    """Mixin template providing a human-readable description of an estimator."""

    @property
    def description(self):
        """Short text describing the estimator; empty in the template."""
        return ''
#MIMO case
class Estimator_mimo(object):
    """Interface template for MIMO channel estimators."""

    def valid(self, channel_config, snr, n_coherences, n_antennas_BS, n_antennas_MS, n_pilots):
        """Report whether this estimator applies to the given MIMO scenario."""
        # Template implementation opts out of every scenario.
        return False

    def estimate(self, y, n_pilots, n_antennas_MS):
        """Estimate the MIMO channel from observations ``y`` (no-op template)."""
class Estimator_mimo_cnn(object):
    """Interface template for CNN-based MIMO channel estimators."""

    def valid(self, channel_config, snr, n_coherences, n_antennas_BS, n_antennas_MS, n_pilots):
        """Report whether this estimator applies to the given MIMO scenario."""
        # Template implementation opts out of every scenario.
        return False

    def estimate(self, y):
        """Estimate the channel from observations ``y`` alone (no-op template)."""
class Estimator_mimo_ML(object):
    """Interface template for maximum-likelihood MIMO channel estimators."""

    def valid(self, channel_config, snr, n_coherences, n_antennas_BS, n_antennas_MS, n_pilots):
        """Report whether this estimator applies to the given MIMO scenario."""
        # Template implementation opts out of every scenario.
        return False

    def estimate(self, y, n_pilots, n_antennas_MS, t_BS, t_MS):
        """Estimate the channel using BS/MS side information (no-op template)."""
class GenieEstimator_mimo(object):
    """Interface template for genie-aided MIMO estimators (see the true channel)."""

    def valid(self, channel_config, snr, n_coherences, n_antennas_BS, n_antennas_MS, n_pilots):
        """Report whether this estimator applies to the given MIMO scenario."""
        # Template implementation opts out of every scenario.
        return False

    def estimate(self, h, y):
        """Estimate given the true channel ``h`` (no-op in the template)."""
| 25.28
| 95
| 0.685918
| 179
| 1,264
| 4.569832
| 0.167598
| 0.132029
| 0.102689
| 0.132029
| 0.754279
| 0.754279
| 0.751834
| 0.751834
| 0.742054
| 0.742054
| 0
| 0
| 0.225475
| 1,264
| 50
| 96
| 25.28
| 0.835546
| 0.00712
| 0
| 0.529412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.382353
| false
| 0.176471
| 0
| 0.205882
| 0.794118
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 9
|
ed896f44517b7b020aae1871c8b6cf792ca7d91b
| 186
|
py
|
Python
|
wagtail/wagtailsearch/__init__.py
|
lojack/wagtail
|
eaf61d5550795a3278184261f6f956f603df8d46
|
[
"BSD-3-Clause"
] | null | null | null |
wagtail/wagtailsearch/__init__.py
|
lojack/wagtail
|
eaf61d5550795a3278184261f6f956f603df8d46
|
[
"BSD-3-Clause"
] | null | null | null |
wagtail/wagtailsearch/__init__.py
|
lojack/wagtail
|
eaf61d5550795a3278184261f6f956f603df8d46
|
[
"BSD-3-Clause"
] | null | null | null |
from wagtail.wagtailsearch.indexed import Indexed
from wagtail.wagtailsearch.signal_handlers import register_signal_handlers
from wagtail.wagtailsearch.backends import get_search_backend
| 62
| 74
| 0.908602
| 23
| 186
| 7.130435
| 0.521739
| 0.20122
| 0.439024
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05914
| 186
| 3
| 75
| 62
| 0.937143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9c0da1c7e6832c0148cd057c0174a09761f5c0b1
| 42
|
py
|
Python
|
template_creator/tests/it-tests/python_two_lambda_folders_s3_event_api_gateway/s3_lambda/helper_file.py
|
VanOvermeire/sam-template-creator
|
0b39440c9051ccd30fc80bfa2e4d7da40c7e50b7
|
[
"MIT"
] | 3
|
2019-06-10T19:46:23.000Z
|
2021-05-06T12:15:45.000Z
|
template_creator/tests/it-tests/python_two_lambda_folders_s3_event_api_gateway/s3_lambda/helper_file.py
|
VanOvermeire/sam-template-creator
|
0b39440c9051ccd30fc80bfa2e4d7da40c7e50b7
|
[
"MIT"
] | 2
|
2019-10-20T14:57:50.000Z
|
2020-01-01T00:52:32.000Z
|
template_creator/tests/it-tests/python_two_lambda_folders_s3_event_api_gateway/s3_lambda/helper_file.py
|
VanOvermeire/sam-template-creator
|
0b39440c9051ccd30fc80bfa2e4d7da40c7e50b7
|
[
"MIT"
] | 2
|
2019-10-19T07:40:53.000Z
|
2019-10-19T08:29:40.000Z
|
def a_helper():
    """Return the fixed greeting used by the integration-test fixture."""
    greeting = "hello world"
    return greeting
| 10.5
| 24
| 0.642857
| 6
| 42
| 4.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.238095
| 42
| 3
| 25
| 14
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0.268293
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
9c4a82995208b355de83b3ebee83f4873fa44e3b
| 1,407
|
py
|
Python
|
tests/test_config.py
|
dvdangelo33/pyaim
|
cef3bdc782eddb062763a5ce7318ab4212a4b07c
|
[
"MIT"
] | 18
|
2019-03-11T15:40:44.000Z
|
2022-01-06T07:24:25.000Z
|
tests/test_config.py
|
dvdangelo33/pyaim
|
cef3bdc782eddb062763a5ce7318ab4212a4b07c
|
[
"MIT"
] | 37
|
2019-03-16T17:40:47.000Z
|
2021-12-16T20:32:29.000Z
|
tests/test_config.py
|
dvdangelo33/pyaim
|
cef3bdc782eddb062763a5ce7318ab4212a4b07c
|
[
"MIT"
] | 3
|
2020-07-10T07:05:12.000Z
|
2021-05-25T21:01:58.000Z
|
import unittest
from pathlib import Path
import pyaim
class TestDevelopmentConfig(unittest.TestCase):
    """Check the development CLIPasswordSDK paths exposed by the config.

    Fixes: the base class was the undefined name ``TestCase`` (the file only
    does ``import unittest``); ``create_cfginstances`` was never invoked by
    the unittest runner, leaving ``wincli``/``nixcli`` unset; and ``is``
    compared identity against a freshly constructed ``Path`` (always False) —
    path equality needs ``==``.
    """
    def create_cfginstances(self):
        # NOTE(review): `cfg` is not imported anywhere in this file —
        # presumably a pyaim config module; confirm the intended import.
        self.wincli = cfg.BaseConfig.WIN_CLIPASSWORDSDK
        self.nixcli = cfg.BaseConfig.NIX_CLIPASSWORDSDK
    # Run the fixture before every test method (kept under its old name for
    # backward compatibility).
    setUp = create_cfginstances
    def test_app_is_development(self):
        self.assertEqual(self.wincli, Path('C:/Windows/Program Files (x86)/CyberArk/ApplicationPasswordSdk/CLIPasswordSDK.exe'))
        self.assertEqual(self.nixcli, Path('/opt/carkaim/sdk/CLIPasswordSDK'))
class TestTestingConfig(unittest.TestCase):
    """Check the testing CLIPasswordSDK paths exposed by the config.

    Fixes: undefined base name ``TestCase`` -> ``unittest.TestCase``; the
    fixture method was never called by the runner, so it is now also bound as
    ``setUp``; ``is`` identity checks against new ``Path`` objects (always
    False) replaced with equality assertions.
    """
    def create_cfginstances(self):
        # NOTE(review): `cfg` is not imported in this file — presumably a
        # pyaim config module; confirm the intended import.
        self.wincli = cfg.BaseConfig.WIN_CLIPASSWORDSDK
        self.nixcli = cfg.BaseConfig.NIX_CLIPASSWORDSDK
    # Run the fixture before every test method (old name kept for compat).
    setUp = create_cfginstances
    def test_app_is_testing(self):
        self.assertEqual(self.wincli, Path('C:/Windows/Program Files (x86)/CyberArk/ApplicationPasswordSdk/CLIPasswordSDK.exe'))
        self.assertEqual(self.nixcli, Path('/opt/carkaim/sdk/CLIPasswordSDK'))
class TestProductionConfig(unittest.TestCase):
    """Check the production CLIPasswordSDK paths exposed by the config.

    Fixes: undefined base name ``TestCase`` -> ``unittest.TestCase``; the
    fixture method was never called by the runner, so it is now also bound as
    ``setUp``; ``is`` identity checks against new ``Path`` objects (always
    False) replaced with equality assertions.
    """
    def create_cfginstances(self):
        # NOTE(review): `cfg` is not imported in this file — presumably a
        # pyaim config module; confirm the intended import.
        self.wincli = cfg.BaseConfig.WIN_CLIPASSWORDSDK
        self.nixcli = cfg.BaseConfig.NIX_CLIPASSWORDSDK
    # Run the fixture before every test method (old name kept for compat).
    setUp = create_cfginstances
    def test_app_is_production(self):
        self.assertEqual(self.wincli, Path('C:/Windows/Program Files (x86)/CyberArk/ApplicationPasswordSdk/CLIPasswordSDK.exe'))
        self.assertEqual(self.nixcli, Path('/opt/carkaim/sdk/CLIPasswordSDK'))
# Allow running this test module directly as a script.
if __name__ == "__main__":
    unittest.main()
| 37.026316
| 129
| 0.748401
| 163
| 1,407
| 6.300614
| 0.263804
| 0.046738
| 0.105161
| 0.084713
| 0.84518
| 0.84518
| 0.84518
| 0.84518
| 0.84518
| 0.84518
| 0
| 0.005004
| 0.147832
| 1,407
| 38
| 130
| 37.026316
| 0.851543
| 0
| 0
| 0.576923
| 0
| 0
| 0.244318
| 0.185369
| 0
| 0
| 0
| 0
| 0.230769
| 1
| 0.230769
| false
| 0.461538
| 0.115385
| 0
| 0.461538
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
9c4ae65217a3ce952e82a4f18cbc5fac6805b0f9
| 28,929
|
py
|
Python
|
DeBERTa/apps/_glue_tasks.py
|
novyantsp/DeBERTa
|
919cf8146480daa6c39ed8b1afaeb0d5f7775b14
|
[
"MIT"
] | 2
|
2021-12-29T08:50:46.000Z
|
2022-03-23T01:55:50.000Z
|
DeBERTa/apps/_glue_tasks.py
|
novyantsp/DeBERTa
|
919cf8146480daa6c39ed8b1afaeb0d5f7775b14
|
[
"MIT"
] | null | null | null |
DeBERTa/apps/_glue_tasks.py
|
novyantsp/DeBERTa
|
919cf8146480daa6c39ed8b1afaeb0d5f7775b14
|
[
"MIT"
] | 1
|
2021-11-11T23:09:35.000Z
|
2021-11-11T23:09:35.000Z
|
# Copyright (c) Microsoft, Inc. 2020
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
# Author: penhe@microsoft.com
# Date: 01/25/2019
#
from glob import glob
from collections import OrderedDict,defaultdict,Sequence
import copy
import math
from scipy.special import softmax
import numpy as np
import pdb
import os
import sys
import csv
import random
import torch
import re
import ujson as json
from .metrics import *
from .task import EvalData, DataTask
from DeBERTa.utils import xtqdm as tqdm
from DeBERTa.data import ExampleInstance, ExampleSet, DynamicDataset,example_to_feature
from DeBERTa.data.example import _truncate_segments
from DeBERTa.data.example import *
import DeBERTa.utils as utils
# Module-level logger shared by the task classes below.
logger=utils.get_logger()
# Public API of this module: the GLUE/ANLI task classes.
__all__ = ["MNLITask", "ANLITask", "STSBTask", "SST2Task", "QQPTask", "ColaTask", "MRPCTask", "RTETask", "QNLITask"]
class STSBTask(DataTask):
    """GLUE STS-B task: sentence-pair regression on a similarity score.

    Reads TSV splits from ``data_dir``; the sentence pair is in columns 7/8
    and the float gold score in column 9 (header row skipped).
    """
    def __init__(self, data_dir, tokenizer, **kwargs):
        super().__init__(tokenizer, **kwargs)
        self.data_dir = data_dir
    def train_data(self, max_seq_len=512, dataset_size=None, epochs=1, mask_gen=None, **kwargs):
        # Build the training DynamicDataset from train.tsv; size defaults to
        # one pass per epoch over all examples.
        input_src = os.path.join(self.data_dir, 'train.tsv')
        assert os.path.exists(input_src), f"{input_src} doesn't exists"
        data = self._read_tsv(input_src)
        examples = ExampleSet([ExampleInstance((l[7], l[8]), float(l[9])) for l in data[1:]]) # if l[3] in ['slate']])
        if dataset_size is None:
            dataset_size = len(examples)*epochs
        return DynamicDataset(examples, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len, mask_gen=mask_gen, label_type='float'), \
            dataset_size = dataset_size, shuffle=True, **kwargs)
    def eval_data(self, max_seq_len=512, dataset_size=None, **kwargs):
        # Wrap the dev split as a DynamicDataset; size defaults to full split.
        ds = [
            self._data('dev', 'dev.tsv', 'dev')
        ]
        for d in ds:
            if dataset_size is None:
                _size = len(d.data)
            else:
                _size = dataset_size
            d.data = DynamicDataset(d.data, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len, label_type='float'), dataset_size = _size, **kwargs)
        return ds
    def test_data(self,max_seq_len=512, dataset_size = None, **kwargs):
        """See base class."""
        ds = [
            self._data('test', 'test.tsv', 'test')
        ]
        for d in ds:
            if dataset_size is None:
                _size = len(d.data)
            else:
                _size = dataset_size
            d.data = DynamicDataset(d.data, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len, label_type='float'), dataset_size = _size, **kwargs)
        return ds
    def _data(self, name, path, type_name = 'dev'):
        # Load one split; test rows carry no gold score (column 9 not read).
        input_src = os.path.join(self.data_dir, path)
        assert os.path.exists(input_src), f"{input_src} doesn't exists"
        data = self._read_tsv(input_src)
        predict_fn = self.get_predict_fn()
        if type_name=='test':
            examples = ExampleSet([ExampleInstance((l[7], l[8])) for l in data[1:]])
        else:
            examples = ExampleSet([ExampleInstance((l[7], l[8]), float(l[9])) for l in data[1:]])
        return EvalData(name, examples,
            metrics_fn = self.get_metrics_fn(), predict_fn = predict_fn)
    def get_metrics_fn(self):
        # Regression metrics: Pearson and Spearman correlation coefficients.
        def metric_fn(logits, labels):
            return OrderedDict(
                pearsonr=pearsonr(labels, logits)[0],
                spearmanr= spearmanr(labels, logits)[0])
        return metric_fn
    def get_predict_fn(self):
        """Return a function that writes a GLUE submission TSV from predictions."""
        def predict_fn(logits, output_dir, name, prefix):
            output=os.path.join(output_dir, 'submit-{}-{}.tsv'.format(name, prefix))
            with open(output, 'w', encoding='utf-8') as fs:
                fs.write('index\tpredictions\n')
                for i,p in enumerate(np.squeeze(logits)):
                    fs.write('{}\t{}\n'.format(i, p))
        return predict_fn
    def get_labels(self):
        """See base class."""
        return ["1"]
class RTETask(DataTask):
    """GLUE RTE task: binary entailment classification of sentence pairs.

    Sentences come from TSV columns 1/2 and the string label from column 3.
    """
    def __init__(self, data_dir, tokenizer, **kwargs):
        super().__init__(tokenizer, **kwargs)
        self.data_dir = data_dir
    def train_data(self, max_seq_len=512, dataset_size=None, epochs=1, mask_gen=None, **kwargs):
        input_src = os.path.join(self.data_dir, 'train.tsv')
        assert os.path.exists(input_src), f"{input_src} doesn't exists"
        data = self._read_tsv(input_src)
        examples = [ExampleInstance((l[1],l[2]), self.label2id(l[3])) for l in data[1:]] # if l[3] in ['slate']])
        examples = ExampleSet(examples)
        if dataset_size is None:
            dataset_size = len(examples)*epochs
        return DynamicDataset(examples, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len, mask_gen=mask_gen), \
            dataset_size = dataset_size, shuffle=True, **kwargs)
    def eval_data(self, max_seq_len=512, dataset_size=None, extra_data=None, **kwargs):
        # extra_data: comma-separated "name:path" pairs appended as extra
        # evaluation sets alongside the standard dev split.
        ds = [
            self._data('dev', "dev.tsv", 'dev'),
        ]
        if extra_data is not None:
            extra_data = extra_data.split(',')
            for d in extra_data:
                n,path=d.split(':')
                ds.append(self._data(n, path, 'dev+'))
        for d in ds:
            if dataset_size is None:
                _size = len(d.data)
            else:
                _size = dataset_size
            d.data = DynamicDataset(d.data, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len), dataset_size = _size, **kwargs)
        return ds
    def test_data(self,max_seq_len=512, dataset_size = None, **kwargs):
        """See base class."""
        ds = [
            self._data('test', 'test.tsv', 'test')
        ]
        for d in ds:
            if dataset_size is None:
                _size = len(d.data)
            else:
                _size = dataset_size
            d.data = DynamicDataset(d.data, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len), dataset_size = _size, **kwargs)
        return ds
    def _data(self, name, path, type_name = 'dev'):
        # Load one split; test rows carry no gold label.
        input_src = os.path.join(self.data_dir, path)
        assert os.path.exists(input_src), f"{input_src} doesn't exists"
        data = self._read_tsv(input_src)
        if type_name=='test':
            examples = ExampleSet([ExampleInstance((l[1], l[2])) for l in data[1:]])
        else:
            examples = ExampleSet([ExampleInstance((l[1],l[2]), self.label2id(l[3])) for l in data[1:]])
        predict_fn = self.get_predict_fn(examples)
        return EvalData(name, examples,
            metrics_fn = self.get_metrics_fn(), predict_fn = predict_fn)
    def get_metrics_fn(self):
        """Return a metrics function computing accuracy."""
        def metrics_fn(logits, labels):
            return OrderedDict(accuracy=metric_accuracy(logits, labels))
        return metrics_fn
    def get_predict_fn(self, data):
        """Return a function that writes per-example probabilities and a GLUE submission TSV."""
        def predict_fn(logits, output_dir, name, prefix):
            output = os.path.join(output_dir, 'pred-probs-{}-{}.tsv'.format(name, prefix))
            probs = softmax(logits, axis=-1)
            with open(output, 'w', encoding='utf-8') as fs:
                fs.write('sentence1\tsentence2\tnot_entailment\tentailment\n')
                # NOTE(review): the loop variable `probs` shadows the outer
                # probability array; works only because zip captures it first.
                for d,probs in zip(data, probs):
                    fs.write(f'{d.segments[0]}\t{d.segments[1]}\t{probs[0]}\t{probs[1]}\n')
            output=os.path.join(output_dir, 'submit-{}-{}.tsv'.format(name, prefix))
            preds = np.argmax(logits, axis=1)
            labels = self.get_labels()
            with open(output, 'w', encoding='utf-8') as fs:
                fs.write('index\tpredictions\n')
                for i,p in enumerate(preds):
                    fs.write('{}\t{}\n'.format(i, labels[p]))
        return predict_fn
    def get_labels(self):
        """See base class."""
        return ["not_entailment", "entailment"]
class MRPCTask(DataTask):
    """GLUE MRPC task: binary paraphrase detection.

    Sentences come from TSV columns 3/4 and the label from column 0.
    """
    def __init__(self, data_dir, tokenizer, **kwargs):
        super().__init__(tokenizer, **kwargs)
        self.data_dir = data_dir
    def train_data(self, max_seq_len=512, dataset_size=None, epochs=1, mask_gen=None, **kwargs):
        input_src = os.path.join(self.data_dir, 'train.tsv')
        assert os.path.exists(input_src), f"{input_src} doesn't exists"
        data = self._read_tsv(input_src)
        examples = ExampleSet([ExampleInstance((l[3],l[4]), self.label2id(l[0])) for l in data[1:]]) # if l[3] in ['slate']])
        if dataset_size is None:
            dataset_size = len(examples)*epochs
        return DynamicDataset(examples, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len, mask_gen=mask_gen), \
            dataset_size = dataset_size, shuffle=True, **kwargs)
    def eval_data(self, max_seq_len=512, dataset_size=None, **kwargs):
        ds = [
            self._data('dev', "dev.tsv", 'dev'),
        ]
        for d in ds:
            if dataset_size is None:
                _size = len(d.data)
            else:
                _size = dataset_size
            d.data = DynamicDataset(d.data, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len), dataset_size = _size, **kwargs)
        return ds
    def test_data(self,max_seq_len=512, dataset_size = None, **kwargs):
        """See base class."""
        ds = [
            self._data('test', 'test.tsv', 'test')
        ]
        for d in ds:
            if dataset_size is None:
                _size = len(d.data)
            else:
                _size = dataset_size
            d.data = DynamicDataset(d.data, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len), dataset_size = _size, **kwargs)
        return ds
    def _data(self, name, path, type_name = 'dev'):
        # Load one split; test rows carry no gold label.
        input_src = os.path.join(self.data_dir, path)
        assert os.path.exists(input_src), f"{input_src} doesn't exists"
        data = self._read_tsv(input_src)
        predict_fn = self.get_predict_fn()
        if type_name=='test':
            examples = ExampleSet([ExampleInstance((l[3], l[4])) for l in data[1:]])
        else:
            examples = ExampleSet([ExampleInstance((l[3],l[4]), self.label2id(l[0])) for l in data[1:]])
        return EvalData(name, examples,
            metrics_fn = self.get_metrics_fn(), predict_fn = predict_fn)
    def get_metrics_fn(self):
        """Return a metrics function computing accuracy and F1."""
        def metrics_fn(logits, labels):
            return OrderedDict(accuracy= metric_accuracy(logits, labels),
                f1=metric_f1(logits, labels))
        return metrics_fn
    def get_predict_fn(self):
        """Return a function that writes a GLUE submission TSV from predictions."""
        def predict_fn(logits, output_dir, name, prefix):
            output=os.path.join(output_dir, 'submit-{}-{}.tsv'.format(name, prefix))
            preds = np.argmax(logits, axis=1)
            labels = self.get_labels()
            with open(output, 'w', encoding='utf-8') as fs:
                fs.write('index\tpredictions\n')
                for i,p in enumerate(preds):
                    fs.write('{}\t{}\n'.format(i, labels[p]))
        return predict_fn
    def get_labels(self):
        """See base class."""
        return ["0", "1"]
class QNLITask(DataTask):
    """GLUE QNLI task: binary question/sentence entailment.

    Pairs come from TSV columns 2/1 (sentence first, question second in the
    tuple order used here) and the string label from column 3.
    """
    def __init__(self, data_dir, tokenizer, **kwargs):
        super().__init__(tokenizer, **kwargs)
        self.data_dir = data_dir
    def train_data(self, max_seq_len=512, dataset_size=None, epochs=1, mask_gen=None, **kwargs):
        input_src = os.path.join(self.data_dir, 'train.tsv')
        assert os.path.exists(input_src), f"{input_src} doesn't exists"
        data = self._read_tsv(input_src)
        examples = ExampleSet([ExampleInstance((l[2],l[1]), self.label2id(l[3])) for l in data[1:]]) # if l[3] in ['slate']])
        if dataset_size is None:
            dataset_size = len(examples)*epochs
        return DynamicDataset(examples, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len, mask_gen=mask_gen), \
            dataset_size = dataset_size, shuffle=True, **kwargs)
    def eval_data(self, max_seq_len=512, dataset_size=None, **kwargs):
        ds = [
            self._data('dev', "dev.tsv", 'dev')
        ]
        for d in ds:
            if dataset_size is None:
                _size = len(d.data)
            else:
                _size = dataset_size
            d.data = DynamicDataset(d.data, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len), dataset_size = _size, **kwargs)
        return ds
    def test_data(self,max_seq_len=512, dataset_size = None, **kwargs):
        """See base class."""
        ds = [
            self._data('test', 'test.tsv', 'test')
        ]
        for d in ds:
            if dataset_size is None:
                _size = len(d.data)
            else:
                _size = dataset_size
            d.data = DynamicDataset(d.data, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len), dataset_size = _size, **kwargs)
        return ds
    def _data(self, name, path, type_name = 'dev'):
        # Load one split; test rows carry no gold label.
        input_src = os.path.join(self.data_dir, path)
        assert os.path.exists(input_src), f"{input_src} doesn't exists"
        data = self._read_tsv(input_src)
        predict_fn = self.get_predict_fn()
        if type_name=='test':
            examples = ExampleSet([ExampleInstance((l[2], l[1])) for l in data[1:]])
        else:
            examples = ExampleSet([ExampleInstance((l[2],l[1]), self.label2id(l[3])) for l in data[1:]])
        return EvalData(name, examples,
            metrics_fn = self.get_metrics_fn(), predict_fn = predict_fn)
    def get_metrics_fn(self):
        """Return a metrics function computing accuracy."""
        def metrics_fn(logits, labels):
            return OrderedDict(accuracy=metric_accuracy(logits, labels))
        return metrics_fn
    def get_predict_fn(self):
        """Return a function that writes a GLUE submission TSV from predictions."""
        def predict_fn(logits, output_dir, name, prefix):
            output=os.path.join(output_dir, 'submit-{}-{}.tsv'.format(name, prefix))
            preds = np.argmax(logits, axis=1)
            labels = self.get_labels()
            with open(output, 'w', encoding='utf-8') as fs:
                fs.write('index\tpredictions\n')
                for i,p in enumerate(preds):
                    fs.write('{}\t{}\n'.format(i, labels[p]))
        return predict_fn
    def get_labels(self):
        """See base class."""
        return ["not_entailment", "entailment"]
class ColaTask(DataTask):
    """GLUE CoLA task: binary grammatical-acceptability classification.

    Train rows have no header: sentence in column 3, label in column 1.
    ``__init__`` eagerly loads train.tsv and holds the last 1000 rows aside
    as ``train_dev``; ``with_dev_data`` is accepted but unused here.
    """
    def __init__(self, data_dir, tokenizer, soft_threshold=0, with_dev_data=None, **kwargs):
        super().__init__(tokenizer, **kwargs)
        self.data_dir = data_dir
        self.soft_threshold = soft_threshold
        # `if True:` looks like a leftover from a removed condition.
        if True:
            input_src = os.path.join(self.data_dir, 'train.tsv')
            assert os.path.exists(input_src), f"{input_src} doesn't exists"
            data = self._read_tsv(input_src)
        def get_hard_label(l):
            # Map the row's label to a one-hot pair [P(0), P(1)].
            #return self.label2id(l[1])
            try:
                l = self.label2id(l[1])
            except Exception:
                # NOTE(review): debugger trap left in — drops into pdb on any
                # malformed row instead of raising; consider removing.
                import pdb
                pdb.set_trace()
            if l==0:
                return [1,0]
            else:
                return [0,1]
        train_examples = [ExampleInstance((l[3],), label=get_hard_label(l), domain_label=1) for l in data]
        # Hold out the last 1000 rows; train_data() still trains on both parts.
        self.train_split = train_examples[:-1000]
        self.train_dev = train_examples[-1000:]
    def train_data(self, max_seq_len=512, dataset_size=None, epochs=1, mask_gen=None, **kwargs):
        examples = ExampleSet(self.train_dev + self.train_split) # if l[3] in ['slate']])
        if dataset_size is None:
            dataset_size = len(examples)*epochs
        return DynamicDataset(examples, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len, mask_gen=mask_gen, label_type='float', training=True), \
            dataset_size = dataset_size, shuffle=True, **kwargs)
    def eval_data(self, max_seq_len=512, dataset_size=None, **kwargs):
        ds = [
            self._data('dev', "dev.tsv", 'dev'),
        ]
        for d in ds:
            if dataset_size is None:
                _size = len(d.data)
            else:
                _size = dataset_size
            d.data = DynamicDataset(d.data, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len, label_type='int'), dataset_size = _size, **kwargs)
        return ds
    def test_data(self,max_seq_len=512, dataset_size = None, **kwargs):
        """See base class."""
        ds = [
            self._data('test', 'test.tsv', 'test'),
        ]
        # Optional extra test sets: kwargs['extra_data'] is comma-separated
        # "name:path" pairs.
        if 'extra_data' in kwargs and kwargs['extra_data'] is not None:
            extra_data = kwargs['extra_data'].split(',')
            for d in extra_data:
                n,path=d.split(':')
                ds.append(self._data(n, path, 'test+'))
        for d in ds:
            if dataset_size is None:
                _size = len(d.data)
            else:
                _size = dataset_size
            d.data = DynamicDataset(d.data, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len, label_type='int'), dataset_size = _size, **kwargs)
        return ds
    def _data(self, name, path, type_name = 'dev', ignore_metric=False):
        # `path` may be a TSV filename or an already-built ExampleSet.
        if isinstance(path, str):
            input_src = os.path.join(self.data_dir, path)
            assert os.path.exists(input_src), f"{input_src} doesn't exists"
            data = self._read_tsv(input_src)
            if type_name=='test':
                examples = ExampleSet([ExampleInstance((l[1], )) for l in data[1:]])
            else:
                examples = ExampleSet([ExampleInstance((l[3],), self.label2id(l[1])) for l in data])
        elif isinstance(path, ExampleSet):
            examples = path
        else:
            raise ValueError('Input type of path not supported')
        predict_fn = self.get_predict_fn(examples)
        return EvalData(name, examples,
            metrics_fn = self.get_metrics_fn(), predict_fn = predict_fn, ignore_metric=ignore_metric, critial_metrics=['mcc'])
    def get_metrics_fn(self):
        # CoLA reports accuracy plus Matthews correlation coefficient.
        def metric_fn(logits, labels):
            return OrderedDict(
                accuracy= metric_accuracy(logits, labels),
                mcc= metric_mcc(logits, labels))
        return metric_fn
    def get_predict_fn(self, data):
        """Return a function that writes per-example probabilities and a submission file."""
        def predict_fn(logits, output_dir, name, prefix):
            output = os.path.join(output_dir, 'pred-probs-{}-{}.tsv'.format(name, prefix))
            probs = softmax(logits, axis=-1)
            with open(output, 'w', encoding='utf-8') as fs:
                # NOTE(review): 'lable_0' is a typo in the emitted header; left
                # as-is since downstream tooling may depend on it.
                fs.write('sentence\tlable_0\tlabel_1\n')
                for d,probs in zip(data, probs):
                    fs.write(f'{d.segments[0]}\t{probs[0]}\t{probs[1]}\n')
            output=os.path.join(output_dir, 'submit-{}-{}.tsv'.format(name, prefix))
            preds = np.argmax(logits, axis=-1)
            labels = self.get_labels()
            with open(output, 'w', encoding='utf-8') as fs:
                # 'test_id'/'test_od' sets use a 1-based, comma-separated
                # Kaggle-style format; everything else is GLUE TSV.
                offset = 0
                sep = '\t'
                if name in ['test_id', 'test_od']:
                    offset = 1
                    sep = ','
                    fs.write('Id,Label\n')
                else:
                    fs.write('index\tpredictions\n')
                for i,p in enumerate(preds):
                    fs.write('{}{}{}\n'.format(i+offset, sep, labels[p]))
        return predict_fn
    def get_labels(self):
        """See base class."""
        return ["0", "1"]
class SST2Task(DataTask):
    """GLUE SST-2 task: binary sentiment classification of single sentences.

    Train/dev rows have the sentence in column 0 and label in column 1;
    test rows carry the sentence in column 1 with no label.
    """
    def __init__(self, data_dir, tokenizer, **kwargs):
        super().__init__(tokenizer, **kwargs)
        self.data_dir = data_dir
    def train_data(self, max_seq_len=512, dataset_size=None, epochs=1, mask_gen=None, **kwargs):
        input_src = os.path.join(self.data_dir, 'train.tsv')
        assert os.path.exists(input_src), f"{input_src} doesn't exists"
        data = self._read_tsv(input_src)
        examples = ExampleSet([ExampleInstance((l[0],), self.label2id(l[1])) for l in data[1:]]) # if l[3] in ['slate']])
        if dataset_size is None:
            dataset_size = len(examples)*epochs
        return DynamicDataset(examples, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len, mask_gen=mask_gen), \
            dataset_size = dataset_size, shuffle=True, **kwargs)
    def eval_data(self, max_seq_len=512, dataset_size=None, **kwargs):
        ds = [
            self._data('dev', 'dev.tsv', 'dev')
        ]
        for d in ds:
            if dataset_size is None:
                _size = len(d.data)
            else:
                _size = dataset_size
            d.data = DynamicDataset(d.data, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len), dataset_size = _size, **kwargs)
        return ds
    def test_data(self,max_seq_len=512, dataset_size = None, **kwargs):
        """See base class."""
        ds = [
            self._data('test', 'test.tsv', 'test')
        ]
        for d in ds:
            if dataset_size is None:
                _size = len(d.data)
            else:
                _size = dataset_size
            d.data = DynamicDataset(d.data, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len), dataset_size = _size, **kwargs)
        return ds
    def _data(self, name, path, type_name = 'dev'):
        # Column layout differs by split: test has no label, 'orig-test' keeps
        # a label in column 3, dev/train use columns 0/1.
        input_src = os.path.join(self.data_dir, path)
        assert os.path.exists(input_src), f"{input_src} doesn't exists"
        data = self._read_tsv(input_src)
        predict_fn = self.get_predict_fn()
        if type_name=='test':
            examples = ExampleSet([ExampleInstance((l[1], )) for l in data[1:]])
        elif type_name=='orig-test':
            examples = ExampleSet([ExampleInstance((l[1], ), self.label2id(l[3])) for l in data[1:]])
        else:
            examples = ExampleSet([ExampleInstance((l[0],), self.label2id(l[1])) for l in data[1:]])
        return EvalData(name, examples,
            metrics_fn = self.get_metrics_fn(), predict_fn = predict_fn)
    def get_metrics_fn(self):
        """Return a metrics function computing accuracy."""
        def metrics_fn(logits, labels):
            return OrderedDict(accuracy= metric_accuracy(logits, labels))
        return metrics_fn
    def get_labels(self):
        """See base class."""
        return ["0", "1"]
class QQPTask(DataTask):
    """GLUE QQP task: binary duplicate-question detection.

    The active code reads questions from columns 0/1 and the label from
    column 2, keeping only rows with exactly 3 fields; the commented-out
    variants used the original 6-column QQP layout.
    """
    def __init__(self, data_dir, tokenizer, **kwargs):
        super().__init__(tokenizer, **kwargs)
        self.data_dir = data_dir
    def train_data(self, max_seq_len=512, dataset_size=None, epochs=1, mask_gen=None, **kwargs):
        input_src = os.path.join(self.data_dir, 'train.tsv')
        assert os.path.exists(input_src), f"{input_src} doesn't exists"
        data = self._read_tsv(input_src)
        # examples = ExampleSet([ExampleInstance((l[3], l[4]), self.label2id(l[5])) for l in data[1:] if len(l)==6]) # if l[3] in ['slate']])
        examples = ExampleSet([ExampleInstance((l[0], l[1]), self.label2id(l[2])) for l in data[1:] if len(l)==3])
        if dataset_size is None:
            dataset_size = len(examples)*epochs
        return DynamicDataset(examples, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len, mask_gen=mask_gen), \
            dataset_size = dataset_size, shuffle=True, **kwargs)
    def eval_data(self, max_seq_len=512, dataset_size=None, **kwargs):
        ds = [
            self._data('dev', 'dev.tsv', 'dev')
        ]
        for d in ds:
            if dataset_size is None:
                _size = len(d.data)
            else:
                _size = dataset_size
            d.data = DynamicDataset(d.data, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len), dataset_size = _size, **kwargs)
        return ds
    def test_data(self,max_seq_len=512, dataset_size = None, **kwargs):
        """See base class."""
        ds = [
            self._data('test', 'test.tsv', 'test')
        ]
        for d in ds:
            if dataset_size is None:
                _size = len(d.data)
            else:
                _size = dataset_size
            d.data = DynamicDataset(d.data, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len), dataset_size = _size, **kwargs)
        return ds
    def _data(self, name, path, type_name = 'dev'):
        input_src = os.path.join(self.data_dir, path)
        assert os.path.exists(input_src), f"{input_src} doesn't exists"
        data = self._read_tsv(input_src)
        predict_fn = self.get_predict_fn()
        if type_name=='test':
            # examples = ExampleSet([ExampleInstance((l[-2], l[-1])) for l in data[1:]])
            examples = ExampleSet([ExampleInstance((l[0], l[1])) for l in data[1:]])
            # examples = ExampleSet([ExampleInstance((l[0], l[1]), self.label2id(l[2])) for l in data[1:] if len(l)==3])
        else:
            # examples = ExampleSet([ExampleInstance((l[3], l[4]), self.label2id(l[5])) for l in data[1:] if len(l)==6])
            examples = ExampleSet([ExampleInstance((l[0], l[1]), self.label2id(l[2])) for l in data[1:] if len(l)==3])
        return EvalData(name, examples,
            metrics_fn = self.get_metrics_fn(), predict_fn = predict_fn)
    def get_metrics_fn(self):
        """Return a metrics function computing accuracy and F1."""
        def metrics_fn(logits, labels):
            return OrderedDict(accuracy= metric_accuracy(logits, labels),
                f1=metric_f1(logits, labels))
        return metrics_fn
    def get_labels(self):
        """See base class."""
        # NOTE(review): QQP is nominally binary; the extra "-1" entry is
        # unusual — confirm against the label2id mapping in DataTask.
        return ["-1", "0", "1"]
class MNLITask(DataTask):
    """GLUE MNLI task: 3-way NLI on premise/hypothesis pairs.

    Train/dev/test pairs come from TSV columns 8/9; the gold label is the
    last column. Also provides loaders for the GLUE diagnostic set and the
    ANLI-format TSVs.

    Fix: ``eval_data``/``test_data`` previously set ``_size`` only when
    ``dataset_size`` was None, so passing an explicit ``dataset_size`` raised
    ``NameError: _size`` on the first iteration (and the argument was
    otherwise ignored). They now honor ``dataset_size``, matching every
    sibling task class in this module.
    """
    def __init__(self, data_dir, tokenizer, **kwargs):
        super().__init__(tokenizer, **kwargs)
        self.data_dir = data_dir
    def get_feature_fn(self, max_seq_len = 512, mask_gen = None):
        # Override of the base hook: forwards straight to example_to_feature
        # (no label_type handling, unlike STSB/CoLA).
        def _example_to_feature(example, rng=None, ext_params=None, **kwargs):
            return example_to_feature(self.tokenizer, example, max_seq_len = max_seq_len, \
                rng = rng, mask_generator = mask_gen, ext_params = ext_params, **kwargs)
        return _example_to_feature
    def train_data(self, max_seq_len=512, dataset_size=None, epochs=1, mask_gen=None, **kwargs):
        input_src = os.path.join(self.data_dir, 'train.tsv')
        assert os.path.exists(input_src), f"{input_src} doesn't exists"
        data = self._read_tsv(input_src)
        examples = [ExampleInstance((l[8], l[9]), self.label2id(l[-1])) for l in data[1:]] # if l[3] in ['slate']])
        examples = ExampleSet(examples)
        if dataset_size is None:
            dataset_size = len(examples)*epochs
        return DynamicDataset(examples, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len, mask_gen=mask_gen), \
            dataset_size = dataset_size, shuffle=True, **kwargs)
    def eval_data(self, max_seq_len=512, dataset_size=None, **kwargs):
        # MNLI evaluates on both the matched and mismatched dev splits.
        ds = [
            self._data('matched', 'dev_matched.tsv', 'dev'),
            self._data('mismatched', 'dev_mismatched.tsv', 'dev'),
        ]
        for d in ds:
            if dataset_size is None:
                _size = len(d.data)
            else:
                # FIX: this branch was missing; _size was unbound when
                # dataset_size was supplied.
                _size = dataset_size
            d.data = DynamicDataset(d.data, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len), dataset_size = _size, **kwargs)
        return ds
    def test_data(self,max_seq_len=512, dataset_size = None, **kwargs):
        """See base class."""
        ds = [
            self._data('matched', 'test_matched.tsv', 'test'),
            self._data('mismatched', 'test_mismatched.tsv', 'test'),
        ]
        for d in ds:
            if dataset_size is None:
                _size = len(d.data)
            else:
                # FIX: this branch was missing; _size was unbound when
                # dataset_size was supplied.
                _size = dataset_size
            d.data = DynamicDataset(d.data, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len), dataset_size = _size, **kwargs)
        return ds
    def diagnostic_data(self, name, path, type_name='dev', ignore_metric=False):
        """Load the GLUE diagnostic TSV (pair in columns 5/6, label in 7); reports accuracy and MCC."""
        input_src = os.path.join(self.data_dir, path)
        assert os.path.exists(input_src), f"{input_src} doesn't exists"
        data = self._read_tsv(input_src)
        predict_fn = self.get_predict_fn()
        examples = ExampleSet([ExampleInstance((l[5], l[6]), self.label2id(l[7])) for l in data[1:]])
        def _metric_fn(logits, labels):
            return OrderedDict(
                accuracy= metric_accuracy(logits, labels),
                mcc= metric_mcc(logits, labels))
        return EvalData(name, examples,
            metrics_fn = _metric_fn, predict_fn = predict_fn, ignore_metric=ignore_metric, critial_metrics=['mcc'])
    def anli_data(self, name, path, type_name='dev', ignore_metric=False):
        """Load an ANLI-format TSV (pair in columns 1/2, label in 3); reports accuracy."""
        input_src = os.path.join(self.data_dir, path)
        assert os.path.exists(input_src), f"{input_src} doesn't exists"
        data = self._read_tsv(input_src)
        predict_fn = self.get_predict_fn()
        examples = ExampleSet([ExampleInstance((l[1], l[2]), self.label2id(l[3])) for l in data[1:]])
        def _metric_fn(logits, labels):
            return OrderedDict(
                accuracy= metric_accuracy(logits, labels))
        return EvalData(name, examples,
            metrics_fn = _metric_fn, predict_fn = predict_fn, ignore_metric=ignore_metric, critial_metrics=['accuracy'])
    def _data(self, name, path, type_name = 'dev', ignore_metric=False):
        # Load one split; test rows carry no gold label.
        input_src = os.path.join(self.data_dir, path)
        assert os.path.exists(input_src), f"{input_src} doesn't exists"
        data = self._read_tsv(input_src)
        predict_fn = self.get_predict_fn()
        if type_name=='test':
            examples = ExampleSet([ExampleInstance((l[8], l[9])) for l in data[1:]])
        else:
            examples = ExampleSet([ExampleInstance((l[8], l[9]), self.label2id(l[-1])) for l in data[1:]])
        return EvalData(name, examples,
            metrics_fn = self.get_metrics_fn(input_src), predict_fn = predict_fn, ignore_metric=ignore_metric, critial_metrics=['accuracy'])
    def get_metrics_fn(self, input_src):
        """Return a metrics function computing overall and per-genre accuracy.

        The genre of each row (column 3 of ``input_src``) is captured at
        closure-creation time, so ``metrics_fn`` must be called with
        predictions in the same row order as the file.
        """
        data = self._read_tsv(input_src)
        genres = [l[3] for l in data[1:]]
        def metrics_fn(logits, labels):
            metrics = OrderedDict(accuracy= metric_accuracy(logits, labels))
            genres_predicts = defaultdict(list)
            for g,lg,lab in zip(genres,logits,labels):
                genres_predicts[g].append((lg, lab))
            for k in genres_predicts:
                logits_ = [x[0] for x in genres_predicts[k]]
                labels_ = [x[1] for x in genres_predicts[k]]
                acc = metric_accuracy(logits_, labels_)
                metrics[f'accuracy_{k}'] = acc
            return metrics
        return metrics_fn
    def get_labels(self):
        """See base class."""
        return ["contradiction", "neutral", "entailment"]
class ANLITask(MNLITask):
    """Adversarial NLI task; reuses MNLI's label set and eval machinery.

    Training data is the concatenation of ANLI rounds R1-R3 found under the
    MNLI data directory (pair in columns 1/2, label in column 3).
    """
    def __init__(self, data_dir, tokenizer, **kwargs):
        # The ANLI files live alongside MNLI; rewrite the path accordingly.
        data_dir = data_dir.replace('/ANLI', '/MNLI')
        super().__init__(data_dir, tokenizer, **kwargs)
    def train_data(self, max_seq_len=512, dataset_size=None, epochs=1, mask_gen=None, **kwargs):
        examples = []
        data_src = ['R1', 'R2', 'R3']
        for d in data_src:
            input_src = os.path.join(self.data_dir, f'anli_v0.1/{d}/train.tsv')
            data = self._read_tsv(input_src)
            examples += [ExampleInstance((l[1], l[2]), self.label2id(l[3])) for l in data[1:]]
        examples = ExampleSet(examples)
        if dataset_size is None:
            dataset_size = len(examples)*epochs
        return DynamicDataset(examples, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len, mask_gen=mask_gen), \
            dataset_size = dataset_size, shuffle=True, **kwargs)
| 39.305707
| 150
| 0.659477
| 4,281
| 28,929
| 4.224247
| 0.05723
| 0.065085
| 0.038819
| 0.018248
| 0.882493
| 0.87276
| 0.860595
| 0.852632
| 0.847821
| 0.846052
| 0
| 0.013472
| 0.196861
| 28,929
| 735
| 151
| 39.359184
| 0.76487
| 0.053856
| 0
| 0.759599
| 0
| 0.001669
| 0.065006
| 0.007349
| 0
| 0
| 0
| 0
| 0.03005
| 1
| 0.138564
| false
| 0
| 0.036728
| 0.016694
| 0.307179
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9c6416a69adb8668c8d701ab6cd5b798cf549516
| 76,209
|
py
|
Python
|
optimization/first_sdEta_mjj_optimization/loose_analysis_sdeta_2.6_mjj_1250/Output/Histos/MadAnalysis5job_0/selection_9.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
optimization/first_sdEta_mjj_optimization/loose_analysis_sdeta_2.6_mjj_1250/Output/Histos/MadAnalysis5job_0/selection_9.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
optimization/first_sdEta_mjj_optimization/loose_analysis_sdeta_2.6_mjj_1250/Output/Histos/MadAnalysis5job_0/selection_9.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
def selection_9():
# Library import
import numpy
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
# Library version
matplotlib_version = matplotlib.__version__
numpy_version = numpy.__version__
# Histo binning
xBinning = numpy.linspace(0.0,4000.0,401,endpoint=True)
# Creating data sequence: middle of each bin
xData = numpy.array([5.0,15.0,25.0,35.0,45.0,55.0,65.0,75.0,85.0,95.0,105.0,115.0,125.0,135.0,145.0,155.0,165.0,175.0,185.0,195.0,205.0,215.0,225.0,235.0,245.0,255.0,265.0,275.0,285.0,295.0,305.0,315.0,325.0,335.0,345.0,355.0,365.0,375.0,385.0,395.0,405.0,415.0,425.0,435.0,445.0,455.0,465.0,475.0,485.0,495.0,505.0,515.0,525.0,535.0,545.0,555.0,565.0,575.0,585.0,595.0,605.0,615.0,625.0,635.0,645.0,655.0,665.0,675.0,685.0,695.0,705.0,715.0,725.0,735.0,745.0,755.0,765.0,775.0,785.0,795.0,805.0,815.0,825.0,835.0,845.0,855.0,865.0,875.0,885.0,895.0,905.0,915.0,925.0,935.0,945.0,955.0,965.0,975.0,985.0,995.0,1005.0,1015.0,1025.0,1035.0,1045.0,1055.0,1065.0,1075.0,1085.0,1095.0,1105.0,1115.0,1125.0,1135.0,1145.0,1155.0,1165.0,1175.0,1185.0,1195.0,1205.0,1215.0,1225.0,1235.0,1245.0,1255.0,1265.0,1275.0,1285.0,1295.0,1305.0,1315.0,1325.0,1335.0,1345.0,1355.0,1365.0,1375.0,1385.0,1395.0,1405.0,1415.0,1425.0,1435.0,1445.0,1455.0,1465.0,1475.0,1485.0,1495.0,1505.0,1515.0,1525.0,1535.0,1545.0,1555.0,1565.0,1575.0,1585.0,1595.0,1605.0,1615.0,1625.0,1635.0,1645.0,1655.0,1665.0,1675.0,1685.0,1695.0,1705.0,1715.0,1725.0,1735.0,1745.0,1755.0,1765.0,1775.0,1785.0,1795.0,1805.0,1815.0,1825.0,1835.0,1845.0,1855.0,1865.0,1875.0,1885.0,1895.0,1905.0,1915.0,1925.0,1935.0,1945.0,1955.0,1965.0,1975.0,1985.0,1995.0,2005.0,2015.0,2025.0,2035.0,2045.0,2055.0,2065.0,2075.0,2085.0,2095.0,2105.0,2115.0,2125.0,2135.0,2145.0,2155.0,2165.0,2175.0,2185.0,2195.0,2205.0,2215.0,2225.0,2235.0,2245.0,2255.0,2265.0,2275.0,2285.0,2295.0,2305.0,2315.0,2325.0,2335.0,2345.0,2355.0,2365.0,2375.0,2385.0,2395.0,2405.0,2415.0,2425.0,2435.0,2445.0,2455.0,2465.0,2475.0,2485.0,2495.0,2505.0,2515.0,2525.0,2535.0,2545.0,2555.0,2565.0,2575.0,2585.0,2595.0,2605.0,2615.0,2625.0,2635.0,2645.0,2655.0,2665.0,2675.0,2685.0,2695.0,2705.0,2715.0,2725.0,2735.0,2745.0,2755.0,2765.0,2775.0,2785.0,2795.0,2805.0,2815.0,2825.0,2835.0,2845.0,2855.0,2865.0,2875.0,2885.0,2895.0,2905.0,2915.0,2925.0,2935.0,2945.0,2955.0,2965.0,2975.0,2985
.0,2995.0,3005.0,3015.0,3025.0,3035.0,3045.0,3055.0,3065.0,3075.0,3085.0,3095.0,3105.0,3115.0,3125.0,3135.0,3145.0,3155.0,3165.0,3175.0,3185.0,3195.0,3205.0,3215.0,3225.0,3235.0,3245.0,3255.0,3265.0,3275.0,3285.0,3295.0,3305.0,3315.0,3325.0,3335.0,3345.0,3355.0,3365.0,3375.0,3385.0,3395.0,3405.0,3415.0,3425.0,3435.0,3445.0,3455.0,3465.0,3475.0,3485.0,3495.0,3505.0,3515.0,3525.0,3535.0,3545.0,3555.0,3565.0,3575.0,3585.0,3595.0,3605.0,3615.0,3625.0,3635.0,3645.0,3655.0,3665.0,3675.0,3685.0,3695.0,3705.0,3715.0,3725.0,3735.0,3745.0,3755.0,3765.0,3775.0,3785.0,3795.0,3805.0,3815.0,3825.0,3835.0,3845.0,3855.0,3865.0,3875.0,3885.0,3895.0,3905.0,3915.0,3925.0,3935.0,3945.0,3955.0,3965.0,3975.0,3985.0,3995.0])
# Creating weights for histo: y10_M_0
y10_M_0_weights = numpy.array([0.380749794766,1.06446189074,1.55575187109,2.19033492903,2.74303645691,3.20976165824,3.81978033717,4.52396413566,4.97840774747,5.67439915296,5.99373888018,6.25575865637,6.76752221922,7.23015382405,7.67231344636,7.63546547783,8.20454499173,8.66308060005,8.91282038672,9.03564428181,9.66203974674,9.94043550894,9.98137547397,10.5832069599,10.1410473376,10.6241469249,10.9230146696,11.2628263794,11.6190100751,11.6681380331,11.6763260262,11.5780701101,11.9792897674,12.1839935925,12.147145624,12.2863455051,12.16352161,12.4623933547,12.2986294946,12.3272854701,12.2781575121,12.4337333792,12.6097772288,12.7817290819,13.1911367322,13.0805968266,12.3968854107,12.6711891763,12.9332129525,12.9700569211,12.8185770504,12.6834731659,12.822669047,12.6056852323,12.879988998,12.7981050679,12.6138732253,12.3968854107,12.4255453862,12.7366971204,12.6097772288,12.6711891763,12.3927934142,12.8677050085,12.2904415016,12.171713603,11.6722340296,12.3436614561,12.3395694596,12.4501093652,11.8196219037,11.2955783514,12.0407017149,11.5944460961,11.8155259072,11.5371301451,11.5535061311,11.1481904773,10.9393906556,10.8411347396,11.1645664633,10.9844266172,11.0826865332,10.6732788829,10.2761512222,10.7592508095,10.1737993096,9.98137547397,10.1451393341,10.4644790613,10.038695425,9.96909548446,9.8831195579,9.8790235614,9.8831195579,9.69888371527,9.67841573276,9.41229996007,9.4532399251,9.17893615941,9.08886823634,9.19531214542,9.09705622935,8.58120066999,8.65080061054,8.7367765371,8.69174057557,8.44609678539,8.43381279589,7.80741733095,7.95890120155,8.19635699872,7.99165317358,7.78694934843,7.80741733095,7.65184146384,7.64365347084,7.37344570165,7.47170161772,6.80027419125,7.12780191147,7.28337377859,7.07457795694,6.75114623321,6.94766206535,6.66926230316,6.51778243255,6.57919438009,6.85349814578,6.50140644654,6.4440864955,6.28441863189,6.3089826109,6.11656277527,6.11656277527,5.98145489068,5.69896313198,5.77675106553,5.66211916345,5.91185895013,5.92413893964,5.338687
43973,5.27317949568,5.19948755863,5.16673558661,5.51473128935,5.51473128935,5.10122764256,4.97840774747,4.91699579993,5.0193477125,5.03981569502,4.92927578944,4.67953600277,4.64678403074,4.83101987337,4.60584406572,4.41751622658,4.39704424407,4.21281240144,4.33154030002,4.53624412517,4.40932823358,4.1514004539,3.98354379728,4.05723653433,3.90575626373,3.88528588121,3.88937987772,3.4431246589,3.94260303225,3.62735850153,3.77883957214,3.67648725957,3.58232334,3.50862980295,3.61507611202,3.32849035682,3.32849035682,3.1524445072,3.17700928622,3.14016251769,3.16472689671,3.09103335966,3.21795005125,3.11150374217,3.16882089321,2.93545829255,2.85767075899,2.98458705058,2.79216521495,2.53423783527,2.66115452686,2.69390729888,2.62430775833,2.56699060729,2.5833669933,2.31315762411,2.48510907723,2.4400739157,2.53014383877,2.49739106674,2.34590999614,2.5833669933,2.06341823744,2.27631085559,2.30087523461,2.02657146891,2.18624093253,2.23127569406,2.06751223394,2.06341823744,2.17805253952,1.91603116334,2.0061010864,1.83824362978,1.87509039831,2.01838347591,1.73589171721,1.93240754935,1.83005563678,1.90374917383,1.77683248224,1.84643202279,1.59669263611,1.75226810322,1.70313894519,1.60488102912,1.5803162501,1.67857456617,1.53937548507,1.44930556201,1.39198841097,1.45339995851,1.41245879349,1.50252871655,1.36333003545,1.41245879349,1.38789441447,1.33467125993,1.22003695785,1.19956657534,1.17909619282,1.26097772288,1.16271980681,1.1422494243,1.05627349774,1.22413095435,1.2896364984,1.27326011239,1.15862581031,1.20366057184,0.982579960686,1.12587303829,1.0071447397,1.02352112571,1.03580311522,0.929357206149,1.03989751173,1.06446189074,0.921168813143,0.953921585166,0.904792427132,0.949827588663,0.912980820137,0.978485964183,0.908886823634,0.896604434126,0.835193286583,0.929357206149,0.806534511063,0.937545199155,0.867945658606,0.835193286583,0.851569272595,0.769687742537,0.753311356526,0.753311356526,0.790158125052,0.663241433463,0.732840974012,0.724652981006,0.695994205486,0.78197013
2046,0.712370591497,0.68780621248,0.642771050949,0.614112675429,0.667335829966,0.618206671932,0.564983517394,0.663241433463,0.5731719104,0.614112675429,0.556795524389,0.634583057943,0.614112675429,0.593642292914,0.536325141874,0.528136748869,0.622300668435,0.524042752366,0.552701527886,0.466725601326,0.556795524389,0.491289980343,0.499478373349,0.442161222309,0.405314453783,0.470819597829,0.421690839794,0.376655718263,0.442161222309,0.397126140777,0.376655718263,0.446255218811,0.384843911269,0.438066825806,0.364373488754,0.397126140777,0.352091219246,0.34390306624,0.364373488754,0.319338567223,0.380749794766,0.327526720229,0.335714873234,0.31524445072,0.282491798697,0.2865858752,0.37256164176,0.31524445072,0.241550953669,0.2865858752,0.282491798697,0.34390306624,0.282491798697,0.221080531154,0.253833223177,0.249739106674,0.262021376183,0.298868144709,0.270209529189,0.25792729968,0.249739106674,0.208798301646,0.241550953669,0.249739106674,0.225174607657,0.192421955634,0.221080531154,0.241550953669,0.253833223177,0.237456877166,0.208798301646,0.192421955634,0.253833223177,0.225174607657,0.180139686126,0.249739106674,0.159669263611,0.1432929576,0.196516032137,0.192421955634,0.17195153312,0.216986454651,0.118728418583,0.139198841097,0.131010688091,0.188327879131,0.204704185143,0.208798301646,0.135104764594,0.135104764594,0.196516032137,0.11463434208,0.135104764594,0.155575187109,0.163763340114,0.17195153312,0.163763340114,0.139198841097,0.151481110606,0.106446189074,0.110540265577,0.126916611589,0.106446189074,0.106446189074,0.11463434208,0.11463434208,0.0941639195657,0.0900698430629,0.0777876135543,0.131010688091,0.110540265577,0.126916611589,0.118728418583,0.0818816900572,0.131010688091,0.0818816900572])
# Creating weights for histo: y10_M_1
y10_M_1_weights = numpy.array([1.71346366061,11.7008987679,22.8930866643,27.6311510708,25.7101937743,21.5568164342,17.2781578773,13.8025423477,10.5344605067,8.43174351139,6.71849613601,5.61274590827,4.89533981626,3.56044299739,3.51091848606,2.61210916532,2.18747917698,1.49394336432,1.80966332563,1.53025645285,1.16678071277,0.862508248059,0.801951587888,0.8016435817,0.486115473407,0.595407203816,0.52232322302,0.412858063899,0.303709081741,0.303782859036,0.291637281841,0.206437082566,0.267356621289,0.194406577124,0.206456548237,0.182138878879,0.170115542891,0.158006774238,0.16985564014,0.0486367405647,0.133538466222,0.0850441935395,0.0486341371314,0.0242901851127,0.0242576982701,0.0486191173238,0.0243677353835,0.0242686927693,0.036531889077,0.0363598301713,0.0121202397813,0.0486121080802,0.0727981238177,0.0,0.0243145292169,0.0242908740212,0.0364551999419,0.0,0.0,0.0,0.0,0.0121061692255,0.0,0.0,0.0,0.0243338947555,0.0,0.0121295200198,0.0,0.0,0.0121276615689,0.0,0.0121061692255,0.0121663686145,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0121295200198,0.0,0.0,0.0,0.0121234440069,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.024338829263,0.0,0.0,0.0,0.0,0.0,0.0,0.0120930559321,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,
0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y10_M_2
y10_M_2_weights = numpy.array([7.00783200382,46.8081744577,84.9501197244,103.312707767,102.578270359,90.0020111128,77.9790798365,64.7686276488,54.1751923857,45.6828414782,39.5080880992,31.1247251577,26.7869123776,22.5102710354,19.2159800147,16.2043610356,14.0759470466,12.0378811725,9.7780750338,9.30636820799,8.07281996301,7.14850482439,6.06390184114,5.59197601464,4.55803243628,4.096567405,3.49397998193,3.08238223418,2.61024319191,2.620828363,2.42955150752,2.14887815718,1.94782932563,1.65650510399,1.39571123137,1.38543514051,1.1648931815,1.02438605768,0.98376721461,1.06455532952,0.662402709647,0.87356730703,0.562272701718,0.512100056849,0.65267949222,0.542309755803,0.542259344324,0.371428682509,0.371280257891,0.441791331557,0.341431703471,0.331240072493,0.26115456262,0.291256579348,0.240980962834,0.271015254631,0.190689479099,0.150576527623,0.180696353652,0.200791195832,0.140581088206,0.210870681929,0.180740897566,0.120415959201,0.100399585381,0.0702841393659,0.0603142775769,0.100423386211,0.0502045859134,0.0300927360572,0.0804020538855,0.0803068918876,0.0703028164059,0.0803689145358,0.0301333751475,0.0903016288493,0.0602867991884,0.0802276384303,0.0401075301765,0.0301117560606,0.0200572938918,0.0602617587322,0.0301355692865,0.0401865274442,0.0301005870254,0.0201197132206,0.0301235407769,0.0401355829252,0.030117520324,0.0502060734653,0.020091718321,0.0401786558157,0.0,0.0200667852991,0.0100230789567,0.0201069698596,0.0,0.0100704161624,0.0,0.0,0.0100370991329,0.0200725165058,0.0200902018446,0.0100325703639,0.0100154883101,0.010046069897,0.0100611106991,0.010046069897,0.0100300828467,0.0100272854227,0.0,0.0100272854227,0.0,0.01002640942,0.0200601780896,0.0100342190672,0.0,0.0301225573399,0.0,0.0100230789567,0.0200458273463,0.0,0.0,0.0,0.0,0.0,0.0200636655723,0.0,0.0100586066535,0.0,0.0,0.0100547803396,0.0,0.0100586066535,0.0,0.0100569827427,0.0100230789567,0.0100370991329,0.0,0.0,0.0100458591605,0.0,0.0100325703639,0.0100697963491,0.0,0.0,0.0,0.0,0.0,0.0,0.01002640942,
0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0100569827427,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0100154883101,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y10_M_3
y10_M_3_weights = numpy.array([5.94579423093,42.1759936575,79.7555523662,100.067421926,99.2544677641,91.6629278445,83.0100096616,73.2332720306,65.7529237326,56.7070364024,50.7950302868,43.8076707826,38.2604479366,33.5300102242,28.9090538056,25.6100711111,22.2163053795,19.5752667073,18.0298737931,15.3734137327,14.1033754741,12.3652842702,11.08363122,9.74725580425,9.14758074173,7.65041825984,7.15063284266,6.93034180564,5.39654743291,5.69310446602,4.76355957181,4.41117516331,3.94863223619,3.85603281245,3.305427096,2.93145640027,2.98077599859,2.59610458194,2.48021991824,2.07903510359,2.21665027456,1.80936474898,1.74901658306,1.65013363425,1.40803043298,1.54559969729,1.42998966105,1.2979328524,1.171455125,1.04490549071,0.890943485718,0.990087249365,0.962526596083,0.852478172195,0.715239598346,0.770041995802,0.544525318197,0.522651403387,0.484052026162,0.489700983045,0.401617474784,0.467421220847,0.45104798034,0.395989602465,0.330083074231,0.379537670628,0.252941682796,0.379666371781,0.324542140191,0.313527100836,0.214489897519,0.28052212063,0.230949182547,0.280547105229,0.197985599587,0.280691975277,0.192621548383,0.093536975892,0.175955724004,0.153993164649,0.137538429661,0.137502882468,0.137485088558,0.115531832412,0.115540363738,0.137519701369,0.131989167422,0.0825097083022,0.0989732996189,0.0825526086868,0.0660512764068,0.0715058002969,0.055002477374,0.0659943196463,0.104511633635,0.0604616325556,0.0715141691219,0.0440306227809,0.0660221480207,0.0605189549444,0.0605072954649,0.0824352826352,0.0714915001687,0.0110342185896,0.0439744379023,0.0825437929828,0.0440370415885,0.0385139908342,0.0550171837558,0.0440418353814,0.0330209337862,0.0164898840524,0.0275258007822,0.0109628357623,0.016520743079,0.033008640551,0.0440252602329,0.0384887462329,0.0,0.0110067233431,0.00550225869293,0.0274982892856,0.0165038063647,0.027529245813,0.0275388252739,0.0220022137731,0.0110185046987,0.00550880750163,0.0165191952526,0.016494470656,0.0219779523056,0.0274863332409,0.0,0.0164893152973
,0.021967345023,0.00550037773857,0.00548468822294,0.0,0.0,0.032983420325,0.0,0.0165081492161,0.0109878772367,0.0220133044975,0.00550237650648,0.0220120857366,0.0110047286377,0.00551416192462,0.00548800731519,0.0110066136546,0.00550225869293,0.016512715507,0.0220093394619,0.0109902660081,0.0,0.0,0.0110085271093,0.0,0.0,0.0,0.00550365214292,0.0,0.0,0.0109946941728,0.00550872218836,0.00549789552882,0.0,0.0,0.00549437330975,0.00549973992035,0.00549592519866,0.0054989639759,0.0,0.00548800731519,0.0,0.0,0.0,0.0110109808813,0.0,0.00549592519866,0.0,0.00550225869293,0.0,0.00548468822294,0.0,0.0,0.0,0.0,0.00550365214292,0.0,0.00549789552882,0.0,0.0,0.0,0.0,0.0,0.00549231766632,0.00550880750163,0.0055162460058,0.0,0.0,0.0,0.0,0.0,0.00549386955523,0.0,0.0,0.0,0.0,0.0,0.0,0.00550487496638,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00550365214292,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00550967282188,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00549705052124,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00549592519866,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y10_M_4
y10_M_4_weights = numpy.array([1.62033690678,10.9947408908,21.0276448741,26.2213260813,26.4523607807,25.0360221544,22.8170444755,20.4021809249,18.4384120337,16.6628050102,14.8088396297,13.0521037068,11.8038212898,10.5920585407,9.5794845069,8.33123415628,7.78731115833,6.83994824066,6.13114761947,5.55005560804,5.04773881735,4.68740821931,4.2335919207,3.82605230649,3.43118374646,3.09684259806,2.86279245067,2.59526734382,2.43755616931,2.22030202291,2.08131243918,1.91154428284,1.7733034505,1.56818565038,1.43002177751,1.38156497595,1.23057412582,1.20396980218,1.11121601813,1.0036118644,0.940474645303,0.909803896103,0.786520856025,0.737142945949,0.695730339828,0.678898264093,0.599997182575,0.565516539349,0.51909756012,0.526974681363,0.494420830413,0.465803535931,0.439133075252,0.450985633523,0.369018559329,0.353321675689,0.339457148657,0.32763421176,0.300945954205,0.28815817818,0.275325629387,0.271369432223,0.227002264308,0.218080137834,0.214172521328,0.194400554194,0.17071395602,0.178644147223,0.158917393771,0.143091843536,0.155932048193,0.137173500845,0.138159864571,0.111487760487,0.125315531361,0.107572528201,0.113470989699,0.0947423443078,0.0868426563068,0.0917769200059,0.0897988214238,0.0769669540424,0.0799319374307,0.083881200227,0.0572505420175,0.0740217315984,0.0740269824781,0.0572518246751,0.0651310302369,0.0661132654092,0.0592243917961,0.0611903452136,0.0542926132462,0.0503317664481,0.0444122212648,0.0325519470056,0.0453930535303,0.0493297703315,0.0335514539647,0.0424279098098,0.0355241734013,0.0365142688599,0.0315752513109,0.0276421299595,0.0266450159585,0.0167796952044,0.0385029974672,0.0335457381216,0.0217100908891,0.0236780364342,0.0207162997731,0.020720528535,0.0246789663417,0.0177639986622,0.0167769094324,0.0226799444068,0.0157891067242,0.0167700512223,0.0138203074101,0.0167809979036,0.0118394230559,0.014807528914,0.0187431233897,0.0177724120946,0.0157831904658,0.00690647808923,0.0148002859066,0.0128332823131,0.0138149042147,0.00986481570764,0.0088816786666
8,0.00691277914489,0.0128313984097,0.00592223475605,0.00987343356367,0.0088759187322,0.00493489701131,0.00197526871864,0.00591779756227,0.00888127783616,0.00394592267558,0.00493854857728,0.00395044244045,0.00788723820525,0.00394777371089,0.0118427419326,0.00691142834606,0.00296039107313,0.00197519616832,0.00197623993097,0.00592824721374,0.000988013544985,0.0059264915761,0.00296026561318,0.00691090325809,0.00493070031583,0.00690830186806,0.00197208933101,0.00394395299444,0.0049308285816,0.00493392700147,0.00592473193014,0.00295812678157,0.00296033455603,0.00296365182936,0.00197573608702,0.00394964398607,0.00295592261457,0.00197418527376,0.00197464823301,0.00197503623694,0.00295490650922,0.00197408185949,0.0019755296593,0.0,0.000988442433634,0.00197240398296,0.0,0.000988898578758,0.000988576711856,0.00197471797751,0.00197631448545,0.00295959301958,0.000986141666489,0.0,0.000988576711856,0.00197390348991,0.00197433277939,0.0,0.000986205799371,0.000987737773592,0.00197261321649,0.0,0.00197274068059,0.0029624950325,0.0,0.0,0.0,0.000983912648006,0.000988898578758,0.0,0.0,0.000984223291653,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00197036495814,0.00197423658007,0.0,0.0,0.0,0.0,0.0,0.000985771699925,0.000988176683004,0.000986319234406,0.0,0.0,0.0,0.0,0.000986459525086,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000988576711856,0.0,0.0,0.00197208933101,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000988576711856,0.0,0.000986319234406,0.0,0.000987866039356,0.000988442433634,0.000985432597311,0.0,0.0,0.0,0.0,0.0,0.000986205799371,0.000986459525086,0.0,0.0,0.0,0.0,0.0,0.0,0.000988013544985,0.0,0.0,0.0,0.000987948610442,0.0,0.000987866039356,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00197424259253,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000986459525086,0.0,0.0,0.0,0.000986798226869,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.
0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y10_M_5
y10_M_5_weights = numpy.array([0.452987003994,3.29313847222,6.39449331741,7.91594080306,7.96779346371,7.57296906104,6.95733874509,6.24451669823,5.57899820129,5.02142206019,4.53481124641,4.09566402162,3.63321470725,3.33044998242,2.97827599353,2.70301158334,2.44439843882,2.22381978103,2.00934261301,1.85708823768,1.68945650772,1.56366339333,1.42574571853,1.3032330051,1.2132290292,1.11495703452,1.01387755436,0.931684285561,0.850972178414,0.812163502658,0.747142026629,0.676320734482,0.62640094577,0.576502762334,0.553812822223,0.510460236999,0.493554509196,0.452728540886,0.415942759051,0.380636418446,0.354153072115,0.339280880832,0.322651300455,0.292151933549,0.266439295399,0.245506824402,0.250825523047,0.233439798046,0.223350614611,0.191327795729,0.188550037495,0.175181813431,0.17947250107,0.166615121736,0.161333632177,0.138639331001,0.134349283518,0.135358129844,0.116960957881,0.119983015765,0.103343953073,0.107384019519,0.0899883720947,0.0965354506682,0.0887278643216,0.0849405795957,0.0826727858777,0.0746167388655,0.0721056857519,0.0612631583761,0.0698335709789,0.0630092647158,0.0607549942998,0.0574764737967,0.054190631506,0.0526851039074,0.0453701978566,0.0484029383485,0.0365479477637,0.0451160558038,0.0418445770201,0.0390768572372,0.0360503022556,0.032513966803,0.0365535531323,0.0368018777647,0.0307490758767,0.0254599204452,0.0312523027476,0.025968356588,0.0247073927035,0.0320096116566,0.0236963778482,0.0221810998689,0.0214296723959,0.0226816180784,0.0199156467224,0.0201685284675,0.0206696108148,0.017390882261,0.0153751620908,0.0161352756846,0.0161353236963,0.0148692225576,0.0191590779941,0.0148742237787,0.0113416052336,0.0128553948495,0.009828291734,0.012096249492,0.0115906940526,0.0136128557956,0.0108403108346,0.0113453061372,0.00856997649629,0.00958007912894,0.00781331974649,0.0108408109567,0.0085735773755,0.00958060725789,0.00882393050321,0.00504493180047,0.00681019081658,0.00857165290561,0.00579951604423,0.00731026491764,0.0052939125931,0.00604838480951,0.005544
60180286,0.00529501686272,0.00453553942421,0.00529308839186,0.00705740317742,0.00504014263112,0.00479047367046,0.00428388597901,0.0035280974412,0.00327703494029,0.00277333835474,0.00428739483575,0.00428820703406,0.00428495824082,0.00453617557953,0.00226941131293,0.00327862292802,0.00302610287123,0.00176418955499,0.00277294225803,0.00378109681502,0.0042845981529,0.00176471888423,0.00302557674276,0.00151365398273,0.00176533703516,0.0017637714529,0.00302414519323,0.00201606145352,0.00302526426647,0.00302438805252,0.00176627286366,0.00201660478619,0.002772634983,0.00101010943431,0.00126018649461,0.00100821777243,0.0012609854897,0.00176381626384,0.00252053342839,0.000755960579819,0.00100903637231,0.00100796971186,0.00126010567488,0.00302316135301,0.00126124195232,0.001008310195,0.00201852365471,0.000755413246179,0.00125915064169,0.00151328709315,0.00126112512379,0.00201673881891,0.0012611407276,0.00126042295234,0.000503712589453,0.00151210880545,0.000756379882198,0.00100906958041,0.000504618010526,0.000251605513494,0.000755965380991,0.00176358980855,0.000756495110333,0.000251541177786,0.000756031797208,0.0010083450035,0.000756702761034,0.0,0.000252211301407,0.000505198952373,0.00100784448129,0.000251929552614,0.00125916304472,0.00100829979246,0.0,0.00075594297552,0.0,0.000504112687144,0.00075623584703,0.000252177413133,0.000756162629152,0.0,0.000251605513494,0.0,0.000251948997362,0.0,0.00050419470717,0.000252440477364,0.000755232402023,0.0,0.000252008691937,0.000252080629502,0.000252296482205,0.000252315446836,0.000252349255091,0.000252177413133,0.0,0.000504075878156,0.0,0.0,0.000252349255091,0.0,0.0,0.000252039499459,0.000252008691937,0.0,0.0,0.000253121803721,0.000504215112152,0.0,0.0,0.000252138603657,0.0,0.000252138603657,0.0,0.0,0.0,0.0,0.000252073187685,0.000756321868033,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000252349255091,0.0,0.0,0.000251948997362,0.0,0.000252077028622,0.0,0.0,0.0,0.000251541177786,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00025231544
6836,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000251635560831,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000252077028622,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000251948997362,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000252121639515,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y10_M_6
y10_M_6_weights = numpy.array([0.151726141055,1.06292770423,2.09932030309,2.65011467557,2.70693063073,2.51084935762,2.34932949944,2.16711757729,1.89410109303,1.76263915937,1.57653254273,1.42635988094,1.32965129788,1.20787011921,1.07916026304,1.00780518143,0.90505702284,0.818047188733,0.761339397313,0.688479120243,0.626940748044,0.586077445515,0.543684355775,0.50439182672,0.484731916766,0.440341496535,0.398840607727,0.389411368429,0.366231738293,0.345596255354,0.305301062394,0.27312294857,0.255679195756,0.241646899413,0.221077994157,0.212159783561,0.198423687931,0.192090910693,0.169143402749,0.163213790369,0.158337324971,0.157177313809,0.152581754251,0.126563975469,0.129444010075,0.123111432769,0.116207846867,0.113367998791,0.105592105288,0.100195314304,0.09248082022,0.0830218009047,0.0815870418392,0.0661506561505,0.0718389694151,0.0606663944874,0.0692518166345,0.0561296152261,0.0578415613779,0.0555514890153,0.0566824998979,0.0603831594285,0.0455201015591,0.0475129335573,0.0429314892678,0.0403786449869,0.0354898137342,0.0415121950184,0.034044648157,0.0323454077462,0.0331987716967,0.0323475470291,0.028624425028,0.0297803275669,0.0297934531672,0.0245961053299,0.0289129083278,0.0283519763534,0.0243290348525,0.0214716426568,0.0254915751664,0.0191803507031,0.0211787808248,0.0163115323375,0.0157380145819,0.0214512494927,0.015169865027,0.0171952761046,0.0143127123465,0.0102935395823,0.0157584977159,0.0151776824065,0.0103081446866,0.0131602386583,0.00858970871022,0.0137373552075,0.00830202714174,0.0105913197657,0.0131833309177,0.00944464713293,0.00888397207248,0.00744547026154,0.00802030657511,0.00859794295009,0.0108694865235,0.0080200646562,0.00573192264657,0.00601346627251,0.00744102075302,0.00888186078019,0.00686836170963,0.00743592446131,0.00716124053598,0.00716360774249,0.00773729444141,0.00743894344934,0.0051471496638,0.00372098271818,0.00486993658625,0.00486493926136,0.00487131412449,0.00486143943451,0.00286236053028,0.00371723997276,0.00343361104582,0.00457751160776,
0.00314930734506,0.00371775080153,0.00343450874491,0.0020016210521,0.00343342710747,0.00343499058339,0.00314617439523,0.00286023224369,0.00114462531901,0.00372724661851,0.00429312693415,0.00171864990467,0.00171714340965,0.00229509767807,0.00343605322719,0.00200404024118,0.00143232987979,0.00171839998844,0.00314175987499,0.00171902377934,0.00285921158581,0.00285722825063,0.00171928869054,0.00142905497754,0.00200901857243,0.00171607576752,0.000857486168698,0.00114667763107,0.00143024757778,0.00114371862293,0.00143060845681,0.00200285263927,0.00143576272909,0.000859588364037,0.00113931010068,0.00143450515063,0.00143043851378,0.000571954979398,0.00143403530812,0.000571060279304,0.00142649183671,0.00114734840622,0.00143342551253,0.000574511122573,0.000286302130879,0.00200168303132,0.000574378766939,0.000572286968115,0.000861685161186,0.0,0.000573552743824,0.000574061673229,0.000858856109491,0.000859719819973,0.000573541947443,0.000569991537548,0.000860697092389,0.00114908482417,0.000573552743824,0.000286302130879,0.000287115558216,0.00057087064287,0.000572874271249,0.000286751580222,0.0,0.000570115396031,0.0,0.000569856482819,0.00114893687376,0.000571958078359,0.000287759542351,0.000573541947443,0.000287115558216,0.0,0.000286437185608,0.000286809960653,0.000286302130879,0.000287759542351,0.000284527425764,0.000573700294365,0.000860657105793,0.000286302130879,0.000287115558216,0.000572159710772,0.000286437185608,0.000287759542351,0.000570246752,0.0,0.000287115558216,0.0,0.000571176240433,0.0,0.000286302130879,0.0,0.000284896502049,0.0,0.000570371010348,0.0,0.0,0.0,0.000286437185608,0.000573433883666,0.0,0.000287759542351,0.0,0.0,0.000284455949723,0.00028355435194,0.00028355435194,0.000284896502049,0.000286302130879,0.0,0.000856246584209,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000286302130879,0.0,0.00028515571516,0.00028642628926,0.000283944621121,0.0,0.0,0.0,0.0,0.0,0.0,0.00028642628926,0.0,0.0,0.0,0.0,
0.0,0.0,0.000286809960653,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00028355435194,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y10_M_7
y10_M_7_weights = numpy.array([0.0138803677137,0.113816875383,0.223063165562,0.28240986336,0.286463376492,0.273198118353,0.253750123243,0.234331715819,0.211653969018,0.193747215806,0.168895695999,0.157152527389,0.145144536408,0.132008776786,0.117104168001,0.109745928146,0.097345083087,0.0894921924238,0.0853322306723,0.0768331046903,0.0710755754712,0.065966838014,0.0637111330985,0.0574360288997,0.0543650281418,0.048693537905,0.0462942111714,0.0440128580059,0.0406405322193,0.0362356590165,0.0350701975072,0.0335571519197,0.0320668670586,0.0289632653633,0.0289830715119,0.0258033213358,0.0237725792328,0.0228270933762,0.0213983643151,0.0191772761554,0.0186328794764,0.0189357928525,0.0172562096574,0.0157150934851,0.0158708227861,0.0139658032079,0.0141419086186,0.0139305620095,0.0131066413143,0.0108632115172,0.0115738817777,0.0110740887466,0.0113598362351,0.00989028790036,0.00928351400678,0.00887330964233,0.00863447079139,0.00814117364052,0.00783945466816,0.00773088964915,0.00762328853496,0.00712616091015,0.00639083144977,0.00654189628958,0.00617691582477,0.00621828410513,0.00561331648537,0.00533465160178,0.00529092385006,0.00505061818743,0.00440457145876,0.00425217810665,0.00416536003765,0.00425218229754,0.0039728799795,0.0038022378137,0.00324004577534,0.00355999847418,0.00328093252237,0.0028726086105,0.00315187488688,0.00257046342457,0.00304590570545,0.00271781345903,0.00259170830591,0.00239650752163,0.00248059522545,0.00259075110651,0.001963854233,0.00183552120247,0.00179276867097,0.0020302299762,0.00179299078817,0.00192230700161,0.00207368322459,0.00155476674087,0.00183607314275,0.00172637825965,0.00159866548093,0.00157504729829,0.00138251611128,0.00177133603775,0.00136094224504,0.00127283170541,0.00153357801747,0.00140450697112,0.00110165184834,0.00118785176099,0.00109995663313,0.000992215962279,0.00103717876944,0.000949943287745,0.00118756761861,0.00077585076177,0.000950412248395,0.000906984983522,0.000993509271094,0.000777507001705,0.000516776615024,0.000885813023685
,0.000756172854405,0.000734180737294,0.000777412706668,0.00079769326409,0.000583312869333,0.000691221594892,0.000561518981343,0.000755853089458,0.000561301474125,0.000669687542114,0.000626298833434,0.000626304281592,0.000496649025106,0.000561629201764,0.000539947210551,0.000475428031851,0.000324260096129,0.000453478661822,0.000410115014667,0.000323893435117,0.000496643576948,0.000388744909527,0.000259139776092,0.000410266012453,0.000388802282818,0.000324113247325,0.000410427822736,0.000453664318272,0.000367051938181,0.000259168357965,0.000324124017914,0.00036711170028,0.000324068237161,0.000172862331971,0.000237425892202,0.000129453674652,0.00032411035561,0.000237586068038,0.000172807515123,0.0001512384265,0.000151122967466,0.000237625839589,0.000281027875301,0.000194348315234,0.000237324933649,0.00021606693483,0.000237496634434,0.000194437245931,0.00015127212126,0.000216092792625,0.000194588578988,0.000108005325585,0.000194398564011,0.000108019574613,8.6337122402e-05,0.000129685430898,0.000107971798461,0.000129651149413,0.000107834840159,0.000151161565568,6.46969143675e-05,6.48673578851e-05,0.000129638031926,0.00017278618349,6.47074754116e-05,0.000129526428511,0.000172845400773,8.63370804931e-05,0.000149616719503,0.000108033781732,8.63847309184e-05,2.15256207058e-05,8.63847728273e-05,6.48553300293e-05,0.0,6.48458586167e-05,6.47110795775e-05,6.47631723467e-05,6.47022787074e-05,2.16254351454e-05,6.4905872169e-05,0.000107930518189,0.000107942587954,2.16138724785e-05,2.15974567603e-05,4.32465536737e-05,8.64449959241e-05,6.47847973418e-05,2.163999849e-05,2.15974567603e-05,8.64341834266e-05,6.48747757613e-05,8.65159896096e-05,2.1582424036e-05,6.48634603569e-05,4.3119024875e-05,0.0,2.15256207058e-05,2.15974567603e-05,2.16748541265e-05,0.0,0.0,2.1593064707e-05,4.31151273468e-05,6.48308552286e-05,2.1593064707e-05,4.31723749114e-05,0.0,2.1593064707e-05,2.00043023004e-05,0.0,4.31766915286e-05,0.0,2.16082860214e-05,6.48940119488e-05,0.0,0.0,4.31346568967e-05,2.16090445726e-05,
0.0,6.48143850289e-05,0.0,4.31193182374e-05,2.16138724785e-05,0.0,0.0,0.0,4.3161059507e-05,2.15974567603e-05,0.0,6.47667765125e-05,6.46756665525e-05,2.15786522345e-05,4.32887140323e-05,4.31331062672e-05,0.0,0.0,0.0,0.0,0.0,2.15879895386e-05,2.16288716757e-05,2.15980351032e-05,0.0,2.15591268756e-05,0.0,2.16090445726e-05,0.0,0.0,0.0,0.0,0.0,4.3110643094e-05,0.0,0.0,4.3230921652e-05,0.0,0.0,2.16217513527e-05,0.0,4.31907310119e-05,0.0,0.0,0.0,0.0,2.16176652344e-05,2.15256207058e-05,0.0,0.0,0.0,2.16138724785e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.15786522345e-05,0.0,0.0,0.0,0.0,0.0,0.0,2.1582424036e-05,0.0,2.1582424036e-05,0.0,0.0,0.0,2.16549976872e-05,0.0,0.0,0.0,2.1582424036e-05,0.0,0.0,0.0,0.0,0.0,0.0,2.16176652344e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.15786522345e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y10_M_8
y10_M_8_weights = numpy.array([0.00204327766361,0.0198698364723,0.0374789279375,0.0494921791435,0.0511297962563,0.047940648454,0.0460186951769,0.0430691399831,0.037299226056,0.0324754905752,0.0310115164754,0.0280365971949,0.0254458666075,0.0238721646173,0.0208423442661,0.0189056299222,0.0170149659488,0.0163079049057,0.0145161341403,0.0138940279376,0.0123641042423,0.0114476365269,0.0109994480653,0.00925399200347,0.00915718821413,0.00944464885378,0.00799340580294,0.00714522064447,0.00677721714931,0.00714843719083,0.00702833051807,0.00651883165334,0.00516346128631,0.00535929490514,0.00462271618335,0.00558824736838,0.00413858734543,0.0037988318396,0.00379876055879,0.00337359134239,0.00317440158561,0.00297707700992,0.00351155385701,0.00272248721686,0.00306414057415,0.00300738026249,0.00254856205069,0.00241231473046,0.00249292738176,0.00221072370154,0.00226544805518,0.0012437141823,0.00198332011081,0.00169741279872,0.00167186605492,0.00147658486233,0.00161732287333,0.00158337538956,0.00139166774137,0.00141899724782,0.00147551461073,0.00127858638602,0.00121869075842,0.00124459271823,0.00102156532347,0.000851401259323,0.0010788005435,0.000848558640189,0.00121677137422,0.00096148362327,0.000652081563577,0.000736883447236,0.000567641875513,0.000938468981597,0.000681323771651,0.000706032668977,0.000567274333858,0.000734513360442,0.000681684482229,0.000852121343964,0.000736389827655,0.000564962311221,0.000622430975886,0.000510749694365,0.00059551771964,0.000594156701755,0.000423797357896,0.000338774206736,0.000680730210441,0.000224842530774,0.000425458052169,0.000567709443777,0.000369113545213,0.000368782534971,0.000340588303244,0.000340464749847,0.00031014397408,0.000283252993086,0.000425969788954,0.000141936894752,0.000397414698148,0.00028394159537,0.000170308704786,0.000453111589284,0.000198686930093,0.000254131516877,0.000312156320329,0.000283833486148,0.000198639261054,0.00011314951762,0.000198045996848,0.000113595987917,0.000312462827794,0.000340512864391,8.5215371733e-05
,0.000142041677536,0.000198319091435,0.000284020895267,0.000340547019777,0.000198880873286,0.000283879373167,2.83716615327e-05,0.000169968932945,5.69041800839e-05,0.000225494898649,5.6785482692e-05,0.000198933888385,0.000198766378491,5.68330329296e-05,8.51252757645e-05,5.69041800839e-05,0.0,8.51197218017e-05,2.83530394222e-05,0.000141981905611,0.000170426021113,2.84112669304e-05,2.84324432698e-05,0.000113576341145,0.000113557719034,0.000142000854425,8.49192593855e-05,8.51197218017e-05,8.66790339792e-05,5.68011644693e-05,0.0,8.50963476374e-05,0.000141870143247,2.84112669304e-05,5.68330329296e-05,0.0,5.68845333118e-05,5.6785482692e-05,5.6523555431e-05,8.50145380626e-05,2.83530394222e-05,5.68237515746e-05,5.6785482692e-05,2.83716615327e-05,5.66177351957e-05,8.52057042737e-05,2.67536169412e-05,5.68633569724e-05,2.70043026251e-05,0.0,2.84324432698e-05,2.84112669304e-05,0.0,2.8260493176e-05,8.53154470143e-05,0.0,5.68526648515e-05,8.51545899959e-05,0.0,0.0,2.84005748096e-05,2.84005748096e-05,2.84112669304e-05,2.84520900419e-05,0.0,2.83530394222e-05,5.66351247423e-05,0.0,0.0,5.67829284631e-05,8.50963476374e-05,0.0,0.0,0.0,0.0,0.0,2.84005748096e-05,0.0,2.79758600083e-05,2.84005748096e-05,2.84324432698e-05,0.0,0.0,0.0,2.83530394222e-05,2.82195661133e-05,0.0,0.0,0.0,0.0,2.84005748096e-05,0.0,0.0,0.0,0.0,0.0,2.83716615327e-05,0.0,5.66547715144e-05,2.83530394222e-05,2.81656451538e-05,2.83530394222e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.61884047017e-05,0.0,0.0,2.83530394222e-05,0.0,0.0,0.0,0.0,0.0,2.84324432698e-05,0.0,2.8260493176e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.83530394222e-05,2.84520900419e-05,0.0,2.83530394222e-05,0.0,2.84520900419e-05,0.0,0.0,2.84005748096e-05,0.0,0.0,0.0,0.0,0.0,2.83530394222e-05,0.0,0.0,0.0,2.83530394222e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.67829284631e-05,0.0,0.0,0.0,0.0,0.0,0.0,2.83716615327e-05,2.83716615327e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0
.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.84005748096e-05,0.0,0.0,0.0,2.83530394222e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y10_M_9
y10_M_9_weights = numpy.array([10.4258826985,28.6806120085,39.111682096,33.8995559628,15.6327214634,18.2280848193,2.60604499341,13.0412996002,10.4204222891,10.4072980797,10.4306086444,5.20808850652,10.4229909887,2.61303893189,2.60667140235,7.81836800897,0.0,0.0,2.60399272545,2.60399272545,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.61802251682,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y10_M_10
y10_M_10_weights = numpy.array([26.3240853567,92.6963991207,134.80174479,119.002982025,75.820133186,80.0498092572,57.9311181559,67.4193021048,43.1815791509,31.5907038456,27.3809349388,31.592985436,20.0048649683,24.2291083151,12.6407339098,13.6874683131,13.6974565232,10.5320519359,6.31812768749,6.32253696666,12.6410878834,5.26646843489,7.37382300803,3.16016577878,12.641992055,4.21152338437,5.27135865638,1.05320788687,3.16097953319,2.10776241223,4.21626739939,3.1628028817,2.10643385717,1.05394199722,3.15950708019,1.05320788687,5.26579896318,0.0,0.0,1.05262806281,1.05368267312,0.0,2.10856193074,1.05340334183,0.0,3.16257472267,0.0,0.0,1.05461454696,2.10671472748,0.0,0.0,0.0,0.0,0.0,0.0,1.05384696302,1.05251763845,1.05193050407,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.10769238702,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.05083126231,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0
,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y10_M_11
y10_M_11_weights = numpy.array([27.8700414614,151.099162407,191.630729983,171.358145197,147.423126768,120.684022161,100.421850202,84.0720966025,61.0351173389,57.5764062561,47.6824906835,42.1505894081,34.0931816027,39.1550762602,32.7126711654,28.5686231,23.7308424694,24.6496304427,19.5797474939,15.8944056306,19.5798704498,12.2070349949,14.7416710007,12.9047597845,9.44738968953,10.3609328251,9.67218381628,8.52183914171,11.2866793339,8.75069465557,6.91406009019,8.06554594922,3.68361356444,7.14273885494,4.14788275256,4.60600875645,4.83673934781,4.14623053263,3.68467866994,2.07327510899,4.37680358674,3.68177306829,3.22319289604,2.53469863646,1.84468202913,1.61289171159,2.07068381337,2.07241480191,2.76201643308,1.61243754323,1.15245836336,1.84215413264,1.38218724836,1.38312632406,1.61280064738,0.69214067053,1.38199436129,0.460027593755,0.460037968159,0.691903980419,0.922012107527,1.38201664705,0.921835742656,0.460792225766,0.45986813532,0.691297654131,0.691431752911,0.691683044034,0.230818889065,0.229912435106,0.230577511261,0.230166646432,0.692119537484,0.460626235299,0.691208126865,0.692076887156,0.461300571571,0.460251988275,0.230340552185,0.460532865662,0.460993950292,0.230531633341,0.461073103154,0.691175466704,0.229912435106,0.0,0.0,0.460917871328,0.0,0.0,0.230818889065,0.0,0.460408757049,0.0,0.230653513378,0.229459457881,0.0,0.0,0.0,0.230578625549,0.0,0.230000579118,0.0,0.0,0.0,0.0,0.230744385474,0.0,0.0,0.229459457881,0.230408639016,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.23056809745,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.230108934006,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,
0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.230000579118,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y10_M_12
y10_M_12_weights = numpy.array([13.4853591266,76.9226544905,113.508654542,107.827966621,88.6955589711,78.9487511272,60.839105826,52.4188305583,42.9220483869,39.8437587755,32.7037849947,30.4036757634,26.1946961189,25.4200666497,23.1494611402,19.743387449,17.5020625272,16.3660788286,14.5385638993,12.5999858658,12.2675198158,12.1289458087,9.85863652904,9.1104446105,9.0271001549,7.44987250366,7.91960838012,6.95002890427,6.20328735775,5.76010829337,4.90060320652,4.87327311916,4.59587888791,4.68011588013,4.12522739196,3.79405744363,3.65563578327,3.73829660197,3.26787324146,2.85218545963,2.63025814849,2.04897442814,1.99386913925,2.29862615856,2.27087981058,1.88309380122,1.91052160595,1.3015538613,1.38446359,1.32864350237,1.21806860036,1.60576920321,1.30123454862,1.32972801132,0.747841828747,1.13566861781,1.07986815059,0.969078962852,0.775184996383,0.996644879933,0.719978142977,0.913615889877,0.664523533712,0.581216010671,0.581467228957,0.276756798519,0.636622145964,0.442979013067,0.498153935205,0.360078940695,0.470706125342,0.719603816189,0.387737535395,0.276827047308,0.332200828148,0.360052472367,0.304630948511,0.221473387014,0.276859017047,0.387722146832,0.16610907014,0.13842366087,0.166149234289,0.249160527498,0.138507374651,0.19374385104,0.193766125984,0.166126690045,0.110807769431,0.0830072152399,0.221561448064,0.13844255033,0.193859303731,0.138384150735,0.0554560711787,0.138426969411,0.110615835583,0.249065349238,0.0553834371628,0.138568044059,0.166153658501,0.0554735756688,0.166172317133,0.0553468893264,0.110824619907,0.166081947799,0.0830605366096,0.0276065852054,0.11090044705,0.0554058659929,0.0832178461918,0.0276261979286,0.0554131370888,0.0831299390274,0.0277086383061,0.0277247116598,0.0,0.0276858901633,0.0830526884426,0.0276261979286,0.0829700903323,0.0830176794625,0.0,0.0,0.0276065852054,0.0,0.0,0.0,0.027693595986,0.0276614569728,0.055495158128,0.0277206913978,0.0277272430784,0.0277247116598,0.0276723905466,0.0,0.0,0.0,0.0,0.0277289473617,0.0,0.0276432830803,0
.0277247116598,0.0,0.0276261979286,0.0277659760907,0.027693595986,0.0276261979286,0.0276858901633,0.0,0.0277334869877,0.0277334869877,0.0553861686327,0.0,0.0276409209359,0.0276261979286,0.0276901027823,0.0276901027823,0.0,0.055456186593,0.0,0.0552966841406,0.0,0.0,0.0276432830803,0.0,0.0,0.0,0.055456186593,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0276981471535,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0277401040697,0.0,0.0,0.0,0.0,0.0276723905466,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0276065852054,0.0,0.0,0.0276409209359,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0277086383061,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.027729174343,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y10_M_13
y10_M_13_weights = numpy.array([4.47540429518,30.7489320592,49.5643309523,50.3297823691,43.6637970133,36.6489744645,31.5967123304,27.0198117952,24.6309642598,21.072193786,18.4093010625,16.2629556043,13.9221509332,12.8642066452,11.5540206315,10.1935404079,9.55815122575,8.57968975159,7.7525720992,6.97711402799,6.15033620135,5.85860793801,5.74722519624,5.04065040988,4.65765769556,4.41590626915,4.11361427987,3.61973096428,3.45779610915,3.02436354533,2.96440493405,2.8029676809,2.6414011726,2.32888953095,2.21765424928,2.2180147073,1.92518383079,1.83487028506,1.4515875052,1.42144496183,1.52257831648,1.28037115947,1.37127515243,1.27036268452,1.18959763532,1.12951826454,0.997591842967,0.816647985362,0.886930017235,0.876947029217,0.816491422787,0.947800089704,0.625151931619,0.574652673297,0.554444571418,0.453721434993,0.544124670449,0.48399735996,0.4435880134,0.544218850726,0.544372561192,0.342847764321,0.473814360219,0.373089281932,0.403240685038,0.393308731563,0.393217464078,0.242005992302,0.231911225887,0.262014871339,0.221728711611,0.2320106856,0.342822641489,0.181454627833,0.302532901454,0.12093038874,0.20173469995,0.191521904772,0.211762229414,0.181426046061,0.181553905496,0.120928750294,0.191557343743,0.0806752987937,0.110926161612,0.161205868231,0.0806621305462,0.110863900682,0.110980837147,0.100869929009,0.100830545633,0.161363826518,0.14111227549,0.161274682944,0.100778297425,0.0201664732256,0.120803560918,0.0402529235572,0.0906685422679,0.0604810508819,0.120993741968,0.0604976052502,0.0907533773372,0.0604943526324,0.0403573714265,0.0402213622414,0.0604808506274,0.0503710772651,0.0,0.0504039736107,0.0503614832562,0.0504511305007,0.030275020788,0.0604823495017,0.0504660828334,0.0201575285266,0.0504181491988,0.0403710069345,0.0302460263702,0.0403440393345,0.0605190203403,0.0202015845068,0.050400799881,0.0403190318008,0.0101024428356,0.0100940018069,0.0201857340633,0.0,0.0403489607394,0.0504263414265,0.02012871009,0.0100703353712,0.0100820654277,0.0302420516226,0.03024
0334289,0.0302385077256,0.0403180305285,0.0201310403237,0.0302198112414,0.0201784096049,0.0,0.0,0.0201927915157,0.0101024428356,0.0201935864652,0.0201900729098,0.0302978497959,0.010084407798,0.0403601264424,0.0100991416712,0.0,0.0201524007989,0.0100944508623,0.0,0.0,0.0403489364661,0.0100838495128,0.0100441202425,0.0302143376197,0.0,0.0,0.0201664732256,0.0,0.0201650350346,0.0100662453257,0.0,0.0100912832009,0.0,0.0100226262643,0.0100786186237,0.0,0.0100796623742,0.0,0.0,0.0,0.0,0.0100787946049,0.0100820654277,0.0201524007989,0.0100662453257,0.0100787946049,0.0100583747188,0.0,0.0100944508623,0.0,0.0100966111831,0.0,0.0,0.0,0.0,0.0,0.0100966111831,0.0,0.0100940018069,0.0,0.0100420570148,0.0,0.0,0.0100912832009,0.0100987897089,0.0100724653504,0.0,0.0100117457722,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0200667525752,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0100838495128,0.0,0.0,0.0,0.0,0.010084407798,0.0,0.0,0.0,0.010084407798,0.0,0.0,0.0,0.0,0.010084407798,0.0,0.0,0.0,0.0,0.0,0.0100117457722,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y10_M_14
y10_M_14_weights = numpy.array([1.47989886415,9.97872138787,15.7298682292,16.3812107749,14.7934406838,12.513560946,10.9545925271,9.41251421529,8.02959149719,7.37271253604,6.78421350014,5.8361783345,5.29903830507,4.59442074536,4.52370544541,3.91563466137,3.542021647,3.23088779315,3.05836823889,2.87720202712,2.6592650136,2.40753739807,2.16426406982,2.19264175803,1.81345154568,1.76841805741,1.63821687787,1.54479419488,1.38069930288,1.3579792234,1.24193918665,1.24769875161,1.11187689841,1.01856463594,1.0495527883,0.905308966028,0.837477680708,0.721388397171,0.749702603205,0.670509931555,0.645110904127,0.636590826231,0.546080243925,0.585671962864,0.512107274934,0.551675524704,0.424396833831,0.486718250758,0.364955430229,0.41309085013,0.353627593269,0.432771095086,0.401819493072,0.328212560637,0.35360716355,0.3168565222,0.263143750427,0.27726907388,0.186709475638,0.254705083616,0.257519614112,0.263079575715,0.226342400282,0.200827027333,0.181027743673,0.147101058962,0.203706348126,0.161265548996,0.198049239552,0.164107396066,0.175352244505,0.17543507913,0.152760745296,0.135856056626,0.113154637063,0.14423705084,0.113188648122,0.0905182386411,0.124479511522,0.0961344108278,0.124494208608,0.0848538584724,0.124493246757,0.0650763895977,0.104660028987,0.084867939974,0.0735956210649,0.0679040397444,0.0791672018301,0.045260254305,0.0792233354659,0.0792268750783,0.0651179800435,0.079208099743,0.0452682569069,0.0480890971471,0.0509315982762,0.0594269758337,0.0395957971882,0.070714722836,0.0254476432349,0.0339367956915,0.0452958427992,0.0367863491136,0.0396005679702,0.0226488524716,0.0339516120473,0.0311150436163,0.0424425689367,0.0311360427517,0.0254737594186,0.0367571780905,0.0339533703113,0.0254550533366,0.0339526623888,0.0311246044172,0.0169698752492,0.0367797815936,0.0254758100854,0.0283012826009,0.0141544021386,0.0141293747705,0.0169784703515,0.0141446797468,0.0169840721728,0.0113331579209,0.0198107450787,0.0141436755741,0.0169894546921,0.011318083789,0.0113175874738,0.01131
37477638,0.0169746614207,0.0113217195865,0.0169758156422,0.0113204384007,0.0141552754995,0.0,0.0141521552543,0.0226311544096,0.0141601540088,0.0113215002844,0.00566972036648,0.00848457404483,0.00848473178843,0.00283247440753,0.0113299953542,0.0169570403068,0.00848853687176,0.00848942562227,0.00848311587842,0.011320792362,0.0113149520015,0.00849038362606,0.0056602519033,0.00283619831062,0.011309273232,0.0,0.00566123299152,0.0141461379132,0.00566790439142,0.0113157984306,0.00282344993487,0.0113289642497,0.0,0.00565939008463,0.00566598068903,0.00849238812395,0.00848219634868,0.00849184563988,0.00847589429964,0.00283619831062,0.00283011133151,0.00566175623857,0.00283099969728,0.0,0.00282705418367,0.00282705418367,0.00566398773335,0.00283619831062,0.00283009170975,0.00282510624263,0.00282705418367,0.00282354650473,0.00283038411251,0.0,0.00282705418367,0.00566714645268,0.00282927990734,0.00283013941757,0.00283009170975,0.00283099969728,0.00283195116048,0.0,0.00282354650473,0.00282541057235,0.0,0.00565099504739,0.0,0.0,0.00282797563712,0.00849024511949,0.00566497651638,0.00848465868774,0.00848876386865,0.0,0.00282510624263,0.00282510624263,0.0,0.0,0.0,0.0,0.00283438387453,0.00282583070895,0.0,0.00282797563712,0.00283011133151,0.0,0.0,0.00283099969728,0.0,0.00283094852679,0.00564927910486,0.0,0.00283198540238,0.00564680907098,0.0,0.0,0.0,0.0,0.00283011133151,0.0,0.00282510624263,0.0,0.00283438387453,0.00566261420984,0.0,0.0,0.0,0.0,0.0,0.00283247440753,0.0,0.0,0.0,0.0,0.0,0.00283011133151,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00282354650473,0.0,0.0,0.00283099969728,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0028213981139,0.0,0.0,0.0,0.00283247440753,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0
,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y10_M_15
y10_M_15_weights = numpy.array([0.13096904571,0.907858580462,1.54456956074,1.72226557902,1.52629588187,1.36016551304,1.11804748028,1.00522662591,1.01909949717,0.904631589386,0.722134195956,0.676276774003,0.594101109502,0.537978051127,0.510574268034,0.493197671417,0.44493578072,0.376228047958,0.3383094258,0.33217140755,0.313836993662,0.25436074771,0.252603449604,0.243817549881,0.230012385167,0.21167253585,0.237580157794,0.198041895699,0.208604826502,0.18268881509,0.167580092502,0.140137906915,0.123405999544,0.132516370797,0.126406238665,0.117336975805,0.103633193674,0.0806849505979,0.0913404018696,0.0868601897266,0.0852944078101,0.070106268865,0.0655447025164,0.0548455551152,0.0823082180934,0.0685657144327,0.0944241811846,0.0517830566897,0.0609739313855,0.0548384181594,0.059375477809,0.062347299085,0.0350597278452,0.0502558072444,0.028934943685,0.0472018400805,0.0457763158867,0.0320478734467,0.0319558965205,0.0319730299407,0.0304511686607,0.0380533853699,0.0304843247828,0.0152419378845,0.0259358743628,0.0182709966002,0.0258672461527,0.0304962000154,0.0304445161672,0.0167310447916,0.0289504701086,0.0213547404667,0.0228931561756,0.02287155625,0.0228688503512,0.0167725785654,0.0137013125553,0.0212924338979,0.0167575838687,0.0152499728678,0.0152732152385,0.0152791114983,0.0137010407838,0.0121643856816,0.022864868308,0.00456769538656,0.0106469991782,0.019799994836,0.012203863445,0.00457687653656,0.00457712703898,0.00458550232743,0.00765758601529,0.00912106841544,0.0121843077136,0.00760837765083,0.0152353562879,0.00607814935362,0.00608951649172,0.00605941957129,0.0121848394405,0.00611288175042,0.00458750516517,0.00764350943357,0.00153593313534,0.00304684793232,0.00759519437019,0.00457513483578,0.00153529506313,0.00611007423274,0.00151229728675,0.00611503819814,0.00763730122738,0.00153529506313,0.00760032848818,0.00152608437275,0.0060683443108,0.00306710317985,0.00304684793232,0.00611747587027,0.00458867260097,0.00456838072337,0.00305905874366,0.00610062958255,0.00304381708
936,0.00761683801557,0.00151080018031,0.0,0.00150814036452,0.00304472929629,0.00454118821307,0.00152608437275,0.00151691622052,0.00152269668201,0.00304667659811,0.00152460026408,0.00304216046487,0.00152413707092,0.00153297437091,0.00912028973103,0.0,0.00153117004452,0.00152460026408,0.0,0.00763090278114,0.00153297437091,0.0,0.0,0.0,0.00455667091686,0.00153117004452,0.0,0.0,0.00304328772576,0.00303454495499,0.00304216046487,0.0,0.0,0.00455702658304,0.0,0.00304587073656,0.0,0.0,0.00154504575166,0.0,0.00305728277603,0.00302926077187,0.0,0.0,0.00152772799948,0.0,0.0,0.0,0.00303454495499,0.00152059104375,0.0,0.0,0.00151229728675,0.00152059104375,0.00152224766824,0.00304823042208,0.0,0.0,0.0,0.00151846059157,0.00151229728675,0.0,0.00151846059157,0.0,0.0,0.0,0.00153529506313,0.0,0.00152460026408,0.0,0.00153593313534,0.0,0.0,0.0,0.00152413707092,0.0,0.0,0.0,0.00303493725123,0.00152460026408,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00152059104375,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00153297437091,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00152413707092,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00150814036452,0.0,0.00152460026408,0.0,0.0015378520784,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0015378520784,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y10_M_16
y10_M_16_weights = numpy.array([0.0223851436634,0.156917332859,0.247006779287,0.290317729645,0.261265360221,0.236186474601,0.226245871361,0.184171632887,0.175866769055,0.16376288283,0.133079318467,0.126025108027,0.115551222854,0.111774306609,0.100383439935,0.0902742676891,0.0783733035777,0.0743908078595,0.0684353962099,0.0649962341757,0.0659078624457,0.0657230411847,0.0543527401603,0.0518195450656,0.0545331324892,0.0447839746525,0.0424221599727,0.033758571899,0.0368398877756,0.0337579056336,0.0344891531284,0.0317824093677,0.0274461421095,0.0227495600454,0.0256366460688,0.0243773658875,0.0191445518358,0.0223882246596,0.0241979556262,0.0236591163455,0.0184149526739,0.0173339658928,0.0160720514597,0.0130003983575,0.0117332385283,0.012274527201,0.0120997730952,0.0128209495837,0.0139069699424,0.0102948792927,0.0092057625328,0.0115594395313,0.00884581744653,0.00812917002252,0.00721768039735,0.00903094295585,0.00650234624798,0.00758254737509,0.00722132367539,0.00740342210592,0.00740637601104,0.00704213293504,0.0059593938373,0.00776731856984,0.00632061368575,0.00469347796035,0.00487511809269,0.00523367673065,0.00451419864136,0.00524022384763,0.00487619259012,0.00397165061143,0.00342898588877,0.00397304861347,0.00288783046914,0.00252774327192,0.00415248583351,0.00216790640564,0.00234712487495,0.0045156390071,0.00252775675128,0.00343039351892,0.00288849057258,0.00325071829192,0.00216763335235,0.0018050255151,0.00234928503843,0.00198738005344,0.00252830786448,0.00180568600366,0.00198501115246,0.00144693480343,0.00198730803515,0.00180606072983,0.00252746405663,0.00180640156503,0.00180566135569,0.00252781875632,0.00108374966451,0.00126337444019,0.000722007587192,0.00126347726844,0.00162533603849,0.0012639005203,0.00144536696148,0.00126301203801,0.00126290920976,0.00144511046854,0.00180495657781,0.000361125415922,0.000902084884198,0.00108215370846,0.00054129868596,0.00108318776782,0.000541047584768,0.00162516080683,0.000902183090952,0.000903844133041,0.000721642104015,0.000902989
156589,0.000541557104518,0.000720893807057,0.000361324871915,0.000541626426933,0.000180407502776,0.000361527370392,0.000361940955622,0.00036178798416,0.000361022241061,0.000361089483803,0.000180800137232,0.00054092781104,0.000722548302029,0.000903096991457,0.000180182782614,0.000722113111312,0.000722333787667,0.00036048884359,0.000180220178206,0.000361534302633,0.000541052591387,0.00018123440365,0.000360789317746,0.00090254780388,0.000180373573305,0.000361209527119,0.000180751842617,0.000542807218736,0.000542466383529,0.0,0.000361653652724,0.000180064241284,0.000180220178206,0.0,0.000180268626871,0.000361868937335,0.000180915520541,0.000360877126139,0.000180915520541,0.0,0.000542333900691,0.000180373573305,0.000180599564378,0.000541670716254,0.000360440394924,0.0,0.0,0.000180069170878,0.000361638401793,0.000360984652907,0.000361418957837,0.000360901966671,0.000180706551972,0.0,0.0,0.0,0.000180069170878,0.0,0.000180268626871,0.0,0.0,0.000180467736252,0.000360821013495,0.0,0.000180373573305,0.000360663805663,0.0,0.0,0.000180706551972,0.0,0.0,0.0,0.0,0.0,0.0,0.000181036141543,0.0,0.000180220178206,0.0,0.0,0.0,0.0,0.0,0.000180832795792,0.0,0.000180820856932,0.0,0.0,0.0,0.000180678553419,0.0,0.0,0.0,0.000180268626871,0.0,0.000180599564378,0.0,0.0,0.000361228359708,0.0,0.000180722919764,0.0,0.0,0.0,0.000180820856932,0.000180915520541,0.0,0.0,0.000180268626871,0.000180722919764,0.0,0.0,0.0,0.0,0.0,0.0,0.000180409389886,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000180599564378,0.0,0.0,0.000180064241284,0.0,0.0,0.000180599564378,0.0,0.0,0.0,0.000180618782092,0.0,0.0,0.0,0.0,0.0,0.0,0.000180373573305,0.0,0.000180569101028,0.0,0.0,0.0,0.0,0.0,0.000180569101028,0.0,0.0,0.0,0.0,0.0,0.0,0.000180618782092,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000180569101028,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00018069191724,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0
.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000180820856932,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating a new Canvas
fig = plt.figure(figsize=(12,6),dpi=80)
frame = gridspec.GridSpec(1,1,right=0.7)
pad = fig.add_subplot(frame[0])
# Creating a new Stack
pad.hist(x=xData, bins=xBinning, weights=y10_M_0_weights+y10_M_1_weights+y10_M_2_weights+y10_M_3_weights+y10_M_4_weights+y10_M_5_weights+y10_M_6_weights+y10_M_7_weights+y10_M_8_weights+y10_M_9_weights+y10_M_10_weights+y10_M_11_weights+y10_M_12_weights+y10_M_13_weights+y10_M_14_weights+y10_M_15_weights+y10_M_16_weights,\
label="$bg\_dip\_1600\_inf$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#e5e5e5", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y10_M_0_weights+y10_M_1_weights+y10_M_2_weights+y10_M_3_weights+y10_M_4_weights+y10_M_5_weights+y10_M_6_weights+y10_M_7_weights+y10_M_8_weights+y10_M_9_weights+y10_M_10_weights+y10_M_11_weights+y10_M_12_weights+y10_M_13_weights+y10_M_14_weights+y10_M_15_weights,\
label="$bg\_dip\_1200\_1600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#f2f2f2", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y10_M_0_weights+y10_M_1_weights+y10_M_2_weights+y10_M_3_weights+y10_M_4_weights+y10_M_5_weights+y10_M_6_weights+y10_M_7_weights+y10_M_8_weights+y10_M_9_weights+y10_M_10_weights+y10_M_11_weights+y10_M_12_weights+y10_M_13_weights+y10_M_14_weights,\
label="$bg\_dip\_800\_1200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ccc6aa", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y10_M_0_weights+y10_M_1_weights+y10_M_2_weights+y10_M_3_weights+y10_M_4_weights+y10_M_5_weights+y10_M_6_weights+y10_M_7_weights+y10_M_8_weights+y10_M_9_weights+y10_M_10_weights+y10_M_11_weights+y10_M_12_weights+y10_M_13_weights,\
label="$bg\_dip\_600\_800$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ccc6aa", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y10_M_0_weights+y10_M_1_weights+y10_M_2_weights+y10_M_3_weights+y10_M_4_weights+y10_M_5_weights+y10_M_6_weights+y10_M_7_weights+y10_M_8_weights+y10_M_9_weights+y10_M_10_weights+y10_M_11_weights+y10_M_12_weights,\
label="$bg\_dip\_400\_600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#c1bfa8", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y10_M_0_weights+y10_M_1_weights+y10_M_2_weights+y10_M_3_weights+y10_M_4_weights+y10_M_5_weights+y10_M_6_weights+y10_M_7_weights+y10_M_8_weights+y10_M_9_weights+y10_M_10_weights+y10_M_11_weights,\
label="$bg\_dip\_200\_400$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#bab5a3", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y10_M_0_weights+y10_M_1_weights+y10_M_2_weights+y10_M_3_weights+y10_M_4_weights+y10_M_5_weights+y10_M_6_weights+y10_M_7_weights+y10_M_8_weights+y10_M_9_weights+y10_M_10_weights,\
label="$bg\_dip\_100\_200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#b2a596", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y10_M_0_weights+y10_M_1_weights+y10_M_2_weights+y10_M_3_weights+y10_M_4_weights+y10_M_5_weights+y10_M_6_weights+y10_M_7_weights+y10_M_8_weights+y10_M_9_weights,\
label="$bg\_dip\_0\_100$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#b7a39b", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y10_M_0_weights+y10_M_1_weights+y10_M_2_weights+y10_M_3_weights+y10_M_4_weights+y10_M_5_weights+y10_M_6_weights+y10_M_7_weights+y10_M_8_weights,\
label="$bg\_vbf\_1600\_inf$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ad998c", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y10_M_0_weights+y10_M_1_weights+y10_M_2_weights+y10_M_3_weights+y10_M_4_weights+y10_M_5_weights+y10_M_6_weights+y10_M_7_weights,\
label="$bg\_vbf\_1200\_1600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#9b8e82", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y10_M_0_weights+y10_M_1_weights+y10_M_2_weights+y10_M_3_weights+y10_M_4_weights+y10_M_5_weights+y10_M_6_weights,\
label="$bg\_vbf\_800\_1200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#876656", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y10_M_0_weights+y10_M_1_weights+y10_M_2_weights+y10_M_3_weights+y10_M_4_weights+y10_M_5_weights,\
label="$bg\_vbf\_600\_800$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#afcec6", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y10_M_0_weights+y10_M_1_weights+y10_M_2_weights+y10_M_3_weights+y10_M_4_weights,\
label="$bg\_vbf\_400\_600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#84c1a3", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y10_M_0_weights+y10_M_1_weights+y10_M_2_weights+y10_M_3_weights,\
label="$bg\_vbf\_200\_400$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#89a8a0", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y10_M_0_weights+y10_M_1_weights+y10_M_2_weights,\
label="$bg\_vbf\_100\_200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#829e8c", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y10_M_0_weights+y10_M_1_weights,\
label="$bg\_vbf\_0\_100$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#adbcc6", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y10_M_0_weights,\
label="$signal$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#7a8e99", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
# Axis
plt.rc('text',usetex=False)
plt.xlabel(r"M [ a_{1} , a_{2} ] ( GeV ) ",\
fontsize=16,color="black")
plt.ylabel(r"$\mathrm{Events}$ $(\mathcal{L}_{\mathrm{int}} = 40.0\ \mathrm{fb}^{-1})$ ",\
fontsize=16,color="black")
# Boundary of y-axis
ymax=(y10_M_0_weights+y10_M_1_weights+y10_M_2_weights+y10_M_3_weights+y10_M_4_weights+y10_M_5_weights+y10_M_6_weights+y10_M_7_weights+y10_M_8_weights+y10_M_9_weights+y10_M_10_weights+y10_M_11_weights+y10_M_12_weights+y10_M_13_weights+y10_M_14_weights+y10_M_15_weights+y10_M_16_weights).max()*1.1
ymin=0 # linear scale
#ymin=min([x for x in (y10_M_0_weights+y10_M_1_weights+y10_M_2_weights+y10_M_3_weights+y10_M_4_weights+y10_M_5_weights+y10_M_6_weights+y10_M_7_weights+y10_M_8_weights+y10_M_9_weights+y10_M_10_weights+y10_M_11_weights+y10_M_12_weights+y10_M_13_weights+y10_M_14_weights+y10_M_15_weights+y10_M_16_weights) if x])/100. # log scale
# Apply the y-range computed above (ymin/ymax).
plt.gca().set_ylim(ymin,ymax)
# Log/Linear scale for X-axis
plt.gca().set_xscale("linear")
#plt.gca().set_xscale("log",nonposx="clip")
# Log/Linear scale for Y-axis
plt.gca().set_yscale("linear")
#plt.gca().set_yscale("log",nonposy="clip")
# Legend
# Anchored outside the axes on the right; the GridSpec above (right=0.7)
# reserves that margin so the legend does not overlap the plot.
plt.legend(bbox_to_anchor=(1.05,1), loc=2, borderaxespad=0.)
# Saving the image
# NOTE(review): the PDF directory receives a .png file (not a .pdf) — confirm
# this is intentional in the MadAnalysis5 output layout.
plt.savefig('../../HTML/MadAnalysis5job_0/selection_9.png')
plt.savefig('../../PDF/MadAnalysis5job_0/selection_9.png')
plt.savefig('../../DVI/MadAnalysis5job_0/selection_9.eps')
# Running!
if __name__ == '__main__':
selection_9()
| 392.829897
| 5,735
| 0.768124
| 16,335
| 76,209
| 3.538843
| 0.204591
| 0.259969
| 0.372879
| 0.476275
| 0.277977
| 0.254986
| 0.253412
| 0.241303
| 0.240057
| 0.233034
| 0
| 0.690109
| 0.021743
| 76,209
| 193
| 5,736
| 394.865285
| 0.085283
| 0.016835
| 0
| 0.185841
| 0
| 0.00885
| 0.013927
| 0.002671
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00885
| false
| 0
| 0.035398
| 0
| 0.044248
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9c8160c70dd4daae52734792bc0763c28599826e
| 250
|
py
|
Python
|
core/dbt/flags.py
|
pieter-lazzaro/dbt
|
b6d1e15a9f677a7569eec47f19c8baebb6ed7818
|
[
"Apache-2.0"
] | null | null | null |
core/dbt/flags.py
|
pieter-lazzaro/dbt
|
b6d1e15a9f677a7569eec47f19c8baebb6ed7818
|
[
"Apache-2.0"
] | 1
|
2019-02-14T20:10:46.000Z
|
2019-02-19T13:06:38.000Z
|
core/dbt/flags.py
|
pieter-lazzaro/dbt
|
b6d1e15a9f677a7569eec47f19c8baebb6ed7818
|
[
"Apache-2.0"
] | null | null | null |
# Run-time behavior flags with their default values.
STRICT_MODE = False
NON_DESTRUCTIVE = False
FULL_REFRESH = False
USE_CACHE = True


def reset():
    """Restore every flag to its default value.

    BUGFIX: ``USE_CACHE`` was missing from the ``global`` statement, so the
    ``USE_CACHE = True`` assignment below bound a function-local name and the
    module-level ``USE_CACHE`` was never actually reset.
    """
    global STRICT_MODE, NON_DESTRUCTIVE, FULL_REFRESH, USE_CACHE
    STRICT_MODE = False
    NON_DESTRUCTIVE = False
    FULL_REFRESH = False
    USE_CACHE = True
| 17.857143
| 53
| 0.736
| 33
| 250
| 5.242424
| 0.393939
| 0.17341
| 0.17341
| 0.208092
| 0.716763
| 0.716763
| 0.716763
| 0.716763
| 0.716763
| 0.716763
| 0
| 0
| 0.212
| 250
| 13
| 54
| 19.230769
| 0.878173
| 0
| 0
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
92f82a90b82bcb2d6a7fd9bd0e8316a4f0ddbcbf
| 129
|
py
|
Python
|
test.py
|
Kushal-kothari/Cryptography-Network_Security
|
fd85ead3e2794d856de2072dc9d40d69c1b94f37
|
[
"MIT"
] | 9
|
2020-08-24T22:07:44.000Z
|
2022-01-15T12:27:23.000Z
|
test.py
|
Kushal-kothari/Cryptography-Network_Security
|
fd85ead3e2794d856de2072dc9d40d69c1b94f37
|
[
"MIT"
] | null | null | null |
test.py
|
Kushal-kothari/Cryptography-Network_Security
|
fd85ead3e2794d856de2072dc9d40d69c1b94f37
|
[
"MIT"
] | 2
|
2020-11-01T00:28:30.000Z
|
2021-02-16T17:21:36.000Z
|
# Smoke test for the project's local packages.
from mathematics import *  # presumably provides is_prime — TODO confirm which module exports it
from ciphers import *
from ciphers.utils import *
import numpy as np
import math
# 59 is prime, so this is expected to print a truthy result
# (assuming is_prime returns a bool — verify against the mathematics package).
print(is_prime(59))
| 14.333333
| 27
| 0.775194
| 20
| 129
| 4.95
| 0.65
| 0.20202
| 0.343434
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018519
| 0.162791
| 129
| 8
| 28
| 16.125
| 0.898148
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.833333
| 0
| 0.833333
| 0.166667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
131828a358cb99f23a182463a70682a6ca2160b5
| 15,494
|
py
|
Python
|
angr/procedures/definitions/win32_traffic.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
angr/procedures/definitions/win32_traffic.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
angr/procedures/definitions/win32_traffic.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
# pylint:disable=line-too-long
import logging
from ...sim_type import SimTypeFunction, SimTypeShort, SimTypeInt, SimTypeLong, SimTypeLongLong, SimTypeDouble, SimTypeFloat, SimTypePointer, SimTypeChar, SimStruct, SimTypeFixedSizeArray, SimTypeBottom, SimUnion, SimTypeBool
from ...calling_conventions import SimCCStdcall, SimCCMicrosoftAMD64
from .. import SIM_PROCEDURES as P
from . import SimLibrary
# Module logger (not used in the visible code; kept for parity with sibling definition modules).
_l = logging.getLogger(name=__name__)
# Describe the Windows traffic.dll (Traffic Control API) to angr's SimLibrary machinery.
lib = SimLibrary()
# Calling conventions: stdcall for 32-bit x86, the Microsoft convention for AMD64.
lib.set_default_cc('X86', SimCCStdcall)
lib.set_default_cc('AMD64', SimCCMicrosoftAMD64)
lib.set_library_names("traffic.dll")
prototypes = \
{
#
'TcRegisterClient': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"ClNotifyHandler": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeBottom(label="Void"), arg_names=["ClRegCtx", "ClIfcCtx", "Event", "SubCode", "BufSize", "Buffer"]), offset=0), "ClAddFlowCompleteHandler": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeBottom(label="Void"), arg_names=["ClFlowCtx", "Status"]), offset=0), "ClModifyFlowCompleteHandler": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeBottom(label="Void"), arg_names=["ClFlowCtx", "Status"]), offset=0), "ClDeleteFlowCompleteHandler": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeBottom(label="Void"), arg_names=["ClFlowCtx", "Status"]), offset=0)}, name="TCI_CLIENT_FUNC_LIST", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["TciVersion", "ClRegCtx", "ClientHandlerList", "pClientHandle"]),
#
'TcEnumerateInterfaces': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimStruct({"Length": SimTypeInt(signed=False, label="UInt32"), "pInterfaceName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "pInterfaceID": SimTypePointer(SimTypeChar(label="Char"), offset=0), "AddressListDesc": SimStruct({"MediaType": SimTypeInt(signed=False, label="UInt32"), "AddressList": SimStruct({"AddressCount": SimTypeInt(signed=True, label="Int32"), "AddressType": SimTypeShort(signed=False, label="UInt16"), "Address": SimTypePointer(SimStruct({"AddressLength": SimTypeShort(signed=False, label="UInt16"), "AddressType": SimTypeShort(signed=False, label="UInt16"), "Address": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="NETWORK_ADDRESS", pack=False, align=None), offset=0)}, name="NETWORK_ADDRESS_LIST", pack=False, align=None)}, name="ADDRESS_LIST_DESCRIPTOR", pack=False, align=None)}, name="TC_IFC_DESCRIPTOR", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["ClientHandle", "pBufferSize", "InterfaceBuffer"]),
#
'TcOpenInterfaceA': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pInterfaceName", "ClientHandle", "ClIfcCtx", "pIfcHandle"]),
#
'TcOpenInterfaceW': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pInterfaceName", "ClientHandle", "ClIfcCtx", "pIfcHandle"]),
#
'TcCloseInterface': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["IfcHandle"]),
#
'TcQueryInterface': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypeChar(label="Byte"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["IfcHandle", "pGuidParam", "NotifyChange", "pBufferSize", "Buffer"]),
#
'TcSetInterface': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["IfcHandle", "pGuidParam", "BufferSize", "Buffer"]),
#
'TcQueryFlowA': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pFlowName", "pGuidParam", "pBufferSize", "Buffer"]),
#
'TcQueryFlowW': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pFlowName", "pGuidParam", "pBufferSize", "Buffer"]),
#
'TcSetFlowA': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pFlowName", "pGuidParam", "BufferSize", "Buffer"]),
#
'TcSetFlowW': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pFlowName", "pGuidParam", "BufferSize", "Buffer"]),
#
'TcAddFlow': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"SendingFlowspec": SimStruct({"TokenRate": SimTypeInt(signed=False, label="UInt32"), "TokenBucketSize": SimTypeInt(signed=False, label="UInt32"), "PeakBandwidth": SimTypeInt(signed=False, label="UInt32"), "Latency": SimTypeInt(signed=False, label="UInt32"), "DelayVariation": SimTypeInt(signed=False, label="UInt32"), "ServiceType": SimTypeInt(signed=False, label="UInt32"), "MaxSduSize": SimTypeInt(signed=False, label="UInt32"), "MinimumPolicedSize": SimTypeInt(signed=False, label="UInt32")}, name="FLOWSPEC", pack=False, align=None), "ReceivingFlowspec": SimStruct({"TokenRate": SimTypeInt(signed=False, label="UInt32"), "TokenBucketSize": SimTypeInt(signed=False, label="UInt32"), "PeakBandwidth": SimTypeInt(signed=False, label="UInt32"), "Latency": SimTypeInt(signed=False, label="UInt32"), "DelayVariation": SimTypeInt(signed=False, label="UInt32"), "ServiceType": SimTypeInt(signed=False, label="UInt32"), "MaxSduSize": SimTypeInt(signed=False, label="UInt32"), "MinimumPolicedSize": SimTypeInt(signed=False, label="UInt32")}, name="FLOWSPEC", pack=False, align=None), "TcObjectsLength": SimTypeInt(signed=False, label="UInt32"), "TcObjects": SimTypePointer(SimStruct({"ObjectType": SimTypeInt(signed=False, label="UInt32"), "ObjectLength": SimTypeInt(signed=False, label="UInt32")}, name="QOS_OBJECT_HDR", pack=False, align=None), offset=0)}, name="TC_GEN_FLOW", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["IfcHandle", "ClFlowCtx", "Flags", "pGenericFlow", "pFlowHandle"]),
#
'TcGetFlowNameA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["FlowHandle", "StrSize", "pFlowName"]),
#
'TcGetFlowNameW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["FlowHandle", "StrSize", "pFlowName"]),
#
'TcModifyFlow': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"SendingFlowspec": SimStruct({"TokenRate": SimTypeInt(signed=False, label="UInt32"), "TokenBucketSize": SimTypeInt(signed=False, label="UInt32"), "PeakBandwidth": SimTypeInt(signed=False, label="UInt32"), "Latency": SimTypeInt(signed=False, label="UInt32"), "DelayVariation": SimTypeInt(signed=False, label="UInt32"), "ServiceType": SimTypeInt(signed=False, label="UInt32"), "MaxSduSize": SimTypeInt(signed=False, label="UInt32"), "MinimumPolicedSize": SimTypeInt(signed=False, label="UInt32")}, name="FLOWSPEC", pack=False, align=None), "ReceivingFlowspec": SimStruct({"TokenRate": SimTypeInt(signed=False, label="UInt32"), "TokenBucketSize": SimTypeInt(signed=False, label="UInt32"), "PeakBandwidth": SimTypeInt(signed=False, label="UInt32"), "Latency": SimTypeInt(signed=False, label="UInt32"), "DelayVariation": SimTypeInt(signed=False, label="UInt32"), "ServiceType": SimTypeInt(signed=False, label="UInt32"), "MaxSduSize": SimTypeInt(signed=False, label="UInt32"), "MinimumPolicedSize": SimTypeInt(signed=False, label="UInt32")}, name="FLOWSPEC", pack=False, align=None), "TcObjectsLength": SimTypeInt(signed=False, label="UInt32"), "TcObjects": SimTypePointer(SimStruct({"ObjectType": SimTypeInt(signed=False, label="UInt32"), "ObjectLength": SimTypeInt(signed=False, label="UInt32")}, name="QOS_OBJECT_HDR", pack=False, align=None), offset=0)}, name="TC_GEN_FLOW", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["FlowHandle", "pGenericFlow"]),
#
'TcAddFilter': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"AddressType": SimTypeShort(signed=False, label="UInt16"), "PatternSize": SimTypeInt(signed=False, label="UInt32"), "Pattern": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "Mask": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="TC_GEN_FILTER", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["FlowHandle", "pGenericFilter", "pFilterHandle"]),
#
'TcDeregisterClient': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["ClientHandle"]),
#
'TcDeleteFlow': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["FlowHandle"]),
#
'TcDeleteFilter': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["FilterHandle"]),
#
'TcEnumerateFlows': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimStruct({"Length": SimTypeInt(signed=False, label="UInt32"), "OwnerProcessId": SimTypeInt(signed=False, label="UInt32"), "FlowNameLength": SimTypeShort(signed=False, label="UInt16"), "FlowName": SimTypeFixedSizeArray(SimTypeChar(label="Char"), 256), "pFlow": SimTypePointer(SimStruct({"SendingFlowspec": SimStruct({"TokenRate": SimTypeInt(signed=False, label="UInt32"), "TokenBucketSize": SimTypeInt(signed=False, label="UInt32"), "PeakBandwidth": SimTypeInt(signed=False, label="UInt32"), "Latency": SimTypeInt(signed=False, label="UInt32"), "DelayVariation": SimTypeInt(signed=False, label="UInt32"), "ServiceType": SimTypeInt(signed=False, label="UInt32"), "MaxSduSize": SimTypeInt(signed=False, label="UInt32"), "MinimumPolicedSize": SimTypeInt(signed=False, label="UInt32")}, name="FLOWSPEC", pack=False, align=None), "ReceivingFlowspec": SimStruct({"TokenRate": SimTypeInt(signed=False, label="UInt32"), "TokenBucketSize": SimTypeInt(signed=False, label="UInt32"), "PeakBandwidth": SimTypeInt(signed=False, label="UInt32"), "Latency": SimTypeInt(signed=False, label="UInt32"), "DelayVariation": SimTypeInt(signed=False, label="UInt32"), "ServiceType": SimTypeInt(signed=False, label="UInt32"), "MaxSduSize": SimTypeInt(signed=False, label="UInt32"), "MinimumPolicedSize": SimTypeInt(signed=False, label="UInt32")}, name="FLOWSPEC", pack=False, align=None), "TcObjectsLength": SimTypeInt(signed=False, label="UInt32"), "TcObjects": SimTypePointer(SimStruct({"ObjectType": SimTypeInt(signed=False, label="UInt32"), "ObjectLength": SimTypeInt(signed=False, label="UInt32")}, name="QOS_OBJECT_HDR", pack=False, 
align=None), offset=0)}, name="TC_GEN_FLOW", pack=False, align=None), offset=0), "NumberOfFilters": SimTypeInt(signed=False, label="UInt32"), "GenericFilter": SimTypePointer(SimStruct({"AddressType": SimTypeShort(signed=False, label="UInt16"), "PatternSize": SimTypeInt(signed=False, label="UInt32"), "Pattern": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "Mask": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="TC_GEN_FILTER", pack=False, align=None), offset=0)}, name="ENUMERATION_BUFFER", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["IfcHandle", "pEnumHandle", "pFlowCount", "pBufSize", "Buffer"]),
}
lib.set_prototypes(prototypes)
| 249.903226
| 2,669
| 0.740157
| 1,634
| 15,494
| 6.977968
| 0.104651
| 0.188037
| 0.151552
| 0.232591
| 0.867041
| 0.840905
| 0.831345
| 0.828451
| 0.808279
| 0.808279
| 0
| 0.022027
| 0.074093
| 15,494
| 61
| 2,670
| 254
| 0.772759
| 0.001807
| 0
| 0
| 0
| 0
| 0.230096
| 0.00791
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.147059
| 0
| 0.147059
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1382e1950e93fb3dbe54ecdda57364d72349d717
| 19,862
|
py
|
Python
|
pytests/tuqquery/tuq_chained_let.py
|
sumedhpb/testrunner
|
9ff887231c75571624abc31a3fb5248110e01203
|
[
"Apache-2.0"
] | 14
|
2015-02-06T02:47:57.000Z
|
2020-03-14T15:06:05.000Z
|
pytests/tuqquery/tuq_chained_let.py
|
sumedhpb/testrunner
|
9ff887231c75571624abc31a3fb5248110e01203
|
[
"Apache-2.0"
] | 3
|
2019-02-27T19:29:11.000Z
|
2021-06-02T02:14:27.000Z
|
pytests/tuqquery/tuq_chained_let.py
|
sumedhpb/testrunner
|
9ff887231c75571624abc31a3fb5248110e01203
|
[
"Apache-2.0"
] | 108
|
2015-03-26T08:58:49.000Z
|
2022-03-21T05:21:39.000Z
|
from .tuq import QueryTests
class QueryChainedLetTests(QueryTests):
def setUp(self):
super(QueryChainedLetTests, self).setUp()
self.log.info("============== QueryChainedLetTests setup has started ==============")
self.log.info("============== QueryChainedLetTests setup has completed ==============")
self.log_config_info()
self.query_bucket = self.get_query_buckets(check_all_buckets=True)[0]
def suite_setUp(self):
super(QueryChainedLetTests, self).suite_setUp()
self.log.info("============== QueryChainedLetTests suite_setup has started ==============")
self.log.info("============== QueryChainedLetTests suite_setup has completed ==============")
def tearDown(self):
self.log.info("============== QueryChainedLetTests tearDown has started ==============")
self.log.info("============== QueryChainedLetTests tearDown has completed ==============")
super(QueryChainedLetTests, self).tearDown()
def suite_tearDown(self):
self.log.info("============== QueryChainedLetTests suite_tearDown has started ==============")
self.log.info("============== QueryChainedLetTests suite_tearDown has completed ==============")
super(QueryChainedLetTests, self).suite_tearDown()
def verifier(self, compare_query):
    """Build a callback that checks a task's first query result against *compare_query*."""
    def _check(task):
        return self.compare_queries(task['q_res'][0], compare_query)
    return _check
def compare_queries(self, actual_results, compare_query):
    """Assert that *actual_results* matches the rows returned by *compare_query*."""
    observed = actual_results['results']
    expected = self.run_cbq_query(query=compare_query)['results']
    # Check the row counts first for a clearer failure message, then the contents.
    self.assertEqual(len(observed), len(expected))
    self.assertEqual(observed, expected)
# creates a let query for different scenarios and and equivalent query without let to compare results with
def test_basic_chained_let(self):
queries = dict()
# constants
query_1 = 'select a, b, c from ' + self.query_bucket + ' let a=1,b=2,c=3 order by a, b, c limit 10'
verify_1 = 'select 1 as a, 2 as b, 3 as c from ' + self.query_bucket + ' order by a, b, c limit 10'
query_2 = 'select a, b, c from ' + self.query_bucket + ' let a=cos(1),b=cos(2),c=cos(3) order by a, b, c limit 10'
verify_2 = 'select cos(1) as a, cos(2) as b, cos(3) as c from ' + self.query_bucket + ' order by a, b, c limit 10'
query_3 = 'select a, b, c from ' + self.query_bucket + ' let a=1,b=a+1,c=b+1 order by a, b, c limit 10'
verify_3 = 'select 1 as a, 2 as b, 3 as c from ' + self.query_bucket + ' order by a, b, c limit 10'
query_4 = 'select a, b, c from ' + self.query_bucket + ' let a=cos(1),b=cos(a+1),c=cos(b+1) order by a, b, c limit 10'
verify_4 = 'select cos(1) as a, cos(cos(1)+1) as b, cos(cos(cos(1)+1)+1) as c from ' + self.query_bucket + ' order by a, b, c limit 10'
# fields
query_5 = 'select a, b, c from ' + self.query_bucket + ' let a=join_yr,b=join_day,c=join_mo order by a, b, c limit 10'
verify_5 = 'select join_yr as a, join_day as b, join_mo as c from ' + self.query_bucket + ' order by a, b, c limit 10'
query_6 = 'select a, b, c from ' + self.query_bucket + ' let a=cos(join_yr+1),b=cos(join_day+1),c=cos(join_mo+1) order by a, b, c limit 10'
verify_6 = 'select cos(join_yr+1) as a, cos(join_day+1) as b, cos(join_mo+1) as c from ' + self.query_bucket + ' order by a, b, c limit 10'
query_7 = 'select a, b, c from ' + self.query_bucket + ' let a=join_yr,b=a+join_day,c=b+join_mo order by a, b, c limit 10'
verify_7 = 'select join_yr as a, join_yr+join_day as b, join_yr+join_day+join_mo as c from ' + self.query_bucket + ' order by a, b, c limit 10'
query_8 = 'select a, b, c from ' + self.query_bucket + ' let a=cos(join_yr+1),b=cos(a+join_day+1),c=cos(b+join_mo+1) order by a, b, c limit 10'
verify_8 = 'select cos(join_yr+1) as a, cos(cos(join_yr+1)+join_day+1) as b, cos(cos(cos(join_yr+1)+join_day+1)+join_mo+1) as c from ' + self.query_bucket + ' order by a, b, c limit 10'
# subqueries
query_9 = 'select a, b, c from ' + self.query_bucket + ' d0 let a=(select join_yr from ' + self.query_bucket + ' d2 order by join_yr limit 5), b=(select join_mo from ' + self.query_bucket + ' d3 order by join_mo limit 5), c=(select join_day from ' + self.query_bucket + ' d4 order by join_day limit 5) order by a, b, c limit 10'
verify_9 = 'select (select join_yr from ' + self.query_bucket + ' d2 order by join_yr limit 5) as a, (select join_mo from ' + self.query_bucket + ' d3 order by join_mo limit 5) as b, (select join_day from ' + self.query_bucket + ' d4 order by join_day limit 5) as c from ' + self.query_bucket + ' d0 order by a, b, c limit 10'
query_10 = 'select a, b from ' + self.query_bucket + ' d0 let usekeys=(select meta(d1).id, join_day from ' + self.query_bucket + ' d1 limit 10), a=(select join_day from ' + self.query_bucket + ' d2 limit 10), b=(select join_mo from ' + self.query_bucket + ' d3 use keys usekeys[*].id where join_day in a[*].join_day limit 10) order by a, b limit 10'
verify_10 = 'select (select join_day from ' + self.query_bucket + ' d1 limit 10) as a, (select join_mo from ' + self.query_bucket + ' d2 where join_mo in (select raw join_mo from ' + self.query_bucket + ' d3 limit 10) limit 10) as b from ' + self.query_bucket + ' d7 order by a, b limit 10'
query_11 = 'select a, b, c from ' + self.query_bucket + ' d0 let usekeys=(select raw meta(d1).id from ' + self.query_bucket + ' d1 order by meta(d1).id limit 10),a=1,b=join_mo+a,c=(select join_day from ' + self.query_bucket + ' d2 use keys usekeys where join_day != b order by join_day limit 10) order by a, b, c limit 10'
verify_11 = 'select 1 as a, join_mo+1 as b, (select join_day from ' + self.query_bucket + ' d2 use keys (select raw meta(d1).id from ' + self.query_bucket + ' d1 order by meta(d1).id limit 10) where join_day != (join_mo+1) order by join_day limit 10) as c from ' + self.query_bucket + ' d0 order by a, b, c limit 10'
# full query
#/MB-34680
query_12 = 'select a, b, c from ' + self.query_bucket + ' let a=join_yr, b=join_mo, c=join_day where (a > 100 OR c < 100) and b != 200 group by a,b,c having (a>b and b>c) or a == b+c order by a, b, c limit 10'
verify_12 = 'select join_yr as a, join_mo as b, join_day as c from ' + self.query_bucket + ' where (join_yr > 100 OR join_day < 100) and join_mo != 200 group by join_yr, join_mo, join_day having (join_yr>join_mo and join_mo>join_day) or join_yr == join_mo+join_day order by join_yr, join_mo, join_day limit 10'
# mixed
query_13 = 'select a, b, c from ' + self.query_bucket + ' d0 let usekeys=(select raw meta(d1).id from ' + self.query_bucket + ' d1 order by meta(d1).id limit 10),a=1,b=cos(join_day)+a,c=(select join_day from ' + self.query_bucket + ' d1 use keys usekeys where join_day > a + b order by join_day limit 10) where join_day in c order by a,b,c limit 10'
verify_13 = 'select 1 as a, cos(join_day)+1 as b, (select join_day from ' + self.query_bucket + ' d1 use keys (select raw meta(d1).id from ' + self.query_bucket + ' d1 order by meta(d1).id limit 10) where join_day > 1 + cos(join_day)+1 order by join_day limit 10) as c from ' + self.query_bucket + ' d0 where join_day in (select join_day from ' + self.query_bucket + ' d1 use keys (select raw meta(d1).id from ' + self.query_bucket + ' d1 order by meta(d1).id limit 10) where join_day > 1 + cos(join_day)+1 order by join_day limit 10) order by a,b,c limit 10'
queries["a"] = {"queries": [query_1], "asserts": [self.verifier(verify_1)]}
queries["b"] = {"queries": [query_2], "asserts": [self.verifier(verify_2)]}
queries["c"] = {"queries": [query_3], "asserts": [self.verifier(verify_3)]}
queries["c"] = {"queries": [query_4], "asserts": [self.verifier(verify_4)]}
queries["e"] = {"queries": [query_5], "asserts": [self.verifier(verify_5)]}
queries["f"] = {"queries": [query_6], "asserts": [self.verifier(verify_6)]}
queries["g"] = {"queries": [query_7], "asserts": [self.verifier(verify_7)]}
queries["h"] = {"queries": [query_8], "asserts": [self.verifier(verify_8)]}
queries["i"] = {"queries": [query_9], "asserts": [self.verifier(verify_9)]}
queries["j"] = {"queries": [query_10], "asserts": [self.verifier(verify_10)]}
queries["k"] = {"queries": [query_11], "asserts": [self.verifier(verify_11)]}
queries["l"] = {"queries": [query_12], "asserts": [self.verifier(verify_12)]}
queries["m"] = {"queries": [query_13], "asserts": [self.verifier(verify_13)]}
self.query_runner(queries)
def test_basic_chained_letting(self):
queries = dict()
# constants
query_1 = 'select a, b, c from ' + self.query_bucket + ' group by a, b, c letting a=1,b=2,c=3 order by a, b, c'
verify_1 = 'select 1 as a, 2 as b, 3 as c from ' + self.query_bucket + ' group by a, b, c order by a, b, c'
query_2 = 'select a, b, c from ' + self.query_bucket + ' group by a, b, c letting a=cos(1),b=cos(2),c=cos(3) order by a, b, c'
verify_2 = 'select cos(1) as a, cos(2) as b, cos(3) as c from ' + self.query_bucket + ' group by a, b, c order by a, b, c'
query_3 = 'select a, b, c from ' + self.query_bucket + ' group by a, b, c letting a=1,b=a+1,c=b+1 order by a, b, c'
verify_3 = 'select 1 as a, 2 as b, 3 as c from ' + self.query_bucket + ' group by a, b, c order by a, b, c'
query_4 = 'select a, b, c from ' + self.query_bucket + ' group by a, b, c letting a=cos(1),b=cos(a+1),c=cos(b+1) order by a, b, c'
verify_4 = 'select cos(1) as a, cos(cos(1)+1) as b, cos(cos(cos(1)+1)+1) as c from ' + self.query_bucket + ' group by a, b, c order by a, b, c'
# fields
query_5 = 'select a, b, c from ' + self.query_bucket + ' group by join_yr, join_mo, join_day letting a=join_yr,b=join_mo,c=join_day order by a, b, c'
verify_5 = 'select join_yr as a, join_mo as b, join_day as c from ' + self.query_bucket + ' group by join_yr, join_mo, join_day order by a, b, c'
query_6 = 'select a, b, c from ' + self.query_bucket + ' group by join_yr, join_mo, join_day letting a=join_yr,b=a+join_mo,c=b+join_day order by a, b, c'
verify_6 = 'select join_yr as a, join_yr+join_mo as b, join_yr+join_mo+join_day as c from ' + self.query_bucket + ' group by join_yr, join_mo, join_day order by a, b, c'
query_7 = 'select a, b, c from ' + self.query_bucket + ' group by join_yr, join_mo, join_day letting a=cos(join_yr+1),b=cos(join_mo+1),c=cos(join_day+1) order by a, b, c'
verify_7 = 'select cos(join_yr+1) as a, cos(join_mo+1) as b, cos(join_day+1) as c from ' + self.query_bucket + ' group by join_yr, join_mo, join_day order by a, b, c'
query_8 = 'select a, b, c from ' + self.query_bucket + ' group by join_yr, join_mo, join_day letting a=cos(join_yr+1),b=cos(a+join_mo+1),c=cos(b+join_day+1) order by a, b, c'
verify_8 = 'select cos(join_yr+1) as a, cos(cos(join_yr+1)+join_mo+1) as b, cos(cos(cos(join_yr+1)+join_mo+1)+join_day+1) as c from ' + self.query_bucket + ' group by join_yr, join_mo, join_day order by a, b, c'
# subqueries
query_9 = 'select a, b, c from ' + self.query_bucket + ' d0 group by join_yr, join_mo, join_day letting a=(select join_yr from ' + self.query_bucket + ' d1 order by join_yr limit 10),b=(select join_mo from ' + self.query_bucket + ' d2 order by join_mo limit 10),c=(select join_day from ' + self.query_bucket + ' d3 order by join_day limit 10) order by a, b, c limit 10'
verify_9 = 'select (select join_yr from ' + self.query_bucket + ' d1 order by join_yr limit 10) as a, (select join_mo from ' + self.query_bucket + ' d2 order by join_mo limit 10) as b, (select join_day from ' + self.query_bucket + ' d3 order by join_day limit 10) as c from ' + self.query_bucket + ' d0 group by join_yr, join_mo, join_day order by a, b, c limit 10'
# full query
query_10 = 'select a, b, c from ' + self.query_bucket + ' d0 where join_yr > 0 group by join_yr letting a=join_yr,b=SUM(join_mo),c=SUM(join_day),d=b+c having d > 0 and a > 0 order by a, b, c limit 10'
verify_10 = 'select join_yr as a, SUM(join_mo) as b, SUM(join_day) as c from ' + self.query_bucket + ' d0 where join_yr > 0 group by join_yr having SUM(join_mo)+SUM(join_day) > 0 and join_yr > 0 order by a, b, c limit 10'
# mixed
query_11 = 'select a, b, c from ' + self.query_bucket + ' d0 where join_yr > 0 group by join_yr letting aa=1,aaa=aa+1,aaaa=join_yr,a=aaaa+aaa,b=SUM(join_mo),c=SUM(join_day),d=b+c,e=(select raw join_yr from ' + self.query_bucket + ' d1 order by join_yr) having d > 0 and a > 0 and aaaa in e order by a, b, c limit 10'
verify_11 = 'select join_yr+2 as a, SUM(join_mo) as b, SUM(join_day) as c from ' + self.query_bucket + ' d0 where join_yr > 0 group by join_yr having SUM(join_mo) + SUM(join_day) > 0 and join_yr + 2 > 0 and join_yr in (select raw join_yr from ' + self.query_bucket + ' d1 order by join_yr) order by a, b, c limit 10'
queries["a"] = {"queries": [query_1], "asserts": [self.verifier(verify_1)]}
queries["b"] = {"queries": [query_2], "asserts": [self.verifier(verify_2)]}
queries["c"] = {"queries": [query_3], "asserts": [self.verifier(verify_3)]}
queries["d"] = {"queries": [query_4], "asserts": [self.verifier(verify_4)]}
queries["e"] = {"queries": [query_5], "asserts": [self.verifier(verify_5)]}
queries["f"] = {"queries": [query_6], "asserts": [self.verifier(verify_6)]}
queries["g"] = {"queries": [query_7], "asserts": [self.verifier(verify_7)]}
queries["h"] = {"queries": [query_8], "asserts": [self.verifier(verify_8)]}
queries["i"] = {"queries": [query_9], "asserts": [self.verifier(verify_9)]}
queries["j"] = {"queries": [query_10], "asserts": [self.verifier(verify_10)]}
queries["k"] = {"queries": [query_11], "asserts": [self.verifier(verify_11)]}
self.query_runner(queries)
def test_chained_let_and_letting(self):
queries = dict()
# constants
query_1 = 'select a, b, c, aa, bb, cc from ' + self.query_bucket + ' let a=1,b=2,c=3 group by a, b, c letting aa=a+1,bb=b+2,cc=c+3 order by a, b, c'
verify_1 = 'select 1 as a, 2 as b, 3 as c, 2 as aa, 4 as bb, 6 as cc from ' + self.query_bucket + ' group by a, b, c order by a, b, c'
# fields
query_2 = 'select a, b, c, aa, bb, cc from ' + self.query_bucket + ' let a=join_yr,b=join_day,c=join_mo group by a, b, c letting aa=cos(a),bb=aa+cos(b),cc=bb+c having aa > 0 and bb > aa and cc > 0 order by a, b, c limit 10'
verify_2 = 'select join_yr as a, join_day as b, join_mo as c, cos(join_yr) as aa, cos(join_yr)+cos(join_day) as bb, cos(join_yr)+cos(join_day)+join_mo as cc from ' + self.query_bucket + ' group by join_yr, join_day, join_mo having cos(join_yr) > 0 and cos(join_yr)+cos(join_day) > cos(join_yr) and cos(join_yr)+cos(join_day)+join_mo > 0 order by a, b, c limit 10'
# subqueries
query_3 = 'select a, b, c, aa, bb from ' + self.query_bucket + ' d0 let usekeys=(select raw meta(d1).id from ' + self.query_bucket + ' d1 order by meta(d1).id limit 20),a=(select raw join_day from ' + self.query_bucket + ' d2),b=(select raw join_mo from ' + self.query_bucket + ' d3 use keys usekeys where join_mo in a),c=join_yr group by a,b,c letting aa=(1 in a),bb=COUNT((select raw meta(d4).id from ' + self.query_bucket + ' d4 order by meta(d4).id limit 10)) having aa and bb != 0 order by a, b, c limit 10'
verify_3 = 'select (select raw join_day from ' + self.query_bucket + ' d2) as a, (select raw join_mo from ' + self.query_bucket + ' d3 use keys (select raw meta(d1).id from ' + self.query_bucket + ' d1 order by meta(d1).id limit 20) where join_mo in (select raw join_day from ' + self.query_bucket + ' d6)) as b, join_yr as c, (1 in (select raw join_day from ' + self.query_bucket + ' d2)) as aa, COUNT((select raw meta(d4).id from ' + self.query_bucket + ' d4 order by meta(d4).id limit 10)) as bb from ' + self.query_bucket + ' d0 group by a,b,join_yr having (1 in (select raw join_day from ' + self.query_bucket + ' d2)) and COUNT((select raw meta(d5).id from ' + self.query_bucket + ' d5 order by meta(d5).id limit 10)) != 0 order by a, b, c limit 10'
queries["a"] = {"queries": [query_1], "asserts": [self.verifier(verify_1)]}
queries["b"] = {"queries": [query_2], "asserts": [self.verifier(verify_2)]}
queries["c"] = {"queries": [query_3], "asserts": [self.verifier(verify_3)]}
self.query_runner(queries)
def test_chained_let_in_let_subquery(self):
queries = dict()
query_1 = 'select a, b, c from ' + self.query_bucket + ' d0 let usekeys=(select raw meta(d1).id from ' + self.query_bucket + ' d1 order by meta(d1).id limit 20), a=join_yr, b=a+1, c=(select e from ' + self.query_bucket + ' d4 use keys usekeys let d=b+1,e=d+1 order by join_day limit 5) order by a, b, c limit 10'
verify_1 = 'select join_yr as a, join_yr+1 as b, (select d0.join_yr+3 as e from ' + self.query_bucket + ' d4 use keys (select raw meta(d1).id from ' + self.query_bucket + ' d1 order by meta(d1).id limit 20) order by join_day limit 5) as c from ' + self.query_bucket + ' d0 order by a, b, c limit 10'
queries["a"] = {"queries": [query_1], "asserts": [self.verifier(verify_1)]}
self.query_runner(queries)
def test_chained_let_in_letting_subquery(self):
queries = dict()
query_1 = 'select a from ' + self.query_bucket + ' d0 group by join_yr letting a=(select bb from ' + self.query_bucket + ' d1 let aa=join_yr,bb=aa+1 order by join_yr limit 10) order by a limit 10'
verify_1 = 'select (select join_yr+1 as bb from ' + self.query_bucket + ' d1 order by join_yr limit 10) as a from ' + self.query_bucket + ' d0 group by join_yr order by a limit 10'
queries["a"] = {"queries": [query_1], "asserts": [self.verifier(verify_1)]}
self.query_runner(queries)
def test_chained_letting_in_let_subquery(self):
queries = dict()
query_1 = 'select a from ' + self.query_bucket + ' d0 let a=(select aaa from ' + self.query_bucket + ' d2 let aaa=join_yr,aaaa=aaa+1 group by aaa, aaaa letting bb=aaaa+1,cc=bb+1 having cc > 0 order by aaa limit 5) group by a order by a limit 10'
verify_1 = 'select (select join_yr as aaa from ' + self.query_bucket + ' d2 group by join_yr, join_yr + 1 having join_yr+3 > 0 order by join_yr limit 5) as a from ' + self.query_bucket + ' d0 group by a order by a limit 10'
queries["a"] = {"queries": [query_1], "asserts": [self.verifier(verify_1)]}
self.query_runner(queries)
def test_chained_letting_in_letting_subquery(self):
queries = dict()
query_1 = 'select a from ' + self.query_bucket + ' d0 group by join_yr letting a=(select bb from ' + self.query_bucket + ' d1 let aa=join_yr,bb=aa+1 group by bb letting cc=bb+1,dd=cc+1 having dd > 0 order by bb limit 10) order by a limit 10'
verify_1 = 'select (select join_yr+1 as bb from ' + self.query_bucket + ' d1 group by join_yr + 1 having join_yr + 1 + 2 > 0 order by join_yr+1 limit 10) as a from ' + self.query_bucket + ' d0 group by join_yr order by a limit 10'
queries["a"] = {"queries": [query_1], "asserts": [self.verifier(verify_1)]}
self.query_runner(queries)
def test_chained_let_and_letting_index_selection(self):
self.fail()
def test_chained_let_and_letting_with_joins(self):
self.fail()
def test_chained_let_and_letting_negative(self):
self.fail()
| 87.497797
| 763
| 0.64349
| 3,571
| 19,862
| 3.41445
| 0.038645
| 0.090052
| 0.141475
| 0.177643
| 0.880341
| 0.855163
| 0.814648
| 0.777413
| 0.738949
| 0.684819
| 0
| 0.037915
| 0.209898
| 19,862
| 226
| 764
| 87.884956
| 0.739056
| 0.01163
| 0
| 0.297297
| 0
| 0.378378
| 0.557685
| 0.054601
| 0
| 0
| 0
| 0
| 0.222973
| 1
| 0.108108
| false
| 0
| 0.006757
| 0.006757
| 0.128378
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
13988d260ea7cc3931ed88f27673909d9d0489e4
| 31,201
|
py
|
Python
|
test/components/httpio.py
|
Chisanan232/pytsunami
|
099f16e6351b8cfcce4528d53273c381c1dc3bf9
|
[
"Apache-2.0"
] | null | null | null |
test/components/httpio.py
|
Chisanan232/pytsunami
|
099f16e6351b8cfcce4528d53273c381c1dc3bf9
|
[
"Apache-2.0"
] | null | null | null |
test/components/httpio.py
|
Chisanan232/pytsunami
|
099f16e6351b8cfcce4528d53273c381c1dc3bf9
|
[
"Apache-2.0"
] | null | null | null |
from smoothcrawler.components.httpio import BaseHTTP, HTTP, AsyncHTTP, set_retry, RetryComponent
from abc import ABCMeta, abstractmethod
import urllib3
import logging
import random
import pytest
import http
# Verb used by _TestRequestsHTTP.request for every call, regardless of its 'method' argument.
HTTP_METHOD = "GET"
# Live endpoint the tests hit (real network traffic); alternate endpoint kept below.
TEST_URL = "https://www.google.com"
# TEST_URL = "https://www.youtube.com"
# Endpoint intended for timeout scenarios — TODO confirm it is actually unreachable.
TEST_TIMEOUT_URL = "https://www.test.com"
RETRY_TIMES = 3
REQUEST_TIMEOUT = 5
# Per-verb flags flipped to True by the _Test*HTTP stubs when the matching verb
# override runs; init_flag() resets all of them between tests.
GET_FLAG = False
POST_FLAG = False
PUT_FLAG = False
DELETE_FLAG = False
HEAD_FLAG = False
OPTION_FLAG = False
# Module logger with an INFO-level stream handler (stderr by default) so test
# HTTP activity is visible in the pytest output.
Test_Http_Logger = logging.getLogger("smoothcrawler.http_io")
stream_logger = logging.StreamHandler()
stream_logger.setLevel(level=logging.INFO)
formatter = logging.Formatter('%(asctime)s %(module)s.%(funcName)s(): %(levelname)-8s %(message)s')
stream_logger.setFormatter(formatter)
Test_Http_Logger.addHandler(stream_logger)
def init_flag():
    """Reset every per-verb call flag back to its pristine False state."""
    global GET_FLAG, POST_FLAG, PUT_FLAG, DELETE_FLAG, HEAD_FLAG, OPTION_FLAG
    GET_FLAG = POST_FLAG = PUT_FLAG = DELETE_FLAG = HEAD_FLAG = OPTION_FLAG = False
class _TestRequestsHTTP(HTTP):
    """HTTP stub overriding 'request' wholesale; always sends via module-level HTTP_METHOD."""

    __Http_Response = None

    def request(self, url, method="GET", timeout=-1, retry_components=None, *args, **kwargs):
        # Note: the 'method' argument is intentionally ignored — HTTP_METHOD wins.
        Test_Http_Logger.info(f"Send HTTP request by 'urllib3'.")
        pool = urllib3.PoolManager()
        self.__Http_Response = pool.request(HTTP_METHOD, url)
        return self.__Http_Response

    @property
    def status_code(self):
        response = self.__Http_Response
        return response.status if response else -1
class _TestMethodsHTTP(HTTP):
    """HTTP stub overriding every verb hook; records which verb ran via the module flags."""

    __Http_Response = None

    def __send(self, verb, url):
        # Shared urllib3 dispatch used by every verb override below.
        pool = urllib3.PoolManager()
        self.__Http_Response = pool.request(verb, url)
        return self.__Http_Response

    def get(self, url: str, *args, **kwargs):
        global GET_FLAG
        GET_FLAG = True
        return self.__send("GET", url)

    def post(self, url: str, *args, **kwargs):
        global POST_FLAG
        POST_FLAG = True
        return self.__send("POST", url)

    def put(self, url: str, *args, **kwargs):
        global PUT_FLAG
        PUT_FLAG = True
        return self.__send("PUT", url)

    def delete(self, url: str, *args, **kwargs):
        global DELETE_FLAG
        DELETE_FLAG = True
        return self.__send("DELETE", url)

    def head(self, url: str, *args, **kwargs):
        global HEAD_FLAG
        HEAD_FLAG = True
        return self.__send("HEAD", url)

    def option(self, url: str, *args, **kwargs):
        global OPTION_FLAG
        OPTION_FLAG = True
        return self.__send("OPTION", url)

    @property
    def status_code(self):
        response = self.__Http_Response
        return response.status if response else -1
class _TestWrongMethodsHTTP(HTTP):
    """HTTP stub whose hooks are deliberately mis-named (no_get, ...) so dispatch never finds them."""

    __Http_Response = None

    def __send(self, verb, url):
        # Shared urllib3 dispatch used by every mis-named hook below.
        pool = urllib3.PoolManager()
        self.__Http_Response = pool.request(verb, url)
        return self.__Http_Response

    def no_get(self, url, *args, **kwargs):
        global GET_FLAG
        GET_FLAG = True
        response = self.__send("GET", url)
        logging.debug("New get implementation.")
        logging.debug(f"Response: {self.__Http_Response}")
        return response

    def no_post(self, url, *args, **kwargs):
        global POST_FLAG
        POST_FLAG = True
        return self.__send("POST", url)

    def no_put(self, url, *args, **kwargs):
        global PUT_FLAG
        PUT_FLAG = True
        return self.__send("PUT", url)

    def no_delete(self, url, *args, **kwargs):
        global DELETE_FLAG
        DELETE_FLAG = True
        return self.__send("DELETE", url)

    def no_head(self, url, *args, **kwargs):
        global HEAD_FLAG
        HEAD_FLAG = True
        return self.__send("HEAD", url)

    def no_option(self, url, *args, **kwargs):
        global OPTION_FLAG
        OPTION_FLAG = True
        return self.__send("OPTION", url)
# Sleep budget derived from timeout and retry count (5 + 3 - 1 = 7 seconds);
# exact rationale not shown here — presumably the worst-case retry wait. TODO confirm.
Test_Sleep_Time = REQUEST_TIMEOUT + RETRY_TIMES - 1
# Lifecycle counters incremented by the _MyRetry hooks; reset_counter() zeroes them.
Initial_Flag = 0
Done_Flag = 0
Final_Flag = 0
Exception_Flag = 0
def reset_counter():
    """Zero every retry-lifecycle counter before a retry scenario runs."""
    global Initial_Flag, Done_Flag, Final_Flag, Exception_Flag
    Initial_Flag = Done_Flag = Final_Flag = Exception_Flag = 0
class _MyRetry(RetryComponent):
    """Reference RetryComponent: each lifecycle hook logs and bumps its module counter."""

    def before_request(self, *args, **kwargs):
        global Initial_Flag
        Test_Http_Logger.info("Initial task process.")
        Initial_Flag = Initial_Flag + 1

    def request_done(self, result):
        global Done_Flag
        Test_Http_Logger.info("Task done! ")
        Done_Flag = Done_Flag + 1
        return result

    def request_final(self):
        global Final_Flag
        # NOTE(review): logs the same message as request_done — possibly copy-paste; confirm intent.
        Test_Http_Logger.info("Task done! ")
        Final_Flag = Final_Flag + 1

    def request_error(self, error):
        global Exception_Flag
        Test_Http_Logger.info("Got failure when run task.")
        Exception_Flag = Exception_Flag + 1
        return error
class _TestRetryRequestsHTTP(HTTP):
    """Sample HTTP implementation whose 'get' can be forced to fail, for retry testing."""

    __Fail_Mode = None
    __Http_Response = None

    def __init__(self, fail_mode: bool = False, retry_components: RetryComponent = None):
        super().__init__(retry_components=retry_components)
        self.__Fail_Mode = fail_mode

    def get(self, url, *args, **kwargs):
        # Guard clause: fail-mode simulates a timeout before any network work happens.
        if self.__Fail_Mode is True:
            raise TimeoutError("For testing")
        pool = urllib3.PoolManager()
        self.__Http_Response = pool.request("GET", url)
        return self.__Http_Response

    @property
    def status_code(self):
        response = self.__Http_Response
        if response:
            return response.status
        Test_Http_Logger.warning(f"There is no HTTP response currently.")
        return -1

    def http_200_response(self, response):
        Test_Http_Logger.info("Get the HTTP response successfully.")
class BaseHttpTestSpec(metaclass=ABCMeta):
    """
    Abstract test specification for the HTTP component.

    Concrete suites must cover the 'request' entry point and its parameters:

    * 'url': str and URL values are accepted; any other type raises ValueError.
    * 'method': 'GET' / 'POST' / 'PUT' / 'DELETE' / 'HEAD' / 'OPTION' each
      dispatch to the matching verb hook. This package does not care HOW a
      developer implements a verb — only that the dispatch architecture works.
    * 'timeout': -1 keeps waiting until the request itself times out; values
      below -1 raise ValueError; values >= 0 time out after that period.
    * 'retry_components': 'before_request' runs before sending; 'request_done'
      runs once a response arrives; 'request_final' always runs, success or
      failure; 'request_error' runs when sending raises an exception.
    """

    @abstractmethod
    def test_request_url(self, *args, **kwargs):
        """Cover the 'url' parameter: str/URL accepted, other types raise ValueError."""
        pass

    @abstractmethod
    def test_request_method(self, *args, **kwargs):
        """Cover the 'method' parameter: each verb string reaches the matching hook."""
        pass

    @abstractmethod
    def test_request_timeout(self):
        """Cover 'timeout': -1 waits indefinitely, < -1 raises ValueError, >= 0 times out."""
        pass

    @abstractmethod
    def test_request_retry(self):
        """Cover 'retry_components': every lifecycle hook fires at the right moment."""
        pass

    @abstractmethod
    def test_get(self):
        pass

    @abstractmethod
    def test_post(self):
        pass

    @abstractmethod
    def test_put(self):
        pass

    @abstractmethod
    def test_delete(self):
        pass

    @abstractmethod
    def test_head(self):
        pass

    @abstractmethod
    def test_option(self):
        pass

    @abstractmethod
    def test_retry_before_request(self):
        """Cover the 'before_request' retry hook."""
        pass

    @abstractmethod
    def test_retry_request_done(self):
        """Cover the 'request_done' retry hook."""
        pass

    @abstractmethod
    def test_retry_request_final(self):
        """Cover the 'request_final' retry hook."""
        pass

    @abstractmethod
    def test_retry_request_error(self):
        """Cover the 'request_error' retry hook."""
        pass

    @abstractmethod
    def test_retry_mechanism_with_properties(self):
        pass
class TestHttp(BaseHttpTestSpec):
    @pytest.mark.skip(reason="No implement testing logic.")
    def test_request_url(self, *args, **kwargs):
        # Placeholder: URL-validation checks from the spec are not implemented yet.
        pass
def test_request_method(self, *args, **kwargs):
req_ver_http = _TestRequestsHTTP()
req_response = req_ver_http.request(url=TEST_URL)
assert req_response is not None, "It doesn't implement the code which has responsibility about sending HTTP request."
assert req_ver_http.status_code is not None, "HTTP status code must to be a value."
status_code = int(req_ver_http.status_code)
assert TestHttp.__status_code_is_valid(status_code) is True, "This is not a valid status code."
methods_http = _TestMethodsHTTP()
# Test HTTP method 'GET'
method_response = methods_http.request(url=TEST_URL, method="GET")
assert method_response is not None, "It doesn't implement the code which has responsibility about sending HTTP request."
__http_status = method_response.status
assert __http_status is not None, "HTTP status code must to be a value."
status_code = int(__http_status)
assert TestHttp.__status_code_is_valid(status_code) is True, "This is not a valid status code."
    @pytest.mark.skip(reason="No implement testing logic.")
    def test_request_timeout(self):
        # Placeholder: timeout behavior from the spec is not implemented yet.
        pass
    @pytest.mark.skip(reason="No implement testing logic.")
    def test_request_retry(self):
        # Placeholder: retry-component behavior from the spec is not implemented yet.
        pass
def test_get(self):
def final_assert():
assert GET_FLAG is False, \
f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."
req_method = "GET"
req_method_upper = req_method.upper()
req_method_lower = req_method.lower()
_http_cls = _TestMethodsHTTP()
TestHttp.__test_request_with_upper_char(_http_cls, req_method)
assert GET_FLAG is True, \
f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."
TestHttp.__test_request_with_replace_random_char(_http_cls, req_method)
assert GET_FLAG is True, \
f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."
TestHttp.__test_request_with_insert_random_char(_http_cls, req_method)
assert GET_FLAG is True, \
f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."
TestHttp.__test_request_with_invalid_char(_http_cls, final_assert)
response = TestHttp.__request_with_no_override(req_method)
assert GET_FLAG is False, \
f"'HTTP.request' should not call function '{req_method.lower()}' because it doesn't override it."
assert response is None, "The HTTP response result should be None in default."
def test_post(self):
def final_assert():
assert POST_FLAG is False, \
f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."
req_method = "POST"
req_method_upper = req_method.upper()
req_method_lower = req_method.lower()
_http_cls = _TestMethodsHTTP()
TestHttp.__test_request_with_upper_char(_http_cls, req_method)
assert POST_FLAG is True, \
f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."
TestHttp.__test_request_with_replace_random_char(_http_cls, req_method)
assert POST_FLAG is True, \
f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."
TestHttp.__test_request_with_insert_random_char(_http_cls, req_method)
assert POST_FLAG is True, \
f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."
TestHttp.__test_request_with_invalid_char(_http_cls, final_assert)
response = TestHttp.__request_with_no_override(req_method)
assert POST_FLAG is False, \
f"'HTTP.request' should not call function '{req_method.lower()}' because it doesn't override it."
assert response is None, "The HTTP response result should be None in default."
def test_put(self):
def final_assert():
assert PUT_FLAG is False, \
f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."
req_method = "PUT"
req_method_upper = req_method.upper()
req_method_lower = req_method.lower()
_http_cls = _TestMethodsHTTP()
TestHttp.__test_request_with_upper_char(_http_cls, req_method)
assert PUT_FLAG is True, \
f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."
TestHttp.__test_request_with_replace_random_char(_http_cls, req_method)
assert PUT_FLAG is True, \
f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."
TestHttp.__test_request_with_insert_random_char(_http_cls, req_method)
assert PUT_FLAG is True, \
f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."
TestHttp.__test_request_with_invalid_char(_http_cls, final_assert)
response = TestHttp.__request_with_no_override(req_method)
assert PUT_FLAG is False, \
f"'HTTP.request' should not call function '{req_method.lower()}' because it doesn't override it."
assert response is None, "The HTTP response result should be None in default."
def test_delete(self):
    # Verify that 'HTTP.request' dispatches to an overridden 'delete' method
    # for several spellings of the method name, and rejects invalid values.

    def final_assert():
        # Runs after the invalid-method request; the handler must NOT have fired.
        # NOTE(review): the message text says "should call" although the
        # assertion requires DELETE_FLAG to be False — likely a copy-paste slip.
        assert DELETE_FLAG is False, \
            f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."

    req_method = "DELETE"
    req_method_upper = req_method.upper()
    req_method_lower = req_method.lower()
    _http_cls = _TestMethodsHTTP()

    # Exact upper-case method name.
    TestHttp.__test_request_with_upper_char(_http_cls, req_method)
    assert DELETE_FLAG is True, \
        f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."

    # Method name with one randomly re-cased character.
    TestHttp.__test_request_with_replace_random_char(_http_cls, req_method)
    assert DELETE_FLAG is True, \
        f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."

    # Method name embedded inside noise characters.
    TestHttp.__test_request_with_insert_random_char(_http_cls, req_method)
    assert DELETE_FLAG is True, \
        f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."

    # Entirely invalid method string.
    TestHttp.__test_request_with_invalid_char(_http_cls, final_assert)

    # Subclass without a 'delete' override: no flag, no response.
    response = TestHttp.__request_with_no_override(req_method)
    assert DELETE_FLAG is False, \
        f"'HTTP.request' should not call function '{req_method.lower()}' because it doesn't override it."
    assert response is None, "The HTTP response result should be None in default."
def test_head(self):
    # Verify that 'HTTP.request' dispatches to an overridden 'head' method
    # for several spellings of the method name, and rejects invalid values.

    def final_assert():
        # Runs after the invalid-method request; the handler must NOT have fired.
        # NOTE(review): message says "should call" but the assertion is 'is False'.
        assert HEAD_FLAG is False, \
            f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."

    req_method = "HEAD"
    req_method_upper = req_method.upper()
    req_method_lower = req_method.lower()
    _http_cls = _TestMethodsHTTP()

    TestHttp.__test_request_with_upper_char(_http_cls, req_method)
    assert HEAD_FLAG is True, \
        f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."

    TestHttp.__test_request_with_replace_random_char(_http_cls, req_method)
    assert HEAD_FLAG is True, \
        f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."

    TestHttp.__test_request_with_insert_random_char(_http_cls, req_method)
    assert HEAD_FLAG is True, \
        f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."

    TestHttp.__test_request_with_invalid_char(_http_cls, final_assert)

    response = TestHttp.__request_with_no_override(req_method)
    assert HEAD_FLAG is False, \
        f"'HTTP.request' should not call function '{req_method.lower()}' because it doesn't override it."
    assert response is None, "The HTTP response result should be None in default."
def test_option(self):
    # Verify that 'HTTP.request' dispatches to an overridden 'option' method.
    # NOTE(review): the standard HTTP verb is "OPTIONS" (RFC 7231); this suite
    # uses "OPTION" — presumably matching the override name declared in
    # _TestMethodsHTTP, which is not visible here. Confirm before renaming.

    def final_assert():
        # Runs after the invalid-method request; the handler must NOT have fired.
        # NOTE(review): message says "should call" but the assertion is 'is False'.
        assert OPTION_FLAG is False, \
            f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."

    req_method = "OPTION"
    req_method_upper = req_method.upper()
    req_method_lower = req_method.lower()
    _http_cls = _TestMethodsHTTP()

    TestHttp.__test_request_with_upper_char(_http_cls, req_method)
    assert OPTION_FLAG is True, \
        f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."

    TestHttp.__test_request_with_replace_random_char(_http_cls, req_method)
    assert OPTION_FLAG is True, \
        f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."

    TestHttp.__test_request_with_insert_random_char(_http_cls, req_method)
    assert OPTION_FLAG is True, \
        f"'HTTP.request' should call function '{req_method_lower}' with option *method* value is '{req_method_upper}'."

    TestHttp.__test_request_with_invalid_char(_http_cls, final_assert)

    response = TestHttp.__request_with_no_override(req_method)
    assert OPTION_FLAG is False, \
        f"'HTTP.request' should not call function '{req_method.lower()}' because it doesn't override it."
    assert response is None, "The HTTP response result should be None in default."
@staticmethod
def __test_request_with_upper_char(http_cls, req_method: str):
    """Send a request whose *method* is the exact upper-case verb name.

    Cleanup: the 'response' local was assigned but never used — dispatch
    is observed through the module-level flags, not the return value.
    """
    init_flag()
    req_method_upper = req_method.upper()
    http_cls.request(method=req_method_upper, url=TEST_URL)
    Test_Http_Logger.info(f"Test with option value '{req_method_upper}'.")
@staticmethod
def __test_request_with_replace_random_char(http_cls, req_method: str):
    """Send a request whose *method* has one character randomly re-cased.

    Cleanup: dropped the unused 'response' local — dispatch is observed
    through the module-level flags, not the return value.
    """
    init_flag()
    req_method_replace_random = TestHttp.__replace_random_char(target=req_method)
    http_cls.request(method=req_method_replace_random, url=TEST_URL)
    Test_Http_Logger.info(f"Test with option value '{req_method_replace_random}'.")
@staticmethod
def __test_request_with_insert_random_char(http_cls, req_method: str):
    """Send a request whose *method* is embedded inside noise characters.

    Cleanup: dropped the unused 'response' local and fixed the garbled
    comment below.
    """
    init_flag()
    john_cena_char = "$%#%$%#%YouCAnNotSeeME"
    req_method_insert_random = TestHttp.__insert_random_char(target=john_cena_char, insert=req_method)
    # Not sure whether the package should filter these characters or not.
    http_cls.request(method=req_method_insert_random, url=TEST_URL)
    Test_Http_Logger.info(f"Test with option value '{req_method_insert_random}'.")
@staticmethod
def __test_request_with_invalid_char(http_cls, assert_callable):
    # Send a request whose *method* is pure garbage and verify that the
    # implementation rejects it with a TypeError.
    init_flag()
    magic_char = "$%##%NowYouSeeME"
    # Invalid option value
    # 'request_exception' ends up holding either the return value (success)
    # or the raised exception (failure); only the latter is acceptable here.
    request_exception = None
    try:
        request_exception = http_cls.request(method=magic_char, url=TEST_URL)
    except Exception as e:
        request_exception = e
    finally:
        Test_Http_Logger.info(f"Test with option value '{magic_char}'.")
        # Run the caller-supplied flag assertions before checking the type.
        assert_callable()
        assert type(request_exception) is TypeError, \
            "'HTTP.request' should filter invalid option value."
@staticmethod
def __request_with_no_override(req_method: str):
    """Issue a request against a subclass that overrides no HTTP verb.

    Returns whatever the default (non-overridden) implementation yields.
    """
    client = _TestWrongMethodsHTTP()
    result = client.request(method=req_method, url=TEST_URL)
    Test_Http_Logger.info(f"Test with option value '{req_method}'.")
    return result
@staticmethod
def __replace_random_char(target: str) -> str:
replaced_char_index = random.randrange(0, len(target))
if replaced_char_index % random.randrange(1, 2) == random.randrange(1, 2):
target_random = target[:replaced_char_index] + target[replaced_char_index].upper() + target[replaced_char_index + 1:]
else:
target_random = target[:replaced_char_index] + target[replaced_char_index].lower() + target[replaced_char_index + 1:]
return target_random
@staticmethod
def __insert_random_char(target: str, insert: str) -> str:
insert_char_index = random.randrange(0, len(target))
if insert_char_index % random.randrange(1, 2) == random.randrange(1, 2):
target_random = target[:insert_char_index] + insert + target[insert_char_index:]
else:
target_random = target[:insert_char_index] + insert + target[insert_char_index:]
return target_random
def test_retry_before_request(self):
    # Verify the 'before_request' hook fires once per retry attempt, in both
    # the always-failing and the succeeding configurations.
    reset_counter()
    set_retry(RETRY_TIMES)
    my_retry = _MyRetry()
    for test_mode in [True, False]:
        global Initial_Flag
        Initial_Flag = 0
        http_cls = _TestRetryRequestsHTTP(fail_mode=test_mode)
        http_cls.before_request = my_retry.before_request
        http_cls.request_error = my_retry.request_error
        response = http_cls.request(url=TEST_URL, timeout=REQUEST_TIMEOUT)
        if test_mode is True:
            # Every attempt fails, so the hook must fire exactly RETRY_TIMES times.
            assert Initial_Flag == RETRY_TIMES, "Initial process times should be equal to retry times."
        else:
            # NOTE(review): the message says "equal" but the assertion allows <=;
            # a successful request may stop before exhausting all retries.
            assert Initial_Flag <= RETRY_TIMES, "Initial process times should be equal to retry times."
def test_retry_request_done(self):
    # Verify the 'request_done' hook counting across retry attempts.
    reset_counter()
    set_retry(RETRY_TIMES)
    my_retry = _MyRetry()
    for test_mode in [True, False]:
        http_cls = _TestRetryRequestsHTTP(fail_mode=test_mode)
        http_cls.request_done = my_retry.request_done
        http_cls.request_error = my_retry.request_error
        response = http_cls.request(url=TEST_URL, timeout=REQUEST_TIMEOUT)
        global Done_Flag
        if test_mode is True:
            # In fail mode no attempt succeeds, so 'request_done' never fires.
            # NOTE(review): message says "equal to retry times" but asserts == 0.
            assert Done_Flag == 0, "The times of done process should be equal to retry times."
        else:
            assert Done_Flag <= RETRY_TIMES, "The times of done process should be equal to retry times."
        # Reset for the next loop iteration.
        Done_Flag = 0
def test_retry_request_final(self):
    # Verify the 'request_final' hook does not fire more often than the
    # configured retry budget, whether requests fail or succeed.
    reset_counter()
    set_retry(RETRY_TIMES)
    my_retry = _MyRetry()
    for test_mode in [True, False]:
        http_cls = _TestRetryRequestsHTTP(fail_mode=test_mode)
        http_cls.request_final = my_retry.request_final
        http_cls.request_error = my_retry.request_error
        response = http_cls.request(url=TEST_URL, timeout=REQUEST_TIMEOUT)
        global Final_Flag
        # NOTE(review): message says "equal" but the assertion allows <=.
        assert Final_Flag <= RETRY_TIMES, "Final process times should be equal to retry times."
        Test_Http_Logger.debug(f"Final_Flag: {Final_Flag}")
        Test_Http_Logger.debug(f"Exception_Flag: {Exception_Flag}")
        # Reset for the next loop iteration.
        Final_Flag = 0
def test_retry_request_error(self):
    # Verify the 'request_error' hook fires once per failed attempt.
    reset_counter()
    set_retry(RETRY_TIMES)
    my_retry = _MyRetry()
    for test_mode in [True, False]:
        http_cls = _TestRetryRequestsHTTP(fail_mode=test_mode)
        http_cls.request_error = my_retry.request_error
        response = http_cls.request(url=TEST_URL, timeout=REQUEST_TIMEOUT)
        global Exception_Flag
        if test_mode is True:
            # Every attempt fails, so the error hook fires RETRY_TIMES times.
            assert Exception_Flag == RETRY_TIMES, "The times of exception handling process should be equal to retry times."
        else:
            # NOTE(review): message says "equal" but the assertion allows <=.
            assert Exception_Flag <= RETRY_TIMES, "The times of exception handling process should be equal to retry times."
        # Reset for the next loop iteration.
        Exception_Flag = 0
def test_retry_mechanism_with_properties(self):
    # End-to-end retry test with all four hooks attached via properties.
    reset_counter()
    set_retry(RETRY_TIMES)
    for test_mode in [True, False]:
        global Initial_Flag, Done_Flag, Final_Flag, Exception_Flag
        Initial_Flag = 0
        Done_Flag = 0
        Final_Flag = 0
        Exception_Flag = 0
        http_cls = _TestRetryRequestsHTTP(fail_mode=test_mode)
        # The client raises TimeoutError when no response arrives within the
        # timeout, and retries the HTTP request on any exception until the
        # retry budget is exhausted.
        my_retry = _MyRetry()
        http_cls.before_request = my_retry.before_request
        http_cls.request_done = my_retry.request_done
        http_cls.request_final = my_retry.request_final
        http_cls.request_error = my_retry.request_error
        response = http_cls.request(url=TEST_URL, timeout=REQUEST_TIMEOUT)
        TestHttp.__request_checking(test_mode, http_cls, response)
def test_retry_mechanism_with_adapter(self):
    # Same end-to-end retry scenario as above, but the hooks are supplied
    # as a single component object instead of per-property assignment.
    reset_counter()
    set_retry(RETRY_TIMES)
    for test_mode in [True, False]:
        global Initial_Flag, Done_Flag, Final_Flag, Exception_Flag
        Initial_Flag = 0
        Done_Flag = 0
        Final_Flag = 0
        Exception_Flag = 0
        http_cls = _TestRetryRequestsHTTP(fail_mode=test_mode, retry_components=_MyRetry())
        response = http_cls.request(url=TEST_URL, timeout=REQUEST_TIMEOUT)
        TestHttp.__request_checking(test_mode, http_cls, response)
@staticmethod
def __request_checking(fail_mode, http_cls, response):
    # Shared assertions for the two retry-mechanism tests above.
    assert response is not None, "It doesn't implement the code which has responsibility about sending HTTP request."
    if fail_mode is True:
        # Every attempt fails, so each hook must fire exactly RETRY_TIMES times.
        assert Initial_Flag == RETRY_TIMES, "Initial process times should be equal to retry times."
        assert Done_Flag == RETRY_TIMES or Exception_Flag == RETRY_TIMES, "The times of done process or exception handling process should be equal to retry times."
        assert Final_Flag == RETRY_TIMES, "Final process times should be equal to retry times."
    else:
        # Successful path: the response must carry a recognized HTTP status.
        __http_status = response.status
        assert __http_status is not None, "HTTP status code must to be a value."
        status_code = int(__http_status)
        assert TestHttp.__status_code_is_valid(status_code) is True, "This is not a valid status code."
        # The hooks may stop early on success, hence <= rather than ==.
        assert Initial_Flag <= RETRY_TIMES, "Initial process times should be equal to retry times."
        assert Done_Flag <= RETRY_TIMES and \
               Exception_Flag <= RETRY_TIMES and \
               (Done_Flag + Exception_Flag) <= RETRY_TIMES, "The times of done process or exception handling process should be equal to retry times."
        assert Final_Flag <= RETRY_TIMES, "Final process times should be equal to retry times."
@staticmethod
def __status_code_is_valid(status):
for _status in http.HTTPStatus:
if int(status) == _status.value:
return True
else:
return False
| 35.375283
| 167
| 0.659242
| 3,934
| 31,201
| 4.907728
| 0.064057
| 0.058735
| 0.030455
| 0.027969
| 0.816802
| 0.772259
| 0.74662
| 0.716683
| 0.70446
| 0.684622
| 0
| 0.00307
| 0.25887
| 31,201
| 881
| 168
| 35.415437
| 0.831863
| 0.113041
| 0
| 0.741348
| 0
| 0.056466
| 0.207505
| 0.029834
| 0
| 0
| 0
| 0
| 0.132969
| 1
| 0.129326
| false
| 0.032787
| 0.01275
| 0
| 0.213115
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
13a2fd390ff63a5db24c40da3b69739a0688560f
| 5,849
|
py
|
Python
|
examples/tx_analysis.py
|
frstrtr/bitcoin_tools
|
2a745428ab1613ec5564b80d5189c179aa69cb0f
|
[
"BSD-3-Clause"
] | 1
|
2020-10-16T07:48:58.000Z
|
2020-10-16T07:48:58.000Z
|
examples/tx_analysis.py
|
frstrtr/bitcoin_tools
|
2a745428ab1613ec5564b80d5189c179aa69cb0f
|
[
"BSD-3-Clause"
] | null | null | null |
examples/tx_analysis.py
|
frstrtr/bitcoin_tools
|
2a745428ab1613ec5564b80d5189c179aa69cb0f
|
[
"BSD-3-Clause"
] | 1
|
2020-10-16T07:47:31.000Z
|
2020-10-16T07:47:31.000Z
|
from bitcoin_tools.core.transaction import TX
#################################################
# Hex transaction analysis #
#################################################
# https://github.com/bitcoin/bitcoin/blob/v0.13.1rc2/src/primitives/transaction.h#L275
"""/**
* Basic transaction serialization format:
* - int32_t nVersion
* - std::vector<CTxIn> vin
* - std::vector<CTxOut> vout
* - uint32_t nLockTime
*
* Extended transaction serialization format:
* - int32_t nVersion
* - unsigned char dummy = 0x00
* - unsigned char flags (!= 0)
* - std::vector<CTxIn> vin
* - std::vector<CTxOut> vout
* - if (flags & 1):
* - CTxWitness wit;
* - uint32_t nLockTime
*/"""
# ---------------------------------------------------------------------------------------------------------------------
# The following piece of code parses a serialized transaction (hex encoded) and displays all the information related
# to it.
# - Leftmost displayed transaction shows data as should be interpreted (human-readable), while rightmost
# (surrounded by parenthesis) shows it as it is in the serialize transaction (can be used to identify it inside the
# transaction)
# - You should change the hex_tx for the one you'd like to deserialize. Serialized transaction can be obtain from block
# explorers such as blockcypher.com or blockchain.info, or by building a transaction using some of the library tools.
# ---------------------------------------------------------------------------------------------------------------------
# First a transaction object is created (through the deserialize constructor) by deserializing the hex transaction we
# have selected.
# hex_tx = "020000000001018b0795ef60c78761001f5544e7d3910d63f9db2e0d6ed5f83b308e7f8d8f0fae0000000000fdffffff02734ef40100000000160014ad57609ab92acbd3c1b5b0e2aae15ba6da7eabec10201600000000001600140e7b71cb408a98f9ccd7402a557763178950954e0247304402204efc5ed1e980f5f1a3078c5a6c19c3e85f5bd7dbddd5d6c4a13ea7ccc0fab42f022043d00773037129c6e15b87ecd7d70e429b9df7a2906e16c50241131941b3bfdc012102d58aca4317df9be3801285859bfcaf768d0a91260432c105d6b25d457d553520acec0900"
# hex_tx = "0200000000010178f480d25895817cd5537ddb431be44c3464d886800c5dd749eb852133762ab10000000000fdffffff04c10500000000000016001475bce781270c3624f8cbe1d52dc42b3c8bf09d2853991600000000001600142d4d9e741d301a94d055e402d316ce282cee2234feb21f0100000000160014aaba350dd348de07c4fa60eae1664d859a669ae421c0bd000000000017a91482b4a28bb4f207d60db3429ed474dd68ad055f3e870247304402201eb810896a2c9f601442342314cd31bec556d4e2f0ec81b1303046f8e1e0c4010220040f1c8de93ee9cd55ff9441129f9f8ca12752bb4e917d9ee804f1ce3adf7a2e012103ffb9a5fae765acb979308f88d1a498972f0198fb39b4a8e7286a5ac392b59cde6ef00900"
hex_tx = '02000000000104564cbd188db7178dac57cf7a5fd09812c947a5cfb75d66a1a46ca27465fc21b10000000017160014c3eb4c03b0df78ffc211f72ea8d08d0eca74e281fdffffff672e57b9693ca48089016849aa809a189d75f4982422c00baf5bfa05ceb11db90100000017160014fd074bd35ce52c61f8855b61ed72c690923e5a87fdffffffbed5f7848ae630b804a6f7732059d3fc1d86df8def8a9bf014ded8c274dcffb80c000000171600140de01b667a614191920b4686dea2db8973bb35bafdffffffe58cfb3212e988e05682e4dd4530239e7913455755ea41358e6ee87e3d48692400000000171600141d9764cb5906eaa5c07a987624f93189f287971bfdffffff010065cd1d0000000017a9144d856ff4875decf91c413f194de7ec38859a7a3c87024730440220338f13d154910ceef62039753a0d3394ad6715b45bbc9945a5d1b31899a8a6b602206bb4371b204ccd1a9da759188feff7ba268540c4c0c65a2632fa9eddc14ce60a0121036b7ed62f4026d8a484640953af4490f866565875f8c4c952114aa71c42e21aac0247304402205bead2ee73bfec6fc879a185e30d1f1cf901df46cc8a7e715baf1a2d5801267102200268cca107105b7d1e40879250c1e6b08418b37773a1ceba323340c7b946fd0e012103e162a4070a16a73d44d00e66abaf639eacf48a8c38108656622d842bb7873a030247304402203e3adbee9e095153ae16b47de13a69d534aa3eb0f194e7b870be75b34c9df657022027f67be4ce4f480c8e60e51c132ec1949f887edee1c80e87e2f299b632e1290b012103803fe60d6e9497ba760ae71c7924eabdf2673c299ef2f9df4bb8c87797e6b3bd0247304402207e6160a5a90c6ea4855d71acb4803a30661001d81f754b00a786a830f7e3bbd302201a7a1b7832f716dbbd12bb378bdfe5f6172988550832a64d1d03d1e1a4cf44ae012103dd39b5b603fc1eac34a371c6f3f3a419bd2098bb7c80b45738260c9402dfdb8a73f00900'
# hex_tx = "02000000 # version
# varint/1b 00 # number of vins ??? /marker char https://github.com/bitcoin/bips/blob/master/bip-0144.mediawiki Must be zero
# 1 byte 01 # vin data ???? /flag char bip-0144 Must be non-zero
# varint 01 # number of vouts ??? /txin count varint bip-0144
# 32 bytes 8b0795ef60c78761001f5544e7d3910d63f9db2e0d6ed5f83b308e7f8d8f0fae # vin vout txid/hash little endian
# 4 bytes 00000000 # vin vout index
# 00 # script bytes
# fdffffff # Sequence number ???
# 02 # vout count
# 8 bytes 734ef401 # 32 788 083 Satoshis vout n0 value little endian
# 00 # bytes in pk_script ???
# 000000160014
# ad57609ab92acbd3c1b5b0e2aae15ba6da7eabec # vout n0 scriptPubKey asm
# 8 bytes 10201600 # 1 450 000 Satoshis vout n1 value little endian
# 00 # bytes in pk_script ???
# 000000160014
# 0e7b71cb408a98f9ccd7402a557763178950954e # vout n1 scriptPubKey asm
# 02 # count of whitness?
# 47 # varint len in bytes (71 DEC)
# 304402204efc5ed1e980f5f1a3078c5a6c19c3e85f5bd7dbddd5d6c4a13ea7ccc0fab42f022043d00773037129c6e15b87ecd7d70e429b9df7a2906e16c50241131941b3bfdc01
# 21 # varint len in bytes (33 DEC)
# 02d58aca4317df9be3801285859bfcaf768d0a91260432c105d6b25d457d553520
# acec0900" # nLockTime
# Interactive loop: deserialize and display raw transactions until the user
# submits an empty line (or closes stdin).
while hex_tx:
    tx = TX.deserialize(hex_tx)
    # Display the parsed transaction to analyze how it has been constructed.
    tx.display()
    print('\n\n')
    try:
        # Robustness fix: strip stray whitespace/newlines from pasted hex, and
        # exit cleanly when stdin is closed (Ctrl-D / piped input) instead of
        # crashing with an unhandled EOFError.
        hex_tx = input('raw tx, please: ').strip()
    except EOFError:
        break
# todo!
# vout witness_v0_keyhash type?
# bc1 addresses
| 75.961039
| 1,467
| 0.788853
| 377
| 5,849
| 12.193634
| 0.519894
| 0.008701
| 0.010442
| 0.009136
| 0.054818
| 0.054818
| 0.035675
| 0.035675
| 0.020013
| 0
| 0
| 0.388525
| 0.111985
| 5,849
| 76
| 1,468
| 76.960526
| 0.496534
| 0.609506
| 0
| 0
| 0
| 0
| 0.872856
| 0.861029
| 0
| 1
| 0
| 0.013158
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0.142857
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
13baa3228e659e150a5cb61696089c28865fdb16
| 195
|
py
|
Python
|
app/server/config.py
|
SSU-NC-22/AirPost_Sink
|
a867441886a25f2edccb118e5a1e7b5e1ed05891
|
[
"Apache-2.0"
] | null | null | null |
app/server/config.py
|
SSU-NC-22/AirPost_Sink
|
a867441886a25f2edccb118e5a1e7b5e1ed05891
|
[
"Apache-2.0"
] | 2
|
2021-09-27T13:17:16.000Z
|
2021-09-27T13:18:24.000Z
|
app/server/config.py
|
SSU-NC-22/AirPost_Sink
|
a867441886a25f2edccb118e5a1e7b5e1ed05891
|
[
"Apache-2.0"
] | 1
|
2021-07-20T13:08:19.000Z
|
2021-07-20T13:08:19.000Z
|
class dev_info:
    """Holds the sink identifier shared across the server.

    The class name keeps its original lower-case spelling because other
    modules reference it as ``dev_info``.
    """

    # THIS ID SHOULD BE HEX DECIMAL one character means
    # NOTE(review): the default below is a plain int, not a hex string;
    # confirm the intended representation with the callers.
    SINK_ID = 1

    @staticmethod
    def set_id(sink_id):
        """Overwrite the stored sink id (class-wide side effect).

        Bug fix: without @staticmethod, calling ``instance.set_id(x)``
        would pass the instance as ``sink_id``; class-level calls keep
        working exactly as before.
        """
        dev_info.SINK_ID = sink_id

    @staticmethod
    def get_id():
        """Return the currently stored sink id."""
        return dev_info.SINK_ID
| 24.375
| 67
| 0.666667
| 34
| 195
| 3.529412
| 0.529412
| 0.25
| 0.275
| 0.325
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007042
| 0.271795
| 195
| 7
| 68
| 27.857143
| 0.838028
| 0.251282
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.833333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
13bc86ea6fc72062c23623088aff6a899e353d93
| 1,860
|
py
|
Python
|
src/spanishconjugator/tenses/subjunctive/future_perfect.py
|
shrutiichandra/spanish-conjugator
|
2ebf41b92c14c3e47a873c52fdf4ce1d17bff5e0
|
[
"MIT"
] | null | null | null |
src/spanishconjugator/tenses/subjunctive/future_perfect.py
|
shrutiichandra/spanish-conjugator
|
2ebf41b92c14c3e47a873c52fdf4ce1d17bff5e0
|
[
"MIT"
] | null | null | null |
src/spanishconjugator/tenses/subjunctive/future_perfect.py
|
shrutiichandra/spanish-conjugator
|
2ebf41b92c14c3e47a873c52fdf4ce1d17bff5e0
|
[
"MIT"
] | null | null | null |
# -*- coding: iso-8859-15 -*-
def subjunctive_future_perfect(root_verb, pronoun):
    """Conjugate *root_verb* in the Spanish subjunctive future perfect.

    Returns "<haber future-subjunctive> <past participle>" for the given
    pronoun, or None for an unrecognized pronoun (as the original did).

    Bug fixed: every branch tested ``root_verb[-2:] == "er" or "ir"``,
    which is always true ("ir" is a truthy string), so any verb not
    ending in "ar" received the "ido" participle regardless of its real
    ending.  Since the "ar" branch returned first, the *effective*
    behavior was: "ar" -> "ado", everything else -> "ido" — preserved
    here deliberately for backward compatibility.  The six duplicated
    per-pronoun blocks are collapsed into one auxiliary lookup table.
    """
    # Future subjunctive forms of "haber", keyed by pronoun.
    auxiliaries = {
        "yo": "hubiere",
        "tu": "hubieres",
        "usted": "hubiere",
        "nosotros": "hubiéremos",
        "vosotros": "hubiereis",
        "ustedes": "hubieren",
    }
    auxiliary = auxiliaries.get(pronoun)
    if auxiliary is None:
        # Unknown pronoun: fall through to None, matching the original.
        return None
    participle = root_verb[:-2] + ("ado" if root_verb[-2:] == "ar" else "ido")
    return auxiliary + " " + participle
| 37.959184
| 51
| 0.498925
| 197
| 1,860
| 4.573604
| 0.162437
| 0.221976
| 0.239734
| 0.146504
| 0.734739
| 0.734739
| 0.734739
| 0.734739
| 0.734739
| 0.734739
| 0
| 0.02451
| 0.341935
| 1,860
| 49
| 52
| 37.959184
| 0.711601
| 0.014516
| 0
| 0.837209
| 0
| 0
| 0.116812
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023256
| false
| 0
| 0
| 0
| 0.302326
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
13cd2a1d8d800878def342f2f82997f32ef4ab48
| 438
|
py
|
Python
|
taotao-cloud-python/taotao-cloud-oldboy/day32-python-ftp/day32/test.py
|
shuigedeng/taotao-cloud-paren
|
3d281b919490f7cbee4520211e2eee5da7387564
|
[
"Apache-2.0"
] | 47
|
2021-04-13T10:32:13.000Z
|
2022-03-31T10:30:30.000Z
|
taotao-cloud-python/taotao-cloud-oldboy/day32-python-ftp/day32/test.py
|
shuigedeng/taotao-cloud-paren
|
3d281b919490f7cbee4520211e2eee5da7387564
|
[
"Apache-2.0"
] | 1
|
2021-11-01T07:41:04.000Z
|
2021-11-01T07:41:10.000Z
|
taotao-cloud-python/taotao-cloud-oldboy/day32-python-ftp/day32/test.py
|
shuigedeng/taotao-cloud-paren
|
3d281b919490f7cbee4520211e2eee5da7387564
|
[
"Apache-2.0"
] | 21
|
2021-04-13T10:32:17.000Z
|
2022-03-26T07:43:22.000Z
|
import socketserver
# class Mysocket(socketserver.BaseRequestHandler):
#
#
# def handle(self):
# pass
#
#
# s=socketserver.ThreadingTCPServer((),Mysocket)
# s.serve_forever()
# import hashlib
#
#
# s=hashlib.md5()
#
#
# s.update("hello")
# s.update("word")
# s.update("hellow")
# s.update("hellow")
# s.update("hellow")
# s.update("hellow")
# s.update("hellow")
# s.update("hellow")
# s.update("hellow")
# s.hexdigest()
| 13.272727
| 50
| 0.636986
| 51
| 438
| 5.45098
| 0.392157
| 0.226619
| 0.327338
| 0.352518
| 0.330935
| 0.330935
| 0.330935
| 0.330935
| 0.330935
| 0.330935
| 0
| 0.002681
| 0.148402
| 438
| 32
| 51
| 13.6875
| 0.742627
| 0.824201
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
13eb75ad853e7e9541a19deb9cfa89358f80fc14
| 2,536
|
py
|
Python
|
tests/requests_client/RequestsFutureAdapter/build_timeout_test.py
|
educatedguessing/bravado
|
2eeb069ff4b4054b23e888577cc108327da46ff7
|
[
"BSD-3-Clause"
] | 600
|
2015-05-20T00:37:21.000Z
|
2022-03-09T03:48:38.000Z
|
tests/requests_client/RequestsFutureAdapter/build_timeout_test.py
|
educatedguessing/bravado
|
2eeb069ff4b4054b23e888577cc108327da46ff7
|
[
"BSD-3-Clause"
] | 323
|
2015-05-19T22:35:29.000Z
|
2021-12-09T12:55:09.000Z
|
tests/requests_client/RequestsFutureAdapter/build_timeout_test.py
|
educatedguessing/bravado
|
2eeb069ff4b4054b23e888577cc108327da46ff7
|
[
"BSD-3-Clause"
] | 137
|
2015-05-14T19:51:58.000Z
|
2022-01-31T19:36:32.000Z
|
# -*- coding: utf-8 -*-
from bravado.requests_client import RequestsFutureAdapter
def test_no_timeouts(session_mock, request_mock):
    """Neither a service timeout nor a result timeout yields None."""
    adapter = RequestsFutureAdapter(session_mock, request_mock, {})
    assert adapter.build_timeout(result_timeout=None) is None


def test_service_timeout_and_result_timeout_None(session_mock, request_mock):
    """Only the service timeout is set: it is returned as-is."""
    adapter = RequestsFutureAdapter(session_mock, request_mock, {"timeout": 1})
    assert adapter.build_timeout(result_timeout=None) == 1


def test_no_service_timeout_and_result_timeout_not_None(session_mock, request_mock):
    """Only the result timeout is set: it is returned as-is."""
    adapter = RequestsFutureAdapter(session_mock, request_mock, {})
    assert adapter.build_timeout(result_timeout=1) == 1


def test_service_timeout_lt_result_timeout(session_mock, request_mock):
    """When both are set, the larger (result) timeout wins."""
    adapter = RequestsFutureAdapter(session_mock, request_mock, {"timeout": 10})
    assert adapter.build_timeout(result_timeout=11) == 11


def test_service_timeout_gt_result_timeout(session_mock, request_mock):
    """When both are set, the larger (service) timeout wins."""
    adapter = RequestsFutureAdapter(session_mock, request_mock, {"timeout": 11})
    assert adapter.build_timeout(result_timeout=10) == 11


def test_service_timeout_None_result_timeout_not_None(session_mock, request_mock):
    """An explicit None service timeout defers to the result timeout."""
    adapter = RequestsFutureAdapter(session_mock, request_mock, {"timeout": None})
    assert adapter.build_timeout(result_timeout=10) == 10


def test_service_timeout_not_None_result_timeout_None(session_mock, request_mock):
    """A None result timeout defers to the service timeout."""
    adapter = RequestsFutureAdapter(session_mock, request_mock, {"timeout": 10})
    assert adapter.build_timeout(result_timeout=None) == 10


def test_both_timeouts_the_same(session_mock, request_mock):
    """Equal timeouts collapse to that single value."""
    adapter = RequestsFutureAdapter(session_mock, request_mock, {"timeout": 10})
    assert adapter.build_timeout(result_timeout=10) == 10


def test_connect_timeout_and_idle_timeout(session_mock, request_mock):
    """A connect timeout produces a (connect, read) tuple."""
    adapter = RequestsFutureAdapter(
        session_mock, request_mock, {"connect_timeout": 1, "timeout": 11}
    )
    assert adapter.build_timeout(result_timeout=None) == (1, 11)


def test_connect_timeout_only(session_mock, request_mock):
    """A connect timeout with no read timeout pairs with None."""
    adapter = RequestsFutureAdapter(
        session_mock, request_mock, {"connect_timeout": 1}
    )
    assert adapter.build_timeout(result_timeout=None) == (1, None)
| 40.253968
| 84
| 0.800473
| 332
| 2,536
| 5.707831
| 0.120482
| 0.116095
| 0.189974
| 0.23219
| 0.873879
| 0.831662
| 0.831662
| 0.831662
| 0.827968
| 0.822164
| 0
| 0.017457
| 0.119085
| 2,536
| 62
| 85
| 40.903226
| 0.830797
| 0.008281
| 0
| 0.414634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.243902
| 1
| 0.243902
| false
| 0
| 0.02439
| 0
| 0.268293
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b91460b261f27f79332e592a442811e6c5dc2b6b
| 10,555
|
py
|
Python
|
test/UnitTest/v2/v2data.py
|
jason-fox/fogflow
|
e396ef0dee0125936954e381ab2862fd472e1774
|
[
"BSD-3-Clause"
] | 102
|
2017-11-18T01:09:38.000Z
|
2022-02-21T16:32:15.000Z
|
test/UnitTest/v2/v2data.py
|
jason-fox/fogflow
|
e396ef0dee0125936954e381ab2862fd472e1774
|
[
"BSD-3-Clause"
] | 169
|
2018-02-23T07:42:20.000Z
|
2022-03-30T06:12:28.000Z
|
test/UnitTest/v2/v2data.py
|
jason-fox/fogflow
|
e396ef0dee0125936954e381ab2862fd472e1774
|
[
"BSD-3-Clause"
] | 68
|
2018-02-08T06:55:33.000Z
|
2022-01-18T06:21:06.000Z
|
subscription_data=\
{
"description": "A subscription to get info about Room1",
"subject": {
"entities": [
{
"id": "Room1",
"type": "Room",
}
],
"condition": {
"attrs": [
"p3"
]
}
},
"notification": {
"http": {
"url": "http://0.0.0.0:8888/accumulate"
},
"attrs": [
"p1",
"p2",
"p3"
]
},
"expires": "2040-01-01T14:00:00.00Z",
"throttling": 5
}
#data to test the following code for broker.thinBroker.go:946
'''
subReqv2 := SubscriptionRequest{}
err := r.DecodeJsonPayload(&subReqv2)
if err != nil {
rest.Error(w, err.Error(), http.StatusInternalServerError)
return
}
'''
subscriptionWrongPaylaod=\
{
"description": "A subscription to get info about Room1",
"subject": {
"entities": [
{
"id": "Room1",
"type": "Room",
"ispattern":"false"
}
],
"condition": {
"attrs": [
"p3"
]
}
},
"notification": {
"http": {
"url": "http://0.0.0.0:8888/accumulate"
},
"attrs": [
"p1",
"p2",
"p3"
]
},
"expires": "2040-01-01T14:00:00.00Z",
"throttling": 5
}
v1SubData=\
{
"entities": [
{
"id": "Room1",
"type": "Room",
}
],
"reference": "http://0.0.0.0:8888/accumulate"
}
updateDataWithupdateaction=\
{
"contextElements": [
{
"entityId": {
"id": "Room1",
"type": "Room"
},
"attributes": [
{
"name": "p1",
"type": "float",
"value": 60
},
{
"name": "p3",
"type": "float",
"value": 69
},
{
"name": "p2",
"type": "float",
"value": 32
}
],
"domainMetadata": [
{
"name": "location",
"type": "point",
"value": {
"latitude": 49.406393,
"longitude": 8.684208
}
}
]
}
],
"updateAction": "UPDATE"
}
# NGSI v1 updateContext payload whose "updateAction" carries the misspelled
# value "CRETAE" instead of "CREATE".
# NOTE(review): the misspelling may be deliberate, to exercise the broker's
# handling of an invalid updateAction — confirm with the tests that consume
# this fixture before "fixing" the string.
createDataWithupdateaction=\
{
    "contextElements": [
        {
            "entityId": {
                "id": "Room1",
                "type": "Room"
            },
            "attributes": [
                {
                    "name": "p1",
                    "type": "float",
                    "value": 90
                },
                {
                    "name": "p3",
                    "type": "float",
                    "value": 70
                },
                {
                    "name": "p2",
                    "type": "float",
                    "value": 12
                }
            ],
            "domainMetadata": [
                {
                    "name": "location",
                    "type": "point",
                    "value": {
                        "latitude": 49.406393,
                        "longitude": 8.684208
                    }
                }
            ]
        }
    ],
    "updateAction": "CRETAE"
}
deleteDataWithupdateaction=\
{
"contextElements": [
{
"entityId": {
"id": "Room1",
"type": "Room"
},
"attributes": [
{
"name": "p1",
"type": "float",
"value": 12
},
{
"name": "p3",
"type": "float",
"value": 13
},
{
"name": "p2",
"type": "float",
"value": 14
}
],
"domainMetadata": [
{
"name": "location",
"type": "point",
"value": {
"latitude": 49.406393,
"longitude": 8.684208
}
}
]
}
],
"updateAction": "DELETE"
}
subdata1=\
{
"contextElements": [
{
"entityId": {
"id": "RoomTrial10",
"type": "Room"
},
"attributes": [
{
"name": "temperature",
"type": "float",
"value": 69
},
{
"name": "pressure",
"type": "float",
"value": 75
}
],
"domainMetadata": [
{
"name": "location",
"type": "point",
"value": {
"latitude": -33.1,
"longitude": -1.1
}}
]
}
],
"updateAction": "UPDATE"
}
subdata2=\
{
"description": "A subscription to get info about RoomTrial10",
"subject": {
"entities": [
{
"id": "RoomTrial10",
"type": "Room"
}
],
"condition": {
"attrs": [
"pressure"
]
}
},
"notification": {
"http": {
"url": "http://0.0.0.0:8888/accumulate"
},
"attrs": [
"temperature"
]
},
"expires": "2040-01-01T14:00:00.00Z",
"throttling": 5
}
subdata3=\
{
"contextElements": [
{
"entityId": {
"id": "RoomTrial10",
"type": "Room"
},
"attributes": [
{
"name": "temperature",
"type": "float",
"value": 50
},
{
"name": "pressure",
"type": "float",
"value": 80
}
],
"domainMetadata": [
{
"name": "location",
"type": "point",
"value": {
"latitude": -33.1,
"longitude": -1.1
}}
]
}
],
"updateAction": "UPDATE"
}
subdata4=\
{
"contextElements": [
{
"entityId": {
"id": "RoomTrial20",
"type": "Room"
},
"attributes": [
{
"name": "temperature",
"type": "float",
"value": 69
},
{
"name": "pressure",
"type": "float",
"value": 75
}
],
"domainMetadata": [
{
"name": "location",
"type": "point",
"value": {
"latitude": -33.1,
"longitude": -1.1
}}
]
}
],
"updateAction": "UPDATE"
}
subdata5=\
{
"description": "A subscription to get info about RoomTrial20",
"subject": {
"entities": [
{
"id": "RoomTrial20",
"type": "Room"
}
],
"condition": {
"attrs": [
"pressure"
]
}
},
"notification": {
"http": {
"url": "http://0.0.0.0:8888/accumulate"
},
"attrs": [
"temperature"
]
},
"expires": "2040-01-01T14:00:00.00Z",
"throttling": 5
}
subdata6=\
{
"contextElements": [
{
"entityId": {
"id": "RoomTrial20",
"type": "Room"
},
"attributes": [
{
"name": "temperature",
"type": "float",
"value": 40
},
{
"name": "pressure",
"type": "float",
"value": 85
}
],
"domainMetadata": [
{
"name": "location",
"type": "point",
"value": {
"latitude": -33.1,
"longitude": -1.1
}}
]
}
],
"updateAction": "UPDATE"
}
subdata7=\
{
"contextElements": [
{
"entityId": {
"id": "RoomTrial30",
"type": "Room"
},
"attributes": [
{
"name": "temperature",
"type": "float",
"value": 69
},
{
"name": "pressure",
"type": "float",
"value": 75
}
],
"domainMetadata": [
{
"name": "location",
"type": "point",
"value": {
"latitude": -33.1,
"longitude": -1.1
}}
]
}
],
"updateAction": "UPDATE"
}
subdata8=\
{
"description": "A subscription to get info about RoomTrial30",
"subject": {
"entities": [
{
"id": "RoomTrial30",
"type": "Room"
}
],
"condition": {
"attrs": [
"pressure"
]
}
},
"notification": {
"http": {
"url": "http://0.0.0.0:8888/accumulate"
},
"attrs": [
"temperature"
]
},
"expires": "2040-01-01T14:00:00.00Z",
"throttling": 5
}
subdata9=\
{
"contextElements": [
{
"entityId": {
"id": "RoomTrial30",
"type": "Room"
},
"attributes": [
{
"name": "temperature",
"type": "float",
"value": 44
},
{
"name": "pressure",
"type": "float",
"value": 60
}
],
"domainMetadata": [
{
"name": "location",
"type": "point",
"value": {
"latitude": -33.1,
"longitude": -1.1
}}
]
}
],
"updateAction": "UPDATE"
}
| 20.90099
| 74
| 0.299289
| 568
| 10,555
| 5.559859
| 0.195423
| 0.059848
| 0.093097
| 0.062698
| 0.825206
| 0.754592
| 0.754592
| 0.711526
| 0.711526
| 0.711526
| 0
| 0.066091
| 0.549882
| 10,555
| 504
| 75
| 20.94246
| 0.598611
| 0.005685
| 0
| 0.576344
| 0
| 0
| 0.270141
| 0.011203
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b9a7fdb8db8eda339c031d112b555535e1151623
| 18,530
|
py
|
Python
|
pronotepy/ent.py
|
shadowdevfr/pronotepy
|
cc904e017a8f5e32be017edb62e5212cc7063b70
|
[
"MIT"
] | null | null | null |
pronotepy/ent.py
|
shadowdevfr/pronotepy
|
cc904e017a8f5e32be017edb62e5212cc7063b70
|
[
"MIT"
] | null | null | null |
pronotepy/ent.py
|
shadowdevfr/pronotepy
|
cc904e017a8f5e32be017edb62e5212cc7063b70
|
[
"MIT"
] | null | null | null |
import logging
import requests
from bs4 import BeautifulSoup
# Module-level logger shared by every ENT login helper below.
# DEBUG level is forced so each "Logging in with <user>" trace is emitted;
# NOTE(review): overriding the level here ignores the host app's logging
# config — confirm this is intentional.
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
def ac_grenoble(username, password):
    """
    ENT ac Grenoble

    Parameters
    ----------
    username : str
        username
    password : str
        password

    Returns
    -------
    cookies : cookies
        returns the ent session cookies
    """
    # ENT / PRONOTE required URLs
    identity_provider = "https://cas.ent.auvergnerhonealpes.fr/login?selection=EDU&service=https://0380029A.index-education.net/pronote/&submit=Confirm"
    login_service_provider = "https://educonnect.education.gouv.fr/idp/profile/SAML2/POST/SSO"
    # Required Headers
    headers = {
        'connection': 'keep-alive',
        'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:73.0) Gecko/20100101 Firefox/73.0'}
    # SAML authentication: fetch the IdP page and forward its hidden
    # RelayState / SAMLRequest fields to EduConnect.
    session = requests.Session()
    response = session.get(identity_provider, headers=headers)
    soup = BeautifulSoup(response.text, 'html.parser')
    data = {
        "RelayState": soup.find("input", {"name": "RelayState"})["value"],
        "SAMLRequest": soup.find("input", {"name": "SAMLRequest"})["value"]
    }
    log.debug('[ENT Eaux claires] Logging in with ' + username)
    response = session.post(login_service_provider, data=data, headers=headers)
    ent_login = response.url
    # Login payload
    payload = {
        "j_username": username,
        "j_password": password,
        "_eventId_proceed": ""
    }
    # Send user:pass to the ENT
    cookies = requests.utils.cookiejar_from_dict(requests.utils.dict_from_cookiejar(session.cookies))
    response = session.post(ent_login, headers=headers, data=payload, cookies=cookies)
    # 2nd SAML Authentication: extract the assertion returned after login.
    soup = BeautifulSoup(response.text, 'html.parser')
    payload = {
        "RelayState": soup.find("input", {"name": "RelayState"})["value"],
        "SAMLResponse": soup.find("input", {"name": "SAMLResponse"})["value"]
    }
    # NOTE(review): this SAMLResponse is presumably meant to be POSTed back to
    # the CAS assertion consumer (as monbureaunumerique does with
    # /saml/SAMLAssertionConsumer) — the target URL is not visible here, so the
    # forwarding step is left as a TODO; confirm against the ENT's SSO flow.
    # BUG FIX: the function previously ended here and implicitly returned None,
    # contradicting the docstring. Return the session cookies like every other
    # helper in this module so callers receive a usable cookie jar.
    return requests.utils.cookiejar_from_dict(requests.utils.dict_from_cookiejar(session.cookies))
def atrium_sud(username, password):
    """
    ENT for Atrium Sud

    Logs into the Atrium Sud CAS with the given credentials and hands back
    the resulting session cookies.

    Parameters
    ----------
    username : str
        username
    password : str
        password

    Returns
    -------
    cookies : cookies
        returns the ent session cookies
    """
    # CAS login endpoint pointing at the PRONOTE service.
    login_url = 'https://www.atrium-sud.fr/connexion/login?service=https:%2F%2F0060013G.index-education.net%2Fpronote%2F'
    request_headers = {
        'connection': 'keep-alive',
        'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:73.0) Gecko/20100101 Firefox/73.0'}
    session = requests.Session()
    landing = session.get(login_url, headers=request_headers)
    log.debug('[ENT Atrium] Logging in with ' + username)
    # Pull the hidden CAS "execution" token out of the login form.
    parsed = BeautifulSoup(landing.text, 'html.parser')
    execution_token = parsed.find('input', {'type': 'hidden', 'name': 'execution'}).get('value')
    form_data = {
        'execution': execution_token,
        '_eventId': 'submit',
        'submit': '',
        'username': username,
        'password': password}
    # Submit the credentials, replaying a snapshot of the current cookies.
    snapshot = requests.utils.cookiejar_from_dict(requests.utils.dict_from_cookiejar(session.cookies))
    session.post(login_url, headers=request_headers, data=form_data, cookies=snapshot)
    return requests.utils.cookiejar_from_dict(requests.utils.dict_from_cookiejar(session.cookies))
def ac_reims(username, password):
    """
    ENT for AC Reims

    Authenticates against the Reims "services-familles" portal, completes the
    CAS/SAML handshake with MonBureauNumerique, and returns the session cookies.

    Parameters
    ----------
    username : str
        username
    password : str
        password

    Returns
    -------
    cookies : cookies
        returns the ent session cookies
    """
    request_headers = {
        'connection': 'keep-alive',
        'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:73.0) Gecko/20100101 Firefox/73.0'}
    credentials = {
        'auth_mode': 'BASIC',
        'orig_url': '/sso/SSO?SPEntityID=SP-MonBureauNumerique-Production',
        'user': username,
        'password': password}
    login_url = 'https://services-familles.ac-reims.fr/login/ct_logon_vk.jsp?CT_ORIG_URL=%2Fsso%2FSSO%3FSPEntityID%3DSP-MonBureauNumerique-Production&ct_orig_uri=%2Fsso%2FSSO%3FSPEntityID%3DSP-MonBureauNumerique-Production'
    verif_url = 'https://services-familles.ac-reims.fr/aten-web/connexion/controlesConnexion?CT_ORIG_URL=%2Fsso%2FSSO%3FSPEntityID%3DSP-MonBureauNumerique-Production&ct_orig_uri=%2Fsso%2FSSO%3FSPEntityID%3DSP-MonBureauNumerique-Production'
    saml_consumer = 'https://cas.monbureaunumerique.fr/saml/SAMLAssertionConsumer'

    session = requests.Session()

    def _jar():
        # Snapshot the session's current cookies as an independent jar.
        return requests.utils.cookiejar_from_dict(requests.utils.dict_from_cookiejar(session.cookies))

    session.get(login_url, headers=request_headers)
    log.debug('[ENT AC Reims] Logging in with ' + username)
    # Submit the credentials to the portal.
    session.post(login_url, headers=request_headers, data=credentials, cookies=_jar())
    # Fetch the CAS verification page holding the hidden SAML fields.
    verif_page = session.get(verif_url, headers=request_headers, cookies=_jar())
    parsed = BeautifulSoup(verif_page.text, 'html.parser')
    saml_fields = {field.get('name'): field.get('value')
                   for field in parsed.find_all('input', {'type': 'hidden'})}
    # Snapshot BEFORE injecting the server-affinity cookies, then forward the
    # SAML assertion to MonBureauNumerique.
    pre_update_jar = _jar()
    session.cookies.update({'SERVERID': 'gdest-prod-web14', 'preselection': 'REIMS-ATS_parent_eleve'})
    session.post(saml_consumer, headers=request_headers, data=saml_fields, cookies=pre_update_jar)
    return _jar()
def occitanie_montpellier(username, password):
    """
    ENT for Occitanie Montpellier

    Authenticates against the Montpellier "famille" portal, completes the
    CAS/SAML handshake with mon-ent-occitanie, and returns the session cookies.

    Parameters
    ----------
    username : str
        username
    password : str
        password

    Returns
    -------
    cookies : cookies
        returns the ent session cookies
    """
    request_headers = {
        'connection': 'keep-alive',
        'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:73.0) Gecko/20100101 Firefox/73.0'}
    credentials = {
        'auth_mode': 'BASIC',
        'orig_url': '%2Ffim42%2Fsso%2FSSO%3FSPEntityID%3Dsp-ent-entmip-prod',
        'user': username,
        'password': password}
    login_url = 'https://famille.ac-montpellier.fr/login/ct_logon_vk.jsp?CT_ORIG_URL=/fim42/sso/SSO?SPEntityID=sp-ent-entmip-prod&ct_orig_uri=/fim42/sso/SSO?SPEntityID=sp-ent-entmip-prod'
    verif_url = 'https://famille.ac-montpellier.fr/aten-web/connexion/controlesConnexion?CT_ORIG_URL=%2Ffim42%2Fsso%2FSSO%3FSPEntityID%3Dsp-ent-entmip-prod&ct_orig_uri=%2Ffim42%2Fsso%2FSSO%3FSPEntityID%3Dsp-ent-entmip-prod'
    saml_consumer = 'https://cas.mon-ent-occitanie.fr/saml/SAMLAssertionConsumer'

    session = requests.Session()

    def _jar():
        # Snapshot the session's current cookies as an independent jar.
        return requests.utils.cookiejar_from_dict(requests.utils.dict_from_cookiejar(session.cookies))

    session.get(login_url, headers=request_headers)
    log.debug('[ENT Occitanie] Logging in with ' + username)
    # Submit the credentials to the portal.
    session.post(login_url, headers=request_headers, data=credentials, cookies=_jar())
    # Fetch the CAS verification page holding the hidden SAML fields.
    verif_page = session.get(verif_url, headers=request_headers, cookies=_jar())
    parsed = BeautifulSoup(verif_page.text, 'html.parser')
    saml_fields = {field.get('name'): field.get('value')
                   for field in parsed.find_all('input', {'type': 'hidden'})}
    # Snapshot BEFORE injecting the server-affinity cookies, then forward the
    # SAML assertion to the ENT.
    pre_update_jar = _jar()
    session.cookies.update({'SERVERID': 'entmip-prod-web4', 'preselection': 'MONTP-ATS_parent_eleve'})
    session.post(saml_consumer, headers=request_headers, data=saml_fields, cookies=pre_update_jar)
    return _jar()
def ac_reunion(username, password):
    """
    ENT for AC Reunion

    Logs into the college-jeandesme CAS, follows the post-login redirects to
    PRONOTE, and returns the accumulated session cookies.

    Parameters
    ----------
    username : str
        username
    password : str
        password

    Returns
    -------
    cookies : cookies
        returns the ent session cookies
    """
    login_url = 'https://portail.college-jeandesme.re:8443/login?service=https:%2F%2Fportail.college-jeandesme.re%2Fpronote%2Feleve.html'
    request_headers = {
        'connection': 'keep-alive',
        'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:73.0) Gecko/20100101 Firefox/73.0'}

    session = requests.Session()

    def _jar():
        # Snapshot the session's current cookies as an independent jar.
        return requests.utils.cookiejar_from_dict(requests.utils.dict_from_cookiejar(session.cookies))

    landing = session.get(login_url, headers=request_headers)
    log.debug('[ENT Reunion] Logging in with ' + username)
    # Extract the hidden CAS login ticket ("lt") from the form.
    parsed = BeautifulSoup(landing.text, 'html.parser')
    login_ticket = parsed.find('input', {'type': 'hidden', 'name': 'lt'}).get('value')
    form_data = {
        'service': 'https://portail.college-jeandesme.re/pronote/eleve.html',
        'lt': login_ticket,
        'previous_user': username + '@default',
        'username': username,
        'password': password}
    # Submit the credentials, then follow the two redirect hops to PRONOTE.
    after_login = session.post(login_url, headers=request_headers, data=form_data, cookies=_jar())
    followed = session.get(after_login.url, headers=request_headers, cookies=_jar())
    session.get(followed.url, headers=request_headers, cookies=_jar())
    return _jar()
def ile_de_france(username, password):
    """
    ENT for Ile de France

    Posts the credentials directly to the ENT auth endpoint and returns the
    session cookies.

    Parameters
    ----------
    username : str
        username
    password : str
        password

    Returns
    -------
    cookies : cookies
        returns the ent session cookies
    """
    auth_url = "https://ent.iledefrance.fr/auth/login?callback=https%3A%2F%2Fent.iledefrance.fr%2F"
    request_headers = {
        'connection': 'keep-alive',
        'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:73.0) Gecko/20100101 Firefox/73.0'}
    credentials = {'email': username, 'password': password}
    session = requests.Session()
    session.post(auth_url, headers=request_headers, data=credentials)
    return requests.utils.cookiejar_from_dict(requests.utils.dict_from_cookiejar(session.cookies))
def paris_classe_numerique(username, password):
    """
    ENT for PCN

    Posts the credentials to the Paris Classe Numerique auth endpoint and
    returns the session cookies.

    Parameters
    ----------
    username : str
        username
    password : str
        password

    Returns
    -------
    cookies : cookies
        returns the ent session cookies
    """
    auth_url = "https://ent.parisclassenumerique.fr/auth/login"
    request_headers = {
        'connection': 'keep-alive',
        'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:73.0) Gecko/20100101 Firefox/73.0'}
    credentials = {'email': username, 'password': password}
    session = requests.Session()
    # Snapshot of the (still empty) fresh-session cookies, replayed on the POST
    # to mirror the other helpers in this module.
    snapshot = requests.utils.cookiejar_from_dict(requests.utils.dict_from_cookiejar(session.cookies))
    session.post(auth_url, headers=request_headers, data=credentials, cookies=snapshot)
    return requests.utils.cookiejar_from_dict(requests.utils.dict_from_cookiejar(session.cookies))
def ac_lyon(username, password):
    """
    ENT for Lyon

    Logs into the Auvergne-Rhone-Alpes CAS with the Lyon parent/eleve
    preselection and returns the session cookies.

    Parameters
    ----------
    username : str
        username
    password : str
        password

    Returns
    -------
    cookies : cookies
        returns the ent session cookies
    """
    login_url = 'https://cas.ent.auvergnerhonealpes.fr/login?selection=LYON-ATS_parent_eleve&submit=Valider'
    request_headers = {
        'connection': 'keep-alive',
        'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:73.0) Gecko/20100101 Firefox/73.0'}
    session = requests.Session()
    landing = session.get(login_url, headers=request_headers)
    # Pull the hidden CAS "execution" token out of the login form.
    parsed = BeautifulSoup(landing.text, 'html.parser')
    execution_token = parsed.find('input', {'type': 'hidden', 'name': 'execution'}).get('value')
    form_data = {
        'username': username,
        'password': password,
        'selection': "LYON-ATS_parent_eleve",
        'codeFournisseurIdentite': "ATS-LYON",
        '_eventId': "submit",
        'submit': "Confirm",
        'geolocation': "",
        'execution': execution_token
    }
    # Submit the credentials, replaying a snapshot of the current cookies.
    snapshot = requests.utils.cookiejar_from_dict(requests.utils.dict_from_cookiejar(session.cookies))
    session.post(login_url, headers=request_headers, data=form_data, cookies=snapshot)
    return requests.utils.cookiejar_from_dict(requests.utils.dict_from_cookiejar(session.cookies))
def ac_orleans_tours(username, password):
    """
    ENT for AC Orleans-Tours

    Drives the Netocentre CAS -> EduConnect SAML flow and returns the session
    cookies once the Shibboleth consumer has been fed the assertion.

    Parameters
    ----------
    username : str
        username
    password : str
        password

    Returns
    -------
    cookies : cookies
        returns the ent session cookies
    """
    request_headers = {
        'connection': 'keep-alive',
        'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:73.0) Gecko/20100101 Firefox/73.0'
    }
    credentials = {
        "j_username": username,
        "j_password": password,
        "_eventId_proceed": ""
    }
    landing_url = "https://ent.netocentre.fr/cas/login?service=https://0451462V.index-education.net/pronote/eleve.html&idpId=parentEleveEN-IdP"
    sso_url = "https://educonnect.education.gouv.fr/idp/profile/SAML2/Redirect/SSO?execution=e1s1"
    shibboleth_url = "https://ent.netocentre.fr/cas/Shibboleth.sso/SAML2/POST?client_name=EduConnect"

    session = requests.Session()
    # Hit the CAS landing page first so the PRONOTE service is registered.
    session.get(landing_url, headers=request_headers)
    # Send the credentials to EduConnect.
    sso_response = session.post(sso_url, headers=request_headers, data=credentials)
    # Collect the hidden RelayState / SAMLResponse fields from the reply.
    parsed = BeautifulSoup(sso_response.text, 'html.parser')
    saml_fields = {field.get('name'): field.get('value')
                   for field in parsed.find_all('input', {'type': 'hidden'})}
    # Deliver the assertion to the Shibboleth consumer to obtain the ticket.
    session.post(shibboleth_url, headers=request_headers, data=saml_fields)
    return requests.utils.cookiejar_from_dict(
        requests.utils.dict_from_cookiejar(session.cookies))
def monbureaunumerique(username, password):
    """
    ENT for MonBureauNumerique (Grand Est)

    Drives the MonBureauNumerique CAS -> EduConnect SAML round-trip and
    returns the session cookies.

    Parameters
    ----------
    username : str
        username
    password : str
        password

    Returns
    -------
    cookies : cookies
        returns the ent session cookies
    """
    request_headers = {
        'Connection': 'keep-alive',
        'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:73.0) Gecko/20100101 Firefox/73.0'
    }
    credentials = {
        "j_username": username,
        "j_password": password,
        "_eventId_proceed": ""
    }
    landing_url = "https://cas.monbureaunumerique.fr/login?selection=EDU&service=http%3A%2F%2Fpronote.lycee-fabert.com%2Fpronote%2F&submit=Valider"
    sso_entry_url = "https://educonnect.education.gouv.fr/idp/profile/SAML2/POST/SSO"
    sso_login_url = "https://educonnect.education.gouv.fr/idp/profile/SAML2/POST/SSO?execution=e1s1"
    saml_consumer = "https://cas.monbureaunumerique.fr/saml/SAMLAssertionConsumer"

    def _hidden_fields(html):
        # Map name -> value for every hidden input on the page.
        parsed = BeautifulSoup(html, 'html.parser')
        return {field.get('name'): field.get('value')
                for field in parsed.find_all('input', {'type': 'hidden'})}

    session = requests.Session()
    # Fetch the CAS landing page and forward its SAML request to EduConnect.
    landing = session.get(landing_url, headers=request_headers)
    session.post(sso_entry_url, headers=request_headers, data=_hidden_fields(landing.text))
    # Send the credentials to EduConnect.
    sso_response = session.post(sso_login_url, headers=request_headers, data=credentials)
    # Deliver the resulting assertion back to the CAS consumer for the ticket.
    session.post(saml_consumer, headers=request_headers, data=_hidden_fields(sso_response.text))
    return requests.utils.cookiejar_from_dict(
        requests.utils.dict_from_cookiejar(session.cookies))
def ent_essonne(username, password):
    """
    ENT Essonne

    Posts the credentials directly to the moncollege-ent auth endpoint and
    returns the session cookies.

    Parameters
    ----------
    username : str
        username
    password : str
        password

    Returns
    -------
    cookies : cookies
        returns the ent session cookies
    """
    auth_url = "https://www.moncollege-ent.essonne.fr/auth/login"
    request_headers = {
        'connection': 'keep-alive',
        'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:73.0) Gecko/20100101 Firefox/73.0'}
    credentials = {'email': username, 'password': password}
    session = requests.Session()
    session.post(auth_url, headers=request_headers, data=credentials)
    return requests.utils.cookiejar_from_dict(requests.utils.dict_from_cookiejar(session.cookies))
| 32.912966
| 239
| 0.667836
| 2,149
| 18,530
| 5.63611
| 0.100512
| 0.049373
| 0.041777
| 0.049373
| 0.836113
| 0.807629
| 0.791034
| 0.767668
| 0.747441
| 0.74356
| 0
| 0.022977
| 0.203778
| 18,530
| 562
| 240
| 32.97153
| 0.797953
| 0.179547
| 0
| 0.649606
| 0
| 0.098425
| 0.324014
| 0.01336
| 0
| 0
| 0
| 0
| 0.011811
| 1
| 0.043307
| false
| 0.086614
| 0.011811
| 0
| 0.094488
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
b9bc462e4a779d341cfc3e25a3ba80e7315b7e77
| 44,462
|
py
|
Python
|
langdetect/utils/messages.py
|
pelucid/langdetect
|
17068f6d3d37606c60c9d3f245110851acd0eb00
|
[
"Apache-2.0"
] | null | null | null |
langdetect/utils/messages.py
|
pelucid/langdetect
|
17068f6d3d37606c60c9d3f245110851acd0eb00
|
[
"Apache-2.0"
] | null | null | null |
langdetect/utils/messages.py
|
pelucid/langdetect
|
17068f6d3d37606c60c9d3f245110851acd0eb00
|
[
"Apache-2.0"
] | null | null | null |
from os import path
mydict = {'NGram.CJK_KANJI_EXCLUDE':'\u0020\uFF08\uFF09',
'NGram.LATIN1_EXCLUDE':'\u00A0\u00AB\u00B0\u00BB',
'NGram.KANJI_1_0':'\u4F7C\u6934',
'NGram.KANJI_1_2':'\u88CF\u95B2',
'NGram.KANJI_1_4':'\u7027\u7DCB',
'NGram.KANJI_1_8':'\u4E80\u4E9C\u4EEE\u5263\u5264\u5270\u52C5\u52E7\u52F2\u53B3\u5449\u58CA\u58CC\u5968\u59C9\u59EB\u5D8B\u5DE3\u5E30\u6075\u622F\u623B\u6255\u629C\u629E\u62DD\u62E1\u633F\u635C\u63FA\u6442\u6589\u658E\u6669\u66A6\u66FD\u6804\u685C\u6B69\u6B6F\u6BBB\u6C37\u6C5A\u6D44\u6E09\u6E0B\u6E13\u6EDD\u713C\u72A0\u731F\u7363\u7A32\u7A42\u7A93\u7ADC\u7C8B\u7C9B\u7DD1\u7E01\u7E04\u7E26\u7E4A\u7E4B\u7E70\u8074\u8107\u8133\u81D3\u820E\u8217\u8358\u83D3\u85AC\u8987\u899A\u8B21\u8B72\u8B83\u8CDB\u9045\u90F7\u91C8\u9271\u9283\u92AD\u9665\u967A\u96A0\u96A3\u96B7\u970A\u983C\u9854\u9855\u99C6\u9A12\u9ED9\u9F62',
'NGram.KANJI_1_11':'\u67D8\u831C',
'NGram.KANJI_1_12':'\u5742\u57FC\u5800',
'NGram.KANJI_1_13':'\u4E3C\u4E98\u4FE3\u4FF5\u5072\u51A8\u53A9\u5451\u546A\u5504\u5516\u55A9\u55B0\u5618\u5642\u565B\u567A\u56A2\u57F4\u5840\u5841\u58F1\u59F6\u5A2F\u5B22\u5B8D\u5DCC\u5EFB\u5F10\u60A9\u60E3\u61D0\u62F6\u63B4\u63BB\u63C3\u6681\u685F\u6955\u6962\u696F\u698A\u698E\u69FB\u6A2B\u6A7F\u6B53\u6BD8\u6D99\u6E07\u7460\u7473\u7560\u7573\u758E\u7690\u7815\u783A\u7962\u7A4F\u7A63\u7AEA\u7BED\u7CA7\u7D18\u7D3A\u7E4D\u8061\u8218\u8276\u82C5\u8597\u85AB\u86CD\u874B\u88FE\u8ACF\u8B90\u8D0B\u8FBF\u9013\u9061\u914E\u9154\u918D\u9190\u91A4\u91B8\u9262\u929A\u92ED\u92F3\u932C\u96EB\u96F0\u976D\u97EE\u981A\u99C4\u9A28\u9AC4\u9B8E\u9C10\u9D0E\u9D5C\u9D8F\u9E78\u9EB9\u9EBA\u9EBF',
'NGram.KANJI_1_14':'\u5F66\u7984\u7985',
'NGram.KANJI_1_16':'\u5861\u7B25\u844E\u9419\u9D07',
'NGram.KANJI_1_18':'\u5039\u514E\u51E7\u51EA\u5301\u5302\u5859\u58F7\u59AC\u5C2D\u5CA8\u5EFC\u6357\u64B9\u67CA\u6802\u6834\u68BC\u6900\u6919\u691B\u69D9\u6AE8\u6D9C\u6E8C\u6F09\u6F45\u701E\u7026\u7114\u72DB\u7577\u75E9\u783F\u7895\u7A50\u7AC3\u7B48\u7B86\u7BAA\u7C7E\u7C82\u7C8D\u7CCE\u7D2C\u7F6B\u7FEB\u8557\u85AE\u86CE\u877F\u8997\u8ACC\u8CB0\u8CCE\u8FE9\u9197\u920E\u9266\u927E\u92F2\u9306\u9453\u9784\u982C\u9834\u99C8\u9BF5\u9C2F\u9D2C',
'NGram.KANJI_1_22':'\u6762\u6A17\u887F',
'NGram.KANJI_1_27':'\u4E21\u4E57\u4ECF\u4F1D\u4FA1\u4FF3\u5024\u50CD\u5150\u5186\u51E6\u52B4\u52B9\u5358\u53CE\u55B6\u56E3\u56F2\u56F3\u570F\u5727\u5869\u5897\u58F2\u5909\u5B9F\u5BDB\u5BFE\u5C02\u5DFB\u5E2F\u5E81\u5E83\u5EC3\u5F3E\u5F93\u5FB3\u5FB4\u5FDC\u60AA\u6226\u6238\u6271\u62E0\u6319\u63B2\u6483\u64AE\u67A0\u67FB\u691C\u697D\u69D8\u6A29\u6B73\u6B74\u6BCE\u6C17\u6CA2\u6D5C\u6E08\u6E80\u702C\u7523\u767A\u770C\u7D4C\u7D75\u7D76\u7D99\u7D9A\u7DCF\u8535\u8846\u89A7\u89B3\u8A33\u8AAC\u8AAD\u8C4A\u8EE2\u8EFD\u8FBA\u8FBC\u9244\u9332\u95A2\u95D8\u96D1\u99C5\u9A13\u9ED2',
'NGram.KANJI_1_29':'\u4F0E\u4FFA\u5036\u53E1\u54B2\u5506\u583A\u5C3B\u5CAC\u5CE0\u5CEF\u6803\u68B6\u6A0B\u6A8E\u73C2\u7551\u7826\u7881\u79B0\u7B39\u8429\u8599\u8FBB\u9162\u95C7\u9688\u96BC\u9AEA\u9DF2',
'NGram.KANJI_1_31':'\u5553\u938C',
'NGram.KANJI_1_35':'\u51B4\u564C\u57DC\u5B2C\u6822\u685D\u690B\u6973\u6C93\u7511\u7887\u7A17\u83D6\u847A\u8494\u8526\u854E\u85C1\u86F8\u88B4\u93A7\u9B92\u9C39\u9C48\u9C52',
'NGram.KANJI_2_0':'\u4E2B\u4EC3\u4F09\u4F57\u4F6F\u4F70\u4FD1\u4FDA\u500C\u5043\u516E\u5189\u5241\u530D\u5310\u5412\u54AB\u54AF\u5514\u5556\u55B1\u561F\u573B\u586D\u587D\u58C5\u58D1\u5914\u5A62\u5A6A\u5AE6\u5B40\u5B5B\u5B70\u5BB8\u5CD2\u5D01\u5D34\u5E11\u5EA0\u5F0B\u5F2D\u5F87\u607F\u621B\u6221\u6289\u63A3\u6452\u646D\u64D8\u652B\u6600\u6631\u6641\u66F7\u6773\u67B8\u67DD\u67DE\u6829\u68FB\u69AD\u6A47\u6C10\u6C68\u6C74\u6C85\u6CD3\u6D31\u6D93\u6D94\u6DB8\u6DBF\u6DC5\u6E6E\u6EA7\u6EB4\u6EC2\u6F2A\u6F2F\u6FB9\u6FC2\u6FDB\u6FEE\u70AF\u70FD\u7166\u726F\u729B\u739F\u73DE\u740A\u746D\u749C\u749F\u74E0\u759D\u75A3\u75CD\u75DE\u7600\u7620\u7688\u7738\u7762\u776B\u777D\u77E3\u781D\u7837\u78A3\u7946\u7B60\u7F44\u7F54\u7F5F\u7FAF\u8026\u807F\u80C4\u80DB\u80ED\u81E7\u824B\u82B7\u82E3\u8392\u846D\u84D3\u8548\u85B9\u86DE\u873F\u8753\u8782\u87AB\u87B3\u87D1\u87E0\u87FE\u8821\u88D8\u88E8\u8913\u891A\u892B\u8983\u8C3F\u8C49\u8C82\u8D6D\u8DE4\u8E1D\u8E1E\u8E7C\u8FE5\u8FE8\u9005\u9035\u9050\u9082\u9083\u9095\u90E2\u911E\u91AE\u91B4\u93D6\u9621\u968D\u96B9\u96D2\u9711\u9713\u973E\u9AB0\u9AB7\u9AE6\u9B03\u9B23\u9EDC\u9EEF',
'NGram.KANJI_2_1':'\u4E82\u4F48\u4F54\u50F9\u5167\u528D\u52DE\u532F\u537B\u53C3\u5433\u555F\u55AE\u56B4\u570D\u5716\u58D3\u58DE\u5920\u5967\u5A1B\u5BEB\u5BEC\u5C08\u5C0D\u5C46\u5C6C\u5CFD\u5E36\u5E6B\u5EC8\u5EF3\u5F48\u5F91\u5F9E\u5FB5\u6046\u60E1\u61F7\u6232\u6236\u64C7\u64CA\u64D4\u64DA\u64F4\u651D\u6578\u65B7\u6649\u6A13\u6A23\u6A6B\u6A94\u6AA2\u6B0A\u6B50\u6B61\u6B72\u6B77\u6B78\u6C92\u6EAB\u6EFF\u6FD5\u6FDF\u71DF\u722D\u72C0\u734E\u737B\u746A\u7522\u773E\u78BC\u7A69\u7C3D\u7CB5\u7D55\u7D72\u7DA0\u7DAB\u7DE3\u7E5E\u7E6A\u7E7C\u7E8C\u8072\u807D\u8085\u812B\u8166\u8173\u81D8\u8209\u820A\u8332\u838A\u840A\u85E5\u860B\u8655\u865B\u88DD\u89BA\u89BD\u89C0\u8AAA\u8B6F\u8B7D\u8B8A\u8B93\u8C50\u8CF4\u8E64\u8F15\u8F49\u8FA6\u8FAD\u9109\u9130\u91AB\u91CB\u92B7\u9304\u9322\u95CA\u96A8\u96AA\u96B1\u96B8\u96D6\u96D9\u96DC\u9748\u975C\u986F\u9918\u99DB\u9A57\u9B25\u9EA5\u9EC3\u9EDE\u9F52',
'NGram.KANJI_2_4':'\u514C\u51AA\u5614\u56AE\u56C2\u582F\u58FA\u5B0C\u5D11\u5DD2\u5DD6\u5E40\u5E5F\u5EEC\u6137\u6417\u6488\u64F2\u652A\u6582\u6689\u689F\u68D7\u69D3\u6A97\u6AB8\u6ABB\u6AC3\u6ADA\u6B7F\u6BB2\u6EA5\u6EC4\u6EF2\u7009\u701D\u7028\u703E\u7165\u71BE\u721B\u7463\u7464\u7469\u7515\u7526\u75FA\u7621\u779E\u79B1\u7A1F\u7AC4\u7AC7\u7B8F\u7BE9\u7D2E\u7D68\u7D8F\u7DB8\u7DBA\u7E46\u7E79\u7F4C\u7F88\u8070\u8073\u8076\u81BE\u82BB\u83A2\u858A\u8591\u861A\u8778\u87EC\u8805\u880D\u893B\u8A1B\u8A25\u8A36\u8A85\u8AA6\u8B17\u8B28\u8CB6\u8CE4\u8D16\u8D1B\u8ECB\u9112\u9214\u9249\u93AC\u9594\u9598\u95BB\u95D5\u965E\u96B4\u97DC\u9821\u9824\u9921\u9952\u9A55\u9A5B\u9B1A\u9C13\u9D09\u9DAF\u9E1A\u9E75\u9F67',
'NGram.KANJI_2_9':'\u4E9F\u4F6C\u4FDE\u4FFE\u5029\u5140\u51A2\u5345\u539D\u53FB\u54C7\u5599\u560E\u561B\u563B\u566C\u5676\u5729\u574D\u57E4\u595A\u598D\u5A1F\u5A25\u5A77\u5AB2\u5AD6\u5BF0\u5C2C\u5CEA\u5E37\u5F08\u6059\u606A\u6096\u609A\u62A8\u6555\u6556\u66E6\u675E\u68E3\u69BB\u6BCB\u6BD3\u6C1F\u6C26\u6C81\u6DC4\u6DDE\u6E32\u6E44\u6E4D\u6F33\u6F7C\u6FA7\u701A\u701B\u715C\u741B\u7428\u7480\u74A8\u7504\u752C\u768B\u76CE\u78CA\u78FA\u79BA\u7C27\u8046\u81FB\u8331\u8393\u83C1\u8403\u8438\u843C\u8446\u85B0\u87D2\u8862\u8DC6\u9074\u9131\u9672\u96EF\u9704\u9706\u977C\u9ABC\u9E92\u9ECF',
'NGram.KANJI_2_10':'\u51BD\u5704\u7350\u73A5',
'NGram.KANJI_2_11':'\u4E15\u4EA2\u4F5A\u50D6\u5349\u53DF\u5484\u5958\u5B34\u5B5A\u5C91\u5E1B\u5F77\u61CB\u61FF\u620C\u620D\u622E\u6248\u6538\u660A\u664F\u678B\u67E9\u69B7\u69C3\u6CB1\u6CD7\u6D5A\u6DAA\u6DC7\u7099\u71EE\u7325\u7425\u7455\u747E\u749E\u75B5\u7678\u7693\u76C2\u77B0\u77BF\u78CB\u7957\u795A\u797A\u7A79\u7B08\u7B75\u7BB4\u7F9A\u7FB2\u7FDF\u80E5\u81BA\u8340\u837C\u8398\u8559\u85A8\u86DF\u8734\u8882\u88F4\u8936\u900D\u907D\u9642\u96C9\u9AFB\u9E9D\u9EBE',
'NGram.KANJI_2_12':'\u5F57\u7940',
'NGram.KANJI_2_13':'\u5191\u7791\u792C\u7D46',
'NGram.KANJI_2_15':'\u5713\u58FD\u5D17\u5D19\u5DBC\u5F4C\u6191\u64A5\u687F\u69AE\u6AFB\u6EEC\u6F3F\u6FE4\u6FF1\u6FFE\u700B\u74CA\u76E1\u76E7\u7926\u792B\u79AE\u7AA9\u7C43\u7C4C\u7C64\u7DBD\u81A0\u856D\u8594\u8606\u8A62\u8AF7\u8CC8\u8CE3\u8D99\u8F1B\u8F3B\u9059\u9127\u9264\u947D\u95A9\u97CB\u980C\u9838\u9846\u99AE\u9A19\u9B06\u9B91\u9F4A\u9F4B',
'NGram.KANJI_2_16':'\u4E69\u4EC4\u4EDF\u4EF3\u4F0B\u4F5E\u5000\u5028\u50E5\u513B\u5157\u51DC\u52D7\u530F\u5379\u53F5\u5471\u5477\u5555\u555C\u557B\u5594\u55B2\u55C9\u560D\u5616\u562E\u5630\u5653\u5657\u566F\u56A8\u56B6\u5820\u5880\u58CE\u58D9\u5950\u5969\u596D\u599E\u59B3\u59CD\u59D2\u5A40\u5AA7\u5ABC\u5AD7\u5AD8\u5B0B\u5B24\u5B38\u5B53\u5C5C\u5D06\u5D47\u5D94\u5D9D\u5E57\u5EC4\u5F46\u5FAC\u60BD\u60D8\u6123\u615D\u615F\u6175\u618A\u61AB\u61E3\u623E\u6308\u636B\u645F\u6519\u6595\u6698\u66B8\u67D9\u6840\u695D\u696E\u6979\u69C1\u69E8\u6AEC\u6AFA\u6B5F\u6CAC\u6CE0\u6CEF\u6D0C\u6D36\u6DD2\u6DD9\u6DE6\u6DEC\u6E5F\u6FA0\u6FEC\u7156\u71C4\u71DC\u71EC\u71FC\u720D\u7230\u7292\u7296\u72A2\u72CE\u7357\u737A\u7380\u7386\u73A8\u73EE\u743F\u74A6\u74CF\u74D4\u74DA\u755A\u75A5\u75B3\u75C2\u75E0\u75F1\u75FF\u7601\u7609\u7646\u7658\u769A\u76B0\u774F\u775C\u778B\u77BD\u77C7\u7843\u787F\u78F4\u79C8\u7A88\u7A95\u7AFD\u7B1E\u7B67\u7B9D\u7BCC\u7C0D\u7C11\u7C37\u7C40\u7C6E\u7CB3\u7CBD\u7D09\u7D31\u7D40\u7D5B\u7D70\u7D91\u7D9E\u7DB0\u7DD9\u7DF9\u7E08\u7E11\u7E1D\u7E35\u7E52\u7FB6\u7FBF\u7FEE\u8012\u801C\u8028\u8052\u8123\u8188\u81C3\u81DA\u81FE\u8210\u82BE\u83A0\u83D4\u8407\u8435\u8477\u849E\u84C6\u84CA\u85F9\u867A\u86B5\u86B6\u86C4\u8706\u8707\u870A\u8768\u87BB\u8831\u8839\u8879\u8921\u8938\u8964\u89A6\u89AC\u8A10\u8A3E\u8AC2\u8ADB\u8AF3\u8B2B\u8B41\u8B4E\u8B5F\u8B6B\u8B92\u8C55\u8C62\u8C73\u8C8A\u8C8D\u8CB2\u8CB3\u8CD2\u8CE1\u8CFB\u8D0D\u8E34\u8E7A\u8E8A\u8ED4\u8EFE\u8F0A\u8F1C\u8F1E\u8F26\u8FAE\u9088\u90C3\u90FE\u9134\u9148\u91D9\u91E9\u9238\u9239\u923D\u924D\u925A\u9296\u92AC\u92BB\u9315\u9319\u931A\u9321\u9370\u9394\u93A2\u93D8\u93E4\u943A\u9477\u9582\u958E\u95A1\u95C8\u95CC\u95D4\u9658\u966C\u970F\u973D\u9744\u975B\u9766\u97A3\u97A6\u97C1\u97C6\u980A\u9837\u9853\u9870\u98AF\u98B3\u98BA\u98E9\u98ED\u9912\u991B\u991E\u993D\u993F\u99D1\u99DF\u9A01\u9A3E\u9A43\u9A4D\u9ACF\u9AE1\u9B22\u9B58\u9C25\u9C3E\u9C54\u9C56\u9D15\u9D23\u9D89\u9DC2\u9DD3\u9E82\u9E8B\u9EA9\u9EE0\u9EF7\u9F07\u9F2F\u9F34
\u9F3E\u9F5F\u9F6C',
'NGram.KANJI_2_18':'\u5155\u520E\u55DF\u56C0\u56C1\u5793\u5FD6\u5FF8\u6029\u60FA\u613E\u6147\u615A\u62C8\u6384\u6883\u6894\u68F9\u6AA3\u6AAE\u6AC2\u6E63\u7032\u70A4\u7146\u71FB\u7228\u72F7\u7370\u7441\u74BF\u75B8\u75E3\u7622\u76CD\u7768\u79E3\u7A60\u7B6E\u7BC1\u7C5F\u7D06\u7E2F\u7E39\u8146\u81CF\u8703\u8729\u8737\u87EF\u88D2\u8A22\u8AC4\u8AF6\u8E59\u8F33\u8F42\u9169\u91B1\u9278\u93C3\u93DD\u9460\u946A\u9785\u9AD1\u9B4D\u9B4E\u9C31\u9D12\u9ECC',
'NGram.KANJI_2_21':'\u502A\u544E\u59AE\u59EC\u5D1B\u66A8\u6BD7\u6C76\u6E1D\u70EF\u742A\u7459\u7FE1\u82EF\u8343\u85C9\u8A79\u90DD',
'NGram.KANJI_2_22':'\u4EDE\u4F7B\u504C\u50EE\u52E3\u52F0\u536E\u54A9\u54BB\u54BF\u54C2\u54E6\u550F\u556A\u55E8\u564E\u5664\u5671\u568F\u56DD\u572F\u57A0\u5809\u5924\u59A3\u59A4\u59E3\u5A13\u5A23\u5B51\u5B73\u5C50\u5C8C\u6035\u60C6\u6106\u6215\u62CE\u62FD\u64ED\u6549\u6554\u655D\u659B\u65CE\u65D6\u6615\u6624\u665E\u6677\u669D\u66E9\u6772\u677C\u696B\u6A84\u6AA0\u6BFD\u6C16\u6C86\u6C94\u6CD6\u6D2E\u6D39\u6F78\u6FB6\u705E\u70CA\u7168\u723B\u7256\u7284\u73B3\u740D\u742F\u7498\u74A9\u752D\u75F3\u7634\u768E\u76B4\u76E5\u77A0\u77DC\u781F\u782D\u7AA0\u7BFE\u7FF1\u80AB\u8174\u81EC\u8202\u8222\u8228\u82DC\u8306\u83FD\u8469\u84FF\u859C\u8617\u86B1\u8722\u8C89\u8D67\u8DCE\u8E49\u8E76\u8E87\u8FE2\u8FE4\u8FF8\u9016\u905B\u9174\u982B\u98E7\u9955\u9B32',
'NGram.KANJI_2_23':'\u4F8F\u5055\u524C\u548E\u5583\u594E\u5CB7\u5ED6\u5F5D\u6021\u66B9\u66F0\u6C55\u6C7E\u6C82\u6E2D\u6EC7\u6ED5\u70B3\u71B9\u72C4\u73C0\u7426\u745C\u748B\u7696\u777F\u79A7\u79B9\u7F8C\u8153\u8339\u8386\u8725\u90B5\u9102\u962E\u9716\u97F6',
'NGram.KANJI_2_28':'\u5733\u57D4\u838E\u8FEA',
'NGram.KANJI_2_29':'\u50ED\u5F29\u62EE\u6A9C\u7BC6\u80F1\u8129\u8171\u822B\u8AEB',
'NGram.KANJI_2_30':'\u4EB3\u4F15\u4FB7\u5006\u509A\u50A2\u5102\u5109\u5115\u5137\u5138\u513C\u524B\u524E\u5277\u528A\u52E6\u52FB\u5331\u5436\u5443\u54FD\u5538\u555E\u55C6\u55C7\u5679\u5690\u5695\u56C9\u56D1\u56EA\u588A\u58E2\u5AFB\u5B2A\u5B43\u5B7F\u5BE2\u5C37\u5D27\u5D84\u5D87\u5DD4\u5EC1\u5EDD\u5F12\u5FA0\u60F1\u616B\u61F5\u61F6\u61FE\u62DA\u6371\u6399\u63C0\u6451\u647B\u6493\u64BB\u64BF\u64C4\u64F1\u64F7\u650F\u652C\u665D\u6684\u6688\u66EC\u672E\u68E7\u69A6\u69ED\u69F3\u6A01\u6AAF\u6AE5\u6BA4\u6BAE\u6BAF\u6BC6\u6C08\u6C2C\u6C59\u6D87\u6EBC\u6ECC\u6EF7\u6F6F\u6F80\u6F86\u6FD8\u6FF0\u6FFA\u7006\u7018\u7030\u7051\u7192\u71C9\u71D9\u71F4\u71FE\u7274\u7377\u74A3\u750C\u7613\u7627\u7661\u7662\u7665\u766E\u7671\u7672\u76BA\u775E\u776A\u778C\u78E7\u7955\u7A08\u7AC5\u7B4D\u7C2B\u7C6C\u7CF0\u7D02\u7D1C\u7D73\u7DA2\u7DB5\u7DDE\u7E09\u7E0A\u7E37\u7E43\u7E61\u7E7D\u7E93\u7F3D\u7FF9\u81A9\u8271\u83F8\u84C0\u8514\u85BA\u86A9\u86FB\u879E\u8814\u8836\u889E\u8932\u896A\u896F\u8993\u89B2\u8A15\u8A16\u8A1D\u8A5B\u8A6C\u8A6D\u8A7C\u8AA1\u8AA3\u8AA5\u8B0A\u8B4F\u8B59\u8B96\u8C48\u8C54\u8CBD\u8CFA\u8D13\u8E89\u8E8B\u8EAA\u8EC0\u8EDB\u8EFC\u8F12\u8F1F\u8F3E\u8F45\u8FFA\u9015\u9183\u919E\u91A3\u91D7\u91F5\u9209\u9215\u923E\u9240\u9251\u9257\u927B\u9293\u92A8\u92C5\u92C7\u92F0\u9333\u935A\u9382\u938A\u9398\u93B3\u93D7\u93DF\u93E2\u93FD\u942B\u942E\u9433\u9463\u9470\u9472\u947E\u95D0\u96CB\u97C3\u97CC\u981C\u9839\u986B\u98B6\u98EA\u9909\u991A\u9935\u993E\u9951\u99A5\u99B1\u99D9\u99DD\u99F1\u9A2B\u9A62\u9A65\u9AAF\u9AD2\u9AEF\u9B0D\u9B28\u9B77\u9BFD\u9C49\u9C5F\u9C78\u9D3F\u9D72\u9DD7\u9E1B\u9EB4\u9EF4\u9F66\u9F94',
'NGram.KANJI_2_31':'\u5DBD\u63C6\u6E3E\u7587\u8AF1\u8B5A\u9695',
'NGram.KANJI_2_32':'\u53A5\u589F\u5CD9\u7109\u7F79\u8006\u8654\u8944\u968B\u96CD',
'NGram.KANJI_2_35':'\u4F47\u4F91\u4FCE\u4FDF\u527D\u535E\u55DA\u56A5\u5879\u5A11\u5B7A\u5CAB\u5CF4\u5EBE\u5F7F\u5FA8\u601B\u606B\u60B8\u610D\u6134\u619A\u61FA\u6369\u6523\u65CC\u66C4\u6727\u6968\u6A05\u6A48\u6B59\u6BEC\u6D35\u6D38\u6E19\u701F\u7064\u711C\u716C\u71A8\u71E7\u7258\u743A\u746F\u75BD\u75D9\u75F2\u7669\u766C\u76DE\u7729\u77BC\u78EC\u792A\u7A37\u7A62\u7BE6\u7C2A\u7C50\u7D07\u7DD8\u7E5A\u7F8B\u7FD5\u7FF3\u8151\u81CD\u8317\u83F4\u85EA\u85FA\u8823\u895E\u89F4\u8A0C\u8A41\u8AA8\u8ACD\u8B10\u8CC1\u8D05\u8D73\u8E4A\u8E85\u8E91\u8EFB\u8F13\u9087\u914A\u91C9\u923F\u93B0\u9403\u95A8\u95AD\u9730\u9865\u9903\u9945\u9949\u99AD\u99E2\u9A6A\u9D26\u9E1E\u9EDD\u9F2C\u9F72',
'NGram.KANJI_2_36':'\u4E9E\u4F86\u5011\u50B3\u5152\u5169\u5340\u5718\u5B78\u5BE6\u5BF6\u5C07\u5EE3\u61C9\u6230\u6703\u689D\u6A02\u6C23\u7063\u7368\u756B\u7576\u767C\u7A31\u7D93\u7E23\u7E3D\u81FA\u8207\u842C\u85DD\u865F\u8B49\u8B80\u8CFD\u908A\u9435\u95DC\u965D\u9AD4\u9EE8',
'NGram.KANJI_2_37':'\u5480\u5580\u5C39\u67EF\u68B5\u6D85\u8521\u90B1',
'NGram.KANJI_2_38':'\u4E1F\u4F96\u4FE0\u50F1\u5118\u522A\u5291\u52C1\u52DB\u52F3\u52F5\u52F8\u53B2\u55CE\u562F\u580A\u5862\u58AE\u58D8\u58DF\u58E9\u58EF\u5925\u593E\u599D\u5ABD\u5C62\u5EC2\u5EDA\u5EE2\u5F4E\u5F65\u6085\u6158\u61FC\u6200\u62CB\u633E\u6416\u6436\u6490\u64CB\u64E0\u64FA\u6514\u651C\u6524\u6558\u6583\u66B1\u66C6\u66C9\u66E0\u6A11\u6A1E\u6A38\u6A62\u6AB3\u6B16\u6B98\u6BBC\u6C2B\u6DDA\u6DE8\u6DEA\u6DFA\u6EEF\u6EFE\u6F32\u6F51\u6F5B\u700F\u71D2\u7210\u7246\u7260\u72A7\u72F9\u7375\u7378\u758A\u760B\u76DC\u76EA\u77DA\u77FD\u78DA\u7919\u797F\u79AA\u7A05\u7A4C\u7ACA\u7C72\u7D81\u7DDD\u7E31\u7E69\u7E6B\u7E73\u7E96\u7E9C\u81BD\u81C9\u81DF\u8259\u8277\u8396\u83A7\u8523\u8525\u860A\u863F\u8667\u87A2\u87F2\u881F\u883B\u89F8\u8B20\u8B74\u8B9A\u8C4E\u8C6C\u8C93\u8CEC\u8D0A\u8D0F\u8D95\u8E10\u8F4E\u8FAF\u8FF4\u905E\u9072\u9081\u908F\u91AC\u91C0\u91C1\u91D0\u921E\u9223\u9245\u929C\u92B3\u92C1\u9336\u934A\u93C8\u9444\u9452\u947C\u947F\u9592\u95B1\u95C6\u95D6\u95E1\u95E2\u96DE\u9742\u978F\u984F\u9871\u98B1\u98C4\u99ED\u9A37\u9A45\u9A5F\u9AEE\u9B27\u9BCA\u9C77\u9D51\u9D5D\u9E79\u9E7C\u9E7D\u9EB5\u9EBC\u9F61\u9F63\u9F90\u9F9C',
'NGram.KANJI_3_1':'\u5283\u7562\u7DEC\u88E1\u8F2F',
'NGram.KANJI_3_2':'\u5009\u502B\u5049\u5075\u507D\u5091\u5098\u50B5\u50B7\u50BE\u5100\u5104\u511F\u518A\u525B\u5289\u5442\u5805\u589C\u58C7\u5922\u596A\u5A66\u5B6B\u5BE7\u5BE9\u5DBA\u5E63\u5E7E\u5FB9\u6163\u616E\u6176\u61B2\u61B6\u61F8\u639B\u63DA\u63EE\u640D\u64B2\u64C1\u64EC\u6557\u6575\u6607\u66AB\u68C4\u6A39\u6C96\u6CC1\u6E1B\u6E6F\u6E9D\u6EC5\u6F01\u6F64\u6FC3\u7058\u707D\u7344\u7642\u76E4\u7832\u790E\u7B46\u7D05\u7D0B\u7D14\u7D19\u7D1B\u7D39\u7D61\u7DB1\u7DCA\u7DD2\u7DE0\u7DE9\u7DEF\u7DF4\u7E2E\u7E3E\u8105\u8108\u81E8\u8266\u84CB\u84EE\u85A9\u885D\u88DC\u8972\u8A02\u8A0E\u8A13\u8A17\u8A2A\u8A34\u8A3A\u8A3C\u8A69\u8A73\u8A95\u8AA0\u8AA4\u8AB2\u8AC7\u8ACB\u8B00\u8B1B\u8B1D\u8B5C\u8C9D\u8C9E\u8CA2\u8CA8\u8CA9\u8CAB\u8CAC\u8CB7\u8CBF\u8CC0\u8CDE\u8CE2\u8CFC\u8D08\u8DE1\u8E8D\u8ECC\u8EDF\u8EF8\u8F14\u8F1D\u8F2A\u8F44\u9055\u9069\u9077\u907C\u90F5\u91DD\u9285\u92FC\u9326\u932F\u9375\u9396\u93AE\u93E1\u9451\u9589\u95A3\u9663\u9670\u9673\u96BB\u9801\u9802\u9803\u9806\u9808\u9810\u983B\u984D\u9858\u9867\u98EF\u98F2\u98FE\u990A\u99D0\u9A0E\u9A5A\u9B5A\u9CE5\u9DB4\u9E97\u9F8D',
'NGram.KANJI_3_3':'\u543E\u5BEE\u5F18\u6590\u725F\u83C5\u85E9\u9E93',
'NGram.KANJI_3_4':'\u5016\u53AD\u5606\u5629\u58BE\u5F14\u6065\u6144\u646F\u647A\u67F5\u6953\u6C3E\u6F2C\u6F97\u6FB1\u7169\u71E6\u71ED\u74BD\u79BF\u7A1C\u7A4E\u7AAF\u7CDE\u7D17\u7D43\u7E55\u7FA8\u807E\u8139\u8490\u8569\u856A\u87FB\u8A23\u8AB9\u8AE6\u8AFA\u8B2C\u8CD1\u91D8\u92F8\u9318\u96DB\u99B4\u9BC9\u9C2D\u9CF6\u9D61\u9DFA',
'NGram.KANJI_3_5':'\u4E26\u4F75\u4FC2\u500B\u5074\u5099\u512A\u5225\u5247\u5275\u5287\u52D5\u52D9\u52DD\u52E2\u5354\u54E1\u554F\u5712\u57F7\u5831\u5834\u5BAE\u5C0E\u5C64\u5CA1\u5CF6\u5E2B\u5E79\u5EAB\u5F35\u5F37\u5F8C\u5FA9\u611B\u614B\u63A1\u63DB\u6642\u66F8\u6771\u696D\u6975\u69CB\u6A19\u6A4B\u6A5F\u6BBA\u6C7A\u6E2C\u6E96\u6F22\u70BA\u7121\u71B1\u7372\u73FE\u74B0\u7570\u76E3\u78BA\u7A2E\u7A4D\u7AF6\u7BC0\u7BC4\u7BC9\u7C21\u7D00\u7D04\u7D0D\u7D1A\u7D30\u7D42\u7D44\u7D50\u7D66\u7D71\u7DAD\u7DDA\u7DE8\u7E54\u7F85\u7FA9\u7FD2\u8056\u805E\u8077\u8208\u83EF\u8449\u8853\u885B\u88FD\u8907\u898B\u898F\u8996\u89AA\u8A08\u8A18\u8A2D\u8A31\u8A55\u8A5E\u8A66\u8A71\u8A72\u8A8C\u8A8D\u8A9E\u8ABF\u8AD6\u8AF8\u8B58\u8B70\u8B77\u8CA0\u8CA1\u8CB4\u8CBB\u8CC7\u8CEA\u8ECA\u8ECD\u8F03\u8F09\u8F38\u8FB2\u9023\u9031\u9032\u904A\u904B\u904E\u9054\u9060\u9078\u907A\u9084\u9280\u9577\u9580\u958B\u9593\u9678\u967D\u968A\u968E\u969B\u96E2\u96E3\u96F2\u96FB\u97D3\u97FF\u9805\u9818\u982D\u984C\u985E\u98A8\u98DB\u9928\u99AC\u9BAE',
'NGram.KANJI_3_8':'\u5F6B\u6C4E\u7B87\u8A70',
'NGram.KANJI_3_9':'\u540B\u5B5C\u826E',
'NGram.KANJI_3_11':'\u4F83\u4FF8\u51CB\u52BE\u53F1\u548B\u558B\u5CB1\u5D69\u5F3C\u620E\u621F\u64E2\u67DA\u6854\u69CC\u6A35\u6C8C\u6E1A\u6F15\u6FE0\u717D\u7252\u7AFA\u82D3\u83DF\u8431\u9041\u9149\u9798',
'NGram.KANJI_3_12':'\u4ED5\u55E3\u572D\u57A3\u587E\u5983\u5A9B\u5C90\u5E61\u672D\u6960\u6F5F\u72D9\u72E9\u757F\u7949\u7950\u7E82\u7FCC\u82B8\u90B8\u91DC\u961C\u9B45',
'NGram.KANJI_3_13':'\u55AB\u6249\u643E\u6841\u68B1\u725D\u7B8B\u7C95\u7E1E\u7F36\u8A03\u8A6B\u8E74\u95A4',
'NGram.KANJI_3_15':'\u50AD\u50D1\u5132\u51F1\u55AC\u5617\u5687\u584A\u59EA\u5B30\u5BF5\u5C0B\u5C4D\u5EDF\u6182\u61A4\u64AB\u64FE\u66A2\u6897\u694A\u69CD\u6B3D\u6BC0\u6D29\u6F38\u7015\u7149\u71C8\u723A\u7336\u7345\u755D\u76C3\u78A9\u798D\u7AAE\u7DFB\u7E2B\u7F75\u7F77\u81E5\u834A\u852D\u85CD\u8755\u8A3B\u8A54\u8AE7\u8B02\u8B39\u8CAA\u8CE6\u8DA8\u8E5F\u8F5F\u905C\u912D\u919C\u92D2\u932B\u937E\u9418\u9583\u9812\u985B\u9905\u99B3\u99C1\u99D5\u9A30\u9CF3\u9D3B\u9D6C',
'NGram.KANJI_3_16':'\u6D6C\u72FD\u77A5\u8956\u9C0D',
'NGram.KANJI_3_18':'\u5919\u5F4A\u6063\u63AC\u649A\u6715\u6AD3\u71D0\u758B\u834F\u85F7\u88DF\u8F61\u93D1\u98F4\u9D60',
'NGram.KANJI_3_19':'\u4F50\u7DB2\u962A',
'NGram.KANJI_3_22':'\u5E96\u75D4\u91C6',
'NGram.KANJI_3_23':'\u5E9A\u6C40\u821C\u839E\u8FED\u9EDB',
'NGram.KANJI_3_27':'\u5F01\u66DC',
'NGram.KANJI_3_29':'\u5023\u5208\u531D\u536F\u53E9\u54C9\u598A\u59BE\u5A20\u5D6F\u5DF3\u66C7\u66D6\u66F3\u6775\u6A3D\u6ADB\u6B86\u6C72\u6E25\u73EA\u7435\u760D\u7656\u7825\u78D0\u7A14\u7A6B\u7B20\u7BE0\u7CF8\u7DAC\u7DBB\u7DBE\u80E4\u80F4\u837B\u8466\u8568\u867B\u8A63\u91E7\u9320\u935B\u9591\u965B\u98E2\u990C\u9913\u9BAB',
'NGram.KANJI_3_30':'\u60B6\u8AD2\u8CC2\u9237\u9328\u934D\u9397\u9830',
'NGram.KANJI_3_31':'\u4FB6\u50D5\u51CD\u559A\u55AA\u5674\u5857\u585A\u5875\u58B3\u596E\u59E6\u5A41\u5D50\u5E25\u5E33\u5F59\u61C7\u61F2\u6368\u6383\u65AC\u68DF\u68F2\u6A3A\u6B04\u6DBC\u6DF5\u6E26\u6E4A\u6E67\u6F54\u6F70\u6FC1\u6FEB\u7159\u727D\u7652\u77EF\u78EF\u798E\u7A40\u7AAA\u7BE4\u7C60\u7CE7\u7CFE\u7D21\u7D33\u7D5E\u7D79\u7DB4\u7DBF\u7E1B\u7E8F\u7F70\u814E\u816B\u8178\u819A\u84BC\u85A6\u865C\u8766\u8A1F\u8A50\u8A60\u8A6E\u8A87\u8A98\u8AB0\u8ADC\u8AED\u8AEE\u8B0E\u8B19\u8CA7\u8CAF\u8CB8\u8CBC\u8CC3\u8CC4\u8CCA\u8CDC\u8CE0\u8CED\u8ED2\u8F29\u8F3F\u91E3\u920D\u9234\u925B\u9298\u9310\u934B\u958F\u95A5\u9727\u97FB\u9811\u984E\u98FC\u98FD\u99D2\u99FF\u9B31\u9BE8\u9C57\u9CE9\u9CF4\u9D28\u9DF9',
'NGram.KANJI_3_32':'\u4E1E\u502D\u51A5\u5321\u58EC\u5A3C\u5BC5\u5CE8\u61A9\u620A\u65A1\u6714\u6853\u6893\u6C50\u6C5D\u7436\u745A\u745B\u773A\u7941\u7947\u8543\u865E\u8C5A\u914B\u99A8\u9AB8',
'NGram.KANJI_3_35':'\u4E99\u5BA5\u5DFD\u608C\u60C7\u60DA\u6190\u61A7\u6753\u6777\u6787\u6B4E\u6F23\u6FE1\u6FEF\u7337\u7827\u786F\u7893\u7ABA\u7B94\u7BB8\u7C3E\u7D62\u7E6D\u80B1\u81BF\u81C6\u821B\u82E7\u83F0\u84D1\u86ED\u8888\u8B01\u8B04\u8F4D\u9291\u92E4\u932E\u9354\u936C\u939A\u9957\u9AED\u9BAA\u9BAD\u9BD6\u9BDB\u9C3B\u9D1B',
'NGram.KANJI_3_36':'\u50C5\u53E2\u5EE0\u65BC\u70CF\u723E\u7D10\u7D9C\u806F\u8607\u862D\u8A0A\u8AFE\u8CD3\u9019\u9813\u9B6F',
'NGram.KANJI_3_37':'\u4EA8\u4F3D\u5384\u5EFF\u60DF\u66DD\u6E5B\u8087\u82D1\u8FE6\u9640\u9E9F',
'NGram.KANJI_3_38':'\u5147\u525D\u5678\u617E\u6372\u79A6\u8ABC\u92EA\u9438\u9817',
'NGram.KANJI_4_0':'\u6D3C\u718F\u74EE\u8712',
'NGram.KANJI_4_9':'\u4F84\u54C6\u5565\u68F1\u6D82\u83C7',
'NGram.KANJI_4_10':'\u4FE9\u4FED\u51FF\u523D\u5300\u5364\u538C\u5450\u5455\u545C\u54D1\u54D7\u5578\u56A3\u58F6\u592F\u5CE6\u5D2D\u5E90\u6073\u607C\u60EB\u61D2\u62E2\u62E3\u631A\u6320\u6323\u6361\u63B7\u63B8\u63BA\u6405\u65A9\u65F7\u6619\u6655\u67A3\u67E0\u6805\u6808\u6866\u6868\u6869\u6A71\u6BE1\u6C79\u6CA5\u6CDE\u6DA4\u6DA7\u6DA9\u6E85\u70DB\u70E6\u70EB\u7115\u724D\u7410\u759F\u75AE\u75EA\u75F9\u762B\u763E\u76B1\u77EB\u783E\u79C3\u7A8D\u7A9C\u7B5D\u7BF1\u7EC5\u7ED2\u7EDE\u7EE3\u7EF7\u7EF8\u7EFD\u7F00\u7F0E\u7F15\u7F1A\u7F20\u7F24\u7F28\u7FA1\u7FD8\u8038\u803B\u804B\u80AE\u817B\u82C7\u8327\u835E\u8367\u83BA\u8424\u864F\u8681\u8682\u8715\u8717\u8721\u8747\u874E\u8845\u886C\u889C\u88E4\u89C5\u8BB6\u8BB9\u8BC0\u8BC5\u8BE1\u8BEB\u8BEC\u8BF5\u8C0E\u8C1A\u8D2E\u8D31\u8D43\u8D4E\u8D58\u8F67\u8F7F\u9489\u9499\u949D\u94A0\u94A5\u94AE\u94BE\u94D0\u94DB\u94F2\u9508\u950C\u951A\u9525\u952D\u952F\u9530\u953B\u9540\u9550\u9570\u9576\u95F0\u960E\u9668\u96CF\u97E7\u9885\u988A\u98A4\u9965\u9975\u997A\u997F\u9985\u998D\u998F\u9A6E\u9A6F\u9A74\u9A79\u9A7C\u9A82\u9A87\u9CA4\u9CC4\u9CCD\u9CD6\u9E20\u9E25\u9E35\u9E3D\u9E45\u9E49\u9E4A\u9E66',
'NGram.KANJI_4_16':'\u576F\u579B\u6345\u78B4\u79EB\u79F8',
'NGram.KANJI_4_17':'\u4E13\u4E1A\u4E1C\u4E24\u4E25\u4E2A\u4E3E\u4E49\u4E50\u4E66\u4E9A\u4EA7\u4EBF\u4ECE\u4EEC\u4EF7\u4F17\u4F20\u5170\u5173\u519B\u51B3\u51E4\u51FB\u5219\u521B\u522B\u529E\u52A1\u52A8\u52BF\u534F\u5355\u536B\u5386\u53BF\u53D1\u53D8\u542F\u5458\u54CD\u56E2\u56ED\u56F4\u56FE\u573A\u5904\u590D\u5934\u5B81\u5B9E\u5BF9\u5BFC\u5C14\u5C9B\u5E26\u5E7F\u5E94\u5F00\u5F20\u5F3A\u603B\u6218\u65E0\u65F6\u663E\u672F\u6743\u6784\u6807\u6C14\u6C49\u707E\u70ED\u73AF\u73B0\u7535\u76D1\u786E\u79CD\u79EF\u7B80\u7C7B\u7EA2\u7EA6\u7EA7\u7EAA\u7EBF\u7EC4\u7EC7\u7ED3\u7EDF\u7EE7\u7EED\u7EF4\u7F16\u7F57\u804C\u8054\u817E\u8282\u82CF\u83B7\u8425\u89C1\u89C2\u89C4\u89C6\u8BA1\u8BA4\u8BAE\u8BAF\u8BB0\u8BB8\u8BBA\u8BBE\u8BC1\u8BC4\u8BD1\u8BDD\u8BE5\u8BED\u8BF4\u8C03\u8D22\u8D23\u8D28\u8D39\u8D44\u8D5B\u8F66\u8F6C\u8F83\u8FBE\u8FC7\u8FD0\u8FD8\u8FD9\u8FDB\u8FDE\u9009\u94C1\u957F\u95E8\u95EE\u95F4\u95FB\u961F\u9633\u9645\u9646\u96BE\u9879\u9884\u9886\u9898\u98CE\u9A6C\u9F99',
'NGram.KANJI_4_18':'\u51DB\u67B7',
'NGram.KANJI_4_22':'\u4FA5\u545B\u5499\u5520\u5570\u56F1\u5A76\u5C96\u60AF\u60ED\u618B\u61A8\u62A0\u62A1\u62E7\u6363\u6390\u63B0\u6400\u6402\u6512\u6748\u70C1\u732C\u765E\u7663\u76CF\u7741\u781A\u7980\u79C6\u79FD\u7AA5\u7B0B\u7B8D\u7BA9\u7BAB\u7BD3\u7CAA\u7EAB\u7ECA\u7EE2\u7F2D\u7F30\u8110\u8113\u81CA\u835A\u8360\u84D6\u852B\u87E5\u8869\u8A8A\u8BA5\u8BF2\u8C05\u8C12\u8D30\u8D4A\u8D61\u8DF7\u8E6D\u8E8F\u8F95\u8F99\u8FAB\u94B3\u94C6\u94E3\u9504\u954A\u9563\u95FA\u9893\u9981\u9992\u9AA1\u9CAB\u9E2F\u9E33\u9EB8',
'NGram.KANJI_4_24':'\u4E22\u4E8F\u4F1E\u4FA3\u5151\u517D\u51BB\u51D1\u5220\u529D\u52CB\u5367\u5389\u5395\u53E0\u53F9\u5413\u548F\u5524\u575E\u575F\u5784\u5792\u57A6\u57AB\u58F3\u5986\u5988\u5A04\u5A07\u5BA0\u5C18\u5C82\u5DE9\u5E10\u5E1C\u5F2F\u60E9\u6124\u629B\u6321\u6324\u635E\u63FD\u6401\u644A\u6491\u655B\u658B\u6635\u67AB\u67DC\u680B\u692D\u6984\u6A31\u6B7C\u6BD9\u6C22\u6CA6\u6CA7\u6CEA\u6CFB\u6CFC\u6D46\u6D47\u6D4A\u6D51\u6DA1\u6E0A\u6E83\u6EE4\u6EE5\u6F9C\u6FD2\u70C2\u7237\u727A\u730E\u7574\u75AF\u7792\u7816\u7845\u78B1\u7A77\u7A91\u7A9D\u7AD6\u7B3C\u7B5B\u7CAE\u7EA4\u7EB1\u7EBA\u7ECE\u7ED1\u7EF0\u7EF3\u7F14\u7F1D\u7F34\u7F62\u8042\u806A\u80A0\u80A4\u80BE\u80BF\u80C0\u810F\u8138\u8231\u8270\u829C\u82CD\u8350\u83B9\u841D\u8574\u8680\u8BB3\u8BBC\u8BBD\u8BC8\u8BF1\u8BFD\u8C0A\u8C0D\u8C1C\u8C24\u8C26\u8C2C\u8C2D\u8C34\u8D1E\u8D2C\u8D3C\u8D41\u8D42\u8D4C\u8D50\u8D5A\u8F69\u8F88\u8F90\u8FA9\u915D\u9171\u9493\u949E\u94A7\u94A9\u94BB\u94C3\u94C5\u94DD\u94F8\u9505\u9510\u9523\u9524\u95EF\u95F7\u95F9\u9600\u9610\u96F3\u97F5\u987D\u9882\u9888\u9896\u98D8\u9971\u9972\u9976\u997C\u9A84\u9A86\u9A8F\u9A97\u9A9A\u9AA4\u9CB8\u9CDE\u9E26\u9E43\u9E64\u9E70\u9F7F\u9F9F',
'NGram.KANJI_4_28':'\u534E\u62A5\u7ECF\u7F51',
'NGram.KANJI_4_34':'\u4E34\u4E3D\u4E4C\u4E54\u4E60\u4E61\u4E70\u4EB2\u4EC5\u4EEA\u4F18\u4F1F\u4F24\u4F26\u4FA7\u50A8\u513F\u5174\u517B\u518C\u519C\u51B5\u51CF\u5218\u521A\u5267\u52B3\u5356\u5382\u5385\u538B\u53A6\u5434\u5706\u5723\u5757\u575A\u575B\u575D\u5907\u591F\u593A\u5956\u5B59\u5BA1\u5BAB\u5BBD\u5BBE\u5BFB\u5C42\u5C81\u5E01\u5E08\u5E86\u5E93\u5F02\u5F39\u5F52\u5F55\u5F7B\u6000\u6001\u6076\u620F\u6237\u6267\u6269\u626C\u62A2\u62A4\u62DF\u62E5\u62E9\u6325\u635F\u6362\u6444\u6653\u6682\u6740\u6742\u6768\u6781\u6811\u6837\u6865\u68C0\u6B22\u6BC1\u6BD5\u6C47\u6C9F\u6CAA\u6CFD\u6D4B\u6DA8\u6E10\u6EE1\u6EE8\u706D\u7075\u70DF\u7231\u739B\u7597\u76D6\u76D8\u77FF\u7801\u7840\u79BB\u7A33\u7ADE\u7B14\u7B7E\u7CA4\u7D27\u7EB3\u7EBD\u7EC3\u7EC6\u7EC8\u7ECD\u7ED5\u7ED9\u7EDC\u7EDD\u7EE9\u7EFC\u7EFF\u7F13\u7F29\u8083\u80DC\u8111\u814A\u8230\u827A\u8363\u836F\u8428\u84DD\u867D\u8865\u88AD\u89C8\u8BA2\u8BA8\u8BA9\u8BAD\u8BB2\u8BBF\u8BC6\u8BCD\u8BD5\u8BEF\u8BF7\u8BF8\u8BFA\u8BFB\u8C08\u8D1D\u8D1F\u8D21\u8D25\u8D27\u8D2D\u8D2F\u8D35\u8D38\u8DC3\u8F6E\u8F6F\u8F7B\u8F7D\u8F86\u8F91\u8F93\u8F96\u8FB9\u8FBD\u8FC1\u8FDC\u8FDD\u9002\u9057\u90BB\u90D1\u91CA\u9488\u949F\u94A2\u94B1\u94F6\u9500\u9526\u9547\u9614\u9634\u9635\u9636\u9648\u9655\u9669\u9690\u97E9\u9875\u9876\u987A\u987B\u987E\u987F\u9891\u989D\u98DE\u9986\u9A7B\u9A8C\u9C81\u9C9C\u9F50',
'NGram.KANJI_4_39':'\u4E1B\u4E1D\u4E27\u4EA9\u4ED1\u4ED3\u4F2A\u4FA6\u4FA8\u503A\u503E\u507F\u5188\u51AF\u51C0\u51C9\u51ED\u51EF\u5242\u5251\u52B2\u5362\u53A2\u5415\u5417\u5428\u55B7\u5760\u5899\u5939\u594B\u5987\u5A31\u5A74\u5BAA\u5C1D\u5C7F\u5C97\u5CAD\u5E05\u5E2E\u5E99\u5E9E\u5E9F\u5F03\u5FC6\u5FE7\u60AC\u60CA\u60EF\u626B\u6270\u629A\u62E6\u62E8\u6446\u6447\u654C\u67AA\u680F\u6863\u68A6\u6C64\u6D01\u6D53\u6D9D\u6DA6\u6E14\u6E17\u6EDA\u6EE9\u707F\u70BC\u70E7\u7275\u72B9\u72EE\u72F1\u743C\u7545\u76D0\u7855\u7978\u7B79\u7BEE\u7EA0\u7EAC\u7EAF\u7EB2\u7EB5\u7EB7\u7EB8\u7EB9\u7ED8\u7EEA\u7EF5\u7F05\u7F06\u7F18\u7F5A\u80C1\u80F6\u8109\u8206\u8273\u82F9\u8346\u8361\u83B2\u8427\u8651\u867E\u8854\u89C9\u8BC9\u8BCA\u8BD7\u8BDA\u8BDE\u8BE2\u8BE6\u8BFE\u8C01\u8C0B\u8C10\u8C13\u8C22\u8C23\u8C28\u8C31\u8D24\u8D26\u8D29\u8D2A\u8D2B\u8D34\u8D37\u8D3A\u8D3E\u8D3F\u8D4B\u8D4F\u8D54\u8D56\u8D5E\u8D60\u8D62\u8D75\u8D76\u8D8B\u8F68\u8F70\u8F74\u8F85\u8F89\u8FC8\u8FDF\u900A\u9012\u903B\u9093\u90AE\u917F\u9274\u94A6\u94DC\u94ED\u94FA\u94FE\u9501\u950B\u9519\u9521\u952E\u955C\u95EA\u95ED\u95F2\u95F8\u95FD\u9601\u9605\u9647\u96B6\u96FE\u9877\u9881\u9887\u9897\u989C\u98A0\u996D\u996E\u9970\u9A70\u9A71\u9A73\u9A76\u9A7E\u9A91\u9C7C\u9E1F\u9E21\u9E23\u9E2D\u9E3F\u9E4F\u9F84',
'NGram.KANJI_5_10':'\u5239\u8EAF',
'NGram.KANJI_5_11':'\u51C4\u8471',
'NGram.KANJI_5_12':'\u6DC0\u7C98',
'NGram.KANJI_5_13':'\u5631\u5815\u8695',
'NGram.KANJI_5_14':'\u4E71\u4FA0\u5265\u52B1\u5374\u53A8\u53D9\u58EE\u5BDD\u5BFF\u5C3D\u5C4A\u5CE1\u5F25\u5F84\u604B\u60A6\u60E7\u60E8\u631F\u636E\u643A\u663C\u664B\u67A2\u6816\u697C\u6B8B\u6BB4\u6D45\u6E7F\u6EDE\u6F5C\u706F\u7089\u72ED\u732A\u732B\u76D7\u793C\u7977\u7A0E\u7A83\u80C6\u811A\u8131\u82A6\u830E\u848B\u865A\u866B\u86EE\u89E6\u8A89\u8DF5\u8E0A\u8E2A\u8F9E\u9065\u968F\u9759\u9EA6',
'NGram.KANJI_5_18':'\u601C\u75D2',
'NGram.KANJI_5_26':'\u4E07\u4E0E\u4E89\u4F1A\u4F53\u515A\u5185\u5199\u533A\u533B\u53C2\u53CC\u53F7\u58F0\u5965\u5B66\u5B9D\u5C06\u5C5E\u5F53\u62C5\u6570\u65AD\u65E7\u6761\u6765\u6A2A\u6B27\u6CA1\u6E29\u6E7E\u70B9\u72B6\u72EC\u732E\u753B\u79F0\u88C5\u9EC4',
'NGram.KANJI_5_29':'\u693F\u82EB',
'NGram.KANJI_5_34':'\u53F6\u6D9B\u83B1',
'NGram.KANJI_5_39':'\u5C61\u788D',
'NGram.KANJI_6_0':'\u4E10\u4E52\u4EC6\u4F88\u4FD0\u51F3\u533E\u53ED\u53EE\u5406\u541D\u5429\u5435\u5440\u5490\u5495\u54B1\u54C4\u54FC\u557C\u55D3\u5669\u56E4\u5777\u5992\u59E8\u5B7D\u5BDE\u5BE5\u5C79\u5C94\u5DCD\u5E18\u5E1A\u5E54\u5FF1\u604D\u6064\u60F6\u6127\u6177\u6233\u6252\u625B\u6273\u6296\u62C2\u62C7\u62F4\u638F\u6396\u63E3\u63EA\u6413\u6479\u64A9\u64C2\u659F\u667E\u6760\u6845\u6963\u6A90\u6B83\u6C13\u6C5E\u6D8E\u6D95\u6DCC\u6ED4\u6F13\u6F3E\u6FA1\u7076\u70D8\u710A\u71CE\u7239\u72E1\u73B7\u7599\u759A\u75A4\u75CA\u7629\u7682\u76C5\u76EF\u778E\u77AA\u787C\u7889\u788C\u78BE\u79E7\u7A96\u7A98\u7B77\u7C7D\u7CB1\u7D0A\u7D6E\u7F94\u7FCE\u8116\u814B\u814C\u819B\u828D\u82DF\u8301\u83E0\u85D5\u8611\u86A3\u8708\u8822\u8C4C\u8DB4\u8DEA\u8E42\u8E66\u8E72\u8EBA\u901B\u9157\u970E\u97ED',
'NGram.KANJI_6_3':'\u62FC\u88D4\u9B4F',
'NGram.KANJI_6_9':'\u4ED7\u4F63\u4FCF\u5018\u50BB\u50F5\u5154\u5201\u522E\u5254\u527F\u5306\u5462\u5492\u5496\u54A8\u54AA\u554A\u5561\u5564\u5566\u5885\u5938\u5AC2\u5AE9\u5CED\u5F64\u6084\u608D\u60A8\u60D5\u61C2\u61C8\u6254\u626F\u62AC\u6346\u634D\u640F\u6454\u6487\u6495\u64D2\u6746\u6789\u68B3\u68F5\u695E\u6986\u6995\u69A8\u6A44\u6AAC\u6B79\u6C28\u6C2E\u6CF5\u6DE4\u6E34\u6E3A\u6E89\u6F29\u70AB\u70AC\u7130\u715E\u7184\u71AC\u7238\u7281\u72E0\u74E3\u74F7\u7529\u7578\u761F\u7626\u76D4\u775B\u7779\u7784\u77BB\u780C\u780D\u7838\u7898\u78C5\u78F7\u7AED\u7B28\u7BE1\u7C07\u7CD5\u7CD9\u7CEF\u7F38\u800D\u8084\u809A\u8165\u816E\u832B\u8334\u840D\u8774\u886B\u888D\u88D9\u88F9\u8C41\u8D81\u8D9F\u8E22\u8E29\u8EB2\u8F9C\u9165\u918B\u9631\u964B\u964C\u9661\u9709\u9739\u9776\u9AD3\u9ED4',
'NGram.KANJI_6_10':'\u4E53\u5582\u5600\u6342\u7B06',
'NGram.KANJI_6_11':'\u5288\u543C\u5475\u5486\u54EE\u5598\u56BC\u5962\u5A36\u5A9A\u5B75\u5BA6\u5C38\u5C4E\u5F8A\u5F98\u627C\u62CC\u62D7\u63C9\u6930\u6954\u69D0\u6BEF\u6C90\u6CBD\u6CBE\u6F31\u6F88\u70D9\u7329\u75BC\u75F0\u7737\u77D7\u7B19\u7FB9\u803F\u80D6\u813E\u81C0\u8205\u8309\u83BD\u846B\u8517\u868C\u8759\u8815\u8859\u8B6C\u8E81\u8EAC\u90A2\u9698\u9B44',
'NGram.KANJI_6_12':'\u722C\u7FD4',
'NGram.KANJI_6_16':'\u5228\u5315\u542E\u54CE\u5509\u5527\u5543\u55B3\u55E1\u5636\u568E\u5FFF\u61E6\u6376\u642A\u6726\u74E4\u76F9\u7736\u7BD9\u8019\u80F0\u80F3\u812F\u818A\u8200\u8214\u8638\u869C\u86C0\u86C6\u86D4\u87C6\u88B1\u8902\u8C7A\u8E4B\u9119',
'NGram.KANJI_6_18':'\u67D2\u6ED3\u87C0\u87CB\u8DDB\u901E\u9163',
'NGram.KANJI_6_20':'\u4F5B\u52D2\u54C8\u62FF\u66FC\u6D59\u704C\u7586\u9ECE',
'NGram.KANJI_6_21':'\u4E48\u4EFF\u4F19\u4FF1\u5021\u5077\u5195\u5212\u5269\u5401\u541E\u5427\u54EA\u5587\u558A\u55BB\u566A\u573E\u574E\u5783\u57AE\u584C\u58E4\u5960\u5976\u59CA\u5A1C\u5DE2\u5F99\u600E\u6015\u6263\u626D\u6293\u62C6\u62D6\u62EF\u62F1\u6316\u632A\u6380\u6389\u63D2\u641E\u64C5\u64CE\u65F1\u6664\u6735\u6770\u67EC\u6846\u684C\u68AD\u6B47\u6B49\u6B67\u6C1B\u6C27\u6C2F\u6C5B\u6C89\u6DF9\u6EAF\u70AE\u70E4\u731C\u7334\u73BB\u7470\u76FC\u788E\u789F\u78B0\u78B3\u7A0D\u7A3B\u7A57\u7CB9\u7F69\u8335\u8354\u84BF\u8DCC\u8DD1\u904F\u90A8\u9189\u9677\u9738\u978B',
'NGram.KANJI_6_22':'\u5162\u53E8\u542D\u5501\u552C\u5639\u563F\u56B7\u6043\u60B4\u6194\u61CA\u634E\u63CD\u6414\u64AC\u6DAE\u6E43\u6F66\u7095\u7316\u733E\u7728\u7830\u78D5\u7ABF\u7FE9\u8018\u80EF\u8198\u8693\u86AA\u86AF\u874C\u8783\u879F\u8892\u8E6C',
'NGram.KANJI_6_23':'\u4FD8\u4FEF\u501A\u5085\u5180\u526A\u5323\u54ED\u5634\u56CA\u58A9\u58F9\u5955\u5978\u59DA\u5A49\u5B55\u5BC7\u5BE8\u5D4C\u5E62\u6467\u64BC\u6500\u655E\u6572\u658C\u6670\u68CD\u68D5\u68E0\u6912\u6A0A\u6BB7\u6C9B\u6D3D\u6DC6\u6E23\u6F8E\u7011\u7092\u714C\u73AB\u7405\u7624\u76D2\u7960\u79C9\u7A20\u7BF7\u7F50\u804A\u8086\u81C2\u8292\u82DE\u852C\u857E\u859B\u8760\u8C6B\u8DBE\u8E48\u8F9F\u96A7',
'NGram.KANJI_6_25':'\u4E8E\u5DF2\u5FB7\u7AD9',
'NGram.KANJI_6_28':'\u4E58\u4ECD\u4EFD\u4F30\u4F60\u4F69\u503C\u5047\u51B0\u51F0\u5361\u5377\u53E6\u54E5\u552E\u5708\u5740\u5761\u57C3\u5821\u589E\u5979\u59C6\u5B69\u5B83\u5E15\u5E76\u5F17\u5F88\u6208\u622A\u624E\u627E\u62D4\u62DC\u63ED\u641C\u6536\u6548\u65C1\u665A\u6668\u67E5\u6B65\u6BCF\u6C61\u6CDB\u6D4E\u6D89\u6DB5\u6E38\u6EAA\u6FB3\u70B8\u745F\u7538\u7A97\u7F3A\u7F55\u805A\u8258\u827E\u82AC\u8303\u83F2\u8482\u85CF\u8DDF\u903E\u9080\u970D\u9760\u9ED1\u9ED8',
'NGram.KANJI_6_29':'\u634F\u6518\u7B50\u809B',
'NGram.KANJI_6_30':'\u54A7\u57C2\u5AB3\u60CB\u6886\u8378\u85D0\u8671',
'NGram.KANJI_6_32':'\u5080\u5121\u51A4\u54AC\u55DC\u592D\u5DEB\u6292\u68D8\u69B4\u6A59\u6E24\u7FC5\u80DA\u8180\u86DB\u8700\u8DCB\u9761',
'NGram.KANJI_6_34':'\u4E30\u51E0\u542C\u613F',
'NGram.KANJI_6_35':'\u4E56\u547B\u55FD\u5C41\u606C\u6115\u6CAE\u7119\u795F\u7CDC\u86C9\u86F9\u8713\u873B\u8757\u8925\u892A\u96F9',
'NGram.KANJI_6_37':'\u51B2\u5308\u5398\u54B8\u59DC\u5C4F\u5D14\u5F6D\u60E0\u6241\u6350\u699C\u6BEB\u6C6A\u6CC4\u6DEE\u6F58\u6F6D\u7199\u77EE\u7ADF\u8058\u820D\u8212\u8389\u8587\u884D\u8881\u8FA8\u8FF9\u96D5',
'NGram.KANJI_6_39':'\u574F\u6251\u6302',
'NGram.KANJI_7_0':'\u52FA\u5544\u60F0\u6994\u86A4\u86E4',
'NGram.KANJI_7_3':'\u4E59\u4E7E\u4EAD\u4EF0\u4EF2\u4F0F\u4F10\u4FAF\u4FCA\u500D\u501F\u5076\u508D\u50E7\u5112\u5146\u5192\u51AC\u51DD\u51FD\u5200\u5237\u524A\u52A3\u52C3\u52C7\u52DF\u5351\u5352\u5353\u5378\u537F\u53E5\u5439\u54FA\u574A\u5782\u57CB\u5893\u58C1\u5915\u5937\u5949\u5951\u5974\u59B9\u5A18\u5A5A\u5ACC\u5B54\u5B5D\u5B64\u5B8F\u5BBF\u5BD2\u5C3A\u5C6F\u5CB3\u5D07\u5DE7\u5E84\u5E8A\u5F26\u5F69\u5F70\u5F90\u5FAA\u5FCD\u6012\u6016\u602A\u60A0\u60B2\u60BC\u6148\u6162\u6170\u6291\u6298\u62AB\u62BC\u62BD\u62D2\u62D3\u62D8\u62F3\u6311\u638C\u6398\u63E1\u642C\u6458\u64A4\u654F\u656C\u659C\u65E2\u65E8\u65EC\u6606\u6614\u6676\u6691\u6696\u66F9\u6749\u676F\u679A\u679D\u67CF\u67D4\u67F1\u67F3\u67F4\u6817\u6842\u6843\u6851\u68A8\u68CB\u68D2\u6B20\u6B32\u6BBF\u6C57\u6C88\u6CCA\u6D17\u6D1E\u6D69\u6D6E\u6D78\u6DE1\u6DFB\u6E58\u6EB6\u6F0F\u6F20\u7070\u708E\u70AD\u7126\u718A\u71C3\u7267\u72C2\u731B\u7384\u73A9\u73CD\u7434\u75AB\u75DB\u76C6\u76FE\u773C\u7891\u78C1\u795D\u7965\u79D2\u79DF\u79E6\u7A00\u7B11\u7B51\u7B54\u7C89\u7C92\u7CD6\u7D2B\u7F8A\u7FBD\u7FFC\u8010\u80A5\u80CE\u8150\u8179\u819C\u8247\u829D\u82B3\u82D7\u82E6\u8302\u8336\u8352\u83CA\u83CC\u83DC\u845B\u846C\u84B2\u84B8\u84C4\u8584\u864E\u86C7\u8861\u8863\u8870\u888B\u8896\u88D5\u8986\u8C46\u8DA3\u8E0F\u8F9B\u8FC5\u8FEB\u8FF7\u9003\u9006\u902E\u9042\u9063\u90ED\u963B\u9676\u96EA\u9756\u9B3C\u9B42\u9F3B',
'NGram.KANJI_7_6':'\u4E01\u4E03\u4E45\u4E5D\u4E88\u4E92\u4EA1\u4ECB\u4EE4\u4F01\u4F0A\u4F2F\u4F3C\u4F4E\u4F4F\u4F55\u4F8B\u4F9D\u4FBF\u4FEE\u505C\u50CF\u516B\u516D\u5175\u5177\u5178\u5207\u520A\u5224\u526F\u529F\u52A9\u5343\u5348\u535A\u5370\u53BB\u53CB\u53F3\u5409\u542B\u544A\u547C\u5584\u5747\u5802\u590F\u592B\u5931\u5947\u597D\u5A01\u5A92\u5B63\u5B8C\u5B97\u5BA2\u5BA3\u5BA4\u5BB3\u5BB9\u5BC6\u5BCC\u5BDF\u5C04\u5C1A\u5C45\u5C4B\u5CB8\u5DE6\u5E0C\u5E1D\u5E2D\u5E55\u5E8F\u5E95\u5E97\u5EA7\u5EB7\u5EF6\u5F8B\u5FAE\u5FC5\u5FD7\u5FF5\u601D\u6025\u606F\u60F3\u611F\u623F\u6253\u6279\u627F\u6295\u6297\u62EC\u6388\u6392\u63F4\u6545\u6551\u6574\u6599\u65C5\u65E9\u6613\u6620\u6625\u666E\u666F\u66B4\u66F4\u670D\u671B\u6728\u672B\u6751\u677E\u67B6\u6838\u6839\u6848\u68EE\u690D\u6982\u6A21\u6B4C\u6B62\u6B66\u6BB5\u6BCD\u6C0F\u6C38\u6C42\u6CBF\u6CE2\u6CE8\u6D0B\u6D3E\u6D88\u6DF1\u6E05\u6E56\u706B\u7167\u7206\u7236\u7247\u7387\u7530\u7537\u7559\u7565\u7591\u75C5\u767B\u767D\u767E\u7687\u76DB\u76DF\u771F\u7763\u77ED\u7834\u79FB\u7A81\u7AE0\u7AEF\u7B56\u7B97\u7C4D\u7CBE\u7D20\u7D22\u7F72\u7FA4\u8001\u8003\u81F4\u822A\u826F\u82B1\u8349\u843D\u878D\u8857\u89D2\u8B66\u8C37\u8D70\u8D85\u8D8A\u8DB3\u8FF0\u8FFD\u9001\u901F\u90A3\u90A6\u914D\u91CE\u9632\u963F\u9644\u964D\u9664\u96C4\u96E8\u9752\u9769\u98DF',
'NGram.KANJI_7_7':'\u4E09\u4E0A\u4E0B\u4E0D\u4E16\u4E3B\u4E8B\u4E8C\u4EE3\u4EE5\u4F4D\u4F5C\u4F7F\u5165\u5168\u516C\u5171\u51FA\u5206\u5229\u5236\u524D\u529B\u52A0\u5316\u5317\u5357\u539F\u53CA\u53F0\u5408\u540C\u540D\u548C\u5730\u57FA\u5916\u591A\u5929\u5B50\u5B9A\u5BB6\u5C0F\u5C71\u5DDE\u5DE5\u5E02\u5E73\u5EA6\u5EFA\u5F0F\u6027\u6210\u6240\u6307\u653F\u6587\u65B0\u65B9\u660E\u6700\u6709\u671F\u672C\u6B21\u6B63\u6C11\u6CBB\u6CD5\u6D77\u7269\u7279\u7406\u751F\u7528\u7531\u754C\u76EE\u76F8\u793E\u79D1\u7ACB\u7B2C\u7B49\u7CFB\u8005\u80FD\u81EA\u82F1\u884C\u8868\u897F\u8981\u901A\u9053\u90E8\u90FD\u91CD\u9AD8',
'NGram.KANJI_7_9':'\u4E4D\u4F36\u5319\u6A61\u6DCB\u7194',
'NGram.KANJI_7_11':'\u4E5E\u4F43\u5026\u50FB\u515C\u5243\u5420\u5446\u54B3\u54BD\u553E\u55A7\u5703\u5984\u5AC9\u5B09\u5C51\u5DFE\u5ED3\u5F1B\u6055\u618E\u62D9\u65A7\u6652\u6977\u6EBA\u707C\u75D8\u79E4\u7AFF\u7B4F\u7CA5\u808B\u8098\u80B4\u8235\u82DB\u849C\u8549\u868A\u86FE\u8718\u914C',
'NGram.KANJI_7_12':'\u4E08\u4E38\u4F8D\u50DA\u5203\u5256\u52C9\u52D8\u52FE\u5320\u533F\u5375\u53D4\u540F\u54E8\u56DA\u5806\u5996\u5999\u59A5\u59A8\u59FF\u5AE1\u5BB0\u5BF8\u5C09\u5C3F\u5C48\u5C65\u5D29\u5E06\u5E4C\u5EB5\u5EB6\u5EB8\u5F13\u5FCC\u5FD8\u6052\u606D\u609F\u60D1\u614E\u6247\u62B1\u6349\u64E6\u6577\u65ED\u6674\u6734\u67C4\u6850\u690E\u6A58\u6B3A\u6B89\u6C41\u6CBC\u6CCC\u6CF3\u6D74\u6DAF\u6DF3\u6ECB\u6F02\u6F84\u71E5\u7261\u7272\u72AC\u72FC\u733F\u7409\u755C\u76F2\u7720\u77AC\u77E2\u7802\u786B\u78E8\u7901\u7948\u79E9\u7A1A\u7A74\u7AE3\u7B4B\u7B52\u7BB1\u7C3F\u8015\u8096\u809D\u80A2\u80A9\u80AA\u80BA\u80F8\u8102\u810A\u8154\u8155\u8170\u817A\u81A8\u81ED\u820C\u8236\u82BD\u8305\u83E9\u83F1\u840C\u85FB\u8650\u8702\u8A93\u8E44\u8FB0\u9038\u9091\u90AA\u916C\u9175\u9177\u9685\u96C0\u96C7\u96CC\u97AD',
'NGram.KANJI_7_13':'\u63D6\u803D',
'NGram.KANJI_7_16':'\u602F\u7566',
'NGram.KANJI_7_18':'\u634C\u7C38',
'NGram.KANJI_7_19':'\u4E18\u4E73\u4E95\u4EAB\u4EC1\u4ED8\u4ED9\u4F11\u4F34\u4F38\u4F59\u4FB5\u4FC3\u4FD7\u5012\u5019\u5065\u50AC\u5144\u5145\u514D\u517C\u51A0\u51B7\u5211\u5238\u523A\u523B\u5272\u52E4\u5360\u5371\u539A\u541B\u5426\u5438\u5473\u54F2\u5510\u552F\u5531\u559C\u5609\u56F0\u56FA\u591C\u5948\u594F\u59BB\u59D3\u5B85\u5B87\u5B88\u5B99\u5B9C\u5BC4\u5BFA\u5C0A\u5C3E\u5CA9\u5D0E\u5DE1\u5DE8\u5DEE\u5DF1\u5E45\u5E78\u5E7B\u5E7C\u5EAD\u5EF7\u5F1F\u5F31\u5F79\u5F7C\u5F85\u5F92\u5FA1\u5FE0\u6050\u60A3\u6212\u62DB\u632F\u6355\u63A2\u63AA\u63CF\u642D\u6469\u64CD\u653B\u6563\u660C\u662D\u667A\u6697\u66FF\u6750\u675F\u677F\u6790\u67D3\u682A\u6885\u68B0\u6B8A\u6B96\u6BDB\u6C60\u6CB9\u6CC9\u6D25\u6D66\u6DB2\u6DF7\u6E21\u6ED1\u6F2B\u6F6E\u6FC0\u7235\u725B\u72AF\u7389\u7532\u7533\u756A\u75BE\u75C7\u76AE\u76CA\u7740\u786C\u7956\u7968\u796D\u7981\u79C0\u79C1\u79CB\u79D8\u7A3F\u7AE5\u7AF9\u7E41\u7F6A\u7FFB\u8089\u80CC\u80DE\u81E3\u821E\u8239\u82E5\u8328\u8377\u85E4\u8840\u88C1\u88C2\u8C6A\u8D64\u8DDD\u8FCE\u8FD4\u9000\u9014\u907F\u90CA\u90CE\u90E1\u9152\u9178\u9686\u9694\u969C\u9707\u9732\u9AA8\u9B54\u9E7F\u9EBB',
'NGram.KANJI_7_20':'\u4E39\u4E43\u4EAE\u4F73\u504F\u505A\u51C6\u51CC\u52AA\u5339\u5347\u53EB\u53EC\u5448\u5766\u57F9\u5854\u585E\u58A8\u5B8B\u5C01\u5CF0\u5E72\u5EC9\u5F80\u5F81\u5FBD\u5FEB\u6069\u6211\u624D\u628A\u62B5\u62CD\u6309\u63A7\u64AD\u6566\u6597\u65CB\u65D7\u6628\u6717\u6731\u674E\u675C\u683D\u6881\u6B3E\u6BD2\u6C7D\u6C99\u6CE5\u6CF0\u6D1B\u6D2A\u70C8\u719F\u724C\u7259\u73E0\u73ED\u745E\u74E6\u7518\u751A\u7686\u770B\u7B26\u8033\u80A1\u80E1\u821F\u83AB\u8499\u8D74\u8DE8\u900F\u9010\u9047\u904D\u906D\u9675\u96C5\u96F6\u96F7\u9700\u9F13',
'NGram.KANJI_7_21':'\u5764\u59D0\u5A03\u6062\u6108\u68C9\u7164\u79BE\u7BAD\u903C',
'NGram.KANJI_7_23':'\u4EA5\u50B2\u532A\u5366\u543B\u54E9\u5632\u59D1\u5BB5\u5DF7\u5F6A\u5F6C\u5FFD\u6070\u6168\u61BE\u63A0\u63A9\u6478\u65A4\u68A7\u6A1F\u6CAB\u70F9\u711A\u723D\u7262\u72F8\u751C\u754F\u75B9\u76C8\u7709\u7897\u7CCA\u7F9E\u8299\u82AD\u82B9\u82D4\u8304\u84C9\u84EC\u854A\u85AF\u86D9\u8FA3\u9187\u97A0',
'NGram.KANJI_7_25':'\u4E14\u4E5F\u4F46\u514B\u5176\u5230\u5373\u53EA\u540E\u5982\u5C3C\u5DF4\u6216\u62C9\u65AF\u66FE\u6B64\u6D32\u6D6A\u7BC7\u800C',
'NGram.KANJI_7_28':'\u4E4E\u4E9B\u4EA6\u4EC0\u4FC4\u5403\u5957\u5C24\u6089\u6258\u67D0\u758F\u7FF0\u8D6B',
'NGram.KANJI_7_29':'\u4FAE\u5944\u5A29\u6101\u62ED\u6328\u637B\u6666\u6687\u66AE\u673D\u6756\u67FF\u6813\u68A2\u699B\u7078\u708A\u7396\u7422\u7525\u75E2\u76BF\u7766\u77B3\u7A3C\u7A92\u819D\u81FC\u8237\u8338\u8511\u88F3\u8FC2',
'NGram.KANJI_7_32':'\u4E11\u4F3A\u4F51\u5197\u51B6\u51F9\u52FF\u541F\u5507\u5589\u5993\u5A7F\u5AC1\u5B9B\u5BC2\u5BE1\u5F04\u5F0A\u5F27\u6020\u6028\u6068\u6094\u6109\u611A\u614C\u621A\u62B9\u62D0\u62F7\u62FE\u632B\u633D\u6367\u660F\u6627\u6643\u66D9\u674F\u6795\u67AF\u67D1\u6876\u68DA\u68FA\u6905\u69FD\u6A80\u6B6A\u6CB8\u6CE3\u6DD1\u6DEB\u6E9C\u6EA2\u6EF4\u6F06\u714E\u716E\u722A\u7280\u74A7\u752B\u75B2\u75D5\u75F4\u77AD\u77E9\u785D\u79BD\u7A3D\u7A9F\u7B1B\u7B95\u7C9F\u7CDF\u80C3\u8106\u817F\u818F\u81B3\u828B\u82A5\u82AF\u840E\u851A\u853D\u8776\u87F9\u8877\u8910\u8912\u8C79\u8D66\u8FB1\u9017\u90C1\u916A\u9699\u96C1\u971C\u9774\u978D',
'NGram.KANJI_7_33':'\u4E4B\u4E86\u4E94\u4EA4\u4EAC\u4ECA\u4ED6\u4EF6\u4EFB\u4F9B\u4FDD\u4FE1\u5143\u5148\u5149\u518D\u5217\u521D\u5305\u5341\u534A\u53C8\u53CD\u53D6\u53D7\u53E3\u53E4\u53EF\u53F2\u53F8\u5404\u5411\u5468\u547D\u54C1\u5546\u5668\u56DB\u56DE\u56E0\u571F\u578B\u57CE\u57DF\u5883\u58EB\u592A\u592E\u5973\u59CB\u59D4\u5B57\u5B58\u5B89\u5B98\u5C11\u5C31\u5C40\u5C55\u5DDD\u5E03\u5E38\u5E9C\u5F15\u5F62\u5F71\u5F97\u5FC3\u60C5\u610F\u624B\u6280\u6301\u63A5\u63A8\u63D0\u652F\u6539\u653E\u6559\u65BD\u65CF\u661F\u66F2\u671D\u672A\u6797\u679C\u6821\u683C\u6B7B\u6BD4\u6C34\u6C5F\u6CB3\u6D3B\u6D41\u6E2F\u6E90\u6F14\u7136\u7248\u738B\u7403\u76F4\u7701\u77E5\u77F3\u7814\u793A\u795E\u798F\u7A0B\u7A76\u7A7A\u7BA1\u7C73\u7F6E\u7F8E\u80B2\u81F3\u822C\u8272\u8457\u88AB\u89E3\u8A00\u8C61\u8D77\u8DEF\u8EAB\u8FD1\u9020\u91CC\u91CF\u91D1\u9650\u9662\u96C6\u975E\u9762\u97F3\u9996\u9999',
'NGram.KANJI_7_35':'\u55C5\u57A2\u58D5\u59E5\u637A\u74E2\u7CE0\u895F',
'NGram.KANJI_7_37':'\u4E19\u4E32\u4E4F\u4E91\u4EC7\u4ED4\u4F0D\u5141\u51E1\u51F6\u51F8\u52AB\u535C\u53C9\u53DB\u540A\u5410\u54C0\u559D\u5750\u5751\u576A\u57E0\u5824\u582A\u5830\u5835\u5851\u5858\u586B\u5954\u59FB\u5A46\u5B5F\u5BB4\u5BD3\u5C16\u5C60\u5CFB\u5D16\u5E16\u5E3D\u5E7D\u5E87\u5ECA\u5FD9\u60DC\u60F9\u6155\u6167\u6234\u626E\u6276\u6284\u633A\u6377\u6492\u649E\u64B0\u6562\u6591\u65A5\u65E6\u65FA\u6602\u670B\u676D\u68AF\u695A\u6B23\u6BC5\u6C70\u6C83\u6CE1\u6D8C\u6DD8\u6E20\u71D5\u72D0\u72D7\u73B2\u73CA\u7433\u7483\u74DC\u74F6\u7554\u764C\u7761\u77DB\u78A7\u7A46\u7A7F\u7A84\u7C97\u7D2F\u7FC1\u7FE0\u8000\u8017\u808C\u80AF\u8404\u8461\u8463\u8475\u8513\u85AA\u8679\u86CB\u871C\u87BA\u88F8\u8C8C\u8DF3\u8FC4\u901D\u9022\u906E\u9075\u9192\u91C7\u966A\u971E\u9910\u9B41\u9F0E\u9F20',
'TO_NORMALIZE_VI_CHARS':'AEIOUYaeiouy\u00c2\u00ca\u00d4\u00e2\u00ea\u00f4\u0102\u0103\u01a0\u01a1\u01af\u01b0',
'DMARK_CLASS':'\u0300\u0301\u0303\u0309\u0323',
'NORMALIZED_VI_CHARS_0300':'\u00C0\u00C8\u00CC\u00D2\u00D9\u1EF2\u00E0\u00E8\u00EC\u00F2\u00F9\u1EF3\u1EA6\u1EC0\u1ED2\u1EA7\u1EC1\u1ED3\u1EB0\u1EB1\u1EDC\u1EDD\u1EEA\u1EEB',
'NORMALIZED_VI_CHARS_0301':'\u00C1\u00C9\u00CD\u00D3\u00DA\u00DD\u00E1\u00E9\u00ED\u00F3\u00FA\u00FD\u1EA4\u1EBE\u1ED0\u1EA5\u1EBF\u1ED1\u1EAE\u1EAF\u1EDA\u1EDB\u1EE8\u1EE9',
'NORMALIZED_VI_CHARS_0303':'\u00C3\u1EBC\u0128\u00D5\u0168\u1EF8\u00E3\u1EBD\u0129\u00F5\u0169\u1EF9\u1EAA\u1EC4\u1ED6\u1EAB\u1EC5\u1ED7\u1EB4\u1EB5\u1EE0\u1EE1\u1EEE\u1EEF',
'NORMALIZED_VI_CHARS_0309':'\u1EA2\u1EBA\u1EC8\u1ECE\u1EE6\u1EF6\u1EA3\u1EBB\u1EC9\u1ECF\u1EE7\u1EF7\u1EA8\u1EC2\u1ED4\u1EA9\u1EC3\u1ED5\u1EB2\u1EB3\u1EDE\u1EDF\u1EEC\u1EED',
'NORMALIZED_VI_CHARS_0323':'\u1EA0\u1EB8\u1ECA\u1ECC\u1EE4\u1EF4\u1EA1\u1EB9\u1ECB\u1ECD\u1EE5\u1EF5\u1EAC\u1EC6\u1ED8\u1EAD\u1EC7\u1ED9\u1EB6\u1EB7\u1EE2\u1EE3\u1EF0\u1EF1'}
class Messages(object):
    """In-memory table of message strings, decoded once from ``mydict``.

    Each raw value in ``mydict`` contains escape sequences; they are
    decoded a single time at construction so lookups are plain dict gets.
    """

    def __init__(self):
        # Decode every raw value up front (``unicode_escape`` turns the
        # literal escape sequences into their actual characters).
        self.messages = {
            key: raw.encode().decode('unicode_escape')
            for key, raw in mydict.items()
        }

    def get_string(self, key):
        """Return the decoded message for *key*, or ``!key!`` when missing."""
        return self.messages.get(key, '!%s!' % key)
# Lazily-built singleton Messages instance shared by get_string().
_messages = None


def get_string(key):
    """Return the decoded message for *key*, creating the cache on first use."""
    global _messages
    cache = _messages
    if cache is None:
        # First call: build and publish the shared Messages table.
        cache = _messages = Messages()
    return cache.get_string(key)
| 286.851613
| 2,020
| 0.818879
| 7,407
| 44,462
| 4.876738
| 0.931011
| 0.034882
| 0.007613
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.446058
| 0.005578
| 44,462
| 154
| 2,021
| 288.714286
| 0.370923
| 0
| 0
| 0
| 0
| 0.496644
| 0.968962
| 0.913769
| 0
| 1
| 0
| 0
| 0
| 1
| 0.020134
| false
| 0
| 0.006711
| 0.006711
| 0.04698
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b9bddd48b7b31b1f48090019239c6902fbdc4d7e
| 39,514
|
py
|
Python
|
openprocurement/planning/api/tests/plan_blanks.py
|
openprocurement/openprocurement.planning.api
|
2cdc0b2d16041a07ad2b807161e4210f765246aa
|
[
"Apache-2.0"
] | 1
|
2016-08-31T15:19:33.000Z
|
2016-08-31T15:19:33.000Z
|
openprocurement/planning/api/tests/plan_blanks.py
|
openprocurement/openprocurement.planning.api
|
2cdc0b2d16041a07ad2b807161e4210f765246aa
|
[
"Apache-2.0"
] | 35
|
2015-12-07T12:23:40.000Z
|
2017-10-26T12:04:34.000Z
|
openprocurement/planning/api/tests/plan_blanks.py
|
openprocurement/openprocurement.planning.api
|
2cdc0b2d16041a07ad2b807161e4210f765246aa
|
[
"Apache-2.0"
] | 12
|
2015-12-07T07:31:15.000Z
|
2018-06-15T14:38:29.000Z
|
# -*- coding: utf-8 -*-
from copy import deepcopy
from openprocurement.api.constants import ROUTE_PREFIX, CPV_ITEMS_CLASS_FROM
from openprocurement.api.utils import get_now
from openprocurement.planning.api.models import Plan
# PlanTest
def simple_add_plan(self):
    """Store a Plan model directly in the database and read it back."""
    plan = Plan(self.initial_data)
    plan.planID = "UA-P-X"
    # A document that has never been stored carries no id or revision.
    assert plan.id is None
    assert plan.rev is None
    plan.store(self.db)
    # Storing assigns both the document id and its revision.
    assert plan.id is not None
    assert plan.rev is not None
    stored = self.db.get(plan.id)
    assert plan.planID == stored['planID']
    assert plan.doc_type == "Plan"
    plan.delete_instance(self.db)
# AccreditationPlanTest
def create_plan_accreditation(self):
    """Only brokers with the proper accreditation level may create plans."""
    # broker3 holds the required accreditation level: creation succeeds.
    self.app.authorization = ('Basic', ('broker3', ''))
    response = self.app.post_json('/plans', {"data": self.initial_data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    # Brokers without the level (and the test-only broker1t) are rejected.
    for forbidden_broker in ('broker2', 'broker4', 'broker1t'):
        self.app.authorization = ('Basic', (forbidden_broker, ''))
        response = self.app.post_json('/plans', {"data": self.initial_data}, status=403)
        self.assertEqual(response.status, '403 Forbidden')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(
            response.json['errors'][0]["description"],
            "Broker Accreditation level does not permit plan creation")
    # broker1t (still authorized from the loop) may create "mode: test" plans.
    response = self.app.post_json('/plans', {"data": self.initial_data_mode_test})
    self.assertEqual(response.status, '201 Created')
# PlanResourceTest
def empty_listing(self):
    """Exercise listing options (jsonp, pretty-print, paging, changes feed)
    against an empty plans collection."""
    # Markers for pretty-printed JSON and a JSONP-wrapped body.
    pretty_chunk = '{\n "'
    jsonp_chunk = 'callback({'

    # Plain listing: empty data, compact JSON, no previous page.
    resp = self.app.get('/plans')
    self.assertEqual(resp.status, '200 OK')
    self.assertEqual(resp.content_type, 'application/json')
    self.assertEqual(resp.json['data'], [])
    self.assertNotIn(pretty_chunk, resp.body)
    self.assertNotIn(jsonp_chunk, resp.body)
    self.assertEqual(resp.json['next_page']['offset'], '')
    self.assertNotIn('prev_page', resp.json)

    # opt_jsonp wraps the payload in the named callback.
    resp = self.app.get('/plans?opt_jsonp=callback')
    self.assertEqual(resp.status, '200 OK')
    self.assertEqual(resp.content_type, 'application/javascript')
    self.assertNotIn(pretty_chunk, resp.body)
    self.assertIn(jsonp_chunk, resp.body)

    # opt_pretty switches to an indented JSON body.
    resp = self.app.get('/plans?opt_pretty=1')
    self.assertEqual(resp.status, '200 OK')
    self.assertEqual(resp.content_type, 'application/json')
    self.assertIn(pretty_chunk, resp.body)
    self.assertNotIn(jsonp_chunk, resp.body)

    # Both options combine.
    resp = self.app.get('/plans?opt_jsonp=callback&opt_pretty=1')
    self.assertEqual(resp.status, '200 OK')
    self.assertEqual(resp.content_type, 'application/javascript')
    self.assertIn(pretty_chunk, resp.body)
    self.assertIn(jsonp_chunk, resp.body)

    # Paging parameters are echoed in next_page/prev_page URIs.
    resp = self.app.get('/plans?offset=2015-01-01T00:00:00+02:00&descending=1&limit=10')
    self.assertEqual(resp.status, '200 OK')
    self.assertEqual(resp.content_type, 'application/json')
    self.assertEqual(resp.json['data'], [])
    self.assertIn('descending=1', resp.json['next_page']['uri'])
    self.assertIn('limit=10', resp.json['next_page']['uri'])
    self.assertNotIn('descending=1', resp.json['prev_page']['uri'])
    self.assertIn('limit=10', resp.json['prev_page']['uri'])

    # Changes feed behaves like the plain listing when empty.
    resp = self.app.get('/plans?feed=changes')
    self.assertEqual(resp.status, '200 OK')
    self.assertEqual(resp.content_type, 'application/json')
    self.assertEqual(resp.json['data'], [])
    self.assertEqual(resp.json['next_page']['offset'], '')
    self.assertNotIn('prev_page', resp.json)

    # An invalid/expired changes-feed offset yields a structured 404 error.
    resp = self.app.get('/plans?feed=changes&offset=0', status=404)
    self.assertEqual(resp.status, '404 Not Found')
    self.assertEqual(resp.content_type, 'application/json')
    self.assertEqual(resp.json['status'], 'error')
    self.assertEqual(resp.json['errors'], [
        {u'description': u'Offset expired/invalid', u'location': u'params', u'name': u'offset'}
    ])

    # Changes feed with paging parameters echoes them the same way.
    resp = self.app.get('/plans?feed=changes&descending=1&limit=10')
    self.assertEqual(resp.status, '200 OK')
    self.assertEqual(resp.content_type, 'application/json')
    self.assertEqual(resp.json['data'], [])
    self.assertIn('descending=1', resp.json['next_page']['uri'])
    self.assertIn('limit=10', resp.json['next_page']['uri'])
    self.assertNotIn('descending=1', resp.json['prev_page']['uri'])
    self.assertIn('limit=10', resp.json['prev_page']['uri'])
def listing(self):
    """Plans listing: creation, consistency polling, offset/limit paging,
    ``opt_fields`` projection, descending order and mode filtering."""
    response = self.app.get('/plans')
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 0)
    plans = []
    for i in range(3):
        # Captured each iteration; after the loop it holds the moment just
        # before the LAST plan was created, so ?offset= should return 1 item.
        offset = get_now().isoformat()
        response = self.app.post_json('/plans', {'data': self.initial_data})
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
        plans.append(response.json['data'])
    ids = ','.join([i['id'] for i in plans])
    while True:
        # The listing view is eventually consistent -- poll until all 3 show.
        response = self.app.get('/plans')
        self.assertEqual(response.status, '200 OK')
        self.assertTrue(ids.startswith(','.join([i['id'] for i in response.json['data']])))
        if len(response.json['data']) == 3:
            break
    self.assertEqual(len(response.json['data']), 3)
    self.assertEqual(','.join([i['id'] for i in response.json['data']]), ids)
    self.assertEqual(set(response.json['data'][0]), set([u'id', u'dateModified']))
    self.assertEqual(set([i['id'] for i in response.json['data']]), set([i['id'] for i in plans]))
    self.assertEqual(set([i['dateModified'] for i in response.json['data']]),
                     set([i['dateModified'] for i in plans]))
    self.assertEqual([i['dateModified'] for i in response.json['data']], sorted([i['dateModified'] for i in plans]))
    # Only the plan created after ``offset`` is returned.
    response = self.app.get('/plans?offset={}'.format(offset))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 1)
    response = self.app.get('/plans?limit=2')
    self.assertEqual(response.status, '200 OK')
    self.assertNotIn('prev_page', response.json)
    self.assertEqual(len(response.json['data']), 2)
    # Walk forward through next_page links: 2 + 1 + 0 items.
    response = self.app.get(response.json['next_page']['path'].replace(ROUTE_PREFIX, ''))
    self.assertEqual(response.status, '200 OK')
    self.assertIn('descending=1', response.json['prev_page']['uri'])
    self.assertEqual(len(response.json['data']), 1)
    response = self.app.get(response.json['next_page']['path'].replace(ROUTE_PREFIX, ''))
    self.assertEqual(response.status, '200 OK')
    self.assertIn('descending=1', response.json['prev_page']['uri'])
    self.assertEqual(len(response.json['data']), 0)
    # ``opt_fields`` adds the requested extra fields to every listing item
    # and is propagated in the next_page URI.
    response = self.app.get('/plans', params=[('opt_fields', 'budget')])
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 3)
    self.assertEqual(set(response.json['data'][0]), set([u'id', u'dateModified', u'budget']))
    self.assertIn('opt_fields=budget', response.json['next_page']['uri'])
    response = self.app.get('/plans', params=[('opt_fields', 'planID')])
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 3)
    self.assertEqual(set(response.json['data'][0]), set([u'id', u'dateModified', u'planID']))
    self.assertIn('opt_fields=planID', response.json['next_page']['uri'])
    response = self.app.get('/plans', params=[('opt_fields', 'budget,procuringEntity')])
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 3)
    self.assertEqual(set(response.json['data'][0]), set([u'id', u'dateModified', u'budget', u'procuringEntity']))
    self.assertIn('opt_fields=budget%2CprocuringEntity', response.json['next_page']['uri'])
    # Descending listing returns newest first.
    response = self.app.get('/plans?descending=1')
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(len(response.json['data']), 3)
    self.assertEqual(set(response.json['data'][0]), set([u'id', u'dateModified']))
    self.assertEqual(set([i['id'] for i in response.json['data']]), set([i['id'] for i in plans]))
    self.assertEqual([i['dateModified'] for i in response.json['data']],
                     sorted([i['dateModified'] for i in plans], reverse=True))
    # Descending paging: 2 + 1 + 0 items.
    response = self.app.get('/plans?descending=1&limit=2')
    self.assertEqual(response.status, '200 OK')
    self.assertNotIn('descending=1', response.json['prev_page']['uri'])
    self.assertEqual(len(response.json['data']), 2)
    response = self.app.get(response.json['next_page']['path'].replace(ROUTE_PREFIX, ''))
    self.assertEqual(response.status, '200 OK')
    self.assertNotIn('descending=1', response.json['prev_page']['uri'])
    self.assertEqual(len(response.json['data']), 1)
    response = self.app.get(response.json['next_page']['path'].replace(ROUTE_PREFIX, ''))
    self.assertEqual(response.status, '200 OK')
    self.assertNotIn('descending=1', response.json['prev_page']['uri'])
    self.assertEqual(len(response.json['data']), 0)
    # Plans in "test" mode are hidden unless ?mode=test / ?mode=_all_ is used.
    test_plan_data2 = self.initial_data.copy()
    test_plan_data2['mode'] = 'test'
    response = self.app.post_json('/plans', {'data': test_plan_data2})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    while True:
        # Poll until the test-mode plan becomes visible in the filtered view.
        response = self.app.get('/plans?mode=test')
        self.assertEqual(response.status, '200 OK')
        if len(response.json['data']) == 1:
            break
    self.assertEqual(len(response.json['data']), 1)
    response = self.app.get('/plans?mode=_all_')
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 4)
def listing_changes(self):
    """Same listing checks as ``listing`` but through the ``feed=changes``
    view: paging, ``opt_fields`` projection, descending order, mode filters."""
    response = self.app.get('/plans?feed=changes')
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 0)
    plans = []
    for i in range(3):
        response = self.app.post_json('/plans', {'data': self.initial_data})
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
        plans.append(response.json['data'])
    ids = ','.join([i['id'] for i in plans])
    while True:
        # The changes feed is eventually consistent -- poll until all 3 show.
        response = self.app.get('/plans?feed=changes')
        self.assertEqual(response.status, '200 OK')
        self.assertTrue(ids.startswith(','.join([i['id'] for i in response.json['data']])))
        if len(response.json['data']) == 3:
            break
    self.assertEqual(len(response.json['data']), 3)
    self.assertEqual(','.join([i['id'] for i in response.json['data']]), ids)
    self.assertEqual(set(response.json['data'][0]), set([u'id', u'dateModified']))
    self.assertEqual(set([i['id'] for i in response.json['data']]), set([i['id'] for i in plans]))
    self.assertEqual(set([i['dateModified'] for i in response.json['data']]),
                     set([i['dateModified'] for i in plans]))
    self.assertEqual([i['dateModified'] for i in response.json['data']], sorted([i['dateModified'] for i in plans]))
    # Paging through the changes feed: 2 + 1 + 0 items.
    response = self.app.get('/plans?feed=changes&limit=2')
    self.assertEqual(response.status, '200 OK')
    self.assertNotIn('prev_page', response.json)
    self.assertEqual(len(response.json['data']), 2)
    response = self.app.get(response.json['next_page']['path'].replace(ROUTE_PREFIX, ''))
    self.assertEqual(response.status, '200 OK')
    self.assertIn('descending=1', response.json['prev_page']['uri'])
    self.assertEqual(len(response.json['data']), 1)
    response = self.app.get(response.json['next_page']['path'].replace(ROUTE_PREFIX, ''))
    self.assertEqual(response.status, '200 OK')
    self.assertIn('descending=1', response.json['prev_page']['uri'])
    self.assertEqual(len(response.json['data']), 0)
    # ``opt_fields`` projection on the changes feed is echoed in next_page.
    response = self.app.get('/plans?feed=changes', params=[('opt_fields', 'budget')])
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 3)
    self.assertEqual(set(response.json['data'][0]), set([u'id', u'dateModified', u'budget']))
    self.assertIn('opt_fields=budget', response.json['next_page']['uri'])
    response = self.app.get('/plans?feed=changes', params=[('opt_fields', 'planID')])
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 3)
    self.assertEqual(set(response.json['data'][0]), set([u'id', u'dateModified', u'planID']))
    self.assertIn('opt_fields=planID', response.json['next_page']['uri'])
    response = self.app.get('/plans?feed=changes', params=[('opt_fields', 'budget,procuringEntity')])
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 3)
    self.assertEqual(set(response.json['data'][0]), set([u'id', u'dateModified', u'budget', u'procuringEntity']))
    self.assertIn('opt_fields=budget%2CprocuringEntity', response.json['next_page']['uri'])
    # Descending changes feed returns newest first.
    response = self.app.get('/plans?feed=changes&descending=1')
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(len(response.json['data']), 3)
    self.assertEqual(set(response.json['data'][0]), set([u'id', u'dateModified']))
    self.assertEqual(set([i['id'] for i in response.json['data']]), set([i['id'] for i in plans]))
    self.assertEqual([i['dateModified'] for i in response.json['data']],
                     sorted([i['dateModified'] for i in plans], reverse=True))
    # Descending paging: 2 + 1 + 0 items.
    response = self.app.get('/plans?feed=changes&descending=1&limit=2')
    self.assertEqual(response.status, '200 OK')
    self.assertNotIn('descending=1', response.json['prev_page']['uri'])
    self.assertEqual(len(response.json['data']), 2)
    response = self.app.get(response.json['next_page']['path'].replace(ROUTE_PREFIX, ''))
    self.assertEqual(response.status, '200 OK')
    self.assertNotIn('descending=1', response.json['prev_page']['uri'])
    self.assertEqual(len(response.json['data']), 1)
    response = self.app.get(response.json['next_page']['path'].replace(ROUTE_PREFIX, ''))
    self.assertEqual(response.status, '200 OK')
    self.assertNotIn('descending=1', response.json['prev_page']['uri'])
    self.assertEqual(len(response.json['data']), 0)
    # Plans in "test" mode are only visible with ?mode=test / ?mode=_all_.
    test_plan_data2 = self.initial_data.copy()
    test_plan_data2['mode'] = 'test'
    response = self.app.post_json('/plans', {'data': test_plan_data2})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    while True:
        # Poll until the test-mode plan becomes visible in the filtered view.
        response = self.app.get('/plans?mode=test')
        self.assertEqual(response.status, '200 OK')
        if len(response.json['data']) == 1:
            break
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 1)
    response = self.app.get('/plans?feed=changes&mode=_all_')
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 4)
def create_plan_invalid(self):
    """Validation suite for POST /plans.

    Walks the rejection paths in order: wrong content type, undecodable
    body, missing/odd ``data``, rogue and malformed fields, inconsistent
    tender method combinations, unparsable or out-of-range dates, and CPV
    classification rules.  Several cases mutate ``self.initial_data`` in
    place and restore it immediately after the request.
    """
    request_path = '/plans'
    # Plain body without a JSON content type -> 415.
    response = self.app.post(request_path, 'data', status=415)
    self.assertEqual(response.status, '415 Unsupported Media Type')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description':
            u"Content-Type header should be one of ['application/json']", u'location': u'header',
         u'name': u'Content-Type'}
    ])
    # JSON content type but the body is not valid JSON -> 422.
    response = self.app.post(
        request_path, 'data', content_type='application/json', status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': u'No JSON object could be decoded',
         u'location': u'body', u'name': u'data'}
    ])
    # A bare string, a wrong top-level key, and a non-dict 'data' all
    # report "Data not available".
    response = self.app.post_json(request_path, 'data', status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': u'Data not available',
         u'location': u'body', u'name': u'data'}
    ])
    response = self.app.post_json(request_path, {'not_data': {}}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': u'Data not available',
         u'location': u'body', u'name': u'data'}
    ])
    response = self.app.post_json(request_path, {'data': []}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': u'Data not available',
         u'location': u'body', u'name': u'data'}
    ])
    # Unknown fields are rejected as rogue.
    response = self.app.post_json(request_path, {'data': {
        'invalid_field': 'invalid_value'}}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': u'Rogue field', u'location':
            u'body', u'name': u'invalid_field'}
    ])
    # A scalar where a Budget mapping is expected.
    response = self.app.post_json(request_path, {'data': {'budget': 'invalid_value'}}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': [
            u'Please use a mapping for this field or Budget instance instead of unicode.'], u'location': u'body',
         u'name': u'budget'}
    ])
    # Invalid tender.procurementMethod also surfaces the other missing
    # required top-level fields.
    response = self.app.post_json(request_path, {'data': {'tender': {'procurementMethod': 'invalid_value'}}},
                                  status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertIn({u'description': [u'This field is required.'], u'location': u'body', u'name': u'procuringEntity'},
                  response.json['errors'])
    self.assertIn({u'description': [u'This field is required.'], u'location': u'body', u'name': u'classification'},
                  response.json['errors'])
    # 'open' procurementMethod restricts the allowed procurementMethodType.
    data = self.initial_data['tender']
    self.initial_data['tender'] = {'procurementMethod': 'open', 'procurementMethodType': 'reporting', 'tenderPeriod': data['tenderPeriod']}
    response = self.app.post_json(request_path, {'data': self.initial_data}, status=422)
    self.initial_data['tender'] = data
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertIn({u'description': {u'procurementMethodType': [u"Value must be one of ('belowThreshold', 'aboveThresholdUA', 'aboveThresholdEU', 'aboveThresholdUA.defense', 'competitiveDialogueUA', 'competitiveDialogueEU', 'esco')."]}, u'location': u'body', u'name': u'tender'},
                  response.json['errors'])
    # 'limited' procurementMethod likewise.
    data = self.initial_data['tender']
    self.initial_data['tender'] = {'procurementMethod': 'limited', 'procurementMethodType': 'belowThreshold', 'tenderPeriod': data['tenderPeriod']}
    response = self.app.post_json(request_path, {'data': self.initial_data}, status=422)
    self.initial_data['tender'] = data
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertIn({u'description': {u'procurementMethodType': [u"Value must be one of ('negotiation', 'negotiation.quick', 'reporting')."]}, u'location': u'body', u'name': u'tender'},
                  response.json['errors'])
    # Unparsable and out-of-range start dates.
    response = self.app.post_json(request_path,
                                  {'data': {'tender': {'tenderPeriod': {'startDate': 'invalid_value'}}}},
                                  status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': {u'tenderPeriod': {u'startDate': [u'Could not parse invalid_value. Should be ISO8601.']}},
         u'location': u'body', u'name': u'tender'}
    ])
    response = self.app.post_json(request_path, {
        'data': {'tender': {'tenderPeriod': {'startDate': '9999-12-31T23:59:59.999999'}}}}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': {u'tenderPeriod': {u'startDate': [u'date value out of range']}}, u'location': u'body',
         u'name': u'tender'}
    ])
    # Items without additionalClassifications are rejected.  After the
    # CPV_ITEMS_CLASS_FROM cut-over the '99999999-9' stub code is used to
    # reach that check; real codes are swapped out and restored after.
    additionalClassifications = [i.pop("additionalClassifications") for i in self.initial_data["items"]]
    if get_now() > CPV_ITEMS_CLASS_FROM:
        root_cpv_code = self.initial_data['classification']['id']
        cpv_codes = [i['classification']['id'] for i in self.initial_data["items"]]
        self.initial_data['classification']['id'] = '99999999-9'
        # BUG FIX: this loop previously rebound the saved ``cpv_code``
        # variable, so the root classification was "restored" below to the
        # LAST item's code instead of its original value.
        for index, _ in enumerate(cpv_codes):
            self.initial_data["items"][index]['classification']['id'] = '99999999-9'
    response = self.app.post_json(request_path, {'data': self.initial_data}, status=422)
    for index, additionalClassification in enumerate(additionalClassifications):
        self.initial_data["items"][index]['additionalClassifications'] = additionalClassification
    if get_now() > CPV_ITEMS_CLASS_FROM:
        self.initial_data['classification']['id'] = root_cpv_code
        for index, cpv_code in enumerate(cpv_codes):
            self.initial_data["items"][index]['classification']['id'] = cpv_code
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': [{u'additionalClassifications': [u'This field is required.']}, {u'additionalClassifications': [u'This field is required.']}, {u'additionalClassifications': [u'This field is required.']}], u'location': u'body', u'name': u'items'}
    ])
    # An unknown additionalClassifications scheme is rejected; the allowed
    # set of schemes depends on the CPV_ITEMS_CLASS_FROM cut-over.
    additionalClassifications = [i["additionalClassifications"][0]["scheme"] for i in self.initial_data["items"]]
    for index, _ in enumerate(additionalClassifications):
        self.initial_data["items"][index]["additionalClassifications"][0]["scheme"] = u'Не ДКПП'
    if get_now() > CPV_ITEMS_CLASS_FROM:
        root_cpv_code = self.initial_data['classification']['id']
        cpv_codes = [i['classification']['id'] for i in self.initial_data["items"]]
        self.initial_data['classification']['id'] = '99999999-9'
        # BUG FIX: same loop-variable rebinding issue as above -- keep the
        # loop variable anonymous so the saved codes survive intact.
        for index, _ in enumerate(cpv_codes):
            self.initial_data["items"][index]['classification']['id'] = '99999999-9'
    response = self.app.post_json(request_path, {'data': self.initial_data}, status=422)
    for index, data in enumerate(additionalClassifications):
        self.initial_data["items"][index]["additionalClassifications"][0]["scheme"] = data
    if get_now() > CPV_ITEMS_CLASS_FROM:
        self.initial_data['classification']['id'] = root_cpv_code
        for index, cpv_code in enumerate(cpv_codes):
            self.initial_data["items"][index]['classification']['id'] = cpv_code
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    if get_now() > CPV_ITEMS_CLASS_FROM:
        self.assertEqual(response.json['errors'], [
            {u'description': [{u'additionalClassifications': [u"One of additional classifications should be one of [ДК003, ДК015, ДК018, specialNorms]."]} for _ in additionalClassifications], u'location': u'body', u'name': u'items'}
        ])
    else:
        self.assertEqual(response.json['errors'], [
            {u'description': [{u'additionalClassifications': [u"One of additional classifications should be one of [ДКПП, NONE, ДК003, ДК015, ДК018]."]} for _ in additionalClassifications], u'location': u'body', u'name': u'items'}
        ])
    # procuringEntity.name is required.
    data = self.initial_data["procuringEntity"]["name"]
    del self.initial_data["procuringEntity"]["name"]
    response = self.app.post_json(request_path, {'data': self.initial_data}, status=422)
    self.initial_data["procuringEntity"]["name"] = data
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': {u'name': [u'This field is required.']}, u'location': u'body', u'name': u'procuringEntity'}
    ])
    # budget is required when tender.procurementMethodType is belowThreshold.
    data = self.initial_data["budget"]
    del self.initial_data["budget"]
    self.initial_data['tender']['procurementMethodType'] = 'belowThreshold'
    response = self.app.post_json(request_path, {'data': self.initial_data}, status=422)
    self.initial_data["budget"] = data
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': [u'This field is required.'], u'location': u'body', u'name': u'budget'}
    ])
    # An item whose CPV class/group differs from the root classification.
    data = self.initial_data["items"][0].copy()
    classification = data['classification'].copy()
    classification["id"] = u'31519200-9'
    data['classification'] = classification
    self.initial_data["items"] = [self.initial_data["items"][0], data]
    response = self.app.post_json(request_path, {'data': self.initial_data}, status=422)
    self.initial_data["items"] = self.initial_data["items"][:1]
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    if get_now() > CPV_ITEMS_CLASS_FROM:
        self.assertEqual(response.json['errors'], [
            {u'description': [{u'classification': [u'CPV class of items should be identical to root cpv']}],
             u'location': u'body', u'name': u'items'}
        ])
    else:
        self.assertEqual(response.json['errors'], [
            {u'description': [{u'classification': [u'CPV group of items be identical to root cpv']}],
             u'location': u'body', u'name': u'items'}
        ])
    # A '33600000-6' root with mismatching items is rejected...
    classification_id = self.initial_data["classification"]["id"]
    self.initial_data["classification"]["id"] = u'33600000-6'
    response = self.app.post_json(request_path, {'data': self.initial_data}, status=422)
    self.initial_data["classification"]["id"] = classification_id
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': [{u'classification': [u'CPV group of items be identical to root cpv']}],
         u'location': u'body', u'name': u'items'}
    ])
    # ...but items within the 336* group are accepted.
    classification_id = self.initial_data["classification"]["id"]
    self.initial_data["classification"]["id"] = u'33600000-6'
    item = self.initial_data["items"][0].copy()
    data = self.initial_data["items"][0].copy()
    classification = data['classification'].copy()
    classification["id"] = u'33610000-9'
    data['classification'] = classification
    data2 = self.initial_data["items"][0].copy()
    classification = data2['classification'].copy()
    classification["id"] = u'33620000-2'
    data2['classification'] = classification
    self.initial_data["items"] = [data, data2]
    response = self.app.post_json(request_path, {'data': self.initial_data})
    self.initial_data["classification"]["id"] = classification_id
    self.initial_data["items"] = [item]
    self.assertEqual(response.status, '201 Created')
def create_plan_generated(self):
    """Server-generated identifiers must override client-supplied ones."""
    payload = self.initial_data.copy()
    # Try to smuggle in client-chosen identifiers.
    payload.update({'id': 'hash', 'doc_id': 'hash2', 'planID': 'hash3'})
    response = self.app.post_json('/plans', {'data': payload})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    created = response.json['data']
    expected_fields = {
        u'id', u'dateModified', u'datePublished', u'planID', u'budget', u'tender',
        u'classification', u'additionalClassifications', u'items', u'procuringEntity', u'owner',
    }
    self.assertEqual(set(created), expected_fields)
    # None of the smuggled identifiers survive.
    self.assertNotEqual(payload['id'], created['id'])
    self.assertNotEqual(payload['doc_id'], created['id'])
    self.assertNotEqual(payload['planID'], created['planID'])
def create_plan(self):
    """Create a plan and read it back; also cover JSONP and pretty output."""
    resp = self.app.get('/plans')
    self.assertEqual(resp.status, '200 OK')
    self.assertEqual(len(resp.json['data']), 0)
    # Plain creation: the server adds the generated fields.
    resp = self.app.post_json('/plans', {"data": self.initial_data})
    self.assertEqual(resp.status, '201 Created')
    self.assertEqual(resp.content_type, 'application/json')
    created = resp.json['data']
    self.assertEqual(
        set(created) - set(self.initial_data),
        {u'id', u'dateModified', u'datePublished', u'planID', u'owner'})
    self.assertIn(created['id'], resp.headers['Location'])
    # The stored document round-trips unchanged.
    resp = self.app.get('/plans/{}'.format(created['id']))
    self.assertEqual(resp.status, '200 OK')
    self.assertEqual(resp.content_type, 'application/json')
    self.assertEqual(set(resp.json['data']), set(created))
    self.assertEqual(resp.json['data'], created)
    # JSONP wrapping.
    resp = self.app.post_json('/plans?opt_jsonp=callback', {"data": self.initial_data})
    self.assertEqual(resp.status, '201 Created')
    self.assertEqual(resp.content_type, 'application/javascript')
    self.assertIn('callback({"', resp.body)
    # Pretty printing via the query string...
    resp = self.app.post_json('/plans?opt_pretty=1', {"data": self.initial_data})
    self.assertEqual(resp.status, '201 Created')
    self.assertEqual(resp.content_type, 'application/json')
    self.assertIn('{\n "', resp.body)
    # ...and via the request body options.
    resp = self.app.post_json('/plans', {"data": self.initial_data, "options": {"pretty": True}})
    self.assertEqual(resp.status, '201 Created')
    self.assertEqual(resp.content_type, 'application/json')
    self.assertIn('{\n "', resp.body)
def get_plan(self):
    """Fetch a plan by id, plus JSONP and pretty-printed variants."""
    resp = self.app.get('/plans')
    self.assertEqual(resp.status, '200 OK')
    self.assertEqual(len(resp.json['data']), 0)
    resp = self.app.post_json('/plans', {'data': self.initial_data})
    self.assertEqual(resp.status, '201 Created')
    plan = resp.json['data']
    plan_url = '/plans/{}'.format(plan['id'])
    # Plain fetch echoes the created document.
    resp = self.app.get(plan_url)
    self.assertEqual(resp.status, '200 OK')
    self.assertEqual(resp.content_type, 'application/json')
    self.assertEqual(resp.json['data'], plan)
    # JSONP variant.
    resp = self.app.get(plan_url + '?opt_jsonp=callback')
    self.assertEqual(resp.status, '200 OK')
    self.assertEqual(resp.content_type, 'application/javascript')
    self.assertIn('callback({"data": {"', resp.body)
    # Pretty-printed variant.
    resp = self.app.get(plan_url + '?opt_pretty=1')
    self.assertEqual(resp.status, '200 OK')
    self.assertEqual(resp.content_type, 'application/json')
    self.assertIn('{\n "data": {\n "', resp.body)
def patch_plan(self):
    """PATCH /plans/{id}: field updates, read-only dateModified, revision
    history, items list replacement/merging, and item deletion."""
    response = self.app.get('/plans')
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 0)
    response = self.app.post_json('/plans', {'data': self.initial_data})
    self.assertEqual(response.status, '201 Created')
    plan = response.json['data']
    dateModified = plan.pop('dateModified')
    # Patching budget.id succeeds and bumps dateModified.
    response = self.app.patch_json('/plans/{}'.format(plan['id']),
                                   {'data': {'budget': {'id': u"12303111000-3"}}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    new_plan = response.json['data']
    new_dateModified = new_plan.pop('dateModified')
    plan['budget']['id'] = u"12303111000-3"
    self.assertEqual(plan, new_plan)
    self.assertNotEqual(dateModified, new_dateModified)
    # dateModified itself is server-controlled: patching it changes nothing.
    response = self.app.patch_json('/plans/{}'.format(
        plan['id']), {'data': {'dateModified': new_dateModified}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    new_plan2 = response.json['data']
    new_dateModified2 = new_plan2.pop('dateModified')
    self.assertEqual(new_plan, new_plan2)
    self.assertEqual(new_dateModified, new_dateModified2)
    # The budget.id change is recorded as a 'replace' op in revisions.
    revisions = self.db.get(plan['id']).get('revisions')
    self.assertEqual(revisions[-1][u'changes'][0]['op'], u'replace')
    self.assertEqual(revisions[-1][u'changes'][0]['path'], u'/budget/id')
    response = self.app.get('/plans/{}/revisions'.format(plan['id']))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']['revisions'], revisions)
    # Replacing items with a single full item succeeds.
    response = self.app.patch_json('/plans/{}'.format(
        plan['id']), {'data': {'items': [self.initial_data['items'][0]]}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    # An empty dict entry produces an item equal to its sibling except for
    # a freshly generated id.
    response = self.app.patch_json('/plans/{}'.format(
        plan['id']), {'data': {'items': [{}, self.initial_data['items'][0]]}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    item0 = response.json['data']['items'][0]
    item1 = response.json['data']['items'][1]
    self.assertNotEqual(item0.pop('id'), item1.pop('id'))
    self.assertEqual(item0, item1)
    # Shrinking the list back to one empty patch leaves a single item.
    response = self.app.patch_json('/plans/{}'.format(
        plan['id']), {'data': {'items': [{}]}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(len(response.json['data']['items']), 1)
    # Item classification can be patched in place.
    response = self.app.patch_json('/plans/{}'.format(plan['id']), {'data': {'items': [{"classification": {
        "scheme": "ДК021",
        "id": "03117140-7",
        "description": "Послуги з харчування у школах"
    }}]}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    # Repeating the same additionalClassifications entry is accepted.
    response = self.app.patch_json('/plans/{}'.format(plan['id']),
                                   {'data': {'items': [{"additionalClassifications": [
                                       plan['items'][0]["additionalClassifications"][0] for i in range(3)
                                   ]}]}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    response = self.app.patch_json('/plans/{}'.format(plan['id']), {
        'data': {'items': [{"additionalClassifications": plan['items'][0]["additionalClassifications"]}]}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    # tenderPeriod.startDate accepts an ISO8601 datetime string.
    response = self.app.patch_json('/plans/{}'.format(
        plan['id']), {'data': {'tender': {'tenderPeriod': {'startDate': new_dateModified2}}}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    new_plan = response.json['data']
    self.assertIn('startDate', new_plan['tender']['tenderPeriod'])
    # delete items
    response = self.app.patch_json('/plans/{}'.format(plan['id']), {'data': {'items': []}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertNotIn('items', response.json['data'])
def plan_not_found(self):
    """Unknown plan ids yield 404 with a structured error body."""
    resp = self.app.get('/plans')
    self.assertEqual(resp.status, '200 OK')
    self.assertEqual(len(resp.json['data']), 0)
    expected_errors = [
        {u'description': u'Not Found', u'location': u'url', u'name': u'plan_id'}
    ]
    # GET on a missing id.
    resp = self.app.get('/plans/some_id', status=404)
    self.assertEqual(resp.status, '404 Not Found')
    self.assertEqual(resp.content_type, 'application/json')
    self.assertEqual(resp.json['status'], 'error')
    self.assertEqual(resp.json['errors'], expected_errors)
    # PATCH on a missing id behaves the same way.
    resp = self.app.patch_json(
        '/plans/some_id', {'data': {}}, status=404)
    self.assertEqual(resp.status, '404 Not Found')
    self.assertEqual(resp.content_type, 'application/json')
    self.assertEqual(resp.json['status'], 'error')
    self.assertEqual(resp.json['errors'], expected_errors)
def esco_plan(self):
    """ESCO plans may omit the budget; a provided budget is kept."""
    resp = self.app.get('/plans')
    self.assertEqual(resp.status, '200 OK')
    self.assertEqual(len(resp.json['data']), 0)
    payload = deepcopy(self.initial_data)
    budget = payload.pop('budget')
    payload['tender']['procurementMethodType'] = 'esco'
    generated = {u'id', u'dateModified', u'datePublished', u'planID', u'owner'}
    # Without a budget: created successfully and no budget field is echoed.
    resp = self.app.post_json('/plans', {"data": payload})
    self.assertEqual(resp.status, '201 Created')
    self.assertEqual(resp.content_type, 'application/json')
    plan = resp.json['data']
    self.assertEqual(set(plan) - set(self.initial_data), generated)
    self.assertNotIn('budget', plan)
    self.assertIn(plan['id'], resp.headers['Location'])
    # With the budget restored: the field comes back in the response.
    payload['budget'] = budget
    resp = self.app.post_json('/plans', {"data": payload})
    self.assertEqual(resp.status, '201 Created')
    self.assertEqual(resp.content_type, 'application/json')
    plan = resp.json['data']
    self.assertEqual(set(plan) - set(self.initial_data), generated)
    self.assertIn('budget', plan)
    self.assertIn(plan['id'], resp.headers['Location'])
def create_plan_without_procurement_method(self):
    """A plan with empty procurementMethod/procurementMethodType is accepted."""
    payload = deepcopy(self.initial_data)
    payload['tender']['procurementMethod'] = ''
    payload['tender']['procurementMethodType'] = ''
    resp = self.app.post_json('/plans', {"data": payload})
    self.assertEqual(resp.status, '201 Created')
    self.assertEqual(resp.content_type, 'application/json')
| 49.640704
| 278
| 0.670446
| 4,851
| 39,514
| 5.379922
| 0.054834
| 0.159208
| 0.18419
| 0.107786
| 0.885585
| 0.873707
| 0.860717
| 0.845965
| 0.830447
| 0.816001
| 0
| 0.020315
| 0.149137
| 39,514
| 795
| 279
| 49.703145
| 0.755926
| 0.002075
| 0
| 0.714499
| 0
| 0.00299
| 0.249835
| 0.03168
| 0
| 0
| 0
| 0
| 0.511211
| 1
| 0.019432
| false
| 0
| 0.005979
| 0
| 0.025411
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b9d94aad9b8b3243bcce65ca76ddb62b430252b3
| 22,732
|
py
|
Python
|
sds/distributions/gaussian.py
|
hanyas/sds
|
3c195fb9cbd88a9284287d62c0eacb6afc4598a7
|
[
"MIT"
] | 12
|
2019-09-21T13:52:09.000Z
|
2022-02-14T06:48:46.000Z
|
sds/distributions/gaussian.py
|
hanyas/sds
|
3c195fb9cbd88a9284287d62c0eacb6afc4598a7
|
[
"MIT"
] | 1
|
2020-01-22T12:34:52.000Z
|
2020-01-26T21:14:11.000Z
|
sds/distributions/gaussian.py
|
hanyas/sds
|
3c195fb9cbd88a9284287d62c0eacb6afc4598a7
|
[
"MIT"
] | 5
|
2019-09-18T15:11:26.000Z
|
2021-12-10T14:04:53.000Z
|
import numpy as np
import numpy.random as npr
import scipy as sc
from scipy import linalg
from operator import add
from functools import reduce, partial
from sds.utils.linalg import symmetrize
from sds.utils.general import Statistics as Stats
class _GaussianBase:
    """Shared interface for multivariate Gaussian densities.

    Concrete subclasses store the precision (or its diagonal) and provide
    the ``sigma``/``lmbda`` properties plus the natural-parameter maps.

    :param dim: dimensionality of the random variable
    :param mu: mean vector (may be ``None`` until fitted)
    """

    def __init__(self, dim, mu=None):
        self.dim = dim
        self.mu = mu

    @property
    def params(self):
        # Standard (mean/precision) parameters; subclass responsibility.
        raise NotImplementedError

    @params.setter
    def params(self, values):
        raise NotImplementedError

    @property
    def sigma(self):
        # Covariance matrix; subclass responsibility.
        raise NotImplementedError

    @sigma.setter
    def sigma(self, value):
        raise NotImplementedError

    @property
    def lmbda(self):
        # Precision matrix; subclass responsibility.
        raise NotImplementedError

    @lmbda.setter
    def lmbda(self, value):
        raise NotImplementedError

    @property
    def nat_param(self):
        # Natural parameters derived from the standard parameters.
        return self.std_to_nat(self.params)

    @nat_param.setter
    def nat_param(self, natparam):
        self.params = self.nat_to_std(natparam)

    @staticmethod
    def std_to_nat(params):
        raise NotImplementedError

    @staticmethod
    def nat_to_std(natparam):
        raise NotImplementedError

    def mean(self):
        return self.mu

    def mode(self):
        # Mean and mode coincide for a Gaussian.
        return self.mu

    @property
    def base(self):
        # Base measure (2*pi)^(-dim/2) of the exponential-family form.
        return np.power(2. * np.pi, - self.dim / 2.)

    def log_base(self):
        return np.log(self.base)

    def log_partition(self):
        raise NotImplementedError

    def log_likelihood(self, x):
        """Per-row log density of ``x``.

        Accepts a single array (reshaped to (-1, dim)) or a list of such
        arrays.  Rows containing NaN have their data-dependent terms zeroed
        and contribute only the constant -log_partition + log_base.

        NOTE(review): ``np.nan_to_num(x, copy=False)`` mutates the caller's
        array in place — confirm callers do not rely on the NaNs afterwards.
        """
        if isinstance(x, np.ndarray):
            bads = np.isnan(np.atleast_2d(x)).any(axis=1)
            x = np.nan_to_num(x, copy=False).reshape((-1, self.dim))

            log_lik = np.einsum('d,dl,nl->n', self.mu, self.lmbda, x, optimize=True)\
                      - 0.5 * np.einsum('nd,dl,nl->n', x, self.lmbda, x, optimize=True)

            log_lik[bads] = 0.
            log_lik += - self.log_partition() + self.log_base()
            return log_lik
        else:
            return list(map(self.log_likelihood, x))
class GaussianWithPrecision(_GaussianBase):
    """Multivariate Gaussian parameterized by mean and full precision matrix.

    Cholesky factors of the precision are cached lazily and invalidated
    whenever ``lmbda`` is reassigned.
    """

    def __init__(self, dim, mu=None, lmbda=None):
        self._lmbda = lmbda
        # Cached upper Cholesky factor of lmbda and its inverse.
        self._lmbda_chol = None
        self._lmbda_chol_inv = None

        super(GaussianWithPrecision, self).__init__(dim, mu)

    @property
    def params(self):
        return self.mu, self.lmbda

    @params.setter
    def params(self, values):
        self.mu, self.lmbda = values

    @property
    def nb_params(self):
        # Mean entries plus the upper triangle of the symmetric precision.
        return self.dim + self.dim * (self.dim + 1) / 2

    @staticmethod
    def std_to_nat(params):
        """Map (mu, lmbda) to natural parameters (lmbda @ mu, -0.5 * lmbda)."""
        a = params[1] @ params[0]
        b = - 0.5 * params[1]
        return Stats([a, b])

    @staticmethod
    def nat_to_std(natparam):
        """Inverse of :meth:`std_to_nat`."""
        mu = - 0.5 * np.linalg.inv(natparam[1]) @ natparam[0]
        lmbda = - 2. * natparam[1]
        return mu, lmbda

    @property
    def lmbda(self):
        return self._lmbda

    @lmbda.setter
    def lmbda(self, value):
        self._lmbda = value
        # Invalidate cached factorizations.
        self._lmbda_chol = None
        self._lmbda_chol_inv = None

    @property
    def lmbda_chol(self):
        # Upper-triangular Cholesky factor of the precision (lazy).
        if self._lmbda_chol is None:
            self._lmbda_chol = sc.linalg.cholesky(self.lmbda, lower=False)
        return self._lmbda_chol

    @property
    def lmbda_chol_inv(self):
        if self._lmbda_chol_inv is None:
            self._lmbda_chol_inv = sc.linalg.inv(self.lmbda_chol)
        return self._lmbda_chol_inv

    @property
    def sigma(self):
        # Covariance recovered from the inverse Cholesky factor.
        return self.lmbda_chol_inv @ self.lmbda_chol_inv.T

    def rvs(self):
        """Draw a single sample from the distribution."""
        return self.mu + npr.normal(size=self.dim).dot(self.lmbda_chol_inv.T)

    def statistics(self, data):
        """Sufficient statistics Stats([sum x, n, sum x x^T, n]).

        Rows containing NaN are dropped.  A list of arrays is accumulated
        segment by segment via ``reduce(add, ...)``.
        """
        if isinstance(data, np.ndarray):
            idx = ~np.isnan(data).any(axis=1)
            data = data[idx]

            c0, c1 = 'nd->d', 'nd,nl->dl'
            x = np.einsum(c0, data, optimize=True)
            xxT = np.einsum(c1, data, data, optimize=True)
            n = data.shape[0]
            return Stats([x, n, xxT, n])
        else:
            stats = list(map(self.statistics, data))
            return reduce(add, stats)

    def weighted_statistics(self, data, weights):
        """Like :meth:`statistics` but each row is scaled by its weight."""
        if isinstance(data, np.ndarray):
            idx = ~np.isnan(data).any(axis=1)
            data, weights = data[idx], weights[idx]

            c0, c1 = 'n,nd->d', 'nd,n,nl->dl'
            x = np.einsum(c0, weights, data, optimize=True)
            xxT = np.einsum(c1, data, weights, data, optimize=True)
            n = np.sum(weights, axis=0)
            return Stats([x, n, xxT, n])
        else:
            stats = list(map(self.weighted_statistics, data, weights))
            return reduce(add, stats)

    def log_partition(self):
        # 0.5 * mu' L mu minus the log-determinant term of chol(L).
        return 0.5 * np.einsum('d,dl,l->', self.mu, self.lmbda, self.mu)\
               - np.sum(np.log(np.diag(self.lmbda_chol)))

    def max_likelihood(self, data, weights=None):
        """Closed-form ML update of ``mu`` and ``lmbda`` from (weighted) stats."""
        x, n, xxT, n = self.statistics(data) if weights is None \
            else self.weighted_statistics(data, weights)

        self.mu = x / n
        sigma = xxT / n - np.outer(self.mu, self.mu)

        # numerical stabilization
        sigma = symmetrize(sigma) + 1e-16 * np.eye(self.dim)
        assert np.allclose(sigma, sigma.T)
        assert np.all(np.linalg.eigvalsh(sigma) > 0.)

        self.lmbda = np.linalg.inv(sigma)
class StackedGaussiansWithPrecision:
    """A stack of ``size`` independent Gaussians over the same ``dim``-dim
    space, each with its own mean and full precision matrix.

    Parameters live in the per-component :class:`GaussianWithPrecision`
    objects in ``self.dists``; the stacked properties below gather/scatter
    them as arrays with a leading component axis ``k``.
    """

    def __init__(self, size, dim, mus=None, lmbdas=None):
        self.size = size
        self.dim = dim

        mus = [None] * self.size if mus is None else mus
        lmbdas = [None] * self.size if lmbdas is None else lmbdas
        self.dists = [GaussianWithPrecision(dim, mus[k], lmbdas[k])
                      for k in range(self.size)]

    @property
    def params(self):
        return self.mus, self.lmbdas

    @params.setter
    def params(self, values):
        self.mus, self.lmbdas = values

    @property
    def nb_params(self):
        # Per component: mean entries plus the precision's upper triangle.
        return self.size * (self.dim + self.dim * (self.dim + 1) / 2)

    @property
    def nat_param(self):
        return self.std_to_nat(self.params)

    @nat_param.setter
    def nat_param(self, natparam):
        self.params = self.nat_to_std(natparam)

    def std_to_nat(self, params):
        """Convert stacked standard parameters to stacked natural parameters."""
        params_list = list(zip(*params))
        natparams_list = [dist.std_to_nat(par) for dist, par in zip(self.dists, params_list)]
        natparams_stack = Stats(map(partial(np.stack, axis=0), zip(*natparams_list)))
        return natparams_stack

    def nat_to_std(self, natparam):
        """Inverse of :meth:`std_to_nat`."""
        natparams_list = list(zip(*natparam))
        params_list = [dist.nat_to_std(par) for dist, par in zip(self.dists, natparams_list)]
        params_stack = tuple(map(partial(np.stack, axis=0), zip(*params_list)))
        return params_stack

    @property
    def mus(self):
        # Component means gathered into one array.
        return np.array([dist.mu for dist in self.dists])

    @mus.setter
    def mus(self, value):
        for k, dist in enumerate(self.dists):
            dist.mu = value[k, ...]

    @property
    def lmbdas(self):
        # Component precisions gathered into one array.
        return np.array([dist.lmbda for dist in self.dists])

    @lmbdas.setter
    def lmbdas(self, value):
        for k, dist in enumerate(self.dists):
            dist.lmbda = value[k, ...]

    @property
    def lmbdas_chol(self):
        return np.array([dist.lmbda_chol for dist in self.dists])

    @property
    def lmbdas_chol_inv(self):
        return np.array([dist.lmbda_chol_inv for dist in self.dists])

    @property
    def sigmas(self):
        return np.array([dist.sigma for dist in self.dists])

    def mean(self):
        return np.array([dist.mean() for dist in self.dists])

    def mode(self):
        return np.array([dist.mode() for dist in self.dists])

    def rvs(self):
        # One draw per component, stacked along axis 0.
        return np.array([dist.rvs() for dist in self.dists])

    @property
    def base(self):
        return np.array([dist.base for dist in self.dists])

    def log_base(self):
        return np.log(self.base)

    def statistics(self, data):
        """Unweighted sufficient statistics replicated for every component.

        NaN rows are dropped.  Returns Stats([xk, nk, xxTk, nk]); lists of
        arrays are accumulated via ``reduce(add, ...)``.
        """
        if isinstance(data, np.ndarray):
            idx = ~np.isnan(data).any(axis=1)
            data = data[idx]

            c0, c1 = 'nd->d', 'nd,nl->dl'
            x = np.einsum(c0, data, optimize=True)
            xxT = np.einsum(c1, data, data, optimize=True)
            n = data.shape[0]

            # Identical statistics for each of the `size` components.
            xk = np.array([x for _ in range(self.size)])
            xxTk = np.array([xxT for _ in range(self.size)])
            nk = np.array([n for _ in range(self.size)])
            return Stats([xk, nk, xxTk, nk])
        else:
            stats = list(map(self.statistics, data))
            return reduce(add, stats)

    def weighted_statistics(self, data, weights):
        """Per-component statistics weighted by the ``k``-th weight column."""
        if isinstance(data, np.ndarray):
            idx = ~np.isnan(data).any(axis=1)
            data, weights = data[idx], weights[idx]

            c0, c1 = 'nk,nd->kd', 'nd,nk,nl->kdl'
            xk = np.einsum(c0, weights, data, optimize=True)
            xxTk = np.einsum(c1, data, weights, data, optimize=True)
            nk = np.sum(weights, axis=0)
            return Stats([xk, nk, xxTk, nk])
        else:
            stats = list(map(self.weighted_statistics, data, weights))
            return reduce(add, stats)

    def log_partition(self):
        return np.array([dist.log_partition() for dist in self.dists])

    def log_likelihood(self, x):
        """Log densities per row and component.

        Rows containing NaN have their data-dependent terms zeroed and
        contribute only the constant -log_partition + log_base.

        NOTE(review): ``np.nan_to_num(x, copy=False)`` mutates the caller's
        array in place — confirm callers tolerate this.
        """
        if isinstance(x, np.ndarray):
            bads = np.isnan(np.atleast_2d(x)).any(axis=1)
            x = np.nan_to_num(x, copy=False).reshape((-1, self.dim))

            log_lik = np.einsum('kd,kdl,nl->nk', self.mus, self.lmbdas, x, optimize=True)\
                      - 0.5 * np.einsum('nd,kdl,nl->nk', x, self.lmbdas, x, optimize=True)

            log_lik[bads] = 0.
            log_lik += - self.log_partition() + self.log_base()
            return log_lik
        else:
            return list(map(self.log_likelihood, x))

    def max_likelihood(self, data, weights):
        """Independent closed-form ML update for every component."""
        xk, nk, xxTk, nk = self.weighted_statistics(data, weights)

        mus = np.zeros((self.size, self.dim))
        lmbdas = np.zeros((self.size, self.dim, self.dim))
        for k in range(self.size):
            mus[k] = xk[k] / nk[k]
            sigma = xxTk[k] / nk[k] - np.outer(mus[k], mus[k])

            # numerical stabilization
            sigma = symmetrize(sigma) + 1e-16 * np.eye(self.dim)
            assert np.allclose(sigma, sigma.T)
            assert np.all(np.linalg.eigvalsh(sigma) > 0.)

            lmbdas[k] = np.linalg.inv(sigma)

        self.mus = mus
        self.lmbdas = lmbdas
class TiedGaussiansWithPrecision(StackedGaussiansWithPrecision):
    """Stacked Gaussians constrained to share one (tied) covariance.

    Component means are fitted independently; a single pooled covariance is
    estimated across components and its inverse is broadcast to every
    component's precision.
    """

    def __init__(self, size, dim, mus=None, lmbdas=None):
        super(TiedGaussiansWithPrecision, self).__init__(size, dim, mus, lmbdas)

    def max_likelihood(self, data, weights):
        """Closed-form ML update with a shared covariance."""
        xk, nk, xxTk, _ = self.weighted_statistics(data, weights)
        total = np.sum(nk, axis=0)

        # Per-component means.
        mus = np.array([xk[k] / nk[k] for k in range(self.size)])

        # Pooled scatter: sum_k (xxT_k - n_k * mu_k mu_k^T), normalized by n.
        sigma = np.sum(xxTk, axis=0)
        for k in range(self.size):
            sigma -= nk[k] * np.outer(mus[k], mus[k])
        sigma /= total

        # numerical stabilization
        sigma = symmetrize(sigma) + 1e-16 * np.eye(self.dim)
        assert np.allclose(sigma, sigma.T)
        assert np.all(np.linalg.eigvalsh(sigma) > 0.)

        self.mus = mus
        # Every component shares the same precision.
        self.lmbdas = np.array(self.size * [np.linalg.inv(sigma)])
class GaussianWithDiagonalPrecision(_GaussianBase):
    """Multivariate Gaussian parameterized by a mean and a *diagonal*
    precision (inverse covariance), stored as its diagonal vector.

    The Cholesky factor of the precision and its inverse are cached and
    invalidated whenever ``lmbda_diag`` is reassigned.
    """

    def __init__(self, dim, mu=None, lmbda_diag=None):
        # lmbda_diag: (dim,) vector of precision diagonal entries.
        self._lmbda_diag = lmbda_diag
        # Lazily computed caches; see lmbda_chol / lmbda_chol_inv below.
        self._lmbda_chol = None
        self._lmbda_chol_inv = None
        super(GaussianWithDiagonalPrecision, self).__init__(dim, mu)

    @property
    def params(self):
        return self.mu, self.lmbda_diag

    @params.setter
    def params(self, values):
        self.mu, self.lmbda_diag = values

    @property
    def nb_params(self):
        # NOTE(review): this counts dim + dim*(dim+1)/2 free parameters,
        # which is the *full*-covariance count; a diagonal precision only
        # has `dim` free parameters. Looks copied from the dense case --
        # confirm before relying on it (e.g. for BIC/AIC).
        return self.dim + self.dim * (self.dim + 1) / 2

    @staticmethod
    def std_to_nat(params):
        # Natural parameters of the diagonal Gaussian:
        #   a = lmbda_diag * mu,  b = -0.5 * lmbda_diag
        a = params[1] * params[0]
        b = - 0.5 * params[1]
        return Stats([a, b])

    @staticmethod
    def nat_to_std(natparam):
        # Inverse of std_to_nat.
        mu = - 0.5 * (1. / natparam[1]) * natparam[0]
        lmbda_diag = - 2. * natparam[1]
        return mu, lmbda_diag

    @property
    def lmbda_diag(self):
        return self._lmbda_diag

    @lmbda_diag.setter
    def lmbda_diag(self, value):
        self._lmbda_diag = value
        # Invalidate cached Cholesky factors; they are recomputed on demand.
        self._lmbda_chol = None
        self._lmbda_chol_inv = None

    @property
    def lmbda(self):
        # Full (dim, dim) precision matrix built from the diagonal.
        assert self.lmbda_diag is not None
        return np.diag(self.lmbda_diag)

    @property
    def lmbda_chol(self):
        # Cholesky factor of a diagonal matrix is the elementwise sqrt.
        if self._lmbda_chol is None:
            self._lmbda_chol = np.diag(np.sqrt(self.lmbda_diag))
        return self._lmbda_chol

    @property
    def lmbda_chol_inv(self):
        if self._lmbda_chol_inv is None:
            self._lmbda_chol_inv = np.diag(1. / np.sqrt(self.lmbda_diag))
        return self._lmbda_chol_inv

    @property
    def sigma_diag(self):
        # Covariance diagonal is the elementwise inverse of the precision.
        return 1. / self.lmbda_diag

    @property
    def sigma(self):
        return np.diag(self.sigma_diag)

    def rvs(self):
        """Draw one sample: mu + L^{-T} z with z ~ N(0, I)."""
        return self.mu + npr.normal(size=self.dim).dot(self.lmbda_chol_inv.T)

    def statistics(self, data):
        """Unweighted sufficient statistics [x, nd, nd, xx] of `data`,
        with NaN-containing rows dropped."""
        if isinstance(data, np.ndarray):
            # Drop rows containing NaNs (treated as missing observations).
            idx = ~np.isnan(data).any(axis=1)
            data = data[idx]

            x = np.sum(data, axis=0)
            n = data.shape[0]  # NOTE(review): unused; `nd` below carries the count
            xx = np.einsum('nd,nd->d', data, data)
            nd = np.broadcast_to(data.shape[0], (self.dim, ))

            return Stats([x, nd, nd, xx])
        else:
            # A collection of arrays: accumulate per-chunk statistics.
            stats = list(map(self.statistics, data))
            return reduce(add, stats)

    def weighted_statistics(self, data, weights):
        """Weighted sufficient statistics [x, nd, nd, xx]; `weights` is a
        per-sample (n,) vector."""
        if isinstance(data, np.ndarray):
            idx = ~np.isnan(data).any(axis=1)
            data, weights = data[idx], weights[idx]

            x = np.einsum('n,nd->d', weights, data)
            n = np.sum(weights)  # NOTE(review): unused; `nd` below carries the count
            xx = np.einsum('nd,n,nd->d', data, weights, data)
            nd = np.broadcast_to(np.sum(weights), (self.dim, ))

            return Stats([x, nd, nd, xx])
        else:
            stats = list(map(self.weighted_statistics, data, weights))
            return reduce(add, stats)

    def log_partition(self):
        # 0.5 * mu^T Lmbda mu - log|chol(Lmbda)|
        return 0.5 * np.einsum('d,dl,l->', self.mu, self.lmbda, self.mu)\
               - np.sum(np.log(np.diag(self.lmbda_chol)))

    def log_likelihood(self, x):
        """Per-sample log-density; NaN rows are zeroed before the constant
        (log-partition / log-base) terms are added."""
        if isinstance(x, np.ndarray):
            bads = np.isnan(np.atleast_2d(x)).any(axis=1)
            x = np.nan_to_num(x, copy=False).reshape((-1, self.dim))
            log_lik = np.einsum('d,dl,nl->n', self.mu, self.lmbda, x, optimize=True)\
                      - 0.5 * np.einsum('nd,dl,nl->n', x, self.lmbda, x, optimize=True)
            log_lik[bads] = 0.
            log_lik += - self.log_partition() + self.log_base()
            return log_lik
        else:
            return list(map(self.log_likelihood, x))

    def max_likelihood(self, data, weights=None):
        """Closed-form ML estimate of the mean and precision diagonal."""
        # Stats order is [x, nd, nd, xx]: the count is stored twice, so
        # `nd` is deliberately bound twice to the same value here.
        x, nd, nd, xx = self.statistics(data) if weights is None\
            else self.weighted_statistics(data, weights)

        self.mu = x / nd
        self.lmbda_diag = 1. / (xx / nd - self.mu**2)
class StackedGaussiansWithDiagonalPrecision:
    """A stack of `size` independent `dim`-dimensional Gaussians, each with
    its own diagonal precision, exposing vectorized access to per-component
    parameters and sufficient statistics."""

    def __init__(self, size, dim, mus=None, lmbdas_diags=None):
        # size: number of components; dim: dimensionality of each Gaussian.
        self.size = size
        self.dim = dim

        mus = [None] * self.size if mus is None else mus
        lmbdas_diags = [None] * self.size if lmbdas_diags is None else lmbdas_diags
        # One GaussianWithDiagonalPrecision per component.
        self.dists = [GaussianWithDiagonalPrecision(dim, mus[k], lmbdas_diags[k])
                      for k in range(self.size)]

    @property
    def params(self):
        return self.mus, self.lmbdas_diags

    @params.setter
    def params(self, values):
        self.mus, self.lmbdas_diags = values

    @property
    def nb_params(self):
        # NOTE(review): uses the full-covariance count dim*(dim+1)/2 per
        # component even though each precision is diagonal -- confirm.
        return self.size * (self.dim + self.dim * (self.dim + 1) / 2)

    @property
    def nat_param(self):
        return self.std_to_nat(self.params)

    @nat_param.setter
    def nat_param(self, natparam):
        self.params = self.nat_to_std(natparam)

    def std_to_nat(self, params):
        # Convert per-component standard params, then stack along axis 0.
        params_list = list(zip(*params))
        natparams_list = [dist.std_to_nat(par) for dist, par in zip(self.dists, params_list)]
        natparams_stack = Stats(map(partial(np.stack, axis=0), zip(*natparams_list)))
        return natparams_stack

    def nat_to_std(self, natparam):
        # Inverse of std_to_nat: unstack, convert per component, restack.
        natparams_list = list(zip(*natparam))
        params_list = [dist.nat_to_std(par) for dist, par in zip(self.dists, natparams_list)]
        params_stack = tuple(map(partial(np.stack, axis=0), zip(*params_list)))
        return params_stack

    @property
    def mus(self):
        # (size, dim) array of component means.
        return np.array([dist.mu for dist in self.dists])

    @mus.setter
    def mus(self, value):
        for k, dist in enumerate(self.dists):
            dist.mu = value[k, ...]

    @property
    def lmbdas_diags(self):
        # (size, dim) array of precision diagonals.
        return np.array([dist.lmbda_diag for dist in self.dists])

    @lmbdas_diags.setter
    def lmbdas_diags(self, value):
        for k, dist in enumerate(self.dists):
            dist.lmbda_diag = value[k, ...]

    @property
    def lmbdas(self):
        # (size, dim, dim) stack of full precision matrices.
        return np.array([dist.lmbda for dist in self.dists])

    @property
    def lmbdas_chol(self):
        return np.array([dist.lmbda_chol for dist in self.dists])

    @property
    def lmbdas_chol_inv(self):
        return np.array([dist.lmbda_chol_inv for dist in self.dists])

    @property
    def sigmas_diags(self):
        return np.array([dist.sigma_diag for dist in self.dists])

    @property
    def sigmas(self):
        return np.array([dist.sigma for dist in self.dists])

    def mean(self):
        return np.array([dist.mean() for dist in self.dists])

    def mode(self):
        return np.array([dist.mode() for dist in self.dists])

    def rvs(self):
        # One independent sample per component, stacked into (size, dim).
        return np.array([dist.rvs() for dist in self.dists])

    @property
    def base(self):
        return np.array([dist.base for dist in self.dists])

    def log_base(self):
        return np.log(self.base)

    def statistics(self, data):
        """Unweighted sufficient statistics [xk, ndk, ndk, xxk], replicated
        across components (every component sees the same data)."""
        if isinstance(data, np.ndarray):
            # Drop rows containing NaNs (treated as missing observations).
            idx = ~np.isnan(data).any(axis=1)
            data = data[idx]

            c0, c1 = 'nd->d', 'nd,nd->d'
            x = np.einsum(c0, data, optimize=True)
            xx = np.einsum(c1, data, data, optimize=True)
            nd = np.broadcast_to(data.shape[0], (self.dim, ))

            # The same statistics replicated once per component.
            xk = np.array([x for _ in range(self.size)])
            xxk = np.array([xx for _ in range(self.size)])
            ndk = np.array([nd for _ in range(self.size)])

            return Stats([xk, ndk, ndk, xxk])
        else:
            # A collection of arrays: accumulate per-chunk statistics.
            stats = list(map(self.statistics, data))
            return reduce(add, stats)

    def weighted_statistics(self, data, weights):
        """Responsibility-weighted statistics; `weights` is (n, size)."""
        if isinstance(data, np.ndarray):
            idx = ~np.isnan(data).any(axis=1)
            data, weights = data[idx], weights[idx]

            xk = np.einsum('nk,nd->kd', weights, data)
            xxk = np.einsum('nd,nk,nd->kd', data, weights, data)
            ndk = np.broadcast_to(np.sum(weights, axis=0, keepdims=True), (self.size, self.dim))

            return Stats([xk, ndk, ndk, xxk])
        else:
            stats = list(map(self.weighted_statistics, data, weights))
            return reduce(add, stats)

    def log_partition(self):
        return np.array([dist.log_partition() for dist in self.dists])

    def log_likelihood(self, x):
        """Per-sample, per-component log-densities, shape (n, size); rows
        with NaNs are zeroed before the constant terms are added."""
        if isinstance(x, np.ndarray):
            bads = np.isnan(np.atleast_2d(x)).any(axis=1)
            x = np.nan_to_num(x, copy=False).reshape((-1, self.dim))
            log_lik = np.einsum('kd,kdl,nl->nk', self.mus, self.lmbdas, x, optimize=True)\
                      - 0.5 * np.einsum('nd,kdl,nl->nk', x, self.lmbdas, x, optimize=True)
            log_lik[bads] = 0.
            log_lik += - self.log_partition() + self.log_base()
            return log_lik
        else:
            return list(map(self.log_likelihood, x))

    def max_likelihood(self, data, weights):
        """Closed-form ML update of every component's mean and precision
        diagonal from weighted statistics."""
        # Stats order is [xk, ndk, ndk, xxk]: the count is stored twice, so
        # `ndk` is deliberately bound twice to the same value here.
        xk, ndk, ndk, xxk = self.weighted_statistics(data, weights)

        mus = np.zeros((self.size, self.dim))
        lmbdas_diags = np.zeros((self.size, self.dim))
        for k in range(self.size):
            mus[k] = xk[k] / ndk[k]
            # Jitter guards against division by zero for degenerate dims.
            lmbdas_diags[k] = 1. / (xxk[k] / ndk[k] - mus[k]**2 + 1e-16)

        self.mus = mus
        self.lmbdas_diags = lmbdas_diags
class TiedGaussiansWithDiagonalPrecision(StackedGaussiansWithDiagonalPrecision):
    """Stacked diagonal Gaussians sharing one pooled covariance diagonal.

    Component means are free; the covariance diagonal is pooled.
    """

    def __init__(self, size, dim, mus=None, lmbdas_diags=None):
        super(TiedGaussiansWithDiagonalPrecision, self).__init__(size, dim, mus, lmbdas_diags)

    def max_likelihood(self, data, weights):
        """ML update with the covariance diagonal pooled over components."""
        # Stats order is [xk, ndk, ndk, xxk]: the count appears twice, so
        # the duplicate slot is bound to `_` instead of rebinding `ndk`.
        xk, ndk, _, xxk = self.weighted_statistics(data, weights)

        xx = np.sum(xxk, axis=0)
        nd = np.sum(ndk, axis=0)

        mus = np.zeros((self.size, self.dim))
        # Pooled scatter: total squared sums minus per-component mean terms.
        sigma_diag = np.zeros((self.dim, ))
        sigma_diag += xx
        for k in range(self.size):
            mus[k] = xk[k] / ndk[k]
            sigma_diag -= ndk[k] * mus[k]**2
        sigma_diag /= nd

        self.mus = mus
        # Jitter guards against division by zero for degenerate dimensions.
        lmbda_diag = 1. / (sigma_diag + 1e-16)
        # All components receive the same shared precision diagonal.
        self.lmbdas_diags = np.array(self.size * [lmbda_diag])
class GaussianWithKnownMeanAndDiagonalPrecision(GaussianWithDiagonalPrecision):
    """Diagonal-precision Gaussian whose mean is fixed: only the precision
    diagonal is a free parameter."""

    def __init__(self, dim, mu=None, lmbda_diag=None):
        super(GaussianWithKnownMeanAndDiagonalPrecision, self).__init__(dim, mu, lmbda_diag)

    @property
    def params(self):
        return self.lmbda_diag

    @params.setter
    def params(self, values):
        self.lmbda_diag = values

    @property
    def nb_params(self):
        # Only the precision diagonal is free.
        return self.dim

    def statistics(self, data):
        """Sufficient statistics [n/2, -xx/2] with NaN rows dropped.

        Fix: the NaN mask now uses ``any(axis=1)`` to drop *rows*; the
        previous ``axis=0`` produced a (dim,)-length mask that indexed
        rows incorrectly (all sibling classes mask with ``axis=1``).
        """
        if isinstance(data, np.ndarray):
            # data is (n, dim); drop rows containing NaNs.
            idx = ~np.isnan(data).any(axis=1)
            data = data[idx]

            n = 0.5 * data.shape[0]
            xx = - 0.5 * np.einsum('nd,nd->d', data, data)
            return Stats([n, xx])
        else:
            # A collection of arrays: accumulate per-chunk statistics.
            stats = list(map(self.statistics, data))
            return reduce(add, stats)

    def weighted_statistics(self, data, weights):
        """Weighted sufficient statistics [sum(w)/2, -x^T diag(w) x / 2].

        Same axis fix as in ``statistics``: rows are masked with
        ``any(axis=1)``.
        """
        if isinstance(data, np.ndarray):
            idx = ~np.isnan(data).any(axis=1)
            data, weights = data[idx], weights[idx]

            n = 0.5 * np.sum(weights)
            xx = - 0.5 * np.einsum('nd,n,nd->d', data, weights, data)
            return Stats([n, xx])
        else:
            stats = list(map(self.weighted_statistics, data, weights))
            return reduce(add, stats)
| 30.108609
| 96
| 0.583803
| 3,100
| 22,732
| 4.166129
| 0.050645
| 0.036934
| 0.025087
| 0.028959
| 0.847077
| 0.808672
| 0.761905
| 0.745645
| 0.721719
| 0.699264
| 0
| 0.008848
| 0.289064
| 22,732
| 754
| 97
| 30.148541
| 0.790298
| 0.003123
| 0
| 0.75404
| 0
| 0
| 0.011255
| 0
| 0
| 0
| 0
| 0
| 0.012567
| 1
| 0.204668
| false
| 0
| 0.014363
| 0.089767
| 0.396768
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b9f708668c6dbd72e8e1bc46a36d13a247d28d64
| 8,891
|
py
|
Python
|
tests/test_tracers.py
|
johnpaulguzman/py-gql
|
5a2d180537218e1c30c65b2a933fb4fe197785ae
|
[
"MIT"
] | 6
|
2019-04-30T10:48:09.000Z
|
2021-08-19T15:57:53.000Z
|
tests/test_tracers.py
|
johnpaulguzman/py-gql
|
5a2d180537218e1c30c65b2a933fb4fe197785ae
|
[
"MIT"
] | 6
|
2019-04-08T12:39:08.000Z
|
2020-08-10T15:00:18.000Z
|
tests/test_tracers.py
|
johnpaulguzman/py-gql
|
5a2d180537218e1c30c65b2a933fb4fe197785ae
|
[
"MIT"
] | 2
|
2021-04-14T07:06:15.000Z
|
2021-08-19T15:58:46.000Z
|
# -*- coding: utf-8 -*-
import datetime
from py_gql import graphql_blocking
from py_gql.tracers import ApolloTracer
# Timestamps are not deterministic
class Any:
    """Wildcard matcher: compares equal to every value."""

    def __eq__(self, _other):
        return True
class AnyTimestamp:
    """Matcher for any string parseable as a ``%Y-%m-%dT%H:%M:%S.%fZ`` timestamp."""

    def __eq__(self, rhs):
        try:
            datetime.datetime.strptime(rhs, "%Y-%m-%dT%H:%M:%S.%fZ")
        except ValueError:
            return False
        return True
class AnyInt:
    """Matcher for any value convertible to ``int``."""

    def __eq__(self, rhs):
        # int() raises TypeError (not ValueError) for None, dicts, lists,
        # etc.; previously that exception escaped the comparison instead
        # of producing an inequality.
        try:
            int(rhs)
        except (ValueError, TypeError):
            return False
        return True
def test_ApolloTracer(starwars_schema):
    """ApolloTracer emits a complete tracing payload for a nested query."""

    def resolver_entry(path, parent_type, field_name, return_type):
        # Offsets and durations are nondeterministic -> matched by AnyInt().
        return {
            "path": path,
            "parentType": parent_type,
            "fieldName": field_name,
            "returnType": return_type,
            "startOffset": AnyInt(),
            "duration": AnyInt(),
        }

    tracer = ApolloTracer()
    graphql_blocking(
        starwars_schema,
        """
        query NestedQuery {
            hero {
                name
                friends {
                    name
                    appearsIn
                    friends {
                        name
                    }
                }
            }
        }
        """,
        instrumentation=tracer,
    )

    assert tracer.name == "tracing"
    assert tracer.payload() == {
        "version": 1,
        "startTime": AnyTimestamp(),
        "endTime": AnyTimestamp(),
        "duration": AnyInt(),
        "execution": {"resolvers": Any()},
        "validation": {"duration": AnyInt(), "startOffset": AnyInt()},
        "parsing": {"duration": AnyInt(), "startOffset": AnyInt()},
    }

    expected_resolvers = [
        resolver_entry(["hero"], "Query", "hero", "Character"),
        resolver_entry(["hero", "name"], "Droid", "name", "String"),
        resolver_entry(["hero", "friends"], "Droid", "friends", "[Character]"),
        resolver_entry(["hero", "friends", 0, "name"], "Human", "name", "String"),
        resolver_entry(["hero", "friends", 0, "appearsIn"], "Human", "appearsIn", "[Episode]"),
        resolver_entry(["hero", "friends", 0, "friends"], "Human", "friends", "[Character]"),
        resolver_entry(["hero", "friends", 0, "friends", 0, "name"], "Human", "name", "String"),
        resolver_entry(["hero", "friends", 0, "friends", 1, "name"], "Human", "name", "String"),
        resolver_entry(["hero", "friends", 0, "friends", 2, "name"], "Droid", "name", "String"),
        resolver_entry(["hero", "friends", 0, "friends", 3, "name"], "Droid", "name", "String"),
        resolver_entry(["hero", "friends", 1, "name"], "Human", "name", "String"),
        resolver_entry(["hero", "friends", 1, "appearsIn"], "Human", "appearsIn", "[Episode]"),
        resolver_entry(["hero", "friends", 1, "friends"], "Human", "friends", "[Character]"),
        resolver_entry(["hero", "friends", 1, "friends", 0, "name"], "Human", "name", "String"),
        resolver_entry(["hero", "friends", 1, "friends", 1, "name"], "Human", "name", "String"),
        resolver_entry(["hero", "friends", 1, "friends", 2, "name"], "Droid", "name", "String"),
        resolver_entry(["hero", "friends", 2, "name"], "Human", "name", "String"),
        resolver_entry(["hero", "friends", 2, "appearsIn"], "Human", "appearsIn", "[Episode]"),
        resolver_entry(["hero", "friends", 2, "friends"], "Human", "friends", "[Character]"),
        resolver_entry(["hero", "friends", 2, "friends", 0, "name"], "Human", "name", "String"),
        resolver_entry(["hero", "friends", 2, "friends", 1, "name"], "Human", "name", "String"),
        resolver_entry(["hero", "friends", 2, "friends", 2, "name"], "Droid", "name", "String"),
        resolver_entry(["hero", "friends", 2, "friends", 3, "name"], "Droid", "name", "String"),
    ]

    # Order is not deterministic.
    for expected in expected_resolvers:
        assert expected in tracer.payload()["execution"]["resolvers"]
def test_ApolloTracer_on_validation_error(starwars_schema):
    """No execution stats are reported when the query fails validation."""
    tracer = ApolloTracer()
    graphql_blocking(
        starwars_schema,
        """
        query NestedQuery {
            hero {
                nameasd # this is the validation error
                friends {
                    name
                    appearsIn
                    friends {
                        name
                    }
                }
            }
        }
        """,
        instrumentation=tracer,
    )

    assert tracer.name == "tracing"
    expected_payload = {
        "version": 1,
        "startTime": AnyTimestamp(),
        "endTime": AnyTimestamp(),
        "duration": AnyInt(),
        # Validation failed, so execution never ran.
        "execution": None,
        "validation": {"duration": AnyInt(), "startOffset": AnyInt()},
        "parsing": {"duration": AnyInt(), "startOffset": AnyInt()},
    }
    assert tracer.payload() == expected_payload
def test_ApolloTracer_on_syntax_error(starwars_schema):
    """Only parsing stats are reported when the document fails to parse."""
    tracer = ApolloTracer()
    graphql_blocking(
        starwars_schema,
        """
        FOO
        """,
        instrumentation=tracer,
    )

    assert tracer.name == "tracing"
    expected_payload = {
        "version": 1,
        "startTime": AnyTimestamp(),
        "endTime": AnyTimestamp(),
        "duration": AnyInt(),
        # Parsing failed, so neither validation nor execution ran.
        "execution": None,
        "validation": None,
        "parsing": {"duration": AnyInt(), "startOffset": AnyInt()},
    }
    assert tracer.payload() == expected_payload
| 28.225397
| 70
| 0.430098
| 595
| 8,891
| 6.366387
| 0.14958
| 0.114572
| 0.151795
| 0.188226
| 0.868268
| 0.850317
| 0.850317
| 0.828669
| 0.813358
| 0.793295
| 0
| 0.006576
| 0.401417
| 8,891
| 314
| 71
| 28.315287
| 0.705186
| 0.009223
| 0
| 0.62069
| 0
| 0
| 0.288492
| 0.002568
| 0
| 0
| 0
| 0
| 0.02682
| 1
| 0.022989
| false
| 0
| 0.011494
| 0.003831
| 0.065134
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.