hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
d195782ce9fdb1bb2d9381bd26cb6cd6175f3f5e
88
py
Python
catnip/__init__.py
ramadan8/Catnip
554127b2e4726c6677327c229d2bfff3c366f8d6
[ "MIT" ]
1
2021-11-23T12:24:10.000Z
2021-11-23T12:24:10.000Z
catnip/__init__.py
ramadan8/Catnip
554127b2e4726c6677327c229d2bfff3c366f8d6
[ "MIT" ]
null
null
null
catnip/__init__.py
ramadan8/Catnip
554127b2e4726c6677327c229d2bfff3c366f8d6
[ "MIT" ]
null
null
null
from .camera import Camera, Frame from .event import Event from .manager import Manager
22
33
0.806818
13
88
5.461538
0.461538
0
0
0
0
0
0
0
0
0
0
0
0.147727
88
3
34
29.333333
0.946667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
d19cc15777dc35d210b0dbff27704a9c386722c0
154
py
Python
tests/web_platform/CSS2/linebox/test_vertical_align_sub.py
fletchgraham/colosseum
77be4896ee52b8f5956a3d77b5f2ccd2c8608e8f
[ "BSD-3-Clause" ]
null
null
null
tests/web_platform/CSS2/linebox/test_vertical_align_sub.py
fletchgraham/colosseum
77be4896ee52b8f5956a3d77b5f2ccd2c8608e8f
[ "BSD-3-Clause" ]
null
null
null
tests/web_platform/CSS2/linebox/test_vertical_align_sub.py
fletchgraham/colosseum
77be4896ee52b8f5956a3d77b5f2ccd2c8608e8f
[ "BSD-3-Clause" ]
1
2020-01-16T01:56:41.000Z
2020-01-16T01:56:41.000Z
from tests.utils import W3CTestCase class TestVerticalAlignSub(W3CTestCase): vars().update(W3CTestCase.find_tests(__file__, 'vertical-align-sub-'))
25.666667
74
0.792208
17
154
6.882353
0.823529
0
0
0
0
0
0
0
0
0
0
0.021429
0.090909
154
5
75
30.8
0.814286
0
0
0
0
0
0.124183
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
061621a1e4baee947373d40f9b9a099b93d732ce
434
py
Python
Calibration/HcalAlCaRecoProducers/python/ALCARECOHcalCalIsoTrkProducerFilter_cff.py
malbouis/cmssw
16173a30d3f0c9ecc5419c474bb4d272c58b65c8
[ "Apache-2.0" ]
852
2015-01-11T21:03:51.000Z
2022-03-25T21:14:00.000Z
Calibration/HcalAlCaRecoProducers/python/ALCARECOHcalCalIsoTrkProducerFilter_cff.py
gartung/cmssw
3072dde3ce94dcd1791d778988198a44cde02162
[ "Apache-2.0" ]
30,371
2015-01-02T00:14:40.000Z
2022-03-31T23:26:05.000Z
Calibration/HcalAlCaRecoProducers/python/ALCARECOHcalCalIsoTrkProducerFilter_cff.py
gartung/cmssw
3072dde3ce94dcd1791d778988198a44cde02162
[ "Apache-2.0" ]
3,240
2015-01-02T05:53:18.000Z
2022-03-31T17:24:21.000Z
import FWCore.ParameterSet.Config as cms #------------------------------------------------ #AlCaReco filtering for HCAL isotrk: #------------------------------------------------ from Calibration.HcalAlCaRecoProducers.alcaHcalIsotrkProducer_cfi import * from Calibration.HcalAlCaRecoProducers.alcaHcalIsotrkFilter_cfi import * seqALCARECOHcalCalIsoTrkProducerFilter = cms.Sequence(alcaHcalIsotrkProducer * alcaHcalIsotrkFilter)
28.933333
100
0.668203
28
434
10.285714
0.678571
0.104167
0.25
0
0
0
0
0
0
0
0
0
0.071429
434
14
101
31
0.71464
0.301843
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.75
0
0.75
0
1
0
1
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
ae4318490ba3aa13ce5892b18991a31e29c8f969
46
py
Python
multi_dimensional_clustering/__init__.py
PieterMey/multi_dimensional_clustering
2e678f5ada62873fbac01fcdcb8a891f2239be01
[ "Apache-2.0" ]
1
2022-03-23T16:40:11.000Z
2022-03-23T16:40:11.000Z
multi_dimensional_clustering/__init__.py
PieterMey/multi_dimensional_clustering
2e678f5ada62873fbac01fcdcb8a891f2239be01
[ "Apache-2.0" ]
null
null
null
multi_dimensional_clustering/__init__.py
PieterMey/multi_dimensional_clustering
2e678f5ada62873fbac01fcdcb8a891f2239be01
[ "Apache-2.0" ]
1
2022-03-25T08:25:17.000Z
2022-03-25T08:25:17.000Z
from .multi_D_clustering import MD_clustering
23
45
0.891304
7
46
5.428571
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.086957
46
1
46
46
0.904762
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
ae4bf3c4691013d8afd3b0fdf16f6f00f7db1495
127
py
Python
dstl/translate/__init__.py
kabirkhan/dstl
9bfbba27e99da4736d460888ea908a71bd1c7ae9
[ "MIT" ]
2
2021-05-12T09:22:04.000Z
2021-10-12T10:57:08.000Z
dstl/translate/__init__.py
kabirkhan/dstl
9bfbba27e99da4736d460888ea908a71bd1c7ae9
[ "MIT" ]
null
null
null
dstl/translate/__init__.py
kabirkhan/dstl
9bfbba27e99da4736d460888ea908a71bd1c7ae9
[ "MIT" ]
null
null
null
from .azure import AzureTranslator from .google import GoogleTranslator from .transformers import TransformersMarianTranslator
31.75
54
0.88189
12
127
9.333333
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.094488
127
3
55
42.333333
0.973913
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
ae4c394ca32afb4041b48e53393bfd2f12c61d9a
288
py
Python
src/ising_animate/__init__.py
davifeliciano/ising_model
7f9beff5647a747a1b64b97012af2c6c040fddb7
[ "MIT" ]
2
2021-10-15T10:58:20.000Z
2021-10-31T15:59:04.000Z
src/ising_animate/__init__.py
davifeliciano/ising_animate
7f9beff5647a747a1b64b97012af2c6c040fddb7
[ "MIT" ]
null
null
null
src/ising_animate/__init__.py
davifeliciano/ising_animate
7f9beff5647a747a1b64b97012af2c6c040fddb7
[ "MIT" ]
null
null
null
""" A Python Package to easily generate animations of the Ising Model using the Metropolis Algorithm, the most commonly used Markov Chain Monte Carlo method to calculate estimations for this system. """ from .ising import Ising, AnimatedIsing, CoolingAnimatedIsing, DynamicAnimatedIsing
48
98
0.819444
37
288
6.378378
0.864865
0
0
0
0
0
0
0
0
0
0
0
0.138889
288
5
99
57.6
0.951613
0.677083
0
0
1
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
ae5bb27580ff11d1812002bb7c2710f1710c80b6
418
py
Python
test/run_all_tests.py
wjchen84/rapprentice
9232a6a21e2c80f00854912f07dcdc725b0be95a
[ "BSD-2-Clause" ]
23
2015-08-25T19:40:18.000Z
2020-12-27T09:23:06.000Z
test/run_all_tests.py
wjchen84/rapprentice
9232a6a21e2c80f00854912f07dcdc725b0be95a
[ "BSD-2-Clause" ]
null
null
null
test/run_all_tests.py
wjchen84/rapprentice
9232a6a21e2c80f00854912f07dcdc725b0be95a
[ "BSD-2-Clause" ]
8
2016-05-18T20:13:06.000Z
2020-11-03T16:09:50.000Z
import rapprentice, os, os.path as osp from rapprentice.call_and_print import call_and_print assert osp.basename(os.getcwd()) == "test" call_and_print("python tps_unit_tests.py") call_and_print("python ../scripts/download_sampledata.py ~/Data --use_rsync") call_and_print("python ../scripts/generate_h5.py ~/Data/sampledata/overhand/overhand.yaml") call_and_print("python test_registration_synthetic.py --plotting=0")
52.25
91
0.80622
64
418
4.96875
0.515625
0.132075
0.226415
0.226415
0.157233
0
0
0
0
0
0
0.005102
0.062201
418
7
92
59.714286
0.806122
0
0
0
0
0
0.502392
0.30622
0
0
0
0
0.142857
1
0
true
0
0.285714
0
0.285714
0.714286
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
ae5e0d710108d5b4ce6c5bcd9dad8d7e645a849f
313
py
Python
mutations/mutation_role/mutation.py
akarapun/elearning
fe116d5815925269819061ea183cbfdb773844cf
[ "MIT" ]
1
2020-03-14T11:00:14.000Z
2020-03-14T11:00:14.000Z
mutations/mutation_role/mutation.py
akarapun/elearning
fe116d5815925269819061ea183cbfdb773844cf
[ "MIT" ]
null
null
null
mutations/mutation_role/mutation.py
akarapun/elearning
fe116d5815925269819061ea183cbfdb773844cf
[ "MIT" ]
null
null
null
import graphene from mutation_role.createRole import CreateRoleMutation from mutation_role.updateRole import UpdateRoleMutation from mutation_role.deleteRole import DeleteRoleMutation class RoleMutation( CreateRoleMutation, UpdateRoleMutation, DeleteRoleMutation, graphene.ObjectType): pass
24.076923
55
0.830671
28
313
9.178571
0.535714
0.140078
0.18677
0
0
0
0
0
0
0
0
0
0.13738
313
12
56
26.083333
0.951852
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.1
0.4
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
0
0
0
5
ae652a7bbf5b9b656563f1d897b0c7efb679fce4
143
py
Python
tests/conftest.py
DenMaslov/dz4
3b970c4d12339c0711905601695335a6e376d8d3
[ "MIT" ]
null
null
null
tests/conftest.py
DenMaslov/dz4
3b970c4d12339c0711905601695335a6e376d8d3
[ "MIT" ]
null
null
null
tests/conftest.py
DenMaslov/dz4
3b970c4d12339c0711905601695335a6e376d8d3
[ "MIT" ]
null
null
null
import pytest from dz4.calculator.calculator import Calculator @pytest.fixture() def calculator() -> Calculator: return Calculator()
11.916667
48
0.748252
15
143
7.133333
0.533333
0.373832
0
0
0
0
0
0
0
0
0
0.008333
0.160839
143
11
49
13
0.883333
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
true
0
0.4
0.2
0.8
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
1
0
0
0
5
88375c88dad4196964e4ef4c3c059af714e55438
284
py
Python
container/src/models/note.py
PowercoderJr/oonote
619e7a0146c0239232b3d9a7effe597ab899b88e
[ "MIT" ]
null
null
null
container/src/models/note.py
PowercoderJr/oonote
619e7a0146c0239232b3d9a7effe597ab899b88e
[ "MIT" ]
null
null
null
container/src/models/note.py
PowercoderJr/oonote
619e7a0146c0239232b3d9a7effe597ab899b88e
[ "MIT" ]
null
null
null
from app import db class Note(db.Model): id_ = db.Column(db.String(16), primary_key=True) text = db.Column(db.Text) response = db.Column(db.String(100)) created_at = db.Column(db.DateTime) read_at = db.Column(db.DateTime) password = db.Column(db.String(64))
25.818182
52
0.672535
46
284
4.065217
0.5
0.256684
0.320856
0.256684
0.213904
0
0
0
0
0
0
0.030043
0.179577
284
10
53
28.4
0.772532
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0.125
0.125
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
5
8863a97d566121460c264ae8c8c13b4b476fde3e
86
py
Python
moosetools/__init__.py
NidorFanClub/oatcogs
8b17770a895513b8c80cb64e9f4f9b8b98f57a3e
[ "MIT" ]
1
2022-01-10T10:46:22.000Z
2022-01-10T10:46:22.000Z
moosetools/__init__.py
UntitledVeganServer/oatcogs
8b17770a895513b8c80cb64e9f4f9b8b98f57a3e
[ "MIT" ]
11
2022-01-28T00:25:08.000Z
2022-03-27T21:35:53.000Z
moosetools/__init__.py
NidorFanClub/oatcogs
8b17770a895513b8c80cb64e9f4f9b8b98f57a3e
[ "MIT" ]
null
null
null
from .moosetools import MooseTools def setup(bot): bot.add_cog(MooseTools())
17.2
35
0.709302
11
86
5.454545
0.727273
0
0
0
0
0
0
0
0
0
0
0
0.186047
86
4
36
21.5
0.857143
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
5
88815ba30f14b60c626fa9aadcc1d0d61ff2ef9a
72
py
Python
example/is_main_example.py
jamesabel/ismain
d97c01ed81a7fa72cf7ca744ec139b81f23008a0
[ "MIT" ]
1
2019-10-12T20:13:41.000Z
2019-10-12T20:13:41.000Z
example/is_main_example.py
jamesabel/ismain
d97c01ed81a7fa72cf7ca744ec139b81f23008a0
[ "MIT" ]
1
2020-11-05T19:20:09.000Z
2020-11-05T19:20:09.000Z
example/is_main_example.py
jamesabel/ismain
d97c01ed81a7fa72cf7ca744ec139b81f23008a0
[ "MIT" ]
null
null
null
from ismain import is_main if is_main(): print("Hello from main.")
14.4
29
0.694444
12
72
4
0.666667
0.25
0
0
0
0
0
0
0
0
0
0
0.194444
72
4
30
18
0.827586
0
0
0
0
0
0.222222
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0.333333
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
ee294d78edcc6be3fe6c777a0f5cc53b056c3922
302
py
Python
et_micc_build/__init__.py
etijskens/et-micc-build
adf60a142b11af145e1129c17de00a32a61c84ce
[ "MIT" ]
null
null
null
et_micc_build/__init__.py
etijskens/et-micc-build
adf60a142b11af145e1129c17de00a32a61c84ce
[ "MIT" ]
5
2019-11-26T11:42:48.000Z
2020-06-12T09:36:16.000Z
et_micc_build/__init__.py
etijskens/et-micc-build
adf60a142b11af145e1129c17de00a32a61c84ce
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ Package et_micc_build ===================== Top-level package for et_micc_build. """ import et_micc_build.cli_micc_build import et_micc # et-micc and et--micc-build have identical version string, although # they are different packages. __version__ = et_micc.__version__
20.133333
68
0.708609
43
302
4.55814
0.511628
0.214286
0.22449
0.173469
0.214286
0
0
0
0
0
0
0.003802
0.129139
302
14
69
21.571429
0.741445
0.662252
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
ee3ae10133edd3d7a05ec0da969be95308a6a693
39
py
Python
deepforest/_version.py
ethanwhite/DeepForest-pytorch
027228b431cf56979ea683c43bcf53ad86eb0cfd
[ "MIT" ]
null
null
null
deepforest/_version.py
ethanwhite/DeepForest-pytorch
027228b431cf56979ea683c43bcf53ad86eb0cfd
[ "MIT" ]
null
null
null
deepforest/_version.py
ethanwhite/DeepForest-pytorch
027228b431cf56979ea683c43bcf53ad86eb0cfd
[ "MIT" ]
null
null
null
__version__ = '__version__ = '0.1.43''
19.5
38
0.666667
5
39
3.6
0.8
0
0
0
0
0
0
0
0
0
0
0.117647
0.128205
39
1
39
39
0.411765
0
0
0
0
0
0.358974
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
5
ee58d33fb8c1a48ee8c398500f70287b7013d108
144
py
Python
CarRegistration/__init__.py
infiniteloopltd/PyCarRegistrationAPI
2d5b2237caef695325d132a548341bab3ea73938
[ "MIT" ]
null
null
null
CarRegistration/__init__.py
infiniteloopltd/PyCarRegistrationAPI
2d5b2237caef695325d132a548341bab3ea73938
[ "MIT" ]
null
null
null
CarRegistration/__init__.py
infiniteloopltd/PyCarRegistrationAPI
2d5b2237caef695325d132a548341bab3ea73938
[ "MIT" ]
null
null
null
#!/usr/bin/env python # Copyright 2012 Locu <maksims@locu.com> <kkamalov@locu.com> from api import *
36
62
0.513889
16
144
4.625
0.8125
0.189189
0
0
0
0
0
0
0
0
0
0.044944
0.381944
144
3
63
48
0.786517
0.833333
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
ee843b348034f772d0e9893aec548bce29b682de
110
py
Python
sprites/__init__.py
ghandic/FinalSpace
2c048d6e64d4784832cdf0d256494dfbfd357be6
[ "MIT" ]
null
null
null
sprites/__init__.py
ghandic/FinalSpace
2c048d6e64d4784832cdf0d256494dfbfd357be6
[ "MIT" ]
null
null
null
sprites/__init__.py
ghandic/FinalSpace
2c048d6e64d4784832cdf0d256494dfbfd357be6
[ "MIT" ]
null
null
null
from .earth import Earth from .enemy import Enemy from .player import Player from .explosion import Explosion
22
32
0.818182
16
110
5.625
0.375
0
0
0
0
0
0
0
0
0
0
0
0.145455
110
4
33
27.5
0.957447
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
c9d6ffde5a427a71fde0b0009ea514f5c2a17e28
106
py
Python
ifthen/statements/thens/0001.py
tinyx/yitao.io
8a3a75016e417b4c158bca0ceae98a589b2adff2
[ "MIT" ]
null
null
null
ifthen/statements/thens/0001.py
tinyx/yitao.io
8a3a75016e417b4c158bca0ceae98a589b2adff2
[ "MIT" ]
12
2020-06-05T19:26:11.000Z
2022-03-11T23:33:24.000Z
ifthen/statements/thens/0001.py
tinyx/yitao.io
8a3a75016e417b4c158bca0ceae98a589b2adff2
[ "MIT" ]
null
null
null
def execute(operating_player, opponent_player): operating_player.attack = operating_player.attack + 5
35.333333
57
0.811321
13
106
6.307692
0.538462
0.54878
0.512195
0
0
0
0
0
0
0
0
0.010638
0.113208
106
2
58
53
0.861702
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
5
4e56eeb6602a3b17f651d53f9d8f8c4cbe4c5a40
142
py
Python
students/admin.py
wjarczak/WSB-CRUD
fe866bf848cb06e9b747ab649ef6f821e06b613e
[ "MIT" ]
null
null
null
students/admin.py
wjarczak/WSB-CRUD
fe866bf848cb06e9b747ab649ef6f821e06b613e
[ "MIT" ]
null
null
null
students/admin.py
wjarczak/WSB-CRUD
fe866bf848cb06e9b747ab649ef6f821e06b613e
[ "MIT" ]
null
null
null
from django.contrib import admin from .models import Student, Representative admin.site.register(Student) admin.site.register(Representative)
28.4
43
0.84507
18
142
6.666667
0.555556
0.15
0.283333
0
0
0
0
0
0
0
0
0
0.077465
142
5
44
28.4
0.916031
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
4eb6577486dfa10dc3c459fcc28dd50d4af9a280
12,797
py
Python
algoritmo.py
jorgemauricio/procesamiento_he5
931a7d9278b7b6ff84f5cea85452f695a3cafb93
[ "MIT" ]
null
null
null
algoritmo.py
jorgemauricio/procesamiento_he5
931a7d9278b7b6ff84f5cea85452f695a3cafb93
[ "MIT" ]
null
null
null
algoritmo.py
jorgemauricio/procesamiento_he5
931a7d9278b7b6ff84f5cea85452f695a3cafb93
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ ####################################### # Script que permite la generación # automática de mapas de particulas en el # aire # Author: Jorge Mauricio # Email: jorge.ernesto.mauricio@gmail.com # Date: Created on Thu Sep 28 08:38:15 2017 # Version: 1.0 ####################################### """ # librerías import os import pandas as pd import numpy as np import h5py import requests from bs4 import BeautifulSoup import urllib.request import sys import matplotlib.pyplot as plt from mpl_toolkits.basemap import Basemap import time from time import gmtime, strftime import schedule # límites Lat y Long LONG_MIN = -115 LONG_MAX = -111 LAT_MIN = 29 LAT_MAX = 32 PATH = "/home/jorge/Documents/Research/procesamiento_he5" array_URLs = ["https://acdisc.gesdisc.eosdis.nasa.gov/data/Aura_OMI_Level3/OMNO2d.003/2018/", "https://acdisc.gsfc.nasa.gov/data/Aura_OMI_Level3/OMDOAO3e.003/2018/", "https://acdisc.gsfc.nasa.gov/data/Aura_OMI_Level3/OMSO2e.003/2018/", "https://acdisc.gsfc.nasa.gov/data/Aura_OMI_Level3/OMTO3e.003/2018/", "https://acdisc.gesdisc.eosdis.nasa.gov/data/Aura_OMI_Level3/OMAEROe.003/2018/"] #array_URLs = ["https://acdisc.gesdisc.eosdis.nasa.gov/data/Aura_OMI_Level3/OMNO2d.003/2018/"] array_Archivo = [] # JOB def job(): # fecha de la descarga # descarga de información descarga_de_archivos() procesamientoNO2() procesamientoO3() procesamientoSO2() procesamientoTO3() procesamientoAERO() # función para procesar NO2 def procesamientoNO2(): # clear plt plt.clf() # Open file. FILE_NAME = array_Archivo[0] DATAFIELD_NAME = 'HDFEOS/GRIDS/ColumnAmountNO2/Data Fields/ColumnAmountNO2' with h5py.File(FILE_NAME, mode='r') as f: # Read dataset. dset = f[DATAFIELD_NAME] data = dset[:] # Handle fill value. data[data == dset.fillvalue] = np.nan data = np.ma.masked_where(np.isnan(data), data) # Get attributes needed for the plot. # String attributes actually come in as the bytes type and should # be decoded to UTF-8 (python3). 
title = dset.attrs['Title'].decode() units = dset.attrs['Units'].decode() # There is no geolocation data, so construct it ourselves. longitude = np.arange(0., 1440.0) * 0.25 - 180 + 0.125 latitude = np.arange(0., 720.0) * 0.25 - 90 + 0.125 # leer coordenadas dataEstaciones = pd.read_csv("{}/data/coordenadas_estaciones.csv".format(PATH)) xC = np.array(dataEstaciones['Long']) yC = np.array(dataEstaciones['Lat']) # Draw an equidistant cylindrical projection using the low resolution # coastline database. m = Basemap(projection='cyl', resolution='l', llcrnrlat=LAT_MIN, urcrnrlat = LAT_MAX, llcrnrlon=LONG_MIN, urcrnrlon = LONG_MAX) m.scatter(xC, yC, latlon=True, s=1, marker='o', color='r', zorder=25) m.drawcoastlines(linewidth=0.5) m.drawparallels(np.arange(-90., 120., 30.), labels=[1, 0, 0, 0]) m.drawmeridians(np.arange(-180, 180., 45.), labels=[0, 0, 0, 1]) m.pcolormesh(longitude, latitude, data, latlon=True, cmap='jet') cb = m.colorbar() cb.set_label(units) basename = os.path.basename(FILE_NAME) plt.title('{0}\n{1}'.format(basename, title)) fig = plt.gcf() # plt.show() pngfile = "{}.png".format(basename) fig.savefig(pngfile, dpi=600) # función para procesar 03 def procesamientoO3(): # clear plt plt.clf() # Open file. FILE_NAME = array_Archivo[1] DATAFIELD_NAME = 'HDFEOS/GRIDS/ColumnAmountO3/Data Fields/ColumnAmountO3' with h5py.File(FILE_NAME, mode='r') as f: # Read dataset. dset = f[DATAFIELD_NAME] data = dset[:] # Handle fill value. data[data == dset.fillvalue] = np.nan data = np.ma.masked_where(np.isnan(data), data) # Get attributes needed for the plot. # String attributes actually come in as the bytes type and should # be decoded to UTF-8 (python3). title = dset.attrs['Title'].decode() units = dset.attrs['Units'].decode() # There is no geolocation data, so construct it ourselves. 
longitude = np.arange(0., 1440.0) * 0.25 - 180 + 0.125 latitude = np.arange(0., 720.0) * 0.25 - 90 + 0.125 # leer coordenadas dataEstaciones = pd.read_csv("{}/data/coordenadas_estaciones.csv".format(PATH)) xC = np.array(dataEstaciones['Long']) yC = np.array(dataEstaciones['Lat']) # Draw an equidistant cylindrical projection using the low resolution # coastline database. m = Basemap(projection='cyl', resolution='l', llcrnrlat=LAT_MIN, urcrnrlat = LAT_MAX, llcrnrlon=LONG_MIN, urcrnrlon = LONG_MAX) m.scatter(xC, yC, latlon=True, s=1, marker='o', color='r', zorder=25) m.drawcoastlines(linewidth=0.5) m.drawparallels(np.arange(-90., 120., 30.), labels=[1, 0, 0, 0]) m.drawmeridians(np.arange(-180, 180., 45.), labels=[0, 0, 0, 1]) m.pcolormesh(longitude, latitude, data, latlon=True, cmap='jet') cb = m.colorbar() cb.set_label(units) basename = os.path.basename(FILE_NAME) plt.title('{0}\n{1}'.format(basename, title)) fig = plt.gcf() # plt.show() pngfile = "{}.png".format(basename) fig.savefig(pngfile, dpi=600) # función para procesar SO2 def procesamientoSO2(): # clear plt plt.clf() # Open file. FILE_NAME = array_Archivo[2] DATAFIELD_NAME = 'HDFEOS/GRIDS/OMI Total Column Amount SO2/Data Fields/ColumnAmountSO2_PBL' with h5py.File(FILE_NAME, mode='r') as f: # Read dataset. dset = f[DATAFIELD_NAME] data = dset[:] # Handle fill value. data[data == dset.fillvalue] = np.nan data = np.ma.masked_where(np.isnan(data), data) # Get attributes needed for the plot. # String attributes actually come in as the bytes type and should # be decoded to UTF-8 (python3). title = dset.attrs['Title'].decode() units = dset.attrs['Units'].decode() # There is no geolocation data, so construct it ourselves. 
longitude = np.arange(0., 1440.0) * 0.25 - 180 + 0.125 latitude = np.arange(0., 720.0) * 0.25 - 90 + 0.125 # leer coordenadas dataEstaciones = pd.read_csv("{}/data/coordenadas_estaciones.csv".format(PATH)) xC = np.array(dataEstaciones['Long']) yC = np.array(dataEstaciones['Lat']) # Draw an equidistant cylindrical projection using the low resolution # coastline database. m = Basemap(projection='cyl', resolution='l', llcrnrlat=LAT_MIN, urcrnrlat = LAT_MAX, llcrnrlon=LONG_MIN, urcrnrlon = LONG_MAX) m.scatter(xC, yC, latlon=True, s=1, marker='o', color='r', zorder=25) m.drawcoastlines(linewidth=0.5) m.drawparallels(np.arange(-90., 120., 30.), labels=[1, 0, 0, 0]) m.drawmeridians(np.arange(-180, 180., 45.), labels=[0, 0, 0, 1]) m.pcolormesh(longitude, latitude, data, latlon=True, cmap='jet') cb = m.colorbar() cb.set_label(units) basename = os.path.basename(FILE_NAME) plt.title('{0}\n{1}'.format(basename, title)) fig = plt.gcf() # plt.show() pngfile = "{}.png".format(basename) fig.savefig(pngfile, dpi=600) # función para procesar TO3 def procesamientoTO3(): # clear plt plt.clf() # Open file. FILE_NAME = array_Archivo[3] DATAFIELD_NAME = 'HDFEOS/GRIDS/OMI Column Amount O3/Data Fields/ColumnAmountO3' with h5py.File(FILE_NAME, mode='r') as f: # Read dataset. dset = f[DATAFIELD_NAME] data = dset[:] # Handle fill value. data[data == dset.fillvalue] = np.nan data = np.ma.masked_where(np.isnan(data), data) # Get attributes needed for the plot. # String attributes actually come in as the bytes type and should # be decoded to UTF-8 (python3). title = dset.attrs['Title'].decode() units = dset.attrs['Units'].decode() # There is no geolocation data, so construct it ourselves. 
longitude = np.arange(0., 1440.0) * 0.25 - 180 + 0.125 latitude = np.arange(0., 720.0) * 0.25 - 90 + 0.125 # leer coordenadas dataEstaciones = pd.read_csv("{}/data/coordenadas_estaciones.csv".format(PATH)) xC = np.array(dataEstaciones['Long']) yC = np.array(dataEstaciones['Lat']) # Draw an equidistant cylindrical projection using the low resolution # coastline database. m = Basemap(projection='cyl', resolution='l', llcrnrlat=LAT_MIN, urcrnrlat = LAT_MAX, llcrnrlon=LONG_MIN, urcrnrlon = LONG_MAX) m.scatter(xC, yC, latlon=True, s=1, marker='o', color='r', zorder=25) m.drawcoastlines(linewidth=0.5) m.drawparallels(np.arange(-90., 120., 30.), labels=[1, 0, 0, 0]) m.drawmeridians(np.arange(-180, 180., 45.), labels=[0, 0, 0, 1]) m.pcolormesh(longitude, latitude, data, latlon=True, cmap='jet') cb = m.colorbar() cb.set_label(units) basename = os.path.basename(FILE_NAME) plt.title('{0}\n{1}'.format(basename, title)) fig = plt.gcf() # plt.show() pngfile = "{}.png".format(basename) fig.savefig(pngfile, dpi=600) # función para procesar TO3 def procesamientoAERO(): # clear plt plt.clf() # Open file. FILE_NAME = array_Archivo[4] DATAFIELD_NAME = 'HDFEOS/GRIDS/ColumnAmountAerosol/Data Fields/UVAerosolIndex' with h5py.File(FILE_NAME, mode='r') as f: # Read dataset. dset = f[DATAFIELD_NAME] data = dset[:] # Handle fill value. data[data == dset.fillvalue] = np.nan data = np.ma.masked_where(np.isnan(data), data) # Get attributes needed for the plot. # String attributes actually come in as the bytes type and should # be decoded to UTF-8 (python3). title = dset.attrs['Title'].decode() units = dset.attrs['Units'].decode() # There is no geolocation data, so construct it ourselves. 
longitude = np.arange(0., 1440.0) * 0.25 - 180 + 0.125 latitude = np.arange(0., 720.0) * 0.25 - 90 + 0.125 # leer coordenadas dataEstaciones = pd.read_csv("{}/data/coordenadas_estaciones.csv".format(PATH)) xC = np.array(dataEstaciones['Long']) yC = np.array(dataEstaciones['Lat']) # Draw an equidistant cylindrical projection using the low resolution # coastline database. m = Basemap(projection='cyl', resolution='l', llcrnrlat=LAT_MIN, urcrnrlat = LAT_MAX, llcrnrlon=LONG_MIN, urcrnrlon = LONG_MAX) m.scatter(xC, yC, latlon=True, s=1, marker='o', color='r', zorder=25) m.drawcoastlines(linewidth=0.5) m.drawparallels(np.arange(-90., 120., 30.), labels=[1, 0, 0, 0]) m.drawmeridians(np.arange(-180, 180., 45.), labels=[0, 0, 0, 1]) m.pcolormesh(longitude, latitude, data, latlon=True, cmap='jet') cb = m.colorbar() cb.set_label(units) basename = os.path.basename(FILE_NAME) plt.title('{0}\n{1}'.format(basename, title)) fig = plt.gcf() # plt.show() pngfile = "{}.png".format(basename) fig.savefig(pngfile, dpi=600) # función descarga de archivos def descarga_de_archivos(): # fecha de la descarga fechaPronostico = strftime("%Y-%m-%d") # cambiar a carpeta data os.chdir("{}/data".format(PATH)) # crear directorio de fecha de descarga os.mkdir("{}/data/{}".format(PATH,fechaPronostico)) # cambiar de directorio a data os.chdir("{}/data/{}".format(PATH,fechaPronostico)) # ciclo para la descarga de información for URL in array_URLs: # generar la consulta de información r = requests.get(URL) # parsear el html para determinar los links a descargar soup = BeautifulSoup(r.text, "html.parser") # crear un array para guardar los links array_links = [] # ciclo para filtrar los links con información for link in soup.find_all("a"): array_links.append(link.get("href")) # nombre del archivo a descargar nombre_archivo = array_links[-5] # imprimir el nombre del archivo print(nombre_archivo) # guardar el nombre del archivo para el post procesamiento array_Archivo.append(nombre_archivo) # generar el url 
para la descarga de información URL_DESCARGA = "{}{}".format(URL, nombre_archivo) # print url de descarga print(URL_DESCARGA) os.system("wget --load-cookies ~/.urs_cookies --save-cookies ~/.urs_cookies --keep-session-cookies {}".format(URL_DESCARGA)) schedule.every().day.at("07:00").do(job) while 1: schedule.run_pending() time.sleep(1)
32.812821
132
0.635696
1,725
12,797
4.651014
0.17913
0.007479
0.014957
0.011218
0.757821
0.74436
0.738128
0.738128
0.738128
0.738128
0
0.044815
0.222318
12,797
389
133
32.897172
0.761355
0.231382
0
0.676329
0
0.009662
0.125962
0.046504
0
0
0
0
0
1
0.033816
false
0
0.062802
0
0.096618
0.009662
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
14d09aebc19521be5aaa6063a2a93a5ace207e9d
54
py
Python
django_filters/__init__.py
buriy/django-filter
7abbb68f103e00031bbc0e882dfa2ccfd3b717d6
[ "BSD-3-Clause" ]
null
null
null
django_filters/__init__.py
buriy/django-filter
7abbb68f103e00031bbc0e882dfa2ccfd3b717d6
[ "BSD-3-Clause" ]
null
null
null
django_filters/__init__.py
buriy/django-filter
7abbb68f103e00031bbc0e882dfa2ccfd3b717d6
[ "BSD-3-Clause" ]
1
2019-02-24T08:35:08.000Z
2019-02-24T08:35:08.000Z
from filterset import FilterSet from filters import *
18
31
0.833333
7
54
6.428571
0.571429
0
0
0
0
0
0
0
0
0
0
0
0.148148
54
2
32
27
0.978261
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
090681d321b51766a36c06a225aae094402f0802
15,814
py
Python
tests/test_utilities.py
andymeneely/attack-surface-metrics
9cef791a79771ee29f18a0da2159f36c3df32755
[ "MIT" ]
16
2015-12-25T10:53:10.000Z
2022-02-26T08:27:55.000Z
tests/test_utilities.py
andymeneely/attack-surface-metrics
9cef791a79771ee29f18a0da2159f36c3df32755
[ "MIT" ]
30
2015-01-29T19:34:31.000Z
2021-06-10T17:22:57.000Z
tests/test_utilities.py
andymeneely/attack-surface-metrics
9cef791a79771ee29f18a0da2159f36c3df32755
[ "MIT" ]
4
2016-11-03T15:59:42.000Z
2020-10-29T17:56:59.000Z
import copy import os import unittest import networkx as nx from attacksurfacemeter import utilities from attacksurfacemeter.call import Call from attacksurfacemeter.call_graph import CallGraph from attacksurfacemeter.environments import Environments from attacksurfacemeter.loaders.cflow_loader import CflowLoader from attacksurfacemeter.loaders.gprof_loader import GprofLoader class UtilitiesTestCase(unittest.TestCase): def test_fix(self): # Arrange target = CallGraph.from_loader( CflowLoader( os.path.join( os.path.dirname(os.path.realpath(__file__)), 'helloworld/cflow.callgraph.r.mod.txt' ), True ) ) _target = copy.deepcopy(target) reference = CallGraph.from_loader( GprofLoader( os.path.join( os.path.dirname(os.path.realpath(__file__)), 'helloworld/gprof.callgraph.txt' ) ) ) expected = { 'before': Call('GreeterSayHi', '', Environments.C), 'after': Call('GreeterSayHi', './src/helloworld.c', Environments.C) } # Act utilities.fix(target, using=reference) actual = { 'before': next( i for (i, _) in _target.nodes if i.function_name == 'GreeterSayHi' ), 'after': next( i for (i, _) in target.nodes if i.function_name == 'GreeterSayHi' ) } # Assert self.assertEqual(expected['before'], actual['before']) self.assertEqual(expected['after'], actual['after']) # Asserting if node attributes got carried over self.assertCountEqual( [ attrs for (i, attrs) in _target.nodes if i == expected['before'] ], [ attrs for (i, attrs) in target.nodes if i == expected['after'] ] ) # Asserting if edge attributes got carried over self.assertCountEqual( [ attrs for (i, j, attrs) in _target.edges if i == expected['before'] or j == expected['before'] ], [ attrs for (i, j, attrs) in target.edges if i == expected['after'] or j == expected['after'] ], ) # Asserting if OTHER nodes and their attributes got carried over self.assertCountEqual( [ (i, attrs) for (i, attrs) in _target.nodes if i != expected['before'] ], [ (i, attrs) for (i, attrs) in target.nodes if i != expected['after'] ] ) # Asserting if 
OTHER edges and their attributes got carried over self.assertCountEqual( [ (i, j, attrs) for (i, j, attrs) in _target.edges if i != expected['before'] and j != expected['before'] ], [ (i, j, attrs) for (i, j, attrs) in target.edges if i != expected['after'] and j != expected['after'] ], ) def test_get_fragments(self): # Arrange # a -- b e -- f -- g # | | # | | # d -- c h -- i j graph = nx.DiGraph() graph.add_nodes_from( ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j'] ) graph.add_edges_from([ ('a', 'b'), ('b', 'a'), ('b', 'c'), ('c', 'b'), ('c', 'd'), ('d', 'c'), ('d', 'a'), ('a', 'd'), ('e', 'f'), ('f', 'e'), ('f', 'g'), ('g', 'f'), ('h', 'i'), ('i', 'h') ]) expected = [None] * 4 expected[0] = nx.DiGraph() expected[0].add_nodes_from(['a', 'b', 'c', 'd']) expected[0].add_edges_from([ ('a', 'b'), ('b', 'a'), ('b', 'c'), ('c', 'b'), ('c', 'd'), ('d', 'c'), ('d', 'a'), ('a', 'd') ]) expected[1] = nx.DiGraph() expected[1].add_nodes_from(['e', 'f', 'g']) expected[1].add_edges_from( [('e', 'f'), ('f', 'e'), ('f', 'g'), ('g', 'f')] ) expected[2] = nx.DiGraph() expected[2].add_nodes_from(['h', 'i']) expected[2].add_edges_from([('i', 'h'), ('h', 'i')]) expected[3] = nx.DiGraph() expected[3].add_nodes_from(['j']) # Act actual = utilities.get_fragments(graph) actual.sort(key=lambda i: len(i.nodes()), reverse=True) # Assert self.assertEqual(len(expected), len(actual)) for i in range(4): self.assertCountEqual(expected[i].nodes(), actual[i].nodes()) self.assertCountEqual(expected[i].edges(), actual[i].edges()) def test_get_fragments_for_undirected(self): # Arrange # a -- b e -- f -- g # | | # | | # d -- c h -- i j graph = nx.Graph() graph.add_nodes_from( ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j'] ) graph.add_edges_from([ ('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'a'), ('e', 'f'), ('f', 'g'), ('h', 'i') ]) # Assert self.assertRaises(Exception, utilities.get_fragments, graph) def test_get_largest_fragment(self): # Arrange # a -- b e -- f -- g # | | # | | # d -- c h -- i 
j graph = nx.DiGraph() graph.add_nodes_from( ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j'] ) graph.add_edges_from([ ('a', 'b'), ('b', 'a'), ('b', 'c'), ('c', 'b'), ('c', 'd'), ('d', 'c'), ('d', 'a'), ('a', 'd'), ('e', 'f'), ('f', 'e'), ('f', 'g'), ('g', 'f'), ('h', 'i'), ('i', 'h') ]) expected = nx.DiGraph() expected.add_nodes_from(['a', 'b', 'c', 'd']) expected.add_edges_from([ ('a', 'b'), ('b', 'a'), ('b', 'c'), ('c', 'b'), ('c', 'd'), ('d', 'c'), ('d', 'a'), ('a', 'd') ]) # Act actual = utilities.get_largest_fragment(utilities.get_fragments(graph)) # Assert self.assertCountEqual(expected.nodes(), actual.nodes()) self.assertCountEqual(expected.edges(), actual.edges()) def test_get_node_attrs(self): # Scenario: main -- printf (cflow) # Arrange source = 'cflow' caller = Call('main', 'main.c', Environments.C) callee = Call('printf', '', Environments.C) # Act (caller_attrs, callee_attrs) = utilities.get_node_attrs( source, caller, callee, list(), list() ) # Assert # Caller Attributes self.assertTrue('tested' not in caller_attrs) self.assertTrue('defense' not in caller_attrs) self.assertTrue('vulnerable' not in caller_attrs) self.assertTrue('dangerous' not in caller_attrs) self.assertTrue('entry' not in caller_attrs) self.assertTrue('exit' in caller_attrs) self.assertTrue('frequency' not in caller_attrs) # Callee Attributes self.assertIsNone(callee_attrs) # Scenario: main -- printf (gprof) # Arrange source = 'gprof' caller = Call('main', 'main.c', Environments.C) callee = Call('printf', '', Environments.C) # Act (caller_attrs, callee_attrs) = utilities.get_node_attrs( source, caller, callee, list(), list() ) # Assert # Caller Attributes self.assertTrue('tested' in caller_attrs) self.assertTrue('defense' not in caller_attrs) self.assertTrue('vulnerable' not in caller_attrs) self.assertTrue('dangerous' not in caller_attrs) self.assertTrue('entry' not in caller_attrs) self.assertTrue('exit' in caller_attrs) self.assertTrue('frequency' not in caller_attrs) # Callee 
Attributes self.assertIsNone(callee_attrs) # Scenario: main -- None (gprof) # Arrange source = 'gprof' caller = Call('main', 'main.c', Environments.C) callee = None # Act (caller_attrs, callee_attrs) = utilities.get_node_attrs( source, caller, callee, list(), list() ) # Assert # Caller Attributes self.assertTrue('tested' not in caller_attrs) self.assertTrue('defense' not in caller_attrs) self.assertTrue('vulnerable' not in caller_attrs) self.assertTrue('dangerous' not in caller_attrs) self.assertTrue('entry' not in caller_attrs) self.assertTrue('exit' not in caller_attrs) self.assertTrue('frequency' not in caller_attrs) # Callee Attributes self.assertIsNone(callee_attrs) # Scenario: main -- validate* (cflow) # * Designed defense # Arrange source = 'cflow' defenses = [Call('validate', 'utils.c', Environments.C)] caller = Call('main', 'main.c', Environments.C) callee = Call('validate', 'utils.c', Environments.C) # Act (caller_attrs, callee_attrs) = utilities.get_node_attrs( source, caller, callee, defenses, list() ) # Assert # Caller Attributes self.assertTrue('tested' not in caller_attrs) self.assertTrue('defense' not in caller_attrs) self.assertTrue('vulnerable' not in caller_attrs) self.assertTrue('dangerous' not in caller_attrs) self.assertTrue('entry' not in caller_attrs) self.assertTrue('exit' not in caller_attrs) self.assertTrue('frequency' not in caller_attrs) # Callee Attributes self.assertIsNotNone(callee_attrs) self.assertTrue('tested' not in callee_attrs) self.assertTrue('defense' in callee_attrs) self.assertTrue('vulnerable' not in caller_attrs) self.assertTrue('dangerous' not in caller_attrs) self.assertTrue('entry' not in caller_attrs) self.assertTrue('exit' not in caller_attrs) self.assertEqual(callee_attrs['frequency'], 1) # Scenario: main -- validate* (cflow) # * Vulnerable # Arrange source = 'cflow' vulnerabilities = [Call('validate', 'utils.c', Environments.C)] caller = Call('main', 'main.c', Environments.C) callee = Call('validate', 'utils.c', 
Environments.C) # Act (caller_attrs, callee_attrs) = utilities.get_node_attrs( source, caller, callee, list(), vulnerabilities ) # Assert # Caller Attributes self.assertTrue('tested' not in caller_attrs) self.assertTrue('defense' not in callee_attrs) self.assertTrue('vulnerable' not in caller_attrs) self.assertTrue('dangerous' not in caller_attrs) self.assertTrue('entry' not in caller_attrs) self.assertTrue('exit' not in caller_attrs) self.assertTrue('frequency' not in caller_attrs) # Callee Attributes self.assertIsNotNone(callee_attrs) self.assertTrue('tested' not in callee_attrs) self.assertTrue('defense' not in callee_attrs) self.assertTrue('vulnerable' in callee_attrs) self.assertTrue('dangerous' not in caller_attrs) self.assertTrue('entry' not in caller_attrs) self.assertTrue('exit' not in caller_attrs) self.assertEqual(callee_attrs['frequency'], 1) # Scenario: main* -- validate+ (cflow) # * Vulnerable # + Designed defense and vulnerable # Arrange source = 'cflow' defenses = [Call('validate', 'utils.c', Environments.C)] vulnerabilities = [ Call('main', 'main.c', Environments.C), Call('validate', 'utils.c', Environments.C) ] caller = Call('main', 'main.c', Environments.C) callee = Call('validate', 'utils.c', Environments.C) # Act (caller_attrs, callee_attrs) = utilities.get_node_attrs( source, caller, callee, defenses, vulnerabilities ) # Assert # Caller Attributes self.assertTrue('tested' not in caller_attrs) self.assertTrue('defense' not in caller_attrs) self.assertTrue('vulnerable' in caller_attrs) self.assertTrue('dangerous' not in caller_attrs) self.assertTrue('entry' not in caller_attrs) self.assertTrue('exit' not in caller_attrs) self.assertTrue('frequency' not in caller_attrs) # Callee Attributes self.assertIsNotNone(callee_attrs) self.assertTrue('tested' not in callee_attrs) self.assertTrue('defense' in callee_attrs) self.assertTrue('vulnerable' in callee_attrs) self.assertTrue('dangerous' not in caller_attrs) self.assertTrue('entry' not in 
caller_attrs) self.assertTrue('exit' not in caller_attrs) self.assertEqual(callee_attrs['frequency'], 1) # Scenario: main* -- validate+ (cflow) # * Designed defense # + Designed defense and vulnerable # Arrange source = 'cflow' defenses = [ Call('main', 'main.c', Environments.C), Call('validate', 'utils.c', Environments.C) ] vulnerabilities = [ Call('main', 'main.c', Environments.C), Call('validate', 'utils.c', Environments.C) ] caller = Call('main', 'main.c', Environments.C) callee = Call('validate', 'utils.c', Environments.C) # Act (caller_attrs, callee_attrs) = utilities.get_node_attrs( source, caller, callee, defenses, vulnerabilities ) # Assert # Caller Attributes self.assertTrue('tested' not in caller_attrs) self.assertTrue('defense' in caller_attrs) self.assertTrue('vulnerable' in caller_attrs) self.assertTrue('dangerous' not in caller_attrs) self.assertTrue('entry' not in caller_attrs) self.assertTrue('exit' not in caller_attrs) self.assertTrue('frequency' not in caller_attrs) # Callee Attributes self.assertIsNotNone(callee_attrs) self.assertTrue('tested' not in callee_attrs) self.assertTrue('defense' in callee_attrs) self.assertTrue('vulnerable' in callee_attrs) self.assertTrue('dangerous' not in caller_attrs) self.assertTrue('entry' not in caller_attrs) self.assertTrue('exit' not in caller_attrs) self.assertEqual(callee_attrs['frequency'], 1) # Scenario: main -- chown (cflow) # Arrange source = 'cflow' caller = Call('main', 'main.c', Environments.C) callee = Call('chown', '', Environments.C) # Act (caller_attrs, callee_attrs) = utilities.get_node_attrs( source, caller, callee, list(), list() ) # Assert # Caller Attributes self.assertTrue('tested' not in caller_attrs) self.assertTrue('defense' not in caller_attrs) self.assertTrue('vulnerable' not in caller_attrs) self.assertTrue('dangerous' in caller_attrs) self.assertTrue('entry' not in caller_attrs) self.assertTrue('exit' not in caller_attrs) self.assertTrue('frequency' not in caller_attrs) # Callee 
Attributes self.assertIsNone(callee_attrs) if __name__ == '__main__': unittest.main()
33.646809
79
0.541103
1,693
15,814
4.934436
0.075015
0.134068
0.163754
0.11683
0.796744
0.790998
0.785851
0.785851
0.779148
0.746349
0
0.001585
0.32174
15,814
469
80
33.71855
0.77727
0.084356
0
0.615142
0
0
0.092027
0.004584
0
0
0
0
0.328076
1
0.015773
false
0
0.031546
0
0.050473
0.006309
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
09399de3c6bdc0215bd7f2ca80432b7f381157d3
150
py
Python
gluoncv/data/video_custom/__init__.py
Kentwhf/gluon-cv
bab5c029793d4da20b5c14846a0db58cfbe21d6d
[ "Apache-2.0" ]
48
2021-04-08T13:34:55.000Z
2022-03-29T09:36:53.000Z
gluoncv/data/video_custom/__init__.py
xuefeiwang12/gluon-cv
ce90b0e65444c593a860a46f8d070b4c4095dbba
[ "Apache-2.0" ]
null
null
null
gluoncv/data/video_custom/__init__.py
xuefeiwang12/gluon-cv
ce90b0e65444c593a860a46f8d070b4c4095dbba
[ "Apache-2.0" ]
1
2021-04-10T04:44:07.000Z
2021-04-10T04:44:07.000Z
# pylint: disable=wildcard-import """Video related tasks. Custom data loader """ from __future__ import absolute_import from .classification import *
25
42
0.793333
18
150
6.333333
0.777778
0
0
0
0
0
0
0
0
0
0
0
0.12
150
5
43
30
0.863636
0.48
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
09585c82dbf6886677b381dc5d6ed58e52b2030a
266
py
Python
tottle/types/objects/contact.py
muffleo/tottle
69a5bdda879ab56d43505d517d3369a687c135a2
[ "MIT" ]
12
2020-09-06T15:31:34.000Z
2021-02-27T20:30:34.000Z
tottle/types/objects/contact.py
cyanlabs-org/tottle
6cf02022ed7b445c9b5af475c6e854b91780d792
[ "MIT" ]
2
2021-04-13T06:43:42.000Z
2021-07-07T20:52:39.000Z
tottle/types/objects/contact.py
cyanlabs-org/tottle
6cf02022ed7b445c9b5af475c6e854b91780d792
[ "MIT" ]
4
2020-09-12T03:09:25.000Z
2021-03-22T08:52:04.000Z
from typing import Optional from pydantic import BaseModel class Contact(BaseModel): phone_number: Optional[str] = None first_name: Optional[str] = None last_name: Optional[str] = None user_id: Optional[int] = None vcard: Optional[str] = None
22.166667
38
0.714286
35
266
5.314286
0.542857
0.236559
0.322581
0.204301
0
0
0
0
0
0
0
0
0.199248
266
11
39
24.181818
0.873239
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.25
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
1
0
0
5
117036e3ecad7d781f1f1e9e61c483e6031266d3
620
py
Python
src/pysparkbundle/filesystem/FilesystemInterface.py
daipe-ai/pyspark-bundle
ffbb0f8835b69646a235ae4d7d1f4eaa03a05ea0
[ "MIT" ]
null
null
null
src/pysparkbundle/filesystem/FilesystemInterface.py
daipe-ai/pyspark-bundle
ffbb0f8835b69646a235ae4d7d1f4eaa03a05ea0
[ "MIT" ]
null
null
null
src/pysparkbundle/filesystem/FilesystemInterface.py
daipe-ai/pyspark-bundle
ffbb0f8835b69646a235ae4d7d1f4eaa03a05ea0
[ "MIT" ]
null
null
null
from abc import ABC, abstractmethod class FilesystemInterface(ABC): @abstractmethod def exists(self, path: str): pass @abstractmethod def put(self, path: str, content: str, overwrite: bool = False): pass @abstractmethod def makedirs(self, path: str): pass @abstractmethod def copy(self, source: str, destination: str, recursive: bool = False): pass @abstractmethod def move(self, source: str, destination: str, recursive: bool = False): pass @abstractmethod def delete(self, path: str, recursive: bool = False): pass
22.142857
75
0.635484
68
620
5.794118
0.352941
0.258883
0.266497
0.205584
0.637056
0.497462
0.335025
0.335025
0.335025
0.335025
0
0
0.270968
620
27
76
22.962963
0.871681
0
0
0.6
0
0
0
0
0
0
0
0
0
1
0.3
false
0.3
0.05
0
0.4
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
5
1185ea2b17424560cd9377235ba7c2e89380c41b
58
py
Python
grb/dataset/__init__.py
Stanislas0/grb
96fc521f57fdb06ab6a3c442fcf4a8bc97894829
[ "MIT" ]
null
null
null
grb/dataset/__init__.py
Stanislas0/grb
96fc521f57fdb06ab6a3c442fcf4a8bc97894829
[ "MIT" ]
null
null
null
grb/dataset/__init__.py
Stanislas0/grb
96fc521f57fdb06ab6a3c442fcf4a8bc97894829
[ "MIT" ]
null
null
null
from .dataset import Dataset, CustomDataset, CogDLDataset
29
57
0.844828
6
58
8.166667
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.103448
58
1
58
58
0.942308
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
eea55d6ca00241662c10e7989d60768fb244bfe0
57
py
Python
nygame/_quietload.py
nfearnley/nygame
221abf9ec1cff8fc577704969cf1f843e1fb4352
[ "MIT" ]
1
2021-06-07T22:12:05.000Z
2021-06-07T22:12:05.000Z
nygame/_quietload.py
nfearnley/nygame
221abf9ec1cff8fc577704969cf1f843e1fb4352
[ "MIT" ]
null
null
null
nygame/_quietload.py
nfearnley/nygame
221abf9ec1cff8fc577704969cf1f843e1fb4352
[ "MIT" ]
null
null
null
import os os.environ["PYGAME_HIDE_SUPPORT_PROMPT"] = "1"
19
46
0.77193
9
57
4.555556
0.888889
0
0
0
0
0
0
0
0
0
0
0.019231
0.087719
57
2
47
28.5
0.769231
0
0
0
0
0
0.473684
0.45614
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
eebee4a4f91ccf126142c3850d5f1f3df59b0f9c
189,704
py
Python
esociallib/v2_04/evtCdBenPrRP.py
akretion/esociallib
2472d68b45610638cf10d87aeed48b917ebae6d8
[ "MIT" ]
6
2018-02-16T09:59:35.000Z
2021-09-01T20:40:02.000Z
esociallib/v2_04/evtCdBenPrRP.py
akretion/esociallib
2472d68b45610638cf10d87aeed48b917ebae6d8
[ "MIT" ]
2
2018-02-02T19:32:21.000Z
2019-01-25T14:43:05.000Z
esociallib/v2_04/evtCdBenPrRP.py
akretion/esociallib
2472d68b45610638cf10d87aeed48b917ebae6d8
[ "MIT" ]
2
2018-05-03T17:16:38.000Z
2021-04-02T19:17:31.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Generated Tue Oct 10 00:42:21 2017 by generateDS.py version 2.28b. # Python 2.7.12 (default, Nov 19 2016, 06:48:10) [GCC 5.4.0 20160609] # # Command line options: # ('--no-process-includes', '') # ('-o', 'esociallib/v2_04/evtCdBenPrRP.py') # # Command line arguments: # schemas/v2_04/evtCdBenPrRP.xsd # # Command line: # /usr/local/bin/generateDS --no-process-includes -o "esociallib/v2_04/evtCdBenPrRP.py" schemas/v2_04/evtCdBenPrRP.xsd # # Current working directory (os.getcwd()): # esociallib # import sys import re as re_ import base64 import datetime as datetime_ import warnings as warnings_ try: from lxml import etree as etree_ except ImportError: from xml.etree import ElementTree as etree_ Validate_simpletypes_ = True if sys.version_info.major == 2: BaseStrType_ = basestring else: BaseStrType_ = str def parsexml_(infile, parser=None, **kwargs): if parser is None: # Use the lxml ElementTree compatible parser so that, e.g., # we ignore comments. try: parser = etree_.ETCompatXMLParser() except AttributeError: # fallback to xml.etree parser = etree_.XMLParser() doc = etree_.parse(infile, parser=parser, **kwargs) return doc # # Namespace prefix definition table (and other attributes, too) # # The module generatedsnamespaces, if it is importable, must contain # a dictionary named GeneratedsNamespaceDefs. This Python dictionary # should map element type names (strings) to XML schema namespace prefix # definitions. The export method for any class for which there is # a namespace prefix definition, will export that definition in the # XML representation of that element. See the export method of # any generated element type class for a example of the use of this # table. 
# A sample table is: # # # File: generatedsnamespaces.py # # GenerateDSNamespaceDefs = { # "ElementtypeA": "http://www.xxx.com/namespaceA", # "ElementtypeB": "http://www.xxx.com/namespaceB", # } # try: from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ except ImportError: GenerateDSNamespaceDefs_ = {} # # The root super-class for element type classes # # Calls to the methods in these classes are generated by generateDS.py. # You can replace these methods by re-implementing the following class # in a module named generatedssuper.py. try: from generatedssuper import GeneratedsSuper except ImportError as exp: class GeneratedsSuper(object): tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') class _FixedOffsetTZ(datetime_.tzinfo): def __init__(self, offset, name): self.__offset = datetime_.timedelta(minutes=offset) self.__name = name def utcoffset(self, dt): return self.__offset def tzname(self, dt): return self.__name def dst(self, dt): return None def gds_format_string(self, input_data, input_name=''): return input_data def gds_validate_string(self, input_data, node=None, input_name=''): if not input_data: return '' else: return input_data def gds_format_base64(self, input_data, input_name=''): return base64.b64encode(input_data) def gds_validate_base64(self, input_data, node=None, input_name=''): return input_data def gds_format_integer(self, input_data, input_name=''): return '%d' % input_data def gds_validate_integer(self, input_data, node=None, input_name=''): return input_data def gds_format_integer_list(self, input_data, input_name=''): return '%s' % ' '.join(input_data) def gds_validate_integer_list( self, input_data, node=None, input_name=''): values = input_data.split() for value in values: try: int(value) except (TypeError, ValueError): raise_parse_error(node, 'Requires sequence of integers') return values def gds_format_float(self, input_data, input_name=''): return ('%.15f' % input_data).rstrip('0') def 
gds_validate_float(self, input_data, node=None, input_name=''): return input_data def gds_format_float_list(self, input_data, input_name=''): return '%s' % ' '.join(input_data) def gds_validate_float_list( self, input_data, node=None, input_name=''): values = input_data.split() for value in values: try: float(value) except (TypeError, ValueError): raise_parse_error(node, 'Requires sequence of floats') return values def gds_format_double(self, input_data, input_name=''): return '%e' % input_data def gds_validate_double(self, input_data, node=None, input_name=''): return input_data def gds_format_double_list(self, input_data, input_name=''): return '%s' % ' '.join(input_data) def gds_validate_double_list( self, input_data, node=None, input_name=''): values = input_data.split() for value in values: try: float(value) except (TypeError, ValueError): raise_parse_error(node, 'Requires sequence of doubles') return values def gds_format_boolean(self, input_data, input_name=''): return ('%s' % input_data).lower() def gds_validate_boolean(self, input_data, node=None, input_name=''): return input_data def gds_format_boolean_list(self, input_data, input_name=''): return '%s' % ' '.join(input_data) def gds_validate_boolean_list( self, input_data, node=None, input_name=''): values = input_data.split() for value in values: if value not in ('true', '1', 'false', '0', ): raise_parse_error( node, 'Requires sequence of booleans ' '("true", "1", "false", "0")') return values def gds_validate_datetime(self, input_data, node=None, input_name=''): return input_data def gds_format_datetime(self, input_data, input_name=''): if input_data.microsecond == 0: _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( input_data.year, input_data.month, input_data.day, input_data.hour, input_data.minute, input_data.second, ) else: _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( input_data.year, input_data.month, input_data.day, input_data.hour, input_data.minute, input_data.second, ('%f' % 
(float(input_data.microsecond) / 1000000))[2:], ) if input_data.tzinfo is not None: tzoff = input_data.tzinfo.utcoffset(input_data) if tzoff is not None: total_seconds = tzoff.seconds + (86400 * tzoff.days) if total_seconds == 0: _svalue += 'Z' else: if total_seconds < 0: _svalue += '-' total_seconds *= -1 else: _svalue += '+' hours = total_seconds // 3600 minutes = (total_seconds - (hours * 3600)) // 60 _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) return _svalue @classmethod def gds_parse_datetime(cls, input_data): tz = None if input_data[-1] == 'Z': tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') input_data = input_data[:-1] else: results = GeneratedsSuper.tzoff_pattern.search(input_data) if results is not None: tzoff_parts = results.group(2).split(':') tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) if results.group(1) == '-': tzoff *= -1 tz = GeneratedsSuper._FixedOffsetTZ( tzoff, results.group(0)) input_data = input_data[:-6] time_parts = input_data.split('.') if len(time_parts) > 1: micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) input_data = '%s.%s' % (time_parts[0], micro_seconds, ) dt = datetime_.datetime.strptime( input_data, '%Y-%m-%dT%H:%M:%S.%f') else: dt = datetime_.datetime.strptime( input_data, '%Y-%m-%dT%H:%M:%S') dt = dt.replace(tzinfo=tz) return dt def gds_validate_date(self, input_data, node=None, input_name=''): return input_data def gds_format_date(self, input_data, input_name=''): _svalue = '%04d-%02d-%02d' % ( input_data.year, input_data.month, input_data.day, ) try: if input_data.tzinfo is not None: tzoff = input_data.tzinfo.utcoffset(input_data) if tzoff is not None: total_seconds = tzoff.seconds + (86400 * tzoff.days) if total_seconds == 0: _svalue += 'Z' else: if total_seconds < 0: _svalue += '-' total_seconds *= -1 else: _svalue += '+' hours = total_seconds // 3600 minutes = (total_seconds - (hours * 3600)) // 60 _svalue += '{0:02d}:{1:02d}'.format( hours, minutes) except AttributeError: pass return _svalue @classmethod def gds_parse_date(cls, input_data): tz = None if input_data[-1] == 'Z': tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') input_data = input_data[:-1] else: results = GeneratedsSuper.tzoff_pattern.search(input_data) if results is not None: tzoff_parts = results.group(2).split(':') tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) if results.group(1) == '-': tzoff *= -1 tz = GeneratedsSuper._FixedOffsetTZ( tzoff, results.group(0)) input_data = input_data[:-6] dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') dt = dt.replace(tzinfo=tz) return dt.date() def gds_validate_time(self, input_data, node=None, input_name=''): return input_data def gds_format_time(self, input_data, input_name=''): if input_data.microsecond == 0: _svalue = '%02d:%02d:%02d' % ( input_data.hour, input_data.minute, input_data.second, ) else: _svalue = '%02d:%02d:%02d.%s' % ( input_data.hour, input_data.minute, input_data.second, ('%f' % (float(input_data.microsecond) / 1000000))[2:], ) if input_data.tzinfo is not None: tzoff = 
input_data.tzinfo.utcoffset(input_data) if tzoff is not None: total_seconds = tzoff.seconds + (86400 * tzoff.days) if total_seconds == 0: _svalue += 'Z' else: if total_seconds < 0: _svalue += '-' total_seconds *= -1 else: _svalue += '+' hours = total_seconds // 3600 minutes = (total_seconds - (hours * 3600)) // 60 _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) return _svalue def gds_validate_simple_patterns(self, patterns, target): # pat is a list of lists of strings/patterns. We should: # - AND the outer elements # - OR the inner elements found1 = True for patterns1 in patterns: found2 = False for patterns2 in patterns1: if re_.search(patterns2, target) is not None: found2 = True break if not found2: found1 = False break return found1 @classmethod def gds_parse_time(cls, input_data): tz = None if input_data[-1] == 'Z': tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') input_data = input_data[:-1] else: results = GeneratedsSuper.tzoff_pattern.search(input_data) if results is not None: tzoff_parts = results.group(2).split(':') tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) if results.group(1) == '-': tzoff *= -1 tz = GeneratedsSuper._FixedOffsetTZ( tzoff, results.group(0)) input_data = input_data[:-6] if len(input_data.split('.')) > 1: dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') else: dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') dt = dt.replace(tzinfo=tz) return dt.time() def gds_str_lower(self, instring): return instring.lower() def get_path_(self, node): path_list = [] self.get_path_list_(node, path_list) path_list.reverse() path = '/'.join(path_list) return path Tag_strip_pattern_ = re_.compile(r'\{.*\}') def get_path_list_(self, node, path_list): if node is None: return tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) if tag: path_list.append(tag) self.get_path_list_(node.getparent(), path_list) def get_class_obj_(self, node, default_class=None): class_obj1 = default_class if 'xsi' in node.nsmap: classname = 
node.get('{%s}type' % node.nsmap['xsi']) if classname is not None: names = classname.split(':') if len(names) == 2: classname = names[1] class_obj2 = globals().get(classname) if class_obj2 is not None: class_obj1 = class_obj2 return class_obj1 def gds_build_any(self, node, type_name=None): return None @classmethod def gds_reverse_node_mapping(cls, mapping): return dict(((v, k) for k, v in mapping.iteritems())) @staticmethod def gds_encode(instring): if sys.version_info.major == 2: return instring.encode(ExternalEncoding) else: return instring @staticmethod def convert_unicode(instring): if isinstance(instring, str): result = quote_xml(instring) elif sys.version_info.major == 2 and isinstance(instring, unicode): result = quote_xml(instring).encode('utf8') else: result = GeneratedsSuper.gds_encode(str(instring)) return result def __eq__(self, other): if type(self) != type(other): return False return self.__dict__ == other.__dict__ def __ne__(self, other): return not self.__eq__(other) def getSubclassFromModule_(module, class_): '''Get the subclass of a class from a specific module.''' name = class_.__name__ + 'Sub' if hasattr(module, name): return getattr(module, name) else: return None # # If you have installed IPython you can uncomment and use the following. # IPython is available from http://ipython.scipy.org/. 
# ## from IPython.Shell import IPShellEmbed ## args = '' ## ipshell = IPShellEmbed(args, ## banner = 'Dropping into IPython', ## exit_msg = 'Leaving Interpreter, back to program.') # Then use the following line where and when you want to drop into the # IPython shell: # ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit') # # Globals # ExternalEncoding = 'ascii' Tag_pattern_ = re_.compile(r'({.*})?(.*)') String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL) # Change this to redirect the generated superclass module to use a # specific subclass module. CurrentSubclassModule_ = None # # Support/utility functions. # def showIndent(outfile, level, pretty_print=True): if pretty_print: for idx in range(level): outfile.write(' ') def quote_xml(inStr): "Escape markup chars, but do not modify CDATA sections." if not inStr: return '' s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) s2 = '' pos = 0 matchobjects = CDATA_pattern_.finditer(s1) for mo in matchobjects: s3 = s1[pos:mo.start()] s2 += quote_xml_aux(s3) s2 += s1[mo.start():mo.end()] pos = mo.end() s3 = s1[pos:] s2 += quote_xml_aux(s3) return s2 def quote_xml_aux(inStr): s1 = inStr.replace('&', '&amp;') s1 = s1.replace('<', '&lt;') s1 = s1.replace('>', '&gt;') return s1 def quote_attrib(inStr): s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) s1 = s1.replace('&', '&amp;') s1 = s1.replace('<', '&lt;') s1 = s1.replace('>', '&gt;') if '"' in s1: if "'" in s1: s1 = '"%s"' % s1.replace('"', "&quot;") else: s1 = "'%s'" % s1 else: s1 = '"%s"' % s1 return s1 def quote_python(inStr): s1 = inStr if s1.find("'") == -1: if s1.find('\n') == -1: return "'%s'" % s1 else: return "'''%s'''" % s1 else: if s1.find('"') != -1: s1 = s1.replace('"', '\\"') if s1.find('\n') == -1: return '"%s"' % s1 else: return '"""%s"""' % s1 def get_all_text_(node): if node.text is not None: text = 
node.text else: text = '' for child in node: if child.tail is not None: text += child.tail return text def find_attr_value_(attr_name, node): attrs = node.attrib attr_parts = attr_name.split(':') value = None if len(attr_parts) == 1: value = attrs.get(attr_name) elif len(attr_parts) == 2: prefix, name = attr_parts namespace = node.nsmap.get(prefix) if namespace is not None: value = attrs.get('{%s}%s' % (namespace, name, )) return value class GDSParseError(Exception): pass def raise_parse_error(node, msg): msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) raise GDSParseError(msg) class MixedContainer: # Constants for category: CategoryNone = 0 CategoryText = 1 CategorySimple = 2 CategoryComplex = 3 # Constants for content_type: TypeNone = 0 TypeText = 1 TypeString = 2 TypeInteger = 3 TypeFloat = 4 TypeDecimal = 5 TypeDouble = 6 TypeBoolean = 7 TypeBase64 = 8 def __init__(self, category, content_type, name, value): self.category = category self.content_type = content_type self.name = name self.value = value def getCategory(self): return self.category def getContenttype(self, content_type): return self.content_type def getValue(self): return self.value def getName(self): return self.name def export(self, outfile, level, name, namespace, pretty_print=True): if self.category == MixedContainer.CategoryText: # Prevent exporting empty content as empty lines. 
if self.value.strip(): outfile.write(self.value) elif self.category == MixedContainer.CategorySimple: self.exportSimple(outfile, level, name) else: # category == MixedContainer.CategoryComplex self.value.export( outfile, level, namespace, name, pretty_print=pretty_print) def exportSimple(self, outfile, level, name): if self.content_type == MixedContainer.TypeString: outfile.write('<%s>%s</%s>' % ( self.name, self.value, self.name)) elif self.content_type == MixedContainer.TypeInteger or \ self.content_type == MixedContainer.TypeBoolean: outfile.write('<%s>%d</%s>' % ( self.name, self.value, self.name)) elif self.content_type == MixedContainer.TypeFloat or \ self.content_type == MixedContainer.TypeDecimal: outfile.write('<%s>%f</%s>' % ( self.name, self.value, self.name)) elif self.content_type == MixedContainer.TypeDouble: outfile.write('<%s>%g</%s>' % ( self.name, self.value, self.name)) elif self.content_type == MixedContainer.TypeBase64: outfile.write('<%s>%s</%s>' % ( self.name, base64.b64encode(self.value), self.name)) def to_etree(self, element): if self.category == MixedContainer.CategoryText: # Prevent exporting empty content as empty lines. 
if self.value.strip(): if len(element) > 0: if element[-1].tail is None: element[-1].tail = self.value else: element[-1].tail += self.value else: if element.text is None: element.text = self.value else: element.text += self.value elif self.category == MixedContainer.CategorySimple: subelement = etree_.SubElement( element, '%s' % self.name) subelement.text = self.to_etree_simple() else: # category == MixedContainer.CategoryComplex self.value.to_etree(element) def to_etree_simple(self): if self.content_type == MixedContainer.TypeString: text = self.value elif (self.content_type == MixedContainer.TypeInteger or self.content_type == MixedContainer.TypeBoolean): text = '%d' % self.value elif (self.content_type == MixedContainer.TypeFloat or self.content_type == MixedContainer.TypeDecimal): text = '%f' % self.value elif self.content_type == MixedContainer.TypeDouble: text = '%g' % self.value elif self.content_type == MixedContainer.TypeBase64: text = '%s' % base64.b64encode(self.value) return text def exportLiteral(self, outfile, level, name): if self.category == MixedContainer.CategoryText: showIndent(outfile, level) outfile.write( 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( self.category, self.content_type, self.name, self.value)) elif self.category == MixedContainer.CategorySimple: showIndent(outfile, level) outfile.write( 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( self.category, self.content_type, self.name, self.value)) else: # category == MixedContainer.CategoryComplex showIndent(outfile, level) outfile.write( 'model_.MixedContainer(%d, %d, "%s",\n' % ( self.category, self.content_type, self.name,)) self.value.exportLiteral(outfile, level + 1) showIndent(outfile, level) outfile.write(')\n') class MemberSpec_(object): def __init__(self, name='', data_type='', container=0, optional=0, child_attrs=None, choice=None): self.name = name self.data_type = data_type self.container = container self.child_attrs = child_attrs self.choice = choice self.optional = 
optional def set_name(self, name): self.name = name def get_name(self): return self.name def set_data_type(self, data_type): self.data_type = data_type def get_data_type_chain(self): return self.data_type def get_data_type(self): if isinstance(self.data_type, list): if len(self.data_type) > 0: return self.data_type[-1] else: return 'xs:string' else: return self.data_type def set_container(self, container): self.container = container def get_container(self): return self.container def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs def get_child_attrs(self): return self.child_attrs def set_choice(self, choice): self.choice = choice def get_choice(self): return self.choice def set_optional(self, optional): self.optional = optional def get_optional(self): return self.optional def _cast(typ, value): if typ is None or value is None: return value return typ(value) # # Data representation classes. # class eSocial(GeneratedsSuper): subclass = None superclass = None def __init__(self, evtCdBenPrRP=None, Signature=None): self.original_tagname_ = None self.evtCdBenPrRP = evtCdBenPrRP self.Signature = Signature def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, eSocial) if subclass is not None: return subclass(*args_, **kwargs_) if eSocial.subclass: return eSocial.subclass(*args_, **kwargs_) else: return eSocial(*args_, **kwargs_) factory = staticmethod(factory) def get_evtCdBenPrRP(self): return self.evtCdBenPrRP def set_evtCdBenPrRP(self, evtCdBenPrRP): self.evtCdBenPrRP = evtCdBenPrRP def get_Signature(self): return self.Signature def set_Signature(self, Signature): self.Signature = Signature def hasContent_(self): if ( self.evtCdBenPrRP is not None or self.Signature is not None ): return True else: return False def export(self, outfile, level, namespace_='', name_='eSocial', namespacedef_=' xmlns:ds="http://www.w3.org/2000/09/xmldsig#" ', pretty_print=True): imported_ns_def_ = 
GenerateDSNamespaceDefs_.get('eSocial') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespace_, name_='eSocial') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespace_='', name_='eSocial', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='eSocial'): pass def exportChildren(self, outfile, level, namespace_='', name_='eSocial', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.evtCdBenPrRP is not None: self.evtCdBenPrRP.export(outfile, level, namespace_, name_='evtCdBenPrRP', pretty_print=pretty_print) if self.Signature is not None: showIndent(outfile, level, pretty_print) outfile.write('<%sSignature>%s</%sSignature>%s' % ('ds:', self.gds_encode(self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), 'ds:', eol_)) def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): if nodeName_ == 'evtCdBenPrRP': obj_ = evtCdBenPrRP.factory() obj_.build(child_) self.evtCdBenPrRP = obj_ obj_.original_tagname_ = 'evtCdBenPrRP' elif nodeName_ == 'Signature': Signature_ = child_.text Signature_ = 
self.gds_validate_string(Signature_, node, 'Signature') self.Signature = Signature_ # end class eSocial class evtCdBenPrRP(GeneratedsSuper): """Evento de cadastro de benefícios previdenciários de Regimes Próprios""" subclass = None superclass = None def __init__(self, Id=None, ideEvento=None, ideEmpregador=None, ideBenef=None, infoBeneficio=None): self.original_tagname_ = None self.Id = _cast(None, Id) self.ideEvento = ideEvento self.ideEmpregador = ideEmpregador self.ideBenef = ideBenef self.infoBeneficio = infoBeneficio def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, evtCdBenPrRP) if subclass is not None: return subclass(*args_, **kwargs_) if evtCdBenPrRP.subclass: return evtCdBenPrRP.subclass(*args_, **kwargs_) else: return evtCdBenPrRP(*args_, **kwargs_) factory = staticmethod(factory) def get_ideEvento(self): return self.ideEvento def set_ideEvento(self, ideEvento): self.ideEvento = ideEvento def get_ideEmpregador(self): return self.ideEmpregador def set_ideEmpregador(self, ideEmpregador): self.ideEmpregador = ideEmpregador def get_ideBenef(self): return self.ideBenef def set_ideBenef(self, ideBenef): self.ideBenef = ideBenef def get_infoBeneficio(self): return self.infoBeneficio def set_infoBeneficio(self, infoBeneficio): self.infoBeneficio = infoBeneficio def get_Id(self): return self.Id def set_Id(self, Id): self.Id = Id def hasContent_(self): if ( self.ideEvento is not None or self.ideEmpregador is not None or self.ideBenef is not None or self.infoBeneficio is not None ): return True else: return False def export(self, outfile, level, namespace_='', name_='evtCdBenPrRP', namespacedef_='', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('evtCdBenPrRP') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, 
pretty_print) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespace_, name_='evtCdBenPrRP') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespace_='', name_='evtCdBenPrRP', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='evtCdBenPrRP'): if self.Id is not None and 'Id' not in already_processed: already_processed.add('Id') outfile.write(' Id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Id), input_name='Id')), )) def exportChildren(self, outfile, level, namespace_='', name_='evtCdBenPrRP', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.ideEvento is not None: self.ideEvento.export(outfile, level, namespace_, name_='ideEvento', pretty_print=pretty_print) if self.ideEmpregador is not None: self.ideEmpregador.export(outfile, level, namespace_, name_='ideEmpregador', pretty_print=pretty_print) if self.ideBenef is not None: self.ideBenef.export(outfile, level, namespace_, name_='ideBenef', pretty_print=pretty_print) if self.infoBeneficio is not None: self.infoBeneficio.export(outfile, level, namespace_, name_='infoBeneficio', pretty_print=pretty_print) def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('Id', node) if value is not None and 'Id' not in already_processed: already_processed.add('Id') self.Id = value def buildChildren(self, child_, node, nodeName_, 
fromsubclass_=False): if nodeName_ == 'ideEvento': obj_ = TIdeEveTrab.factory() obj_.build(child_) self.ideEvento = obj_ obj_.original_tagname_ = 'ideEvento' elif nodeName_ == 'ideEmpregador': obj_ = TEmprPJ.factory() obj_.build(child_) self.ideEmpregador = obj_ obj_.original_tagname_ = 'ideEmpregador' elif nodeName_ == 'ideBenef': obj_ = ideBenef.factory() obj_.build(child_) self.ideBenef = obj_ obj_.original_tagname_ = 'ideBenef' elif nodeName_ == 'infoBeneficio': obj_ = infoBeneficio.factory() obj_.build(child_) self.infoBeneficio = obj_ obj_.original_tagname_ = 'infoBeneficio' # end class evtCdBenPrRP class ideBenef(GeneratedsSuper): """Identificação do beneficiário""" subclass = None superclass = None def __init__(self, cpfBenef=None, nmBenefic=None, dadosBenef=None): self.original_tagname_ = None self.cpfBenef = cpfBenef self.nmBenefic = nmBenefic self.dadosBenef = dadosBenef def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, ideBenef) if subclass is not None: return subclass(*args_, **kwargs_) if ideBenef.subclass: return ideBenef.subclass(*args_, **kwargs_) else: return ideBenef(*args_, **kwargs_) factory = staticmethod(factory) def get_cpfBenef(self): return self.cpfBenef def set_cpfBenef(self, cpfBenef): self.cpfBenef = cpfBenef def get_nmBenefic(self): return self.nmBenefic def set_nmBenefic(self, nmBenefic): self.nmBenefic = nmBenefic def get_dadosBenef(self): return self.dadosBenef def set_dadosBenef(self, dadosBenef): self.dadosBenef = dadosBenef def hasContent_(self): if ( self.cpfBenef is not None or self.nmBenefic is not None or self.dadosBenef is not None ): return True else: return False def export(self, outfile, level, namespace_='', name_='ideBenef', namespacedef_='', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('ideBenef') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if 
self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespace_, name_='ideBenef') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespace_='', name_='ideBenef', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ideBenef'): pass def exportChildren(self, outfile, level, namespace_='', name_='ideBenef', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.cpfBenef is not None: showIndent(outfile, level, pretty_print) outfile.write('<%scpfBenef>%s</%scpfBenef>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.cpfBenef), input_name='cpfBenef')), namespace_, eol_)) if self.nmBenefic is not None: showIndent(outfile, level, pretty_print) outfile.write('<%snmBenefic>%s</%snmBenefic>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nmBenefic), input_name='nmBenefic')), namespace_, eol_)) if self.dadosBenef is not None: self.dadosBenef.export(outfile, level, namespace_, name_='dadosBenef', pretty_print=pretty_print) def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): if nodeName_ == 'cpfBenef': cpfBenef_ = child_.text cpfBenef_ = self.gds_validate_string(cpfBenef_, node, 'cpfBenef') self.cpfBenef = cpfBenef_ 
        # -- ideBenef.buildChildren (continued): remaining <ideBenef> children --
        elif nodeName_ == 'nmBenefic':
            # Simple text child: beneficiary name, validated as a string.
            nmBenefic_ = child_.text
            nmBenefic_ = self.gds_validate_string(nmBenefic_, node, 'nmBenefic')
            self.nmBenefic = nmBenefic_
        elif nodeName_ == 'dadosBenef':
            # Complex child: parsing is delegated to the TDadosBenef type.
            obj_ = TDadosBenef.factory()
            obj_.build(child_)
            self.dadosBenef = obj_
            obj_.original_tagname_ = 'dadosBenef'
# end class ideBenef


class cpfBenef(GeneratedsSuper):
    # Generated element class for <cpfBenef>: no attributes and no child
    # elements; it exists so the generateDS subclass hooks can attach behavior.
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass, if one is available.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, cpfBenef)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if cpfBenef.subclass:
            return cpfBenef.subclass(*args_, **kwargs_)
        else:
            return cpfBenef(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # Empty tuple condition: always False for this member-less type
        # (generateDS boilerplate; subclasses may override).
        if (

        ):
            return True
        else:
            return False

    def export(self, outfile, level, namespace_='', name_='cpfBenef', namespacedef_='', pretty_print=True):
        # Serialize this element to `outfile`; with no content this emits a
        # self-closing tag.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('cpfBenef')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cpfBenef')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='cpfBenef', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='cpfBenef'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='cpfBenef', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        # Populate this object from an ElementTree/lxml node.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cpfBenef


class nmBenefic(GeneratedsSuper):
    # Generated element class for <nmBenefic>: no attributes and no children.
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nmBenefic)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if nmBenefic.subclass:
            return nmBenefic.subclass(*args_, **kwargs_)
        else:
            return nmBenefic(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # Always False: member-less type.
        if (

        ):
            return True
        else:
            return False

    def export(self, outfile, level, namespace_='', name_='nmBenefic', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nmBenefic')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nmBenefic')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nmBenefic', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nmBenefic'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='nmBenefic', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class nmBenefic


class infoBeneficio(GeneratedsSuper):
    """Information on the social-security benefit granted to the civil
    servant (eSocial: "Informações relacionadas ao benefício
    previdenciário concedido ao servidor")."""
    subclass = None
    superclass = None

    def __init__(self, tpPlanRP=None, iniBeneficio=None, altBeneficio=None, fimBeneficio=None):
        self.original_tagname_ = None
        # tpPlanRP: integer plan-type code (parsed as int in buildChildren).
        self.tpPlanRP = tpPlanRP
        # iniBeneficio / altBeneficio: TDadosBeneficio instances (benefit
        # start / change); fimBeneficio: fimBeneficio instance (termination).
        self.iniBeneficio = iniBeneficio
        self.altBeneficio = altBeneficio
        self.fimBeneficio = fimBeneficio

    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, infoBeneficio)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if infoBeneficio.subclass:
            return infoBeneficio.subclass(*args_, **kwargs_)
        else:
            return infoBeneficio(*args_, **kwargs_)
    factory = staticmethod(factory)

    def get_tpPlanRP(self):
        return self.tpPlanRP

    def set_tpPlanRP(self, tpPlanRP):
        self.tpPlanRP = tpPlanRP

    def get_iniBeneficio(self):
        return self.iniBeneficio

    def set_iniBeneficio(self, iniBeneficio):
        self.iniBeneficio = iniBeneficio

    def get_altBeneficio(self):
        return self.altBeneficio

    def set_altBeneficio(self, altBeneficio):
        self.altBeneficio = altBeneficio

    def get_fimBeneficio(self):
        return self.fimBeneficio

    def set_fimBeneficio(self, fimBeneficio):
        self.fimBeneficio = fimBeneficio

    def hasContent_(self):
        # True when any child element is set.
        if (
            self.tpPlanRP is not None or
            self.iniBeneficio is not None or
            self.altBeneficio is not None or
            self.fimBeneficio is not None
        ):
            return True
        else:
            return False

    def export(self, outfile, level, namespace_='', name_='infoBeneficio', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('infoBeneficio')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='infoBeneficio')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='infoBeneficio', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='infoBeneficio'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='infoBeneficio', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.tpPlanRP is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpPlanRP>%s</%stpPlanRP>%s' % (namespace_, self.gds_format_integer(self.tpPlanRP, input_name='tpPlanRP'), namespace_, eol_))
        if self.iniBeneficio is not None:
            self.iniBeneficio.export(outfile, level, namespace_, name_='iniBeneficio', pretty_print=pretty_print)
        if self.altBeneficio is not None:
            self.altBeneficio.export(outfile, level, namespace_, name_='altBeneficio', pretty_print=pretty_print)
        if self.fimBeneficio is not None:
            self.fimBeneficio.export(outfile, level, namespace_, name_='fimBeneficio', pretty_print=pretty_print)

    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'tpPlanRP':
            # Integer child: reject non-numeric text with a parse error.
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'tpPlanRP')
            self.tpPlanRP = ival_
        elif nodeName_ == 'iniBeneficio':
            obj_ = TDadosBeneficio.factory()
            obj_.build(child_)
            self.iniBeneficio = obj_
            obj_.original_tagname_ = 'iniBeneficio'
        elif nodeName_ == 'altBeneficio':
            obj_ = TDadosBeneficio.factory()
            obj_.build(child_)
            self.altBeneficio = obj_
            obj_.original_tagname_ = 'altBeneficio'
        elif nodeName_ == 'fimBeneficio':
            obj_ = fimBeneficio.factory()
            obj_.build(child_)
            self.fimBeneficio = obj_
            obj_.original_tagname_ = 'fimBeneficio'
# end class infoBeneficio


class tpPlanRP(GeneratedsSuper):
    # Generated element class for <tpPlanRP>: no attributes and no children.
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tpPlanRP)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tpPlanRP.subclass:
            return tpPlanRP.subclass(*args_, **kwargs_)
        else:
            return tpPlanRP(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # Always False: member-less type.
        if (

        ):
            return True
        else:
            return False

    def export(self, outfile, level, namespace_='', name_='tpPlanRP', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpPlanRP')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpPlanRP')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpPlanRP', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpPlanRP'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='tpPlanRP', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class tpPlanRP


class fimBeneficio(GeneratedsSuper):
    """Social-security benefit information - termination (eSocial:
    "Informações relativas a benefícios previdenciários - Término").
    Validation: may only be reported when prior benefit information exists
    for the beneficiary identified in {ideBenef} and no termination has yet
    been reported for it."""
    subclass = None
    superclass = None

    def __init__(self, tpBenef=None, nrBenefic=None, dtFimBenef=None, mtvFim=None):
        self.original_tagname_ = None
        self.tpBenef = tpBenef
        self.nrBenefic = nrBenefic
        # Accept dtFimBenef either as an ISO 'YYYY-MM-DD' string or as a
        # date object; strings are parsed eagerly here.
        if isinstance(dtFimBenef, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(dtFimBenef, '%Y-%m-%d').date()
        else:
            initvalue_ = dtFimBenef
        self.dtFimBenef = initvalue_
        self.mtvFim = mtvFim

    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, fimBeneficio)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if fimBeneficio.subclass:
            return fimBeneficio.subclass(*args_, **kwargs_)
        else:
            return fimBeneficio(*args_, **kwargs_)
    factory = staticmethod(factory)

    def get_tpBenef(self):
        return self.tpBenef

    def set_tpBenef(self, tpBenef):
        self.tpBenef = tpBenef

    def get_nrBenefic(self):
        return self.nrBenefic

    def set_nrBenefic(self, nrBenefic):
        self.nrBenefic = nrBenefic

    def get_dtFimBenef(self):
        return self.dtFimBenef

    def set_dtFimBenef(self, dtFimBenef):
        self.dtFimBenef = dtFimBenef

    def get_mtvFim(self):
        return self.mtvFim

    def set_mtvFim(self, mtvFim):
        self.mtvFim = mtvFim

    def hasContent_(self):
        if (
            self.tpBenef is not None or
            self.nrBenefic is not None or
            self.dtFimBenef is not None or
            self.mtvFim is not None
        ):
            return True
        else:
            return False

    def export(self, outfile, level, namespace_='', name_='fimBeneficio', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('fimBeneficio')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='fimBeneficio')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='fimBeneficio', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='fimBeneficio'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='fimBeneficio', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.tpBenef is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpBenef>%s</%stpBenef>%s' % (namespace_, self.gds_format_integer(self.tpBenef, input_name='tpBenef'), namespace_, eol_))
        if self.nrBenefic is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrBenefic>%s</%snrBenefic>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrBenefic), input_name='nrBenefic')), namespace_, eol_))
        if self.dtFimBenef is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtFimBenef>%s</%sdtFimBenef>%s' % (namespace_, self.gds_format_date(self.dtFimBenef, input_name='dtFimBenef'), namespace_, eol_))
        if self.mtvFim is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%smtvFim>%s</%smtvFim>%s' % (namespace_, self.gds_format_integer(self.mtvFim, input_name='mtvFim'), namespace_, eol_))

    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'tpBenef':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'tpBenef')
            self.tpBenef = ival_
        elif nodeName_ == 'nrBenefic':
            nrBenefic_ = child_.text
            nrBenefic_ = self.gds_validate_string(nrBenefic_, node, 'nrBenefic')
            self.nrBenefic = nrBenefic_
        elif nodeName_ == 'dtFimBenef':
            # Date child: parsed by the generateDS date helper.
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.dtFimBenef = dval_
        elif nodeName_ == 'mtvFim':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'mtvFim')
            self.mtvFim = ival_
# end class fimBeneficio


class tpBenef(GeneratedsSuper):
    # Generated element class for <tpBenef>: no attributes and no children.
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tpBenef)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tpBenef.subclass:
            return tpBenef.subclass(*args_, **kwargs_)
        else:
            return tpBenef(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # Always False: member-less type (continues on the next chunk line).
        if (

        ):
            return True
        # -- tpBenef.hasContent_ (continued from the previous chunk line) --
        else:
            return False

    def export(self, outfile, level, namespace_='', name_='tpBenef', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpBenef')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpBenef')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpBenef', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpBenef'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='tpBenef', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class tpBenef


class nrBenefic(GeneratedsSuper):
    # Generated element class for <nrBenefic>: no attributes and no children.
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nrBenefic)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if nrBenefic.subclass:
            return nrBenefic.subclass(*args_, **kwargs_)
        else:
            return nrBenefic(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # Always False: member-less type.
        if (

        ):
            return True
        else:
            return False

    def export(self, outfile, level, namespace_='', name_='nrBenefic', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrBenefic')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrBenefic')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrBenefic', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrBenefic'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='nrBenefic', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class nrBenefic


class dtFimBenef(GeneratedsSuper):
    # Generated element class for <dtFimBenef>: no attributes and no children.
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dtFimBenef)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dtFimBenef.subclass:
            return dtFimBenef.subclass(*args_, **kwargs_)
        else:
            return dtFimBenef(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # Always False: member-less type.
        if (

        ):
            return True
        else:
            return False

    def export(self, outfile, level, namespace_='', name_='dtFimBenef', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtFimBenef')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtFimBenef')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtFimBenef', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtFimBenef'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='dtFimBenef', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dtFimBenef


class mtvFim(GeneratedsSuper):
    # Generated element class for <mtvFim>: no attributes and no children.
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, mtvFim)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if mtvFim.subclass:
            return mtvFim.subclass(*args_, **kwargs_)
        else:
            return mtvFim(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # Always False: member-less type.
        if (

        ):
            return True
        else:
            return False

    def export(self, outfile, level, namespace_='', name_='mtvFim', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('mtvFim')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='mtvFim')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='mtvFim', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='mtvFim'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='mtvFim', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class mtvFim


class TIdeEveTrab(GeneratedsSuper):
    """Event identification (eSocial: "Identificação do evento")."""
    subclass = None
    superclass = None

    def __init__(self, indRetif=None, nrRecibo=None, tpAmb=None, procEmi=None, verProc=None):
        self.original_tagname_ = None
        # indRetif/tpAmb/procEmi: integer codes; nrRecibo/verProc: strings
        # (as parsed in buildChildren below).
        self.indRetif = indRetif
        self.nrRecibo = nrRecibo
        self.tpAmb = tpAmb
        self.procEmi = procEmi
        self.verProc = verProc

    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TIdeEveTrab)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TIdeEveTrab.subclass:
            return TIdeEveTrab.subclass(*args_, **kwargs_)
        else:
            return TIdeEveTrab(*args_, **kwargs_)
    factory = staticmethod(factory)

    def get_indRetif(self):
        return self.indRetif

    def set_indRetif(self, indRetif):
        self.indRetif = indRetif

    def get_nrRecibo(self):
        return self.nrRecibo

    def set_nrRecibo(self, nrRecibo):
        self.nrRecibo = nrRecibo

    def get_tpAmb(self):
        return self.tpAmb

    def set_tpAmb(self, tpAmb):
        self.tpAmb = tpAmb

    def get_procEmi(self):
        return self.procEmi

    def set_procEmi(self, procEmi):
        self.procEmi = procEmi

    def get_verProc(self):
        return self.verProc

    def set_verProc(self, verProc):
        self.verProc = verProc

    def hasContent_(self):
        if (
            self.indRetif is not None or
            self.nrRecibo is not None or
            self.tpAmb is not None or
            self.procEmi is not None or
            self.verProc is not None
        ):
            return True
        else:
            return False

    def export(self, outfile, level, namespace_='', name_='TIdeEveTrab', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TIdeEveTrab')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TIdeEveTrab')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TIdeEveTrab', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TIdeEveTrab'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='TIdeEveTrab', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.indRetif is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sindRetif>%s</%sindRetif>%s' % (namespace_, self.gds_format_integer(self.indRetif, input_name='indRetif'), namespace_, eol_))
        if self.nrRecibo is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrRecibo>%s</%snrRecibo>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrRecibo), input_name='nrRecibo')), namespace_, eol_))
        if self.tpAmb is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpAmb>%s</%stpAmb>%s' % (namespace_, self.gds_format_integer(self.tpAmb, input_name='tpAmb'), namespace_, eol_))
        if self.procEmi is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sprocEmi>%s</%sprocEmi>%s' % (namespace_, self.gds_format_integer(self.procEmi, input_name='procEmi'), namespace_, eol_))
        if self.verProc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sverProc>%s</%sverProc>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.verProc), input_name='verProc')), namespace_, eol_))

    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'indRetif':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'indRetif')
            self.indRetif = ival_
        elif nodeName_ == 'nrRecibo':
            nrRecibo_ = child_.text
            nrRecibo_ = self.gds_validate_string(nrRecibo_, node, 'nrRecibo')
            self.nrRecibo = nrRecibo_
        elif nodeName_ == 'tpAmb':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'tpAmb')
            self.tpAmb = ival_
        elif nodeName_ == 'procEmi':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'procEmi')
            self.procEmi = ival_
        elif nodeName_ == 'verProc':
            verProc_ = child_.text
            verProc_ = self.gds_validate_string(verProc_, node, 'verProc')
            self.verProc = verProc_
# end class TIdeEveTrab


class indRetif(GeneratedsSuper):
    # Generated element class for <indRetif>: no attributes and no children.
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, indRetif)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if indRetif.subclass:
            return indRetif.subclass(*args_, **kwargs_)
        else:
            return indRetif(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # Always False: member-less type.
        if (

        ):
            return True
        else:
            return False

    def export(self, outfile, level, namespace_='', name_='indRetif', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('indRetif')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='indRetif')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='indRetif', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='indRetif'):
        pass

    def exportChildren(self, outfile, level,
                       # -- indRetif.exportChildren signature (continued) --
                       namespace_='', name_='indRetif', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class indRetif


class nrRecibo(GeneratedsSuper):
    # Generated element class for <nrRecibo>: no attributes and no children.
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nrRecibo)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if nrRecibo.subclass:
            return nrRecibo.subclass(*args_, **kwargs_)
        else:
            return nrRecibo(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # Always False: member-less type.
        if (

        ):
            return True
        else:
            return False

    def export(self, outfile, level, namespace_='', name_='nrRecibo', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrRecibo')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrRecibo')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrRecibo', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrRecibo'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='nrRecibo', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class nrRecibo


class tpAmb(GeneratedsSuper):
    # Generated element class for <tpAmb>: no attributes and no children.
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tpAmb)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tpAmb.subclass:
            return tpAmb.subclass(*args_, **kwargs_)
        else:
            return tpAmb(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # Always False: member-less type.
        if (

        ):
            return True
        else:
            return False

    def export(self, outfile, level, namespace_='', name_='tpAmb', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpAmb')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpAmb')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpAmb', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpAmb'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='tpAmb', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class tpAmb


class procEmi(GeneratedsSuper):
    # Generated element class for <procEmi>: no attributes and no children.
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, procEmi)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if procEmi.subclass:
            return procEmi.subclass(*args_, **kwargs_)
        else:
            return procEmi(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # Always False: member-less type.
        if (

        ):
            return True
        else:
            return False

    def export(self, outfile, level, namespace_='', name_='procEmi', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('procEmi')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='procEmi')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='procEmi', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='procEmi'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='procEmi', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class procEmi


class verProc(GeneratedsSuper):
    # Generated element class for <verProc>: no attributes and no children.
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, verProc)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if verProc.subclass:
            return verProc.subclass(*args_, **kwargs_)
        else:
            return verProc(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # Always False: member-less type.
        if (

        ):
            return True
        else:
            return False

    def export(self, outfile, level, namespace_='', name_='verProc', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('verProc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='verProc')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='verProc', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='verProc'):
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='verProc', fromsubclass_=False, pretty_print=True):
        pass

    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class verProc


class TEmprPJ(GeneratedsSuper):
    """Employer information - legal entity (eSocial: "Informações do
    Empregador PJ")."""
    subclass = None
    superclass = None

    def __init__(self, tpInsc=None, nrInsc=None):
        self.original_tagname_ = None
        self.tpInsc = tpInsc
        self.nrInsc = nrInsc

    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TEmprPJ)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TEmprPJ.subclass:
            return TEmprPJ.subclass(*args_, **kwargs_)
        else:
            return TEmprPJ(*args_, **kwargs_)
    factory = staticmethod(factory)

    def get_tpInsc(self):
        return self.tpInsc

    def set_tpInsc(self, tpInsc):
        self.tpInsc = tpInsc

    def get_nrInsc(self):
        return self.nrInsc

    def set_nrInsc(self, nrInsc):
        self.nrInsc = nrInsc

    def hasContent_(self):
        if (
            self.tpInsc is not None or
            self.nrInsc is not None
        ):
            return True
        else:
            return False

    def export(self, outfile, level, namespace_='', name_='TEmprPJ', namespacedef_='', pretty_print=True):
        # NOTE(review): this method continues past the end of the visible
        # chunk; the trailing call below is completed on the following line.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TEmprPJ')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TEmprPJ')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TEmprPJ', pretty_print=pretty_print)
            showIndent(outfile, level,
pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TEmprPJ'): pass def exportChildren(self, outfile, level, namespace_='', name_='TEmprPJ', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.tpInsc is not None: showIndent(outfile, level, pretty_print) outfile.write('<%stpInsc>%s</%stpInsc>%s' % (namespace_, self.gds_format_integer(self.tpInsc, input_name='tpInsc'), namespace_, eol_)) if self.nrInsc is not None: showIndent(outfile, level, pretty_print) outfile.write('<%snrInsc>%s</%snrInsc>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrInsc), input_name='nrInsc')), namespace_, eol_)) def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): if nodeName_ == 'tpInsc': sval_ = child_.text try: ival_ = int(sval_) except (TypeError, ValueError) as exp: raise_parse_error(child_, 'requires integer: %s' % exp) ival_ = self.gds_validate_integer(ival_, node, 'tpInsc') self.tpInsc = ival_ elif nodeName_ == 'nrInsc': nrInsc_ = child_.text nrInsc_ = self.gds_validate_string(nrInsc_, node, 'nrInsc') self.nrInsc = nrInsc_ # end class TEmprPJ class tpInsc(GeneratedsSuper): subclass = None superclass = None def __init__(self): self.original_tagname_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, tpInsc) if subclass is not None: return subclass(*args_, **kwargs_) if tpInsc.subclass: return tpInsc.subclass(*args_, **kwargs_) else: return tpInsc(*args_, **kwargs_) factory = 
staticmethod(factory) def hasContent_(self): if ( ): return True else: return False def export(self, outfile, level, namespace_='', name_='tpInsc', namespacedef_='', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpInsc') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpInsc') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespace_='', name_='tpInsc', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpInsc'): pass def exportChildren(self, outfile, level, namespace_='', name_='tpInsc', fromsubclass_=False, pretty_print=True): pass def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass # end class tpInsc class nrInsc(GeneratedsSuper): subclass = None superclass = None def __init__(self): self.original_tagname_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, nrInsc) if subclass is not None: return subclass(*args_, **kwargs_) if nrInsc.subclass: return nrInsc.subclass(*args_, **kwargs_) else: return nrInsc(*args_, **kwargs_) factory = staticmethod(factory) def hasContent_(self): 
if ( ): return True else: return False def export(self, outfile, level, namespace_='', name_='nrInsc', namespacedef_='', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrInsc') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrInsc') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespace_='', name_='nrInsc', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrInsc'): pass def exportChildren(self, outfile, level, namespace_='', name_='nrInsc', fromsubclass_=False, pretty_print=True): pass def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass # end class nrInsc class TDadosBenef(GeneratedsSuper): """Dados de beneficiário""" subclass = None superclass = None def __init__(self, dadosNasc=None, endereco=None): self.original_tagname_ = None self.dadosNasc = dadosNasc self.endereco = endereco def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, TDadosBenef) if subclass is not None: return subclass(*args_, **kwargs_) if TDadosBenef.subclass: return TDadosBenef.subclass(*args_, **kwargs_) else: 
return TDadosBenef(*args_, **kwargs_) factory = staticmethod(factory) def get_dadosNasc(self): return self.dadosNasc def set_dadosNasc(self, dadosNasc): self.dadosNasc = dadosNasc def get_endereco(self): return self.endereco def set_endereco(self, endereco): self.endereco = endereco def hasContent_(self): if ( self.dadosNasc is not None or self.endereco is not None ): return True else: return False def export(self, outfile, level, namespace_='', name_='TDadosBenef', namespacedef_='', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('TDadosBenef') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespace_, name_='TDadosBenef') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespace_='', name_='TDadosBenef', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TDadosBenef'): pass def exportChildren(self, outfile, level, namespace_='', name_='TDadosBenef', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.dadosNasc is not None: self.dadosNasc.export(outfile, level, namespace_, name_='dadosNasc', pretty_print=pretty_print) if self.endereco is not None: self.endereco.export(outfile, level, namespace_, name_='endereco', pretty_print=pretty_print) def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] 
self.buildChildren(child, node, nodeName_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): if nodeName_ == 'dadosNasc': obj_ = dadosNasc.factory() obj_.build(child_) self.dadosNasc = obj_ obj_.original_tagname_ = 'dadosNasc' elif nodeName_ == 'endereco': obj_ = endereco.factory() obj_.build(child_) self.endereco = obj_ obj_.original_tagname_ = 'endereco' # end class TDadosBenef class dadosNasc(GeneratedsSuper): """Informações de nascimento do beneficiário""" subclass = None superclass = None def __init__(self, dtNascto=None, codMunic=None, uf=None, paisNascto=None, paisNac=None, nmMae=None, nmPai=None): self.original_tagname_ = None if isinstance(dtNascto, BaseStrType_): initvalue_ = datetime_.datetime.strptime(dtNascto, '%Y-%m-%d').date() else: initvalue_ = dtNascto self.dtNascto = initvalue_ self.codMunic = codMunic self.uf = uf self.paisNascto = paisNascto self.paisNac = paisNac self.nmMae = nmMae self.nmPai = nmPai def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, dadosNasc) if subclass is not None: return subclass(*args_, **kwargs_) if dadosNasc.subclass: return dadosNasc.subclass(*args_, **kwargs_) else: return dadosNasc(*args_, **kwargs_) factory = staticmethod(factory) def get_dtNascto(self): return self.dtNascto def set_dtNascto(self, dtNascto): self.dtNascto = dtNascto def get_codMunic(self): return self.codMunic def set_codMunic(self, codMunic): self.codMunic = codMunic def get_uf(self): return self.uf def set_uf(self, uf): self.uf = uf def get_paisNascto(self): return self.paisNascto def set_paisNascto(self, paisNascto): self.paisNascto = paisNascto def get_paisNac(self): return self.paisNac def set_paisNac(self, paisNac): self.paisNac = paisNac def get_nmMae(self): return self.nmMae def set_nmMae(self, nmMae): self.nmMae = nmMae def get_nmPai(self): return self.nmPai def 
set_nmPai(self, nmPai): self.nmPai = nmPai def hasContent_(self): if ( self.dtNascto is not None or self.codMunic is not None or self.uf is not None or self.paisNascto is not None or self.paisNac is not None or self.nmMae is not None or self.nmPai is not None ): return True else: return False def export(self, outfile, level, namespace_='', name_='dadosNasc', namespacedef_='', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('dadosNasc') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespace_, name_='dadosNasc') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespace_='', name_='dadosNasc', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dadosNasc'): pass def exportChildren(self, outfile, level, namespace_='', name_='dadosNasc', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.dtNascto is not None: showIndent(outfile, level, pretty_print) outfile.write('<%sdtNascto>%s</%sdtNascto>%s' % (namespace_, self.gds_format_date(self.dtNascto, input_name='dtNascto'), namespace_, eol_)) if self.codMunic is not None: showIndent(outfile, level, pretty_print) outfile.write('<%scodMunic>%s</%scodMunic>%s' % (namespace_, self.gds_format_integer(self.codMunic, input_name='codMunic'), namespace_, eol_)) if self.uf is not None: showIndent(outfile, level, pretty_print) outfile.write('<%suf>%s</%suf>%s' % (namespace_, 
self.gds_encode(self.gds_format_string(quote_xml(self.uf), input_name='uf')), namespace_, eol_)) if self.paisNascto is not None: showIndent(outfile, level, pretty_print) outfile.write('<%spaisNascto>%s</%spaisNascto>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.paisNascto), input_name='paisNascto')), namespace_, eol_)) if self.paisNac is not None: showIndent(outfile, level, pretty_print) outfile.write('<%spaisNac>%s</%spaisNac>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.paisNac), input_name='paisNac')), namespace_, eol_)) if self.nmMae is not None: showIndent(outfile, level, pretty_print) outfile.write('<%snmMae>%s</%snmMae>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nmMae), input_name='nmMae')), namespace_, eol_)) if self.nmPai is not None: showIndent(outfile, level, pretty_print) outfile.write('<%snmPai>%s</%snmPai>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nmPai), input_name='nmPai')), namespace_, eol_)) def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): if nodeName_ == 'dtNascto': sval_ = child_.text dval_ = self.gds_parse_date(sval_) self.dtNascto = dval_ elif nodeName_ == 'codMunic': sval_ = child_.text try: ival_ = int(sval_) except (TypeError, ValueError) as exp: raise_parse_error(child_, 'requires integer: %s' % exp) ival_ = self.gds_validate_integer(ival_, node, 'codMunic') self.codMunic = ival_ elif nodeName_ == 'uf': uf_ = child_.text uf_ = self.gds_validate_string(uf_, node, 'uf') self.uf = uf_ elif nodeName_ == 'paisNascto': paisNascto_ = child_.text paisNascto_ = self.gds_validate_string(paisNascto_, node, 'paisNascto') 
self.paisNascto = paisNascto_ elif nodeName_ == 'paisNac': paisNac_ = child_.text paisNac_ = self.gds_validate_string(paisNac_, node, 'paisNac') self.paisNac = paisNac_ elif nodeName_ == 'nmMae': nmMae_ = child_.text nmMae_ = self.gds_validate_string(nmMae_, node, 'nmMae') self.nmMae = nmMae_ elif nodeName_ == 'nmPai': nmPai_ = child_.text nmPai_ = self.gds_validate_string(nmPai_, node, 'nmPai') self.nmPai = nmPai_ # end class dadosNasc class dtNascto(GeneratedsSuper): subclass = None superclass = None def __init__(self): self.original_tagname_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, dtNascto) if subclass is not None: return subclass(*args_, **kwargs_) if dtNascto.subclass: return dtNascto.subclass(*args_, **kwargs_) else: return dtNascto(*args_, **kwargs_) factory = staticmethod(factory) def hasContent_(self): if ( ): return True else: return False def export(self, outfile, level, namespace_='', name_='dtNascto', namespacedef_='', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtNascto') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtNascto') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespace_='', name_='dtNascto', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtNascto'): pass def exportChildren(self, outfile, level, namespace_='', name_='dtNascto', fromsubclass_=False, 
pretty_print=True): pass def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass # end class dtNascto class codMunic(GeneratedsSuper): subclass = None superclass = None def __init__(self): self.original_tagname_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, codMunic) if subclass is not None: return subclass(*args_, **kwargs_) if codMunic.subclass: return codMunic.subclass(*args_, **kwargs_) else: return codMunic(*args_, **kwargs_) factory = staticmethod(factory) def hasContent_(self): if ( ): return True else: return False def export(self, outfile, level, namespace_='', name_='codMunic', namespacedef_='', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('codMunic') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespace_, name_='codMunic') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespace_='', name_='codMunic', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='codMunic'): pass def exportChildren(self, outfile, level, namespace_='', name_='codMunic', fromsubclass_=False, pretty_print=True): 
pass def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass # end class codMunic class uf(GeneratedsSuper): subclass = None superclass = None def __init__(self): self.original_tagname_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, uf) if subclass is not None: return subclass(*args_, **kwargs_) if uf.subclass: return uf.subclass(*args_, **kwargs_) else: return uf(*args_, **kwargs_) factory = staticmethod(factory) def hasContent_(self): if ( ): return True else: return False def export(self, outfile, level, namespace_='', name_='uf', namespacedef_='', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('uf') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespace_, name_='uf') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespace_='', name_='uf', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='uf'): pass def exportChildren(self, outfile, level, namespace_='', name_='uf', fromsubclass_=False, pretty_print=True): pass def build(self, node): already_processed = set() self.buildAttributes(node, 
node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass # end class uf class paisNascto(GeneratedsSuper): subclass = None superclass = None def __init__(self): self.original_tagname_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, paisNascto) if subclass is not None: return subclass(*args_, **kwargs_) if paisNascto.subclass: return paisNascto.subclass(*args_, **kwargs_) else: return paisNascto(*args_, **kwargs_) factory = staticmethod(factory) def hasContent_(self): if ( ): return True else: return False def export(self, outfile, level, namespace_='', name_='paisNascto', namespacedef_='', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('paisNascto') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespace_, name_='paisNascto') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespace_='', name_='paisNascto', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='paisNascto'): pass def exportChildren(self, outfile, level, namespace_='', name_='paisNascto', fromsubclass_=False, pretty_print=True): pass def build(self, node): already_processed = set() self.buildAttributes(node, 
node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass # end class paisNascto class paisNac(GeneratedsSuper): subclass = None superclass = None def __init__(self): self.original_tagname_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, paisNac) if subclass is not None: return subclass(*args_, **kwargs_) if paisNac.subclass: return paisNac.subclass(*args_, **kwargs_) else: return paisNac(*args_, **kwargs_) factory = staticmethod(factory) def hasContent_(self): if ( ): return True else: return False def export(self, outfile, level, namespace_='', name_='paisNac', namespacedef_='', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('paisNac') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespace_, name_='paisNac') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespace_='', name_='paisNac', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='paisNac'): pass def exportChildren(self, outfile, level, namespace_='', name_='paisNac', fromsubclass_=False, pretty_print=True): pass def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, 
already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass # end class paisNac class nmMae(GeneratedsSuper): subclass = None superclass = None def __init__(self): self.original_tagname_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, nmMae) if subclass is not None: return subclass(*args_, **kwargs_) if nmMae.subclass: return nmMae.subclass(*args_, **kwargs_) else: return nmMae(*args_, **kwargs_) factory = staticmethod(factory) def hasContent_(self): if ( ): return True else: return False def export(self, outfile, level, namespace_='', name_='nmMae', namespacedef_='', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('nmMae') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespace_, name_='nmMae') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespace_='', name_='nmMae', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nmMae'): pass def exportChildren(self, outfile, level, namespace_='', name_='nmMae', fromsubclass_=False, pretty_print=True): pass def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = 
Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass # end class nmMae class nmPai(GeneratedsSuper): subclass = None superclass = None def __init__(self): self.original_tagname_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, nmPai) if subclass is not None: return subclass(*args_, **kwargs_) if nmPai.subclass: return nmPai.subclass(*args_, **kwargs_) else: return nmPai(*args_, **kwargs_) factory = staticmethod(factory) def hasContent_(self): if ( ): return True else: return False def export(self, outfile, level, namespace_='', name_='nmPai', namespacedef_='', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('nmPai') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespace_, name_='nmPai') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespace_='', name_='nmPai', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nmPai'): pass def exportChildren(self, outfile, level, namespace_='', name_='nmPai', fromsubclass_=False, pretty_print=True): pass def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] 
self.buildChildren(child, node, nodeName_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass # end class nmPai class endereco(GeneratedsSuper): """Grupo de informações do endereço do Trabalhador""" subclass = None superclass = None def __init__(self, brasil=None, exterior=None): self.original_tagname_ = None self.brasil = brasil self.exterior = exterior def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, endereco) if subclass is not None: return subclass(*args_, **kwargs_) if endereco.subclass: return endereco.subclass(*args_, **kwargs_) else: return endereco(*args_, **kwargs_) factory = staticmethod(factory) def get_brasil(self): return self.brasil def set_brasil(self, brasil): self.brasil = brasil def get_exterior(self): return self.exterior def set_exterior(self, exterior): self.exterior = exterior def hasContent_(self): if ( self.brasil is not None or self.exterior is not None ): return True else: return False def export(self, outfile, level, namespace_='', name_='endereco', namespacedef_='', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('endereco') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespace_, name_='endereco') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespace_='', name_='endereco', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def 
class TEnderecoBrasil(GeneratedsSuper):
    """Brazilian address information.
    (Original docstring: 'Informações do Endereço no Brasil'.)
    All fields are optional simple values; ``codMunic`` is an integer
    (IBGE municipality code — presumably; verify against the XSD), the
    rest are strings."""
    subclass = None
    superclass = None
    def __init__(self, tpLograd=None, dscLograd=None, nrLograd=None, complemento=None, bairro=None, cep=None, codMunic=None, uf=None):
        self.original_tagname_ = None
        self.tpLograd = tpLograd
        self.dscLograd = dscLograd
        self.nrLograd = nrLograd
        self.complemento = complemento
        self.bairro = bairro
        self.cep = cep
        self.codMunic = codMunic  # parsed as int by buildChildren
        self.uf = uf
    def factory(*args_, **kwargs_):
        # Subclass-substitution hook (external module, class hook, self).
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TEnderecoBrasil)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TEnderecoBrasil.subclass:
            return TEnderecoBrasil.subclass(*args_, **kwargs_)
        else:
            return TEnderecoBrasil(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_tpLograd(self):
        return self.tpLograd
    def set_tpLograd(self, tpLograd):
        self.tpLograd = tpLograd
    def get_dscLograd(self):
        return self.dscLograd
    def set_dscLograd(self, dscLograd):
        self.dscLograd = dscLograd
    def get_nrLograd(self):
        return self.nrLograd
    def set_nrLograd(self, nrLograd):
        self.nrLograd = nrLograd
    def get_complemento(self):
        return self.complemento
    def set_complemento(self, complemento):
        self.complemento = complemento
    def get_bairro(self):
        return self.bairro
    def set_bairro(self, bairro):
        self.bairro = bairro
    def get_cep(self):
        return self.cep
    def set_cep(self, cep):
        self.cep = cep
    def get_codMunic(self):
        return self.codMunic
    def set_codMunic(self, codMunic):
        self.codMunic = codMunic
    def get_uf(self):
        return self.uf
    def set_uf(self, uf):
        self.uf = uf
    def hasContent_(self):
        # Any non-None field means the element must be serialized
        # with an open/close tag pair rather than self-closed.
        if (
            self.tpLograd is not None or
            self.dscLograd is not None or
            self.nrLograd is not None or
            self.complemento is not None or
            self.bairro is not None or
            self.cep is not None or
            self.codMunic is not None or
            self.uf is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='TEnderecoBrasil', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TEnderecoBrasil')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TEnderecoBrasil')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TEnderecoBrasil', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TEnderecoBrasil'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='TEnderecoBrasil', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # String children are XML-quoted and encoded; codMunic is
        # formatted as an integer.
        if self.tpLograd is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpLograd>%s</%stpLograd>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.tpLograd), input_name='tpLograd')), namespace_, eol_))
        if self.dscLograd is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdscLograd>%s</%sdscLograd>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.dscLograd), input_name='dscLograd')), namespace_, eol_))
        if self.nrLograd is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrLograd>%s</%snrLograd>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrLograd), input_name='nrLograd')), namespace_, eol_))
        if self.complemento is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scomplemento>%s</%scomplemento>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.complemento), input_name='complemento')), namespace_, eol_))
        if self.bairro is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sbairro>%s</%sbairro>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.bairro), input_name='bairro')), namespace_, eol_))
        if self.cep is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scep>%s</%scep>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.cep), input_name='cep')), namespace_, eol_))
        if self.codMunic is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scodMunic>%s</%scodMunic>%s' % (namespace_, self.gds_format_integer(self.codMunic, input_name='codMunic'), namespace_, eol_))
        if self.uf is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%suf>%s</%suf>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.uf), input_name='uf')), namespace_, eol_))
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Parse each recognized child element's text; codMunic must be an
        # integer, anything else raises a parse error.
        if nodeName_ == 'tpLograd':
            tpLograd_ = child_.text
            tpLograd_ = self.gds_validate_string(tpLograd_, node, 'tpLograd')
            self.tpLograd = tpLograd_
        elif nodeName_ == 'dscLograd':
            dscLograd_ = child_.text
            dscLograd_ = self.gds_validate_string(dscLograd_, node, 'dscLograd')
            self.dscLograd = dscLograd_
        elif nodeName_ == 'nrLograd':
            nrLograd_ = child_.text
            nrLograd_ = self.gds_validate_string(nrLograd_, node, 'nrLograd')
            self.nrLograd = nrLograd_
        elif nodeName_ == 'complemento':
            complemento_ = child_.text
            complemento_ = self.gds_validate_string(complemento_, node, 'complemento')
            self.complemento = complemento_
        elif nodeName_ == 'bairro':
            bairro_ = child_.text
            bairro_ = self.gds_validate_string(bairro_, node, 'bairro')
            self.bairro = bairro_
        elif nodeName_ == 'cep':
            cep_ = child_.text
            cep_ = self.gds_validate_string(cep_, node, 'cep')
            self.cep = cep_
        elif nodeName_ == 'codMunic':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'codMunic')
            self.codMunic = ival_
        elif nodeName_ == 'uf':
            uf_ = child_.text
            uf_ = self.gds_validate_string(uf_, node, 'uf')
            self.uf = uf_
# end class TEnderecoBrasil
class tpLograd(GeneratedsSuper):
    """Empty placeholder element type generated from the XSD; no
    attributes, no children."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tpLograd)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tpLograd.subclass:
            return tpLograd.subclass(*args_, **kwargs_)
        else:
            return tpLograd(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # NOTE: the bare parentheses form an empty tuple, which is falsy,
        # so this always returns False (generated pattern for empty types).
        if (

        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tpLograd', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpLograd')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpLograd')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpLograd', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpLograd'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tpLograd', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class tpLograd
class dscLograd(GeneratedsSuper):
    """Empty placeholder element type generated from the XSD; no
    attributes, no children."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dscLograd)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dscLograd.subclass:
            return dscLograd.subclass(*args_, **kwargs_)
        else:
            return dscLograd(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Empty-tuple condition: always False for this content-less type.
        if (

        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='dscLograd', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dscLograd')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dscLograd')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dscLograd', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dscLograd'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='dscLograd', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dscLograd
class nrLograd(GeneratedsSuper):
    """Empty placeholder element type generated from the XSD; no
    attributes, no children."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nrLograd)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if nrLograd.subclass:
            return nrLograd.subclass(*args_, **kwargs_)
        else:
            return nrLograd(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Empty-tuple condition: always False for this content-less type.
        if (

        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='nrLograd', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrLograd')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrLograd')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrLograd', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrLograd'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='nrLograd', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class nrLograd
class complemento(GeneratedsSuper):
    """Empty placeholder element type generated from the XSD; no
    attributes, no children."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, complemento)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if complemento.subclass:
            return complemento.subclass(*args_, **kwargs_)
        else:
            return complemento(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Empty-tuple condition: always False for this content-less type.
        if (

        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='complemento', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('complemento')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='complemento')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='complemento', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='complemento'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='complemento', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class complemento
class bairro(GeneratedsSuper):
    """Empty placeholder element type generated from the XSD; no
    attributes, no children."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, bairro)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if bairro.subclass:
            return bairro.subclass(*args_, **kwargs_)
        else:
            return bairro(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Empty-tuple condition: always False for this content-less type.
        if (

        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='bairro', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('bairro')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='bairro')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='bairro', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='bairro'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='bairro', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class bairro
class cep(GeneratedsSuper):
    """Empty placeholder element type generated from the XSD; no
    attributes, no children."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, cep)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if cep.subclass:
            return cep.subclass(*args_, **kwargs_)
        else:
            return cep(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Empty-tuple condition: always False for this content-less type.
        if (

        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='cep', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('cep')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cep')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='cep', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='cep'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='cep', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cep
class TEnderecoExterior(GeneratedsSuper):
    """Foreign address information.
    (Original docstring: 'Informações do Endereço no Exterior'.)
    All fields are optional strings; unlike TEnderecoBrasil there is no
    integer municipality code — the city is a free-text name and the
    postal code a string."""
    subclass = None
    superclass = None
    def __init__(self, paisResid=None, dscLograd=None, nrLograd=None, complemento=None, bairro=None, nmCid=None, codPostal=None):
        self.original_tagname_ = None
        self.paisResid = paisResid
        self.dscLograd = dscLograd
        self.nrLograd = nrLograd
        self.complemento = complemento
        self.bairro = bairro
        self.nmCid = nmCid
        self.codPostal = codPostal
    def factory(*args_, **kwargs_):
        # Subclass-substitution hook (external module, class hook, self).
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TEnderecoExterior)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TEnderecoExterior.subclass:
            return TEnderecoExterior.subclass(*args_, **kwargs_)
        else:
            return TEnderecoExterior(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_paisResid(self):
        return self.paisResid
    def set_paisResid(self, paisResid):
        self.paisResid = paisResid
    def get_dscLograd(self):
        return self.dscLograd
    def set_dscLograd(self, dscLograd):
        self.dscLograd = dscLograd
    def get_nrLograd(self):
        return self.nrLograd
    def set_nrLograd(self, nrLograd):
        self.nrLograd = nrLograd
    def get_complemento(self):
        return self.complemento
    def set_complemento(self, complemento):
        self.complemento = complemento
    def get_bairro(self):
        return self.bairro
    def set_bairro(self, bairro):
        self.bairro = bairro
    def get_nmCid(self):
        return self.nmCid
    def set_nmCid(self, nmCid):
        self.nmCid = nmCid
    def get_codPostal(self):
        return self.codPostal
    def set_codPostal(self, codPostal):
        self.codPostal = codPostal
    def hasContent_(self):
        # Any non-None field requires open/close tags instead of
        # a self-closing element.
        if (
            self.paisResid is not None or
            self.dscLograd is not None or
            self.nrLograd is not None or
            self.complemento is not None or
            self.bairro is not None or
            self.nmCid is not None or
            self.codPostal is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='TEnderecoExterior', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TEnderecoExterior')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TEnderecoExterior')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TEnderecoExterior', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TEnderecoExterior'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='TEnderecoExterior', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # All children are serialized as XML-quoted, encoded strings.
        if self.paisResid is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%spaisResid>%s</%spaisResid>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.paisResid), input_name='paisResid')), namespace_, eol_))
        if self.dscLograd is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdscLograd>%s</%sdscLograd>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.dscLograd), input_name='dscLograd')), namespace_, eol_))
        if self.nrLograd is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrLograd>%s</%snrLograd>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrLograd), input_name='nrLograd')), namespace_, eol_))
        if self.complemento is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scomplemento>%s</%scomplemento>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.complemento), input_name='complemento')), namespace_, eol_))
        if self.bairro is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sbairro>%s</%sbairro>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.bairro), input_name='bairro')), namespace_, eol_))
        if self.nmCid is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snmCid>%s</%snmCid>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nmCid), input_name='nmCid')), namespace_, eol_))
        if self.codPostal is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scodPostal>%s</%scodPostal>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.codPostal), input_name='codPostal')), namespace_, eol_))
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # All recognized children are validated as plain strings.
        if nodeName_ == 'paisResid':
            paisResid_ = child_.text
            paisResid_ = self.gds_validate_string(paisResid_, node, 'paisResid')
            self.paisResid = paisResid_
        elif nodeName_ == 'dscLograd':
            dscLograd_ = child_.text
            dscLograd_ = self.gds_validate_string(dscLograd_, node, 'dscLograd')
            self.dscLograd = dscLograd_
        elif nodeName_ == 'nrLograd':
            nrLograd_ = child_.text
            nrLograd_ = self.gds_validate_string(nrLograd_, node, 'nrLograd')
            self.nrLograd = nrLograd_
        elif nodeName_ == 'complemento':
            complemento_ = child_.text
            complemento_ = self.gds_validate_string(complemento_, node, 'complemento')
            self.complemento = complemento_
        elif nodeName_ == 'bairro':
            bairro_ = child_.text
            bairro_ = self.gds_validate_string(bairro_, node, 'bairro')
            self.bairro = bairro_
        elif nodeName_ == 'nmCid':
            nmCid_ = child_.text
            nmCid_ = self.gds_validate_string(nmCid_, node, 'nmCid')
            self.nmCid = nmCid_
        elif nodeName_ == 'codPostal':
            codPostal_ = child_.text
            codPostal_ = self.gds_validate_string(codPostal_, node, 'codPostal')
            self.codPostal = codPostal_
# end class TEnderecoExterior
class paisResid(GeneratedsSuper):
    """Empty placeholder element type generated from the XSD; no
    attributes, no children."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, paisResid)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if paisResid.subclass:
            return paisResid.subclass(*args_, **kwargs_)
        else:
            return paisResid(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Empty-tuple condition: always False for this content-less type.
        if (

        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='paisResid', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('paisResid')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='paisResid')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='paisResid', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='paisResid'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='paisResid', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class paisResid
class nmCid(GeneratedsSuper):
    """Empty placeholder element type generated from the XSD; no
    attributes, no children."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nmCid)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if nmCid.subclass:
            return nmCid.subclass(*args_, **kwargs_)
        else:
            return nmCid(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Empty-tuple condition: always False for this content-less type.
        if (

        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='nmCid', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nmCid')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nmCid')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nmCid', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nmCid'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='nmCid', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class nmCid
class codPostal(GeneratedsSuper):
    """Empty placeholder element type generated from the XSD; no
    attributes, no children."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, codPostal)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if codPostal.subclass:
            return codPostal.subclass(*args_, **kwargs_)
        else:
            return codPostal(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Empty-tuple condition: always False for this content-less type.
        if (

        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='codPostal', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('codPostal')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='codPostal')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='codPostal', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='codPostal'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='codPostal', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class codPostal
class TDadosBeneficio(GeneratedsSuper):
    """Social-security benefit data.
    (Original docstring: 'Dados do benefício previdenciário'.)
    ``tpBenef`` is an integer code, ``nrBenefic`` a string,
    ``dtIniBenef`` a date, ``vrBenef`` a float and ``infoPenMorte``
    a nested complex element."""
    subclass = None
    superclass = None
    def __init__(self, tpBenef=None, nrBenefic=None, dtIniBenef=None, vrBenef=None, infoPenMorte=None):
        self.original_tagname_ = None
        self.tpBenef = tpBenef
        self.nrBenefic = nrBenefic
        # dtIniBenef may be passed either as an ISO 'YYYY-MM-DD' string
        # or as a date object; strings are coerced here (raises
        # ValueError on a malformed string).
        if isinstance(dtIniBenef, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(dtIniBenef, '%Y-%m-%d').date()
        else:
            initvalue_ = dtIniBenef
        self.dtIniBenef = initvalue_
        self.vrBenef = vrBenef
        self.infoPenMorte = infoPenMorte  # infoPenMorte instance or None
    def factory(*args_, **kwargs_):
        # Subclass-substitution hook (external module, class hook, self).
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TDadosBeneficio)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TDadosBeneficio.subclass:
            return TDadosBeneficio.subclass(*args_, **kwargs_)
        else:
            return TDadosBeneficio(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_tpBenef(self):
        return self.tpBenef
    def set_tpBenef(self, tpBenef):
        self.tpBenef = tpBenef
    def get_nrBenefic(self):
        return self.nrBenefic
    def set_nrBenefic(self, nrBenefic):
        self.nrBenefic = nrBenefic
    def get_dtIniBenef(self):
        return self.dtIniBenef
    def set_dtIniBenef(self, dtIniBenef):
        # NOTE(review): unlike __init__, the setter does not coerce
        # strings to dates — callers must pass a date object here.
        self.dtIniBenef = dtIniBenef
    def get_vrBenef(self):
        return self.vrBenef
    def set_vrBenef(self, vrBenef):
        self.vrBenef = vrBenef
    def get_infoPenMorte(self):
        return self.infoPenMorte
    def set_infoPenMorte(self, infoPenMorte):
        self.infoPenMorte = infoPenMorte
    def hasContent_(self):
        # Any non-None field requires open/close tags instead of a
        # self-closing element.
        if (
            self.tpBenef is not None or
            self.nrBenefic is not None or
            self.dtIniBenef is not None or
            self.vrBenef is not None or
            self.infoPenMorte is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='TDadosBeneficio', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TDadosBeneficio')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TDadosBeneficio')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TDadosBeneficio', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TDadosBeneficio'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='TDadosBeneficio', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Each child uses the formatter matching its XSD type; the nested
        # infoPenMorte element serializes itself.
        if self.tpBenef is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpBenef>%s</%stpBenef>%s' % (namespace_, self.gds_format_integer(self.tpBenef, input_name='tpBenef'), namespace_, eol_))
        if self.nrBenefic is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrBenefic>%s</%snrBenefic>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrBenefic), input_name='nrBenefic')), namespace_, eol_))
        if self.dtIniBenef is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtIniBenef>%s</%sdtIniBenef>%s' % (namespace_, self.gds_format_date(self.dtIniBenef, input_name='dtIniBenef'), namespace_, eol_))
        if self.vrBenef is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%svrBenef>%s</%svrBenef>%s' % (namespace_, self.gds_format_float(self.vrBenef, input_name='vrBenef'), namespace_, eol_))
        if self.infoPenMorte is not None:
            self.infoPenMorte.export(outfile, level, namespace_, name_='infoPenMorte', pretty_print=pretty_print)
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Parse children per their XSD types; int/float parse failures
        # are reported via raise_parse_error with node location.
        if nodeName_ == 'tpBenef':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'tpBenef')
            self.tpBenef = ival_
        elif nodeName_ == 'nrBenefic':
            nrBenefic_ = child_.text
            nrBenefic_ = self.gds_validate_string(nrBenefic_, node, 'nrBenefic')
            self.nrBenefic = nrBenefic_
        elif nodeName_ == 'dtIniBenef':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.dtIniBenef = dval_
        elif nodeName_ == 'vrBenef':
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'vrBenef')
            self.vrBenef = fval_
        elif nodeName_ == 'infoPenMorte':
            obj_ = infoPenMorte.factory()
            obj_.build(child_)
            self.infoPenMorte = obj_
            obj_.original_tagname_ = 'infoPenMorte'
# end class TDadosBeneficio
self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtIniBenef') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespace_='', name_='dtIniBenef', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtIniBenef'): pass def exportChildren(self, outfile, level, namespace_='', name_='dtIniBenef', fromsubclass_=False, pretty_print=True): pass def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass # end class dtIniBenef class vrBenef(GeneratedsSuper): subclass = None superclass = None def __init__(self): self.original_tagname_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, vrBenef) if subclass is not None: return subclass(*args_, **kwargs_) if vrBenef.subclass: return vrBenef.subclass(*args_, **kwargs_) else: return vrBenef(*args_, **kwargs_) factory = staticmethod(factory) def hasContent_(self): if ( ): return True else: return False def export(self, outfile, level, namespace_='', name_='vrBenef', namespacedef_='', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('vrBenef') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() 
self.exportAttributes(outfile, level, already_processed, namespace_, name_='vrBenef') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespace_='', name_='vrBenef', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='vrBenef'): pass def exportChildren(self, outfile, level, namespace_='', name_='vrBenef', fromsubclass_=False, pretty_print=True): pass def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass # end class vrBenef class infoPenMorte(GeneratedsSuper): """Informações relativas a pensão por morte""" subclass = None superclass = None def __init__(self, idQuota=None, cpfInst=None): self.original_tagname_ = None self.idQuota = idQuota self.cpfInst = cpfInst def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, infoPenMorte) if subclass is not None: return subclass(*args_, **kwargs_) if infoPenMorte.subclass: return infoPenMorte.subclass(*args_, **kwargs_) else: return infoPenMorte(*args_, **kwargs_) factory = staticmethod(factory) def get_idQuota(self): return self.idQuota def set_idQuota(self, idQuota): self.idQuota = idQuota def get_cpfInst(self): return self.cpfInst def set_cpfInst(self, cpfInst): self.cpfInst = cpfInst def hasContent_(self): if ( self.idQuota is not None or self.cpfInst is not None ): return True else: return False def export(self, outfile, level, namespace_='', name_='infoPenMorte', namespacedef_='', pretty_print=True): imported_ns_def_ = 
GenerateDSNamespaceDefs_.get('infoPenMorte') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespace_, name_='infoPenMorte') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespace_='', name_='infoPenMorte', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='infoPenMorte'): pass def exportChildren(self, outfile, level, namespace_='', name_='infoPenMorte', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.idQuota is not None: showIndent(outfile, level, pretty_print) outfile.write('<%sidQuota>%s</%sidQuota>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.idQuota), input_name='idQuota')), namespace_, eol_)) if self.cpfInst is not None: showIndent(outfile, level, pretty_print) outfile.write('<%scpfInst>%s</%scpfInst>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.cpfInst), input_name='cpfInst')), namespace_, eol_)) def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): if nodeName_ == 'idQuota': idQuota_ = child_.text idQuota_ = self.gds_validate_string(idQuota_, node, 'idQuota') 
self.idQuota = idQuota_ elif nodeName_ == 'cpfInst': cpfInst_ = child_.text cpfInst_ = self.gds_validate_string(cpfInst_, node, 'cpfInst') self.cpfInst = cpfInst_ # end class infoPenMorte class idQuota(GeneratedsSuper): subclass = None superclass = None def __init__(self): self.original_tagname_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, idQuota) if subclass is not None: return subclass(*args_, **kwargs_) if idQuota.subclass: return idQuota.subclass(*args_, **kwargs_) else: return idQuota(*args_, **kwargs_) factory = staticmethod(factory) def hasContent_(self): if ( ): return True else: return False def export(self, outfile, level, namespace_='', name_='idQuota', namespacedef_='', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('idQuota') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespace_, name_='idQuota') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespace_='', name_='idQuota', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='idQuota'): pass def exportChildren(self, outfile, level, namespace_='', name_='idQuota', fromsubclass_=False, pretty_print=True): pass def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self def 
buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass # end class idQuota class cpfInst(GeneratedsSuper): subclass = None superclass = None def __init__(self): self.original_tagname_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, cpfInst) if subclass is not None: return subclass(*args_, **kwargs_) if cpfInst.subclass: return cpfInst.subclass(*args_, **kwargs_) else: return cpfInst(*args_, **kwargs_) factory = staticmethod(factory) def hasContent_(self): if ( ): return True else: return False def export(self, outfile, level, namespace_='', name_='cpfInst', namespacedef_='', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('cpfInst') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None: name_ = self.original_tagname_ showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespace_, name_='cpfInst') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespace_='', name_='cpfInst', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespace_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='cpfInst'): pass def exportChildren(self, outfile, level, namespace_='', name_='cpfInst', fromsubclass_=False, pretty_print=True): pass def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) return self def buildAttributes(self, node, 
attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass # end class cpfInst GDSClassesMapping = { 'altBeneficio': TDadosBeneficio, 'brasil': TEnderecoBrasil, 'dadosBenef': TDadosBenef, 'exterior': TEnderecoExterior, 'ideEmpregador': TEmprPJ, 'ideEvento': TIdeEveTrab, 'iniBeneficio': TDadosBeneficio, } USAGE_TEXT = """ Usage: python <Parser>.py [ -s ] <in_xml_file> """ def usage(): print(USAGE_TEXT) sys.exit(1) def get_root_tag(node): tag = Tag_pattern_.match(node.tag).groups()[-1] rootClass = GDSClassesMapping.get(tag) if rootClass is None: rootClass = globals().get(tag) return tag, rootClass def parse(inFileName, silence=False): parser = None doc = parsexml_(inFileName, parser) rootNode = doc.getroot() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: rootTag = 'eSocial' rootClass = eSocial rootObj = rootClass.factory() rootObj.build(rootNode) # Enable Python to collect the space used by the DOM. doc = None if not silence: sys.stdout.write('<?xml version="1.0" ?>\n') rootObj.export( sys.stdout, 0, name_=rootTag, namespacedef_='', pretty_print=True) return rootObj def parseEtree(inFileName, silence=False): parser = None doc = parsexml_(inFileName, parser) rootNode = doc.getroot() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: rootTag = 'eSocial' rootClass = eSocial rootObj = rootClass.factory() rootObj.build(rootNode) # Enable Python to collect the space used by the DOM. 
doc = None mapping = {} rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) if not silence: content = etree_.tostring( rootElement, pretty_print=True, xml_declaration=True, encoding="utf-8") sys.stdout.write(content) sys.stdout.write('\n') return rootObj, rootElement, mapping, reverse_mapping def parseString(inString, silence=False): if sys.version_info.major == 2: from StringIO import StringIO as IOBuffer else: from io import BytesIO as IOBuffer parser = None doc = parsexml_(IOBuffer(inString), parser) rootNode = doc.getroot() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: rootTag = 'eSocial' rootClass = eSocial rootObj = rootClass.factory() rootObj.build(rootNode) # Enable Python to collect the space used by the DOM. doc = None if not silence: sys.stdout.write('<?xml version="1.0" ?>\n') rootObj.export( sys.stdout, 0, name_=rootTag, namespacedef_='') return rootObj def parseLiteral(inFileName, silence=False): parser = None doc = parsexml_(inFileName, parser) rootNode = doc.getroot() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: rootTag = 'eSocial' rootClass = eSocial rootObj = rootClass.factory() rootObj.build(rootNode) # Enable Python to collect the space used by the DOM. doc = None if not silence: sys.stdout.write('#from evtCdBenPrRP import *\n\n') sys.stdout.write('import evtCdBenPrRP as model_\n\n') sys.stdout.write('rootObj = model_.rootClass(\n') rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) sys.stdout.write(')\n') return rootObj def main(): args = sys.argv[1:] if len(args) == 1: parse(args[0]) else: usage() if __name__ == '__main__': #import pdb; pdb.set_trace() main() __all__ = [ "TDadosBenef", "TDadosBeneficio", "TEmprPJ", "TEnderecoBrasil", "TEnderecoExterior", "TIdeEveTrab", "eSocial" ]
42.03501
193
0.611547
19,766
189,704
5.596023
0.032025
0.039182
0.026281
0.028903
0.796358
0.758993
0.73537
0.710128
0.686812
0.654655
0
0.003601
0.281296
189,704
4,512
194
42.044326
0.807681
0.024232
0
0.678752
1
0.00024
0.042691
0.007089
0
0
0
0
0
1
0.152221
false
0.039376
0.038415
0.022329
0.328451
0.079712
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
eec02affaa444ea5d0b1ae56b595337ead42ec35
26
py
Python
acq4/devices/ThorlabsFilterWheel/__init__.py
aleonlein/acq4
4b1fcb9ad2c5e8d4595a2b9cf99d50ece0c0f555
[ "MIT" ]
1
2020-06-04T17:04:53.000Z
2020-06-04T17:04:53.000Z
acq4/devices/ThorlabsFilterWheel/__init__.py
aleonlein/acq4
4b1fcb9ad2c5e8d4595a2b9cf99d50ece0c0f555
[ "MIT" ]
24
2016-09-27T17:25:24.000Z
2017-03-02T21:00:11.000Z
acq4/devices/ThorlabsFilterWheel/__init__.py
sensapex/acq4
9561ba73caff42c609bd02270527858433862ad8
[ "MIT" ]
4
2016-10-19T06:39:36.000Z
2019-09-30T21:06:45.000Z
from FilterWheel import *
13
25
0.807692
3
26
7
1
0
0
0
0
0
0
0
0
0
0
0
0.153846
26
1
26
26
0.954545
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
eef0a1df04469024ca7a834758ac6007044aa997
28
py
Python
reina/iv/__init__.py
SoumilShekdar/Reina
638240a979a90a9b6ca9efb40edef6ecc71d836f
[ "MIT" ]
4
2021-07-19T04:21:37.000Z
2022-02-22T09:24:42.000Z
reina/iv/__init__.py
SoumilShekdar/Reina
638240a979a90a9b6ca9efb40edef6ecc71d836f
[ "MIT" ]
10
2021-07-19T12:45:21.000Z
2021-08-11T07:45:50.000Z
reina/iv/__init__.py
SoumilShekdar/Reina
638240a979a90a9b6ca9efb40edef6ecc71d836f
[ "MIT" ]
1
2021-09-05T00:41:48.000Z
2021-09-05T00:41:48.000Z
from .2sls import SieveTSLS
14
27
0.821429
4
28
5.75
1
0
0
0
0
0
0
0
0
0
0
0.041667
0.142857
28
1
28
28
0.916667
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
1
null
null
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
5
e1134fe2d4efd305d38c69d062f02bd8fcd99659
766
py
Python
tests/integration/load_balancer_types/test_load_balancer_types.py
cenkalti/hcloud-python
1be68b3cc29b81d93d85bef2d4b8dc57cbf603fb
[ "MIT" ]
null
null
null
tests/integration/load_balancer_types/test_load_balancer_types.py
cenkalti/hcloud-python
1be68b3cc29b81d93d85bef2d4b8dc57cbf603fb
[ "MIT" ]
null
null
null
tests/integration/load_balancer_types/test_load_balancer_types.py
cenkalti/hcloud-python
1be68b3cc29b81d93d85bef2d4b8dc57cbf603fb
[ "MIT" ]
null
null
null
class TestLoadBalancerTypesClient(object): def test_get_by_id(self, hetzner_client): load_balancer_type = hetzner_client.load_balancer_types.get_by_id(1) assert load_balancer_type.id == 1 assert load_balancer_type.name == "lb11" def test_get_by_name(self, hetzner_client): load_balancer_type = hetzner_client.load_balancer_types.get_by_name("lb11") assert load_balancer_type.id == 1 assert load_balancer_type.name == "lb11" def test_get_list(self, hetzner_client): result = hetzner_client.load_balancer_types.get_list() load_balancer_types = result.load_balancer_types assert load_balancer_types[0].id == 1 assert load_balancer_types[0].name == "lb11"
40.315789
84
0.708877
103
766
4.834951
0.213592
0.313253
0.238956
0.251004
0.708835
0.606426
0.534137
0.534137
0.534137
0.534137
0
0.023141
0.210183
766
18
85
42.555556
0.8
0
0
0.285714
0
0
0.021419
0
0
0
0
0
0.428571
1
0.214286
false
0
0
0
0.285714
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
0
0
0
0
0
0
0
5
013e956198aa1b77981faa4ab9b419572706ea01
19
py
Python
Exercicios/tempCodeRunnerFile.py
eduardodarocha/Introducao_Ciencia_da_Computacao_com_Python_Parte_2_Coursera
b5b9198e16b4b67894b85766eb521ae96010accf
[ "MIT" ]
1
2020-08-28T20:29:23.000Z
2020-08-28T20:29:23.000Z
Exercicios/tempCodeRunnerFile.py
eduardodarocha/Introducao_Ciencia_da_Computacao_com_Python_Parte_2_Coursera
b5b9198e16b4b67894b85766eb521ae96010accf
[ "MIT" ]
null
null
null
Exercicios/tempCodeRunnerFile.py
eduardodarocha/Introducao_Ciencia_da_Computacao_com_Python_Parte_2_Coursera
b5b9198e16b4b67894b85766eb521ae96010accf
[ "MIT" ]
null
null
null
print(elefantes(4))
19
19
0.789474
3
19
5
1
0
0
0
0
0
0
0
0
0
0
0.052632
0
19
1
19
19
0.736842
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
6da557c68c852fb5e7d15b6ff66d6fad1e0de619
124
py
Python
apps/iotdb_cloud_core/admin.py
JulianFeinauer/iotdb-cloud
0ccb2dd14c7d0ae3a8e72b3b32ce83de2df48738
[ "Apache-2.0" ]
6
2021-08-22T02:25:55.000Z
2021-08-28T04:53:36.000Z
apps/iotdb_cloud_core/admin.py
JulianFeinauer/iotdb-cloud
0ccb2dd14c7d0ae3a8e72b3b32ce83de2df48738
[ "Apache-2.0" ]
null
null
null
apps/iotdb_cloud_core/admin.py
JulianFeinauer/iotdb-cloud
0ccb2dd14c7d0ae3a8e72b3b32ce83de2df48738
[ "Apache-2.0" ]
null
null
null
from django.contrib import admin from apps.iotdb_cloud_core.models import IoTDBRelease admin.site.register(IoTDBRelease)
17.714286
53
0.846774
17
124
6.058824
0.764706
0
0
0
0
0
0
0
0
0
0
0
0.096774
124
6
54
20.666667
0.919643
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
6dae200bc2542cbf1a7d16cff24cb31ec402d6c5
246
py
Python
testing/misc/test1.py
lagvier/echo-sense
fe8ab921e7f61c48b224f0cc2832103a395a6cf7
[ "MIT" ]
null
null
null
testing/misc/test1.py
lagvier/echo-sense
fe8ab921e7f61c48b224f0cc2832103a395a6cf7
[ "MIT" ]
null
null
null
testing/misc/test1.py
lagvier/echo-sense
fe8ab921e7f61c48b224f0cc2832103a395a6cf7
[ "MIT" ]
1
2019-02-20T13:22:22.000Z
2019-02-20T13:22:22.000Z
import sys from os import path import numpy as np ts = [1467038416442, 1467038416452, 1467038416462, 1467038416472, 1467038416482, 1467038416492, 1467038416502, 1467038416512, 1467038416522, 1467038416532] y = [0, 0, 1, 1, 1, 1, 0, 0, 0, None]
30.75
155
0.756098
32
246
5.8125
0.71875
0.032258
0.032258
0
0
0
0
0
0
0
0
0.661905
0.146341
246
7
156
35.142857
0.22381
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.6
0
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
6de944f602d5ea4b43546e6defa95609633e50c2
135
py
Python
app/admin/__init__.py
RandyDeng/InterviewScheduler
044d39873c4efb0d523772c42af62e8699336f63
[ "MIT" ]
null
null
null
app/admin/__init__.py
RandyDeng/InterviewScheduler
044d39873c4efb0d523772c42af62e8699336f63
[ "MIT" ]
null
null
null
app/admin/__init__.py
RandyDeng/InterviewScheduler
044d39873c4efb0d523772c42af62e8699336f63
[ "MIT" ]
null
null
null
from flask import Blueprint admin = Blueprint('admin', __name__, url_prefix='/admin', template_folder='templates')
22.5
57
0.666667
14
135
6
0.785714
0.333333
0
0
0
0
0
0
0
0
0
0
0.222222
135
5
58
27
0.8
0
0
0
0
0
0.148148
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0.666667
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
1
0
5
a30616a29f411ad752e16628beebe4c2d1a4b42a
128
py
Python
katas/kyu_7/formatting_decimal_places_1.py
the-zebulan/CodeWars
1eafd1247d60955a5dfb63e4882e8ce86019f43a
[ "MIT" ]
40
2016-03-09T12:26:20.000Z
2022-03-23T08:44:51.000Z
katas/kyu_7/formatting_decimal_places_1.py
akalynych/CodeWars
1eafd1247d60955a5dfb63e4882e8ce86019f43a
[ "MIT" ]
null
null
null
katas/kyu_7/formatting_decimal_places_1.py
akalynych/CodeWars
1eafd1247d60955a5dfb63e4882e8ce86019f43a
[ "MIT" ]
36
2016-11-07T19:59:58.000Z
2022-03-31T11:18:27.000Z
from math import trunc def two_decimal_places(number): factor = float(10 ** 2) return trunc(number * factor) / factor
18.285714
42
0.695313
18
128
4.833333
0.777778
0.275862
0
0
0
0
0
0
0
0
0
0.029703
0.210938
128
6
43
21.333333
0.831683
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.25
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
09ae7cadaf61799aa1c9b362b46ebd1005a33f96
132
py
Python
evidence/data_sources/__init__.py
cancervariants/evidence-normalization
eb055541bb5c53b40f30fc35e89a2b49e3176bb9
[ "MIT" ]
null
null
null
evidence/data_sources/__init__.py
cancervariants/evidence-normalization
eb055541bb5c53b40f30fc35e89a2b49e3176bb9
[ "MIT" ]
12
2022-01-31T18:47:02.000Z
2022-03-31T13:34:58.000Z
evidence/data_sources/__init__.py
cancervariants/evidence-normalization
eb055541bb5c53b40f30fc35e89a2b49e3176bb9
[ "MIT" ]
null
null
null
"""Import data sources""" from .gnomad import GnomAD from .cbioportal import CBioPortal from .cancer_hotspots import CancerHotspots
26.4
43
0.818182
16
132
6.6875
0.5625
0
0
0
0
0
0
0
0
0
0
0
0.113636
132
4
44
33
0.91453
0.143939
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
09c706e008d6346528f62a89540b79fcfb12e772
204
py
Python
api/service/listing_service.py
build-week-optimal-pricing/Data-science
23433f64445393654c8702e7d4c436e3758cd329
[ "MIT" ]
null
null
null
api/service/listing_service.py
build-week-optimal-pricing/Data-science
23433f64445393654c8702e7d4c436e3758cd329
[ "MIT" ]
null
null
null
api/service/listing_service.py
build-week-optimal-pricing/Data-science
23433f64445393654c8702e7d4c436e3758cd329
[ "MIT" ]
2
2020-02-03T18:54:57.000Z
2020-02-03T20:14:11.000Z
#!/usr/bin/env python3 from api import DB from api.models.listing import Listing def get_all_queries(): """ Returns all stored listing queries. """ return list(Listing.query.all())
17
43
0.671569
28
204
4.821429
0.678571
0.103704
0
0
0
0
0
0
0
0
0
0.00625
0.215686
204
11
44
18.545455
0.8375
0.279412
0
0
0
0
0
0
0
0
0
0
0
1
0.25
true
0
0.5
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
5
09fbd4228a570bcd8035cd9011201ce529d14413
40
py
Python
Lib/xmlrpc/__init__.py
sireliah/polish-python
605df4944c2d3bc25f8bf6964b274c0a0d297cc3
[ "PSF-2.0" ]
1
2018-06-21T18:21:24.000Z
2018-06-21T18:21:24.000Z
Lib/xmlrpc/__init__.py
sireliah/polish-python
605df4944c2d3bc25f8bf6964b274c0a0d297cc3
[ "PSF-2.0" ]
null
null
null
Lib/xmlrpc/__init__.py
sireliah/polish-python
605df4944c2d3bc25f8bf6964b274c0a0d297cc3
[ "PSF-2.0" ]
null
null
null
# This directory jest a Python package.
20
39
0.775
6
40
5.166667
1
0
0
0
0
0
0
0
0
0
0
0
0.175
40
1
40
40
0.939394
0.925
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
61dae364e89628a6916af4c9a43c0cd0c9b5d3c5
65
py
Python
goodbye_cruel_world.py
sgriffith3/2021_05_10_pyna
d732e1dd0fa03f1cef8f72fc9dcc09ec947f31a5
[ "MIT" ]
null
null
null
goodbye_cruel_world.py
sgriffith3/2021_05_10_pyna
d732e1dd0fa03f1cef8f72fc9dcc09ec947f31a5
[ "MIT" ]
null
null
null
goodbye_cruel_world.py
sgriffith3/2021_05_10_pyna
d732e1dd0fa03f1cef8f72fc9dcc09ec947f31a5
[ "MIT" ]
null
null
null
print("Goodbye Cruel World!!!!") print("see ya later, aligator")
21.666667
32
0.692308
9
65
5
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.107692
65
2
33
32.5
0.775862
0
0
0
0
0
0.692308
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
61ddd275c9328cdf120cd05d7b3417a27c6e0f25
72
py
Python
src/lingcomp/data_utils/__init__.py
CharlottePouw/interpreting-complexity
b9a73c0aff18e4c6b4209a6511d00639494c70da
[ "Apache-2.0" ]
2
2020-12-18T12:26:22.000Z
2020-12-19T18:47:07.000Z
src/lingcomp/data_utils/__init__.py
CharlottePouw/interpreting-complexity
b9a73c0aff18e4c6b4209a6511d00639494c70da
[ "Apache-2.0" ]
null
null
null
src/lingcomp/data_utils/__init__.py
CharlottePouw/interpreting-complexity
b9a73c0aff18e4c6b4209a6511d00639494c70da
[ "Apache-2.0" ]
1
2021-05-19T13:39:45.000Z
2021-05-19T13:39:45.000Z
from .et_processor import DundeeProcessor, GECOProcessor, ZuCoProcessor
36
71
0.875
7
72
8.857143
1
0
0
0
0
0
0
0
0
0
0
0
0.083333
72
1
72
72
0.939394
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
114d4aa01e523660546ae0c5a1fb081013ec1a86
60
py
Python
irida_uploader_cl/parsers/miniseq/__init__.py
duanjunhyq/irida_uploader_cl
d0e5d404c5b5b10c3411ded71a20f5ab062aabba
[ "MIT" ]
null
null
null
irida_uploader_cl/parsers/miniseq/__init__.py
duanjunhyq/irida_uploader_cl
d0e5d404c5b5b10c3411ded71a20f5ab062aabba
[ "MIT" ]
null
null
null
irida_uploader_cl/parsers/miniseq/__init__.py
duanjunhyq/irida_uploader_cl
d0e5d404c5b5b10c3411ded71a20f5ab062aabba
[ "MIT" ]
null
null
null
from irida_uploader_cl.parsers.miniseq.parser import Parser
30
59
0.883333
9
60
5.666667
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.066667
60
1
60
60
0.910714
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
fec7d1bf151211967057226b6ac546f31ea05daa
146
py
Python
history/admin.py
fahimfarhan/cancer-web-app
6c5d8c5c90b0909cbd161d2ae87b4f12549bdfef
[ "MIT" ]
null
null
null
history/admin.py
fahimfarhan/cancer-web-app
6c5d8c5c90b0909cbd161d2ae87b4f12549bdfef
[ "MIT" ]
5
2021-03-18T20:13:38.000Z
2022-01-13T00:35:37.000Z
history/admin.py
fahimfarhan/cancer-web-app
6c5d8c5c90b0909cbd161d2ae87b4f12549bdfef
[ "MIT" ]
null
null
null
from django.contrib import admin # Register your models here. from history.models import HistoryModelFile admin.site.register(HistoryModelFile)
20.857143
43
0.835616
18
146
6.777778
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.109589
146
6
44
24.333333
0.938462
0.178082
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
feca366a33eddf6f2c9718e3c0793e7f47ad681b
125
py
Python
CATS/cleaning_empty_rows.py
janetvanbilsen/Twitter-Mining-Raspberry_Pi
bd9beb5fd57be7b1fccb3ed041c36045f0be7f41
[ "MIT" ]
null
null
null
CATS/cleaning_empty_rows.py
janetvanbilsen/Twitter-Mining-Raspberry_Pi
bd9beb5fd57be7b1fccb3ed041c36045f0be7f41
[ "MIT" ]
null
null
null
CATS/cleaning_empty_rows.py
janetvanbilsen/Twitter-Mining-Raspberry_Pi
bd9beb5fd57be7b1fccb3ed041c36045f0be7f41
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 import pandas as pd dataset = pd.read_csv('Dataset.csv') dataset.to_csv('Dataset.csv', index=False)
17.857143
42
0.736
21
125
4.285714
0.666667
0.333333
0.288889
0
0
0
0
0
0
0
0
0.008929
0.104
125
6
43
20.833333
0.794643
0.168
0
0
0
0
0.213592
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
fed3254b8b43f91bf171dddb50dc3bb5868244db
310
py
Python
HomeLab/homepage/views.py
amalik18/HomeLab
b0d71f379e8628948ceb15bd776386a81a468558
[ "MIT" ]
null
null
null
HomeLab/homepage/views.py
amalik18/HomeLab
b0d71f379e8628948ceb15bd776386a81a468558
[ "MIT" ]
null
null
null
HomeLab/homepage/views.py
amalik18/HomeLab
b0d71f379e8628948ceb15bd776386a81a468558
[ "MIT" ]
null
null
null
from django.shortcuts import render, get_object_or_404
# Fixed: `render` was imported twice (two separate `from django.shortcuts`
# lines); the two `django.http` imports are merged into one.
from django.http import HttpResponse, Http404
from django.template import loader

# Create your views here.


def index(request):
    """Render the site homepage template."""
    return render(request=request, template_name='homepage.html')
25.833333
65
0.816129
44
310
5.659091
0.568182
0.200803
0.15261
0.200803
0.248996
0
0
0
0
0
0
0.022059
0.122581
310
11
66
28.181818
0.893382
0.074194
0
0
0
0
0.045614
0
0
0
0
0
0
1
0.142857
false
0
0.714286
0.142857
1
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
5
fef94cf7472eb2fb96182773789ca2fc30a153f5
144
py
Python
pyobs/utils/threads/__init__.py
pyobs/pyobs-core
e3401e63eb31587c2bc535f7346b7e4ef69d64ab
[ "MIT" ]
4
2020-02-14T10:50:03.000Z
2022-03-25T04:15:06.000Z
pyobs/utils/threads/__init__.py
pyobs/pyobs-core
e3401e63eb31587c2bc535f7346b7e4ef69d64ab
[ "MIT" ]
60
2020-09-14T09:10:20.000Z
2022-03-25T17:51:42.000Z
pyobs/utils/threads/__init__.py
pyobs/pyobs-core
e3401e63eb31587c2bc535f7346b7e4ef69d64ab
[ "MIT" ]
2
2020-10-14T09:34:57.000Z
2021-04-27T09:35:57.000Z
from .future import Future from .lockwithabort import LockWithAbort, AcquireLockFailed from .threadwithreturnvalue import ThreadWithReturnValue
36
59
0.881944
13
144
9.769231
0.461538
0
0
0
0
0
0
0
0
0
0
0
0.090278
144
3
60
48
0.969466
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
3a0b37416080748feed953c20d0d1fe11351ed58
748
py
Python
proto/core/molecule.py
protoserver/proto-cli
a5675b90646a6ab6656b7fecca1c1da8804f45fb
[ "MIT" ]
null
null
null
proto/core/molecule.py
protoserver/proto-cli
a5675b90646a6ab6656b7fecca1c1da8804f45fb
[ "MIT" ]
null
null
null
proto/core/molecule.py
protoserver/proto-cli
a5675b90646a6ab6656b7fecca1c1da8804f45fb
[ "MIT" ]
null
null
null
from abc import abstractmethod

from cement import Handler, Interface


class MoleculeInterface(Interface):
    """Lifecycle contract for a stack ("molecule"): build config, start, stop."""

    class Meta:
        interface = 'stack'

    @abstractmethod
    def _build_config(self):
        """Implementation hook that builds the stack configuration."""
        pass

    def build_config(self):
        """Build the stack configuration by delegating to the hook."""
        self._build_config()

    def start(self):
        """Start the stack; no-op by default."""
        pass

    def stop(self):
        """Stop the stack; no-op by default."""
        pass

    def restart(self):
        """Restart the stack as a stop followed by a start."""
        self.stop()
        self.start()


class Molecule(MoleculeInterface, Handler):
    """FIXME: Put all common operations here."""

    pass
21.371429
48
0.606952
85
748
5.282353
0.352941
0.066815
0.167038
0.18931
0.200445
0.200445
0.200445
0.200445
0.200445
0.200445
0
0
0.286096
748
34
49
22
0.840824
0.275401
0
0.210526
0
0
0.009804
0
0
0
0
0.029412
0
1
0.263158
false
0.210526
0.105263
0
0.526316
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
1
0
0
1
0
0
5
3a13f34eb44a5677aa4403790e7570f05dfb4af0
101
py
Python
topCoder/srms/500s/srm571/div2/fox_and_game.py
ferhatelmas/algo
a7149c7a605708bc01a5cd30bf5455644cefd04d
[ "WTFPL" ]
25
2015-01-21T16:39:18.000Z
2021-05-24T07:01:24.000Z
topCoder/srms/500s/srm571/div2/fox_and_game.py
ferhatelmas/algo
a7149c7a605708bc01a5cd30bf5455644cefd04d
[ "WTFPL" ]
2
2020-09-30T19:39:36.000Z
2020-10-01T17:15:16.000Z
topCoder/srms/500s/srm571/div2/fox_and_game.py
ferhatelmas/algo
a7149c7a605708bc01a5cd30bf5455644cefd04d
[ "WTFPL" ]
15
2015-01-21T16:39:27.000Z
2020-10-01T17:00:22.000Z
class FoxAndGame:
    """TopCoder SRM 571 Div2: tally stars over a list of game results."""

    def countStars(self, result):
        """Return the total number of 'o' characters across all result strings."""
        total = 0
        for outcome in result:
            total += outcome.count("o")
        return total
25.25
48
0.653465
15
101
4.4
0.866667
0
0
0
0
0
0
0
0
0
0
0
0.227723
101
3
49
33.666667
0.846154
0
0
0
0
0
0.009901
0
0
0
0
0
0
1
0.333333
false
0
0
0.333333
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
3a3a360fc0c13877547e772805fff2aed918da4c
1,251
py
Python
test/moves/test_acrobatics.py
adacker10/showdown
8ceb1ff46d5c33ec3055928d6ad293224446f63c
[ "MIT" ]
8
2019-02-02T01:15:57.000Z
2021-12-23T04:43:46.000Z
test/moves/test_acrobatics.py
adacker10/showdown
8ceb1ff46d5c33ec3055928d6ad293224446f63c
[ "MIT" ]
null
null
null
test/moves/test_acrobatics.py
adacker10/showdown
8ceb1ff46d5c33ec3055928d6ad293224446f63c
[ "MIT" ]
6
2020-09-11T13:15:05.000Z
2022-03-18T15:46:35.000Z
import unittest

from sim.battle import Battle
from data import dex


class TestAcrobatics(unittest.TestCase):
    """Damage tests for the move Acrobatics.

    Expected HP deltas (76 / 39) were computed by hand from the damage
    formula, per the original author's note.
    """

    def _run_battle(self, defender_set):
        """Run one deterministic turn of charmander (tackle) vs. the given
        pidgey set and return charmander's pokemon object for HP checks."""
        b = Battle(debug=False, rng=False)
        b.join(0, [{'species': 'charmander', 'moves': ['tackle']}])
        b.join(1, [defender_set])
        b.choose(0, dex.Decision('move', 0))
        b.choose(1, dex.Decision('move', 0))
        b.do_turn()
        return b.sides[0].pokemon[0]

    def test_acrobatics(self):
        """Acrobatics damage when the user holds no item."""
        charmander = self._run_battle(
            {'species': 'pidgey', 'moves': ['acrobatics']})
        # damage calcs were done by hand
        self.assertEqual(charmander.hp, charmander.maxhp - 76)

    def test_acrobatics_noitem(self):
        """Acrobatics damage when the user holds an item (pokeball)."""
        charmander = self._run_battle(
            {'species': 'pidgey', 'item': 'pokeball', 'moves': ['acrobatics']})
        # damage calcs were done by hand
        self.assertEqual(charmander.hp, charmander.maxhp - 39)

    def runTest(self):
        self.test_acrobatics()
        self.test_acrobatics_noitem()
31.275
86
0.592326
160
1,251
4.58125
0.3
0.076398
0.081855
0.087312
0.723056
0.723056
0.723056
0.723056
0.723056
0.723056
0
0.025184
0.238209
1,251
39
87
32.076923
0.743966
0.047962
0
0.518519
0
0
0.117746
0
0
0
0
0
0.074074
1
0.111111
false
0
0.111111
0
0.259259
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
3a5400edeacc9bb20f022bc49d8ed1ea7ee11a72
93
py
Python
postreview/admin.py
harshiljhaveri/unicode-website
6caee5689fcdf172c01b10d51745b7b3e4596be1
[ "MIT" ]
16
2021-09-22T19:08:28.000Z
2022-03-18T18:57:02.000Z
postreview/admin.py
harshiljhaveri/unicode-website
6caee5689fcdf172c01b10d51745b7b3e4596be1
[ "MIT" ]
6
2021-09-30T12:36:02.000Z
2022-03-18T22:18:00.000Z
postreview/admin.py
harshiljhaveri/unicode-website
6caee5689fcdf172c01b10d51745b7b3e4596be1
[ "MIT" ]
6
2021-12-06T02:04:51.000Z
2022-03-13T14:38:14.000Z
# postreview/admin.py -- Django admin wiring for the Review model.
from django.contrib import admin

from .models import Review

# Make Review manageable through the admin with the default ModelAdmin.
admin.site.register(Review)
18.6
33
0.784946
13
93
5.615385
0.692308
0
0
0
0
0
0
0
0
0
0
0
0.150538
93
4
34
23.25
0.924051
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
28c817717093216ec1ffa47d7c38f9ff49e09f13
113
py
Python
website/views/__init__.py
luxutao/django-blog
392c47f5b584d212485e97826283f9e1c98fb6b9
[ "Apache-2.0" ]
null
null
null
website/views/__init__.py
luxutao/django-blog
392c47f5b584d212485e97826283f9e1c98fb6b9
[ "Apache-2.0" ]
null
null
null
website/views/__init__.py
luxutao/django-blog
392c47f5b584d212485e97826283f9e1c98fb6b9
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python3 # -*- coding:utf-8 -*- """ @__Create Time__ = 2017/12/11 10:34 @__Description__ = " " """
16.142857
35
0.59292
15
113
3.933333
1
0
0
0
0
0
0
0
0
0
0
0.145833
0.150442
113
6
36
18.833333
0.46875
0.893805
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
e93e464839168c250308884392001c2de7fdea96
3,618
py
Python
backend/app/alembic/versions/0b840782b66f_initial_model_again.py
totalhack/zar
e50a5f96f9df1316ca4205309920401c19db6e31
[ "MIT" ]
1
2020-11-02T14:31:30.000Z
2020-11-02T14:31:30.000Z
backend/app/alembic/versions/0b840782b66f_initial_model_again.py
totalhack/zar
e50a5f96f9df1316ca4205309920401c19db6e31
[ "MIT" ]
null
null
null
backend/app/alembic/versions/0b840782b66f_initial_model_again.py
totalhack/zar
e50a5f96f9df1316ca4205309920401c19db6e31
[ "MIT" ]
null
null
null
"""Initial model again

Revision ID: 0b840782b66f
Revises:
Create Date: 2020-10-27 17:24:10.636183

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '0b840782b66f'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    """Create the 'page' and 'track' tables with per-column lookup indexes."""
    # ### commands auto generated by Alembic - please adjust! ###
    # 'page' table: identifiers (vid/sid/cid/uid), request metadata, and a
    # free-form 'properties' text column; created_at defaults to now() server-side.
    op.create_table('page',
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('vid', sa.String(length=24), nullable=True),
        sa.Column('sid', sa.String(length=36), nullable=True),
        sa.Column('cid', sa.String(length=36), nullable=True),
        sa.Column('uid', sa.String(length=64), nullable=True),
        sa.Column('ip', sa.String(length=128), nullable=True),
        sa.Column('user_agent', sa.String(length=512), nullable=True),
        sa.Column('referer', sa.String(length=2048), nullable=True),
        sa.Column('url', sa.String(length=2048), nullable=True),
        sa.Column('properties', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        mysql_charset='utf8',
        mysql_engine='InnoDB'
    )
    # Single-column indexes on the identifier and timestamp columns.
    op.create_index(op.f('ix_page_cid'), 'page', ['cid'], unique=False)
    op.create_index(op.f('ix_page_created_at'), 'page', ['created_at'], unique=False)
    op.create_index(op.f('ix_page_sid'), 'page', ['sid'], unique=False)
    op.create_index(op.f('ix_page_uid'), 'page', ['uid'], unique=False)
    op.create_index(op.f('ix_page_vid'), 'page', ['vid'], unique=False)
    # 'track' table: same layout as 'page' plus an 'event' name column.
    op.create_table('track',
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('event', sa.String(length=64), nullable=True),
        sa.Column('vid', sa.String(length=24), nullable=True),
        sa.Column('sid', sa.String(length=36), nullable=True),
        sa.Column('cid', sa.String(length=36), nullable=True),
        sa.Column('uid', sa.String(length=64), nullable=True),
        sa.Column('ip', sa.String(length=128), nullable=True),
        sa.Column('user_agent', sa.String(length=512), nullable=True),
        sa.Column('referer', sa.String(length=2048), nullable=True),
        sa.Column('url', sa.String(length=2048), nullable=True),
        sa.Column('properties', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        mysql_charset='utf8',
        mysql_engine='InnoDB'
    )
    op.create_index(op.f('ix_track_cid'), 'track', ['cid'], unique=False)
    op.create_index(op.f('ix_track_created_at'), 'track', ['created_at'], unique=False)
    op.create_index(op.f('ix_track_sid'), 'track', ['sid'], unique=False)
    op.create_index(op.f('ix_track_uid'), 'track', ['uid'], unique=False)
    op.create_index(op.f('ix_track_vid'), 'track', ['vid'], unique=False)
    # ### end Alembic commands ###


def downgrade():
    """Reverse of upgrade(): drop indexes before their tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_track_vid'), table_name='track')
    op.drop_index(op.f('ix_track_uid'), table_name='track')
    op.drop_index(op.f('ix_track_sid'), table_name='track')
    op.drop_index(op.f('ix_track_created_at'), table_name='track')
    op.drop_index(op.f('ix_track_cid'), table_name='track')
    op.drop_table('track')
    op.drop_index(op.f('ix_page_vid'), table_name='page')
    op.drop_index(op.f('ix_page_uid'), table_name='page')
    op.drop_index(op.f('ix_page_sid'), table_name='page')
    op.drop_index(op.f('ix_page_created_at'), table_name='page')
    op.drop_index(op.f('ix_page_cid'), table_name='page')
    op.drop_table('page')
    # ### end Alembic commands ###
44.121951
92
0.672747
543
3,618
4.311234
0.156538
0.078599
0.068347
0.085434
0.831269
0.814609
0.794105
0.756087
0.706536
0.575822
0
0.027267
0.128248
3,618
81
93
44.666667
0.714965
0.079878
0
0.444444
0
0
0.174058
0
0
0
0
0
0
1
0.031746
false
0
0.031746
0
0.063492
0
0
0
0
null
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
3a9b433d38b7b7016736639df4a8c3b7664c27da
157
py
Python
core/dbt/config/__init__.py
darrenhaken/dbt
8270ef71b2450fc46117ebcd2c9a3d4403f79f3a
[ "Apache-2.0" ]
null
null
null
core/dbt/config/__init__.py
darrenhaken/dbt
8270ef71b2450fc46117ebcd2c9a3d4403f79f3a
[ "Apache-2.0" ]
null
null
null
core/dbt/config/__init__.py
darrenhaken/dbt
8270ef71b2450fc46117ebcd2c9a3d4403f79f3a
[ "Apache-2.0" ]
null
null
null
from .renderer import ConfigRenderer from .profile import Profile, UserConfig, PROFILES_DIR from .project import Project from .runtime import RuntimeConfig
26.166667
54
0.840764
19
157
6.894737
0.578947
0
0
0
0
0
0
0
0
0
0
0
0.121019
157
5
55
31.4
0.949275
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
3ae89a5ac845fc75c2d7a53a92c02e503916a1e2
166
py
Python
BugsApp/admin.py
safia88/BugTracker
77b1ccf6dd20ed9e8e4a03da236294897c75d020
[ "MIT" ]
null
null
null
BugsApp/admin.py
safia88/BugTracker
77b1ccf6dd20ed9e8e4a03da236294897c75d020
[ "MIT" ]
null
null
null
BugsApp/admin.py
safia88/BugTracker
77b1ccf6dd20ed9e8e4a03da236294897c75d020
[ "MIT" ]
null
null
null
from django.contrib import admin

# Fixed: the original read `from.models import Ticket,Customeuser` -- legal
# Python, but missing the space after `from` and after the comma.
from .models import Ticket, Customeuser

# Register your models here.
admin.site.register(Ticket)
admin.site.register(Customeuser)
27.666667
38
0.807229
22
166
6.090909
0.545455
0.134328
0.253731
0
0
0
0
0
0
0
0
0
0.114458
166
6
39
27.666667
0.911565
0.156627
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
c907963de4c248eaeefe566d8d771e03cf03148b
196
py
Python
pertpy/api/__init__.py
theislab/pertpy
54a9244fd032cdab2fb7fc0e4a2208ba088ff54e
[ "MIT" ]
1
2021-06-23T14:16:14.000Z
2021-06-23T14:16:14.000Z
pertpy/api/__init__.py
theislab/pertpy
54a9244fd032cdab2fb7fc0e4a2208ba088ff54e
[ "MIT" ]
36
2021-07-12T10:42:03.000Z
2022-03-29T13:07:01.000Z
pertpy/api/__init__.py
theislab/pertpy
54a9244fd032cdab2fb7fc0e4a2208ba088ff54e
[ "MIT" ]
1
2022-01-28T13:27:58.000Z
2022-01-28T13:27:58.000Z
import scanpy

# Re-export scanpy's global settings object under a package-level alias so
# callers can configure it as `pertpy_settings`.
pertpy_settings = scanpy.settings

# Public API submodules under scanpy-style short aliases (dt/pl/pp/tl).
from pertpy.api import data as dt
from pertpy.api import plot as pl
from pertpy.api import preprocessing as pp
from pertpy.api import tools as tl
21.777778
42
0.811224
34
196
4.647059
0.441176
0.253165
0.329114
0.481013
0
0
0
0
0
0
0
0
0.158163
196
8
43
24.5
0.957576
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.833333
0
0.833333
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
c9087116dc9864c80739bc6dfa64e10c6b66752d
222
py
Python
modules/may9/plug-ins/May9_Next.py
DavideAlidosi/May9
30e3d896dde0e6c4b1aa5d4bf6a6198cbba0a49b
[ "MIT" ]
39
2016-08-05T08:22:21.000Z
2021-06-01T16:05:20.000Z
modules/may9/plug-ins/May9_Next.py
DavideAlidosi/May9
30e3d896dde0e6c4b1aa5d4bf6a6198cbba0a49b
[ "MIT" ]
9
2016-10-14T09:55:58.000Z
2020-01-19T00:30:22.000Z
modules/may9/plug-ins/May9_Next.py
DavideAlidosi/May9
30e3d896dde0e6c4b1aa5d4bf6a6198cbba0a49b
[ "MIT" ]
8
2016-11-24T09:35:59.000Z
2020-02-12T15:42:39.000Z
import __main__
import May9_Next


def initializePlugin(*args):
    """Plug-in load hook: publish the May9_Next module on __main__."""
    setattr(__main__, "May9_Next", May9_Next)


def uninitializePlugin(*args):
    """Plug-in unload hook: remove the __main__ reference if present."""
    try:
        delattr(__main__, "May9_Next")
    except AttributeError:
        # Already gone (e.g. unloaded twice) -- nothing to do.
        pass
17.076923
41
0.711712
24
222
5.75
0.541667
0.231884
0.15942
0
0
0
0
0
0
0
0
0.022857
0.211712
222
13
42
17.076923
0.765714
0
0
0
0
0
0.040359
0
0
0
0
0
0
1
0.222222
true
0.111111
0.222222
0
0.444444
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
1
0
0
0
0
0
5
c90f721bbb44f22f7fbe7e9911dde5c76d97859c
56
py
Python
utils/__init__.py
carolinscholl/SORN
99f908c88265ecc26dad195b56bebfa12838591f
[ "MIT" ]
1
2019-10-25T11:48:31.000Z
2019-10-25T11:48:31.000Z
utils/__init__.py
carolinscholl/SORN
99f908c88265ecc26dad195b56bebfa12838591f
[ "MIT" ]
null
null
null
utils/__init__.py
carolinscholl/SORN
99f908c88265ecc26dad195b56bebfa12838591f
[ "MIT" ]
3
2019-09-27T12:54:19.000Z
2020-11-17T19:37:12.000Z
from .bunch import * from .backup import backup_pickle
14
33
0.785714
8
56
5.375
0.625
0
0
0
0
0
0
0
0
0
0
0
0.160714
56
3
34
18.666667
0.914894
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
c961e1935bfaae9007e5bf961dcddc9407f37ff8
255
py
Python
app/views/admin/index.py
mrakzero/FlaskCMS
03ad35610befca1f45d5705f07cfb4794f9b2d3b
[ "MulanPSL-1.0" ]
1
2022-03-31T03:57:45.000Z
2022-03-31T03:57:45.000Z
app/views/admin/index.py
mrakzero/FlaskCMS
03ad35610befca1f45d5705f07cfb4794f9b2d3b
[ "MulanPSL-1.0" ]
null
null
null
app/views/admin/index.py
mrakzero/FlaskCMS
03ad35610befca1f45d5705f07cfb4794f9b2d3b
[ "MulanPSL-1.0" ]
null
null
null
from flask import render_template

from app.views.admin import bp_admin


@bp_admin.route('/')
def index():
    """Serve the admin landing page."""
    page = render_template('admin/index.html')
    return page


@bp_admin.route('/dashboard')
def dashboard():
    """Serve the admin dashboard page."""
    page = render_template('admin/dashboard.html')
    return page
18.214286
50
0.745098
35
255
5.257143
0.428571
0.228261
0.130435
0.271739
0
0
0
0
0
0
0
0
0.121569
255
13
51
19.615385
0.821429
0
0
0
0
0
0.184314
0
0
0
0
0
0
1
0.25
true
0
0.25
0.25
0.75
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
1
0
0
0
5
c962990b2983cb54eac95079f3b250bf64a15127
109
py
Python
db/base.py
zy7y/HelloFastAPI
6f4e5d65e411d733dc17ca3851b82f3d0eed7245
[ "MIT" ]
1
2021-02-18T08:07:13.000Z
2021-02-18T08:07:13.000Z
db/base.py
zy7y/HelloFastAPI
6f4e5d65e411d733dc17ca3851b82f3d0eed7245
[ "MIT" ]
2
2021-04-06T18:26:08.000Z
2021-06-02T03:47:19.000Z
db/base.py
zy7y/HelloFastAPI
6f4e5d65e411d733dc17ca3851b82f3d0eed7245
[ "MIT" ]
null
null
null
# 导入所有模型, 用于迁移文件 from db.base_class import Base from models.user import User from models.movie import Movie
18.166667
30
0.807339
18
109
4.833333
0.555556
0.229885
0
0
0
0
0
0
0
0
0
0
0.146789
109
5
31
21.8
0.935484
0.12844
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
c97cfce015d46f366d4c6c0e8a56bf0785b60afc
204
py
Python
bessel_zeros/bessel_zeros.py
GrzegorzMika/Towards-adaptivity-via-a-new-discrepancy-principle-for-Poisson-inverse-problems
13f62a5fa2a446c48796e12536e61125302d638d
[ "MIT" ]
null
null
null
bessel_zeros/bessel_zeros.py
GrzegorzMika/Towards-adaptivity-via-a-new-discrepancy-principle-for-Poisson-inverse-problems
13f62a5fa2a446c48796e12536e61125302d638d
[ "MIT" ]
null
null
null
bessel_zeros/bessel_zeros.py
GrzegorzMika/Towards-adaptivity-via-a-new-discrepancy-principle-for-Poisson-inverse-problems
13f62a5fa2a446c48796e12536e61125302d638d
[ "MIT" ]
1
2022-01-23T19:15:01.000Z
2022-01-23T19:15:01.000Z
import numpy as np
import os

# Convert the plain-text table of Bessel zeros into NumPy's binary .npy format.
source_path = './bessel_zeros_short.txt'
zeros = np.loadtxt(source_path)
np.save('bessel_zeros_short', zeros)

# Remove the now-redundant text file, if it is still there.
if os.path.exists(source_path):
    os.remove(source_path)
22.666667
46
0.740196
33
204
4.333333
0.454545
0.307692
0.447552
0.398601
0
0
0
0
0
0
0
0
0.098039
204
8
47
25.5
0.777174
0
0
0
0
0
0.441176
0.352941
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
a3639ef28aac2c02170e8a0b9e216bd8d1280efb
104
py
Python
vedastr_cstr/vedastr/models/bodies/sequences/transformer/__init__.py
bsm8734/formula-image-latex-recognition
86d5070e8f907571a47967d64facaee246d92a35
[ "MIT" ]
13
2021-06-20T18:11:23.000Z
2021-12-07T18:06:42.000Z
vedastr_cstr/vedastr/models/bodies/sequences/transformer/__init__.py
bsm8734/formula-image-latex-recognition
86d5070e8f907571a47967d64facaee246d92a35
[ "MIT" ]
9
2021-06-16T14:55:07.000Z
2021-06-23T14:45:36.000Z
vedastr_cstr/vedastr/models/bodies/sequences/transformer/__init__.py
bsm8734/formula-image-latex-recognition
86d5070e8f907571a47967d64facaee246d92a35
[ "MIT" ]
6
2021-06-17T15:16:50.000Z
2021-07-05T20:41:26.000Z
from .decoder import TransformerDecoder # noqa 401 from .encoder import TransformerEncoder # noqa 401
34.666667
51
0.807692
12
104
7
0.666667
0.166667
0
0
0
0
0
0
0
0
0
0.068182
0.153846
104
2
52
52
0.886364
0.163462
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
a39340a6c3314820690f2f2e630d31359d80bf33
61
py
Python
python/cuXfilter/charts/cudatashader/__init__.py
AjayThorve/cuxfilter
537ff67de80439a43e0bad7373558f5e25dcb112
[ "Apache-2.0" ]
2
2019-03-06T02:10:05.000Z
2020-05-06T06:33:02.000Z
python/cuXfilter/charts/cudatashader/__init__.py
AjayThorve/cuxfilter
537ff67de80439a43e0bad7373558f5e25dcb112
[ "Apache-2.0" ]
null
null
null
python/cuXfilter/charts/cudatashader/__init__.py
AjayThorve/cuxfilter
537ff67de80439a43e0bad7373558f5e25dcb112
[ "Apache-2.0" ]
null
null
null
from .cudatashader import scatter_geo, scatter, line, heatmap
61
61
0.836066
8
61
6.25
0.875
0
0
0
0
0
0
0
0
0
0
0
0.098361
61
1
61
61
0.909091
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
6e80a9d159babd98381532f9d38035d7836e7baf
179
py
Python
kokoropy/scaffolding/scaffold_cms/controllers/page.py
goFrendiAsgard/kokoropy
49c8ca4b7dd2a084f2ced33fc5987b8a8b62c995
[ "MIT" ]
5
2015-01-06T17:01:59.000Z
2016-08-13T05:29:24.000Z
kokoropy/scaffolding/scaffold_cms/controllers/page.py
goFrendiAsgard/kokoropy
49c8ca4b7dd2a084f2ced33fc5987b8a8b62c995
[ "MIT" ]
5
2015-01-05T14:32:59.000Z
2015-09-29T10:27:27.000Z
kokoropy/scaffolding/scaffold_cms/controllers/page.py
goFrendiAsgard/kokoropy
49c8ca4b7dd2a084f2ced33fc5987b8a8b62c995
[ "MIT" ]
6
2015-01-06T17:02:01.000Z
2016-11-11T02:50:27.000Z
from kokoropy.controller import Crud_Controller
from ..models._all import Page


class Page_Controller(Crud_Controller):
    # Bind the generic CRUD controller to the Page model. Presumably
    # Crud_Controller derives its routes and forms from __model__ --
    # confirm against the kokoropy framework.
    __model__ = Page


# Register this controller's routes with the framework at import time.
Page_Controller.publish_route()
25.571429
47
0.798883
22
179
6.045455
0.545455
0.210526
0
0
0
0
0
0
0
0
0
0
0.139665
179
7
48
25.571429
0.863636
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.4
0
0.8
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
6e821b2a6878f04a0b4d72a67ec636b8137f0150
2,102
py
Python
tests/terraform/checks/resource/aws/test_LBDeletionProtection.py
kylelaker/checkov
6eada26030a87f397a6bf1831827b3dc6c5dad2d
[ "Apache-2.0" ]
3
2021-04-19T17:17:21.000Z
2021-09-06T06:31:09.000Z
tests/terraform/checks/resource/aws/test_LBDeletionProtection.py
kylelaker/checkov
6eada26030a87f397a6bf1831827b3dc6c5dad2d
[ "Apache-2.0" ]
16
2021-03-09T07:38:38.000Z
2021-06-09T03:53:55.000Z
tests/terraform/checks/resource/aws/test_LBDeletionProtection.py
kylelaker/checkov
6eada26030a87f397a6bf1831827b3dc6c5dad2d
[ "Apache-2.0" ]
1
2022-01-06T08:04:56.000Z
2022-01-06T08:04:56.000Z
import unittest

import hcl2

from checkov.terraform.checks.resource.aws.LBDeletionProtection import check
from checkov.common.models.enums import CheckResult


class TestLBDeletionProtection(unittest.TestCase):
    """Checks for the aws_lb enable_deletion_protection rule.

    The three tests previously duplicated the parse/extract/scan boilerplate;
    it now lives in the _scan helper.
    """

    @staticmethod
    def _scan(hcl_snippet, resource_name):
        """Parse an HCL snippet and run the check on its aws_lb resource."""
        hcl_res = hcl2.loads(hcl_snippet)
        resource_conf = hcl_res['resource'][0]['aws_lb'][resource_name]
        return check.scan_resource_conf(conf=resource_conf)

    def test_failure(self):
        """enable_deletion_protection explicitly false must FAIL."""
        scan_result = self._scan("""
        resource "aws_lb" "test_failed" {
          name                       = "test-lb-tf"
          internal                   = false
          load_balancer_type         = "network"
          subnets                    = aws_subnet.public.*.id
          enable_deletion_protection = false
        }
        """, 'test_failed')
        self.assertEqual(CheckResult.FAILED, scan_result)

    def test_failure_missing_attribute(self):
        """A missing enable_deletion_protection attribute must FAIL."""
        scan_result = self._scan("""
        resource "aws_lb" "test_failed" {
          name               = "test-lb-tf"
          internal           = false
          load_balancer_type = "network"
          subnets            = aws_subnet.public.*.id
        }
        """, 'test_failed')
        self.assertEqual(CheckResult.FAILED, scan_result)

    def test_success(self):
        """enable_deletion_protection = true must PASS."""
        scan_result = self._scan("""
        resource "aws_lb" "test_success" {
          name                       = "test-lb-tf"
          internal                   = false
          load_balancer_type         = "network"
          subnets                    = aws_subnet.public.*.id
          enable_deletion_protection = true
        }
        """, 'test_success')
        self.assertEqual(CheckResult.PASSED, scan_result)


if __name__ == '__main__':
    unittest.main()
38.218182
76
0.531874
198
2,102
5.333333
0.272727
0.102273
0.051136
0.056818
0.72822
0.72822
0.72822
0.72822
0.72822
0.660038
0
0.005327
0.374881
2,102
54
77
38.925926
0.798326
0
0
0.6
0
0
0.531399
0.056137
0
0
0
0
0.066667
1
0.066667
false
0.022222
0.088889
0
0.177778
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
6e8b87175f17706e19f2c4d7495fe375c7bcd357
91
py
Python
src/userlogs/admin.py
cbsBiram/xarala__ssr
863e1362c786daa752b942b796f7a015211d2f1b
[ "FSFAP" ]
null
null
null
src/userlogs/admin.py
cbsBiram/xarala__ssr
863e1362c786daa752b942b796f7a015211d2f1b
[ "FSFAP" ]
null
null
null
src/userlogs/admin.py
cbsBiram/xarala__ssr
863e1362c786daa752b942b796f7a015211d2f1b
[ "FSFAP" ]
null
null
null
# src/userlogs/admin.py -- Django admin wiring for the UserLog model.
from django.contrib import admin

from .models import UserLog

# Make UserLog manageable through the admin with the default ModelAdmin.
admin.site.register(UserLog)
18.2
32
0.824176
13
91
5.769231
0.692308
0
0
0
0
0
0
0
0
0
0
0
0.10989
91
4
33
22.75
0.925926
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
6ed905ce6c0fd0e90fe7c53c90e3ea2223aea8cc
168
py
Python
service_layer/customer_service_interface.py
yeonghwanchoi/Project_bank
931bb22aa641101e3cb8eb3501f97ff99b120920
[ "MIT" ]
null
null
null
service_layer/customer_service_interface.py
yeonghwanchoi/Project_bank
931bb22aa641101e3cb8eb3501f97ff99b120920
[ "MIT" ]
null
null
null
service_layer/customer_service_interface.py
yeonghwanchoi/Project_bank
931bb22aa641101e3cb8eb3501f97ff99b120920
[ "MIT" ]
null
null
null
from abc import ABC, abstractmethod


class CustomerServiceInterface(ABC):
    """Abstract contract for customer-facing account services."""

    @abstractmethod
    def get_all_accounts_for_user(self, id: int) -> list:
        """Return every account belonging to the user with the given id."""
        ...
16.8
57
0.720238
20
168
5.85
0.85
0.290598
0
0
0
0
0
0
0
0
0
0
0.208333
168
9
58
18.666667
0.879699
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0.2
0.2
0
0.6
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
5
42b6d6d0608b9e9067ad3b832005fe658c96ba6d
58
py
Python
ConvertText.py
danheeks/PyCAD
711543aaa88c88a82d909f329b6ee36a9b96ae79
[ "BSD-3-Clause" ]
17
2018-07-30T17:38:02.000Z
2022-02-03T10:35:38.000Z
ConvertText.py
danheeks/PyCAD
711543aaa88c88a82d909f329b6ee36a9b96ae79
[ "BSD-3-Clause" ]
2
2020-06-11T10:29:06.000Z
2020-06-11T15:42:00.000Z
ConvertText.py
danheeks/PyCAD
711543aaa88c88a82d909f329b6ee36a9b96ae79
[ "BSD-3-Clause" ]
null
null
null
# Run the HeeksFont conversion as a one-shot script. Presumably
# ConvertHeeksFont regenerates the font data files -- confirm in the
# HeeksFont module.
from HeeksFont import ConvertHeeksFont

ConvertHeeksFont()
19.333333
38
0.87931
5
58
10.2
0.8
0
0
0
0
0
0
0
0
0
0
0
0.086207
58
3
39
19.333333
0.962264
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
42c2a938308ccc46ba4f62ae26262f96eff8cfcc
82
py
Python
django_group_by/__init__.py
alissonmuller/django-group-by
645c36ad2c3ab1f4691de6fcc04fed8b5d7ef78d
[ "MIT" ]
25
2016-09-29T15:25:16.000Z
2021-09-19T14:20:58.000Z
django_group_by/__init__.py
alissonmuller/django-group-by
645c36ad2c3ab1f4691de6fcc04fed8b5d7ef78d
[ "MIT" ]
22
2016-05-29T00:14:47.000Z
2019-06-08T13:24:21.000Z
django_group_by/__init__.py
alissonmuller/django-group-by
645c36ad2c3ab1f4691de6fcc04fed8b5d7ef78d
[ "MIT" ]
2
2018-09-24T07:28:39.000Z
2019-02-12T14:09:18.000Z
""" This module contains the package exports. """ from .mixin import GroupByMixin
16.4
41
0.756098
10
82
6.2
1
0
0
0
0
0
0
0
0
0
0
0
0.146341
82
4
42
20.5
0.885714
0.5
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
42f11f9d1898b3c5ffb47bdd9af94aa188c539c6
936
py
Python
mocks/mock_drive_manager.py
shepherdjay/-r-winnipegjets-scripts
2edeb1c4a48077cf9c3a21f92fa6f69412ba7de2
[ "MIT" ]
null
null
null
mocks/mock_drive_manager.py
shepherdjay/-r-winnipegjets-scripts
2edeb1c4a48077cf9c3a21f92fa6f69412ba7de2
[ "MIT" ]
null
null
null
mocks/mock_drive_manager.py
shepherdjay/-r-winnipegjets-scripts
2edeb1c4a48077cf9c3a21f92fa6f69412ba7de2
[ "MIT" ]
null
null
null
"""Module containing the definition of a mocked out GDocs dependency.

This is for test use only.
"""


class MockDriveManager:
    """Mocked out version of DriveManager for test purposes.

    Every method is an inert stub returning None. Bug fix: the original
    stubs omitted the ``self`` parameter, so calling any of them on an
    instance raised TypeError; each stub now accepts ``self``. The useless
    ``none = None`` bodies were replaced with bare returns.
    """

    def __init__(self):
        pass

    def get_file_entries(self):
        return None

    def get_drive_filetype(self):
        return None

    def get_all_books_sheets(self):
        return None

    def get_games_result(self):
        return None

    def convert_rank(self):
        return None

    def get_current_leaders(self):
        return None

    def get_unwritten_leaderboard_games(self):
        return None

    def get_history_game_points(self):
        return None

    def overwrite_leaderboard(self):
        return None

    def update_answerkey_results(self):
        return None

    def update_game_start_time(self):
        return None

    def create_new_sheet(self):
        return None

    def new_response_data_available(self):
        return None
19.914894
98
0.612179
112
936
4.821429
0.5
0.207407
0.264815
0.181481
0
0
0
0
0
0
0
0
0.310897
936
46
99
20.347826
0.837209
0.154915
0
0.482759
0
0
0
0
0
0
0
0
0
1
0.482759
false
0
0
0
0.517241
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
6e353d78b983b52908ae505279a61410a0d5812d
50,536
py
Python
st_rationale.py
microsoft/RationaleST
760c10a44aa89bd0022db34afd0ffa4fb41ac1e3
[ "MIT" ]
null
null
null
st_rationale.py
microsoft/RationaleST
760c10a44aa89bd0022db34afd0ffa4fb41ac1e3
[ "MIT" ]
null
null
null
st_rationale.py
microsoft/RationaleST
760c10a44aa89bd0022db34afd0ffa4fb41ac1e3
[ "MIT" ]
null
null
null
""" Author: Meghana Bhat (bhat.89@osu.edu) Code for Self-training for Rationale using few-shot learning. This code base is adapted from UST (https://github.com/microsoft/UST) """ from collections import defaultdict from sklearn.utils import shuffle from transformers import * import logging import math import models import numpy as np import os, sys import json import nltk import tensorflow as tf import tensorflow.keras as K import tensorflow.keras.backend as kb import tensorflow_addons as tfa from focal_loss import BinaryFocalLoss, SparseCategoricalFocalLoss import random from sklearn.metrics import f1_score from sklearn.metrics import precision_recall_fscore_support logger = logging.getLogger('STRationale') def create_learning_rate_scheduler(max_learn_rate=5e-5, end_learn_rate=1e-7, warmup_epoch_count=10, total_epoch_count=90): def lr_scheduler(epoch): if epoch < warmup_epoch_count: res = (max_learn_rate/warmup_epoch_count) * (epoch + 1) else: res = max_learn_rate*math.exp(math.log(end_learn_rate/max_learn_rate)*(epoch-warmup_epoch_count+1)/(total_epoch_count-warmup_epoch_count+1)) return float(res) learning_rate_scheduler = tf.keras.callbacks.LearningRateScheduler(lr_scheduler, verbose=1) return learning_rate_scheduler def train_model(max_seq_length, X, y, X_test, y_test, X_unlabeled, model_dir, tokenizer, sup_batch_size=4, unsup_batch_size=32, unsup_size=4096, sample_size=16384, TFModel=TFBertModel, Config=BertConfig, pt_teacher_checkpoint='bert-base-uncased', sample_scheme='easy_bald_class_conf', T=30, alpha=0.1, valid_split=0.5, sup_epochs=70, unsup_epochs=25, N_base=10, dense_dropout=0.5, attention_probs_dropout_prob=0.3, hidden_dropout_prob=0.3, test_data=None, unlabeled_data=None, class_weight=None, type_="token", X_dev=None, y_dev=None, task=None): #labels = [0, 1] #fix hardcoding labels = set(y[:,0]) logger.info ("Class labels {}".format(labels)) #split X and y to train and dev with valid_split if valid_split > 0: train_size = int((1. 
# NOTE(review): the physical line wrapping in this region is mangled (many statements per
# line; the chunk opens mid-expression — presumably `train_size = int((1 - valid_split)*...)`).
# Comments below describe the apparent logic; confirm against the originally formatted file.
#
# Train/dev split: the first (1 - valid_split) fraction of X/y becomes the training set,
# the remainder the dev set. Which tensor triples are carried through depends on `type_`:
#   '_neg'  -> base triple + rationale ('*_r') + negation ('*_neg') triples
#   'joint' -> base triple + rationale ('*_r') triple
#   else    -> base (input_ids / token_type_ids / attention_mask) triple only
# If no split is requested, X/y are used as-is (the `X_dev, y_dev = X_dev, y_dev` branch
# is a no-op that assumes dev data was supplied by the caller).
# After logging dataset shapes, a MirroredStrategy is created and `N_base` random restarts
# of the teacher are trained; the best is kept by validation loss. For 'mtl' the teacher is
# compiled with SparseCategoricalCrossentropy for both heads (SparseCategoricalFocalLoss
# with gamma=2 when 'focal' is in type_ — NOTE(review): 'focal' can never match inside the
# 'mtl' branch unless type_ contains both substrings; verify intent).
- valid_split)*len(X["input_ids"])) if '_neg' in type_: X_train, y_train = {"input_ids": X["input_ids"][:train_size], "token_type_ids": X["token_type_ids"][:train_size], "attention_mask": X["attention_mask"][:train_size], "input_ids_r":X["input_ids_r"][:train_size], "token_type_ids_r":X["token_type_ids_r"][:train_size], "attention_mask_r":X["attention_mask_r"][:train_size], "input_ids_neg":X["input_ids_neg"][:train_size], "token_type_ids_neg":X["token_type_ids_neg"][:train_size], "attention_mask_neg":X["attention_mask_neg"][:train_size]}, y[:train_size] X_dev, y_dev = {"input_ids": X["input_ids"][train_size:], "token_type_ids": X["token_type_ids"][train_size:], "attention_mask": X["attention_mask"][train_size:], "input_ids_r":X["input_ids_r"][train_size:], "token_type_ids_r":X["token_type_ids_r"][train_size:], "attention_mask_r":X["attention_mask_r"][train_size:], "input_ids_neg":X["input_ids_neg"][train_size:], "token_type_ids_neg":X["token_type_ids_neg"][train_size:], "attention_mask_neg":X["attention_mask_neg"][train_size:]}, y[train_size:] elif 'joint' in type_: X_train, y_train = {"input_ids": X["input_ids"][:train_size], "token_type_ids": X["token_type_ids"][:train_size], "attention_mask": X["attention_mask"][:train_size], "input_ids_r":X["input_ids_r"][:train_size], "token_type_ids_r":X["token_type_ids_r"][:train_size], "attention_mask_r":X["attention_mask_r"][:train_size]}, y[:train_size] X_dev, y_dev = {"input_ids": X["input_ids"][train_size:], "token_type_ids": X["token_type_ids"][train_size:], "attention_mask": X["attention_mask"][train_size:], "input_ids_r":X["input_ids_r"][train_size:], "token_type_ids_r":X["token_type_ids_r"][train_size:], "attention_mask_r":X["attention_mask_r"][train_size:]}, y[train_size:] else: X_train, y_train = {"input_ids": X["input_ids"][:train_size], "token_type_ids": X["token_type_ids"][:train_size], "attention_mask": X["attention_mask"][:train_size]}, y[:train_size] X_dev, y_dev = {"input_ids": X["input_ids"][train_size:], 
"token_type_ids": X["token_type_ids"][train_size:], "attention_mask": X["attention_mask"][train_size:]}, y[train_size:] else: X_train, y_train = X, y X_dev, y_dev = X_dev, y_dev logger.info("X Train Shape: {} {}".format(X_train["input_ids"].shape, y_train.shape)) logger.info("X Dev Shape: {} {}".format(X_dev["input_ids"].shape, y_dev.shape)) logger.info("X Test Shape: {} {}".format(X_test["input_ids"].shape, y_test.shape)) logger.info ("X Unlabeled Shape: {}".format(X_unlabeled["input_ids"].shape)) strategy = tf.distribute.MirroredStrategy() gpus = strategy.num_replicas_in_sync logger.info('Number of devices: {}'.format(gpus)) #run the base model n times with different initialization to select best base model based on validation loss best_base_model = None best_validation_loss = np.inf for counter in range(N_base): #original N_base=10 with strategy.scope(): if 'mtl' in type_: rat_loss = None if 'focal' in type_: rat_loss = SparseCategoricalFocalLoss(gamma=2) else: rat_loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True) model = models.construct_teacher_mtl(TFModel, Config, pt_teacher_checkpoint, max_seq_length, len(labels), dense_dropout=dense_dropout, attention_probs_dropout_prob=attention_probs_dropout_prob, hidden_dropout_prob=hidden_dropout_prob) model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=3e-5, epsilon=1e-08), loss=[tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True), rat_loss], metrics=[tf.keras.metrics.SparseCategoricalAccuracy(name="dense_3_classification_acc")])#, tf.keras.metrics.SparseCategoricalAccuracy(name="token_acc")]) #, sample_weight_mode="temporal") elif type_ == 'joint': rat_loss = None if 'focal' in type_: rat_loss = SparseCategoricalFocalLoss(gamma=2) else: rat_loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True) model = models.construct_teacher_joint(TFModel, Config, pt_teacher_checkpoint, max_seq_length, len(labels), dense_dropout=dense_dropout, 
# NOTE(review): mangled wrapping continues — statements span physical lines.
#
# Teacher construction/compilation for the remaining `type_` variants:
#   type_ == 'joint'  : four-head model ('task_classifier', 'rationale_classifier',
#                       'rationale_task_classifier', 'l2_distance'); the last two heads get
#                       loss=None here so only the first two are trained during the base phase.
#   'joint_neg'       : defines custom_loss (per-token CE scaled by 1/(unsup_batch_size*gpus)
#                       plus 0.01*L1 sparsity and 0.01*coherence penalties on adjacent token
#                       logits) and custom_loss_neg (dense CE vs. a uniform target, i.e. the
#                       negated-rationale input should be uninformative). loss_weights are
#                       zeroed for '_noexp' (task only) or '_no_suffcomp' variants.
#                       NOTE(review): `elif 'joint_neg' in type_` is dead when
#                       type_ == 'joint' is checked first only for exact equality, but
#                       `elif type_ == 'joint'` precedes it — confirm the branch order matches
#                       the intended precedence for strings like 'joint_neg'.
# If a cached checkpoint ("model_label.h5") exists it is loaded and the restart loop breaks;
# otherwise the model is fit with EarlyStopping and the restart with the lowest validation
# loss (val_loss[0] = total loss) is kept as `best_base_model` and saved.
attention_probs_dropout_prob=attention_probs_dropout_prob, hidden_dropout_prob=hidden_dropout_prob) model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=3e-5, epsilon=1e-08), loss={'task_classifier': tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True), 'rationale_classifier':tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True), 'rationale_task_classifier': None, 'l2_distance': None}, metrics={'task_classifier':[tf.keras.metrics.SparseCategoricalAccuracy(name="acc")], 'rationale_classifier':[tf.keras.metrics.SparseCategoricalAccuracy(name="acc")], 'rationale_task_classifier':[tf.keras.metrics.SparseCategoricalAccuracy(name="acc")], 'l2_distance': None}) elif 'joint_neg' in type_: rat_loss = None if 'focal' in type_: rat_loss = SparseCategoricalFocalLoss(gamma=2) else: rat_loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True) def custom_loss(y_true, y_pred): cce = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True, reduction=tf.keras.losses.Reduction.NONE) if 'focal' in type_: cce = SparseCategoricalFocalLoss(gamma=2, reduction=tf.keras.losses.Reduction.NONE) cce_loss = ((cce(y_true, y_pred))* 1/(unsup_batch_size*gpus)) l1_loss = tf.reduce_mean(tf.reduce_sum(tf.math.abs(y_pred),axis=0)) coh_loss = tf.reduce_mean(tf.reduce_sum(tf.math.abs(y_pred[1:]-y_pred[:-1]), axis=0)) #l2_loss = 0.0 #logger.info(l1_loss) return cce_loss + 0.01*l1_loss + 0.01*coh_loss def custom_loss_neg(y_true, y_pred): cce = tf.keras.losses.CategoricalCrossentropy(from_logits=False, reduction=tf.keras.losses.Reduction.NONE) return tf.reduce_sum(cce(y_true, y_pred))*(1/(unsup_batch_size*gpus)) model = models.construct_teacher_joint_neg(TFModel, Config, pt_teacher_checkpoint, max_seq_length, len(labels), dense_dropout=dense_dropout, attention_probs_dropout_prob=attention_probs_dropout_prob, hidden_dropout_prob=hidden_dropout_prob) loss_weights = [1.0, 1.0, 1.0, 1.0] if '_noexp' in type_: loss_weights = [1.0, 0.0, 0.0, 0.0] elif 
'_no_suffcomp' in type_: loss_weights = [1.0, 1.0, 0, 0] model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=3e-5, epsilon=1e-08), loss={'task_classifier': tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True), 'rationale_classifier':rat_loss, 'rationale_task_classifier': None, 'not_rationale_task_classifier': None}, metrics={'task_classifier':[tf.keras.metrics.SparseCategoricalAccuracy(name="acc")], 'rationale_classifier':[tf.keras.metrics.SparseCategoricalAccuracy(name="acc")], 'rationale_task_classifier':[tf.keras.metrics.SparseCategoricalAccuracy(name="acc")], 'not_rationale_task_classifier': None}, loss_weights=loss_weights) if counter == 0: logger.info(model.summary()) model_file = os.path.join(model_dir, "model_label.h5") model_file_task = os.path.join(model_dir, "model_task.h5") model_file_best = os.path.join(model_dir, "model_best.h5") if os.path.exists(model_file): model.load_weights(model_file) #model_task.load_weights(model_file_task) best_base_model = model logger.info ("Model file loaded from {}".format(model_file)) break elif 'mtl' in type_ : logger.info(y_train.shape) model.fit(x=X_train, y=[y_train[:,0], y_train[:,1:]], shuffle=True, epochs=sup_epochs, validation_data=(X_dev, [y_dev[:,0], y_dev[:,1:]]), batch_size=sup_batch_size*gpus, callbacks=[tf.keras.callbacks.EarlyStopping(monitor='loss', patience=5, restore_best_weights=True)]) # class_weight=class_weight) val_loss = model.evaluate(X_dev, [y_dev[:,0], y_dev[:,1:]]) elif '_neg' in type_ : y_neg = np.full((len(y_train),len(labels)), 1/len(labels)) model.fit(x=X_train, y=[y_train[:,0], y_train[:,1:], y_train[:,0], y_neg], shuffle=True, epochs=sup_epochs, validation_data=(X_dev, [y_dev[:,0], y_dev[:,1:], y_dev[:,0], np.full((len(y_dev), len(labels)), 1/len(labels))]), batch_size=sup_batch_size*1, callbacks=[tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, restore_best_weights=True)]) #, class_weight=class_weight) val_loss = model.evaluate(X_dev, [y_dev[:,0], 
y_dev[:,1:], y_dev[:,0], np.full((len(y_dev), len(labels)), 1/len(labels))]) elif 'joint' in type_: _placeholder_labels = np.empty((y_train.shape[0], y_train.shape[0])) model.fit(x=X_train, y=[y_train[:,0], y_train, y_train[:,0], np.ones(len(y_train))], shuffle=True, epochs=sup_epochs, validation_data=(X_dev, [y_dev[:,0], y_dev, y_dev[:,0], np.ones(len(y_dev))]), batch_size=sup_batch_size*gpus, callbacks=[tf.keras.callbacks.EarlyStopping(monitor='loss', patience=5, restore_best_weights=True)]) # class_weight=class_weight) val_loss = model.evaluate(X_dev, [y_dev[:,0], y_dev, y_dev[:,0], np.ones(len(y_dev))]) logger.info ("Validation loss for run {} : {}".format(counter, val_loss)) if val_loss[0] < best_validation_loss: best_base_model = model best_validation_loss = val_loss[0] model = best_base_model ''' if 'mtl' in type_: logger.info ("Best validation acc for base model {}: {}".format(best_validation_loss, model.evaluate(X_dev, [y_dev[:,0],y_dev[:,1:]]))) ''' if not os.path.exists(model_file): model.save_weights(model_file) logger.info ("Model file saved to {}".format(model_file)) best_val_acc = 0. best_test_acc = 0. max_test_acc = 0. max_task_acc = 0. max_best_acc = 0. val_loss = 0. 
# Teacher-model evaluation on the test set.
#   - Per `type_`, model.evaluate() yields [total_loss, per-head losses..., per-head accs...];
#     the hard-coded indices ([3]/[4]/[5], i.e. task acc / token acc / rationale-task acc)
#     assume the four-head (or two-head mtl) output order — confirm against model definition.
#     NOTE(review): for 'mtl' the model is evaluated three times just to read three indices
#     of the same result list; a single evaluate() call would suffice.
#   - Class predictions are argmax'd and scored with macro/micro precision_recall_fscore_support
#     (note: predictions are passed as the first arg, gold labels y_test[:,0] as the second,
#     which swaps sklearn's (y_true, y_pred) convention — precision/recall are interchanged).
#   - Token-level rationale metrics: tokens predicted 1 are compared against gold rationale
#     labels (y_test columns shifted by +1 because column 0 holds the task label); tp/fp/fn
#     feed token F1, recall ("rationale coverage"), precision, and Jaccard overlap.
#     NOTE(review): pred_0/truth_0 accumulate `max_seq_length - running_total`, not the
#     per-example complement — looks like a latent bookkeeping bug; values are unused here.
#   - BLEU-1..4 of predicted vs. gold rationale token strings, then a per-example JSON dump
#     to rationale_output_test_teacher_<type_>.json.
if 'mtl' in type_: logger.info("y_test: {}".format(y_test)) test_acc = model.evaluate(X_test, [y_test[:,0], y_test[:,1:]], verbose=0)[4] task_acc = model.evaluate(X_test, [y_test[:,0], y_test[:,1:]], verbose=0)[3] val_loss = model.evaluate(X_test, [y_test[:,0], y_test[:,1:]], verbose=0)[0] elif '_neg' in type_: out = model.evaluate(X_test, [y_test[:,0], y_test[:,1:], y_test[:,0], np.full((len(y_test), len(labels)), 1/len(labels))]) task_acc, test_acc, r_acc = out[3], out[4], out[5] elif 'joint' in type_: out = model.evaluate(X_test, [y_test[:,0], y_test, y_test[:,0], np.ones(len(y_test))]) task_acc, test_acc, r_acc = out[3], out[4], out[5] logger.info ("Test token acc for run {} : {}".format(counter, test_acc)) logger.info ("Best Test task acc for run {} with total loss : {}".format(counter, task_acc)) if 'mtl' in type_: class_acc = model.predict(X_test)[0] test_pred = model.predict(X_test)[1] class_acc = np.argmax(class_acc, axis=-1) elif 'joint' in type_: out = model.predict(X_test) class_acc, test_pred, r_acc = out[0], out[1], out[2] class_acc = np.argmax(class_acc, axis=-1) logger.info("Class predictions shape {}".format(class_acc.shape)) logger.info("Teacher model best score (macro/task): {}".format(precision_recall_fscore_support(class_acc, y_test[:,0], average='macro'))) logger.info("Teacher model best score (micro/task): {}".format(precision_recall_fscore_support(class_acc, y_test[:,0], average='micro'))) logger.info("Token Predictions shape {}".format(test_pred.shape)) pred, truth = [], [] logger.info(test_pred) test_pred = np.argmax(tf.nn.softmax(test_pred, axis=-1), axis=-1) logger.info("Printing prediction data on teacher model for run {}: {}".format(counter, test_pred)) tp, fn, fp = 0, 0, 0 pred_1, pred_0, truth_1, truth_0 = 0, 0, 0, 0 for i in range(len(test_pred)): temp_p, temp_t, ct = [],[], 0 temp = tokenizer.convert_ids_to_tokens(X_test["input_ids"][i])[1:] for j in range(0,len(test_pred[0])-1): if test_pred[i][j] == 1: temp_p.append(temp[j]) if 
y_test[i][j+1] == 1: #to skip evaluation of the task label temp_t.append(temp[j]) pred_1 += test_pred[i].sum() pred_0+= max_seq_length-pred_1 truth_1 += y_test[i].sum() truth_0+= max_seq_length-truth_1 pred.append(' '.join(temp_p)) truth.append(' '.join(temp_t)) for word in temp_p: if word in temp_t: ct+=1 temp_t.remove(word) else: fp+=1 tp +=ct fn += (y_test[i].sum()-ct) p = tp/(tp+fp+0.0000001) r = tp/(tp+fn+0.0000001) logger.info("Token-level: {}".format((tp)/(tp+(0.5*(fp+fn))))) logger.info("Rationale coverage (recall): {}".format(r)) logger.info("Token Precision: {}".format(p)) logger.info("Token overlap: {}".format(tp/(tp+fp+fn))) score1, score2, score3, score4 = 0.0, 0.0, 0.0, 0.0 for i in range(len(pred)): score1 += nltk.translate.bleu_score.sentence_bleu([truth[i].split()],pred[i].split(), weights=(1, 0, 0, 0)) score2 += nltk.translate.bleu_score.sentence_bleu([truth[i].split()],pred[i].split(), weights=(0, 1, 0, 0)) score3 += nltk.translate.bleu_score.sentence_bleu([truth[i].split()],pred[i].split(), weights=(0, 0, 1, 0)) score4 += nltk.translate.bleu_score.sentence_bleu([truth[i].split()],pred[i].split(), weights=(0, 0, 0, 1)) logger.info("BLEU-1 score of rationales on test set (teacher model): {} ".format(score1/len(pred))) logger.info("BLEU-2 score of rationales on test set (teacher model): {} ".format(score2/len(pred))) logger.info("BLEU-3 score of rationales on test set (teacher model): {} ".format(score3/len(pred))) logger.info("BLEU-4 score of rationales on test set (teacher model): {} ".format(score4/len(pred))) best_loss = np.inf data = [] for i in range(len(X_test["input_ids"])): text = tokenizer.convert_ids_to_tokens(X_test["input_ids"][i]) temp = dict() temp['text'] = ' '.join(text) temp['truth'] = truth[i] temp['pred'] = pred[i] temp['score'] = nltk.translate.bleu_score.sentence_bleu([truth[i].split()],pred[i].split()) data.append(temp) with open(os.path.join(model_dir, 'rationale_output_test_teacher_'+type_+'.json'), 'w') as f: 
# Self-training outer loop (`for epoch in range(unsup_epochs)`): each iteration evaluates the
# current model on dev/test per `type_`, tracks the best task accuracy, and checkpoints the
# best weights to model_file_best.
#   - 'mtl'      : dev token/task accs via negative indices; checkpoint when task_acc improves.
#   - 'joint_neg': also predicts on X_test, logging micro/macro PRF for token and task heads
#                  (again with (pred, gold) argument order — see earlier note); checkpoints on
#                  improved dev task_acc, and records test_task_acc when dev token acc improves.
#   - 'joint'    : same pattern with np.ones placeholders for the l2_distance head.
# NOTE(review): each evaluate() is repeated once per metric index read — expensive; one call
# with tuple-unpacking would be equivalent. test_task_acc is only (re)assigned inside the
# improvement branches, so the later log line relies on it being set in a prior epoch.
json.dump(data, f) model_student = None # model_task for epoch in range(unsup_epochs): logger.info ("Starting loop {}".format(epoch)) if type_ == 'mtl': test_acc = model.evaluate(X_dev, [y_dev[:,0], y_dev[:,1:]], verbose=0)[-1] task_acc = model.evaluate(X_dev, [y_dev[:,0], y_dev[:,1:]], verbose=0)[-2] val_loss = model.evaluate(X_dev, [y_dev[:,0], y_dev[:,1:]], verbose=0)[0] if task_acc > max_task_acc: logger.info ("Val acc (task) {}".format(task_acc)) max_task_acc = task_acc model.save_weights(model_file_best) val_acc = model.evaluate(X_dev, [y_dev[:,0], y_dev[:,1:]], verbose=0)[-2] test_task_acc = model.evaluate(X_test, [y_test[:,0], y_test[:,1:]], verbose=0)[-2] elif 'joint_neg' in type_: y_neg_dev = np.full((len(y_dev), len(labels)), 1/len(labels)) y_neg_test = np.full((len(y_test), len(labels)), 1/len(labels)) y_dev_plg = [y_dev[:,1:], y_dev[:,0], np.full((len(y_dev),len(labels)), 1/len(labels))] y_test_plg = [y_test[:,1:], y_test[:,0], np.full((len(y_test),len(labels)), 1/len(labels))] test_acc = model.evaluate(X_dev, [y_dev[:,0], y_dev[:,1:], y_dev[:,0], y_neg_dev], verbose=0)[-2] task_acc = model.evaluate(X_dev, [y_dev[:,0], y_dev[:,1:], y_dev[:,0], y_neg_dev], verbose=0)[-3] out1 = model.predict(X_test) acc1, y_pred1, r_acc1 = out1[0], out1[1], out1[2] y_pred1 = np.argmax(y_pred1, axis=-1) acc1 = np.argmax(acc1, axis=-1) r_acc1 = np.argmax(r_acc1, axis=-1) logger.info("Model performance for token (macro/task): {}".format(precision_recall_fscore_support(y_pred1, y_test[:,1:], average='micro'))) logger.info("Model performance for token (macro/task): {}".format(precision_recall_fscore_support(y_pred1, y_test[:,1:], average='macro'))) logger.info("Model performance for task (macro/task): {}".format(precision_recall_fscore_support(acc1, y_test[:,0], average='macro'))) val_loss = model.evaluate(X_dev, [y_dev[:,0], y_dev[:,1:], y_dev[:,0], y_neg_dev], verbose=0)[0] if task_acc > max_task_acc: logger.info ("Val acc (task) {}".format(task_acc)) max_task_acc = 
task_acc best_val_acc = task_acc model.save_weights(model_file_best) #_student = deepcopy(model) val_acc = task_acc #model.evaluate(X_dev, [y_dev[:,0], y_dev, y_dev[:,0], y_neg_dev], verbose=0)[-3] if test_acc > max_test_acc: max_test_acc = test_acc test_task_acc = model.evaluate(X_test, [y_test[:,0], y_test[:,1:], y_test[:,0], y_neg_test], verbose=0)[-3] elif type_ == 'joint': # or 'joint_neg' in type_: test_acc = model.evaluate(X_dev, [y_dev[:,0], y_dev[:,1:], y_dev[:,0], np.ones(len(y_dev))], verbose=0)[-2] task_acc = model.evaluate(X_dev, [y_dev[:,0], y_dev[:,1:], y_dev[:,0], np.ones(len(y_dev))], verbose=0)[-3] val_loss = model.evaluate(X_dev, [y_dev[:,0], y_dev[:,1:], y_dev[:,0], np.ones(len(y_dev))], verbose=0)[0] if task_acc > max_task_acc: logger.info ("Val acc (task) {}".format(task_acc)) max_task_acc = task_acc best_val_acc = task_acc model.save_weights(model_file_best) #_student = deepcopy(model) val_acc = model.evaluate(X_dev, [y_dev[:,0], y_dev, y_dev[:,0], np.ones(len(y_dev))], verbose=0)[-3] ''' if val_loss < best_loss: best_loss = val_loss model.save_weights(model_file_best) #_student = deepcopy(model) ''' if test_acc > max_test_acc: max_test_acc = test_acc test_task_acc = model.evaluate(X_test, [y_test[:,0], y_test[:,1:], y_test[:,0], np.ones(len(y_test))], verbose=0)[-3] if '_neg' in type_: y_neg_dev = np.full((len(y_dev), len(labels)), 1/len(labels)) y_neg_test = np.full((len(y_test), len(labels)), 1/len(labels)) temp = model.evaluate(X_test, [y_test[:,0], y_test[:,1:], y_test[:,0], y_neg_test], verbose=0) elif 'joint' in type_: temp = model.evaluate(X_test, [y_test[:,0], y_test[:,1:], y_test[:,0], np.ones(len(y_test))], verbose=0) elif 'mtl' in type_: temp = model.evaluate(X_test, [y_test[:,0], y_test[:,1:]], verbose=0) logger.info("Print acc (task) for joint {}".format(temp)) logger.info ("Val acc (token) {}".format(test_acc)) logger.info ("Val acc (task) {}".format(task_acc)) logger.info ("Test acc (task) {}".format(test_task_acc)) if 
# Pseudo-labeling and sampling of the unlabeled pool for this self-training epoch.
#   - Tracks max_best_acc; builds per-epoch checkpoint names and, if one exists, loads it and
#     `continue`s (resume support).
#   - Predicts on X_unlabeled (argmax over task logits `acc` and token logits `y_pred`);
#     if sample_size < pool size, randomly subsamples indices and slices the matching tensor
#     triples for the active `type_` variant.
#   - For 'joint' variants, builds rationale-restricted inputs X_sample: attention is limited
#     to predicted rationale tokens ([CLS] at position 0 always kept), non-rationale token ids
#     are overwritten with 103 and position 0 with 101 — presumably BERT's [MASK]/[CLS] ids;
#     confirm for the tokenizer in use. For '_neg', X_negation_sample is the complement
#     (rationale tokens masked instead).
#   - Re-predicts on the sampled pool to obtain softmax confidences y_val (task) and y_rat
#     (token). The 'pruthi_' branch bypasses prediction and reuses gold y_train labels.
#   - 'uni' sampling: class-balanced selection — for each label, indices with that predicted
#     class are shuffled with a fixed seed read from PYTHONHASHSEED (NOTE(review): crashes if
#     the env var is unset — int(None)) and capped at unsup_size; otherwise all examples kept.
test_task_acc >= max_best_acc: max_best_acc = test_task_acc model_file = os.path.join(model_dir, "model_token_{}_{}.h5".format(epoch, sample_scheme)) model_file_task = os.path.join(model_dir, "model_task_{}_{}.h5".format(epoch, sample_scheme)) if os.path.exists(model_file): model.load_weights(model_file) logger.info ("Model file loaded from {}".format(model_file)) continue if 'mtl' in type_ : acc, y_pred = model.predict(X_unlabeled, batch_size=256) #y_val = np.amax(acc, axis=-1) #y_rat = np.amax(y_pred, axis=-1) y_pred = np.argmax(y_pred, axis=-1) #.flatten() acc = np.argmax(acc, axis=-1) elif 'joint' in type_: out = model.predict(X_unlabeled, batch_size=64) acc, y_pred, r_acc = out[0], out[1], out[2] #y_val = np.amax(acc, axis=-1) #y_rat = np.amax(y_pred, axis=-1) y_pred = np.argmax(y_pred, axis=-1) #.flatten() acc = np.argmax(acc, axis=-1) r_acc = np.argmax(r_acc, axis=-1) #compute confidence on the unlabeled set if sample_size < len(X_unlabeled["input_ids"]): logger.info ("Evaluating confidence on {} number of instances sampled from {} unlabeled instances".format(sample_size, len(X_unlabeled["input_ids"]))) indices = np.random.choice(len(X_unlabeled["input_ids"]), sample_size, replace=False) if '_neg' in type_: X_unlabeled_sample, y_pred = {'input_ids': X_unlabeled["input_ids"][indices], 'token_type_ids': X_unlabeled["token_type_ids"][indices], 'attention_mask': X_unlabeled["attention_mask"][indices], 'input_ids_r':X_unlabeled['input_ids_r'][indices], 'token_type_ids_r':X_unlabeled['token_type_ids_r'][indices], 'attention_mask_r':X_unlabeled['attention_mask_r'][indices], 'input_ids_neg':X_unlabeled['input_ids_neg'][indices], 'token_type_ids_neg':X_unlabeled['token_type_ids_neg'][indices], 'attention_mask_neg':X_unlabeled['attention_mask_neg'][indices]}, y_pred[indices] elif 'joint' in type_: X_unlabeled_sample, y_pred = {'input_ids': X_unlabeled["input_ids"][indices], 'token_type_ids': X_unlabeled["token_type_ids"][indices], 'attention_mask': 
X_unlabeled["attention_mask"][indices], 'input_ids_r':X_unlabeled['input_ids_r'][indices], 'token_type_ids_r':X_unlabeled['token_type_ids_r'][indices], 'attention_mask_r':X_unlabeled['attention_mask_r'][indices]}, y_pred[indices] else: X_unlabeled_sample, y_pred = {'input_ids': X_unlabeled["input_ids"][indices], 'token_type_ids': X_unlabeled["token_type_ids"][indices], 'attention_mask': X_unlabeled["attention_mask"][indices]}, y_pred[indices] else: logger.info ("Evaluating confidence on {} number of instances".format(len(X_unlabeled["input_ids"]))) X_unlabeled_sample = X_unlabeled #X_unlabeled_sample = {'input_ids': X_unlabeled["input_ids"][indices], 'token_type_ids': X_unlabeled["token_type_ids"][indices], 'attention_mask': X_unlabeled["attention_mask"][indices]} #logger.info (X_unlabeled_sample["input_ids"][:5]) if 'joint' in type_: ids = [] attention_mask_r = np.ones((len(y_pred), max_seq_length)) attention_mask_r[:,1:] = np.array(y_pred) #logger.info(y_pred.shape) #logger.info("Percentage of rationales selected: {}".format(np.mean(np.sum(attention_mask_r, axis=-1)))) attention_mask_r[:,0] = 1 negation_mask = np.where(attention_mask_r==0, 1, 0) negation_mask[:,0] = 1 X_sample = {"input_ids": np.array(X_unlabeled_sample["input_ids"]), "token_type_ids": np.array(X_unlabeled_sample['token_type_ids']), "attention_mask": attention_mask_r} #mask tokens that are not rationales u-r if '_neg' in type_: X_negation_sample = {"input_ids": np.array(X_unlabeled_sample["input_ids"]), "token_type_ids": np.array(X_unlabeled_sample['token_type_ids']), "attention_mask": negation_mask} for i in range(len(y_pred)): X_sample["input_ids"][i, 1:] = np.where(y_pred[i]==0, 103, X_sample["input_ids"][i, 1:]) if '_neg' in type_: X_negation_sample["input_ids"][i, 1:] = np.where(y_pred[i]==0, X_negation_sample["input_ids"][i, 1:], 103) X_negation_sample["input_ids"][:,0] = 101 X_sample["input_ids"][:,0] = 101 logger.info("Extracted rationale from teacher model as input for task: 
{}".format(X_sample["input_ids"][:5])) logger.info("Extracted rationale from teacher model as input for task: {}".format(X_negation_sample["input_ids"][:5])) y_mean, y_var, y_T = None, None, None if 'mtl' in type_: acc, y_pred = model.predict(X_unlabeled_sample, batch_size=256) y_val = np.amax(tf.math.softmax(acc, axis=-1).numpy(), axis=-1) y_rat = np.amax(tf.math.softmax(y_pred, axis=-1).numpy(), axis=-1) y_pred = np.argmax(y_pred, axis=-1) #.flatten() acc = np.argmax(acc, axis=-1) elif 'joint' in type_: if 'pruthi_' in type_: out = y_train acc, y_pred, r_acc = y_train[:,0], y_train[:,1:], y_train[:,0] y_val = acc y_rat = np.array(y_pred).astype('float') #y_rat = y_rat[:,1:] #y_pred = y_pred[:,1:] else: out = model.predict(X_unlabeled_sample, batch_size=64) acc, y_pred, r_acc = out[0], out[1], out[2] y_val = np.amax(tf.math.softmax(acc, axis=-1).numpy(), axis=-1) y_rat = np.amax(tf.math.softmax(y_pred, axis=-1).numpy(), axis=-1) y_pred = np.argmax(y_pred, axis=-1) #.flatten() acc = np.argmax(acc, axis=-1) r_acc = np.argmax(r_acc, axis=-1) #y_rat = y_rat[:, 1:] #y_pred = y_pred[:,1:] # sample from unlabeled set if 'uni' in sample_scheme: logger.info ("Sampling uniformly") if unsup_size < len(X_unlabeled_sample['input_ids']): '''X_unlabeled_sample, y_pred = {"input_ids": X_unlabeled_sample['input_ids'][indices], "token_type_ids": X_unlabeled_sample['token_type_ids'][indices], "attention_mask": X_unlabeled_sample['attention_mask'][indices]}, y_pred[indices] if type_ == 'decoupled' or ('joint' in type_): X_sample = {"input_ids": X_sample['input_ids'][indices], "token_type_ids": X_sample['token_type_ids'][indices], "attention_mask": X_sample['attention_mask'][indices]} ''' #acc = acc[:,None] #y_batch = np.concatenate((acc[indices], y_pred), axis=1) acc = acc[:,None] y_batch = np.concatenate((acc, y_pred), axis=1) logging.info("y_batch shape {}".format(y_batch.shape)) indices = [] for i in labels: indx = np.where(y_batch[:,0]==i)[0] GLOBAL_SEED = 
# Batch assembly, confidence weighting, and student training.
#   - X_batch / X_rationale_batch / X_neg_rationale_batch are sliced by the sampled `indices`
#     for the active `type_` variant; y_batch prepends the predicted task label (column 0) to
#     the per-token rationale predictions.
#   - X_conf is a (batch, max_seq_length) weight matrix: column 0 carries the task-label
#     confidence (teacher softmax prob, optionally re-weighted when 'rwt' in type_ and
#     softmax-normalized over the batch when 'norm'), columns 1: carry per-token rationale
#     confidences when '_r_' in type_. Column 0 is further scaled by class_weight[predicted
#     class] (presumably to counter class imbalance — confirm where class_weight is built).
#   - 'mtl': student fit on pseudo-labels with sample_weight=[task conf, token conf]; the
#     optional 'fine_tune_teacher' pass recompiles and re-fits on the labeled X_train.
#   - type_ == 'joint': recompiles all four heads (l2_distance gets a local custom_loss =
#     mean(y_true*y_pred)), merges the rationale triple into X_batch, and fits with
#     EarlyStopping on 'val_task_classifier_acc'.
#   - 'joint_neg': recompiles with the per-token CE + 0.1*L1 + 0.01*coherence custom_loss
#     (NOTE(review): the L1 weight is 0.1 here vs 0.01 in the base-phase copy — confirm which
#     is intended) and the uniform-target custom_loss_neg, merges rationale and negation
#     triples, and fits with per-output sample weights.
X_batch = {"input_ids": X_unlabeled_sample['input_ids'][indices], "token_type_ids": X_unlabeled_sample['token_type_ids'][indices], "attention_mask": X_unlabeled_sample['attention_mask'][indices]} if 'joint' in type_: X_rationale_batch = {"input_ids_r": X_sample['input_ids'][indices], "token_type_ids_r": X_sample['token_type_ids'][indices], "attention_mask_r": X_sample['attention_mask'][indices]} if '_neg' in type_: X_neg_rationale_batch = {"input_ids_neg": X_negation_sample['input_ids'][indices], "token_type_ids_neg": X_negation_sample['token_type_ids'][indices], "attention_mask_neg": X_negation_sample['attention_mask'][indices]} elif 'joint' in type_: acc = acc[:,None] y_batch = np.concatenate((acc[indices], y_pred[indices][:, 1:]), axis=1) logger.info("y_batch shape: {}".format(y_batch.shape)) #X_batch, y_batch, X_conf = f_(tokenizer, X_unlabeled_sample, y_mean, y_var, acc, unsup_size, len(labels), y_T=y_T, type_=type_) probs = y_val[indices] probs_rat = y_rat[indices] cls = list(acc[indices]) logger.info(cls) X_conf = np.ones((len(y_batch), max_seq_length)) log_probs = (probs+1e-10) #+(1-y_batch[:,0])*np.log(1-probs+1e-10)) log_rationale = (probs_rat+1e-10) if 'rwt' in type_: #re-weight labels X_conf[:,0] = np.where(log_probs>0, log_probs, 0.00000001) if 'norm' in type_: X_conf[:,0] = tf.nn.softmax(X_conf[:,0], axis=0) if '_r_' in type_: #re-weight rationales X_conf[:,1:] = np.where(log_rationale>0, log_rationale, 0.000000001) if 'norm' in type_: X_conf[:,1:] = tf.nn.softmax(X_conf[:,1:], axis=0) #X_conf = np.ones((len(X_batch['input_ids']), max_seq_length)) for i in range(len(cls)): X_conf[i,0] = class_weight[cls[i][0]]*X_conf[i,0] #logger.info ("Weights {}".format(X_conf[:10])) logger.info("X_connf shape: {}".format(X_conf.shape)) if 'mtl' in type_: #model = model_student logger.info(y_batch.shape) model.fit(x=X_batch, y=[y_batch[:,0], y_batch[:,1:]], shuffle=True, epochs=unsup_epochs, validation_data=(X_dev, [y_dev[:,0], y_dev[:,1:]]), 
batch_size=unsup_batch_size*gpus, callbacks=[tf.keras.callbacks.EarlyStopping(monitor='dense_3_classification_acc', patience=5, restore_best_weights=True)], sample_weight=[X_conf[:,0], X_conf[:,1:]]) if 'fine_tune_teacher' in type_: rat_loss = None if 'focal' in type_: rat_loss = SparseCategoricalFocalLoss(gamma=2) else: rat_loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True) loss_weights = None if '_noexp' in type_: loss_weights = [1.0, 0.0] else: loss_weights = [0.5, 0.5] with strategy.scope(): model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=3e-5, epsilon=1e-08), loss=[tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True), rat_loss], metrics=[tf.keras.metrics.SparseCategoricalAccuracy(name="dense_3_classification_acc")])#, tf.keras.metrics.SparseCategoricalAccuracy(name="token_acc")]) #, sample_weight_mode="temporal") model.fit(x=X_train, y=[y_train[:,0], y_train[:,1:]], shuffle=True, epochs=unsup_epochs, validation_data=(X_dev, [y_dev[:,0], y_dev[:,1:]]), batch_size=unsup_batch_size*gpus, callbacks=[tf.keras.callbacks.EarlyStopping(monitor='val_task_classifier_acc', patience=5, restore_best_weights=True)]) #, sample_weight=[X_conf[:,0], X_conf[:,1:]]) elif type_ == 'joint': logger.info(type_) def custom_loss(y_true, y_pred): logger.info(y_pred) return kb.mean(y_true*y_pred, axis=-1) with strategy.scope(): model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=3e-5, epsilon=1e-08), loss={'task_classifier': tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True), 'rationale_classifier': tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True), 'rationale_task_classifier': tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True), 'l2_distance': custom_loss}, metrics={'task_classifier':[tf.keras.metrics.SparseCategoricalAccuracy(name="acc")], 'rationale_classifier':[tf.keras.metrics.SparseCategoricalAccuracy(name="acc")], 
'rationale_task_classifier':[tf.keras.metrics.SparseCategoricalAccuracy(name="acc")], 'l2_distance':None}) #model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=3e-5, epsilon=1e-08), loss={'task_classifier': tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True), 'rationale_classifier': tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True), 'rationale_task_classifier': None, 'l2_distance': custom_loss}, metrics=[tf.keras.metrics.SparseCategoricalAccuracy(name="acc"), tf.keras.metrics.SparseCategoricalAccuracy(name="acc"), tf.keras.metrics.SparseCategoricalAccuracy(name="acc"), tf.keras.metrics.Mean(name='mean')]) #X_batch.update(X_rationale_batch) X_batch['input_ids_r'], X_batch['token_type_ids_r'], X_batch['attention_mask_r'] = X_rationale_batch['input_ids_r'], X_rationale_batch['token_type_ids_r'], X_rationale_batch['attention_mask_r'] model.fit(x=X_batch, y=[y_batch[:,0], y_batch, y_batch[:, 0], np.ones(len(y_batch))], shuffle=True, epochs=unsup_epochs, validation_data=(X_dev, [y_dev[:,0], y_dev, y_dev[:,0], np.ones(len(y_dev))]), batch_size=unsup_batch_size*gpus, callbacks=[tf.keras.callbacks.EarlyStopping(monitor='val_task_classifier_acc', patience=5, restore_best_weights=True)]) # class_weight=class_weight) elif 'joint_neg' in type_: logger.info("Training for without rationales") with strategy.scope(): def custom_loss(y_true, y_pred): cce = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True, reduction=tf.keras.losses.Reduction.NONE) tf.print(tf.size(y_true), tf.size(y_pred)) cce_loss = ((cce(y_true, y_pred))* 1/(unsup_batch_size*gpus)) l1_loss = tf.reduce_mean(tf.reduce_sum(tf.math.abs(y_pred),axis=0)) coh_loss = tf.reduce_mean(tf.reduce_sum(tf.math.abs(y_pred[1:]-y_pred[:-1]), axis=0)) #l2_loss = 0.0 #logger.info(l1_loss) return cce_loss + 0.1*l1_loss + 0.01*coh_loss def custom_loss_neg(y_true, y_pred): cce = tf.keras.losses.CategoricalCrossentropy(from_logits=False, reduction=tf.keras.losses.Reduction.NONE) return 
# End of the self-training epoch and final student evaluation.
#   - The 'fine_tune_teacher' branch (neg variant) recompiles the four-head model and re-fits
#     on the labeled X_train with EarlyStopping on 'val_task_classifier_acc', then clears the
#     Keras session and saves weights if the epoch checkpoint does not exist.
#   - model_student loads the best checkpoint (model_file_best) and predicts on X_test;
#     task and token scores are logged via precision_recall_fscore_support (with (pred, gold)
#     argument order — see earlier note).
#   - Token-level rationale metrics are recomputed exactly as for the teacher (same tp/fp/fn
#     bookkeeping, including the same pred_0/truth_0 running-total quirk), followed by
#     BLEU-1..4 and a per-example dump to rationale_output_test_<type_>.json (here the text
#     field uses tokenizer.decode rather than joined tokens).
#   - Final log line reports the best task accuracy seen across all self-training iterations.
'fine_tune_teacher' in type_: rat_loss = None if 'focal' in type_: rat_loss = SparseCategoricalFocalLoss(gamma=2) else: rat_loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True) with strategy.scope(): loss_weights = [1.0, 1.0, 1.0, 1.0] ''' if '_noexp' in type_: loss_weights = [1.0, 0, 0, 0] elif '_no_suffcomp' in type_: loss_weights = [1.0, 1.0, 0, 0] ''' model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=3e-5, epsilon=1e-08), loss={'task_classifier': tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True), 'rationale_classifier': rat_loss, 'rationale_task_classifier': tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True), 'not_rationale_task_classifier': None}, metrics={'task_classifier':[tf.keras.metrics.SparseCategoricalAccuracy(name="acc")], 'rationale_classifier':[tf.keras.metrics.SparseCategoricalAccuracy(name="acc")], 'rationale_task_classifier':[tf.keras.metrics.SparseCategoricalAccuracy(name="acc")], 'not_rationale_task_classifier':None}, loss_weights=loss_weights) y_neg = np.full((len(y_train),len(labels)), 1/len(labels)) model.fit(x=X_train, y=[y_train[:,0], y_train[:,1:], y_train[:,0], y_neg], shuffle=True, epochs=sup_epochs, validation_data=(X_dev, [y_dev[:,0], y_dev[:,1:], y_dev[:,0], np.full((len(y_dev), len(labels)), 1/len(labels))]), batch_size=unsup_batch_size*gpus, callbacks=[tf.keras.callbacks.EarlyStopping(monitor='val_task_classifier_acc', patience=5, restore_best_weights=True)]) # class_weight=class_weight) tf.keras.backend.clear_session() if not os.path.exists(model_file): model.save_weights(model_file) logger.info ("Model file saved to {}".format(model_file)) model_student = model model_student.load_weights(model_file_best) if 'mtl' in type_: acc, y_pred = model_student.predict(X_test) y_pred = np.argmax(y_pred, axis=-1) acc = np.argmax(acc, axis=-1) #logger.info("Micro score (task): {}".format(precision_recall_fscore_support(acc, y_test[:,0], average='micro'))) elif 'joint' in type_: out = 
model_student.predict(X_test) acc, y_pred, r_acc = out[0], out[1], out[2] logger.info("Raw logits: {}".format(acc)) y_pred = np.argmax(y_pred, axis=-1) acc = np.argmax(acc, axis=-1) r_acc = np.argmax(r_acc, axis=-1) logger.info("Best task acc score: {}".format(precision_recall_fscore_support(acc, y_test[:,0], average='micro'))) logger.info("Best token acc score: {}".format(precision_recall_fscore_support(y_pred, y_test[:,1:], average='macro'))) pred, truth = [], [] #sys.exit(1) test_pred = y_pred #np.argmax(y_pred, axis=-1) logger.info("Printing prediction data on student model for run {}: {}".format(counter, test_pred)) tp, fn, fp = 0, 0, 0 pred_1, pred_0, truth_1, truth_0 = 0, 0, 0, 0 for i in range(len(test_pred)): temp_p, temp_t, ct = [],[], 0 temp = tokenizer.convert_ids_to_tokens(X_test["input_ids"][i])[1:] #logger.info("Test sample {}".format(temp)) for j in range(0,len(test_pred[0])-1): if test_pred[i][j] == 1: temp_p.append(temp[j]) if y_test[i][j+1] == 1: temp_t.append(temp[j]) pred_1 += test_pred[i].sum() pred_0+= max_seq_length-pred_1 truth_1 += y_test[i].sum() truth_0+= max_seq_length-truth_1 pred.append(' '.join(temp_p)) truth.append(' '.join(temp_t)) for word in temp_p: if word in temp_t: ct+=1 temp_t.remove(word) else: fp+=1 tp +=ct fn += (y_test[i].sum()-ct) p = tp/(tp+fp+0.0000001) r = tp/(tp+fn+0.0000001) logger.info("Token-level: {}".format((tp)/(tp+(0.5*(fp+fn))))) logger.info("Rationale coverage (recall): {}".format(r)) logger.info("Token Precision: {}".format(p)) logger.info("Token overlap: {}".format(tp/(tp+fp+fn))) score1, score2, score3, score4 = 0.0, 0.0, 0.0, 0.0 for i in range(len(pred)): score1 += nltk.translate.bleu_score.sentence_bleu([truth[i].split()],pred[i].split(), weights=(1, 0, 0, 0)) score2 += nltk.translate.bleu_score.sentence_bleu([truth[i].split()],pred[i].split(), weights=(0, 1, 0, 0)) score3 += nltk.translate.bleu_score.sentence_bleu([truth[i].split()],pred[i].split(), weights=(0, 0, 1, 0)) score4 += 
nltk.translate.bleu_score.sentence_bleu([truth[i].split()],pred[i].split(), weights=(0, 0, 0, 1)) logger.info("BLEU-1 score of rationales on test set (student model): {} ".format(score1/len(pred))) logger.info("BLEU-2 score of rationales on test set (student model): {} ".format(score2/len(pred))) logger.info("BLEU-3 score of rationales on test set (student model): {} ".format(score3/len(pred))) logger.info("BLEU-4 score of rationales on test set (student model): {} ".format(score4/len(pred))) data = [] for i in range(len(X_test["input_ids"])): text = tokenizer.decode(X_test["input_ids"][i]) temp = dict() temp['text'] = text temp['truth'] = truth[i] temp['pred'] = pred[i] temp['score'] = nltk.translate.bleu_score.sentence_bleu([truth[i].split()],pred[i].split()) data.append(temp) with open(os.path.join(model_dir, 'rationale_output_test_'+type_+'.json'), 'w') as f: json.dump(data, f) logger.info ("Best accuracy (task) across all self-training iterations {}".format(max_best_acc))
71.580737
695
0.588392
6,714
50,536
4.137772
0.057343
0.027933
0.030236
0.008351
0.798135
0.763363
0.745977
0.730751
0.709694
0.692632
0
0.020631
0.268185
50,536
705
696
71.68227
0.730552
0.060511
0
0.51756
0
0
0.143721
0.013798
0
0
0
0
0
1
0.014787
false
0
0.033272
0
0.060998
0.001848
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
6e5369edf54c07e650578673b8c2f9394123f3e2
127
py
Python
Generate_Key.py
ChenhaoJimmyZou/Encryption-For-Any-File
b7bcc8b99162f15f1c42cfec39539c13d7dfcbfc
[ "MIT" ]
1
2019-02-05T00:55:13.000Z
2019-02-05T00:55:13.000Z
Generate_Key.py
ChenhaoJimmyZou/Encryption-For-Any-File
b7bcc8b99162f15f1c42cfec39539c13d7dfcbfc
[ "MIT" ]
null
null
null
Generate_Key.py
ChenhaoJimmyZou/Encryption-For-Any-File
b7bcc8b99162f15f1c42cfec39539c13d7dfcbfc
[ "MIT" ]
null
null
null
from Crypto.Hash import SHA256 def getKey(passWord): hash = SHA256.new(passWord.encode('utf-8')) return hash.digest()
21.166667
47
0.708661
18
127
5
0.777778
0
0
0
0
0
0
0
0
0
0
0.065421
0.15748
127
5
48
25.4
0.775701
0
0
0
0
0
0.03937
0
0
0
0
0
0
1
0.25
false
0.5
0.25
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
5
284a831532fb16c77c72bdce0f6d75ecc321df9a
129
py
Python
miniboss/__init__.py
afroisalreadyinu/miniboss
7284a898d1d9c618af0fefc5bab98ad756c8cc3f
[ "MIT" ]
633
2020-07-23T21:27:05.000Z
2022-03-30T21:08:34.000Z
miniboss/__init__.py
afroisalreadyinu/miniboss
7284a898d1d9c618af0fefc5bab98ad756c8cc3f
[ "MIT" ]
7
2020-11-17T14:36:15.000Z
2022-03-30T10:07:25.000Z
miniboss/__init__.py
afroisalreadyinu/miniboss
7284a898d1d9c618af0fefc5bab98ad756c8cc3f
[ "MIT" ]
9
2022-03-03T08:37:33.000Z
2022-03-27T10:23:22.000Z
from .main import cli from .services import Service from .context import Context from .types import set_group_name as group_name
25.8
47
0.829457
21
129
4.952381
0.571429
0.173077
0
0
0
0
0
0
0
0
0
0
0.139535
129
4
48
32.25
0.936937
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
286944eb83f8291d1636b17171206c4cee1f3b47
161
py
Python
app/admin/__init__.py
sunshineinwater/flask-Purchase_and_sale
6fb845da59e4b25737b67d344cbcb4185e93958c
[ "MIT" ]
122
2019-04-09T03:21:31.000Z
2022-03-27T13:56:08.000Z
app/admin/__init__.py
zhuhaiv5/flask-Purchase_and_sale
6fb845da59e4b25737b67d344cbcb4185e93958c
[ "MIT" ]
15
2019-04-25T02:52:48.000Z
2021-12-19T09:35:45.000Z
app/admin/__init__.py
zhuhaiv5/flask-Purchase_and_sale
6fb845da59e4b25737b67d344cbcb4185e93958c
[ "MIT" ]
63
2019-04-08T08:25:48.000Z
2022-03-27T13:56:11.000Z
#-*- coding:utf-8 -*- # author:Agam # datetime:2018-11-05 from flask import Blueprint admin=Blueprint('admin',__name__) import app.admin.views
12.384615
34
0.658385
21
161
4.857143
0.809524
0.27451
0
0
0
0
0
0
0
0
0
0.069767
0.198758
161
12
35
13.416667
0.72093
0.322981
0
0
0
0
0.054945
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0.666667
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
1
0
5
286df201429ff8d199170cf101fe58d43a694181
89
py
Python
plots/model_explorer/app_hooks.py
ZviBaratz/pylabber
35337284f3d0615249f642743b993b7dad407390
[ "Apache-2.0" ]
3
2020-08-28T21:33:07.000Z
2021-07-19T17:52:17.000Z
plots/model_explorer/app_hooks.py
TheLabbingProject/pylabber
27d6073e7bde871c16912a8ea5e0e389711bbd9f
[ "Apache-2.0" ]
74
2019-09-04T11:40:16.000Z
2022-01-03T19:43:04.000Z
plots/series/series_viewer/app_hooks.py
ZviBaratz/pylabber
35337284f3d0615249f642743b993b7dad407390
[ "Apache-2.0" ]
3
2019-05-07T07:09:05.000Z
2019-08-30T15:40:47.000Z
from .setup import load_django def on_server_loaded(server_context): load_django()
14.833333
37
0.786517
13
89
5
0.769231
0.307692
0
0
0
0
0
0
0
0
0
0
0.146067
89
5
38
17.8
0.855263
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
0
0
0
5
955f3ff25ffc82f27b6802acf9455a2a07624ed9
45
py
Python
Lib/test/test_compiler/testcorpus/04_assign.py
diogommartins/cinder
79103e9119cbecef3b085ccf2878f00c26e1d175
[ "CNRI-Python-GPL-Compatible" ]
1,886
2021-05-03T23:58:43.000Z
2022-03-31T19:15:58.000Z
Lib/test/test_compiler/testcorpus/04_assign.py
diogommartins/cinder
79103e9119cbecef3b085ccf2878f00c26e1d175
[ "CNRI-Python-GPL-Compatible" ]
70
2021-05-04T23:25:35.000Z
2022-03-31T18:42:08.000Z
Lib/test/test_compiler/testcorpus/04_assign.py
diogommartins/cinder
79103e9119cbecef3b085ccf2878f00c26e1d175
[ "CNRI-Python-GPL-Compatible" ]
52
2021-05-04T21:26:03.000Z
2022-03-08T18:02:56.000Z
a = 1 b = "foo" c = (d, e) di = {f: 1, g: 2}
9
17
0.333333
12
45
1.25
0.916667
0
0
0
0
0
0
0
0
0
0
0.103448
0.355556
45
4
18
11.25
0.413793
0
0
0
0
0
0.066667
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
9592aef7c55ffd0676beb11ace497389e4aefc4e
114
py
Python
universalwrapper/__init__.py
Basdbruijne/UniversalWrapper
c910377210eb3adefca46bffa0a2b4f7bf9f831e
[ "MIT" ]
4
2021-11-21T18:16:08.000Z
2022-01-21T19:38:50.000Z
universalwrapper/__init__.py
Basdbruijne/UniversalWrapper
c910377210eb3adefca46bffa0a2b4f7bf9f831e
[ "MIT" ]
null
null
null
universalwrapper/__init__.py
Basdbruijne/UniversalWrapper
c910377210eb3adefca46bffa0a2b4f7bf9f831e
[ "MIT" ]
null
null
null
import sys import universalwrapper.universal_wrapper as universalwrapper sys.modules[__name__] = universalwrapper
28.5
61
0.877193
12
114
7.916667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.078947
114
3
62
38
0.904762
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
2517e785e0013d0d462dd517997c1ccce659bf13
747
py
Python
crabageprediction/venv/Lib/site-packages/fontTools/ttLib/tables/_c_i_d_g.py
13rianlucero/CrabAgePrediction
92bc7fbe1040f49e820473e33cc3902a5a7177c7
[ "MIT" ]
2,705
2016-09-27T10:02:12.000Z
2022-03-31T09:37:46.000Z
crabageprediction/venv/Lib/site-packages/fontTools/ttLib/tables/_c_i_d_g.py
13rianlucero/CrabAgePrediction
92bc7fbe1040f49e820473e33cc3902a5a7177c7
[ "MIT" ]
1,599
2016-09-27T09:07:36.000Z
2022-03-31T23:04:51.000Z
crabageprediction/venv/Lib/site-packages/fontTools/ttLib/tables/_c_i_d_g.py
13rianlucero/CrabAgePrediction
92bc7fbe1040f49e820473e33cc3902a5a7177c7
[ "MIT" ]
352
2016-10-07T04:18:15.000Z
2022-03-30T07:35:01.000Z
# coding: utf-8 from .otBase import BaseTTXConverter class table__c_i_d_g(BaseTTXConverter): """The AAT ``cidg`` table has almost the same structure as ``gidc``, just mapping CIDs to GlyphIDs instead of the reverse direction. It is useful for fonts that may be used by a PDF renderer in lieu of a font reference with a known glyph collection but no subsetted glyphs. For instance, a PDF can say “please use a font conforming to Adobe-Japan-1”; the ``cidg`` mapping is necessary if the font is, say, a TrueType font. ``gidc`` is lossy for this purpose and is obsoleted by ``cidg``. For example, the first font in ``/System/Library/Fonts/PingFang.ttc`` (which Apple ships pre-installed on MacOS 10.12.6) has a ``cidg`` table. """ pass
37.35
72
0.738956
128
747
4.273438
0.703125
0.032907
0
0
0
0
0
0
0
0
0
0.011327
0.172691
747
19
73
39.315789
0.873786
0.864793
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
5
251a99fcffb12dfb16f9c045fd9e8a3355b541c6
32
py
Python
python_meteorologist/forecast/__init__.py
AlertingAvian/python-meteorologist
904089f4062e875148cbe5abc8fe4f7ff1d6a524
[ "MIT" ]
null
null
null
python_meteorologist/forecast/__init__.py
AlertingAvian/python-meteorologist
904089f4062e875148cbe5abc8fe4f7ff1d6a524
[ "MIT" ]
null
null
null
python_meteorologist/forecast/__init__.py
AlertingAvian/python-meteorologist
904089f4062e875148cbe5abc8fe4f7ff1d6a524
[ "MIT" ]
null
null
null
from .forecast import Forecaster
32
32
0.875
4
32
7
1
0
0
0
0
0
0
0
0
0
0
0
0.09375
32
1
32
32
0.965517
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
256b888419b6e4a2d2951b298905cbb524a6dbcc
14
py
Python
yesno.py
wdymm/pymysql
02cefd05070a7e72010cf4892116b24015ac6214
[ "MIT" ]
null
null
null
yesno.py
wdymm/pymysql
02cefd05070a7e72010cf4892116b24015ac6214
[ "MIT" ]
null
null
null
yesno.py
wdymm/pymysql
02cefd05070a7e72010cf4892116b24015ac6214
[ "MIT" ]
null
null
null
print('yesno')
14
14
0.714286
2
14
5
1
0
0
0
0
0
0
0
0
0
0
0
0
14
1
14
14
0.714286
0
0
0
0
0
0.333333
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
c24f3673894f6aa9153feff785cc3831e03b4f1e
168
py
Python
users/urls.py
LeonardoCruzx/Projeto_rede_social
5f6fc9fe4d1fbc1cfea696a4ff1d19fd98697bc0
[ "CC0-1.0" ]
null
null
null
users/urls.py
LeonardoCruzx/Projeto_rede_social
5f6fc9fe4d1fbc1cfea696a4ff1d19fd98697bc0
[ "CC0-1.0" ]
7
2020-06-05T20:31:01.000Z
2021-09-22T18:22:45.000Z
users/urls.py
LeonardoCruzx/Projeto_rede_social
5f6fc9fe4d1fbc1cfea696a4ff1d19fd98697bc0
[ "CC0-1.0" ]
null
null
null
from django.urls import path from .views import * app_name = 'users' urlpatterns = [ path('pagina-inicial',pagina_inicial_usuario,name='pagina-inicial-usuario') ]
21
79
0.75
22
168
5.590909
0.590909
0.317073
0.325203
0
0
0
0
0
0
0
0
0
0.125
168
8
80
21
0.836735
0
0
0
0
0
0.242604
0.130178
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
c25095ea92087e77b0d495d56c81b4155c8a433e
66
py
Python
CodingBat/Warmup-1/makes10.py
arthxvr/coding--python
1e91707be6cb8fef816dad0c1a65f2cc3327357e
[ "MIT" ]
null
null
null
CodingBat/Warmup-1/makes10.py
arthxvr/coding--python
1e91707be6cb8fef816dad0c1a65f2cc3327357e
[ "MIT" ]
null
null
null
CodingBat/Warmup-1/makes10.py
arthxvr/coding--python
1e91707be6cb8fef816dad0c1a65f2cc3327357e
[ "MIT" ]
null
null
null
def makes10(a, b): return (a == 10 or b == 10 or a + b == 10)
22
46
0.484848
14
66
2.285714
0.5
0.125
0
0
0
0
0
0
0
0
0
0.177778
0.318182
66
2
47
33
0.533333
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
c25ed373eb62ca3f1dfd13461c8bf699262611bf
165
py
Python
generate_secretkey.py
Saknowman/django_setting_sample_project
3f11103a57628190ed4ab480cba39d4474847040
[ "MIT" ]
null
null
null
generate_secretkey.py
Saknowman/django_setting_sample_project
3f11103a57628190ed4ab480cba39d4474847040
[ "MIT" ]
7
2020-06-06T00:28:14.000Z
2022-02-10T11:03:44.000Z
generate_secretkey.py
Saknowman/django_setting_sample_project
3f11103a57628190ed4ab480cba39d4474847040
[ "MIT" ]
null
null
null
from django.core.management.utils import get_random_secret_key secret_key = get_random_secret_key() text = 'SECRET_KEY = \'{0}\''.format(secret_key) print(text)
33
63
0.769697
25
165
4.72
0.56
0.381356
0.254237
0.305085
0
0
0
0
0
0
0
0.006757
0.10303
165
5
64
33
0.790541
0
0
0
0
0
0.08642
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0.25
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
c2701ec268373038ac918593263fde9296fc7381
30
py
Python
__init__.py
rowanc1/Seismogram
4c68e1d3f20cb5f45e6ed64e42af150efea4a181
[ "MIT" ]
3
2019-10-14T12:36:19.000Z
2022-03-20T04:53:09.000Z
__init__.py
rowanc1/Seismogram
4c68e1d3f20cb5f45e6ed64e42af150efea4a181
[ "MIT" ]
null
null
null
__init__.py
rowanc1/Seismogram
4c68e1d3f20cb5f45e6ed64e42af150efea4a181
[ "MIT" ]
4
2015-07-26T17:19:14.000Z
2022-01-04T22:08:45.000Z
import Layers import Wavelets
10
15
0.866667
4
30
6.5
0.75
0
0
0
0
0
0
0
0
0
0
0
0.133333
30
2
16
15
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
c27de3a5621d8d42972e9487683ee8274232c541
49
py
Python
dephell/__main__.py
OliverHofkens/dephell
6303f416018910668f1635b70cd828a2fd2b2d9e
[ "MIT" ]
1,880
2019-03-21T10:08:25.000Z
2022-03-31T12:41:55.000Z
dephell/__main__.py
rachmadaniHaryono/dephell
0ef500c8f2d5f05244bac191b1b1383f68464cd2
[ "MIT" ]
356
2019-03-21T19:08:56.000Z
2021-01-08T17:45:43.000Z
dephell/__main__.py
rachmadaniHaryono/dephell
0ef500c8f2d5f05244bac191b1b1383f68464cd2
[ "MIT" ]
157
2019-04-23T01:13:37.000Z
2022-03-24T22:41:18.000Z
# app from .cli import entrypoint entrypoint()
8.166667
27
0.734694
6
49
6
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.183673
49
5
28
9.8
0.9
0.061224
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
6c1e5496d4122ac7bea8b3c30b72333302a8dd0a
67
py
Python
src/xbrief/margin/matrix_margin/__init__.py
pydget/xbrief
9e91927a98754b0fca1fa55eae9a785b15e963f9
[ "MIT" ]
null
null
null
src/xbrief/margin/matrix_margin/__init__.py
pydget/xbrief
9e91927a98754b0fca1fa55eae9a785b15e963f9
[ "MIT" ]
null
null
null
src/xbrief/margin/matrix_margin/__init__.py
pydget/xbrief
9e91927a98754b0fca1fa55eae9a785b15e963f9
[ "MIT" ]
null
null
null
from .matrix_margin import MatrixMargin from .sizing import sizing
22.333333
39
0.850746
9
67
6.222222
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.119403
67
2
40
33.5
0.949153
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
6c253e039973a0b6c5003eeb5416d3267eaf47a6
70
py
Python
functions/Melody.py
Skentir/CSC617M
8fe21fa449d51571f3368fd8323b04e7c782aebc
[ "MIT" ]
null
null
null
functions/Melody.py
Skentir/CSC617M
8fe21fa449d51571f3368fd8323b04e7c782aebc
[ "MIT" ]
null
null
null
functions/Melody.py
Skentir/CSC617M
8fe21fa449d51571f3368fd8323b04e7c782aebc
[ "MIT" ]
null
null
null
class Melody(): def __init__(self, notes): self.notes = []
23.333333
30
0.571429
8
70
4.5
0.75
0.5
0
0
0
0
0
0
0
0
0
0
0.271429
70
3
31
23.333333
0.705882
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
6c783caed3bed76188c4a9f67b8c3ed37bb4c856
117
py
Python
green/__init__.py
dtan3847/green
b021810ba2484b2c280e972af19a6ba8c4764dc4
[ "MIT" ]
null
null
null
green/__init__.py
dtan3847/green
b021810ba2484b2c280e972af19a6ba8c4764dc4
[ "MIT" ]
null
null
null
green/__init__.py
dtan3847/green
b021810ba2484b2c280e972af19a6ba8c4764dc4
[ "MIT" ]
null
null
null
from __future__ import unicode_literals from .cmdline import main from .version import __version__ main __version__
16.714286
39
0.854701
15
117
5.8
0.533333
0
0
0
0
0
0
0
0
0
0
0
0.128205
117
6
40
19.5
0.852941
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.6
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
66a69b439c8c7c9dc1480c0b52f680de3d092b4a
167
py
Python
nextcode/services/phenotype/__init__.py
Haffi/nextcode-python-sdk
b70baa848cb6326fb0e7ee0e4167c41dcc45e085
[ "MIT" ]
7
2019-10-23T17:22:50.000Z
2021-04-17T21:44:28.000Z
nextcode/services/phenotype/__init__.py
Haffi/nextcode-python-sdk
b70baa848cb6326fb0e7ee0e4167c41dcc45e085
[ "MIT" ]
8
2019-11-07T16:41:01.000Z
2021-09-13T14:33:28.000Z
nextcode/services/phenotype/__init__.py
Haffi/nextcode-python-sdk
b70baa848cb6326fb0e7ee0e4167c41dcc45e085
[ "MIT" ]
4
2019-11-08T13:59:55.000Z
2021-11-07T13:49:21.000Z
""" Service class ------------------ Service object for interfacing with the Phenotype Archive API """ from .phenotype import Phenotype from .service import Service
16.7
61
0.700599
19
167
6.157895
0.631579
0
0
0
0
0
0
0
0
0
0
0
0.143713
167
9
62
18.555556
0.818182
0.562874
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
66e6890d586dbb985b05c7f3916d0cd48bc524b2
159
py
Python
build/lib.macosx-10.9-x86_64-3.9/gators/encoders/tests/test_base_encoder.py
Aditya-Kapadiya/gators
d7c9967e3a8e304a601b6a92ad834d03d3e36338
[ "Apache-2.0" ]
4
2021-10-29T18:20:52.000Z
2022-03-31T22:53:03.000Z
build/lib.macosx-10.9-x86_64-3.9/gators/encoders/tests/test_base_encoder.py
Aditya-Kapadiya/gators
d7c9967e3a8e304a601b6a92ad834d03d3e36338
[ "Apache-2.0" ]
1
2022-01-19T12:16:19.000Z
2022-01-19T12:16:19.000Z
build/lib.macosx-10.9-x86_64-3.9/gators/encoders/tests/test_base_encoder.py
Aditya-Kapadiya/gators
d7c9967e3a8e304a601b6a92ad834d03d3e36338
[ "Apache-2.0" ]
5
2021-11-17T20:16:54.000Z
2022-02-21T18:21:02.000Z
# License: Apache-2.0 from gators.encoders import WOEEncoder import pytest def test_init(): with pytest.raises(TypeError): WOEEncoder(dtype=str)
17.666667
38
0.72956
21
159
5.47619
0.857143
0
0
0
0
0
0
0
0
0
0
0.015267
0.176101
159
8
39
19.875
0.862595
0.119497
0
0
0
0
0
0
0
0
0
0
0
1
0.2
true
0
0.4
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
dd17d66bd12b06082736bf5ca13108c38907863e
459
py
Python
message_type.py
VladislavKorecky/pylogs
796336a3ce6056f06dcfd578779daaed0e88711a
[ "MIT" ]
null
null
null
message_type.py
VladislavKorecky/pylogs
796336a3ce6056f06dcfd578779daaed0e88711a
[ "MIT" ]
null
null
null
message_type.py
VladislavKorecky/pylogs
796336a3ce6056f06dcfd578779daaed0e88711a
[ "MIT" ]
null
null
null
from abc import ABC class MessageType(ABC): """ Interface for message types. """ def get_name(self): """ Return the name of the message type. Returns: str: The name of the message type. """ pass def get_color_code(self): """ Return the color code of the message type. Returns: str: The color code of the message type. """ pass
16.392857
52
0.51634
54
459
4.333333
0.407407
0.08547
0.205128
0.273504
0.555556
0.521368
0.405983
0
0
0
0
0
0.405229
459
27
53
17
0.857143
0.461874
0
0.333333
0
0
0
0
0
0
0
0
0
1
0.333333
false
0.333333
0.166667
0
0.666667
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
5
dd598c679fced89abfde6e7b7f988312d99cd6e5
34
py
Python
mlmo/interfaces/base_i_prod.py
prashantlv/mltoolkit
acc192bafc66b7661d541ef4f604b5e5ab7df5ca
[ "MIT" ]
1
2020-10-03T05:23:31.000Z
2020-10-03T05:23:31.000Z
mlmo/interfaces/base_i_prod.py
prashantlv/mltoolkit
acc192bafc66b7661d541ef4f604b5e5ab7df5ca
[ "MIT" ]
null
null
null
mlmo/interfaces/base_i_prod.py
prashantlv/mltoolkit
acc192bafc66b7661d541ef4f604b5e5ab7df5ca
[ "MIT" ]
null
null
null
class BaseIProd(object): pass
11.333333
24
0.705882
4
34
6
1
0
0
0
0
0
0
0
0
0
0
0
0.205882
34
2
25
17
0.888889
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
dd7cba20ae2ab0c3ec5cd45eb59d263718632848
61
py
Python
pcep/prac_2.py
gliverm/devnet-study-group
28aecef8207cfeb8f10dc375c22e5ec953d6762b
[ "MIT" ]
1
2020-07-30T15:23:55.000Z
2020-07-30T15:23:55.000Z
pcep/prac_2.py
gliverm/devnet-study-group
28aecef8207cfeb8f10dc375c22e5ec953d6762b
[ "MIT" ]
null
null
null
pcep/prac_2.py
gliverm/devnet-study-group
28aecef8207cfeb8f10dc375c22e5ec953d6762b
[ "MIT" ]
null
null
null
def fun(inp=2, out=3): return inp * out print(fun(out=2))
20.333333
22
0.622951
13
61
2.923077
0.615385
0
0
0
0
0
0
0
0
0
0
0.06
0.180328
61
3
23
20.333333
0.7
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0.333333
0.666667
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
b096d2539ed93e5bae93fa3ce74409e915eccc76
57
py
Python
dcbase/tests/unit/__init__.py
tctimmeh/dc-django-base
08e444387c7ce4896343ac7e61444bbd7f98b4a6
[ "MIT" ]
null
null
null
dcbase/tests/unit/__init__.py
tctimmeh/dc-django-base
08e444387c7ce4896343ac7e61444bbd7f98b4a6
[ "MIT" ]
13
2015-02-16T17:13:34.000Z
2015-03-07T04:59:28.000Z
dcbase/tests/unit/__init__.py
tctimmeh/dc-django-base
08e444387c7ce4896343ac7e61444bbd7f98b4a6
[ "MIT" ]
null
null
null
from dcbase.tests.unit.unitTestCase import UnitTestCase
19
55
0.859649
7
57
7
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.087719
57
2
56
28.5
0.942308
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
b0b3f400ae86e8ffe2156c92a4d7974acfd1b347
219
py
Python
Day 24/Core Team/python.py
ChetasShree/MarchCode
80ee6206c0e4481b4421a83c7b7b7fc977450009
[ "MIT" ]
9
2021-03-02T12:16:24.000Z
2021-03-26T11:06:08.000Z
Day 24/Core Team/python.py
ChetasShree/MarchCode
80ee6206c0e4481b4421a83c7b7b7fc977450009
[ "MIT" ]
65
2021-03-02T04:57:47.000Z
2021-04-02T19:31:30.000Z
Day 24/Core Team/python.py
ChetasShree/MarchCode
80ee6206c0e4481b4421a83c7b7b7fc977450009
[ "MIT" ]
94
2021-03-02T04:42:28.000Z
2021-06-28T10:38:20.000Z
arr = [ 5,3,5,2,41,4,3,1,4,4 ] for i in range(10): if (arr[i]!= -1): for j in range (i+1, 10): if (arr[i] == arr[j]): arr[j] = -1 print(arr[i], end = ' ')
27.375
34
0.347032
39
219
1.948718
0.410256
0.157895
0.184211
0.210526
0
0
0
0
0
0
0
0.144
0.429224
219
8
35
27.375
0.464
0
0
0
0
0
0.004545
0
0
0
0
0
0
1
0
false
0
0
0
0
0.142857
0
0
1
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
b0b6e14627d6bea77880c44621f700bee6ffc1c7
11,258
py
Python
authors/apps/articles/tests/test_highlight_text.py
andela/ah-django-unchained
a4e5f6cd11fdc0b9422020693ac1200b849cf0f3
[ "BSD-3-Clause" ]
null
null
null
authors/apps/articles/tests/test_highlight_text.py
andela/ah-django-unchained
a4e5f6cd11fdc0b9422020693ac1200b849cf0f3
[ "BSD-3-Clause" ]
26
2019-01-07T14:22:05.000Z
2019-02-28T17:11:48.000Z
authors/apps/articles/tests/test_highlight_text.py
andela/ah-django-unchained
a4e5f6cd11fdc0b9422020693ac1200b849cf0f3
[ "BSD-3-Clause" ]
3
2019-09-19T22:16:09.000Z
2019-10-16T21:16:16.000Z
import json from django.urls import reverse from rest_framework.views import status from rest_framework.test import APITestCase, APIClient class CommentsTestCase(APITestCase): def setUp(self): self.client = APIClient() self.signup_url = reverse('authentication:auth-register') self.create_article_url = reverse('articles:articles-listcreate') self.user_two_details = { "user": { "username": "andela", "email": "andela@andela.com", "password": "Password@123" }} self.create_article_data = { "title": "Programming Languages", "body": "There are variety of programming languagr", "description": "Programming", "tagList": ["Programming", "language", "python"] } self.highlighted_text = { "comment": { "body": "Good work here!!", "start_highlight_position": 2, "end_highlight_position": 15 }} self.selection_start_index_larger_than_end_index = { "comment": { "body": "Good work here!!", "start_highlight_position": 28, "end_highlight_position": 15 }} self.invalid_index_datatype = { "comment": { "body": "Good work here!!", "start_highlight_position": "one", "end_highlight_position": 15 }} self.missing_field = { "comment": { "body": "Good work here!!", "end_highlight_position": 15 }} self.update_comment = { "comment": { "body": "Nice Idea" }} def register_user(self, user_details): """Sign up a new user to get a token""" register = self.client.post(self.signup_url, user_details, format='json') token = register.data["token"] return token def create_article(self, token): """Create an article.""" response = self.client.post( self.create_article_url, self.create_article_data, format='json', HTTP_AUTHORIZATION='token {}'.format(token)) slug = response.data['slug'] return slug def test_comment_highlighted_text(self): """Test comment highlighted text.""" token = self.register_user(self.user_two_details) slug = self.create_article(token) response = self.client.post( reverse('articles:high_light', kwargs={'slug': slug}), self.highlighted_text, HTTP_AUTHORIZATION='token {}'.format(token), 
format='json') self.assertIn('selected_text', response.data) self.assertEqual(response.status_code, status.HTTP_201_CREATED) def test_rejects_start_index_larger_than_end_index(self): """Test rejects start index larger than end index.""" token = self.register_user(self.user_two_details) slug = self.create_article(token) response = self.client.post( reverse('articles:high_light', kwargs={'slug': slug}), self.selection_start_index_larger_than_end_index, HTTP_AUTHORIZATION='token {}'.format(token), format='json') self.assertEqual(response.data['error'], 'The start_index_position should not ' 'be greater or equal end_index_position') self.assertEqual(response.status_code, status.HTTP_200_OK) def test_rejects_invalid_types_for_highlight_index(self): """Test rejects index data type that are not integers.""" token = self.register_user(self.user_two_details) slug = self.create_article(token) response = self.client.post( reverse('articles:high_light', kwargs={'slug': slug}), self.invalid_index_datatype, HTTP_AUTHORIZATION='token {}'.format(token), format='json') self.assertEqual(response.data['error'], 'Start of highlight and end of highlight' ' indices should be both integers') self.assertEqual(response.status_code, status.HTTP_422_UNPROCESSABLE_ENTITY) def test_rejects_missing_required_field(self): """Test for missing field.""" token = self.register_user(self.user_two_details) slug = self.create_article(token) response = self.client.post( reverse('articles:high_light', kwargs={'slug': slug}), self.missing_field, HTTP_AUTHORIZATION='token {}'.format(token), format='json') self.assertEqual(response.data['error'], 'start_highlight_position is required') self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_get_all_comments(self): """Test get all comments.""" token = self.register_user(self.user_two_details) # create an article response = self.client.post( self.create_article_url, self.create_article_data, format='json', HTTP_AUTHORIZATION='token 
{}'.format(token)) slug = response.data['slug'] # highlight a text and comment it self.client.post( reverse('articles:high_light', kwargs={'slug': slug}), self.highlighted_text, HTTP_AUTHORIZATION='token {}'.format(token), format='json') # get all comments response = self.client.get( reverse('articles:high_light', kwargs={'slug': slug}), format='json') response_data = json.loads(json.dumps(response.data)) self.assertIn('selected_text', response_data[0]) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_get_single_comments(self): """Test get single comments.""" token = self.register_user(self.user_two_details) # create an article response = self.client.post( self.create_article_url, self.create_article_data, format='json', HTTP_AUTHORIZATION='token {}'.format(token)) slug = response.data['slug'] # highlight a text and comment it response = self.client.post( reverse('articles:high_light', kwargs={'slug': slug}), self.highlighted_text, HTTP_AUTHORIZATION='token {}'.format(token), format='json') # get single comment article_id = response.data['id'] response = self.client.get( '/api/articles/{}/highlight/{}'.format(slug, article_id), format='json') response_data = json.loads(json.dumps(response.data)) self.assertIn('selected_text', response_data) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_delete_single_comments(self): """Test delete single comments.""" token = self.register_user(self.user_two_details) # create an article response = self.client.post( self.create_article_url, self.create_article_data, format='json', HTTP_AUTHORIZATION='token {}'.format(token)) slug = response.data['slug'] # highlight a text and comment it response = self.client.post( reverse('articles:high_light', kwargs={'slug': slug}), self.highlighted_text, HTTP_AUTHORIZATION='token {}'.format(token), format='json') # delete single comment article_id = response.data['id'] response = self.client.delete( '/api/articles/{}/highlight/{}'.format(slug, article_id), 
HTTP_AUTHORIZATION='token {}'.format(token), format='json') response_data = json.loads(json.dumps(response.data)) self.assertEqual(response.data['message'], 'Comment on highlighted text deleted successfully') self.assertEqual(response.status_code, status.HTTP_200_OK) def test_update_single_comments(self): """Test update single comment.""" token = self.register_user(self.user_two_details) # create an article response = self.client.post( self.create_article_url, self.create_article_data, format='json', HTTP_AUTHORIZATION='token {}'.format(token)) slug = response.data['slug'] # highlight a text and comment on it response = self.client.post( reverse('articles:high_light', kwargs={'slug': slug}), self.highlighted_text, HTTP_AUTHORIZATION='token {}'.format(token), format='json') article_id = response.data['id'] # update the comment response = self.client.put( '/api/articles/{}/highlight/{}'.format(slug, article_id), self.update_comment, HTTP_AUTHORIZATION='token {}'.format(token), format='json') response_data = json.loads(json.dumps(response.data)) self.assertIn('selected_text', response_data) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_update_unexisting_comments(self): """Test update unexisting comment.""" token = self.register_user(self.user_two_details) # create an article response = self.client.post( self.create_article_url, self.create_article_data, format='json', HTTP_AUTHORIZATION='token {}'.format(token)) slug = response.data['slug'] # update the comment response = self.client.put( '/api/articles/{}/highlight/{}'.format(slug, 2), self.update_comment, HTTP_AUTHORIZATION='token {}'.format(token), format='json') response_data = json.loads(json.dumps(response.data)) self.assertEqual(response.data['error'], 'The comment does not exist') self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_get_delete_unexisting_comments(self): """Delete unexisting comment""" token = self.register_user(self.user_two_details) # create an 
article response = self.client.post( self.create_article_url, self.create_article_data, format='json', HTTP_AUTHORIZATION='token {}'.format(token)) slug = response.data['slug'] # update the comment response = self.client.delete( '/api/articles/{}/highlight/{}'.format(slug, 2), self.update_comment, HTTP_AUTHORIZATION='token {}'.format(token), format='json') response_data = json.loads(json.dumps(response.data)) self.assertEqual(response.data["error"], "The comment does not exist") self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
38.955017
78
0.587227
1,168
11,258
5.451199
0.125856
0.060311
0.0534
0.083556
0.766452
0.743521
0.74305
0.714622
0.668918
0.662321
0
0.005963
0.299876
11,258
288
79
39.090278
0.801827
0.064487
0
0.70852
0
0
0.14615
0.036824
0
0
0
0
0.089686
1
0.058296
false
0.004484
0.017937
0
0.089686
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
b0beecd27f3b1cfd40337e7b10668aadc2e4ef93
74
py
Python
app/controllers/__init__.py
jattoabdul/vanhack-cms
ab2cb054e35765531833afd98051027d891baf10
[ "MIT" ]
15
2018-11-06T03:01:55.000Z
2020-10-17T04:24:54.000Z
app/controllers/__init__.py
jattoabdul/vanhack-cms
ab2cb054e35765531833afd98051027d891baf10
[ "MIT" ]
2
2019-05-21T08:44:29.000Z
2021-04-30T20:46:08.000Z
app/controllers/__init__.py
Maxcutex/web_scraping
3e14398285b6eeb72163683ef4a52caaa1cdf327
[ "MIT" ]
1
2018-11-06T10:48:49.000Z
2018-11-06T10:48:49.000Z
'''The controllers package''' from .base_controller import BaseController
24.666667
43
0.810811
8
74
7.375
1
0
0
0
0
0
0
0
0
0
0
0
0.094595
74
2
44
37
0.880597
0.310811
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
b0ce4a9723d6d9ecac90e2da90b8bbd7a6856793
138
py
Python
usl_score/models/__init__.py
vitouphy/usl_dialogue_metric
98b23f9727ad828bd1eda3141effeec47ea56cec
[ "MIT" ]
5
2020-11-11T02:33:20.000Z
2021-12-22T02:56:28.000Z
usl_score/models/__init__.py
vitouphy/usl_dialogue_metric
98b23f9727ad828bd1eda3141effeec47ea56cec
[ "MIT" ]
1
2021-08-24T13:18:16.000Z
2021-11-14T11:59:12.000Z
usl_score/models/__init__.py
vitouphy/usl_dialogue_metric
98b23f9727ad828bd1eda3141effeec47ea56cec
[ "MIT" ]
null
null
null
name="models" from .VUPScorer import * from .NUPScorer import * from .MLMScorer import * from .distinct import * from .composite import *
19.714286
24
0.753623
17
138
6.117647
0.529412
0.384615
0
0
0
0
0
0
0
0
0
0
0.152174
138
6
25
23
0.888889
0
0
0
0
0
0.043478
0
0
0
0
0
0
1
0
false
0
0.833333
0
0.833333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
b0e64d619d39c2425211a034a22937f028a50ba8
231
py
Python
Source/Chapter5/Linear.py
irmoralesb/MLForDevsBook
4e990d720ef5888525d09d2e27e37a4db21a75db
[ "Unlicense" ]
null
null
null
Source/Chapter5/Linear.py
irmoralesb/MLForDevsBook
4e990d720ef5888525d09d2e27e37a4db21a75db
[ "Unlicense" ]
null
null
null
Source/Chapter5/Linear.py
irmoralesb/MLForDevsBook
4e990d720ef5888525d09d2e27e37a4db21a75db
[ "Unlicense" ]
null
null
null
from Chapter5.TransferFunction import TransferFunction import numpy as np class Linear(TransferFunction): def getTransferFunction(x): return x def getTransferFunctionDerivative(x): return np.ones(len(x))
21
54
0.74026
25
231
6.84
0.64
0.25731
0
0
0
0
0
0
0
0
0
0.005376
0.194805
231
10
55
23.1
0.913978
0
0
0
0
0
0
0
0
0
0
0
0
1
0.285714
false
0
0.285714
0.285714
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
b003dfc344efcd0f95aabe690fd4038de044bb89
317
py
Python
generic_iterative_stemmer/training/stemming/__init__.py
asaf-kali/generic-iterative-stemmer
2d2e031ecab71cf04e6756e8062cb1c72feb6d14
[ "MIT" ]
null
null
null
generic_iterative_stemmer/training/stemming/__init__.py
asaf-kali/generic-iterative-stemmer
2d2e031ecab71cf04e6756e8062cb1c72feb6d14
[ "MIT" ]
13
2021-12-22T06:56:30.000Z
2022-01-30T20:39:40.000Z
generic_iterative_stemmer/training/stemming/__init__.py
asaf-kali/generic-iterative-stemmer
2d2e031ecab71cf04e6756e8062cb1c72feb6d14
[ "MIT" ]
null
null
null
from .stem_generator import StemDict, StemGenerator, reduce_stem_dict # noqa from .stemming_trainer import StemmingTrainer, get_stats_path # noqa from .corpus_stemmer import * # noqa from .ft_stemming_trainer import FastTextStemmingTrainer # noqa from .w2v_stemming_trainer import Word2VecStemmingTrainer # noqa
52.833333
77
0.835962
38
317
6.684211
0.552632
0.125984
0.248032
0
0
0
0
0
0
0
0
0.007168
0.119874
317
5
78
63.4
0.903226
0.07571
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
b00ab3e4b382e166b16fab341e3c2642352dff1d
61
py
Python
aas_core_codegen/csharp/__init__.py
gillistephan/aas-core-codegen
5b89ea2ee35aecaca9a1bed7ac81d420cc560f29
[ "MIT" ]
5
2021-12-29T12:55:34.000Z
2022-03-01T17:57:21.000Z
aas_core_codegen/csharp/__init__.py
gillistephan/aas-core-codegen
5b89ea2ee35aecaca9a1bed7ac81d420cc560f29
[ "MIT" ]
10
2021-12-29T02:15:55.000Z
2022-03-09T11:04:22.000Z
aas_core_codegen/csharp/__init__.py
aas-core-works/aas-core-csharp-codegen
731f706e2d12bf80722ac55d920fcf5402fb26ef
[ "MIT" ]
2
2021-12-29T01:42:12.000Z
2022-02-15T13:46:33.000Z
"""Generate C# code based on the intermediate meta-model."""
30.5
60
0.721311
9
61
4.888889
1
0
0
0
0
0
0
0
0
0
0
0
0.131148
61
1
61
61
0.830189
0.885246
0
null
1
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
b059c4dc0ef23cd58450a140ceebca8290b19a23
90
py
Python
application/pages/dialog_template/__init__.py
slamer59/awesome-panel
91c30bd6d6859eadf9c65b1e143952f7e64d5290
[ "Apache-2.0" ]
null
null
null
application/pages/dialog_template/__init__.py
slamer59/awesome-panel
91c30bd6d6859eadf9c65b1e143952f7e64d5290
[ "Apache-2.0" ]
null
null
null
application/pages/dialog_template/__init__.py
slamer59/awesome-panel
91c30bd6d6859eadf9c65b1e143952f7e64d5290
[ "Apache-2.0" ]
null
null
null
"""Provides a servable view of a Panel application with a dialog""" from .app import view
30
67
0.755556
15
90
4.533333
0.8
0
0
0
0
0
0
0
0
0
0
0
0.166667
90
2
68
45
0.906667
0.677778
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5