hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
56942573d83bde7e18db61497056cad0c7d4c761
211
py
Python
src/limecore/messaging/api/subscriber.py
limecore/messaging-api
40f9659f6b30bfd9fe3c4b9b85b7310e02345a2e
[ "MIT" ]
null
null
null
src/limecore/messaging/api/subscriber.py
limecore/messaging-api
40f9659f6b30bfd9fe3c4b9b85b7310e02345a2e
[ "MIT" ]
null
null
null
src/limecore/messaging/api/subscriber.py
limecore/messaging-api
40f9659f6b30bfd9fe3c4b9b85b7310e02345a2e
[ "MIT" ]
null
null
null
from .subscription import Subscription class Subscriber: def add_subscription(self, subscription: Subscription): raise NotImplementedError() def run(self): raise NotImplementedError()
21.1
59
0.729858
19
211
8.052632
0.578947
0.313725
0
0
0
0
0
0
0
0
0
0
0.203791
211
9
60
23.444444
0.910714
0
0
0.333333
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.166667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
3b1d677b96e6b79d31a91f9b24d20583cef9255a
41
py
Python
examples/str.rfind/ex2.py
mcorne/python-by-example
15339c0909c84b51075587a6a66391100971c033
[ "MIT" ]
null
null
null
examples/str.rfind/ex2.py
mcorne/python-by-example
15339c0909c84b51075587a6a66391100971c033
[ "MIT" ]
null
null
null
examples/str.rfind/ex2.py
mcorne/python-by-example
15339c0909c84b51075587a6a66391100971c033
[ "MIT" ]
null
null
null
print('Looking for o'.rfind('o', 5, -1))
20.5
40
0.585366
8
41
3
0.875
0
0
0
0
0
0
0
0
0
0
0.055556
0.121951
41
1
41
41
0.611111
0
0
0
0
0
0.341463
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
3b457602dbb655ee8aa5e68094ee207169beb282
42
py
Python
cryptools/impl/__init__.py
orangeblock/cryptools
40519975427edadb79e6f0762d0819c1fa754c5c
[ "MIT" ]
null
null
null
cryptools/impl/__init__.py
orangeblock/cryptools
40519975427edadb79e6f0762d0819c1fa754c5c
[ "MIT" ]
null
null
null
cryptools/impl/__init__.py
orangeblock/cryptools
40519975427edadb79e6f0762d0819c1fa754c5c
[ "MIT" ]
null
null
null
from sha1 import sha1 from md4 import md4
14
21
0.809524
8
42
4.25
0.5
0
0
0
0
0
0
0
0
0
0
0.117647
0.190476
42
2
22
21
0.882353
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
8e7dfb9b11a2ad87c6a9eb377cd6b419987300c7
33
py
Python
covid_challenge/io/__init__.py
ImmoNathanael/covid19-challenge
86abe53ddd4be051a2a282492e23df4e59778193
[ "MIT" ]
2
2020-07-24T00:45:10.000Z
2020-09-10T19:58:14.000Z
covid_challenge/io/__init__.py
ImmoNathanael/covid19-challenge
86abe53ddd4be051a2a282492e23df4e59778193
[ "MIT" ]
3
2020-09-14T10:23:00.000Z
2020-09-16T21:36:49.000Z
covid_challenge/io/__init__.py
ImmoNathanael/covid19-challenge
86abe53ddd4be051a2a282492e23df4e59778193
[ "MIT" ]
3
2020-08-18T19:47:14.000Z
2020-10-01T18:59:51.000Z
from .cloud_io import LoadS3Nifti
33
33
0.878788
5
33
5.6
1
0
0
0
0
0
0
0
0
0
0
0.033333
0.090909
33
1
33
33
0.9
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
8e81cbf40e40b2e669429664bd704d4856ac088a
183
py
Python
tormor/exceptions.py
riseupz/tormor
61aadf67178a6d3e65b067b9818eda2f36493f09
[ "MIT" ]
null
null
null
tormor/exceptions.py
riseupz/tormor
61aadf67178a6d3e65b067b9818eda2f36493f09
[ "MIT" ]
null
null
null
tormor/exceptions.py
riseupz/tormor
61aadf67178a6d3e65b067b9818eda2f36493f09
[ "MIT" ]
4
2020-12-14T07:10:43.000Z
2022-01-03T10:09:10.000Z
class SchemaFilesNotFound(Exception): pass class SchemaNotPresent(Exception): pass class ModuleNotPresent(Exception): pass class SchemaPathNotFound(Exception): pass
16.636364
37
0.770492
16
183
8.8125
0.4375
0.368794
0.382979
0
0
0
0
0
0
0
0
0
0.163934
183
11
38
16.636364
0.921569
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
8eb3a63f088ff55fb93ab052c031ca2d24a80f9d
634
py
Python
remcall/schema/__init__.py
luphord/remcall
31419ff0f5c21ea2d90f9cabdaec85b6eebcaa12
[ "MIT" ]
null
null
null
remcall/schema/__init__.py
luphord/remcall
31419ff0f5c21ea2d90f9cabdaec85b6eebcaa12
[ "MIT" ]
null
null
null
remcall/schema/__init__.py
luphord/remcall
31419ff0f5c21ea2d90f9cabdaec85b6eebcaa12
[ "MIT" ]
null
null
null
from .core import Type, Interface, Enum, Record, Primitive, Method, \ string, int8, int16, int32, int64, uint8, uint16, \ uint32, uint64, float32, float64, void, boolean, \ date, datetime, time, primitive_types, Array, Schema from .base import assert_name __all__ = ['Type', 'Interface', 'Enum', 'Record', 'Primitive', 'Method', 'string', 'int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64', 'float32', 'float64', 'void', 'boolean', 'date', 'datetime', 'time', 'assert_name', 'primitive_types', 'Array', 'Schema']
52.833333
74
0.572555
63
634
5.634921
0.507937
0.073239
0.095775
0.129577
0.715493
0.715493
0.715493
0.715493
0.715493
0.715493
0
0.076271
0.255521
634
11
75
57.636364
0.675847
0
0
0
0
0
0.258675
0
0
0
0
0
0.2
1
0
false
0
0.2
0
0.2
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
8ec12f35105a1d8151eec0468e8d12962fda7220
175
py
Python
open_ephys/analysis/formats/__init__.py
KylePoe/open-ephys-python-tools
c72cb533ad2e508d870d88cc22da4d052e66e89e
[ "MIT" ]
4
2021-09-20T17:56:15.000Z
2022-01-05T10:34:57.000Z
open_ephys/analysis/formats/__init__.py
KylePoe/open-ephys-python-tools
c72cb533ad2e508d870d88cc22da4d052e66e89e
[ "MIT" ]
10
2021-01-18T12:35:16.000Z
2022-03-29T16:58:02.000Z
open_ephys/analysis/formats/__init__.py
KylePoe/open-ephys-python-tools
c72cb533ad2e508d870d88cc22da4d052e66e89e
[ "MIT" ]
9
2021-04-26T19:00:14.000Z
2022-03-29T09:45:39.000Z
from .KwikRecording import KwikRecording from .OpenEphysRecording import OpenEphysRecording from .BinaryRecording import BinaryRecording from .NwbRecording import NwbRecording
43.75
50
0.891429
16
175
9.75
0.375
0
0
0
0
0
0
0
0
0
0
0
0.085714
175
4
51
43.75
0.975
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
d6fe0e0afb216da1871630aaad037b397be75d27
104
py
Python
devlivery/ext/site/__init__.py
wlsouza/flasklivery
564c6135d29493ae5efe074488cb0df7f811d889
[ "Unlicense" ]
null
null
null
devlivery/ext/site/__init__.py
wlsouza/flasklivery
564c6135d29493ae5efe074488cb0df7f811d889
[ "Unlicense" ]
null
null
null
devlivery/ext/site/__init__.py
wlsouza/flasklivery
564c6135d29493ae5efe074488cb0df7f811d889
[ "Unlicense" ]
null
null
null
from flask import Flask from .main import bp def init_app(app: Flask): app.register_blueprint(bp)
14.857143
30
0.75
17
104
4.470588
0.588235
0
0
0
0
0
0
0
0
0
0
0
0.173077
104
6
31
17.333333
0.883721
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.5
0
0.75
0.25
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
5
d96d4369485fe6e6ab5de6f61bdbaabdd9254b42
231
py
Python
baseStation/src/vision/domain/iPlayAreaFinder.py
olgam4/design3
6e05d123a24deae7dda646df535844a158ef5cc0
[ "WTFPL" ]
null
null
null
baseStation/src/vision/domain/iPlayAreaFinder.py
olgam4/design3
6e05d123a24deae7dda646df535844a158ef5cc0
[ "WTFPL" ]
null
null
null
baseStation/src/vision/domain/iPlayAreaFinder.py
olgam4/design3
6e05d123a24deae7dda646df535844a158ef5cc0
[ "WTFPL" ]
null
null
null
from abc import ABC, abstractmethod from vision.domain.image import Image from vision.domain.rectangle import Rectangle class IPlayAreaFinder(ABC): @abstractmethod def find(self, image: Image) -> Rectangle: pass
21
46
0.748918
28
231
6.178571
0.5
0.196532
0.184971
0
0
0
0
0
0
0
0
0
0.181818
231
10
47
23.1
0.915344
0
0
0
0
0
0
0
0
0
0
0
0
1
0.142857
false
0.142857
0.428571
0
0.714286
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
0
0
5
d99425c04ebbf102b9f4a5b3e01748cd2595fd44
137
py
Python
src/kids/data/__init__.py
0k/kids.data
f77ef3c83af20d153b6d255edd7693b9821a8fbb
[ "BSD-2-Clause" ]
1
2017-12-30T00:50:50.000Z
2017-12-30T00:50:50.000Z
src/kids/data/__init__.py
0k/kids.data
f77ef3c83af20d153b6d255edd7693b9821a8fbb
[ "BSD-2-Clause" ]
null
null
null
src/kids/data/__init__.py
0k/kids.data
f77ef3c83af20d153b6d255edd7693b9821a8fbb
[ "BSD-2-Clause" ]
null
null
null
# -*- coding: utf-8 -*- from . import dsp from . import fmt from . import lib from . import mdict from . import graph from . import dct
15.222222
23
0.671533
21
137
4.380952
0.52381
0.652174
0
0
0
0
0
0
0
0
0
0.009346
0.218978
137
8
24
17.125
0.850467
0.153285
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
794a821168f32d004fb5f1f1e366e4b4044653e6
337
py
Python
ecosystems/cedar_settings.py
stewardshiptools/stewardshiptools
ee5d27e7b0d5d4947f34ad02bdf63a06ad0a5c3e
[ "MIT" ]
null
null
null
ecosystems/cedar_settings.py
stewardshiptools/stewardshiptools
ee5d27e7b0d5d4947f34ad02bdf63a06ad0a5c3e
[ "MIT" ]
11
2020-03-24T15:29:46.000Z
2022-03-11T23:14:48.000Z
ecosystems/cedar_settings.py
stewardshiptools/stewardshiptools
ee5d27e7b0d5d4947f34ad02bdf63a06ad0a5c3e
[ "MIT" ]
null
null
null
from cedar_settings.default_settings import default_settings default_settings['ecosystems_project_code_prefix'] = ('text', '#ECO-PRJ-') # see DEV PRJ settings for an example of how to use this. default_settings['ecosystems_project_misc_textareas'] = ('text', """notes|Notes""")
42.125
75
0.646884
38
337
5.447368
0.657895
0.289855
0.222222
0.309179
0
0
0
0
0
0
0
0
0.249258
337
7
76
48.142857
0.818182
0.163205
0
0
0
0
0.304183
0.239544
0
0
0
0
0
1
0
true
0
0.25
0
0.25
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
798087fbdc5c7d5444a4284a6b8bd03bec59ec91
2,673
py
Python
tests/plugins/director/test_director_plugin.py
nikitanovosibirsk/vedro
e975a1c1eb065bc6caa32c41c0d7576ee6d284db
[ "Apache-2.0" ]
2
2021-08-24T12:49:30.000Z
2022-01-23T07:21:25.000Z
tests/plugins/director/test_director_plugin.py
nikitanovosibirsk/vedro
e975a1c1eb065bc6caa32c41c0d7576ee6d284db
[ "Apache-2.0" ]
20
2015-12-09T11:04:23.000Z
2022-03-20T09:18:17.000Z
tests/plugins/director/test_director_plugin.py
nikitanovosibirsk/vedro
e975a1c1eb065bc6caa32c41c0d7576ee6d284db
[ "Apache-2.0" ]
3
2015-12-09T07:31:23.000Z
2022-01-28T11:03:24.000Z
from argparse import ArgumentParser, Namespace from unittest.mock import Mock, call import pytest from baby_steps import given, then, when from vedro._core import Dispatcher from vedro.events import ArgParseEvent from vedro.plugins.director import Director, Reporter @pytest.fixture() def dispatcher(): return Dispatcher() @pytest.mark.asyncio async def test_director_plugin(*, dispatcher: Dispatcher): with given: director = Director() director.subscribe(dispatcher) event = ArgParseEvent(ArgumentParser()) with when: res = await dispatcher.fire(event) with then: assert res is None @pytest.mark.asyncio async def test_director_plugin_with_default_reporter(*, dispatcher: Dispatcher): with given: reporter_ = Mock(Reporter) reporter_.name = "reporter" director = Director([reporter_]) director.subscribe(dispatcher) event = ArgParseEvent(ArgumentParser()) with when: res = await dispatcher.fire(event) with then: assert res is None assert reporter_.mock_calls == [call.subscribe(dispatcher)] @pytest.mark.asyncio async def test_director_plugin_with_reporter(*, dispatcher: Dispatcher): with given: reporter1_, reporter2_ = Mock(Reporter), Mock(Reporter) reporter1_.name = "reporter1" reporter2_.name = "reporter2" director = Director([reporter1_, reporter2_]) director.subscribe(dispatcher) args = Namespace(reporters=[reporter2_.name]) arg_parser = Mock(ArgumentParser, parse_known_args=Mock(return_value=(args, []))) event = ArgParseEvent(arg_parser) with when: res = await dispatcher.fire(event) with then: assert res is None assert reporter1_.mock_calls == [] assert reporter2_.mock_calls == [call.subscribe(dispatcher)] @pytest.mark.asyncio async def test_director_plugin_with_reporters(*, dispatcher: Dispatcher): with given: reporter1_, reporter2_ = Mock(Reporter), Mock(Reporter) reporter1_.name = "reporter1" reporter2_.name = "reporter2" director = Director([reporter1_, reporter2_]) director.subscribe(dispatcher) args = Namespace(reporters=[reporter2_.name, reporter1_.name]) arg_parser 
= Mock(ArgumentParser, parse_known_args=Mock(return_value=(args, []))) event = ArgParseEvent(arg_parser) with when: res = await dispatcher.fire(event) with then: assert res is None assert reporter1_.mock_calls == [call.subscribe(dispatcher)] assert reporter2_.mock_calls == [call.subscribe(dispatcher)]
29.7
89
0.690236
285
2,673
6.280702
0.178947
0.084916
0.037989
0.049162
0.777654
0.739665
0.739665
0.705028
0.705028
0.649162
0
0.010994
0.217359
2,673
89
90
30.033708
0.844646
0
0
0.656716
0
0
0.016461
0
0
0
0
0
0.134328
1
0.014925
false
0
0.104478
0.014925
0.134328
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
79816f37a99e623f12620ce53d8e727cfad49b82
20,218
py
Python
parser/team21/Analisis Ascendente/parsetab.py
elielbarrios/tytus
61d008423bc8eb27897624cd2f48e6127b6a8584
[ "MIT" ]
null
null
null
parser/team21/Analisis Ascendente/parsetab.py
elielbarrios/tytus
61d008423bc8eb27897624cd2f48e6127b6a8584
[ "MIT" ]
null
null
null
parser/team21/Analisis Ascendente/parsetab.py
elielbarrios/tytus
61d008423bc8eb27897624cd2f48e6127b6a8584
[ "MIT" ]
null
null
null
# parsetab.py # This file is automatically generated. Do not edit. # pylint: disable=W,C,R _tabversion = '3.10' _lr_method = 'LALR' _lr_signature = 'leftOROleftANDOleftNOIGUALIGUALIGUALleftMAYORMENORMAYORIGUALMENORIGUALleftMAYMAYMENMENleftMASMENOSleftMULTDIVleftNOTOGNOTleftPARIZQPARDRADD ALTER AND ANDO AS BETWEEN BIGINT BOOLEAN CADENA CHAR CHARACTER CHECK COMA CONSTRAINT CRDR CREATE CRIZQ DATABASE DATE DAY DECIMAL DEFAULT DELETE DIV DOSPUNTOS DOUBLE DROP ENTERO ENUM EXPONENCIAL FALSE FOREIGN FROM GNOT HOUR ID IF IGUAL IGUALIGUAL ILIKE IN INSERT INT INTERVAL INTO IS KEY LIKE LLDR LLIZQ MAS MAYMAY MAYOR MAYORIGUAL MENMEN MENOR MENORIGUAL MENOS MINUTE MONEY MONTH MULT NOIGUAL NOT NOTO NULL NUMDECIMAL NUMERIC OR ORO OWNER PARDR PARIZQ PORCENTAJE PRECISION PRIMARY PTCOMA PUNTO REAL REFERENCES RENAME REPLACE SECOND SELECT SET SHOW SIMILAR SMALLINT TABLE TEXT TIME TIMESTAMP TO TRUE TYPE UNIQUE UPDATE VALUES VARCHAR VARYING WHERE YEARs : instruccionesinstrucciones : instrucciones instruccioninstrucciones : instruccioninstruccion : CREATE TABLE ID PARIZQ campos PARDR PTCOMAcampos : campos COMA campocampos : campocampo : ID tipocampo : ID tipo acompaniamientocampo : CONSTRAINT ID FOREIGN KEY PARIZQ ID PARDR REFERENCES ID PARIZQ ID PARDRcampo : PRIMARY KEY PARIZQ ID PARDRacompaniamiento : acompaniamiento acomacompaniamiento : acomacom : NOT NULL\n | NULL\n | UNIQUE\n | DEFAULT ENTERO\n | DEFAULT CADENA\n | DEFAULT NUMDECIMAL\n | PRIMARY KEYtipo : SMALLINT\n | INT\n | BIGINT\n | DECIMAL\n | NUMERIC\n | REAL\n | DOUBLE\n | MONEY\n | VARYING\n | TEXT\n | TIMESTAMP\n | DATE\n | TIME\n | INTERVAL\n | BOOLEANtipo : CHARACTER PARIZQ ENTERO PARDR\n | VARCHAR PARIZQ ENTERO PARDR \n | CHAR PARIZQ ENTERO PARDR instruccion : INSERT INTO ID PARIZQ listaID PARDR VALUES values PTCOMAinstruccion : INSERT INTO ID VALUES values PTCOMAlistaID : listaID COMA IDlistaID : IDvalues : values COMA valuevalues : valuevalue : PARIZQ listaValores PARDRlistaValores : listaValores COMA valoreslistaValores 
: valoresvalores : ENTERO\n | NUMDECIMAL\n | CADENA instruccion : UPDATE ID SET asignaciones PTCOMAinstruccion : UPDATE ID SET asignaciones WHERE where PTCOMAasignaciones : asignaciones COMA asignacionasignaciones : asignacionwhere : asignacionasignacion : ID IGUAL EE : PARIZQ E PARDR\n | operando\n | unario\n | valores\n | varoperando : E MAS E\n\t | E MENOS E\n\t | E MULT E\n \t | E DIV E\n\t | E IGUALIGUAL E\n\t | E NOIGUAL E\n\t | E MENOR E\n\t | E MAYOR E\n\t | E MENORIGUAL E\n\t | E MAYORIGUAL E\n\t | E MENMEN E\n\t | E MAYMAY E\n\t | E ANDO E\n\t | E ORO E\n\t unario : NOTO E \n\t | MENOS E \n\t | GNOT E var : IDinstruccion : DELETE FROM ID WHERE where PTCOMAinstruccion : DELETE FROM ID PTCOMA' _lr_action_items = {'CREATE':([0,2,3,8,24,36,66,85,96,123,157,],[4,4,-3,-2,-80,-50,-39,-79,-4,-51,-38,]),'INSERT':([0,2,3,8,24,36,66,85,96,123,157,],[5,5,-3,-2,-80,-50,-39,-79,-4,-51,-38,]),'UPDATE':([0,2,3,8,24,36,66,85,96,123,157,],[6,6,-3,-2,-80,-50,-39,-79,-4,-51,-38,]),'DELETE':([0,2,3,8,24,36,66,85,96,123,157,],[7,7,-3,-2,-80,-50,-39,-79,-4,-51,-38,]),'$end':([1,2,3,8,24,36,66,85,96,123,157,],[0,-1,-3,-2,-80,-50,-39,-79,-4,-51,-38,]),'TABLE':([4,],[9,]),'INTO':([5,],[10,]),'ID':([6,9,10,12,15,17,18,23,28,35,37,38,61,65,75,80,81,82,99,105,106,107,108,109,110,111,112,113,114,115,116,117,118,155,160,162,],[11,13,14,16,20,25,30,20,62,73,20,20,25,101,73,73,73,73,134,73,73,73,73,73,73,73,73,73,73,73,73,73,73,158,161,163,]),'FROM':([7,],[12,]),'SET':([11,],[15,]),'PARIZQ':([13,14,19,35,57,58,59,63,67,75,80,81,82,100,105,106,107,108,109,110,111,112,113,114,115,116,117,118,133,161,],[17,18,34,75,93,94,95,99,34,75,75,75,75,34,75,75,75,75,75,75,75,75,75,75,75,75,75,75,155,162,]),'VALUES':([14,64,],[19,100,]),'WHERE':([16,21,22,70,71,72,73,74,76,77,78,79,84,120,121,122,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,],[23,37,-53,-47,-48,-49,-78,-55,-57,-58,-59,-60,-52,-76,-75,-77,-61,-62,-63,-64,-65,-66,-67,-68,-69,-70,-71,-72,-73,-74,-56,]),'PTCOMA':([16,21,22,3
2,33,39,40,60,70,71,72,73,74,76,77,78,79,83,84,102,103,120,121,122,135,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,],[24,36,-53,66,-43,85,-54,96,-47,-48,-49,-78,-55,-57,-58,-59,-60,123,-52,-42,-44,-76,-75,-77,157,-61,-62,-63,-64,-65,-66,-67,-68,-69,-70,-71,-72,-73,-74,-56,]),'CONSTRAINT':([17,61,],[28,28,]),'PRIMARY':([17,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,61,86,87,89,90,124,125,126,127,128,129,152,153,154,],[29,92,-20,-21,-22,-23,-24,-25,-26,-27,-28,-29,-30,-31,-32,-33,-34,29,92,-12,-14,-15,-11,-13,-16,-17,-18,-19,-35,-36,-37,]),'IGUAL':([20,],[35,]),'COMA':([21,22,26,27,30,31,32,33,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,68,69,70,71,72,73,74,76,77,78,79,84,86,87,89,90,97,101,102,103,120,121,122,124,125,126,127,128,129,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,156,164,],[38,-53,61,-6,-41,65,67,-43,-7,-20,-21,-22,-23,-24,-25,-26,-27,-28,-29,-30,-31,-32,-33,-34,104,-46,-47,-48,-49,-78,-55,-57,-58,-59,-60,-52,-8,-12,-14,-15,-5,-40,-42,-44,-76,-75,-77,-11,-13,-16,-17,-18,-19,67,-45,-61,-62,-63,-64,-65,-66,-67,-68,-69,-70,-71,-72,-73,-74,-56,-35,-36,-37,-10,-9,]),'SMALLINT':([25,],[42,]),'INT':([25,],[43,]),'BIGINT':([25,],[44,]),'DECIMAL':([25,],[45,]),'NUMERIC':([25,],[46,]),'REAL':([25,],[47,]),'DOUBLE':([25,],[48,]),'MONEY':([25,],[49,]),'VARYING':([25,],[50,]),'TEXT':([25,],[51,]),'TIMESTAMP':([25,],[52,]),'DATE':([25,],[53,]),'TIME':([25,],[54,]),'INTERVAL':([25,],[55,]),'BOOLEAN':([25,],[56,]),'CHARACTER':([25,],[57,]),'VARCHAR':([25,],[58,]),'CHAR':([25,],[59,]),'PARDR':([26,27,30,31,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,68,69,70,71,72,73,76,77,78,79,86,87,89,90,97,101,119,120,121,122,124,125,126,127,128,129,130,131,132,134,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,156,158,163,164,],[60,-6,-41,64,-7,-20,-21,-22,-23,-24,-25,-26,-27,-28,-29,-30,-31,-32,-33,-34,103,-46,-47,-48,-49,-78,-57,-58,-59,-60,-8,-12,-14,-15,-5,-40,151,-76,-75,-77,-11,-13,-1
6,-17,-18,-19,152,153,154,156,-45,-61,-62,-63,-64,-65,-66,-67,-68,-69,-70,-71,-72,-73,-74,-56,-35,-36,-37,-10,159,164,-9,]),'KEY':([29,92,98,],[63,129,133,]),'ENTERO':([34,35,75,80,81,82,91,93,94,95,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,],[70,70,70,70,70,70,126,130,131,132,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,]),'NUMDECIMAL':([34,35,75,80,81,82,91,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,],[71,71,71,71,71,71,128,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,]),'CADENA':([34,35,75,80,81,82,91,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,],[72,72,72,72,72,72,127,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,]),'NOTO':([35,75,80,81,82,105,106,107,108,109,110,111,112,113,114,115,116,117,118,],[81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,81,]),'MENOS':([35,70,71,72,73,74,75,76,77,78,79,80,81,82,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,],[80,-47,-48,-49,-78,106,80,-57,-58,-59,-60,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,106,-76,-75,-77,-61,-62,-63,-64,106,106,106,106,106,106,106,106,106,106,-56,]),'GNOT':([35,75,80,81,82,105,106,107,108,109,110,111,112,113,114,115,116,117,118,],[82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,]),'NOT':([41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,86,87,89,90,124,125,126,127,128,129,152,153,154,],[88,-20,-21,-22,-23,-24,-25,-26,-27,-28,-29,-30,-31,-32,-33,-34,88,-12,-14,-15,-11,-13,-16,-17,-18,-19,-35,-36,-37,]),'NULL':([41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,86,87,88,89,90,124,125,126,127,128,129,152,153,154,],[89,-20,-21,-22,-23,-24,-25,-26,-27,-28,-29,-30,-31,-32,-33,-34,89,-12,125,-14,-15,-11,-13,-16,-17,-18,-19,-35,-36,-37,]),'UNIQUE':([41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,86,87,89,90,124,125,126,127,128,129,152,153,154,],[90,-20,-21,-22,-23,-24,-25,-26,-27,-28,-29,-30,-31,-32,-33,-34,90,-12,-14,-15,-11,-13,-16,-17,-18,-19,-35,-36,-
37,]),'DEFAULT':([41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,86,87,89,90,124,125,126,127,128,129,152,153,154,],[91,-20,-21,-22,-23,-24,-25,-26,-27,-28,-29,-30,-31,-32,-33,-34,91,-12,-14,-15,-11,-13,-16,-17,-18,-19,-35,-36,-37,]),'FOREIGN':([62,],[98,]),'MAS':([70,71,72,73,74,76,77,78,79,119,120,121,122,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,],[-47,-48,-49,-78,105,-57,-58,-59,-60,105,-76,-75,-77,-61,-62,-63,-64,105,105,105,105,105,105,105,105,105,105,-56,]),'MULT':([70,71,72,73,74,76,77,78,79,119,120,121,122,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,],[-47,-48,-49,-78,107,-57,-58,-59,-60,107,107,-75,-77,107,107,-63,-64,107,107,107,107,107,107,107,107,107,107,-56,]),'DIV':([70,71,72,73,74,76,77,78,79,119,120,121,122,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,],[-47,-48,-49,-78,108,-57,-58,-59,-60,108,108,-75,-77,108,108,-63,-64,108,108,108,108,108,108,108,108,108,108,-56,]),'IGUALIGUAL':([70,71,72,73,74,76,77,78,79,119,120,121,122,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,],[-47,-48,-49,-78,109,-57,-58,-59,-60,109,-76,-75,-77,-61,-62,-63,-64,-65,-66,-67,-68,-69,-70,-71,-72,109,109,-56,]),'NOIGUAL':([70,71,72,73,74,76,77,78,79,119,120,121,122,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,],[-47,-48,-49,-78,110,-57,-58,-59,-60,110,-76,-75,-77,-61,-62,-63,-64,-65,-66,-67,-68,-69,-70,-71,-72,110,110,-56,]),'MENOR':([70,71,72,73,74,76,77,78,79,119,120,121,122,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,],[-47,-48,-49,-78,111,-57,-58,-59,-60,111,-76,-75,-77,-61,-62,-63,-64,111,111,-67,-68,-69,-70,-71,-72,111,111,-56,]),'MAYOR':([70,71,72,73,74,76,77,78,79,119,120,121,122,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,],[-47,-48,-49,-78,112,-57,-58,-59,-60,112,-76,-75,-77,-61,-62,-63,-64,112,112,-67,-68,-69,-70,-71,-72,112,112,-56,]),'MENORIGUAL':([70,71,72,73,74,76,77,78,79,119,120,121,122,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,],[-4
7,-48,-49,-78,113,-57,-58,-59,-60,113,-76,-75,-77,-61,-62,-63,-64,113,113,-67,-68,-69,-70,-71,-72,113,113,-56,]),'MAYORIGUAL':([70,71,72,73,74,76,77,78,79,119,120,121,122,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,],[-47,-48,-49,-78,114,-57,-58,-59,-60,114,-76,-75,-77,-61,-62,-63,-64,114,114,-67,-68,-69,-70,-71,-72,114,114,-56,]),'MENMEN':([70,71,72,73,74,76,77,78,79,119,120,121,122,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,],[-47,-48,-49,-78,115,-57,-58,-59,-60,115,-76,-75,-77,-61,-62,-63,-64,115,115,115,115,115,115,-71,-72,115,115,-56,]),'MAYMAY':([70,71,72,73,74,76,77,78,79,119,120,121,122,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,],[-47,-48,-49,-78,116,-57,-58,-59,-60,116,-76,-75,-77,-61,-62,-63,-64,116,116,116,116,116,116,-71,-72,116,116,-56,]),'ANDO':([70,71,72,73,74,76,77,78,79,119,120,121,122,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,],[-47,-48,-49,-78,117,-57,-58,-59,-60,117,-76,-75,-77,-61,-62,-63,-64,-65,-66,-67,-68,-69,-70,-71,-72,-73,117,-56,]),'ORO':([70,71,72,73,74,76,77,78,79,119,120,121,122,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,],[-47,-48,-49,-78,118,-57,-58,-59,-60,118,-76,-75,-77,-61,-62,-63,-64,-65,-66,-67,-68,-69,-70,-71,-72,-73,-74,-56,]),'REFERENCES':([159,],[160,]),} _lr_action = {} for _k, _v in _lr_action_items.items(): for _x,_y in zip(_v[0],_v[1]): if not _x in _lr_action: _lr_action[_x] = {} _lr_action[_x][_k] = _y del _lr_action_items _lr_goto_items = 
{'s':([0,],[1,]),'instrucciones':([0,],[2,]),'instruccion':([0,2,],[3,8,]),'asignaciones':([15,],[21,]),'asignacion':([15,23,37,38,],[22,40,40,84,]),'campos':([17,],[26,]),'campo':([17,61,],[27,97,]),'listaID':([18,],[31,]),'values':([19,100,],[32,135,]),'value':([19,67,100,],[33,102,33,]),'where':([23,37,],[39,83,]),'tipo':([25,],[41,]),'listaValores':([34,],[68,]),'valores':([34,35,75,80,81,82,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,],[69,78,78,78,78,78,136,78,78,78,78,78,78,78,78,78,78,78,78,78,78,]),'E':([35,75,80,81,82,105,106,107,108,109,110,111,112,113,114,115,116,117,118,],[74,119,120,121,122,137,138,139,140,141,142,143,144,145,146,147,148,149,150,]),'operando':([35,75,80,81,82,105,106,107,108,109,110,111,112,113,114,115,116,117,118,],[76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,]),'unario':([35,75,80,81,82,105,106,107,108,109,110,111,112,113,114,115,116,117,118,],[77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,77,]),'var':([35,75,80,81,82,105,106,107,108,109,110,111,112,113,114,115,116,117,118,],[79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,]),'acompaniamiento':([41,],[86,]),'acom':([41,86,],[87,124,]),} _lr_goto = {} for _k, _v in _lr_goto_items.items(): for _x, _y in zip(_v[0], _v[1]): if not _x in _lr_goto: _lr_goto[_x] = {} _lr_goto[_x][_k] = _y del _lr_goto_items _lr_productions = [ ("S' -> s","S'",1,None,None,None), ('s -> instrucciones','s',1,'p_s','ascendente.py',219), ('instrucciones -> instrucciones instruccion','instrucciones',2,'p_instrucciones','ascendente.py',224), ('instrucciones -> instruccion','instrucciones',1,'p_instruccion','ascendente.py',230), ('instruccion -> CREATE TABLE ID PARIZQ campos PARDR PTCOMA','instruccion',7,'p_create','ascendente.py',235), ('campos -> campos COMA campo','campos',3,'p_campos','ascendente.py',239), ('campos -> campo','campos',1,'p_campos2','ascendente.py',244), ('campo -> ID tipo','campo',2,'p_campoSimple','ascendente.py',248), ('campo -> ID tipo 
acompaniamiento','campo',3,'p_campo','ascendente.py',251), ('campo -> CONSTRAINT ID FOREIGN KEY PARIZQ ID PARDR REFERENCES ID PARIZQ ID PARDR','campo',12,'p_foreign','ascendente.py',254), ('campo -> PRIMARY KEY PARIZQ ID PARDR','campo',5,'p_primary','ascendente.py',258), ('acompaniamiento -> acompaniamiento acom','acompaniamiento',2,'p_listacampo','ascendente.py',261), ('acompaniamiento -> acom','acompaniamiento',1,'p_listacampo2','ascendente.py',267), ('acom -> NOT NULL','acom',2,'p_acompaniamiento','ascendente.py',271), ('acom -> NULL','acom',1,'p_acompaniamiento','ascendente.py',272), ('acom -> UNIQUE','acom',1,'p_acompaniamiento','ascendente.py',273), ('acom -> DEFAULT ENTERO','acom',2,'p_acompaniamiento','ascendente.py',274), ('acom -> DEFAULT CADENA','acom',2,'p_acompaniamiento','ascendente.py',275), ('acom -> DEFAULT NUMDECIMAL','acom',2,'p_acompaniamiento','ascendente.py',276), ('acom -> PRIMARY KEY','acom',2,'p_acompaniamiento','ascendente.py',277), ('tipo -> SMALLINT','tipo',1,'p_tipos','ascendente.py',281), ('tipo -> INT','tipo',1,'p_tipos','ascendente.py',282), ('tipo -> BIGINT','tipo',1,'p_tipos','ascendente.py',283), ('tipo -> DECIMAL','tipo',1,'p_tipos','ascendente.py',284), ('tipo -> NUMERIC','tipo',1,'p_tipos','ascendente.py',285), ('tipo -> REAL','tipo',1,'p_tipos','ascendente.py',286), ('tipo -> DOUBLE','tipo',1,'p_tipos','ascendente.py',287), ('tipo -> MONEY','tipo',1,'p_tipos','ascendente.py',288), ('tipo -> VARYING','tipo',1,'p_tipos','ascendente.py',289), ('tipo -> TEXT','tipo',1,'p_tipos','ascendente.py',290), ('tipo -> TIMESTAMP','tipo',1,'p_tipos','ascendente.py',291), ('tipo -> DATE','tipo',1,'p_tipos','ascendente.py',292), ('tipo -> TIME','tipo',1,'p_tipos','ascendente.py',293), ('tipo -> INTERVAL','tipo',1,'p_tipos','ascendente.py',294), ('tipo -> BOOLEAN','tipo',1,'p_tipos','ascendente.py',295), ('tipo -> CHARACTER PARIZQ ENTERO PARDR','tipo',4,'p_tiposTexto','ascendente.py',298), ('tipo -> VARCHAR PARIZQ ENTERO 
PARDR','tipo',4,'p_tiposTexto','ascendente.py',299), ('tipo -> CHAR PARIZQ ENTERO PARDR','tipo',4,'p_tiposTexto','ascendente.py',300), ('instruccion -> INSERT INTO ID PARIZQ listaID PARDR VALUES values PTCOMA','instruccion',9,'p_insertInto','ascendente.py',304), ('instruccion -> INSERT INTO ID VALUES values PTCOMA','instruccion',6,'p_insertInto2','ascendente.py',308), ('listaID -> listaID COMA ID','listaID',3,'p_listaID','ascendente.py',313), ('listaID -> ID','listaID',1,'p_listaID2','ascendente.py',318), ('values -> values COMA value','values',3,'p_values','ascendente.py',322), ('values -> value','values',1,'p_values2','ascendente.py',327), ('value -> PARIZQ listaValores PARDR','value',3,'p_value','ascendente.py',331), ('listaValores -> listaValores COMA valores','listaValores',3,'p_listaValores','ascendente.py',336), ('listaValores -> valores','listaValores',1,'p_listaValores2','ascendente.py',341), ('valores -> ENTERO','valores',1,'p_valores','ascendente.py',346), ('valores -> NUMDECIMAL','valores',1,'p_valores','ascendente.py',347), ('valores -> CADENA','valores',1,'p_valores','ascendente.py',348), ('instruccion -> UPDATE ID SET asignaciones PTCOMA','instruccion',5,'p_update','ascendente.py',353), ('instruccion -> UPDATE ID SET asignaciones WHERE where PTCOMA','instruccion',7,'p_update2','ascendente.py',357), ('asignaciones -> asignaciones COMA asignacion','asignaciones',3,'p_asignaciones','ascendente.py',361), ('asignaciones -> asignacion','asignaciones',1,'p_asignaciones2','ascendente.py',366), ('where -> asignacion','where',1,'p_where','ascendente.py',371), ('asignacion -> ID IGUAL E','asignacion',3,'p_asignacion','ascendente.py',375), ('E -> PARIZQ E PARDR','E',3,'p_E','ascendente.py',378), ('E -> operando','E',1,'p_E','ascendente.py',379), ('E -> unario','E',1,'p_E','ascendente.py',380), ('E -> valores','E',1,'p_E','ascendente.py',381), ('E -> var','E',1,'p_E','ascendente.py',382), ('operando -> E MAS E','operando',3,'p_oper','ascendente.py',389), 
('operando -> E MENOS E','operando',3,'p_oper','ascendente.py',390), ('operando -> E MULT E','operando',3,'p_oper','ascendente.py',391), ('operando -> E DIV E','operando',3,'p_oper','ascendente.py',392), ('operando -> E IGUALIGUAL E','operando',3,'p_oper','ascendente.py',393), ('operando -> E NOIGUAL E','operando',3,'p_oper','ascendente.py',394), ('operando -> E MENOR E','operando',3,'p_oper','ascendente.py',395), ('operando -> E MAYOR E','operando',3,'p_oper','ascendente.py',396), ('operando -> E MENORIGUAL E','operando',3,'p_oper','ascendente.py',397), ('operando -> E MAYORIGUAL E','operando',3,'p_oper','ascendente.py',398), ('operando -> E MENMEN E','operando',3,'p_oper','ascendente.py',399), ('operando -> E MAYMAY E','operando',3,'p_oper','ascendente.py',400), ('operando -> E ANDO E','operando',3,'p_oper','ascendente.py',401), ('operando -> E ORO E','operando',3,'p_oper','ascendente.py',402), ('unario -> NOTO E','unario',2,'p_unarios','ascendente.py',407), ('unario -> MENOS E','unario',2,'p_unarios','ascendente.py',408), ('unario -> GNOT E','unario',2,'p_unarios','ascendente.py',409), ('var -> ID','var',1,'p_var','ascendente.py',415), ('instruccion -> DELETE FROM ID WHERE where PTCOMA','instruccion',6,'p_delete','ascendente.py',423), ('instruccion -> DELETE FROM ID PTCOMA','instruccion',4,'p_delete2','ascendente.py',427), ]
182.144144
8,591
0.609309
3,717
20,218
3.27576
0.119989
0.078844
0.014783
0.015769
0.517165
0.490884
0.409412
0.36112
0.36112
0.314964
0
0.317084
0.111188
20,218
110
8,592
183.8
0.36049
0.004155
0
0.02
1
0.01
0.422525
0.011575
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
1
1
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
79816f629ed80f8a75119acc654255be54eac72e
224
bzl
Python
bloop/bloop.bzl
stripe-archive/bazel-bloop-exporter
d04826f06e1f72f80d80765844a4c74657001583
[ "MIT" ]
2
2021-04-01T20:18:33.000Z
2022-02-06T08:09:11.000Z
bloop/bloop.bzl
stripe-archive/bazel-bloop-exporter
d04826f06e1f72f80d80765844a4c74657001583
[ "MIT" ]
null
null
null
bloop/bloop.bzl
stripe-archive/bazel-bloop-exporter
d04826f06e1f72f80d80765844a4c74657001583
[ "MIT" ]
1
2021-04-08T11:02:04.000Z
2021-04-08T11:02:04.000Z
load("//bloop/private:bloop_target.bzl", "bloop_target_test") def bloop_target(**kwargs): bloop_target_test( name = "%s.bloop" % kwargs["name"], target = kwargs["name"], tags = ["manual"], )
24.888889
61
0.589286
26
224
4.846154
0.461538
0.349206
0.238095
0
0
0
0
0
0
0
0
0
0.223214
224
8
62
28
0.724138
0
0
0
0
0
0.316964
0.142857
0
0
0
0
0
1
0.142857
true
0
0
0
0.142857
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
79a9c613d862671c7c61990aad066c4a3257932f
223
py
Python
graphene_gis_extension/apps.py
serioeseGmbH/django-graphene-gis-extension
8066b4a3599a517e30fa6fbc254c3a623e0c3c04
[ "MIT" ]
2
2019-06-11T16:45:17.000Z
2019-06-20T19:48:01.000Z
graphene_gis_extension/apps.py
serioeseGmbH/django-graphene-gis-extension
8066b4a3599a517e30fa6fbc254c3a623e0c3c04
[ "MIT" ]
1
2020-09-18T17:26:18.000Z
2020-09-18T17:26:18.000Z
graphene_gis_extension/apps.py
serioeseGmbH/django-graphene-gis-extension
8066b4a3599a517e30fa6fbc254c3a623e0c3c04
[ "MIT" ]
null
null
null
from django.apps import AppConfig from .fields import register_field_conversions class GrapheneGisExtensionConfig(AppConfig): name = 'graphene_gis_extension' def ready(self): register_field_conversions()
22.3
46
0.784753
24
223
7.041667
0.75
0.153846
0.284024
0
0
0
0
0
0
0
0
0
0.156951
223
9
47
24.777778
0.898936
0
0
0
0
0
0.098655
0.098655
0
0
0
0
0
1
0.166667
false
0
0.333333
0
0.833333
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
79cac16a69374cf7c6acc1daefade9a7ad5effb3
72
py
Python
PaperRank/__init__.py
tetherless-world/PaperRank
a6b7c1294c7b9d5c3e0aebc59996eef044801daa
[ "MIT" ]
1
2018-07-13T17:12:54.000Z
2018-07-13T17:12:54.000Z
PaperRank/__init__.py
tetherless-world/PaperRank
a6b7c1294c7b9d5c3e0aebc59996eef044801daa
[ "MIT" ]
1
2021-06-01T22:23:26.000Z
2021-06-01T22:23:26.000Z
PaperRank/__init__.py
tetherless-world/PaperRank
a6b7c1294c7b9d5c3e0aebc59996eef044801daa
[ "MIT" ]
2
2020-10-30T03:02:13.000Z
2020-10-30T04:25:07.000Z
# Imports from . import compute from . import update from . import util
14.4
21
0.75
10
72
5.4
0.6
0.555556
0
0
0
0
0
0
0
0
0
0
0.194444
72
4
22
18
0.931034
0.097222
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
79d942d1c930f5357f917e35216268697e73ddd3
1,266
bzl
Python
nodejs/defs.bzl
purkhusid/rules_proto_grpc
943656d049d2932a32d8f882bbb05c024b499020
[ "Apache-2.0" ]
null
null
null
nodejs/defs.bzl
purkhusid/rules_proto_grpc
943656d049d2932a32d8f882bbb05c024b499020
[ "Apache-2.0" ]
null
null
null
nodejs/defs.bzl
purkhusid/rules_proto_grpc
943656d049d2932a32d8f882bbb05c024b499020
[ "Apache-2.0" ]
null
null
null
"""Backwards compat aliases for rules_proto_grpc Node.js rules.""" load("//js:defs.bzl", "js_grpc_node_compile", "js_grpc_node_library", "js_proto_compile", "js_proto_library") def nodejs_proto_compile(**kwargs): print("Rule nodejs_proto_compile from @rules_proto_grpc//nodejs:defs.bzl has been deprecated, please use js_proto_compile from @rules_proto_grpc//js:defs.bzl instead") # buildifier: disable=print js_proto_compile(**kwargs) def nodejs_grpc_compile(**kwargs): print("Rule nodejs_grpc_compile from @rules_proto_grpc//nodejs:defs.bzl has been deprecated, please use js_grpc_node_compile from @rules_proto_grpc//js:defs.bzl instead") # buildifier: disable=print js_grpc_node_compile(**kwargs) def nodejs_proto_library(**kwargs): print("Rule nodejs_proto_library from @rules_proto_grpc//nodejs:defs.bzl has been deprecated, please use js_proto_library from @rules_proto_grpc//js:defs.bzl instead") # buildifier: disable=print js_proto_library(**kwargs) def nodejs_grpc_library(**kwargs): print("Rule nodejs_grpc_library from @rules_proto_grpc//nodejs:defs.bzl has been deprecated, please use js_grpc_node_library from @rules_proto_grpc//js:defs.bzl instead") # buildifier: disable=print js_grpc_node_library(**kwargs)
63.3
203
0.790679
192
1,266
4.880208
0.145833
0.096051
0.134472
0.153682
0.753469
0.6254
0.614728
0.614728
0.614728
0.614728
0
0
0.101896
1,266
19
204
66.631579
0.824099
0.130332
0
0
0
0.307692
0.662088
0.234432
0
0
0
0
0
1
0.307692
true
0
0
0
0.307692
0.307692
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
null
0
0
0
0
0
1
1
0
0
0
0
0
0
5
79d9e4a7f3a7899d9db166a7db1b389f52959c91
139
py
Python
strava/api/__init__.py
rhydlewis/strava-cli
d5d6d2b6aee64c531745222f8ff3bd77e7bf7398
[ "MIT" ]
null
null
null
strava/api/__init__.py
rhydlewis/strava-cli
d5d6d2b6aee64c531745222f8ff3bd77e7bf7398
[ "MIT" ]
null
null
null
strava/api/__init__.py
rhydlewis/strava-cli
d5d6d2b6aee64c531745222f8ff3bd77e7bf7398
[ "MIT" ]
null
null
null
from .activity import get_activity from .upload import post_upload, get_upload from .athlete import get_activities, get_stats, get_athlete
34.75
59
0.848921
21
139
5.333333
0.428571
0.160714
0
0
0
0
0
0
0
0
0
0
0.107914
139
3
60
46.333333
0.903226
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
79e03e3397bd6454a931b775ef9ce9feb4470e34
358
py
Python
gkit_widgets/__init__.py
NonnagHcaz/gkit_widgets
d3efdeb653a63d25dc3ecf27f364337eeda1fb45
[ "MIT" ]
1
2017-06-13T19:30:44.000Z
2017-06-13T19:30:44.000Z
gkit_widgets/__init__.py
gannon93/gkit_widgets
d3efdeb653a63d25dc3ecf27f364337eeda1fb45
[ "MIT" ]
null
null
null
gkit_widgets/__init__.py
gannon93/gkit_widgets
d3efdeb653a63d25dc3ecf27f364337eeda1fb45
[ "MIT" ]
null
null
null
"""File contains package initialization data.""" import os import sys from .attachment_frame import * from .console_window import * from .framed_entry import * from .labeled_entry import * from .labeled_radios import * from .menu_bar import * from .menu_item import * from .labeled_file_entry import * from . import common sys.path.insert(0, os.getcwd())
21.058824
48
0.77095
51
358
5.235294
0.490196
0.299625
0.168539
0.164794
0
0
0
0
0
0
0
0.003247
0.139665
358
16
49
22.375
0.863636
0.117318
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.916667
0
0.916667
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
8de618b5b1150b38da524b2ea9f0b67e3902bc7c
5,514
py
Python
backend/tests/test_favorites.py
nightriddler/fastapi_realworld
403da170494fd535a4d761bcb36e9575598af5a2
[ "MIT" ]
5
2021-12-12T19:46:36.000Z
2022-01-13T15:08:34.000Z
backend/tests/test_favorites.py
aland-zhang/fastapi_realworld
403da170494fd535a4d761bcb36e9575598af5a2
[ "MIT" ]
null
null
null
backend/tests/test_favorites.py
aland-zhang/fastapi_realworld
403da170494fd535a4d761bcb36e9575598af5a2
[ "MIT" ]
1
2022-01-20T03:15:03.000Z
2022-01-20T03:15:03.000Z
from typing import AsyncGenerator, Dict, Tuple import pytest from settings import config from slugify import slugify from sqlalchemy import func from sqlalchemy.ext.asyncio.session import AsyncSession from src.db.models import Favorite from starlette.responses import Response from .schemas import check_content_article pytestmark = pytest.mark.asyncio async def test_post_favorite( db: AsyncSession, client: AsyncGenerator, data_first_user: Dict[str, Dict[str, str]], token_first_user: str, data_first_article: Dict[str, Dict[str, str]], create_and_get_response_two_article: Tuple[Response], ) -> None: """ Test favorite an article. Auth is required. """ first_article, _ = create_and_get_response_two_article slug_first_article = first_article.json()["article"]["slug"] response_withot_auth = await client.post( f"/articles/{slug_first_article}/favorite", ) assert response_withot_auth.status_code == 401, "Expected 401 code." response_fake_article = await client.post( "/articles/fakeslugarticle/favorite", headers={"Authorization": f"Token {token_first_user}"}, ) assert response_fake_article.status_code == 400, "Expected 400 code." response = await client.post( f"/articles/{slug_first_article}/favorite", headers={"Authorization": f"Token {token_first_user}"}, ) content = response.json() stmt = await db.execute(func.count(Favorite.id)) count_favorites = stmt.scalar() check_content_article(content["article"], data_first_article, data_first_user) assert response.status_code == 200, "Expected 200 code." assert ( content["article"]["favorited"] is True ), "The status of the favorite article is not displayed in the favorited field." assert ( content["article"]["favoritesCount"] == 1 ), "Adding the article to favorites did not change the 'favoritesCount' field." assert count_favorites == 1, "The favorite article was not added to the database." 
count_favorites_from_redis = await config.redis_db.hget( "count_favorites", slugify(data_first_article["article"]["title"]) ) assert count_favorites == int( count_favorites_from_redis ), "The article was not saved in redis cache." response_double = await client.post( f"/articles/{slug_first_article}/favorite", headers={"Authorization": f"Token {token_first_user}"}, ) assert response_double.status_code == 400, "Expected 400 code." stmt = await db.execute(func.count(Favorite.id)) count_favorites = stmt.scalar() assert ( count_favorites == 1 ), "A record with the same fields was created in the database." async def test_remove_favorite( db: AsyncSession, client: AsyncGenerator, data_first_user: Dict[str, Dict[str, str]], token_first_user: str, data_first_article: Dict[str, Dict[str, str]], token_second_user: str, get_tags: str, create_and_get_response_two_article: Tuple[Response], data_second_article: Dict[str, Dict[str, str]], ) -> None: """ Test unfavorite an article. Auth is required. """ first_article, _ = create_and_get_response_two_article slug_first_article = first_article.json()["article"]["slug"] await client.post( f"/articles/{slug_first_article}/favorite", headers={"Authorization": f"Token {token_first_user}"}, ) response_another_user = await client.delete( f"/articles/{slug_first_article}/favorite", headers={"Authorization": f"Token {token_second_user}"}, ) assert response_another_user.status_code == 400, "Expected 400 code." stmt = await db.execute(func.count(Favorite.id)) count_favorites = stmt.scalar() assert count_favorites == 1, "Deleted article from the database by another user." response_withot_auth = await client.delete( f"/articles/{slug_first_article}/favorite", ) assert response_withot_auth.status_code == 401, "Expected 401 code." stmt = await db.execute(func.count(Favorite.id)) count_favorites = stmt.scalar() assert ( count_favorites == 1 ), "Deleted article from the database without authorization.." 
response_fake_article = await client.delete( "/articles/fakeslugarticle/favorite", headers={"Authorization": f"Token {token_first_user}"}, ) assert response_fake_article.status_code == 400, "Expected 400 code." response = await client.delete( f"/articles/{slug_first_article}/favorite", headers={"Authorization": f"Token {token_first_user}"}, ) assert response.status_code == 200, "Expected 200 code." stmt = await db.execute(func.count(Favorite.id)) count_favorites = stmt.scalar() content = response.json() check_content_article(content["article"], data_first_article, data_first_user) assert count_favorites == 0, "The favorite article was not deleted to the database." count_favorites_from_redis = await config.redis_db.hget( "count_favorites", slugify(data_first_article["article"]["title"]) ) assert count_favorites == int( count_favorites_from_redis ), "The article was not delete in redis cache." response_double = await client.delete( f"/articles/{slug_first_article}/favorite", headers={"Authorization": f"Token {token_first_user}"}, ) assert response_double.status_code == 400, "Expected 400 code."
34.679245
88
0.700218
683
5,514
5.415813
0.162518
0.064882
0.043255
0.038929
0.775074
0.736145
0.728305
0.714247
0.714247
0.681536
0
0.013453
0.19115
5,514
158
89
34.898734
0.815919
0
0
0.586777
0
0
0.272373
0.070553
0
0
0
0
0.14876
1
0
false
0
0.07438
0
0.07438
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
8df50a101806c8c06dd3af38944a694b99fda729
5,458
py
Python
tests/test_lfom_validation.py
AguaClara/aide_validation
997b9da8077d1f560d4aa9ccb236b21580da4808
[ "MIT" ]
null
null
null
tests/test_lfom_validation.py
AguaClara/aide_validation
997b9da8077d1f560d4aa9ccb236b21580da4808
[ "MIT" ]
3
2021-01-28T02:30:34.000Z
2021-03-30T16:57:21.000Z
tests/test_lfom_validation.py
AguaClara/aide_validation
997b9da8077d1f560d4aa9ccb236b21580da4808
[ "MIT" ]
null
null
null
import pytest import time from aguaclara.core.units import u from aide_validation.report_writer import ReportWriter from aide_validation.lfom_validation import flow_lfom_vert, check_flow_lfom_vert # set skip_all_tests = True to focus on single test skip_all_tests = False # sleep one second so reports won't have the same name time.sleep(1) writer = ReportWriter() @pytest.fixture def report_writer(): # reset result to its default between tests writer.set_result("Valid") return writer @pytest.mark.skipif(skip_all_tests, reason="Exclude all tests") @pytest.mark.parametrize( "height, d_ori, h_ori, n_oris, expected", [ ( 0.2 * u.m, 0.0157875 * u.m, [ 0.0079375, 0.02467613636363637, 0.04141477272727274, 0.0581534090909091, 0.07489204545454548, 0.09163068181818185, 0.1083693181818182, 0.1251079545454546, 0.14184659090909096, 0.15858522727272734, 0.1753238636363637, 0.19206250000000008, ] * u.m, [ 17.0, 4.0, 6.0, 3.0, 4.0, 3.0, 3.0, 3.0, 3.0, 2.0, 3.0, 1.0, ], 10.081949072000105 * u.L / u.s, ), ( 0.1 * u.m, 0.0157875 * u.m, [ 0.00396875, 0.0125, 0.0207074, 0.02925, 0.0375, 0.045, 0.055, 0.0625, 0.072, 0.08, 0.087, 0.096, ] * u.m, [ 17.0, 4.0, 6.0, 3.0, 4.0, 3.0, 3.0, 3.0, 3.0, 2.0, 3.0, 1.0, ], 7.117078350360663 * u.L / u.s, ), ], ) def test_flow_lfom_vert(height, d_ori, h_ori, n_oris, expected): result = flow_lfom_vert(height, d_ori, h_ori, n_oris) assert result == expected @pytest.mark.skipif(skip_all_tests, reason="Exclude all tests") @pytest.mark.parametrize( "diameter, ori_heights, ori_numbers, cutoff, q_input, expected", [ ( 0.0157875 * u.m, [ 0.0079375, 0.02467613636363637, 0.04141477272727274, 0.0581534090909091, 0.07489204545454548, 0.09163068181818185, 0.1083693181818182, 0.1251079545454546, 0.14184659090909096, 0.15858522727272734, 0.1753238636363637, 0.19206250000000008, ] * u.m, [ 17.0, 4.0, 6.0, 3.0, 4.0, 3.0, 3.0, 3.0, 3.0, 2.0, 3.0, 1.0, ], 0.05, 10 * u.L / u.s, "Valid", ), ( 0.0157875 * u.m, [ 0.00396875, 0.0125, 0.0207074, 0.02925, 0.0375, 0.045, 0.055, 
0.0625, 0.072, 0.08, 0.087, 0.096, ] * u.m, [ 17.0, 4.0, 6.0, 3.0, 4.0, 3.0, 3.0, 3.0, 3.0, 2.0, 3.0, 1.0, ], 0.05, 10 * u.L / u.s, "Invalid: Check Validation Report", ), ( 0.0157875 * u.m, [ 0.00396875, 0.0125, 0.0207074, 0.02925, 0.0375, 0.045, 0.055, 0.0625, 0.072, 0.08, 0.087, 0.096, ] * u.m, [ 17.0, 4.0, 6.0, 3.0, 4.0, 3.0, 3.0, 3.0, 3.0, 2.0, 3.0, 1.0, ], 0.05, 7.15 * u.L / u.s, "Valid", ), ], ) def test_check_flow_lfom_vert( diameter, ori_heights, ori_numbers, cutoff, q_input, expected, report_writer ): check_flow_lfom_vert( diameter, ori_heights, ori_numbers, cutoff, q_input, report_writer ) assert report_writer.get_result() == expected
25.036697
81
0.335654
503
5,458
3.536779
0.218688
0.033727
0.05059
0.033727
0.684654
0.676785
0.676785
0.674536
0.654862
0.623384
0
0.348767
0.576585
5,458
217
82
25.152074
0.42103
0.026383
0
0.73913
0
0
0.035343
0
0
0
0
0
0.009662
1
0.014493
false
0
0.024155
0
0.043478
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
1
0
0
0
0
1
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
5c0f38aaf1ae61f894dd28310419184744d0990c
169
py
Python
src/design patterns/factorypattern/autos/abs_auto.py
sudeep0901/python
7a50af12e72d21ca4cad7f2afa4c6f929552043f
[ "MIT" ]
null
null
null
src/design patterns/factorypattern/autos/abs_auto.py
sudeep0901/python
7a50af12e72d21ca4cad7f2afa4c6f929552043f
[ "MIT" ]
3
2019-12-26T05:13:55.000Z
2020-03-07T06:59:56.000Z
src/design patterns/factorypattern/autos/abs_auto.py
sudeep0901/python
7a50af12e72d21ca4cad7f2afa4c6f929552043f
[ "MIT" ]
null
null
null
import abc class AbsAuto(metaclass=abc.ABCMeta): @abc.abstractmethod def start(self): pass @abc.abstractmethod def end(self): pass
15.363636
37
0.615385
19
169
5.473684
0.631579
0.326923
0.384615
0
0
0
0
0
0
0
0
0
0.295858
169
11
38
15.363636
0.87395
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0.25
false
0.25
0.125
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
5
5c0fb7a9c5f1cf66e16e3e768b612f188b389900
956
py
Python
app/core/migrations/0039_auto_20210728_1956.py
VMatyagin/recipe-rest
46e456330458a71b3315163a68ad82b8e58ca365
[ "MIT" ]
null
null
null
app/core/migrations/0039_auto_20210728_1956.py
VMatyagin/recipe-rest
46e456330458a71b3315163a68ad82b8e58ca365
[ "MIT" ]
1
2022-03-11T14:25:08.000Z
2022-03-11T14:25:08.000Z
app/core/migrations/0039_auto_20210728_1956.py
VMatyagin/recipe-rest
46e456330458a71b3315163a68ad82b8e58ca365
[ "MIT" ]
null
null
null
# Generated by Django 3.1.13 on 2021-07-28 16:56 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ("core", "0038_auto_20210728_1954"), ] operations = [ migrations.RenameField( model_name="ticket", old_name="createdAt", new_name="created_at", ), migrations.RenameField( model_name="ticket", old_name="updatedAt", new_name="updated_at", ), migrations.RenameField( model_name="ticketscan", old_name="createdAt", new_name="created_at", ), migrations.RenameField( model_name="ticketscan", old_name="isFinal", new_name="is_final", ), migrations.RenameField( model_name="ticketscan", old_name="updatedAt", new_name="updated_at", ), ]
24.512821
48
0.536611
88
956
5.568182
0.454545
0.214286
0.265306
0.306122
0.667347
0.667347
0.667347
0.373469
0.253061
0.253061
0
0.05178
0.353556
956
38
49
25.157895
0.7411
0.048117
0
0.71875
1
0
0.176211
0.02533
0
0
0
0
0
1
0
false
0
0.03125
0
0.125
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
5c2c258c658433b73f5742f9235b4860640cdc6b
54
py
Python
src/envs/core/__init__.py
napnel/UoA-thesis-s1260099
dce954b8795c24dc8fbbacb8b7ebdfe101fc01c6
[ "Apache-2.0" ]
null
null
null
src/envs/core/__init__.py
napnel/UoA-thesis-s1260099
dce954b8795c24dc8fbbacb8b7ebdfe101fc01c6
[ "Apache-2.0" ]
null
null
null
src/envs/core/__init__.py
napnel/UoA-thesis-s1260099
dce954b8795c24dc8fbbacb8b7ebdfe101fc01c6
[ "Apache-2.0" ]
null
null
null
from src.envs.core.core import Order, Position, Trade
27
53
0.796296
9
54
4.777778
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.111111
54
1
54
54
0.895833
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
5c3ab9483202b4956cb4919d791feae98b55b247
445
py
Python
iterplus/chainable.py
Zildj1an/iterplus
28d6791f5528497d3669ea919b87b23eb945999f
[ "MIT" ]
null
null
null
iterplus/chainable.py
Zildj1an/iterplus
28d6791f5528497d3669ea919b87b23eb945999f
[ "MIT" ]
null
null
null
iterplus/chainable.py
Zildj1an/iterplus
28d6791f5528497d3669ea919b87b23eb945999f
[ "MIT" ]
2
2018-10-12T01:04:19.000Z
2021-02-24T15:03:54.000Z
# -*- coding: utf-8 -*- def chainable(items): class Chain(object): def __init__(self, items): self._items = items def map(self, a): return Chain(map(a, self._items)) def filter(self, a): return Chain(filter(a, self._items)) def reduce(self, a): return reduce(a, self._items) def val(self): return self._items return Chain(items)
19.347826
48
0.52809
54
445
4.185185
0.314815
0.238938
0.146018
0.172566
0
0
0
0
0
0
0
0.003448
0.348315
445
22
49
20.227273
0.775862
0.047191
0
0
0
0
0
0
0
0
0
0
0
1
0.461538
false
0
0
0.307692
0.923077
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
5c3f794dc2bafecf0f8196b71d812533e1d6b997
4,801
py
Python
tests/test_login.py
octopyth/python-salesforce-api
3f51995f7dc4ae965cb7a594f6f0fb8fcf35ec5d
[ "MIT" ]
25
2019-05-20T06:38:45.000Z
2022-02-22T02:10:37.000Z
tests/test_login.py
octopyth/python-salesforce-api
3f51995f7dc4ae965cb7a594f6f0fb8fcf35ec5d
[ "MIT" ]
19
2019-07-02T10:12:09.000Z
2022-01-09T23:33:21.000Z
tests/test_login.py
octopyth/python-salesforce-api
3f51995f7dc4ae965cb7a594f6f0fb8fcf35ec5d
[ "MIT" ]
16
2019-12-04T20:45:16.000Z
2021-12-17T23:29:29.000Z
import pytest from salesforce_api.const.service import VERB from . import helpers from salesforce_api import login, core, exceptions, const class TestOAuth: def create_connection(self, api_version: str = None): return login.oauth2( client_id=helpers.TEST_CLIENT_KEY, client_secret=helpers.TEST_CLIENT_SECRET, username=helpers.TEST_USER_EMAIL, password=helpers.TEST_PASSWORD, instance_url=helpers.TEST_INSTANCE_URL, api_version=api_version ) def test_authenticate_success(self, requests_mock): requests_mock.register_uri('POST', '/services/oauth2/token', text=helpers.get_data('login/oauth/success.txt'), status_code=200) connection = self.create_connection() assert isinstance(connection, core.Connection) assert connection.access_token == helpers.TEST_ACCESS_TOKEN def test_authenticate_client_id_failure(self, requests_mock): requests_mock.register_uri('POST', '/services/oauth2/token', text=helpers.get_data('login/oauth/invalid_client_id.txt'), status_code=400) with pytest.raises(exceptions.AuthenticationInvalidClientIdError): self.create_connection() def test_authenticate_client_secret_failure(self, requests_mock): requests_mock.register_uri('POST', '/services/oauth2/token', text=helpers.get_data('login/oauth/invalid_client_secret.txt'), status_code=400) with pytest.raises(exceptions.AuthenticationInvalidClientSecretError): self.create_connection() def test_authenticate_invalid_grant_failure(self, requests_mock): requests_mock.register_uri('POST', '/services/oauth2/token', text=helpers.get_data('login/oauth/invalid_grant.txt'), status_code=400) with pytest.raises(exceptions.AuthenticationError): self.create_connection() def test_automatic_api_version(self, requests_mock): requests_mock.register_uri('POST', '/services/oauth2/token', text=helpers.get_data('login/oauth/success.txt'), status_code=200) connection = self.create_connection() assert connection.version == const.API_VERSION def test_manual_api_version(self, requests_mock): expected_api_version = '123.4' 
requests_mock.register_uri('POST', '/services/oauth2/token', text=helpers.get_data('login/oauth/success.txt'), status_code=200) connection = self.create_connection(expected_api_version) assert connection.version == expected_api_version class TestSoap(helpers.BaseTest): def create_connection(self, api_version: str = None): return login.soap( instance_url=helpers.TEST_INSTANCE_URL, username=helpers.TEST_USER_EMAIL, password=helpers.TEST_PASSWORD, security_token=helpers.TEST_SECURITY_TOKEN, api_version=api_version ) def test_authenticate_success(self, requests_mock): self.register_uri(requests_mock, VERB.POST, '/services/Soap/c/{version}', text=helpers.get_data('login/soap/success.txt')) connection = self.create_connection() assert isinstance(connection, core.Connection) assert connection.access_token == helpers.TEST_ACCESS_TOKEN def test_authenticate_alt_password_success(self, requests_mock): self.register_uri(requests_mock, VERB.POST, '/services/Soap/c/{version}', text=helpers.get_data('login/soap/success.txt')) connection = login.soap( instance_url=helpers.TEST_INSTANCE_URL, username=helpers.TEST_USER_EMAIL, password_and_security_token=helpers.TEST_PASSWORD ) assert isinstance(connection, core.Connection) assert connection.access_token == helpers.TEST_ACCESS_TOKEN def test_authenticate_missing_token_failure(self, requests_mock): self.register_uri(requests_mock, VERB.POST, '/services/Soap/c/{version}', text=helpers.get_data('login/soap/missing_token.txt')) with pytest.raises(exceptions.AuthenticationMissingTokenError): self.create_connection() def test_automatic_api_version(self, requests_mock): self.register_uri(requests_mock, VERB.POST, '/services/Soap/c/{version}', text=helpers.get_data('login/soap/success.txt')) assert self.create_connection().version == const.API_VERSION def test_manual_api_version(self, requests_mock): expected_api_version = '123.4' self.register_uri(requests_mock, VERB.POST, f'/services/Soap/c/{expected_api_version}', 
text=helpers.get_data('login/soap/success.txt', {'version': expected_api_version})) assert self.create_connection(expected_api_version).version == expected_api_version
52.184783
180
0.718184
569
4,801
5.762742
0.137083
0.080512
0.053675
0.060384
0.79445
0.79445
0.743519
0.732845
0.694419
0.658432
0
0.008406
0.182254
4,801
91
181
52.758242
0.826796
0
0
0.527027
0
0
0.127389
0.118684
0
0
0
0
0.135135
1
0.175676
false
0.054054
0.054054
0.027027
0.283784
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
5
30d1e383317de3083a48c5c1925a5d435d7cb3aa
101
py
Python
app/talks/__init__.py
bionikspoon/Flask-Talks
aa6ee881c5b1d7f1b367aea477adb58186370b52
[ "MIT" ]
null
null
null
app/talks/__init__.py
bionikspoon/Flask-Talks
aa6ee881c5b1d7f1b367aea477adb58186370b52
[ "MIT" ]
null
null
null
app/talks/__init__.py
bionikspoon/Flask-Talks
aa6ee881c5b1d7f1b367aea477adb58186370b52
[ "MIT" ]
null
null
null
# coding=utf-8 from flask import Blueprint talks = Blueprint('talks', __name__) from . import routes
20.2
36
0.762376
14
101
5.214286
0.714286
0.383562
0
0
0
0
0
0
0
0
0
0.011494
0.138614
101
5
37
20.2
0.827586
0.118812
0
0
0
0
0.056818
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0.666667
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
1
0
5
30e9b9f614114d6154c430fb9122cb543d389840
193
py
Python
src/users/admin.py
ezadorianschool/CharityCompass-main
1a6fba0647309202d164c0dac00bbccb9998d5b0
[ "MIT" ]
null
null
null
src/users/admin.py
ezadorianschool/CharityCompass-main
1a6fba0647309202d164c0dac00bbccb9998d5b0
[ "MIT" ]
null
null
null
src/users/admin.py
ezadorianschool/CharityCompass-main
1a6fba0647309202d164c0dac00bbccb9998d5b0
[ "MIT" ]
null
null
null
from django.contrib import admin from .models import Charity, User, Volunteer # Register your models here. admin.site.register(User) admin.site.register(Volunteer) admin.site.register(Charity)
27.571429
44
0.813472
27
193
5.814815
0.481481
0.171975
0.324841
0
0
0
0
0
0
0
0
0
0.093264
193
7
45
27.571429
0.897143
0.134715
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.4
0
0.4
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
a518776bba5d2581b39393028d169d956de7a306
57
py
Python
app/extensions/cors.py
vittorduartte/busca-jogo-dos-15
c0a5ba9eba4571a4893fbdfd6a4fb94c49d7b0db
[ "MIT" ]
1
2021-08-31T12:26:36.000Z
2021-08-31T12:26:36.000Z
app/extensions/cors.py
vittorduartte/busca-jogo-dos-15
c0a5ba9eba4571a4893fbdfd6a4fb94c49d7b0db
[ "MIT" ]
1
2021-09-10T12:06:54.000Z
2021-09-10T12:06:54.000Z
app/extensions/cors.py
vittorduartte/busca-jogo-dos-15
c0a5ba9eba4571a4893fbdfd6a4fb94c49d7b0db
[ "MIT" ]
1
2021-09-10T02:13:28.000Z
2021-09-10T02:13:28.000Z
from flask_cors import CORS def init(app): CORS(app)
14.25
27
0.719298
10
57
4
0.7
0
0
0
0
0
0
0
0
0
0
0
0.192982
57
4
28
14.25
0.869565
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
5
eb65b967b199c821b47fe5f6167621601ad4a395
71
py
Python
mercs/visuals/__init__.py
systemallica/mercs
39e999620ab989abb29310488dcd30354d029490
[ "MIT" ]
11
2020-01-28T16:15:53.000Z
2021-05-20T08:05:42.000Z
mercs/visuals/__init__.py
systemallica/mercs
39e999620ab989abb29310488dcd30354d029490
[ "MIT" ]
null
null
null
mercs/visuals/__init__.py
systemallica/mercs
39e999620ab989abb29310488dcd30354d029490
[ "MIT" ]
4
2020-02-06T09:02:28.000Z
2022-02-14T09:42:04.000Z
from .diagrams import show_diagram, diagram_to_dotstring, save_diagram
35.5
70
0.873239
10
71
5.8
0.8
0
0
0
0
0
0
0
0
0
0
0
0.084507
71
1
71
71
0.892308
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
ebad3f63ab96ba7888966de82f71a1a402044f92
52
py
Python
Week1_PyThonBasic/List/Ex1.py
minhvip2001/pythonproject
0ad9e70203fae2cd038872a8d1a71c0bc9416cf3
[ "MIT" ]
null
null
null
Week1_PyThonBasic/List/Ex1.py
minhvip2001/pythonproject
0ad9e70203fae2cd038872a8d1a71c0bc9416cf3
[ "MIT" ]
null
null
null
Week1_PyThonBasic/List/Ex1.py
minhvip2001/pythonproject
0ad9e70203fae2cd038872a8d1a71c0bc9416cf3
[ "MIT" ]
null
null
null
aLsit = [100, 200, 300, 400, 500] print(aLsit[::-1])
26
33
0.596154
9
52
3.444444
0.888889
0
0
0
0
0
0
0
0
0
0
0.355556
0.134615
52
2
34
26
0.333333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
5
ebc9b4fafd088edacc24c73ad458b7924b68f340
727
py
Python
cryptofeed_werks/lib/__init__.py
globophobe/crypto-tick-data
7ec5d1e136b9bc27ae936f55cf6ab7fe5e37bda4
[ "MIT" ]
null
null
null
cryptofeed_werks/lib/__init__.py
globophobe/crypto-tick-data
7ec5d1e136b9bc27ae936f55cf6ab7fe5e37bda4
[ "MIT" ]
null
null
null
cryptofeed_werks/lib/__init__.py
globophobe/crypto-tick-data
7ec5d1e136b9bc27ae936f55cf6ab7fe5e37bda4
[ "MIT" ]
null
null
null
from .aggregate import aggregate_rows from .calendar import ( get_max_time, get_min_time, iter_timeframe, parse_datetime, parse_period_from_to, ) from .dataframe import ( assert_type_decimal, calculate_notional, calculate_tick_rule, set_dtypes, set_type_decimal, strip_nanoseconds, utc_timestamp, ) from .downloader import gzip_downloader __all__ = [ "aggregate_rows", "get_max_time", "get_min_time", "iter_timeframe", "parse_datetime", "parse_period_from_to", "assert_type_decimal", "calculate_notional", "calculate_tick_rule", "set_dtypes", "set_type_decimal", "strip_nanoseconds", "utc_timestamp", "gzip_downloader", ]
20.194444
39
0.700138
83
727
5.578313
0.385542
0.095032
0.043197
0.056156
0.712743
0.712743
0.712743
0.712743
0.712743
0.712743
0
0
0.210454
727
35
40
20.771429
0.80662
0
0
0
0
0
0.292985
0
0
0
0
0
0.058824
1
0
false
0
0.117647
0
0.117647
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
ebe82628287c8c7dbb94b7ec99bdf571380c0752
238
py
Python
train_booking/bookings/apps.py
triump0870/train-booking
cf4db335011cc74aabc4f2052c14b98f31418d85
[ "MIT" ]
null
null
null
train_booking/bookings/apps.py
triump0870/train-booking
cf4db335011cc74aabc4f2052c14b98f31418d85
[ "MIT" ]
8
2022-02-01T17:27:41.000Z
2022-03-31T17:32:31.000Z
train_booking/bookings/apps.py
triump0870/train-booking
cf4db335011cc74aabc4f2052c14b98f31418d85
[ "MIT" ]
null
null
null
from django.apps import AppConfig class BookingsConfig(AppConfig): default_auto_field = 'django.db.models.BigAutoField' name = 'train_booking.bookings' def ready(self): import train_booking.bookings.signals # noqa
23.8
56
0.739496
28
238
6.142857
0.785714
0.139535
0.232558
0
0
0
0
0
0
0
0
0
0.176471
238
9
57
26.444444
0.877551
0.016807
0
0
0
0
0.219828
0.219828
0
0
0
0
0
1
0.166667
false
0
0.333333
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
ccd8b9c16054c9a6c4341cdac40ecbc7edabb355
8,077
py
Python
test/unit/web/base/test_HookPluginManager.py
tdans1/Use-Galaxy
e9f05cb1b11db20a729ac73520f886ee619c6b90
[ "CC-BY-3.0" ]
4
2018-10-29T18:34:38.000Z
2021-09-29T23:30:42.000Z
test/unit/web/base/test_HookPluginManager.py
tdans1/Use-Galaxy
e9f05cb1b11db20a729ac73520f886ee619c6b90
[ "CC-BY-3.0" ]
1
2019-02-04T16:21:27.000Z
2019-02-04T16:45:17.000Z
test/unit/web/base/test_HookPluginManager.py
chambm/galaxy
fd1926767996a161cd2fc8bd184e6835fd688765
[ "CC-BY-3.0" ]
3
2020-02-12T15:22:24.000Z
2021-08-19T10:27:39.000Z
""" """ import logging import os import sys import types import unittest from galaxy.web.base.pluginframework import HookPluginManager unit_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) sys.path.insert(1, unit_root) from unittest_utils import galaxy_mock log = logging.getLogger(__name__) # ----------------------------------------------------------------------------- globals loading_point = HookPluginManager.loading_point_filename contents1 = """ import os def bler( x, y=3 ): return ( x, y ) """ contents2 = """ raise Exception( 'Bler' ) """ contents3 = """ import contents1 def blah( w ): return tuple( [ w ] + list( contents1.bler( 2 ) ) ) """ contents4 = """ from galaxy import util def blah( s ): return util.listify( s ) """ contents5 = """ def hook_blah( s ): return s.title() def hook_filter_test( s ): s += ' one' return s """ contents6 = """ def hook_blah( s ): return s.upper() def hook_filter_test( s ): s += ' two' return s """ contents7 = """ def hook_blah( s ): raise Exception( 'bler' ) def hook_filter_test( s ): raise Exception( 'bler' ) """ # ----------------------------------------------------------------------------- class HookPluginManager_TestCase(unittest.TestCase): def test_loading_point(self): """should attempt load on dirs containing loading_point file""" mock_app_dir = galaxy_mock.MockDir({ 'plugins': { 'plugin1': { loading_point: contents1 }, 'not_a_plugin': 'blerbler' } }) mock_app = galaxy_mock.MockApp(root=mock_app_dir.root_path) plugin_mgr = HookPluginManager(mock_app, directories_setting='plugins') app_path = mock_app_dir.root_path expected_plugins_path = os.path.join(app_path, 'plugins') self.assertEqual(plugin_mgr.directories, [expected_plugins_path]) self.assertEqual(list(plugin_mgr.plugins.keys()), ['plugin1']) plugin = plugin_mgr.plugins['plugin1'] self.assertEqual(plugin.name, 'plugin1') self.assertEqual(plugin.path, os.path.join(expected_plugins_path, 'plugin1')) 
self.assertIsInstance(plugin.module, types.ModuleType) self.assertEqual(plugin.module.bler(2), (2, 3)) mock_app_dir.remove() def test_bad_loading_points(self): """should NOT attempt load on dirs NOT containing loading_point file""" mock_app_dir = galaxy_mock.MockDir({ 'plugins': { 'plugin1': {}, 'plugin2': { 'plogin.py': 'wot' } } }) mock_app = galaxy_mock.MockApp(root=mock_app_dir.root_path) plugin_mgr = HookPluginManager(mock_app, directories_setting='plugins') app_path = mock_app_dir.root_path expected_plugins_path = os.path.join(app_path, 'plugins') self.assertEqual(plugin_mgr.directories, [expected_plugins_path]) self.assertEqual(list(plugin_mgr.plugins.keys()), []) mock_app_dir.remove() def test_bad_import(self): """should error gracefully (skip) on bad import""" mock_app_dir = galaxy_mock.MockDir({ 'plugins': { 'plugin1': { loading_point: contents2 } } }) mock_app = galaxy_mock.MockApp(root=mock_app_dir.root_path) plugin_mgr = HookPluginManager(mock_app, directories_setting='plugins') app_path = mock_app_dir.root_path expected_plugins_path = os.path.join(app_path, 'plugins') self.assertEqual(plugin_mgr.directories, [expected_plugins_path]) self.assertEqual(list(plugin_mgr.plugins.keys()), []) mock_app_dir.remove() def test_import_w_rel_import(self): """should allow loading_point to rel. 
import other modules""" mock_app_dir = galaxy_mock.MockDir({ 'plugins': { 'plugin1': { 'contents1.py': contents1, loading_point: contents3 } } }) mock_app = galaxy_mock.MockApp(root=mock_app_dir.root_path) plugin_mgr = HookPluginManager(mock_app, directories_setting='plugins', skip_bad_plugins=False) app_path = mock_app_dir.root_path expected_plugins_path = os.path.join(app_path, 'plugins') self.assertEqual(plugin_mgr.directories, [expected_plugins_path]) self.assertEqual(list(plugin_mgr.plugins.keys()), ['plugin1']) plugin = plugin_mgr.plugins['plugin1'] self.assertEqual(plugin.name, 'plugin1') self.assertEqual(plugin.path, os.path.join(expected_plugins_path, 'plugin1')) self.assertIsInstance(plugin.module, types.ModuleType) self.assertEqual(plugin.module.blah(1), (1, 2, 3)) mock_app_dir.remove() def test_import_w_galaxy_import(self): """should allow loading_point to rel. import GALAXY modules""" mock_app_dir = galaxy_mock.MockDir({ 'plugins': { 'plugin1': { loading_point: contents4 } } }) mock_app = galaxy_mock.MockApp(root=mock_app_dir.root_path) plugin_mgr = HookPluginManager(mock_app, directories_setting='plugins', skip_bad_plugins=False) app_path = mock_app_dir.root_path expected_plugins_path = os.path.join(app_path, 'plugins') self.assertEqual(plugin_mgr.directories, [expected_plugins_path]) self.assertEqual(list(plugin_mgr.plugins.keys()), ['plugin1']) plugin = plugin_mgr.plugins['plugin1'] self.assertEqual(plugin.name, 'plugin1') self.assertEqual(plugin.path, os.path.join(expected_plugins_path, 'plugin1')) self.assertIsInstance(plugin.module, types.ModuleType) self.assertEqual(plugin.module.blah('one,two'), ['one', 'two']) mock_app_dir.remove() def test_run_hooks(self): """should run hooks of loaded plugins""" mock_app_dir = galaxy_mock.MockDir({ 'plugins': { 'plugin1': { loading_point: contents5 }, 'plugin2': { loading_point: contents6 } } }) mock_app = galaxy_mock.MockApp(root=mock_app_dir.root_path) plugin_mgr = HookPluginManager(mock_app, 
directories_setting='plugins', skip_bad_plugins=False) self.assertEqual(sorted(plugin_mgr.plugins.keys()), ['plugin1', 'plugin2']) return_val_dict = plugin_mgr.run_hook('blah', 'one two check') self.assertEqual(return_val_dict, {'plugin1': 'One Two Check', 'plugin2': 'ONE TWO CHECK'}) result = plugin_mgr.filter_hook('filter_test', 'check') self.assertEqual(result, 'check one two') mock_app_dir.remove() def test_hook_errs(self): """should fail gracefully if hook fails (and continue with other plugins)""" mock_app_dir = galaxy_mock.MockDir({ 'plugins': { 'plugin1': { loading_point: contents5 }, 'plugin2': { loading_point: contents6 }, 'plugin3': { loading_point: contents7 } } }) mock_app = galaxy_mock.MockApp(root=mock_app_dir.root_path) plugin_mgr = HookPluginManager(mock_app, directories_setting='plugins', skip_bad_plugins=False) self.assertEqual(sorted(plugin_mgr.plugins.keys()), ['plugin1', 'plugin2', 'plugin3']) return_val_dict = plugin_mgr.run_hook('blah', 'one two check') self.assertEqual(return_val_dict, {'plugin1': 'One Two Check', 'plugin2': 'ONE TWO CHECK'}) result = plugin_mgr.filter_hook('filter_test', 'check') self.assertEqual(result, 'check one two') mock_app_dir.remove() if __name__ == '__main__': unittest.main()
32.179283
103
0.604185
892
8,077
5.190583
0.137892
0.060475
0.056156
0.036285
0.769546
0.765659
0.749244
0.747948
0.72635
0.695464
0
0.010403
0.262102
8,077
250
104
32.308
0.766443
0.068342
0
0.612903
0
0
0.148643
0
0
0
0
0
0.150538
1
0.037634
false
0
0.069892
0
0.150538
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
ccec073e3ffa5e2ac7cf9d2edbf2710c47acfb04
84
py
Python
vnpy/api/sgit/__init__.py
funrunskypalace/vnpy
2d87aede685fa46278d8d3392432cc127b797926
[ "MIT" ]
19,529
2015-03-02T12:17:35.000Z
2022-03-31T17:18:27.000Z
vnpy/api/sgit/__init__.py
funrunskypalace/vnpy
2d87aede685fa46278d8d3392432cc127b797926
[ "MIT" ]
2,186
2015-03-04T23:16:33.000Z
2022-03-31T03:44:01.000Z
vnpy/api/sgit/__init__.py
funrunskypalace/vnpy
2d87aede685fa46278d8d3392432cc127b797926
[ "MIT" ]
8,276
2015-03-02T05:21:04.000Z
2022-03-31T13:13:13.000Z
from .vnsgitmd import MdApi from .vnsgittd import TdApi from .sgit_constant import *
28
28
0.821429
12
84
5.666667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.130952
84
3
28
28
0.931507
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
690356cf1af9ce9f13c028e51e477ddfd90c259e
93
py
Python
CAT/model/__init__.py
nnnyt/CAT
471f573dd51b9cc09339ea73241ad9ac9e5d0d8f
[ "MIT" ]
6
2021-02-15T13:10:45.000Z
2022-03-08T12:58:49.000Z
CAT/model/__init__.py
nnnyt/CAT
471f573dd51b9cc09339ea73241ad9ac9e5d0d8f
[ "MIT" ]
null
null
null
CAT/model/__init__.py
nnnyt/CAT
471f573dd51b9cc09339ea73241ad9ac9e5d0d8f
[ "MIT" ]
4
2021-01-11T15:19:41.000Z
2022-03-21T06:02:55.000Z
from .abstract_model import AbstractModel from .IRT import IRTModel from .NCD import NCDModel
31
41
0.849462
13
93
6
0.692308
0
0
0
0
0
0
0
0
0
0
0
0.11828
93
3
42
31
0.95122
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
15cdfc22eb9f42824e54b116247c9da4527ad682
35
py
Python
test/test.py
lalit10/CSC510-Group19
6e456ef658b714044e3ed189dc043fa62f0172bb
[ "MIT" ]
null
null
null
test/test.py
lalit10/CSC510-Group19
6e456ef658b714044e3ed189dc043fa62f0172bb
[ "MIT" ]
null
null
null
test/test.py
lalit10/CSC510-Group19
6e456ef658b714044e3ed189dc043fa62f0172bb
[ "MIT" ]
null
null
null
#This file will contain test cases.
35
35
0.8
6
35
4.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.142857
35
1
35
35
0.933333
0.971429
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
15ddb606d6f44a6d2c8091f50bc23f5f33ae48c8
364
py
Python
test_functions.py
nathancolmenero/Cogs18Python
d8419daa4432b9f5d8f7aaee71ac6ad4bab77954
[ "MIT" ]
1
2019-01-15T05:16:37.000Z
2019-01-15T05:16:37.000Z
test_functions.py
nathancolmenero/Cogs18Python
d8419daa4432b9f5d8f7aaee71ac6ad4bab77954
[ "MIT" ]
null
null
null
test_functions.py
nathancolmenero/Cogs18Python
d8419daa4432b9f5d8f7aaee71ac6ad4bab77954
[ "MIT" ]
null
null
null
import pytest from my_module.functions import select_language def test_select_language_english(): assert select_language('english') == 'english' def test_select_language_invalid(): with pytest.raises(ValueError) as e: select_language('menglish') def test_select_languagee_swedish(): assert select_language('swedish') == 'swedish'
30.333333
54
0.744505
43
364
5.976744
0.488372
0.326848
0.151751
0.163424
0
0
0
0
0
0
0
0
0.162088
364
12
54
30.333333
0.842623
0
0
0
0
0
0.09863
0
0
0
0
0
0.222222
1
0.333333
true
0
0.222222
0
0.555556
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
1
0
0
5
c62e8a464dcd85602865719fc976aa74876a0089
41
py
Python
vendor/HoeffdingTree/__init__.py
writ3it/HD_Netezza_in-database_prediction
9a89d4618e93ff2cf3b1c16660522c56aae27373
[ "MIT" ]
null
null
null
vendor/HoeffdingTree/__init__.py
writ3it/HD_Netezza_in-database_prediction
9a89d4618e93ff2cf3b1c16660522c56aae27373
[ "MIT" ]
null
null
null
vendor/HoeffdingTree/__init__.py
writ3it/HD_Netezza_in-database_prediction
9a89d4618e93ff2cf3b1c16660522c56aae27373
[ "MIT" ]
1
2019-09-03T03:11:09.000Z
2019-09-03T03:11:09.000Z
#!/usr/bin/env python2 #encoding: UTF-8
10.25
22
0.682927
7
41
4
1
0
0
0
0
0
0
0
0
0
0
0.055556
0.121951
41
3
23
13.666667
0.722222
0.878049
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
d6c225b35a69ee36c890db7b58abcf8e0c96d808
238
py
Python
{{cookiecutter.project_slug}}/tests/test_{{cookiecutter.project_slug}}.py
MicaelJarniac/cookiecutter-python-project
0ee297a56416535db4766d03ba14dc045da19ce3
[ "MIT" ]
null
null
null
{{cookiecutter.project_slug}}/tests/test_{{cookiecutter.project_slug}}.py
MicaelJarniac/cookiecutter-python-project
0ee297a56416535db4766d03ba14dc045da19ce3
[ "MIT" ]
68
2021-10-07T16:56:41.000Z
2022-03-21T16:23:37.000Z
{{cookiecutter.project_slug}}/tests/test_{{cookiecutter.project_slug}}.py
MicaelJarniac/cookiecutter-python-project
0ee297a56416535db4766d03ba14dc045da19ce3
[ "MIT" ]
null
null
null
from {{ cookiecutter.project_slug }} import make_greeting def test_make_greeting(): assert ( make_greeting("{{ cookiecutter.full_name }}") == "Hello, {{ cookiecutter.full_name }}. Welcome to your new project!" )
26.444444
78
0.659664
26
238
5.769231
0.653846
0.24
0.266667
0
0
0
0
0
0
0
0
0
0.210084
238
8
79
29.75
0.797872
0
0
0
0
0
0.390756
0.184874
0
0
0
0
0.166667
0
null
null
0
0.166667
null
null
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
5
d6d6dfbf9f18bbaca9d18543e10740addcd58a8d
159
py
Python
listeners/mouse_click_event_listener.py
vilelamarcospaulo/t-py
a73d376d50a9fb2b5286a5347cb42e18284f16c3
[ "MIT" ]
1
2021-07-20T21:15:27.000Z
2021-07-20T21:15:27.000Z
listeners/mouse_click_event_listener.py
vilelamarcospaulo/t-py
a73d376d50a9fb2b5286a5347cb42e18284f16c3
[ "MIT" ]
null
null
null
listeners/mouse_click_event_listener.py
vilelamarcospaulo/t-py
a73d376d50a9fb2b5286a5347cb42e18284f16c3
[ "MIT" ]
1
2021-07-20T03:41:06.000Z
2021-07-20T03:41:06.000Z
from .event_listener import EventListener class MouseClickEventListener(EventListener): def event_decode(self, event): return event.buttonNumber()
31.8
45
0.786164
16
159
7.6875
0.75
0
0
0
0
0
0
0
0
0
0
0
0.144654
159
5
46
31.8
0.904412
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.25
0.25
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
d6e15a3eeaa4ecbafd2ab40d9233e9a40d6dddf4
2,655
py
Python
Tests/test_math.py
tetsuo-cpp/Pyjion
9b2576d29799780abe8326b19ac5c62310059d7a
[ "MIT" ]
null
null
null
Tests/test_math.py
tetsuo-cpp/Pyjion
9b2576d29799780abe8326b19ac5c62310059d7a
[ "MIT" ]
null
null
null
Tests/test_math.py
tetsuo-cpp/Pyjion
9b2576d29799780abe8326b19ac5c62310059d7a
[ "MIT" ]
null
null
null
import pyjion import unittest import gc class MathTestCase(unittest.TestCase): def setUp(self) -> None: pyjion.enable() def tearDown(self) -> None: pyjion.disable() gc.collect() def test_floats(self): a = 2.0 b = 3.0 c = 4.0 c += a * b self.assertEqual(c, 10.0) c /= a + b self.assertEqual(c, 2.0) c %= a % b self.assertEqual(c, 0.0) def test_ints(self): a = 2 b = 3 c = 4 c += a * b self.assertEqual(c, 10) c /= a + b self.assertEqual(c, 2.0) c //= a + b self.assertEqual(c, 0) c = 4 c %= a % b self.assertEqual(c, 0) def test_mixed(self): a = 2 b = 3.0 c = 4 c += a * b self.assertEqual(c, 10.0) c /= a + b self.assertEqual(c, 2.0) c = 4 c %= a % b self.assertEqual(c, 0.0) def test_mixed2(self): a = 2.0 b = 3 c = 4 c += a * b self.assertEqual(c, 10.0) c /= a + b self.assertEqual(c, 2.0) c = 4 c %= a % b self.assertEqual(c, 0.0) def test_mixed3(self): a = 2 b = 3 c = 4.0 c += a * b self.assertEqual(c, 10.0) c /= a + b self.assertEqual(c, 2.0) c = 4.0 c %= a % b self.assertEqual(c, 0.0) def test_mixed4(self): a = 2 b = 3.0 c = 4.0 c += a * b self.assertEqual(c, 10.0) c /= a + b self.assertEqual(c, 2.0) c = 4.0 c %= a % b self.assertEqual(c, 0.0) i = -10 x = 1234567890.0 * (10.0 ** i) self.assertEqual(x, 0.12345678900000001) i = 0 x = 1234567890.0 * (10.0 ** i) self.assertEqual(x, 1234567890.0) i = 10 x = 1234567890.0 * (10.0 ** i) self.assertEqual(x, 1.23456789e+19) def test_mixed5(self): a = 2.0 b = 3 c = 4.0 c += a * b self.assertEqual(c, 10.0) c /= a + b self.assertEqual(c, 2.0) c %= a + b self.assertEqual(c, 2) def test_mixed6(self): a = 2.0 b = 3.0 c = 4 c += a * b self.assertEqual(c, 10.0) c /= a + b self.assertEqual(c, 2.0) c %= a % b self.assertEqual(c, 0.0) def test_modulo(self): a = 1 b = 2 c = "boo %s" x = c % (a + b) self.assertEqual(x, "boo 3") if __name__ == "__main__": unittest.main()
20.423077
48
0.415819
389
2,655
2.794344
0.118252
0.400184
0.071757
0.167433
0.725851
0.709292
0.709292
0.698252
0.673413
0.631095
0
0.131237
0.448964
2,655
129
49
20.581395
0.611757
0
0
0.690265
0
0
0.007156
0
0
0
0
0
0.256637
1
0.097345
false
0
0.026549
0
0.132743
0
0
0
0
null
1
0
1
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
ba8d477f9b895d7e038152af7a5d7a6cb6a8db32
2,346
py
Python
restic/__init__.py
mtlynch/pyrestic
328aff6700751854c2882bb042271f67a5b555df
[ "MIT" ]
12
2021-04-01T07:29:31.000Z
2022-03-25T03:56:40.000Z
restic/__init__.py
mtlynch/pyrestic
328aff6700751854c2882bb042271f67a5b555df
[ "MIT" ]
1
2021-04-02T22:21:05.000Z
2021-04-02T22:21:05.000Z
restic/__init__.py
mtlynch/pyrestic
328aff6700751854c2882bb042271f67a5b555df
[ "MIT" ]
2
2021-12-03T12:25:11.000Z
2022-03-25T21:48:25.000Z
from restic.internal import backup as internal_backup from restic.internal import check as internal_check from restic.internal import copy as internal_copy from restic.internal import forget as internal_forget from restic.internal import generate as internal_generate from restic.internal import init as internal_init from restic.internal import restore as internal_restore from restic.internal import self_update as internal_self_update from restic.internal import snapshots as internal_snapshots from restic.internal import stats as internal_stats from restic.internal import unlock as internal_unlock from restic.internal import version as internal_version # Ignore warnings about naming of globals. # pylint: disable=C0103 binary_path = 'restic' # Global flags # Ignore warnings about naming of globals. # pylint: disable=C0103 repository = None password_file = None def backup(*args, **kwargs): return internal_backup.run(_make_base_command(), *args, **kwargs) def check(*args, **kwargs): return internal_check.run(_make_base_command(), *args, **kwargs) def copy(*args, **kwargs): return internal_copy.run(_make_base_command(), *args, **kwargs) def forget(*args, **kwargs): return internal_forget.run(_make_base_command(), *args, **kwargs) def generate(*args, **kwargs): return internal_generate.run(_make_base_command(), *args, **kwargs) def init(*args, **kwargs): return internal_init.run(_make_base_command(), *args, **kwargs) def restore(*args, **kwargs): return internal_restore.run(_make_base_command(), *args, **kwargs) def self_update(): return internal_self_update.run(_make_base_command()) def snapshots(*args, **kwargs): return internal_snapshots.run(_make_base_command(), *args, **kwargs) def stats(*args, **kwargs): return internal_stats.run(_make_base_command(), *args, **kwargs) def unlock(): return internal_unlock.run(_make_base_command()) def version(): return internal_version.run(_make_base_command()) def _make_base_command(): base_command = [binary_path] # Always add the 
JSON flag so we get back results in JSON. base_command.extend(['--json']) if repository: base_command.extend(['--repo', repository]) if password_file: base_command.extend(['--password-file', password_file]) return base_command
27.27907
72
0.755754
316
2,346
5.367089
0.18038
0.106132
0.114976
0.169811
0.262972
0.225825
0.225825
0.061321
0.061321
0
0
0.003964
0.139812
2,346
85
73
27.6
0.836472
0.08312
0
0
0
0
0.015392
0
0
0
0
0
0
1
0.276596
false
0.06383
0.255319
0.255319
0.808511
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
5
baad17c9e38f67471d1c1afd1a0c33e68a934af1
36,128
py
Python
tests/unit/v1/test_transaction.py
anna-hope/python-firestore
aa7594c93b2d7480ac4283a1d1abafe76aa7a353
[ "Apache-2.0" ]
140
2020-02-16T19:35:47.000Z
2022-03-27T23:58:03.000Z
tests/unit/v1/test_transaction.py
anna-hope/python-firestore
aa7594c93b2d7480ac4283a1d1abafe76aa7a353
[ "Apache-2.0" ]
311
2020-01-31T23:45:43.000Z
2022-03-22T14:41:34.000Z
tests/unit/v1/test_transaction.py
anna-hope/python-firestore
aa7594c93b2d7480ac4283a1d1abafe76aa7a353
[ "Apache-2.0" ]
52
2020-01-31T21:40:11.000Z
2022-02-25T18:32:51.000Z
# Copyright 2017 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import mock import pytest def _make_transaction(*args, **kwargs): from google.cloud.firestore_v1.transaction import Transaction return Transaction(*args, **kwargs) def test_transaction_constructor_defaults(): from google.cloud.firestore_v1.transaction import MAX_ATTEMPTS transaction = _make_transaction(mock.sentinel.client) assert transaction._client is mock.sentinel.client assert transaction._write_pbs == [] assert transaction._max_attempts == MAX_ATTEMPTS assert not transaction._read_only assert transaction._id is None def test_transaction_constructor_explicit(): transaction = _make_transaction( mock.sentinel.client, max_attempts=10, read_only=True ) assert transaction._client is mock.sentinel.client assert transaction._write_pbs == [] assert transaction._max_attempts == 10 assert transaction._read_only assert transaction._id is None def test_transaction__add_write_pbs_failure(): from google.cloud.firestore_v1.base_transaction import _WRITE_READ_ONLY batch = _make_transaction(mock.sentinel.client, read_only=True) assert batch._write_pbs == [] with pytest.raises(ValueError) as exc_info: batch._add_write_pbs([mock.sentinel.write]) assert exc_info.value.args == (_WRITE_READ_ONLY,) assert batch._write_pbs == [] def test_transaction__add_write_pbs(): batch = _make_transaction(mock.sentinel.client) assert batch._write_pbs == [] batch._add_write_pbs([mock.sentinel.write]) assert 
batch._write_pbs == [mock.sentinel.write] def test_transaction__clean_up(): transaction = _make_transaction(mock.sentinel.client) transaction._write_pbs.extend([mock.sentinel.write_pb1, mock.sentinel.write]) transaction._id = b"not-this-time-my-friend" ret_val = transaction._clean_up() assert ret_val is None assert transaction._write_pbs == [] assert transaction._id is None def test_transaction__begin(): from google.cloud.firestore_v1.services.firestore import client as firestore_client from google.cloud.firestore_v1.types import firestore # Create a minimal fake GAPIC with a dummy result. firestore_api = mock.create_autospec( firestore_client.FirestoreClient, instance=True ) txn_id = b"to-begin" response = firestore.BeginTransactionResponse(transaction=txn_id) firestore_api.begin_transaction.return_value = response # Attach the fake GAPIC to a real client. client = _make_client() client._firestore_api_internal = firestore_api # Actually make a transaction and ``begin()`` it. transaction = _make_transaction(client) assert transaction._id is None ret_val = transaction._begin() assert ret_val is None assert transaction._id == txn_id # Verify the called mock. firestore_api.begin_transaction.assert_called_once_with( request={"database": client._database_string, "options": None}, metadata=client._rpc_metadata, ) def test_transaction__begin_failure(): from google.cloud.firestore_v1.base_transaction import _CANT_BEGIN client = _make_client() transaction = _make_transaction(client) transaction._id = b"not-none" with pytest.raises(ValueError) as exc_info: transaction._begin() err_msg = _CANT_BEGIN.format(transaction._id) assert exc_info.value.args == (err_msg,) def test_transaction__rollback(): from google.protobuf import empty_pb2 from google.cloud.firestore_v1.services.firestore import client as firestore_client # Create a minimal fake GAPIC with a dummy result. 
firestore_api = mock.create_autospec( firestore_client.FirestoreClient, instance=True ) firestore_api.rollback.return_value = empty_pb2.Empty() # Attach the fake GAPIC to a real client. client = _make_client() client._firestore_api_internal = firestore_api # Actually make a transaction and roll it back. transaction = _make_transaction(client) txn_id = b"to-be-r\x00lled" transaction._id = txn_id ret_val = transaction._rollback() assert ret_val is None assert transaction._id is None # Verify the called mock. firestore_api.rollback.assert_called_once_with( request={"database": client._database_string, "transaction": txn_id}, metadata=client._rpc_metadata, ) def test_transaction__rollback_not_allowed(): from google.cloud.firestore_v1.base_transaction import _CANT_ROLLBACK client = _make_client() transaction = _make_transaction(client) assert transaction._id is None with pytest.raises(ValueError) as exc_info: transaction._rollback() assert exc_info.value.args == (_CANT_ROLLBACK,) def test_transaction__rollback_failure(): from google.api_core import exceptions from google.cloud.firestore_v1.services.firestore import client as firestore_client # Create a minimal fake GAPIC with a dummy failure. firestore_api = mock.create_autospec( firestore_client.FirestoreClient, instance=True ) exc = exceptions.InternalServerError("Fire during rollback.") firestore_api.rollback.side_effect = exc # Attach the fake GAPIC to a real client. client = _make_client() client._firestore_api_internal = firestore_api # Actually make a transaction and roll it back. transaction = _make_transaction(client) txn_id = b"roll-bad-server" transaction._id = txn_id with pytest.raises(exceptions.InternalServerError) as exc_info: transaction._rollback() assert exc_info.value is exc assert transaction._id is None assert transaction._write_pbs == [] # Verify the called mock. 
firestore_api.rollback.assert_called_once_with( request={"database": client._database_string, "transaction": txn_id}, metadata=client._rpc_metadata, ) def test_transaction__commit(): from google.cloud.firestore_v1.services.firestore import client as firestore_client from google.cloud.firestore_v1.types import firestore from google.cloud.firestore_v1.types import write # Create a minimal fake GAPIC with a dummy result. firestore_api = mock.create_autospec( firestore_client.FirestoreClient, instance=True ) commit_response = firestore.CommitResponse(write_results=[write.WriteResult()]) firestore_api.commit.return_value = commit_response # Attach the fake GAPIC to a real client. client = _make_client("phone-joe") client._firestore_api_internal = firestore_api # Actually make a transaction with some mutations and call _commit(). transaction = _make_transaction(client) txn_id = b"under-over-thru-woods" transaction._id = txn_id document = client.document("zap", "galaxy", "ship", "space") transaction.set(document, {"apple": 4.5}) write_pbs = transaction._write_pbs[::] write_results = transaction._commit() assert write_results == list(commit_response.write_results) # Make sure transaction has no more "changes". assert transaction._id is None assert transaction._write_pbs == [] # Verify the mocks. 
firestore_api.commit.assert_called_once_with( request={ "database": client._database_string, "writes": write_pbs, "transaction": txn_id, }, metadata=client._rpc_metadata, ) def test_transaction__commit_not_allowed(): from google.cloud.firestore_v1.base_transaction import _CANT_COMMIT transaction = _make_transaction(mock.sentinel.client) assert transaction._id is None with pytest.raises(ValueError) as exc_info: transaction._commit() assert exc_info.value.args == (_CANT_COMMIT,) def test_transaction__commit_failure(): from google.api_core import exceptions from google.cloud.firestore_v1.services.firestore import client as firestore_client # Create a minimal fake GAPIC with a dummy failure. firestore_api = mock.create_autospec( firestore_client.FirestoreClient, instance=True ) exc = exceptions.InternalServerError("Fire during commit.") firestore_api.commit.side_effect = exc # Attach the fake GAPIC to a real client. client = _make_client() client._firestore_api_internal = firestore_api # Actually make a transaction with some mutations and call _commit(). transaction = _make_transaction(client) txn_id = b"beep-fail-commit" transaction._id = txn_id transaction.create(client.document("up", "down"), {"water": 1.0}) transaction.delete(client.document("up", "left")) write_pbs = transaction._write_pbs[::] with pytest.raises(exceptions.InternalServerError) as exc_info: transaction._commit() assert exc_info.value is exc assert transaction._id == txn_id assert transaction._write_pbs == write_pbs # Verify the called mock. 
firestore_api.commit.assert_called_once_with( request={ "database": client._database_string, "writes": write_pbs, "transaction": txn_id, }, metadata=client._rpc_metadata, ) def _transaction_get_all_helper(retry=None, timeout=None): from google.cloud.firestore_v1 import _helpers client = mock.Mock(spec=["get_all"]) transaction = _make_transaction(client) ref1, ref2 = mock.Mock(), mock.Mock() kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) result = transaction.get_all([ref1, ref2], **kwargs) client.get_all.assert_called_once_with( [ref1, ref2], transaction=transaction, **kwargs, ) assert result is client.get_all.return_value def test_transaction_get_all(): _transaction_get_all_helper() def test_transaction_get_all_w_retry_timeout(): from google.api_core.retry import Retry retry = Retry(predicate=object()) timeout = 123.0 _transaction_get_all_helper(retry=retry, timeout=timeout) def _transaction_get_w_document_ref_helper(retry=None, timeout=None): from google.cloud.firestore_v1.document import DocumentReference from google.cloud.firestore_v1 import _helpers client = mock.Mock(spec=["get_all"]) transaction = _make_transaction(client) ref = DocumentReference("documents", "doc-id") kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) result = transaction.get(ref, **kwargs) assert result is client.get_all.return_value client.get_all.assert_called_once_with([ref], transaction=transaction, **kwargs) def test_transaction_get_w_document_ref(): _transaction_get_w_document_ref_helper() def test_transaction_get_w_document_ref_w_retry_timeout(): from google.api_core.retry import Retry retry = Retry(predicate=object()) timeout = 123.0 _transaction_get_w_document_ref_helper(retry=retry, timeout=timeout) def _transaction_get_w_query_helper(retry=None, timeout=None): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.query import Query client = mock.Mock(spec=[]) transaction = _make_transaction(client) query = 
Query(parent=mock.Mock(spec=[])) query.stream = mock.MagicMock() kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) result = transaction.get(query, **kwargs) assert result is query.stream.return_value query.stream.assert_called_once_with(transaction=transaction, **kwargs) def test_transaction_get_w_query(): _transaction_get_w_query_helper() def test_transaction_get_w_query_w_retry_timeout(): from google.api_core.retry import Retry retry = Retry(predicate=object()) timeout = 123.0 _transaction_get_w_query_helper(retry=retry, timeout=timeout) def test_transaction_get_failure(): client = _make_client() transaction = _make_transaction(client) ref_or_query = object() with pytest.raises(ValueError): transaction.get(ref_or_query) def _make__transactional(*args, **kwargs): from google.cloud.firestore_v1.transaction import _Transactional return _Transactional(*args, **kwargs) def test__transactional_constructor(): wrapped = _make__transactional(mock.sentinel.callable_) assert wrapped.to_wrap is mock.sentinel.callable_ assert wrapped.current_id is None assert wrapped.retry_id is None def test__transactional__pre_commit_success(): to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[]) wrapped = _make__transactional(to_wrap) txn_id = b"totes-began" transaction = _make_transaction_pb(txn_id) result = wrapped._pre_commit(transaction, "pos", key="word") assert result is mock.sentinel.result assert transaction._id == txn_id assert wrapped.current_id == txn_id assert wrapped.retry_id == txn_id # Verify mocks. 
to_wrap.assert_called_once_with(transaction, "pos", key="word") firestore_api = transaction._client._firestore_api firestore_api.begin_transaction.assert_called_once_with( request={"database": transaction._client._database_string, "options": None}, metadata=transaction._client._rpc_metadata, ) firestore_api.rollback.assert_not_called() firestore_api.commit.assert_not_called() def test__transactional__pre_commit_retry_id_already_set_success(): from google.cloud.firestore_v1.types import common to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[]) wrapped = _make__transactional(to_wrap) txn_id1 = b"already-set" wrapped.retry_id = txn_id1 txn_id2 = b"ok-here-too" transaction = _make_transaction_pb(txn_id2) result = wrapped._pre_commit(transaction) assert result is mock.sentinel.result assert transaction._id == txn_id2 assert wrapped.current_id == txn_id2 assert wrapped.retry_id == txn_id1 # Verify mocks. to_wrap.assert_called_once_with(transaction) firestore_api = transaction._client._firestore_api options_ = common.TransactionOptions( read_write=common.TransactionOptions.ReadWrite(retry_transaction=txn_id1) ) firestore_api.begin_transaction.assert_called_once_with( request={ "database": transaction._client._database_string, "options": options_, }, metadata=transaction._client._rpc_metadata, ) firestore_api.rollback.assert_not_called() firestore_api.commit.assert_not_called() def test__transactional__pre_commit_failure(): exc = RuntimeError("Nope not today.") to_wrap = mock.Mock(side_effect=exc, spec=[]) wrapped = _make__transactional(to_wrap) txn_id = b"gotta-fail" transaction = _make_transaction_pb(txn_id) with pytest.raises(RuntimeError) as exc_info: wrapped._pre_commit(transaction, 10, 20) assert exc_info.value is exc assert transaction._id is None assert wrapped.current_id == txn_id assert wrapped.retry_id == txn_id # Verify mocks. 
to_wrap.assert_called_once_with(transaction, 10, 20) firestore_api = transaction._client._firestore_api firestore_api.begin_transaction.assert_called_once_with( request={"database": transaction._client._database_string, "options": None}, metadata=transaction._client._rpc_metadata, ) firestore_api.rollback.assert_called_once_with( request={ "database": transaction._client._database_string, "transaction": txn_id, }, metadata=transaction._client._rpc_metadata, ) firestore_api.commit.assert_not_called() def test__transactional__pre_commit_failure_with_rollback_failure(): from google.api_core import exceptions exc1 = ValueError("I will not be only failure.") to_wrap = mock.Mock(side_effect=exc1, spec=[]) wrapped = _make__transactional(to_wrap) txn_id = b"both-will-fail" transaction = _make_transaction_pb(txn_id) # Actually force the ``rollback`` to fail as well. exc2 = exceptions.InternalServerError("Rollback blues.") firestore_api = transaction._client._firestore_api firestore_api.rollback.side_effect = exc2 # Try to ``_pre_commit`` with pytest.raises(exceptions.InternalServerError) as exc_info: wrapped._pre_commit(transaction, a="b", c="zebra") assert exc_info.value is exc2 assert transaction._id is None assert wrapped.current_id == txn_id assert wrapped.retry_id == txn_id # Verify mocks. to_wrap.assert_called_once_with(transaction, a="b", c="zebra") firestore_api.begin_transaction.assert_called_once_with( request={"database": transaction._client._database_string, "options": None}, metadata=transaction._client._rpc_metadata, ) firestore_api.rollback.assert_called_once_with( request={ "database": transaction._client._database_string, "transaction": txn_id, }, metadata=transaction._client._rpc_metadata, ) firestore_api.commit.assert_not_called() def test__transactional__maybe_commit_success(): wrapped = _make__transactional(mock.sentinel.callable_) txn_id = b"nyet" transaction = _make_transaction_pb(txn_id) transaction._id = txn_id # We won't call ``begin()``. 
succeeded = wrapped._maybe_commit(transaction) assert succeeded # On success, _id is reset. assert transaction._id is None # Verify mocks. firestore_api = transaction._client._firestore_api firestore_api.begin_transaction.assert_not_called() firestore_api.rollback.assert_not_called() firestore_api.commit.assert_called_once_with( request={ "database": transaction._client._database_string, "writes": [], "transaction": txn_id, }, metadata=transaction._client._rpc_metadata, ) def test__transactional__maybe_commit_failure_read_only(): from google.api_core import exceptions wrapped = _make__transactional(mock.sentinel.callable_) txn_id = b"failed" transaction = _make_transaction_pb(txn_id, read_only=True) transaction._id = txn_id # We won't call ``begin()``. wrapped.current_id = txn_id # We won't call ``_pre_commit()``. wrapped.retry_id = txn_id # We won't call ``_pre_commit()``. # Actually force the ``commit`` to fail (use ABORTED, but cannot # retry since read-only). exc = exceptions.Aborted("Read-only did a bad.") firestore_api = transaction._client._firestore_api firestore_api.commit.side_effect = exc with pytest.raises(exceptions.Aborted) as exc_info: wrapped._maybe_commit(transaction) assert exc_info.value is exc assert transaction._id == txn_id assert wrapped.current_id == txn_id assert wrapped.retry_id == txn_id # Verify mocks. firestore_api.begin_transaction.assert_not_called() firestore_api.rollback.assert_not_called() firestore_api.commit.assert_called_once_with( request={ "database": transaction._client._database_string, "writes": [], "transaction": txn_id, }, metadata=transaction._client._rpc_metadata, ) def test__transactional__maybe_commit_failure_can_retry(): from google.api_core import exceptions wrapped = _make__transactional(mock.sentinel.callable_) txn_id = b"failed-but-retry" transaction = _make_transaction_pb(txn_id) transaction._id = txn_id # We won't call ``begin()``. wrapped.current_id = txn_id # We won't call ``_pre_commit()``. 
wrapped.retry_id = txn_id # We won't call ``_pre_commit()``. # Actually force the ``commit`` to fail. exc = exceptions.Aborted("Read-write did a bad.") firestore_api = transaction._client._firestore_api firestore_api.commit.side_effect = exc succeeded = wrapped._maybe_commit(transaction) assert not succeeded assert transaction._id == txn_id assert wrapped.current_id == txn_id assert wrapped.retry_id == txn_id # Verify mocks. firestore_api.begin_transaction.assert_not_called() firestore_api.rollback.assert_not_called() firestore_api.commit.assert_called_once_with( request={ "database": transaction._client._database_string, "writes": [], "transaction": txn_id, }, metadata=transaction._client._rpc_metadata, ) def test__transactional__maybe_commit_failure_cannot_retry(): from google.api_core import exceptions wrapped = _make__transactional(mock.sentinel.callable_) txn_id = b"failed-but-not-retryable" transaction = _make_transaction_pb(txn_id) transaction._id = txn_id # We won't call ``begin()``. wrapped.current_id = txn_id # We won't call ``_pre_commit()``. wrapped.retry_id = txn_id # We won't call ``_pre_commit()``. # Actually force the ``commit`` to fail. exc = exceptions.InternalServerError("Real bad thing") firestore_api = transaction._client._firestore_api firestore_api.commit.side_effect = exc with pytest.raises(exceptions.InternalServerError) as exc_info: wrapped._maybe_commit(transaction) assert exc_info.value is exc assert transaction._id == txn_id assert wrapped.current_id == txn_id assert wrapped.retry_id == txn_id # Verify mocks. 
firestore_api.begin_transaction.assert_not_called() firestore_api.rollback.assert_not_called() firestore_api.commit.assert_called_once_with( request={ "database": transaction._client._database_string, "writes": [], "transaction": txn_id, }, metadata=transaction._client._rpc_metadata, ) def test__transactional___call__success_first_attempt(): to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[]) wrapped = _make__transactional(to_wrap) txn_id = b"whole-enchilada" transaction = _make_transaction_pb(txn_id) result = wrapped(transaction, "a", b="c") assert result is mock.sentinel.result assert transaction._id is None assert wrapped.current_id == txn_id assert wrapped.retry_id == txn_id # Verify mocks. to_wrap.assert_called_once_with(transaction, "a", b="c") firestore_api = transaction._client._firestore_api firestore_api.begin_transaction.assert_called_once_with( request={"database": transaction._client._database_string, "options": None}, metadata=transaction._client._rpc_metadata, ) firestore_api.rollback.assert_not_called() firestore_api.commit.assert_called_once_with( request={ "database": transaction._client._database_string, "writes": [], "transaction": txn_id, }, metadata=transaction._client._rpc_metadata, ) def test__transactional___call__success_second_attempt(): from google.api_core import exceptions from google.cloud.firestore_v1.types import common from google.cloud.firestore_v1.types import firestore from google.cloud.firestore_v1.types import write to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[]) wrapped = _make__transactional(to_wrap) txn_id = b"whole-enchilada" transaction = _make_transaction_pb(txn_id) # Actually force the ``commit`` to fail on first / succeed on second. exc = exceptions.Aborted("Contention junction.") firestore_api = transaction._client._firestore_api firestore_api.commit.side_effect = [ exc, firestore.CommitResponse(write_results=[write.WriteResult()]), ] # Call the __call__-able ``wrapped``. 
result = wrapped(transaction, "a", b="c") assert result is mock.sentinel.result assert transaction._id is None assert wrapped.current_id == txn_id assert wrapped.retry_id == txn_id # Verify mocks. wrapped_call = mock.call(transaction, "a", b="c") assert to_wrap.mock_calls, [wrapped_call == wrapped_call] firestore_api = transaction._client._firestore_api db_str = transaction._client._database_string options_ = common.TransactionOptions( read_write=common.TransactionOptions.ReadWrite(retry_transaction=txn_id) ) expected_calls = [ mock.call( request={"database": db_str, "options": None}, metadata=transaction._client._rpc_metadata, ), mock.call( request={"database": db_str, "options": options_}, metadata=transaction._client._rpc_metadata, ), ] assert firestore_api.begin_transaction.mock_calls == expected_calls firestore_api.rollback.assert_not_called() commit_call = mock.call( request={"database": db_str, "writes": [], "transaction": txn_id}, metadata=transaction._client._rpc_metadata, ) assert firestore_api.commit.mock_calls == [commit_call, commit_call] def test__transactional___call__failure(): from google.api_core import exceptions from google.cloud.firestore_v1.base_transaction import _EXCEED_ATTEMPTS_TEMPLATE to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[]) wrapped = _make__transactional(to_wrap) txn_id = b"only-one-shot" transaction = _make_transaction_pb(txn_id, max_attempts=1) # Actually force the ``commit`` to fail. exc = exceptions.Aborted("Contention just once.") firestore_api = transaction._client._firestore_api firestore_api.commit.side_effect = exc # Call the __call__-able ``wrapped``. with pytest.raises(ValueError) as exc_info: wrapped(transaction, "here", there=1.5) err_msg = _EXCEED_ATTEMPTS_TEMPLATE.format(transaction._max_attempts) assert exc_info.value.args == (err_msg,) assert transaction._id is None assert wrapped.current_id == txn_id assert wrapped.retry_id == txn_id # Verify mocks. 
to_wrap.assert_called_once_with(transaction, "here", there=1.5) firestore_api.begin_transaction.assert_called_once_with( request={"database": transaction._client._database_string, "options": None}, metadata=transaction._client._rpc_metadata, ) firestore_api.rollback.assert_called_once_with( request={ "database": transaction._client._database_string, "transaction": txn_id, }, metadata=transaction._client._rpc_metadata, ) firestore_api.commit.assert_called_once_with( request={ "database": transaction._client._database_string, "writes": [], "transaction": txn_id, }, metadata=transaction._client._rpc_metadata, ) def test_transactional_factory(): from google.cloud.firestore_v1.transaction import _Transactional from google.cloud.firestore_v1.transaction import transactional wrapped = transactional(mock.sentinel.callable_) assert isinstance(wrapped, _Transactional) assert wrapped.to_wrap is mock.sentinel.callable_ @mock.patch("google.cloud.firestore_v1.transaction._sleep") def test__commit_with_retry_success_first_attempt(_sleep): from google.cloud.firestore_v1.services.firestore import client as firestore_client from google.cloud.firestore_v1.transaction import _commit_with_retry # Create a minimal fake GAPIC with a dummy result. firestore_api = mock.create_autospec( firestore_client.FirestoreClient, instance=True ) # Attach the fake GAPIC to a real client. client = _make_client("summer") client._firestore_api_internal = firestore_api # Call function and check result. txn_id = b"cheeeeeez" commit_response = _commit_with_retry(client, mock.sentinel.write_pbs, txn_id) assert commit_response is firestore_api.commit.return_value # Verify mocks used. 
_sleep.assert_not_called() firestore_api.commit.assert_called_once_with( request={ "database": client._database_string, "writes": mock.sentinel.write_pbs, "transaction": txn_id, }, metadata=client._rpc_metadata, ) @mock.patch("google.cloud.firestore_v1.transaction._sleep", side_effect=[2.0, 4.0]) def test__commit_with_retry_success_third_attempt(_sleep): from google.api_core import exceptions from google.cloud.firestore_v1.services.firestore import client as firestore_client from google.cloud.firestore_v1.transaction import _commit_with_retry # Create a minimal fake GAPIC with a dummy result. firestore_api = mock.create_autospec( firestore_client.FirestoreClient, instance=True ) # Make sure the first two requests fail and the third succeeds. firestore_api.commit.side_effect = [ exceptions.ServiceUnavailable("Server sleepy."), exceptions.ServiceUnavailable("Server groggy."), mock.sentinel.commit_response, ] # Attach the fake GAPIC to a real client. client = _make_client("outside") client._firestore_api_internal = firestore_api # Call function and check result. txn_id = b"the-world\x00" commit_response = _commit_with_retry(client, mock.sentinel.write_pbs, txn_id) assert commit_response is mock.sentinel.commit_response # Verify mocks used. # Ensure _sleep is called after commit failures, with intervals of 1 and 2 seconds assert _sleep.call_count == 2 _sleep.assert_any_call(1.0) _sleep.assert_any_call(2.0) # commit() called same way 3 times. 
commit_call = mock.call( request={ "database": client._database_string, "writes": mock.sentinel.write_pbs, "transaction": txn_id, }, metadata=client._rpc_metadata, ) assert firestore_api.commit.mock_calls == [commit_call, commit_call, commit_call] @mock.patch("google.cloud.firestore_v1.transaction._sleep") def test__commit_with_retry_failure_first_attempt(_sleep): from google.api_core import exceptions from google.cloud.firestore_v1.services.firestore import client as firestore_client from google.cloud.firestore_v1.transaction import _commit_with_retry # Create a minimal fake GAPIC with a dummy result. firestore_api = mock.create_autospec( firestore_client.FirestoreClient, instance=True ) # Make sure the first request fails with an un-retryable error. exc = exceptions.ResourceExhausted("We ran out of fries.") firestore_api.commit.side_effect = exc # Attach the fake GAPIC to a real client. client = _make_client("peanut-butter") client._firestore_api_internal = firestore_api # Call function and check result. txn_id = b"\x08\x06\x07\x05\x03\x00\x09-jenny" with pytest.raises(exceptions.ResourceExhausted) as exc_info: _commit_with_retry(client, mock.sentinel.write_pbs, txn_id) assert exc_info.value is exc # Verify mocks used. _sleep.assert_not_called() firestore_api.commit.assert_called_once_with( request={ "database": client._database_string, "writes": mock.sentinel.write_pbs, "transaction": txn_id, }, metadata=client._rpc_metadata, ) @mock.patch("google.cloud.firestore_v1.transaction._sleep", return_value=2.0) def test__commit_with_retry_failure_second_attempt(_sleep): from google.api_core import exceptions from google.cloud.firestore_v1.services.firestore import client as firestore_client from google.cloud.firestore_v1.transaction import _commit_with_retry # Create a minimal fake GAPIC with a dummy result. 
firestore_api = mock.create_autospec( firestore_client.FirestoreClient, instance=True ) # Make sure the first request fails retry-able and second # fails non-retryable. exc1 = exceptions.ServiceUnavailable("Come back next time.") exc2 = exceptions.InternalServerError("Server on fritz.") firestore_api.commit.side_effect = [exc1, exc2] # Attach the fake GAPIC to a real client. client = _make_client("peanut-butter") client._firestore_api_internal = firestore_api # Call function and check result. txn_id = b"the-journey-when-and-where-well-go" with pytest.raises(exceptions.InternalServerError) as exc_info: _commit_with_retry(client, mock.sentinel.write_pbs, txn_id) assert exc_info.value is exc2 # Verify mocks used. _sleep.assert_called_once_with(1.0) # commit() called same way 2 times. commit_call = mock.call( request={ "database": client._database_string, "writes": mock.sentinel.write_pbs, "transaction": txn_id, }, metadata=client._rpc_metadata, ) assert firestore_api.commit.mock_calls == [commit_call, commit_call] @mock.patch("random.uniform", return_value=5.5) @mock.patch("time.sleep", return_value=None) def test_defaults(sleep, uniform): from google.cloud.firestore_v1.transaction import _sleep curr_sleep = 10.0 assert uniform.return_value <= curr_sleep new_sleep = _sleep(curr_sleep) assert new_sleep == 2.0 * curr_sleep uniform.assert_called_once_with(0.0, curr_sleep) sleep.assert_called_once_with(uniform.return_value) @mock.patch("random.uniform", return_value=10.5) @mock.patch("time.sleep", return_value=None) def test_explicit(sleep, uniform): from google.cloud.firestore_v1.transaction import _sleep curr_sleep = 12.25 assert uniform.return_value <= curr_sleep multiplier = 1.5 new_sleep = _sleep(curr_sleep, max_sleep=100.0, multiplier=multiplier) assert new_sleep == multiplier * curr_sleep uniform.assert_called_once_with(0.0, curr_sleep) sleep.assert_called_once_with(uniform.return_value) @mock.patch("random.uniform", return_value=6.75) @mock.patch("time.sleep", 
return_value=None) def test_exceeds_max(sleep, uniform): from google.cloud.firestore_v1.transaction import _sleep curr_sleep = 20.0 assert uniform.return_value <= curr_sleep max_sleep = 38.5 new_sleep = _sleep(curr_sleep, max_sleep=max_sleep, multiplier=2.0) assert new_sleep == max_sleep uniform.assert_called_once_with(0.0, curr_sleep) sleep.assert_called_once_with(uniform.return_value) def _make_credentials(): import google.auth.credentials return mock.Mock(spec=google.auth.credentials.Credentials) def _make_client(project="feral-tom-cat"): from google.cloud.firestore_v1.client import Client credentials = _make_credentials() return Client(project=project, credentials=credentials) def _make_transaction_pb(txn_id, **txn_kwargs): from google.protobuf import empty_pb2 from google.cloud.firestore_v1.services.firestore import client as firestore_client from google.cloud.firestore_v1.types import firestore from google.cloud.firestore_v1.types import write from google.cloud.firestore_v1.transaction import Transaction # Create a fake GAPIC ... firestore_api = mock.create_autospec( firestore_client.FirestoreClient, instance=True ) # ... with a dummy ``BeginTransactionResponse`` result ... begin_response = firestore.BeginTransactionResponse(transaction=txn_id) firestore_api.begin_transaction.return_value = begin_response # ... and a dummy ``Rollback`` result ... firestore_api.rollback.return_value = empty_pb2.Empty() # ... and a dummy ``Commit`` result. commit_response = firestore.CommitResponse(write_results=[write.WriteResult()]) firestore_api.commit.return_value = commit_response # Attach the fake GAPIC to a real client. client = _make_client() client._firestore_api_internal = firestore_api return Transaction(client, **txn_kwargs)
34.638543
87
0.727663
4,531
36,128
5.464577
0.081218
0.05525
0.037964
0.041761
0.832795
0.792609
0.749838
0.718417
0.671527
0.622819
0
0.006406
0.183376
36,128
1,042
88
34.671785
0.832831
0.095715
0
0.604426
0
0
0.049056
0.009578
0
0
0
0
0.222683
1
0.06639
false
0
0.085754
0
0.159059
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
bab99f95c4afc244b345c05beb8638cf3e390015
7,584
py
Python
src/jobs/tasks.py
easyautoml/easyml
5c4a724836c5dbd9eb66a81ae33d3d63961a0f80
[ "Apache-2.0" ]
2
2022-03-30T06:57:05.000Z
2022-03-30T23:44:10.000Z
src/jobs/tasks.py
easyautoml/easyml
5c4a724836c5dbd9eb66a81ae33d3d63961a0f80
[ "Apache-2.0" ]
null
null
null
src/jobs/tasks.py
easyautoml/easyml
5c4a724836c5dbd9eb66a81ae33d3d63961a0f80
[ "Apache-2.0" ]
null
null
null
from celery.decorators import task from utils.transform import Input, Output, get_file_eda_url, get_file_url from utils import config, services from jobs.automl import Train, Predict, Evaluation, Explain from pandas_profiling import ProfileReport @task(name="upload_file", bind=True) def upload_file_task(self, file_id): task_id = self.request.id _data = { "task_id": task_id, "status": config.TASK_STATUS.get('STARTED'), } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) # TODO : # 1. Check column name, remove special character # 2. Add data type, receive from end-user # 3. Save file as pickle or parquet file try: # Load file file_url = get_file_url(file_id) df_file = Input(file_url).from_csv() # Create metadata file_metadata_dict = df_file.dtypes.apply(lambda x: str(x)).to_dict() # Post metadata into server _data = { "file_id": file_id, "file_metadata_dict": file_metadata_dict } services.post(data=_data, target_path=config.TARGET_PATH.get("file_metadata")) # # Post data into server _data = { "task_id": task_id, "status": config.TASK_STATUS.get('SUCCESS'), } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) except Exception as e: mes = "Upload file failure. 
Error {}".format(e) _data = { "task_id": task_id, "status": config.TASK_STATUS.get('FAILURE'), "description": mes[:300] } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) raise Exception(mes[:300]) @task(name="create_file_eda_task", bind=True) def create_file_eda_task(self, file_id): task_id = self.request.id _data = { "task_id": task_id, "status": config.TASK_STATUS.get('STARTED'), } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) # Step 1 : Load file try: file_url = get_file_url(file_id) df_file = Input(file_url).from_csv() profile = ProfileReport(df_file, title="", explorative=False, minimal=False) url = get_file_eda_url(file_id) profile.to_file(url) _data = { "task_id": task_id, "status": config.TASK_STATUS.get('SUCCESS'), } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) except Exception as e: mes = "Create experiment failure. Error {}".format(e) _data = { "task_id": task_id, "status": config.TASK_STATUS.get('FAILURE'), "description": mes[:300] } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) raise Exception(mes[:300]) @task(name="create_experiment_task", bind=True) def create_experiment_task(self, experiment_id): task_id = self.request.id _data = { "task_id": task_id, "status": config.TASK_STATUS.get('STARTED'), } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) try: _models = Train(experiment_id) _models.train() _data = { "task_id": task_id, "status": config.TASK_STATUS.get('SUCCESS'), } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) except Exception as e: mes = "Create experiment failure. 
Error {}".format(e) _data = { "task_id": task_id, "status": config.TASK_STATUS.get('FAILURE'), "description": mes[:300] } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) raise Exception(mes[:300]) @task(name="predict_task", bind=True) def predict_task(self, predict_id): task_id = self.request.id _data = { "task_id": task_id, "status": config.TASK_STATUS.get('STARTED'), } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) try: _predict = Predict(predict_id) _predict.predict() _data = { "task_id": task_id, "status": config.TASK_STATUS.get('SUCCESS'), } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) except Exception as e: mes = "Create experiment failure. Error {}".format(e) _data = { "task_id": task_id, "status": config.TASK_STATUS.get('FAILURE'), "description": mes[:300] } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) raise Exception(mes[:300]) @task(name="evaluation_task", bind=True) def evaluation_task(self, evaluation_id): task_id = self.request.id _data = { "task_id": task_id, "status": config.TASK_STATUS.get('STARTED'), } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) try: _evaluation = Evaluation(evaluation_id) _evaluation.evaluate() _data = { "task_id": task_id, "status": config.TASK_STATUS.get('SUCCESS'), } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) except Exception as e: mes = "Create experiment failure. 
Error {}".format(e) _data = { "task_id": task_id, "status": config.TASK_STATUS.get('FAILURE'), "description": mes[:300] } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) raise Exception(mes[:300]) @task(name="evaluation_sub_population_task", bind=True) def evaluation_sub_population_task(self, evaluation_id, sub_population_id, column_name): task_id = self.request.id _data = { "task_id": task_id, "status": config.TASK_STATUS.get('STARTED'), } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) try: _evaluation = Evaluation(evaluation_id) _evaluation.sub_population(sub_population_id, column_name) _data = { "task_id": task_id, "status": config.TASK_STATUS.get('SUCCESS'), } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) except Exception as e: mes = "Create experiment failure. Error {}".format(e) _data = { "task_id": task_id, "status": config.TASK_STATUS.get('FAILURE'), "description": mes[:300] } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) raise Exception(mes[:300]) @task(name="explain_task", bind=True) def explain_task(self, explain_id): task_id = self.request.id _data = { "task_id": task_id, "status": config.TASK_STATUS.get('STARTED'), } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) try: explain = Explain(explain_id) explain.explain() _data = { "task_id": task_id, "status": config.TASK_STATUS.get('SUCCESS'), } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) except Exception as e: mes = "Create explain failure. Error {}".format(e) _data = { "task_id": task_id, "status": config.TASK_STATUS.get('FAILURE'), "description": mes[:300] } services.post(data=_data, target_path=config.TARGET_PATH.get("task")) raise Exception(mes[:300])
28.511278
88
0.607991
926
7,584
4.712743
0.101512
0.067369
0.049496
0.100825
0.774748
0.742209
0.742209
0.742209
0.742209
0.742209
0
0.008203
0.260549
7,584
266
89
28.511278
0.769971
0.029931
0
0.693122
0
0
0.132979
0.007078
0
0
0
0.003759
0
1
0.037037
false
0
0.026455
0
0.063492
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
bad4e33bd2dd8b2f9481fcb6bf09e7f1d83dad7c
393
py
Python
hoomd/hpmc/pair/__init__.py
USF-GT-Molecular-Modeling/hoomd-blue
2ba2f9e60b0320746d21aa8219bfc9df119c053f
[ "BSD-3-Clause" ]
null
null
null
hoomd/hpmc/pair/__init__.py
USF-GT-Molecular-Modeling/hoomd-blue
2ba2f9e60b0320746d21aa8219bfc9df119c053f
[ "BSD-3-Clause" ]
null
null
null
hoomd/hpmc/pair/__init__.py
USF-GT-Molecular-Modeling/hoomd-blue
2ba2f9e60b0320746d21aa8219bfc9df119c053f
[ "BSD-3-Clause" ]
null
null
null
# Copyright (c) 2009-2022 The Regents of the University of Michigan. # Part of HOOMD-blue, released under the BSD 3-Clause License. """Pair Potentials for Monte Carlo. Define :math:`U_{\\mathrm{pair},ij}` for use with `hoomd.hpmc.integrate.HPMCIntegrator`. Assign a pair potential instance to `hpmc.integrate.HPMCIntegrator.pair_potential` to activate the potential. """ from . import user
32.75
74
0.765903
58
393
5.155172
0.724138
0.086957
0.180602
0
0
0
0
0
0
0
0
0.026316
0.129771
393
11
75
35.727273
0.847953
0.916031
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
bad4f78b9456203129586dd159344e8138a84107
126
py
Python
db/admin.py
dmitrytk/takkand.pw
162fd5bb0e58c419977e06ce4633177918bd6f61
[ "MIT" ]
null
null
null
db/admin.py
dmitrytk/takkand.pw
162fd5bb0e58c419977e06ce4633177918bd6f61
[ "MIT" ]
10
2021-03-18T23:07:30.000Z
2022-03-12T00:13:17.000Z
db/admin.py
dmitrytk/takkand.pw
162fd5bb0e58c419977e06ce4633177918bd6f61
[ "MIT" ]
null
null
null
from django.contrib import admin from .models import Well, OilField admin.site.register(Well) admin.site.register(OilField)
18
34
0.809524
18
126
5.666667
0.555556
0.176471
0.333333
0
0
0
0
0
0
0
0
0
0.103175
126
6
35
21
0.902655
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
baf495ff5fc80f96385bb3950d820dcf2716c48e
51
py
Python
Python_CI_CD_Test/__init__.py
fanoos/Python_CI_CD_Test
be2a93b5bb4c2585f4006befbe1f282b2a9e3f77
[ "MIT" ]
null
null
null
Python_CI_CD_Test/__init__.py
fanoos/Python_CI_CD_Test
be2a93b5bb4c2585f4006befbe1f282b2a9e3f77
[ "MIT" ]
9
2019-08-29T13:14:00.000Z
2021-02-02T22:15:19.000Z
Python_CI_CD_Test/__init__.py
fanoos/Python_CI_CD_Test
be2a93b5bb4c2585f4006befbe1f282b2a9e3f77
[ "MIT" ]
null
null
null
from .calculator import * from .helloworld import *
25.5
25
0.784314
6
51
6.666667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.137255
51
2
26
25.5
0.909091
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
24410a76f6eab86db23076ebfd76754966d356f8
192
py
Python
cogs/utils/checks.py
jonah-chen/eve-bot
18bd61247ab890edfe5c0223ac4e81f6d588a3b4
[ "MIT" ]
null
null
null
cogs/utils/checks.py
jonah-chen/eve-bot
18bd61247ab890edfe5c0223ac4e81f6d588a3b4
[ "MIT" ]
null
null
null
cogs/utils/checks.py
jonah-chen/eve-bot
18bd61247ab890edfe5c0223ac4e81f6d588a3b4
[ "MIT" ]
null
null
null
import nextcord from nextcord.ext import commands def is_admin(): async def predicate(ctx): return ctx.author.guild_permissions.administrator return commands.check(predicate)
24
57
0.765625
24
192
6.041667
0.708333
0
0
0
0
0
0
0
0
0
0
0
0.166667
192
7
58
27.428571
0.90625
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
true
0
0.333333
0
0.833333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
24524a736a3c2afa8eff939aaf594a7e1f3d016c
44
py
Python
dynamicserialize/dstypes/gov/noaa/nws/__init__.py
srcarter3/python-awips
d981062662968cf3fb105e8e23d955950ae2497e
[ "BSD-3-Clause" ]
33
2016-03-17T01:21:18.000Z
2022-02-08T10:41:06.000Z
dynamicserialize/dstypes/gov/noaa/nws/__init__.py
srcarter3/python-awips
d981062662968cf3fb105e8e23d955950ae2497e
[ "BSD-3-Clause" ]
15
2016-04-19T16:34:08.000Z
2020-09-09T19:57:54.000Z
dynamicserialize/dstypes/gov/noaa/nws/__init__.py
Unidata/python-awips
8459aa756816e5a45d2e5bea534d23d5b1dd1690
[ "BSD-3-Clause" ]
20
2016-03-12T01:46:58.000Z
2022-02-08T06:53:22.000Z
__all__ = [ 'ncep' ]
8.8
18
0.25
2
44
3.5
1
0
0
0
0
0
0
0
0
0
0
0
0.636364
44
4
19
11
0.4375
0
0
0
0
0
0.093023
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
24544df3ee6db9985144cf5aa170f92c69cc32bf
214
py
Python
main/SteppableDemos/FlexibleDiffusionSolverFE/diffusion_2D_uptake/Simulation/diffusion_2D_uptake.py
JulianoGianlupi/nh-cc3d-4x-base-tool
c0f4aceebd4c5bf3ec39e831ef851e419b161259
[ "CC0-1.0" ]
null
null
null
main/SteppableDemos/FlexibleDiffusionSolverFE/diffusion_2D_uptake/Simulation/diffusion_2D_uptake.py
JulianoGianlupi/nh-cc3d-4x-base-tool
c0f4aceebd4c5bf3ec39e831ef851e419b161259
[ "CC0-1.0" ]
null
null
null
main/SteppableDemos/FlexibleDiffusionSolverFE/diffusion_2D_uptake/Simulation/diffusion_2D_uptake.py
JulianoGianlupi/nh-cc3d-4x-base-tool
c0f4aceebd4c5bf3ec39e831ef851e419b161259
[ "CC0-1.0" ]
1
2021-02-26T21:50:29.000Z
2021-02-26T21:50:29.000Z
from cc3d import CompuCellSetup from diffusion_2D_uptakeSteppables import diffusion_2D_uptakeSteppable CompuCellSetup.register_steppable(steppable=diffusion_2D_uptakeSteppable(frequency=1)) CompuCellSetup.run()
26.75
86
0.892523
23
214
8
0.565217
0.179348
0.282609
0
0
0
0
0
0
0
0
0.024876
0.060748
214
7
87
30.571429
0.890547
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
0305d586d116e9164afb3043b2103288b057bbdd
61
py
Python
dora/server/__init__.py
vishalbelsare/dora
5a56611d4ac6cbc0a7982100a5e1c9e13aac02f0
[ "Apache-2.0" ]
null
null
null
dora/server/__init__.py
vishalbelsare/dora
5a56611d4ac6cbc0a7982100a5e1c9e13aac02f0
[ "Apache-2.0" ]
null
null
null
dora/server/__init__.py
vishalbelsare/dora
5a56611d4ac6cbc0a7982100a5e1c9e13aac02f0
[ "Apache-2.0" ]
null
null
null
from . import response # NOQA from .server import * # NOQA
20.333333
30
0.688525
8
61
5.25
0.625
0
0
0
0
0
0
0
0
0
0
0
0.229508
61
2
31
30.5
0.893617
0.147541
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
0317ca999ad3bc5c5abe773e220719f2a53e6911
98
py
Python
moldudp/__init__.py
cchee/moldudp_codec
b4d7df556a619e67909320891488589292d887df
[ "MIT" ]
null
null
null
moldudp/__init__.py
cchee/moldudp_codec
b4d7df556a619e67909320891488589292d887df
[ "MIT" ]
null
null
null
moldudp/__init__.py
cchee/moldudp_codec
b4d7df556a619e67909320891488589292d887df
[ "MIT" ]
null
null
null
from pkgutil import extend_path __path__= extend_path(__path__, __name__) __all__=["msg","codec"]
24.5
41
0.795918
13
98
4.615385
0.692308
0.333333
0.466667
0
0
0
0
0
0
0
0
0
0.081633
98
3
42
32.666667
0.666667
0
0
0
0
0
0.081633
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
033b7157fa6231b4328dad7d6d3119befef4567f
346
py
Python
PyEasyQiwi/errors.py
Kur-up/PyEasyQiwi
d4ad6a037ed08bfc3815df543e82fbf311f594f6
[ "Apache-2.0" ]
null
null
null
PyEasyQiwi/errors.py
Kur-up/PyEasyQiwi
d4ad6a037ed08bfc3815df543e82fbf311f594f6
[ "Apache-2.0" ]
null
null
null
PyEasyQiwi/errors.py
Kur-up/PyEasyQiwi
d4ad6a037ed08bfc3815df543e82fbf311f594f6
[ "Apache-2.0" ]
null
null
null
class QiwiInvalidToken(Exception): pass class QiwiInvalidAmountValue(Exception): pass class QiwiTooLongComment(Exception): pass class QiwiTooLongDescription(Exception): pass class QiwiPaymentMethodNotSelected(Exception): pass class QiwiInvalidDelay(Exception): pass class QiwiInvalidBillId(Exception): pass
13.307692
46
0.765896
28
346
9.464286
0.357143
0.343396
0.407547
0
0
0
0
0
0
0
0
0
0.17341
346
26
47
13.307692
0.926573
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
035a014b32bebc38a59f2900d0b37be815e648f9
110
py
Python
python_modules/libraries/dagster-pagerduty/dagster_pagerduty/__init__.py
bambielli-flex/dagster
30b75ba7c62fc536bc827f177c1dc6ba20f5ae20
[ "Apache-2.0" ]
1
2019-07-15T17:34:04.000Z
2019-07-15T17:34:04.000Z
python_modules/libraries/dagster-pagerduty/dagster_pagerduty/__init__.py
bambielli-flex/dagster
30b75ba7c62fc536bc827f177c1dc6ba20f5ae20
[ "Apache-2.0" ]
null
null
null
python_modules/libraries/dagster-pagerduty/dagster_pagerduty/__init__.py
bambielli-flex/dagster
30b75ba7c62fc536bc827f177c1dc6ba20f5ae20
[ "Apache-2.0" ]
null
null
null
from .version import __version__ from .resources import pagerduty_resource __all__ = ['pagerduty_resource']
18.333333
41
0.818182
12
110
6.666667
0.583333
0.425
0
0
0
0
0
0
0
0
0
0
0.118182
110
5
42
22
0.824742
0
0
0
0
0
0.163636
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
062a080e9ad3255889eb9afd1ea6db7fc6262a2f
223
py
Python
Maya/cicd/python/libMayaExtended/libMayaExtended/mayaSceneMel.py
Mu-L/Exporters
235ad02230791351d7a0440d9568641d28e2e77e
[ "Apache-2.0" ]
445
2017-10-18T01:54:00.000Z
2022-03-31T16:27:54.000Z
Maya/cicd/python/libMayaExtended/libMayaExtended/mayaSceneMel.py
Mu-L/Exporters
235ad02230791351d7a0440d9568641d28e2e77e
[ "Apache-2.0" ]
646
2017-10-16T00:46:17.000Z
2022-03-31T17:40:36.000Z
Maya/cicd/python/libMayaExtended/libMayaExtended/mayaSceneMel.py
Mu-L/Exporters
235ad02230791351d7a0440d9568641d28e2e77e
[ "Apache-2.0" ]
313
2017-10-15T09:20:45.000Z
2022-03-31T09:11:34.000Z
import maya.mel as mm def evalMelString(pyString): return mm.eval(pyString) def convertStringsToMelArray(pyStrings): return str([str(x) for x in pyStrings]).replace("'","\"").replace("[","{").replace("]", "}")
31.857143
96
0.654709
26
223
5.615385
0.653846
0.191781
0
0
0
0
0
0
0
0
0
0
0.134529
223
7
96
31.857143
0.756477
0
0
0
0
0
0.111607
0
0
0
0
0
0
1
0.4
false
0
0.2
0.4
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
062abb25091e82c64c698757d4b805f0469dd1d0
50
py
Python
src/courseProgress/__init__.py
tukanuk/universityCourseProgress
5d05b9f5c482af869c052a169496c2fad8285a8e
[ "MIT" ]
null
null
null
src/courseProgress/__init__.py
tukanuk/universityCourseProgress
5d05b9f5c482af869c052a169496c2fad8285a8e
[ "MIT" ]
5
2021-03-22T18:58:23.000Z
2021-03-24T01:54:46.000Z
src/courseProgress/__init__.py
tukanuk/universityCourseProgress
5d05b9f5c482af869c052a169496c2fad8285a8e
[ "MIT" ]
null
null
null
print(f"starting up the __init__ for {__name__}")
25
49
0.76
8
50
3.75
1
0
0
0
0
0
0
0
0
0
0
0
0.12
50
1
50
50
0.681818
0
0
0
0
0
0.78
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
064ad31dbb027be6e7e46adc4cd366414ef2ca95
131
py
Python
src/save.py
Aaronself/HotListStatistics
81df633a08eed328c99cd389eae73a3e9a0a1772
[ "MIT" ]
null
null
null
src/save.py
Aaronself/HotListStatistics
81df633a08eed328c99cd389eae73a3e9a0a1772
[ "MIT" ]
null
null
null
src/save.py
Aaronself/HotListStatistics
81df633a08eed328c99cd389eae73a3e9a0a1772
[ "MIT" ]
null
null
null
import pandas as pd def updateHotList2CSV(WeiboWordList, ZhihuWordList): csvFile = pd.read_csv("../docs/HotList.csv")
18.714286
53
0.709924
15
131
6.133333
0.866667
0
0
0
0
0
0
0
0
0
0
0.009259
0.175573
131
6
54
21.833333
0.842593
0
0
0
0
0
0.153226
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
5
0650d351f3cf1a6cfbcfb5da0169de73bd11dae7
21
py
Python
test.py
Matt198949/python
15e05424d6cce38f0bace3bde230c3a1780a0c5e
[ "Unlicense" ]
null
null
null
test.py
Matt198949/python
15e05424d6cce38f0bace3bde230c3a1780a0c5e
[ "Unlicense" ]
null
null
null
test.py
Matt198949/python
15e05424d6cce38f0bace3bde230c3a1780a0c5e
[ "Unlicense" ]
null
null
null
print ("helloworld")
10.5
20
0.714286
2
21
7.5
1
0
0
0
0
0
0
0
0
0
0
0
0.095238
21
1
21
21
0.789474
0
0
0
0
0
0.47619
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
0655fb7e084ce0c53d1823da8a2efa3d609006d1
102
py
Python
clr/__init__.py
ColorGenomics/clr
a444a0612374a19f22e511b5e3514a60561e6160
[ "MIT" ]
2
2016-03-09T02:15:35.000Z
2016-03-10T12:25:27.000Z
clr/__init__.py
color/clr
a444a0612374a19f22e511b5e3514a60561e6160
[ "MIT" ]
7
2021-04-28T03:12:55.000Z
2021-10-07T21:13:20.000Z
clr/__init__.py
color/clr
a444a0612374a19f22e511b5e3514a60561e6160
[ "MIT" ]
3
2017-08-21T18:43:02.000Z
2019-04-03T01:14:36.000Z
# This is the main entry point for the tool. from .main import main from ._version import __version__
25.5
44
0.784314
17
102
4.411765
0.647059
0
0
0
0
0
0
0
0
0
0
0
0.176471
102
3
45
34
0.892857
0.411765
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
068319abe8b9ab87b7593cbd63b1d9eb262f310b
97
py
Python
sandbox/clib/tests/hello.py
pincoin/thai-online-judge
454c129541783a67b33113851318b808c722de9e
[ "MIT" ]
null
null
null
sandbox/clib/tests/hello.py
pincoin/thai-online-judge
454c129541783a67b33113851318b808c722de9e
[ "MIT" ]
5
2020-06-05T22:01:14.000Z
2021-06-09T18:10:50.000Z
sandbox/clib/tests/hello.py
pincoin/pincoin-online-judge
454c129541783a67b33113851318b808c722de9e
[ "MIT" ]
null
null
null
# compile: python3 -c "import py_compile; py_compile.compile(r'hello.py')" print('Hello, world')
32.333333
74
0.731959
15
97
4.6
0.6
0.26087
0
0
0
0
0
0
0
0
0
0.011364
0.092784
97
2
75
48.5
0.772727
0.742268
0
0
0
0
0.521739
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
068a8ed1357f92e2f1a9a31ed99cfe37e318a4b6
130,992
py
Python
resources/Wireshark/WiresharkDissectorFoo/test/suite_dissectors/group_asterix.py
joshis1/C_Programming
4a8003321251448a167bfca0b595c5eeab88608d
[ "MIT" ]
2
2020-09-11T05:51:42.000Z
2020-12-31T11:42:02.000Z
resources/Wireshark/WiresharkDissectorFoo/test/suite_dissectors/group_asterix.py
joshis1/C_Programming
4a8003321251448a167bfca0b595c5eeab88608d
[ "MIT" ]
null
null
null
resources/Wireshark/WiresharkDissectorFoo/test/suite_dissectors/group_asterix.py
joshis1/C_Programming
4a8003321251448a167bfca0b595c5eeab88608d
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Wireshark ASTERIX dissector tests # By Atli Guðmundsson <atli@tern.is> # # SPDX-License-Identifier: GPL-2.0-or-later # '''ASTERIX dissector tests''' # Standard modules import inspect # Wireshark modules import fixtures import subprocesstest from suite_dissectors.dissectorstest import * @fixtures.mark_usefixtures('test_env') @fixtures.uses_fixtures class case_asterix(subprocesstest.SubprocessTestCase): def test_for_asterix(self, dissection_validator): '''Verifies that the asterix dissector is installed and accessible''' tester = dissection_validator('asterix') tester.add_dissection( [0x13, 0x00, 0x03], { "asterix.category": "19", "asterix.length": "3" } ) tester.check_dissections() class _asterix_validator_real: def __init__(self, category, dissection_validator): self.category = category self.validator = dissection_validator("asterix") def add_dissection(self, byte_list, field, expected_message, line_no=None): '''pre-wrap asterix category messages with proper asterix structure''' total_length = len(byte_list) + 3 byte_list = [ self.category, (total_length // 256) % 256, total_length % 256 ] + byte_list expected_result = { "asterix.category": "{}".format(self.category), "asterix.length": "{}".format(total_length), "asterix.message": { "asterix.fspec": "", field: expected_message } } if line_no is None: caller = inspect.getframeinfo(inspect.stack()[1][0]) line_no = caller.lineno self.validator.add_dissection(byte_list, expected_result, line_no) def check_dissections(self): self.validator.check_dissections() @fixtures.fixture def asterix_validator(dissection_validator): def generate_asterix_validator(category): retval = _asterix_validator_real(category, dissection_validator) return retval return generate_asterix_validator class _asterix_re_validator_real(_asterix_validator_real): def __init__(self, category, re_byte_list, dissection_validator): super().__init__(category, dissection_validator) self.re_byte_list = 
re_byte_list def add_re_dissection(self, byte_list, field, expected_message, line_no=None): '''pre-wrap asterix RE messages with proper asterix RE structure''' re_length = len(byte_list) + 1 byte_list = self.re_byte_list + [ re_length % 256 ] + byte_list expected_result = { "asterix.re_field_len": "{}".format(re_length), "asterix.fspec": "", "asterix.{:03}_RE_{}".format(self.category, field): expected_message } if line_no is None: caller = inspect.getframeinfo(inspect.stack()[1][0]) line_no = caller.lineno self.add_dissection(byte_list, "asterix.{:03}_RE".format( self.category), expected_result, line_no) @fixtures.fixture def asterix_re_validator(dissection_validator): def generate_re_asterix_validator(category, re_byte_list): retval = _asterix_re_validator_real( category, re_byte_list, dissection_validator) return retval return generate_re_asterix_validator def fspec_local(key, idx, value): result = { "asterix.fspec": "", "asterix.{}".format(key): { "asterix.{}_{}".format(key, idx): value } } return result def fspec_global(key, idx, value): result = { "asterix.fspec": "", "asterix.{}".format(key): { "asterix.{}".format(idx): value } } return result def dict_local(vmap, key, idx, value): result = vmap.copy() result["asterix.{}_{}".format(key, idx)] = value return result def dict_global(vmap, key, value): result = vmap.copy() result["asterix.{}".format(key)] = value return result def dict_fspec_local(vmap, key, idx, value): result = { "asterix.fspec": "", "asterix.{}".format(key): dict_local(vmap, key, idx, value) } return result def dict_fspec_global(vmap, key, idx, value): result = { "asterix.fspec": "", "asterix.{}".format(key): dict_global(vmap, idx, value) } return result def counter_local(vmap, counter, key, idx, value): result = { "asterix.fspec": "", "asterix.{}".format(key): { "asterix.counter": counter, "asterix.{}".format(key): dict_local(vmap, key, idx, value) } } return result @fixtures.mark_usefixtures('test_env') @fixtures.uses_fixtures class 
case_category_019(subprocesstest.SubprocessTestCase): ''' Unittest case for ASTERIX Category 019 Online specification: https://www.eurocontrol.int/publications/cat019-multilateration-system-status-messages-part-18 Part 18 : Category 019 (1.3) Multilateration System Status Messages Standard User Application Profile FRN Data Item Information Length 1 I019/010 Data Source Identifier 2 2 I019/000 Message Type 1 3 I019/140 Time of Day 3 4 I019/550 System Status 1 5 I019/551 Tracking Processor Detailed Status 1 6 I019/552 Remote Sensor Detailed Status 1+ 7 I019/553 Reference Transponder Detailed Status 1+ FX - Field Extension Indicator - 8 I019/600 Position of the MLT System Reference point 8 9 I019/610 Height of the MLT System Reference point 2 10 I019/620 WGS-84 Undulation 1 11 - Spare - 12 - Spare - 13 RE Reserved Expansion Field - 14 SP Special Purpose Field - FX - Field Extension Indicator - ''' maxDiff = None def test_for_fields(self, asterix_validator): '''verifies existence of all fields and their maximum value''' validator = asterix_validator(19) validator.add_dissection( [0x80, 0xff, 0x00], "asterix.019_010", { "asterix.SAC": "255", "asterix.SIC": "0" } ) validator.add_dissection( [0x80, 0x00, 0xff], "asterix.019_010", { "asterix.SAC": "0", "asterix.SIC": "255" } ) validator.add_dissection( [0x40, 0x03], "asterix.019_000", { "asterix.019_000_MT": "3" } ) validator.add_dissection( [0x20, 0xa8, 0xbf, 0xff], "asterix.019_140", { "asterix.TOD": "86399.9921875" } ) validator.add_dissection( [0x10, 0xc0], "asterix.019_550", { "asterix.019_550_NOGO": "3", "asterix.019_550_OVL": "0", "asterix.019_550_TSV": "0", "asterix.019_550_TTF": "0" } ) validator.add_dissection( [0x10, 0x20], "asterix.019_550", { "asterix.019_550_NOGO": "0", "asterix.019_550_OVL": "1", "asterix.019_550_TSV": "0", "asterix.019_550_TTF": "0" } ) validator.add_dissection( [0x10, 0x10], "asterix.019_550", { "asterix.019_550_NOGO": "0", "asterix.019_550_OVL": "0", "asterix.019_550_TSV": "1", 
"asterix.019_550_TTF": "0" } ) validator.add_dissection( [0x10, 0x08], "asterix.019_550", { "asterix.019_550_NOGO": "0", "asterix.019_550_OVL": "0", "asterix.019_550_TSV": "0", "asterix.019_550_TTF": "1" } ) validator.add_dissection( [0x08, 0x80], "asterix.019_551", { "asterix.019_551_SP1_EXEC": "1", "asterix.019_551_SP1_GOOD": "0", "asterix.019_551_SP2_EXEC": "0", "asterix.019_551_SP2_GOOD": "0", "asterix.019_551_SP3_EXEC": "0", "asterix.019_551_SP3_GOOD": "0", "asterix.019_551_SP4_EXEC": "0", "asterix.019_551_SP4_GOOD": "0" } ) validator.add_dissection( [0x08, 0x40], "asterix.019_551", { "asterix.019_551_SP1_EXEC": "0", "asterix.019_551_SP1_GOOD": "1", "asterix.019_551_SP2_EXEC": "0", "asterix.019_551_SP2_GOOD": "0", "asterix.019_551_SP3_EXEC": "0", "asterix.019_551_SP3_GOOD": "0", "asterix.019_551_SP4_EXEC": "0", "asterix.019_551_SP4_GOOD": "0" } ) validator.add_dissection( [0x08, 0x20], "asterix.019_551", { "asterix.019_551_SP1_EXEC": "0", "asterix.019_551_SP1_GOOD": "0", "asterix.019_551_SP2_EXEC": "1", "asterix.019_551_SP2_GOOD": "0", "asterix.019_551_SP3_EXEC": "0", "asterix.019_551_SP3_GOOD": "0", "asterix.019_551_SP4_EXEC": "0", "asterix.019_551_SP4_GOOD": "0" } ) validator.add_dissection( [0x08, 0x10], "asterix.019_551", { "asterix.019_551_SP1_EXEC": "0", "asterix.019_551_SP1_GOOD": "0", "asterix.019_551_SP2_EXEC": "0", "asterix.019_551_SP2_GOOD": "1", "asterix.019_551_SP3_EXEC": "0", "asterix.019_551_SP3_GOOD": "0", "asterix.019_551_SP4_EXEC": "0", "asterix.019_551_SP4_GOOD": "0" } ) validator.add_dissection( [0x08, 0x08], "asterix.019_551", { "asterix.019_551_SP1_EXEC": "0", "asterix.019_551_SP1_GOOD": "0", "asterix.019_551_SP2_EXEC": "0", "asterix.019_551_SP2_GOOD": "0", "asterix.019_551_SP3_EXEC": "1", "asterix.019_551_SP3_GOOD": "0", "asterix.019_551_SP4_EXEC": "0", "asterix.019_551_SP4_GOOD": "0" } ) validator.add_dissection( [0x08, 0x04], "asterix.019_551", { "asterix.019_551_SP1_EXEC": "0", "asterix.019_551_SP1_GOOD": "0", 
"asterix.019_551_SP2_EXEC": "0", "asterix.019_551_SP2_GOOD": "0", "asterix.019_551_SP3_EXEC": "0", "asterix.019_551_SP3_GOOD": "1", "asterix.019_551_SP4_EXEC": "0", "asterix.019_551_SP4_GOOD": "0" } ) validator.add_dissection( [0x08, 0x02], "asterix.019_551", { "asterix.019_551_SP1_EXEC": "0", "asterix.019_551_SP1_GOOD": "0", "asterix.019_551_SP2_EXEC": "0", "asterix.019_551_SP2_GOOD": "0", "asterix.019_551_SP3_EXEC": "0", "asterix.019_551_SP3_GOOD": "0", "asterix.019_551_SP4_EXEC": "1", "asterix.019_551_SP4_GOOD": "0" } ) validator.add_dissection( [0x08, 0x01], "asterix.019_551", { "asterix.019_551_SP1_EXEC": "0", "asterix.019_551_SP1_GOOD": "0", "asterix.019_551_SP2_EXEC": "0", "asterix.019_551_SP2_GOOD": "0", "asterix.019_551_SP3_EXEC": "0", "asterix.019_551_SP3_GOOD": "0", "asterix.019_551_SP4_EXEC": "0", "asterix.019_551_SP4_GOOD": "1" } ) validator.add_dissection( [0x04, 0x00], "asterix.019_552", { "asterix.counter": "0" } ) validator.add_dissection( [0x04, 0x01, 0xff, 0x00], "asterix.019_552", { "asterix.counter": "1", "asterix.019_552": { "asterix.019_552_RS_Identification": "255", "asterix.019_552_Receiver_1090_MHz": "0", "asterix.019_552_Transmitter_1030_MHz": "0", "asterix.019_552_Transmitter_1090_MHz": "0", "asterix.019_552_RS_Status": "0", "asterix.019_552_RS_Operational": "0" } } ) validator.add_dissection( [0x04, 0x01, 0x00, 0x40], "asterix.019_552", { "asterix.counter": "1", "asterix.019_552": { "asterix.019_552_RS_Identification": "0", "asterix.019_552_Receiver_1090_MHz": "1", "asterix.019_552_Transmitter_1030_MHz": "0", "asterix.019_552_Transmitter_1090_MHz": "0", "asterix.019_552_RS_Status": "0", "asterix.019_552_RS_Operational": "0" } } ) validator.add_dissection( [0x04, 0x01, 0x00, 0x20], "asterix.019_552", { "asterix.counter": "1", "asterix.019_552": { "asterix.019_552_RS_Identification": "0", "asterix.019_552_Receiver_1090_MHz": "0", "asterix.019_552_Transmitter_1030_MHz": "1", "asterix.019_552_Transmitter_1090_MHz": "0", 
"asterix.019_552_RS_Status": "0", "asterix.019_552_RS_Operational": "0" } } ) validator.add_dissection( [0x04, 0x01, 0x00, 0x10], "asterix.019_552", { "asterix.counter": "1", "asterix.019_552": { "asterix.019_552_RS_Identification": "0", "asterix.019_552_Receiver_1090_MHz": "0", "asterix.019_552_Transmitter_1030_MHz": "0", "asterix.019_552_Transmitter_1090_MHz": "1", "asterix.019_552_RS_Status": "0", "asterix.019_552_RS_Operational": "0" } } ) validator.add_dissection( [0x04, 0x01, 0x00, 0x08], "asterix.019_552", { "asterix.counter": "1", "asterix.019_552": { "asterix.019_552_RS_Identification": "0", "asterix.019_552_Receiver_1090_MHz": "0", "asterix.019_552_Transmitter_1030_MHz": "0", "asterix.019_552_Transmitter_1090_MHz": "0", "asterix.019_552_RS_Status": "1", "asterix.019_552_RS_Operational": "0" } } ) validator.add_dissection( [0x04, 0x01, 0x00, 0x04], "asterix.019_552", { "asterix.counter": "1", "asterix.019_552": { "asterix.019_552_RS_Identification": "0", "asterix.019_552_Receiver_1090_MHz": "0", "asterix.019_552_Transmitter_1030_MHz": "0", "asterix.019_552_Transmitter_1090_MHz": "0", "asterix.019_552_RS_Status": "0", "asterix.019_552_RS_Operational": "1" } } ) validator.add_dissection( [0x04, 0x03, 0x12, 0x34, 0x56, 0x78, 0x9a, 0x0c], "asterix.019_552", { "asterix.counter": "3", "asterix.019_552": { "asterix.019_552_RS_Identification": "18", "asterix.019_552_Receiver_1090_MHz": "0", "asterix.019_552_Transmitter_1030_MHz": "1", "asterix.019_552_Transmitter_1090_MHz": "1", "asterix.019_552_RS_Status": "0", "asterix.019_552_RS_Operational": "1" }, "asterix.019_552": { "asterix.019_552_RS_Identification": "86", "asterix.019_552_Receiver_1090_MHz": "1", "asterix.019_552_Transmitter_1030_MHz": "1", "asterix.019_552_Transmitter_1090_MHz": "1", "asterix.019_552_RS_Status": "1", "asterix.019_552_RS_Operational": "0" }, "asterix.019_552": { "asterix.019_552_RS_Identification": "154", "asterix.019_552_Receiver_1090_MHz": "0", "asterix.019_552_Transmitter_1030_MHz": 
"0", "asterix.019_552_Transmitter_1090_MHz": "0", "asterix.019_552_RS_Status": "1", "asterix.019_552_RS_Operational": "1" } } ) validator.add_dissection( [0x02, 0xc0], "asterix.019_553", { "asterix.019_553_Ref_Trans_1_Status": "3", "asterix.019_553_Ref_Trans_2_Status": "0", "asterix.FX": "0" } ) validator.add_dissection( [0x02, 0x0c], "asterix.019_553", { "asterix.019_553_Ref_Trans_1_Status": "0", "asterix.019_553_Ref_Trans_2_Status": "3", "asterix.FX": "0" } ) validator.add_dissection( [0x02, 0x01, 0x01, 0x0c], "asterix.019_553", { "asterix.019_553_Ref_Trans_1_Status": "0", "asterix.019_553_Ref_Trans_2_Status": "0", "asterix.019_553_Ref_Trans_3_Status": "0", "asterix.019_553_Ref_Trans_4_Status": "0", "asterix.019_553_Ref_Trans_5_Status": "0", "asterix.019_553_Ref_Trans_6_Status": "3", "asterix.FX": "0" } ) validator.add_dissection( [0x01, 0x80, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], "asterix.019_600", { "asterix.019_600_Latitude": "90", "asterix.019_600_Longitude": "0" } ) validator.add_dissection( [0x01, 0x80, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], "asterix.019_600", { "asterix.019_600_Latitude": "-90", "asterix.019_600_Longitude": "0" } ) validator.add_dissection( [0x01, 0x80, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00], "asterix.019_600", { "asterix.019_600_Latitude": "0", "asterix.019_600_Longitude": "180" } ) validator.add_dissection( [0x01, 0x80, 0x00, 0x00, 0x00, 0x00, 0xc0, 0x00, 0x00, 0x00], "asterix.019_600", { "asterix.019_600_Latitude": "0", "asterix.019_600_Longitude": "-180" } ) validator.add_dissection( [0x01, 0x40, 0x7f, 0xff], "asterix.019_610", { "asterix.019_610_Height": "8191.75" } ) validator.add_dissection( [0x01, 0x40, 0x80, 0x00], "asterix.019_610", { "asterix.019_610_Height": "-8192" } ) validator.add_dissection( [0x01, 0x20, 0x7f], "asterix.019_620", { "asterix.019_620_Undulation": "127" } ) validator.add_dissection( [0x01, 0x20, 0x81], "asterix.019_620", { "asterix.019_620_Undulation": "-127" } ) 
validator.check_dissections() def test_undefined_value_handling(self, asterix_validator): '''verifies that the dissector can dissect undefined field values by setting the maximum value of bits or by setting all undefined bits''' validator = asterix_validator(19) validator.add_dissection( [0x40, 0xff], "asterix.019_000", { "asterix.019_000_MT": "255" } ) validator.add_dissection( [0x20, 0xff, 0xff, 0xff], "asterix.019_140", { "asterix.TOD": "131071.9921875" } ) validator.add_dissection( [0x10, 0x07], "asterix.019_550", { "asterix.019_550_NOGO": "0", "asterix.019_550_OVL": "0", "asterix.019_550_TSV": "0", "asterix.019_550_TTF": "0" } ) validator.add_dissection( [0x04, 0x01, 0x00, 0x83], "asterix.019_552", { "asterix.counter": "1", "asterix.019_552": { "asterix.019_552_RS_Identification": "0", "asterix.019_552_Receiver_1090_MHz": "0", "asterix.019_552_Transmitter_1030_MHz": "0", "asterix.019_552_Transmitter_1090_MHz": "0", "asterix.019_552_RS_Status": "0", "asterix.019_552_RS_Operational": "0" } } ) validator.add_dissection( [0x02, 0x32], "asterix.019_553", { "asterix.019_553_Ref_Trans_1_Status": "0", "asterix.019_553_Ref_Trans_2_Status": "0", "asterix.FX": "0" } ) validator.add_dissection( [0x02, 0x33, 0x33, 0x32], "asterix.019_553", { "asterix.019_553_Ref_Trans_1_Status": "0", "asterix.019_553_Ref_Trans_2_Status": "0", "asterix.019_553_Ref_Trans_3_Status": "0", "asterix.019_553_Ref_Trans_4_Status": "0", "asterix.019_553_Ref_Trans_5_Status": "0", "asterix.019_553_Ref_Trans_6_Status": "0", "asterix.FX": "0" } ) validator.add_dissection( [0x01, 0x80, 0x7f, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00], "asterix.019_600", { "asterix.019_600_Latitude": "359.999999832362", "asterix.019_600_Longitude": "0" } ) validator.add_dissection( [0x01, 0x80, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], "asterix.019_600", { "asterix.019_600_Latitude": "-360", "asterix.019_600_Longitude": "0" } ) validator.add_dissection( [0x01, 0x80, 0x00, 0x00, 0x00, 0x00, 0x7f, 0xff, 0xff, 0xff], 
"asterix.019_600", { "asterix.019_600_Latitude": "0", "asterix.019_600_Longitude": "359.999999832362" } ) validator.add_dissection( [0x01, 0x80, 0x00, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00, 0x00], "asterix.019_600", { "asterix.019_600_Latitude": "0", "asterix.019_600_Longitude": "-360" } ) validator.add_dissection( [0x01, 0x20, 0x80], "asterix.019_620", { "asterix.019_620_Undulation": "-128" } ) validator.add_dissection( [0x01, 0x10], "asterix.spare", "" ) validator.add_dissection( [0x01, 0x08], "asterix.spare", "" ) validator.add_dissection( [0x01, 0x04, 0x02, 0x00], "asterix.019_RE", { "asterix.re_field_len": "2", "asterix.fspec": "" } ) validator.add_dissection( [0x01, 0x04, 0x10, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff], "asterix.019_RE", { "asterix.fspec": "", "asterix.re_field_len": "16" } ) validator.add_dissection( [0x01, 0x02, 0x01], "asterix.019_SP", "" ) validator.add_dissection( [0x01, 0x02, 0x10, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff], "asterix.019_SP", "" ) validator.check_dissections() @fixtures.mark_usefixtures('test_env') @fixtures.uses_fixtures class case_category_034(subprocesstest.SubprocessTestCase): ''' Unittest case for ASTERIX Category 034 Online specification: https://www.eurocontrol.int/publications/cat034-monoradar-service-messages-part-2b-next-version-cat-002 Part 2b Transmission of Monoradar Service Messages Standard User Application Profile FRN Data Item Information Length 1 I034/010 Data Source Identifier 2 2 I034/000 Message Type 1 3 I034/030 Time-of-Day 3 4 I034/020 Sector Number 1 5 I034/041 Antenna Rotation Period 2 6 I034/050 System Configuration and Status 1+ 7 I034/060 System Processing Mode 1+ FX N/A. Field Extension Indicator N/A. 
8 I034/070 Message Count Values (1+2*N) 9 I034/100 Generic Polar Window 8 10 I034/110 Data Filter 1 11 I034/120 3D-Position of Data Source 8 12 I034/090 Collimation Error 2 13 RE-Data Item Reserved Expansion Field 1+1+ 14 SP-Data Item Special Purpose Field 1+1+ FX N/A. Field Extension Indicator n.a. ''' maxDiff = None def test_for_fields(self, asterix_validator): '''verifies existence of all fields and their maximum value''' validator = asterix_validator(34) validator.add_dissection( [0x80, 0xff, 0x00], "asterix.034_010", { "asterix.SAC": "255", "asterix.SIC": "0" } ) validator.add_dissection( [0x80, 0x00, 0xff], "asterix.034_010", { "asterix.SAC": "0", "asterix.SIC": "255" } ) validator.add_dissection( [0x40, 0x04], "asterix.034_000", { "asterix.034_000_MT": "4" } ) validator.add_dissection( [0x20, 0xa8, 0xbf, 0xff], "asterix.034_030", { "asterix.TOD": "86399.9921875" } ) validator.add_dissection( [0x10, 0xff], "asterix.034_020", { "asterix.034_020_SN": "358.59375" } ) validator.add_dissection( [0x08, 0xff, 0xff], "asterix.034_041", { "asterix.034_041_ARS": "511.9921875" } ) x_050_01 = { "asterix.034_050_01_NOGO": "0", "asterix.034_050_01_RDPC": "0", "asterix.034_050_01_RDPR": "0", "asterix.034_050_01_OVL_RDP": "0", "asterix.034_050_01_OVL_XMT": "0", "asterix.034_050_01_MSC": "0", "asterix.034_050_01_TSV": "0" } validator.add_dissection( [0x04, 0x80, 0x80], "asterix.034_050", dict_fspec_local(x_050_01, "034_050_01", "NOGO", "1") ) validator.add_dissection( [0x04, 0x80, 0x40], "asterix.034_050", dict_fspec_local(x_050_01, "034_050_01", "RDPC", "1") ) validator.add_dissection( [0x04, 0x80, 0x20], "asterix.034_050", dict_fspec_local(x_050_01, "034_050_01", "RDPR", "1") ) validator.add_dissection( [0x04, 0x80, 0x10], "asterix.034_050", dict_fspec_local(x_050_01, "034_050_01", "OVL_RDP", "1") ) validator.add_dissection( [0x04, 0x80, 0x08], "asterix.034_050", dict_fspec_local(x_050_01, "034_050_01", "OVL_XMT", "1") ) validator.add_dissection( [0x04, 0x80, 0x04], 
"asterix.034_050", dict_fspec_local(x_050_01, "034_050_01", "MSC", "1") ) validator.add_dissection( [0x04, 0x80, 0x02], "asterix.034_050", dict_fspec_local(x_050_01, "034_050_01", "TSV", "1") ) x_050_02 = { "asterix.034_050_02_ANT": "0", "asterix.034_050_02_CHAB": "0", "asterix.034_050_02_OVL": "0", "asterix.034_050_02_MSC": "0" } validator.add_dissection( [0x04, 0x10, 0x80], "asterix.034_050", dict_fspec_local(x_050_02, "034_050_02", "ANT", "1") ) validator.add_dissection( [0x04, 0x10, 0x60], "asterix.034_050", dict_fspec_local(x_050_02, "034_050_02", "CHAB", "3") ) validator.add_dissection( [0x04, 0x10, 0x10], "asterix.034_050", dict_fspec_local(x_050_02, "034_050_02", "OVL", "1") ) validator.add_dissection( [0x04, 0x10, 0x08], "asterix.034_050", dict_fspec_local(x_050_02, "034_050_02", "MSC", "1") ) x_050_03 = { "asterix.034_050_03_ANT": "0", "asterix.034_050_03_CHAB": "0", "asterix.034_050_03_OVL": "0", "asterix.034_050_03_MSC": "0" } validator.add_dissection( [0x04, 0x08, 0x80], "asterix.034_050", dict_fspec_local(x_050_03, "034_050_03", "ANT", "1") ) validator.add_dissection( [0x04, 0x08, 0x60], "asterix.034_050", dict_fspec_local(x_050_03, "034_050_03", "CHAB", "3") ) validator.add_dissection( [0x04, 0x08, 0x10], "asterix.034_050", dict_fspec_local(x_050_03, "034_050_03", "OVL", "1") ) validator.add_dissection( [0x04, 0x08, 0x08], "asterix.034_050", dict_fspec_local(x_050_03, "034_050_03", "MSC", "1") ) x_050_04 = { "asterix.034_050_04_ANT": "0", "asterix.034_050_04_CHAB": "0", "asterix.034_050_04_OVL_SUR": "0", "asterix.034_050_04_MSC": "0", "asterix.034_050_04_SCF": "0", "asterix.034_050_04_DLF": "0", "asterix.034_050_04_OVL_SCF": "0", "asterix.034_050_04_OVL_DLF": "0" } validator.add_dissection( [0x04, 0x04, 0x80, 0x00], "asterix.034_050", dict_fspec_local(x_050_04, "034_050_04", "ANT", "1") ) validator.add_dissection( [0x04, 0x04, 0x60, 0x00], "asterix.034_050", dict_fspec_local(x_050_04, "034_050_04", "CHAB", "3") ) validator.add_dissection( [0x04, 
0x04, 0x10, 0x00], "asterix.034_050", dict_fspec_local(x_050_04, "034_050_04", "OVL_SUR", "1") ) validator.add_dissection( [0x04, 0x04, 0x08, 0x00], "asterix.034_050", dict_fspec_local(x_050_04, "034_050_04", "MSC", "1") ) validator.add_dissection( [0x04, 0x04, 0x04, 0x00], "asterix.034_050", dict_fspec_local(x_050_04, "034_050_04", "SCF", "1") ) validator.add_dissection( [0x04, 0x04, 0x02, 0x00], "asterix.034_050", dict_fspec_local(x_050_04, "034_050_04", "DLF", "1") ) validator.add_dissection( [0x04, 0x04, 0x01, 0x00], "asterix.034_050", dict_fspec_local(x_050_04, "034_050_04", "OVL_SCF", "1") ) validator.add_dissection( [0x04, 0x04, 0x00, 0x80], "asterix.034_050", dict_fspec_local(x_050_04, "034_050_04", "OVL_DLF", "1") ) x_060_01 = { "asterix.034_060_01_RED_RDP": "0", "asterix.034_060_01_RED_XMT": "0" } validator.add_dissection( [0x02, 0x80, 0x70], "asterix.034_060", dict_fspec_local(x_060_01, "034_060_01", "RED_RDP", "7") ) validator.add_dissection( [0x02, 0x80, 0x0e], "asterix.034_060", dict_fspec_local(x_060_01, "034_060_01", "RED_XMT", "7") ) x_060_02 = { "asterix.034_060_02_POL": "0", "asterix.034_060_02_RED_RAD": "0", "asterix.034_060_02_STC": "0" } validator.add_dissection( [0x02, 0x10, 0x80], "asterix.034_060", dict_fspec_local(x_060_02, "034_060_02", "POL", "1") ) validator.add_dissection( [0x02, 0x10, 0x70], "asterix.034_060", dict_fspec_local(x_060_02, "034_060_02", "RED_RAD", "7") ) validator.add_dissection( [0x02, 0x10, 0x0c], "asterix.034_060", dict_fspec_local(x_060_02, "034_060_02", "STC", "3") ) validator.add_dissection( [0x02, 0x08, 0xe0], "asterix.034_060", fspec_local("034_060_03", "RED_RAD", "7") ) x_060_06 = { "asterix.034_060_04_RED_RAD": "0", "asterix.034_060_04_CLU": "0" } validator.add_dissection( [0x02, 0x04, 0xe0], "asterix.034_060", dict_fspec_local(x_060_06, "034_060_04", "RED_RAD", "7") ) validator.add_dissection( [0x02, 0x04, 0x10], "asterix.034_060", dict_fspec_local(x_060_06, "034_060_04", "CLU", "1") ) x_070 = { 
"asterix.034_070_TYP": "0", "asterix.034_070_COUNTER": "0" } validator.add_dissection( [0x01, 0x80, 0x01, 0x80, 0x00], "asterix.034_070", { "asterix.counter": "1", "asterix.034_070": dict_local(x_070, "034_070", "TYP", "16") } ) validator.add_dissection( [0x01, 0x80, 0x03, 0x80, 0x00, 0x87, 0xff, 0x07, 0xff], "asterix.034_070", { "asterix.counter": "3", "asterix.034_070": dict_local(x_070, "034_070", "TYPE", "16"), "asterix.034_070": { "asterix.034_070_TYP": "16", "asterix.034_070_COUNTER": "2047" }, "asterix.034_070": dict_local(x_070, "034_070", "COUNTER", "2047"), } ) x_100 = { "asterix.034_100_RHOS": "0", "asterix.034_100_RHOE": "0", "asterix.034_100_THETAS": "0", "asterix.034_100_THETAE": "0" } validator.add_dissection( [0x01, 0x40, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], "asterix.034_100", dict_local(x_100, "034_100", "RHOS", "255.99609375") ) validator.add_dissection( [0x01, 0x40, 0x00, 0x00, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00], "asterix.034_100", dict_local(x_100, "034_100", "RHOE", "255.99609375") ) validator.add_dissection( [0x01, 0x40, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0x00, 0x00], "asterix.034_100", dict_local(x_100, "034_100", "THETAS", "359.994506835938") ) validator.add_dissection( [0x01, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff], "asterix.034_100", dict_local(x_100, "034_100", "THETAE", "359.994506835938") ) validator.add_dissection( [0x01, 0x20, 0x09], "asterix.034_110", { "asterix.034_110_TYP": "9" } ) x_120 = { "asterix.034_120_H": "0", "asterix.034_120_LAT": "0", "asterix.034_120_LON": "0" } validator.add_dissection( [0x01, 0x10, 0x7f, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], "asterix.034_120", dict_local(x_120, "034_120", "H", "32767") ) validator.add_dissection( [0x01, 0x10, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], "asterix.034_120", dict_local(x_120, "034_120", "H", "-32768") ) validator.add_dissection( [0x01, 0x10, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00], "asterix.034_120", dict_local(x_120, 
"034_120", "LAT", "90") ) validator.add_dissection( [0x01, 0x10, 0x00, 0x00, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00], "asterix.034_120", dict_local(x_120, "034_120", "LAT", "-90") ) validator.add_dissection( [0x01, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7f, 0xff, 0xff], "asterix.034_120", dict_local(x_120, "034_120", "LON", "179.999978542328") ) validator.add_dissection( [0x01, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00], "asterix.034_120", dict_local(x_120, "034_120", "LON", "-180") ) x_090 = { "asterix.034_090_RE": "0", "asterix.034_090_AE": "0" } validator.add_dissection( [0x01, 0x08, 0x7f, 0x00], "asterix.034_090", dict_local(x_090, "034_090", "RE", "0.9921875") ) validator.add_dissection( [0x01, 0x08, 0x80, 0x00], "asterix.034_090", dict_local(x_090, "034_090", "RE", "-1") ) validator.add_dissection( [0x01, 0x08, 0x00, 0x80], "asterix.034_090", dict_local(x_090, "034_090", "AE", "-2.8125") ) validator.check_dissections() def test_undefined_value_handling(self, asterix_validator): '''verifies that the dissector can dissect undefined field values by setting the maximum value of bits or by setting all undefined bits''' validator = asterix_validator(34) validator.add_dissection( [0x40, 0xff], "asterix.034_000", { "asterix.034_000_MT": "255" } ) validator.add_dissection( [0x20, 0xff, 0xff, 0xff], "asterix.034_030", { "asterix.TOD": "131071.9921875" } ) validator.add_dissection( [0x04, 0x63, 0x00], "asterix.034_050", { "asterix.fspec": "", "asterix.spare": "" } ) validator.add_dissection( [0x04, 0x80, 0x01], "asterix.034_050", { "asterix.fspec": "", "asterix.034_050_01": { "asterix.034_050_01_NOGO": "0", "asterix.034_050_01_RDPC": "0", "asterix.034_050_01_RDPR": "0", "asterix.034_050_01_OVL_RDP": "0", "asterix.034_050_01_OVL_XMT": "0", "asterix.034_050_01_MSC": "0", "asterix.034_050_01_TSV": "0" } } ) validator.add_dissection( [0x04, 0x10, 0x07], "asterix.034_050", { "asterix.fspec": "", "asterix.034_050_02": { "asterix.034_050_02_ANT": "0", 
"asterix.034_050_02_CHAB": "0", "asterix.034_050_02_OVL": "0", "asterix.034_050_02_MSC": "0" } } ) validator.add_dissection( [0x04, 0x08, 0x07], "asterix.034_050", { "asterix.fspec": "", "asterix.034_050_03": { "asterix.034_050_03_ANT": "0", "asterix.034_050_03_CHAB": "0", "asterix.034_050_03_OVL": "0", "asterix.034_050_03_MSC": "0" } } ) validator.add_dissection( [0x04, 0x04, 0x00, 0x7f], "asterix.034_050", { "asterix.fspec": "", "asterix.034_050_04": { "asterix.034_050_04_ANT": "0", "asterix.034_050_04_CHAB": "0", "asterix.034_050_04_OVL_SUR": "0", "asterix.034_050_04_MSC": "0", "asterix.034_050_04_SCF": "0", "asterix.034_050_04_DLF": "0", "asterix.034_050_04_OVL_SCF": "0", "asterix.034_050_04_OVL_DLF": "0" } } ) validator.add_dissection( [0x02, 0x63, 0x00], "asterix.034_060", { "asterix.fspec": "", "asterix.spare": "" } ) validator.add_dissection( [0x02, 0x80, 0x81], "asterix.034_060", { "asterix.fspec": "", "asterix.034_060_01": { "asterix.034_060_01_RED_RDP": "0", "asterix.034_060_01_RED_XMT": "0" } } ) validator.add_dissection( [0x02, 0x10, 0x03], "asterix.034_060", { "asterix.fspec": "", "asterix.034_060_02": { "asterix.034_060_02_POL": "0", "asterix.034_060_02_RED_RAD": "0", "asterix.034_060_02_STC": "0" } } ) validator.add_dissection( [0x02, 0x08, 0x1f], "asterix.034_060", fspec_local("034_060_03", "RED_RAD", "0") ) validator.add_dissection( [0x02, 0x04, 0x0f], "asterix.034_060", { "asterix.fspec": "", "asterix.034_060_04": { "asterix.034_060_04_RED_RAD": "0", "asterix.034_060_04_CLU": "0" } } ) x_070 = { "asterix.034_070_TYP": "0", "asterix.034_070_COUNTER": "0" } validator.add_dissection( [0x01, 0x80, 0x01, 0xf8, 0x00], "asterix.034_070", { "asterix.counter": "1", "asterix.034_070": dict_local(x_070, "034_070", "TYP", "31") } ) validator.add_dissection( [0x01, 0x20, 0xff], "asterix.034_110", { "asterix.034_110_TYP": "255" } ) validator.add_dissection( [0x01, 0x04, 0x02, 0xfe], "asterix.034_RE", { "asterix.re_field_len": "2", "asterix.fspec": "" } ) 
validator.add_dissection( [0x01, 0x02, 0x01], "asterix.034_SP", "" ) validator.add_dissection( [0x01, 0x02, 0x11, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff], "asterix.034_SP", "" ) validator.check_dissections() @fixtures.mark_usefixtures('test_env') @fixtures.uses_fixtures class case_category_048(subprocesstest.SubprocessTestCase): ''' Unittest case for ASTERIX Category 048 Online specification: https://www.eurocontrol.int/publications/cat048-monoradar-target-reports-part-4-next-version-cat-001 https://www.eurocontrol.int/publications/cat048-reserved-expansion-field-part-4-appendix Part 4 Category 048 Monoradar Target Reports Standard User Application Profile FRN Data Item Information Length 1 I048/010 Data Source Identifier 2 2 I048/140 Time-of-Day 3 3 I048/020 Target Report Descriptor 1+ 4 I048/040 Measured Position in Slant Polar Coordinates 4 5 I048/070 Mode-3/A Code in Octal Representation 2 6 I048/090 Flight Level in Binary Representation 2 7 I048/130 Radar Plot Characteristics 1+1+ FX n.a. Field Extension Indicator n.a. 8 I048/220 Aircraft Address 3 9 I048/240 Aircraft Identification 6 10 I048/250 Mode S MB Data 1+8*n 11 I048/161 Track Number 2 12 I048/042 Calculated Position in Cartesian Coordinates 4 13 I048/200 Calculated Track Velocity in Polar Representation 4 14 I048/170 Track Status 1+ FX n.a. Field Extension Indicator n.a. 15 I048/210 Track Quality 4 16 I048/030 Warning/Error Conditions 1+ 17 I048/080 Mode-3/A Code Confidence Indicator 2 18 I048/100 Mode-C Code and Confidence Indicator 4 19 I048/110 Height Measured by 3D Radar 2 20 I048/120 Radial Doppler Speed 1+ 21 I048/230 Communications / ACAS Capability and Flight Status 2 FX n.a. Field Extension Indicator n.a. 
22 I048/260 ACAS Resolution Advisory Report 7 23 I048/055 Mode-1 Code in Octal Representation 1 24 I048/050 Mode-2 Code in Octal Representation 2 25 I048/065 Mode-1 Code Confidence Indicator 1 26 I048/060 Mode-2 Code Confidence Indicator 2 27 SP-Data Item Special Purpose Field 1+1+ 28 RE-Data Item Reserved Expansion Field 1+1+ FX n.a. Field Extension Indicator n.a. ''' maxDiff = None def test_for_fields(self, asterix_re_validator): '''verifies existence of all fields and their maximum value''' validator = asterix_re_validator(48, [0x01, 0x01, 0x01, 0x02]) validator.add_dissection( [0x80, 0xff, 0x00], "asterix.048_010", { "asterix.SAC": "255", "asterix.SIC": "0" } ) validator.add_dissection( [0x80, 0x00, 0xff], "asterix.048_010", { "asterix.SAC": "0", "asterix.SIC": "255" } ) validator.add_dissection( [0x40, 0xa8, 0xbf, 0xff], "asterix.048_140", { "asterix.TOD": "86399.9921875" } ) x_020 = { "asterix.048_020_TYP": "0", "asterix.048_020_SIM": "0", "asterix.048_020_RDP": "0", "asterix.048_020_SPI": "0", "asterix.048_020_RAB": "0", "asterix.FX": "0" } validator.add_dissection( [0x20, 0xe0], "asterix.048_020", dict_local(x_020, "048_020", "TYP", "7") ) validator.add_dissection( [0x20, 0x08], "asterix.048_020", dict_local(x_020, "048_020", "RDP", "1") ) validator.add_dissection( [0x20, 0x04], "asterix.048_020", dict_local(x_020, "048_020", "SPI", "1") ) validator.add_dissection( [0x20, 0x02], "asterix.048_020", dict_local(x_020, "048_020", "RAB", "1") ) x_020.update({ "asterix.048_020_TST": "0", "asterix.048_020_ERR": "0", "asterix.048_020_XPP": "0", "asterix.048_020_ME": "0", "asterix.048_020_MI": "0", "asterix.048_020_FOE": "0" }) validator.add_dissection( [0x20, 0x01, 0x80], "asterix.048_020", dict_local(x_020, "048_020", "TST", "1") ) validator.add_dissection( [0x20, 0x01, 0x40], "asterix.048_020", dict_local(x_020, "048_020", "ERR", "1") ) validator.add_dissection( [0x20, 0x01, 0x20], "asterix.048_020", dict_local(x_020, "048_020", "XPP", "1") ) 
# I048/020 Target Report Descriptor, first extension (cont.)
validator.add_dissection(
    [0x20, 0x01, 0x10],
    "asterix.048_020",
    dict_local(x_020, "048_020", "ME", "1")
)
validator.add_dissection(
    [0x20, 0x01, 0x08],
    "asterix.048_020",
    dict_local(x_020, "048_020", "MI", "1")
)
validator.add_dissection(
    [0x20, 0x01, 0x06],
    "asterix.048_020",
    dict_local(x_020, "048_020", "FOE", "3")
)
# I048/040 Measured Position in Slant Polar Coordinates
x_040 = {
    "asterix.048_040_RHO": "0",
    "asterix.048_040_THETA": "0"
}
validator.add_dissection(
    [0x10, 0xff, 0xff, 0x00, 0x00],
    "asterix.048_040",
    dict_local(x_040, "048_040", "RHO", "255.99609375")
)
validator.add_dissection(
    [0x10, 0x00, 0x00, 0xff, 0xff],
    "asterix.048_040",
    dict_local(x_040, "048_040", "THETA", "359.994506835938")
)
# I048/070 Mode-3/A Code in Octal Representation
x_070 = {
    "asterix.048_070_V": "0",
    "asterix.048_070_G": "0",
    "asterix.048_070_L": "0",
    "asterix.048_070_SQUAWK": "0"
}
validator.add_dissection(
    [0x08, 0x80, 0x00],
    "asterix.048_070",
    dict_local(x_070, "048_070", "V", "1")
)
validator.add_dissection(
    [0x08, 0x40, 0x00],
    "asterix.048_070",
    dict_local(x_070, "048_070", "G", "1")
)
validator.add_dissection(
    [0x08, 0x20, 0x00],
    "asterix.048_070",
    dict_local(x_070, "048_070", "L", "1")
)
validator.add_dissection(
    [0x08, 0x0e, 0x00],
    "asterix.048_070",
    dict_local(x_070, "048_070", "SQUAWK", "3584")  # 07000
)
validator.add_dissection(
    [0x08, 0x01, 0xc0],
    "asterix.048_070",
    dict_local(x_070, "048_070", "SQUAWK", "448")  # 0700
)
validator.add_dissection(
    [0x08, 0x00, 0x38],
    "asterix.048_070",
    dict_local(x_070, "048_070", "SQUAWK", "56")  # 070
)
validator.add_dissection(
    [0x08, 0x00, 0x07],
    "asterix.048_070",
    dict_local(x_070, "048_070", "SQUAWK", "7")  # 07
)
# I048/090 Flight Level in Binary Representation
x_090 = {
    "asterix.048_090_V": "0",
    "asterix.048_090_G": "0",
    "asterix.048_090_FL": "0"
}
validator.add_dissection(
    [0x04, 0x80, 0x00],
    "asterix.048_090",
    dict_local(x_090, "048_090", "V", "1")
)
validator.add_dissection(
    [0x04, 0x40, 0x00],
    "asterix.048_090",
    dict_local(x_090, "048_090", "G", "1")
)
validator.add_dissection(
    [0x04, 0x1f, 0xff],
    "asterix.048_090",
    dict_local(x_090, "048_090", "FL", "2047.75")
)
validator.add_dissection(
    [0x04, 0x20, 0x00],
    "asterix.048_090",
    dict_local(x_090, "048_090", "FL", "-2048")
)
# I048/130 Radar Plot Characteristics (one subfield per dissection)
validator.add_dissection(
    [0x02, 0x80, 0xff],
    "asterix.048_130",
    fspec_local("048_130_01", "SRL", "11.2060546875")
)
validator.add_dissection(
    [0x02, 0x40, 0xff],
    "asterix.048_130",
    fspec_local("048_130_02", "SRR", "255")
)
validator.add_dissection(
    [0x02, 0x20, 0x7f],
    "asterix.048_130",
    fspec_local("048_130_03", "SAM", "127")
)
validator.add_dissection(
    [0x02, 0x20, 0x80],
    "asterix.048_130",
    fspec_local("048_130_03", "SAM", "-128")
)
validator.add_dissection(
    [0x02, 0x10, 0xff],
    "asterix.048_130",
    fspec_local("048_130_04", "PRL", "11.2060546875")
)
validator.add_dissection(
    [0x02, 0x08, 0x7f],
    "asterix.048_130",
    fspec_local("048_130_05", "PAM", "127")
)
validator.add_dissection(
    [0x02, 0x08, 0x80],
    "asterix.048_130",
    fspec_local("048_130_05", "PAM", "-128")
)
validator.add_dissection(
    [0x02, 0x04, 0x7f],
    "asterix.048_130",
    fspec_local("048_130_06", "RPD", "0.49609375")
)
validator.add_dissection(
    [0x02, 0x04, 0x80],
    "asterix.048_130",
    fspec_local("048_130_06", "RPD", "-0.5")
)
validator.add_dissection(
    [0x02, 0x02, 0x7f],
    "asterix.048_130",
    fspec_local("048_130_07", "APD", "2.79052734375")
)
validator.add_dissection(
    [0x02, 0x02, 0x80],
    "asterix.048_130",
    fspec_local("048_130_07", "APD", "-2.8125")
)
# I048/220 Aircraft Address.  The original contained this exact call twice
# in a row (identical payload and expectation) — a copy-paste duplicate;
# one occurrence is sufficient.
validator.add_dissection(
    [0x01, 0x80, 0xff, 0xff, 0xff],
    "asterix.048_220",
    {
        "asterix.AA": "0x00ffffff"
    }
)
# I048/240 Aircraft Identification (6-bit ICAO characters)
validator.add_dissection(
    [0x01, 0x40, 0x04, 0x20, 0xda, 0x83, 0x0c, 0x79],
    "asterix.048_240",
    {
        "asterix.AI": "ABCZ 019"
    }
)
# I048/250 Mode S MB Data, single record
x_250 = {
    "asterix.MB_DATA": "00:00:00:00:00:00:00",
    "asterix.BDS1": "0",
    "asterix.BDS2": "0"
}
validator.add_dissection(
    [0x01, 0x20, 0x01, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x00],
    "asterix.048_250",
    {
        "asterix.counter": "1",
        "asterix.048_250": dict_global(x_250, "MB_DATA", "11:22:33:44:55:66:77"),
    }
)
# I048/250 Mode S MB Data: BDS register sub-addresses
validator.add_dissection(
    [0x01, 0x20, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf0],
    "asterix.048_250",
    {
        "asterix.counter": "1",
        "asterix.048_250": dict_global(x_250, "BDS1", "15"),
    }
)
validator.add_dissection(
    [0x01, 0x20, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0f],
    "asterix.048_250",
    {
        "asterix.counter": "1",
        "asterix.048_250": dict_global(x_250, "BDS2", "15"),
    }
)
# I048/250 with three repetitions.  The original expectation repeated the
# key "asterix.048_250" three times (MB_DATA / BDS1 / BDS2) in one dict
# literal; Python silently keeps only the last duplicate, so only the BDS2
# entry was ever compared — and the dead MB_DATA entry even carried a value
# ("11:22:33:44:55:66:77") that does not match this payload (7 x 0x11).
# Keep only the effective expectation.
# NOTE(review): verifying all three repetitions would need a list-valued
# expectation — confirm against the validator API before extending.
validator.add_dissection(
    [0x01, 0x20, 0x03,
     0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x00,
     0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf0,
     0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0f],
    "asterix.048_250",
    {
        "asterix.counter": "3",
        "asterix.048_250": dict_global(x_250, "BDS2", "15"),
    }
)
# I048/161 Track Number
validator.add_dissection(
    [0x01, 0x10, 0x0f, 0xff],
    "asterix.048_161",
    {
        "asterix.048_161_TN": "4095"
    }
)
# I048/042 Calculated Position in Cartesian Coordinates
x_042 = {
    "asterix.048_042_X": "0",
    "asterix.048_042_Y": "0"
}
validator.add_dissection(
    [0x01, 0x08, 0x7f, 0xff, 0x00, 0x00],
    "asterix.048_042",
    dict_local(x_042, "048_042", "X", "255.9921875")
)
validator.add_dissection(
    [0x01, 0x08, 0x80, 0x00, 0x00, 0x00],
    "asterix.048_042",
    dict_local(x_042, "048_042", "X", "-256")
)
validator.add_dissection(
    [0x01, 0x08, 0x00, 0x00, 0x7f, 0xff],
    "asterix.048_042",
    dict_local(x_042, "048_042", "Y", "255.9921875")
)
validator.add_dissection(
    [0x01, 0x08, 0x00, 0x00, 0x80, 0x00],
    "asterix.048_042",
    dict_local(x_042, "048_042", "Y", "-256")
)
# I048/200 Calculated Track Velocity in Polar Representation
x_200 = {
    "asterix.048_200_GS": "0",
    "asterix.048_200_HDG": "0"
}
validator.add_dissection(
    [0x01, 0x04, 0xff, 0xff, 0x00, 0x00],
    "asterix.048_200",
    dict_local(x_200, "048_200", "GS", "3.99993896484375")
)
validator.add_dissection(
    [0x01, 0x04, 0x00, 0x00, 0xff, 0xff],
    "asterix.048_200",
    dict_local(x_200, "048_200", "HDG", "359.994506835938")
)
# I048/170 Track Status, first octet
x_170 = {
    "asterix.048_170_CNF": "0",
    "asterix.048_170_RAD": "0",
    "asterix.048_170_DOU": "0",
    "asterix.048_170_MAH": "0",
    "asterix.048_170_CDM": "0",
    "asterix.FX": "0"
}
validator.add_dissection(
    [0x01, 0x02, 0x80],
    "asterix.048_170",
    dict_local(x_170, "048_170", "CNF", "1")
)
validator.add_dissection(
    [0x01, 0x02, 0x60],
    "asterix.048_170",
    dict_local(x_170, "048_170", "RAD", "3")
)
validator.add_dissection(
    [0x01, 0x02, 0x10],
    "asterix.048_170",
    dict_local(x_170, "048_170", "DOU", "1")
)
validator.add_dissection(
    [0x01, 0x02, 0x08],
    "asterix.048_170",
    dict_local(x_170, "048_170", "MAH", "1")
)
validator.add_dissection(
    [0x01, 0x02, 0x06],
    "asterix.048_170",
    dict_local(x_170, "048_170", "CDM", "3")
)
# I048/170 first extension
x_170.update({
    "asterix.048_170_TRE": "0",
    "asterix.048_170_GHO": "0",
    "asterix.048_170_SUP": "0",
    "asterix.048_170_TCC": "0"
})
validator.add_dissection(
    [0x01, 0x02, 0x01, 0x80],
    "asterix.048_170",
    dict_local(x_170, "048_170", "TRE", "1")
)
validator.add_dissection(
    [0x01, 0x02, 0x01, 0x40],
    "asterix.048_170",
    dict_local(x_170, "048_170", "GHO", "1")
)
validator.add_dissection(
    [0x01, 0x02, 0x01, 0x20],
    "asterix.048_170",
    dict_local(x_170, "048_170", "SUP", "1")
)
validator.add_dissection(
    [0x01, 0x02, 0x01, 0x10],
    "asterix.048_170",
    dict_local(x_170, "048_170", "TCC", "1")
)
# I048/210 Track Quality
x_210 = {
    "asterix.048_210_X": "0",
    "asterix.048_210_Y": "0",
    "asterix.048_210_V": "0",
    "asterix.048_210_H": "0"
}
validator.add_dissection(
    [0x01, 0x01, 0x80, 0xff, 0x00, 0x00, 0x00],
    "asterix.048_210",
    dict_local(x_210, "048_210", "X", "1.9921875")
)
validator.add_dissection(
    [0x01, 0x01, 0x80, 0x00, 0xff, 0x00, 0x00],
    "asterix.048_210",
    dict_local(x_210, "048_210", "Y", "1.9921875")
)
validator.add_dissection(
    [0x01, 0x01, 0x80, 0x00, 0x00, 0xff, 0x00],
    "asterix.048_210",
    dict_local(x_210, "048_210", "V", "0.01556396484375")
)
validator.add_dissection(
    [0x01, 0x01, 0x80, 0x00, 0x00, 0x00, 0xff],
    "asterix.048_210",
    dict_local(x_210, "048_210", "H", "22.412109375")
)
# I048/030 Warning/Error Conditions (with and without extensions)
validator.add_dissection(
    [0x01, 0x01, 0x40, 0x2e],
    "asterix.048_030",
    {
        "asterix.048_030_WE": "23",
        "asterix.FX": "0"
    }
)
validator.add_dissection(
    [0x01, 0x01, 0x40, 0x2f, 0x03, 0x05, 0x06],
    "asterix.048_030",
    {
        "asterix.048_030_WE": "23",
        "asterix.048_030_1_WE": "1",
        "asterix.048_030_2_WE": "2",
        "asterix.048_030_3_WE": "3",
        "asterix.FX": "0"
    }
)
# I048/080 Mode-3/A Code Confidence Indicator
x_080 = {
    "asterix.048_080_QA4": "0",
    "asterix.048_080_QA2": "0",
    "asterix.048_080_QA1": "0",
    "asterix.048_080_QB4": "0",
    "asterix.048_080_QB2": "0",
    "asterix.048_080_QB1": "0",
    "asterix.048_080_QC4": "0",
    "asterix.048_080_QC2": "0",
    "asterix.048_080_QC1": "0",
    "asterix.048_080_QD4": "0",
    "asterix.048_080_QD2": "0",
    "asterix.048_080_QD1": "0"
}
validator.add_dissection(
    [0x01, 0x01, 0x20, 0x08, 0x00],
    "asterix.048_080",
    dict_local(x_080, "048_080", "QA4", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x20, 0x04, 0x00],
    "asterix.048_080",
    dict_local(x_080, "048_080", "QA2", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x20, 0x02, 0x00],
    "asterix.048_080",
    dict_local(x_080, "048_080", "QA1", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x20, 0x01, 0x00],
    "asterix.048_080",
    dict_local(x_080, "048_080", "QB4", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x20, 0x00, 0x80],
    "asterix.048_080",
    dict_local(x_080, "048_080", "QB2", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x20, 0x00, 0x40],
    "asterix.048_080",
    dict_local(x_080, "048_080", "QB1", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x20, 0x00, 0x20],
    "asterix.048_080",
    dict_local(x_080, "048_080", "QC4", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x20, 0x00, 0x10],
    "asterix.048_080",
    dict_local(x_080, "048_080", "QC2", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x20, 0x00, 0x08],
    "asterix.048_080",
    dict_local(x_080, "048_080", "QC1", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x20, 0x00, 0x04],
    "asterix.048_080",
    dict_local(x_080, "048_080", "QD4", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x20, 0x00, 0x02],
    "asterix.048_080",
    dict_local(x_080, "048_080", "QD2", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x20, 0x00, 0x01],
    "asterix.048_080",
    dict_local(x_080, "048_080", "QD1", "1")
)
# I048/100 Mode-C Code and Confidence Indicator
x_100 = {
    "asterix.048_100_V": "0",
    "asterix.048_100_G": "0",
    "asterix.048_100_C1": "0",
    "asterix.048_100_A1": "0",
    "asterix.048_100_C2": "0",
    "asterix.048_100_A2": "0",
    "asterix.048_100_C4": "0",
    "asterix.048_100_A4": "0",
    "asterix.048_100_B1": "0",
    "asterix.048_100_D1": "0",
    "asterix.048_100_B2": "0",
    "asterix.048_100_D2": "0",
    "asterix.048_100_B4": "0",
    "asterix.048_100_D4": "0",
    "asterix.048_100_QC1": "0",
    "asterix.048_100_QA1": "0",
    "asterix.048_100_QC2": "0",
    "asterix.048_100_QA2": "0",
    "asterix.048_100_QC4": "0",
    "asterix.048_100_QA4": "0",
    "asterix.048_100_QB1": "0",
    "asterix.048_100_QD1": "0",
    "asterix.048_100_QB2": "0",
    "asterix.048_100_QD2": "0",
    "asterix.048_100_QB4": "0",
    "asterix.048_100_QD4": "0"
}
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x80, 0x00, 0x00, 0x00],
    "asterix.048_100",
    dict_local(x_100, "048_100", "V", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x40, 0x00, 0x00, 0x00],
    "asterix.048_100",
    dict_local(x_100, "048_100", "G", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x08, 0x00, 0x00, 0x00],
    "asterix.048_100",
    dict_local(x_100, "048_100", "C1", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x04, 0x00, 0x00, 0x00],
    "asterix.048_100",
    dict_local(x_100, "048_100", "A1", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x02, 0x00, 0x00, 0x00],
    "asterix.048_100",
    dict_local(x_100, "048_100", "C2", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x01, 0x00, 0x00, 0x00],
    "asterix.048_100",
    dict_local(x_100, "048_100", "A2", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x00, 0x80, 0x00, 0x00],
    "asterix.048_100",
    dict_local(x_100, "048_100", "C4", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x00, 0x40, 0x00, 0x00],
    "asterix.048_100",
    dict_local(x_100, "048_100", "A4", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x00, 0x20, 0x00, 0x00],
    "asterix.048_100",
    dict_local(x_100, "048_100", "B1", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x00, 0x10, 0x00, 0x00],
    "asterix.048_100",
    dict_local(x_100, "048_100", "D1", "1")
)
# I048/100 Mode-C code bits (cont.) and confidence bits
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x00, 0x08, 0x00, 0x00],
    "asterix.048_100",
    dict_local(x_100, "048_100", "B2", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x00, 0x04, 0x00, 0x00],
    "asterix.048_100",
    dict_local(x_100, "048_100", "D2", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x00, 0x02, 0x00, 0x00],
    "asterix.048_100",
    dict_local(x_100, "048_100", "B4", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x00, 0x01, 0x00, 0x00],
    "asterix.048_100",
    dict_local(x_100, "048_100", "D4", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x00, 0x00, 0x08, 0x00],
    "asterix.048_100",
    dict_local(x_100, "048_100", "QC1", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x00, 0x00, 0x04, 0x00],
    "asterix.048_100",
    dict_local(x_100, "048_100", "QA1", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x00, 0x00, 0x02, 0x00],
    "asterix.048_100",
    dict_local(x_100, "048_100", "QC2", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x00, 0x00, 0x01, 0x00],
    "asterix.048_100",
    dict_local(x_100, "048_100", "QA2", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x00, 0x00, 0x00, 0x80],
    "asterix.048_100",
    dict_local(x_100, "048_100", "QC4", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x00, 0x00, 0x00, 0x40],
    "asterix.048_100",
    dict_local(x_100, "048_100", "QA4", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x00, 0x00, 0x00, 0x20],
    "asterix.048_100",
    dict_local(x_100, "048_100", "QB1", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x00, 0x00, 0x00, 0x10],
    "asterix.048_100",
    dict_local(x_100, "048_100", "QD1", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x00, 0x00, 0x00, 0x08],
    "asterix.048_100",
    dict_local(x_100, "048_100", "QB2", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x00, 0x00, 0x00, 0x04],
    "asterix.048_100",
    dict_local(x_100, "048_100", "QD2", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x00, 0x00, 0x00, 0x02],
    "asterix.048_100",
    dict_local(x_100, "048_100", "QB4", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x10, 0x00, 0x00, 0x00, 0x01],
    "asterix.048_100",
    dict_local(x_100, "048_100", "QD4", "1")
)
# I048/110 Height Measured by a 3D Radar
validator.add_dissection(
    [0x01, 0x01, 0x08, 0x1f, 0xff],
    "asterix.048_110",
    {
        "asterix.048_110_3DHEIGHT": "204775"
    }
)
validator.add_dissection(
    [0x01, 0x01, 0x08, 0x20, 0x00],
    "asterix.048_110",
    {
        "asterix.048_110_3DHEIGHT": "-204800"
    }
)
# I048/120 Radial Doppler Speed, subfield #1 (calculated doppler speed)
x_120_01 = {
    "asterix.048_120_01_D": "0",
    "asterix.048_120_01_CAL": "0"
}
validator.add_dissection(
    [0x01, 0x01, 0x04, 0x80, 0x80, 0x00],
    "asterix.048_120",
    dict_fspec_local(x_120_01, "048_120_01", "D", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x04, 0x80, 0x01, 0xff],
    "asterix.048_120",
    dict_fspec_local(x_120_01, "048_120_01", "CAL", "511")
)
validator.add_dissection(
    [0x01, 0x01, 0x04, 0x80, 0x02, 0x00],
    "asterix.048_120",
    dict_fspec_local(x_120_01, "048_120_01", "CAL", "-512")
)
# I048/120 subfield #2 (raw doppler speed), single repetition
x_120_02 = {
    "asterix.048_120_02_DOP": "0",
    "asterix.048_120_02_AMB": "0",
    "asterix.048_120_02_FRQ": "0"
}
validator.add_dissection(
    [0x01, 0x01, 0x04, 0x40, 0x01, 0x7f, 0xff, 0x00, 0x00, 0x00, 0x00],
    "asterix.048_120",
    counter_local(x_120_02, "1", "048_120_02", "DOP", "32767")
)
validator.add_dissection(
    [0x01, 0x01, 0x04, 0x40, 0x01, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00],
    "asterix.048_120",
    counter_local(x_120_02, "1", "048_120_02", "DOP", "-32768")
)
validator.add_dissection(
    [0x01, 0x01, 0x04, 0x40, 0x01, 0x00, 0x00, 0xff, 0xff, 0x00, 0x00],
    "asterix.048_120",
    counter_local(x_120_02, "1", "048_120_02", "AMB", "65535")
)
validator.add_dissection(
    [0x01, 0x01, 0x04, 0x40, 0x01, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff],
    "asterix.048_120",
    counter_local(x_120_02, "1", "048_120_02", "FRQ", "65535")
)
# I048/120 subfield #2 with three repetitions.  The original expectation
# repeated the key "asterix.048_120_02" three times (DOP / AMB / FRQ) in
# one dict literal; Python keeps only the last duplicate, so only the FRQ
# entry was ever compared.  Keep only the effective expectation.
# NOTE(review): checking all three repetitions would need a list-valued
# expectation — confirm against the validator API before extending.
validator.add_dissection(
    [0x01, 0x01, 0x04, 0x40, 0x03,
     0x80, 0x00, 0x00, 0x00, 0x00, 0x00,
     0x00, 0x00, 0xff, 0xff, 0x00, 0x00,
     0x00, 0x00, 0x00, 0x00, 0xff, 0xff],
    "asterix.048_120",
    {
        "asterix.fspec": "",
        "asterix.048_120_02": {
            "asterix.counter": "3",
            "asterix.048_120_02": dict_local(x_120_02, "048_120_02", "FRQ", "65535")
        }
    }
)
# I048/230 Communications/ACAS Capability and Flight Status
x_230 = {
    "asterix.048_230_COM": "0",
    "asterix.048_230_STAT": "0",
    "asterix.048_230_SI": "0",
    "asterix.048_230_MSSC": "0",
    "asterix.048_230_ARC": "0",
    "asterix.048_230_AIC": "0",
    "asterix.048_230_B1A": "0",
    "asterix.048_230_B1B": "0"
}
validator.add_dissection(
    [0x01, 0x01, 0x02, 0xe0, 0x00],
    "asterix.048_230",
    dict_local(x_230, "048_230", "COM", "7")
)
validator.add_dissection(
    [0x01, 0x01, 0x02, 0x1c, 0x00],
    "asterix.048_230",
    dict_local(x_230, "048_230", "STAT", "7")
)
validator.add_dissection(
    [0x01, 0x01, 0x02, 0x02, 0x00],
    "asterix.048_230",
    dict_local(x_230, "048_230", "SI", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x02, 0x00, 0x80],
    "asterix.048_230",
    dict_local(x_230, "048_230", "MSSC", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x02, 0x00, 0x40],
    "asterix.048_230",
    dict_local(x_230, "048_230", "ARC", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x02, 0x00, 0x20],
    "asterix.048_230",
    dict_local(x_230, "048_230", "AIC", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x02, 0x00, 0x10],
    "asterix.048_230",
    dict_local(x_230, "048_230", "B1A", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x02, 0x00, 0x0f],
    "asterix.048_230",
    dict_local(x_230, "048_230", "B1B", "15")
)
# I048/260 ACAS Resolution Advisory Report
validator.add_dissection(
    [0x01, 0x01, 0x01, 0x80, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77],
    "asterix.048_260",
    {
        "asterix.048_260_ACAS": "11:22:33:44:55:66:77"
    }
)
# I048/055 Mode-1 Code in Octal Representation
x_055 = {
    "asterix.048_055_V": "0",
    "asterix.048_055_G": "0",
    "asterix.048_055_L": "0",
    "asterix.048_055_CODE": "0"
}
validator.add_dissection(
    [0x01, 0x01, 0x01, 0x40, 0x80],
    "asterix.048_055",
    dict_local(x_055, "048_055", "V", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x01, 0x40, 0x40],
    "asterix.048_055",
    dict_local(x_055, "048_055", "G", "1")
)
validator.add_dissection(
    [0x01, 0x01, 0x01, 0x40, 0x20],
    "asterix.048_055",
    dict_local(x_055, "048_055", "L", "1")
)
validator.add_dissection( [0x01, 0x01, 0x01, 0x40, 0x1f], "asterix.048_055", dict_local(x_055, "048_055", "CODE", "31") ) x_050 = { "asterix.048_050_V": "0", "asterix.048_050_G": "0", "asterix.048_050_L": "0", "asterix.048_050_SQUAWK": "0" } validator.add_dissection( [0x01, 0x01, 0x01, 0x20, 0x80, 0x00], "asterix.048_050", dict_local(x_050, "048_050", "V", "1") ) validator.add_dissection( [0x01, 0x01, 0x01, 0x20, 0x40, 0x00], "asterix.048_050", dict_local(x_050, "048_050", "G", "1") ) validator.add_dissection( [0x01, 0x01, 0x01, 0x20, 0x20, 0x00], "asterix.048_050", dict_local(x_050, "048_050", "L", "1") ) validator.add_dissection( [0x01, 0x01, 0x01, 0x20, 0x0f, 0xff], "asterix.048_050", dict_local(x_050, "048_050", "SQUAWK", "4095") ) x_065 = { "asterix.048_065_QA4": "0", "asterix.048_065_QA2": "0", "asterix.048_065_QA1": "0", "asterix.048_065_QB2": "0", "asterix.048_065_QB1": "0" } validator.add_dissection( [0x01, 0x01, 0x01, 0x10, 0x10], "asterix.048_065", dict_local(x_065, "048_065", "QA4", "1") ) validator.add_dissection( [0x01, 0x01, 0x01, 0x10, 0x08], "asterix.048_065", dict_local(x_065, "048_065", "QA2", "1") ) validator.add_dissection( [0x01, 0x01, 0x01, 0x10, 0x04], "asterix.048_065", dict_local(x_065, "048_065", "QA1", "1") ) validator.add_dissection( [0x01, 0x01, 0x01, 0x10, 0x02], "asterix.048_065", dict_local(x_065, "048_065", "QB2", "1") ) validator.add_dissection( [0x01, 0x01, 0x01, 0x10, 0x01], "asterix.048_065", dict_local(x_065, "048_065", "QB1", "1") ) x_060 = { "asterix.048_060_QA4": "0", "asterix.048_060_QA2": "0", "asterix.048_060_QA1": "0", "asterix.048_060_QB4": "0", "asterix.048_060_QB2": "0", "asterix.048_060_QB1": "0", "asterix.048_060_QC4": "0", "asterix.048_060_QC2": "0", "asterix.048_060_QC1": "0", "asterix.048_060_QD4": "0", "asterix.048_060_QD2": "0", "asterix.048_060_QD1": "0" } validator.add_dissection( [0x01, 0x01, 0x01, 0x08, 0x08, 0x00], "asterix.048_060", dict_local(x_060, "048_060", "QA4", "1") ) validator.add_dissection( 
[0x01, 0x01, 0x01, 0x08, 0x04, 0x00], "asterix.048_060", dict_local(x_060, "048_060", "QA2", "1") ) validator.add_dissection( [0x01, 0x01, 0x01, 0x08, 0x02, 0x00], "asterix.048_060", dict_local(x_060, "048_060", "QA1", "1") ) validator.add_dissection( [0x01, 0x01, 0x01, 0x08, 0x01, 0x00], "asterix.048_060", dict_local(x_060, "048_060", "QB4", "1") ) validator.add_dissection( [0x01, 0x01, 0x01, 0x08, 0x00, 0x80], "asterix.048_060", dict_local(x_060, "048_060", "QB2", "1") ) validator.add_dissection( [0x01, 0x01, 0x01, 0x08, 0x00, 0x40], "asterix.048_060", dict_local(x_060, "048_060", "QB1", "1") ) validator.add_dissection( [0x01, 0x01, 0x01, 0x08, 0x00, 0x20], "asterix.048_060", dict_local(x_060, "048_060", "QC4", "1") ) validator.add_dissection( [0x01, 0x01, 0x01, 0x08, 0x00, 0x10], "asterix.048_060", dict_local(x_060, "048_060", "QC2", "1") ) validator.add_dissection( [0x01, 0x01, 0x01, 0x08, 0x00, 0x08], "asterix.048_060", dict_local(x_060, "048_060", "QC1", "1") ) validator.add_dissection( [0x01, 0x01, 0x01, 0x08, 0x00, 0x04], "asterix.048_060", dict_local(x_060, "048_060", "QD4", "1") ) validator.add_dissection( [0x01, 0x01, 0x01, 0x08, 0x00, 0x02], "asterix.048_060", dict_local(x_060, "048_060", "QD2", "1") ) validator.add_dissection( [0x01, 0x01, 0x01, 0x08, 0x00, 0x01], "asterix.048_060", dict_local(x_060, "048_060", "QD1", "1") ) validator.add_dissection( [0x01, 0x01, 0x01, 0x04, 0x01], "asterix.048_SP", "" ) validator.add_dissection( [0x01, 0x01, 0x01, 0x04, 0x10, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff], "asterix.048_SP", "" ) x_re_md5 = { "asterix.048_RE_MD5_01_M5": "0", "asterix.048_RE_MD5_01_ID": "0", "asterix.048_RE_MD5_01_DA": "0", "asterix.048_RE_MD5_01_M1": "0", "asterix.048_RE_MD5_01_M2": "0", "asterix.048_RE_MD5_01_M3": "0", "asterix.048_RE_MD5_01_MC": "0" } validator.add_re_dissection( [0x80, 0x80, 0x80], "MD5", dict_fspec_local(x_re_md5, "048_RE_MD5_01", "M5", "1") ) validator.add_re_dissection( 
[0x80, 0x80, 0x40], "MD5", dict_fspec_local(x_re_md5, "048_RE_MD5_01", "ID", "1") ) validator.add_re_dissection( [0x80, 0x80, 0x20], "MD5", dict_fspec_local(x_re_md5, "048_RE_MD5_01", "DA", "1") ) validator.add_re_dissection( [0x80, 0x80, 0x10], "MD5", dict_fspec_local(x_re_md5, "048_RE_MD5_01", "M1", "1") ) validator.add_re_dissection( [0x80, 0x80, 0x08], "MD5", dict_fspec_local(x_re_md5, "048_RE_MD5_01", "M2", "1") ) validator.add_re_dissection( [0x80, 0x80, 0x04], "MD5", dict_fspec_local(x_re_md5, "048_RE_MD5_01", "M3", "1") ) validator.add_re_dissection( [0x80, 0x80, 0x02], "MD5", dict_fspec_local(x_re_md5, "048_RE_MD5_01", "MC", "1") ) x_re_pmn = { "asterix.048_RE_MD5_02_PIN": "0", "asterix.048_RE_MD5_02_NAV": "0", "asterix.048_RE_MD5_02_NAT": "0", "asterix.048_RE_MD5_02_MIS": "0" } validator.add_re_dissection( [0x80, 0x40, 0x3f, 0xff, 0x00, 0x00], "MD5", dict_fspec_local(x_re_pmn, "048_RE_MD5_02", "PIN", "16383") ) validator.add_re_dissection( [0x80, 0x40, 0x00, 0x00, 0x20, 0x00], "MD5", dict_fspec_local(x_re_pmn, "048_RE_MD5_02", "NAV", "1") ) validator.add_re_dissection( [0x80, 0x40, 0x00, 0x00, 0x1f, 0x00], "MD5", dict_fspec_local(x_re_pmn, "048_RE_MD5_02", "NAT", "31") ) validator.add_re_dissection( [0x80, 0x40, 0x00, 0x00, 0x00, 0x3f], "MD5", dict_fspec_local(x_re_pmn, "048_RE_MD5_02", "MIS", "63") ) x_re_pos = { "asterix.048_RE_MD5_03_LAT": "0", "asterix.048_RE_MD5_03_LON": "0" } validator.add_re_dissection( [0x80, 0x20, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00], "MD5", dict_fspec_local(x_re_pos, "048_RE_MD5_03", "LAT", "90") ) validator.add_re_dissection( [0x80, 0x20, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00], "MD5", dict_fspec_local(x_re_pos, "048_RE_MD5_03", "LAT", "-90") ) validator.add_re_dissection( [0x80, 0x20, 0x00, 0x00, 0x00, 0x7f, 0xff, 0xff], "MD5", dict_fspec_local(x_re_pos, "048_RE_MD5_03", "LON", "179.999978542328") ) validator.add_re_dissection( [0x80, 0x20, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00], "MD5", dict_fspec_local(x_re_pos, "048_RE_MD5_03", 
"LON", "-180") ) x_re_ga = { "asterix.048_RE_MD5_04_RES": "0", "asterix.048_RE_MD5_04_GA": "0" } validator.add_re_dissection( [0x80, 0x10, 0x40, 0x00], "MD5", dict_fspec_local(x_re_ga, "048_RE_MD5_04", "RES", "1") ) validator.add_re_dissection( [0x80, 0x10, 0x1f, 0xff], "MD5", dict_fspec_local(x_re_ga, "048_RE_MD5_04", "GA", "204775") ) validator.add_re_dissection( [0x80, 0x10, 0x20, 0x00], "MD5", dict_fspec_local(x_re_ga, "048_RE_MD5_04", "GA", "-204800") ) x_re_em1 = { "asterix.048_RE_MD5_05_V": "0", "asterix.048_RE_MD5_05_G": "0", "asterix.048_RE_MD5_05_L": "0", "asterix.048_RE_MD5_05_SQUAWK": "0" } validator.add_re_dissection( [0x80, 0x08, 0x80, 0x00], "MD5", dict_fspec_local(x_re_em1, "048_RE_MD5_05", "V", "1") ) validator.add_re_dissection( [0x80, 0x08, 0x40, 0x00], "MD5", dict_fspec_local(x_re_em1, "048_RE_MD5_05", "G", "1") ) validator.add_re_dissection( [0x80, 0x08, 0x20, 0x00], "MD5", dict_fspec_local(x_re_em1, "048_RE_MD5_05", "L", "1") ) validator.add_re_dissection( [0x80, 0x08, 0x0f, 0xff], "MD5", dict_fspec_local(x_re_em1, "048_RE_MD5_05", "SQUAWK", "4095") ) validator.add_re_dissection( [0x80, 0x04, 0x7f], "MD5", fspec_local("048_RE_MD5_06", "TOS", "0.9921875") ) validator.add_re_dissection( [0x80, 0x04, 0x80], "MD5", fspec_local("048_RE_MD5_06", "TOS", "-1") ) x_re_xp = { "asterix.048_RE_MD5_07_XP": "0", "asterix.048_RE_MD5_07_X5": "0", "asterix.048_RE_MD5_07_XC": "0", "asterix.048_RE_MD5_07_X3": "0", "asterix.048_RE_MD5_07_X2": "0", "asterix.048_RE_MD5_07_X1": "0" } validator.add_re_dissection( [0x80, 0x02, 0x20], "MD5", dict_fspec_local(x_re_xp, "048_RE_MD5_07", "XP", "1") ) validator.add_re_dissection( [0x80, 0x02, 0x10], "MD5", dict_fspec_local(x_re_xp, "048_RE_MD5_07", "X5", "1") ) validator.add_re_dissection( [0x80, 0x02, 0x08], "MD5", dict_fspec_local(x_re_xp, "048_RE_MD5_07", "XC", "1") ) validator.add_re_dissection( [0x80, 0x02, 0x04], "MD5", dict_fspec_local(x_re_xp, "048_RE_MD5_07", "X3", "1") ) validator.add_re_dissection( [0x80, 0x02, 
0x02], "MD5", dict_fspec_local(x_re_xp, "048_RE_MD5_07", "X2", "1") ) validator.add_re_dissection( [0x80, 0x02, 0x01], "MD5", dict_fspec_local(x_re_xp, "048_RE_MD5_07", "X1", "1") ) x_re_md5 = { "asterix.048_RE_M5N_01_M5": "0", "asterix.048_RE_M5N_01_ID": "0", "asterix.048_RE_M5N_01_DA": "0", "asterix.048_RE_M5N_01_M1": "0", "asterix.048_RE_M5N_01_M2": "0", "asterix.048_RE_M5N_01_M3": "0", "asterix.048_RE_M5N_01_MC": "0" } validator.add_re_dissection( [0x40, 0x80, 0x80], "M5N", dict_fspec_local(x_re_md5, "048_RE_M5N_01", "M5", "1") ) validator.add_re_dissection( [0x40, 0x80, 0x40], "M5N", dict_fspec_local(x_re_md5, "048_RE_M5N_01", "ID", "1") ) validator.add_re_dissection( [0x40, 0x80, 0x20], "M5N", dict_fspec_local(x_re_md5, "048_RE_M5N_01", "DA", "1") ) validator.add_re_dissection( [0x40, 0x80, 0x10], "M5N", dict_fspec_local(x_re_md5, "048_RE_M5N_01", "M1", "1") ) validator.add_re_dissection( [0x40, 0x80, 0x08], "M5N", dict_fspec_local(x_re_md5, "048_RE_M5N_01", "M2", "1") ) validator.add_re_dissection( [0x40, 0x80, 0x04], "M5N", dict_fspec_local(x_re_md5, "048_RE_M5N_01", "M3", "1") ) validator.add_re_dissection( [0x40, 0x80, 0x02], "M5N", dict_fspec_local(x_re_md5, "048_RE_M5N_01", "MC", "1") ) x_re_pmn = { "asterix.048_RE_M5N_02_PIN": "0", "asterix.048_RE_M5N_02_NOV": "0", "asterix.048_RE_M5N_02_NO": "0" } validator.add_re_dissection( [0x40, 0x40, 0x3f, 0xff, 0x00, 0x00], "M5N", dict_fspec_local(x_re_pmn, "048_RE_M5N_02", "PIN", "16383") ) validator.add_re_dissection( [0x40, 0x40, 0x00, 0x00, 0x08, 0x00], "M5N", dict_fspec_local(x_re_pmn, "048_RE_M5N_02", "NOV", "1") ) validator.add_re_dissection( [0x40, 0x40, 0x00, 0x00, 0x07, 0xff], "M5N", dict_fspec_local(x_re_pmn, "048_RE_M5N_02", "NO", "2047") ) x_re_pos = { "asterix.048_RE_M5N_03_LAT": "0", "asterix.048_RE_M5N_03_LON": "0" } validator.add_re_dissection( [0x40, 0x20, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00], "M5N", dict_fspec_local(x_re_pos, "048_RE_M5N_03", "LAT", "90") ) validator.add_re_dissection( [0x40, 
0x20, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00], "M5N", dict_fspec_local(x_re_pos, "048_RE_M5N_03", "LAT", "-90") ) validator.add_re_dissection( [0x40, 0x20, 0x00, 0x00, 0x00, 0x7f, 0xff, 0xff], "M5N", dict_fspec_local(x_re_pos, "048_RE_M5N_03", "LON", "179.999978542328") ) validator.add_re_dissection( [0x40, 0x20, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00], "M5N", dict_fspec_local(x_re_pos, "048_RE_M5N_03", "LON", "-180") ) x_re_ga = { "asterix.048_RE_M5N_04_RES": "0", "asterix.048_RE_M5N_04_GA": "0" } validator.add_re_dissection( [0x40, 0x10, 0x40, 0x00], "M5N", dict_fspec_local(x_re_ga, "048_RE_M5N_04", "RES", "1") ) validator.add_re_dissection( [0x40, 0x10, 0x1f, 0xff], "M5N", dict_fspec_local(x_re_ga, "048_RE_M5N_04", "GA", "204775") ) validator.add_re_dissection( [0x40, 0x10, 0x20, 0x00], "M5N", dict_fspec_local(x_re_ga, "048_RE_M5N_04", "GA", "-204800") ) x_re_em1 = { "asterix.048_RE_M5N_05_V": "0", "asterix.048_RE_M5N_05_G": "0", "asterix.048_RE_M5N_05_L": "0", "asterix.048_RE_M5N_05_SQUAWK": "0" } validator.add_re_dissection( [0x40, 0x08, 0x80, 0x00], "M5N", dict_fspec_local(x_re_em1, "048_RE_M5N_05", "V", "1") ) validator.add_re_dissection( [0x40, 0x08, 0x40, 0x00], "M5N", dict_fspec_local(x_re_em1, "048_RE_M5N_05", "G", "1") ) validator.add_re_dissection( [0x40, 0x08, 0x20, 0x00], "M5N", dict_fspec_local(x_re_em1, "048_RE_M5N_05", "L", "1") ) validator.add_re_dissection( [0x40, 0x08, 0x0f, 0xff], "M5N", dict_fspec_local(x_re_em1, "048_RE_M5N_05", "SQUAWK", "4095") ) validator.add_re_dissection( [0x40, 0x04, 0x7f], "M5N", fspec_local("048_RE_M5N_06", "TOS", "0.9921875") ) validator.add_re_dissection( [0x40, 0x04, 0x80], "M5N", fspec_local("048_RE_M5N_06", "TOS", "-1") ) x_re_xp = { "asterix.048_RE_M5N_07_XP": "0", "asterix.048_RE_M5N_07_X5": "0", "asterix.048_RE_M5N_07_XC": "0", "asterix.048_RE_M5N_07_X3": "0", "asterix.048_RE_M5N_07_X2": "0", "asterix.048_RE_M5N_07_X1": "0" } validator.add_re_dissection( [0x40, 0x02, 0x20], "M5N", dict_fspec_local(x_re_xp, 
"048_RE_M5N_07", "XP", "1") ) validator.add_re_dissection( [0x40, 0x02, 0x10], "M5N", dict_fspec_local(x_re_xp, "048_RE_M5N_07", "X5", "1") ) validator.add_re_dissection( [0x40, 0x02, 0x08], "M5N", dict_fspec_local(x_re_xp, "048_RE_M5N_07", "XC", "1") ) validator.add_re_dissection( [0x40, 0x02, 0x04], "M5N", dict_fspec_local(x_re_xp, "048_RE_M5N_07", "X3", "1") ) validator.add_re_dissection( [0x40, 0x02, 0x02], "M5N", dict_fspec_local(x_re_xp, "048_RE_M5N_07", "X2", "1") ) validator.add_re_dissection( [0x40, 0x02, 0x01], "M5N", dict_fspec_local(x_re_xp, "048_RE_M5N_07", "X1", "1") ) validator.add_re_dissection( [0x40, 0x01, 0x80, 0x1f], "M5N", fspec_local("048_RE_M5N_08", "FOM", "31") ) validator.add_re_dissection( [0x20, 0x06], "M4E", { "asterix.048_RE_M4E_FOE_FRI": "3", "asterix.FX": "0" } ) validator.add_re_dissection( [0x10, 0x80, 0xff], "RPC", fspec_local("048_RE_RPC_01", "SCO", "255") ) validator.add_re_dissection( [0x10, 0x40, 0xff, 0xff], "RPC", fspec_local("048_RE_RPC_02", "SCR", "6553.5") ) validator.add_re_dissection( [0x10, 0x20, 0xff, 0xff], "RPC", fspec_local("048_RE_RPC_03", "RW", "255.99609375") ) validator.add_re_dissection( [0x10, 0x10, 0xff, 0xff], "RPC", fspec_local("048_RE_RPC_04", "AR", "255.99609375") ) validator.add_re_dissection( [0x08, 0xff, 0xff, 0xff], "ERR", { "asterix.048_RE_ERR_RHO": "65535.99609375" } ) validator.check_dissections() def test_undefined_value_handling(self, asterix_re_validator): '''verifies that the dissector can dissect undefined field values by setting the maximum value of bits or by setting all undefined bits''' validator = asterix_re_validator(48, [0x01, 0x01, 0x01, 0x02]) validator.add_dissection( [0x08, 0x10, 0x00], "asterix.048_070", { "asterix.048_070_V": "0", "asterix.048_070_G": "0", "asterix.048_070_L": "0", "asterix.048_070_SQUAWK": "0" } ) validator.add_dissection( [0x01, 0x10, 0xf0, 0x00], "asterix.048_161", { "asterix.048_161_TN": "0" } ) validator.add_dissection( [0x01, 0x02, 0x01, 0x0e], 
"asterix.048_170", { "asterix.048_170_CNF": "0", "asterix.048_170_RAD": "0", "asterix.048_170_DOU": "0", "asterix.048_170_MAH": "0", "asterix.048_170_CDM": "0", "asterix.048_170_TRE": "0", "asterix.048_170_GHO": "0", "asterix.048_170_SUP": "0", "asterix.048_170_TCC": "0", "asterix.FX": "0" } ) validator.add_dissection( [0x01, 0x01, 0x40, 0xfe], "asterix.048_030", { "asterix.048_030_WE": "127", "asterix.FX": "0" } ) validator.add_dissection( [0x01, 0x01, 0x20, 0xf0, 0x00], "asterix.048_080", { "asterix.048_080_QA4": "0", "asterix.048_080_QA2": "0", "asterix.048_080_QA1": "0", "asterix.048_080_QB4": "0", "asterix.048_080_QB2": "0", "asterix.048_080_QB1": "0", "asterix.048_080_QC4": "0", "asterix.048_080_QC2": "0", "asterix.048_080_QC1": "0", "asterix.048_080_QD4": "0", "asterix.048_080_QD2": "0", "asterix.048_080_QD1": "0" } ) validator.add_dissection( [0x01, 0x01, 0x10, 0x30, 0x00, 0xf0, 0x00], "asterix.048_100", { "asterix.048_100_V": "0", "asterix.048_100_G": "0", "asterix.048_100_C1": "0", "asterix.048_100_A1": "0", "asterix.048_100_C2": "0", "asterix.048_100_A2": "0", "asterix.048_100_C4": "0", "asterix.048_100_A4": "0", "asterix.048_100_B1": "0", "asterix.048_100_D1": "0", "asterix.048_100_B2": "0", "asterix.048_100_D2": "0", "asterix.048_100_B4": "0", "asterix.048_100_D4": "0", "asterix.048_100_QC1": "0", "asterix.048_100_QA1": "0", "asterix.048_100_QC2": "0", "asterix.048_100_QA2": "0", "asterix.048_100_QC4": "0", "asterix.048_100_QA4": "0", "asterix.048_100_QB1": "0", "asterix.048_100_QD1": "0", "asterix.048_100_QB2": "0", "asterix.048_100_QD2": "0", "asterix.048_100_QB4": "0", "asterix.048_100_QD4": "0" } ) validator.add_dissection( [0x01, 0x01, 0x04, 0x80, 0x7c, 0x00], "asterix.048_120", { "asterix.fspec": "", "asterix.048_120_01": { "asterix.048_120_01_D": "0", "asterix.048_120_01_CAL": "0" } } ) validator.add_dissection( [0x01, 0x01, 0x04, 0x3e], "asterix.048_120", { "asterix.fspec": "" } ) validator.add_dissection( [0x01, 0x01, 0x02, 0x01, 0x00], 
"asterix.048_230", { "asterix.048_230_COM": "0", "asterix.048_230_STAT": "0", "asterix.048_230_SI": "0", "asterix.048_230_MSSC": "0", "asterix.048_230_ARC": "0", "asterix.048_230_AIC": "0", "asterix.048_230_B1A": "0", "asterix.048_230_B1B": "0" } ) validator.add_dissection( [0x01, 0x01, 0x01, 0x20, 0x10, 0x00], "asterix.048_050", { "asterix.048_050_V": "0", "asterix.048_050_G": "0", "asterix.048_050_L": "0", "asterix.048_050_SQUAWK": "0" } ) validator.add_dissection( [0x01, 0x01, 0x01, 0x10, 0xe0], "asterix.048_065", { "asterix.048_065_QA4": "0", "asterix.048_065_QA2": "0", "asterix.048_065_QA1": "0", "asterix.048_065_QB2": "0", "asterix.048_065_QB1": "0" } ) x_re_md5 = { "asterix.048_RE_MD5_01_M5": "0", "asterix.048_RE_MD5_01_ID": "0", "asterix.048_RE_MD5_01_DA": "0", "asterix.048_RE_MD5_01_M1": "0", "asterix.048_RE_MD5_01_M2": "0", "asterix.048_RE_MD5_01_M3": "0", "asterix.048_RE_MD5_01_MC": "0" } validator.add_re_dissection( [0x80, 0x80, 0x01, 0x00], "MD5", dict_fspec_local(x_re_md5, "048_RE_MD5_01", "M5", "0") ) x_re_pmn = { "asterix.048_RE_MD5_02_PIN": "0", "asterix.048_RE_MD5_02_NAV": "0", "asterix.048_RE_MD5_02_NAT": "0", "asterix.048_RE_MD5_02_MIS": "0" } validator.add_re_dissection( [0x80, 0x40, 0xc0, 0x00, 0xc0, 0xc0], "MD5", dict_fspec_local(x_re_pmn, "048_RE_MD5_02", "PIN", "0") ) x_re_em1 = { "asterix.048_RE_MD5_05_V": "0", "asterix.048_RE_MD5_05_G": "0", "asterix.048_RE_MD5_05_L": "0", "asterix.048_RE_MD5_05_SQUAWK": "0" } validator.add_re_dissection( [0x80, 0x08, 0x10, 0x00], "MD5", dict_fspec_local(x_re_em1, "048_RE_MD5_05", "V", "0") ) x_re_md5 = { "asterix.048_RE_M5N_01_M5": "0", "asterix.048_RE_M5N_01_ID": "0", "asterix.048_RE_M5N_01_DA": "0", "asterix.048_RE_M5N_01_M1": "0", "asterix.048_RE_M5N_01_M2": "0", "asterix.048_RE_M5N_01_M3": "0", "asterix.048_RE_M5N_01_MC": "0" } validator.add_re_dissection( [0x40, 0x80, 0x01, 0x00], "M5N", dict_fspec_local(x_re_md5, "048_RE_M5N_01", "M5", "0") ) x_re_pmn = { "asterix.048_RE_M5N_02_PIN": "0", 
"asterix.048_RE_M5N_02_NOV": "0", "asterix.048_RE_M5N_02_NO": "0" } validator.add_re_dissection( [0x40, 0x40, 0xc0, 0x00, 0xf0, 0x00], "M5N", dict_fspec_local(x_re_pmn, "048_RE_M5N_02", "PIN", "0") ) x_re_em1 = { "asterix.048_RE_M5N_05_V": "0", "asterix.048_RE_M5N_05_G": "0", "asterix.048_RE_M5N_05_L": "0", "asterix.048_RE_M5N_05_SQUAWK": "0" } validator.add_re_dissection( [0x40, 0x08, 0x10, 0x00], "M5N", dict_fspec_local(x_re_em1, "048_RE_M5N_05", "V", "0") ) validator.add_re_dissection( [0x40, 0x01, 0x80, 0xe0], "M5N", fspec_local("048_RE_M5N_08", "FOM", "0") ) validator.add_re_dissection( [0x20, 0xf8], "M4E", { "asterix.048_RE_M4E_FOE_FRI": "0", "asterix.FX": "0" } ) validator.add_re_dissection( [0x20, 0x01, 0x00], "M4E", { "asterix.048_RE_M4E_FOE_FRI": "0", "asterix.FX": "1" } ) validator.check_dissections() @fixtures.mark_usefixtures('test_env') @fixtures.uses_fixtures class case_category_063(subprocesstest.SubprocessTestCase): ''' Unittest case for ASTERIX Category 063 Online specification: https://www.eurocontrol.int/publications/cat063-sensor-status-messages-part-10 Part 10: Category 63 (1.4) Sensor Status Messages Standard User Application Profile FRN Data Item Information Length 1 I063/010 Data Source Identifier 2 2 I063/015 Service Identification 1 3 I063/030 Time of Message 3 4 I063/050 Sensor Identifier 2 5 I063/060 Sensor Configuration and Status 1+1 6 I063/070 Time Stamping Bias 2 7 I063/080 SSR/Mode S Range Gain and Bias 4 FX - Field extension indicator - 8 I063/081 SSR/Mode S Azimuth Bias 2 9 I063/090 PSR Range Gain and Bias 4 10 I063/091 PSR Azimuth Bias 2 11 I063/092 PSR Elevation Bias 2 12 - spare - 13 RE Reserved Expansion Field 1+1+ 14 SP Special Purpose Field 1+1+ FX - Field extension indicator - ''' maxDiff = None def test_for_fields(self, asterix_validator): '''verifies existence of all fields and their maximum value''' validator = asterix_validator(63) validator.add_dissection( [0x80, 0xff, 0x00], "asterix.063_010", { "asterix.SAC": "255", 
"asterix.SIC": "0" } ) validator.add_dissection( [0x80, 0x00, 0xff], "asterix.063_010", { "asterix.SAC": "0", "asterix.SIC": "255" } ) validator.add_dissection( [0x40, 0xff], "asterix.063_015", { "asterix.063_015_SI": "255" } ) validator.add_dissection( [0x20, 0xa8, 0xbf, 0xff], "asterix.063_030", { "asterix.TOD": "86399.9921875" } ) validator.add_dissection( [0x10, 0xff, 0x00], "asterix.063_050", { "asterix.SAC": "255", "asterix.SIC": "0" } ) validator.add_dissection( [0x10, 0x00, 0xff], "asterix.063_050", { "asterix.SAC": "0", "asterix.SIC": "255" } ) validator.add_dissection( [0x08, 0xc0], "asterix.063_060", { "asterix.063_060_CON": "3", "asterix.063_060_PSR": "0", "asterix.063_060_SSR": "0", "asterix.063_060_MDS": "0", "asterix.063_060_ADS": "0", "asterix.063_060_MLT": "0", "asterix.FX": "0" } ) validator.add_dissection( [0x08, 0x20], "asterix.063_060", { "asterix.063_060_CON": "0", "asterix.063_060_PSR": "1", "asterix.063_060_SSR": "0", "asterix.063_060_MDS": "0", "asterix.063_060_ADS": "0", "asterix.063_060_MLT": "0", "asterix.FX": "0" } ) validator.add_dissection( [0x08, 0x10], "asterix.063_060", { "asterix.063_060_CON": "0", "asterix.063_060_PSR": "0", "asterix.063_060_SSR": "1", "asterix.063_060_MDS": "0", "asterix.063_060_ADS": "0", "asterix.063_060_MLT": "0", "asterix.FX": "0" } ) validator.add_dissection( [0x08, 0x08], "asterix.063_060", { "asterix.063_060_CON": "0", "asterix.063_060_PSR": "0", "asterix.063_060_SSR": "0", "asterix.063_060_MDS": "1", "asterix.063_060_ADS": "0", "asterix.063_060_MLT": "0", "asterix.FX": "0" } ) validator.add_dissection( [0x08, 0x04], "asterix.063_060", { "asterix.063_060_CON": "0", "asterix.063_060_PSR": "0", "asterix.063_060_SSR": "0", "asterix.063_060_MDS": "0", "asterix.063_060_ADS": "1", "asterix.063_060_MLT": "0", "asterix.FX": "0" } ) validator.add_dissection( [0x08, 0x02], "asterix.063_060", { "asterix.063_060_CON": "0", "asterix.063_060_PSR": "0", "asterix.063_060_SSR": "0", "asterix.063_060_MDS": "0", 
"asterix.063_060_ADS": "0", "asterix.063_060_MLT": "1", "asterix.FX": "0" } ) validator.add_dissection( [0x08, 0x01, 0x80], "asterix.063_060", { "asterix.063_060_CON": "0", "asterix.063_060_PSR": "0", "asterix.063_060_SSR": "0", "asterix.063_060_MDS": "0", "asterix.063_060_ADS": "0", "asterix.063_060_MLT": "0", "asterix.063_060_OPS": "1", "asterix.063_060_ODP": "0", "asterix.063_060_OXT": "0", "asterix.063_060_MSC": "0", "asterix.063_060_TSV": "0", "asterix.063_060_NPW": "0", "asterix.FX": "0" } ) validator.add_dissection( [0x08, 0x01, 0x40], "asterix.063_060", { "asterix.063_060_CON": "0", "asterix.063_060_PSR": "0", "asterix.063_060_SSR": "0", "asterix.063_060_MDS": "0", "asterix.063_060_ADS": "0", "asterix.063_060_MLT": "0", "asterix.063_060_OPS": "0", "asterix.063_060_ODP": "1", "asterix.063_060_OXT": "0", "asterix.063_060_MSC": "0", "asterix.063_060_TSV": "0", "asterix.063_060_NPW": "0", "asterix.FX": "0" } ) validator.add_dissection( [0x08, 0x01, 0x20], "asterix.063_060", { "asterix.063_060_CON": "0", "asterix.063_060_PSR": "0", "asterix.063_060_SSR": "0", "asterix.063_060_MDS": "0", "asterix.063_060_ADS": "0", "asterix.063_060_MLT": "0", "asterix.063_060_OPS": "0", "asterix.063_060_ODP": "0", "asterix.063_060_OXT": "1", "asterix.063_060_MSC": "0", "asterix.063_060_TSV": "0", "asterix.063_060_NPW": "0", "asterix.FX": "0" } ) validator.add_dissection( [0x08, 0x01, 0x10], "asterix.063_060", { "asterix.063_060_CON": "0", "asterix.063_060_PSR": "0", "asterix.063_060_SSR": "0", "asterix.063_060_MDS": "0", "asterix.063_060_ADS": "0", "asterix.063_060_MLT": "0", "asterix.063_060_OPS": "0", "asterix.063_060_ODP": "0", "asterix.063_060_OXT": "0", "asterix.063_060_MSC": "1", "asterix.063_060_TSV": "0", "asterix.063_060_NPW": "0", "asterix.FX": "0" } ) validator.add_dissection( [0x08, 0x01, 0x08], "asterix.063_060", { "asterix.063_060_CON": "0", "asterix.063_060_PSR": "0", "asterix.063_060_SSR": "0", "asterix.063_060_MDS": "0", "asterix.063_060_ADS": "0", 
"asterix.063_060_MLT": "0", "asterix.063_060_OPS": "0", "asterix.063_060_ODP": "0", "asterix.063_060_OXT": "0", "asterix.063_060_MSC": "0", "asterix.063_060_TSV": "1", "asterix.063_060_NPW": "0", "asterix.FX": "0" } ) validator.add_dissection( [0x08, 0x01, 0x04], "asterix.063_060", { "asterix.063_060_CON": "0", "asterix.063_060_PSR": "0", "asterix.063_060_SSR": "0", "asterix.063_060_MDS": "0", "asterix.063_060_ADS": "0", "asterix.063_060_MLT": "0", "asterix.063_060_OPS": "0", "asterix.063_060_ODP": "0", "asterix.063_060_OXT": "0", "asterix.063_060_MSC": "0", "asterix.063_060_TSV": "0", "asterix.063_060_NPW": "1", "asterix.FX": "0" } ) validator.add_dissection( [0x04, 0xff, 0xff], "asterix.063_070", { "asterix.063_070_TSB": "65535" } ) validator.add_dissection( [0x02, 0x7f, 0xff, 0x00, 0x00], "asterix.063_080", { "asterix.063_080_SRG": "0.32767", "asterix.063_080_SRB": "0" } ) validator.add_dissection( [0x02, 0x80, 0x00, 0x00, 0x00], "asterix.063_080", { "asterix.063_080_SRG": "-0.32768", "asterix.063_080_SRB": "0" } ) validator.add_dissection( [0x02, 0x00, 0x00, 0x7f, 0xff], "asterix.063_080", { "asterix.063_080_SRG": "0", "asterix.063_080_SRB": "255.9921875" } ) validator.add_dissection( [0x02, 0x00, 0x00, 0x80, 0x00], "asterix.063_080", { "asterix.063_080_SRG": "0", "asterix.063_080_SRB": "-256" } ) validator.add_dissection( [0x01, 0x80, 0x7f, 0xff], "asterix.063_081", { "asterix.063_081_SAB": "179.994506835938" } ) validator.add_dissection( [0x01, 0x80, 0x80, 0x00], "asterix.063_081", { "asterix.063_081_SAB": "-180" } ) validator.add_dissection( [0x01, 0x40, 0x7f, 0xff, 0x00, 0x00], "asterix.063_090", { "asterix.063_090_PRG": "0.32767", "asterix.063_090_PRB": "0" } ) validator.add_dissection( [0x01, 0x40, 0x80, 0x00, 0x00, 0x00], "asterix.063_090", { "asterix.063_090_PRG": "-0.32768", "asterix.063_090_PRB": "0" } ) validator.add_dissection( [0x01, 0x40, 0x00, 0x00, 0x7f, 0xff], "asterix.063_090", { "asterix.063_090_PRG": "0", "asterix.063_090_PRB": "255.9921875" 
} ) validator.add_dissection( [0x01, 0x40, 0x00, 0x00, 0x80, 0x00], "asterix.063_090", { "asterix.063_090_PRG": "0", "asterix.063_090_PRB": "-256" } ) validator.add_dissection( [0x01, 0x20, 0x7f, 0xff], "asterix.063_091", { "asterix.063_091_PAB": "179.994506835938" } ) validator.add_dissection( [0x01, 0x20, 0x80, 0x00], "asterix.063_091", { "asterix.063_091_PAB": "-180" } ) validator.add_dissection( [0x01, 0x10, 0x7f, 0xff], "asterix.063_092", { "asterix.063_092_PEB": "179.994506835938" } ) validator.add_dissection( [0x01, 0x10, 0x80, 0x00], "asterix.063_092", { "asterix.063_092_PEB": "-180" } ) validator.check_dissections() def test_undefined_value_handling(self, asterix_validator): '''verifies that the dissector can dissect undefined field values by setting the maximum value of bits or by setting all undefined bits''' validator = asterix_validator(63) validator.add_dissection( [0x01, 0x08], "asterix.spare", "" ) validator.add_dissection( [0x01, 0x04, 0x02, 0x00], "asterix.063_RE", { "asterix.re_field_len": "2", "asterix.fspec": "" } ) validator.add_dissection( [0x01, 0x04, 0x10, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff], "asterix.063_RE", { "asterix.fspec": "", "asterix.re_field_len": "16" } ) validator.add_dissection( [0x01, 0x02, 0x01], "asterix.063_SP", "" ) validator.add_dissection( [0x01, 0x02, 0x10, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff], "asterix.063_SP", "" ) validator.check_dissections() @fixtures.mark_usefixtures('test_env') @fixtures.uses_fixtures class case_category_065(subprocesstest.SubprocessTestCase): ''' Unittest case for ASTERIX Category 065 Online specification: https://www.eurocontrol.int/publications/cat065-surveillance-data-processing-system-sdps-service-status-messages-part-15 https://www.eurocontrol.int/publications/cat065-coding-rules-reserved-expansion-field-part-15-appendix Part 15 Category 65 (1.4) SDPS Service Status Reports Standard 
User Application Profile FRN Data Item Information Length 1 I065/010 Data Source Identifier 2 2 I065/000 Message Type 1 3 I065/015 Service Identification 1 4 I065/030 Time of Message 3 5 I065/020 Batch Number 1 6 I065/040 SDPS Configuration and Status 1 7 I065/050 Service Status Report 1 FX - Field extension indicator - 8 - Spare - 9 - Spare - 10 - Spare - 11 - Spare - 12 - Spare - 13 RE Reserved Expansion Field 1+1+ 14 SP Special Purpose Field 1+1+ FX - Field extension indicator - ''' maxDiff = None def test_for_fields(self, asterix_validator): '''verifies existence of all fields and their maximum value''' validator = asterix_validator(65) validator.add_dissection( [0x80, 0xff, 0x00], "asterix.065_010", { "asterix.SAC": "255", "asterix.SIC": "0" } ) validator.add_dissection( [0x80, 0x00, 0xff], "asterix.065_010", { "asterix.SAC": "0", "asterix.SIC": "255" } ) validator.add_dissection( [0x40, 0x03], "asterix.065_000", { "asterix.065_000_MT": "3" } ) validator.add_dissection( [0x20, 0xff], "asterix.065_015", { "asterix.065_015_SI": "255" } ) validator.add_dissection( [0x10, 0xa8, 0xbf, 0xff], "asterix.065_030", { "asterix.TOD": "86399.9921875" } ) validator.add_dissection( [0x08, 0xff], "asterix.065_020", { "asterix.065_020_BTN": "255" } ) validator.add_dissection( [0x04, 0xc0], "asterix.065_040", { "asterix.065_040_NOGO": "3", "asterix.065_040_OVL": "0", "asterix.065_040_TSV": "0", "asterix.065_040_PSS": "0", "asterix.065_040_STTN": "0" } ) validator.add_dissection( [0x04, 0x20], "asterix.065_040", { "asterix.065_040_NOGO": "0", "asterix.065_040_OVL": "1", "asterix.065_040_TSV": "0", "asterix.065_040_PSS": "0", "asterix.065_040_STTN": "0" } ) validator.add_dissection( [0x04, 0x10], "asterix.065_040", { "asterix.065_040_NOGO": "0", "asterix.065_040_OVL": "0", "asterix.065_040_TSV": "1", "asterix.065_040_PSS": "0", "asterix.065_040_STTN": "0" } ) validator.add_dissection( [0x04, 0x0c], "asterix.065_040", { "asterix.065_040_NOGO": "0", "asterix.065_040_OVL": "0", 
"asterix.065_040_TSV": "0", "asterix.065_040_PSS": "3", "asterix.065_040_STTN": "0" } ) validator.add_dissection( [0x04, 0x02], "asterix.065_040", { "asterix.065_040_NOGO": "0", "asterix.065_040_OVL": "0", "asterix.065_040_TSV": "0", "asterix.065_040_PSS": "0", "asterix.065_040_STTN": "1" } ) validator.add_dissection( [0x02, 0xff], "asterix.065_050", { "asterix.065_050_REP": "255" } ) validator.add_dissection( [0x01, 0x04, 0x02, 0x00], "asterix.065_RE", { "asterix.re_field_len": "2", "asterix.fspec": "" } ) validator.add_dissection( [0x01, 0x04, 0x0a, 0x80, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], "asterix.065_RE", { "asterix.re_field_len": "10", "asterix.fspec": "", "asterix.065_RE_SRP": { "asterix.065_RE_SRP_Latitude": "90", "asterix.065_RE_SRP_Longitude": "0" } } ) validator.add_dissection( [0x01, 0x04, 0x0a, 0x80, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], "asterix.065_RE", { "asterix.re_field_len": "10", "asterix.fspec": "", "asterix.065_RE_SRP": { "asterix.065_RE_SRP_Latitude": "-90", "asterix.065_RE_SRP_Longitude": "0" } } ) validator.add_dissection( [0x01, 0x04, 0x0a, 0x80, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00], "asterix.065_RE", { "asterix.re_field_len": "10", "asterix.fspec": "", "asterix.065_RE_SRP": { "asterix.065_RE_SRP_Latitude": "0", "asterix.065_RE_SRP_Longitude": "180" } } ) validator.add_dissection( [0x01, 0x04, 0x0a, 0x80, 0x00, 0x00, 0x00, 0x00, 0xc0, 0x00, 0x00, 0x00], "asterix.065_RE", { "asterix.re_field_len": "10", "asterix.fspec": "", "asterix.065_RE_SRP": { "asterix.065_RE_SRP_Latitude": "0", "asterix.065_RE_SRP_Longitude": "-180" } } ) validator.add_dissection( [0x01, 0x04, 0x04, 0x40, 0xff, 0xfc], "asterix.065_RE", { "asterix.re_field_len": "4", "asterix.fspec": "", "asterix.065_RE_ARL": { "asterix.065_RE_ARL_ARL": "65532" } } ) validator.check_dissections() def test_undefined_value_handling(self, asterix_validator): '''verifies that the dissector can dissect undefined field values by setting the maximum value of 
bits or by setting all undefined bits''' validator = asterix_validator(65) validator.add_dissection( [0x40, 0xff], "asterix.065_000", { "asterix.065_000_MT": "255" } ) validator.add_dissection( [0x10, 0xff, 0xff, 0xff], "asterix.065_030", { "asterix.TOD": "131071.9921875" } ) validator.add_dissection( [0x04, 0x01], "asterix.065_040", { "asterix.065_040_NOGO": "0", "asterix.065_040_OVL": "0", "asterix.065_040_TSV": "0", "asterix.065_040_PSS": "0", "asterix.065_040_STTN": "0" } ) validator.add_dissection( [0x01, 0x80], "asterix.spare", "" ) validator.add_dissection( [0x01, 0x40], "asterix.spare", "" ) validator.add_dissection( [0x01, 0x20], "asterix.spare", "" ) validator.add_dissection( [0x01, 0x10], "asterix.spare", "" ) validator.add_dissection( [0x01, 0x08], "asterix.spare", "" ) validator.add_dissection( [0x01, 0x04, 0x0a, 0x80, 0x7f, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00], "asterix.065_RE", { "asterix.re_field_len": "10", "asterix.fspec": "", "asterix.065_RE_SRP": { "asterix.065_RE_SRP_Latitude": "359.999999832362", "asterix.065_RE_SRP_Longitude": "0" } } ) validator.add_dissection( [0x01, 0x04, 0x0a, 0x80, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], "asterix.065_RE", { "asterix.re_field_len": "10", "asterix.fspec": "", "asterix.065_RE_SRP": { "asterix.065_RE_SRP_Latitude": "-360", "asterix.065_RE_SRP_Longitude": "0" } } ) validator.add_dissection( [0x01, 0x04, 0x0a, 0x80, 0x00, 0x00, 0x00, 0x00, 0x7f, 0xff, 0xff, 0xff], "asterix.065_RE", { "asterix.re_field_len": "10", "asterix.fspec": "", "asterix.065_RE_SRP": { "asterix.065_RE_SRP_Latitude": "0", "asterix.065_RE_SRP_Longitude": "359.999999832362" } } ) validator.add_dissection( [0x01, 0x04, 0x0a, 0x80, 0x00, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00, 0x00], "asterix.065_RE", { "asterix.re_field_len": "10", "asterix.fspec": "", "asterix.065_RE_SRP": { "asterix.065_RE_SRP_Latitude": "0", "asterix.065_RE_SRP_Longitude": "-360" } } ) validator.add_dissection( [0x01, 0x04, 0x04, 0x40, 0xff, 0xff], 
"asterix.065_RE", { "asterix.re_field_len": "4", "asterix.fspec": "", "asterix.065_RE_ARL": { "asterix.065_RE_ARL_ARL": "65535" } } ) validator.add_dissection( [0x01, 0x02, 0x01], "asterix.065_SP", "" ) validator.add_dissection( [0x01, 0x02, 0x10, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff], "asterix.065_SP", "" ) validator.check_dissections()
33.804387
124
0.465005
13,717
130,992
4.125246
0.046585
0.073799
0.13802
0.090058
0.880607
0.843106
0.783321
0.687025
0.621426
0.547927
0
0.235017
0.408842
130,992
3,874
125
33.813113
0.495719
0.071546
0
0.492891
0
0
0.245044
0.073231
0
0
0.070717
0
0
1
0.007527
false
0
0.001115
0
0.015333
0
0
0
0
null
0
0
0
1
1
1
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
06aed4868c5d17483f90ab8db3dacf80d56a6289
89
py
Python
firstapp/salt_api/__init__.py
mudong1991/DevOpsApi
8cc880c7c628f04492427fe73a1a684eadb94e84
[ "Apache-2.0" ]
1
2017-12-22T04:12:10.000Z
2017-12-22T04:12:10.000Z
firstapp/salt_api/__init__.py
mudong1991/DevOpsApi
8cc880c7c628f04492427fe73a1a684eadb94e84
[ "Apache-2.0" ]
1
2020-01-08T01:49:03.000Z
2020-01-08T01:49:03.000Z
firstapp/salt_api/__init__.py
mudong1991/DevOpsApi
8cc880c7c628f04492427fe73a1a684eadb94e84
[ "Apache-2.0" ]
null
null
null
# -*- coding:utf-8 -*- # file: __init__.py # author: Mundy # date: 2017/5/10 0010 """ """
14.833333
22
0.561798
13
89
3.538462
1
0
0
0
0
0
0
0
0
0
0
0.162162
0.168539
89
6
23
14.833333
0.459459
0.820225
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
ebf06defbe5f6b61135c4915f3741b7ef4ce7625
97
py
Python
agrib.py
CharleiThorin/agrib-excellency
5e9910b18b0f8ca8c314afc9e07c7cafad7a6e5d
[ "MIT" ]
null
null
null
agrib.py
CharleiThorin/agrib-excellency
5e9910b18b0f8ca8c314afc9e07c7cafad7a6e5d
[ "MIT" ]
3
2019-12-26T17:34:07.000Z
2022-03-21T22:17:34.000Z
agrib.py
CharleiThorin/agrib-excellency
5e9910b18b0f8ca8c314afc9e07c7cafad7a6e5d
[ "MIT" ]
null
null
null
import os from app import create_app app = create_app(os.getenv('AGRIB_CONFIG') or 'default')
13.857143
56
0.752577
16
97
4.375
0.625
0.257143
0
0
0
0
0
0
0
0
0
0
0.14433
97
6
57
16.166667
0.843373
0
0
0
0
0
0.197917
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
23070aa3f876d440b594849d79a1c623271c2ab5
22
py
Python
tccli/services/tcr/v20190924/__init__.py
zyh911/tencentcloud-cli
dfc5dbd660d4c60d265921c4edc630091478fc41
[ "Apache-2.0" ]
null
null
null
tccli/services/tcr/v20190924/__init__.py
zyh911/tencentcloud-cli
dfc5dbd660d4c60d265921c4edc630091478fc41
[ "Apache-2.0" ]
null
null
null
tccli/services/tcr/v20190924/__init__.py
zyh911/tencentcloud-cli
dfc5dbd660d4c60d265921c4edc630091478fc41
[ "Apache-2.0" ]
null
null
null
version = "2019-09-24"
22
22
0.681818
4
22
3.75
1
0
0
0
0
0
0
0
0
0
0
0.4
0.090909
22
1
22
22
0.35
0
0
0
0
0
0.434783
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
230b34a9e0bf64b36f2e5b98cce1734924cde33f
19
py
Python
Freedom.py
tougov/Git_Repo_Console
a6d66625647bc9287ecc6c57309eed409820d26a
[ "BSD-3-Clause" ]
null
null
null
Freedom.py
tougov/Git_Repo_Console
a6d66625647bc9287ecc6c57309eed409820d26a
[ "BSD-3-Clause" ]
null
null
null
Freedom.py
tougov/Git_Repo_Console
a6d66625647bc9287ecc6c57309eed409820d26a
[ "BSD-3-Clause" ]
null
null
null
print("Freedom!!!")
19
19
0.631579
2
19
6
1
0
0
0
0
0
0
0
0
0
0
0
0
19
1
19
19
0.631579
0
0
0
0
0
0.5
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
2320b99b6355bbfc42457c72a6f514adf472a1f8
77
py
Python
AKDSFramework/applications/sorting/__init__.py
DeepNet-Research/AKDSFramework
a0b9fc2466b228ea6053b9f03e1d497462567a96
[ "MIT" ]
13
2020-11-03T00:07:43.000Z
2021-12-31T04:18:03.000Z
AKDSFramework/applications/sorting/__init__.py
DeepNet-Research/AKDSFramework
a0b9fc2466b228ea6053b9f03e1d497462567a96
[ "MIT" ]
2
2021-03-06T12:20:33.000Z
2021-03-07T04:26:29.000Z
AKDSFramework/applications/sorting/__init__.py
DeepNet-Research/AKDSFramework
a0b9fc2466b228ea6053b9f03e1d497462567a96
[ "MIT" ]
2
2020-11-03T23:13:53.000Z
2021-02-24T13:16:02.000Z
from .sort import bubblesort, insertionsort, heapsort, quicksort, merge_sort
38.5
76
0.831169
9
77
7
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.103896
77
1
77
77
0.913043
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
233d04f7904894f8e99efc9af0ddef6ba5dc8c35
96
py
Python
src/bavli_pages_tests/test___init__.py
lisrael1/bavli_pages
35a6c96db8078a535077e414ac64ce701be753fb
[ "MIT" ]
1
2022-03-22T08:36:00.000Z
2022-03-22T08:36:00.000Z
src/bavli_pages_tests/test___init__.py
lisrael1/bavli_pages
35a6c96db8078a535077e414ac64ce701be753fb
[ "MIT" ]
null
null
null
src/bavli_pages_tests/test___init__.py
lisrael1/bavli_pages
35a6c96db8078a535077e414ac64ce701be753fb
[ "MIT" ]
null
null
null
from unittest import TestCase import bavli_pages as bp class Test(TestCase): pass
12
30
0.708333
13
96
5.153846
0.846154
0
0
0
0
0
0
0
0
0
0
0
0.260417
96
7
31
13.714286
0.943662
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.25
0.5
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
0
0
0
5
88ef6a93a309cd1456a6860a3d50f55679c9c0ab
238
py
Python
machine/tokenization/sentencepiece/sentence_piece_detokenizer.py
sillsdev/machine.py
61c0b29e706636a2353a1afc0b3ee372db7e632f
[ "MIT" ]
2
2021-09-14T15:41:14.000Z
2021-09-14T15:53:46.000Z
machine/tokenization/sentencepiece/sentence_piece_detokenizer.py
sillsdev/machine.py
61c0b29e706636a2353a1afc0b3ee372db7e632f
[ "MIT" ]
2
2021-11-04T09:12:26.000Z
2021-11-08T08:35:36.000Z
machine/tokenization/sentencepiece/sentence_piece_detokenizer.py
sillsdev/machine.py
61c0b29e706636a2353a1afc0b3ee372db7e632f
[ "MIT" ]
1
2021-11-03T14:45:11.000Z
2021-11-03T14:45:11.000Z
from typing import Iterable from ..detokenizer import Detokenizer class SentencePieceDetokenizer(Detokenizer[str, str]): def detokenize(self, tokens: Iterable[str]) -> str: return "".join(tokens).replace("▁", " ").lstrip()
26.444444
57
0.705882
26
238
6.5
0.653846
0.071006
0
0
0
0
0
0
0
0
0
0
0.151261
238
8
58
29.75
0.831683
0
0
0
0
0
0.008403
0
0
0
0
0
0
1
0.2
false
0
0.4
0.2
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
5
001cdfc0ee0714c702c26efaa5127fc5e60bc30b
275
py
Python
tests/test_fm_classifier/test_utils/__init__.py
delmalih/few-shots-classification
8b06ff673882fd0d8b99cd705e5e5fab0ec93fb3
[ "MIT" ]
null
null
null
tests/test_fm_classifier/test_utils/__init__.py
delmalih/few-shots-classification
8b06ff673882fd0d8b99cd705e5e5fab0ec93fb3
[ "MIT" ]
null
null
null
tests/test_fm_classifier/test_utils/__init__.py
delmalih/few-shots-classification
8b06ff673882fd0d8b99cd705e5e5fab0ec93fb3
[ "MIT" ]
null
null
null
########################## # Imports ########################## import os from tests.test_fm_classifier import TEST_DIRECTORY_PATH as DIR_PATH ########################## # Constants ########################## TEST_DIRECTORY_PATH = os.path.join(DIR_PATH, "test_utils")
16.176471
68
0.483636
26
275
4.769231
0.576923
0.209677
0.274194
0
0
0
0
0
0
0
0
0
0.098182
275
16
69
17.1875
0.5
0.061818
0
0
0
0
0.066225
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
cc4d4714e916b54a8b7f1bd6eda7cfea2d98a938
121
py
Python
src/jobs/admin.py
porschebest/zerojetlag
cbfd8bbf185ac8c10f2a63d144dba4a4178c956e
[ "MIT" ]
null
null
null
src/jobs/admin.py
porschebest/zerojetlag
cbfd8bbf185ac8c10f2a63d144dba4a4178c956e
[ "MIT" ]
null
null
null
src/jobs/admin.py
porschebest/zerojetlag
cbfd8bbf185ac8c10f2a63d144dba4a4178c956e
[ "MIT" ]
null
null
null
from django.contrib import admin # Register your models here. from .models import JobInfo admin.site.register(JobInfo)
17.285714
32
0.801653
17
121
5.705882
0.647059
0
0
0
0
0
0
0
0
0
0
0
0.132231
121
6
33
20.166667
0.92381
0.214876
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
cc5f0dc491a1c6ec70913655d806751ad73db3fc
388
py
Python
PINp/2014/Platonova Olga/task_2_21.py
YukkaSarasti/pythonintask
eadf4245abb65f4400a3bae30a4256b4658e009c
[ "Apache-2.0" ]
null
null
null
PINp/2014/Platonova Olga/task_2_21.py
YukkaSarasti/pythonintask
eadf4245abb65f4400a3bae30a4256b4658e009c
[ "Apache-2.0" ]
null
null
null
PINp/2014/Platonova Olga/task_2_21.py
YukkaSarasti/pythonintask
eadf4245abb65f4400a3bae30a4256b4658e009c
[ "Apache-2.0" ]
null
null
null
# Задача 2. Вариант 21. #Напишите программу, которая будет выводить на экран наиболее понравившееся вам высказывание, автором которого является Леонардо да Винчи. Не забудьте о том, что автор должен быть упомянут на отдельной строке. # Platonova O. A. # 29.05.2016 print("Истина была единственной дочерью времени.\n\t\t\t\t\t\tЛеонардо да Винчи") input("\n\nНажмите Enter для выхода.")
43.111111
209
0.775773
60
388
5.016667
0.866667
0.026578
0.0299
0.026578
0
0
0
0
0
0
0
0.032836
0.136598
388
8
210
48.5
0.865672
0.659794
0
0
0
0.5
0.795276
0.23622
0
0
0
0
0
1
0
true
0
0
0
0
0.5
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
cc68e609e6e9505889b2ff2ccf30e740ea881fe1
182
py
Python
GmailAnalyser/test_extract_event.py
Remydeme/zodiac
46a2615f102cb11558a7a8ec5b3740b4b6fd155a
[ "MIT" ]
2
2020-08-07T15:10:54.000Z
2020-09-15T13:29:11.000Z
GmailAnalyser/test_extract_event.py
Remydeme/zodiac
46a2615f102cb11558a7a8ec5b3740b4b6fd155a
[ "MIT" ]
null
null
null
GmailAnalyser/test_extract_event.py
Remydeme/zodiac
46a2615f102cb11558a7a8ec5b3740b4b6fd155a
[ "MIT" ]
1
2020-08-07T15:11:09.000Z
2020-08-07T15:11:09.000Z
import unittest import pytest # - def test_extract_event_type1(events): pass def test_extract_event_type2(events): pass def test_extract_event_type3(events): pass
10.705882
37
0.752747
25
182
5.12
0.48
0.164063
0.328125
0.445313
0.453125
0.453125
0
0
0
0
0
0.02027
0.186813
182
16
38
11.375
0.844595
0.005495
0
0.375
0
0
0
0
0
0
0
0
0
1
0.375
false
0.375
0.25
0
0.625
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
5
cc857ec04dd39bc221cf4088323518d0dba0f8cb
29
py
Python
slrm/__init__.py
vzhong/slrm
3d2b2ebb955d70fcabf1f73b67368053fb7863a9
[ "MIT" ]
5
2020-02-05T06:20:09.000Z
2020-12-08T07:10:45.000Z
slrm/__init__.py
vzhong/slrm
3d2b2ebb955d70fcabf1f73b67368053fb7863a9
[ "MIT" ]
null
null
null
slrm/__init__.py
vzhong/slrm
3d2b2ebb955d70fcabf1f73b67368053fb7863a9
[ "MIT" ]
null
null
null
from .launcher import launch
14.5
28
0.827586
4
29
6
1
0
0
0
0
0
0
0
0
0
0
0
0.137931
29
1
29
29
0.96
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
cc8745116efdf707cfedfe275d9983ad5addc01e
61
py
Python
alooma/__init__.py
haarthi/alooma-python
e9fc91695d87e518fa953dea54d9e11ede8732a7
[ "Apache-2.0" ]
5
2017-08-08T23:15:00.000Z
2019-11-05T15:36:50.000Z
alooma/__init__.py
haarthi/alooma-python
e9fc91695d87e518fa953dea54d9e11ede8732a7
[ "Apache-2.0" ]
19
2017-08-03T23:09:03.000Z
2020-06-01T23:27:37.000Z
alooma/__init__.py
haarthi/alooma-python
e9fc91695d87e518fa953dea54d9e11ede8732a7
[ "Apache-2.0" ]
9
2017-08-03T22:58:48.000Z
2019-04-05T14:56:30.000Z
from __future__ import absolute_import from .alooma import *
20.333333
38
0.836066
8
61
5.75
0.625
0
0
0
0
0
0
0
0
0
0
0
0.131148
61
2
39
30.5
0.867925
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
cc93cf5af1eaffc8bd1a232ef7f9b4551cec5ed4
34
py
Python
git-kit.py
akx/git-kit
54948b57f201adecc810c4895b6712c1c8265cf3
[ "MIT" ]
3
2017-02-16T09:04:09.000Z
2021-05-03T08:25:52.000Z
git-kit.py
akx/git-kit
54948b57f201adecc810c4895b6712c1c8265cf3
[ "MIT" ]
2
2017-02-16T08:54:15.000Z
2017-02-16T09:09:41.000Z
git-kit.py
akx/git-kit
54948b57f201adecc810c4895b6712c1c8265cf3
[ "MIT" ]
1
2022-02-07T09:07:39.000Z
2022-02-07T09:07:39.000Z
from gitkit.cli import cli cli()
8.5
26
0.735294
6
34
4.166667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.176471
34
3
27
11.333333
0.892857
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
cca6d7598a14e5013a959a9082cb02d518197049
119
py
Python
protopy/generation/__init__.py
mjasher/scikit-protopy
f4deddc42c5883b527d7bb1bfc6d0ece7d01979d
[ "BSD-2-Clause" ]
17
2015-01-27T12:30:25.000Z
2021-06-24T22:11:31.000Z
protopy/generation/__init__.py
mjasher/scikit-protopy
f4deddc42c5883b527d7bb1bfc6d0ece7d01979d
[ "BSD-2-Clause" ]
null
null
null
protopy/generation/__init__.py
mjasher/scikit-protopy
f4deddc42c5883b527d7bb1bfc6d0ece7d01979d
[ "BSD-2-Clause" ]
6
2015-07-07T18:13:36.000Z
2021-06-22T16:42:31.000Z
from sgp import SGP from sgp import SGP2 from sgp import ASGP __all__ = ['SGP', 'SGP2', 'ASGP']
14.875
20
0.571429
16
119
4
0.375
0.328125
0.609375
0
0
0
0
0
0
0
0
0.025316
0.336134
119
7
21
17
0.78481
0
0
0
0
0
0.092437
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
ccb9ff6dc963b6793b1b8055311c86a48504f424
13
py
Python
grow/__init__.py
jpk0727/growApp
016d56de740c14e89440a6bf61fccc937e792473
[ "MIT" ]
null
null
null
grow/__init__.py
jpk0727/growApp
016d56de740c14e89440a6bf61fccc937e792473
[ "MIT" ]
null
null
null
grow/__init__.py
jpk0727/growApp
016d56de740c14e89440a6bf61fccc937e792473
[ "MIT" ]
null
null
null
""" grow """
6.5
12
0.307692
1
13
4
1
0
0
0
0
0
0
0
0
0
0
0
0.230769
13
1
13
13
0.4
0.307692
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
aea7d4df579b9d05febda580cfc3a1f884ac91bd
34
py
Python
node/__init__.py
thecsapprentice/phoenix-plumage-client
552bab81757ef3767a5ecfdf44e40b69eeea7e4c
[ "BSD-3-Clause" ]
null
null
null
node/__init__.py
thecsapprentice/phoenix-plumage-client
552bab81757ef3767a5ecfdf44e40b69eeea7e4c
[ "BSD-3-Clause" ]
null
null
null
node/__init__.py
thecsapprentice/phoenix-plumage-client
552bab81757ef3767a5ecfdf44e40b69eeea7e4c
[ "BSD-3-Clause" ]
null
null
null
from rendernode import RenderNode
17
33
0.882353
4
34
7.5
0.75
0
0
0
0
0
0
0
0
0
0
0
0.117647
34
1
34
34
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
aeb30f5b59d9f55ee1e2504edc7a94d4bf395ffa
50
py
Python
codesignal/arcade/python/intro_48_is_digit.py
tinesife94/random
b802924dce4635ae074d30dc03962d4301bd6d8b
[ "MIT" ]
null
null
null
codesignal/arcade/python/intro_48_is_digit.py
tinesife94/random
b802924dce4635ae074d30dc03962d4301bd6d8b
[ "MIT" ]
null
null
null
codesignal/arcade/python/intro_48_is_digit.py
tinesife94/random
b802924dce4635ae074d30dc03962d4301bd6d8b
[ "MIT" ]
null
null
null
def solution(symbol): return symbol.isdigit()
16.666667
27
0.72
6
50
6
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.16
50
2
28
25
0.857143
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
aef7466edacc56f8de351e664d009a6e3c02afc4
47
py
Python
devices/lib/pidev/__init__.py
dmitry-root/home-automation
82bba47729313f8262ab4d02c57aed34c0a2f5fd
[ "MIT" ]
null
null
null
devices/lib/pidev/__init__.py
dmitry-root/home-automation
82bba47729313f8262ab4d02c57aed34c0a2f5fd
[ "MIT" ]
null
null
null
devices/lib/pidev/__init__.py
dmitry-root/home-automation
82bba47729313f8262ab4d02c57aed34c0a2f5fd
[ "MIT" ]
null
null
null
import RPi.GPIO as GPIO GPIO.setmode(GPIO.BCM)
15.666667
23
0.787234
9
47
4.111111
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.106383
47
2
24
23.5
0.880952
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
4e02d8f0db2f35d3f7d6adf747171821db7b57fe
1,873
py
Python
seleniumapis/vanguard/test/test_vanguard.py
rikonor/vanguard-api
5462b2327cacad68bedb945dc323d534ebbdfeee
[ "MIT" ]
61
2016-04-19T00:14:37.000Z
2022-03-14T03:49:05.000Z
seleniumapis/vanguard/test/test_vanguard.py
swordfish6975/vanguard-api
46452e1ffbe175fa82c41f87d9a299be95a2008b
[ "MIT" ]
4
2017-07-10T01:30:33.000Z
2018-08-07T05:07:54.000Z
seleniumapis/vanguard/test/test_vanguard.py
swordfish6975/vanguard-api
46452e1ffbe175fa82c41f87d9a299be95a2008b
[ "MIT" ]
17
2017-07-10T23:26:27.000Z
2022-02-25T01:46:21.000Z
from unittest import TestCase from vanguard import Vanguard, config, tests_config class TestBrowser(TestCase): def setUp(self): self.v = Vanguard() def tearDown(self): self.v.close_browser() def test_can_login(self): self.v.login(tests_config.TEST_USER, tests_config.TEST_PASSWORD) def test_can_get_security_question(self): self.v.login(tests_config.TEST_USER, tests_config.TEST_PASSWORD) question = self.v.get_security_question() def test_can_answer_security_question(self): self.v.login(tests_config.TEST_USER, tests_config.TEST_PASSWORD) question = self.v.get_security_question() answer = tests_config.TEST_SECURITY_QUESTIONS.get(question) self.assertIsNotNone(answer) self.v.answer_security_question(answer) def test_can_go_to_balances_and_holdings(self): self.v.login(tests_config.TEST_USER, tests_config.TEST_PASSWORD) question = self.v.get_security_question() answer = tests_config.TEST_SECURITY_QUESTIONS.get(question) self.v.answer_security_question(answer) self.v.go_to_balances_and_holdings() self.assertEqual(self.v.browser.title, "Balances and holdings") def test_can_get_total_assets(self): self.v.login(tests_config.TEST_USER, tests_config.TEST_PASSWORD) question = self.v.get_security_question() answer = tests_config.TEST_SECURITY_QUESTIONS.get(question) self.v.answer_security_question(answer) self.v.get_total_assets() def test_can_get_current_holdings(self): self.v.login(tests_config.TEST_USER, tests_config.TEST_PASSWORD) question = self.v.get_security_question() answer = tests_config.TEST_SECURITY_QUESTIONS.get(question) self.v.answer_security_question(answer) self.v.get_current_holdings()
34.054545
72
0.730913
250
1,873
5.124
0.148
0.081967
0.187354
0.065574
0.733021
0.733021
0.674473
0.674473
0.674473
0.674473
0
0
0.181527
1,873
54
73
34.685185
0.835616
0
0
0.513514
0
0
0.011212
0
0
0
0
0
0.054054
1
0.216216
false
0.162162
0.054054
0
0.297297
0
0
0
0
null
0
1
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
5
9d6a01dd9034e4f006c4a5ee262ac1ba7c9f3c33
135
py
Python
Pamas/pamas/admin.py
Kipngetich33/Patient-Management-Systme
85ca9f3ba403c40ae950e7a59bdff460b5aa0a4b
[ "MIT" ]
1
2019-03-01T17:01:04.000Z
2019-03-01T17:01:04.000Z
Pamas/pamas/admin.py
Kipngetich33/Patient-Management-Systme
85ca9f3ba403c40ae950e7a59bdff460b5aa0a4b
[ "MIT" ]
null
null
null
Pamas/pamas/admin.py
Kipngetich33/Patient-Management-Systme
85ca9f3ba403c40ae950e7a59bdff460b5aa0a4b
[ "MIT" ]
null
null
null
from django.contrib import admin from .models import Profile,Appointment admin.site.register(Profile) admin.site.register(Appointment)
27
39
0.844444
18
135
6.333333
0.555556
0.157895
0.298246
0
0
0
0
0
0
0
0
0
0.074074
135
5
40
27
0.912
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
9d6bc6c1dc31f9fcb47e4e2329937ea81e0d8092
164
py
Python
checklist_fork/checklist/tests/abstract_tests/__init__.py
pczarnowska/generalized-fairness-metrics
06a4cea1e017e1340ecb14617b629215a8b014cf
[ "Apache-2.0" ]
3
2021-10-30T12:34:32.000Z
2022-02-24T10:27:23.000Z
checklist_fork/checklist/tests/abstract_tests/__init__.py
pczarnowska/generalized-fairness-metrics
06a4cea1e017e1340ecb14617b629215a8b014cf
[ "Apache-2.0" ]
8
2021-08-18T19:13:53.000Z
2022-02-02T16:06:08.000Z
checklist_fork/checklist/tests/abstract_tests/__init__.py
pczarnowska/generalized-fairness-metrics
06a4cea1e017e1340ecb14617b629215a8b014cf
[ "Apache-2.0" ]
4
2021-08-13T15:28:28.000Z
2022-03-29T05:25:00.000Z
from .abstract_test import AbstractTest from .generalized_metrics import * from .classification_test import ClassificationMetric from .metric_test import MetricTest
41
53
0.878049
19
164
7.368421
0.578947
0.214286
0
0
0
0
0
0
0
0
0
0
0.091463
164
4
54
41
0.939597
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
9d7c35cbde835166864c860584f592f2988564f4
321
py
Python
test/marks.py
cheshi-mantu/allure-pytest-github
fd83edac3c0c14dd051acdd6e4baf7b4b4d3b092
[ "Apache-2.0" ]
null
null
null
test/marks.py
cheshi-mantu/allure-pytest-github
fd83edac3c0c14dd051acdd6e4baf7b4b4d3b092
[ "Apache-2.0" ]
null
null
null
test/marks.py
cheshi-mantu/allure-pytest-github
fd83edac3c0c14dd051acdd6e4baf7b4b4d3b092
[ "Apache-2.0" ]
null
null
null
import allure def microservice(name): return allure.label("msrv", name) def owner(name): return allure.label("owner", name) def layer(name): return allure.label("layer", name) def tm4j(issue): return allure.label("tm4j", issue) def jira_issues(*issues): return allure.label("jira", *issues)
14.590909
40
0.676012
43
321
5.023256
0.325581
0.277778
0.393519
0.291667
0
0
0
0
0
0
0
0.007576
0.17757
321
21
41
15.285714
0.810606
0
0
0
0
0
0.068536
0
0
0
0
0
0
1
0.454545
false
0
0.090909
0.454545
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
5
9d8c6e3b0af901c26eab06710f5ef6a7bc1001ce
54
py
Python
vistautils/__init__.py
isi-vista/vistautils
c25e5fbbed02281ca321523c4c4d604c7a369b1d
[ "MIT" ]
3
2018-10-26T16:44:44.000Z
2019-10-03T14:02:44.000Z
vistautils/__init__.py
isi-vista/vistautils
c25e5fbbed02281ca321523c4c4d604c7a369b1d
[ "MIT" ]
136
2018-10-11T21:37:31.000Z
2021-03-25T22:15:13.000Z
vistautils/__init__.py
isi-vista/vistautils
c25e5fbbed02281ca321523c4c4d604c7a369b1d
[ "MIT" ]
3
2019-07-01T17:55:37.000Z
2020-08-05T15:42:56.000Z
from vistautils.version import version as __version__
27
53
0.87037
7
54
6.142857
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.111111
54
1
54
54
0.895833
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
9de9bbbb2c8d2f7482baeac5e36d776de5cca33b
80
py
Python
template/dogebuild_{{plugin_name}}/plugin.py
tapas-scaffold-tool/dogebuild-plugin-tapa
d7f32daa677513336f10ce098c7adb9cc553afbd
[ "MIT" ]
null
null
null
template/dogebuild_{{plugin_name}}/plugin.py
tapas-scaffold-tool/dogebuild-plugin-tapa
d7f32daa677513336f10ce098c7adb9cc553afbd
[ "MIT" ]
null
null
null
template/dogebuild_{{plugin_name}}/plugin.py
tapas-scaffold-tool/dogebuild-plugin-tapa
d7f32daa677513336f10ce098c7adb9cc553afbd
[ "MIT" ]
null
null
null
from dogebuild.plugins import DogePlugin class MyPlugin(DogePlugin): pass
13.333333
40
0.7875
9
80
7
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.1625
80
5
41
16
0.940299
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
0
0
0
5
3b023476a541fce59b064017b2263a57e13c6d9d
134
py
Python
driver.py
ShadenSmith/triton-packaging-test
8bc24194577a51124c14f79c8043d96dc1e15a65
[ "MIT" ]
null
null
null
driver.py
ShadenSmith/triton-packaging-test
8bc24194577a51124c14f79c8043d96dc1e15a65
[ "MIT" ]
null
null
null
driver.py
ShadenSmith/triton-packaging-test
8bc24194577a51124c14f79c8043d96dc1e15a65
[ "MIT" ]
null
null
null
from triton import cats, snakes if __name__ == '__main__': print(f'triton.cats="{cats}"') print(f'triton.snakes="{snakes}"')
22.333333
38
0.656716
18
134
4.444444
0.555556
0.15
0.3
0
0
0
0
0
0
0
0
0
0.149254
134
6
38
22.333333
0.701754
0
0
0
0
0
0.38806
0.179104
0
0
0
0
0
1
0
true
0
0.25
0
0.25
0.5
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
d1997718563d4033af5ce2e9a457e2e4a7beb7cf
35
py
Python
loadtests/student_notes/__init__.py
cmltaWt0/edx-load-tests
c25547a3ee241b3aa323dc6931e161a356ed5314
[ "Apache-2.0" ]
18
2016-01-31T13:29:56.000Z
2019-02-08T18:08:49.000Z
loadtests/student_notes/__init__.py
cmltaWt0/edx-load-tests
c25547a3ee241b3aa323dc6931e161a356ed5314
[ "Apache-2.0" ]
92
2015-07-31T20:16:51.000Z
2019-08-09T14:32:12.000Z
loadtests/student_notes/__init__.py
cmltaWt0/edx-load-tests
c25547a3ee241b3aa323dc6931e161a356ed5314
[ "Apache-2.0" ]
15
2015-08-19T15:23:58.000Z
2018-02-01T19:47:38.000Z
from locustfile import NotesLocust
17.5
34
0.885714
4
35
7.75
1
0
0
0
0
0
0
0
0
0
0
0
0.114286
35
1
35
35
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
d1c710036a1e4caf53530675540407cb6a3d0eed
92
py
Python
pydaily/images/__init__.py
codingPingjun/pydaily
966b96db05b3170f926aeb830ca6f81093a5371a
[ "Apache-2.0" ]
null
null
null
pydaily/images/__init__.py
codingPingjun/pydaily
966b96db05b3170f926aeb830ca6f81093a5371a
[ "Apache-2.0" ]
null
null
null
pydaily/images/__init__.py
codingPingjun/pydaily
966b96db05b3170f926aeb830ca6f81093a5371a
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- import os, sys, pdb from ._color import * from ._combine import *
13.142857
23
0.630435
13
92
4.307692
0.769231
0
0
0
0
0
0
0
0
0
0
0.013699
0.206522
92
6
24
15.333333
0.753425
0.228261
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
d1e9a61ef8e116e452c21a69a9941090ad489d2e
282
py
Python
pymusicterm/util/time.py
EGAMAGZ/Terminal-Music-Player
fc5ce7d9cef6ee98f2dde1784d363a8002584d96
[ "MIT" ]
2
2020-07-12T20:43:06.000Z
2020-08-07T06:16:12.000Z
pymusicterm/util/time.py
EGAMAGZ/Terminal-Music-Player
fc5ce7d9cef6ee98f2dde1784d363a8002584d96
[ "MIT" ]
null
null
null
pymusicterm/util/time.py
EGAMAGZ/Terminal-Music-Player
fc5ce7d9cef6ee98f2dde1784d363a8002584d96
[ "MIT" ]
null
null
null
def milliseconds_to_seconds(milliseconds:int) -> int: return int((milliseconds/1000)%60) def milliseconds_to_minutes(milliseconds:int) -> int: return int((milliseconds/(1000*60))%60) def seconds_to_milliseconds(seconds:float) -> int: return int(round(seconds,2)*1000)
31.333333
53
0.748227
38
282
5.394737
0.315789
0.131707
0.17561
0.234146
0.439024
0.439024
0.439024
0.439024
0
0
0
0.076
0.113475
282
8
54
35.25
0.744
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
ae078b9f787b1b7506c29ff8ab00c21ca1704210
56
py
Python
spirl/configs/hrl/block_stacking/no_prior/conf.py
kouroshHakha/fist
328c098789239fd892e17edefd799fc1957ab637
[ "BSD-3-Clause" ]
8
2021-10-14T03:14:23.000Z
2022-03-15T21:31:17.000Z
spirl/configs/hrl/block_stacking/no_prior/conf.py
kouroshHakha/fist
328c098789239fd892e17edefd799fc1957ab637
[ "BSD-3-Clause" ]
null
null
null
spirl/configs/hrl/block_stacking/no_prior/conf.py
kouroshHakha/fist
328c098789239fd892e17edefd799fc1957ab637
[ "BSD-3-Clause" ]
1
2021-09-13T20:42:28.000Z
2021-09-13T20:42:28.000Z
from spirl.configs.hrl.block_stacking.base_conf import *
56
56
0.857143
9
56
5.111111
1
0
0
0
0
0
0
0
0
0
0
0
0.053571
56
1
56
56
0.867925
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
ae3770e1b812cad31ad578b0c86ecccc1b812c20
93
py
Python
poewr-mod.py
DomirScire/HackerRank_answers
0432185a472aeae7062cf4e406d0e7a5ed2cc979
[ "MIT" ]
1
2021-03-19T13:05:16.000Z
2021-03-19T13:05:16.000Z
poewr-mod.py
DomirScire/HackerRank_answers
0432185a472aeae7062cf4e406d0e7a5ed2cc979
[ "MIT" ]
null
null
null
poewr-mod.py
DomirScire/HackerRank_answers
0432185a472aeae7062cf4e406d0e7a5ed2cc979
[ "MIT" ]
null
null
null
# DomirScire a=int(input()) b=int(input()) c=int(input()) print(pow(a,b)) print(pow(a,b,c))
11.625
17
0.623656
19
93
3.052632
0.421053
0.413793
0.310345
0.344828
0
0
0
0
0
0
0
0
0.086022
93
7
18
13.285714
0.682353
0.107527
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.4
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
ae4660194bebc036c1467264b397470d06870263
87
bzl
Python
lib/prolog/prolog.bzl
pivonroll/Gerrit-Owners
741b3fc6dedec50a94b3bfb5cab903ea8e12b487
[ "Apache-2.0" ]
null
null
null
lib/prolog/prolog.bzl
pivonroll/Gerrit-Owners
741b3fc6dedec50a94b3bfb5cab903ea8e12b487
[ "Apache-2.0" ]
null
null
null
lib/prolog/prolog.bzl
pivonroll/Gerrit-Owners
741b3fc6dedec50a94b3bfb5cab903ea8e12b487
[ "Apache-2.0" ]
null
null
null
load("@com_googlesource_gerrit_bazlets//lib/prolog:prolog.bzl", "prolog_cafe_library")
43.5
86
0.827586
12
87
5.583333
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.022989
87
1
87
87
0.788235
0
0
0
0
0
0.850575
0.632184
0
0
0
0
0
1
0
true
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
8830fdad5ad8a9c84e5fa6783a7ca78d2f474e5e
48
py
Python
instagram_api/exceptions/endpoint.py
Yuego/instagram_api
b53f72db36c505a2eb24ebac1ba8267a0cc295bb
[ "MIT" ]
13
2019-08-07T21:24:34.000Z
2020-12-12T12:23:50.000Z
instagram_api/exceptions/endpoint.py
Yuego/instagram_api
b53f72db36c505a2eb24ebac1ba8267a0cc295bb
[ "MIT" ]
null
null
null
instagram_api/exceptions/endpoint.py
Yuego/instagram_api
b53f72db36c505a2eb24ebac1ba8267a0cc295bb
[ "MIT" ]
null
null
null
class EndpointException(Exception): pass
8
35
0.729167
4
48
8.75
1
0
0
0
0
0
0
0
0
0
0
0
0.208333
48
5
36
9.6
0.921053
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
886be195b2253d9798e1568271f429c345cfa2e4
121
py
Python
ballpark/ventures/admin.py
keyvanm/ballpark
90ca6ac355319f159fa0836f30df487ee8e72ddd
[ "MIT" ]
null
null
null
ballpark/ventures/admin.py
keyvanm/ballpark
90ca6ac355319f159fa0836f30df487ee8e72ddd
[ "MIT" ]
null
null
null
ballpark/ventures/admin.py
keyvanm/ballpark
90ca6ac355319f159fa0836f30df487ee8e72ddd
[ "MIT" ]
null
null
null
from django.contrib import admin from .models import Venture # Register your models here. admin.site.register(Venture)
17.285714
32
0.801653
17
121
5.705882
0.647059
0
0
0
0
0
0
0
0
0
0
0
0.132231
121
6
33
20.166667
0.92381
0.214876
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
887071cf0c38cef363521b84ef76ac254ac0892a
25
py
Python
gunit/__init__.py
estcube/GUnit
e4efea80e671a574479466de19a65d6ad870102a
[ "MIT" ]
null
null
null
gunit/__init__.py
estcube/GUnit
e4efea80e671a574479466de19a65d6ad870102a
[ "MIT" ]
null
null
null
gunit/__init__.py
estcube/GUnit
e4efea80e671a574479466de19a65d6ad870102a
[ "MIT" ]
null
null
null
from .GUnit import GUnit
12.5
24
0.8
4
25
5
0.75
0
0
0
0
0
0
0
0
0
0
0
0.16
25
1
25
25
0.952381
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
8876c3d39137573bdd12ecdb17fcfbabfe0ac2b8
5,563
py
Python
data_loader/create_dataset.py
ShawnNew/AMMNet
f1c645afec3c5897f2cf5dfa78a416b9c784caa9
[ "MIT" ]
10
2019-10-11T16:54:05.000Z
2020-06-23T07:30:55.000Z
data_loader/create_dataset.py
ShawnNew/AMMNet
f1c645afec3c5897f2cf5dfa78a416b9c784caa9
[ "MIT" ]
4
2019-09-21T23:11:00.000Z
2020-12-24T12:24:43.000Z
data_loader/create_dataset.py
ShawnNew/AMSMNet
f1c645afec3c5897f2cf5dfa78a416b9c784caa9
[ "MIT" ]
3
2019-03-12T13:17:36.000Z
2019-06-06T09:51:50.000Z
import numpy as np import random from PIL import Image from torch.utils.data import Dataset, DataLoader, TensorDataset import os #from .preprocessor.Preprocessor import Preprocessor from .preprocessor.utils import generate_gradient_map class myDataset(Dataset): def __init__(self, root_, train, transform, shuffle): self.root = os.path.expanduser(root_) self.transform = transform self.trainable = train # indicate whether generate train set or test set train_lists_ = [] test_lists_ = [] train_lists_ += [os.path.join(self.root, x) \ for x in os.listdir(self.root) \ if os.path.isfile(os.path.join(self.root, x)) and\ x.endswith('txt') and 'train' in x] test_lists_ += [ os.path.join(self.root, x) \ for x in os.listdir(self.root) \ if os.path.isfile(os.path.join(self.root, x)) and\ x.endswith('txt') and 'test' in x] if self.trainable: data_file_ = [] for path in train_lists_: with open(path, 'r') as f: data_file_ += f.readlines() self.data_file_ = data_file_ self.len_ = len(self.data_file_) else: data_file_ = [] for path in test_lists_: with open(path, 'r') as f: data_file_ += f.readlines() self.data_file_ = data_file_ self.len_ = len(self.data_file_) if shuffle: random.shuffle(self.data_file_) def __len__(self): return self.len_ def __getitem__(self, idx): raise NotImplementedError class adobeDataset(myDataset): def __init__(self, root_, train=True, transform=None, shuffle=False): super(adobeDataset, self).__init__(root_, train, transform, shuffle) def __getitem__(self, idx): # return the idx's image and related information line = self.data_file_[idx] items_list = line.rstrip().replace('./', '').split(' ') img_path = os.path.join(self.root, items_list[1]) gt_path = os.path.join(self.root, items_list[3]) trimap_path = os.path.join(self.root, items_list[6]) gradient_path = os.path.join(self.root, items_list[0]) img = Image.open(img_path) # h*w*c gt = Image.open(gt_path) # h*w trimap = Image.open(trimap_path) # h*w gradient = Image.open(gradient_path) # h*w # gradient = 
generate_gradient_map(np.asarray(gradient), 3) # gradient = Image.fromarray(gradient) if img.mode != 'RGB': img = img.convert('RGB') sample = { 'name': img_path, 'size': (img.height, img.width), 'image': img, 'gt': gt, 'trimap': trimap, 'gradient': gradient } if self.transform: sample = self.transform(sample) return sample class alphamatting(myDataset): def __init__(self, root_, train=True, transform=None, shuffle=False): super(alphamatting, self).__init__(root_, train, transform, shuffle) def __getitem__(self, idx): line = self.data_file_[idx] items_list = line.rstrip().replace('./', '').split(' ') if self.trainable: # modify here for different datasets img_path = os.path.join(self.root, items_list[0]) gt_path = os.path.join(self.root, items_list[2]) trimap_path = os.path.join(self.root, items_list[1]) img = Image.open(img_path) gt = Image.open(gt_path) trimap = Image.open(trimap_path) sample = { 'name': img_path, 'size': (img.height, img.width), 'image': img, 'gt': gt, 'trimap': trimap } else: img_path = os.path.join(self.root, items_list[0]) trimap_path = os.path.join(self.root, items_list[1]) img = Image.open(img_path) trimap = Image.open(trimap_path) sample = { 'name': img_path, 'size': (img.height, img.width), 'image': img, 'trimap': trimap } if self.transform: sample = self.transform(sample) return sample class carmediaDataset(myDataset): def __init__(self, root_, train=True, transform=None, shuffle=False): super(carmediaDataset, self).__init__(root_, train, transform, shuffle) def __getitem__(self, idx): # return the idx's image and related information line = self.data_file_[idx] items_list = line.rstrip().replace('./', '').split(' ') img_path = os.path.join(self.root, items_list[0]) gt_path = os.path.join(self.root, items_list[1]) trimap_path = os.path.join(self.root, items_list[2]) img = Image.open(img_path) # h*w*c gt = Image.open(gt_path) # h*w trimap = Image.open(trimap_path) # h*w if img.mode != 'RGB': img = img.convert('RGB') sample = { 'name': 
img_path, 'size': (img.height, img.width), 'image': img, 'gt': gt, 'trimap': trimap } if self.transform: sample = self.transform(sample) return sample
35.660256
80
0.545209
655
5,563
4.407634
0.155725
0.063734
0.055421
0.077589
0.730862
0.707655
0.707655
0.707655
0.707655
0.658123
0
0.003526
0.337228
5,563
155
81
35.890323
0.779496
0.063814
0
0.64
0
0
0.024644
0
0
0
0
0
0
1
0.072
false
0
0.048
0.008
0.184
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
8886d46bd981bf3bd2aa580c8b887f24c166efc4
79
py
Python
tron/Hub/Command/__init__.py
sdss/tron
886c5c5fb6341ad85e4a9f5d6f5ecb6bbc0d8322
[ "BSD-3-Clause" ]
null
null
null
tron/Hub/Command/__init__.py
sdss/tron
886c5c5fb6341ad85e4a9f5d6f5ecb6bbc0d8322
[ "BSD-3-Clause" ]
null
null
null
tron/Hub/Command/__init__.py
sdss/tron
886c5c5fb6341ad85e4a9f5d6f5ecb6bbc0d8322
[ "BSD-3-Clause" ]
null
null
null
from .Command import Command #from Decoders import * #from Encoders import *
13.166667
28
0.759494
10
79
6
0.5
0
0
0
0
0
0
0
0
0
0
0
0.177215
79
5
29
15.8
0.923077
0.556962
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
8889868377b50d3b04280c2c385511c733f270e2
233
py
Python
topopy/__init__.py
geolovic/topopy
0ccfc4bfc0364b99489d08a1d4b87582deb08b81
[ "MIT" ]
5
2020-04-05T18:42:45.000Z
2022-02-17T11:15:32.000Z
topopy/__init__.py
geolovic/topopy
0ccfc4bfc0364b99489d08a1d4b87582deb08b81
[ "MIT" ]
null
null
null
topopy/__init__.py
geolovic/topopy
0ccfc4bfc0364b99489d08a1d4b87582deb08b81
[ "MIT" ]
5
2019-07-02T11:14:54.000Z
2021-12-15T08:43:42.000Z
from topopy.grid import Grid, DEM, PRaster, Basin from topopy.flow import Flow from topopy.network import Network, BNetwork, Channel from topopy.functions import extract_points, rivers_to_channels from topopy.profiler import TProfile
46.6
63
0.845494
34
233
5.705882
0.558824
0.257732
0
0
0
0
0
0
0
0
0
0
0.107296
233
5
64
46.6
0.932692
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5