hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ccea6c7adffdfe4092a7a82c033c1eb8ef0a70cd
| 102
|
py
|
Python
|
source/experiment/__init__.py
|
Octavian-ai/genetic-curriculum
|
c409681be92880793c021586f35f0ac2af5e5003
|
[
"Apache-2.0"
] | 5
|
2019-01-20T09:52:54.000Z
|
2020-05-18T02:12:53.000Z
|
source/experiment/__init__.py
|
Octavian-ai/genetic-curriculum
|
c409681be92880793c021586f35f0ac2af5e5003
|
[
"Apache-2.0"
] | null | null | null |
source/experiment/__init__.py
|
Octavian-ai/genetic-curriculum
|
c409681be92880793c021586f35f0ac2af5e5003
|
[
"Apache-2.0"
] | 1
|
2019-06-24T22:18:12.000Z
|
2019-06-24T22:18:12.000Z
|
from .helpers import DatasetParam
from .args import get_args
from .worker_test import WorkerTestCase
| 20.4
| 39
| 0.843137
| 14
| 102
| 6
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127451
| 102
| 5
| 39
| 20.4
| 0.94382
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
ccec7ab781f6f51cbf7d0518742fe743b2d1023e
| 125
|
py
|
Python
|
assets/src/ba_data/python/ba/_appmode.py
|
Benefit-Zebra/ballistica
|
eb85df82cff22038e74a2d93abdcbe9cd755d782
|
[
"MIT"
] | 317
|
2020-04-04T00:33:10.000Z
|
2022-03-28T01:07:09.000Z
|
assets/src/ba_data/python/ba/_appmode.py
|
Alshahriah/ballistica
|
326f6677a0118667e93ce9034849622ebef706fa
|
[
"MIT"
] | 315
|
2020-04-04T22:33:10.000Z
|
2022-03-31T22:50:02.000Z
|
assets/src/ba_data/python/ba/_appmode.py
|
Alshahriah/ballistica
|
326f6677a0118667e93ce9034849622ebef706fa
|
[
"MIT"
] | 97
|
2020-04-04T01:32:17.000Z
|
2022-03-16T19:02:59.000Z
|
# Released under the MIT License. See LICENSE for details.
#
"""Functionality related to the high level state of the app."""
| 31.25
| 63
| 0.744
| 19
| 125
| 4.894737
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168
| 125
| 3
| 64
| 41.666667
| 0.894231
| 0.92
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
cceec08b4a487ea148ef6e1e878b071daf989e78
| 4,075
|
py
|
Python
|
sdk/python/pulumi_azure_native/desktopvirtualization/__init__.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/desktopvirtualization/__init__.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/desktopvirtualization/__init__.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
from .. import _utilities
import typing
# Export this package's modules as members:
from ._enums import *
from .application import *
from .application_group import *
from .get_application import *
from .get_application_group import *
from .get_host_pool import *
from .get_msix_package import *
from .get_private_endpoint_connection_by_host_pool import *
from .get_private_endpoint_connection_by_workspace import *
from .get_scaling_plan import *
from .get_workspace import *
from .host_pool import *
from .msix_package import *
from .private_endpoint_connection_by_host_pool import *
from .private_endpoint_connection_by_workspace import *
from .scaling_plan import *
from .workspace import *
from ._inputs import *
from . import outputs
# Make subpackages available:
if typing.TYPE_CHECKING:
import pulumi_azure_native.desktopvirtualization.v20190123preview as __v20190123preview
v20190123preview = __v20190123preview
import pulumi_azure_native.desktopvirtualization.v20190924preview as __v20190924preview
v20190924preview = __v20190924preview
import pulumi_azure_native.desktopvirtualization.v20191210preview as __v20191210preview
v20191210preview = __v20191210preview
import pulumi_azure_native.desktopvirtualization.v20200921preview as __v20200921preview
v20200921preview = __v20200921preview
import pulumi_azure_native.desktopvirtualization.v20201019preview as __v20201019preview
v20201019preview = __v20201019preview
import pulumi_azure_native.desktopvirtualization.v20201102preview as __v20201102preview
v20201102preview = __v20201102preview
import pulumi_azure_native.desktopvirtualization.v20201110preview as __v20201110preview
v20201110preview = __v20201110preview
import pulumi_azure_native.desktopvirtualization.v20210114preview as __v20210114preview
v20210114preview = __v20210114preview
import pulumi_azure_native.desktopvirtualization.v20210201preview as __v20210201preview
v20210201preview = __v20210201preview
import pulumi_azure_native.desktopvirtualization.v20210309preview as __v20210309preview
v20210309preview = __v20210309preview
import pulumi_azure_native.desktopvirtualization.v20210401preview as __v20210401preview
v20210401preview = __v20210401preview
import pulumi_azure_native.desktopvirtualization.v20210712 as __v20210712
v20210712 = __v20210712
import pulumi_azure_native.desktopvirtualization.v20210903preview as __v20210903preview
v20210903preview = __v20210903preview
else:
v20190123preview = _utilities.lazy_import('pulumi_azure_native.desktopvirtualization.v20190123preview')
v20190924preview = _utilities.lazy_import('pulumi_azure_native.desktopvirtualization.v20190924preview')
v20191210preview = _utilities.lazy_import('pulumi_azure_native.desktopvirtualization.v20191210preview')
v20200921preview = _utilities.lazy_import('pulumi_azure_native.desktopvirtualization.v20200921preview')
v20201019preview = _utilities.lazy_import('pulumi_azure_native.desktopvirtualization.v20201019preview')
v20201102preview = _utilities.lazy_import('pulumi_azure_native.desktopvirtualization.v20201102preview')
v20201110preview = _utilities.lazy_import('pulumi_azure_native.desktopvirtualization.v20201110preview')
v20210114preview = _utilities.lazy_import('pulumi_azure_native.desktopvirtualization.v20210114preview')
v20210201preview = _utilities.lazy_import('pulumi_azure_native.desktopvirtualization.v20210201preview')
v20210309preview = _utilities.lazy_import('pulumi_azure_native.desktopvirtualization.v20210309preview')
v20210401preview = _utilities.lazy_import('pulumi_azure_native.desktopvirtualization.v20210401preview')
v20210712 = _utilities.lazy_import('pulumi_azure_native.desktopvirtualization.v20210712')
v20210903preview = _utilities.lazy_import('pulumi_azure_native.desktopvirtualization.v20210903preview')
| 57.394366
| 107
| 0.84589
| 386
| 4,075
| 8.494819
| 0.19171
| 0.095151
| 0.134797
| 0.182373
| 0.610552
| 0.586459
| 0.286368
| 0.027447
| 0
| 0
| 0
| 0.170858
| 0.102331
| 4,075
| 70
| 108
| 58.214286
| 0.725533
| 0.056687
| 0
| 0
| 1
| 0
| 0.194683
| 0.194683
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.758065
| 0
| 0.758065
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
ccf74057ff48d5e7357907a308d3b2547b8b3559
| 666
|
py
|
Python
|
tests/test_SeeThru_Feed.py
|
SeeThru-Networks/Python-Feed
|
9c29fcf3462475e339f95d3e9766ed3a652ee6c0
|
[
"MIT"
] | 3
|
2020-04-15T19:58:37.000Z
|
2020-06-24T14:08:48.000Z
|
tests/test_SeeThru_Feed.py
|
SeeThru-Networks/Python-Feed
|
9c29fcf3462475e339f95d3e9766ed3a652ee6c0
|
[
"MIT"
] | null | null | null |
tests/test_SeeThru_Feed.py
|
SeeThru-Networks/Python-Feed
|
9c29fcf3462475e339f95d3e9766ed3a652ee6c0
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
# TODO: as per TDD, all tests should fail initially, hence self.fail() below. But this is not a new repo, we will
# hopefully add test coverage as we go along....
class TestSeeThru_Feed(TestCase):
def test_create_feed_scheme(self):
# self.fail()
self.assertTrue(True)
def test_create_script(self):
# self.fail()
self.assertTrue(True)
def test_run_feed_scheme(self):
# self.fail()
self.assertTrue(True)
def test_create_dir(self):
# self.fail()
self.assertTrue(True)
def test_touch_file(self):
# self.fail()
self.assertTrue(True)
| 24.666667
| 113
| 0.644144
| 90
| 666
| 4.622222
| 0.488889
| 0.115385
| 0.144231
| 0.192308
| 0.504808
| 0.504808
| 0.432692
| 0.432692
| 0.254808
| 0.254808
| 0
| 0
| 0.255255
| 666
| 26
| 114
| 25.615385
| 0.83871
| 0.328829
| 0
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038462
| 0.416667
| 1
| 0.416667
| false
| 0
| 0.083333
| 0
| 0.583333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
69027caa94b620a7a3981d0d4946767f184cabce
| 36
|
py
|
Python
|
forensics/steg1/009.py
|
CAMSCSC/archive_learn
|
21e9c3f0c26565a94f85e89c00f19179ca084b90
|
[
"MIT"
] | null | null | null |
forensics/steg1/009.py
|
CAMSCSC/archive_learn
|
21e9c3f0c26565a94f85e89c00f19179ca084b90
|
[
"MIT"
] | null | null | null |
forensics/steg1/009.py
|
CAMSCSC/archive_learn
|
21e9c3f0c26565a94f85e89c00f19179ca084b90
|
[
"MIT"
] | null | null | null |
def hide(input_image_file, message):
| 36
| 36
| 0.833333
| 6
| 36
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 36
| 1
| 36
| 36
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
690dbeec88763d0bf0288533ceefd3572b466564
| 247
|
py
|
Python
|
src/atlassian_marketplace_python_client/settings.py
|
seibert-media/atlassian_marketplace_python_client
|
982905051adbd12ecf4d3521449c2befdd98115e
|
[
"MIT"
] | 2
|
2018-05-24T06:33:39.000Z
|
2022-01-05T20:20:28.000Z
|
src/atlassian_marketplace_python_client/settings.py
|
seibert-media/atlassian_marketplace_python_client
|
982905051adbd12ecf4d3521449c2befdd98115e
|
[
"MIT"
] | null | null | null |
src/atlassian_marketplace_python_client/settings.py
|
seibert-media/atlassian_marketplace_python_client
|
982905051adbd12ecf4d3521449c2befdd98115e
|
[
"MIT"
] | null | null | null |
ATLASSIAN_MARKETPLACE_BASE_URL = 'https://marketplace.atlassian.com'
ATLASSIAN_MARKETPLACE_VENDOR_URL = 'https://marketplace.atlassian.com/rest/2/addons/vendor/'
ATLASSIAN_MARKETPLACE_ADDON_URL = 'https://marketplace.atlassian.com/rest/2/addons/'
| 61.75
| 92
| 0.82996
| 31
| 247
| 6.322581
| 0.354839
| 0.306122
| 0.290816
| 0.428571
| 0.586735
| 0.428571
| 0.428571
| 0.428571
| 0
| 0
| 0
| 0.008403
| 0.036437
| 247
| 3
| 93
| 82.333333
| 0.815126
| 0
| 0
| 0
| 0
| 0
| 0.550607
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
693337737411300b3bee9ff11c81fafa57e90b8e
| 49
|
py
|
Python
|
__init__.py
|
kareemsaf/dgn2
|
795a67d855119057c93bec06c6e43e9dda539bfd
|
[
"MIT"
] | null | null | null |
__init__.py
|
kareemsaf/dgn2
|
795a67d855119057c93bec06c6e43e9dda539bfd
|
[
"MIT"
] | null | null | null |
__init__.py
|
kareemsaf/dgn2
|
795a67d855119057c93bec06c6e43e9dda539bfd
|
[
"MIT"
] | null | null | null |
from .dategen import *
__version__ = "0.0.2"
| 12.25
| 23
| 0.632653
| 7
| 49
| 3.857143
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 0.22449
| 49
| 3
| 24
| 16.333333
| 0.631579
| 0
| 0
| 0
| 0
| 0
| 0.108696
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
695bef997fb79d78a709ac6ea2f9a9b0dc9584e7
| 184
|
py
|
Python
|
janitor/package/chef/utils/file.py
|
nilesh-naik/lambda-functions
|
5daa2828914d23851538d5c3cb11f36b0f4bac52
|
[
"MIT"
] | 77
|
2016-03-15T21:51:11.000Z
|
2021-09-15T18:40:25.000Z
|
janitor/package/chef/utils/file.py
|
nilesh-naik/lambda-functions
|
5daa2828914d23851538d5c3cb11f36b0f4bac52
|
[
"MIT"
] | 20
|
2016-04-15T18:40:57.000Z
|
2021-06-01T18:59:56.000Z
|
lambda/chef/utils/file.py
|
novu/lambda-chef-node-cleanup
|
6659db950a3ab6b88ff608a6324f36a75fecebf2
|
[
"Apache-2.0"
] | 53
|
2016-04-07T07:35:04.000Z
|
2022-01-25T18:48:10.000Z
|
import os
def walk_backwards(path):
while 1:
yield path
next_path = os.path.dirname(path)
if path == next_path:
break
path = next_path
| 18.4
| 41
| 0.559783
| 24
| 184
| 4.125
| 0.541667
| 0.242424
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008621
| 0.369565
| 184
| 9
| 42
| 20.444444
| 0.844828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.125
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
695f455d94f34d102f78d8129766182ef0025c58
| 119
|
py
|
Python
|
feeds/exceptions.py
|
jochenklar/reader2
|
32dcedfff0b3d8c1b65a8398eab9f87a665a8d72
|
[
"Apache-2.0"
] | null | null | null |
feeds/exceptions.py
|
jochenklar/reader2
|
32dcedfff0b3d8c1b65a8398eab9f87a665a8d72
|
[
"Apache-2.0"
] | null | null | null |
feeds/exceptions.py
|
jochenklar/reader2
|
32dcedfff0b3d8c1b65a8398eab9f87a665a8d72
|
[
"Apache-2.0"
] | null | null | null |
class FeedException(Exception):
def __init__(self, *args, **kwargs):
self.message = kwargs.pop('message')
| 23.8
| 44
| 0.663866
| 13
| 119
| 5.769231
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184874
| 119
| 4
| 45
| 29.75
| 0.773196
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
15d041021bfaa5759dc79ed13623520c92654840
| 356
|
py
|
Python
|
python/pandemic_simulator/environment/location/__init__.py
|
stacyvjong/PandemicSimulator
|
eca906f5dc8135d7c90a1582b96621235f745c17
|
[
"Apache-2.0"
] | null | null | null |
python/pandemic_simulator/environment/location/__init__.py
|
stacyvjong/PandemicSimulator
|
eca906f5dc8135d7c90a1582b96621235f745c17
|
[
"Apache-2.0"
] | null | null | null |
python/pandemic_simulator/environment/location/__init__.py
|
stacyvjong/PandemicSimulator
|
eca906f5dc8135d7c90a1582b96621235f745c17
|
[
"Apache-2.0"
] | null | null | null |
# Confidential, Copyright 2020, Sony Corporation of America, All rights reserved.
# flake8: noqa
from .base import *
from .base_business import *
from .cemetery import *
from .home import *
from .hospital import *
from .misc_locations import *
from .office import *
from .recreation import *
from .road import *
from .school import *
from .store import *
| 23.733333
| 81
| 0.752809
| 47
| 356
| 5.659574
| 0.553191
| 0.37594
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016835
| 0.16573
| 356
| 14
| 82
| 25.428571
| 0.878788
| 0.258427
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
15f074e9aa8ec055fe747641db3920123b1f0f16
| 116
|
py
|
Python
|
skipping_letter.py
|
AkhilaSaiBejjarapu/Python
|
238cc7692cf2e93eb585a03967b8d688ee3760f2
|
[
"MIT"
] | null | null | null |
skipping_letter.py
|
AkhilaSaiBejjarapu/Python
|
238cc7692cf2e93eb585a03967b8d688ee3760f2
|
[
"MIT"
] | null | null | null |
skipping_letter.py
|
AkhilaSaiBejjarapu/Python
|
238cc7692cf2e93eb585a03967b8d688ee3760f2
|
[
"MIT"
] | null | null | null |
word=input()
n=int(input())
first_half=word[:n]
second_half=word[n+1:]
result=(first_half+second_half)
print(result)
| 19.333333
| 31
| 0.758621
| 21
| 116
| 4
| 0.47619
| 0.214286
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009009
| 0.043103
| 116
| 6
| 32
| 19.333333
| 0.747748
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
15f5337572ee47236786a763c55ac941dc8cb00d
| 120
|
py
|
Python
|
passwords.py
|
joonamo/hsl-stop-monitor
|
8ed21e3fee17cffa3ee0bb75ecc6d82b9632cfd2
|
[
"MIT"
] | null | null | null |
passwords.py
|
joonamo/hsl-stop-monitor
|
8ed21e3fee17cffa3ee0bb75ecc6d82b9632cfd2
|
[
"MIT"
] | null | null | null |
passwords.py
|
joonamo/hsl-stop-monitor
|
8ed21e3fee17cffa3ee0bb75ecc6d82b9632cfd2
|
[
"MIT"
] | null | null | null |
# Get your user and password from http://developer.reittiopas.fi
HSL_USERNAME = "hsl_user"
HSL_PASSWORD = "hsl_password"
| 40
| 64
| 0.791667
| 18
| 120
| 5.055556
| 0.666667
| 0.241758
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108333
| 120
| 3
| 65
| 40
| 0.850467
| 0.516667
| 0
| 0
| 0
| 0
| 0.350877
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.5
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
c6228d8ece630ac6554878bf45bd06efbd480ba2
| 4,517
|
py
|
Python
|
utils/config_parser.py
|
PaloAltoNetworks/autocombo
|
02876a776d8fe3fe5f1a3fbfd3516ec47b4c4801
|
[
"MIT"
] | 2
|
2021-12-07T22:15:10.000Z
|
2022-01-07T01:47:36.000Z
|
utils/config_parser.py
|
PaloAltoNetworks/autocombo
|
02876a776d8fe3fe5f1a3fbfd3516ec47b4c4801
|
[
"MIT"
] | 1
|
2021-11-01T04:03:44.000Z
|
2021-11-01T04:03:44.000Z
|
utils/config_parser.py
|
PaloAltoNetworks/autocombo
|
02876a776d8fe3fe5f1a3fbfd3516ec47b4c4801
|
[
"MIT"
] | 1
|
2021-12-07T22:31:47.000Z
|
2021-12-07T22:31:47.000Z
|
"""
@author: Min Du (midu@paloaltonetworks.com)
Copyright (c) 2021 Palo Alto Networks
"""
import os
import sys
import ast
from configparser import ConfigParser
class CommonConfig:
CONFIG_PARSER = ConfigParser()
common_config = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'config', 'common_config.ini'))
CONFIG_PARSER.read([common_config])
@staticmethod
def get_data_folder():
folder = os.path.abspath(os.path.join(os.path.dirname(__file__), '..',
CommonConfig.CONFIG_PARSER.get('common', 'data_folder')))
os.makedirs(folder, exist_ok=True)
return folder
@staticmethod
def get_data_preparation_logs_folder():
folder = os.path.abspath(os.path.join(os.path.dirname(__file__), '..',
CommonConfig.CONFIG_PARSER.get('common', 'data_preparation_logs_folder')))
os.makedirs(folder, exist_ok=True)
return folder
@staticmethod
def get_property_lists_folder():
folder = CommonConfig.CONFIG_PARSER.get('dataset_preparer', 'property_lists_folder')
os.makedirs(folder, exist_ok=True)
return folder
@staticmethod
def get_property_lists_integer_folder():
folder = CommonConfig.CONFIG_PARSER.get('dataset_preparer', 'property_lists_integer_folder')
os.makedirs(folder, exist_ok=True)
return folder
@staticmethod
def get_property_index_mapping_file():
return CommonConfig.CONFIG_PARSER.get('dataset_preparer', 'property_index_mapping_file')
@staticmethod
def get_property_count():
return CommonConfig.CONFIG_PARSER.getint('dataset_preparer', 'property_count')
@staticmethod
def get_combo_generation_result_folder():
folder = CommonConfig.CONFIG_PARSER.get('combo_generation', 'result_folder')
os.makedirs(folder, exist_ok=True)
return folder
@staticmethod
def get_start_date(config_section):
return CommonConfig.CONFIG_PARSER.get(config_section, 'start_date')
@staticmethod
def get_end_date(config_section):
return CommonConfig.CONFIG_PARSER.get(config_section, 'end_date')
@staticmethod
def get_property_sorting_criteria():
return CommonConfig.CONFIG_PARSER.get('combo_generation', 'property_sorting_criteria')
@staticmethod
def get_min_threshold():
return CommonConfig.CONFIG_PARSER.getfloat('combo_generation', 'min_threshold')
@staticmethod
def get_max_threshold():
return CommonConfig.CONFIG_PARSER.getfloat('combo_generation', 'max_threshold')
@staticmethod
def get_min_combo_size():
return CommonConfig.CONFIG_PARSER.getint('combo_generation', 'min_combo_size')
@staticmethod
def get_max_combo_size():
return CommonConfig.CONFIG_PARSER.getint('combo_generation', 'max_combo_size')
@staticmethod
def get_combo_sorting_criteria():
return CommonConfig.CONFIG_PARSER.get('combo_selection', 'combo_sorting_criteria')
@staticmethod
def get_combo_selection_approach():
return CommonConfig.CONFIG_PARSER.get('combo_selection', 'combo_selection_approach')
@staticmethod
def get_generation_mode():
return CommonConfig.CONFIG_PARSER.get('combo_generation', 'generation_mode')
@staticmethod
def get_use_integer_subset():
return CommonConfig.CONFIG_PARSER.getboolean('combo_generation', 'use_integer_subset')
@staticmethod
def get_do_property_sorting():
return CommonConfig.CONFIG_PARSER.getboolean('combo_generation', 'do_property_sorting')
@staticmethod
def get_random_seed():
return CommonConfig.CONFIG_PARSER.getint('combo_generation', 'random_seed')
@staticmethod
def get_use_multi_threading():
return CommonConfig.CONFIG_PARSER.getboolean('combo_generation', 'use_multi_threading')
@staticmethod
def get_num_cores():
return CommonConfig.CONFIG_PARSER.getint('combo_generation', 'num_cores')
@staticmethod
def get_use_sorted_combo_file_for_eval():
return CommonConfig.CONFIG_PARSER.getboolean('combo_evaluation', 'sorted_combo_file')
@staticmethod
def get_use_selected_combo_file():
return CommonConfig.CONFIG_PARSER.getboolean('combo_evaluation', 'use_selected_combo_file')
@staticmethod
def get_selection_threshold():
return CommonConfig.CONFIG_PARSER.getfloat('combo_selection', 'selection_threshold')
| 35.015504
| 120
| 0.720832
| 501
| 4,517
| 6.123753
| 0.163673
| 0.105606
| 0.20339
| 0.195567
| 0.682529
| 0.599739
| 0.582138
| 0.44296
| 0.30867
| 0.269557
| 0
| 0.001084
| 0.182865
| 4,517
| 128
| 121
| 35.289063
| 0.830127
| 0.017932
| 0
| 0.389474
| 0
| 0
| 0.182927
| 0.044941
| 0
| 0
| 0
| 0
| 0
| 1
| 0.263158
| false
| 0
| 0.042105
| 0.210526
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 4
|
c63d1e1acedd230d3ae0fbd589ebbd535c75ebd8
| 267
|
py
|
Python
|
source/__init__.py
|
tomhosker/wp17_configs
|
61035e7ae116b2b17dee2387e71fabe8a429792e
|
[
"MIT"
] | null | null | null |
source/__init__.py
|
tomhosker/wp17_configs
|
61035e7ae116b2b17dee2387e71fabe8a429792e
|
[
"MIT"
] | null | null | null |
source/__init__.py
|
tomhosker/wp17_configs
|
61035e7ae116b2b17dee2387e71fabe8a429792e
|
[
"MIT"
] | null | null | null |
"""
This code gives us, amongst other things, the ability to import specific
classes and functions from the package directly, rather than having to go
through the modules in which they are defined.
"""
from .configs_manager import ConfigsManager, get_configs_object
| 33.375
| 73
| 0.805243
| 40
| 267
| 5.3
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149813
| 267
| 7
| 74
| 38.142857
| 0.933921
| 0.722846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
d66a548e07d3b0e445fed0b2fedac2cc9b70461c
| 64
|
py
|
Python
|
python/1-hello/hello_script.py
|
mstenback/references
|
4ef4030cdd33a5d63a78d3ca9eb5b5aa82babd84
|
[
"Apache-2.0"
] | null | null | null |
python/1-hello/hello_script.py
|
mstenback/references
|
4ef4030cdd33a5d63a78d3ca9eb5b5aa82babd84
|
[
"Apache-2.0"
] | null | null | null |
python/1-hello/hello_script.py
|
mstenback/references
|
4ef4030cdd33a5d63a78d3ca9eb5b5aa82babd84
|
[
"Apache-2.0"
] | null | null | null |
# It doesn't get any simpler than this...
print("Hello, World!")
| 32
| 41
| 0.6875
| 11
| 64
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140625
| 64
| 2
| 42
| 32
| 0.8
| 0.609375
| 0
| 0
| 0
| 0
| 0.541667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
d68be518c9c05167abe5dadaf91a06e33ee30a20
| 90
|
py
|
Python
|
codes_auto/43.multiply-strings.py
|
smartmark-pro/leetcode_record
|
6504b733d892a705571eb4eac836fb10e94e56db
|
[
"MIT"
] | null | null | null |
codes_auto/43.multiply-strings.py
|
smartmark-pro/leetcode_record
|
6504b733d892a705571eb4eac836fb10e94e56db
|
[
"MIT"
] | null | null | null |
codes_auto/43.multiply-strings.py
|
smartmark-pro/leetcode_record
|
6504b733d892a705571eb4eac836fb10e94e56db
|
[
"MIT"
] | null | null | null |
#
# @lc app=leetcode.cn id=43 lang=python3
#
# [43] multiply-strings
#
None
# @lc code=end
| 12.857143
| 40
| 0.666667
| 15
| 90
| 4
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065789
| 0.155556
| 90
| 7
| 41
| 12.857143
| 0.723684
| 0.811111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
d6948d1595317f2914982be47570e4339091fe45
| 33
|
py
|
Python
|
src/pypackgen_scripts/scripts/__init__.py
|
predatorian3/pypackgen
|
5993596e625548db72c6e967d5f1a0011cea2dbf
|
[
"MIT"
] | 3
|
2018-02-02T20:02:43.000Z
|
2019-11-03T10:19:18.000Z
|
src/pypackgen_scripts/scripts/__init__.py
|
FilBot3/pypackgen
|
5993596e625548db72c6e967d5f1a0011cea2dbf
|
[
"MIT"
] | 7
|
2017-11-10T14:18:31.000Z
|
2019-10-18T01:51:56.000Z
|
src/pypackgen_scripts/scripts/__init__.py
|
FilBot3/pypackgen
|
5993596e625548db72c6e967d5f1a0011cea2dbf
|
[
"MIT"
] | 1
|
2019-11-03T10:19:31.000Z
|
2019-11-03T10:19:31.000Z
|
""" PyPackGen Scripts script
"""
| 11
| 28
| 0.666667
| 3
| 33
| 7.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151515
| 33
| 2
| 29
| 16.5
| 0.785714
| 0.727273
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
d697aa6ed3507da9ec38480eb07d08c93bf9072d
| 8,163
|
py
|
Python
|
data_aggregator/views/api/analytics.py
|
uw-it-aca/canvas-analytics
|
9df8098183b0193457808438ae665cef84b9aedd
|
[
"Apache-2.0"
] | null | null | null |
data_aggregator/views/api/analytics.py
|
uw-it-aca/canvas-analytics
|
9df8098183b0193457808438ae665cef84b9aedd
|
[
"Apache-2.0"
] | 172
|
2021-01-22T20:16:50.000Z
|
2022-03-25T18:47:10.000Z
|
data_aggregator/views/api/analytics.py
|
uw-it-aca/canvas-analytics
|
9df8098183b0193457808438ae665cef84b9aedd
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from django.conf import settings
from django.db.models import F
from django.utils.decorators import method_decorator
from rest_framework.generics import GenericAPIView
from rest_framework.pagination import PageNumberPagination
from rest_framework.renderers import JSONRenderer
from uw_saml.decorators import group_required
from data_aggregator.models import Assignment, Participation, User
from data_aggregator.serializers import ParticipationSerializer, \
AssignmentSerializer, UserSerializer
"""
Analytics API
"""
class AnalyticsResultsSetPagination(PageNumberPagination):
page_size = 1000
page_size_query_param = 'page_size'
max_page_size = 2500
@method_decorator(group_required(settings.DATA_AGGREGATOR_ACCESS_GROUP),
name='dispatch')
class BaseAnalyticsAPIView(GenericAPIView):
renderer_classes = [JSONRenderer]
pagination_class = AnalyticsResultsSetPagination
def get_assignment_queryset(self):
queryset = (
Assignment.objects.select_related()
.annotate(sis_account_id=F('course__sis_account_id'))
.annotate(sis_term_id=F('week__term__sis_term_id'))
)
return queryset
def get_participation_queryset(self):
queryset = (
Participation.objects.select_related()
.annotate(sis_account_id=F('course__sis_account_id'))
.annotate(sis_term_id=F('week__term__sis_term_id'))
)
return queryset
class AccountParticipationView(BaseAnalyticsAPIView):
'''
API endpoint returning participation analytics for a account
/api/[version]/account/[sis-account-id]/participation/
Endpoint accepts the following query parameters:
* sis_term_id: limit results to a term
* week: limit results to a week in term
'''
def get(self, request, version, sis_account_id):
queryset = (
self.get_participation_queryset()
.filter(sis_account_id__startswith=sis_account_id))
sis_term_id = request.GET.get("sis_term_id")
if sis_term_id:
queryset = queryset.filter(sis_term_id=sis_term_id)
week = request.GET.get("week")
if week:
queryset = queryset.filter(week__week=week)
paginated_queryset = self.paginate_queryset(queryset)
serializer = ParticipationSerializer(paginated_queryset, many=True)
return self.get_paginated_response(serializer.data)
class AccountAssignmentView(BaseAnalyticsAPIView):
'''
API endpoint returning assignment analytics for a account
/api/[version]/account/[sis-account-id]/assignment/
Endpoint accepts the following query parameters:
* sis_term_id: limit results to a term
* week: limit results to a week in term
'''
def get(self, request, version, sis_account_id):
queryset = (
self.get_assignment_queryset()
.filter(sis_account_id__startswith=sis_account_id))
sis_term_id = request.GET.get("sis_term_id")
if sis_term_id:
queryset = queryset.filter(sis_term_id=sis_term_id)
week = request.GET.get("week")
if week:
queryset = queryset.filter(week__week=week)
paginated_queryset = self.paginate_queryset(queryset)
serializer = AssignmentSerializer(paginated_queryset, many=True)
return self.get_paginated_response(serializer.data)
class TermParticipationView(BaseAnalyticsAPIView):
'''
API endpoint returning participation analytics for a term
/api/[version]/term/[sis-term-id]/participation/
Endpoint accepts the following query parameters:
* week: limit results to a week in term
'''
def get(self, request, version, sis_term_id):
queryset = (
self.get_participation_queryset()
.filter(sis_term_id=sis_term_id))
week = request.GET.get("week")
if week:
queryset = queryset.filter(week__week=week)
paginated_queryset = self.paginate_queryset(queryset)
serializer = ParticipationSerializer(paginated_queryset, many=True)
return self.get_paginated_response(serializer.data)
class TermAssignmentView(BaseAnalyticsAPIView):
'''
API endpoint returning assignment analytics for a term
/api/[version]/term/[sis-term-id]/assignment/
Endpoint accepts the following query parameters:
* week: limit results to a week in term
'''
def get(self, request, version, sis_term_id):
queryset = (
self.get_assignment_queryset()
.filter(sis_term_id=sis_term_id))
week = request.GET.get("week")
if week:
queryset = queryset.filter(week__week=week)
paginated_queryset = self.paginate_queryset(queryset)
serializer = AssignmentSerializer(paginated_queryset, many=True)
return self.get_paginated_response(serializer.data)
class UserView(GenericAPIView):
renderer_classes = [JSONRenderer]
pagination_class = AnalyticsResultsSetPagination
def get(self, request, version):
queryset = User.objects.select_related()
canvas_user_id = request.GET.get("canvas_user_id")
if (canvas_user_id):
queryset = queryset.filter(canvas_user_id=canvas_user_id)
has_analytics = request.GET.get("has_analytics")
if (has_analytics is not None):
assign_analytic_users = Assignment.objects.values("user")
partic_analytic_users = Participation.objects.values("user")
if has_analytics.lower() == "true":
queryset = queryset.filter(
id__in=assign_analytic_users)
queryset = queryset.filter(
id__in=partic_analytic_users)
elif has_analytics.lower() == "false":
queryset = queryset.exclude(
id__in=assign_analytic_users)
queryset = queryset.exclude(
id__in=partic_analytic_users)
paginated_queryset = self.paginate_queryset(queryset)
serializer = UserSerializer(paginated_queryset, many=True)
return self.get_paginated_response(serializer.data)
class UserParticipationView(BaseAnalyticsAPIView):
'''
API endpoint returning participation analytics for a particular user
/api/[version]/user/[sis-user-id]/participation/
Endpoint accepts the following query parameters:
* sis_term_id: limit results to a term
* week: limit results to a week in term
'''
def get(self, request, version, sis_user_id):
queryset = (
self.get_participation_queryset()
.filter(user__sis_user_id=sis_user_id))
sis_term_id = request.GET.get("sis_term_id")
if sis_term_id:
queryset = queryset.filter(sis_term_id=sis_term_id)
week = request.GET.get("week")
if week:
queryset = queryset.filter(week__week=week)
paginated_queryset = self.paginate_queryset(queryset)
serializer = ParticipationSerializer(paginated_queryset, many=True)
return self.get_paginated_response(serializer.data)
class UserAssignmentView(BaseAnalyticsAPIView):
'''
API endpoint returning assignment analytics for a particular user
/api/[version]/user/[sis-user-id]/assignment/
Endpoint accepts the following query parameters:
* sis_term_id: limit results to a term
* week: limit results to a week in term
'''
def get(self, request, version, sis_user_id):
queryset = (
self.get_assignment_queryset()
.filter(user__sis_user_id=sis_user_id))
sis_term_id = request.GET.get("sis_term_id")
if sis_term_id:
queryset = queryset.filter(sis_term_id=sis_term_id)
week = request.GET.get("week")
if week:
queryset = queryset.filter(week__week=week)
paginated_queryset = self.paginate_queryset(queryset)
serializer = AssignmentSerializer(paginated_queryset, many=True)
return self.get_paginated_response(serializer.data)
| 36.441964
| 75
| 0.691167
| 931
| 8,163
| 5.796992
| 0.131042
| 0.046693
| 0.060033
| 0.027793
| 0.761164
| 0.738558
| 0.738558
| 0.709654
| 0.639615
| 0.605892
| 0
| 0.002218
| 0.226877
| 8,163
| 223
| 76
| 36.605381
| 0.852955
| 0.177508
| 0
| 0.691176
| 0
| 0
| 0.033625
| 0.013819
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066176
| false
| 0
| 0.066176
| 0
| 0.316176
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
d6b2e55648acd0b30ed9bb3a17125d8dcfc5bc6d
| 81
|
py
|
Python
|
pype9/simulate/nest/cells/__init__.py
|
tclose/Pype9
|
23f96c0885fd9df12d9d11ff800f816520e4b17a
|
[
"MIT"
] | null | null | null |
pype9/simulate/nest/cells/__init__.py
|
tclose/Pype9
|
23f96c0885fd9df12d9d11ff800f816520e4b17a
|
[
"MIT"
] | null | null | null |
pype9/simulate/nest/cells/__init__.py
|
tclose/Pype9
|
23f96c0885fd9df12d9d11ff800f816520e4b17a
|
[
"MIT"
] | 1
|
2021-04-08T12:46:21.000Z
|
2021-04-08T12:46:21.000Z
|
from .base import Cell, CellMetaClass
from pype9.simulate.nest import simulation
| 27
| 42
| 0.839506
| 11
| 81
| 6.181818
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013889
| 0.111111
| 81
| 2
| 43
| 40.5
| 0.930556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
d6bcf929c27a7c68c8b7937a08badca2956cffe6
| 220
|
py
|
Python
|
dashboard/admin.py
|
ibnshayed/Advanced-Blog-Application
|
27b38d569d5f883a0cd02b04d50162fa35de2388
|
[
"MIT"
] | null | null | null |
dashboard/admin.py
|
ibnshayed/Advanced-Blog-Application
|
27b38d569d5f883a0cd02b04d50162fa35de2388
|
[
"MIT"
] | null | null | null |
dashboard/admin.py
|
ibnshayed/Advanced-Blog-Application
|
27b38d569d5f883a0cd02b04d50162fa35de2388
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Author
admin.site.register(Author)
# made by Nazrul Islam Yeasin
# Facebook : facebook.com/yeariha.farsin
# Github : github.com/yeazin
# website : yeazin.github.io
| 22
| 40
| 0.772727
| 31
| 220
| 5.483871
| 0.709677
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 220
| 9
| 41
| 24.444444
| 0.894737
| 0.55
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
d6e9dff8279bad0f0bff61afc58588a7af065be4
| 96
|
py
|
Python
|
FAIRshakeHub/apps.py
|
Nitrogen-DCPPC/FAIRshake
|
af83c1cb82bdd41e6214d23ab6587d5a4c185b11
|
[
"Apache-2.0"
] | 1
|
2019-04-15T14:02:03.000Z
|
2019-04-15T14:02:03.000Z
|
FAIRshakeHub/apps.py
|
Nitrogen-DCPPC/FAIRshake
|
af83c1cb82bdd41e6214d23ab6587d5a4c185b11
|
[
"Apache-2.0"
] | 109
|
2018-05-21T19:45:19.000Z
|
2019-04-19T12:09:06.000Z
|
FAIRshakeHub/apps.py
|
Nitrogen-DCPPC/FAIRshake
|
af83c1cb82bdd41e6214d23ab6587d5a4c185b11
|
[
"Apache-2.0"
] | 3
|
2018-08-06T22:09:33.000Z
|
2018-12-09T18:52:46.000Z
|
from django.apps import AppConfig
class FairshakeConfig(AppConfig):
name = 'FAIRshakeHub'
| 16
| 33
| 0.770833
| 10
| 96
| 7.4
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15625
| 96
| 5
| 34
| 19.2
| 0.91358
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
ba42ccdbe5a9b15e5115dda683dd26edf1b35c21
| 46
|
py
|
Python
|
server/serverapp.py
|
Alfie1582/sfe-giveaways
|
29931fa375479c30978ca91f81d45d1d5d7a89a1
|
[
"MIT"
] | null | null | null |
server/serverapp.py
|
Alfie1582/sfe-giveaways
|
29931fa375479c30978ca91f81d45d1d5d7a89a1
|
[
"MIT"
] | null | null | null |
server/serverapp.py
|
Alfie1582/sfe-giveaways
|
29931fa375479c30978ca91f81d45d1d5d7a89a1
|
[
"MIT"
] | null | null | null |
from flask import Flask
app = Flask(__main__)
| 15.333333
| 23
| 0.782609
| 7
| 46
| 4.571429
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152174
| 46
| 3
| 24
| 15.333333
| 0.820513
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
ba7b98fa642475650512009f2254e3652cbbab65
| 204
|
py
|
Python
|
{{cookiecutter.project_slug}}/apps/sms/admin.py
|
ukwahlula/django-server-boilerplate
|
6bd4b83511ea7e3370349957cf0b6dbff4003ab1
|
[
"BSD-3-Clause"
] | 2
|
2020-10-30T09:47:07.000Z
|
2020-10-30T09:48:11.000Z
|
{{cookiecutter.project_slug}}/apps/sms/admin.py
|
ukwahlula/django-server-boilerplate
|
6bd4b83511ea7e3370349957cf0b6dbff4003ab1
|
[
"BSD-3-Clause"
] | null | null | null |
{{cookiecutter.project_slug}}/apps/sms/admin.py
|
ukwahlula/django-server-boilerplate
|
6bd4b83511ea7e3370349957cf0b6dbff4003ab1
|
[
"BSD-3-Clause"
] | null | null | null |
from django.contrib import admin
from .models import Sms
@admin.register(Sms)
class SmsAdmin(admin.ModelAdmin):
list_display = ("created_date", "phone", "sms")
search_fields = ("phone", "sms")
| 20.4
| 51
| 0.705882
| 26
| 204
| 5.423077
| 0.692308
| 0.113475
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151961
| 204
| 9
| 52
| 22.666667
| 0.815029
| 0
| 0
| 0
| 0
| 0
| 0.137255
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
ba81841058679879564d6c7c2d69c122ecd1c14c
| 179
|
py
|
Python
|
yatracker/types/base.py
|
Olegt0rr/aioYaTracker
|
a3c800ac560e2454c16028e68a72b9f2af4b8f0b
|
[
"MIT"
] | 2
|
2020-01-15T06:08:02.000Z
|
2021-07-02T06:54:14.000Z
|
yatracker/types/base.py
|
Olegt0rr/aioYaTracker
|
a3c800ac560e2454c16028e68a72b9f2af4b8f0b
|
[
"MIT"
] | null | null | null |
yatracker/types/base.py
|
Olegt0rr/aioYaTracker
|
a3c800ac560e2454c16028e68a72b9f2af4b8f0b
|
[
"MIT"
] | null | null | null |
from pydantic import BaseModel
class BaseObject(BaseModel):
@property
def tracker(self):
from ..tracker import YaTracker
return YaTracker.get_current()
| 17.9
| 39
| 0.698324
| 19
| 179
| 6.526316
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.234637
| 179
| 9
| 40
| 19.888889
| 0.905109
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
bab94060f95378b44215370279535745e9f6bc98
| 10
|
py
|
Python
|
Face Recognition/facepp/win32-dist/call.py
|
ds17/machine_learning
|
f8e853fed5a12da1c42bc84850e472d7481324d5
|
[
"MIT"
] | null | null | null |
Face Recognition/facepp/win32-dist/call.py
|
ds17/machine_learning
|
f8e853fed5a12da1c42bc84850e472d7481324d5
|
[
"MIT"
] | null | null | null |
Face Recognition/facepp/win32-dist/call.py
|
ds17/machine_learning
|
f8e853fed5a12da1c42bc84850e472d7481324d5
|
[
"MIT"
] | null | null | null |
../call.py
| 10
| 10
| 0.6
| 2
| 10
| 3
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 10
| 1
| 10
| 10
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
bac8de0fb32fb56b94f0c4869f2d099442c6deed
| 202
|
py
|
Python
|
homepage/models.py
|
abahernest/SHREDFULL
|
b3509946bb9d7d06ff202b5b6861b66376344d29
|
[
"Apache-2.0"
] | 1
|
2020-04-23T15:43:45.000Z
|
2020-04-23T15:43:45.000Z
|
homepage/models.py
|
abahernest/The-Pit
|
b3509946bb9d7d06ff202b5b6861b66376344d29
|
[
"Apache-2.0"
] | 6
|
2021-04-08T20:29:48.000Z
|
2022-01-13T02:38:43.000Z
|
homepage/models.py
|
abahernest/The-Pit
|
b3509946bb9d7d06ff202b5b6861b66376344d29
|
[
"Apache-2.0"
] | null | null | null |
from django.db import models
class Background (models.Model):
image =models.ImageField(upload_to='img/')
text = models.CharField(max_length=300)
def __str__(self):
return self.text
| 25.25
| 46
| 0.707921
| 27
| 202
| 5.074074
| 0.814815
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018182
| 0.183168
| 202
| 8
| 47
| 25.25
| 0.812121
| 0
| 0
| 0
| 0
| 0
| 0.019704
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.166667
| 0.166667
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
bae27aa98847efc86680e3771c661e1a70bd43c7
| 71
|
py
|
Python
|
examples/header.py
|
nickzhuang0613/lvgl
|
b7e6f6763d6fa967bacaca5a60ee33321f73c8a5
|
[
"MIT"
] | 5,238
|
2020-05-18T13:08:59.000Z
|
2022-03-31T21:38:16.000Z
|
examples/header.py
|
imliubo/lvgl
|
93c1303ee7989d25216262e1d0ea244b59b975f6
|
[
"MIT"
] | 1,590
|
2020-05-18T09:47:07.000Z
|
2022-03-31T23:26:41.000Z
|
examples/header.py
|
imliubo/lvgl
|
93c1303ee7989d25216262e1d0ea244b59b975f6
|
[
"MIT"
] | 1,214
|
2020-05-19T00:45:08.000Z
|
2022-03-31T06:41:13.000Z
|
#!/opt/bin/lv_micropython -i
import lvgl as lv
import display_driver
| 11.833333
| 28
| 0.774648
| 12
| 71
| 4.416667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140845
| 71
| 5
| 29
| 14.2
| 0.868852
| 0.380282
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
bafe88c5557ca3f639467d69f6932f271830c049
| 133
|
py
|
Python
|
setup.py
|
keith/extract-section
|
371e62755f31a119c2e3142a7618a9a805b21e00
|
[
"MIT"
] | 9
|
2017-03-04T09:23:50.000Z
|
2021-06-15T21:27:55.000Z
|
setup.py
|
keith/extract-section
|
371e62755f31a119c2e3142a7618a9a805b21e00
|
[
"MIT"
] | null | null | null |
setup.py
|
keith/extract-section
|
371e62755f31a119c2e3142a7618a9a805b21e00
|
[
"MIT"
] | null | null | null |
import setuptools
setuptools.setup(
name='extract-section',
scripts=['extract-section'],
install_requires=['r2pipe'],
)
| 16.625
| 32
| 0.691729
| 13
| 133
| 7
| 0.769231
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00885
| 0.150376
| 133
| 7
| 33
| 19
| 0.79646
| 0
| 0
| 0
| 0
| 0
| 0.270677
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.166667
| 0
| 0.166667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
2407ef7811ea096868c63542741ea1355b12b5e8
| 270
|
py
|
Python
|
src/jupyter_notebook_config.py
|
zietzm/hail-on-EMR
|
8519c0ff1268ce182343c78ee8e772d6ef936c6a
|
[
"Apache-2.0"
] | 2
|
2019-07-24T02:35:19.000Z
|
2019-10-24T20:27:59.000Z
|
src/jupyter_notebook_config.py
|
zietzm/hail-on-EMR
|
8519c0ff1268ce182343c78ee8e772d6ef936c6a
|
[
"Apache-2.0"
] | 1
|
2019-01-14T09:56:18.000Z
|
2019-01-14T09:56:18.000Z
|
src/jupyter_notebook_config.py
|
zietzm/hail-on-EMR
|
8519c0ff1268ce182343c78ee8e772d6ef936c6a
|
[
"Apache-2.0"
] | 6
|
2018-12-10T05:39:22.000Z
|
2019-10-25T17:11:11.000Z
|
c.NotebookApp.open_browser = False
c.NotebookApp.ip='0.0.0.0' #'*'
c.NotebookApp.port = 8192
c.NotebookApp.password = u'sha1:45f7d7ac038c:c36b98f22eac5921c435095af65a9a00b0e1eeb9'
c.Authenticator.admin_users = {'jupyter'}
c.LocalAuthenticator.create_system_users = True
| 38.571429
| 86
| 0.807407
| 34
| 270
| 6.294118
| 0.647059
| 0.224299
| 0.028037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.158103
| 0.062963
| 270
| 6
| 87
| 45
| 0.687747
| 0.011111
| 0
| 0
| 0
| 0
| 0.270677
| 0.218045
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
240b47e64a0bc14aa7cea73b8a29c3caf4f26f51
| 235
|
wsgi
|
Python
|
hook_system/processor/app.wsgi
|
onyiny-ang/chloroplasts
|
06ea45454f5fd1de88a7ae366130f0521410a7d5
|
[
"MIT"
] | 1
|
2019-04-29T19:36:15.000Z
|
2019-04-29T19:36:15.000Z
|
hook_system/processor/app.wsgi
|
onyiny-ang/chloroplasts
|
06ea45454f5fd1de88a7ae366130f0521410a7d5
|
[
"MIT"
] | 13
|
2019-08-19T17:53:32.000Z
|
2022-03-02T03:53:20.000Z
|
hook_system/processor/app.wsgi
|
onyiny-ang/chloroplasts-2
|
06ea45454f5fd1de88a7ae366130f0521410a7d5
|
[
"MIT"
] | 3
|
2019-04-26T21:34:43.000Z
|
2021-01-07T19:48:53.000Z
|
#!/usr/bin/python3
import logging
import sys
logging.basicConfig(stream=sys.stderr)
sys.path.insert(0, '/home/4F00/chloroplasts/hook_system/processor/Src') #Needs to be set to where the source code is
from Src import app as application
| 39.166667
| 116
| 0.8
| 39
| 235
| 4.794872
| 0.820513
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023585
| 0.097872
| 235
| 6
| 117
| 39.166667
| 0.858491
| 0.255319
| 0
| 0
| 0
| 0
| 0.281609
| 0.281609
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
243d8c089507858dde2425ec2d15263cf5f87f7f
| 48
|
py
|
Python
|
mysite/ct/__init__.py
|
cjlee112/socraticqs2
|
2e7dd9d2ec687f68ca8ca341cf5f1b3b8809c820
|
[
"Apache-2.0"
] | 8
|
2015-06-02T15:34:44.000Z
|
2019-03-21T12:27:30.000Z
|
mysite/ct/__init__.py
|
cjlee112/socraticqs2
|
2e7dd9d2ec687f68ca8ca341cf5f1b3b8809c820
|
[
"Apache-2.0"
] | 761
|
2015-01-07T05:13:08.000Z
|
2022-02-10T10:23:37.000Z
|
mysite/ct/__init__.py
|
cjlee112/socraticqs2
|
2e7dd9d2ec687f68ca8ca341cf5f1b3b8809c820
|
[
"Apache-2.0"
] | 12
|
2015-01-28T20:09:36.000Z
|
2018-03-20T13:32:11.000Z
|
default_app_config = 'ct.apps.CourseletsConfig'
| 24
| 47
| 0.833333
| 6
| 48
| 6.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 48
| 1
| 48
| 48
| 0.844444
| 0
| 0
| 0
| 0
| 0
| 0.5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
245a02179553283a32e949bf8e2e4620e7d915b5
| 139
|
py
|
Python
|
groups/apps.py
|
mahoyen/web
|
1d190a86e3277315804bfcc0b8f9abd4f9c1d780
|
[
"MIT"
] | null | null | null |
groups/apps.py
|
mahoyen/web
|
1d190a86e3277315804bfcc0b8f9abd4f9c1d780
|
[
"MIT"
] | null | null | null |
groups/apps.py
|
mahoyen/web
|
1d190a86e3277315804bfcc0b8f9abd4f9c1d780
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class GroupsConfig(AppConfig):
name = 'groups'
def ready(self):
import groups.signals
| 15.444444
| 33
| 0.690647
| 16
| 139
| 6
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.230216
| 139
| 8
| 34
| 17.375
| 0.897196
| 0
| 0
| 0
| 0
| 0
| 0.043165
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
245d15c19e45585ffb828460093ece8f8b5cfe65
| 77
|
py
|
Python
|
kadai_001/sample_004_nl.py
|
a1852rw/system_propraming
|
795ce06e9c1b48552577de472194646bab240541
|
[
"MIT"
] | 2
|
2018-07-02T09:48:28.000Z
|
2018-07-04T10:17:40.000Z
|
kadai_001/sample_004_nl.py
|
a1852rw/aiit_system_programing
|
795ce06e9c1b48552577de472194646bab240541
|
[
"MIT"
] | null | null | null |
kadai_001/sample_004_nl.py
|
a1852rw/aiit_system_programing
|
795ce06e9c1b48552577de472194646bab240541
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python3
# nl相当のプログラウを目指して書いていく((´;ω;`)) 正直どうやればいいかさっぱりわからない
| 19.25
| 51
| 0.727273
| 8
| 77
| 7.125
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014286
| 0.090909
| 77
| 3
| 52
| 25.666667
| 0.785714
| 0.935065
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
246152b2329dea414c5d374580c8e93dbf3be205
| 4,797
|
py
|
Python
|
src/lms-harmonizer/tests_integration_sql/views/mssql/test_exceptions_LMSUser.py
|
markramonDL/LMS-Toolkit
|
d7097f9e063f39a45c8a08ec7316d2a1c4034e50
|
[
"Apache-2.0"
] | 3
|
2020-10-15T10:29:59.000Z
|
2020-12-01T21:40:55.000Z
|
src/lms-harmonizer/tests_integration_sql/views/mssql/test_exceptions_LMSUser.py
|
markramonDL/LMS-Toolkit
|
d7097f9e063f39a45c8a08ec7316d2a1c4034e50
|
[
"Apache-2.0"
] | 40
|
2020-08-17T21:08:33.000Z
|
2021-02-02T19:56:09.000Z
|
src/lms-harmonizer/tests_integration_sql/views/mssql/test_exceptions_LMSUser.py
|
markramonDL/LMS-Toolkit
|
d7097f9e063f39a45c8a08ec7316d2a1c4034e50
|
[
"Apache-2.0"
] | 10
|
2021-06-10T16:27:27.000Z
|
2021-12-27T12:31:57.000Z
|
# SPDX-License-Identifier: Apache-2.0
# Licensed to the Ed-Fi Alliance under one or more agreements.
# The Ed-Fi Alliance licenses this file to you under the Apache License, Version 2.0.
# See the LICENSE and NOTICES files in the project root for more information.
from tests_integration_sql.mssql_loader import (
insert_lms_user,
insert_lms_user_deleted,
insert_edfi_student,
)
from tests_integration_sql.mssql_connection import MSSqlConnection, query
from tests_integration_sql.server_config import ServerConfig
from tests_integration_sql.orchestrator import run_harmonizer
SOURCE_SYSTEM = "Canvas"
def describe_when_lms_and_ods_tables_are_both_empty():
def it_should_return_no_exceptions(test_db_config: ServerConfig):
# act
run_harmonizer(test_db_config)
# Assert
with MSSqlConnection(test_db_config).pyodbc_conn() as connection:
exceptions = query(
connection, "SELECT SourceSystemIdentifier FROM lmsx.exceptions_LMSUser"
)
assert len(exceptions) == 0
def describe_when_lms_and_ods_tables_have_no_matches():
SIS_ID_1 = "sis_id_1"
SIS_ID_2 = "sis_id_2"
def it_should_return_exceptions(test_db_config: ServerConfig):
# arrange
with MSSqlConnection(test_db_config).pyodbc_conn() as connection:
insert_lms_user(connection, SIS_ID_1, "e1@e.com", SOURCE_SYSTEM)
insert_lms_user(connection, SIS_ID_2, "e2@e.com", SOURCE_SYSTEM)
insert_edfi_student(connection, "not_matching_sis_id_1")
insert_edfi_student(connection, "not_matching_sis_id_2")
# act
run_harmonizer(test_db_config)
# assert
with MSSqlConnection(test_db_config).pyodbc_conn() as connection:
exceptions = query(
connection, "SELECT SourceSystemIdentifier FROM lmsx.exceptions_LMSUser"
)
assert len(exceptions) == 2
assert exceptions[0]["SourceSystemIdentifier"] == SIS_ID_1
assert exceptions[1]["SourceSystemIdentifier"] == SIS_ID_2
def describe_when_lms_and_ods_tables_have_a_match():
STUDENT_ID = "10000000-0000-0000-0000-000000000000"
SIS_ID = "sis_id"
UNIQUE_ID = f"{SIS_ID}1"
def it_should_return_no_exceptions(test_db_config: ServerConfig):
# arrange
with MSSqlConnection(test_db_config).pyodbc_conn() as connection:
insert_lms_user(connection, SIS_ID, "e1@e.com", SOURCE_SYSTEM)
insert_edfi_student(connection, UNIQUE_ID, STUDENT_ID)
# act
run_harmonizer(test_db_config)
# assert
with MSSqlConnection(test_db_config).pyodbc_conn() as connection:
exceptions = query(
connection, "SELECT SourceSystemIdentifier FROM lmsx.exceptions_LMSUser"
)
assert len(exceptions) == 0
def describe_when_lms_and_ods_tables_have_a_match_to_deleted_record():
STUDENT_ID = "10000000-0000-0000-0000-000000000000"
SIS_ID = "sis_id"
UNIQUE_ID = f"{SIS_ID}1"
def it_should_return_no_exceptions(test_db_config: ServerConfig):
# arrange
with MSSqlConnection(test_db_config).pyodbc_conn() as connection:
insert_lms_user_deleted(connection, SIS_ID, "e1@e.com", SOURCE_SYSTEM)
insert_edfi_student(connection, UNIQUE_ID, STUDENT_ID)
# act
run_harmonizer(test_db_config)
# assert
with MSSqlConnection(test_db_config).pyodbc_conn() as connection:
exceptions = query(
connection, "SELECT SourceSystemIdentifier FROM lmsx.exceptions_LMSUser"
)
assert len(exceptions) == 0
def describe_when_lms_and_ods_tables_have_one_match_and_one_not_match():
STUDENT_ID = "10000000-0000-0000-0000-000000000000"
SIS_ID = "sis_id"
UNIQUE_ID = f"{SIS_ID}1"
NOT_MATCHING_SIS_ID = "not_matching_sis_id"
def it_should_return_one_exception(test_db_config: ServerConfig):
# arrange
with MSSqlConnection(test_db_config).pyodbc_conn() as connection:
insert_lms_user(connection, SIS_ID, "e1@e.com", SOURCE_SYSTEM)
insert_edfi_student(connection, UNIQUE_ID, STUDENT_ID)
insert_lms_user(connection, NOT_MATCHING_SIS_ID, "e2@e.com", SOURCE_SYSTEM)
insert_edfi_student(connection, "also_not_matching_unique_id")
# act
run_harmonizer(test_db_config)
# assert
with MSSqlConnection(test_db_config).pyodbc_conn() as connection:
exceptions = query(
connection, "SELECT SourceSystemIdentifier FROM lmsx.exceptions_LMSUser"
)
assert len(exceptions) == 1
assert exceptions[0]["SourceSystemIdentifier"] == NOT_MATCHING_SIS_ID
| 36.618321
| 88
| 0.69877
| 597
| 4,797
| 5.21608
| 0.174204
| 0.041747
| 0.073218
| 0.072254
| 0.756262
| 0.72447
| 0.709698
| 0.700064
| 0.681118
| 0.652216
| 0
| 0.03425
| 0.227017
| 4,797
| 130
| 89
| 36.9
| 0.805556
| 0.071503
| 0
| 0.55
| 0
| 0
| 0.150395
| 0.105524
| 0
| 0
| 0
| 0
| 0.1
| 1
| 0.125
| false
| 0
| 0.05
| 0
| 0.175
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
2467c7fa6f0e84d09100e8474b9321da88ab22ee
| 157
|
py
|
Python
|
bot/loader.py
|
LD31D/anonim_chat_bot
|
5d4c88f228a77195e6835aab6ad93eb7f116c0c1
|
[
"MIT"
] | 2
|
2021-06-04T10:16:25.000Z
|
2021-10-06T10:32:41.000Z
|
bot/loader.py
|
LD31D/anonim_chat_bot
|
5d4c88f228a77195e6835aab6ad93eb7f116c0c1
|
[
"MIT"
] | null | null | null |
bot/loader.py
|
LD31D/anonim_chat_bot
|
5d4c88f228a77195e6835aab6ad93eb7f116c0c1
|
[
"MIT"
] | 3
|
2021-12-31T10:07:42.000Z
|
2022-01-11T06:29:15.000Z
|
from aiogram import Bot, Dispatcher, types
from .config import BOT_TOKEN
bot = Bot(token=BOT_TOKEN, parse_mode=types.ParseMode.HTML)
dp = Dispatcher(bot)
| 19.625
| 59
| 0.783439
| 24
| 157
| 5
| 0.541667
| 0.2
| 0.183333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127389
| 157
| 7
| 60
| 22.428571
| 0.875912
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
031064c9db922af6bb79104c6dad9142edd41519
| 351
|
py
|
Python
|
users/__init__.py
|
siret-junior/somhunter-simulator
|
6ec6371f32758a5514c285b900dbf4f0e214953b
|
[
"MIT"
] | 2
|
2022-01-27T17:06:00.000Z
|
2022-01-27T17:43:01.000Z
|
users/__init__.py
|
siret-junior/somhunter-simulator
|
6ec6371f32758a5514c285b900dbf4f0e214953b
|
[
"MIT"
] | null | null | null |
users/__init__.py
|
siret-junior/somhunter-simulator
|
6ec6371f32758a5514c285b900dbf4f0e214953b
|
[
"MIT"
] | null | null | null |
from .ideal_user import IdealUser
from .logit_user import LogitUser
from .null_user import NullUser
from .ransam_multiple_prior_user import RanSamMultiplePriorUser
from .ransam_prior_user import RanSamPriorUser
from .ransam_smooth_user import RanSamSmoothUser
from .ransam_universal_user import RanSamUniversalUser
from .ransam_user import RanSamUser
| 39
| 63
| 0.88604
| 45
| 351
| 6.622222
| 0.422222
| 0.268456
| 0.100671
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091168
| 351
| 8
| 64
| 43.875
| 0.934169
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
0316e77d98b4d27f1158bc05ac0e39a27a92f792
| 18,866
|
py
|
Python
|
InteligenciaBot.py
|
Aukan96/Diario-bot-1.0
|
6627f03b931842ac8ae6b5d80b63f18e5e95f7d2
|
[
"Apache-2.0"
] | null | null | null |
InteligenciaBot.py
|
Aukan96/Diario-bot-1.0
|
6627f03b931842ac8ae6b5d80b63f18e5e95f7d2
|
[
"Apache-2.0"
] | null | null | null |
InteligenciaBot.py
|
Aukan96/Diario-bot-1.0
|
6627f03b931842ac8ae6b5d80b63f18e5e95f7d2
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import re
import random
import sys
import unicodedata
import my_conexion
sys.stdout.encoding
'UTF-8'
# PROCESAR MENSAJE DE USUARIO
# ---------------------------------------------------------------------------
# Tablas de patrones y respuestas de generarRespuesta. Todos los textos de
# respuesta se conservan byte a byte respecto a la versión anterior.
# ---------------------------------------------------------------------------

_SALUDO_RE = r'^(h|H)ola|HOLA|(Q|q)u(e|é)\stal|QU(É|E)\sTAL|((B|b)uenas)|BUENAS|(Q|q)u(e|é)\sonda|QU(E|É)\sONDA|(H|h)ello|HELLO|(H|h)i|HI|(Q|q)iubo|QUIUBO|(S|s)aludos|SALUDOS|(B|b)uenos\sd(i|í)as|BUENOS\sD(I|Í)AS($|.*)'
_SALUDO_RESP = (
    "\nHola, soy DiarioBot, me encantaria platicar contigo...",
    "\nHola, yo soy tu diario personal...",
    "\nHola, vamos a platicar...",
)

# CORRECCION: el original escribía "el(i\í)z" (barra invertida en lugar de
# '|'), por lo que la palabra "feliz" nunca coincidía con este patrón.
_SENT_POS_RE = r'(.*|^)((E|e)stoy|(M|m)e\s(fue|(S|s)iento)|puse|sent(i|í))(.*|(M|m)uy)\s((F|f)el(i|í)z|(C|c)ontent(o|a)|(A|a)legre|(B|b)ien)($|.*)'
_SENT_POS_RESP = (
    "\nMe alegra que te sientas así, espero todos los días te sientas de esa manera, ¿Que más pasó?...",
    "\nQue bueno que estes así, ¿Que mas pasó?...",
    "\nMuy bien, que siempre sea así, ¿Que mas pasó?...",
)

_SENT_NEG_RE = r'(.*|^)((E|e)stoy|(M|m)e\s(fue|(S|s)iento)|puse|sent(i|í))(.*|(M|m)uy)\s((T|t)riste|(M|m)al|(I|i)nfel(i|í)z|(D|d)ecaid(o|a))($|.*)'
_SENT_NEG_RESP = (
    "\nNo me gusta que te sientas asi animo, Cuenta me más...",
    "\nQue mal en serio, Cuenta me más...",
    "\nNo te preocupes, se que estaras mejor mañana, Cuenta me más...",
)

# Pérdida (fallecimiento o ausencia) de un ser querido:
# (clave de estado, regex "verbo primero", regex "sujeto primero").
_PERDIDAS = (
    ('mama',
     r'(.*|^)(fallecio|murio|no\stengo)(|\smi)\smam(a|á)($|.*)',
     r'(.*|^)mi\smam(a|á)\s(fallecio|murio)($|.*)'),
    ('papa',
     r'(.*|^)(falleci(o|ó)|muri(o|ó)|no\stengo)(|\smi)\spap(a|á)($|.*)',
     r'(.*|^)mi\spap(a|á)\s(fallecio|murio)($|.*)'),
    ('hermano',
     r'(.*|^)(fallecio|murio|no\stengo)(|\smi)\sherman(o|a)($|.*)',
     r'(.*|^)mi\sherman(o|a)\s(fallecio|murio)($|.*)'),
    ('mascota',
     r'(.*|^)(fallecio|murio|no\stengo)(|\smi)\s(mascota|perro|gato|pajaro|pez|rana|tortuga|iguana)($|.*)',
     r'(.*|^)mi\s(mascota|perro|gato|pajaro|pez|rana|tortuga|iguana)\s(fallecio|murio)($|.*)'),
    ('amigo',
     r'(.*|^)(fallecio|murio|no\stengo)(|\smi)\smejor amig(o|a)($|.*)',
     r'(.*|^)mi\smejor amig(o|a)\s(fallecio|murio)($|.*)'),
)
_CONDOLENCIAS = (
    "\nLo siento mucho, se que es algo demaciado doloroso, ¿Quieres seguir platicando?...",
    "\nQue fuerte, lo siento ¿Quieres seguir platicando?...",
)

# Ánimo reportado de terceros:
# (clave, regex positivo, respuestas positivas, regex negativo, respuestas negativas).
_TERCEROS = (
    ('mama',
     r'(.*|^)((M|m)i\s(mama|mamá|madre)\s(es|esta)(.*|algo|un poco|mucho|mucho muy|muy)\s(excelente|bien|alegre|animada|apasionada|cariñosa|contenta|encantada|euforica|exitada|feliz|satisfecha|orgullosa))($|.*)',
     ("\nOh Que bien que tu mamá este asi...",
      "\nMe alegra leer esto de tu mamá...",
      "\nMaravilloso, que todo siga asi para tu mamá..."),
     r'(.*|^)((M|m)i\s(mama|mamá|madre)\s(es|esta)(.*|algo|un poco|mucho|mucho muy|muy)\s(abrumada|mal|mala|enferma|afligida|agotada|amargada|angustiada|apatica|arrepentida|asustada|aterrada|avergonzada|celosa|cansada|confundida|debil|decaida|decepcionada|deprimida|desanimada|desesperada|enojada|infeliz|herida|insegura|triste|tensa|molesta|irritada))($|.*)',
     ("\nmmm que mal que tu mama este asi...",
      "\nLo siento, que todo mejore para tu mamá...",
      "\nLeer esto de tu mama no me gusta, lo siento por ella...")),
    ('papa',
     r'(.*|^)((M|m)i\s(papa|papá|padre)\s(es|esta)(.*|algo|un poco|mucho|mucho muy|muy)\s(excelente|bien|alegre|animado|apasionado|cariñoso|contento|encantado|euforico|exitado|feliz|satisfecho|orgullos))($|.*)',
     ("\nOh Que bien que tu papá este así...",
      "\nMe alegra leer esto de tu papá...",
      "\nMaravilloso, que todo siga asi para tu padre..."),
     r'(.*|^)((M|m)i\s(papa|papá|padre)\s(es|esta)(.*|algo|un poco|mucho|mucho muy|muy)\s(abrumado|mal|afligido|agotado|malo|enfermo|amargado|angustiado|apatico|arrepentido|asustado|aterrado|avergonzado|celoso|cansado|confundido|debil|decaido|decepcionado|deprimido|desanimado|desesperado|enojado|infeliz|herido|inseguro|triste|tenso|molesto|irritado))($|.*)',
     ("\nmmm que mal que tu papa este asi...",
      "\nLo siento, que todo mejore para tu papa...",
      "\nLeer esto de tu papa no me gusta, lo siento por ella...")),
    ('hermano',
     r'(.*|^)((M|m)i\s(herman(o|a))\s(es|esta)(.*|algo|un poco|mucho|mucho muy|muy)\s(excelente|bien|alegre|animado|apasionado|cariñoso|contento|encantado|euforico|exitado|feliz|satisfecho|orgullos))($|.*)',
     ("\nOh Que bien que tu hermano este asi...",
      "\nMe alegra leer esto de tu hermano...",
      "\nMaravilloso, que todo siga asi para tu hermano..."),
     r'(.*|^)((M|m)i\s(herman(o|a))\s(es|esta)(.*|algo|un poco|mucho|mucho muy|muy)\s(abrumado|afligido|agotado|malo|enfermo|amargado|angustiado|apatico|arrepentido|asustado|aterrado|avergonzado|celoso|cansado|confundido|debil|decaido|decepcionado|deprimido|desanimado|desesperado|enojado|infeliz|herido|inseguro|triste|tenso|molesto|irritado))($|.*)',
     ("\nmmm que mal que tu hermano este asi...",
      "\nLo siento, que todo mejore para tu hermano...",
      "\nLeer esto de tu hermano no me gusta, lo siento por ella...")),
    ('mascota',
     r'(.*|^)((M|m)i(.*|s)\s(mascota|perro|gato|pajaro|pez|rana|tortuga|iguana)(.*|s)\s(es|esta|estan|son)(.*|algo|un poco|mucho|mucho muy|muy)\s(excelente(.*|s)|bien|alegre(.*|s)|animad(o|a)(.*|s)|apasionad(o|a)(.*|s)|cariños(o|a)(.*|s)|content(o|a)(.*|s)|encantad(o|a)(.*|s)|euforic(o|a)(.*|s)|exitad(o|a)(.*|s)|feliz|felices|satisfech(o|a)(.*|s)|orgullos(o|a)(.*|s)))($|.*)',
     ("\nOh Que bien que tu mascota este asi...",
      "\nMe alegra leer esto de tu mascota...",
      "\nMaravilloso, que todo siga asi para tu mascota..."),
     r'(.*|^)((M|m)i(.*|s)\s(mascota|perro|gato|pajaro|pez|rana|tortuga|iguana)(.*|s)\s(es|esta|estan|son)(.*|algo|un poco|mucho|mucho muy|muy)\s(abrumad(o|a)(.*|s)|mal(o|a)|enferm(o|a)|afligid(o|a)(.*|s)|agotad(o|a)(.*|s)|amargad(o|a)(.*|s)|angustiad(o|a)(.*|s)|apatic(o|a)(.*|s)|arrepentid(o|a)(.*|s)|asustad(o|a)(.*|s)|aterrad(o|a)(.*|s)|avergonzad(o|a)(.*|s)|celos(o|a)(.*|s)|cansad(o|a)(.*|s)|confundid(o|a)(.*|s)|debil|debiles|decaid(o|a)(.*|s)|decepcionad(o|a)(.*|s)|deprimid(o|a)(.*|s)|desanimad(o|a)(.*|s)|desesperad(o|a)(.*|s)|enojad(o|a)(.*|s)|infeliz|infelices|herid(o|a)(.*|s)|insegur(o|a)(.*|s)|triste(.*|s)|tens(o|a)(.*|s)|molest(o|a)(.*|s)|irritad(o|a)(.*|s)))($|.*)',
     ("\nmmm que mal que tu mascota este asi...",
      "\nLo siento, que todo mejore para tu mascota...",
      "\nLeer esto de tu mascota no me gusta, lo siento por ella...")),
    ('amigo',
     r'(.*|^)((M|m)i(.*|s)\s(mejores amigos|mejor amigo|amig(o|a)(.*|s))\s(es|esta|son|estan)(.*|algo|un poco|mucho|mucho muy|muy)\s(excelente(.*|s)|bien|alegre|alegres|animad(o|a)|animad(o|a)s|apasionad(o|a)|apasionad(o|a)s|cariños(o|a)|cariños(o|a)s|content(o|a)|content(o|a)s|encantad(o|a)|encantad(o|a)s|euforic(o|a)|euforic(o|a)s|exitad(o|a)|exitad(o|a)s|feliz|felices|satisfech(a|o)|satisfech(o|a)s|orgullos(o|a)|orgullos(o|a)s))($|.*)',
     ("\nOh Que bien que tu amigo este asi...",
      "\nMe alegra leer esto de tu amigo...",
      "\nMaravilloso, que todo siga asi para tu amigo..."),
     r'(.*|^)((M|m)i(.*|s)\s(mejores amigos|mejor amigo|amig(o|a)(.*|s))\s(es|esta|son|estan)(.*|algo|un poco|mucho|mucho muy|muy)\s(abrumad(o|a)(.*|s)|afligid(o|a)(.*|s)|agotad(o|a)(.*|s)|amargad(o|a)(.*|s)|angustiad(o|a)(.*|s)|apatic(o|a)(.*|s)|arrepentid(o|a)(.*|s)|asustad(o|a)(.*|s)|aterrad(o|a)(.*|s)|avergonzad(o|a)(.*|s)|celos(o|a)(.*|s)|cansad(o|a)(.*|s)|confundid(o|a)(.*|s)|debil|debiles|decaid(o|a)(.*|s)|decepcionad(o|a)(.*|s)|deprimid(o|a)(.*|s)|desanimad(o|a)(.*|s)|desesperad(o|a)(.*|s)|enojad(o|a)(.*|s)|infeliz|infelices|herid(o|a)(.*|s)|insegur(o|a)(.*|s)|triste(.*|s)|tens(o|a)(.*|s)|molest(o|a)(.*|s)|irritad(o|a)(.*|s)))($|.*)',
     ("\nmmm que mal que tu amigo este asi...",
      "\nLo siento, que todo mejore para tu amigo...",
      "\nLeer esto de tu amigo no me gusta, lo siento por ella...")),
)

_NO_TRABAJO_RE = r'(.*|^)((no\strabajo)|(no\stengo\strabajo))($|.*)'
_NO_ESCUELA_RE = r'(.*|^)no\sestudio($|.*)'
_TRABAJO_RE = r'(.*|^)(((M|m)i|(E|e)l|(E|e)n\sel)\s(negocio|empleo|trabajo)\s(es|esta|son|estuvo))($|.*)'
_ESCUELA_RE = r'(.*|^)(((M|m)i|(E|e)l|(E|e)n\sel)\s(escuela|universidad|prepa|preparatoria|secu|secundaria|primaria)\s(es|esta|son|estuvo))($|.*)'

# Preguntas de conversación: seguimiento (vez > 0) e inicio (vez == 0).
_SEGUIMIENTO = (
    "\n ¿cómo te fue hoy?...",
    "\n ¿cómo estuvo tu dia de hoy?...",
    "\n ¿que hiciste el dia de hoy?...",
    "\n ¿algo interesante que hicieras hoy?...",
    "\n ¿que te paso el dia de hoy?...",
    "\n ¿Que mas hiciste en tu dia?...",
    "\n Y ¿Que mas?...",
    "\n ¿Algo mas que quieras platicarme?...",
    "\n ¿Que mas hiciste?...",
)
_INICIO = (
    "\n ¿cómo estas hoy?...",
    "\n ¿cómo te sientes el dia de hoy?...",
    "\n ¿cómo te sentiste hoy?...",
    "\n ¿que te paso el dia de hoy?...",
)

# Temas aún no tocados: (clave, turno mínimo para preguntar, pregunta).
_PENDIENTES = (
    ('mama', 2, "\n Cuéntame, ¿Como esta tu mamá?"),
    ('papa', 0, "\n¿Cómo esta tu papá?"),
    ('hermano', 0, "\n¿Que tal tu hermano?"),
    ('mascota', 0, "\n¿Que tal tu mascota?"),
    ('amigo', 0, "\n¿Como esta tu mejor amigo?"),
    ('escuela', 0, "\n¿Como vas en la escuela?"),
    ('trabajo', 0, "\n¿Como estubo el trabajo?"),
)

_ADIOS_RE = r'(H|h)asta\sluego|HASTA\sLUEGO|(A|a)di(o|ó)s|ADI(O|Ó)S|(N|n)os\svemos|NOS\sVEMOS|(C|c)hao|CHAO|(B|b)ye|BYE($)'
_ADIOS_RESP = (
    "\n Adios, fue un gusto platicar contigo.",
    "\n Adios, Me encanta platicar contigo.",
    "\n Adios, Te deseo suerte y que tus dias sean mejores.",
    "\n Adios, Espero que vuelvas a platicar conmigo.... ;)",
)


def _al_azar(opciones):
    # Elige una opción uniformemente al azar (equivale a los if ev == n del
    # código anterior).
    return opciones[random.randint(1, len(opciones)) - 1]


# PROCESAR MENSAJE DE USUARIO
def generarRespuesta(mensaje, STATES, username):
    """Genera la respuesta del bot para `mensaje` y actualiza `STATES`.

    Parameters
    ----------
    mensaje : str
        Texto escrito por el usuario; se normaliza con sinAcentos antes
        de aplicar los patrones.
    STATES : dict
        Estado de la conversación (se modifica in situ). Se usan las
        claves 'vez', 'mama', 'papa', 'hermano', 'mascota', 'amigo',
        'trabajo' y 'escuela'.
    username : str
        Usuario cuyos cambios de estado se persisten con
        my_conexion.cambiar_estado.

    Returns
    -------
    str
        Respuesta acumulada del bot.

    Correcciones respecto a la versión anterior:
    - Los dos bloques de pérdida de 'hermano' comparaban el mensaje contra
      el patrón de 'papa' (error de copia/pega), por lo que la pérdida de
      un hermano nunca se detectaba; ahora cada sujeto usa su patrón.
    - El chequeo final de temas pendientes usaba STATES[username]['mama']
      mientras el resto del código usa STATES['mama']; se unificó.
    - El patrón de sentimiento positivo no reconocía "feliz" por un error
      tipográfico en la alternancia (ver _SENT_POS_RE).
    """
    message_ = sinAcentos(mensaje)
    ans = ''
    vez = STATES['vez']
    print(message_)  # traza de depuración conservada del original

    # Saludo inicial.
    if re.match(_SALUDO_RE, message_):
        ans = ans + _al_azar(_SALUDO_RESP)

    # Sentimiento propio (positivo / negativo).
    if re.match(_SENT_POS_RE, message_):
        ans = ans + _al_azar(_SENT_POS_RESP)
    if re.match(_SENT_NEG_RE, message_):
        ans = ans + _al_azar(_SENT_NEG_RESP)

    # Pérdidas de seres queridos: se persiste el estado, se marca el tema
    # como tratado y se responde con una condolencia.
    for clave, patron_verbo, patron_sujeto in _PERDIDAS:
        for patron in (patron_verbo, patron_sujeto):
            if re.match(patron, message_):
                my_conexion.cambiar_estado(clave, username)
                STATES[clave] = True
                ans = ans + _al_azar(_CONDOLENCIAS)

    # "no trabajo" / "no estudio": se marca el tema sin responder nada.
    if re.match(_NO_TRABAJO_RE, message_):
        my_conexion.cambiar_estado('trabajo', username)
        STATES['trabajo'] = True
    if re.match(_NO_ESCUELA_RE, message_):
        my_conexion.cambiar_estado('escuela', username)
        STATES['escuela'] = True

    # Ánimo reportado de familiares, mascotas y amigos.
    for clave, patron_pos, resp_pos, patron_neg, resp_neg in _TERCEROS:
        if re.match(patron_pos, message_):
            ans = ans + _al_azar(resp_pos)
            STATES[clave] = True
        if re.match(patron_neg, message_):
            ans = ans + _al_azar(resp_neg)
            STATES[clave] = True

    # Mención directa del trabajo o la escuela.
    if re.match(_TRABAJO_RE, message_):
        STATES['trabajo'] = True
    if re.match(_ESCUELA_RE, message_):
        STATES['escuela'] = True

    # Conversaciones del Bot: pregunta según el número de turno.
    if vez > 0:
        ans = ans + _al_azar(_SEGUIMIENTO)
    if vez == 0:
        ans = ans + _al_azar(_INICIO)

    # REVISAR ESTADO: pregunta por el primer tema aún no tratado.
    for clave, minimo, pregunta in _PENDIENTES:
        if STATES[clave] == 'False':
            if clave == 'mama':
                print(vez)  # traza de depuración conservada del original
            if vez > minimo:
                ans = ans + pregunta
            break  # solo se pregunta por un tema (cadena elif original)

    # Despedida: descarta lo acumulado y fuerza el fin de la sesión.
    if re.match(_ADIOS_RE, message_):
        vez = 100
        ans = _al_azar(_ADIOS_RESP)

    vez = vez + 1
    STATES['vez'] = vez
    return ans
def sinAcentos(Mensaje):
    """Devuelve `Mensaje` en minúsculas y sin acentos ni diacríticos.

    Normaliza el texto a NFD y descarta los caracteres de la categoría
    Unicode 'Mn' (marcas de combinación), p. ej. 'Canción' -> 'cancion'.

    BUG FIX: la versión anterior usaba unicode() y str.decode(), propios
    de Python 2; en Python 3 ambos fallan (NameError / AttributeError).
    """
    normalizado = unicodedata.normalize('NFD', str(Mensaje))
    cadena = ''.join(c for c in normalizado if unicodedata.category(c) != 'Mn')
    return cadena.lower()
| 46.46798
| 705
| 0.577706
| 2,879
| 18,866
| 3.753387
| 0.140327
| 0.015917
| 0.019989
| 0.038497
| 0.763742
| 0.740515
| 0.731353
| 0.684897
| 0.65621
| 0.626411
| 0
| 0.009917
| 0.224955
| 18,866
| 405
| 706
| 46.582716
| 0.726098
| 0.005672
| 0
| 0.605114
| 0
| 0.056818
| 0.518313
| 0.271583
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.017045
| null | null | 0.005682
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
034a1b4ae7ff4f445ccd317c40cbd616fa0d6026
| 1,252
|
py
|
Python
|
tfx/v1/dsl/io/fileio.py
|
avelez93/tfx
|
75fbb6a7d50e99138609be3ca4c3a204a13a2195
|
[
"Apache-2.0"
] | 1,813
|
2019-02-04T17:17:30.000Z
|
2022-03-29T13:39:30.000Z
|
tfx/v1/dsl/io/fileio.py
|
avelez93/tfx
|
75fbb6a7d50e99138609be3ca4c3a204a13a2195
|
[
"Apache-2.0"
] | 2,710
|
2019-02-14T00:41:00.000Z
|
2022-03-31T07:23:00.000Z
|
tfx/v1/dsl/io/fileio.py
|
avelez93/tfx
|
75fbb6a7d50e99138609be3ca4c3a204a13a2195
|
[
"Apache-2.0"
] | 731
|
2019-02-04T17:59:18.000Z
|
2022-03-31T06:45:51.000Z
|
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""TFX DSL file I/O module."""
# pylint: disable=unused-import
from tfx.dsl.io.fileio import copy
from tfx.dsl.io.fileio import exists
from tfx.dsl.io.fileio import glob
from tfx.dsl.io.fileio import isdir
from tfx.dsl.io.fileio import listdir
from tfx.dsl.io.fileio import makedirs
from tfx.dsl.io.fileio import mkdir
from tfx.dsl.io.fileio import NotFoundError
from tfx.dsl.io.fileio import open # pylint: disable=redefined-builtin
from tfx.dsl.io.fileio import PathType
from tfx.dsl.io.fileio import remove
from tfx.dsl.io.fileio import rename
from tfx.dsl.io.fileio import rmtree
from tfx.dsl.io.fileio import stat
from tfx.dsl.io.fileio import walk
| 39.125
| 74
| 0.780351
| 212
| 1,252
| 4.608491
| 0.443396
| 0.09826
| 0.153531
| 0.184237
| 0.368475
| 0.368475
| 0
| 0
| 0
| 0
| 0
| 0.007428
| 0.139776
| 1,252
| 31
| 75
| 40.387097
| 0.899721
| 0.526358
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
cef6fae22f5acd7d7d274b8563364de4fd4fb392
| 40
|
py
|
Python
|
Week 3: Real numbers/3 (19).py
|
MLunov/Python-programming-basics-HSE
|
7df8bba105db84d6b932c454fdc39193a648254e
|
[
"MIT"
] | null | null | null |
Week 3: Real numbers/3 (19).py
|
MLunov/Python-programming-basics-HSE
|
7df8bba105db84d6b932c454fdc39193a648254e
|
[
"MIT"
] | null | null | null |
Week 3: Real numbers/3 (19).py
|
MLunov/Python-programming-basics-HSE
|
7df8bba105db84d6b932c454fdc39193a648254e
|
[
"MIT"
] | null | null | null |
# Read one line of whitespace-separated tokens from stdin and print the
# first two tokens in swapped order.
tokens = input().split()
print(tokens[1], tokens[0])
| 13.333333
| 20
| 0.5
| 8
| 40
| 2.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 0.175
| 40
| 2
| 21
| 20
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
3000c6d69ea8817d6b73b9839efd805d65d0407a
| 1,347
|
py
|
Python
|
bunnyhop/billing.py
|
jefford-qwigo/bunnyhop
|
b039f1a713a234d02aae4d9371133768001095ff
|
[
"Apache-2.0"
] | 4
|
2020-10-06T00:07:25.000Z
|
2021-03-24T07:10:12.000Z
|
bunnyhop/billing.py
|
jefford-qwigo/bunnyhop
|
b039f1a713a234d02aae4d9371133768001095ff
|
[
"Apache-2.0"
] | 3
|
2020-08-18T02:09:08.000Z
|
2021-08-25T01:21:12.000Z
|
bunnyhop/billing.py
|
jefford-qwigo/bunnyhop
|
b039f1a713a234d02aae4d9371133768001095ff
|
[
"Apache-2.0"
] | 1
|
2020-08-05T15:18:21.000Z
|
2020-08-05T15:18:21.000Z
|
from bunnyhop import base
class Billing(base.BaseBunny):
    """API client for the billing endpoints."""

    def get(self):
        """Fetch the account billing state and wrap it in a BillingSummary."""
        # Plain string literal: the original used an f-string with nothing
        # to interpolate (f"/billing").
        return BillingSummary(self.api_key, **self.call_api("/billing", "GET"))

    def apply_code(self, coupon_code):
        """Apply *coupon_code* to the account; returns the raw API response."""
        return self.call_api(f"/billing/applycode?couponCode={coupon_code}", "GET")
class BillingRecord(base.BaseBunny):
    """One billing/payment record as returned by the billing API."""

    Id = base.IntegerProperty()                # record identifier
    PaymentId = base.CharProperty()            # external payment reference
    Amount = base.FloatProperty()              # amount charged
    Payer = base.EmailProperty()               # payer e-mail address
    Timestamp = base.DateTimeProperty()        # when the payment happened
    Type = base.IntegerProperty()              # payment type code
    InvoiceAvailable = base.BooleanProperty()  # invoice can be downloaded

    def __str__(self):
        # "<Id> - <Amount>", same output as the original f-string.
        return "{} - {}".format(self.Id, self.Amount)
class BillingSummary(base.BaseBunny):
    """Aggregated billing state for the account."""

    Balance = base.FloatProperty(required=True)
    ThisMonthCharges = base.FloatProperty(required=True)
    BillingRecords = base.ListProperty(BillingRecord)
    MonthlyChargesStorage = base.FloatProperty(required=True)
    MonthlyChargesEUTraffic = base.FloatProperty(required=True)
    MonthlyChargesUSTraffic = base.FloatProperty(required=True)
    MonthlyChargesASIATraffic = base.FloatProperty(required=True)
    MonthlyChargesSATraffic = base.FloatProperty(required=True)

    def __str__(self):
        # Same output as the original f"{self.Balance}".
        return "{}".format(self.Balance)

    @property
    def billing_records(self):
        """The raw BillingRecords entries re-wrapped as BillingRecord objects."""
        return [
            BillingRecord(self.api_key, **entry)
            for entry in self.BillingRecords
        ]
| 32.071429
| 83
| 0.725316
| 139
| 1,347
| 6.913669
| 0.374101
| 0.141519
| 0.182102
| 0.211238
| 0.083247
| 0.043704
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164811
| 1,347
| 41
| 84
| 32.853659
| 0.854222
| 0
| 0
| 0.066667
| 0
| 0
| 0.071269
| 0.031923
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.033333
| 0.166667
| 0.966667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
302c896a38cfdc1877b56b9324a65d435e40b6d4
| 154
|
py
|
Python
|
django_town/core/exceptions.py
|
uptown/django-town
|
4c3b078a8ce5dcc275d65faa4a1cdfb7ebc74a50
|
[
"MIT"
] | null | null | null |
django_town/core/exceptions.py
|
uptown/django-town
|
4c3b078a8ce5dcc275d65faa4a1cdfb7ebc74a50
|
[
"MIT"
] | null | null | null |
django_town/core/exceptions.py
|
uptown/django-town
|
4c3b078a8ce5dcc275d65faa4a1cdfb7ebc74a50
|
[
"MIT"
] | null | null | null |
"""
Global Django Town exception and warning classes.
"""
class SettingError(Exception):
    """Error related to a Django Town setting (name-based; no raise sites visible here)."""
    pass
class ThirdPartyDependencyError(Exception):
    """Error related to a third-party dependency (name-based; no raise sites visible here)."""
    pass
| 14
| 49
| 0.746753
| 15
| 154
| 7.666667
| 0.733333
| 0.226087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168831
| 154
| 11
| 50
| 14
| 0.898438
| 0.318182
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
302ff1e381d828c4d2852e83489d0fd3fcc13bb5
| 91
|
py
|
Python
|
app/handlers/__init__.py
|
hyzyla/dou-jobs-bot
|
15b263f18fbdda09628a39152b915e3b262b22e7
|
[
"Apache-2.0"
] | null | null | null |
app/handlers/__init__.py
|
hyzyla/dou-jobs-bot
|
15b263f18fbdda09628a39152b915e3b262b22e7
|
[
"Apache-2.0"
] | 1
|
2021-06-02T00:41:29.000Z
|
2021-06-02T00:41:29.000Z
|
app/handlers/__init__.py
|
hyzyla/dou-jobs-bot
|
15b263f18fbdda09628a39152b915e3b262b22e7
|
[
"Apache-2.0"
] | null | null | null |
# Re-export the dispatcher configuration function as the package's public API.
from app.handlers.handlers import configure_dispatcher
__all__ = ['configure_dispatcher']
| 22.75
| 54
| 0.835165
| 10
| 91
| 7
| 0.7
| 0.542857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087912
| 91
| 3
| 55
| 30.333333
| 0.843373
| 0
| 0
| 0
| 0
| 0
| 0.21978
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
303c9f2a2323db38c47409a0073a79d6d417bc4f
| 809
|
py
|
Python
|
src/rqt_py_trees_creator/behaviour_tree.py
|
naveedhd/rqt_py_trees_creator
|
65afa438cb5fa757da7af7f5ab8ec6cb549d3299
|
[
"BSD-3-Clause"
] | null | null | null |
src/rqt_py_trees_creator/behaviour_tree.py
|
naveedhd/rqt_py_trees_creator
|
65afa438cb5fa757da7af7f5ab8ec6cb549d3299
|
[
"BSD-3-Clause"
] | null | null | null |
src/rqt_py_trees_creator/behaviour_tree.py
|
naveedhd/rqt_py_trees_creator
|
65afa438cb5fa757da7af7f5ab8ec6cb549d3299
|
[
"BSD-3-Clause"
] | null | null | null |
##############################################################################
# Documentation
##############################################################################
##############################################################################
# Imports
##############################################################################
from python_qt_binding.QtCore import QObject
##############################################################################
# Classes
##############################################################################
class RosBehaviourTreeCreator(QObject):
    """QObject behind the rqt py_trees creator plugin."""

    # Command-line switch recognised by the plugin (presumably disables the
    # roscore requirement — no handling of it is visible in this class).
    no_roscore_switch = "--no-roscore"

    def __init__(self, context):
        # NOTE(review): `context` is forwarded to QObject.__init__, which
        # expects a parent QObject — confirm this is the intended usage for
        # the rqt plugin context.
        super(RosBehaviourTreeCreator, self).__init__(context)

    @staticmethod
    def add_arguments(parser, group=True):
        """Placeholder for registering command-line arguments; currently a no-op."""
        pass
| 35.173913
| 78
| 0.301607
| 35
| 809
| 6.6
| 0.771429
| 0.077922
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084054
| 809
| 23
| 79
| 35.173913
| 0.311741
| 0.035847
| 0
| 0
| 0
| 0
| 0.038835
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.125
| 0.125
| 0
| 0.625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
306b77512784011090345c6185d5130f2e067619
| 62
|
py
|
Python
|
pyscript/__init__.py
|
sumanta23/pyscript
|
466cb24733290a04d757869e38039cd286d6b9f2
|
[
"Apache-2.0"
] | null | null | null |
pyscript/__init__.py
|
sumanta23/pyscript
|
466cb24733290a04d757869e38039cd286d6b9f2
|
[
"Apache-2.0"
] | null | null | null |
pyscript/__init__.py
|
sumanta23/pyscript
|
466cb24733290a04d757869e38039cd286d6b9f2
|
[
"Apache-2.0"
] | null | null | null |
from .pyscript import (open, close)
from .exceptions import *
| 20.666667
| 35
| 0.758065
| 8
| 62
| 5.875
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145161
| 62
| 2
| 36
| 31
| 0.886792
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
3080f6f8f1567b42d1968c70b36f9b5b4f6a7583
| 31
|
py
|
Python
|
Lib/test/test_importlib/namespace_pkgs/both_portions/foo/one.py
|
shawwn/cpython
|
0ff8a3b374286d2218fc18f47556a5ace202dad3
|
[
"0BSD"
] | 52,316
|
2015-01-01T15:56:25.000Z
|
2022-03-31T23:19:01.000Z
|
Lib/test/test_importlib/namespace_pkgs/both_portions/foo/one.py
|
shawwn/cpython
|
0ff8a3b374286d2218fc18f47556a5ace202dad3
|
[
"0BSD"
] | 25,286
|
2015-03-03T23:18:02.000Z
|
2022-03-31T23:17:27.000Z
|
Lib/test/test_importlib/namespace_pkgs/both_portions/foo/one.py
|
shawwn/cpython
|
0ff8a3b374286d2218fc18f47556a5ace202dad3
|
[
"0BSD"
] | 31,623
|
2015-01-01T13:29:37.000Z
|
2022-03-31T19:55:06.000Z
|
# Marker attribute: lets the importlib namespace-package tests check which
# portion of the namespace package supplied this module.
attr = 'both_portions foo one'
| 15.5
| 30
| 0.741935
| 5
| 31
| 4.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 31
| 1
| 31
| 31
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0.677419
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
0635f1135c09c196abb4b03f2b7c19e9cdf6fa74
| 2,542
|
py
|
Python
|
sensor/accelerometer.py
|
lsx137946009/pawo
|
69cef1797fe1971d87c59cb8c5c167089fa66ecd
|
[
"MIT"
] | null | null | null |
sensor/accelerometer.py
|
lsx137946009/pawo
|
69cef1797fe1971d87c59cb8c5c167089fa66ecd
|
[
"MIT"
] | null | null | null |
sensor/accelerometer.py
|
lsx137946009/pawo
|
69cef1797fe1971d87c59cb8c5c167089fa66ecd
|
[
"MIT"
] | 1
|
2019-05-08T05:50:13.000Z
|
2019-05-08T05:50:13.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 24 17:49:42 2020
@author: sixingliu, yaruchen
"""
from sensorpowa.core.frame import SensorFrame
from sensorpowa.core.series import SensorSeries
import numpy as np
class _SensorACM(SensorFrame):
    """
    The biosensor module also contains a three dimensional accelerometer
    for measurements of physical activity. A microcontroller digitizes the
    analog signals via a 12-bit A-D and the data is written to an onboard
    microSD card.
    # TODO: move to clinlic module
    - Seizure:
    Generalized tonic-clonic (GTC) seizures are composed of two primary phases
    -- the tonic phase and the clonic phase.
    The tonic phase involves stiffening of the limbs and flexion or extension of
    the neck, back and extremities. During the clonic phase, muscles of the entire
    body start to contract and relax rapidly. These convulsions are manifest in the
    ACM signal as rhythmic activity typically above 2 Hz.
    Thus, each epoch was evaluated for important periods using an algorithm by
    Vlachos and colleagues (Vlachos et al., 2004). The underlying assumption is
    that the magnitudes of the coefficients of the DFT of a non-periodic time
    series are distributed according to an exponential distribution.
    """

    def to_magnitude(self):
        """
        Combine information from all three axes of the accelerometer to
        calculate the magnitude of the net acceleration.

        Returns
        -------
        SensorSeries
            Per-sample Euclidean (L2) norm across axes, column-shaped.
        """
        # NOTE(review): np.mat is deprecated in modern NumPy; kept because
        # downstream code may rely on the matrix interface.
        data = np.mat(self.vals)
        magnitude = np.linalg.norm(data, ord=2, axis=1, keepdims=True)
        return SensorSeries(magnitude)

    def pipeline(self, frequency, *pipe):
        """
        Apply each filter in *pipe* to this frame in order.

        Parameters
        ----------
        frequency : TYPE
            DESCRIPTION. (Currently unused by this method; presumably
            consumed by the filters — TODO confirm.)
        *pipe
            Objects exposing a transform(frame) method.

        Returns
        -------
        The frame produced by the last transform (self if pipe is empty).
        """
        for filter_pipe in pipe:
            # Each transform returns the next frame in the chain.
            self = filter_pipe.transform(self)
        return self

    def indicator_activate(self, window=None):
        """
        Return True when the signal variability exceeds a fixed threshold.

        Returns
        -------
        bool
            True if the standard deviation of the sensor values is > 0.1.
        """
        # BUG FIX: the original assigned the np.std *function* itself
        # (indicator = np.std) and then compared it with 0.1, which raises
        # TypeError at runtime. Compute the standard deviation of the
        # sensor values instead.
        # TODO(review): confirm whether `window` should restrict the values
        # considered — it is currently unused, as in the original.
        indicator = float(np.std(self.vals))
        return indicator > 0.1

    def feature_timedomain(self, window=None):
        """Placeholder: time-domain features not implemented yet."""
        pass

    def feature_freqdomain(self, window=None):
        """Placeholder: frequency-domain features not implemented yet."""
        pass

    def feature_nonlinear(self, window=None):
        """Placeholder: non-linear features not implemented yet."""
        pass
| 28.886364
| 88
| 0.609363
| 295
| 2,542
| 5.223729
| 0.589831
| 0.022713
| 0.03634
| 0.035042
| 0.03634
| 0.03634
| 0
| 0
| 0
| 0
| 0
| 0.013945
| 0.322974
| 2,542
| 88
| 89
| 28.886364
| 0.881464
| 0.540126
| 0
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011364
| 0
| 1
| 0.25
| false
| 0.125
| 0.125
| 0
| 0.583333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
06538a679f8fc1ac55195420095673712ea9d9ef
| 461
|
py
|
Python
|
mybitbank/libs/connections/models.py
|
zonedoutspace/mybitbank
|
85d28726117a3c1ca76be5772d30c9edae1df7f4
|
[
"MIT"
] | null | null | null |
mybitbank/libs/connections/models.py
|
zonedoutspace/mybitbank
|
85d28726117a3c1ca76be5772d30c9edae1df7f4
|
[
"MIT"
] | null | null | null |
mybitbank/libs/connections/models.py
|
zonedoutspace/mybitbank
|
85d28726117a3c1ca76be5772d30c9edae1df7f4
|
[
"MIT"
] | 2
|
2016-06-13T19:57:30.000Z
|
2018-09-15T21:03:45.000Z
|
from django.db import models
class CurrencyService(models.Model):
    '''
    Connection settings for a currency service RPC endpoint.

    NOTE(review): the original docstring said "model for address aliases",
    which does not match the fields below (RPC credentials/host/port) —
    confirm against callers.
    '''
    # Display name and service type identifier.
    # NOTE: `type` shadows the builtin; renaming it would change the DB column.
    name = models.CharField(max_length=200)
    type = models.CharField(max_length=200)
    # RPC credentials and endpoint location.
    rpcusername = models.CharField(max_length=200)
    rpcpassword = models.CharField(max_length=200)
    rpchost = models.CharField(max_length=200)
    rpcport = models.CharField(max_length=200)
    # Record timestamp (verbose name 'date published').
    entered = models.DateTimeField('date published')
| 27.117647
| 52
| 0.711497
| 54
| 461
| 5.962963
| 0.462963
| 0.279503
| 0.335404
| 0.447205
| 0.503106
| 0
| 0
| 0
| 0
| 0
| 0
| 0.048
| 0.186551
| 461
| 16
| 53
| 28.8125
| 0.810667
| 0.05423
| 0
| 0
| 0
| 0
| 0.033333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.111111
| 0.111111
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
0680715fb87395cc84968553ece1b783529d1128
| 136
|
py
|
Python
|
apps.py
|
klml/kohrsupply
|
89ae3ebae120398e8259dbe77c3b092485cc79f9
|
[
"MIT"
] | null | null | null |
apps.py
|
klml/kohrsupply
|
89ae3ebae120398e8259dbe77c3b092485cc79f9
|
[
"MIT"
] | 9
|
2017-06-15T10:24:09.000Z
|
2018-08-01T21:07:25.000Z
|
apps.py
|
klml/kohrsupply
|
89ae3ebae120398e8259dbe77c3b092485cc79f9
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals
from django.apps import AppConfig
class kohrsupplyConfig(AppConfig):
    """Django application configuration for the 'kohrsupply' app."""
    # NOTE(review): class name breaks PascalCase, but renaming would break
    # any dotted references (e.g. in INSTALLED_APPS) — left as-is.
    name = 'kohrsupply'
| 17
| 39
| 0.801471
| 15
| 136
| 6.933333
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147059
| 136
| 7
| 40
| 19.428571
| 0.896552
| 0
| 0
| 0
| 0
| 0
| 0.073529
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
0684a2e1b8f893688d768f1d9ec0e580244854a6
| 995
|
py
|
Python
|
tests/python/suds/testcases/test_add_string_lists.py
|
stepank/pyws
|
ff39133aabeb56bbb08d66286ac0cc8731eda7dd
|
[
"MIT"
] | 11
|
2015-01-19T15:43:46.000Z
|
2022-03-13T08:23:01.000Z
|
tests/python/suds/testcases/test_add_string_lists.py
|
stepank/pyws
|
ff39133aabeb56bbb08d66286ac0cc8731eda7dd
|
[
"MIT"
] | 1
|
2015-01-21T07:31:35.000Z
|
2015-02-08T17:52:35.000Z
|
tests/python/suds/testcases/test_add_string_lists.py
|
stepank/pyws
|
ff39133aabeb56bbb08d66286ac0cc8731eda7dd
|
[
"MIT"
] | 11
|
2015-01-19T15:43:49.000Z
|
2022-03-04T10:11:24.000Z
|
import unittest2 as unittest
from testcases.base import BaseTestCaseMixin
class AddStringListsTestCase(BaseTestCaseMixin, unittest.TestCase):
    """Exercise the add_string_lists service (element-wise concatenation)."""

    def _make_list(self, items):
        """Build a types:StringList whose item attribute is *items*."""
        string_list = self.factory.create('types:StringList')
        string_list.item = items
        return string_list

    def test_empty(self):
        # NOTE(review): the original also had no assertion here — this only
        # verifies that the call does not raise on two empty lists.
        res = self.service.add_string_lists(
            self._make_list([]), self._make_list([]))

    def test_simple(self):
        res = self.service.add_string_lists(
            self._make_list(['a', 'b', 'c']), self._make_list(['d', 'e', 'f']))
        self.assertEqual(res.item, ['ad', 'be', 'cf'])

    def test_diff_size(self):
        # Extra elements of the longer list pass through unpaired.
        res = self.service.add_string_lists(
            self._make_list(['a', 'b', 'c']),
            self._make_list(['d', 'e', 'f', 'g', 'h']))
        self.assertEqual(res.item, ['ad', 'be', 'cf', 'g', 'h'])
| 33.166667
| 67
| 0.579899
| 129
| 995
| 4.395349
| 0.325581
| 0.116402
| 0.179894
| 0.232804
| 0.70194
| 0.70194
| 0.70194
| 0.70194
| 0.582011
| 0.507937
| 0
| 0.001323
| 0.240201
| 995
| 29
| 68
| 34.310345
| 0.748677
| 0
| 0
| 0.478261
| 0
| 0
| 0.124623
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 1
| 0.130435
| false
| 0
| 0.086957
| 0
| 0.26087
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
068b20cb67a85a4da7833a4aa77d3313453d6f0c
| 156
|
py
|
Python
|
bat/pyinst_hooks_copies/hook-grpc.py
|
konsan1101/pyRiKi1
|
687061ce09889ec91c1c3c11df62f4cfcb3d9613
|
[
"MIT"
] | null | null | null |
bat/pyinst_hooks_copies/hook-grpc.py
|
konsan1101/pyRiKi1
|
687061ce09889ec91c1c3c11df62f4cfcb3d9613
|
[
"MIT"
] | 1
|
2020-11-06T04:36:33.000Z
|
2020-11-06T04:36:33.000Z
|
bat/pyinst_hooks_copies/hook-grpc.py
|
konsan1101/pyRiKi1
|
687061ce09889ec91c1c3c11df62f4cfcb3d9613
|
[
"MIT"
] | null | null | null |
# https://github.com/googleapis/google-cloud-python/issues/5774
# PyInstaller hook: collect grpc's non-Python package data files so the
# frozen application can still load them at runtime.
from PyInstaller.utils.hooks import collect_data_files

datas = collect_data_files('grpc')
| 26
| 63
| 0.814103
| 22
| 156
| 5.590909
| 0.863636
| 0.178862
| 0.260163
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027586
| 0.070513
| 156
| 5
| 64
| 31.2
| 0.82069
| 0.391026
| 0
| 0
| 0
| 0
| 0.043478
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
06a5cf5383ab7affc4ef37381f9df5c36cf96567
| 93
|
py
|
Python
|
main.py
|
GageMac/Stock-search-and-linear-regression
|
1eacae4d06d715cb605bbeba93b1fe9b67fbad01
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
GageMac/Stock-search-and-linear-regression
|
1eacae4d06d715cb605bbeba93b1fe9b67fbad01
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
GageMac/Stock-search-and-linear-regression
|
1eacae4d06d715cb605bbeba93b1fe9b67fbad01
|
[
"Apache-2.0"
] | null | null | null |
from bs4 import BeautifulSoup
import requests

# NOTE(review): this HTTP GET runs at import time; the response is bound to
# `r` but never used in this chunk, and BeautifulSoup is imported without
# being used here — presumably parsing happens in code outside this view.
r=requests.get("https://finance.yahoo.com")
| 13.285714
| 43
| 0.774194
| 13
| 93
| 5.538462
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012048
| 0.107527
| 93
| 6
| 44
| 15.5
| 0.855422
| 0
| 0
| 0
| 0
| 0
| 0.274725
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
06a6fa0dd8d9336fcac402fc61902df83f1bb0ee
| 216
|
py
|
Python
|
app/app/api/domain/services/data/command/IUserCommandRepository.py
|
GPortas/Playgroundb
|
60f98a4dd62ce34fbb8abfa0d9ee63697e82c57e
|
[
"Apache-2.0"
] | 1
|
2019-01-30T19:59:20.000Z
|
2019-01-30T19:59:20.000Z
|
app/app/api/domain/services/data/command/IUserCommandRepository.py
|
GPortas/Playgroundb
|
60f98a4dd62ce34fbb8abfa0d9ee63697e82c57e
|
[
"Apache-2.0"
] | null | null | null |
app/app/api/domain/services/data/command/IUserCommandRepository.py
|
GPortas/Playgroundb
|
60f98a4dd62ce34fbb8abfa0d9ee63697e82c57e
|
[
"Apache-2.0"
] | null | null | null |
class IUserCommandRepository:
    """Interface stub for write-side (command) operations on users.

    All methods are no-ops here; implementations are expected to override
    them.
    """

    def create_user(self, user):
        """Hook for persisting a new user; no-op in this interface."""

    def update_user_auth_token(self, user_id, auth_token):
        """Hook for replacing a user's auth token; no-op in this interface."""

    def increment_user_score(self, user_id, score):
        """Hook for adding to a user's score; no-op in this interface."""
| 19.636364
| 58
| 0.675926
| 28
| 216
| 4.892857
| 0.464286
| 0.175182
| 0.145985
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25463
| 216
| 10
| 59
| 21.6
| 0.850932
| 0
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0.428571
| 0
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
23085cfa5e8e54bc5fd2ca13aecd61cc26441d49
| 72
|
py
|
Python
|
library.py
|
kartikeyshaurya/100-days-of-code
|
47215768878700942c367fe0c2158106559fdd67
|
[
"Apache-2.0"
] | null | null | null |
library.py
|
kartikeyshaurya/100-days-of-code
|
47215768878700942c367fe0c2158106559fdd67
|
[
"Apache-2.0"
] | 9
|
2020-01-28T23:09:24.000Z
|
2022-03-12T00:06:30.000Z
|
library.py
|
kartikeyshaurya/100-days-of-code
|
47215768878700942c367fe0c2158106559fdd67
|
[
"Apache-2.0"
] | null | null | null |
# this file is created for notebook no 16
class Base:
    # Runs once, at class-definition time (i.e. on import) — not per instance.
    print("hello")
| 18
| 40
| 0.708333
| 12
| 72
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035088
| 0.208333
| 72
| 4
| 41
| 18
| 0.859649
| 0.541667
| 0
| 0
| 0
| 0
| 0.151515
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
231e53f74d4d0d4e5d00db912bbb23009df6757d
| 73
|
py
|
Python
|
src/cobra/apps/organization/__init__.py
|
lyoniionly/django-cobra
|
2427e5cf74b7739115b1224da3306986b3ee345c
|
[
"Apache-2.0"
] | 1
|
2015-01-27T08:56:46.000Z
|
2015-01-27T08:56:46.000Z
|
src/cobra/apps/organization/__init__.py
|
lyoniionly/django-cobra
|
2427e5cf74b7739115b1224da3306986b3ee345c
|
[
"Apache-2.0"
] | null | null | null |
src/cobra/apps/organization/__init__.py
|
lyoniionly/django-cobra
|
2427e5cf74b7739115b1224da3306986b3ee345c
|
[
"Apache-2.0"
] | null | null | null |
# Points Django at this app's AppConfig class (legacy default_app_config
# mechanism).
default_app_config = 'cobra.apps.organization.config.OrganizationConfig'
| 36.5
| 72
| 0.863014
| 8
| 73
| 7.625
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041096
| 73
| 1
| 73
| 73
| 0.871429
| 0
| 0
| 0
| 0
| 0
| 0.671233
| 0.671233
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
232940ac146d6c3893543a310f433aad86e155e1
| 23
|
py
|
Python
|
tests/__init__.py
|
yaml2sbml-dev/yaml2sbml
|
43ef3cb478ef30cfc0f1f4e232499824baf20598
|
[
"MIT"
] | 13
|
2021-01-09T13:48:22.000Z
|
2022-01-18T09:12:21.000Z
|
tests/__init__.py
|
yaml2sbml-dev/yaml2sbml
|
43ef3cb478ef30cfc0f1f4e232499824baf20598
|
[
"MIT"
] | 103
|
2020-09-05T09:07:32.000Z
|
2021-08-16T13:52:44.000Z
|
tests/__init__.py
|
yaml2sbml-dev/yaml2sbml
|
43ef3cb478ef30cfc0f1f4e232499824baf20598
|
[
"MIT"
] | 4
|
2020-09-27T17:21:12.000Z
|
2021-10-20T19:41:17.000Z
|
"""yaml2sbml tests."""
| 11.5
| 22
| 0.608696
| 2
| 23
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047619
| 0.086957
| 23
| 1
| 23
| 23
| 0.619048
| 0.695652
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
23415ade45ff9bd9767288e0a1574bf893d6e78f
| 391
|
py
|
Python
|
gsmodutils/__init__.py
|
SBRCNottingham/gsmodutils
|
56540119043b91be9b4fd066c9ca7a12dd3e9f4f
|
[
"Apache-2.0"
] | 12
|
2018-09-25T21:44:06.000Z
|
2022-03-16T21:27:57.000Z
|
gsmodutils/__init__.py
|
SBRCNottingham/gsmodutils
|
56540119043b91be9b4fd066c9ca7a12dd3e9f4f
|
[
"Apache-2.0"
] | 11
|
2018-09-26T10:38:14.000Z
|
2019-10-13T22:59:35.000Z
|
gsmodutils/__init__.py
|
SBRCNottingham/gsmodutils
|
56540119043b91be9b4fd066c9ca7a12dd3e9f4f
|
[
"Apache-2.0"
] | 1
|
2018-09-25T21:44:21.000Z
|
2018-09-25T21:44:21.000Z
|
from __future__ import absolute_import, print_function
from gsmodutils.project.interface import GSMProject
from gsmodutils.project.project_config import ProjectConfig
from gsmodutils.project.design import StrainDesign
from gsmodutils.project.model import GSModutilsModel
from gsmodutils.utils.io import load_model
import logging
# Package-level logger for gsmodutils.
logger = logging.getLogger(__name__)

# Package version string.
__version__ = '0.0.4'
| 30.076923
| 59
| 0.856777
| 49
| 391
| 6.510204
| 0.530612
| 0.219436
| 0.263323
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008451
| 0.092072
| 391
| 12
| 60
| 32.583333
| 0.890141
| 0
| 0
| 0
| 0
| 0
| 0.012788
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.777778
| 0
| 0.777778
| 0.111111
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
234f7967875a6606958f99375df1e17d1ba1dd03
| 53
|
py
|
Python
|
python/testData/editing/sectionIndentInsideGoogleDocString.after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/editing/sectionIndentInsideGoogleDocString.after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/editing/sectionIndentInsideGoogleDocString.after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
# NOTE(review): IntelliJ "after" test fixture — the expected editor state for
# section-indent handling inside a Google-style docstring; its exact content
# is part of the test, so the code below is intentionally untouched.
def f(param):
    """
    Args:
        param
    """
| 10.6
| 13
| 0.339623
| 5
| 53
| 3.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.471698
| 53
| 5
| 14
| 10.6
| 0.642857
| 0.283019
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| false
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
236e95d3b8906ca765d43b4de186e56f792a97df
| 3,112
|
py
|
Python
|
tests/simulation_data.py
|
bjrnfrdnnd/panel-test
|
4609a259e749825b2a2012d8a7e48ed8e8a78deb
|
[
"MIT"
] | null | null | null |
tests/simulation_data.py
|
bjrnfrdnnd/panel-test
|
4609a259e749825b2a2012d8a7e48ed8e8a78deb
|
[
"MIT"
] | 1
|
2019-07-26T22:12:19.000Z
|
2019-10-31T17:48:51.000Z
|
tests/simulation_data.py
|
bjrnfrdnnd/panel-test
|
4609a259e749825b2a2012d8a7e48ed8e8a78deb
|
[
"MIT"
] | 1
|
2019-09-19T11:54:45.000Z
|
2019-09-19T11:54:45.000Z
|
import numpy as np
def spin3():
    """Shifts and symmetric J-coupling matrix for a 3-spin test system."""
    v = np.array([115, 140, 190])
    couplings = {(0, 1): 6, (0, 2): 12, (1, 2): 3}
    J = np.zeros((3, 3))
    for (row, col), value in couplings.items():
        J[row, col] = value
    J = J + J.T  # mirror the upper triangle to make J symmetric
    return v, J
def spin8():
    """Shifts and symmetric J-coupling matrix for an 8-spin test system."""
    v = np.array([85, 120, 160, 185, 205, 215, 235, 260])
    # Note: a previous version used a scipy lil_matrix for J, but hamiltonian
    # gave a dimension mismatch; a plain numpy array works.  Pairs absent
    # from the table below have zero coupling.
    couplings = {
        (0, 1): -12, (0, 2): 6, (0, 3): 2,
        (1, 4): 14, (1, 7): 3,
        (2, 5): 3,
        (3, 5): 5,
        (4, 5): 2,
        (6, 7): 12,
    }
    J = np.zeros((8, 8))
    for (row, col), value in couplings.items():
        J[row, col] = value
    J = J + J.T  # mirror the upper triangle to make J symmetric
    return v, J
def spin11():
    """Fox() pared down by 1 nuclei, for testing spin-11 system."""
    # memory error crash on 12 nuclei (on my home PC). Hashing out nuclei 10/11 data to reduce it to a
    # 10- or 11-nuclei test case works.
    # NOTE(review): despite the docstring, this body appears byte-identical to
    # fox() below (same 11 shifts, same couplings) — confirm intended
    # difference.
    v = np.array([1.63, 1.63, 2.2, 2.2, 2.5, 2.5, 2.5, 2.5, 2.5,
                  2.5,
                  5.71 #,
                  # 5.77
                  ]) * 400
    J = np.zeros((len(v), len(v)))
    J[0, 1] = -12
    J[0, 2] = 1
    J[0, 3] = 10
    J[0, 8] = 1
    J[0, 9] = 8.5
    J[1, 2] = 10
    J[1, 3] = 1
    J[1, 8] = 8.5
    J[1, 9] = 1
    J[2, 3] = -12
    J[2, 10] = 9
    J[3, 10] = 8.5
    J[4, 5] = -12
    J[4, 6] = 3.5
    J[4, 7] = 7
    # J[4, 11] = 8.5
    J[5, 6] = 7
    J[5, 7] = 3.5
    # J[5, 11] = 7.5
    J[6, 7] = -12
    # Mirror the upper triangle to make J symmetric.
    J = J + J.T
    return v, J
def fox():
    """Estimated parameters for Joe Fox's cyclooct-4-enone spectrum.

    Originally a 12-nuclei stress test; nuclei 10/11 were removed after a
    memory-error crash on a home PC, leaving an 11-nucleus case.
    """
    shifts = [1.63, 1.63, 2.2, 2.2, 2.5, 2.5, 2.5, 2.5, 2.5,
              2.5,
              5.71]  # the 12th shift (5.77) stays disabled
    v = np.array(shifts) * 400
    couplings = {
        (0, 1): -12, (0, 2): 1, (0, 3): 10, (0, 8): 1, (0, 9): 8.5,
        (1, 2): 10, (1, 3): 1, (1, 8): 8.5, (1, 9): 1,
        (2, 3): -12, (2, 10): 9,
        (3, 10): 8.5,
        (4, 5): -12, (4, 6): 3.5, (4, 7): 7,   # (4, 11): 8.5 disabled
        (5, 6): 7, (5, 7): 3.5,                # (5, 11): 7.5 disabled
        (6, 7): -12,
    }
    J = np.zeros((len(v), len(v)))
    for (row, col), value in couplings.items():
        J[row, col] = value
    J = J + J.T  # mirror the upper triangle to make J symmetric
    return v, J
def rioux():
    """ABC 3-spin parameters.

    Source: http://www.users.csbsju.edu/~frioux/nmr/ABC-NMR-Tensor.pdf

    Returns
    -------
    tuple of (ndarray, ndarray)
        Chemical shifts v and the symmetric coupling matrix J.
    """
    v = np.array([430.0, 265.0, 300.0])
    # Symmetric matrix written out directly instead of filling the upper
    # triangle and adding the transpose.
    J = np.array([
        [0.0, 7.0, 15.0],
        [7.0, 0.0, 1.5],
        [15.0, 1.5, 0.0],
    ])
    return v, J
# def ethyl_propionate():
# v = np.array([1651.3, 926.9, 503.2, 455.6])
# J = np.zeros((4, 4))
# J[0, 2] = 7.1
# J[1, 3] = 7.6
# J = J + J.T
# return v, J
| 21.315068
| 115
| 0.40392
| 636
| 3,112
| 1.97327
| 0.193396
| 0.03506
| 0.023904
| 0.031873
| 0.498805
| 0.498805
| 0.498805
| 0.482869
| 0.438247
| 0.438247
| 0
| 0.229266
| 0.39171
| 3,112
| 146
| 116
| 21.315068
| 0.433703
| 0.367931
| 0
| 0.710843
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060241
| false
| 0
| 0.012048
| 0
| 0.13253
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
2378d32d00af2ff7c748c07d83c6ae3dec75493c
| 57
|
py
|
Python
|
py/start/helloworld2.py
|
zhongwei/ztodo
|
fef4f24e65fb8d571c6c13e6f82d842023e7a8e1
|
[
"CC0-1.0"
] | 1
|
2015-09-22T08:28:27.000Z
|
2015-09-22T08:28:27.000Z
|
py/start/helloworld2.py
|
zhongwei/ztodo
|
fef4f24e65fb8d571c6c13e6f82d842023e7a8e1
|
[
"CC0-1.0"
] | null | null | null |
py/start/helloworld2.py
|
zhongwei/ztodo
|
fef4f24e65fb8d571c6c13e6f82d842023e7a8e1
|
[
"CC0-1.0"
] | null | null | null |
#!/usr/bin/env python
# encoding: utf-8

# Fix: the bare `print "..."` statement is Python-2-only syntax (a
# SyntaxError on Python 3).  The parenthesized single-argument form prints
# the same string on both Python 2 and 3; the redundant semicolon is dropped.
print("你好,世界!")
| 11.4
| 21
| 0.631579
| 10
| 57
| 3.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020408
| 0.140351
| 57
| 4
| 22
| 14.25
| 0.714286
| 0.631579
| 0
| 0
| 0
| 0
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
237edea0e2b8fd917195ba4d261f5a11be560fe6
| 58
|
py
|
Python
|
server/main.py
|
AP-Atul/Gamepad
|
5b332f88e2b00d2fcbd697100f5eba9d44f10b21
|
[
"MIT"
] | null | null | null |
server/main.py
|
AP-Atul/Gamepad
|
5b332f88e2b00d2fcbd697100f5eba9d44f10b21
|
[
"MIT"
] | null | null | null |
server/main.py
|
AP-Atul/Gamepad
|
5b332f88e2b00d2fcbd697100f5eba9d44f10b21
|
[
"MIT"
] | null | null | null |
from server import server

# Entry point: start the project's server with debugging disabled.
# NOTE(review): runs at import time — no __main__ guard.
server.startServer(debug=False)
| 19.333333
| 31
| 0.844828
| 8
| 58
| 6.125
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086207
| 58
| 2
| 32
| 29
| 0.924528
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
88ff10607b9623b56c6fefad5ff3e6328da0b8cd
| 264
|
py
|
Python
|
src/ufdl/json/core/jobs/meta/__init__.py
|
waikato-ufdl/ufdl-json-messages
|
408901bdf79aa9ae7cff1af165deee83e62f6088
|
[
"Apache-2.0"
] | null | null | null |
src/ufdl/json/core/jobs/meta/__init__.py
|
waikato-ufdl/ufdl-json-messages
|
408901bdf79aa9ae7cff1af165deee83e62f6088
|
[
"Apache-2.0"
] | null | null | null |
src/ufdl/json/core/jobs/meta/__init__.py
|
waikato-ufdl/ufdl-json-messages
|
408901bdf79aa9ae7cff1af165deee83e62f6088
|
[
"Apache-2.0"
] | null | null | null |
"""
Package defining the JSON structures for specifying the structure of
meta-templates, which coordinate the execution of child templates in
a workflow.
"""
from ._Dependency import Dependency
from ._DependencyGraph import DependencyGraph
from ._Node import Node
| 29.333333
| 68
| 0.82197
| 34
| 264
| 6.294118
| 0.676471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132576
| 264
| 8
| 69
| 33
| 0.934498
| 0.564394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
0007c657d0be908c6c9b4e7583fdf25335864863
| 1,478
|
py
|
Python
|
tests/test_kv.py
|
lablup/etcetra
|
d734a73e6442452d84e6f6ab18945712429fd814
|
[
"Apache-2.0"
] | 3
|
2022-03-22T02:40:02.000Z
|
2022-03-25T07:08:58.000Z
|
tests/test_kv.py
|
lablup/etcetra
|
d734a73e6442452d84e6f6ab18945712429fd814
|
[
"Apache-2.0"
] | 3
|
2022-03-15T11:32:52.000Z
|
2022-03-21T18:16:35.000Z
|
tests/test_kv.py
|
lablup/etcetra
|
d734a73e6442452d84e6f6ab18945712429fd814
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from etcetra import EtcdClient
@pytest.mark.asyncio
async def test_put(etcd: EtcdClient):
    """put() stores a value, and a second put() to the same key overwrites it."""
    async with etcd.connect() as communicator:
        await communicator.put('/test/foo', 'bar')
        assert (await communicator.get('/test/foo')) == 'bar'
        await communicator.put('/test/foo', 'baz')
        assert (await communicator.get('/test/foo')) == 'baz'
@pytest.mark.asyncio
async def test_get(etcd: EtcdClient):
    """Check get/get_prefix/keys_prefix semantics and deletion."""
    async with etcd.connect() as communicator:
        await communicator.put('/test/foo', 'bar')
        await communicator.put('/test/foo/bar', 'asd')
        await communicator.put('/test/foo/baz', '1234')
        await communicator.put('/test/foo/baz/qwe', 'rty')
        assert (await communicator.get('/test/foo')) == 'bar'
        # Missing keys yield None rather than raising.
        assert (await communicator.get('/test/somewhatnonexistingkey')) is None
        # A prefix without a trailing slash matches the key itself as well.
        assert (await communicator.get_prefix('/test/foo/baz')) == {
            '/test/foo/baz': '1234',
            '/test/foo/baz/qwe': 'rty',
        }
        # A trailing slash restricts the match to strict children.
        assert (await communicator.get_prefix('/test/foo/baz/')) == {
            '/test/foo/baz/qwe': 'rty',
        }
        assert (await communicator.keys_prefix('/test/foo')) == [
            '/test/foo', '/test/foo/bar', '/test/foo/baz', '/test/foo/baz/qwe',
        ]
        await communicator.delete('/test/foo/baz/qwe')
        # The deleted key is no longer listed.
        assert (await communicator.keys_prefix('/test/foo')) == [
            '/test/foo', '/test/foo/bar', '/test/foo/baz',
        ]
| 34.372093
| 79
| 0.593369
| 176
| 1,478
| 4.948864
| 0.181818
| 0.192882
| 0.149254
| 0.165327
| 0.858783
| 0.858783
| 0.690011
| 0.675086
| 0.518944
| 0.452354
| 0
| 0.006987
| 0.225304
| 1,478
| 42
| 80
| 35.190476
| 0.753712
| 0
| 0
| 0.375
| 0
| 0
| 0.242896
| 0.018945
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0
| true
| 0
| 0.0625
| 0
| 0.0625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
00161fb09ab208e826a7b85648aa59a6287f54f7
| 162
|
py
|
Python
|
TimeTracker/calendarpage/apps.py
|
TreasonableShorebirds/TimeTracker
|
8f0593dbe976fab4d510a378919bef71cb56d1ee
|
[
"MIT"
] | null | null | null |
TimeTracker/calendarpage/apps.py
|
TreasonableShorebirds/TimeTracker
|
8f0593dbe976fab4d510a378919bef71cb56d1ee
|
[
"MIT"
] | 1
|
2019-01-16T18:12:42.000Z
|
2019-01-16T18:12:42.000Z
|
TimeTracker/calendarpage/apps.py
|
TreasonableShorebirds/TimeTracker
|
8f0593dbe976fab4d510a378919bef71cb56d1ee
|
[
"MIT"
] | null | null | null |
"""
Calendar configuration
"""
from django.apps import AppConfig
class CalendarpageConfig(AppConfig):
    """Name the calendar app (Django AppConfig for 'calendarpage')."""
    name = 'calendarpage'
| 16.2
| 36
| 0.716049
| 16
| 162
| 7.25
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 162
| 9
| 37
| 18
| 0.859259
| 0.271605
| 0
| 0
| 0
| 0
| 0.114286
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
0016c8163df27b71550d2c4f262ee1fbd8165573
| 14,155
|
py
|
Python
|
src/view/util/parsingUtil.py
|
struts2spring/sql-editor
|
082868dd92cbd8f0f6715f734f9ebe64032cbe4a
|
[
"MIT"
] | 9
|
2018-10-15T04:57:37.000Z
|
2021-12-07T07:39:35.000Z
|
src/view/util/parsingUtil.py
|
struts2spring/sql-editor
|
082868dd92cbd8f0f6715f734f9ebe64032cbe4a
|
[
"MIT"
] | 13
|
2018-10-19T11:52:44.000Z
|
2021-09-08T00:39:30.000Z
|
src/view/util/parsingUtil.py
|
struts2spring/sql-editor
|
082868dd92cbd8f0f6715f734f9ebe64032cbe4a
|
[
"MIT"
] | 3
|
2018-10-25T11:08:04.000Z
|
2021-02-23T08:28:31.000Z
|
import re
import logging.config
from src.view.constants import LOG_SETTINGS
logger = logging.getLogger('extensive')
logging.config.dictConfig(LOG_SETTINGS)
class SqlParser():
def __init__(self):
pass
def createSqlToDict(self, createSql=None):
# https://www.debuggex.com/
# https://pythex.org/
# logger.debug(createSql)
# pattern='''(\s*CREATE TABLE\s*)("?\w+"?)\s+(\(\s+((("?\w+"?)*\s*(UNIQUE\s+(\(\w+\))|PRIMARY KEY\s+(\(\w+\))|FLOAT|DATETIME|INTEGER\s*(NOT NULL)*|VARCHAR\(\d*\)),?\s+))*\s*\))(\s*;?)'''
# pattern='''(\s*CREATE TABLE\s*)("?\w+"?)\s+(\(\s+((("?\w+"?)*\s*(UNIQUE\s+(\(\w+\))|PRIMARY KEY\s+(\(\w+\))|FLOAT|DATETIME|INTEGER\s*(NOT NULL)*|VARCHAR\(\d*\)),?\s+))*\s*\))(\s*;?)'''
# pattern='''(\s*CREATE TABLE\s*)("?\w+"?)\s+(\(\s+("?\w+"?)\s+((UNIQUE\s+(\(\w+\))|PRIMARY KEY\s+(\(\w+\))|FLOAT|DATETIME|INTEGER\s*(NOT NULL)*|VARCHAR\(\d*\))))'''
pattern = '''\s*(CREATE TABLE)\s+("?\w+"?)\s+\(\s+(("?\w+"?\s+(INTEGER|FLOAT|DATETIME|VARCHAR\(\d*\))\s*(NOT NULL|PRIMARY KEY)?,?\s*)*)\s+(((\s*(PRIMARY KEY|UNIQUE)\s+\(\w+\)),?\s)*)(\)\s*;?)'''
matchObj = re.match(pattern, createSql, re.I)
if matchObj:
logger.debug ("matchObj.groups() : ", matchObj.groups())
# for g in matchObj.groups():
# logger.debug(g)
logger.debug ("matchObj.group(0) : ", matchObj.group(0))
logger.debug ("matchObj.group(1) : ", matchObj.group(1))
logger.debug ("matchObj.group(2) : ", matchObj.group(2))
else:
logger.debug ("No match!!")
def getColumn(self, createSql=None):
columnDict = dict()
if createSql:
# picking bracket part of create sql
h_0, t_0 = createSql.split("(", 1)
h_1, t_1 = t_0.rsplit(")", 1)
logger.debug("columns: {}".format(h_1))
logger.debug (t_0)
columnText=self.getAllConstrantInSeparteLine(columnText=h_1)
# removing constrinat as last line. primary key , unique key , foreign key
columnPattern = r'''((('|"|`).+?\3)|(\[?\w+\]?))\s*((?i)\bANY\b|(?i)\bJSON\b|(?i)\bINT\b|(?i)\bINTEGER\b|(?i)\bTINYINT\b|(?i)\bSMALLINT\b|(?i)\bMEDIUMINT\b|(?i)\bBIGINT\b|(?i)\bUNSIGNED BIG INT\b|(?i)\bINT2\b|(?i)\bINT8\b|(?i)CHARACTER\([0-9]{3}\)|(?i)\bVARYING CHARACTER\([0-9]{3}\)|(?i)\bNCHAR\([0-9]{3}\)\b|(?i)\bNATIVE CHARACTER\([0-9]{3}\)\b|(?i)\bNVARCHAR\([0-9]+\)|(?i)\bFLOAT\b|(?i)\bNUMERIC\b(\([0-9]+,[0-9]+\))?|(?i)\bDECIMAL\(d+\)\b|(?i)\bBOOLEAN\b|(?i)\bDATE\b|(?i)\bDATETIME\b|(?i)\[timestamp\]|(?i)\bREAL\b|(?i)\bDOUBLE\b|(?i)\bDOUBLE PRECISION\b|(?i)\bCLOB\b|(?i)\bBLOB\b|(?i)\bTEXT\b|(?i)\bDATETIME\b|(?i)VARCHAR\([0-9]*\))?\s*((?i)\bHIDDEN\b|(?i)\bNULL\b|(?i)\bNOT NULL\b|(?i)\bPRIMARY KEY\b\s*(ASC|DSC)?)?((?i)\bDEFAULT\b .*)?(\s+(?i)\bAUTOINCREMENT\b|(?i)\bUNIQUE\b)?,?\s*(-{2}.*)?'''
columnDict[0] = ("#", "Name", "Datatype", "PRIMARY KEY", "Nullable", "Unique", "Auto increment", "Hidden", "Default data","Description")
# this is column name
columnMatchObj = re.match(columnPattern, columnText, re.MULTILINE)
if columnMatchObj:
logger.info(columnMatchObj.groups())
logger.debug(columnMatchObj)
columnObj = re.findall(columnPattern, columnText, re.MULTILINE)
if columnObj:
for idx, columnName in enumerate(columnObj):
default_data = None
if columnName[6] and 'default' in columnName[6].lower():
default_data = columnName[6].lower().replace("default", "").strip()
auto_increment = None
if columnName[7] and 'AUTOINCREMENT' in columnName[7].upper():
auto_increment = 'AUTOINCREMENT'
nullable = None
if columnName[7] and 'NOT NULL' in columnName[7].upper():
nullable = 'NOT NULL'
description = None
if columnName[9] and '--' in columnName[9]:
description = columnName[9]
primaryKey = None
if columnName[5] and columnName[5].upper().startswith('PRIMARY KEY'):
primaryKey = columnName[5]
unique = None
if columnName[8] and 'PRIMARY KEY' in columnName[8].upper():
unique = columnName[8]
hidden = None
if columnName[5] and 'HIDDEN' in columnName[5].upper():
hidden = columnName[5]
columnNameInfo = [idx + 1, columnName[0], columnName[4], primaryKey, nullable, unique, auto_increment,hidden, default_data, description]
columnDict[idx + 1] = tuple(columnNameInfo)
else:
logger.debug ("columns : {}".format(h_1))
# createTablePattern='''\s*(CREATE TABLE)\s+((('|").*?\4)|("?\w+"?))\s+\(\s*((((('|").*?\10)|("?\w+"?))\s+(INTEGER|FLOAT|DATETIME|VARCHAR\(\d*\))\s*(NOT NULL|PRIMARY KEY)?,?\s*)*)\s+(((\s*(PRIMARY KEY|UNIQUE)\s+\(\w+\)),?\s)*)(\)\s*;?)'''
# tableMatchObj = re.match( createTablePattern, createSql, re.I)
#
# columnPattern='''(('|").+?\1)\s+(INTEGER|FLOAT|DATETIME|VARCHAR\(\d*\))\s*(NOT NULL|PRIMARY KEY)?,?\s*'''
# if tableMatchObj:
# # logger.debug ("tableMatchObj.groups() : ", tableMatchObj.groups())
#
# for idx, matchValue in enumerate(tableMatchObj.groups()):
# if idx==6:
# columnDict[0]=("Position #", "Name", "Datatype", "Nullable", "Auto increment", "Default data")
# # this is column name
# columnMatchObj = re.findall( columnPattern, matchValue, re.I)
# if columnMatchObj:
# for idx, columnName in enumerate(columnMatchObj):
# columnNameInfo=[idx+1]+list(columnName)+[None, None]
# columnDict[idx+1]= tuple(columnNameInfo)
# else:
# logger.debug ("columns : {}".format(matchValue))
#
# else:
# logger.debug ("No match!! : {}".format(createSql))
return columnDict
def getAllConstrantInSeparteLine(self, columnText=None):
    """Strip table-level constraint clauses from a column list.

    Splits *columnText* on commas and drops every entry that starts with
    a table-level constraint keyword (CONSTRAINT, PRIMARY KEY, UNIQUE,
    FOREIGN KEY), returning the remaining column definitions re-joined
    with commas.

    :param columnText: comma-separated column/constraint text taken from
        a CREATE TABLE statement; ``None`` or empty yields ``""``.
    :return: comma-separated column definitions without the constraints.
    """
    # Bug fix: the original crashed with AttributeError when called
    # without an argument (default columnText=None); treat missing
    # text as "no columns".
    if not columnText:
        return ""
    onlyColumns = []
    for column in columnText.split(","):
        logger.debug(column.strip())
        # Case-insensitive prefix check against the table-level
        # constraint keywords.
        if not column.strip().lower().startswith(
                ('constraint', 'primary key', 'unique', 'foreign key')):
            onlyColumns.append(column)
    logger.debug(onlyColumns)
    return ",".join(onlyColumns)
if __name__ == "__main__":
    # Ad-hoc smoke test for SqlParser: sample column lists and CREATE
    # TABLE statements for an "employee" table, including table-level
    # PRIMARY KEY and UNIQUE constraints.
    columns = '''
id INTEGER NOT NULL,
vc1_dept_descr VARCHAR(250),
work_location_country VARCHAR(250),
middle_initial VARCHAR(250),
floor_number VARCHAR(250),
hr_latest_hire_date VARCHAR(250),
hr_department_desc VARCHAR(250),
user_type VARCHAR(250),
mu_cost_code VARCHAR(250),
assistant_dept VARCHAR(250),
countrya2 VARCHAR(250),
paygroup VARCHAR(250),
countrya3 VARCHAR(250),
employee_id_number VARCHAR(250),
comit_id VARCHAR(250),
display_name VARCHAR(250),
company VARCHAR(250),
phone VARCHAR(250),
assistant_phone VARCHAR(250),
mu_full_name VARCHAR(250),
email_id VARCHAR(250),
job_title VARCHAR(250),
descr VARCHAR(250),
hire_dt DATETIME,
assistant_name VARCHAR(250),
lync_phone VARCHAR(250),
work_location_city VARCHAR(250),
management_unit VARCHAR(250),
dept VARCHAR(250),
friendly_company VARCHAR(250),
first_name VARCHAR(250),
last_name VARCHAR(250),
building_name VARCHAR(250),
active_flag VARCHAR(250),
manager VARCHAR(250),
hr_department VARCHAR(250),
mu_company_code_description VARCHAR(250),
sector_id VARCHAR(250),
hr_supervisor_id VARCHAR(250),
preferred_first_name VARCHAR(250),
city VARCHAR(250),
deptm VARCHAR(250),
manager_email VARCHAR(250),
manager_comit VARCHAR(250),
ms_rtcsip_primary_user_address VARCHAR(250),
shift_info VARCHAR(250),
dn VARCHAR(250),
office VARCHAR(250),
dialing_code VARCHAR(250),
assistant_comit VARCHAR(250),
bud_description VARCHAR(250),
sector_description VARCHAR(250),
employee_status VARCHAR(250),
aim_number VARCHAR(250),
mu_company_code VARCHAR(250),
hr_supervisor_first_name VARCHAR(250),
preferred_last_name VARCHAR(250),
functional_manager VARCHAR(250),
business_unit VARCHAR(250),
e164_mobile VARCHAR(250),
mu_owner_comit_id VARCHAR(250),
mu_owner_first_name VARCHAR(250),
grade VARCHAR(250),
postal VARCHAR(250),
domain VARCHAR(250),
work_location_state VARCHAR(250),
dept_descr VARCHAR(250),
friendly_department VARCHAR(250),
empl_type VARCHAR(250),
state VARCHAR(250),
shift VARCHAR(250),
manager_phone VARCHAR(250),
dept_entry_dt DATETIME,
full_name VARCHAR(250),
hr_supervisor_last_name VARCHAR(250),
dept_id VARCHAR(250),
per_org VARCHAR(250),
sector_short_description VARCHAR(250),
address2 VARCHAR(250),
address1 VARCHAR(250),
hrdept_short_mb VARCHAR(250),
mail_drop VARCHAR(250),
full_part_time VARCHAR(250),
mu_owner_last_name VARCHAR(250),
mgmtchain VARCHAR(250),
subords VARCHAR(250),
management_unit_description VARCHAR(250),
downward_reporting VARCHAR(250),
"laDateTimee_date" DATETIME,
ind INTEGER,
score FLOAT,
corp_title VARCHAR(250),
_version_ INTEGER,
_lw_batch_id_s VARCHAR(250),
_lw_data_source_pipeline_s VARCHAR(250),
_lw_data_source_type_s VARCHAR(250),
_lw_data_source_collection_s VARCHAR(250),
_lw_data_source_s VARCHAR(250),
PRIMARY KEY (id),
UNIQUE (comit_id),
UNIQUE (email_id)
'''
    # Quoted-identifier variants (single- and double-quoted names,
    # mixed keyword case).
    createSql_2 = "CREATE TABLE 'Table 1' ( 'column 1' INTEGER PRIMARY KEY )"
    createSql_1 = 'create TABLE "ABC" ( "id" INTEGER PRIMARY KEY ) ; '
    # Full CREATE TABLE statement using the same column list as above.
    createSql1 = '''
CREATE TABLE employee (
id INTEGER NOT NULL,
vc1_dept_descr VARCHAR(250),
work_location_country VARCHAR(250),
middle_initial VARCHAR(250),
floor_number VARCHAR(250),
hr_latest_hire_date VARCHAR(250),
hr_department_desc VARCHAR(250),
user_type VARCHAR(250),
mu_cost_code VARCHAR(250),
assistant_dept VARCHAR(250),
countrya2 VARCHAR(250),
paygroup VARCHAR(250),
countrya3 VARCHAR(250),
employee_id_number VARCHAR(250),
comit_id VARCHAR(250),
display_name VARCHAR(250),
company VARCHAR(250),
phone VARCHAR(250),
assistant_phone VARCHAR(250),
mu_full_name VARCHAR(250),
email_id VARCHAR(250),
job_title VARCHAR(250),
descr VARCHAR(250),
hire_dt DATETIME,
assistant_name VARCHAR(250),
lync_phone VARCHAR(250),
work_location_city VARCHAR(250),
management_unit VARCHAR(250),
dept VARCHAR(250),
friendly_company VARCHAR(250),
first_name VARCHAR(250),
last_name VARCHAR(250),
building_name VARCHAR(250),
active_flag VARCHAR(250),
manager VARCHAR(250),
hr_department VARCHAR(250),
mu_company_code_description VARCHAR(250),
sector_id VARCHAR(250),
hr_supervisor_id VARCHAR(250),
preferred_first_name VARCHAR(250),
city VARCHAR(250),
deptm VARCHAR(250),
manager_email VARCHAR(250),
manager_comit VARCHAR(250),
ms_rtcsip_primary_user_address VARCHAR(250),
shift_info VARCHAR(250),
dn VARCHAR(250),
office VARCHAR(250),
dialing_code VARCHAR(250),
assistant_comit VARCHAR(250),
bud_description VARCHAR(250),
sector_description VARCHAR(250),
employee_status VARCHAR(250),
aim_number VARCHAR(250),
mu_company_code VARCHAR(250),
hr_supervisor_first_name VARCHAR(250),
preferred_last_name VARCHAR(250),
functional_manager VARCHAR(250),
business_unit VARCHAR(250),
e164_mobile VARCHAR(250),
mu_owner_comit_id VARCHAR(250),
mu_owner_first_name VARCHAR(250),
grade VARCHAR(250),
postal VARCHAR(250),
domain VARCHAR(250),
work_location_state VARCHAR(250),
dept_descr VARCHAR(250),
friendly_department VARCHAR(250),
empl_type VARCHAR(250),
state VARCHAR(250),
shift VARCHAR(250),
manager_phone VARCHAR(250),
dept_entry_dt DATETIME,
full_name VARCHAR(250),
hr_supervisor_last_name VARCHAR(250),
dept_id VARCHAR(250),
per_org VARCHAR(250),
sector_short_description VARCHAR(250),
address2 VARCHAR(250),
address1 VARCHAR(250),
hrdept_short_mb VARCHAR(250),
mail_drop VARCHAR(250),
full_part_time VARCHAR(250),
mu_owner_last_name VARCHAR(250),
mgmtchain VARCHAR(250),
subords VARCHAR(250),
management_unit_description VARCHAR(250),
downward_reporting VARCHAR(250),
"laDateTimee_date" DATETIME,
ind INTEGER,
score FLOAT,
corp_title VARCHAR(250),
_version_ INTEGER,
_lw_batch_id_s VARCHAR(250),
_lw_data_source_pipeline_s VARCHAR(250),
_lw_data_source_type_s VARCHAR(250),
_lw_data_source_collection_s VARCHAR(250),
_lw_data_source_s VARCHAR(250),
PRIMARY KEY (id),
UNIQUE (comit_id),
UNIQUE (email_id)
)
'''
    sqlParser = SqlParser()
    # sqlParser.createSqlToDict(createSql=createSql)
    # columnDict = sqlParser.getColumn(createSql=columns)
    # Tail of a column list: two real columns followed by table-level
    # constraints — the case getAllConstrantInSeparteLine filters.
    columnText=""" _lw_data_source_collection_s VARCHAR(250),
_lw_data_source_s VARCHAR(250),
PRIMARY KEY (id),
UNIQUE (comit_id),
UNIQUE (email_id)
"""
    sqlParser.getAllConstrantInSeparteLine(columnText)
    # logger.debug(columnDict)
    logger.debug("Finish")
| 41.148256
| 815
| 0.602473
| 1,668
| 14,155
| 4.932854
| 0.163669
| 0.223627
| 0.044239
| 0.01422
| 0.663345
| 0.625425
| 0.614244
| 0.614244
| 0.600875
| 0.600875
| 0
| 0.060748
| 0.246415
| 14,155
| 343
| 816
| 41.268222
| 0.710603
| 0.164747
| 0
| 0.728522
| 0
| 0.006873
| 0.690217
| 0.121688
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013746
| false
| 0.003436
| 0.010309
| 0
| 0.034364
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
001fb83c81e602e4eb87d12651ec9c4138df6e4f
| 55
|
py
|
Python
|
1_Practice_Python/1B4_txt_manipulation.py
|
CyberThulhu22/Python-Projects
|
448f7b934e0a316cf87be36c7b294f81b039a008
|
[
"MIT"
] | null | null | null |
1_Practice_Python/1B4_txt_manipulation.py
|
CyberThulhu22/Python-Projects
|
448f7b934e0a316cf87be36c7b294f81b039a008
|
[
"MIT"
] | null | null | null |
1_Practice_Python/1B4_txt_manipulation.py
|
CyberThulhu22/Python-Projects
|
448f7b934e0a316cf87be36c7b294f81b039a008
|
[
"MIT"
] | 1
|
2022-01-05T04:19:44.000Z
|
2022-01-05T04:19:44.000Z
|
#!/usr/bin/env python3
## Different ways to edit text!
| 18.333333
| 31
| 0.709091
| 9
| 55
| 4.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021277
| 0.145455
| 55
| 3
| 31
| 18.333333
| 0.808511
| 0.909091
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
cc43e7113c6daf4992e6c7b9fbc04dc123f641fb
| 1,141
|
py
|
Python
|
strategies/oversold_bounce.py
|
SniderThanYou/thorvaldsson-trading-co
|
d16f23d2a14d89dbddbd38100825e37cb0e27b3e
|
[
"MIT"
] | 1
|
2021-09-03T13:44:25.000Z
|
2021-09-03T13:44:25.000Z
|
strategies/oversold_bounce.py
|
SniderThanYou/thorvaldsson-trading-co
|
d16f23d2a14d89dbddbd38100825e37cb0e27b3e
|
[
"MIT"
] | null | null | null |
strategies/oversold_bounce.py
|
SniderThanYou/thorvaldsson-trading-co
|
d16f23d2a14d89dbddbd38100825e37cb0e27b3e
|
[
"MIT"
] | null | null | null |
from backtesting import Strategy
from backtesting.lib import crossover
import talib
import numpy as np
def rolling_min(a, window):
    """Trailing rolling minimum of *a*.

    For each index ``j`` this returns ``min(a[max(0, j - window):j + 1])``;
    near the start of the array the window is truncated, not padded.

    NOTE(review): once ``j >= window`` the slice spans ``window + 1``
    elements — preserved as-is apart from the min/max fix; confirm the
    intended window width.

    :param a: 1-D sequence of numbers.
    :param window: trailing window size (see note above).
    :return: numpy array of the same length as *a*.
    """
    # Bug fix: the original called max() here while rolling_max called
    # min() — the two helpers had their aggregations swapped.
    return np.array([min(a[max(0, j - window):j + 1]) for j in range(len(a))])
def rolling_max(a, window):
    """Trailing rolling maximum of *a*.

    For each index ``j`` this returns ``max(a[max(0, j - window):j + 1])``;
    near the start of the array the window is truncated, not padded.

    NOTE(review): once ``j >= window`` the slice spans ``window + 1``
    elements — preserved as-is apart from the min/max fix; confirm the
    intended window width.

    :param a: 1-D sequence of numbers.
    :param window: trailing window size (see note above).
    :return: numpy array of the same length as *a*.
    """
    # Bug fix: the original called min() here while rolling_min called
    # max() — the two helpers had their aggregations swapped.  The
    # chandelier exit below needs the true rolling high.
    return np.array([max(a[max(0, j - window):j + 1]) for j in range(len(a))])
def chandelier_exit_long(high, low, close):
    """Chandelier-exit stop level for long positions.

    Computed as the 22-period ``rolling_max`` of *close* minus three
    times the 22-period ATR of the (high, low, close) series.
    """
    atr = talib.ATR(high, low, close, 22)
    extreme = rolling_max(close, 22)
    return extreme - atr * 3
class OversoldBounce(Strategy):
    """Mean-reversion strategy: buy RSI bounces above the 200-bar SMA.

    Entry: close above SMA(200) while RSI(14) crosses up through 30.
    Exit: a trailing stop driven by the chandelier-exit level (see
    ``next``); the initial stop sits 10% below the entry close.
    """

    def init(self):
        # Precompute the indicators once over the full price series.
        price = self.data.Close
        self.sma200 = self.I(talib.SMA, price, 200)
        self.rsi = self.I(talib.RSI, price, 14)
        self.chandelier_exit_long = self.I(chandelier_exit_long, self.data.High, self.data.Low, self.data.Close)

    def next(self):
        # Long entry: uptrend filter (close > SMA200) plus an oversold
        # bounce (RSI crossing up through 30); initial stop 10% below.
        if self.data.Close[-1] > self.sma200 and crossover(self.rsi, 30):
            self.buy(sl=self.data.Close[-1]*0.9)
        for trade in self.trades:
            # TODO this stop loss is not working out. Need a better exit signal.
            # NOTE(review): this assigns the whole indicator object as
            # the stop — presumably backtesting.py coerces it to its
            # latest value; confirm, otherwise use
            # self.chandelier_exit_long[-1].
            if trade.is_long:
                trade.sl = self.chandelier_exit_long
                # NOTE(review): `self.rsi > 70` relies on the indicator
                # wrapper's scalar comparison semantics — verify it
                # means "latest RSI above 70".
                if self.rsi > 70:
                    trade.close()
| 32.6
| 112
| 0.632778
| 180
| 1,141
| 3.944444
| 0.372222
| 0.067606
| 0.101408
| 0.042254
| 0.146479
| 0.090141
| 0.090141
| 0.090141
| 0.090141
| 0.090141
| 0
| 0.032333
| 0.241017
| 1,141
| 34
| 113
| 33.558824
| 0.787529
| 0.057844
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029412
| 0
| 1
| 0.208333
| false
| 0
| 0.166667
| 0.125
| 0.541667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
cc5039f6f069c9527a0d309a1475c17cf4d68e0f
| 340
|
py
|
Python
|
PythonCookbook/module_package/myapplication/__main__.py
|
xu6148152/Binea_Python_Project
|
d943eb5f4685d08f080b372dcf1a7cbd5d63efed
|
[
"MIT"
] | null | null | null |
PythonCookbook/module_package/myapplication/__main__.py
|
xu6148152/Binea_Python_Project
|
d943eb5f4685d08f080b372dcf1a7cbd5d63efed
|
[
"MIT"
] | null | null | null |
PythonCookbook/module_package/myapplication/__main__.py
|
xu6148152/Binea_Python_Project
|
d943eb5f4685d08f080b372dcf1a7cbd5d63efed
|
[
"MIT"
] | null | null | null |
# -*- encoding: utf-8 -*-
# from ..myapplication import bar
# from ..myapplication import grok
# from ..myapplication import spam
import sys
from module_package.myapplication import bar
# NOTE(review): sys.path entries are expected to be individual directory
# strings; this appends a single *list* element, which is almost
# certainly not intended (sys.path.extend, or one append per path).
sys.path.append(['bar.py', 'grok.py', 'spam.py'])

if __name__ == '__main__':
    # Smoke test: greet, then exercise the imported bar module.
    print('Hello world')
    bar.bar()
    # grok.grok()
    # spam.spam()
| 22.666667
| 49
| 0.664706
| 44
| 340
| 4.931818
| 0.477273
| 0.35023
| 0.317972
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003546
| 0.170588
| 340
| 14
| 50
| 24.285714
| 0.765957
| 0.426471
| 0
| 0
| 0
| 0
| 0.207447
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.166667
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
cc574f5bfe815e49d3dde997b3a330ebcfe64dc1
| 99
|
py
|
Python
|
parser/Error.py
|
brian-joseph-petersen/oply
|
b30212492c7657903a88d1026b17beda0a1b9ce3
|
[
"MIT"
] | null | null | null |
parser/Error.py
|
brian-joseph-petersen/oply
|
b30212492c7657903a88d1026b17beda0a1b9ce3
|
[
"MIT"
] | null | null | null |
parser/Error.py
|
brian-joseph-petersen/oply
|
b30212492c7657903a88d1026b17beda0a1b9ce3
|
[
"MIT"
] | null | null | null |
def p_error(p):
    """PLY parser error hook: report the offending token and abort.

    :param p: the token at which parsing failed (may be None at EOF).
    :raises Exception: always, to abort the parse.
    """
    # Bug fix: the original used bare `print` expression statements —
    # Python-2 leftovers that are no-ops in Python 3 — so the blank
    # separator lines were never emitted.
    print()
    print("SYNTAX ERROR %s" % str(p))
    print()
    raise Exception("syntax error at %s" % str(p))
| 19.8
| 41
| 0.565657
| 14
| 99
| 3.928571
| 0.642857
| 0.218182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.323232
| 99
| 5
| 42
| 19.8
| 0.820896
| 0
| 0
| 0.4
| 0
| 0
| 0.15
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0
| 0
| 0.2
| 0.6
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
cc8f8f0371473d22403bb74acba73212658471f0
| 15
|
py
|
Python
|
python/testData/codeInsight/smartEnter/firstClauseAfterEmptyMatchStatementWithSubjectAndColon.py
|
06needhamt/intellij-community
|
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
|
[
"Apache-2.0"
] | null | null | null |
python/testData/codeInsight/smartEnter/firstClauseAfterEmptyMatchStatementWithSubjectAndColon.py
|
06needhamt/intellij-community
|
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
|
[
"Apache-2.0"
] | null | null | null |
python/testData/codeInsight/smartEnter/firstClauseAfterEmptyMatchStatementWithSubjectAndColon.py
|
06needhamt/intellij-community
|
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
|
[
"Apache-2.0"
] | null | null | null |
match x<caret>:
| 15
| 15
| 0.733333
| 3
| 15
| 3.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 15
| 1
| 15
| 15
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
cc9b101184f5569524fa3f67f143981d0a51dce9
| 97
|
py
|
Python
|
java/koreantext/__init__.py
|
appetizermonster/lucy
|
f014c1536024cea0bdfa349b7033dbc524eb9cbe
|
[
"MIT"
] | 4
|
2017-12-19T01:27:58.000Z
|
2018-12-12T06:30:52.000Z
|
java/koreantext/__init__.py
|
appetizermonster/lucy
|
f014c1536024cea0bdfa349b7033dbc524eb9cbe
|
[
"MIT"
] | 1
|
2017-09-04T06:42:13.000Z
|
2017-09-04T06:42:13.000Z
|
java/koreantext/__init__.py
|
appetizermonster/lucy
|
f014c1536024cea0bdfa349b7033dbc524eb9cbe
|
[
"MIT"
] | null | null | null |
from . import jvm
from . import tagger
def init():
    """Start the JVM that backs the Korean text tagger."""
    jvm.init_jvm()

# Re-export: expose tagger.Tagger as koreantext.Tagger.
Tagger = tagger.Tagger
| 9.7
| 22
| 0.670103
| 14
| 97
| 4.571429
| 0.428571
| 0.3125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.226804
| 97
| 9
| 23
| 10.777778
| 0.853333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
cca509bc17ce0589b04000855de352e98f420cab
| 50
|
py
|
Python
|
python/perspective/perspective/core/_version.py
|
kevinhinterlong/perspective
|
3c5786027ccf9dcadcad5c8dc7817149c98d454b
|
[
"Apache-2.0"
] | null | null | null |
python/perspective/perspective/core/_version.py
|
kevinhinterlong/perspective
|
3c5786027ccf9dcadcad5c8dc7817149c98d454b
|
[
"Apache-2.0"
] | null | null | null |
python/perspective/perspective/core/_version.py
|
kevinhinterlong/perspective
|
3c5786027ccf9dcadcad5c8dc7817149c98d454b
|
[
"Apache-2.0"
] | null | null | null |
__version__ = "1.3.2"
major_minor_version = "1.3"
| 16.666667
| 27
| 0.7
| 9
| 50
| 3.222222
| 0.666667
| 0.551724
| 0.62069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113636
| 0.12
| 50
| 2
| 28
| 25
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0.16
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
cca68bd7e71cafd54232b3d20b673f56384e2d89
| 17,157
|
py
|
Python
|
tests/bayesian/test_betabinomial.py
|
sinemetu1/confidence
|
a5c46ce72f4de217708358fb12417c032e08c40e
|
[
"Apache-2.0"
] | null | null | null |
tests/bayesian/test_betabinomial.py
|
sinemetu1/confidence
|
a5c46ce72f4de217708358fb12417c032e08c40e
|
[
"Apache-2.0"
] | null | null | null |
tests/bayesian/test_betabinomial.py
|
sinemetu1/confidence
|
a5c46ce72f4de217708358fb12417c032e08c40e
|
[
"Apache-2.0"
] | null | null | null |
"""Tests for `confidence` categorical variables."""
import pytest
import spotify_confidence
import pandas as pd
import numpy as np
# Fix the randomization seed so the sampled credible intervals asserted
# below are deterministic across runs.
spotify_confidence.options.set_option('randomization_seed', 1)
class TestCategorical(object):
    """BetaBinomial tests over purely categorical groupings.

    Two treatment variants plus control, across two countries; the
    numeric expectations below depend on the fixed randomization seed
    set at module import.
    """

    def setup(self):
        # Success/total counts per (country, variation) cell.
        self.data = pd.DataFrame({
            'variation_name':
                ['test', 'control', 'test2', 'test', 'control', 'test2'],
            'success': [500, 42, 1005, 50, 4, 100],
            'total': [1009, 104, 1502, 100, 10, 150],
            'country': [
                'us',
                'us',
                'us',
                'gb',
                'gb',
                'gb',
            ]
        })
        self.test = spotify_confidence.BetaBinomial(
            self.data,
            numerator_column='success',
            denominator_column='total',
            categorical_group_columns=['country', 'variation_name'])

    def test_summary(self):
        """Summary table: point estimates and credible-interval bounds."""
        summary = self.test.summary()
        assert (np.array_equal(summary.country,
                               np.array(['us', 'us', 'us', 'gb', 'gb', 'gb'])))
        assert (np.array_equal(summary.point_estimate,
                               self.data.success / self.data.total))
        assert (np.allclose(
            summary['ci_lower'],
            np.array([
                0.46473987, 0.3132419, 0.64500415, 0.40317395, 0.1530671,
                0.58861002
            ])))
        assert (np.allclose(
            summary['ci_upper'],
            np.array([
                0.52636595, 0.49971958, 0.69256054, 0.59682605, 0.69632051,
                0.73836135
            ])))

    def test_summary_plot(self):
        """Summary plot: one chart overall, one per country when grouped."""
        ch = self.test.summary_plot()
        assert (len(ch.charts) == 1)
        ch = self.test.summary_plot(groupby='country')
        assert (len(ch.charts) == 2)

    def test_difference(self):
        """Pairwise difference: absolute, relative, and grouped forms."""
        diff = self.test.difference(('us', 'control'), ('us', 'test'))
        assert (np.allclose(diff['difference'].iloc[0], 0.090737))
        assert (np.allclose(diff['ci_lower'].iloc[0], -0.0092189024))
        assert (np.allclose(diff['ci_upper'].iloc[0], 0.18712235))
        assert (np.allclose(
            diff["P(level_2 > level_1)"].iloc[0], 0.962782))
        assert (np.allclose(diff["level_1 potential loss"].iloc[0],
                            -0.091504838))
        assert (np.allclose(diff["level_1 potential gain"].iloc[0],
                            0.000767784))
        assert (np.allclose(diff["level_2 potential loss"].iloc[0],
                            -0.000767784))
        assert (np.allclose(diff["level_2 potential gain"].iloc[0],
                            0.091504838))
        # Relative (non-absolute) difference.
        diff = self.test.difference(
            ('us', 'control'), ('us', 'test'), absolute=False)
        assert (np.allclose(diff['difference'].iloc[0], 0.2416812))
        assert (np.allclose(
            diff["P(level_2 > level_1)"].iloc[0], 0.962782))
        assert (np.allclose(diff["level_1 potential loss"].iloc[0],
                            -0.2431594))
        # Grouped by country.
        diff = self.test.difference(
            'control', 'test', groupby='country', absolute=False)
        assert (np.allclose(diff['difference'], np.array([0.428745,
                                                          0.241799])))
        assert (np.allclose(diff['P(level_2 > level_1)'],
                            np.array([0.725982, 0.963228])))
        assert (np.allclose(
            diff['level_2 potential loss'],
            np.array([-0.042689, -0.001464]),
            rtol=1e-05,
            atol=1e-06))

    def test_difference_with_interval_sizes(self):
        """Wider interval sizes widen the credible bounds, not the mean."""
        self.test._interval_size = 0.99
        diff = self.test.difference(('us', 'control'), ('us', 'test'))
        assert (np.allclose(diff['difference'].iloc[0], 0.090737))
        assert (np.allclose(diff['ci_lower'].iloc[0], -0.040760))
        assert (np.allclose(diff['ci_upper'].iloc[0], 0.21614))
        self.test._interval_size = 0.999
        diff = self.test.difference(('us', 'control'), ('us', 'test'))
        assert (np.allclose(diff['difference'].iloc[0], 0.090737))
        assert (np.allclose(diff['ci_lower'].iloc[0], -0.0769830))
        assert (np.allclose(diff['ci_upper'].iloc[0], 0.2479583))

    def test_difference_plot(self):
        """Difference plot: rejects bad levels, chart count per grouping."""
        with pytest.raises(ValueError):
            self.test.difference_plot(('bad_value', 'bad_value'),
                                      ('bad_value', 'bad_value'))
        ch = self.test.difference_plot(('us', 'control'), ('us', 'test'))
        assert (len(ch.charts) == 1)
        ch = self.test.difference_plot('control', 'test', groupby='country')
        assert (len(ch.charts) == 2)

    def test_multiple_difference_joint(self):
        """Joint multiple-difference vs. all other levels."""
        with pytest.raises(ValueError):
            self.test.multiple_difference_joint(('bad_value', 'bad_value'))
        diff = self.test.multiple_difference_joint(('us', 'test2'))
        print(np.random.get_state()[1][0])
        assert (np.allclose(
            diff['difference'], np.array([0.000743]), rtol=1e-05, atol=1e-06))
        assert (np.allclose(diff["P(('us', 'test2') >= all)"],
                            np.array([0.508644])))
        assert (np.allclose(diff["('us', 'test2') potential loss"],
                            np.array([-0.032459])))
        diff = self.test.multiple_difference_joint('test2', groupby='country')
        assert (np.allclose(diff['test2 potential loss'], np.array([-0.054338,
                                                                    0.])))

    def test_multiple_difference_joint_plot(self):
        """Joint multiple-difference plot: bad levels rejected, chart counts."""
        with pytest.raises(ValueError):
            self.test.multiple_difference_joint_plot(('bad_value', 'bad_value'),
                                                     ('bad_value', 'bad_value'))
        ch = self.test.multiple_difference_joint_plot(('us', 'control'))
        assert (len(ch.charts) == 1)
        ch = self.test.multiple_difference_joint_plot('test', groupby='country')
        assert (len(ch.charts) == 2)

    def test_multiple_difference(self):
        """Each multiple-difference row matches its pairwise difference."""
        with pytest.raises(ValueError):
            self.test.multiple_difference('bad_value')
        md = self.test.multiple_difference('control', groupby='country')
        for i, row in md.iterrows():
            diff_value = self.test.difference(
                (row['country'], row['level_1']),
                (row['country'], row['level_2']))['difference'].values[0]
            assert (np.allclose(row['difference'], diff_value, rtol=1e-02))

    def test_multiple_difference_plot(self):
        """Multiple-difference plot: chart count per grouping."""
        ch = self.test.multiple_difference_plot(('us', 'control'))
        assert (len(ch.charts) == 1)
        ch = self.test.multiple_difference_plot('test', groupby='country')
        assert (len(ch.charts) == 2)

    def test_multiple_difference_level_as_reference(self):
        """level_as_reference=True still agrees with pairwise differences."""
        md = self.test.multiple_difference('control',
                                           groupby='country',
                                           level_as_reference=True)
        for i, row in md.iterrows():
            diff_value = self.test.difference(
                (row['country'], row['level_1']),
                (row['country'], row['level_2']))['difference'].values[0]
            assert (np.allclose(row['difference'], diff_value, rtol=1e-02))
class TestOrdinal:
    """BetaBinomial tests with one categorical plus one ordinal group.

    Three variants observed over five days since registration; numeric
    expectations depend on the module-level randomization seed.
    """

    def setup(self):
        # One (variant, day) row per observation, days 1..5.
        self.data = pd.DataFrame({
            'variation_name': [
                'test',
                'control',
                'test2',
                'test',
                'control',
                'test2',
                'test',
                'control',
                'test2',
                'test',
                'control',
                'test2',
                'test',
                'control',
                'test2',
            ],
            'success': [
                500,
                8,
                100,
                510,
                8,
                100,
                520,
                9,
                104,
                530,
                7,
                100,
                530,
                8,
                103,
            ],
            'total': [
                1010,
                22,
                150,
                1000,
                20,
                153,
                1030,
                23,
                154,
                1000,
                20,
                150,
                1040,
                21,
                155,
            ],
            'days_since_reg': [1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 5, 5, 5],
        })
        self.test = spotify_confidence.BetaBinomial(
            self.data,
            numerator_column='success',
            denominator_column='total',
            categorical_group_columns='variation_name',
            ordinal_group_column='days_since_reg')

    def test_summary(self):
        """Summary table: point estimates and credible-interval bounds."""
        summary = self.test.summary()
        assert (np.array_equal(
            summary['point_estimate'], summary[self.test._numerator_column] /
            summary[self.test._denominator_column]))
        assert (np.allclose(
            summary['ci_lower'],
            np.array([
                0.46426613, 0.18932289, 0.58861002, 0.479025, 0.21062941,
                0.57588269, 0.47434292, 0.21431068, 0.59864266, 0.49901672,
                0.17227621, 0.58861002, 0.47924165, 0.19940202, 0.5876837
            ])))
        assert (np.allclose(
            summary['ci_upper'],
            np.array([
                0.52586121, 0.57128953, 0.73836135, 0.54091721, 0.61607968,
                0.72555745, 0.53533858, 0.59380455, 0.74548633, 0.56080991,
                0.56776609, 0.73836135, 0.53993569, 0.592895, 0.73526909
            ])))

    def test_summary_plot(self):
        """Summary plot: a single chart for the ordinal axis."""
        ch = self.test.summary_plot()
        assert (len(ch.charts) == 1)

    def test_difference(self):
        """Difference calls succeed for tuple, relative and grouped forms."""
        with pytest.raises(ValueError):
            self.test.difference(('control', 'us'), ('test', 'usf'))
        diff = self.test.difference(('control', 1), ('test', 1))
        # Add more assertions here
        diff = self.test.difference(
            ('control', 1), ('test', 1), absolute=False)
        # Add more assertions here
        diff = self.test.difference(
            'control', 'test', groupby='days_since_reg')
        # Add more assertions here
        assert len(diff) > 0

    def test_multiple_difference(self):
        """Grouped multiple-difference yields 2 comparisons x 5 days."""
        diff = self.test.multiple_difference(level='control',
                                             groupby='days_since_reg',
                                             level_as_reference=True)
        assert (len(diff) == 10)

    def test_difference_plot(self):
        """Difference plot: bad level rejected, one chart per call."""
        with pytest.raises(ValueError):
            self.test.difference_plot('control', 'not_a_valid_level')
        ch = self.test.difference_plot(('control', 1), ('test', 1))
        assert (len(ch.charts) == 1)
        ch = self.test.difference_plot('control', 'test',
                                       groupby='days_since_reg')
        assert (len(ch.charts) == 1)

    def test_multiple_difference_plot(self):
        """Multiple-difference plot: groupby required, one chart produced."""
        with pytest.raises(ValueError):
            self.test.multiple_difference_plot(level='control')
        ch = self.test.multiple_difference_plot(level='control',
                                                groupby=['days_since_reg'],
                                                level_as_reference=True)
        assert (len(ch.charts) == 1)
class TestOrdinalPlusTwoCategorical(object):
    """BetaBinomial tests with two categorical groups plus one ordinal.

    Three variants x two countries x five days since registration (30
    rows: first 15 'us', last 15 'gb').
    """

    def setup(self):
        self.data = pd.DataFrame(
            {'variation_name': ['test', 'control', 'test2',
                                'test', 'control', 'test2',
                                'test', 'control', 'test2',
                                'test', 'control', 'test2',
                                'test', 'control', 'test2',
                                'test', 'control', 'test2',
                                'test', 'control', 'test2',
                                'test', 'control', 'test2',
                                'test', 'control', 'test2',
                                'test', 'control', 'test2', ],
             'success': [500, 8, 100,
                         510, 8, 100,
                         520, 9, 104,
                         530, 7, 100,
                         530, 8, 103,
                         500, 8, 100,
                         510, 8, 100,
                         520, 9, 104,
                         530, 7, 100,
                         530, 8, 103, ],
             'total': [2010, 42, 250,
                       2000, 40, 253,
                       2030, 43, 254,
                       2000, 40, 250,
                       2040, 41, 255,
                       1010, 22, 150,
                       1000, 20, 153,
                       1030, 23, 154,
                       1000, 20, 150,
                       1040, 21, 155, ],
             'days_since_reg': [1, 1, 1,
                                2, 2, 2,
                                3, 3, 3,
                                4, 4, 4,
                                5, 5, 5,
                                1, 1, 1,
                                2, 2, 2,
                                3, 3, 3,
                                4, 4, 4,
                                5, 5, 5],
             'country': ['us', 'us', 'us', 'us', 'us', 'us', 'us',
                         'us', 'us', 'us', 'us', 'us', 'us', 'us',
                         'us',
                         'gb', 'gb', 'gb', 'gb', 'gb', 'gb', 'gb',
                         'gb', 'gb', 'gb', 'gb', 'gb', 'gb', 'gb',
                         'gb', ]})
        self.test = spotify_confidence.BetaBinomial(
            self.data,
            numerator_column='success',
            denominator_column='total',
            categorical_group_columns=['variation_name', 'country'],
            ordinal_group_column='days_since_reg')

    def test_summary_plot(self):
        """Summary plot: chart count scales with the grouping columns."""
        ch = self.test.summary_plot()
        assert (len(ch.charts) == 1)
        ch = self.test.summary_plot(groupby=['country'])
        assert (len(ch.charts) == 2)
        ch = self.test.summary_plot(groupby=['days_since_reg'])
        assert (len(ch.charts) == 5)
        ch = self.test.summary_plot(groupby=['country', 'days_since_reg'])
        assert (len(ch.charts) == 10)

    def test_difference(self):
        """Difference frames carry the grouping columns and right row counts."""
        with pytest.raises(ValueError):
            self.test.difference(('control', 'us'), ('test', 'usf'),
                                 groupby='days_since_reg')
        df = self.test.difference(level_1=('test', 'us'),
                                  level_2=('control', 'us'),
                                  groupby='days_since_reg')
        assert (len(df) == 5)
        assert ('days_since_reg' in df.columns)
        df = self.test.difference(level_1=('test', 'us'),
                                  level_2=('control', 'us'),
                                  groupby=['days_since_reg'])
        assert (len(df) == 5)
        assert ('days_since_reg' in df.columns)
        df = self.test.difference(level_1=('test', 1),
                                  level_2=('control', 1),
                                  groupby=['country'])
        assert (len(df) == 2)
        assert ('country' in df.columns)
        df = self.test.difference(level_1='test',
                                  level_2='control',
                                  groupby=['country', 'days_since_reg'])
        assert (len(df) == 10)
        assert ('country' in df.columns)
        assert ('days_since_reg' in df.columns)

    def test_difference_plot(self):
        """Difference plot: bad levels rejected, chart count per grouping."""
        with pytest.raises(ValueError):
            self.test.difference_plot(('control', 'us', 10), ('test', 'us', 10))
        ch = self.test.difference_plot(level_1=('control', 'us', 1),
                                       level_2=('test', 'us', 1))
        assert (len(ch.charts) == 1)
        ch = self.test.difference_plot(level_1=('control', 'us'),
                                       level_2=('test', 'us'),
                                       groupby='days_since_reg')
        assert (len(ch.charts) == 1)
        ch = self.test.difference_plot(level_1='control', level_2='test',
                                       groupby=['country', 'days_since_reg'])
        assert (len(ch.charts) == 1)
        ch = self.test.difference_plot(level_1=('control', 1),
                                       level_2=('test', 1), groupby='country')
        assert (len(ch.charts) == 2)

    def test_multiple_difference_plot(self):
        """Multiple-difference plot: groupby required, one chart produced."""
        with pytest.raises(ValueError):
            self.test.multiple_difference_plot(level='control')
        ch = self.test.multiple_difference_plot(level='control',
                                                groupby=['country',
                                                         'days_since_reg'],
                                                level_as_reference=True)
        assert (len(ch.charts) == 1)
| 38.641892
| 80
| 0.467215
| 1,736
| 17,157
| 4.475806
| 0.126152
| 0.060746
| 0.061776
| 0.061776
| 0.82471
| 0.786873
| 0.746589
| 0.710811
| 0.662291
| 0.577864
| 0
| 0.100441
| 0.391852
| 17,157
| 443
| 81
| 38.72912
| 0.64424
| 0.010783
| 0
| 0.545699
| 0
| 0
| 0.117269
| 0
| 0
| 0
| 0
| 0
| 0.174731
| 1
| 0.061828
| false
| 0
| 0.010753
| 0
| 0.080645
| 0.002688
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
ccad8c3444abcadcc24d2854d4e8232820dd95f0
| 167
|
py
|
Python
|
mbed_tools_ci_scripts/utils/__init__.py
|
urutva/mbed-tools-ci-scripts
|
eef162a47f56f95cbb7ecaeac1e629ca8abd7a94
|
[
"Apache-2.0"
] | 2
|
2020-04-30T13:44:37.000Z
|
2020-06-11T09:39:10.000Z
|
mbed_tools_ci_scripts/utils/__init__.py
|
urutva/mbed-tools-ci-scripts
|
eef162a47f56f95cbb7ecaeac1e629ca8abd7a94
|
[
"Apache-2.0"
] | 43
|
2020-02-04T17:25:24.000Z
|
2021-02-17T19:40:09.000Z
|
mbed_tools_ci_scripts/utils/__init__.py
|
urutva/mbed-tools-ci-scripts
|
eef162a47f56f95cbb7ecaeac1e629ca8abd7a94
|
[
"Apache-2.0"
] | 4
|
2020-04-19T16:29:29.000Z
|
2020-11-13T12:08:31.000Z
|
#
# Copyright (C) 2020 Arm Mbed. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
"""Utility scripts to abstract and assist with scripts run in the CI."""
| 27.833333
| 72
| 0.724551
| 26
| 167
| 4.653846
| 0.961538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042553
| 0.155689
| 167
| 5
| 73
| 33.4
| 0.815603
| 0.916168
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
aeaf3e6a88db8ab6b0a52204c6a404c68198899e
| 4,525
|
py
|
Python
|
PGCAltas/apps/embdata/url.py
|
IzayoiRin/PGCAltas
|
44d1ac826c64f0eb67d129861895fae37221a4a2
|
[
"MIT"
] | null | null | null |
PGCAltas/apps/embdata/url.py
|
IzayoiRin/PGCAltas
|
44d1ac826c64f0eb67d129861895fae37221a4a2
|
[
"MIT"
] | 5
|
2020-02-12T03:23:40.000Z
|
2020-08-06T00:50:15.000Z
|
PGCAltas/apps/embdata/url.py
|
IzayoiRin/PGCAltas
|
44d1ac826c64f0eb67d129861895fae37221a4a2
|
[
"MIT"
] | null | null | null |
from django.conf.urls import url
from . import views
# urlpatterns = [
# url(r"^fitting/$", views.FittingModelsAPIView.as_view()),
# url(r"^validations/$", views.ValidationsAPIView.as_view()),
# ]
urlpatterns = [
url(r"^fitting/(?P<mod>features)/$",
views.FittingModelsAPIViewSet.as_view({'get': 'features', 'post': 'features'}), name='fit-features'),
url(r"^fitting/(?P<mod>classify)/$",
views.FittingModelsAPIViewSet.as_view({'post': 'classify'}), name='fit-classify'),
url(r"^(?P<mod>fitting)/$", views.FittingModelsAPIViewSet.as_view({'post': 'create'}), name='fit-model'),
url(r"^fitting/(?P<mod>predict)/$",
views.FittingModelsAPIViewSet.as_view({'get': 'predict', 'post': 'predict'}), name='fit-predict'),
url(r"^validations/$", views.ValidationsAPIView.as_view(), name='validation'),
]
"""
test cases:
1. fitting features -- EMIP
Only for NEW Training data
/embdata/fitting/features/?flush=1&training=1&test_sz=0.2
Only for NEW Testing data
/embdata/fitting/features/?flush=1&training=0&test_sz=0.2
Used for redefined RDF Selector from EXISTED data
/embdata/fitting/features/?flush=0&training=1&test_sz=0.2
A way passing this stage
/embdata/fitting/features/?flush=0&training=0&test_sz=0.2
2. fitting classifier -- ENSEMBLE CLASSIFIER
Used for redefined ENSEMBLE CLASSIFIER from EXISTED data
/embdata/fitting/classify/?training=1&n_components=12&after_filter=120&barnes_hut=0.5&n_estimator=132&record_freq=10
Only for PREDICT form EXISTED data and fitted model
/embdata/fitting/classify/?training=0&n_components=12&after_filter=120&barnes_hut=0.5&n_estimator=132&record_freq=10
Used for redefined ENSEMBLE CLASSIFIER from EXISTED data, CHANGE PART: LDA
/embdata/fitting/classify/?training=1&n_components=12&after_filter=90&barnes_hut=0.5&n_estimator=132&record_freq=10
Used for redefined ENSEMBLE CLASSIFIER from EXISTED data, CHANGE PART: SVM
/embdata/fitting/classify/?training=1&n_components=12&after_filter=90&barnes_hut=0.5&n_estimator=87&record_freq=10
3. fitting -- WHOLE WORKFLOW
Only for NEW Training data, Used for redefined ENSEMBLE CLASSIFIER from EXISTED data
/embdata/fitting/?flush=1&trscreen=1&trclassify=1&test_sz=0.2&n_components=12&after_filter=120&barnes_hut=0.5&n_estimator=132&record_freq=10
# Only for NEW Training data, PREDICT form EXISTED data and fitted model
# /embdata/fitting/?flush=1&trscreen=1&trclassify=0&test_sz=0.2&n_components=12&after_filter=120&barnes_hut=0.5&n_estimator=132&record_freq=10
Only for Screened NEW Training data, Used for redefined ENSEMBLE CLASSIFIER from EXISTED data
/embdata/fitting/?flush=1&trscreen=0&trclassify=1&test_sz=0.2&n_components=12&after_filter=120&barnes_hut=0.5&n_estimator=132&record_freq=10
Only for NEW Testing data, PREDICT form EXISTED data and fitted model
/embdata/fitting/?flush=1&trscreen=0&trclassify=0&test_sz=0.2&n_components=12&after_filter=120&barnes_hut=0.5&n_estimator=132&record_freq=10
Used for redefined RDF Selector from EXISTED data, and redefined ENSEMBLE CLASSIFIER from EXISTED data
/embdata/fitting/?flush=0&trscreen=1&trclassify=1&test_sz=0.2&n_components=12&after_filter=120&barnes_hut=0.5&n_estimator=132&record_freq=10
# Used for redefined RDF Selector from EXISTED data, and PREDICT form EXISTED data and fitted model
# /embdata/fitting/?flush=0&trscreen=1&trclassify=0&test_sz=0.2&n_components=12&after_filter=120&barnes_hut=0.5&n_estimator=132&record_freq=10
A way passing this stage, Used for redefined ENSEMBLE CLASSIFIER from EXISTED data
/embdata/fitting/?flush=0&trscreen=0&trclassify=1&test_sz=0.2&n_components=12&after_filter=120&barnes_hut=0.5&n_estimator=132&record_freq=10
A way passing this stage, PREDICT form EXISTED data and fitted model
/embdata/fitting/?flush=0&trscreen=0&trclassify=0&test_sz=0.2&n_components=12&after_filter=120&barnes_hut=0.5&n_estimator=132&record_freq=10
4. validating -- S-FOLD VALIDATION
VALIDATING
/embdata/validations/?n_components=12&after_filter=120&barnes_hut=0.5&n_estimator=132&record_freq=10
VALIDATING, CHANGE PART: SVM
/embdata/validations/?n_components=12&after_filter=120&barnes_hut=0.5&n_estimator=98&record_freq=10
VALIDATING, CHANGE PART: LDA
/embdata/validations/?n_components=12&after_filter=80&barnes_hut=0.5&n_estimator=98&record_freq=10
"""
| 48.655914
| 146
| 0.744972
| 701
| 4,525
| 4.67475
| 0.128388
| 0.068355
| 0.059506
| 0.082392
| 0.865426
| 0.782423
| 0.73726
| 0.673177
| 0.66036
| 0.645102
| 0
| 0.060406
| 0.129282
| 4,525
| 92
| 147
| 49.184783
| 0.77132
| 0.031602
| 0
| 0
| 0
| 0
| 0.335227
| 0.117898
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
aed81216483a630ba6fc3de15e932a4e782fe82c
| 265
|
py
|
Python
|
micromasters/exceptions.py
|
Wassaf-Shahzad/micromasters
|
b1340a8c233499b1d8d22872a6bc1fe7f49fd323
|
[
"BSD-3-Clause"
] | 32
|
2016-03-25T01:03:13.000Z
|
2022-01-15T19:35:42.000Z
|
micromasters/exceptions.py
|
Wassaf-Shahzad/micromasters
|
b1340a8c233499b1d8d22872a6bc1fe7f49fd323
|
[
"BSD-3-Clause"
] | 4,858
|
2016-03-03T13:48:30.000Z
|
2022-03-29T22:09:51.000Z
|
micromasters/exceptions.py
|
umarmughal824/micromasters
|
ea92d3bcea9be4601150fc497302ddacc1161622
|
[
"BSD-3-Clause"
] | 20
|
2016-08-18T22:07:44.000Z
|
2021-11-15T13:35:35.000Z
|
"""
Custom exceptions for the MicroMasters app
"""
from django.core.exceptions import ImproperlyConfigured
class PossiblyImproperlyConfigured(ImproperlyConfigured):
"""
Custom exception to be raised when the improper configuration is not certain
"""
| 22.083333
| 80
| 0.773585
| 27
| 265
| 7.592593
| 0.851852
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162264
| 265
| 11
| 81
| 24.090909
| 0.923423
| 0.449057
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
4e039defd628f39a1140b2ff54b07827ecc20e41
| 147
|
py
|
Python
|
Disbot/cli.py
|
GSri30/Disbot
|
844c18470728402ddd95b956b03da3ab310fd933
|
[
"MIT"
] | null | null | null |
Disbot/cli.py
|
GSri30/Disbot
|
844c18470728402ddd95b956b03da3ab310fd933
|
[
"MIT"
] | 2
|
2021-03-12T09:01:46.000Z
|
2021-03-12T09:07:30.000Z
|
Disbot/cli.py
|
GSri30/Disbot
|
844c18470728402ddd95b956b03da3ab310fd933
|
[
"MIT"
] | null | null | null |
import click
from Disbot.subcommands import commands as subcommands
@click.group()
def Main():
pass
Main.add_command(subcommands.createbot)
| 14.7
| 54
| 0.782313
| 19
| 147
| 6
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136054
| 147
| 10
| 55
| 14.7
| 0.897638
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0.166667
| 0.333333
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 4
|
4e04db0d740278e3caeee9a1b95cf07b27afe981
| 186
|
py
|
Python
|
ws/handler/event/appliance/light/circadian_rhythm/saturation.py
|
fabaff/automate-ws
|
a9442f287692787e3f253e1ff23758bec8f3902e
|
[
"MIT"
] | null | null | null |
ws/handler/event/appliance/light/circadian_rhythm/saturation.py
|
fabaff/automate-ws
|
a9442f287692787e3f253e1ff23758bec8f3902e
|
[
"MIT"
] | 1
|
2021-12-21T11:34:47.000Z
|
2021-12-21T11:34:47.000Z
|
ws/handler/event/appliance/light/circadian_rhythm/saturation.py
|
fabaff/automate-ws
|
a9442f287692787e3f253e1ff23758bec8f3902e
|
[
"MIT"
] | 1
|
2021-12-21T10:10:13.000Z
|
2021-12-21T10:10:13.000Z
|
import home
from ws.handler.event.appliance.light.saturation import Handler as Parent
class Handler(Parent):
KLASS = home.appliance.light.event.circadian_rhythm.saturation.Event
| 20.666667
| 73
| 0.806452
| 25
| 186
| 5.96
| 0.6
| 0.187919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112903
| 186
| 8
| 74
| 23.25
| 0.90303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
4e0d5223b0acde4fee96581ea2e92b94ed21f41c
| 7,249
|
py
|
Python
|
hyperbit/objtypes.py
|
g1itch/hyperbit
|
82c3f3916926f0b8ae68b5526956790061d4dde2
|
[
"MIT"
] | null | null | null |
hyperbit/objtypes.py
|
g1itch/hyperbit
|
82c3f3916926f0b8ae68b5526956790061d4dde2
|
[
"MIT"
] | null | null | null |
hyperbit/objtypes.py
|
g1itch/hyperbit
|
82c3f3916926f0b8ae68b5526956790061d4dde2
|
[
"MIT"
] | null | null | null |
# Copyright 2015-2016 HyperBit developers
# pylint: disable=too-many-arguments,too-many-instance-attributes
import enum
from hyperbit import crypto, serialize
class Type(enum.IntEnum):
getpubkey = 0
pubkey = 1
msg = 2
broadcast = 3
class Getpubkey23():
def __init__(self, ripe):
self.ripe = ripe
@classmethod
def from_bytes(cls, data):
return cls(data)
def to_bytes(self):
return self.ripe
class Getpubkey4():
def __init__(self, tag):
self.tag = tag
@classmethod
def from_bytes(cls, data):
return cls(data)
def to_bytes(self):
return self.tag
class Pubkey2():
def __init__(self, behavior, verkey, enckey):
self.behavior = behavior
self.verkey = verkey
self.enckey = enckey
@classmethod
def from_bytes(cls, data):
return cls(int.from_bytes(data[:4], 'big'), data[4:68], data[68:132])
def to_bytes(self):
return self.behavior.to_bytes(4, 'big') + self.verkey + self.enckey
class Pubkey3():
def __init__(self, behavior, verkey, enckey, trials, extra, signature):
self.behavior = behavior
self.verkey = verkey
self.enckey = enckey
self.trials = trials
self.extra = extra
self.signature = signature
@classmethod
def from_bytes(cls, data):
s = serialize.Deserializer(data)
behavior = s.uint(4)
verkey = s.bytes(64)
enckey = s.bytes(64)
trials = s.vint()
extra = s.vint()
signature = s.vbytes()
return cls(behavior, verkey, enckey, trials, extra, signature)
def to_bytes(self):
s = serialize.Serializer()
s.uint(self.behavior, 4)
s.bytes(self.verkey)
s.bytes(self.enckey)
s.vint(self.trials)
s.vint(self.extra)
s.vbytes(self.signature)
return s.data
class Pubkey4():
def __init__(self, tag, encrypted):
assert len(tag) == 32
self.tag = tag
self.encrypted = encrypted
@classmethod
def from_bytes(cls, data):
return cls(data[:32], data[32:])
def to_bytes(self):
return self.tag + self.encrypted
class Msg1():
def __init__(self, encrypted):
self.encrypted = encrypted
@classmethod
def from_bytes(cls, data):
return cls(data)
def to_bytes(self):
return self.encrypted
class Broadcast4():
def __init__(self, encrypted):
self.encrypted = encrypted
@classmethod
def from_bytes(cls, data):
return cls(data)
def to_bytes(self):
return self.encrypted
class Broadcast5():
def __init__(self, tag, encrypted):
assert len(tag) == 32
self.tag = tag
self.encrypted = encrypted
@classmethod
def from_bytes(cls, data):
return cls(data[:32], data[32:])
def to_bytes(self):
return self.tag + self.encrypted
class MsgData():
def __init__(
self, addrver, stream, behavior, verkey, enckey, trials, extra,
ripe, encoding, message, ack, signature
):
self.addrver = addrver
self.stream = stream
self.behavior = behavior
self.verkey = verkey
self.enckey = enckey
self.trials = trials
self.extra = extra
self.ripe = ripe
self.encoding = encoding
self.message = message
self.ack = ack
self.signature = signature
@classmethod
def from_bytes(cls, data):
s = serialize.Deserializer(data)
addrver = s.vint()
stream = s.vint()
behavior = s.uint(4)
verkey = s.bytes(64)
enckey = s.bytes(64)
trials = s.vint()
extra = s.vint()
ripe = s.bytes(20)
encoding = s.vint()
message = s.vbytes()
ack = s.vbytes()
signature = s.vbytes()
return cls(
addrver, stream, behavior, verkey, enckey, trials, extra, ripe,
encoding, message, ack, signature)
def to_bytes(self):
s = serialize.Serializer()
s.vint(self.addrver)
s.vint(self.stream)
s.uint(self.behavior, 4)
s.bytes(self.verkey)
s.bytes(self.enckey)
s.vint(self.trials)
s.vint(self.extra)
s.bytes(self.ripe)
s.vint(self.encoding)
s.vbytes(self.message)
s.vbytes(self.ack)
s.vbytes(self.signature)
return s.data
def sign(self, sigkey, obj):
s = serialize.Serializer()
s.uint(obj.expires, 8)
s.uint(obj.type, 4)
s.vint(obj.version)
s.vint(obj.stream)
s.vint(self.addrver)
s.vint(self.stream)
s.uint(self.behavior, 4)
s.bytes(self.verkey)
s.bytes(self.enckey)
s.vint(self.trials)
s.vint(self.extra)
s.bytes(self.ripe)
s.vint(self.encoding)
s.vbytes(self.message)
s.vbytes(self.ack)
self.signature = crypto.sign(sigkey, s.data)
def verify(self, obj):
s = serialize.Serializer()
s.uint(obj.expires, 8)
s.uint(obj.type, 4)
s.vint(obj.version)
s.vint(obj.stream)
s.vint(self.addrver)
s.vint(self.stream)
s.uint(self.behavior, 4)
s.bytes(self.verkey)
s.bytes(self.enckey)
s.vint(self.trials)
s.vint(self.extra)
s.bytes(self.ripe)
s.vint(self.encoding)
s.vbytes(self.message)
s.vbytes(self.ack)
crypto.verify(self.verkey, s.data, self.signature)
class BroadcastData():
def __init__(
self, addrver, stream, behavior, verkey, enckey, trials, extra,
encoding, message, signature
):
self.addrver = addrver
self.stream = stream
self.behavior = behavior
self.verkey = verkey
self.enckey = enckey
self.trials = trials
self.extra = extra
self.encoding = encoding
self.message = message
self.signature = signature
@classmethod
def from_bytes(cls, data):
s = serialize.Deserializer(data)
addrver = s.vint()
stream = s.vint()
behavior = s.uint(4)
verkey = s.bytes(64)
enckey = s.bytes(64)
trials = s.vint()
extra = s.vint()
encoding = s.vint()
message = s.vbytes()
signature = s.vbytes()
return cls(
addrver, stream, behavior, verkey, enckey, trials, extra, encoding,
message, signature)
class Encoding(enum.IntEnum):
ignore = 0
trivial = 1
simple = 2
class SimpleMessage():
encoding = Encoding.simple
def __init__(self, subject, body):
self.subject = subject
self.body = body
@classmethod
def from_bytes(cls, data):
text = data.decode(errors='replace')
try:
subject = text[8:text.index('\n')]
except Exception: # TODO: exception type
subject = ''
try:
body = text[text.index('\nBody:')+6:]
except ValueError:
body = text
return cls(subject, body)
def to_bytes(self):
return ('Subject:' + self.subject + '\nBody:' + self.body).encode()
| 24.910653
| 79
| 0.5747
| 873
| 7,249
| 4.695304
| 0.119129
| 0.040254
| 0.037326
| 0.061722
| 0.780922
| 0.769944
| 0.717736
| 0.683581
| 0.674067
| 0.642596
| 0
| 0.014603
| 0.310388
| 7,249
| 290
| 80
| 24.996552
| 0.805361
| 0.017106
| 0
| 0.758475
| 0
| 0
| 0.005196
| 0
| 0
| 0
| 0
| 0.003448
| 0.008475
| 1
| 0.144068
| false
| 0
| 0.008475
| 0.063559
| 0.330508
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
4e115c0cf5cde38ebc9f90ea992d859f1562a03b
| 2,776
|
py
|
Python
|
tests/test_pipeline.py
|
noaa-ocs-hydrography/vyperdatum
|
378216b21d19119418686f975e883517a41ca19c
|
[
"CC0-1.0"
] | 2
|
2021-12-01T15:57:34.000Z
|
2021-12-03T14:37:48.000Z
|
tests/test_pipeline.py
|
noaa-ocs-hydrography/vyperdatum
|
378216b21d19119418686f975e883517a41ca19c
|
[
"CC0-1.0"
] | 2
|
2021-12-06T18:28:50.000Z
|
2021-12-17T21:51:43.000Z
|
tests/test_pipeline.py
|
noaa-ocs-hydrography/vyperdatum
|
378216b21d19119418686f975e883517a41ca19c
|
[
"CC0-1.0"
] | 1
|
2021-06-24T15:31:27.000Z
|
2021-06-24T15:31:27.000Z
|
import os
from pyproj import Transformer
from vyperdatum.pipeline import *
from vyperdatum.core import VyperCore
vc = VyperCore() # run this once so that the path to the grids is added in pyproj
def test_get_regional_pipeline_upperlower():
pipe = get_regional_pipeline('Ellipse', 'TSS', 'CAORblan01_8301', r'core\geoid12b\g2012bu0.gtx')
assert pipe == get_regional_pipeline('ellipse', 'tss', 'CAORblan01_8301', r'core\geoid12b\g2012bu0.gtx')
def test_get_regional_pipeline_nad83_tss():
pipe = get_regional_pipeline('ellipse', 'TSS', 'CAORblan01_8301', r'core\geoid12b\g2012bu0.gtx')
assert pipe.count('+step +proj') == 1
assert pipe.count('+step +inv +proj') == 1
assert pipe.count('gtx') == 2
transformer = Transformer.from_pipeline(pipe)
result = transformer.transform(xx=-124.853, yy=41.227, zz=0)
assert result == (-124.853, 41.227000000000004, 30.86302107201744)
def test_get_regional_pipeline_tss_nad83():
pipe = get_regional_pipeline('tss', 'ellipse', 'CAORblan01_8301', r'core\geoid12b\g2012bu0.gtx')
assert pipe.count('+step +inv +proj') == 1
assert pipe.count('+step +proj') == 1
assert pipe.count('gtx') == 2
transformer = Transformer.from_pipeline(pipe)
result = transformer.transform(xx=-124.853, yy=41.227, zz=0)
assert result == (-124.853, 41.227000000000004, -30.86302107201744)
def test_get_regional_pipeline_mllw():
pipe = get_regional_pipeline('ellipse', 'mllw', 'CAORblan01_8301', r'core\geoid12b\g2012bu0.gtx')
assert pipe.count('+step +proj') == 2
assert pipe.count('+step +inv +proj') == 1
assert pipe.count('gtx') == 3
assert pipe.count('mllw') == 1
transformer = Transformer.from_pipeline(pipe)
result = transformer.transform(xx=-124.853, yy=41.227, zz=0)
assert result == (-124.853, 41.227000000000004, 31.97132104264427)
def test_get_regional_pipeline_mhw():
pipe = get_regional_pipeline('ellipse', 'mhw', 'CAORblan01_8301', r'core\geoid12b\g2012bu0.gtx')
assert pipe.count('+step +proj') == 2
assert pipe.count('+step +inv +proj') == 1
assert pipe.count('gtx') == 3
assert pipe.count('mhw') == 1
transformer = Transformer.from_pipeline(pipe)
result = transformer.transform(xx=-124.853, yy=41.227, zz=0)
assert result == (-124.853, 41.227000000000004, 30.11322104560066)
def test_get_regional_pipeline_null():
pipe = get_regional_pipeline('mllw', 'mllw', 'CAORblan01_8301', r'core\geoid12b\g2012bu0.gtx')
assert pipe is None
if __name__ == '__main__':
test_get_regional_pipeline_mhw()
test_get_regional_pipeline_mllw()
test_get_regional_pipeline_nad83_tss()
test_get_regional_pipeline_null()
test_get_regional_pipeline_tss_nad83()
test_get_regional_pipeline_upperlower()
| 39.657143
| 108
| 0.716138
| 378
| 2,776
| 5.034392
| 0.179894
| 0.109827
| 0.1897
| 0.145034
| 0.857068
| 0.712559
| 0.659485
| 0.659485
| 0.659485
| 0.656858
| 0
| 0.133586
| 0.145173
| 2,776
| 69
| 109
| 40.231884
| 0.668352
| 0.022334
| 0
| 0.384615
| 0
| 0
| 0.181047
| 0.067109
| 0
| 0
| 0
| 0
| 0.384615
| 1
| 0.115385
| false
| 0
| 0.076923
| 0
| 0.192308
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
4e2df84a284f3b5f39f5f6a7f9835dc394eba71b
| 186
|
py
|
Python
|
demo.py
|
wisedu1/Testing
|
a2a27174de36cc069e0582340f17e5f3ee331937
|
[
"Apache-2.0"
] | null | null | null |
demo.py
|
wisedu1/Testing
|
a2a27174de36cc069e0582340f17e5f3ee331937
|
[
"Apache-2.0"
] | null | null | null |
demo.py
|
wisedu1/Testing
|
a2a27174de36cc069e0582340f17e5f3ee331937
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# from testing.demo2 import *
# # Demo1()
# # demo2 = Demo2()
# print(hello)
# f()
# print(demo2.hello)
# demo2.f()
from testing import *
| 14.307692
| 29
| 0.596774
| 25
| 186
| 4.44
| 0.6
| 0.198198
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046053
| 0.182796
| 186
| 12
| 30
| 15.5
| 0.684211
| 0.763441
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
9db678db803f64d09b8db83761a18e385b7bab97
| 179
|
py
|
Python
|
nsm/__init__.py
|
AdamPrzybyla/robotframework-nsm
|
c911de683c3f565882cce183fcf46b72f1234838
|
[
"MIT"
] | null | null | null |
nsm/__init__.py
|
AdamPrzybyla/robotframework-nsm
|
c911de683c3f565882cce183fcf46b72f1234838
|
[
"MIT"
] | null | null | null |
nsm/__init__.py
|
AdamPrzybyla/robotframework-nsm
|
c911de683c3f565882cce183fcf46b72f1234838
|
[
"MIT"
] | null | null | null |
import sys
from .nsm import *
if (sys.version_info > (3, 0)):
from .nsm3 import nsm3 as nsm
else:
from .nsm2 import nsm
if __name__=='__main__':
main()
| 19.888889
| 37
| 0.597765
| 26
| 179
| 3.769231
| 0.576923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03937
| 0.290503
| 179
| 8
| 38
| 22.375
| 0.732283
| 0
| 0
| 0
| 0
| 0
| 0.044693
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
9dc51cd843e8873d50007a80716ea3d497ed53e5
| 99
|
py
|
Python
|
app_questions/apps.py
|
Audiotuete/backend_kassel_api
|
97bb1f38eea51147660dd2eda052b540293f27a7
|
[
"MIT"
] | null | null | null |
app_questions/apps.py
|
Audiotuete/backend_kassel_api
|
97bb1f38eea51147660dd2eda052b540293f27a7
|
[
"MIT"
] | null | null | null |
app_questions/apps.py
|
Audiotuete/backend_kassel_api
|
97bb1f38eea51147660dd2eda052b540293f27a7
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class AppQuestionsConfig(AppConfig):
name = 'app_questions'
| 19.8
| 36
| 0.787879
| 11
| 99
| 7
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141414
| 99
| 4
| 37
| 24.75
| 0.905882
| 0
| 0
| 0
| 0
| 0
| 0.131313
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
d180204916e0b7013509fdf69be7db87834470e0
| 553
|
py
|
Python
|
tests/test_api/test_auth.py
|
octomen/thupoll
|
1114d8e9802a97c1fd9d3850c887df94e7fa609e
|
[
"MIT"
] | 2
|
2019-04-11T20:02:45.000Z
|
2019-04-15T01:43:09.000Z
|
tests/test_api/test_auth.py
|
octomen/thupoll
|
1114d8e9802a97c1fd9d3850c887df94e7fa609e
|
[
"MIT"
] | 37
|
2019-03-17T14:45:38.000Z
|
2019-06-07T03:19:22.000Z
|
tests/test_api/test_auth.py
|
octomen/thupoll
|
1114d8e9802a97c1fd9d3850c887df94e7fa609e
|
[
"MIT"
] | null | null | null |
def test__delete__401_when_no_auth_headers(client):
r = client.delete('/themes/1')
assert r.status_code == 401, r.get_json()
def test__delete__401_when_incorrect_auth(client, faker):
r = client.delete('/themes/1', headers={
'Authentication': 'some_incorrect_token'})
assert r.status_code == 401, r.get_json()
# TODO
# def test__delete__403_when_incorrect_role(client, faker):
# r = client.delete('/themes/1', headers={
# 'Authentication': 'some_incorrect_token'})
# assert r.status_code == 401, r.get_json()
| 30.722222
| 59
| 0.696203
| 77
| 553
| 4.584416
| 0.324675
| 0.05949
| 0.110482
| 0.161473
| 0.804533
| 0.634561
| 0.634561
| 0.634561
| 0.555241
| 0.555241
| 0
| 0.045259
| 0.16094
| 553
| 17
| 60
| 32.529412
| 0.715517
| 0.368897
| 0
| 0.285714
| 0
| 0
| 0.152493
| 0
| 0
| 0
| 0
| 0.058824
| 0.285714
| 1
| 0.285714
| false
| 0
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
d1803d84202157eed89835d5fa6d46f628a18115
| 163
|
py
|
Python
|
src/org/app/test/test_it.py
|
kojiromike/example-python-package
|
d886ac5b22e72c65712a9d36cda286eece129f1f
|
[
"Apache-2.0"
] | null | null | null |
src/org/app/test/test_it.py
|
kojiromike/example-python-package
|
d886ac5b22e72c65712a9d36cda286eece129f1f
|
[
"Apache-2.0"
] | null | null | null |
src/org/app/test/test_it.py
|
kojiromike/example-python-package
|
d886ac5b22e72c65712a9d36cda286eece129f1f
|
[
"Apache-2.0"
] | null | null | null |
from unittest import TestCase
from org.app.app import whatevz
class AnyTestCase(TestCase):
def test_thing(self) -> None:
self.assertTrue(whatevz())
| 18.111111
| 34
| 0.723926
| 21
| 163
| 5.571429
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184049
| 163
| 8
| 35
| 20.375
| 0.879699
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
d1b781e21da9b9f2a309a9e4ea50ff321d678e42
| 170
|
py
|
Python
|
houses/admin.py
|
Totalboy/sputnik_blog
|
154ddbea4bd8f089476f6b6fb25f82d1e5de0b31
|
[
"MIT"
] | null | null | null |
houses/admin.py
|
Totalboy/sputnik_blog
|
154ddbea4bd8f089476f6b6fb25f82d1e5de0b31
|
[
"MIT"
] | null | null | null |
houses/admin.py
|
Totalboy/sputnik_blog
|
154ddbea4bd8f089476f6b6fb25f82d1e5de0b31
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import House
#декоратор
@admin.register(House)
class AdminHouse(admin.ModelAdmin):
list_display = ["name", "price", "id"]
| 21.25
| 39
| 0.758824
| 22
| 170
| 5.818182
| 0.772727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111765
| 170
| 7
| 40
| 24.285714
| 0.847682
| 0.052941
| 0
| 0
| 0
| 0
| 0.06875
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
ae0eb2cb56aeab0808618a3e7a9bfb3f5aa7802d
| 56
|
py
|
Python
|
advance/cv-master/Assignments/gavPackage/greet.py
|
GavinK-ai/cv
|
6dd11b2100c40aca281508c3821c807ef0ee227d
|
[
"MIT"
] | 1
|
2021-11-15T06:16:44.000Z
|
2021-11-15T06:16:44.000Z
|
advance/cv-master/Assignments/gavPackage/greet.py
|
JKai96/cv
|
6dd11b2100c40aca281508c3821c807ef0ee227d
|
[
"MIT"
] | null | null | null |
advance/cv-master/Assignments/gavPackage/greet.py
|
JKai96/cv
|
6dd11b2100c40aca281508c3821c807ef0ee227d
|
[
"MIT"
] | null | null | null |
def greeting(name="Default"):
print(f"Hello {name}")
| 28
| 29
| 0.660714
| 8
| 56
| 4.625
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 56
| 2
| 30
| 28
| 0.755102
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
ae248aee3e4baa487eec21aed305b8efc01abc81
| 199
|
py
|
Python
|
notebooks/__code/images_metadata_matcher.py
|
mabrahamdevops/python_notebooks
|
6d5e7383b60cc7fd476f6e85ab93e239c9c32330
|
[
"BSD-3-Clause"
] | null | null | null |
notebooks/__code/images_metadata_matcher.py
|
mabrahamdevops/python_notebooks
|
6d5e7383b60cc7fd476f6e85ab93e239c9c32330
|
[
"BSD-3-Clause"
] | null | null | null |
notebooks/__code/images_metadata_matcher.py
|
mabrahamdevops/python_notebooks
|
6d5e7383b60cc7fd476f6e85ab93e239c9c32330
|
[
"BSD-3-Clause"
] | null | null | null |
try:
import ipywe.fileselector
from ipywidgets import widgets
except:
pass
class ImagesMetadataMatcher:
def __init__(self, working_dir='./'):
self.working_dir = working_dir
| 18.090909
| 41
| 0.708543
| 22
| 199
| 6.090909
| 0.727273
| 0.223881
| 0.208955
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.21608
| 199
| 11
| 42
| 18.090909
| 0.858974
| 0
| 0
| 0
| 0
| 0
| 0.01
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0.125
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
ae4add78411574209f708b8e0934082ea185d244
| 294
|
py
|
Python
|
minitf/__init__.py
|
guocuimi/minitf
|
f272a6b1546b82aaec41ec7d2c2d34fa40a40385
|
[
"MIT"
] | 7
|
2020-02-10T08:16:30.000Z
|
2021-01-31T14:08:02.000Z
|
minitf/__init__.py
|
guocuimi/minitf
|
f272a6b1546b82aaec41ec7d2c2d34fa40a40385
|
[
"MIT"
] | 1
|
2020-02-29T01:57:54.000Z
|
2020-02-29T01:57:54.000Z
|
minitf/__init__.py
|
guocuimi/minitf
|
f272a6b1546b82aaec41ec7d2c2d34fa40a40385
|
[
"MIT"
] | null | null | null |
import minitf.kernel
from minitf.autodiff import GradientTape
from minitf.autodiff import def_vjp_maker
from minitf.kernel import *
from minitf.kernel.core import primitive
from minitf.tensor import Tensor
from minitf.variable import Variable
from minitf.vjps import *
__version__ = '0.1.1.4'
| 26.727273
| 41
| 0.826531
| 44
| 294
| 5.386364
| 0.409091
| 0.295359
| 0.151899
| 0.202532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015385
| 0.115646
| 294
| 10
| 42
| 29.4
| 0.896154
| 0
| 0
| 0
| 0
| 0
| 0.02381
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.888889
| 0
| 0.888889
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
ae69e0687af0d4d4217bc7e3a1caee3e34159bdd
| 209
|
py
|
Python
|
payment_gateway/api/service/serializers.py
|
MayaraMachado/sns_and_sqs_project
|
4fcc5bbb5f6841543ea8dda353dd85a43024f683
|
[
"MIT"
] | 5
|
2020-06-22T21:29:54.000Z
|
2021-11-01T20:12:04.000Z
|
payment_gateway/api/service/serializers.py
|
MayaraMachado/sns_and_sqs_project
|
4fcc5bbb5f6841543ea8dda353dd85a43024f683
|
[
"MIT"
] | 5
|
2021-03-30T13:38:15.000Z
|
2021-09-22T19:10:27.000Z
|
payment_gateway/api/service/serializers.py
|
MayaraMachado/sns_and_sqs_project
|
4fcc5bbb5f6841543ea8dda353dd85a43024f683
|
[
"MIT"
] | null | null | null |
from api.models import Revenue
from rest_framework import serializers
class RevenueSerializer(serializers.Serializer):
seller_id = serializers.UUIDField()
total_received = serializers.IntegerField()
| 26.125
| 48
| 0.813397
| 22
| 209
| 7.590909
| 0.772727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.124402
| 209
| 7
| 49
| 29.857143
| 0.912568
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
ae6b378824e321d74164b9f79812c4c2dff7f302
| 214
|
py
|
Python
|
pycode/network_graph.py
|
VanillaBrooks/instagram_scrape
|
80a30e3cd77a51b5b8bf73c967b79d5482a44ebb
|
[
"MIT"
] | null | null | null |
pycode/network_graph.py
|
VanillaBrooks/instagram_scrape
|
80a30e3cd77a51b5b8bf73c967b79d5482a44ebb
|
[
"MIT"
] | 10
|
2019-02-10T23:28:10.000Z
|
2019-02-18T11:39:49.000Z
|
pycode/network_graph.py
|
VanillaBrooks/instagram_scrape
|
80a30e3cd77a51b5b8bf73c967b79d5482a44ebb
|
[
"MIT"
] | 1
|
2019-03-17T07:21:36.000Z
|
2019-03-17T07:21:36.000Z
|
import networkx
#import pymysql
def get_sql_data(list_of_users):
# construct mysql query to pull all data from
pass
def build_graph():
# export a graph of all nodes in the swarm
pass
import instagram_scrape
| 16.461538
| 46
| 0.780374
| 36
| 214
| 4.472222
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.17757
| 214
| 12
| 47
| 17.833333
| 0.914773
| 0.462617
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 4
|
ae79b6e4f8e213ca23a2679648cd0e691285086e
| 740
|
py
|
Python
|
check-brackets/test_brackets.py
|
orionoiro/test-tasks
|
ff3191067b784af68a01d4bb26fea8f68554e75d
|
[
"MIT"
] | null | null | null |
check-brackets/test_brackets.py
|
orionoiro/test-tasks
|
ff3191067b784af68a01d4bb26fea8f68554e75d
|
[
"MIT"
] | 1
|
2022-03-12T00:56:47.000Z
|
2022-03-12T00:56:47.000Z
|
check-brackets/test_brackets.py
|
orionoiro/test-tasks
|
ff3191067b784af68a01d4bb26fea8f68554e75d
|
[
"MIT"
] | null | null | null |
import subprocess
from check_brackets import check_with_oddness
def test_empty():
assert check_with_oddness('') == True
def test_simple():
assert check_with_oddness('([])') == True
def test_simple_odd():
assert check_with_oddness('{[(]}') == False
def test_nested():
assert check_with_oddness('[{(([[[{}]]]))}]') == True
def test_nested_wrong():
assert check_with_oddness('[[[{((([{(}{)})}])))}]]]') == False
def test_multi_expr():
assert check_with_oddness('([{}])({})') == True
def test_multi_expr_wrong():
    # Multiple groups where at least one is mismatched must be rejected.
    assert check_with_oddness('[{({)(})}]({[]]]})') is False
def test_odd():
    # Odd-length bracket run can never be balanced.
    assert check_with_oddness('(({{[[{]]}}))') is False
if __name__ == '__main__':
    # Run pytest over this file when executed directly.
    # List-form argv (shell=False) is the idiomatic, portable way to spawn
    # a program with subprocess; a bare string argv is POSIX-only behavior.
    subprocess.run(['pytest'])
| 18.974359
| 66
| 0.614865
| 84
| 740
| 4.940476
| 0.285714
| 0.195181
| 0.346988
| 0.424096
| 0.696386
| 0.696386
| 0.616867
| 0.375904
| 0
| 0
| 0
| 0
| 0.152703
| 740
| 38
| 67
| 19.473684
| 0.661882
| 0
| 0
| 0
| 0
| 0
| 0.140541
| 0.032432
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.4
| true
| 0
| 0.1
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
ae849483a6aaf5c7a3fb6edb143f20fc2ad230f4
| 633
|
py
|
Python
|
home/kwatters/harry/gestures/agreeanswer.py
|
rv8flyboy/pyrobotlab
|
4e04fb751614a5cb6044ea15dcfcf885db8be65a
|
[
"Apache-2.0"
] | 63
|
2015-02-03T18:49:43.000Z
|
2022-03-29T03:52:24.000Z
|
home/kwatters/harry/gestures/agreeanswer.py
|
hirwaHenryChristian/pyrobotlab
|
2debb381fc2db4be1e7ea6e5252a50ae0de6f4a9
|
[
"Apache-2.0"
] | 16
|
2016-01-26T19:13:29.000Z
|
2018-11-25T21:20:51.000Z
|
home/kwatters/harry/gestures/agreeanswer.py
|
hirwaHenryChristian/pyrobotlab
|
2debb381fc2db4be1e7ea6e5252a50ae0de6f4a9
|
[
"Apache-2.0"
] | 151
|
2015-01-03T18:55:54.000Z
|
2022-03-04T07:04:23.000Z
|
def agreeanswer():
  # Gesture script: perform an "agree" head nod, then settle arms, hands
  # and torso into a neutral pose.
  # NOTE(review): i01, sleep() and relax() are provided by the surrounding
  # MyRobotLab/Jython runtime, not this file -- presumably i01 is the robot
  # service instance; confirm against the loading script.
  # Suspend random idle motion for 30s so it cannot fight this gesture.
  i01.disableRobotRandom(30)
  # Full-speed hands and arms; head vertical axis slightly damped (0.90).
  i01.setHandSpeed("left", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0)
  i01.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0)
  i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0)
  i01.setArmSpeed("right", 1.0, 1.0, 1.0, 1.0)
  i01.setHeadSpeed(1.0, 0.90)
  i01.setTorsoSpeed(1.0, 1.0, 1.0)
  # Nod: head up (120), pause, head down (20).
  i01.moveHead(120,90)
  sleep(0.5)
  i01.moveHead(20,90)
  sleep(0.5)
  # Bring arms/hands/torso to the accompanying pose.
  i01.moveArm("left",20,93,42,16)
  i01.moveArm("right",20,93,37,18)
  i01.moveHand("left",180,180,65,81,41,143)
  i01.moveHand("right",180,180,18,61,36,21)
  i01.moveTorso(90,90,90)
  sleep(0.5)
  # Return head to center, then release all servos.
  i01.moveHead(90,90)
  sleep(0.2)
  relax()
| 28.772727
| 57
| 0.627172
| 133
| 633
| 2.984962
| 0.278195
| 0.120907
| 0.13602
| 0.18136
| 0.38539
| 0.355164
| 0.254408
| 0.231738
| 0.231738
| 0.15869
| 0
| 0.282847
| 0.134281
| 633
| 21
| 58
| 30.142857
| 0.441606
| 0
| 0
| 0.142857
| 0
| 0
| 0.056872
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| true
| 0
| 0
| 0
| 0.047619
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
88426cd325779694eeccef9b89433ed9d697c8e5
| 37,449
|
py
|
Python
|
spytest/tests/routing/test_ip.py
|
shubav/sonic-mgmt
|
0ff71b907a55489bb4ed7d17b1682380fd459bf2
|
[
"Apache-2.0"
] | 132
|
2016-10-19T12:34:44.000Z
|
2022-03-16T09:00:39.000Z
|
spytest/tests/routing/test_ip.py
|
shubav/sonic-mgmt
|
0ff71b907a55489bb4ed7d17b1682380fd459bf2
|
[
"Apache-2.0"
] | 3,152
|
2016-09-21T23:05:58.000Z
|
2022-03-31T23:29:08.000Z
|
spytest/tests/routing/test_ip.py
|
shubav/sonic-mgmt
|
0ff71b907a55489bb4ed7d17b1682380fd459bf2
|
[
"Apache-2.0"
] | 563
|
2016-09-20T01:00:15.000Z
|
2022-03-31T22:43:54.000Z
|
import random
import math
import re
import pytest
from spytest import st, tgapi, SpyTestDict
from spytest.utils import random_vlan_list
import apis.routing.ip as ipfeature
import apis.switching.vlan as vlan_obj
import apis.switching.portchannel as pc_obj
import apis.system.basic as basic_obj
import apis.common.asic as asicapi
import apis.routing.bgp as bgpfeature
import apis.system.interface as intf_obj
import apis.routing.route_map as rmap_obj
import apis.switching.mac as mac_obj
import apis.routing.arp as arp_obj
# Testbed handle; replaced with the real topology object in ip_module_hooks.
vars = dict()
# Shared, attribute-addressable container of constants used by every test below.
data = SpyTestDict()
# Consecutive pairs are the two ends of one point-to-point link/subnet.
data.ip4_addr = ["192.168.1.1", "192.168.1.2", "192.168.2.1", "192.168.2.2", "192.168.3.1", "192.168.3.3",
                 "192.168.4.1", "192.168.4.2", "192.168.5.1", "192.168.5.2", "192.168.6.1", "192.168.6.2"]
data.ip4_addr_rt = ["192.168.1.0", "192.168.2.0", "192.168.3.0", "192.168.4.0", "192.168.5.0", "192.168.6.0"]
data.ip6_addr = ["2001::1", "2001::2", "3301::1", "3301::2", "4441::1", "4441::2", "5551::1", "5551::2", "6661::1",
                 "6661::2", "7771::1", "7771::2"]
data.ip6_addr_rt = ["2001::", "3301::", "4441::", "5551::", "6661::", "7771::"]
data.loopback_1 = ["11.11.11.1", "22.22.22.1", "33.33.33.1"]
data.loopback6_1 = ["7767:12::2", "6671:230f:12::f", "9109:2cd1:341::3"]
# Address-family selectors passed to the ipfeature APIs.
data.af_ipv4 = "ipv4"
data.af_ipv6 = "ipv6"
# Which CLI shell a command should be issued from.
data.shell_sonic = "sonic"
data.shell_vtysh = "vtysh"
# Two random, distinct-per-run VLAN ids and their interface names.
data.vlan_1 = str(random_vlan_list()[0])
data.vlan_2 = str(random_vlan_list()[0])
data.vlan_int_1 = "Vlan{}".format(data.vlan_1)
data.vlan_int_2 = "Vlan{}".format(data.vlan_2)
data.port_channel = "PortChannel100"
# Traffic-generator source MACs for the two emulated hosts.
data.tg_mac1 = "00:00:00:EA:23:0F"
data.tg_mac2 = "00:00:11:0A:45:33"
data.rate_pps = 2000
# "blackhole" is the FRR next-hop keyword that drops matching traffic.
data.static_ip6_rt_drop = "blackhole"
data.static_ip6_rt = "6661::/64"
data.static_ip_rt = "192.168.5.0/24"
data.as_num = 100
data.remote_as_num = 200
data.routemap = "preferGlobal"
data.wait_tgstats = 2
data.no_of_ports = 8
data.ipv4_mask = '24'
data.ipv6_mask = '96'
data.host1_mac="00:00:01:00:00:01"
data.host2_mac="00:00:02:00:00:02"
data.host1_vlan="100"
data.host2_vlan="101"
data.vlan1_ip="10.10.10.2"
data.vlan2_ip="10.10.11.3"
# NOTE(review): missing dot? This binds a module global `data_tg_ip` rather
# than setting data.tg_ip -- verify no caller depends on the current name
# before changing it.
data_tg_ip="10.10.10.1"
@pytest.fixture(scope="module", autouse=True)
def ip_module_hooks(request):
    """Module-scoped setup/teardown for the IP routing suite.

    Setup: verifies the minimum 2-DUT topology, grabs TG handles, then
    builds three parallel L3 paths between D1 and D2 (tagged VLAN
    interface, port-channel, plain routed port), addresses the TG-facing
    ports, and installs one IPv4 and one IPv6 static route on D1.
    Teardown (after yield): clears all IP/VLAN/port-channel configuration
    and deletes the static routes.
    """
    global vars, tg_handler, tg
    # Min topology verification
    st.log("Ensuring minimum topology")
    vars = st.ensure_min_topology("D1T1:4", "D2T1:2", "D1D2:4")
    # Initialize TG and TG port handlers
    tg_handler = tgapi.get_handles_byname("T1D1P1", "T1D1P2", "T1D2P1", "T1D2P2")
    tg = tg_handler["tg"]
    # IP module configuration
    st.log("Vlan routing configuration on D1D2P1,D2D1P1")
    vlan_obj.create_vlan(vars.D1, data.vlan_1)
    vlan_obj.add_vlan_member(vars.D1, data.vlan_1, [vars.D1D2P1], tagging_mode=True)
    vlan_obj.create_vlan(vars.D2, data.vlan_1)
    vlan_obj.add_vlan_member(vars.D2, data.vlan_1, [vars.D2D1P1], tagging_mode=True)
    # Path 1: VLAN routing interface (ip4_addr[2]/[3], ip6_addr[2]/[3]).
    ipfeature.config_ip_addr_interface(vars.D1, data.vlan_int_1, data.ip4_addr[2], 24, family=data.af_ipv4)
    ipfeature.config_ip_addr_interface(vars.D1, data.vlan_int_1, data.ip6_addr[2], 96, family=data.af_ipv6)
    ipfeature.config_ip_addr_interface(vars.D2, data.vlan_int_1, data.ip4_addr[3],24, family = data.af_ipv4)
    ipfeature.config_ip_addr_interface(vars.D2, data.vlan_int_1, data.ip6_addr[3], 96, family=data.af_ipv6)
    st.log("Port routing configuration on port-channel")
    # Path 2: 2-member port-channel (ip4_addr[4]/[5], ip6_addr[4]/[5]).
    data.dut1_pc_members = [vars.D1D2P2, vars.D1D2P3]
    data.dut2_pc_members = [vars.D2D1P2, vars.D2D1P3]
    pc_obj.create_portchannel(vars.D1, data.port_channel)
    pc_obj.add_portchannel_member(vars.D1, data.port_channel, data.dut1_pc_members)
    pc_obj.create_portchannel(vars.D2, data.port_channel)
    pc_obj.add_portchannel_member(vars.D2, data.port_channel, data.dut2_pc_members)
    ipfeature.config_ip_addr_interface(vars.D1, data.port_channel, data.ip4_addr[4], 24, family=data.af_ipv4)
    ipfeature.config_ip_addr_interface(vars.D2, data.port_channel, data.ip4_addr[5], 24, family=data.af_ipv4)
    ipfeature.config_ip_addr_interface(vars.D1, data.port_channel, data.ip6_addr[4], 96, family=data.af_ipv6)
    ipfeature.config_ip_addr_interface(vars.D2, data.port_channel, data.ip6_addr[5], 96, family=data.af_ipv6)
    st.log("port routing configuration on D1D2P4,D2D1P4")
    # Path 3: plain routed port (ip4_addr[6]/[7], ip6_addr[6]/[7]).
    ipfeature.config_ip_addr_interface(vars.D1, vars.D1D2P4, data.ip4_addr[6], 24, family=data.af_ipv4)
    ipfeature.config_ip_addr_interface(vars.D2, vars.D2D1P4, data.ip4_addr[7], 24, family=data.af_ipv4)
    ipfeature.config_ip_addr_interface(vars.D1, vars.D1D2P4, data.ip6_addr[6], 96, family=data.af_ipv6)
    ipfeature.config_ip_addr_interface(vars.D2, vars.D2D1P4, data.ip6_addr[7], 96, family=data.af_ipv6)
    st.log("configuring the dut1 ports connected to TGen with ip addresses")
    ipfeature.config_ip_addr_interface(vars.D1, vars.D1T1P1, data.ip4_addr[1], 24, family=data.af_ipv4)
    ipfeature.config_ip_addr_interface(vars.D1, vars.D1T1P2, data.ip6_addr[1], 96, family=data.af_ipv6)
    # Static routes on D1 pointing at D2's routed-port addresses.
    ipfeature.create_static_route(vars.D1, data.ip6_addr[7], data.static_ip6_rt, shell=data.shell_vtysh,
                                  family=data.af_ipv6)
    ipfeature.create_static_route(vars.D1, data.ip4_addr[7], data.static_ip_rt, shell=data.shell_vtysh,
                                  family=data.af_ipv4)
    st.log("configuring the dut2 ports connected to TGen with ip addresses")
    ipfeature.config_ip_addr_interface(vars.D2, vars.D2T1P1, data.ip4_addr[8], 24, family=data.af_ipv4)
    ipfeature.config_ip_addr_interface(vars.D2, vars.D2T1P2, data.ip6_addr[8], 96, family=data.af_ipv6)
    yield
    # Teardown: wipe everything the suite may have configured.
    ipfeature.clear_ip_configuration(st.get_dut_names())
    ipfeature.clear_ip_configuration(st.get_dut_names(), 'ipv6')
    vlan_obj.clear_vlan_configuration(st.get_dut_names())
    pc_obj.clear_portchannel_configuration(st.get_dut_names())
    # NOTE(review): deletes the v6 route via the blackhole next-hop, which only
    # exists if the blackhole test ran -- presumably best-effort cleanup.
    ipfeature.delete_static_route(vars.D1, data.ip4_addr[7], data.static_ip_rt, shell=data.shell_vtysh,
                                  family=data.af_ipv4)
    ipfeature.delete_static_route(vars.D1, data.static_ip6_rt_drop, data.static_ip6_rt, shell=data.shell_vtysh,
                                  family=data.af_ipv6)
@pytest.fixture(scope="function", autouse=True)
def ip_func_hooks(request):
    """Function-scoped autouse hook; placeholder with no per-test setup/teardown."""
    yield
def delete_bgp_router(dut, router_id, as_num):
    """
    Remove a BGP router instance from a DUT.

    :param dut: device handle to operate on
    :param router_id: BGP router-id of the instance being removed
    :param as_num: local autonomous-system number of the instance
    :return: None
    """
    st.log("delete bgp router info")
    # config='no' negates the router configuration rather than adding it.
    bgpfeature.config_bgp_router(dut, as_num, router_id=router_id, config='no')
def create_bgp_neighbor_route_map_config(dut, local_asn, neighbor_ip, routemap):
    """Create *routemap* (permit seq 10 with ipv6 next-hop prefer-global)
    and attach it to the given BGP neighbor in both directions."""
    route_map = rmap_obj.RouteMap(routemap)
    route_map.add_permit_sequence('10')
    route_map.add_sequence_set_ipv6_next_hop_prefer_global('10')
    route_map.execute_command(dut)
    # Apply the map inbound first, then outbound, exactly as before.
    for direction in ('in', 'out'):
        bgpfeature.config_bgp(dut, addr_family='ipv6', local_as=local_asn,
                              neighbor=neighbor_ip, routeMap=routemap,
                              diRection=direction, config='yes',
                              config_type_list=["routeMap"])
    return
def create_v4_route(route_count):
    """Advertise *route_count* IPv4 routes from the TG over eBGP, send a
    2000-packet burst toward them, and return True when the aggregate
    TX/RX packet counts match (False otherwise).
    """
    vars = st.get_testbed_vars()
    dut = vars.D1
    # Debug snapshots of routing/interface state before configuration.
    ipfeature.show_ip_route(dut)
    ipfeature.get_interface_ip_address(dut)
    intf_obj.interface_status_show(dut)
    # DUT side of the eBGP session (local AS 100, peer at ip4_addr[0]).
    bgpfeature.create_bgp_router(dut, data.as_num, '')
    bgpfeature.create_bgp_neighbor(dut, data.as_num, data.ip4_addr[0], data.remote_as_num)
    tg_handler = tgapi.get_handles_byname("T1D1P1", "T1D2P1")
    tg = tg_handler["tg"]
    tg.tg_traffic_control(action="reset", port_handle=tg_handler["tg_ph_list"])
    tg.tg_traffic_control(action="clear_stats", port_handle=tg_handler["tg_ph_list"])
    dut_rt_int_mac1 = basic_obj.get_ifconfig_ether(vars.D1, vars.D1T1P1)
    # Two emulated hosts: h1 behind D1 (traffic source), h2 behind D2 (sink).
    h1 = tg.tg_interface_config(port_handle=tg_handler["tg_ph_1"], mode='config', intf_ip_addr=data.ip4_addr[0], \
                                gateway=data.ip4_addr[1], src_mac_addr=data.tg_mac1, arp_send_req='1')
    st.log("INTFCONF: " + str(h1))
    h2 = tg.tg_interface_config(port_handle=tg_handler["tg_ph_2"], mode='config', intf_ip_addr=data.ip4_addr[9], \
                                gateway=data.ip4_addr[8], src_mac_addr=data.tg_mac2, arp_send_req='1')
    st.log("INTFCONF: " + str(h2))
    # Ping from tgen to DUT.
    res = tgapi.verify_ping(src_obj=tg, port_handle=tg_handler["tg_ph_1"], dev_handle=h1['handle'], dst_ip=data.ip4_addr[1], \
                            ping_count='1', exp_count='1')
    if res:
        st.log("Ping succeeded.")
    else:
        st.warn("Ping failed.")
    # TG-side BGP peer config: AS 200 peering with the DUT at ip4_addr[1].
    conf_var = { 'mode' : 'enable',
                 'active_connect_enable' : '1',
                 'local_as' : '200',
                 'remote_as' : '100',
                 'remote_ip_addr' : data.ip4_addr[1]
                 }
    route_var = { 'mode' : 'add',
                  'num_routes' : route_count,
                  'prefix' : '121.1.1.0',
                  'as_path' : 'as_seq:1'
                  }
    ctrl_start = { 'mode' : 'start'}
    # Configuring the BGP router.
    bgp_rtr1 = tgapi.tg_bgp_config(tg = tg,
                                   handle = h1['handle'],
                                   conf_var = conf_var,
                                   route_var = route_var,
                                   ctrl_var = ctrl_start)
    st.log("BGP_HANDLE: "+str(bgp_rtr1))
    st.log("waiting for 10 sec to get the BGP neighbor started before going for another TG operation")
    st.wait(10)
    # Verified at neighbor.
    # Single 2000-packet burst from h1 toward h2's address.
    tr1 = tg.tg_traffic_config(port_handle=tg_handler["tg_ph_1"], mode='create', transmit_mode='single_burst',
                               pkts_per_burst=2000, \
                               length_mode='fixed', rate_pps=2000, l3_protocol='ipv4', mac_src=data.tg_mac1, \
                               mac_dst=dut_rt_int_mac1, ip_src_addr=data.ip4_addr[0],
                               ip_dst_addr=data.ip4_addr[9])
    st.log("TRAFCONF: " + str(tr1))
    res = tg.tg_traffic_control(action='run', stream_handle=tr1['stream_id'])
    st.log("TR_CTRL: " + str(res))
    tg.tg_traffic_control(action='stop', stream_handle=tr1['stream_id'])
    st.log("Checking the stats and verifying the traffic flow")
    traffic_details = {
        '1': {
            'tx_ports' : [vars.T1D1P1],
            'tx_obj' : [tg_handler["tg"]],
            'exp_ratio' : [1],
            'rx_ports' : [vars.T1D2P1],
            'rx_obj' : [tg_handler["tg"]],
            }
        }
    #verify statistics
    aggrResult = tgapi.validate_tgen_traffic(traffic_details=traffic_details, mode='aggregate', comp_type='packet_count')
    if not aggrResult:
        return False
    return True
def test_l3_v4_route_po_1():
    """Install 30k IPv4 BGP routes from the TG and verify forwarded traffic."""
    device = vars.D1
    # ASIC-level debug dumps before the traffic run.
    for dump in (asicapi.dump_vlan, asicapi.dump_l2, asicapi.dump_trunk):
        dump(device)
    if create_v4_route(30000):
        st.report_pass("test_case_passed")
    else:
        st.report_fail("test_case_failed")
def create_v6_route(route_count):
    """IPv6 twin of create_v4_route: advertise *route_count* IPv6 routes
    from the TG over eBGP (with the prefer-global route-map applied) and
    verify a 2000-packet burst is forwarded; return True/False.
    """
    vars = st.get_testbed_vars()
    dut = vars.D1
    ipfeature.show_ip_route(dut, family='ipv6')
    ipfeature.get_interface_ip_address(dut, family='ipv6')
    bgpfeature.create_bgp_router(dut, data.as_num, '')
    bgpfeature.create_bgp_neighbor(dut, data.as_num, data.ip6_addr[0], data.remote_as_num, family="ipv6")
    # Apply the preferGlobal route-map in both directions on the neighbor.
    create_bgp_neighbor_route_map_config(dut, data.as_num, data.ip6_addr[0], data.routemap)
    tg_handler = tgapi.get_handles_byname("T1D1P2", "T1D2P2")
    tg = tg_handler["tg"]
    tg.tg_traffic_control(action="reset", port_handle=tg_handler["tg_ph_list"])
    tg.tg_traffic_control(action="clear_stats", port_handle=tg_handler["tg_ph_list"])
    # NOTE(review): MAC is read from D1T1P1 but v6 traffic uses the P2 ports --
    # confirm this is intentional.
    dut_rt_int_mac1 = basic_obj.get_ifconfig_ether(vars.D1, vars.D1T1P1)
    h1 = tg.tg_interface_config(port_handle=tg_handler["tg_ph_1"], mode='config', ipv6_intf_addr=data.ip6_addr[0], \
                                ipv6_prefix_length='64', ipv6_gateway=data.ip6_addr[1],
                                src_mac_addr=data.tg_mac1, arp_send_req='1')
    st.log("INTFCONF: " + str(h1))
    h2 = tg.tg_interface_config(port_handle=tg_handler["tg_ph_2"], mode='config', ipv6_intf_addr=data.ip6_addr[9], \
                                ipv6_prefix_length='64', ipv6_gateway=data.ip6_addr[8],
                                src_mac_addr=data.tg_mac2, arp_send_req='1')
    st.log("INTFCONF: " + str(h2))
    # Ping from tgen to DUT.
    res = tgapi.verify_ping(src_obj=tg, port_handle=tg_handler["tg_ph_1"], dev_handle=h1['handle'], dst_ip=data.ip6_addr[1], \
                            ping_count='1', exp_count='1')
    if res:
        st.log("Ping succeeded.")
    else:
        st.warn("Ping failed.")
    # TG-side BGP peer and the route block to advertise.
    bgp_conf=tg.tg_emulation_bgp_config(handle=h1['handle'], mode='enable', ip_version='6',
                                        active_connect_enable='1', local_as=data.as_num, remote_as=data.remote_as_num, remote_ipv6_addr=data.ip6_addr[1])
    tg.tg_emulation_bgp_route_config(handle=bgp_conf['handle'], mode='add', ip_version='6',
                                     num_routes=route_count, prefix='3300:1::', as_path='as_seq:1')
    tg.tg_emulation_bgp_control(handle=bgp_conf['handle'], mode='start')
    # Configuring the BGP router.
    st.log("BGP neighborship established.")
    tr1 = tg.tg_traffic_config(port_handle=tg_handler["tg_ph_1"], mode='create', transmit_mode='single_burst',
                               pkts_per_burst=2000, \
                               length_mode='fixed', rate_pps=2000, l3_protocol='ipv6', mac_src=data.tg_mac1, \
                               mac_dst=dut_rt_int_mac1, ipv6_src_addr=data.ip6_addr[0],
                               ipv6_dst_addr=data.ip6_addr[9])
    st.log("TRAFCONF: " + str(tr1))
    res = tg.tg_traffic_control(action='run', stream_handle=tr1['stream_id'])
    st.log("TR_CTRL: " + str(res))
    tg.tg_traffic_control(action='stop', stream_handle=tr1['stream_id'])
    st.log("Checking the stats and verifying the traffic flow")
    traffic_details = {
        '1': {
            'tx_ports' : [vars.T1D1P2],
            'tx_obj' : [tg_handler["tg"]],
            'exp_ratio' : [1],
            'rx_ports' : [vars.T1D2P2],
            'rx_obj' : [tg_handler["tg"]],
            }
        }
    # verify statistics
    aggrResult = tgapi.validate_tgen_traffic(traffic_details=traffic_details, mode='aggregate', comp_type='packet_count')
    if not aggrResult:
        return False
    else:
        return True
def test_l3_v6_route_po_1():
    """Install 30k IPv6 BGP routes from the TG and verify forwarded traffic."""
    device = vars.D1
    # ASIC-level debug dumps before the traffic run.
    for dump in (asicapi.dump_vlan, asicapi.dump_l2, asicapi.dump_trunk):
        dump(device)
    if create_v6_route(30000):
        st.report_pass("test_case_passed")
    else:
        st.report_fail("test_case_failed")
@pytest.mark.ip_basic_ping
@pytest.mark.community
@pytest.mark.community_fail
def test_ft_ping_v4_v6_vlan():
    """Verify IPv4 and IPv6 ping succeed across the VLAN routing interfaces."""
    d1, d2 = vars.D1, vars.D2
    st.log("Checking IPv4 ping from {} to {} over vlan routing interface".format(d1, d2))
    v4_ok = ipfeature.ping(d1, data.ip4_addr[3], family=data.af_ipv4, count=1)
    if not v4_ok:
        st.report_fail("ping_fail", data.ip4_addr[2], data.ip4_addr[3])
    st.log("Checking IPv6 ping from {} to {} over vlan routing interface".format(d1, d2))
    v6_ok = ipfeature.ping(d2, data.ip6_addr[2], family=data.af_ipv6, count=1)
    if not v6_ok:
        st.report_fail("ping_fail", data.ip6_addr[3], data.ip6_addr[2])
    st.report_pass("test_case_passed")
@pytest.mark.ip_basic_ping
@pytest.mark.community
@pytest.mark.community_pass
def test_ft_ping__v4_v6_after_ip_change_pc():
    # Objective - Verify that ping is successful between L3 interfaces when Ip address is removed and new ip
    # is assigned
    # Precondition: the port-channel built by ip_module_hooks must be up.
    st.log("In {} check portchannel is UP or not".format(vars.D2))
    if not pc_obj.verify_portchannel_state(vars.D2, data.port_channel, state="up"):
        st.report_fail("portchannel_state_fail", data.port_channel, vars.D2, "Up")
    # Baseline reachability with the original addresses (indices 4/5).
    st.log("Checking IPv4 ping from {} to {} over portchannel routing interface".format(vars.D1, vars.D2))
    if not ipfeature.ping_poll(vars.D1, data.ip4_addr[5], family=data.af_ipv4, iter=5, count=1):
        st.report_fail("ping_fail",data.ip4_addr[4], data.ip4_addr[5])
    st.log("Checking IPv6 ping from {} to {} over portchannel routing interface".format(vars.D1, vars.D2))
    if not ipfeature.ping_poll(vars.D2, data.ip6_addr[4], family=data.af_ipv6, iter=5, count=1):
        st.report_fail("ping_fail",data.ip6_addr[5], data.ip6_addr[4])
    st.log("Removing the Ipv4 address on portchannel")
    ipfeature.delete_ip_interface(vars.D1, data.port_channel, data.ip4_addr[4],24, family = data.af_ipv4)
    ipfeature.delete_ip_interface(vars.D2, data.port_channel, data.ip4_addr[5], 24, family = data.af_ipv4)
    st.log("Removing the Ipv6 address on portchannel")
    ipfeature.delete_ip_interface(vars.D1, data.port_channel, data.ip6_addr[4], 96, family = data.af_ipv6)
    ipfeature.delete_ip_interface(vars.D2, data.port_channel, data.ip6_addr[5], 96, family = data.af_ipv6)
    # Re-address the port-channel with a fresh subnet (indices 10/11).
    st.log("configuring new Ipv4 address on portchannel")
    ipfeature.config_ip_addr_interface(vars.D1, data.port_channel, data.ip4_addr[10], 24, family=data.af_ipv4)
    ipfeature.config_ip_addr_interface(vars.D2, data.port_channel, data.ip4_addr[11], 24, family=data.af_ipv4)
    st.log("configuring new Ipv6 address on portchannel")
    ipfeature.config_ip_addr_interface(vars.D1, data.port_channel, data.ip6_addr[10],
                                       96, family = data.af_ipv6)
    ipfeature.config_ip_addr_interface(vars.D2, data.port_channel, data.ip6_addr[11], 96, family=data.af_ipv6)
    # Reachability must recover with the new addresses.
    st.log("After Ipv4 address change, checking IPv4 ping from {} to {} over portchannel "
           "routing interface".format(vars.D1, vars.D2))
    if not ipfeature.ping_poll(vars.D1, data.ip4_addr[11], family=data.af_ipv4, iter=5, count=1):
        st.report_fail("ping_fail",data.ip4_addr[10],data.ip4_addr[11])
    st.log("After Ipv6 address change, checking IPv6 ping from {} to {} over portchannel "
           "routing interface".format(vars.D1, vars.D2))
    if not ipfeature.ping_poll(vars.D1, data.ip6_addr[11], family=data.af_ipv6, iter=5, count=1):
        st.report_fail("ping_fail",data.ip6_addr[10], data.ip6_addr[11])
    st.report_pass("test_case_passed")
@ pytest.mark.ip6_basic
def test_ft_ip6_static_route_traffic_forward_blackhole():
    # Objective - Verify the Ipv6 traffic forwarding over static route.
    # Phase 1: traffic following the v6 static route must be forwarded.
    tg_handler = tgapi.get_handles_byname("T1D1P2", "T1D2P2")
    tg = tg_handler["tg"]
    tg.tg_traffic_control(action="reset", port_handle=tg_handler["tg_ph_list"])
    tg.tg_traffic_control(action="clear_stats", port_handle=tg_handler["tg_ph_list"])
    dut_rt_int_mac1 = basic_obj.get_ifconfig_ether(vars.D1, vars.D1T1P2)
    h1 = tg.tg_interface_config(port_handle=tg_handler["tg_ph_1"], mode='config', ipv6_intf_addr=data.ip6_addr[0], \
                                ipv6_prefix_length='64', ipv6_gateway=data.ip6_addr[1],
                                src_mac_addr=data.tg_mac1, arp_send_req='1')
    st.log("INTFCONF: " + str(h1))
    h2 = tg.tg_interface_config(port_handle=tg_handler["tg_ph_2"], mode='config', ipv6_intf_addr=data.ip6_addr[9], \
                                ipv6_prefix_length='64', ipv6_gateway=data.ip6_addr[8],
                                src_mac_addr=data.tg_mac2, arp_send_req='1')
    st.log("INTFCONF: " + str(h2))
    # Ping from tgen to DUT.
    res = tgapi.verify_ping(src_obj=tg, port_handle=tg_handler["tg_ph_1"], dev_handle=h1['handle'], dst_ip=data.ip6_addr[1], \
                            ping_count='1', exp_count='1')
    if res:
        st.log("Ping succeeded.")
    else:
        st.warn("Ping failed.")
    # Burst of 2000 IPv6 packets whose destination matches the static route.
    tr1 = tg.tg_traffic_config(port_handle=tg_handler["tg_ph_1"], mode='create', transmit_mode='single_burst',
                               pkts_per_burst=2000, \
                               length_mode='fixed', rate_pps=2000, l3_protocol='ipv6', mac_src=data.tg_mac1, \
                               mac_dst=dut_rt_int_mac1, ipv6_src_addr=data.ip6_addr[0],
                               ipv6_dst_addr=data.ip6_addr[9])
    st.log("TRAFCONF: " + str(tr1))
    res = tg.tg_traffic_control(action='run', stream_handle=tr1['stream_id'])
    st.log("TR_CTRL: " + str(res))
    tg.tg_traffic_control(action='stop', stream_handle=tr1['stream_id'])
    st.log("Checking the stats and verifying the traffic flow")
    traffic_details = {
        '1': {
            'tx_ports' : [vars.T1D1P2],
            'tx_obj' : [tg_handler["tg"]],
            'exp_ratio' : [1],
            'rx_ports' : [vars.T1D2P2],
            'rx_obj' : [tg_handler["tg"]],
            }
        }
    # verify statistics
    aggrResult = tgapi.validate_tgen_traffic(traffic_details=traffic_details, mode='aggregate', comp_type='packet_count')
    if not aggrResult:
        st.report_fail("traffic_verification_failed")
    # Phase 2: swap the route's next-hop for "blackhole" and expect drops.
    ipfeature.delete_static_route(vars.D1, data.ip6_addr[7], data.static_ip6_rt, shell=data.shell_vtysh,
                                  family=data.af_ipv6)
    st.log("Create a static route with nexthop as blackhole")
    ipfeature.create_static_route(vars.D1, data.static_ip6_rt_drop, data.static_ip6_rt, shell=data.shell_vtysh,
                                  family=data.af_ipv6)
    tg.tg_traffic_control(action="clear_stats", port_handle=tg_handler["tg_ph_list"])
    res = tg.tg_traffic_control(action='run', stream_handle=tr1['stream_id'])
    st.log("TR_CTRL: " + str(res))
    tg.tg_traffic_control(action='stop', stream_handle=tr1['stream_id'])
    st.log("Checking the stats and verifying the traffic flow")
    traffic_details = {
        '1': {
            'tx_ports' : [vars.T1D1P2],
            'tx_obj' : [tg_handler["tg"]],
            'exp_ratio' : [1],
            'rx_ports' : [vars.T1D2P2],
            'rx_obj' : [tg_handler["tg"]],
            }
        }
    # verify statistics
    aggrResult = tgapi.validate_tgen_traffic(traffic_details=traffic_details, mode='aggregate', comp_type='packet_count')
    # Inverted check on purpose: with the blackhole route, matching traffic
    # must NOT be forwarded, so a successful flow is a failure here.
    if aggrResult:
        st.report_fail("traffic_verification_failed")
    st.report_pass("test_case_passed")
@pytest.mark.ip_basic13
def test_ft_ip_static_route_traffic_forward():
    # Objective - Verify the Ipv4 traffic forwarding over IPv4 static route.
    tg_handler = tgapi.get_handles_byname("T1D1P1", "T1D2P1")
    tg = tg_handler["tg"]
    tg.tg_traffic_control(action="reset", port_handle=tg_handler["tg_ph_list"])
    tg.tg_traffic_control(action="clear_stats", port_handle=tg_handler["tg_ph_list"])
    dut_rt_int_mac1 = basic_obj.get_ifconfig_ether(vars.D1, vars.D1T1P1)
    # Emulated hosts: h1 behind D1 (source), h2 behind D2 (sink).
    h1 = tg.tg_interface_config(port_handle=tg_handler["tg_ph_1"], mode='config', intf_ip_addr=data.ip4_addr[0], \
                                gateway=data.ip4_addr[1], src_mac_addr=data.tg_mac1, arp_send_req='1')
    st.log("INTFCONF: " + str(h1))
    h2 = tg.tg_interface_config(port_handle=tg_handler["tg_ph_2"], mode='config', intf_ip_addr=data.ip4_addr[9], \
                                gateway=data.ip4_addr[8], src_mac_addr=data.tg_mac2, arp_send_req='1')
    st.log("INTFCONF: " + str(h2))
    # Ping from tgen to DUT.
    res = tgapi.verify_ping(src_obj=tg, port_handle=tg_handler["tg_ph_1"], dev_handle=h1['handle'], dst_ip=data.ip4_addr[1], \
                            ping_count='1', exp_count='1')
    if res:
        st.log("Ping succeeded.")
    else:
        st.warn("Ping failed.")
    # 2000-packet burst whose destination is covered by the v4 static route.
    tr1 = tg.tg_traffic_config(port_handle=tg_handler["tg_ph_1"], mode='create', transmit_mode='single_burst',
                               pkts_per_burst=2000, \
                               length_mode='fixed', rate_pps=2000, l3_protocol='ipv4', mac_src=data.tg_mac1, \
                               mac_dst=dut_rt_int_mac1, ip_src_addr=data.ip4_addr[0],
                               ip_dst_addr=data.ip4_addr[9])
    st.log("TRAFCONF: " + str(tr1))
    res = tg.tg_traffic_control(action='run', stream_handle=tr1['stream_id'])
    st.log("TR_CTRL: " + str(res))
    tg.tg_traffic_control(action='stop', stream_handle=tr1['stream_id'])
    st.log("Checking the stats and verifying the traffic flow")
    traffic_details = {
        '1': {
            'tx_ports' : [vars.T1D1P1],
            'tx_obj' : [tg_handler["tg"]],
            'exp_ratio' : [1],
            'rx_ports' : [vars.T1D2P1],
            'rx_obj' : [tg_handler["tg"]],
            }
        }
    #verify statistics
    aggrResult = tgapi.validate_tgen_traffic(traffic_details=traffic_details, mode='aggregate', comp_type='packet_count')
    if not aggrResult:
        st.report_fail("traffic_verification_failed")
    st.report_pass("test_case_passed")
@pytest.mark.ip_basic_L2_L3_translation
def test_ft_ip_v4_v6_L2_L3_translation():
    # Objective - Verify that L2 port to IPv4 L3 port transition and vice-versa is successful.
    # Baseline: L3 reachability over the routed port (indices 6/7).
    st.log("Checking IPv4 ping from {} to {} over routing interface".format(vars.D1, vars.D2))
    if not ipfeature.ping(vars.D1, data.ip4_addr[7], family=data.af_ipv4, count=1):
        st.report_fail("ping_fail",data.ip4_addr[6], data.ip4_addr[7])
    st.log("Checking IPv6 ping from {} to {} over vlan routing interface".format(vars.D1, vars.D2))
    if not ipfeature.ping(vars.D2, data.ip6_addr[6], family=data.af_ipv6, count=1):
        st.report_fail("ping_fail",data.ip6_addr[7], data.ip6_addr[6])
    st.log("L3 to L2 port transition")
    st.log("Removing ipv4,ipv6 address from interface")
    ipfeature.delete_ip_interface(vars.D1, vars.D1D2P4, data.ip4_addr[6], 24, family=data.af_ipv4)
    ipfeature.delete_ip_interface(vars.D2, vars.D2D1P4, data.ip4_addr[7], 24, family=data.af_ipv4)
    ipfeature.delete_ip_interface(vars.D1, vars.D1D2P4, data.ip6_addr[6], 96, family=data.af_ipv6)
    ipfeature.delete_ip_interface(vars.D2, vars.D2D1P4, data.ip6_addr[7], 96, family=data.af_ipv6)
    ipfeature.delete_ip_interface(vars.D1, vars.D1T1P1, data.ip4_addr[1], 24, family=data.af_ipv4)
    ipfeature.delete_ip_interface(vars.D2, vars.D2T1P1, data.ip4_addr[8], 24, family=data.af_ipv4)
    st.log("Removing the static routes")
    ipfeature.delete_static_route(vars.D1, data.ip4_addr[7], data.static_ip_rt, shell=data.shell_vtysh, family=data.af_ipv4)
    ipfeature.delete_static_route(vars.D1, data.static_ip6_rt_drop, data.static_ip6_rt, shell=data.shell_vtysh, family=data.af_ipv6)
    st.log("Vlan creation and port association configuration")
    # Turn the same ports into tagged members of a fresh L2 VLAN.
    vlan_obj.create_vlan(vars.D1, data.vlan_2)
    st.log("Adding back to back connecting ports to vlan {}".format(data.vlan_2))
    vlan_obj.add_vlan_member(vars.D1, data.vlan_2, [vars.D1D2P4], tagging_mode=True)
    vlan_obj.create_vlan(vars.D2, data.vlan_2)
    vlan_obj.add_vlan_member(vars.D2, data.vlan_2, [vars.D2D1P4], tagging_mode=True)
    st.log("Adding TG connecting ports to vlan {}".format(data.vlan_1))
    vlan_obj.add_vlan_member(vars.D1, data.vlan_2, vars.D1T1P1, tagging_mode=True)
    vlan_obj.add_vlan_member(vars.D2, data.vlan_2, vars.D2T1P1, tagging_mode=True)
    tg_handler = tgapi.get_handles_byname("T1D1P1", "T1D2P1")
    tg.tg_traffic_control(action="reset", port_handle=tg_handler["tg_ph_list"])
    tg.tg_traffic_control(action="clear_stats", port_handle=tg_handler["tg_ph_list"])
    # Pure L2 tagged burst from TG port 2 toward port 1 through the VLAN.
    tr2 = tg.tg_traffic_config(port_handle=tg_handler["tg_ph_2"], mode='create', rate_pps="2000",
                               mac_src_mode="fixed",
                               transmit_mode="single_burst", pkts_per_burst=2000,
                               length_mode='fixed', l2_encap='ethernet_ii_vlan',
                               vlan_id=data.vlan_2, mac_dst_mode="fixed",
                               vlan="enable",
                               mac_src="00:a1:bb:cc:dd:01",
                               mac_dst="00:b1:bb:cc:dd:01")
    st.log("TRAFCONF: " + str(tr2))
    res = tg.tg_traffic_control(action='run', stream_handle=tr2['stream_id'])
    tg.tg_traffic_control(action='stop', stream_handle=tr2['stream_id'])
    st.wait(data.wait_tgstats)
    st.log("TR_CTRL: " + str(res))
    st.log("Fetching TGen statistics")
    stats_tg1 = tgapi.get_traffic_stats(tg_handler["tg"], mode="aggregate", port_handle=tg_handler["tg_ph_2"])
    total_tx_tg1 = stats_tg1.tx.total_packets
    stats_tg2 = tgapi.get_traffic_stats(tg_handler["tg"], mode="aggregate", port_handle=tg_handler["tg_ph_1"])
    total_rx_tg2 = stats_tg2.rx.total_packets
    st.log("total_tx_tg1 = {}".format(total_tx_tg1))
    # Pass criterion: at least 95% of the transmitted frames were received.
    total_tx_tg1_95_percentage = int(total_tx_tg1) * 0.95
    st.log("total_tx_tg1_95_percentage= {}".format(total_tx_tg1_95_percentage))
    st.log("total_rx_tg2 = {}".format(total_rx_tg2))
    if int(total_tx_tg1_95_percentage) > int(total_rx_tg2):
        st.report_fail("traffic_verification_failed")
    st.log("Removing vlan configuration")
    vlan_obj.delete_vlan_member(vars.D1, data.vlan_2, [vars.D1D2P4, vars.D1T1P1], True)
    vlan_obj.delete_vlan_member(vars.D2, data.vlan_2, [vars.D2D1P4, vars.D2T1P1], True)
    st.log("L2 to L3 port transition")
    # Restore the original L3 addressing/static routes and re-verify pings.
    ipfeature.config_ip_addr_interface(vars.D1, vars.D1D2P4, data.ip4_addr[6], 24, family=data.af_ipv4)
    ipfeature.config_ip_addr_interface(vars.D2, vars.D2D1P4, data.ip4_addr[7], 24, family=data.af_ipv4)
    ipfeature.create_static_route(vars.D1, data.ip4_addr[7], data.static_ip_rt, shell=data.shell_vtysh, family=data.af_ipv4)
    st.log("Checking IPv4 ping from {} to {} over routing interface".format(vars.D1, vars.D2))
    if not ipfeature.ping(vars.D1, data.ip4_addr[7], family=data.af_ipv4, count=1):
        st.report_fail("ping_fail",data.ip4_addr[6], data.ip4_addr[7])
    ipfeature.config_ip_addr_interface(vars.D1, vars.D1D2P4, data.ip6_addr[6], 96, family=data.af_ipv6)
    ipfeature.config_ip_addr_interface(vars.D2, vars.D2D1P4, data.ip6_addr[7], 96, family=data.af_ipv6)
    ipfeature.create_static_route(vars.D1, data.static_ip6_rt_drop, data.static_ip6_rt, shell=data.shell_vtysh, family=data.af_ipv6)
    st.log("Checking IPv6 ping from {} to {} over vlan routing interface".format(vars.D1, vars.D2))
    if not ipfeature.ping(vars.D2, data.ip6_addr[6], family=data.af_ipv6, count=1):
        st.report_fail("ping_fail",data.ip6_addr[7], data.ip6_addr[6])
    st.report_pass("test_case_passed")
@pytest.mark.community
@pytest.mark.community_pass
def test_ft_verify_interfaces_order():
    '''
    @author: Ramprakash Reddy (ramprakash-reddy.kanala@broadcom.com)
    ipv4_intf_order : Verify order of interfaces in "show ip interfaces"
    ipv6_intf_order : Verify order of interfaces in "show ipv6 interfaces'
    Verify order of interfaces in "show ip/ipv6 interfaces" in sorted order or not
    :return:
    '''
    def _interfaces_sorted(output):
        # Natural-sort check shared by the v4 and v6 verifications: split
        # names into text/number runs so e.g. Ethernet2 < Ethernet10.
        names = [each['interface'] for each in output]
        to_int = lambda text: int(text) if text.isdigit() else text
        natural_key = lambda key: [to_int(c) for c in re.split('([0-9]+)', key)]
        return names == sorted(names, key=natural_key)

    flag = 1
    st.log("This test is to ensure that interfaces are listed in sorted order by 'interface name' in 'show ip/ipv6 "
           "interfaces'")
    free_ports = st.get_free_ports(vars.D1)
    if len(free_ports) < data.no_of_ports:
        data.no_of_ports = len(free_ports)
    req_ports = random.sample(free_ports, data.no_of_ports)
    ipv4_addr = data.ip4_addr[11] + '/' + data.ipv4_mask
    ipv6_addr = data.ip6_addr[0] + '/' + data.ipv6_mask
    # First ceil(n/2) sampled ports get IPv4, the remaining floor(n/2) get IPv6.
    v4_count = int(math.ceil(float(data.no_of_ports) / 2))
    v6_count = int(math.floor(float(data.no_of_ports) / 2))
    for i in range(v4_count):
        _, ipv4_addr = ipfeature.increment_ip_addr(ipv4_addr, "network")
        ipfeature.config_ip_addr_interface(vars.D1, interface_name=req_ports[i], ip_address=ipv4_addr.split('/')[0],
                                           subnet=data.ipv4_mask, family="ipv4")
    for i in range(v6_count):
        _, ipv6_addr = ipfeature.increment_ip_addr(ipv6_addr, "network", family="ipv6")
        ipfeature.config_ip_addr_interface(vars.D1, interface_name=req_ports[i + v4_count],
                                           ip_address=ipv6_addr.split('/')[0], subnet=data.ipv6_mask, family="ipv6")
    if _interfaces_sorted(ipfeature.get_interface_ip_address(vars.D1)):
        st.log("Ipv4 interfaces are in sorted order")
    else:
        # (fixed typo: message previously read "soretd")
        st.error("Ipv4 interfaces are not in sorted order")
        flag = 0
    if _interfaces_sorted(ipfeature.get_interface_ip_address(vars.D1, family="ipv6")):
        st.log("Ipv6 interfaces are in sorted order")
    else:
        st.error("Ipv6 interfaces are not in sorted order")
        flag = 0
    # Unconfig: regenerate the exact same address sequence and delete it.
    ipv4_addr = data.ip4_addr[11] + '/' + data.ipv4_mask
    ipv6_addr = data.ip6_addr[0] + '/' + data.ipv6_mask
    for i in range(v4_count):
        _, ipv4_addr = ipfeature.increment_ip_addr(ipv4_addr, "network")
        ipfeature.delete_ip_interface(vars.D1, interface_name=req_ports[i], ip_address=ipv4_addr.split('/')[0],
                                      subnet=data.ipv4_mask, family="ipv4")
    for i in range(v6_count):
        _, ipv6_addr = ipfeature.increment_ip_addr(ipv6_addr, "network", family="ipv6")
        ipfeature.delete_ip_interface(vars.D1, interface_name=req_ports[i + v4_count],
                                      ip_address=ipv6_addr.split('/')[0], subnet=data.ipv6_mask, family="ipv6")
    if flag == 0:
        st.report_fail("test_case_failed")
    st.report_pass("test_case_passed")
@pytest.fixture(scope="function")
def ceta_31902_fixture(request,ip_module_hooks):
    """Set up the VLAN/IP topology for test_Ceta_31902 and restore it on teardown."""
    # Remove the IPv6 address from D1T1P2 so the port can serve as a tagged VLAN member.
    ipfeature.config_ip_addr_interface(vars.D1, vars.D1T1P2, data.ip6_addr[1],
                                       96, family=data.af_ipv6,config="remove")
    # Two VLANs, each carrying the same two TG-facing ports as tagged members.
    vlan_obj.create_vlan(vars.D1, [data.host1_vlan,data.host2_vlan])
    vlan_obj.add_vlan_member(vars.D1, data.host1_vlan, [vars.D1T1P1,vars.D1T1P2], tagging_mode=True)
    vlan_obj.add_vlan_member(vars.D1, data.host2_vlan, [vars.D1T1P1, vars.D1T1P2], tagging_mode=True)
    # One IPv4 address per VLAN routing interface.
    ipfeature.config_ip_addr_interface(vars.D1, "Vlan"+data.host1_vlan, data.vlan1_ip, 24, family=data.af_ipv4)
    ipfeature.config_ip_addr_interface(vars.D1, "Vlan" + data.host2_vlan, data.vlan2_ip, 24, family=data.af_ipv4)
    yield
    # Teardown: undo the setup in reverse — remove VLAN IPs and members, delete the
    # VLANs, then re-add the IPv6 address stripped from D1T1P2 above.
    ipfeature.delete_ip_interface(vars.D1, "Vlan"+data.host1_vlan, data.vlan1_ip, "24", family="ipv4")
    ipfeature.delete_ip_interface(vars.D1, "Vlan"+data.host2_vlan, data.vlan2_ip, "24", family="ipv4")
    vlan_obj.delete_vlan_member(vars.D1, data.host1_vlan, [vars.D1T1P1, vars.D1T1P2], True)
    vlan_obj.delete_vlan_member(vars.D1, data.host2_vlan, [vars.D1T1P1, vars.D1T1P2], True)
    vlan_obj.delete_vlan(vars.D1, [data.host1_vlan,data.host2_vlan])
    ipfeature.config_ip_addr_interface(vars.D1, vars.D1T1P2, data.ip6_addr[1], 96, family=data.af_ipv6)
def test_Ceta_31902(ceta_31902_fixture):
    """Verify MAC and ARP tables stay in sync after churning a static MAC/ARP
    entry across VLAN interfaces.

    Programs static MAC and ARP entries for two hosts, repeatedly moves
    host2's ARP entry between the two VLAN interfaces, then compares the port
    reported for host2's MAC by the ASIC "l2 show" and "l3 egress show"
    outputs.
    """
    success=True
    # Learn host1's MAC statically on port 1 and bind ARP entries for the
    # tester IP on both VLAN interfaces.
    mac_obj.config_mac(dut=vars.D1,mac=data.host1_mac, vlan=data.host1_vlan, intf=vars.D1T1P1)
    arp_obj.add_static_arp(dut=vars.D1, ipaddress=data_tg_ip, macaddress=data.host1_mac,
                           interface="Vlan"+data.host1_vlan)
    arp_obj.add_static_arp(dut=vars.D1, ipaddress=data_tg_ip, macaddress=data.host2_mac,
                           interface="Vlan"+data.host2_vlan)
    # Churn host2's ARP entry: delete it from vlan2, re-add on vlan1, delete again.
    arp_obj.delete_static_arp(dut=vars.D1, ipaddress=data_tg_ip, interface="Vlan"+data.host2_vlan, mac=data.host2_mac)
    arp_obj.add_static_arp(dut=vars.D1, ipaddress=data_tg_ip, macaddress=data.host2_mac,
                           interface="Vlan"+data.host1_vlan)
    arp_obj.delete_static_arp(dut=vars.D1, ipaddress=data_tg_ip, interface="Vlan"+data.host1_vlan, mac=data.host2_mac)
    # Move host2's MAC to port 2 on vlan1, re-add its ARP entry, and churn
    # host1's MAC entry as well.
    mac_obj.config_mac(dut=vars.D1,mac=data.host2_mac, vlan=data.host1_vlan, intf=vars.D1T1P2)
    arp_obj.add_static_arp(dut=vars.D1, ipaddress=data_tg_ip, macaddress=data.host2_mac,
                           interface="Vlan"+data.host1_vlan)
    mac_obj.delete_mac(dut=vars.D1, mac=data.host1_mac, vlan=data.host1_vlan)
    mac_obj.config_mac(dut=vars.D1,mac=data.host1_mac, vlan=data.host1_vlan, intf=vars.D1T1P1)
    # Fetch host2's MAC entry from both ASIC tables and compare the ports.
    l2_out = asicapi.get_l2_out(vars.D1, data.host2_mac)
    l3_out = asicapi.get_l3_out(vars.D1, data.host2_mac)
    if l2_out and l3_out:
        # NOTE(review): this picks the single character at index 9 of the
        # gport string rather than the whole value — confirm this is the
        # intended field extraction and not an off-by-slice bug.
        l2_gport=l2_out[0]["gport"][9]
        l3_port=l3_out[0]["port"]
        if l2_gport == l3_port:
            st.log("MAC {} points port {} correctly in both ARP and MAC table".format(data.host2_mac,l2_gport))
        else:
            success=False
            st.error("MAC and ARP table are NOT in SYNC; "
                     "MAC {} points to gport {} in \"l2 show\""
                     "but in \"l3 egress show\" it is port {}".format(data.host2_mac,l2_gport,l3_port))
    else:
        success=False
        st.error("MAC NOT present in \"l2 show\" or \"l3 egress show\" output")
    # Cleanup of the static entries created above (VLAN/IP cleanup is in the fixture).
    arp_obj.delete_static_arp(dut=vars.D1, ipaddress=data_tg_ip, interface="Vlan"+data.host1_vlan, mac=data.host2_mac)
    mac_obj.delete_mac(dut=vars.D1, mac=data.host1_mac, vlan=data.host1_vlan)
    mac_obj.delete_mac(dut=vars.D1, mac=data.host2_mac, vlan=data.host1_vlan)
    if success:
        st.report_pass("test_case_id_passed", "test_Ceta_31902")
    else:
        st.report_fail("test_case_id_failed", "test_Ceta_31902")
| 53.119149
| 176
| 0.680606
| 5,778
| 37,449
| 4.127899
| 0.074247
| 0.023395
| 0.029181
| 0.026414
| 0.79221
| 0.765083
| 0.736992
| 0.71628
| 0.684709
| 0.665213
| 0
| 0.055061
| 0.183316
| 37,449
| 704
| 177
| 53.194602
| 0.724791
| 0.031429
| 0
| 0.485904
| 0
| 0.001658
| 0.149302
| 0.004921
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026534
| false
| 0.018242
| 0.026534
| 0
| 0.06136
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
88990ad89dfe0100516659e974c59c547416b339
| 98
|
py
|
Python
|
setup.py
|
b0uh/python-semver
|
54a1af6fe62e98725d2f0d1eb175995681ca78ac
|
[
"BSD-3-Clause"
] | 159
|
2019-11-14T11:47:44.000Z
|
2022-03-29T02:57:46.000Z
|
setup.py
|
b0uh/python-semver
|
54a1af6fe62e98725d2f0d1eb175995681ca78ac
|
[
"BSD-3-Clause"
] | 146
|
2019-11-05T08:22:43.000Z
|
2022-03-04T18:59:52.000Z
|
setup.py
|
b0uh/python-semver
|
54a1af6fe62e98725d2f0d1eb175995681ca78ac
|
[
"BSD-3-Clause"
] | 65
|
2015-04-12T11:36:38.000Z
|
2019-11-03T18:33:24.000Z
|
#!/usr/bin/env python3
"""Shim setup script that delegates all configuration to setuptools.

The explicit setup() call is kept for compatibility with python 3.6.
"""
from setuptools import setup

setup()
| 19.6
| 55
| 0.765306
| 14
| 98
| 5.357143
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035294
| 0.132653
| 98
| 4
| 56
| 24.5
| 0.847059
| 0.561224
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
ee13d8ddd732f144586d7209263576799209b7d3
| 401
|
py
|
Python
|
delaunay/quadedge/point.py
|
mkirc/delaunayTriangulation
|
e0bc46867a27da8373ad647deda37390de6a0f89
|
[
"MIT"
] | null | null | null |
delaunay/quadedge/point.py
|
mkirc/delaunayTriangulation
|
e0bc46867a27da8373ad647deda37390de6a0f89
|
[
"MIT"
] | null | null | null |
delaunay/quadedge/point.py
|
mkirc/delaunayTriangulation
|
e0bc46867a27da8373ad647deda37390de6a0f89
|
[
"MIT"
] | null | null | null |
class Vertex:
    """A 2-D point for the quad-edge triangulation.

    Attributes:
        pos:  ``[x, y]`` coordinate pair.
        x, y: the individual coordinates (mirrors of ``pos``).
        data: optional caller-attached payload, ``None`` by default.

    Note: equality remains identity-based (no ``__eq__`` is defined), matching
    the original behavior; ``__hash__`` only guarantees that vertices with
    equal ``pos`` hash equal.
    """

    def __init__(self, x=None, y=None):
        self.pos = [x, y]
        self.x = x
        self.y = y
        self.data = None

    def __str__(self):
        return f"{self.pos}"

    def __repr__(self):
        return f"{self.pos}"

    def __hash__(self):
        # Hash the coordinate pair directly. The previous scheme hashed the
        # concatenated string of the coordinates, which collided for distinct
        # points such as (1, 23) and (12, 3) — both produced "123".
        return hash(tuple(self.pos))

    @property
    def id(self):
        # Stable per-coordinate identifier derived from the hash.
        return self.__hash__()
| 16.04
| 56
| 0.516209
| 56
| 401
| 3.339286
| 0.357143
| 0.149733
| 0.117647
| 0.160428
| 0.224599
| 0.224599
| 0
| 0
| 0
| 0
| 0
| 0
| 0.341646
| 401
| 24
| 57
| 16.708333
| 0.708333
| 0
| 0
| 0.133333
| 0
| 0
| 0.049875
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.266667
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
ee168167b2717558d5cc8a0878a90d1ca7d7bd29
| 209
|
py
|
Python
|
src/accounts/models.py
|
DiceNameIsMy/proper-DRF-startup
|
4d8923377f87ef0b576ba14528b9f3e59d7a27ac
|
[
"MIT"
] | 1
|
2022-03-02T12:38:40.000Z
|
2022-03-02T12:38:40.000Z
|
src/accounts/models.py
|
DiceNameIsMy/proper-DRF-startup
|
4d8923377f87ef0b576ba14528b9f3e59d7a27ac
|
[
"MIT"
] | null | null | null |
src/accounts/models.py
|
DiceNameIsMy/proper-DRF-startup
|
4d8923377f87ef0b576ba14528b9f3e59d7a27ac
|
[
"MIT"
] | null | null | null |
from django.contrib.auth.models import AbstractUser, UserManager, AnonymousUser as DjangoAnonymousUser
class User(AbstractUser):
    """Project user model; extends Django's AbstractUser without changes."""

    # Annotation only (no assignment): informs type checkers that the default
    # manager inherited from AbstractUser is a UserManager.
    objects: UserManager
class AnonymousUser(DjangoAnonymousUser):
    """Project-local alias of Django's AnonymousUser; adds no behavior."""

    pass
| 20.9
| 102
| 0.813397
| 20
| 209
| 8.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129187
| 209
| 9
| 103
| 23.222222
| 0.934066
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.2
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
ee2ede120d7f8698c856a5d0ef1188ce8f29534d
| 1,202
|
py
|
Python
|
Scientific-Computing-With-Python/polygon-area-calculator/shape_calculator.py
|
HRashidi/freeCodeCamp
|
b9bdba0162543415f31fc80c467941ffbb49e04a
|
[
"MIT"
] | null | null | null |
Scientific-Computing-With-Python/polygon-area-calculator/shape_calculator.py
|
HRashidi/freeCodeCamp
|
b9bdba0162543415f31fc80c467941ffbb49e04a
|
[
"MIT"
] | null | null | null |
Scientific-Computing-With-Python/polygon-area-calculator/shape_calculator.py
|
HRashidi/freeCodeCamp
|
b9bdba0162543415f31fc80c467941ffbb49e04a
|
[
"MIT"
] | null | null | null |
import math
class Rectangle:
    """An axis-aligned rectangle described by its width and height."""

    def __init__(self, width, height):
        self.width = width
        self.height = height

    def __str__(self):
        return "Rectangle(width={}, height={})".format(self.width, self.height)

    def set_width(self, width):
        """Replace the rectangle's width."""
        self.width = width

    def set_height(self, height):
        """Replace the rectangle's height."""
        self.height = height

    def get_area(self):
        """Width times height."""
        return self.width * self.height

    def get_perimeter(self):
        """Sum of all four side lengths."""
        return self.width * 2 + self.height * 2

    def get_diagonal(self):
        """Length of the diagonal (Pythagorean theorem)."""
        return math.sqrt(self.width ** 2 + self.height ** 2)

    def get_picture(self):
        """ASCII-art picture: one '*' row per unit of height, newline-terminated.

        Refuses (with a message) when either dimension exceeds 50.
        """
        if self.width > 50 or self.height > 50:
            return "Too big for picture."
        row = "*" * self.width + "\n"
        return row * self.height

    def get_amount_inside(self, item):
        """How many copies of *item* tile inside this rectangle (no rotation)."""
        fit_across = self.width // item.width
        fit_down = self.height // item.height
        return fit_across * fit_down
class Square(Rectangle):
    """A Rectangle constrained to equal width and height."""

    def __init__(self, width):
        super().__init__(width, width)

    def __str__(self):
        return "Square(side={})".format(self.width)

    def set_side(self, width):
        """Set both dimensions to *width*, keeping the shape square."""
        self.width = width
        self.height = width

    # The inherited setters are overridden to delegate to set_side so a
    # Square can never become non-square.
    def set_width(self, width):
        self.set_side(width)

    def set_height(self, height):
        self.set_side(height)
| 21.464286
| 73
| 0.680532
| 173
| 1,202
| 4.549133
| 0.208092
| 0.205845
| 0.095299
| 0.068615
| 0.416773
| 0.219822
| 0.219822
| 0.161372
| 0.111817
| 0.111817
| 0
| 0.00818
| 0.186356
| 1,202
| 56
| 74
| 21.464286
| 0.796524
| 0
| 0
| 0.375
| 0
| 0
| 0.056525
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.35
| false
| 0
| 0.025
| 0.125
| 0.625
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
ee38a8249b859c8785a6abf36af4cff579b10bee
| 81
|
py
|
Python
|
mecha01/__init__.py
|
mizahnyx/panda3dplanetgen
|
92d50e8d236be44c4dc5ddcb03f5a6fc05eb86ff
|
[
"BSD-3-Clause"
] | 1
|
2018-05-17T20:12:14.000Z
|
2018-05-17T20:12:14.000Z
|
mecha01/__init__.py
|
mizahnyx/panda3dplanetgen
|
92d50e8d236be44c4dc5ddcb03f5a6fc05eb86ff
|
[
"BSD-3-Clause"
] | 1
|
2021-04-12T10:26:20.000Z
|
2021-05-14T10:28:12.000Z
|
mecha01/__init__.py
|
mizahnyx/panda3dplanetgen
|
92d50e8d236be44c4dc5ddcb03f5a6fc05eb86ff
|
[
"BSD-3-Clause"
] | 1
|
2017-06-07T11:01:54.000Z
|
2017-06-07T11:01:54.000Z
|
from .planet import Planet
from .character_controller import CharacterController
| 27
| 53
| 0.876543
| 9
| 81
| 7.777778
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098765
| 81
| 2
| 54
| 40.5
| 0.958904
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
ee8a2acd28177ea51803084c1978ca031d21609b
| 2,355
|
py
|
Python
|
gitcd/interface/kivy/tagpanel.py
|
pchr-srf/gitcd
|
8b4f08a8221c6fd09ba83e055c1dddabcde80b01
|
[
"Apache-2.0"
] | null | null | null |
gitcd/interface/kivy/tagpanel.py
|
pchr-srf/gitcd
|
8b4f08a8221c6fd09ba83e055c1dddabcde80b01
|
[
"Apache-2.0"
] | 1
|
2019-03-11T19:44:33.000Z
|
2019-03-11T19:44:33.000Z
|
gitcd/interface/kivy/tagpanel.py
|
pchr-srf/gitcd
|
8b4f08a8221c6fd09ba83e055c1dddabcde80b01
|
[
"Apache-2.0"
] | null | null | null |
# flake8: noqa
import threading
import kivy
from kivy.lang import Builder
from gitcd.interface.kivy.panel import GitcdInlineNavigationPanel
Builder.load_string('''
#:import MDSpinner kivymd.spinner.MDSpinner
#:import MDNavigationDrawer kivymd.navigationdrawer.MDNavigationDrawer
#:import NavigationDrawerIconButton kivymd.navigationdrawer.NavigationDrawerIconButton
<GitcdTagPanel>:
do_scroll_x: False
id: branch_panel
MDNavigationDrawer:
id: branch_list
NavigationDrawerIconButton:
text: "v0.0.1"
icon: 'tag'
on_release: root.onRelease
NavigationDrawerIconButton:
text: "v0.0.2"
icon: 'tag'
on_release: root.onRelease
NavigationDrawerIconButton:
text: "v0.0.3"
icon: 'tag'
on_release: root.onRelease
NavigationDrawerIconButton:
text: "v0.0.4"
icon: 'tag'
on_release: root.onRelease
NavigationDrawerIconButton:
text: "v0.0.5"
icon: 'tag'
on_release: root.onRelease
NavigationDrawerIconButton:
text: "v0.0.6"
icon: 'tag'
on_release: root.onRelease
NavigationDrawerIconButton:
text: "v0.0.7"
icon: 'tag'
on_release: root.onRelease
NavigationDrawerIconButton:
text: "v0.0.8"
icon: 'tag'
on_release: root.onRelease
NavigationDrawerIconButton:
text: "v0.0.9"
icon: 'tag'
on_release: root.onRelease
NavigationDrawerIconButton:
text: "v0.0.10"
icon: 'tag'
on_release: root.onRelease
NavigationDrawerIconButton:
text: "v0.0.11"
icon: 'tag'
on_release: root.onRelease
NavigationDrawerIconButton:
text: "v0.0.12"
icon: 'tag'
on_release: root.onRelease
NavigationDrawerIconButton:
text: "v0.0.13"
icon: 'tag'
on_release: root.onRelease
NavigationDrawerIconButton:
text: "v0.0.14"
icon: 'tag'
on_release: root.onRelease
''')
class GitcdTagPanel(GitcdInlineNavigationPanel):
    """Navigation panel listing git tags; layout comes from the kv string above."""

    def onRelease(self, **kwargs):
        # Placeholder handler referenced by each button's on_release in the kv
        # layout; currently does nothing.
        pass
| 28.035714
| 86
| 0.578769
| 210
| 2,355
| 6.4
| 0.266667
| 0.3125
| 0.333333
| 0.34375
| 0.62128
| 0.62128
| 0.599702
| 0.599702
| 0.599702
| 0.599702
| 0
| 0.030769
| 0.33758
| 2,355
| 83
| 87
| 28.373494
| 0.830769
| 0.005096
| 0
| 0.575342
| 0
| 0
| 0.889791
| 0.222127
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013699
| false
| 0.013699
| 0.09589
| 0
| 0.123288
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c98b6818d34257a5fbf08702b296257e4abb2fce
| 320
|
py
|
Python
|
config/env_config.py
|
namuan/crypto-folio
|
18a98168bd8b9ccd4dc0e31a793e57c5b34c1e24
|
[
"Unlicense"
] | 3
|
2018-10-01T10:31:04.000Z
|
2021-01-15T08:01:06.000Z
|
config/env_config.py
|
namuan/crypto-folio
|
18a98168bd8b9ccd4dc0e31a793e57c5b34c1e24
|
[
"Unlicense"
] | null | null | null |
config/env_config.py
|
namuan/crypto-folio
|
18a98168bd8b9ccd4dc0e31a793e57c5b34c1e24
|
[
"Unlicense"
] | 1
|
2021-01-03T06:44:39.000Z
|
2021-01-03T06:44:39.000Z
|
import configparser
def config(key, default_value=None):
    """Look up *key* in the [ALL] section of env.cfg.

    Returns *default_value* when the file, section, or key is missing, or when
    the stored value is empty/falsy (preserving the original ``or`` semantics
    for present-but-empty values).
    """
    parser = configparser.ConfigParser()
    parser.read('env.cfg')  # read() silently yields an empty config if the file is absent
    # fallback=None makes missing sections/options return None instead of
    # raising NoSectionError/NoOptionError — previously `get('ALL', key)`
    # raised in exactly the cases default_value was meant to cover.
    return parser.get('ALL', key, fallback=None) or default_value
def fiat_currencies():
    """Return the configured FIAT_CURRENCIES value as a list of codes."""
    raw = config("FIAT_CURRENCIES")
    return raw.split(",")
def min_balance_to_exclude():
    """Return the configured MIN_BALANCE_TO_EXCLUDE threshold as a float."""
    value = config("MIN_BALANCE_TO_EXCLUDE")
    return float(value)
| 18.823529
| 50
| 0.7125
| 43
| 320
| 5.069767
| 0.55814
| 0.110092
| 0.110092
| 0.174312
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153125
| 320
| 16
| 51
| 20
| 0.804428
| 0
| 0
| 0
| 0
| 0
| 0.15
| 0.06875
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.111111
| 0.222222
| 0.777778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.