hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f3f517756c71a4d0f7f8937e0f44d2b0bfd3fc2f
| 198
|
py
|
Python
|
pytorch_blocks/__init__.py
|
cheremushkin/pytorch-blocks
|
4f45eebf479f6540b0505f32e38cbf8eb825425e
|
[
"Apache-2.0"
] | null | null | null |
pytorch_blocks/__init__.py
|
cheremushkin/pytorch-blocks
|
4f45eebf479f6540b0505f32e38cbf8eb825425e
|
[
"Apache-2.0"
] | null | null | null |
pytorch_blocks/__init__.py
|
cheremushkin/pytorch-blocks
|
4f45eebf479f6540b0505f32e38cbf8eb825425e
|
[
"Apache-2.0"
] | null | null | null |
from .basic import ConvBlock, ShiftedConvBlock, ConvTransposeBlock
from .separable import SeparableConvBlock
__all__ = ['ConvBlock', 'ShiftedConvBlock', 'SeparableConvBlock', 'ConvTransposeBlock']
| 39.6
| 87
| 0.823232
| 15
| 198
| 10.6
| 0.6
| 0.314465
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085859
| 198
| 4
| 88
| 49.5
| 0.878453
| 0
| 0
| 0
| 0
| 0
| 0.308081
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
6d0a7e5c7207a4b13e4a4666d99657c2ddf388bd
| 64
|
py
|
Python
|
hls4ml/__init__.py
|
abhijay97/hls4ml
|
67c026b09bb12ae946af21e982abf7cbedf02f59
|
[
"Apache-2.0"
] | 3
|
2021-01-30T22:10:09.000Z
|
2021-10-09T06:34:55.000Z
|
hls4ml/__init__.py
|
abhijay97/hls4ml
|
67c026b09bb12ae946af21e982abf7cbedf02f59
|
[
"Apache-2.0"
] | null | null | null |
hls4ml/__init__.py
|
abhijay97/hls4ml
|
67c026b09bb12ae946af21e982abf7cbedf02f59
|
[
"Apache-2.0"
] | 1
|
2019-01-18T14:56:31.000Z
|
2019-01-18T14:56:31.000Z
|
from __future__ import absolute_import
from . import converters
| 21.333333
| 38
| 0.859375
| 8
| 64
| 6.25
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 64
| 3
| 39
| 21.333333
| 0.892857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6d4cab3de4ccd7d3c77b8f47e7f763eca2d35aba
| 753
|
py
|
Python
|
tools/clusterfuzz/foozzie/PRESUBMIT.py
|
marksgh/v8
|
0823b36d35e89b591111befb4947cbb6d0443ae0
|
[
"BSD-3-Clause"
] | null | null | null |
tools/clusterfuzz/foozzie/PRESUBMIT.py
|
marksgh/v8
|
0823b36d35e89b591111befb4947cbb6d0443ae0
|
[
"BSD-3-Clause"
] | null | null | null |
tools/clusterfuzz/foozzie/PRESUBMIT.py
|
marksgh/v8
|
0823b36d35e89b591111befb4947cbb6d0443ae0
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright 2018 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
def _RunTests(input_api, output_api):
return input_api.RunTests(input_api.canned_checks.GetUnitTestsInDirectory(
input_api, output_api, '.', files_to_check=['v8_foozzie_test.py$']))
def _CommonChecks(input_api, output_api):
"""Checks common to both upload and commit."""
checks = [
_RunTests,
]
return sum([check(input_api, output_api) for check in checks], [])
def CheckChangeOnCommit(input_api, output_api):
return _CommonChecks(input_api, output_api)
def CheckChangeOnUpload(input_api, output_api):
return _CommonChecks(input_api, output_api)
| 30.12
| 76
| 0.763612
| 107
| 753
| 5.11215
| 0.504673
| 0.146252
| 0.204753
| 0.248629
| 0.285192
| 0.190128
| 0.190128
| 0.190128
| 0.190128
| 0.190128
| 0
| 0.009302
| 0.143426
| 753
| 24
| 77
| 31.375
| 0.83876
| 0.264276
| 0
| 0.153846
| 0
| 0
| 0.03663
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.307692
| false
| 0
| 0.076923
| 0.230769
| 0.692308
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
6d509b68c544fcc00a95fad326c63d8fbc4fe835
| 117
|
py
|
Python
|
cutiestix/ui/__init__.py
|
bworrell/cutie-stix
|
a8052a669a0d7c822a1dd6be8fb680cc88ae2c1e
|
[
"MIT"
] | null | null | null |
cutiestix/ui/__init__.py
|
bworrell/cutie-stix
|
a8052a669a0d7c822a1dd6be8fb680cc88ae2c1e
|
[
"MIT"
] | 2
|
2015-08-10T03:48:43.000Z
|
2015-08-12T01:09:26.000Z
|
cutiestix/ui/__init__.py
|
bworrell/cutie-stix
|
a8052a669a0d7c822a1dd6be8fb680cc88ae2c1e
|
[
"MIT"
] | null | null | null |
"""
This package contains code that has been generated from Qt Designer files
using tools like pyuic4 and pyrcc4.
"""
| 29.25
| 73
| 0.777778
| 18
| 117
| 5.055556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020408
| 0.162393
| 117
| 4
| 74
| 29.25
| 0.908163
| 0.931624
| 0
| null | 1
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6d5f7d5967ccb3044dd99f2f4566c43bd4ecfcff
| 24
|
py
|
Python
|
recipe_scrapers/__version__.py
|
mwcaisse/recipe-scrapers
|
089e3510264260eac254776be4511ef53bc01cd8
|
[
"MIT"
] | null | null | null |
recipe_scrapers/__version__.py
|
mwcaisse/recipe-scrapers
|
089e3510264260eac254776be4511ef53bc01cd8
|
[
"MIT"
] | null | null | null |
recipe_scrapers/__version__.py
|
mwcaisse/recipe-scrapers
|
089e3510264260eac254776be4511ef53bc01cd8
|
[
"MIT"
] | null | null | null |
__version__ = "13.10.0"
| 12
| 23
| 0.666667
| 4
| 24
| 3
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.238095
| 0.125
| 24
| 1
| 24
| 24
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0.291667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ed9f4532572f7eed08d29d470791fffd1b7209cc
| 15
|
py
|
Python
|
custom_components/niwa_tides/__init__.py
|
muxa/home-assistant-niwa-tides
|
ab277fd157224c91431ee775b72811430a178a0e
|
[
"MIT"
] | 3
|
2021-03-28T04:12:02.000Z
|
2022-01-26T06:52:31.000Z
|
custom_components/niwa_tides/__init__.py
|
muxa/home-assistant-niwa-tides
|
ab277fd157224c91431ee775b72811430a178a0e
|
[
"MIT"
] | 1
|
2022-01-14T22:17:48.000Z
|
2022-01-14T22:34:20.000Z
|
custom_components/niwa_tides/__init__.py
|
muxa/home-assistant-niwa-tides
|
ab277fd157224c91431ee775b72811430a178a0e
|
[
"MIT"
] | null | null | null |
# see sensor.py
| 15
| 15
| 0.733333
| 3
| 15
| 3.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 15
| 1
| 15
| 15
| 0.846154
| 0.866667
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
610957cfaa23c9ebebc1ae8878f550332a330500
| 110
|
py
|
Python
|
scripts/readme/find_function.py
|
dhruvmanila/pyinspect
|
ce90df243e5e5ee100f13de4329c111454b8c891
|
[
"MIT"
] | 87
|
2020-09-30T10:18:26.000Z
|
2022-03-10T08:56:04.000Z
|
scripts/readme/find_function.py
|
dhruvmanila/pyinspect
|
ce90df243e5e5ee100f13de4329c111454b8c891
|
[
"MIT"
] | 16
|
2020-09-30T10:57:17.000Z
|
2022-01-16T02:10:45.000Z
|
scripts/readme/find_function.py
|
dhruvmanila/pyinspect
|
ce90df243e5e5ee100f13de4329c111454b8c891
|
[
"MIT"
] | 5
|
2020-11-20T07:39:26.000Z
|
2022-01-13T04:54:51.000Z
|
# import pyinspect
import pyinspect as pi
# Find the functions you're looking for
pi.search(pi, name="what")
| 18.333333
| 39
| 0.754545
| 18
| 110
| 4.611111
| 0.777778
| 0.361446
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.154545
| 110
| 5
| 40
| 22
| 0.892473
| 0.490909
| 0
| 0
| 0
| 0
| 0.075472
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
6114847246d77e6171dd82e293b8fa99e3d0394f
| 20
|
py
|
Python
|
__init__.py
|
lucasmccabe/combinatorial-zoo
|
8eaa3decbbc672b5c713fba0e0ba95d71029fba1
|
[
"MIT"
] | null | null | null |
__init__.py
|
lucasmccabe/combinatorial-zoo
|
8eaa3decbbc672b5c713fba0e0ba95d71029fba1
|
[
"MIT"
] | null | null | null |
__init__.py
|
lucasmccabe/combinatorial-zoo
|
8eaa3decbbc672b5c713fba0e0ba95d71029fba1
|
[
"MIT"
] | null | null | null |
from .tusc import *
| 10
| 19
| 0.7
| 3
| 20
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 20
| 1
| 20
| 20
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
612315fcaa430319bafe2639f79709f8a308de5d
| 100
|
py
|
Python
|
tflearn/models/__init__.py
|
jjpalacio/tflearn
|
e69bc9f341a1d2a90080bb24a686e0e2cf724d63
|
[
"MIT"
] | 10,882
|
2016-03-31T16:03:11.000Z
|
2022-03-26T03:00:27.000Z
|
tflearn/models/__init__.py
|
ciderpark/tflearn
|
5c23566de6e614a36252a5828d107d001a0d0482
|
[
"MIT"
] | 1,079
|
2016-04-02T06:14:16.000Z
|
2022-02-27T10:04:47.000Z
|
tflearn/models/__init__.py
|
ciderpark/tflearn
|
5c23566de6e614a36252a5828d107d001a0d0482
|
[
"MIT"
] | 3,014
|
2016-03-31T16:03:26.000Z
|
2022-03-30T20:36:53.000Z
|
from __future__ import absolute_import
from .dnn import DNN
from .generator import SequenceGenerator
| 33.333333
| 40
| 0.87
| 13
| 100
| 6.307692
| 0.538462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11
| 100
| 3
| 40
| 33.333333
| 0.921348
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b62855f551f6e261bc2c530ed134166a539fd6e6
| 185
|
py
|
Python
|
src/eavatar.ava/pod/mods/commands/__init__.py
|
eavatar/ava
|
4f09c5417b7187dd919b7edabb8c516d8efc0696
|
[
"BSD-3-Clause"
] | null | null | null |
src/eavatar.ava/pod/mods/commands/__init__.py
|
eavatar/ava
|
4f09c5417b7187dd919b7edabb8c516d8efc0696
|
[
"BSD-3-Clause"
] | null | null | null |
src/eavatar.ava/pod/mods/commands/__init__.py
|
eavatar/ava
|
4f09c5417b7187dd919b7edabb8c516d8efc0696
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Functions to extend command-line interface.
"""
from __future__ import (absolute_import, division,
print_function, unicode_literals)
| 26.428571
| 57
| 0.648649
| 19
| 185
| 5.947368
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007042
| 0.232432
| 185
| 6
| 58
| 30.833333
| 0.788732
| 0.356757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
b6336694eb58c5fcdb58bb7425ba47d705859faa
| 127
|
py
|
Python
|
nabu/neuralnetworks/models/ed_decoders/__init__.py
|
AzizCode92/nabu
|
768988ce4c6fc470f843174d6d7d5807880feb10
|
[
"MIT"
] | 117
|
2017-02-10T13:23:23.000Z
|
2022-02-20T05:31:04.000Z
|
nabu/neuralnetworks/models/ed_decoders/__init__.py
|
AzizCode92/nabu
|
768988ce4c6fc470f843174d6d7d5807880feb10
|
[
"MIT"
] | 56
|
2017-04-26T08:51:38.000Z
|
2021-08-23T11:59:19.000Z
|
nabu/neuralnetworks/models/ed_decoders/__init__.py
|
AzizCode92/nabu
|
768988ce4c6fc470f843174d6d7d5807880feb10
|
[
"MIT"
] | 50
|
2017-02-06T21:57:40.000Z
|
2021-05-14T23:03:07.000Z
|
'''@package ed_decoders
contains the decoders for encoder-decoder classifiers'''
from . import ed_decoder, ed_decoder_factory
| 25.4
| 56
| 0.811024
| 17
| 127
| 5.823529
| 0.705882
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110236
| 127
| 4
| 57
| 31.75
| 0.876106
| 0.582677
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b676d6c8c09f8b241dd143231ee03853a68f10c9
| 10,613
|
py
|
Python
|
wrapper/python/lemme-test.py
|
SentyFunBall/valour-vapp
|
ec2c029f206fcb92b59b974cf1335fbfffa3ef9a
|
[
"MIT"
] | 1
|
2021-01-16T18:01:51.000Z
|
2021-01-16T18:01:51.000Z
|
wrapper/python/lemme-test.py
|
SentyFunBall/valour-vapp
|
ec2c029f206fcb92b59b974cf1335fbfffa3ef9a
|
[
"MIT"
] | 17
|
2021-01-16T08:40:53.000Z
|
2021-05-08T19:23:54.000Z
|
wrapper/python/lemme-test.py
|
SentyFunBall/valour-vapp
|
ec2c029f206fcb92b59b974cf1335fbfffa3ef9a
|
[
"MIT"
] | 3
|
2021-02-01T01:43:38.000Z
|
2021-02-02T12:31:48.000Z
|
import random
confizion = [i for i in range(16)] * 16
confizion = [hex(i) for i in confizion]
random.shuffle(confizion)
print(confizion)
confizion = [
'0x64', '0xe0', '0xe8', '0x68', '0x7f', '0x83', '0x67', '0x9a', '0x1f', '0x45', '0xd3', '0xfb', '0x4d', '0x35', '0x56', '0x6a', '0x42', '0xe', '0xa9', '0x7c', '0xdd', '0x2d', '0xf5', '0x37', '0x72', '0x22', '0xe0', '0xcd', '0x3c', '0xcc', '0xe3', '0xf9', '0x58', '0x95', '0xca', '0x67', '0xb0', '0x6d', '0xcb', '0x84', '0x4e', '0x55', '0x65', '0x31', '0x47', '0x6a', '0xeb', '0x7f', '0xf3', '0x27', '0xcc', '0xfe', '0xfc', '0xfd', '0x8e', '0x69', '0xce', '0x54', '0x11', '0x99', '0x2b', '0x78', '0x61', '0x95', '0xb2', '0x71', '0x97', '0x71', '0x67', '0x8d', '0xc7', '0x1c', '0x81', '0xc3', '0xbd', '0x88', '0x3e', '0x99', '0x8a', '0x8c', '0x34', '0xef', '0xd6', '0x76', '0xcf', '0x6', '0x2', '0xf6', '0x1c', '0xdb', '0xea', '0x28', '0x56', '0xe8', '0xf7', '0xf0', '0x95', '0x37', '0xe8', '0xe4', '0x69', '0x14', '0x1c', '0x87', '0xf1', '0x46', '0xa6', '0x36', '0x2d', '0xff', '0xa1', '0x86', '0x59', '0x82', '0xe4', '0x13',
'0x96', '0x74', '0xb4', '0xca', '0x7e', '0xfe', '0xf6', '0x19', '0x66', '0xd', '0xa9', '0xa4', '0x6d', '0xd0', '0xf5', '0x5a', '0x30', '0x58', '0x94', '0xfa',
'0x7b', '0x2c', '0xf1', '0x72', '0x22', '0xe8', '0xa3', '0x68', '0xe7', '0x64', '0x53', '0xb2', '0x2', '0x7e', '0x6', '0xd4', '0xc4', '0xfc', '0xba', '0xa1', '0xb', '0x74', '0x74', '0xea', '0x96', '0x49', '0xf8', '0x29', '0x3a', '0x42', '0xf8', '0x7c', '0xe1', '0xf5', '0x15', '0xd6', '0x5e', '0xe4', '0x4b', '0x8d', '0xb2', '0x38', '0x36', '0xe1', '0x25', '0x5', '0x9d', '0xb', '0x16', '0xc0', '0xa2', '0xe9', '0xbe', '0x54', '0x3d', '0x3c', '0xa6', '0xe1', '0x2f', '0x70', '0xdb', '0x7a', '0x7d', '0x8c', '0x91', '0xef', '0xcb', '0xfe', '0xdf', '0xac', '0x2d', '0x96', '0xa', '0x26', '0x54', '0x66', '0x5c', '0xec', '0x24', '0x75', '0x17', '0xac', '0x5b', '0xef', '0x59', '0x60', '0x7d', '0x79', '0x86', '0x41', '0xb0', '0x4', '0x5c', '0x53', '0x59', '0xc1', '0x89', '0x20', '0xfb', '0x87', '0x10', '0x6d', '0x51', '0xe6', '0x47', '0xee', '0xda', '0x84', '0xa9', '0xf3', '0xa7', '0xc', '0x5e', '0xd3', '0x2f', '0xd2', '0xc1',
'0xf8', '0x81', '0x63', '0xa3', '0xc8', '0xe7', '0x1d', '0xd1', '0xf6', '0xf7', '0xc2', '0x0', '0x3e', '0x93', '0x18', '0x90', '0xf0', '0xd6', '0x31', '0x68', '0x83', '0x81', '0xfa', '0x69', '0x34', '0x78', '0xf1', '0xa', '0xbc', '0xe', '0x38', '0x8', '0x79', '0xbf', '0x55', '0x92', '0xfb', '0x8a', '0xd8', '0x2d', '0x79', '0x7', '0xf9', '0x24', '0x9', '0xbb', '0xfe', '0x75', '0xb7', '0x9', '0x5a', '0xde', '0xf7', '0xc2', '0x5', '0xb7', '0x98', '0xd', '0x6f', '0xcb', '0x9', '0xaf', '0x2a', '0x90', '0xed', '0x9f', '0x24', '0xed', '0x6f', '0x7e', '0x93', '0xe0', '0x1e', '0x20', '0xe5', '0xbb', '0x73', '0xd0', '0xf', '0x63', '0x5b', '0xe6', '0x25', '0x13', '0x10', '0x80', '0x5b', '0xb1', '0xc0', '0xe7', '0xcb', '0x5e', '0xce', '0xab', '0x61', '0x14', '0x48', '0xa5', '0x21', '0x43', '0x68', '0xae', '0x60', '0x9e', '0x1b', '0x14', '0x90', '0x8a', '0x21', '0x87', '0x60', '0x1f', '0x27', '0xa0', '0x52', '0xbb', '0x2b',
'0x38', '0x4e', '0xf9', '0x62', '0xa1', '0x6e', '0x3', '0x2e', '0x7b', '0x35', '0x76', '0x4b', '0x4e', '0xae', '0x97', '0x3', '0x73', '0x6', '0xc7', '0x1e', '0xd', '0xa4', '0xb6', '0xea', '0x7c', '0x6d', '0x41', '0x3a', '0xad', '0x76', '0x66', '0x2a', '0x9e', '0x3f', '0x21', '0x16', '0x50', '0x27', '0xbf', '0x5f', '0x1a', '0x1', '0xe9', '0x5', '0xcd', '0x2', '0xde', '0x18', '0xb7', '0x7', '0xc2', '0xd0', '0x8', '0x22', '0xca', '0xe2', '0xb6', '0xdf', '0xad', '0x99', '0x29', '0x12', '0xf8', '0x17', '0xb5', '0x44', '0x33', '0x6f', '0x97', '0x6e', '0x5f', '0xe3', '0x90', '0x20', '0x61', '0xbc', '0x62', '0x16', '0x6c', '0x4', '0x4f', '0x88', '0x40', '0xf4', '0xb0', '0x2a', '0x84', '0xfc', '0x5d', '0x78', '0x56', '0x51', '0x73', '0x88', '0x53', '0xc6', '0x57', '0x98', '0x10',
'0x87', '0x4b', '0x39', '0x5d', '0xa6', '0x1', '0xe3', '0x6b', '0xce', '0xe6', '0x92', '0xc1', '0x9e', '0xcf', '0xd0', '0x6b', '0x7f', '0x63', '0xb6', '0xe9', '0x48', '0x71', '0x12', '0xff', '0xb8', '0xf2', '0xeb', '0x62', '0x4c', '0xd5', '0xdf', '0x1a', '0xd3', '0xd4', '0xd1', '0xd8', '0x77', '0x85', '0x29', '0xd1', '0x19', '0xa7', '0x83', '0x6e', '0xda', '0xc4', '0x28', '0xec', '0xd1', '0x4f', '0x58', '0x1e', '0x4d', '0x73', '0xb4', '0xb6', '0xc7', '0xc5', '0x5c', '0x30', '0xd7', '0x2b', '0xe1', '0x52', '0xcd', '0x15', '0x23', '0x66', '0xd7', '0x64', '0x86', '0xfd', '0x83', '0x8a', '0x65', '0xca', '0xfd', '0x17', '0x0', '0x93', '0x98', '0xac', '0x71', '0xbe', '0x8b', '0x38', '0x12', '0xc3', '0x85', '0x89', '0xc9', '0x8e', '0x77', '0x37', '0x82', '0x9d', '0xb9', '0x27', '0xf7', '0xec', '0xa1', '0x13', '0x8', '0xdb', '0xb0', '0xa6', '0x52', '0xb', '0x9f', '0xfa', '0x94', '0xe', '0xaa', '0x75', '0xe3', '0xf', '0x4e',
'0x9b', '0xf6', '0x37', '0x88', '0x3b', '0x33', '0x1f', '0xa8', '0xaf', '0xfd', '0x6a', '0xf', '0x81', '0x36', '0xc6', '0x4a', '0xbf', '0x50', '0xb9', '0x9f', '0xc1', '0x49', '0x2c', '0x12', '0xd2', '0x4b', '0xb8', '0xcd', '0x34', '0xe6', '0x45', '0x23', '0x7b', '0x39', '0x20', '0x80', '0xe5', '0x8b', '0x65', '0xc9', '0xb3', '0xdb', '0x48', '0x29', '0xd4', '0xa5', '0xc5', '0x52', '0xc6', '0x28', '0x1', '0x8f', '0xc0', '0xc6', '0x6c', '0x82', '0xff', '0xee', '0x9f', '0xbd', '0xaf', '0x6b', '0x89', '0x94', '0xb8', '0xa0', '0x99', '0xbd', '0xb', '0xc5', '0xdc', '0x5a', '0xf', '0x45', '0x0', '0x72', '0x6', '0x3c', '0xd8', '0xb4', '0xc9', '0xae', '0xc4', '0x48', '0xe4', '0x86', '0x1c', '0xb3', '0x7d', '0x7a', '0x2e', '0xf1', '0x7d', '0x72', '0x23', '0x3f', '0x3f', '0x91', '0x4d', '0x8d', '0x26', '0xe5', '0x9c', '0xb1', '0x32', '0xa3', '0xfc', '0x32', '0x5d', '0xbd', '0x1b', '0xe2', '0x8d', '0x11', '0x8f', '0xba', '0x85',
'0xbf', '0x3d', '0x8c', '0xa0', '0x61', '0xab', '0xe7', '0x1b', '0xdd', '0x47', '0x6c', '0x22', '0xf4', '0xba', '0x80', '0x25', '0xdf', '0x57', '0x85', '0x57', '0x40', '0x95', '0xb4', '0xb9', '0x4f', '0x19', '0x7f', '0x2f', '0xa4', '0x74', '0x44', '0xc4', '0x7a', '0xf0', '0x75', '0x96', '0xea', '0xc7', '0x39', '0x1d', '0x3b', '0x2f', '0x8e', '0x97', '0xc5', '0x82', '0xd2', '0x67', '0xa8', '0xbe', '0xf5', '0xc2', '0xa8', '0xe5', '0xcf', '0xb1', '0xa4', '0x5', '0x93', '0x5f', '0x10', '0x50', '0x5f', '0x4a', '0x13', '0x0', '0xe', '0xdc', '0xbc', '0x70', '0xc', '0x5c', '0x6e', '0x9b', '0xf4', '0x23', '0x7b', '0x35',
'0xec', '0x46', '0xa7', '0xdc', '0xde', '0xb1', '0x53', '0x8f', '0xd2', '0x3a', '0x40', '0xb2', '0x9a', '0xa9', '0x9a', '0xb9', '0x21', '0x5a', '0x9d', '0x4f', '0x46', '0x11', '0x43', '0x1d', '0x3e', '0xeb', '0xd9', '0x8c', '0x18', '0x4', '0xa2', '0x26', '0x3d', '0xd9', '0xae', '0xa7', '0x8e', '0xba', '0xed', '0x32', '0x2c', '0x91', '0x15', '0x1d', '0x35', '0x47', '0x9a', '0x50', '0x43', '0x34', '0x7', '0x30', '0x42', '0x79', '0xd3', '0x25', '0xa8', '0x65', '0x4d', '0xf0', '0x28', '0x59', '0x1a', '0x45', '0x84', '0xaf', '0x44', '0xbb', '0xab', '0x1b', '0xb3', '0x51', '0xa5', '0xfa', '0x36', '0x4c', '0xa5', '0xf4', '0x69', '0x4c', '0xf9', '0x78', '0x9c', '0x2e', '0x1f', '0xd5', '0xaa', '0x89', '0x2', '0x62', '0xbc', '0x7e', '0x6b', '0x9c', '0xeb', '0xcf', '0x8', '0xc3', '0x7', '0xee', '0xde', '0x16', '0xda', '0x40', '0xf2', '0x44', '0xd5', '0x54', '0x17', '0xa2', '0x7c', '0x7a', '0xd6', '0xc', '0x41', '0xa2', '0x3d',
'0xed', '0x63', '0xf2', '0xb8', '0x2b', '0x2a', '0xab', '0x92', '0x14', '0xc8', '0xcc', '0x3e', '0x2e', '0x43', '0xaa', '0x77', '0x8f', '0x3b', '0x3c', '0x4a', '0xad', '0xce', '0x64', '0x32', '0x70', '0x18', '0xfb', '0x3', '0xdd', '0xa', '0xcc', '0xef', '0x4', '0x9b', '0x55', '0xb5', '0x15', '0xa3', '0xb5', '0xff', '0x55', '0x70', '0xe2', '0x9', '0x6c', '0x31', '0x6a', '0x6f', '0x4c', '0xf2', '0xd5', '0x42', '0xc', '0x1e', '0xf3', '0x3', '0x3f', '0xac', '0x26', '0x31', '0xb5', '0xd7', '0x8b', '0xda', '0x1', '0x3b', '0xc8', '0xb3', '0x4a', '0xd', '0x5e', '0xdc', '0xc0', '0xb7', '0x9c', '0xdd', '0x11', '0xbe', '0x39', '0xa0', '0x51', '0x46', '0x49', '0x76', '0xaa', '0xa', '0x1a', '0x98', '0x30', '0xd7', '0x77', '0x57', '0xf3', '0xd9', '0x33', '0xc9', '0x5b', '0x33',
'0x2c', '0x9e', '0x24', '0x80', '0xe9', '0xe0', '0xd4', '0x94', '0x8b', '0x5d', '0xad', '0x92', '0x60', '0x3a', '0x19', '0x49', '0x58', '0x41', '0xe2', '0xd9', '0x56', '0xc3', '0xd8', '0x91', '0x9d', '0x9b', '0xc8', '0xee'
]
dispatch = ['0xe', '0x4', '0x6', '0x5', '0xa', '0x7', '0xe', '0x6', '0xf', '0xe', '0xb', '0x4', '0xc', '0xd', '0xd', '0xa', '0xc', '0x9', '0xd', '0x9', '0x7', '0x8',
'0xf', '0x7', '0x8', '0xa', '0x0', '0x4', '0x2', '0xe', '0xd', '0x0', '0xc', '0xd', '0x2', '0xe', '0xf', '0x3', '0x2', '0xd', '0x9', '0x6', '0xc', '0x7', '0xf', '0x9', '0xe', '0xa', '0x1', '0x0', '0x2', '0x1', '0xc', '0xf', '0x4', '0xc', '0x1', '0xa', '0x3', '0x4', '0x0', '0x5', '0xc', '0x3', '0x1', '0xf', '0x0', '0x0', '0xa', '0x4', '0x2', '0xe', '0x1', '0x8', '0x6', '0xb', '0xd', '0x3', '0x4', '0x2', '0xf', '0x8', '0x4', '0x3', '0x6', '0xd', '0x1', '0x2', '0x4', '0xa', '0xe', '0x5', '0x8', '0xa', '0x4', '0xd', '0x5', '0x1', '0x5', '0xa', '0x8',
'0x7', '0x3', '0xd', '0x5', '0xf', '0xd', '0x9', '0x7', '0x3', '0xe', '0x7', '0xe', '0x6', '0xd', '0x8', '0xb', '0x1', '0xe', '0x9', '0xa', '0x7', '0x4', '0xc', '0x9', '0xf', '0x1', '0xf', '0x0', '0x8', '0x4', '0xc', '0xd', '0x5', '0x9', '0x9', '0xe', '0x9', '0x5', '0xd', '0xb', '0x7', '0x9', '0x7', '0x7', '0x3', '0x9', '0x5', '0x1', '0x0', '0x9', '0x1', '0xd', '0x4', '0xe', '0x6', '0x6', '0x2', '0x0', '0x8', '0x3', '0xa', '0x2', '0xb', '0xf', '0x0', '0xc', '0xb', '0xb', '0xa', '0xc', '0xc', '0x0', '0xb', '0x6', '0x7', '0x5',
'0x8', '0xb', '0x8', '0x3', '0x0', '0xf', '0x7', '0x0', '0x9', '0x8', '0x2', '0x5', '0xc', '0x2', '0x2', '0x6', '0x7', '0x2', '0x3', '0xb', '0xf', '0x9', '0xb', '0xb', '0xe', '0xd', '0x2', '0xf', '0x6', '0x0', '0xe', '0xb', '0x0', '0x8', '0x5', '0x8', '0x8', '0xb', '0x8', '0xa', '0xf', '0x4', '0xc', '0xa', '0x1', '0x3', '0xc', '0x3', '0xa', '0xc', '0x1', '0x6', '0x7', '0x2', '0x1', '0x3', '0x2', '0x6', '0x3', '0x4', '0xa', '0xb', '0x0', '0x5', '0x6', '0x5', '0x9', '0x3', '0x6', '0x6', '0xb', '0xf', '0x1', '0x4', '0xe', '0x7', '0x5', '0x1', '0x5']
final = ['0x7', '0x4', '0x5', '0x3', '0x9', '0x2', '0xc', '0x8', '0x1', '0xe', '0xd', '0x0', '0x6', '0xf', '0xa', '0xb']
def keyGen():
m = ''
o = 0
while o < 16:
print(i)
j = random.randint(0, 1023);
print(j, int(confizion[j], 16), sep="=>")
print(len(dispatch))
k = final[int(dispatch[int(confizion[j], 16)],16)];
j = int(k, 16)
if not (j % 2 or j % 5):
m += str(int(k, 16))
print('>>', m)
hex(int(m))
print(m)
keyGen()""
| 252.690476
| 930
| 0.484594
| 1,379
| 10,613
| 3.729514
| 0.208847
| 0.005833
| 0.001944
| 0.002722
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.296223
| 0.139263
| 10,613
| 42
| 931
| 252.690476
| 0.266776
| 0
| 0
| 0
| 0
| 0
| 0.457132
| 0
| 0
| 0
| 0.456755
| 0
| 0
| 0
| null | null | 0
| 0.025641
| null | null | 0.153846
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b6ac9ec9d8841e58fc171e1aa82aec20d8dfb18b
| 25,830
|
py
|
Python
|
tests/test_metrics.py
|
tods-doc/d3m
|
e25793d4aaa9a8fdb63ac33bf1c045b96d6067a6
|
[
"Apache-2.0"
] | null | null | null |
tests/test_metrics.py
|
tods-doc/d3m
|
e25793d4aaa9a8fdb63ac33bf1c045b96d6067a6
|
[
"Apache-2.0"
] | null | null | null |
tests/test_metrics.py
|
tods-doc/d3m
|
e25793d4aaa9a8fdb63ac33bf1c045b96d6067a6
|
[
"Apache-2.0"
] | null | null | null |
import io
import unittest
import pandas
import sklearn
from distutils.version import LooseVersion
from d3m import exceptions, metrics
from d3m.metadata import problem
class TestMetrics(unittest.TestCase):
def _read_csv(self, csv):
return pandas.read_csv(
io.StringIO(csv),
# We do not want to do any conversion of values at this point.
# This should be done by primitives later on.
dtype=str,
# We always expect one row header.
header=0,
# We want empty strings and not NaNs.
na_filter=False,
encoding='utf8',
)
def test_alignment(self):
truth = self._read_csv("""
d3mIndex,class_label
1,a
2,b
3,c
4,d
""")
predictions = self._read_csv("""
d3mIndex,class_label,confidence
2,b,0.4
4,d,0.5
3,c,0.6
1,a,0.1
""")
self.assertEqual(metrics.Metric.align(truth, predictions).values.tolist(), [['1', 'a', '0.1'], ['2', 'b', '0.4'], ['3', 'c', '0.6'], ['4', 'd', '0.5']])
predictions = self._read_csv("""
d3mIndex,confidence,class_label
1,0.1,a
2,0.4,b
4,0.5,d
3,0.6,c
""")
self.assertEqual(metrics.Metric.align(truth, predictions).values.tolist(), [['1', 'a', '0.1'], ['2', 'b', '0.4'], ['3', 'c', '0.6'], ['4', 'd', '0.5']])
predictions = self._read_csv("""
confidence,class_label,d3mIndex
0.1,a,1
0.4,b,2
0.5,d,4
0.6,c,3
""")
self.assertEqual(metrics.Metric.align(truth, predictions).values.tolist(), [['1', 'a', '0.1'], ['2', 'b', '0.4'], ['3', 'c', '0.6'], ['4', 'd', '0.5']])
predictions = self._read_csv("""
d3mIndex
1
2
4
3
""")
with self.assertRaises(exceptions.InvalidArgumentValueError):
metrics.Metric.align(truth, predictions)
predictions = self._read_csv("""
d3mIndex,class_label,confidence
1,a,0.1
2,b,0.4
3,c,0.6
""")
with self.assertRaises(exceptions.InvalidArgumentValueError):
metrics.Metric.align(truth, predictions)
truth = self._read_csv("""
d3mIndex,class_label
1,a1
1,a2
2,b
3,c1
3,c2
3,c3
4,d1
4,d2
""")
predictions = self._read_csv("""
d3mIndex,class_label
2,b
4,d
3,c
1,a
""")
self.assertEqual(metrics.Metric.align(truth, predictions).values.tolist(), [['1', 'a'], ['2', 'b'], ['3', 'c'], ['4', 'd']])
predictions = self._read_csv("""
d3mIndex,class_label
4,d
2,b1
2,b2
2,b3
2,b4
2,b5
2,b6
3,c
1,a
""")
self.assertEqual(metrics.Metric.align(truth, predictions).values.tolist(), [['1', 'a'], ['2', 'b1'], ['2', 'b2'], ['2', 'b3'], ['2', 'b4'], ['2', 'b5'], ['2', 'b6'], ['3', 'c'], ['4', 'd']])
truth = self._read_csv("""
d3mIndex,class_label
1,a1
1,a2
3,c1
2,b
3,c2
3,c3
4,d1
4,d2
""")
with self.assertRaises(exceptions.InvalidArgumentValueError):
metrics.Metric.align(truth, predictions)
def test_labels(self):
    """PrecisionMetric scored for both choices of the positive label."""
    # Both frames share the same index column; only the last row disagrees.
    indices = pandas.Series([0, 1, 2, 3, 4])
    pred_df = pandas.DataFrame(columns=['d3mIndex', 'class'], dtype=object)
    pred_df['d3mIndex'] = indices
    pred_df['class'] = pandas.Series(['a', 'b', 'a', 'b', 'b'])
    ground_truth_df = pandas.DataFrame(columns=['d3mIndex', 'class'], dtype=object)
    ground_truth_df['d3mIndex'] = indices
    ground_truth_df['class'] = pandas.Series(['a', 'b', 'a', 'b', 'a'])
    # Rows predicted 'a' (0 and 2) are both correct -> precision 1.0.
    self.assertEqual(metrics.PrecisionMetric(pos_label='a').score(ground_truth_df, pred_df), 1.0)
    # Rows predicted 'b' (1, 3, 4): the last one is wrong -> precision 2/3.
    self.assertAlmostEqual(metrics.PrecisionMetric(pos_label='b').score(ground_truth_df, pred_df), 0.6666666666666666)
def test_hamming_loss(self):
    """HammingLossMetric on multi-label and (edge-case) multi-class inputs."""
    # Testcase 1: MultiLabel, typical
    y_true = self._read_csv("""
d3mIndex,class_label
3,happy-pleased
3,relaxing-calm
7,amazed-suprised
7,happy-pleased
13,quiet-still
13,sad-lonely
""")
    y_pred = self._read_csv("""
d3mIndex,class_label
3,happy-pleased
3,sad-lonely
7,amazed-suprised
7,happy-pleased
13,quiet-still
13,happy-pleased
""")
    self.assertAlmostEqual(metrics.HammingLossMetric().score(y_true, y_pred), 0.26666666666666666)
    # Testcase 2: MultiLabel, Zero loss (predictions identical to truth)
    y_true = self._read_csv("""
d3mIndex,class_label
3,happy-pleased
3,relaxing-calm
7,amazed-suprised
7,happy-pleased
13,quiet-still
13,sad-lonely
""")
    y_pred = self._read_csv("""
d3mIndex,class_label
3,happy-pleased
3,relaxing-calm
7,amazed-suprised
7,happy-pleased
13,quiet-still
13,sad-lonely
""")
    self.assertAlmostEqual(metrics.HammingLossMetric().score(y_true, y_pred), 0.0)
    # Testcase 3: MultiLabel, Complete loss (every predicted label is wrong)
    y_true = self._read_csv("""
d3mIndex,class_label
3,happy-pleased
3,relaxing-calm
7,amazed-suprised
7,happy-pleased
13,quiet-still
13,sad-lonely
""")
    y_pred = self._read_csv("""
d3mIndex,class_label
3,ecstatic
3,sad-lonely
3,quiet-still
3,amazed-suprised
7,ecstatic
7,sad-lonely
7,relaxing-calm
7,quiet-still
13,ecstatic
13,happy-pleased
13,relaxing-calm
13,amazed-suprised
""")
    self.assertAlmostEqual(metrics.HammingLossMetric().score(y_true, y_pred), 1.0)
    # Testcase 4: Multiclass, case 1
    # Multiclass is not really supported or reasonable to use, but we still test it to test also edge cases.
    y_true = self._read_csv("""
d3mIndex,species
2,versicolor
16,virginica
17,setosa
22,versicolor
30,versicolor
31,virginica
26,versicolor
33,versicolor
1,versicolor
37,virginica
""")
    y_pred = self._read_csv("""
d3mIndex,species
1,setosa
2,versicolor
22,versicolor
26,virginica
30,versicolor
31,virginica
33,versicolor
17,setosa
37,virginica
16,virginica
""")
    self.assertAlmostEqual(metrics.HammingLossMetric().score(y_true, y_pred), 0.1333333)
    # Testcase 5: Multiclass, case 2 (perfect prediction -> zero loss)
    # Multiclass is not really supported or reasonable to use, but we still test it to test also edge cases.
    y_true = self._read_csv("""
d3mIndex,species
1,versicolor
2,versicolor
16,virginica
17,setosa
22,versicolor
26,versicolor
30,versicolor
31,virginica
33,versicolor
37,virginica
""")
    y_pred = self._read_csv("""
d3mIndex,species
1,versicolor
2,versicolor
16,virginica
17,setosa
22,versicolor
26,versicolor
30,versicolor
31,virginica
33,versicolor
37,virginica
""")
    self.assertAlmostEqual(metrics.HammingLossMetric().score(y_true, y_pred), 0.0)
    # Testcase 6: Multiclass, case 3 (every prediction wrong)
    # Multiclass is not really supported or reasonable to use, but we still test it to test also edge cases.
    y_true = self._read_csv("""
d3mIndex,species
1,versicolor
2,versicolor
16,versicolor
17,virginica
22,versicolor
26,versicolor
30,versicolor
31,virginica
33,versicolor
37,virginica
""")
    y_pred = self._read_csv("""
d3mIndex,species
1,setosa
2,setosa
16,setosa
17,setosa
22,setosa
26,setosa
30,setosa
31,setosa
33,setosa
37,setosa
""")
    self.assertAlmostEqual(metrics.HammingLossMetric().score(y_true, y_pred), 0.66666666)
def test_root_mean_squared_error(self):
    """RMSE on univariate and multivariate regression targets."""
    # Applies to: regression univariate, regression multivariate, forecasting, collaborative filtering.
    y_true = self._read_csv("""
d3mIndex,value
1,3
2,-1.0
17,7
16,2
""")
    # Rows are deliberately out of order; scoring aligns on d3mIndex.
    y_pred = self._read_csv("""
d3mIndex,value
1,2.1
2,0.0
16,2
17,8
""")
    self.assertAlmostEqual(metrics.RootMeanSquareErrorMetric().score(y_true, y_pred), 0.8381527307120105)
    # Multivariate: two target columns, averaged into a single score.
    y_true = self._read_csv("""
d3mIndex,value1,value2
1,0.5,1
2,-1,1
16,7,-6
""")
    y_pred = self._read_csv("""
d3mIndex,value1,value2
1,0,2
2,-1,2
16,8,-5
""")
    self.assertAlmostEqual(metrics.RootMeanSquareErrorMetric().score(y_true, y_pred), 0.8227486121839513)
def test_precision_at_top_k(self):
    """PrecisionAtTopKMetric: overlap between the top-k truth and predicted values."""
    # Forecasting test
    ground_truth_list_1 = self._read_csv("""
d3mIndex,value
1,1
6,6
2,10
4,5
5,12
7,2
8,18
3,7
9,4
10,8
""")
    predictions_list_1 = self._read_csv("""
d3mIndex,value
1,0
10,11
2,2
4,6
5,14
6,9
7,3
8,17
9,10
3,8
""")
    # 3 of the 5 top-valued indices agree -> 0.6.
    self.assertAlmostEqual(metrics.PrecisionAtTopKMetric(k=5).score(ground_truth_list_1, predictions_list_1), 0.6)
def test_object_detection_average_precision(self):
    """ObjectDetectionAveragePrecisionMetric on 8-coordinate polygon boxes."""
    # Object Dectection test
    # Boxes are "x1,y1,...,x4,y4" polygons; predictions carry a confidence.
    predictions_list_1 = self._read_csv("""
d3mIndex,box,confidence
1,"110,110,110,210,210,210,210,110",0.6
2,"5,10,5,20,20,20,20,10",0.9
2,"120,130,120,200,200,200,200,130",0.6
""")
    ground_truth_list_1 = self._read_csv("""
d3mIndex,box
1,"100,100,100,200,200,200,200,100"
2,"10,10,10,20,20,20,20,10"
2,"70,80,70,150,140,150,140,80"
""")
    self.assertAlmostEqual(metrics.ObjectDetectionAveragePrecisionMetric().score(ground_truth_list_1, predictions_list_1), 0.6666666666666666)
    # Low-confidence detections that mostly miss the ground truth.
    predictions_list_2 = self._read_csv("""
d3mIndex,box,confidence
285,"330,463,330,505,387,505,387,463",0.0739
285,"420,433,420,498,451,498,451,433",0.0910
285,"328,465,328,540,403,540,403,465",0.1008
285,"480,477,480,522,508,522,508,477",0.1012
285,"357,460,357,537,417,537,417,460",0.1058
285,"356,456,356,521,391,521,391,456",0.0843
225,"345,460,345,547,415,547,415,460",0.0539
225,"381,362,381,513,455,513,455,362",0.0542
225,"382,366,382,422,416,422,416,366",0.0559
225,"730,463,730,583,763,583,763,463",0.0588
""")
    ground_truth_list_2 = self._read_csv("""
d3mIndex,box
285,"480,457,480,529,515,529,515,457"
285,"480,457,480,529,515,529,515,457"
225,"522,540,522,660,576,660,576,540"
225,"739,460,739,545,768,545,768,460"
""")
    self.assertAlmostEqual(metrics.ObjectDetectionAveragePrecisionMetric().score(ground_truth_list_2, predictions_list_2), 0.125)
    # Two equal-confidence detections for the same truth box.
    predictions_list_3 = self._read_csv("""
d3mIndex,box,confidence
1,"110,110,110,210,210,210,210,110",0.6
2,"120,130,120,200,200,200,200,130",0.6
2,"5,8,5,16,15,16,15,8",0.9
2,"11,12,11,18,21,18,21,12",0.9
""")
    ground_truth_list_3 = self._read_csv("""
d3mIndex,box
1,"100,100,100,200,200,200,200,100"
2,"10,10,10,20,20,20,20,10"
2,"70,80,70,150,140,150,140,80"
""")
    self.assertAlmostEqual(metrics.ObjectDetectionAveragePrecisionMetric().score(ground_truth_list_3, predictions_list_3), 0.4444444444444444)
    # Same detections as above with the tied rows swapped: same score,
    # i.e. the metric is insensitive to input row order among ties.
    predictions_list_4 = self._read_csv("""
d3mIndex,box,confidence
1,"110,110,110,210,210,210,210,110",0.6
2,"120,130,120,200,200,200,200,130",0.6
2,"11,12,11,18,21,18,21,12",0.9
2,"5,8,5,16,15,16,15,8",0.9
""")
    ground_truth_list_4 = self._read_csv("""
d3mIndex,box
1,"100,100,100,200,200,200,200,100"
2,"10,10,10,20,20,20,20,10"
2,"70,80,70,150,140,150,140,80"
""")
    self.assertAlmostEqual(metrics.ObjectDetectionAveragePrecisionMetric().score(ground_truth_list_4, predictions_list_4), 0.4444444444444444)
def test_normalized_mutual_info_score(self):
    """Community detection: normalized mutual information between clusterings."""
    # Ground truth places every node in a single community.
    ground_truth_list_1 = self._read_csv("""
d3mIndex,Class
0,1
1,1
2,1
3,1
""")
    # Predictions split the same nodes into two communities.
    predictions_list_1 = self._read_csv("""
d3mIndex,Class
0,2
1,2
2,1
3,1
""")
    score = metrics.NormalizeMutualInformationMetric().score(ground_truth_list_1, predictions_list_1)
    self.assertAlmostEqual(score, 0.5)
def test_f1_score(self):
    """F1 macro/micro across multi-task, multi-class, binary, and multi-label setups."""
    # MultiTask MultiClass Classification
    y_true = self._read_csv("""
d3mIndex,value1,value2
1,1,1
2,3,2
16,4,1
""")
    y_pred = self._read_csv("""
d3mIndex,value1,value2
1,1,2
2,3,1
16,4,2
""")
    self.assertAlmostEqual(metrics.F1MacroMetric().score(y_true, y_pred), 0.5)
    self.assertAlmostEqual(metrics.F1MicroMetric().score(y_true, y_pred), 0.5)
    # MultiClass Classification Test
    y_true = self._read_csv("""
d3mIndex,class_label
1,0
2,1
3,2
4,3
""")
    y_pred = self._read_csv("""
d3mIndex,class_label
1,0
2,2
3,1
4,3
""")
    self.assertAlmostEqual(metrics.F1MacroMetric().score(y_true, y_pred), 0.5)
    self.assertAlmostEqual(metrics.F1MicroMetric().score(y_true, y_pred), 0.5)
    # MultiTask Binary Classification
    y_true = self._read_csv("""
d3mIndex,value1,value2
1,1,1
2,0,0
16,0,1
""")
    y_pred = self._read_csv("""
d3mIndex,value1,value2
1,1,1
2,0,1
16,0,0
""")
    self.assertAlmostEqual(metrics.F1Metric(pos_label='1').score(y_true, y_pred), 0.75)
    # MultiLabel Classification Test (index 1 carries multiple labels)
    y_true = self._read_csv("""
d3mIndex,class_label
1,3
1,1
2,2
3,3
""")
    y_pred = self._read_csv("""
d3mIndex,class_label
1,1
1,2
1,3
2,1
3,3
""")
    self.assertEqual(metrics.F1MacroMetric().score(y_true, y_pred), 0.5555555555555555)
    self.assertAlmostEqual(metrics.F1MicroMetric().score(y_true, y_pred), 0.6666666666666665)
    # MultiTask MultiLabel Classification Test (empty cells mean "no label
    # for that task on that row")
    y_true = self._read_csv("""
d3mIndex,value1,value2
1,3,1
1,1,
2,2,0
3,3,1
3,3,3
""")
    y_pred = self._read_csv("""
d3mIndex,value1,value2
1,1,1
1,2,
1,3,
2,1,3
2,,3
3,3,0
""")
    self.assertEqual(metrics.F1MacroMetric().score(y_true, y_pred), 0.38888888888888884)
    self.assertAlmostEqual(metrics.F1MicroMetric().score(y_true, y_pred), 0.47619047619047616)
def test_all_labels(self):
    """The all_labels argument extends (but must cover) the observed label set."""
    y_true = self._read_csv("""
d3mIndex,class_label
3,happy-pleased
3,relaxing-calm
7,amazed-suprised
7,happy-pleased
13,quiet-still
13,sad-lonely
""")
    y_pred = self._read_csv("""
d3mIndex,class_label
3,happy-pleased
3,sad-lonely
7,amazed-suprised
7,happy-pleased
13,quiet-still
13,happy-pleased
""")
    # Extra label "foobar" enlarges the denominator, lowering the loss
    # relative to the plain HammingLossMetric run on the same data.
    self.assertAlmostEqual(metrics.HammingLossMetric(all_labels={'class_label': ['happy-pleased', 'relaxing-calm', 'amazed-suprised', 'quiet-still', 'sad-lonely', 'foobar']}).score(y_true, y_pred), 0.2222222222222222)
    # all_labels missing labels present in truth is an error.
    with self.assertRaisesRegex(exceptions.InvalidArgumentValueError, 'Truth contains extra labels'):
        self.assertAlmostEqual(metrics.HammingLossMetric(all_labels={'class_label': ['happy-pleased', 'relaxing-calm', 'amazed-suprised']}).score(y_true, y_pred), 0.2222222222222222)
def test_duplicate_columns(self):
    """Scoring must reject inputs whose column names are duplicated."""
    y_true = self._read_csv("""
d3mIndex,value1,value2
1,1,1
16,4,1
2,3,2
""")
    y_pred = self._read_csv("""
d3mIndex,value1,value2
1,1,2
2,3,1
16,4,2
""")
    # Force a duplicated "value1" column name on both frames.
    y_true.columns = ('d3mIndex', 'value1', 'value1')
    y_pred.columns = ('d3mIndex', 'value1', 'value1')
    with self.assertRaises(exceptions.InvalidArgumentValueError):
        # Fixed: the original wrapped this call in a stray "(..., 0.5)"
        # tuple left over from a copied assertEqual; only the call matters.
        metrics.F1MicroMetric().score(y_true, y_pred)
def test_precision(self):
    """PrecisionMetric with a positional positive label on binary data."""
    # Binary Classification Test
    y_true = self._read_csv("""
d3mIndex,class_label
1,pos
2,pos
3,neg
4,neg
5,pos
""")
    # One false negative but no false positives -> precision 1.0.
    y_pred = self._read_csv("""
d3mIndex,class_label
1,pos
2,pos
3,neg
4,neg
5,neg
""")
    self.assertEqual(metrics.PrecisionMetric("pos").score(y_true, y_pred), 1.0)
    # Two of four "pos" predictions are wrong -> precision 0.5.
    y_pred_2 = self._read_csv("""
d3mIndex,class_label
1,pos
2,pos
3,pos
4,pos
5,neg
""")
    self.assertEqual(metrics.PrecisionMetric("pos").score(y_true, y_pred_2), 0.5)
    # No "pos" prediction is correct -> precision 0.0.
    y_pred_3 = self._read_csv("""
d3mIndex,class_label
1,neg
2,neg
3,pos
4,pos
5,neg
""")
    self.assertEqual(metrics.PrecisionMetric("pos").score(y_true, y_pred_3), 0.0)
def test_accuracy(self):
    """AccuracyMetric on binary, multi-class, and multi-label data."""
    # Binary Classification Test
    y_true = self._read_csv("""
d3mIndex,class_label
1,pos
2,pos
3,neg
4,neg
5,pos
""")
    # Identical predictions -> accuracy 1.0.
    y_pred = self._read_csv("""
d3mIndex,class_label
1,pos
2,pos
3,neg
4,neg
5,pos
""")
    self.assertEqual(metrics.AccuracyMetric().score(y_true, y_pred), 1.0)
    # 2 of 5 rows correct -> 0.4.
    y_pred_2 = self._read_csv("""
d3mIndex,class_label
1,pos
2,pos
3,pos
4,pos
5,neg
""")
    self.assertEqual(metrics.AccuracyMetric().score(y_true, y_pred_2), 0.4)
    # Every row wrong -> 0.0.
    y_pred_3 = self._read_csv("""
d3mIndex,class_label
1,neg
2,neg
3,pos
4,pos
5,neg
""")
    self.assertEqual(metrics.AccuracyMetric().score(y_true, y_pred_3), 0.0)
    # MultiClass Classification Test (prediction rows out of order)
    y_true = self._read_csv("""
d3mIndex,class_label
1,0
2,1
3,2
4,3
""")
    y_pred = self._read_csv("""
d3mIndex,class_label
1,0
2,2
4,3
3,1
""")
    self.assertEqual(metrics.AccuracyMetric().score(y_true, y_pred), 0.5)
    # MultiLabel Classification Test (per-index label sets must match exactly)
    y_true = self._read_csv("""
d3mIndex,class_label
1,3
1,1
2,2
3,3
""")
    y_pred = self._read_csv("""
d3mIndex,class_label
1,1
1,2
1,3
2,1
3,3
""")
    self.assertEqual(metrics.AccuracyMetric().score(y_true, y_pred), 0.3333333333333333)
def test_mean_squared_error(self):
    """MSE on univariate and multivariate regression targets."""
    # Applies to: regression univariate, regression multivariate, forecasting, collaborative filtering.
    # Truth rows are out of order; scoring aligns on d3mIndex.
    y_true = self._read_csv("""
d3mIndex,value
1,3
16,2
2,-1.0
17,7
""")
    y_pred = self._read_csv("""
d3mIndex,value
1,2.1
2,0.0
16,2
17,8
""")
    self.assertAlmostEqual(metrics.MeanSquareErrorMetric().score(y_true, y_pred), 0.7024999999999999)
    # Multivariate: two target columns.
    y_true = self._read_csv("""
d3mIndex,value1,value2
1,0.5,1
2,-1,1
16,7,-6
""")
    y_pred = self._read_csv("""
d3mIndex,value1,value2
1,0,2
2,-1,2
16,8,-5
""")
    self.assertAlmostEqual(metrics.MeanSquareErrorMetric().score(y_true, y_pred), 0.7083333333333334)
def test_mean_absolute_error(self):
    """MAE on univariate and multivariate regression targets."""
    # Applies to: regression univariate, regression multivariate, forecasting, collaborative filtering.
    y_true = self._read_csv("""
d3mIndex,value
1,3
2,-0.5
16,2
17,7
""")
    # Prediction rows are out of order; scoring aligns on d3mIndex.
    y_pred = self._read_csv("""
d3mIndex,value
17,8
1,2.5
2,0.0
16,2
""")
    self.assertAlmostEqual(metrics.MeanAbsoluteErrorMetric().score(y_true, y_pred), 0.5)
    # Multivariate with both rows and value columns out of order.
    y_true = self._read_csv("""
d3mIndex,value1,value2
1,0.5,1
2,-1,1
16,7,-6
""")
    y_pred = self._read_csv("""
d3mIndex,value2,value1
1,2,0
16,-5,8
2,2,-1
""")
    self.assertAlmostEqual(metrics.MeanAbsoluteErrorMetric().score(y_true, y_pred), 0.75)
def test_r_squared(self):
    """R^2 on regression targets, including perfect, constant, and inverted fits."""
    # Applies to: regression univariate, regression multivariate, forecasting, collaborative filtering.
    y_true = self._read_csv("""
d3mIndex,value
1,3
2,-0.5
16,2
17,7
""")
    y_pred = self._read_csv("""
d3mIndex,value
1,2.5
2,0.0
16,2
17,8
""")
    self.assertAlmostEqual(metrics.RSquaredMetric().score(y_true, y_pred), 0.9486081370449679)
    # Multivariate with both rows and value columns out of order.
    y_true = self._read_csv("""
d3mIndex,value1,value2
1,0.5,1
2,-1,1
16,7,-6
""")
    y_pred = self._read_csv("""
d3mIndex,value2,value1
1,2,0
16,-5,8
2,2,-1
""")
    self.assertAlmostEqual(metrics.RSquaredMetric().score(y_true, y_pred), 0.9368005266622779)
    # Perfect prediction -> 1.0.
    y_true = self._read_csv("""
d3mIndex,value
1,1
2,2
16,3
""")
    y_pred = self._read_csv("""
d3mIndex,value
1,1
2,2
16,3
""")
    self.assertAlmostEqual(metrics.RSquaredMetric().score(y_true, y_pred), 1.0)
    # Constant prediction at the truth's mean -> 0.0.
    y_true = self._read_csv("""
d3mIndex,value
1,1
2,2
16,3
""")
    y_pred = self._read_csv("""
d3mIndex,value
1,2
2,2
16,2
""")
    self.assertAlmostEqual(metrics.RSquaredMetric().score(y_true, y_pred), 0.0)
    # Worse than the mean (values reversed) -> negative score.
    y_true = self._read_csv("""
d3mIndex,value
1,1
2,2
16,3
""")
    y_pred = self._read_csv("""
d3mIndex,value
1,3
2,2
16,1
""")
    self.assertAlmostEqual(metrics.RSquaredMetric().score(y_true, y_pred), -3.0)
def test_recall(self):
    """Binary classification recall with '1' as the positive label."""
    truth = self._read_csv("""
d3mIndex,value
1,0
6,1
3,0
4,0
5,1
2,1
""")
    predictions = self._read_csv("""
d3mIndex,value
3,1
1,0
2,1
4,0
5,0
6,1
""")
    # Truth has positives at indices 2, 5, 6; the prediction misses
    # index 5, recovering 2 of 3 -> recall 2/3.
    recall = metrics.RecallMetric(pos_label='1')
    self.assertAlmostEqual(recall.score(truth, predictions), 0.6666666666666666)
# Fixed: compare LooseVersion to LooseVersion. The original compared the raw
# string sklearn.__version__ against a LooseVersion, which only works through
# reflected comparison and is fragile.
@unittest.skipUnless(LooseVersion(sklearn.__version__) >= LooseVersion("0.21"), "jaccard_score introduced in sklearn version 0.21")
def test_jaccard(self):
    """JaccardSimilarityScoreMetric on a small binary problem."""
    # Binary Classification Test
    y_true = self._read_csv("""
d3mIndex,value
1,0
2,1
16,1
""")
    y_pred = self._read_csv("""
d3mIndex,value
1,1
2,1
16,1
""")
    # Intersection of positive sets has 2 elements, union has 3 -> 2/3.
    self.assertAlmostEqual(metrics.JaccardSimilarityScoreMetric(pos_label='1').score(y_true, y_pred), 0.6666666666666666)
def test_meanReciprocalRank(self):
    """MRR: mean over queries of 1/rank of the first correct prediction."""
    y_true = self._read_csv("""
d3mIndex,relationship
0,father
1,sister
2,brother
""")
    # case 1: all correct (every true relationship ranked first) -> 1.0
    y_pred = self._read_csv("""
d3mIndex,relationship,rank
0,father,1
0,cousin,2
0,mother,3
0,brother,4
0,grandfather,5
1,sister,1
1,mother,2
1,aunt,3
2,brother,1
2,father,2
2,sister,3
2,grandfather,4
2,aunt,5
""")
    self.assertAlmostEqual(metrics.MeanReciprocalRankMetric().score(y_true, y_pred), 1.0)
    # case 2: all wrong (true relationship never appears) -> 0.0
    y_pred = self._read_csv("""
d3mIndex,relationship,rank
0,brother,1
0,cousin,2
0,mother,3
0,grandfather,4
1,brother,1
1,mother,2
1,aunt,3
2,father,1
2,grandmother,2
2,sister,3
2,grandfather,4
2,aunt,5
""")
    self.assertAlmostEqual(metrics.MeanReciprocalRankMetric().score(y_true, y_pred), 0.0)
    # case 3 (typical case): some correct and some low ranks
    y_pred = self._read_csv("""
d3mIndex,relationship,rank
0,brother,1
0,cousin,2
0,mother,3
0,father,4
0,grandfather,5
1,sister,1
1,mother,2
1,aunt,3
2,father,1
2,brother,2
2,sister,3
2,grandfather,4
2,aunt,5
""")
    self.assertAlmostEqual(metrics.MeanReciprocalRankMetric().score(y_true, y_pred), 0.5833333333333334)
    # case 4: some are not ranked at all ("brother" missing for index 2)
    y_pred = self._read_csv("""
d3mIndex,relationship,rank
0,brother,1
0,cousin,2
0,mother,3
0,grandfather,4
1,sister,1
1,mother,2
1,aunt,3
2,father,1
2,uncle,2
2,sister,3
2,grandfather,4
2,aunt,5
""")
    self.assertAlmostEqual(metrics.MeanReciprocalRankMetric().score(y_true, y_pred), 0.33466666666666667)
def test_hitsAtK(self):
    """Hits@K: fraction of queries whose true value appears in the top k ranks."""
    y_true = self._read_csv("""
d3mIndex,relationship
0,father
1,sister
2,brother
""")
    # case 1: all correct (every true relationship ranked first) -> 1.0
    y_pred = self._read_csv("""
d3mIndex,relationship,rank
0,father,1
0,cousin,2
0,mother,3
0,brother,4
0,grandfather,5
1,sister,1
1,mother,2
1,aunt,3
2,brother,1
2,father,2
2,sister,3
2,grandfather,4
2,aunt,5
""")
    self.assertAlmostEqual(metrics.HitsAtKMetric(k=3).score(y_true, y_pred), 1.0)
    # case 2: all wrong (true relationship never in the top 3) -> 0.0
    y_pred = self._read_csv("""
d3mIndex,relationship,rank
0,brother,1
0,cousin,2
0,mother,3
0,grandfather,4
1,brother,1
1,mother,2
1,aunt,3
2,father,1
2,grandmother,2
2,sister,3
2,grandfather,4
2,aunt,5
""")
    self.assertAlmostEqual(metrics.HitsAtKMetric(k=3).score(y_true, y_pred), 0.0)
    # case 3 (typical case): some correct and some low ranks;
    # scored at k=3, k=1 and k=5 to show the score grows with k.
    y_pred = self._read_csv("""
d3mIndex,relationship,rank
0,brother,1
0,cousin,2
0,mother,3
0,father,4
0,grandfather,5
1,sister,1
1,mother,2
1,aunt,3
2,father,1
2,brother,2
2,sister,3
2,grandfather,4
2,aunt,5
""")
    self.assertAlmostEqual(metrics.HitsAtKMetric(k=3).score(y_true, y_pred), 0.6666666666666666)
    self.assertAlmostEqual(metrics.HitsAtKMetric(k=1).score(y_true, y_pred), 0.3333333333333333)
    self.assertAlmostEqual(metrics.HitsAtKMetric(k=5).score(y_true, y_pred), 1.0)
    # case 4: some are not ranked at all ("brother" missing for index 2)
    y_pred = self._read_csv("""
d3mIndex,relationship,rank
0,brother,1
0,cousin,2
0,mother,3
0,grandfather,4
1,sister,1
1,mother,2
1,aunt,3
2,father,1
2,uncle,2
2,sister,3
2,grandfather,4
2,aunt,5
""")
    self.assertAlmostEqual(metrics.HitsAtKMetric(k=3).score(y_true, y_pred), 0.3333333)
def test_custom_metric(self):
    """Custom metrics can be registered and then looked up like built-ins."""
    class FooBar():
        # Minimal scorer: the score signature is all the registry requires.
        def score(self, truth: metrics.Truth, predictions: metrics.Predictions) -> float:
            return 1.0
    # NOTE(review): registration mutates the global PerformanceMetric
    # registry and is not undone after this test.
    problem.PerformanceMetric.register_metric('FOOBAR', best_value=1.0, worst_value=0.0, score_class=FooBar)
    # The registered metric is reachable via attribute, item, and call lookup.
    self.assertEqual(problem.PerformanceMetric.FOOBAR.best_value(), 1.0)
    self.assertEqual(problem.PerformanceMetric['FOOBAR'].worst_value(), 0.0)
    self.assertEqual(problem.PerformanceMetric('FOOBAR').requires_confidence(), False)
    self.assertIs(problem.PerformanceMetric.FOOBAR.get_class(), FooBar)
def test_roc_auc(self):
    """ROC AUC on binary data with one confidence row per (index, value) pair."""
    # Binary Classification Test
    y_true = self._read_csv("""
d3mIndex,value
640,0
641,1
642,0
643,0
644,1
645,1
646,0
""")
    # Each index carries a confidence for both possible values; rows are
    # not ordered by index.
    y_pred = self._read_csv("""
d3mIndex,value,confidence
640,0,0.612
640,1,0.388
641,0,0.6
641,1,0.4
645,1,0.9
645,0,0.1
642,1,0.0
642,0,1.0
643,0,0.52
643,1,0.48
644,0,0.3
644,1,0.7
646,0,1.0
646,1,0.0
""")
    self.assertAlmostEqual(metrics.RocAucMetric().score(y_true, y_pred), 0.9166666666666667)
def test_roc_auc_micro(self):
    """Micro-averaged ROC AUC on multi-label data."""
    # Testcase 1: MultiLabel, typical
    y_true = self._read_csv("""
d3mIndex,value
3,d
4,a
4,b
4,c
7,a
7,b
7,d
9,b
9,e
""")
    # One confidence per (index, label) pair; rows deliberately unordered.
    y_pred = self._read_csv("""
d3mIndex,value,confidence
9,b,0.1
4,a,0.4
4,b,0.3
3,a,0.2
3,b,0.1
3,c,0.6
3,d,0.1
3,e,0
4,c,0.1
4,e,0.1
4,d,0.1
7,a,0.1
7,b,0.1
7,d,0.7
7,c,0.1
7,e,0
9,a,0.4
9,c,0.15
9,d,0.3
9,e,0.05
""")
    self.assertAlmostEqual(metrics.RocAucMicroMetric().score(y_true, y_pred), 0.5151515151515151)
def test_roc_auc_macro(self):
    """Macro-averaged ROC AUC on multi-label data."""
    # Testcase 1: MultiLabel, typical
    y_true = self._read_csv("""
d3mIndex,value
3,d
4,a
4,b
4,c
7,a
7,b
7,d
9,b
9,e
""")
    # Same confidences as the micro test, in a different row order.
    y_pred = self._read_csv("""
d3mIndex,value,confidence
3,a,0.2
3,b,0.1
3,c,0.6
3,d,0.1
3,e,0
7,b,0.1
7,a,0.1
4,a,0.4
4,b,0.3
4,c,0.1
4,d,0.1
4,e,0.1
9,a,0.4
9,b,0.1
9,c,0.15
9,d,0.3
9,e,0.05
7,c,0.1
7,d,0.7
7,e,0
""")
    self.assertAlmostEqual(metrics.RocAucMacroMetric().score(y_true, y_pred), 0.5)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 20.467512
| 221
| 0.650832
| 4,123
| 25,830
| 3.933301
| 0.089255
| 0.044891
| 0.069187
| 0.118333
| 0.78689
| 0.765616
| 0.744096
| 0.724795
| 0.672936
| 0.642721
| 0
| 0.142042
| 0.192141
| 25,830
| 1,261
| 222
| 20.483743
| 0.635118
| 0.068602
| 0
| 0.774594
| 0
| 0.020057
| 0.398501
| 0.089069
| 0
| 0
| 0
| 0
| 0.069723
| 1
| 0.023878
| false
| 0
| 0.006686
| 0.00191
| 0.034384
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1e0acd1ef80b204b130e376f70c6968b3c2f8ce4
| 129
|
py
|
Python
|
wificontrol/__init__.py
|
FingerLeakers/pywificontrol
|
1324a5d5bdab6bfd9aea77a72e0c46412cecf026
|
[
"BSD-3-Clause"
] | 115
|
2017-10-19T17:23:13.000Z
|
2022-02-01T21:54:45.000Z
|
wificontrol/__init__.py
|
s0lst1c3/pywificontrol
|
1324a5d5bdab6bfd9aea77a72e0c46412cecf026
|
[
"BSD-3-Clause"
] | 8
|
2017-10-21T03:56:33.000Z
|
2022-01-04T12:18:13.000Z
|
wificontrol/__init__.py
|
s0lst1c3/pywificontrol
|
1324a5d5bdab6bfd9aea77a72e0c46412cecf026
|
[
"BSD-3-Clause"
] | 46
|
2017-10-21T02:17:33.000Z
|
2022-01-15T20:53:18.000Z
|
from wificontrol import WiFiControl
from wificommon import WiFiControlError
from wifimonitor import WiFiMonitor, WiFiMonitorError
| 43
| 53
| 0.899225
| 13
| 129
| 8.923077
| 0.538462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 129
| 3
| 53
| 43
| 0.991453
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1e1f83f8c9973a374b630b08d4f88c6e9f107d23
| 60
|
py
|
Python
|
src/main.py
|
skypaw/rconcrete
|
30bc7e5ada2afa975caabcd38461707e094d695b
|
[
"MIT"
] | null | null | null |
src/main.py
|
skypaw/rconcrete
|
30bc7e5ada2afa975caabcd38461707e094d695b
|
[
"MIT"
] | 2
|
2022-02-05T18:49:44.000Z
|
2022-02-06T01:11:07.000Z
|
src/main.py
|
skypaw/rconcrete
|
30bc7e5ada2afa975caabcd38461707e094d695b
|
[
"MIT"
] | null | null | null |
def sample_function(add: int) -> int:
return add + add
| 15
| 37
| 0.65
| 9
| 60
| 4.222222
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.233333
| 60
| 3
| 38
| 20
| 0.826087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
1e3deaa3769bd24fc72c24ae981b2b17c668f1f8
| 47
|
py
|
Python
|
sublime-plugin/poli/repl/__init__.py
|
egnartsms/poli
|
3a9eab2261688ed84b83808722360356b8e67522
|
[
"MIT"
] | 1
|
2020-06-07T20:55:27.000Z
|
2020-06-07T20:55:27.000Z
|
sublime-plugin/poli/repl/__init__.py
|
egnartsms/poli
|
3a9eab2261688ed84b83808722360356b8e67522
|
[
"MIT"
] | 2
|
2021-01-22T08:45:48.000Z
|
2021-01-22T08:45:49.000Z
|
sublime-plugin/poli/repl/__init__.py
|
egnartsms/poli
|
3a9eab2261688ed84b83808722360356b8e67522
|
[
"MIT"
] | null | null | null |
from .command import *
from .listener import *
| 15.666667
| 23
| 0.744681
| 6
| 47
| 5.833333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170213
| 47
| 2
| 24
| 23.5
| 0.897436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
1e46701585fdc6b2ca2af74905b02882d2dbfbad
| 163
|
py
|
Python
|
python/deltalake/__init__.py
|
viirya/delta-rs
|
3d16de918a4b373413afae95ecedc7fe36b25ecd
|
[
"Apache-2.0"
] | null | null | null |
python/deltalake/__init__.py
|
viirya/delta-rs
|
3d16de918a4b373413afae95ecedc7fe36b25ecd
|
[
"Apache-2.0"
] | null | null | null |
python/deltalake/__init__.py
|
viirya/delta-rs
|
3d16de918a4b373413afae95ecedc7fe36b25ecd
|
[
"Apache-2.0"
] | null | null | null |
from .deltalake import RawDeltaTable, RawDeltaTableMetaData, rust_core_version
from .schema import DataType, Field, Schema
from .table import DeltaTable, Metadata
| 40.75
| 78
| 0.846626
| 19
| 163
| 7.157895
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104294
| 163
| 3
| 79
| 54.333333
| 0.931507
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1e744bb62e6240f958a418e237f5044183b869e8
| 7,116
|
py
|
Python
|
tests/components/fritz/test_switch.py
|
mib1185/core
|
b17d4ac65cde9a27ff6032d70b148792e5eba8df
|
[
"Apache-2.0"
] | 30,023
|
2016-04-13T10:17:53.000Z
|
2020-03-02T12:56:31.000Z
|
tests/components/fritz/test_switch.py
|
mib1185/core
|
b17d4ac65cde9a27ff6032d70b148792e5eba8df
|
[
"Apache-2.0"
] | 24,710
|
2016-04-13T08:27:26.000Z
|
2020-03-02T12:59:13.000Z
|
tests/components/fritz/test_switch.py
|
mib1185/core
|
b17d4ac65cde9a27ff6032d70b148792e5eba8df
|
[
"Apache-2.0"
] | 11,956
|
2016-04-13T18:42:31.000Z
|
2020-03-02T09:32:12.000Z
|
"""Tests for Fritz!Tools switch platform."""
from __future__ import annotations
import pytest
from homeassistant.components.fritz.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from .const import MOCK_FB_SERVICES, MOCK_USER_DATA
from tests.common import MockConfigEntry
MOCK_WLANCONFIGS_SAME_SSID: dict[str, dict] = {
"WLANConfiguration1": {
"GetInfo": {
"NewEnable": True,
"NewStatus": "Up",
"NewMaxBitRate": "Auto",
"NewChannel": 13,
"NewSSID": "WiFi",
"NewBeaconType": "11iandWPA3",
"NewX_AVM-DE_PossibleBeaconTypes": "None,11i,11iandWPA3",
"NewMACAddressControlEnabled": False,
"NewStandard": "ax",
"NewBSSID": "1C:ED:6F:12:34:12",
"NewBasicEncryptionModes": "None",
"NewBasicAuthenticationMode": "None",
"NewMaxCharsSSID": 32,
"NewMinCharsSSID": 1,
"NewAllowedCharsSSID": "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~",
"NewMinCharsPSK": 64,
"NewMaxCharsPSK": 64,
"NewAllowedCharsPSK": "0123456789ABCDEFabcdef",
}
},
"WLANConfiguration2": {
"GetInfo": {
"NewEnable": True,
"NewStatus": "Up",
"NewMaxBitRate": "Auto",
"NewChannel": 52,
"NewSSID": "WiFi",
"NewBeaconType": "11iandWPA3",
"NewX_AVM-DE_PossibleBeaconTypes": "None,11i,11iandWPA3",
"NewMACAddressControlEnabled": False,
"NewStandard": "ax",
"NewBSSID": "1C:ED:6F:12:34:13",
"NewBasicEncryptionModes": "None",
"NewBasicAuthenticationMode": "None",
"NewMaxCharsSSID": 32,
"NewMinCharsSSID": 1,
"NewAllowedCharsSSID": "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~",
"NewMinCharsPSK": 64,
"NewMaxCharsPSK": 64,
"NewAllowedCharsPSK": "0123456789ABCDEFabcdef",
}
},
}
MOCK_WLANCONFIGS_DIFF_SSID: dict[str, dict] = {
"WLANConfiguration1": {
"GetInfo": {
"NewEnable": True,
"NewStatus": "Up",
"NewMaxBitRate": "Auto",
"NewChannel": 13,
"NewSSID": "WiFi",
"NewBeaconType": "11iandWPA3",
"NewX_AVM-DE_PossibleBeaconTypes": "None,11i,11iandWPA3",
"NewMACAddressControlEnabled": False,
"NewStandard": "ax",
"NewBSSID": "1C:ED:6F:12:34:12",
"NewBasicEncryptionModes": "None",
"NewBasicAuthenticationMode": "None",
"NewMaxCharsSSID": 32,
"NewMinCharsSSID": 1,
"NewAllowedCharsSSID": "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~",
"NewMinCharsPSK": 64,
"NewMaxCharsPSK": 64,
"NewAllowedCharsPSK": "0123456789ABCDEFabcdef",
}
},
"WLANConfiguration2": {
"GetInfo": {
"NewEnable": True,
"NewStatus": "Up",
"NewMaxBitRate": "Auto",
"NewChannel": 52,
"NewSSID": "WiFi2",
"NewBeaconType": "11iandWPA3",
"NewX_AVM-DE_PossibleBeaconTypes": "None,11i,11iandWPA3",
"NewMACAddressControlEnabled": False,
"NewStandard": "ax",
"NewBSSID": "1C:ED:6F:12:34:13",
"NewBasicEncryptionModes": "None",
"NewBasicAuthenticationMode": "None",
"NewMaxCharsSSID": 32,
"NewMinCharsSSID": 1,
"NewAllowedCharsSSID": "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~",
"NewMinCharsPSK": 64,
"NewMaxCharsPSK": 64,
"NewAllowedCharsPSK": "0123456789ABCDEFabcdef",
}
},
}
MOCK_WLANCONFIGS_DIFF2_SSID: dict[str, dict] = {
"WLANConfiguration1": {
"GetInfo": {
"NewEnable": True,
"NewStatus": "Up",
"NewMaxBitRate": "Auto",
"NewChannel": 13,
"NewSSID": "WiFi",
"NewBeaconType": "11iandWPA3",
"NewX_AVM-DE_PossibleBeaconTypes": "None,11i,11iandWPA3",
"NewMACAddressControlEnabled": False,
"NewStandard": "ax",
"NewBSSID": "1C:ED:6F:12:34:12",
"NewBasicEncryptionModes": "None",
"NewBasicAuthenticationMode": "None",
"NewMaxCharsSSID": 32,
"NewMinCharsSSID": 1,
"NewAllowedCharsSSID": "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~",
"NewMinCharsPSK": 64,
"NewMaxCharsPSK": 64,
"NewAllowedCharsPSK": "0123456789ABCDEFabcdef",
}
},
"WLANConfiguration2": {
"GetInfo": {
"NewEnable": True,
"NewStatus": "Up",
"NewMaxBitRate": "Auto",
"NewChannel": 52,
"NewSSID": "WiFi+",
"NewBeaconType": "11iandWPA3",
"NewX_AVM-DE_PossibleBeaconTypes": "None,11i,11iandWPA3",
"NewMACAddressControlEnabled": False,
"NewStandard": "ax",
"NewBSSID": "1C:ED:6F:12:34:13",
"NewBasicEncryptionModes": "None",
"NewBasicAuthenticationMode": "None",
"NewMaxCharsSSID": 32,
"NewMinCharsSSID": 1,
"NewAllowedCharsSSID": "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz !\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~",
"NewMinCharsPSK": 64,
"NewMaxCharsPSK": 64,
"NewAllowedCharsPSK": "0123456789ABCDEFabcdef",
}
},
}
@pytest.mark.parametrize(
"fc_data, expected_wifi_names",
[
(
{**MOCK_FB_SERVICES, **MOCK_WLANCONFIGS_SAME_SSID},
["WiFi (2.4Ghz)", "WiFi (5Ghz)"],
),
({**MOCK_FB_SERVICES, **MOCK_WLANCONFIGS_DIFF_SSID}, ["WiFi", "WiFi2"]),
(
{**MOCK_FB_SERVICES, **MOCK_WLANCONFIGS_DIFF2_SSID},
["WiFi (2.4Ghz)", "WiFi+ (5Ghz)"],
),
],
)
async def test_switch_setup(
hass: HomeAssistant,
expected_wifi_names: list[str],
fc_class_mock,
fh_class_mock,
):
"""Test setup of Fritz!Tools switches."""
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA)
entry.add_to_hass(hass)
assert await async_setup_component(hass, DOMAIN, {})
await hass.async_block_till_done()
assert entry.state == ConfigEntryState.LOADED
switches = hass.states.async_all(Platform.SWITCH)
assert len(switches) == 3
assert switches[0].name == f"Mock Title Wi-Fi {expected_wifi_names[0]}"
assert switches[1].name == f"Mock Title Wi-Fi {expected_wifi_names[1]}"
assert switches[2].name == "printer Internet Access"
| 37.452632
| 135
| 0.566189
| 505
| 7,116
| 7.823762
| 0.239604
| 0.022779
| 0.030372
| 0.044039
| 0.774487
| 0.752468
| 0.741838
| 0.741838
| 0.741838
| 0.72412
| 0
| 0.057188
| 0.282462
| 7,116
| 189
| 136
| 37.650794
| 0.716608
| 0.00534
| 0
| 0.677966
| 0
| 0
| 0.419997
| 0.169819
| 0
| 0
| 0
| 0
| 0.033898
| 1
| 0
| false
| 0
| 0.050847
| 0
| 0.050847
| 0.00565
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1e7932ec016be2e499b9e15db711ec1a3967154f
| 160
|
py
|
Python
|
app_todo/admin.py
|
falken20/falken_home
|
3f34e3efc403591293a582ac95f49c5721db18b6
|
[
"MIT"
] | null | null | null |
app_todo/admin.py
|
falken20/falken_home
|
3f34e3efc403591293a582ac95f49c5721db18b6
|
[
"MIT"
] | 9
|
2021-06-04T23:49:09.000Z
|
2022-01-03T22:53:49.000Z
|
app_todo/admin.py
|
falken20/falken_home
|
3f34e3efc403591293a582ac95f49c5721db18b6
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
# ROD: Register models here so they can be managed in the Django admin console.
from .models import TodoItem
# Expose TodoItem in the auto-generated admin UI (list/create/edit/delete).
admin.site.register(TodoItem)
| 22.857143
| 65
| 0.8
| 25
| 160
| 5.12
| 0.72
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 160
| 7
| 66
| 22.857143
| 0.941176
| 0.39375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1e7b7b4316c37cd6d1104c1eb26d9896b1a7da45
| 39,260
|
py
|
Python
|
sampledb/logic/object_search.py
|
FlorianRhiem/sampledb
|
3363adbe5f2771d1178a5b6d530be960ce41c560
|
[
"MIT"
] | null | null | null |
sampledb/logic/object_search.py
|
FlorianRhiem/sampledb
|
3363adbe5f2771d1178a5b6d530be960ce41c560
|
[
"MIT"
] | null | null | null |
sampledb/logic/object_search.py
|
FlorianRhiem/sampledb
|
3363adbe5f2771d1178a5b6d530be960ce41c560
|
[
"MIT"
] | null | null | null |
# coding: utf-8
import functools
import json
import typing
from sqlalchemy import String, and_, or_
from sqlalchemy.sql.expression import select, true, false, not_
from . import where_filters
from . import datatypes
from . import object_search_parser
from .. import db
import sqlalchemy.dialects.postgresql as postgresql
class Attribute:
    """A parsed attribute reference paired with its location in the query text.

    Stores the produced value together with the raw input text and the
    half-open character span [start_position, end_position) it covers, so
    that search notes can point at the exact part of the query string.
    """

    def __init__(self, input_text: str, start_position: int, value):
        self.value = value
        self.input_text = input_text
        self.start_position = start_position
        # end offset follows directly from the start and the text length
        self.end_position = start_position + len(input_text)
class Expression:
    """An evaluated (sub-)expression paired with its location in the query text.

    Mirrors Attribute: keeps the computed value, the raw input text, and the
    character offsets delimiting the span of the original query it came from.
    """

    def __init__(self, input_text: str, start_position: int, value):
        self.value = value
        self.input_text = input_text
        self.start_position = start_position
        # the expression ends where its source text ends
        self.end_position = start_position + len(input_text)
# Registries mapping (operand type(s), operator string) -> handler function.
# Populated as a side effect of the decorator factories below.
unary_operator_handlers = {}
binary_operator_handlers = {}
def unary_operator_handler(operand_type, operator):
    """Register the decorated function as handler for a unary operation.

    The decorated function receives the operand's raw value plus the combined
    source text and position metadata, and returns a (filter, outer_filter)
    pair. The wrapper re-wraps the filter in an Expression that carries the
    full "<operator><operand>" source span for later error reporting.
    """
    def unary_operator_handler_decorator(func, operand_type=operand_type, operator=operator):
        @functools.wraps(func)
        def unary_operator_handler_wrapper(operator, operand, outer_filter, search_notes):
            # combined source span of operator + operand
            input_text = operator.input_text + operand.input_text
            start_position = operator.start_position
            end_position = operand.end_position
            filter_func, outer_filter = func(operand.value, outer_filter, search_notes, input_text, start_position, end_position)
            filter_func = Expression(input_text, start_position, filter_func)
            return filter_func, outer_filter
        # guard against two handlers being registered for the same pair
        assert (operand_type, operator) not in unary_operator_handlers
        unary_operator_handlers[(operand_type, operator)] = unary_operator_handler_wrapper
        return unary_operator_handler_wrapper
    return unary_operator_handler_decorator
def binary_operator_handler(left_operand_type, right_operand_type, operator):
    """Register the decorated function as handler for a binary operation.

    Analogous to unary_operator_handler: the decorated function receives the
    two operand values plus source-span metadata and returns a
    (filter, outer_filter) pair; the wrapper re-wraps the filter in an
    Expression spanning "<left><operator><right>".
    """
    def binary_operator_handler_decorator(func, left_operand_type=left_operand_type, right_operand_type=right_operand_type, operator=operator):
        @functools.wraps(func)
        def binary_operator_handler_wrapper(left_operand, operator, right_operand, outer_filter, search_notes):
            # combined source span of left operand + operator + right operand
            input_text = left_operand.input_text + operator.input_text + right_operand.input_text
            start_position = left_operand.start_position
            end_position = right_operand.end_position
            filter_func, outer_filter = func(left_operand.value, right_operand.value, outer_filter, search_notes, input_text, start_position, end_position)
            filter_func = Expression(input_text, start_position, filter_func)
            return filter_func, outer_filter
        # guard against duplicate registrations for the same type/operator triple
        assert (left_operand_type, right_operand_type, operator) not in binary_operator_handlers
        binary_operator_handlers[(left_operand_type, right_operand_type, operator)] = binary_operator_handler_wrapper
        return binary_operator_handler_wrapper
    return binary_operator_handler_decorator
# --- unary 'not' handlers ---
# A boolean literal is negated at query-build time and reported as a
# constant-result warning; attributes and SQL expressions are negated in SQL.
@unary_operator_handler(datatypes.Boolean, 'not')
def _(operand, outer_filter, search_notes, input_text, start_position, end_position):
    if not operand.value:
        search_notes.append(('warning', 'This expression will always be true', start_position, end_position))
        return outer_filter(true()), None
    else:
        search_notes.append(('warning', 'This expression will always be false', start_position, end_position))
        return outer_filter(false()), None
@unary_operator_handler(Attribute, 'not')
def _(operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.boolean_false(operand)), None
@unary_operator_handler(None, 'not')
def _(operand, outer_filter, search_notes, input_text, start_position, end_position):
    # None operand type = an already-built SQL expression; negate it directly
    return outer_filter(not_(operand)), None
# --- 'and' handlers ---
# Boolean literals are folded at build time; attributes are coerced to
# boolean_true filters; already-built SQL expressions are combined with and_().
@binary_operator_handler(datatypes.Boolean, datatypes.Boolean, 'and')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    if left_operand.value and right_operand.value:
        return outer_filter(true()), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(None, None, 'and')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(and_(left_operand, right_operand)), None
@binary_operator_handler(None, datatypes.Boolean, 'and')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    # "expr and True" -> expr; "expr and False" -> False
    if right_operand.value:
        return outer_filter(left_operand), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(datatypes.Boolean, None, 'and')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    if left_operand.value:
        return outer_filter(right_operand), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(None, Attribute, 'and')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(and_(left_operand, where_filters.boolean_true(right_operand))), None
@binary_operator_handler(Attribute, None, 'and')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(and_(where_filters.boolean_true(left_operand), right_operand)), None
@binary_operator_handler(Attribute, datatypes.Boolean, 'and')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    if right_operand.value:
        return outer_filter(where_filters.boolean_true(left_operand)), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(datatypes.Boolean, Attribute, 'and')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    if left_operand.value:
        return outer_filter(where_filters.boolean_true(right_operand)), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(Attribute, Attribute, 'and')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(and_(where_filters.boolean_true(left_operand), where_filters.boolean_true(right_operand))), None
# --- 'or' handlers ---
# Same pattern as the 'and' handlers above, with short-circuiting folded
# toward true() instead of false().
@binary_operator_handler(datatypes.Boolean, datatypes.Boolean, 'or')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    if left_operand.value or right_operand.value:
        return outer_filter(true()), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(None, None, 'or')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(or_(left_operand, right_operand)), None
@binary_operator_handler(None, datatypes.Boolean, 'or')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    # "expr or True" -> True; "expr or False" -> expr
    if right_operand.value:
        return outer_filter(true()), None
    else:
        return outer_filter(left_operand), None
@binary_operator_handler(datatypes.Boolean, None, 'or')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    if left_operand.value:
        return outer_filter(true()), None
    else:
        return outer_filter(right_operand), None
@binary_operator_handler(None, Attribute, 'or')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(or_(left_operand, where_filters.boolean_true(right_operand))), None
@binary_operator_handler(Attribute, None, 'or')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(or_(where_filters.boolean_true(left_operand), right_operand)), None
@binary_operator_handler(Attribute, datatypes.Boolean, 'or')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    if right_operand.value:
        return outer_filter(true()), None
    else:
        return outer_filter(where_filters.boolean_true(left_operand)), None
@binary_operator_handler(datatypes.Boolean, Attribute, 'or')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    if left_operand.value:
        return outer_filter(true()), None
    else:
        return outer_filter(where_filters.boolean_true(right_operand)), None
@binary_operator_handler(Attribute, Attribute, 'or')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(or_(where_filters.boolean_true(left_operand), where_filters.boolean_true(right_operand))), None
# --- '==' and '!=' between two literals of the same type ---
# Both operands are known at build time, so the comparison is folded into a
# constant true()/false() filter.
@binary_operator_handler(datatypes.Boolean, datatypes.Boolean, '==')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    if left_operand == right_operand:
        return outer_filter(true()), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(datatypes.DateTime, datatypes.DateTime, '==')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    if left_operand == right_operand:
        return outer_filter(true()), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(datatypes.Text, datatypes.Text, '==')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    if left_operand == right_operand:
        return outer_filter(true()), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(datatypes.Quantity, datatypes.Quantity, '==')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    # quantities of different dimensionalities are never equal; warn and
    # short-circuit to false
    if left_operand.dimensionality != right_operand.dimensionality:
        search_notes.append(('warning', 'Invalid comparison between quantities of different dimensionalities', 0, None))
        return outer_filter(false()), None
    if left_operand == right_operand:
        return outer_filter(true()), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(datatypes.Boolean, datatypes.Boolean, '!=')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    if not (left_operand == right_operand):
        return outer_filter(true()), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(datatypes.DateTime, datatypes.DateTime, '!=')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    if not (left_operand == right_operand):
        return outer_filter(true()), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(datatypes.Text, datatypes.Text, '!=')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    if not (left_operand == right_operand):
        return outer_filter(true()), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(datatypes.Quantity, datatypes.Quantity, '!=')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    # different dimensionalities imply inequality, hence true() here
    if left_operand.dimensionality != right_operand.dimensionality:
        search_notes.append(('warning', 'Invalid comparison between quantities of different dimensionalities', 0, None))
        return outer_filter(true()), None
    if not (left_operand == right_operand):
        return outer_filter(true()), None
    else:
        return outer_filter(false()), None
# --- '==' and '!=' involving an attribute ---
# One operand is a database attribute, so the comparison is delegated to the
# corresponding where_filters helper; literal-first and attribute-first
# argument orders are both registered, with the attribute always passed first.
@binary_operator_handler(datatypes.Boolean, Attribute, '==')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.boolean_equals(right_operand, left_operand)), None
@binary_operator_handler(Attribute, datatypes.Boolean, '==')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.boolean_equals(left_operand, right_operand)), None
@binary_operator_handler(datatypes.Boolean, Attribute, '!=')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    # "!= b" is expressed as "== (not b)" for boolean literals
    return outer_filter(where_filters.boolean_equals(right_operand, not left_operand.value)), None
@binary_operator_handler(Attribute, datatypes.Boolean, '!=')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.boolean_equals(left_operand, not right_operand.value)), None
@binary_operator_handler(Attribute, datatypes.DateTime, '==')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.datetime_equals(left_operand, right_operand)), None
@binary_operator_handler(datatypes.DateTime, Attribute, '==')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.datetime_equals(right_operand, left_operand)), None
@binary_operator_handler(datatypes.Quantity, Attribute, '==')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.quantity_equals(right_operand, left_operand)), None
@binary_operator_handler(Attribute, datatypes.Quantity, '==')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.quantity_equals(left_operand, right_operand)), None
@binary_operator_handler(datatypes.Quantity, Attribute, '!=')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(not_(where_filters.quantity_equals(right_operand, left_operand))), None
@binary_operator_handler(Attribute, datatypes.Quantity, '!=')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(not_(where_filters.quantity_equals(left_operand, right_operand))), None
@binary_operator_handler(datatypes.Text, Attribute, '==')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.text_equals(right_operand, left_operand)), None
@binary_operator_handler(Attribute, datatypes.Text, '==')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.text_equals(left_operand, right_operand)), None
@binary_operator_handler(Attribute, Attribute, '==')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(left_operand == right_operand), None
@binary_operator_handler(None, None, '==')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(left_operand == right_operand), None
# --- '<' and '>' handlers ---
# Literal/literal comparisons are folded at build time; attribute/literal
# comparisons delegate to where_filters with the attribute first, flipping
# the operator direction when the literal comes first in the query.
@binary_operator_handler(datatypes.DateTime, datatypes.DateTime, '<')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    if left_operand.utc_datetime < right_operand.utc_datetime:
        return outer_filter(true()), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(datatypes.Quantity, datatypes.Quantity, '<')
def _(left_operand: datatypes.Quantity, right_operand: datatypes.Quantity, outer_filter, search_notes, input_text, start_position, end_position):
    if left_operand.dimensionality != right_operand.dimensionality:
        search_notes.append(('warning', 'Invalid comparison between quantities of different dimensionalities', 0, None))
        return outer_filter(false()), None
    # NOTE(review): "maginitude_in_base_units" looks misspelled — presumably
    # it matches the attribute name declared in datatypes.Quantity; verify.
    if left_operand.maginitude_in_base_units < right_operand.maginitude_in_base_units:
        return outer_filter(true()), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(Attribute, datatypes.Quantity, '<')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.quantity_less_than(left_operand, right_operand)), None
@binary_operator_handler(datatypes.Quantity, Attribute, '<')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    # "literal < attr" is rewritten as "attr > literal"
    return outer_filter(where_filters.quantity_greater_than(right_operand, left_operand)), None
@binary_operator_handler(Attribute, datatypes.DateTime, '<')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.datetime_less_than(left_operand, right_operand)), None
@binary_operator_handler(datatypes.DateTime, Attribute, '<')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.datetime_greater_than(right_operand, left_operand)), None
@binary_operator_handler(datatypes.DateTime, datatypes.DateTime, '>')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    if left_operand.utc_datetime > right_operand.utc_datetime:
        return outer_filter(true()), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(datatypes.Quantity, datatypes.Quantity, '>')
def _(left_operand: datatypes.Quantity, right_operand: datatypes.Quantity, outer_filter, search_notes, input_text, start_position, end_position):
    if left_operand.dimensionality != right_operand.dimensionality:
        search_notes.append(('warning', 'Invalid comparison between quantities of different dimensionalities', 0, None))
        return outer_filter(false()), None
    if left_operand.maginitude_in_base_units > right_operand.maginitude_in_base_units:
        return outer_filter(true()), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(Attribute, datatypes.Quantity, '>')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.quantity_greater_than(left_operand, right_operand)), None
@binary_operator_handler(datatypes.Quantity, Attribute, '>')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.quantity_less_than(right_operand, left_operand)), None
@binary_operator_handler(Attribute, datatypes.DateTime, '>')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.datetime_greater_than(left_operand, right_operand)), None
@binary_operator_handler(datatypes.DateTime, Attribute, '>')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    # Fix: "literal > attr" is equivalent to "attr < literal", so this must use
    # the strict datetime_less_than filter. The previous implementation used
    # datetime_less_than_equals, which also matched equal datetimes and made
    # '>' behave like '>=' for this operand order — inconsistent with the
    # (Attribute, DateTime, '>') handler above and indistinguishable from the
    # separately registered (DateTime, Attribute, '>=') handler below.
    return outer_filter(where_filters.datetime_less_than(right_operand, left_operand)), None
# --- '<=', '>=' and 'in' handlers ---
# Same delegation pattern as '<'/'>' above, using the *_equals variants of
# the where_filters helpers for the non-strict comparisons.
@binary_operator_handler(datatypes.DateTime, datatypes.DateTime, '<=')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    if left_operand.utc_datetime <= right_operand.utc_datetime:
        return outer_filter(true()), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(datatypes.Quantity, datatypes.Quantity, '<=')
def _(left_operand: datatypes.Quantity, right_operand: datatypes.Quantity, outer_filter, search_notes, input_text, start_position, end_position):
    if left_operand.dimensionality != right_operand.dimensionality:
        search_notes.append(('warning', 'Invalid comparison between quantities of different dimensionalities', 0, None))
        return outer_filter(false()), None
    if left_operand.maginitude_in_base_units <= right_operand.maginitude_in_base_units:
        return outer_filter(true()), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(Attribute, datatypes.Quantity, '<=')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.quantity_less_than_equals(left_operand, right_operand)), None
@binary_operator_handler(datatypes.Quantity, Attribute, '<=')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.quantity_greater_than_equals(right_operand, left_operand)), None
@binary_operator_handler(Attribute, datatypes.DateTime, '<=')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.datetime_less_than_equals(left_operand, right_operand)), None
@binary_operator_handler(datatypes.DateTime, Attribute, '<=')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.datetime_greater_than_equals(right_operand, left_operand)), None
@binary_operator_handler(datatypes.DateTime, datatypes.DateTime, '>=')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    if left_operand.utc_datetime >= right_operand.utc_datetime:
        return outer_filter(true()), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(datatypes.DateTime, Attribute, '>=')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.datetime_less_than_equals(right_operand, left_operand)), None
@binary_operator_handler(datatypes.Quantity, datatypes.Quantity, '>=')
def _(left_operand: datatypes.Quantity, right_operand: datatypes.Quantity, outer_filter, search_notes, input_text, start_position, end_position):
    if left_operand.dimensionality != right_operand.dimensionality:
        search_notes.append(('warning', 'Invalid comparison between quantities of different dimensionalities', 0, None))
        return outer_filter(false()), None
    if left_operand.maginitude_in_base_units >= right_operand.maginitude_in_base_units:
        return outer_filter(true()), None
    else:
        return outer_filter(false()), None
@binary_operator_handler(Attribute, datatypes.Quantity, '>=')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.quantity_greater_than_equals(left_operand, right_operand)), None
@binary_operator_handler(datatypes.Quantity, Attribute, '>=')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.quantity_less_than_equals(right_operand, left_operand)), None
@binary_operator_handler(Attribute, datatypes.DateTime, '>=')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    return outer_filter(where_filters.datetime_greater_than_equals(left_operand, right_operand)), None
@binary_operator_handler(datatypes.Text, Attribute, 'in')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    # substring search: "text in attribute"
    return outer_filter(where_filters.text_contains(right_operand, left_operand.text)), None
@binary_operator_handler(datatypes.Text, datatypes.Text, 'in')
def _(left_operand, right_operand, outer_filter, search_notes, input_text, start_position, end_position):
    if left_operand.text in right_operand.text:
        return outer_filter(true()), None
    else:
        return outer_filter(false()), None
def transform_literal_to_query(data, literal: object_search_parser.Literal, search_notes: typing.List[typing.Tuple[str, str, int, typing.Optional[int]]]) -> typing.Tuple[typing.Any, typing.Optional[typing.Callable]]:
    """Convert a parsed literal into a query value or filter.

    Tags become a tags_contain filter; attribute references are resolved
    against the JSONB data column (with '?' acting as an array placeholder);
    plain value literals (Boolean, Date, Quantity, Text) are returned
    unchanged for the operator handlers to interpret.

    :return: (value or Expression/Attribute wrapper, optional outer filter
        callable used to wrap array-placeholder conditions in an EXISTS query)
    """
    if isinstance(literal, object_search_parser.Tag):
        return Expression(literal.input_text, literal.start_position, where_filters.tags_contain(data[('tags',)], literal.value)), None
    if isinstance(literal, object_search_parser.Attribute):
        attributes = literal.value
        # convert any numeric arguments to integers (array indices)
        for i, attribute in enumerate(attributes):
            try:
                attributes[i] = int(attribute)
            except ValueError:
                pass
        if '?' in attributes:
            array_placeholder_index = attributes.index('?')
            # no danger of SQL injection as attributes may only consist of
            # characters and underscores at this point
            jsonb_selector = ''
            # build the "a -> 'b' -> 0" JSONB path up to the placeholder
            for i, attribute in enumerate(attributes[:array_placeholder_index]):
                if isinstance(attribute, int):
                    attribute = str(attribute)
                else:
                    attribute = "'" + attribute + "'"
                if i > 0:
                    jsonb_selector += " -> "
                jsonb_selector += attribute
            # NOTE(review): select(columns=...) is legacy SQLAlchemy API —
            # confirm against the project's pinned SQLAlchemy version.
            array_items = select(columns=[db.text('value FROM jsonb_array_elements_text(data -> {})'.format(jsonb_selector))])
            db_obj = db.literal_column('value').cast(postgresql.JSONB)
            # descend into the attributes after the placeholder within each
            # array element
            for attribute in attributes[array_placeholder_index + 1:]:
                db_obj = db_obj[attribute]
            return Attribute(literal.input_text, literal.start_position, db_obj), lambda filter: db.Query(db.literal(True)).select_entity_from(array_items).filter(filter).exists()
        return Attribute(literal.input_text, literal.start_position, data[attributes]), None
    if isinstance(literal, object_search_parser.Boolean):
        return literal, None
    if isinstance(literal, object_search_parser.Date):
        return literal, None
    if isinstance(literal, object_search_parser.Quantity):
        return literal, None
    if isinstance(literal, object_search_parser.Text):
        return literal, None
    search_notes.append(('error', "Invalid search query", 0, None))
    return false(), None
def transform_unary_operation_to_query(data, operator, operand, search_notes: typing.List[typing.Tuple[str, str, int, typing.Optional[int]]]) -> typing.Tuple[Expression, typing.Optional[typing.Callable]]:
    """Dispatch a unary operation (currently 'not' / '!') to its handler.

    Determines the operand's type, normalizes operator aliases, and looks up
    the handler registered via unary_operator_handler. Unknown combinations
    produce an error note and a constant-false filter.
    """
    start_token = operator
    start = start_token.start_position
    end_token = operand
    # descend into nested lists to find a token with position info
    # NOTE(review): this takes element [0], while the binary variant below
    # takes [-1] for the end token — confirm whether [0] is intended here.
    while isinstance(end_token, list):
        end_token = end_token[0]
    end = end_token.start_position + len(end_token.input_text)
    operand, outer_filter = transform_tree_to_query(data, operand, search_notes)
    if not outer_filter:
        # identity wrapper when no array placeholder produced an outer filter
        def outer_filter(filter):
            return filter
    str_operator = operator.operator
    operator_aliases = {
        '!': 'not'
    }
    if str_operator in operator_aliases:
        str_operator = operator_aliases[str_operator]
    # map the transformed operand onto the type key used in the registry
    if isinstance(operand, object_search_parser.Boolean):
        operand_type = datatypes.Boolean
    elif isinstance(operand, object_search_parser.Date):
        operand_type = datatypes.DateTime
    elif isinstance(operand, object_search_parser.Quantity):
        operand_type = datatypes.Quantity
    elif isinstance(operand, object_search_parser.Text):
        operand_type = datatypes.Text
    elif isinstance(operand, Attribute):
        operand_type = Attribute
    else:
        operand_type = None
    if (operand_type, str_operator) in unary_operator_handlers:
        return unary_operator_handlers[(operand_type, str_operator)](operator, operand, outer_filter, search_notes)
    search_notes.append(('error', "Unknown unary operation", start, end))
    return Expression(operator.input_text + operand.input_text, operator.start_position, false()), None
def transform_binary_operation_to_query(data, left_operand, operator, right_operand, search_notes: typing.List[typing.Tuple[str, str, int, typing.Optional[int]]]) -> typing.Tuple[Expression, typing.Optional[typing.Callable]]:
    """Dispatch a binary operation to the handler registered for its
    operand types and (alias-normalized) operator.

    At most one side may contribute an outer filter (array placeholder);
    two placeholders in one operation are reported as an error. Unknown
    type/operator combinations yield an error note and a false() filter.
    """
    # find the first/last tokens with position info for error reporting
    start_token = left_operand
    while isinstance(start_token, list):
        start_token = start_token[0]
    start = start_token.start_position
    end_token = right_operand
    while isinstance(end_token, list):
        end_token = end_token[-1]
    end = end_token.start_position + len(end_token.input_text)
    left_operand, left_outer_filter = transform_tree_to_query(data, left_operand, search_notes)
    right_operand, right_outer_filter = transform_tree_to_query(data, right_operand, search_notes)
    if left_outer_filter and right_outer_filter:
        search_notes.append(('error', "Multiple array placeholders", start, end))
        return Expression(left_operand.input_text + operator.input_text + right_operand.input_text, left_operand.start_position, false()), None
    if left_outer_filter:
        outer_filter = left_outer_filter
    elif right_outer_filter:
        outer_filter = right_outer_filter
    else:
        # identity wrapper when neither side has an array placeholder
        def outer_filter(filter):
            return filter
    str_operator = operator.operator
    operator_aliases = {
        '||': 'or',
        '&&': 'and',
        '=': '=='
    }
    if str_operator in operator_aliases:
        str_operator = operator_aliases[str_operator]
    # map each transformed operand onto the type key used in the registry
    if isinstance(left_operand, object_search_parser.Boolean):
        left_operand_type = datatypes.Boolean
    elif isinstance(left_operand, object_search_parser.Date):
        left_operand_type = datatypes.DateTime
    elif isinstance(left_operand, object_search_parser.Quantity):
        left_operand_type = datatypes.Quantity
    elif isinstance(left_operand, object_search_parser.Text):
        left_operand_type = datatypes.Text
    elif isinstance(left_operand, Attribute):
        left_operand_type = Attribute
    else:
        left_operand_type = None
    if isinstance(right_operand, object_search_parser.Boolean):
        right_operand_type = datatypes.Boolean
    elif isinstance(right_operand, object_search_parser.Date):
        right_operand_type = datatypes.DateTime
    elif isinstance(right_operand, object_search_parser.Quantity):
        right_operand_type = datatypes.Quantity
    elif isinstance(right_operand, object_search_parser.Text):
        right_operand_type = datatypes.Text
    elif isinstance(right_operand, Attribute):
        right_operand_type = Attribute
    else:
        right_operand_type = None
    # natural-language datetime operators map onto comparison operators
    if datatypes.DateTime in (left_operand_type, right_operand_type):
        if str_operator.strip() == 'after':
            str_operator = '>'
        elif str_operator.strip() == 'before':
            str_operator = '<'
        elif str_operator.strip() == 'on':
            str_operator = '=='
    if (left_operand_type, right_operand_type, str_operator) in binary_operator_handlers:
        return binary_operator_handlers[(left_operand_type, right_operand_type, str_operator)](left_operand, operator, right_operand, outer_filter, search_notes)
    search_notes.append(('error', "Unknown binary operation", start, end))
    return Expression(left_operand.input_text + operator.input_text + right_operand.input_text, left_operand.start_position, false()), None
def transform_tree_to_query(data, tree: typing.Union[object_search_parser.Literal, list], search_notes: typing.List[typing.Tuple[str, str, int, typing.Optional[int]]]) -> typing.Tuple[Expression, typing.Optional[typing.Callable]]:
    """Recursively convert a parsed search tree into a query filter.

    :param data: the JSONB data column expression used to resolve attributes
    :param tree: a literal, or a (possibly nested) list of one to three
        elements as produced by the query parser
    :param search_notes: list collecting (category, message, start, end) notes
    :return: (filter expression, optional outer filter callable used for
        array placeholders)
    """
    if isinstance(tree, object_search_parser.Literal):
        return transform_literal_to_query(data, tree, search_notes)
    if not isinstance(tree, list):
        search_notes.append(('error', "Invalid search query", 0, None))
        return false(), None
    if len(tree) == 1:
        # single-element list, e.g. a parenthesized expression: unwrap it
        value, = tree
        return transform_tree_to_query(data, value, search_notes)
    if len(tree) == 2:
        operator, operand = tree
        if isinstance(operator, object_search_parser.Operator):
            return transform_unary_operation_to_query(data, operator, operand, search_notes)
        search_notes.append(('error', "Invalid search query (missing operator)", 0, None))
        return false(), None
    if len(tree) == 3:
        left_operand, operator, right_operand = tree
        if isinstance(operator, object_search_parser.Operator):
            return transform_binary_operation_to_query(data, left_operand, operator, right_operand, search_notes)
        search_notes.append(('error', "Invalid search query (missing operator)", 0, None))
        return false(), None
    # Fix: lists of any other length (empty or 4+ elements) previously fell
    # off the end of the function and implicitly returned None, which made
    # callers crash when unpacking the expected result tuple. Report them as
    # invalid queries instead.
    search_notes.append(('error', "Invalid search query", 0, None))
    return false(), None
def should_use_advanced_search(query_string: str) -> typing.Tuple[bool, str]:
    """
    Detect whether the advanced search should be used automatically.

    The user can force the use of the advanced search, but for specific search
    query strings, the advanced search will be more appropriate, e.g. when
    the user tries to use comparisons or search for tags.

    To prevent an automatic advanced search, users can quote their query
    strings. This way they will use the simple, text-based search.

    :param query_string: the original query string
    :return: whether to use the advanced search and a modified query string
    """
    # Fix: require at least two characters before treating the string as
    # quoted. Previously an empty query string raised an IndexError here, and
    # a lone '"' character compared the same character to itself and was
    # "unquoted" to an empty string.
    if len(query_string) >= 2 and query_string[0] == query_string[-1] == '"':
        # Remove quotes around the query string
        return False, query_string[1:-1]
    # any advanced-search operator character triggers the advanced search
    for operator in ('=', '<', '>', '#', '&', '|'):
        if operator in query_string:
            return True, query_string
    return False, query_string
def generate_filter_func(query_string: str, use_advanced_search: bool) -> typing.Tuple[typing.Callable, typing.Any, bool]:
    """
    Generates a filter function for use with SQLAlchemy and the JSONB data
    attribute in the object tables.
    The generated filter functions can be used for objects.get_objects()
    :param query_string: the query string
    :param use_advanced_search: whether to use simple text search (False) or advanced search (True)
    :return: filter func, search tree and whether the advanced search was used
    """
    tree = None
    query_string = query_string.strip()
    if query_string:
        if not use_advanced_search:
            # quoted queries or queries with operators may flip this flag
            use_advanced_search, query_string = should_use_advanced_search(query_string)
        if use_advanced_search:
            # Advanced search using parser and where_filters
            try:
                tree = object_search_parser.parse_query_string(query_string)
            except object_search_parser.ParseError as e:
                # parse errors carry position information for the UI
                def filter_func(data, search_notes, e=e):
                    """ Return no objects and set search_notes"""
                    search_notes.append(('error', e.message, e.start, e.end))
                    return False
                return filter_func, None, use_advanced_search
            except Exception:
                # unexpected parser failure: report the whole query string
                def filter_func(data, search_notes, start=0, end=len(query_string)):
                    """ Return no objects and set search_notes"""
                    search_notes.append(('error', "Failed to parse query string", start, end))
                    return False
                return filter_func, None, use_advanced_search
            if isinstance(tree, list) and not tree:
                # the parser produced an empty tree, so nothing can match
                def filter_func(data, search_notes, start=0, end=len(query_string)):
                    """ Return no objects and set search_notes"""
                    search_notes.append(('error', 'Empty search', start, end))
                    return False
                return filter_func, None, use_advanced_search
            if isinstance(tree, object_search_parser.Literal):
                # a bare literal: booleans short-circuit to all/none,
                # attributes and tags fall through to the generic path below
                if isinstance(tree, object_search_parser.Boolean):
                    if tree.value.value:
                        def filter_func(data, search_notes, start=0, end=len(query_string)):
                            """ Return all objects and set search_notes"""
                            search_notes.append(('warning', 'This search will always return all objects', start, end))
                            return True
                        return filter_func, tree, use_advanced_search
                    else:
                        def filter_func(data, search_notes, start=0, end=len(query_string)):
                            """ Return no objects and set search_notes"""
                            search_notes.append(('warning', 'This search will never return any objects', start, end))
                            return False
                        return filter_func, tree, use_advanced_search
                elif isinstance(tree, object_search_parser.Attribute):
                    pass
                elif isinstance(tree, object_search_parser.Tag):
                    pass
                else:
                    def filter_func(data, search_notes, start=0, end=len(query_string)):
                        """ Return no objects and set search_notes"""
                        search_notes.append(('error', 'Unable to use literal as search query', start, end))
                        return False
                    return filter_func, None, use_advanced_search
            # generic path: translate the parsed tree into a query expression
            # (tree=tree binds the current tree at definition time)
            def filter_func(data, search_notes, tree=tree):
                """ Filter objects based on search query string """
                filter_func, outer_filter = transform_tree_to_query(data, tree, search_notes)
                # check bool if filter_func is only an attribute
                if isinstance(filter_func, Expression):
                    filter_func = filter_func.value
                if isinstance(filter_func, Attribute):
                    filter_func = where_filters.boolean_true(filter_func.value)
                if outer_filter:
                    filter_func = outer_filter(filter_func)
                return filter_func
        else:
            # Simple search in values
            def filter_func(data, search_notes, query_string=query_string):
                """ Filter objects based on search query string """
                # The query string is converted to json to escape quotes, backslashes, etc
                query_string = json.dumps(query_string)[1:-1]
                return data.cast(String).ilike('%: "%' + query_string + '%"%')
    else:
        # empty query string: match everything
        def filter_func(data, search_notes):
            """ Return all objects"""
            return True
    return filter_func, tree, use_advanced_search
def wrap_filter_func(filter_func):
    """
    Wrap a filter function so that a new list will be filled with the search notes.

    :param filter_func: the filter function to wrap
    :return: the wrapped filter function and the search notes list
    """
    collected_notes = []

    def wrapped_filter_func(*args, search_notes=collected_notes, filter_func_impl=filter_func, **kwargs):
        """ Call the wrapped filter function, injecting the shared notes list. """
        # default arguments bind the notes list and implementation at definition time
        return filter_func_impl(*args, search_notes=search_notes, **kwargs)

    return wrapped_filter_func, collected_notes
| 47.472793
| 230
| 0.736449
| 4,872
| 39,260
| 5.585591
| 0.049466
| 0.08044
| 0.064344
| 0.075221
| 0.833499
| 0.809062
| 0.758645
| 0.720795
| 0.694962
| 0.67115
| 0
| 0.000923
| 0.171931
| 39,260
| 826
| 231
| 47.530266
| 0.836143
| 0.046867
| 0
| 0.466216
| 0
| 0
| 0.032236
| 0.000805
| 0
| 0
| 0
| 0
| 0.003378
| 1
| 0.162162
| false
| 0.005068
| 0.016892
| 0.074324
| 0.439189
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1e8e03463130770f1b6c3dd6f829b5b08c569b5c
| 127
|
py
|
Python
|
contrib/pyln-testing/tests/test_start.py
|
Bladez1753/lightning
|
6d256fdbf9e8fe571e6338ab7abd1183546a1a81
|
[
"MIT"
] | 2,288
|
2015-06-13T04:01:10.000Z
|
2022-03-30T16:23:20.000Z
|
contrib/pyln-testing/tests/test_start.py
|
Bladez1753/lightning
|
6d256fdbf9e8fe571e6338ab7abd1183546a1a81
|
[
"MIT"
] | 4,311
|
2015-06-13T23:39:10.000Z
|
2022-03-31T23:23:29.000Z
|
contrib/pyln-testing/tests/test_start.py
|
Bladez1753/lightning
|
6d256fdbf9e8fe571e6338ab7abd1183546a1a81
|
[
"MIT"
] | 750
|
2015-06-13T16:34:01.000Z
|
2022-03-31T15:50:39.000Z
|
from pyln.testing.fixtures import * # noqa: F401 F403
def test_peers(node_factory):
    """Build a two-node line graph via the node_factory fixture."""
    # NOTE(review): no assertions — presumably this only checks that node
    # setup and peering succeed without raising; confirm against pyln-testing.
    l1, l2 = node_factory.line_graph(2)
| 21.166667
| 54
| 0.732283
| 20
| 127
| 4.45
| 0.9
| 0.247191
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084906
| 0.165354
| 127
| 5
| 55
| 25.4
| 0.754717
| 0.11811
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
1e94d556a4a9eb3292a7c9e6b783df1d1891b100
| 347
|
py
|
Python
|
build/lib/rotation_averaging/__init__.py
|
nishant34/RotationAveraging
|
79f77482681665fcf0eb16b539238432205b9d54
|
[
"MIT"
] | null | null | null |
build/lib/rotation_averaging/__init__.py
|
nishant34/RotationAveraging
|
79f77482681665fcf0eb16b539238432205b9d54
|
[
"MIT"
] | null | null | null |
build/lib/rotation_averaging/__init__.py
|
nishant34/RotationAveraging
|
79f77482681665fcf0eb16b539238432205b9d54
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: Rafael Marinheiro
# @Date: 2014-10-28 02:40:15
# @Last Modified by: marinheiro
# @Last Modified time: 2014-12-08 20:37:58
import rotation_averaging.algorithms
import rotation_averaging.graph
import rotation_averaging.so3
import rotation_averaging.compare
import rotation_averaging.util
| 28.916667
| 42
| 0.772334
| 50
| 347
| 5.26
| 0.68
| 0.26616
| 0.437262
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09772
| 0.115274
| 347
| 12
| 43
| 28.916667
| 0.758958
| 0.492795
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1ee4cf63825ebc8df0dbccd94ef4ac8312c1abb0
| 199
|
py
|
Python
|
AbstractFactory/Table/ITable.py
|
ahaile505/Python_Design_Patterns
|
fe68f7357a2441afe9bfb0d217e3c1fa36a8c36d
|
[
"MIT"
] | null | null | null |
AbstractFactory/Table/ITable.py
|
ahaile505/Python_Design_Patterns
|
fe68f7357a2441afe9bfb0d217e3c1fa36a8c36d
|
[
"MIT"
] | null | null | null |
AbstractFactory/Table/ITable.py
|
ahaile505/Python_Design_Patterns
|
fe68f7357a2441afe9bfb0d217e3c1fa36a8c36d
|
[
"MIT"
] | null | null | null |
from abc import ABCMeta, abstractmethod, abstractstaticmethod
class ITable(metaclass=ABCMeta):
    """The Table Interface"""

    # abc.abstractstaticmethod is deprecated since Python 3.3; the
    # recommended replacement is @staticmethod stacked on @abstractmethod,
    # which is behaviorally equivalent for abstract static methods.
    @staticmethod
    @abstractmethod
    def dimensions():
        """Get the table dimensions"""
| 22.111111
| 45
| 0.693467
| 19
| 199
| 7.263158
| 0.736842
| 0.115942
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.201005
| 199
| 9
| 46
| 22.111111
| 0.867925
| 0.221106
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
949278aaa54eda4fe90c21042a94cd620f37c768
| 91
|
py
|
Python
|
deepmath/__init__.py
|
mathraim/deepmath
|
1f0a75b26763d62b1d0cdcf1355cf7218ddcb09a
|
[
"MIT"
] | 2
|
2019-05-18T22:47:38.000Z
|
2019-05-19T00:27:13.000Z
|
deepmath/__init__.py
|
mathraim/deepmath
|
1f0a75b26763d62b1d0cdcf1355cf7218ddcb09a
|
[
"MIT"
] | null | null | null |
deepmath/__init__.py
|
mathraim/deepmath
|
1f0a75b26763d62b1d0cdcf1355cf7218ddcb09a
|
[
"MIT"
] | null | null | null |
name = "deepmath"
from . import base_classes
from . import layers
from . import optimizers
| 18.2
| 26
| 0.769231
| 12
| 91
| 5.75
| 0.666667
| 0.434783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164835
| 91
| 4
| 27
| 22.75
| 0.907895
| 0
| 0
| 0
| 0
| 0
| 0.087912
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
949327e230eb365f567b56be9e361f0385c6da2c
| 199
|
py
|
Python
|
lightpath/__init__.py
|
klauer/lightpath
|
34e434bdd4c023721b5dbcc859e36cdcaf91662b
|
[
"BSD-3-Clause-LBNL"
] | 1
|
2020-08-09T17:18:40.000Z
|
2020-08-09T17:18:40.000Z
|
lightpath/__init__.py
|
klauer/lightpath
|
34e434bdd4c023721b5dbcc859e36cdcaf91662b
|
[
"BSD-3-Clause-LBNL"
] | 70
|
2018-02-15T01:42:13.000Z
|
2021-11-10T18:36:09.000Z
|
lightpath/__init__.py
|
klauer/lightpath
|
34e434bdd4c023721b5dbcc859e36cdcaf91662b
|
[
"BSD-3-Clause-LBNL"
] | 3
|
2018-06-26T00:08:22.000Z
|
2020-10-28T02:35:20.000Z
|
__all__ = ['device']
from .path import BeamPath # noqa
from .controller import LightController # noqa
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
| 22.111111
| 47
| 0.768844
| 24
| 199
| 5.875
| 0.541667
| 0.234043
| 0.255319
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145729
| 199
| 8
| 48
| 24.875
| 0.829412
| 0.045226
| 0
| 0
| 0
| 0
| 0.069519
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
94b15575b5d789ec66562ee65df79ee8da5685a1
| 162
|
py
|
Python
|
BayesRate.py
|
schnitzler-j/BayesRate
|
86fb252df8589c89fce1c42699c69c5e2bbccb6f
|
[
"MIT"
] | 3
|
2016-11-10T00:04:47.000Z
|
2018-02-01T17:39:09.000Z
|
BayesRate.py
|
schnitzler-j/BayesRate
|
86fb252df8589c89fce1c42699c69c5e2bbccb6f
|
[
"MIT"
] | null | null | null |
BayesRate.py
|
schnitzler-j/BayesRate
|
86fb252df8589c89fce1c42699c69c5e2bbccb6f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# Created by Daniele Silvestro on 13/06/2011. => dsilvestro@senckenberg.de
import sys
import os
import os.path
from bayesrate import main
| 27
| 75
| 0.771605
| 26
| 162
| 4.807692
| 0.846154
| 0.128
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057554
| 0.141975
| 162
| 6
| 76
| 27
| 0.841727
| 0.580247
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
94c5672b1b70de4922c4dd75ad5570f838538f5a
| 161
|
py
|
Python
|
is_core/models/humanize.py
|
cedel1/django-is-core
|
39f7f79077da2956c29df6844af84b56d98a17cf
|
[
"BSD-3-Clause"
] | null | null | null |
is_core/models/humanize.py
|
cedel1/django-is-core
|
39f7f79077da2956c29df6844af84b56d98a17cf
|
[
"BSD-3-Clause"
] | null | null | null |
is_core/models/humanize.py
|
cedel1/django-is-core
|
39f7f79077da2956c29df6844af84b56d98a17cf
|
[
"BSD-3-Clause"
] | null | null | null |
from django.utils.html import format_html
def url_humanized(field, val, *args, **kwargs):
    """Render a truthy value as an HTML link to itself; pass falsy values through unchanged."""
    if not val:
        return val
    return format_html('<a href="{0}">{0}</a>', val)
| 26.833333
| 68
| 0.68323
| 27
| 161
| 3.962963
| 0.703704
| 0.186916
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014493
| 0.142857
| 161
| 5
| 69
| 32.2
| 0.76087
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
94d4f09faa96b53a3aa82e31e300d6c98e68c5fa
| 18,325
|
py
|
Python
|
podpac/datalib/weathercitizen_sensorburst_pb2.py
|
creare-com/podpac
|
7feb5c957513c146ce73ba1c36c630284f513a6e
|
[
"Apache-2.0"
] | 46
|
2018-04-06T19:54:32.000Z
|
2022-02-08T02:00:02.000Z
|
podpac/datalib/weathercitizen_sensorburst_pb2.py
|
creare-com/podpac
|
7feb5c957513c146ce73ba1c36c630284f513a6e
|
[
"Apache-2.0"
] | 474
|
2018-04-05T22:21:09.000Z
|
2022-02-24T14:21:16.000Z
|
podpac/datalib/weathercitizen_sensorburst_pb2.py
|
creare-com/podpac
|
7feb5c957513c146ce73ba1c36c630284f513a6e
|
[
"Apache-2.0"
] | 4
|
2019-04-11T17:49:53.000Z
|
2020-11-29T22:36:53.000Z
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: sensorburst.proto
import sys
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name="sensorburst.proto",
package="sensorburst",
syntax="proto3",
serialized_options=_b("H\003"),
serialized_pb=_b(
'\n\x11sensorburst.proto\x12\x0bsensorburst"\xbf\x04\n\x06Record\x12\x0c\n\x04time\x18\x01 \x01(\x03\x12\x0c\n\x04long\x18\x02 \x01(\x02\x12\x0b\n\x03lat\x18\x03 \x01(\x02\x12\x10\n\x08\x61ltitude\x18\x04 \x01(\x02\x12\x13\n\x0btemperature\x18\x05 \x01(\x02\x12\x10\n\x08pressure\x18\x06 \x01(\x02\x12\r\n\x05light\x18\x07 \x01(\x02\x12\x11\n\tproximity\x18\x08 \x01(\x05\x12\x17\n\x0f\x61\x63\x63\x65lerometer_x\x18\t \x01(\x02\x12\x17\n\x0f\x61\x63\x63\x65lerometer_y\x18\n \x01(\x02\x12\x17\n\x0f\x61\x63\x63\x65lerometer_z\x18\x0b \x01(\x02\x12\x1d\n\x15linear_acceleration_x\x18\x0c \x01(\x02\x12\x1d\n\x15linear_acceleration_y\x18\r \x01(\x02\x12\x1d\n\x15linear_acceleration_z\x18\x0e \x01(\x02\x12\x15\n\rorientation_x\x18\x0f \x01(\x02\x12\x15\n\rorientation_y\x18\x10 \x01(\x02\x12\x15\n\rorientation_z\x18\x11 \x01(\x02\x12\x18\n\x10magnetic_field_x\x18\x12 \x01(\x02\x12\x18\n\x10magnetic_field_y\x18\x13 \x01(\x02\x12\x18\n\x10magnetic_field_z\x18\x14 \x01(\x02\x12\x13\n\x0bgyroscope_x\x18\x15 \x01(\x02\x12\x13\n\x0bgyroscope_y\x18\x16 \x01(\x02\x12\x13\n\x0bgyroscope_z\x18\x17 \x01(\x02\x12\x11\n\tgravity_x\x18\x18 \x01(\x02\x12\x11\n\tgravity_y\x18\x19 \x01(\x02\x12\x11\n\tgravity_z\x18\x1a \x01(\x02"-\n\x05\x42urst\x12$\n\x07records\x18\x01 \x03(\x0b\x32\x13.sensorburst.RecordB\x02H\x03\x62\x06proto3'
),
)
_RECORD = _descriptor.Descriptor(
name="Record",
full_name="sensorburst.Record",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="time",
full_name="sensorburst.Record.time",
index=0,
number=1,
type=3,
cpp_type=2,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="long",
full_name="sensorburst.Record.long",
index=1,
number=2,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="lat",
full_name="sensorburst.Record.lat",
index=2,
number=3,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="altitude",
full_name="sensorburst.Record.altitude",
index=3,
number=4,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="temperature",
full_name="sensorburst.Record.temperature",
index=4,
number=5,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="pressure",
full_name="sensorburst.Record.pressure",
index=5,
number=6,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="light",
full_name="sensorburst.Record.light",
index=6,
number=7,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="proximity",
full_name="sensorburst.Record.proximity",
index=7,
number=8,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="accelerometer_x",
full_name="sensorburst.Record.accelerometer_x",
index=8,
number=9,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="accelerometer_y",
full_name="sensorburst.Record.accelerometer_y",
index=9,
number=10,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="accelerometer_z",
full_name="sensorburst.Record.accelerometer_z",
index=10,
number=11,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="linear_acceleration_x",
full_name="sensorburst.Record.linear_acceleration_x",
index=11,
number=12,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="linear_acceleration_y",
full_name="sensorburst.Record.linear_acceleration_y",
index=12,
number=13,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="linear_acceleration_z",
full_name="sensorburst.Record.linear_acceleration_z",
index=13,
number=14,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="orientation_x",
full_name="sensorburst.Record.orientation_x",
index=14,
number=15,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="orientation_y",
full_name="sensorburst.Record.orientation_y",
index=15,
number=16,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="orientation_z",
full_name="sensorburst.Record.orientation_z",
index=16,
number=17,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="magnetic_field_x",
full_name="sensorburst.Record.magnetic_field_x",
index=17,
number=18,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="magnetic_field_y",
full_name="sensorburst.Record.magnetic_field_y",
index=18,
number=19,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="magnetic_field_z",
full_name="sensorburst.Record.magnetic_field_z",
index=19,
number=20,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="gyroscope_x",
full_name="sensorburst.Record.gyroscope_x",
index=20,
number=21,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="gyroscope_y",
full_name="sensorburst.Record.gyroscope_y",
index=21,
number=22,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="gyroscope_z",
full_name="sensorburst.Record.gyroscope_z",
index=22,
number=23,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="gravity_x",
full_name="sensorburst.Record.gravity_x",
index=23,
number=24,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="gravity_y",
full_name="sensorburst.Record.gravity_y",
index=24,
number=25,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="gravity_z",
full_name="sensorburst.Record.gravity_z",
index=25,
number=26,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=35,
serialized_end=610,
)
_BURST = _descriptor.Descriptor(
name="Burst",
full_name="sensorburst.Burst",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="records",
full_name="sensorburst.Burst.records",
index=0,
number=1,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
)
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=612,
serialized_end=657,
)
_BURST.fields_by_name["records"].message_type = _RECORD
DESCRIPTOR.message_types_by_name["Record"] = _RECORD
DESCRIPTOR.message_types_by_name["Burst"] = _BURST
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Record = _reflection.GeneratedProtocolMessageType(
"Record",
(_message.Message,),
{
"DESCRIPTOR": _RECORD,
"__module__": "sensorburst_pb2"
# @@protoc_insertion_point(class_scope:sensorburst.Record)
},
)
_sym_db.RegisterMessage(Record)
Burst = _reflection.GeneratedProtocolMessageType(
"Burst",
(_message.Message,),
{
"DESCRIPTOR": _BURST,
"__module__": "sensorburst_pb2"
# @@protoc_insertion_point(class_scope:sensorburst.Burst)
},
)
_sym_db.RegisterMessage(Burst)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 31.271331
| 1,332
| 0.547285
| 1,858
| 18,325
| 5.138321
| 0.102799
| 0.069551
| 0.057714
| 0.070703
| 0.769666
| 0.71478
| 0.656541
| 0.636745
| 0.633497
| 0.620719
| 0
| 0.051478
| 0.357599
| 18,325
| 585
| 1,333
| 31.324786
| 0.759514
| 0.015825
| 0
| 0.719361
| 1
| 0.001776
| 0.147778
| 0.121152
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.008881
| 0
| 0.008881
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
94d52e38dfc452d86f1f239211765ade751f73c1
| 17
|
py
|
Python
|
test.py
|
miryazdi/Metering
|
d0f6feac859480e415811473f5efeec0add4b2d6
|
[
"MIT"
] | null | null | null |
test.py
|
miryazdi/Metering
|
d0f6feac859480e415811473f5efeec0add4b2d6
|
[
"MIT"
] | null | null | null |
test.py
|
miryazdi/Metering
|
d0f6feac859480e415811473f5efeec0add4b2d6
|
[
"MIT"
] | null | null | null |
print("salamm")
| 8.5
| 16
| 0.647059
| 2
| 17
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 17
| 1
| 17
| 17
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
a205cc6e3ad3427b945759b7822e3c433b2a7d3d
| 126
|
py
|
Python
|
lez/admin.py
|
fredley/lez
|
7cd575e54d22b600a1335580d48e36a83be566b8
|
[
"MIT"
] | 1
|
2018-01-15T18:21:25.000Z
|
2018-01-15T18:21:25.000Z
|
lez/admin.py
|
fredley/lez
|
7cd575e54d22b600a1335580d48e36a83be566b8
|
[
"MIT"
] | 2
|
2021-04-08T18:28:12.000Z
|
2021-06-09T17:22:30.000Z
|
lez/admin.py
|
fredley/lez
|
7cd575e54d22b600a1335580d48e36a83be566b8
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import List, ListItem
admin.site.register(List)
admin.site.register(ListItem)
| 18
| 34
| 0.809524
| 18
| 126
| 5.666667
| 0.555556
| 0.176471
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103175
| 126
| 6
| 35
| 21
| 0.902655
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
bf4ba273bad55c09bcd1cee04a5a32d8d558f4c3
| 661
|
py
|
Python
|
src/quicknlp/data/__init__.py
|
jalajthanaki/quick-nlp
|
861a54c9e30de076a2316cb6712d934de4058cc5
|
[
"MIT"
] | 287
|
2018-04-10T10:58:09.000Z
|
2022-03-22T02:05:40.000Z
|
src/quicknlp/data/__init__.py
|
scutcyr/quick-nlp
|
861a54c9e30de076a2316cb6712d934de4058cc5
|
[
"MIT"
] | 1
|
2018-07-03T17:10:03.000Z
|
2018-07-03T17:10:03.000Z
|
src/quicknlp/data/__init__.py
|
scutcyr/quick-nlp
|
861a54c9e30de076a2316cb6712d934de4058cc5
|
[
"MIT"
] | 51
|
2018-04-10T11:38:02.000Z
|
2021-10-17T06:23:43.000Z
|
from .data_loaders import DialogueDataLoader
from .datasets import DialogueDataset, HierarchicalDatasetFromDataFrame, HierarchicalDatasetFromFiles, \
TabularDatasetFromDataFrame, TabularDatasetFromFiles, DialDataset, HREDDataset, HREDConstraintsDataset
from .dialogue_analysis import DialogueAnalysis
from .dialogue_model_data_loader import CVAEModelData, HREDModelData, HREDAttentionModelData
from .hierarchical_model_data_loader import HierarchicalModelData
from .s2s_model_data_loader import S2SAttentionModelData, S2SModelData, TransformerModelData
from .sampler import DialogueRandomSampler, DialogueSampler
from .spacy_tokenizer import SpacyTokenizer
| 66.1
| 106
| 0.8941
| 56
| 661
| 10.339286
| 0.607143
| 0.046632
| 0.07772
| 0.108808
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004902
| 0.07413
| 661
| 9
| 107
| 73.444444
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.888889
| 0
| 0.888889
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bf6168cb7acba96b551b6b3820dc17bea6066f1d
| 104
|
py
|
Python
|
insomniac/extra_features/action_dm.py
|
shifenis/Insomniac
|
7c9d572b83c29049bc3075073be5549fe821a739
|
[
"MIT"
] | 533
|
2020-06-01T10:40:11.000Z
|
2022-03-29T17:05:50.000Z
|
insomniac/extra_features/action_dm.py
|
shifenis/Insomniac
|
7c9d572b83c29049bc3075073be5549fe821a739
|
[
"MIT"
] | 399
|
2020-06-01T22:01:55.000Z
|
2022-03-29T20:39:29.000Z
|
insomniac/extra_features/action_dm.py
|
shifenis/Insomniac
|
7c9d572b83c29049bc3075073be5549fe821a739
|
[
"MIT"
] | 166
|
2020-06-01T21:51:52.000Z
|
2022-03-12T14:14:44.000Z
|
from insomniac import activation_controller
exec(activation_controller.get_extra_feature('action_dm'))
| 26
| 58
| 0.875
| 13
| 104
| 6.615385
| 0.846154
| 0.465116
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057692
| 104
| 3
| 59
| 34.666667
| 0.877551
| 0
| 0
| 0
| 0
| 0
| 0.086538
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
bfb6a40f446467b439507cdf7546d730ddc7de7e
| 113
|
py
|
Python
|
phytools/__init__.py
|
Kricki/phytools
|
72856b267a64fdad42e9885b67fbc87ec632438d
|
[
"MIT"
] | null | null | null |
phytools/__init__.py
|
Kricki/phytools
|
72856b267a64fdad42e9885b67fbc87ec632438d
|
[
"MIT"
] | null | null | null |
phytools/__init__.py
|
Kricki/phytools
|
72856b267a64fdad42e9885b67fbc87ec632438d
|
[
"MIT"
] | null | null | null |
from . import constants
from . import optics
from . import functions
from . import misc
from . import signalproc
| 18.833333
| 24
| 0.778761
| 15
| 113
| 5.866667
| 0.466667
| 0.568182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176991
| 113
| 5
| 25
| 22.6
| 0.946237
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
44a5ea5963106696963418107b4c8a2d6c741eed
| 163
|
py
|
Python
|
edman/__init__.py
|
yuskyamada/EDMAN
|
542ac53fcc95a70824191da0b392f67c38a310e6
|
[
"MIT"
] | null | null | null |
edman/__init__.py
|
yuskyamada/EDMAN
|
542ac53fcc95a70824191da0b392f67c38a310e6
|
[
"MIT"
] | null | null | null |
edman/__init__.py
|
yuskyamada/EDMAN
|
542ac53fcc95a70824191da0b392f67c38a310e6
|
[
"MIT"
] | 1
|
2019-11-01T00:40:18.000Z
|
2019-11-01T00:40:18.000Z
|
from .config import Config
from .convert import Convert
from .file import File
from .db import DB
from .search import Search
from .json_manager import JsonManager
| 23.285714
| 37
| 0.815951
| 25
| 163
| 5.28
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147239
| 163
| 6
| 38
| 27.166667
| 0.94964
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
44bb5d6b772c631f6b76626731e10b526094fa9c
| 127
|
py
|
Python
|
tools/external_converter_v2/parser/frontend/run.py
|
Shixiaowei02/Anakin
|
f1ea086c5dfa1009ba15a64bc3e30cde07356360
|
[
"Apache-2.0"
] | 533
|
2018-05-18T06:14:04.000Z
|
2022-03-23T11:46:30.000Z
|
tools/external_converter_v2/parser/frontend/run.py
|
Shixiaowei02/Anakin
|
f1ea086c5dfa1009ba15a64bc3e30cde07356360
|
[
"Apache-2.0"
] | 100
|
2018-05-26T08:32:48.000Z
|
2022-03-17T03:26:25.000Z
|
tools/external_converter_v2/parser/frontend/run.py
|
Shixiaowei02/Anakin
|
f1ea086c5dfa1009ba15a64bc3e30cde07356360
|
[
"Apache-2.0"
] | 167
|
2018-05-18T06:14:35.000Z
|
2022-02-14T01:44:20.000Z
|
from dash_board import GraphBoard
if __name__ == "__main__":
pass
#GraphBoard.run(host='0.0.0.0', port=8888, debug=True)
| 21.166667
| 55
| 0.708661
| 20
| 127
| 4.05
| 0.8
| 0.074074
| 0.074074
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073395
| 0.141732
| 127
| 5
| 56
| 25.4
| 0.669725
| 0.417323
| 0
| 0
| 0
| 0
| 0.111111
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
78007fdc4dd682e7aaa9a97d6d4543de4afbf885
| 127
|
py
|
Python
|
projects/webptspy/webpts/__init__.py
|
codelieche/testing
|
1f4a3393f761654d98588c9ba90596a307fa59db
|
[
"MIT"
] | 2
|
2017-08-10T03:40:22.000Z
|
2017-08-17T13:20:16.000Z
|
projects/webptspy/webpts/__init__.py
|
codelieche/webpts
|
1f4a3393f761654d98588c9ba90596a307fa59db
|
[
"MIT"
] | null | null | null |
projects/webptspy/webpts/__init__.py
|
codelieche/webpts
|
1f4a3393f761654d98588c9ba90596a307fa59db
|
[
"MIT"
] | null | null | null |
# -*- coding:utf-8 -*-
from __future__ import absolute_import
from .celery import app as celery_app
__all__ = ['celery_app']
| 18.142857
| 38
| 0.732283
| 18
| 127
| 4.555556
| 0.611111
| 0.219512
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009259
| 0.149606
| 127
| 6
| 39
| 21.166667
| 0.75
| 0.15748
| 0
| 0
| 0
| 0
| 0.095238
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
788bf497e496f2bdbc820cf701a3fff2faaa5b5d
| 65
|
py
|
Python
|
supersql/datatypes/uuid.py
|
rayattack/supersql
|
0a592e7f303ac18b8df7bebac226b26f38f6d192
|
[
"MIT"
] | 2
|
2019-11-04T00:19:30.000Z
|
2020-10-04T01:24:04.000Z
|
supersql/datatypes/uuid.py
|
rayattack/supersql
|
0a592e7f303ac18b8df7bebac226b26f38f6d192
|
[
"MIT"
] | 2
|
2021-03-31T14:07:04.000Z
|
2021-03-31T14:07:20.000Z
|
supersql/datatypes/uuid.py
|
rayattack/supersql
|
0a592e7f303ac18b8df7bebac226b26f38f6d192
|
[
"MIT"
] | 2
|
2021-03-30T21:40:14.000Z
|
2022-03-17T20:52:25.000Z
|
from supersql.datatypes.base import Base
class UUID(Base):...
| 10.833333
| 40
| 0.738462
| 9
| 65
| 5.333333
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138462
| 65
| 5
| 41
| 13
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
152d4e4115821cbfacb87fa20c0857ccb7ba4f68
| 2,417
|
py
|
Python
|
problem/test.py
|
TCKACHIKSIS/lms
|
fd06eb7a2baa9b9f82caa5223c86ba500f88333c
|
[
"MIT"
] | 8
|
2021-02-09T12:15:27.000Z
|
2022-03-14T07:41:02.000Z
|
problem/test.py
|
TCKACHIKSIS/lms
|
fd06eb7a2baa9b9f82caa5223c86ba500f88333c
|
[
"MIT"
] | 70
|
2021-04-14T12:45:17.000Z
|
2021-08-04T04:51:34.000Z
|
problem/test.py
|
TCKACHIKSIS/lms
|
fd06eb7a2baa9b9f82caa5223c86ba500f88333c
|
[
"MIT"
] | 3
|
2021-08-03T08:22:01.000Z
|
2022-02-27T23:20:05.000Z
|
from django.urls import reverse
from model_mommy import mommy
from rest_framework import status
from imcslms.test import MainSetup
from course.models import Course
from lesson.models import Lesson
from problem.models import Problem
from problem.serializers import ProblemSerializer
from users.models import CourseAssignTeacher
class ProblemTests(MainSetup):
def test_create_problem(self):
self.test_setup()
(course := mommy.make(Course)).save()
mommy.make(Lesson, course=course).save()
instance = mommy.make(Problem)
instance.lesson = Lesson.objects.first()
instance.save()
data = ProblemSerializer(instance).data
url = reverse('problem-list')
amount = Problem.objects.count()
CourseAssignTeacher(course=course, user=self.user).save()
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(Problem.objects.count(), amount+1)
def test_delete_problem(self):
self.test_setup()
(course := mommy.make(Course)).save()
mommy.make(Lesson, course=course).save()
instance = mommy.make(Problem)
instance.lesson = Lesson.objects.first()
instance.save()
data = ProblemSerializer(instance).data
url = reverse('problem-detail', kwargs=dict(pk=instance.id))
amount = Problem.objects.count()
CourseAssignTeacher(course=course, user=self.user).save()
response = self.client.delete(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
self.assertEqual(Problem.objects.count(), amount - 1)
def test_update_problem(self):
self.test_setup()
(course := mommy.make(Course)).save()
mommy.make(Lesson, course=course).save()
instance = mommy.make(Problem)
instance.lesson = Lesson.objects.first()
instance.save()
data = ProblemSerializer(instance).data
data['id'] = instance.id
data['lesson'] = instance.lesson.id
url = reverse('problem-detail', kwargs=dict(pk=instance.id))
CourseAssignTeacher(course=course, user=self.user).save()
response = self.client.patch(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_read_course(self):
pass
| 38.365079
| 74
| 0.678527
| 282
| 2,417
| 5.734043
| 0.22695
| 0.050093
| 0.047001
| 0.03525
| 0.724181
| 0.724181
| 0.724181
| 0.724181
| 0.724181
| 0.619666
| 0
| 0.005723
| 0.204799
| 2,417
| 62
| 75
| 38.983871
| 0.835588
| 0
| 0
| 0.518519
| 0
| 0
| 0.024834
| 0
| 0
| 0
| 0
| 0
| 0.092593
| 1
| 0.074074
| false
| 0.018519
| 0.166667
| 0
| 0.259259
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
156f600c0f411ee5a242a4cfa771cc405bde9a7d
| 170
|
py
|
Python
|
main/model/__init__.py
|
vanessa-bell/hd-kiosk-v2
|
6e691c564ea3189bd81c9886fc39e5c5cbc19f8e
|
[
"MIT"
] | null | null | null |
main/model/__init__.py
|
vanessa-bell/hd-kiosk-v2
|
6e691c564ea3189bd81c9886fc39e5c5cbc19f8e
|
[
"MIT"
] | 1
|
2017-05-19T19:51:57.000Z
|
2017-05-19T21:01:11.000Z
|
main/model/__init__.py
|
vanessa-bell/hd-kiosk-v2
|
6e691c564ea3189bd81c9886fc39e5c5cbc19f8e
|
[
"MIT"
] | 1
|
2017-05-30T22:40:47.000Z
|
2017-05-30T22:40:47.000Z
|
# coding: utf-8
from .base import Base
from .config_auth import ConfigAuth
from .config import Config
from .user import User
from .faq import Faq
from .stat import Stat
| 18.888889
| 35
| 0.782353
| 28
| 170
| 4.714286
| 0.428571
| 0.151515
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007042
| 0.164706
| 170
| 8
| 36
| 21.25
| 0.922535
| 0.076471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
157dc89fee8defead8f7a048a015a409843d87eb
| 957
|
py
|
Python
|
robot_utils/__init__.py
|
LSTM-Kirigaya/MsnEnvironment
|
29c6e02525c7671f304d0f9d7689942509f12a16
|
[
"MIT"
] | null | null | null |
robot_utils/__init__.py
|
LSTM-Kirigaya/MsnEnvironment
|
29c6e02525c7671f304d0f9d7689942509f12a16
|
[
"MIT"
] | null | null | null |
robot_utils/__init__.py
|
LSTM-Kirigaya/MsnEnvironment
|
29c6e02525c7671f304d0f9d7689942509f12a16
|
[
"MIT"
] | null | null | null |
__all__ = [
"miniBox",
"register",
"scene",
"utils"
]
__version__ = "0.0.2"
from robot_utils.miniBox import Robot
from robot_utils.scene import BaseScene
from robot_utils.scene import Scene1
from robot_utils.scene import Scene2
from robot_utils.scene import Scene3
from robot_utils.scene import RegisterScenes
from robot_utils.utils import UP, DOWN, LEFT, RIGHT
from robot_utils.utils import R2D2_POS, ROBOT_POS, DOOR_POS
from robot_utils.utils import getJointInfo
from robot_utils.utils import keyboard_control_miniBox, control_miniBox
from robot_utils.utils import setCameraPicAndGetPic
from robot_utils.utils import addDoor
from robot_utils.utils import addCylinder
from robot_utils.utils import addSphere
from robot_utils.utils import addBox
from robot_utils.utils import addFence
from robot_utils.utils import rayTest
from robot_utils.utils import checkCollision
from robot_utils.log import msn_info, msn_debug, msn_warn, msn_error
| 30.870968
| 71
| 0.829676
| 142
| 957
| 5.330986
| 0.28169
| 0.225892
| 0.351387
| 0.301189
| 0.561427
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009512
| 0.121212
| 957
| 31
| 72
| 30.870968
| 0.890606
| 0
| 0
| 0
| 0
| 0
| 0.031315
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.730769
| 0
| 0.730769
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ec5d53b1513d64df8d07cb3412b9f405df657388
| 1,454
|
py
|
Python
|
tests/test_srtm3.py
|
masfaraud/altimetry
|
147bf734d2b36b29ec5820bf39c5841f885e433b
|
[
"Apache-2.0"
] | 4
|
2019-02-22T09:18:14.000Z
|
2020-06-29T22:45:51.000Z
|
tests/test_srtm3.py
|
masfaraud/altimetry
|
147bf734d2b36b29ec5820bf39c5841f885e433b
|
[
"Apache-2.0"
] | 6
|
2016-08-15T10:17:48.000Z
|
2021-02-18T20:05:35.000Z
|
tests/test_srtm3.py
|
masfaraud/altimetry
|
147bf734d2b36b29ec5820bf39c5841f885e433b
|
[
"Apache-2.0"
] | 1
|
2020-08-12T11:05:19.000Z
|
2020-08-12T11:05:19.000Z
|
import logging
import altitude
from altitude.base import SRTM3DataLoader
logging.basicConfig(
format='%(asctime)s - %(levelname)s - %(message)s',
level='DEBUG'
)
class TestSRTM3Results:
@classmethod
def setup_class(cls):
data_loader = SRTM3DataLoader()
cls.file_engine = altitude.ElevationService(data_loader)
cls.get_elevation = cls.file_engine.get_elevation
def test_dead_sea(self):
assert self.get_elevation(31.5, 35.5) == -415
def test_around_zero_longitude(self):
assert self.get_elevation(51.2, 0.0) == 61
assert self.get_elevation(51.2, -0.1) == 100
assert self.get_elevation(51.2, 0.1) == 59
def test_around_zero_latitude(self):
assert self.get_elevation(0, 15) == 393
assert self.get_elevation(-0.1, 15) == 423
assert self.get_elevation(0.1, 15) == 381
def test_point_with_invalid_elevation(self):
assert self.get_elevation(47.0, 13.07) is None
def test_point_without_file(self):
assert self.get_elevation(0, 0) is None
def test_random_points(self):
assert self.get_elevation(46.0, 13.0) == 63
assert self.get_elevation(46.999999, 13.0) == 2714
assert self.get_elevation(46.999999, 13.999999) == 1643
assert self.get_elevation(46.0, 13.999999) == 553
assert self.get_elevation(45.2732, 13.7139) == 203
assert self.get_elevation(45.287, 13.905) == 460
| 32.311111
| 64
| 0.669188
| 211
| 1,454
| 4.42654
| 0.35545
| 0.218415
| 0.208779
| 0.353319
| 0.441113
| 0.325482
| 0.267666
| 0.057816
| 0
| 0
| 0
| 0.129484
| 0.213893
| 1,454
| 44
| 65
| 33.045455
| 0.687664
| 0
| 0
| 0
| 0
| 0
| 0.031637
| 0
| 0
| 0
| 0
| 0
| 0.441176
| 1
| 0.205882
| false
| 0
| 0.088235
| 0
| 0.323529
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ecdba39e6c2a7959baaf49033a837cace61486bd
| 60
|
py
|
Python
|
src/Core/CoreExternals/FreeType/src/tools/PaxHeaders.3012/glnames.py
|
bluespeck/OakVR
|
65d56942af390dc2ab2d969b44285d23bd53f139
|
[
"MIT"
] | 15
|
2016-09-17T16:29:42.000Z
|
2021-11-24T06:21:27.000Z
|
libs/freetype/src/tools/PaxHeaders.3012/glnames.py
|
podgorskiy/TinyFEM
|
c1a5fedf21e6306fc11fa19afdaf48dab1b6740f
|
[
"MIT"
] | null | null | null |
libs/freetype/src/tools/PaxHeaders.3012/glnames.py
|
podgorskiy/TinyFEM
|
c1a5fedf21e6306fc11fa19afdaf48dab1b6740f
|
[
"MIT"
] | 3
|
2017-04-07T13:33:57.000Z
|
2021-03-31T02:04:48.000Z
|
30 atime=1394144032.488146467
30 ctime=1374498496.139314428
| 20
| 29
| 0.866667
| 8
| 60
| 6.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.75
| 0.066667
| 60
| 2
| 30
| 30
| 0.178571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ecde439968e62b102f9285cf83764433c7079483
| 109
|
py
|
Python
|
tasks/__init__.py
|
plocharz-9livesdata/pip
|
75b504976343e6d7128cd8a5e8d8a40132012b80
|
[
"MIT"
] | null | null | null |
tasks/__init__.py
|
plocharz-9livesdata/pip
|
75b504976343e6d7128cd8a5e8d8a40132012b80
|
[
"MIT"
] | 1
|
2020-11-08T22:26:43.000Z
|
2020-11-08T22:26:43.000Z
|
tasks/__init__.py
|
plocharz-9livesdata/pip
|
75b504976343e6d7128cd8a5e8d8a40132012b80
|
[
"MIT"
] | null | null | null |
import invoke
from tools.automation import generate, vendoring
ns = invoke.Collection(generate, vendoring)
| 18.166667
| 48
| 0.816514
| 13
| 109
| 6.846154
| 0.692308
| 0.382022
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119266
| 109
| 5
| 49
| 21.8
| 0.927083
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
01a25ea1d6a2900500cf276af2a96822d6b35989
| 46
|
py
|
Python
|
main.py
|
Nichodon/Democracy-Bot
|
708613944b25f7331b18153e5f90c18f44e38aff
|
[
"MIT"
] | null | null | null |
main.py
|
Nichodon/Democracy-Bot
|
708613944b25f7331b18153e5f90c18f44e38aff
|
[
"MIT"
] | 8
|
2018-05-12T17:11:04.000Z
|
2020-02-24T22:28:23.000Z
|
main.py
|
UnsignedByte/Democracy-Bot
|
da37e8428ea7252712d7c2f8771f06658afdb990
|
[
"MIT"
] | null | null | null |
from demobot.client.client import *
runBot()
| 11.5
| 35
| 0.76087
| 6
| 46
| 5.833333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 46
| 3
| 36
| 15.333333
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
01e88e20bc6dab2824e4919bec3625d3993e7bf7
| 232
|
py
|
Python
|
data_providers/utils.py
|
muralikrishnasn/3DNet
|
b0757790858cc0569716895b00e3745dbf1705d5
|
[
"MIT"
] | 5
|
2018-10-22T14:54:37.000Z
|
2019-07-04T10:30:52.000Z
|
data_providers/utils.py
|
Hxwinchina/3d-DenseNet
|
e5cbd2dbf9aa983d6ae5763d69eaae1fe2d6e057
|
[
"MIT"
] | null | null | null |
data_providers/utils.py
|
Hxwinchina/3d-DenseNet
|
e5cbd2dbf9aa983d6ae5763d69eaae1fe2d6e057
|
[
"MIT"
] | 1
|
2019-10-26T12:33:55.000Z
|
2019-10-26T12:33:55.000Z
|
from .data import DataProvider
"""Args
path: path to the video data folder
"""
def get_data_provider_by_path(path, train_params):
"""Return required data provider class"""
return DataProvider(path, **train_params)
| 23.2
| 50
| 0.719828
| 31
| 232
| 5.193548
| 0.612903
| 0.099379
| 0.186335
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181034
| 232
| 9
| 51
| 25.777778
| 0.847368
| 0.150862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
01ed8026926837756fe6f645ccdcb64002dd3fa2
| 542
|
py
|
Python
|
Plutupus/Types/CurrencySymbol.py
|
athena-labz/plutupus
|
5f85ed047465c1d8ea154044959fab6bd2d380fb
|
[
"MIT"
] | null | null | null |
Plutupus/Types/CurrencySymbol.py
|
athena-labz/plutupus
|
5f85ed047465c1d8ea154044959fab6bd2d380fb
|
[
"MIT"
] | null | null | null |
Plutupus/Types/CurrencySymbol.py
|
athena-labz/plutupus
|
5f85ed047465c1d8ea154044959fab6bd2d380fb
|
[
"MIT"
] | null | null | null |
import json
class CurrencySymbol(object):
def __init__(self, currency_symbol):
self.__currency_symbol = currency_symbol
def get(self):
return self.__currency_symbol
def json(self):
return json.dumps({
"bytes": self.__currency_symbol
})
@staticmethod
def from_json(_json):
return CurrencySymbol(_json["bytes"])
def __eq__(self, other):
if type(other) is CurrencySymbol:
return self.get() == other.get()
else:
return False
| 22.583333
| 48
| 0.608856
| 58
| 542
| 5.310345
| 0.396552
| 0.227273
| 0.233766
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.297048
| 542
| 24
| 49
| 22.583333
| 0.808399
| 0
| 0
| 0
| 0
| 0
| 0.018416
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.277778
| false
| 0
| 0.055556
| 0.166667
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
1722ce07b27b4552d740a5c3586e36006423f853
| 118
|
py
|
Python
|
test/__init__.py
|
collinb9/understatAPI
|
5e3b464ec2b8c1e40d7513491e2d402daab2849a
|
[
"MIT"
] | 1
|
2022-03-21T15:16:04.000Z
|
2022-03-21T15:16:04.000Z
|
test/__init__.py
|
collinb9/understatAPI
|
5e3b464ec2b8c1e40d7513491e2d402daab2849a
|
[
"MIT"
] | null | null | null |
test/__init__.py
|
collinb9/understatAPI
|
5e3b464ec2b8c1e40d7513491e2d402daab2849a
|
[
"MIT"
] | null | null | null |
""" tests """
from .mock_requests import mocked_requests_get
from .mock_selenium import MockWebDriver, MockWebElement
| 29.5
| 56
| 0.822034
| 14
| 118
| 6.642857
| 0.714286
| 0.172043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101695
| 118
| 3
| 57
| 39.333333
| 0.877358
| 0.042373
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1723588d85ae009cb07bb17baa5aa6c28a316b53
| 34
|
py
|
Python
|
__init__.py
|
Colton-Bryce/windgrabber
|
5ad942addbf21bb8b9baaf3b6625eb518a75680c
|
[
"Unlicense"
] | null | null | null |
__init__.py
|
Colton-Bryce/windgrabber
|
5ad942addbf21bb8b9baaf3b6625eb518a75680c
|
[
"Unlicense"
] | null | null | null |
__init__.py
|
Colton-Bryce/windgrabber
|
5ad942addbf21bb8b9baaf3b6625eb518a75680c
|
[
"Unlicense"
] | null | null | null |
from .grabber import WindowCamera
| 17
| 33
| 0.852941
| 4
| 34
| 7.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 34
| 1
| 34
| 34
| 0.966667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
1735c84966175e56591954e3993681b6f7366908
| 110
|
py
|
Python
|
clasehola.py
|
JaimeMatias/Python-Flask-HelloWorld-master
|
b0204bf66bc2fbabf37a04b7d50ed83996d6a679
|
[
"MIT"
] | null | null | null |
clasehola.py
|
JaimeMatias/Python-Flask-HelloWorld-master
|
b0204bf66bc2fbabf37a04b7d50ed83996d6a679
|
[
"MIT"
] | null | null | null |
clasehola.py
|
JaimeMatias/Python-Flask-HelloWorld-master
|
b0204bf66bc2fbabf37a04b7d50ed83996d6a679
|
[
"MIT"
] | 2
|
2018-06-09T20:43:16.000Z
|
2019-03-05T05:18:42.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def hello_world():
return 'Hola es una aplicación de Python!'
| 22
| 44
| 0.654545
| 17
| 110
| 4.176471
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01087
| 0.163636
| 110
| 4
| 45
| 27.5
| 0.76087
| 0.381818
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
1774825b88951df13a031abd129cd2a423d43a79
| 46,332
|
py
|
Python
|
tests/data/test_filter_tree.py
|
ajrox090/VaRA-Tool-Suite
|
1550d36a4049e0615afb0bacfb96b1d506a29c98
|
[
"BSD-2-Clause"
] | 8
|
2019-10-30T08:07:44.000Z
|
2020-11-13T08:02:36.000Z
|
tests/data/test_filter_tree.py
|
ajrox090/VaRA-Tool-Suite
|
1550d36a4049e0615afb0bacfb96b1d506a29c98
|
[
"BSD-2-Clause"
] | 342
|
2019-02-14T15:53:31.000Z
|
2020-11-03T18:11:27.000Z
|
tests/data/test_filter_tree.py
|
ajrox090/VaRA-Tool-Suite
|
1550d36a4049e0615afb0bacfb96b1d506a29c98
|
[
"BSD-2-Clause"
] | 5
|
2021-04-28T17:05:16.000Z
|
2022-03-31T23:11:22.000Z
|
"""Test VaRA filter tree."""
import unittest
import unittest.mock as mock
import yaml
from PyQt5.QtCore import QDateTime, Qt
from varats.data.filtertree_data import (
AndOperator,
AuthorDateDeltaMaxFilter,
AuthorDateDeltaMinFilter,
AuthorDateMaxFilter,
AuthorDateMinFilter,
AuthorFilter,
CommitDateDeltaMaxFilter,
CommitDateDeltaMinFilter,
CommitDateMaxFilter,
CommitDateMinFilter,
CommitterFilter,
NotOperator,
OrOperator,
SourceOperator,
TargetOperator,
)
YAML_DOC_1 = """!AndOperator
_children:
- !OrOperator
_children:
- !SourceOperator
_child: !CommitterFilter
_comment: ''
_committer_email: doe@example.com
_committer_name: Jane Doe
_parent: null
_type: CommitterFilter
_comment: ''
_parent: null
_type: SourceOperator
- !SourceOperator
_child: !NotOperator
_child: !AuthorFilter
_author_email: doe@example.com
_author_name: Jane Doe
_comment: ''
_parent: null
_type: AuthorFilter
_comment: ''
_parent: null
_type: NotOperator
_comment: ''
_parent: null
_type: SourceOperator
- !TargetOperator
_child: !AuthorDateMinFilter
_author_date_min: '2000-01-01T00:00:00Z'
_comment: ''
_parent: null
_type: AuthorDateMinFilter
_comment: ''
_parent: null
_type: TargetOperator
- !TargetOperator
_child: !AuthorDateMaxFilter
_author_date_max: '2001-01-01T00:00:00Z'
_comment: ''
_parent: null
_type: AuthorDateMaxFilter
_comment: ''
_parent: null
_type: TargetOperator
- !TargetOperator
_child: !CommitDateMinFilter
_comment: ''
_commit_date_min: '2002-01-01T00:00:00Z'
_parent: null
_type: CommitDateMinFilter
_comment: ''
_parent: null
_type: TargetOperator
- !TargetOperator
_child: !CommitDateMaxFilter
_comment: ''
_commit_date_max: '2003-01-01T00:00:00Z'
_parent: null
_type: CommitDateMaxFilter
_comment: ''
_parent: null
_type: TargetOperator
- !AuthorDateDeltaMinFilter
_author_date_delta_min: P1DT1H
_comment: ''
_parent: null
_type: AuthorDateDeltaMinFilter
- !AuthorDateDeltaMaxFilter
_author_date_delta_max: P1DT2H
_comment: ''
_parent: null
_type: AuthorDateDeltaMaxFilter
- !CommitDateDeltaMinFilter
_comment: ''
_commit_date_delta_min: P1DT3H
_parent: null
_type: CommitDateDeltaMinFilter
- !CommitDateDeltaMaxFilter
_comment: ''
_commit_date_delta_max: P1DT4H
_parent: null
_type: CommitDateDeltaMaxFilter
_comment: comment
_parent: null
_type: OrOperator
_comment: ''
_parent: null
_type: AndOperator
"""
class TestFilterTreeElements(unittest.TestCase):
"""Test filter tree node types."""
@classmethod
def setUpClass(cls):
"""Setup file and CommitReport."""
cls.parent_dummy = AndOperator()
cls.author_filter = AuthorFilter(
cls.parent_dummy, "comment", "Jane Doe", "doe@example.com"
)
cls.committer_filter = CommitterFilter(
cls.parent_dummy, "comment", "Jane Doe", "doe@example.com"
)
cls.author_date_min_filter = AuthorDateMinFilter(
cls.parent_dummy, "comment", "2000-01-01T00:00:00Z"
)
cls.author_date_max_filter = AuthorDateMaxFilter(
cls.parent_dummy, "comment", "2000-01-01T00:00:00Z"
)
cls.commit_date_min_filter = CommitDateMinFilter(
cls.parent_dummy, "comment", "2000-01-01T00:00:00Z"
)
cls.commit_date_max_filter = CommitDateMaxFilter(
cls.parent_dummy, "comment", "2000-01-01T00:00:00Z"
)
cls.author_date_delta_min_filter = AuthorDateDeltaMinFilter(
cls.parent_dummy, "comment", "P3DT12H30M"
)
cls.author_date_delta_max_filter = AuthorDateDeltaMaxFilter(
cls.parent_dummy, "comment", "P3DT12H30M"
)
cls.commit_date_delta_min_filter = CommitDateDeltaMinFilter(
cls.parent_dummy, "comment", "P3DT12H30M"
)
cls.commit_date_delta_max_filter = CommitDateDeltaMaxFilter(
cls.parent_dummy, "comment", "P3DT12H30M"
)
cls.and_operator = AndOperator(cls.parent_dummy, "comment")
cls.or_operator = OrOperator(cls.parent_dummy, "comment")
cls.not_operator = NotOperator(cls.parent_dummy, "comment")
cls.source_operator = SourceOperator(cls.parent_dummy, "comment")
cls.target_operator = TargetOperator(cls.parent_dummy, "comment")
def test_author_filter(self):
filter_node = self.author_filter
self.assertEqual(filter_node.name(), "AuthorFilter")
self.assertEqual(filter_node.data(0), "AuthorFilter")
self.assertEqual(filter_node._type, "AuthorFilter")
filter_node.setData(0, "name_2")
self.assertEqual(
filter_node.name(), "AuthorFilter", "Name should be read-only!"
)
self.assertEqual(
filter_node.data(0), "AuthorFilter", "Name should be read-only!"
)
self.assertEqual(filter_node.comment(), "comment")
self.assertEqual(filter_node.data(1), "comment")
filter_node.setComment("comment_2")
self.assertEqual(filter_node.comment(), "comment_2")
self.assertEqual(filter_node.data(1), "comment_2")
filter_node.setData(1, "comment_3")
self.assertEqual(filter_node.comment(), "comment_3")
self.assertEqual(filter_node.data(1), "comment_3")
self.assertIs(filter_node.parent(), self.parent_dummy)
new_parent_dummy = OrOperator()
filter_node.setParent(new_parent_dummy)
self.assertIs(filter_node.parent(), new_parent_dummy)
self.assertIs(filter_node.child(0), None)
self.assertEqual(filter_node.childCount(), 0)
self.assertEqual(filter_node.authorName(), "Jane Doe")
self.assertEqual(filter_node.data(2), "Jane Doe")
filter_node.setAuthorName("Jane Doe 2")
self.assertEqual(filter_node.authorName(), "Jane Doe 2")
self.assertEqual(filter_node.data(2), "Jane Doe 2")
filter_node.setData(2, "Jane Doe 3")
self.assertEqual(filter_node.authorName(), "Jane Doe 3")
self.assertEqual(filter_node.data(2), "Jane Doe 3")
self.assertEqual(filter_node.authorEmail(), "doe@example.com")
self.assertEqual(filter_node.data(3), "doe@example.com")
filter_node.setAuthorEmail("doe2@example.com")
self.assertEqual(filter_node.authorEmail(), "doe2@example.com")
self.assertEqual(filter_node.data(3), "doe2@example.com")
filter_node.setData(3, "doe3@example.com")
self.assertEqual(filter_node.authorEmail(), "doe3@example.com")
self.assertEqual(filter_node.data(3), "doe3@example.com")
self.assertEqual(filter_node.resource(), ":/breeze/light/im-user.svg")
def test_committer_filter(self):
filter_node = self.committer_filter
self.assertEqual(filter_node.name(), "CommitterFilter")
self.assertEqual(filter_node.data(0), "CommitterFilter")
self.assertEqual(filter_node._type, "CommitterFilter")
filter_node.setData(0, "name_2")
self.assertEqual(
filter_node.name(), "CommitterFilter", "Name should be read-only!"
)
self.assertEqual(
filter_node.data(0), "CommitterFilter", "Name should be read-only!"
)
self.assertEqual(filter_node.comment(), "comment")
self.assertEqual(filter_node.data(1), "comment")
filter_node.setComment("comment_2")
self.assertEqual(filter_node.comment(), "comment_2")
self.assertEqual(filter_node.data(1), "comment_2")
filter_node.setData(1, "comment_3")
self.assertEqual(filter_node.comment(), "comment_3")
self.assertEqual(filter_node.data(1), "comment_3")
self.assertIs(filter_node.parent(), self.parent_dummy)
new_parent_dummy = OrOperator()
filter_node.setParent(new_parent_dummy)
self.assertIs(filter_node.parent(), new_parent_dummy)
self.assertIs(filter_node.child(0), None)
self.assertEqual(filter_node.childCount(), 0)
self.assertEqual(filter_node.committerName(), "Jane Doe")
self.assertEqual(filter_node.data(2), "Jane Doe")
filter_node.setCommitterName("Jane Doe 2")
self.assertEqual(filter_node.committerName(), "Jane Doe 2")
self.assertEqual(filter_node.data(2), "Jane Doe 2")
filter_node.setData(2, "Jane Doe 3")
self.assertEqual(filter_node.committerName(), "Jane Doe 3")
self.assertEqual(filter_node.data(2), "Jane Doe 3")
self.assertEqual(filter_node.committerEmail(), "doe@example.com")
self.assertEqual(filter_node.data(3), "doe@example.com")
filter_node.setCommitterEmail("doe2@example.com")
self.assertEqual(filter_node.committerEmail(), "doe2@example.com")
self.assertEqual(filter_node.data(3), "doe2@example.com")
filter_node.setData(3, "doe3@example.com")
self.assertEqual(filter_node.committerEmail(), "doe3@example.com")
self.assertEqual(filter_node.data(3), "doe3@example.com")
self.assertEqual(filter_node.resource(), ":/breeze/light/im-user.svg")
def test_author_date_min_filter(self):
filter_node = self.author_date_min_filter
self.assertEqual(filter_node.name(), "AuthorDateMinFilter")
self.assertEqual(filter_node.data(0), "AuthorDateMinFilter")
self.assertEqual(filter_node._type, "AuthorDateMinFilter")
filter_node.setData(0, "name_2")
self.assertEqual(
filter_node.name(), "AuthorDateMinFilter",
"Name should be read-only!"
)
self.assertEqual(
filter_node.data(0), "AuthorDateMinFilter",
"Name should be read-only!"
)
self.assertEqual(filter_node.comment(), "comment")
self.assertEqual(filter_node.data(1), "comment")
filter_node.setComment("comment_2")
self.assertEqual(filter_node.comment(), "comment_2")
self.assertEqual(filter_node.data(1), "comment_2")
filter_node.setData(1, "comment_3")
self.assertEqual(filter_node.comment(), "comment_3")
self.assertEqual(filter_node.data(1), "comment_3")
self.assertIs(filter_node.parent(), self.parent_dummy)
new_parent_dummy = OrOperator()
filter_node.setParent(new_parent_dummy)
self.assertIs(filter_node.parent(), new_parent_dummy)
self.assertIs(filter_node.child(0), None)
self.assertEqual(filter_node.childCount(), 0)
self.assertEqual(
filter_node.authorDateMin().toString(Qt.ISODate),
"2000-01-01T00:00:00Z"
)
self.assertEqual(
filter_node.data(2).toString(Qt.ISODate), "2000-01-01T00:00:00Z"
)
filter_node.setAuthorDateMin(
QDateTime.fromString("2010-01-01T00:00:00Z", Qt.ISODate)
)
self.assertEqual(
filter_node.authorDateMin().toString(Qt.ISODate),
"2010-01-01T00:00:00Z"
)
self.assertEqual(
filter_node.data(2).toString(Qt.ISODate), "2010-01-01T00:00:00Z"
)
filter_node.setData(
2, QDateTime.fromString("2015-01-01T00:00:00Z", Qt.ISODate)
)
self.assertEqual(
filter_node.authorDateMin().toString(Qt.ISODate),
"2015-01-01T00:00:00Z"
)
self.assertEqual(
filter_node.data(2).toString(Qt.ISODate), "2015-01-01T00:00:00Z"
)
self.assertEqual(
filter_node.resource(), ":/breeze/light/appointment-new.svg"
)
def test_author_date_max_filter(self):
filter_node = self.author_date_max_filter
self.assertEqual(filter_node.name(), "AuthorDateMaxFilter")
self.assertEqual(filter_node.data(0), "AuthorDateMaxFilter")
self.assertEqual(filter_node._type, "AuthorDateMaxFilter")
filter_node.setData(0, "name_2")
self.assertEqual(
filter_node.name(), "AuthorDateMaxFilter",
"Name should be read-only!"
)
self.assertEqual(
filter_node.data(0), "AuthorDateMaxFilter",
"Name should be read-only!"
)
self.assertEqual(filter_node.comment(), "comment")
self.assertEqual(filter_node.data(1), "comment")
filter_node.setComment("comment_2")
self.assertEqual(filter_node.comment(), "comment_2")
self.assertEqual(filter_node.data(1), "comment_2")
filter_node.setData(1, "comment_3")
self.assertEqual(filter_node.comment(), "comment_3")
self.assertEqual(filter_node.data(1), "comment_3")
self.assertIs(filter_node.parent(), self.parent_dummy)
new_parent_dummy = OrOperator()
filter_node.setParent(new_parent_dummy)
self.assertIs(filter_node.parent(), new_parent_dummy)
self.assertIs(filter_node.child(0), None)
self.assertEqual(filter_node.childCount(), 0)
self.assertEqual(
filter_node.authorDateMax().toString(Qt.ISODate),
"2000-01-01T00:00:00Z"
)
self.assertEqual(
filter_node.data(2).toString(Qt.ISODate), "2000-01-01T00:00:00Z"
)
filter_node.setAuthorDateMax(
QDateTime.fromString("2010-01-01T00:00:00Z", Qt.ISODate)
)
self.assertEqual(
filter_node.authorDateMax().toString(Qt.ISODate),
"2010-01-01T00:00:00Z"
)
self.assertEqual(
filter_node.data(2).toString(Qt.ISODate), "2010-01-01T00:00:00Z"
)
filter_node.setData(
2, QDateTime.fromString("2015-01-01T00:00:00Z", Qt.ISODate)
)
self.assertEqual(
filter_node.authorDateMax().toString(Qt.ISODate),
"2015-01-01T00:00:00Z"
)
self.assertEqual(
filter_node.data(2).toString(Qt.ISODate), "2015-01-01T00:00:00Z"
)
self.assertEqual(
filter_node.resource(), ":/breeze/light/appointment-new.svg"
)
def test_commit_date_min_filter(self):
filter_node = self.commit_date_min_filter
self.assertEqual(filter_node.name(), "CommitDateMinFilter")
self.assertEqual(filter_node.data(0), "CommitDateMinFilter")
self.assertEqual(filter_node._type, "CommitDateMinFilter")
filter_node.setData(0, "name_2")
self.assertEqual(
filter_node.name(), "CommitDateMinFilter",
"Name should be read-only!"
)
self.assertEqual(
filter_node.data(0), "CommitDateMinFilter",
"Name should be read-only!"
)
self.assertEqual(filter_node.comment(), "comment")
self.assertEqual(filter_node.data(1), "comment")
filter_node.setComment("comment_2")
self.assertEqual(filter_node.comment(), "comment_2")
self.assertEqual(filter_node.data(1), "comment_2")
filter_node.setData(1, "comment_3")
self.assertEqual(filter_node.comment(), "comment_3")
self.assertEqual(filter_node.data(1), "comment_3")
self.assertIs(filter_node.parent(), self.parent_dummy)
new_parent_dummy = OrOperator()
filter_node.setParent(new_parent_dummy)
self.assertIs(filter_node.parent(), new_parent_dummy)
self.assertIs(filter_node.child(0), None)
self.assertEqual(filter_node.childCount(), 0)
self.assertEqual(
filter_node.commitDateMin().toString(Qt.ISODate),
"2000-01-01T00:00:00Z"
)
self.assertEqual(
filter_node.data(2).toString(Qt.ISODate), "2000-01-01T00:00:00Z"
)
filter_node.setCommitDateMin(
QDateTime.fromString("2010-01-01T00:00:00Z", Qt.ISODate)
)
self.assertEqual(
filter_node.commitDateMin().toString(Qt.ISODate),
"2010-01-01T00:00:00Z"
)
self.assertEqual(
filter_node.data(2).toString(Qt.ISODate), "2010-01-01T00:00:00Z"
)
filter_node.setData(
2, QDateTime.fromString("2015-01-01T00:00:00Z", Qt.ISODate)
)
self.assertEqual(
filter_node.commitDateMin().toString(Qt.ISODate),
"2015-01-01T00:00:00Z"
)
self.assertEqual(
filter_node.data(2).toString(Qt.ISODate), "2015-01-01T00:00:00Z"
)
self.assertEqual(
filter_node.resource(), ":/breeze/light/appointment-new.svg"
)
def test_commit_date_max_filter(self):
filter_node = self.commit_date_max_filter
self.assertEqual(filter_node.name(), "CommitDateMaxFilter")
self.assertEqual(filter_node.data(0), "CommitDateMaxFilter")
self.assertEqual(filter_node._type, "CommitDateMaxFilter")
filter_node.setData(0, "name_2")
self.assertEqual(
filter_node.name(), "CommitDateMaxFilter",
"Name should be read-only!"
)
self.assertEqual(
filter_node.data(0), "CommitDateMaxFilter",
"Name should be read-only!"
)
self.assertEqual(filter_node.comment(), "comment")
self.assertEqual(filter_node.data(1), "comment")
filter_node.setComment("comment_2")
self.assertEqual(filter_node.comment(), "comment_2")
self.assertEqual(filter_node.data(1), "comment_2")
filter_node.setData(1, "comment_3")
self.assertEqual(filter_node.comment(), "comment_3")
self.assertEqual(filter_node.data(1), "comment_3")
self.assertIs(filter_node.parent(), self.parent_dummy)
new_parent_dummy = OrOperator()
filter_node.setParent(new_parent_dummy)
self.assertIs(filter_node.parent(), new_parent_dummy)
self.assertIs(filter_node.child(0), None)
self.assertEqual(filter_node.childCount(), 0)
self.assertEqual(
filter_node.commitDateMax().toString(Qt.ISODate),
"2000-01-01T00:00:00Z"
)
self.assertEqual(
filter_node.data(2).toString(Qt.ISODate), "2000-01-01T00:00:00Z"
)
filter_node.setCommitDateMax(
QDateTime.fromString("2010-01-01T00:00:00Z", Qt.ISODate)
)
self.assertEqual(
filter_node.commitDateMax().toString(Qt.ISODate),
"2010-01-01T00:00:00Z"
)
self.assertEqual(
filter_node.data(2).toString(Qt.ISODate), "2010-01-01T00:00:00Z"
)
filter_node.setData(
2, QDateTime.fromString("2015-01-01T00:00:00Z", Qt.ISODate)
)
self.assertEqual(
filter_node.commitDateMax().toString(Qt.ISODate),
"2015-01-01T00:00:00Z"
)
self.assertEqual(
filter_node.data(2).toString(Qt.ISODate), "2015-01-01T00:00:00Z"
)
self.assertEqual(
filter_node.resource(), ":/breeze/light/appointment-new.svg"
)
def test_author_date_delta_min_filter(self):
filter_node = self.author_date_delta_min_filter
self.assertEqual(filter_node.name(), "AuthorDateDeltaMinFilter")
self.assertEqual(filter_node.data(0), "AuthorDateDeltaMinFilter")
self.assertEqual(filter_node._type, "AuthorDateDeltaMinFilter")
filter_node.setData(0, "name_2")
self.assertEqual(
filter_node.name(), "AuthorDateDeltaMinFilter",
"Name should be read-only!"
)
self.assertEqual(
filter_node.data(0), "AuthorDateDeltaMinFilter",
"Name should be read-only!"
)
self.assertEqual(filter_node.comment(), "comment")
self.assertEqual(filter_node.data(1), "comment")
filter_node.setComment("comment_2")
self.assertEqual(filter_node.comment(), "comment_2")
self.assertEqual(filter_node.data(1), "comment_2")
filter_node.setData(1, "comment_3")
self.assertEqual(filter_node.comment(), "comment_3")
self.assertEqual(filter_node.data(1), "comment_3")
self.assertIs(filter_node.parent(), self.parent_dummy)
new_parent_dummy = OrOperator()
filter_node.setParent(new_parent_dummy)
self.assertIs(filter_node.parent(), new_parent_dummy)
self.assertIs(filter_node.child(0), None)
self.assertEqual(filter_node.childCount(), 0)
self.assertEqual(filter_node.authorDateDeltaMin(), "P3DT12H30M")
self.assertEqual(filter_node.data(2), "P3DT12H30M")
filter_node.setAuthorDateDeltaMin("P23DT12H30M")
self.assertEqual(filter_node.authorDateDeltaMin(), "P23DT12H30M")
self.assertEqual(filter_node.data(2), "P23DT12H30M")
filter_node.setData(2, "P42DT12H30M")
self.assertEqual(filter_node.authorDateDeltaMin(), "P42DT12H30M")
self.assertEqual(filter_node.data(2), "P42DT12H30M")
self.assertEqual(
filter_node.resource(), ":/breeze/light/chronometer.svg"
)
def test_author_date_delta_max_filter(self):
filter_node = self.author_date_delta_max_filter
self.assertEqual(filter_node.name(), "AuthorDateDeltaMaxFilter")
self.assertEqual(filter_node.data(0), "AuthorDateDeltaMaxFilter")
self.assertEqual(filter_node._type, "AuthorDateDeltaMaxFilter")
filter_node.setData(0, "name_2")
self.assertEqual(
filter_node.name(), "AuthorDateDeltaMaxFilter",
"Name should be read-only!"
)
self.assertEqual(
filter_node.data(0), "AuthorDateDeltaMaxFilter",
"Name should be read-only!"
)
self.assertEqual(filter_node.comment(), "comment")
self.assertEqual(filter_node.data(1), "comment")
filter_node.setComment("comment_2")
self.assertEqual(filter_node.comment(), "comment_2")
self.assertEqual(filter_node.data(1), "comment_2")
filter_node.setData(1, "comment_3")
self.assertEqual(filter_node.comment(), "comment_3")
self.assertEqual(filter_node.data(1), "comment_3")
self.assertIs(filter_node.parent(), self.parent_dummy)
new_parent_dummy = OrOperator()
filter_node.setParent(new_parent_dummy)
self.assertIs(filter_node.parent(), new_parent_dummy)
self.assertIs(filter_node.child(0), None)
self.assertEqual(filter_node.childCount(), 0)
self.assertEqual(filter_node.authorDateDeltaMax(), "P3DT12H30M")
self.assertEqual(filter_node.data(2), "P3DT12H30M")
filter_node.setAuthorDateDeltaMax("P23DT12H30M")
self.assertEqual(filter_node.authorDateDeltaMax(), "P23DT12H30M")
self.assertEqual(filter_node.data(2), "P23DT12H30M")
filter_node.setData(2, "P42DT12H30M")
self.assertEqual(filter_node.authorDateDeltaMax(), "P42DT12H30M")
self.assertEqual(filter_node.data(2), "P42DT12H30M")
self.assertEqual(
filter_node.resource(), ":/breeze/light/chronometer.svg"
)
def test_commit_date_delta_min_filter(self):
filter_node = self.commit_date_delta_min_filter
self.assertEqual(filter_node.name(), "CommitDateDeltaMinFilter")
self.assertEqual(filter_node.data(0), "CommitDateDeltaMinFilter")
self.assertEqual(filter_node._type, "CommitDateDeltaMinFilter")
filter_node.setData(0, "name_2")
self.assertEqual(
filter_node.name(), "CommitDateDeltaMinFilter",
"Name should be read-only!"
)
self.assertEqual(
filter_node.data(0), "CommitDateDeltaMinFilter",
"Name should be read-only!"
)
self.assertEqual(filter_node.comment(), "comment")
self.assertEqual(filter_node.data(1), "comment")
filter_node.setComment("comment_2")
self.assertEqual(filter_node.comment(), "comment_2")
self.assertEqual(filter_node.data(1), "comment_2")
filter_node.setData(1, "comment_3")
self.assertEqual(filter_node.comment(), "comment_3")
self.assertEqual(filter_node.data(1), "comment_3")
self.assertIs(filter_node.parent(), self.parent_dummy)
new_parent_dummy = OrOperator()
filter_node.setParent(new_parent_dummy)
self.assertIs(filter_node.parent(), new_parent_dummy)
self.assertIs(filter_node.child(0), None)
self.assertEqual(filter_node.childCount(), 0)
self.assertEqual(filter_node.commitDateDeltaMin(), "P3DT12H30M")
self.assertEqual(filter_node.data(2), "P3DT12H30M")
filter_node.setCommitDateDeltaMin("P23DT12H30M")
self.assertEqual(filter_node.commitDateDeltaMin(), "P23DT12H30M")
self.assertEqual(filter_node.data(2), "P23DT12H30M")
filter_node.setData(2, "P42DT12H30M")
self.assertEqual(filter_node.commitDateDeltaMin(), "P42DT12H30M")
self.assertEqual(filter_node.data(2), "P42DT12H30M")
self.assertEqual(
filter_node.resource(), ":/breeze/light/chronometer.svg"
)
def test_commit_date_delta_max_filter(self):
filter_node = self.commit_date_delta_max_filter
self.assertEqual(filter_node.name(), "CommitDateDeltaMaxFilter")
self.assertEqual(filter_node.data(0), "CommitDateDeltaMaxFilter")
self.assertEqual(filter_node._type, "CommitDateDeltaMaxFilter")
filter_node.setData(0, "name_2")
self.assertEqual(
filter_node.name(), "CommitDateDeltaMaxFilter",
"Name should be read-only!"
)
self.assertEqual(
filter_node.data(0), "CommitDateDeltaMaxFilter",
"Name should be read-only!"
)
self.assertEqual(filter_node.comment(), "comment")
self.assertEqual(filter_node.data(1), "comment")
filter_node.setComment("comment_2")
self.assertEqual(filter_node.comment(), "comment_2")
self.assertEqual(filter_node.data(1), "comment_2")
filter_node.setData(1, "comment_3")
self.assertEqual(filter_node.comment(), "comment_3")
self.assertEqual(filter_node.data(1), "comment_3")
self.assertIs(filter_node.parent(), self.parent_dummy)
new_parent_dummy = OrOperator()
filter_node.setParent(new_parent_dummy)
self.assertIs(filter_node.parent(), new_parent_dummy)
self.assertIs(filter_node.child(0), None)
self.assertEqual(filter_node.childCount(), 0)
self.assertEqual(filter_node.commitDateDeltaMax(), "P3DT12H30M")
self.assertEqual(filter_node.data(2), "P3DT12H30M")
filter_node.setCommitDateDeltaMax("P23DT12H30M")
self.assertEqual(filter_node.commitDateDeltaMax(), "P23DT12H30M")
self.assertEqual(filter_node.data(2), "P23DT12H30M")
filter_node.setData(2, "P42DT12H30M")
self.assertEqual(filter_node.commitDateDeltaMax(), "P42DT12H30M")
self.assertEqual(filter_node.data(2), "P42DT12H30M")
self.assertEqual(
filter_node.resource(), ":/breeze/light/chronometer.svg"
)
def test_and_operator(self):
filter_node = self.and_operator
self.assertEqual(filter_node.name(), "AndOperator")
self.assertEqual(filter_node.data(0), "AndOperator")
self.assertEqual(filter_node._type, "AndOperator")
filter_node.setData(0, "name_2")
self.assertEqual(
filter_node.name(), "AndOperator", "Name should be read-only!"
)
self.assertEqual(
filter_node.data(0), "AndOperator", "Name should be read-only!"
)
self.assertEqual(filter_node.comment(), "comment")
self.assertEqual(filter_node.data(1), "comment")
filter_node.setComment("comment_2")
self.assertEqual(filter_node.comment(), "comment_2")
self.assertEqual(filter_node.data(1), "comment_2")
filter_node.setData(1, "comment_3")
self.assertEqual(filter_node.comment(), "comment_3")
self.assertEqual(filter_node.data(1), "comment_3")
self.assertIs(filter_node.parent(), self.parent_dummy)
new_parent_dummy = OrOperator()
filter_node.setParent(new_parent_dummy)
self.assertIs(filter_node.parent(), new_parent_dummy)
self.assertIs(filter_node.child(0), None)
self.assertEqual(filter_node.childCount(), 0)
child1 = NotOperator()
child2 = NotOperator()
child3 = NotOperator()
filter_node.addChild(child1)
self.assertEqual(filter_node.childCount(), 1)
self.assertIs(filter_node.child(0), child1)
self.assertIs(child1.parent(), filter_node)
self.assertIs(filter_node.child(-1), None)
self.assertIs(filter_node.child(1), None)
filter_node.addChild(child2)
self.assertEqual(filter_node.childCount(), 2)
self.assertIs(filter_node.child(0), child1)
self.assertIs(filter_node.child(1), child2)
self.assertIs(child2.parent(), filter_node)
filter_node.insertChild(1, child3)
self.assertEqual(filter_node.childCount(), 3)
self.assertIs(filter_node.child(0), child1)
self.assertIs(filter_node.child(1), child3)
self.assertIs(filter_node.child(2), child2)
self.assertIs(child3.parent(), filter_node)
filter_node.moveChild(1, 3)
self.assertEqual(filter_node.childCount(), 3)
self.assertIs(filter_node.child(0), child1)
self.assertIs(filter_node.child(1), child2)
self.assertIs(filter_node.child(2), child3)
filter_node.removeChild(1)
self.assertEqual(filter_node.childCount(), 2)
self.assertIs(filter_node.child(0), child1)
self.assertIs(filter_node.child(1), child3)
self.assertIs(child2.parent(), None)
self.assertEqual(filter_node.resource(), ":/operators/and-operator.svg")
def test_or_operator(self):
filter_node = self.or_operator
self.assertEqual(filter_node.name(), "OrOperator")
self.assertEqual(filter_node.data(0), "OrOperator")
self.assertEqual(filter_node._type, "OrOperator")
filter_node.setData(0, "name_2")
self.assertEqual(
filter_node.name(), "OrOperator", "Name should be read-only!"
)
self.assertEqual(
filter_node.data(0), "OrOperator", "Name should be read-only!"
)
self.assertEqual(filter_node.comment(), "comment")
self.assertEqual(filter_node.data(1), "comment")
filter_node.setComment("comment_2")
self.assertEqual(filter_node.comment(), "comment_2")
self.assertEqual(filter_node.data(1), "comment_2")
filter_node.setData(1, "comment_3")
self.assertEqual(filter_node.comment(), "comment_3")
self.assertEqual(filter_node.data(1), "comment_3")
self.assertIs(filter_node.parent(), self.parent_dummy)
new_parent_dummy = OrOperator()
filter_node.setParent(new_parent_dummy)
self.assertIs(filter_node.parent(), new_parent_dummy)
self.assertIs(filter_node.child(0), None)
self.assertEqual(filter_node.childCount(), 0)
child1 = NotOperator()
child2 = NotOperator()
child3 = NotOperator()
filter_node.addChild(child1)
self.assertEqual(filter_node.childCount(), 1)
self.assertIs(filter_node.child(0), child1)
self.assertIs(child1.parent(), filter_node)
self.assertIs(filter_node.child(-1), None)
self.assertIs(filter_node.child(1), None)
filter_node.addChild(child2)
self.assertEqual(filter_node.childCount(), 2)
self.assertIs(filter_node.child(0), child1)
self.assertIs(filter_node.child(1), child2)
self.assertIs(child2.parent(), filter_node)
filter_node.insertChild(1, child3)
self.assertEqual(filter_node.childCount(), 3)
self.assertIs(filter_node.child(0), child1)
self.assertIs(filter_node.child(1), child3)
self.assertIs(filter_node.child(2), child2)
self.assertIs(child3.parent(), filter_node)
filter_node.moveChild(1, 3)
self.assertEqual(filter_node.childCount(), 3)
self.assertIs(filter_node.child(0), child1)
self.assertIs(filter_node.child(1), child2)
self.assertIs(filter_node.child(2), child3)
filter_node.removeChild(1)
self.assertEqual(filter_node.childCount(), 2)
self.assertIs(filter_node.child(0), child1)
self.assertIs(filter_node.child(1), child3)
self.assertIs(child2.parent(), None)
self.assertEqual(filter_node.resource(), ":/operators/or-operator.svg")
def test_not_operator(self):
filter_node = self.not_operator
self.assertEqual(filter_node.name(), "NotOperator")
self.assertEqual(filter_node.data(0), "NotOperator")
self.assertEqual(filter_node._type, "NotOperator")
filter_node.setData(0, "name_2")
self.assertEqual(
filter_node.name(), "NotOperator", "Name should be read-only!"
)
self.assertEqual(
filter_node.data(0), "NotOperator", "Name should be read-only!"
)
self.assertEqual(filter_node.comment(), "comment")
self.assertEqual(filter_node.data(1), "comment")
filter_node.setComment("comment_2")
self.assertEqual(filter_node.comment(), "comment_2")
self.assertEqual(filter_node.data(1), "comment_2")
filter_node.setData(1, "comment_3")
self.assertEqual(filter_node.comment(), "comment_3")
self.assertEqual(filter_node.data(1), "comment_3")
self.assertIs(filter_node.parent(), self.parent_dummy)
new_parent_dummy = OrOperator()
filter_node.setParent(new_parent_dummy)
self.assertIs(filter_node.parent(), new_parent_dummy)
self.assertIs(filter_node.child(0), None)
self.assertEqual(filter_node.childCount(), 0)
child1 = AndOperator()
child2 = AndOperator()
filter_node.addChild(child1)
self.assertEqual(filter_node.childCount(), 1)
self.assertIs(filter_node.child(0), child1)
self.assertIs(child1.parent(), filter_node)
self.assertIs(filter_node.child(-1), None)
self.assertIs(filter_node.child(1), None)
filter_node.removeChild(0)
self.assertIs(filter_node.child(0), None)
self.assertEqual(filter_node.childCount(), 0)
self.assertIs(child1.parent(), None)
filter_node.insertChild(0, child2)
self.assertEqual(filter_node.childCount(), 1)
self.assertIs(filter_node.child(0), child2)
self.assertIs(child2.parent(), filter_node)
self.assertEqual(filter_node.resource(), ":/operators/not-operator.svg")
def test_source_operator(self):
filter_node = self.source_operator
self.assertEqual(filter_node.name(), "SourceOperator")
self.assertEqual(filter_node.data(0), "SourceOperator")
self.assertEqual(filter_node._type, "SourceOperator")
filter_node.setData(0, "name_2")
self.assertEqual(
filter_node.name(), "SourceOperator", "Name should be read-only!"
)
self.assertEqual(
filter_node.data(0), "SourceOperator", "Name should be read-only!"
)
self.assertEqual(filter_node.comment(), "comment")
self.assertEqual(filter_node.data(1), "comment")
filter_node.setComment("comment_2")
self.assertEqual(filter_node.comment(), "comment_2")
self.assertEqual(filter_node.data(1), "comment_2")
filter_node.setData(1, "comment_3")
self.assertEqual(filter_node.comment(), "comment_3")
self.assertEqual(filter_node.data(1), "comment_3")
self.assertIs(filter_node.parent(), self.parent_dummy)
new_parent_dummy = OrOperator()
filter_node.setParent(new_parent_dummy)
self.assertIs(filter_node.parent(), new_parent_dummy)
self.assertIs(filter_node.child(0), None)
self.assertEqual(filter_node.childCount(), 0)
child1 = NotOperator()
child2 = NotOperator()
filter_node.addChild(child1)
self.assertEqual(filter_node.childCount(), 1)
self.assertIs(filter_node.child(0), child1)
self.assertIs(child1.parent(), filter_node)
self.assertIs(filter_node.child(-1), None)
self.assertIs(filter_node.child(1), None)
filter_node.removeChild(0)
self.assertIs(filter_node.child(0), None)
self.assertEqual(filter_node.childCount(), 0)
self.assertIs(child1.parent(), None)
filter_node.insertChild(0, child2)
self.assertEqual(filter_node.childCount(), 1)
self.assertIs(filter_node.child(0), child2)
self.assertIs(child2.parent(), filter_node)
self.assertEqual(
filter_node.resource(), ":/operators/source-operator.svg"
)
def test_target_operator(self):
filter_node = self.target_operator
self.assertEqual(filter_node.name(), "TargetOperator")
self.assertEqual(filter_node.data(0), "TargetOperator")
self.assertEqual(filter_node._type, "TargetOperator")
filter_node.setData(0, "name_2")
self.assertEqual(
filter_node.name(), "TargetOperator", "Name should be read-only!"
)
self.assertEqual(
filter_node.data(0), "TargetOperator", "Name should be read-only!"
)
self.assertEqual(filter_node.comment(), "comment")
self.assertEqual(filter_node.data(1), "comment")
filter_node.setComment("comment_2")
self.assertEqual(filter_node.comment(), "comment_2")
self.assertEqual(filter_node.data(1), "comment_2")
filter_node.setData(1, "comment_3")
self.assertEqual(filter_node.comment(), "comment_3")
self.assertEqual(filter_node.data(1), "comment_3")
self.assertIs(filter_node.parent(), self.parent_dummy)
new_parent_dummy = OrOperator()
filter_node.setParent(new_parent_dummy)
self.assertIs(filter_node.parent(), new_parent_dummy)
self.assertIs(filter_node.child(0), None)
self.assertEqual(filter_node.childCount(), 0)
child1 = NotOperator()
child2 = NotOperator()
filter_node.addChild(child1)
self.assertEqual(filter_node.childCount(), 1)
self.assertIs(filter_node.child(0), child1)
self.assertIs(child1.parent(), filter_node)
self.assertIs(filter_node.child(-1), None)
self.assertIs(filter_node.child(1), None)
filter_node.removeChild(0)
self.assertIs(filter_node.child(0), None)
self.assertEqual(filter_node.childCount(), 0)
self.assertIs(child1.parent(), None)
filter_node.insertChild(0, child2)
self.assertEqual(filter_node.childCount(), 1)
self.assertIs(filter_node.child(0), child2)
self.assertIs(child2.parent(), filter_node)
self.assertEqual(
filter_node.resource(), ":/operators/target-operator.svg"
)
class TestFilterTreeYamlLoad(unittest.TestCase):
    """Test filter tree loading from yaml file."""

    @classmethod
    def setUpClass(cls):
        # Serve YAML_DOC_1 through a patched builtins.open so the loader
        # never touches the real filesystem.
        fake_open = mock.mock_open(read_data=YAML_DOC_1)
        with mock.patch('builtins.open', new=fake_open):
            tree = yaml.load(open("path/to/open"), Loader=yaml.Loader)
            tree.fixParentPointers()
        cls.root_node = tree

    def test_filter_tree_yaml_load(self):
        """Verify structure and payloads of the deserialized filter tree."""

        # Shared check: name/_type identity, comment, parent link and arity.
        def expect(node, type_name, parent, child_count, comment=""):
            self.assertEqual(node.name(), type_name)
            self.assertEqual(node._type, type_name)
            self.assertEqual(node.comment(), comment)
            self.assertIs(node.parent(), parent)
            self.assertEqual(node.childCount(), child_count)

        root = self.root_node
        expect(root, "AndOperator", None, 1)
        branch = root.child(0)
        expect(branch, "OrOperator", root, 10, comment="comment")
        kids = [branch.child(i) for i in range(10)]
        expect(kids[0], "SourceOperator", branch, 1)
        expect(kids[1], "SourceOperator", branch, 1)
        for kid in kids[2:6]:
            expect(kid, "TargetOperator", branch, 1)
        # The four delta filters carry ISO-8601 duration payloads.
        expect(kids[6], "AuthorDateDeltaMinFilter", branch, 0)
        self.assertEqual(kids[6].authorDateDeltaMin(), "P1DT1H")
        expect(kids[7], "AuthorDateDeltaMaxFilter", branch, 0)
        self.assertEqual(kids[7].authorDateDeltaMax(), "P1DT2H")
        expect(kids[8], "CommitDateDeltaMinFilter", branch, 0)
        self.assertEqual(kids[8].commitDateDeltaMin(), "P1DT3H")
        expect(kids[9], "CommitDateDeltaMaxFilter", branch, 0)
        self.assertEqual(kids[9].commitDateDeltaMax(), "P1DT4H")
        # First source operator wraps a committer filter.
        committer = kids[0].child(0)
        expect(committer, "CommitterFilter", kids[0], 0)
        self.assertEqual(committer.committerName(), "Jane Doe")
        self.assertEqual(committer.committerEmail(), "doe@example.com")
        # Second source operator wraps a negated author filter.
        negation = kids[1].child(0)
        expect(negation, "NotOperator", kids[1], 1)
        # Each target operator wraps a single date filter leaf.
        date_leaves = (
            (kids[2], "AuthorDateMinFilter", "authorDateMin",
             "2000-01-01T00:00:00Z"),
            (kids[3], "AuthorDateMaxFilter", "authorDateMax",
             "2001-01-01T00:00:00Z"),
            (kids[4], "CommitDateMinFilter", "commitDateMin",
             "2002-01-01T00:00:00Z"),
            (kids[5], "CommitDateMaxFilter", "commitDateMax",
             "2003-01-01T00:00:00Z"),
        )
        for wrapper, type_name, getter, expected_iso in date_leaves:
            leaf = wrapper.child(0)
            expect(leaf, type_name, wrapper, 0)
            self.assertEqual(
                getattr(leaf, getter)().toString(Qt.ISODate), expected_iso
            )
        author = negation.child(0)
        expect(author, "AuthorFilter", negation, 0)
        self.assertEqual(author.authorName(), "Jane Doe")
        self.assertEqual(author.authorEmail(), "doe@example.com")
| 36.140406
| 80
| 0.651191
| 5,033
| 46,332
| 5.795947
| 0.037353
| 0.172089
| 0.205889
| 0.245106
| 0.768811
| 0.721161
| 0.696205
| 0.655411
| 0.636523
| 0.626718
| 0
| 0.043351
| 0.223323
| 46,332
| 1,281
| 81
| 36.168618
| 0.767292
| 0.002612
| 0
| 0.561024
| 0
| 0
| 0.179628
| 0.032496
| 0
| 0
| 0
| 0
| 0.488189
| 1
| 0.017717
| false
| 0
| 0.004921
| 0
| 0.024606
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1779db73f6a2bf2d6ab06829f0b8b4cdde68f189
| 57
|
py
|
Python
|
manuscript/case2/3_kipoi_export/ExampleDeeperDeepSEA/model_arch/__init__.py
|
taskina-alena/selene
|
3d86c61346909cdae5a3a4d9559e60e0736cf6b0
|
[
"BSD-3-Clause-Clear"
] | 307
|
2018-09-21T16:48:12.000Z
|
2022-03-23T21:42:04.000Z
|
manuscript/case2/3_kipoi_export/ExampleDeeperDeepSEA/model_arch/__init__.py
|
taskina-alena/selene
|
3d86c61346909cdae5a3a4d9559e60e0736cf6b0
|
[
"BSD-3-Clause-Clear"
] | 104
|
2018-08-07T13:44:29.000Z
|
2022-01-12T01:35:30.000Z
|
manuscript/case2/3_kipoi_export/ExampleDeeperDeepSEA/model_arch/__init__.py
|
taskina-alena/selene
|
3d86c61346909cdae5a3a4d9559e60e0736cf6b0
|
[
"BSD-3-Clause-Clear"
] | 85
|
2018-10-20T08:06:31.000Z
|
2022-03-29T15:17:30.000Z
|
from .wrapped_deeper_deepsea import WrappedDeeperDeepSEA
| 28.5
| 56
| 0.912281
| 6
| 57
| 8.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070175
| 57
| 1
| 57
| 57
| 0.943396
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
178c3836aebd14534609fe08c993964f8c1255f1
| 68
|
py
|
Python
|
torchprofile/__init__.py
|
tanglang96/torchprofile
|
e918353e95e19a6bc9e02794d5c96510ed858741
|
[
"MIT"
] | 1
|
2022-01-11T05:31:17.000Z
|
2022-01-11T05:31:17.000Z
|
torchprofile/__init__.py
|
tanglang96/torchprofile
|
e918353e95e19a6bc9e02794d5c96510ed858741
|
[
"MIT"
] | null | null | null |
torchprofile/__init__.py
|
tanglang96/torchprofile
|
e918353e95e19a6bc9e02794d5c96510ed858741
|
[
"MIT"
] | null | null | null |
from .profile import profile_model
from .version import __version__
| 22.666667
| 34
| 0.852941
| 9
| 68
| 5.888889
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 68
| 2
| 35
| 34
| 0.883333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bd5f567863813eb9580d040f3a2a5855c8a81d3e
| 6,333
|
py
|
Python
|
mysite/patterns/15.py
|
BioinfoNet/prepub
|
e19c48cabf8bd22736dcef9308a5e196cfd8119a
|
[
"MIT"
] | 19
|
2016-06-17T23:36:27.000Z
|
2020-01-13T16:41:55.000Z
|
mysite/patterns/15.py
|
BioinfoNet/prepub
|
e19c48cabf8bd22736dcef9308a5e196cfd8119a
|
[
"MIT"
] | 13
|
2016-06-06T12:57:05.000Z
|
2019-02-05T02:21:00.000Z
|
patterns/15.py
|
OmnesRes/GRIMMER
|
173c99ebdb6a9edb1242d24a791d0c5d778ff643
|
[
"MIT"
] | 7
|
2017-03-28T18:12:22.000Z
|
2021-06-16T09:32:59.000Z
|
pattern_zero=[0.0, 0.062222222222, 0.115555555556, 0.133333333333, 0.16, 0.195555555556, 0.222222222222, 0.24, 0.248888888889, 0.266666666667, 0.293333333333, 0.328888888889, 0.355555555556, 0.373333333333, 0.382222222222, 0.4, 0.426666666667, 0.462222222222, 0.488888888889, 0.506666666667, 0.515555555556, 0.533333333333, 0.56, 0.595555555556, 0.622222222222, 0.64, 0.648888888889, 0.666666666667, 0.693333333333, 0.728888888889, 0.755555555556, 0.773333333333, 0.782222222222, 0.8, 0.826666666667, 0.862222222222, 0.888888888889, 0.906666666667, 0.915555555556, 0.933333333333, 0.96, 0.995555555556]
pattern_odd=[0.022222222222, 0.04, 0.048888888889, 0.066666666667, 0.093333333333, 0.128888888889, 0.155555555556, 0.173333333333, 0.182222222222, 0.2, 0.226666666667, 0.262222222222, 0.288888888889, 0.306666666667, 0.315555555556, 0.333333333333, 0.36, 0.395555555556, 0.422222222222, 0.44, 0.448888888889, 0.466666666667, 0.493333333333, 0.528888888889, 0.555555555556, 0.573333333333, 0.582222222222, 0.6, 0.626666666667, 0.662222222222, 0.688888888889, 0.706666666667, 0.715555555556, 0.733333333333, 0.76, 0.795555555556, 0.822222222222, 0.84, 0.848888888889, 0.866666666667, 0.893333333333, 0.928888888889, 0.955555555556, 0.973333333333, 0.982222222222]
pattern_even=[0.0, 0.026666666667, 0.062222222222, 0.088888888889, 0.106666666667, 0.115555555556, 0.133333333333, 0.16, 0.195555555556, 0.222222222222, 0.24, 0.248888888889, 0.266666666667, 0.293333333333, 0.328888888889, 0.355555555556, 0.373333333333, 0.382222222222, 0.4, 0.426666666667, 0.462222222222, 0.488888888889, 0.506666666667, 0.515555555556, 0.533333333333, 0.56, 0.595555555556, 0.622222222222, 0.64, 0.648888888889, 0.666666666667, 0.693333333333, 0.728888888889, 0.755555555556, 0.773333333333, 0.782222222222, 0.8, 0.826666666667, 0.862222222222, 0.888888888889, 0.906666666667, 0.915555555556, 0.933333333333, 0.96, 0.995555555556]
averages_even={0.0: [0.0], 0.648888888889: [0.1333333333333, 0.5333333333333, 0.8666666666667, 0.4666666666667], 0.693333333333: [0.8, 0.2], 0.506666666667: [0.6, 0.4], 0.115555555556: [0.5333333333333, 0.1333333333333, 0.8666666666667, 0.4666666666667], 0.533333333333: [0.0], 0.462222222222: [0.2666666666667, 0.0666666666667, 0.7333333333333, 0.9333333333333], 0.595555555556: [0.0666666666667, 0.2666666666667, 0.7333333333333, 0.9333333333333], 0.96: [0.2, 0.8], 0.106666666667: [0.6, 0.4], 0.195555555556: [0.2666666666667, 0.0666666666667, 0.7333333333333, 0.9333333333333], 0.266666666667: [0.0], 0.826666666667: [0.2, 0.8], 0.16: [0.8, 0.2], 0.088888888889: [0.3333333333333, 0.6666666666667], 0.782222222222: [0.1333333333333, 0.5333333333333, 0.8666666666667, 0.4666666666667], 0.888888888889: [0.3333333333333, 0.6666666666667], 0.915555555556: [0.1333333333333, 0.5333333333333, 0.8666666666667, 0.4666666666667], 0.728888888889: [0.2666666666667, 0.0666666666667, 0.7333333333333, 0.9333333333333], 0.24: [0.6, 0.4], 0.773333333333: [0.6, 0.4], 0.293333333333: [0.8, 0.2], 0.64: [0.6, 0.4], 0.862222222222: [0.2666666666667, 0.0666666666667, 0.7333333333333, 0.9333333333333], 0.906666666667: [0.6, 0.4], 0.133333333333: [0.0], 0.382222222222: [0.5333333333333, 0.1333333333333, 0.8666666666667, 0.4666666666667], 0.062222222222: [0.2666666666667, 0.0666666666667, 0.7333333333333, 0.9333333333333], 0.248888888889: [0.1333333333333, 0.5333333333333, 0.8666666666667, 0.4666666666667], 0.622222222222: [0.3333333333333, 0.6666666666667], 0.426666666667: [0.8, 0.2], 0.666666666667: [0.0], 0.355555555556: [0.3333333333333, 0.6666666666667], 0.755555555556: [0.6666666666667, 0.3333333333333], 0.8: [0.0], 0.4: [0.0], 0.995555555556: [0.2666666666667, 0.0666666666667, 0.7333333333333, 0.9333333333333], 0.328888888889: [0.0666666666667, 0.2666666666667, 0.7333333333333, 0.9333333333333], 0.222222222222: [0.3333333333333, 0.6666666666667], 0.026666666667: [0.8, 0.2], 0.515555555556: 
[0.5333333333333, 0.1333333333333, 0.8666666666667, 0.4666666666667], 0.933333333333: [0.0], 0.373333333333: [0.6, 0.4], 0.56: [0.8, 0.2], 0.488888888889: [0.3333333333333, 0.6666666666667]}
averages_odd={0.626666666667: [0.2, 0.8], 0.44: [0.6, 0.4], 0.173333333333: [0.6, 0.4], 0.973333333333: [0.6, 0.4], 0.715555555556: [0.1333333333333, 0.5333333333333, 0.8666666666667, 0.4666666666667], 0.528888888889: [0.2666666666667, 0.0666666666667, 0.7333333333333, 0.9333333333333], 0.76: [0.2, 0.8], 0.573333333333: [0.6, 0.4], 0.048888888889: [0.1333333333333, 0.5333333333333, 0.8666666666667, 0.4666666666667], 0.04: [0.6, 0.4], 0.848888888889: [0.1333333333333, 0.5333333333333, 0.8666666666667, 0.4666666666667], 0.662222222222: [0.2666666666667, 0.0666666666667, 0.7333333333333, 0.9333333333333], 0.893333333333: [0.8, 0.2], 0.706666666667: [0.6, 0.4], 0.182222222222: [0.5333333333333, 0.1333333333333, 0.8666666666667, 0.4666666666667], 0.982222222222: [0.5333333333333, 0.1333333333333, 0.8666666666667, 0.4666666666667], 0.795555555556: [0.0666666666667, 0.2666666666667, 0.7333333333333, 0.9333333333333], 0.315555555556: [0.1333333333333, 0.5333333333333, 0.8666666666667, 0.4666666666667], 0.84: [0.6, 0.4], 0.226666666667: [0.2, 0.8], 0.36: [0.2, 0.8], 0.928888888889: [0.0666666666667, 0.2666666666667, 0.7333333333333, 0.9333333333333], 0.066666666667: [0.0], 0.555555555556: [0.6666666666667, 0.3333333333333], 0.288888888889: [0.6666666666667, 0.3333333333333], 0.6: [0.0], 0.155555555556: [0.6666666666667, 0.3333333333333], 0.333333333333: [0.0], 0.688888888889: [0.3333333333333, 0.6666666666667], 0.448888888889: [0.1333333333333, 0.5333333333333, 0.8666666666667, 0.4666666666667], 0.022222222222: [0.3333333333333, 0.6666666666667], 0.262222222222: [0.2666666666667, 0.0666666666667, 0.7333333333333, 0.9333333333333], 0.733333333333: [0.0], 0.493333333333: [0.2, 0.8], 0.2: [0.0], 0.306666666667: [0.6, 0.4], 0.822222222222: [0.3333333333333, 0.6666666666667], 0.422222222222: [0.6666666666667, 0.3333333333333], 0.866666666667: [0.0], 0.128888888889: [0.0666666666667, 0.2666666666667, 0.7333333333333, 0.9333333333333], 0.466666666667: [0.0], 0.093333333333: [0.2, 
0.8], 0.955555555556: [0.3333333333333, 0.6666666666667], 0.582222222222: [0.1333333333333, 0.5333333333333, 0.8666666666667, 0.4666666666667], 0.395555555556: [0.2666666666667, 0.0666666666667, 0.7333333333333, 0.9333333333333]}
| 1,266.6
| 2,228
| 0.758408
| 904
| 6,333
| 5.307522
| 0.11615
| 0.015006
| 0.01188
| 0.087536
| 0.659233
| 0.542935
| 0.542935
| 0.542935
| 0.531263
| 0.186744
| 0
| 0.805674
| 0.070425
| 6,333
| 5
| 2,228
| 1,266.6
| 0.009343
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
bd65bb05f1bebc696e79f06a07b80ecef7742f41
| 104
|
py
|
Python
|
web/whim/core/scrapers/dummy.py
|
andrewgleave/whim
|
ddcadfa57dc0842be3a513605494e2d9a1eee8ef
|
[
"MIT"
] | null | null | null |
web/whim/core/scrapers/dummy.py
|
andrewgleave/whim
|
ddcadfa57dc0842be3a513605494e2d9a1eee8ef
|
[
"MIT"
] | null | null | null |
web/whim/core/scrapers/dummy.py
|
andrewgleave/whim
|
ddcadfa57dc0842be3a513605494e2d9a1eee8ef
|
[
"MIT"
] | null | null | null |
from .base import BaseScraper
class DummyScraper(BaseScraper):
def run(self):
return [{}]
| 14.857143
| 32
| 0.663462
| 11
| 104
| 6.272727
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.230769
| 104
| 6
| 33
| 17.333333
| 0.8625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
bdcb69810498a267dc3eb349458b17a3170cea26
| 20
|
py
|
Python
|
evaluate/previous_works/svsyn/exporters/__init__.py
|
Syniez/Joint_360depth
|
4f28c3b5b7f648173480052e205e898c6c7a5151
|
[
"MIT"
] | 92
|
2019-09-08T09:55:05.000Z
|
2022-02-21T21:29:40.000Z
|
exporters/__init__.py
|
zjsprit/SphericalViewSynthesis
|
fcdec95bf3ad109767d27396434b51cf3aad2b4b
|
[
"BSD-2-Clause"
] | 15
|
2020-12-30T22:36:08.000Z
|
2022-02-23T05:47:14.000Z
|
exporters/__init__.py
|
zjsprit/SphericalViewSynthesis
|
fcdec95bf3ad109767d27396434b51cf3aad2b4b
|
[
"BSD-2-Clause"
] | 26
|
2019-09-16T02:26:33.000Z
|
2021-10-21T03:55:02.000Z
|
from .image import *
| 20
| 20
| 0.75
| 3
| 20
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 20
| 1
| 20
| 20
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
da105a54334384e6f64e71950a9eda7c6b3f7564
| 339
|
py
|
Python
|
Ekeopara_Praise/Phase 1/Python Basic 2/Day26 Tasks/Task1.py
|
CodedLadiesInnovateTech/-python-challenge-solutions
|
430cd3eb84a2905a286819eef384ee484d8eb9e7
|
[
"MIT"
] | 6
|
2020-05-23T19:53:25.000Z
|
2021-05-08T20:21:30.000Z
|
Ekeopara_Praise/Phase 1/Python Basic 2/Day26 Tasks/Task1.py
|
CodedLadiesInnovateTech/-python-challenge-solutions
|
430cd3eb84a2905a286819eef384ee484d8eb9e7
|
[
"MIT"
] | 8
|
2020-05-14T18:53:12.000Z
|
2020-07-03T00:06:20.000Z
|
Ekeopara_Praise/Phase 1/Python Basic 2/Day26 Tasks/Task1.py
|
CodedLadiesInnovateTech/-python-challenge-solutions
|
430cd3eb84a2905a286819eef384ee484d8eb9e7
|
[
"MIT"
] | 39
|
2020-05-10T20:55:02.000Z
|
2020-09-12T17:40:59.000Z
|
'''1. Write a Python program to count the number of arguments in a given function.
Sample Output:
0
1
2
3
4
1'''
def num_of_args(*args):
return(len(args))
print(num_of_args())
print(num_of_args(1))
print(num_of_args(1, 2))
print(num_of_args(1, 2, 3))
print(num_of_args(1, 2, 3, 4))
print(num_of_args([1, 2, 3, 4]))
#Reference: w3resource
| 18.833333
| 82
| 0.710914
| 71
| 339
| 3.197183
| 0.394366
| 0.154185
| 0.277533
| 0.370044
| 0.449339
| 0.303965
| 0.23348
| 0.15859
| 0
| 0
| 0
| 0.074576
| 0.129794
| 339
| 18
| 83
| 18.833333
| 0.694915
| 0.377581
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| true
| 0
| 0
| 0.125
| 0.125
| 0.75
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
|
0
| 5
|
da1118f784670817f802afb48a7d54b0df35ddad
| 13
|
py
|
Python
|
code/var.py
|
brianshen0522/Crypto
|
cb422350c650308cbdcfb56e68aac61c46cb06bd
|
[
"MIT"
] | null | null | null |
code/var.py
|
brianshen0522/Crypto
|
cb422350c650308cbdcfb56e68aac61c46cb06bd
|
[
"MIT"
] | null | null | null |
code/var.py
|
brianshen0522/Crypto
|
cb422350c650308cbdcfb56e68aac61c46cb06bd
|
[
"MIT"
] | null | null | null |
my_id = "999"
| 13
| 13
| 0.615385
| 3
| 13
| 2.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 0.153846
| 13
| 1
| 13
| 13
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
da5bb81d38a192dc095cd0c9a6429216c3c69a9f
| 197
|
py
|
Python
|
server/xyz/apps/blog/admin.py
|
hellolex/xyz
|
21291c688f2be49900901fed72d1d9f0917689a6
|
[
"MIT"
] | null | null | null |
server/xyz/apps/blog/admin.py
|
hellolex/xyz
|
21291c688f2be49900901fed72d1d9f0917689a6
|
[
"MIT"
] | 6
|
2020-06-06T01:43:21.000Z
|
2022-03-21T22:18:23.000Z
|
server/xyz/apps/blog/admin.py
|
hellolex/xyz
|
21291c688f2be49900901fed72d1d9f0917689a6
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import User,Category,Article
# Register your models here.
admin.site.register(User)
admin.site.register(Category)
admin.site.register(Article)
| 24.625
| 42
| 0.786802
| 27
| 197
| 5.740741
| 0.481481
| 0.174194
| 0.329032
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121827
| 197
| 7
| 43
| 28.142857
| 0.895954
| 0.13198
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
da624fa6d083f6e0678300adeefc40e2f06023ba
| 57
|
py
|
Python
|
password_input.py
|
learnsomecode/python
|
d7b6f9ba228f7dc60670c427c4cf96a572193c97
|
[
"Unlicense"
] | 1
|
2016-01-21T02:29:45.000Z
|
2016-01-21T02:29:45.000Z
|
password_input.py
|
learnsomecode/python
|
d7b6f9ba228f7dc60670c427c4cf96a572193c97
|
[
"Unlicense"
] | null | null | null |
password_input.py
|
learnsomecode/python
|
d7b6f9ba228f7dc60670c427c4cf96a572193c97
|
[
"Unlicense"
] | null | null | null |
import getpass
password = getpass.getpass('Password: ')
| 14.25
| 40
| 0.754386
| 6
| 57
| 7.166667
| 0.5
| 0.697674
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122807
| 57
| 3
| 41
| 19
| 0.86
| 0
| 0
| 0
| 0
| 0
| 0.175439
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 1
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
da6f29be2db95f904e863cfce8e135728fcc49e2
| 222
|
py
|
Python
|
test/test_pole_summation.py
|
freude/NanoNet
|
37848691c0912e384b7b31f63d9785f30d9839cf
|
[
"MIT"
] | 23
|
2018-09-28T11:49:23.000Z
|
2022-03-05T05:49:14.000Z
|
test/test_pole_summation.py
|
floatingCatty/NanoNet
|
9d9f7f47319ee1509ccb3459b7b81542f4360b64
|
[
"MIT"
] | 10
|
2018-09-06T01:11:04.000Z
|
2021-12-22T13:04:28.000Z
|
test/test_pole_summation.py
|
floatingCatty/NanoNet
|
9d9f7f47319ee1509ccb3459b7b81542f4360b64
|
[
"MIT"
] | 5
|
2018-10-10T04:11:14.000Z
|
2021-05-17T10:47:40.000Z
|
# from negf import pole_summation_method
#
#
# def test_density_integration():
# """ """
# ans = pole_summation_method()
# ans1 = numerical_intrgration()
# np.testing.assert_allclose(ans, ans1, rtol=1e-2)
| 22.2
| 54
| 0.671171
| 26
| 222
| 5.423077
| 0.807692
| 0.184397
| 0.269504
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022222
| 0.189189
| 222
| 9
| 55
| 24.666667
| 0.761111
| 0.918919
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
da75de7b17c36dd0e5e6a70c1e492d8eb38c4c91
| 77
|
py
|
Python
|
src/polyswarmclient/ethereum/transaction/__init__.py
|
polyswarm/polyswarm-client
|
1ce057725d7db59c3582e4cd3cf148cde7ddddeb
|
[
"MIT"
] | 21
|
2018-09-15T00:12:42.000Z
|
2020-10-28T00:42:59.000Z
|
src/polyswarmclient/ethereum/transaction/__init__.py
|
polyswarm/polyswarm-client
|
1ce057725d7db59c3582e4cd3cf148cde7ddddeb
|
[
"MIT"
] | 435
|
2018-09-05T18:53:21.000Z
|
2021-11-30T17:32:10.000Z
|
src/polyswarmclient/ethereum/transaction/__init__.py
|
polyswarm/polyswarm-client
|
1ce057725d7db59c3582e4cd3cf148cde7ddddeb
|
[
"MIT"
] | 3
|
2019-07-26T00:14:47.000Z
|
2021-04-26T10:57:56.000Z
|
from .base import EthereumTransaction
from .noncemanager import NonceManager
| 25.666667
| 38
| 0.87013
| 8
| 77
| 8.375
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103896
| 77
| 2
| 39
| 38.5
| 0.971014
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e514d0bd5bdffe39422c254d05df5741a1b2cb2b
| 109
|
py
|
Python
|
python/rbbt.py
|
mikisvaz/rbbt-util
|
09e94f02c85da3f6e241270631dae84d88ecf560
|
[
"MIT"
] | 8
|
2015-05-01T17:39:00.000Z
|
2022-01-27T15:26:13.000Z
|
python/rbbt.py
|
mikisvaz/rbbt-util
|
09e94f02c85da3f6e241270631dae84d88ecf560
|
[
"MIT"
] | 6
|
2016-05-23T10:50:49.000Z
|
2017-09-04T09:19:41.000Z
|
python/rbbt.py
|
mikisvaz/rbbt-util
|
09e94f02c85da3f6e241270631dae84d88ecf560
|
[
"MIT"
] | 1
|
2016-06-07T09:49:05.000Z
|
2016-06-07T09:49:05.000Z
|
import warnings
import sys
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
def rbbt():
print("Rbbt")
| 13.625
| 40
| 0.706422
| 18
| 109
| 4.055556
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01087
| 0.155963
| 109
| 7
| 41
| 15.571429
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0.229358
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.5
| 0
| 0.666667
| 0.166667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e5232a3294c63e3b72f8372790c21c542f731e0f
| 681
|
py
|
Python
|
generator.py
|
CrazyCCBlog/telegram-bot-factory
|
bd42133e6347d689c3f9802fbf43ac1922713b22
|
[
"MIT"
] | null | null | null |
generator.py
|
CrazyCCBlog/telegram-bot-factory
|
bd42133e6347d689c3f9802fbf43ac1922713b22
|
[
"MIT"
] | null | null | null |
generator.py
|
CrazyCCBlog/telegram-bot-factory
|
bd42133e6347d689c3f9802fbf43ac1922713b22
|
[
"MIT"
] | null | null | null |
def generate_functions(function_name):
return "def {}(chat_id, message):\n\
bot = Bot(token=TELEGRAM_TOKEN)\
bot.sendMessage(chat_id=chat_id, text=message)\n\n".format(function_name)
def generate_callback(function_name):
return "def {}(ch, method, properties, body):\n\tpass\n\n".format(function_name)
def generate_subscriber():
return "def subscribe_topic(channel, topic):\n\
channel.queue_declare(queue=topic)\n\
channel.basic_consume(queue=topic,\
auto_ack=True, on_message_callback=callback)\n\n"
def insert_string(string, string_to_insert, tag):
pos = string.find(tag)
return string[:pos] + string_to_insert + string[pos:] + "\n"
| 32.428571
| 84
| 0.723935
| 98
| 681
| 4.806122
| 0.408163
| 0.101911
| 0.076433
| 0.089172
| 0.131635
| 0.131635
| 0.131635
| 0
| 0
| 0
| 0
| 0
| 0.132159
| 681
| 20
| 85
| 34.05
| 0.796954
| 0
| 0
| 0
| 1
| 0
| 0.07489
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0.071429
| 0
| 0.214286
| 0.571429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 5
|
e52433c52425f415ff8021f2d76f9fd769c00fbe
| 52
|
py
|
Python
|
__init__.py
|
cassberk/xps_peakfit
|
bbdd62dbfc4d64ec2af0c509361de81b0762bd41
|
[
"MIT"
] | 1
|
2021-07-21T13:13:25.000Z
|
2021-07-21T13:13:25.000Z
|
__init__.py
|
cassberk/xps_peakfit
|
bbdd62dbfc4d64ec2af0c509361de81b0762bd41
|
[
"MIT"
] | null | null | null |
__init__.py
|
cassberk/xps_peakfit
|
bbdd62dbfc4d64ec2af0c509361de81b0762bd41
|
[
"MIT"
] | null | null | null |
from . import helper_functions
from . import bkgrds
| 26
| 31
| 0.807692
| 7
| 52
| 5.857143
| 0.714286
| 0.487805
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 52
| 2
| 32
| 26
| 0.931818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
e52c092fe1929735843b49eb07201a86a92854c9
| 87
|
py
|
Python
|
backend/generate_secret_key.py
|
grchristensen/avpd
|
f7617844ae454a93825aa231e04c125cb4e58a20
|
[
"Apache-2.0"
] | null | null | null |
backend/generate_secret_key.py
|
grchristensen/avpd
|
f7617844ae454a93825aa231e04c125cb4e58a20
|
[
"Apache-2.0"
] | 9
|
2021-03-04T20:29:54.000Z
|
2021-03-31T22:03:51.000Z
|
backend/generate_secret_key.py
|
grchristensen/avpd
|
f7617844ae454a93825aa231e04c125cb4e58a20
|
[
"Apache-2.0"
] | 3
|
2021-01-30T02:19:07.000Z
|
2021-04-11T19:48:37.000Z
|
from django.core.management import utils
print(utils.get_random_secret_key(), end="")
| 21.75
| 44
| 0.793103
| 13
| 87
| 5.076923
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08046
| 87
| 3
| 45
| 29
| 0.825
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
e5ac0dd484a60d14c536b7fe0417f35712e35d4d
| 881
|
py
|
Python
|
files/keystone_ldap_schema.py
|
HPCStack/openldap-server-charm
|
a15aa9c352756fbdf6b36d0166a9ca42bee992fa
|
[
"Apache-2.0"
] | null | null | null |
files/keystone_ldap_schema.py
|
HPCStack/openldap-server-charm
|
a15aa9c352756fbdf6b36d0166a9ca42bee992fa
|
[
"Apache-2.0"
] | null | null | null |
files/keystone_ldap_schema.py
|
HPCStack/openldap-server-charm
|
a15aa9c352756fbdf6b36d0166a9ca42bee992fa
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
"""
https://wiki.openstack.org/wiki/OpenLDAP
"""
import sys
if sys.argv.__len__() < 3:
usage = """
USAGE: {0} subtree organization
{0} Generates an LDIF file that can then be added to a Directory server via
the ldapadd command. The Schema is in the format expected by the LDAP
Identity Driver in Keystone
"""
print usage.format(sys.argv[0])
sys.exit(1)
subtree = sys.argv[1]
organization = sys.argv[2]
ldif_file = """
dn: {0}
dc: {1}
objectClass: dcObject
objectClass: organizationalUnit
ou: {1}
dn: ou=Groups,{0}
objectClass: top
objectClass: organizationalUnit
ou: groups
dn: ou=Users,{0}
objectClass: top
objectClass: organizationalUnit
ou: users
dn: ou=Roles,{0}
objectClass: top
objectClass: organizationalUnit
ou: roles
dn: ou=Projects,{0}
objectClass: organizationalUnit
ou: Projects
"""
print ldif_file.format(subtree, organization)
| 17.979592
| 75
| 0.733258
| 126
| 881
| 5.079365
| 0.47619
| 0.226563
| 0.242188
| 0.121875
| 0.215625
| 0.215625
| 0
| 0
| 0
| 0
| 0
| 0.018592
| 0.145289
| 881
| 48
| 76
| 18.354167
| 0.831341
| 0.018161
| 0
| 0.285714
| 1
| 0
| 0.720588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.028571
| null | null | 0.057143
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e5f46838c5f3892fb4c67f118f7ce2e148b2bd02
| 17
|
py
|
Python
|
test3.py
|
lefthandedfreak/pynet
|
729fa2a635fc3471a08d78ff430193d44b584d0b
|
[
"Apache-2.0"
] | null | null | null |
test3.py
|
lefthandedfreak/pynet
|
729fa2a635fc3471a08d78ff430193d44b584d0b
|
[
"Apache-2.0"
] | null | null | null |
test3.py
|
lefthandedfreak/pynet
|
729fa2a635fc3471a08d78ff430193d44b584d0b
|
[
"Apache-2.0"
] | null | null | null |
print("xxxxxxx")
| 8.5
| 16
| 0.705882
| 2
| 17
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 17
| 1
| 17
| 17
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0.411765
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
f91bedf0ea029a13baa915ea221c928dca074d68
| 97
|
py
|
Python
|
fastapi_skeleton/models/song.py
|
Sayar1106/OTTPlatformRecommender
|
85b72dfe9f810e3b6e12f8c7702ef94db3a03190
|
[
"MIT"
] | null | null | null |
fastapi_skeleton/models/song.py
|
Sayar1106/OTTPlatformRecommender
|
85b72dfe9f810e3b6e12f8c7702ef94db3a03190
|
[
"MIT"
] | 1
|
2020-09-10T17:48:09.000Z
|
2020-09-10T17:50:28.000Z
|
fastapi_skeleton/models/song.py
|
Sayar1106/OTTPlatformRecommender
|
85b72dfe9f810e3b6e12f8c7702ef94db3a03190
|
[
"MIT"
] | 1
|
2020-09-25T11:32:57.000Z
|
2020-09-25T11:32:57.000Z
|
from fastapi_skeleton.models.payload import (baseclass)
class song(baseclass):
id: int
| 16.166667
| 55
| 0.731959
| 12
| 97
| 5.833333
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185567
| 97
| 6
| 56
| 16.166667
| 0.886076
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0090570118c45a54e2e29b59d7a2a46636c466d2
| 142
|
py
|
Python
|
src/awesome_python/operator/threadpool_flag.py
|
JoshuaZero/awesome_python_test
|
358813173d324e510e9793ea57ccfcac4146e2e5
|
[
"MIT"
] | null | null | null |
src/awesome_python/operator/threadpool_flag.py
|
JoshuaZero/awesome_python_test
|
358813173d324e510e9793ea57ccfcac4146e2e5
|
[
"MIT"
] | 11
|
2020-07-01T06:42:40.000Z
|
2021-09-22T19:21:40.000Z
|
src/awesome_python/operator/threadpool_flag.py
|
JoshuaZero/awesome_python_test
|
358813173d324e510e9793ea57ccfcac4146e2e5
|
[
"MIT"
] | null | null | null |
#coding=utf-8
#!env python
# Author: joshua_zero@outlook.com
import collections
from concurrent import futures
import requests
import tqdm
| 14.2
| 34
| 0.802817
| 20
| 142
| 5.65
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00813
| 0.133803
| 142
| 9
| 35
| 15.777778
| 0.910569
| 0.394366
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
00e69b6eb09a22eb69f04f11984aa33f9fd00e90
| 339
|
py
|
Python
|
authenticate/views.py
|
DarkHorse1997/Django_site
|
18ca9a10079695c20896fefc88ce4f78ac6729f2
|
[
"MIT"
] | null | null | null |
authenticate/views.py
|
DarkHorse1997/Django_site
|
18ca9a10079695c20896fefc88ce4f78ac6729f2
|
[
"MIT"
] | null | null | null |
authenticate/views.py
|
DarkHorse1997/Django_site
|
18ca9a10079695c20896fefc88ce4f78ac6729f2
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.contrib.auth import authenticate, login, logout
# Create your views here.
def home(request):
return render(request,'authenticate/home.html',{})
def login_user(request):
if request.method == "POST":
pass
else:
return render(request,'authenticate/login.html',{})
| 26.076923
| 59
| 0.707965
| 42
| 339
| 5.690476
| 0.595238
| 0.083682
| 0.158996
| 0.259414
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.174041
| 339
| 12
| 60
| 28.25
| 0.853571
| 0.067847
| 0
| 0
| 0
| 0
| 0.156051
| 0.143312
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0.111111
| 0.222222
| 0.111111
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 5
|
dae5ced43ba1078b88db010d058e0aee54f2d477
| 39
|
py
|
Python
|
deepvo/networks/common.py
|
SimonsRoad/deepvo
|
e5bc1a0a22ce95804c4ef9e49a03aef7ce827234
|
[
"MIT"
] | 1
|
2021-07-13T08:36:40.000Z
|
2021-07-13T08:36:40.000Z
|
deepvo/networks/common.py
|
SimonsRoad/deepvo
|
e5bc1a0a22ce95804c4ef9e49a03aef7ce827234
|
[
"MIT"
] | null | null | null |
deepvo/networks/common.py
|
SimonsRoad/deepvo
|
e5bc1a0a22ce95804c4ef9e49a03aef7ce827234
|
[
"MIT"
] | null | null | null |
def flownet_v1_s(input):
pass
| 4.875
| 24
| 0.615385
| 6
| 39
| 3.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 0.307692
| 39
| 7
| 25
| 5.571429
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
971c5d21e3053ce1b099d36a6c40e3980d8a78fa
| 115
|
py
|
Python
|
src/batch/__init__.py
|
johannchopin/htw-m1-softwarearchitecture
|
85256c5643b0b8b3016f007ca56ade37e048c683
|
[
"Apache-2.0"
] | null | null | null |
src/batch/__init__.py
|
johannchopin/htw-m1-softwarearchitecture
|
85256c5643b0b8b3016f007ca56ade37e048c683
|
[
"Apache-2.0"
] | 1
|
2021-01-20T21:48:22.000Z
|
2021-01-20T21:50:42.000Z
|
src/batch/__init__.py
|
johannchopin/htw-m1-softwarearchitecture
|
85256c5643b0b8b3016f007ca56ade37e048c683
|
[
"Apache-2.0"
] | null | null | null |
from .CassandraWrapper import CassandraWrapper
from .BatchLayer import BatchLayer
from .singleton import singleton
| 28.75
| 46
| 0.869565
| 12
| 115
| 8.333333
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104348
| 115
| 3
| 47
| 38.333333
| 0.970874
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
97272dd70706df7f6363189f2467368d7fba03c5
| 99
|
py
|
Python
|
Demographic-data-analyzer/main.py
|
PedroEduardoSS/Data-Analisys-projects
|
f06c2d7091a9a61509525019f2f0375e21698f6a
|
[
"MIT"
] | null | null | null |
Demographic-data-analyzer/main.py
|
PedroEduardoSS/Data-Analisys-projects
|
f06c2d7091a9a61509525019f2f0375e21698f6a
|
[
"MIT"
] | null | null | null |
Demographic-data-analyzer/main.py
|
PedroEduardoSS/Data-Analisys-projects
|
f06c2d7091a9a61509525019f2f0375e21698f6a
|
[
"MIT"
] | null | null | null |
from demographic_data import *
# Test your function by calling it here
calculate_demographic_data()
| 33
| 39
| 0.838384
| 14
| 99
| 5.714286
| 0.857143
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 99
| 3
| 40
| 33
| 0.91954
| 0.373737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
977ea82a55bfb6e2e07f23192c1c2df42d554cef
| 266
|
py
|
Python
|
src/fourier_feature_encoding.py
|
Synerise/recsys-challenge-2021
|
f8e8005a1553c14bae16951d787d6864094f7a3b
|
[
"Apache-2.0"
] | 3
|
2021-12-18T17:30:06.000Z
|
2022-02-16T10:54:00.000Z
|
src/fourier_feature_encoding.py
|
Synerise/recsys-challenge-2021
|
f8e8005a1553c14bae16951d787d6864094f7a3b
|
[
"Apache-2.0"
] | 1
|
2021-12-18T11:49:11.000Z
|
2021-12-18T14:04:28.000Z
|
src/fourier_feature_encoding.py
|
Synerise/recsys-challenge-2021
|
f8e8005a1553c14bae16951d787d6864094f7a3b
|
[
"Apache-2.0"
] | 3
|
2021-11-17T08:44:35.000Z
|
2022-02-16T10:53:59.000Z
|
import numpy as np
def multiscale(x, scales):
return np.hstack([x.reshape(-1, 1)/pow(2., i) for i in scales])
def encode_scalar_column(x, scales=[-1, 0, 1, 2, 3, 4, 5, 6]):
return np.hstack([np.sin(multiscale(x, scales)), np.cos(multiscale(x, scales))])
| 26.6
| 84
| 0.646617
| 49
| 266
| 3.469388
| 0.55102
| 0.164706
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.049107
| 0.157895
| 266
| 9
| 85
| 29.555556
| 0.709821
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0.4
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
97988c95e8b6a57f1faa91beb4c87b027c14a7bd
| 5,171
|
py
|
Python
|
test/iching/cipin_4909.py
|
IWantToClearMyHead/IWantToClearMyHead.github.io
|
13968b46bf6da5e61fc628309ed2a7e7529af58e
|
[
"MIT"
] | null | null | null |
test/iching/cipin_4909.py
|
IWantToClearMyHead/IWantToClearMyHead.github.io
|
13968b46bf6da5e61fc628309ed2a7e7529af58e
|
[
"MIT"
] | null | null | null |
test/iching/cipin_4909.py
|
IWantToClearMyHead/IWantToClearMyHead.github.io
|
13968b46bf6da5e61fc628309ed2a7e7529af58e
|
[
"MIT"
] | null | null | null |
from collections import Counter
from pprint import pprint
import sys
b = '''乾元亨利貞初九潛龍勿用九二見龍在田利見大人九三君子終日乾乾夕惕若厲旡咎九四或躍在淵旡咎九五飛龍在天利見大人上九亢龍有悔用九見群龍旡首吉坤元亨利牝馬之貞君子有攸往先迷後得主利西南得朋東北喪朋安貞吉初六履霜堅冰至六二直方大不習无不利六三含章可貞或從王事无成有終六四括囊无咎无譽六五黃裳元吉上六龍戰于野其血玄黃用六利永貞屯元亨利貞勿用有攸往利建侯初九磐桓利居貞利建侯六二屯如邅如乘馬班如匪寇婚媾女子貞不字十年乃字六三即鹿无虞惟入于林中君子幾不如舍往吝六四乘馬班如求婚媾往吉无不利九五屯其膏小貞吉大貞凶上六乘馬班如泣血漣如蒙亨匪我求童蒙童蒙求我初筮告再三瀆瀆則不告利貞初六發蒙利用刑人用說桎梏以往吝九二包蒙吉納婦吉子克家六三勿用取女見金夫不有躬无攸利六四困蒙吝六五童蒙吉上九擊蒙不利為寇利禦寇需有孚光亨貞吉利涉大川初九需于郊利用恆无咎九二需于沙小有言終吉九三需于泥致寇至六四需于血出自穴九五需于酒食貞吉上六入于穴有不速之客三人來敬之終吉訟有孚窒惕中吉終凶利見大人不利涉大川初六不永所事小有言終吉九二不克訟歸而逋其邑人三百戶无眚六三食舊德貞厲終吉或從王事无成九四不克訟復即命渝安貞吉九五訟元吉上九或錫之鞶帶終朝三褫之師貞丈人吉无咎初六師出以律否臧凶九二在師中吉无咎王三錫命六三師或輿尸凶六四師左次无咎六五田有禽利執言无咎長子帥師弟子輿尸貞凶上六大君有命開國承家小人勿用比吉原筮元永貞无咎不寧方來後夫凶初六有孚比之无咎有孚盈缶終來有它吉六二比之自內貞吉六三比之匪人六四外比之貞吉九五顯比王用三驅失前禽邑人不誡吉上六比之无首凶小畜亨密雲不雨自我西郊初九復自道何其咎吉九二牽復吉九三輿說輻夫妻反目六四有孚血去惕出无咎九五有孚攣如富以其鄰上九既雨既處尚德載婦貞厲月幾望君子征凶履虎尾不咥人亨初九素履往无咎九二履道坦坦幽人貞吉六三眇能視跛能履履虎尾咥人凶武人為于大君九四履虎尾愬愬終吉九五夬履貞厲上九視履考祥其旋元吉泰小往大來吉亨初九拔茅茹以其彙征吉九二包荒用馮河不遐遺朋亡得尚于中行九三无平不陂无往不復艱貞无咎勿恤其孚于食有福六四翩翩不富以其鄰不戒以孚六五帝乙歸妹以祉元吉上六城復于隍勿用師自邑告命貞吝否之匪人不利君子貞大往小來初六拔茅茹以其彙貞吉亨六二包承小人吉大人否亨六三包羞九四有命无咎疇離祉九五休否大人吉其亡其亡繫于苞桑上九傾否先否後喜同人于野亨利涉大川利君子貞君子以類族辨物初九同人于門无咎六二同人于宗吝九三伏戎于莽升其高陵三歲不興九四乘其墉弗克攻吉九五同人先號咷而後笑大師克相遇上九同人于郊无悔大有元亨初九无交害匪咎艱則无咎九二大車以載有攸往无咎九三公用亨于天子小人弗克九四匪其彭无咎六五厥孚交如威如吉上九自天祐之吉无不利謙亨君子有終初六謙謙君子用涉大川吉六二鳴謙貞吉九三勞謙君子有終吉六四无不利撝謙六五不富以其鄰利用侵伐无不利上六鳴謙利用行師征邑國豫利建侯行師初六鳴豫凶六二介于石不終日貞吉六三盱豫悔遲有悔九四由豫大有得勿疑朋盍簪六五貞疾恒不死上六冥豫成有渝无咎隨元亨利貞无咎初九官有渝貞吉出門交有功六二係小子失丈夫六三係丈夫失小子隨有求得利居貞九四隨有獲貞凶有孚在道以明何咎九五孚于嘉吉上六拘係之乃從維之王用亨于西山蠱元亨利涉大川先甲三日後甲三日初六幹父之蠱有子考无咎厲終吉九二幹母之蠱不可貞九三幹父之蠱小有悔无大咎六四裕父之蠱往見吝六五幹父之蠱用譽上九不事王侯高尚其事臨元亨利貞至于八月有凶初九咸臨貞吉九二咸臨吉无不利六三甘臨无攸利既憂之无咎六四至臨无咎六五知臨大君之宜吉上六敦臨吉无咎觀盥而不薦有孚顒若初六童觀小人无咎君子吝六二闚觀利女貞六三觀我生進退六四觀國之光利用賓于王九五觀我生君子无咎上九觀其生君子无咎噬嗑亨利用獄初九屨校滅趾无咎六二噬膚滅鼻无咎六三噬腊肉遇毒小吝无咎九四噬乾胏得金矢利艱貞吉六五噬乾肉得黃金貞厲无咎上九何校滅耳凶賁亨小利有攸往初九賁其趾舍車而徒六二賁其須九三賁如濡如永貞吉六四賁如皤如白馬翰如匪寇婚媾六五賁于丘園束帛戔戔吝終吉上九白賁无咎剝不利有攸往初六剝床以足蔑貞凶六二剝床以辨蔑貞凶六三剝之无咎六四剝床以膚凶六五貫魚以宮人寵无不利上九碩果不食君子得輿小人剝廬復亨出入无疾朋來无咎反復其道七日來復利有攸往初九不遠復无祇悔元吉六二休復吉六三頻復厲无咎六四中行獨復六五敦復无悔上六迷復凶有災眚用行師終有大敗以其國君凶至于十年不克征无妄元亨利貞其匪正有眚不利有攸往初九无妄往吉六二不耕穫不菑畬則利有攸往六三无妄之災或繫之牛行人之得邑人之災九四可貞无咎九五无妄之疾勿藥有喜上九无妄行有眚无攸利大畜利貞不家食吉利涉大川初九有厲利已九二輿說輹六四童牛之牿元吉六五豶豕之牙吉上九何天之衢亨頤貞吉觀頤自求口實初九舍爾靈龜觀我朵頤凶六二顛頤拂經于丘頤征凶六三拂頤貞凶十年勿用无攸利六四顛頤吉虎視眈眈其欲逐逐无咎六五拂經居貞吉不可涉大川上九由頤厲吉利涉大川大過棟撓利有攸往亨初
六藉用白茅无咎九二枯楊生稊老夫得其女妻无不利九三棟橈凶九四棟隆吉有它吝九五枯楊生華老婦得其士夫无咎无譽上六過涉滅頂凶无咎習坎有孚維心亨行有尚初六習坎入于坎窞凶九二坎有險求小得六三來之坎坎險且枕入于坎窞勿用六四樽酒簋貳用缶納約自牖終无咎九五坎不盈祗既平无咎上六係用黴纆寘于叢棘三歲不得凶離利貞亨畜牝牛吉初九履錯然敬之无咎六二黃離元吉九三日昃之離不鼓缶而歌則大耋之嗟凶九四突如其來如焚如死如棄如六五出涕沱若戚嗟若吉上九王用出征有嘉折首獲匪其醜无咎咸亨利貞取女吉初六咸其拇六二咸其腓凶居吉九三咸其股執其隨往吝九四貞吉悔亡憧憧往來朋從爾思九五咸其脢无悔上六咸其輔頰舌恒亨无咎利貞利有攸往初六浚恒貞凶无攸利九二悔亡九三不恒其德或承之羞貞吝九四田无禽六五恒其德貞婦人吉夫子凶上六振恒凶遯亨小利貞初六遯尾厲勿用有攸往六二執之用黃牛之革莫之勝說九三係遯有疾厲畜臣妾吉九四好遯君子吉小人否九五嘉遯貞吉上九肥遯无不利大壯利貞初九壯于趾征凶有孚九二貞吉九三小人用壯君子用罔貞厲羝羊觸藩羸其角九四貞吉悔亡藩決不羸壯于大輿之輹六五喪羊于易无悔上六羝羊觸藩不能退不能遂无攸利艱則吉晉康侯用錫馬蕃庶晝日三接初六晉如摧如貞吉罔孚裕无咎六二晉如愁如貞吉受茲介福于其王母六三眾允悔亡九四晉如鼫鼠貞厲六五悔亡失得勿恤往吉无不利上九晉其角維用伐邑厲吉无咎貞吝明夷利艱貞初九明夷于飛垂其翼君子于行三日不食有攸往主人有言六二明夷夷于左股用拯馬壯吉九三明夷于南狩得其大首不可疾貞六四入于左腹獲明夷之心于出門庭六五箕子之明夷利貞上六不明晦初登于天後入于地家人利女貞初九閑有家悔亡六二无攸遂在中饋貞吉九三家人嗃嗃悔厲吉婦子嘻嘻終吝六四富家大吉九五王假有家勿恤吉上九有孚威如終吉睽小事吉初九悔亡喪馬勿逐自復見惡人无咎九二遇主于巷无咎六三見輿曳其牛掣其人天且劓无初有終九四睽孤遇元夫交孚厲无咎六五悔亡厥宗噬膚往何咎上九睽孤見豕負塗載鬼一車先張之弧後說之弧匪寇婚媾往遇雨則吉蹇利西南不利東北利見大人貞吉初六往蹇來譽六二王臣蹇蹇匪躬之故九三往蹇來反六四往蹇來連九五大蹇朋來上六往蹇來碩吉利見大人解利西南无所往其來復吉有攸往夙吉初六无咎九二田獲三狐得黃矢貞吉六三負且乘致寇至貞吝九四解而拇朋至斯孚六五君子維有解吉有孚于小人上六公用射隼于高墉之上獲之无不利損有孚元吉无咎可貞利有攸往曷之用二簋可用享初九已事遄往无咎酌損之九二利貞征凶弗損益之六三三人行則損一人一人行則得其友六四損其疾使遄有喜无咎六五或益之十朋之龜弗克違元吉上九弗損益之无咎貞吉利有攸往得臣无家益利有攸往利涉大川初九利用為大作元吉无咎六二或益之十朋之龜弗克違永貞吉王用享于帝吉六三益之用凶事无咎有孚中行告公用圭六四中行告公從利用為依遷國九五有孚惠心勿問元吉有孚惠我德上九莫益之或擊之立心勿恆凶夬揚于王庭孚號有厲告自邑不利即戎利有攸往初九壯于前趾往不勝為咎九二惕號莫夜有戎勿恤九三壯于頄有凶君子夬夬獨行遇雨若濡有慍无咎九四臀无膚其行次且牽羊悔亡聞言不信九五莧陸夬夬中行无咎上六无號終有凶姤女壯勿用取女初六繫于金柅貞吉有攸往見凶羸豕孚蹢躅九二包有魚无咎不利賓九三臀无膚其行次且厲无大咎九四包无魚起凶九五以杞包瓜含章有隕自天上九姤其角吝无咎萃亨王假有廟利見大人亨利貞用大牲吉利有攸往初六有孚不終乃亂乃萃若號一握為笑勿恤往无咎六二引吉无咎孚乃利用禴六三萃如嗟如无攸利往无咎小吝九四大吉无咎九五萃有位无咎匪孚元永貞悔亡上六齎咨涕洟无咎升元亨用見大人勿恤南征吉初六允升大吉九二孚乃利用禴无咎九三升虛邑六四王用亨于岐山吉无咎六五貞吉升階上六冥升利于不息之貞困亨貞大人吉无咎有言不信初六臀困于株木入于幽谷三歲不覿九二困于酒食朱紱方來利用享祀征凶无咎六三困于石據于蒺蔾入于其宮不見其妻凶九四來徐徐困于金車吝有終九五劓刖困于赤紱乃徐有說利用祭祀井改邑不改井无喪无得往來井井汔至亦未繘井羸其瓶凶初六井泥不食舊井无禽九二井谷射鮒甕敝漏九三井渫不食為我心惻可用汲王明並受其福六四井甃无咎九五井洌寒泉食上六井收勿幕有孚元吉革已日乃孚元亨利貞悔亡初九鞏用黃牛之革六二已日乃革之征吉无咎九三征凶貞厲革言三就有孚九四悔亡有孚改命吉九五大人虎變未占有孚上六君子豹變小人革面征凶居貞吉鼎元吉亨初六鼎顛趾利出否得妾以其子无咎九二鼎有實我仇有疾不我能即吉九三鼎耳革其行塞雉膏不食方雨虧悔終吉九四鼎折足覆公餗其形渥凶六五鼎黃耳金鉉利貞上九鼎玉鉉大吉无不利震亨震來虩虩笑言啞啞震驚百里不喪匕鬯初九震來虩虩後笑言啞啞吉六二震來厲億喪貝躋于九陵勿逐七日得六三震蘇蘇震行无眚九四震遂泥六五震往來厲億无喪有事上六震索索視矍矍征凶震不于其躬于其鄰无咎婚媾有言艮其背不獲其身行其庭不見其人无咎初六艮其趾无咎利永貞六二艮其腓不拯其隨其心不快九三艮其限列其夤厲薰心六四艮其身无咎六五艮其輔言有序悔亡上九敦艮吉漸女歸吉利貞初六鴻漸于干小子厲有言无咎六二鴻漸于磐飲
食衎衎吉九三鴻漸于陸夫征不復婦孕不育凶利禦寇六四鴻漸于木或得其桷无咎九五鴻漸于陵婦三歲不孕終莫之勝吉上九鴻漸于陸其羽可用為儀吉歸妹征凶无攸利初九歸妹以娣跛能履征吉九二眇能視利幽人之貞六三歸妹以須反歸以娣九四歸妹愆期遲歸有時六五帝乙歸妹其君之袂不如其娣之袂良月幾望吉上六女承筐无實士刲羊无血无攸利豐亨王假之勿憂宜日中初九遇其配主雖旬无咎往有尚六二豐其蔀日中見斗往得疑疾有孚發若吉九三豐其沛日中見沬折其右肱无咎九四豐其蔀日中見斗遇其夷主吉六五來章有慶譽吉上六豐其屋蔀其家闚其戶闃其无人三歲不覿凶旅小亨旅貞吉初六旅瑣瑣斯其所取災六二旅即次懷其資得童僕貞九三旅焚其次喪其童僕貞厲九四旅于處得其資斧我心不快六五射雉一矢亡終以譽命上九鳥焚其巢旅人先笑後號咷喪牛于易凶巽小亨利有攸往利見大人初六進退利武人之貞九二巽在床下用史巫紛若吉无咎九三頻巽吝六四悔亡田獲三品九五貞吉悔亡无不利无初有終先庚三日後庚三日吉上九巽在床下喪其資斧貞凶兌亨利貞初九和兌吉九二孚兌吉悔亡六三來兌凶九四商兌未寧介疾有喜九五孚于剝有厲上六引兌渙亨王假有廟利涉大川利貞初六用拯馬壯吉九二渙奔其机悔亡六三渙其躬无悔六四渙其群元吉渙有丘匪夷所思九五渙汗其大號渙王居无咎上九渙其血去逖出无咎節亨苦節不可貞初九不出戶庭无咎九二不出門庭凶六三不節若則嗟若无咎六四安節亨九五甘節吉往有尚上六苦節貞凶悔亡中孚豚魚吉利涉大川利貞初九虞吉有它不燕九二鶴鳴在陰其子和之我有好爵吾與爾靡之六三得敵或鼓或罷或泣或歌六四月幾望馬匹亡无咎九五有孚攣如无咎上九翰音登于天貞凶小過亨利貞可小事不可大事飛鳥遺之音不宜上宜下大吉初六飛鳥以凶六二過其祖遇其妣不及其君遇其臣无咎九三弗過防之從或戕之凶九四无咎弗過遇之往厲必戒勿用永貞六五密雲不雨自我西郊公弋取彼在穴上六弗遇過之飛鳥離之凶是謂災眚既濟亨小利貞初吉終亂初九曳其輪濡其尾无咎六二婦喪其茀勿逐七日得九三高宗伐鬼方三年克之小人勿用六四繻有衣袽終日戒九五東鄰殺牛不如西鄰之禴祭實受其福上六濡其首厲未濟亨小狐汔濟濡其尾无攸利初六濡其尾吝九二曳其輪貞吉六三未濟征凶利涉大川九四貞吉悔亡震用伐鬼方三年有賞于大國六五貞吉无悔君子之光有孚吉上九有孚于飲酒无咎濡其首有孚失是'''
counter = Counter(b)
#pprint(counter.most_common())
# Print one markdown-style table row per character, most frequent first,
# escaping newlines so each row stays on a single line, then the total.
# Fix: the accumulator was named `sum`, shadowing the builtin, and each
# (already-int) count was wrapped in a redundant int() call.
total = 0
for char, count in counter.most_common():
    total += count
    print('|', char.replace("\n", "\\n"), '|', count, '|')
print(total)
| 430.916667
| 4,919
| 0.980468
| 41
| 5,171
| 123.609756
| 0.487805
| 0.004736
| 0.006709
| 0.007893
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00078
| 0.008122
| 5,171
| 11
| 4,920
| 470.090909
| 0.987327
| 0.005608
| 0
| 0
| 0
| 0
| 0.956429
| 0.954873
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.3
| 0
| 0.3
| 0.3
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
97b1cff01313739aa388e803b08b5ac4342cbf88
| 101
|
py
|
Python
|
mundo 1/aula 7/exer5.py
|
jonatan098/cursopython
|
6e4cbaef6229e230fdbc66d80ec1b5a089887b0d
|
[
"MIT"
] | null | null | null |
mundo 1/aula 7/exer5.py
|
jonatan098/cursopython
|
6e4cbaef6229e230fdbc66d80ec1b5a089887b0d
|
[
"MIT"
] | null | null | null |
mundo 1/aula 7/exer5.py
|
jonatan098/cursopython
|
6e4cbaef6229e230fdbc66d80ec1b5a089887b0d
|
[
"MIT"
] | 1
|
2020-02-22T17:21:05.000Z
|
2020-02-22T17:21:05.000Z
|
# Read an integer from the user and report its predecessor and successor.
num = int(input('digite um numero '))
print(f'o antecessor do numero {num} e {num-1} e o sucessor e {num+1}')
| 50.5
| 65
| 0.643564
| 22
| 101
| 2.954545
| 0.636364
| 0.061538
| 0.092308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02381
| 0.168317
| 101
| 2
| 65
| 50.5
| 0.75
| 0
| 0
| 0
| 0
| 0.5
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
8af3a78f7e9a59d3865bb82bef8c443236213dc9
| 43,566
|
py
|
Python
|
GhClimHub/app/drought.py
|
Techyiad/Climate-Mitigant
|
3fdbd01d4e2230fa95fc184682351cce389ec87a
|
[
"MIT"
] | null | null | null |
GhClimHub/app/drought.py
|
Techyiad/Climate-Mitigant
|
3fdbd01d4e2230fa95fc184682351cce389ec87a
|
[
"MIT"
] | null | null | null |
GhClimHub/app/drought.py
|
Techyiad/Climate-Mitigant
|
3fdbd01d4e2230fa95fc184682351cce389ec87a
|
[
"MIT"
] | null | null | null |
import numpy as np
import ee
import datetime as dt
import math
from calendar import monthrange
from google.appengine.api import urlfetch
# Raise the App Engine URL Fetch timeout to 45 seconds for all requests.
urlfetch.set_default_fetch_deadline(45)
def create_datelist(start_date, n_months):
    """Return a numpy array of n_months dates, one per month from start_date.

    Fix: the original called dateutil's relativedelta, which is never
    imported anywhere in this module, so any call raised NameError.  The
    month arithmetic is reimplemented with the stdlib (calendar.monthrange),
    matching relativedelta's behaviour of clamping the day to the end of
    shorter months (e.g. Jan 31 + 1 month -> Feb 28/29).
    """
    def _add_months(d, m):
        # Compute target year/month, then clamp the day to that month's length.
        total = d.month - 1 + m
        years, month0 = divmod(total, 12)
        year = d.year + years
        day = min(d.day, monthrange(year, month0 + 1)[1])
        return d.replace(year=year, month=month0 + 1, day=day)
    dates = [_add_months(start_date, i) for i in range(0, n_months)]
    return np.array(dates)
def calcMonthlyMean(imageCollection, years, months):
    """Build an ImageCollection holding one monthly-sum image per (year, month).

    Each image is tagged with 'year', 'month', 'date' and 'system:time_start'
    properties so it can be filtered and sorted downstream.
    """
    images = ee.List([])
    for yr in years:
        for mo in months:
            monthly_sum = (imageCollection
                           .filter(ee.Filter.calendarRange(yr, yr, 'year'))
                           .filter(ee.Filter.calendarRange(mo, mo, 'month'))
                           .sum())
            stamp = ee.Date.fromYMD(yr, mo, 1)
            tagged = (monthly_sum
                      .set('year', yr)
                      .set('month', mo)
                      .set('date', stamp)
                      .set('system:time_start', stamp))
            images = images.add(ee.Image(tagged))
    return ee.ImageCollection.fromImages(images)
# Landsat Collection-1 surface-reflectance collections with the thermal band
# renamed to a common "SR" alias (B6 on L5/L7, B10 on L8), plus the matching
# top-of-atmosphere collections used for NDVI/NDWI and cloud scoring.
btl5 = ee.ImageCollection("LANDSAT/LT05/C01/T1_SR").select(["B6"],["SR"])
btl7 = ee.ImageCollection("LANDSAT/LE07/C01/T1_SR").select(["B6"],["SR"])
btl8 = ee.ImageCollection("LANDSAT/LC08/C01/T1_SR").select(["B10"],["SR"])
nl5 = ee.ImageCollection("LANDSAT/LT05/C01/T1_TOA")
nl7 = ee.ImageCollection("LANDSAT/LE07/C01/T1_TOA")
nl8 = ee.ImageCollection("LANDSAT/LC08/C01/T1_TOA")
#============================
# Download Map
#============================
def getData(collection, geometry, scale, name):
    """Return a download URL for *collection* restricted to *geometry*.

    The export is requested in EPSG:4326 at the given scale; *name* becomes
    the file name of the generated archive.
    """
    params = {
        'name': str(name),
        'scale': scale,
        'crs': 'EPSG:4326',
        'region': str(geometry)
    }
    return ee.Image(collection).getDownloadUrl(params)
def vhi(img):
    """Compute a per-image Vegetation Health Index: VHI = 0.5*VCI + 0.5*TCI.

    TCI comes from the "SR" brightness-temperature band, VCI from the NDVI
    of the "nir"/"red" bands.  NDVI extremes are reduced over the module
    global `region_Gh` at 3000 m -- assumes region_Gh was set by a caller
    (e.g. ndwi_anomaly/VHI) before this is mapped; TODO confirm.
    """
    property_list = ['system:index','system:time_start', 'system:time_end']
    # 0.1 rescales the stored brightness-temperature values -- presumably
    # the collection stores scaled integers; verify against the dataset.
    brightness_temp = ee.Image(img).select("SR").multiply(0.1)
    ndvi = ee.Image(img).normalizedDifference(["nir", "red"]).rename("NDVI")
    # Per-image band reductions give the BT extremes used by TCI.
    BT_max = ee.Image(brightness_temp).reduce(ee.Reducer.max())
    BT_min = ee.Image(brightness_temp).reduce(ee.Reducer.min())
    tci = ee.Image(BT_max).subtract(brightness_temp).multiply(100).divide(ee.Image(BT_max).subtract(BT_min))
    ndvi_min = ee.Image(ndvi).reduceRegion(ee.Reducer.min(),region_Gh,3000).get("NDVI")
    ndvi_max = ee.Image(ndvi).reduceRegion(ee.Reducer.max(),region_Gh,3000).get("NDVI")
    # VCI: NDVI rescaled to 0-100 between the regional min and max.
    vci = ee.Image(ndvi).subtract(ee.Number(ndvi_min)).multiply(100).divide(ee.Number(ndvi_max).subtract(ee.Number(ndvi_min)))
    VHI = ee.Image(vci).multiply(0.5).add(ee.Image(tci).multiply(0.5))
    VHI_I = VHI.rename("VHI")
    # Keep the time/index properties so date filtering still works downstream.
    return VHI_I.copyProperties(img, property_list)
def lst5(img):
    """Estimate Landsat-5 land-surface temperature (deg C) for one image.

    Pipeline: thermal DN -> TOA radiance -> NDVI-derived vegetation
    proportion -> emissivity -> single-channel LST, then Kelvin -> Celsius.
    NOTE(review): Ml/Al (0.0003342/0.01) look like Landsat-8 TIRS rescaling
    constants -- confirm they apply to the L5 B6 band.
    NOTE(review): subtracts 273.5 rather than 273.15 (lst_map uses 273.15)
    -- confirm intended.
    """
    property_list = ['system:index','system:time_start', 'system:time_end']
    band_to_toa = ee.Image(img).select(["B6"])
    toa_radiance = ee.Image(band_to_toa).expression('(Ml * band) + Al', {
        'Ml': 0.0003342,
        'Al': 0.01,
        'band': band_to_toa
    })
    brightness_temp = ee.Image(img).select("SR").multiply(0.1)
    ndvi = ee.Image(img).normalizedDifference(["nir", "red"])
    ndvi_min = ee.Image(ndvi).reduce(ee.Reducer.min())
    ndvi_max = ee.Image(ndvi).reduce(ee.Reducer.max())
    # Proportion of vegetation, squared, then emissivity as a linear
    # function of it.
    pv = ee.Image(ndvi).subtract(ndvi_min).divide(ee.Image(ndvi_max).subtract(ndvi_min)).pow(2)
    lse = ee.Image(pv).expression('(0.004 * pv_img) + 0.986', {
        'pv_img': pv
    })
    lse_band = lse
    lse_log = ee.Image(lse_band).log()
    p = 14380  # presumably rho = h*c/k_B in micrometre-Kelvin -- confirm
    LST = ee.Image(lse_log).expression('BT / 1 + B10 * (BT / p) * lse_log', {
        'p': p,
        'BT': brightness_temp,
        'B10': toa_radiance,
        'lse_log': lse_log
    }).subtract(273.5)
    return ee.Image(LST).copyProperties(img, property_list)
def lst7(img):
    """Estimate Landsat-7 land-surface temperature (deg C) for one image.

    Same single-channel pipeline as lst5 (the two bodies are duplicates and
    could share a helper): thermal DN -> TOA radiance -> NDVI-based
    emissivity -> LST, then Kelvin -> Celsius.
    NOTE(review): subtracts 273.5 rather than 273.15 -- confirm intended.
    """
    property_list = ['system:index','system:time_start', 'system:time_end']
    band_to_toa = img.select(["B6"])
    toa_radiance = ee.Image(band_to_toa).expression('(Ml * band) + Al', {
        'Ml': 0.0003342,
        'Al': 0.01,
        'band': band_to_toa
    })
    brightness_temp = ee.Image(img).select("SR").multiply(0.1)
    ndvi = ee.Image(img).normalizedDifference(["nir", "red"])
    ndvi_min = ee.Image(ndvi).reduce(ee.Reducer.min())
    ndvi_max = ee.Image(ndvi).reduce(ee.Reducer.max())
    # Proportion of vegetation (squared), then emissivity from it.
    pv = ee.Image(ndvi).subtract(ndvi_min).divide(ee.Image(ndvi_max).subtract(ndvi_min)).pow(2)
    lse = ee.Image(pv).expression('(0.004 * pv_img) + 0.986', {
        'pv_img': pv
    })
    lse_band = lse
    lse_log = ee.Image(lse_band).log()
    p = 14380  # presumably rho = h*c/k_B in micrometre-Kelvin -- confirm
    LST = ee.Image(lse_log).expression('BT / 1 + B10 * (BT / p) * lse_log', {
        'p': p,
        'BT': brightness_temp,
        'B10': toa_radiance,
        'lse_log': lse_log
    }).subtract(273.5)
    return ee.Image(LST).copyProperties(img, property_list)
def lst8(img):
    """Estimate Landsat-8 land-surface temperature (deg C) for one image.

    Same single-channel pipeline as lst5/lst7 but reading the B10 thermal
    band, with named outputs for the intermediate rasters.
    NOTE(review): unlike lst5/lst7, the final expression is evaluated on
    ee.Image(img) rather than lse_log -- confirm this is intentional.
    NOTE(review): subtracts 273.5 rather than 273.15 -- confirm intended.
    """
    property_list = ['system:index','system:time_start', 'system:time_end']
    band_to_toa = img.select(["B10"])
    toa_radiance = ee.Image(band_to_toa).expression('(Ml * band) + Al', {
        'Ml': 0.0003342,
        'Al': 0.01,
        'band': band_to_toa
    }).rename('TOA_Radiance')
    brightness_temp = ee.Image(img).select("SR").multiply(0.1)
    ndvi = ee.Image(img).normalizedDifference(["nir", "red"])
    ndvi_min = ee.Image(ndvi).reduce(ee.Reducer.min())
    ndvi_max = ee.Image(ndvi).reduce(ee.Reducer.max())
    # Proportion of vegetation (squared), then emissivity from it.
    pv = ee.Image(ndvi).subtract(ndvi_min).divide(ee.Image(ndvi_max).subtract(ndvi_min)).pow(2)
    lse = ee.Image(pv).expression('(0.004 * pv_img) + 0.986', {
        'pv_img': pv
    }).rename('LSE')
    p = 14380  # presumably rho = h*c/k_B in micrometre-Kelvin -- confirm
    lse_band = lse
    lse_log = ee.Image(lse_band).log()
    LST = ee.Image(img).expression('BT / 1 + B10 * (BT / p) * lse_log', {
        'p': p,
        'BT': brightness_temp,
        'B10': toa_radiance,
        'lse_log': lse_log
    }).subtract(273.5)
    return ee.Image(LST).copyProperties(img, property_list)
def cloudfunction(img):
    """Apply the basic ACCA cloud mask to a daily Landsat 4, 5, or 7 image.

    Pixels whose simpleCloudScore 'cloud' band is >= 70 are masked out.
    """
    score = ee.Algorithms.Landsat.simpleCloudScore(img)
    clear = score.select(['cloud']).lt(ee.Image.constant(70))
    return img.mask(clear.mask(clear))
# calculate ndvi
def ndvi(img):
    """Compute NDVI from the 'nir'/'red' bands, preserving time metadata."""
    kept = ['system:index', 'system:time_start', 'system:time_end']
    index = img.normalizedDifference(["nir", "red"]).rename(["NDVI"])
    return index.copyProperties(img, kept)
def ndwi(img):
    """Compute NDWI from the 'green'/'nir' bands, preserving time metadata."""
    kept = ['system:index', 'system:time_start', 'system:time_end']
    index = img.normalizedDifference(["green", "nir"]).rename("NDWI")
    return index.copyProperties(img, kept)
#===========================================
# NDWI ANOMALY
#===========================================
def ndwi_anomaly(options):
    """Map the NDWI anomaly for one month/region from MODIS or Landsat.

    Returns a dict with mapid/token, a human-readable note, min/max and a
    download URL.  Anomaly = (month mean - baseline mean) / baseline std.
    NOTE(review): if options["satelite"] is neither "modis" nor "landsat",
    `collection` is never assigned and the else branch below raises
    NameError.
    """
    # rename the used option values
    date_month = int(options["date_month"])
    date_year = int(options["date_year"])
    satelite = options["satelite"]
    region_selected = str(options["region_selected"])
    region = options["region"]
    global region_Gh
    # Resolve the region of interest: explicit polygon, all of Ghana, or a
    # named sub-region from a Fusion Table.
    if region is not None:
        region_Gh = ee.Geometry.Polygon(region)
    else:
        if region_selected == "ghana":
            countries = ee.FeatureCollection('ft:1tdSwUL7MVpOauSgRzqVTOwdfy17KDbw-1d9omPw')
            region_Gh = countries.filter(ee.Filter.eq('Country', 'Ghana'))
        else:
            Ghana = ee.FeatureCollection('ft:1wF4uSA3CSYaCa9g93FRNXcL01-ThklMXRu92h-Vr')
            region_Gh = Ghana .filter(ee.Filter.eq('name', region_selected))
    # Define time range of the historical baseline
    startyear = 2000
    endyear = 2018
    # Set date in ee date format
    startdate = ee.Date.fromYMD(startyear,1,1)
    enddate = ee.Date.fromYMD(endyear,12,31)
    # make a list with years
    years = range(startyear, endyear)
    global scale, name
    months = range(1,12)
    if satelite == "modis":
        collection = ee.ImageCollection('MODIS/MCD43A4_006_NDWI').filterDate(startdate,enddate).filterBounds(region_Gh)
        scale = 250
    elif satelite == "landsat":
        scale = 250
        # filter on date and bounds, mask clouds, rename bands, map NDWI
        l5images = nl5.filterBounds(region_Gh.geometry()).map(cloudfunction).select(["B2","B4"],["green","nir"]).map(ndwi)
        l7images = nl7.filterBounds(region_Gh.geometry()).map(cloudfunction).select(["B2","B4"],["green","nir"]).map(ndwi)
        l8images = nl8.filterBounds(region_Gh.geometry()).map(cloudfunction).select(["B3","B5"],["green","nir"]).map(ndwi)
        # build the start/end date strings for the requested month
        year = date_year
        month = date_month
        endingInDays = monthrange(date_year,month)[1]
        startMonth = '-' + str(month) + '-01'
        endMonth = '-' + str(month) + '-' + str(endingInDays)
        #setting up the start and ending date
        startDate = str(year) + startMonth
        endDate = str(year) + endMonth
        #calculate ndwi for each image in imagecollection
        l578NDWI = ee.ImageCollection(ee.ImageCollection(l5images).merge(l7images)).merge(l8images)
    if satelite == "landsat":
        selected_year_month_data = ee.ImageCollection(l578NDWI).filterDate(startDate,endDate).mean()
        # Hard-coded historical NDWI mean/stddev for the Landsat record --
        # presumably precomputed offline; confirm provenance.
        col_mean = ee.Number(-0.2990153174811877)
        col_std = ee.Number( 0.1001232100835506)
        NDWI_anom = ee.Image(selected_year_month_data).subtract(col_mean).divide(col_std)
        # NOTE(review): `min`/`max` shadow the builtins for the rest of
        # this function.
        min = ee.Image(NDWI_anom).reduceRegion(ee.Reducer.min(), region_Gh, 3000).getInfo()['NDWI']
        max = ee.Image(NDWI_anom).reduceRegion(ee.Reducer.max(), region_Gh, 3000).getInfo()['NDWI']
    else:
        # build the start/end date strings for the requested month
        year = date_year
        month = date_month
        endingInDays = monthrange(date_year,month)[1]
        startMonth = '-' + str(month) + '-01'
        endMonth = '-' + str(month) + '-' + str(endingInDays)
        #setting up the start and ending date
        startDate = str(year) + startMonth
        endDate = str(year) + endMonth
        #select ndwi
        selected_NDWI = ee.ImageCollection(collection).filterDate(startDate,endDate).mean()
        # Baseline statistics over the full historical collection.
        histo_mean = ee.Image(ee.ImageCollection(collection).reduce(ee.Reducer.mean()))
        histo_std = ee.Image(ee.ImageCollection(collection).reduce(ee.Reducer.stdDev()))
        selected_year_month_data = ee.Image(selected_NDWI)
        NDWI_anom = ee.Image(selected_year_month_data).subtract(histo_mean).divide(histo_std)
        min = ee.Image(NDWI_anom).reduceRegion(ee.Reducer.min(), region_Gh, 3000).getInfo()['NDWI']
        max = ee.Image(NDWI_anom).reduceRegion(ee.Reducer.max(), region_Gh, 3000).getInfo()['NDWI']
        print(min,max)
    vizAnomaly = {
        'min':min, 'max':max,
        'palette': ','.join(["#e20000 ","32cd32","ffff00","ff8c00","#00f9f9","#3570dd","0000ff"])
    }
    notes = "NORMALIZED DIFFERENCE WATER INDEX ANOMALY calculated" + " for " + str(date_year) + "-" + str(date_month)
    name = notes
    download = getData(NDWI_anom,str(region_Gh.geometry().getInfo()['coordinates']),scale,"NDWI" + str(date_year) + "-" + str(date_month))
    mapid = ee.Image(NDWI_anom).clip(region_Gh).getMapId(vizAnomaly)
    col = {'mapid':mapid['mapid'],'token':mapid['token'],'note':notes ,'type':'ndwi_anomaly' ,'min':min,'max':max }
    col['download_data'] = download
    return col
#===========================================
# PRECIPITATION
#===========================================
def precipitation(options):
    """Map mean daily CHIRPS precipitation for one month/region.

    Returns a dict with mapid/token, a human-readable note, min/max and a
    download URL.
    """
    # rename the used option values
    date_month = int(options["date_month"])
    date_year = int(options["date_year"])
    region_selected = str(options["region_selected"])
    region = options["region"]
    global region_Gh,scale, name
    # Resolve the region of interest: explicit polygon, all of Ghana, or a
    # named sub-region from a Fusion Table.
    if region is not None:
        region_Gh = ee.Geometry.Polygon(region)
    else:
        if region_selected == "ghana":
            countries = ee.FeatureCollection('ft:1tdSwUL7MVpOauSgRzqVTOwdfy17KDbw-1d9omPw')
            region_Gh = countries.filter(ee.Filter.eq('Country', 'Ghana'))
        else:
            Ghana = ee.FeatureCollection('ft:1wF4uSA3CSYaCa9g93FRNXcL01-ThklMXRu92h-Vr')
            region_Gh = Ghana .filter(ee.Filter.eq('name', region_selected))
    # build the start/end date strings for the requested month
    year = date_year
    month = date_month
    endingInDays = monthrange(date_year,month)[1]
    startMonth = '-' + str(month) + '-01'
    endMonth = '-' + str(month) + '-' + str(endingInDays)
    #setting up the start and ending date
    startDate = str(year) + startMonth
    endDate = str(year) + endMonth
    print(startDate, endDate)
    collection1 = ee.ImageCollection('UCSB-CHG/CHIRPS/DAILY').filterDate(startDate,endDate).filterBounds(region_Gh).mean()
    #select Precipitation
    selected_Precipitation = ee.Image(collection1)
    # NOTE(review): `min`/`max` shadow the builtins below.
    max = ee.Image(selected_Precipitation).reduceRegion(ee.Reducer.max(), region_Gh, 3000).getInfo().get("precipitation")
    min = ee.Image(selected_Precipitation).reduceRegion(ee.Reducer.min(), region_Gh, 3000).getInfo().get("precipitation")
    vizAnomaly = {
        'min':min, 'max':max,
        'palette': ','.join(['#730000', '#E60000', '#FFAA00', '#FCD37F', '#FFFF00', '#FFFFFF', '#AAFF55', '#00FFFF', '#00AAFF', '#0000FF', '#0000AA'])
    }
    notes = "PRECIPITATION calculated" + " for " + str(date_year) + "-" + str(date_month)
    name = notes
    scale = 600
    download = getData(selected_Precipitation,str(region_Gh.geometry().getInfo()['coordinates']),scale,"Precipitation" + str(date_year) + "-" + str(date_month))
    mapid = ee.Image(selected_Precipitation).clip(region_Gh).getMapId(vizAnomaly)
    col = {'mapid':mapid['mapid'],'token':mapid['token'],'note':notes,'type':'precipitation','min':min,'max':max }
    col['download_data'] = download
    return col
#===========================================
# PRECIPITATION ANOMALY
#===========================================
def precipitation_anom(options):
    """Map the CHIRPS precipitation anomaly for one month/region.

    Anomaly = (month mean - full-record mean) / full-record stddev.
    Returns a dict with mapid/token, note, min/max and a download URL.
    """
    # rename the used option values
    date_month = int(options["date_month"])
    date_year = int(options["date_year"])
    region_selected = str(options["region_selected"])
    region = options["region"]
    global region_Gh,scale, name
    # Resolve the region of interest: explicit polygon, all of Ghana, or a
    # named sub-region from a Fusion Table.
    if region is not None:
        region_Gh = ee.Geometry.Polygon(region)
    else:
        if region_selected == "ghana":
            countries = ee.FeatureCollection('ft:1tdSwUL7MVpOauSgRzqVTOwdfy17KDbw-1d9omPw')
            region_Gh = countries.filter(ee.Filter.eq('Country', 'Ghana'))
        else:
            Ghana = ee.FeatureCollection('ft:1wF4uSA3CSYaCa9g93FRNXcL01-ThklMXRu92h-Vr')
            region_Gh = Ghana .filter(ee.Filter.eq('name', region_selected))
    # build the start/end date strings for the requested month
    year = date_year
    month = date_month
    endingInDays = monthrange(date_year,month)[1]
    startMonth = '-' + str(month) + '-01'
    endMonth = '-' + str(month) + '-' + str(endingInDays)
    #setting up the start and ending date
    startDate = str(year) + startMonth
    endDate = str(year) + endMonth
    # NOTE(review): `col` first holds the full CHIRPS record and is reused
    # below for the result dict.
    col = ee.ImageCollection('UCSB-CHG/CHIRPS/DAILY').filterBounds(region_Gh.geometry())
    collection1 = ee.ImageCollection('UCSB-CHG/CHIRPS/DAILY').filterDate(startDate,endDate).filterBounds(region_Gh.geometry())
    #anom = precip- avg_precip divided by std_precip
    #select Precipitation
    selected_Precipitation = ee.ImageCollection(collection1).mean()
    std_precip=ee.ImageCollection(col).reduce(ee.Reducer.stdDev())
    avg_precip=ee.ImageCollection(col).reduce(ee.Reducer.mean())
    precip_anom= ee.Image(selected_Precipitation).subtract(avg_precip).divide(std_precip)
    # NOTE(review): `min`/`max` shadow the builtins below.
    max = ee.Image(precip_anom).reduceRegion(ee.Reducer.max(), region_Gh, 3000).getInfo()['precipitation']
    min = ee.Image(precip_anom).reduceRegion(ee.Reducer.min(), region_Gh, 3000).getInfo()['precipitation']
    print(min,max)
    vizAnomaly = {
        'min':min, 'max':max,
        'palette': ','.join(['#730000', '#E60000', '#FFAA00', '#FCD37F', '#FFFF00', '#FFFFFF', '#AAFF55', '#00FFFF', '#00AAFF', '#0000FF', '#0000AA'])
    }
    notes = "PRECIPITATION Anomaly calculated" + " for " + str(date_year) + "-" + str(date_month)
    name = notes
    scale = 600
    download = getData(precip_anom,str(region_Gh.geometry().getInfo()['coordinates']),scale,"Precipitation" + str(date_year) + "-" + str(date_month))
    mapid = ee.Image(precip_anom).clip(region_Gh).getMapId(vizAnomaly)
    col = {'mapid':mapid['mapid'],'token':mapid['token'],'note':notes,'type':'precipitation','min':min,'max':max }
    col['download_data'] = download
    return col
#===========================================
# NDVI ANOMALY
#===========================================
def ndvi_anomaly(options):
    """Map the NDVI anomaly for one month/region from MODIS, Landsat or AVHRR.

    Anomaly = (month mean - baseline mean) / baseline stddev.  Returns a
    dict with mapid/token, note, min/max and a download URL.
    """
    # rename the used option values
    date_month = int(options["date_month"])
    date_year = int(options["date_year"])
    region_selected = str(options["region_selected"])
    region = options["region"]
    satelite = options["satelite"]
    global region_Gh,scale,name
    # Resolve the region of interest: explicit polygon, all of Ghana, or a
    # named sub-region from a Fusion Table.
    if region is not None:
        region_Gh = ee.Geometry.Polygon(region)
    else:
        if region_selected == "ghana":
            countries = ee.FeatureCollection('ft:1tdSwUL7MVpOauSgRzqVTOwdfy17KDbw-1d9omPw')
            region_Gh = countries.filter(ee.Filter.eq('Country', 'Ghana'))
        else:
            Ghana = ee.FeatureCollection('ft:1wF4uSA3CSYaCa9g93FRNXcL01-ThklMXRu92h-Vr')
            region_Gh = Ghana .filter(ee.Filter.eq('name', region_selected))
    # Define time range of the historical baseline
    startyear = 2000
    endyear = 2018
    # Set date in ee date format
    startdate = ee.Date.fromYMD(startyear,1,1)
    enddate = ee.Date.fromYMD(endyear ,12,31)
    # make a list with years
    years = range(startyear, endyear)
    months = range(1,12)
    if satelite == "modis":
        scale = 250
        startyear = 2000
        endyear = 2018
        # Set date in ee date format
        startdate = ee.Date.fromYMD(startyear,1,1)
        enddate = ee.Date.fromYMD(endyear + 1 ,12,31)
        # make a list with years
        years = range(startyear, endyear)
        collection = ee.ImageCollection('MODIS/006/MOD13Q1').select('NDVI').filterDate(startdate,enddate).filterBounds(region_Gh)
    elif satelite == "landsat":
        scale = 250
        # mask clouds, rename bands, map NDVI over the three Landsat sensors
        l5images = nl5.filterBounds(region_Gh.geometry()).map(cloudfunction).select(["B4","B3"],["nir","red"]).map(ndvi)
        l7images = nl7.filterBounds(region_Gh.geometry()).map(cloudfunction).select(["B4","B3"],["nir","red"]).map(ndvi)
        l8images = nl8.filterBounds(region_Gh.geometry()).map(cloudfunction).select(["B5","B4"],["nir","red"]).map(ndvi)
        #calculate ndvi for each image in imagecollection
        l578NDVI = ee.ImageCollection(ee.ImageCollection(l5images).merge(l7images)).merge(l8images)
        # build the start/end date strings for the requested month
        year = date_year
        month = date_month
        endingInDays = monthrange(date_year,month)[1]
        startMonth = '-' + str(month) + '-01'
        endMonth = '-' + str(month) + '-' + str(endingInDays)
        #setting up the start and ending date
        startDate = str(year) + startMonth
        endDate = str(year) + endMonth
    elif satelite == "avhrr":
        # build the start/end date strings for the requested month
        scale = 5000
        year = date_year
        month = date_month
        endingInDays = monthrange(date_year,month)[1]
        startMonth = '-' + str(month) + '-01'
        endMonth = '-' + str(month) + '-' + str(endingInDays)
        #setting up the start and ending date
        startDate = str(year) + startMonth
        endDate = str(year) + endMonth
        collection = ee.ImageCollection('NOAA/CDR/AVHRR/NDVI/V4').select('NDVI').filterDate('1982-01-01','2018-12-31').filterBounds(region_Gh)
    #select ndvi
    if satelite == "avhrr":
        selected_ndvi = ee.ImageCollection(collection).filterDate(startDate,endDate).mean()
        # 0.0001 is presumably the dataset's NDVI scale factor -- confirm.
        histo_mean = ee.Image(ee.ImageCollection(collection).reduce(ee.Reducer.mean())).multiply(0.0001)
        histo_std = ee.Image(ee.ImageCollection(collection).reduce(ee.Reducer.stdDev())).multiply(0.0001)
        selected_year_month_data = ee.Image(selected_ndvi)
        # NOTE(review): the month mean is not rescaled by 0.0001 while the
        # baseline mean/std are -- confirm units match.
        ndvi_anom = ee.Image(selected_year_month_data).subtract(histo_mean).divide(histo_std)
        min = ee.Image(ndvi_anom).reduceRegion(ee.Reducer.min(), region_Gh, 3000).getInfo()['NDVI']
        max = ee.Image(ndvi_anom).reduceRegion(ee.Reducer.max(), region_Gh, 3000).getInfo()['NDVI']
        print(max,min)
    elif satelite == "landsat":
        selected_year_month_data = ee.ImageCollection(l578NDVI).filterDate(startDate,endDate).mean()
        # Hard-coded historical NDVI mean/stddev for the Landsat record --
        # presumably precomputed offline; confirm provenance.
        col_mean = ee.Number( 0.33004655922067055)
        col_std = ee.Number(0.11068838329126819)
        ndvi_anom = ee.Image(selected_year_month_data).subtract(col_mean).divide(col_std)
        min = ee.Image(ndvi_anom).reduceRegion(ee.Reducer.min(), region_Gh, 3000).getInfo()['NDVI']
        max = ee.Image(ndvi_anom).reduceRegion(ee.Reducer.max(), region_Gh, 3000).getInfo()['NDVI']
    else:
        # MODIS path: build the start/end date strings for the month
        year = date_year
        month = date_month
        endingInDays = monthrange(date_year,month)[1]
        startMonth = '-' + str(month) + '-01'
        endMonth = '-' + str(month) + '-' + str(endingInDays)
        #setting up the start and ending date
        startDate = str(year) + startMonth
        endDate = str(year) + endMonth
        selected_ndvi = ee.ImageCollection(collection).filterDate(startDate,endDate).mean()
        histo_mean = ee.Image(ee.ImageCollection(collection).reduce(ee.Reducer.mean()))
        histo_std = ee.Image(ee.ImageCollection(collection).reduce(ee.Reducer.stdDev()))
        selected_year_month_data = ee.Image(selected_ndvi)
        ndvi_anom = ee.Image(selected_year_month_data).subtract(histo_mean).divide(histo_std)
        min = ee.Image(ndvi_anom).reduceRegion(ee.Reducer.min(), region_Gh, 3000).getInfo()['NDVI']
        max = ee.Image(ndvi_anom).reduceRegion(ee.Reducer.max(), region_Gh, 3000).getInfo()['NDVI']
        print(max,min)
    vizAnomaly = {
        'min':min, 'max':max,
        'palette': ','.join(["87000A","7C3E28","EC712C","FABF45","FFFFFF","51FF78","3DCF4C","215229"])
    }
    notes = "NORMALIZED DIFFERENCE VEGETATION INDEX ANOMALY calculated" + " for " + str(date_year) + "-" + str(date_month)
    name = notes
    download = getData(ndvi_anom,str(region_Gh.geometry().getInfo()['coordinates']),scale,"NDVI" + str(date_year) + "-" + str(date_month))
    mapid = ee.Image(ndvi_anom).clip(region_Gh).getMapId(vizAnomaly)
    col = {'mapid':mapid['mapid'],'token':mapid['token'],'note':notes ,'type':'ndvi_anomaly' ,'min':min,'max':max }
    col['download_data'] = download
    return col
#===========================================
# VEGETATION HEALTH INDEX
#===========================================
def VHI(options):
    """Map the Vegetation Health Index (0.5*VCI + 0.5*TCI) for one month.

    Supports AVHRR (computed inline from NDVI + brightness temperature) and
    Landsat (via the per-image vhi() mapper).  Returns a dict with
    mapid/token, note, min/max and a download URL.
    NOTE(review): if options["satelite"] is neither "avhrr" nor "landsat",
    VHI/vizAnomaly/vhi_min are never assigned and the tail of this function
    raises NameError.  The note text always says AVHRR, even on the
    Landsat path.
    """
    # rename the used option values
    date_month = int(options["date_month"])
    date_year = int(options["date_year"])
    satelite = options["satelite"]
    region_selected = str(options["region_selected"])
    region = options["region"]
    global region_Gh,scale, name
    # Resolve the region of interest: explicit polygon, all of Ghana, or a
    # named sub-region from a Fusion Table.
    if region is not None:
        region_Gh = ee.Geometry.Polygon(region)
    else:
        if region_selected == "ghana":
            countries = ee.FeatureCollection('ft:1tdSwUL7MVpOauSgRzqVTOwdfy17KDbw-1d9omPw')
            region_Gh = countries.filter(ee.Filter.eq('Country', 'Ghana'))
        else:
            Ghana = ee.FeatureCollection('ft:1wF4uSA3CSYaCa9g93FRNXcL01-ThklMXRu92h-Vr')
            region_Gh = Ghana .filter(ee.Filter.eq('name', region_selected))
    # Define time range
    if satelite == "avhrr":
        scale = 600
        # build the start/end date strings for the requested month
        year = date_year
        month = date_month
        endingInDays = monthrange(date_year,month)[1]
        startMonth = '-' + str(month) + '-01'
        endMonth = '-' + str(month) + '-' + str(endingInDays)
        #setting up the start and ending date
        startDate = str(year) + startMonth
        endDate = str(year) + endMonth
        collection = ee.ImageCollection('NOAA/CDR/AVHRR/NDVI/V4').select('NDVI').filterDate(startDate,endDate).filterBounds(region_Gh).mean()
        #select ndvi; 0.0001 is presumably the NDVI scale factor -- confirm
        selected_ndvi = ee.Image(collection).multiply(0.0001)
        print(ee.Image(selected_ndvi).reduceRegion(ee.Reducer.min(), region_Gh, 3000).getInfo())
        Brightness_Temp = ee.ImageCollection('NOAA/CDR/AVHRR/SR/V4').select('BT_CH4').filterDate(startDate,endDate).filterBounds(region_Gh).mean()
        selected_bt = ee.Image(Brightness_Temp).multiply(0.01)
        # Normalize the NDVI into a 0-100 Vegetation Condition Index.
        # NOTE(review): `min`/`max` shadow the builtins below.
        min = ee.Image(selected_ndvi).reduceRegion(ee.Reducer.min(), region_Gh, 3000).getInfo()['NDVI']
        max = ee.Image(selected_ndvi).reduceRegion(ee.Reducer.max(), region_Gh, 3000).getInfo()['NDVI']
        VCI = ee.Image(selected_ndvi).subtract(ee.Number(min)).multiply(100).divide(ee.Number(max).subtract(ee.Number(min)))
        bt_min = ee.Image(selected_bt).reduceRegion(ee.Reducer.min(), region_Gh, 3000).getInfo().get("BT_CH4")
        bt_max = ee.Image(selected_bt).reduceRegion(ee.Reducer.max(), region_Gh, 3000).getInfo().get("BT_CH4")
        TCI = selected_bt.subtract(ee.Number(bt_max)).multiply(-100).divide(ee.Number(bt_max).subtract(bt_min))
        VHI = VCI.multiply(0.5).add(TCI.multiply(0.5))
        # The combined image keeps the 'NDVI' band name on this path.
        vhi_min = ee.Image(VHI).reduceRegion(ee.Reducer.min(), region_Gh, 3000).getInfo()['NDVI']
        vhi_max = ee.Image(VHI).reduceRegion(ee.Reducer.max(), region_Gh, 3000).getInfo()['NDVI']
        vizAnomaly = {
            'min':vhi_min, 'max':vhi_max,
            'palette': ','.join(['#087702','#52f904','#ffee00','#ff7700','#ef0404'])
        }
    elif satelite == "landsat":
        scale = 250
        # build the start/end date strings for the requested month
        year = date_year
        month = date_month
        endingInDays = monthrange(date_year,month)[1]
        startMonth = '-' + str(month) + '-01'
        endMonth = '-' + str(month) + '-' + str(endingInDays)
        #setting up the start and ending date
        startDate = str(year) + startMonth
        endDate = str(year) + endMonth
        # Combine TOA + thermal collections, mask clouds, map per-image vhi()
        l5images = ee.ImageCollection(nl5.combine(btl5).filterBounds(region_Gh.geometry())).map(cloudfunction).select(["B4","B3","SR"],["nir","red","SR"]).map(vhi)
        l7images = ee.ImageCollection(nl7.combine(btl7).filterBounds(region_Gh.geometry())).map(cloudfunction).select(["B4","B3","SR"],["nir","red","SR"]).map(vhi)
        l8images = ee.ImageCollection(nl8.combine(btl8).filterBounds(region_Gh.geometry())).map(cloudfunction).select(["B5","B4","SR"],["nir","red","SR"]).map(vhi)
        total_col = ee.ImageCollection(ee.ImageCollection(l5images).merge(l7images)).merge(l8images)
        VHI = ee.ImageCollection(total_col).filterDate(startDate,endDate).mean()
        vhi_min = ee.Image(VHI).reduceRegion(ee.Reducer.min(), region_Gh, 3000).getInfo()['VHI']
        vhi_max = ee.Image(VHI).reduceRegion(ee.Reducer.max(), region_Gh, 3000).getInfo()['VHI']
        vizAnomaly = {
            'min':vhi_min, 'max':vhi_max,
            'palette': ','.join(['#087702','#52f904','#ffee00','#ff7700','#ef0404'])
        }
    notes = "VEGETATION HEALTH INDEX calculated from NOAA/CDR/AVHRR data" + " for " + str(date_year) + "-" + str(date_month)
    name = notes
    download = getData(VHI,str(region_Gh.geometry().getInfo()['coordinates']),scale,"VHI" + str(date_year) + "-" + str(date_month))
    mapid = ee.Image(VHI).clip(region_Gh).getMapId(vizAnomaly)
    col = {'mapid':mapid['mapid'],'token':mapid['token'] ,'note':notes , 'type':'vhi','min':vhi_min,'max':vhi_max }
    col['download_data'] = download
    return col
def lst_map(img):
    """Convert a MODIS LST image from scaled Kelvin to degrees Celsius.

    0.02 is the MOD11A2 LST scale factor; subtracting 273.15 converts
    Kelvin to Celsius.  Time properties are preserved for date filtering.
    Fix: the second property name was misspelled 'syst em:time_end' (stray
    space), so the end-time property was silently never copied; every other
    function in this module uses 'system:time_end'.
    """
    return (img.multiply(0.02).subtract(273.15)
            .copyProperties(img, ['system:time_start', 'system:time_end']))
#===========================================
# LST
#===========================================
def LST(options):
    """Compute a monthly mean Land Surface Temperature (LST) map.

    Depending on ``options["satelite"]`` the LST comes either from the
    MODIS MOD11A2 8-day product (converted to Celsius by ``lst_map``) or
    from a merged Landsat 5/7/8 collection processed by lst5/lst7/lst8.

    Keys read from ``options``:
        date_month, date_year    -- month/year to map
        satelite                 -- "modis" or "landsat"
        region, region_selected  -- polygon coordinates, or a named region

    Returns a dict with map tile id/token, a descriptive note, min/max
    pixel values and a download URL (same shape as the other builders).
    """
    date_month = int(options["date_month"])
    date_year = int(options["date_year"])
    satelite = options["satelite"]
    hist_year_start = 2000
    hist_year_end = 2018
    region_selected = str(options["region_selected"])
    region = options["region"]
    global region_Gh, scale, name
    # Resolve the area of interest: an explicit polygon wins over a name.
    if region is not None:
        region_Gh = ee.Geometry.Polygon(region)
    else:
        if region_selected == "ghana":
            countries = ee.FeatureCollection('ft:1tdSwUL7MVpOauSgRzqVTOwdfy17KDbw-1d9omPw')
            region_Gh = countries.filter(ee.Filter.eq('Country', 'Ghana'))
        else:
            Ghana = ee.FeatureCollection('ft:1wF4uSA3CSYaCa9g93FRNXcL01-ThklMXRu92h-Vr')
            region_Gh = Ghana.filter(ee.Filter.eq('name', region_selected))
    if satelite == "modis":
        startyear = hist_year_start
        endyear = hist_year_end
        scale = 250
        startdate = ee.Date.fromYMD(startyear, 1, 1)
        enddate = ee.Date.fromYMD(endyear, 12, 31)
        # BUG FIX: range(startyear, endyear) / range(1, 12) excluded the
        # last year and December from the monthly means entirely.
        years = range(startyear, endyear + 1)
        months = range(1, 13)
        collection = ee.ImageCollection('MODIS/006/MOD11A2').select('LST_Day_1km').filterDate(startdate, enddate).filterBounds(region_Gh)
        modLSTday = collection.map(lst_map)
        monthlyLST = ee.ImageCollection(calcMonthlyMean(modLSTday, years, months))
        # Pick the single requested year/month out of the monthly means.
        selected_LST = ee.Image(monthlyLST.filter(ee.Filter.eq('year', date_year)).filter(ee.Filter.eq('month', date_month)).mean())
        lst_max = ee.Image(selected_LST).reduceRegion(ee.Reducer.max(), region_Gh, 3000).getInfo()["LST_Day_1km"]
        lst_min = ee.Image(selected_LST).reduceRegion(ee.Reducer.min(), region_Gh, 3000).getInfo()["LST_Day_1km"]
        vizAnomaly = {
            'min': lst_min, 'max': lst_max,
            # BUG FIX: the last palette entry had a stray trailing space
            # ("#e20000 ") that corrupted the comma-joined palette string.
            'palette': ','.join(["0000ff", "32cd32", "ffff00", "ff8c00", "e20000"])
        }
    elif satelite == "landsat":
        year = date_year
        scale = 250
        month = date_month
        # Last calendar day of the requested month (handles leap years).
        endingInDays = monthrange(date_year, month)[1]
        startDate = str(year) + '-' + str(month) + '-01'
        endDate = str(year) + '-' + str(month) + '-' + str(endingInDays)
        l5images = ee.ImageCollection(nl5.combine(btl5).filterBounds(region_Gh.geometry())).map(cloudfunction).select(["B4", "B3", "B6", "SR"], ["nir", "red", "B6", "SR"]).map(lst5)
        l7images = ee.ImageCollection(nl7.combine(btl7).filterBounds(region_Gh.geometry())).map(cloudfunction).select(["B4", "B3", "B6_VCID_1", "SR"], ["nir", "red", "B6", "SR"]).map(lst7)
        l8images = ee.ImageCollection(nl8.combine(btl8).filterBounds(region_Gh.geometry())).map(cloudfunction).select(["B5", "B4", "B10", "SR"], ["nir", "red", "B10", "SR"]).map(lst8)
        total_col = ee.ImageCollection(ee.ImageCollection(l5images).merge(l7images)).merge(l8images)
        monthly_mean = ee.ImageCollection(total_col).filterDate(startDate, endDate).mean()
        selected_LST = ee.Image(monthly_mean)
        lst_max = ee.Image(selected_LST).reduceRegion(ee.Reducer.max(), region_Gh, 3000).getInfo()["SR"]
        lst_min = ee.Image(selected_LST).reduceRegion(ee.Reducer.min(), region_Gh, 3000).getInfo()["SR"]
        vizAnomaly = {
            # The visualization deliberately stretches from 0, not lst_min,
            # even though the returned dict reports the true minimum.
            'min': 0, 'max': lst_max,
            'palette': ','.join(["0000ff", "32cd32", "ffff00", "ff8c00", "e20000"])
        }
    mapid = ee.Image(selected_LST).clip(region_Gh).getMapId(vizAnomaly)
    notes = "LAND SURFACE TEMPERATURE calculated" + " for " + str(date_year) + "-" + str(date_month)
    name = notes
    download = getData(selected_LST, region_Gh.geometry().getInfo()['coordinates'], scale, "LST" + str(date_year) + "-" + str(date_month))
    col = {'mapid': mapid['mapid'], 'token': mapid['token'], 'note': notes, 'type': 'lst', 'min': lst_min, 'max': lst_max}
    col['download_data'] = download
    return col
#===========================================
# SMI
#===========================================
def SMI(options):
    """Compute a Soil Moisture Index (SMI) map for one month.

    SMI = (LST_max - LST) / (LST_max - LST_min), where LST_max / LST_min
    are modelled from NDVI via two per-region linear regressions (the dry
    and wet edges of the LST/NDVI scatter).

    Keys read from ``options``: date_month, date_year, satelite ("modis"
    selects MODIS products, anything else selects Landsat), region /
    region_selected.

    Returns the usual dict: map tile id/token, note, type, min/max and a
    download URL.
    """
    date_month = int(options["date_month"])
    date_year = int(options["date_year"])
    satelite = options["satelite"]
    region_selected = str(options["region_selected"])
    region = options["region"]
    global region_Gh, scale, name
    if region is not None:
        region_Gh = ee.Geometry.Polygon(region)
    else:
        if region_selected == "ghana":
            countries = ee.FeatureCollection('ft:1tdSwUL7MVpOauSgRzqVTOwdfy17KDbw-1d9omPw')
            region_Gh = countries.filter(ee.Filter.eq('Country', 'Ghana'))
        else:
            Ghana = ee.FeatureCollection('ft:1wF4uSA3CSYaCa9g93FRNXcL01-ThklMXRu92h-Vr')
            region_Gh = Ghana.filter(ee.Filter.eq('name', region_selected))
    year = date_year
    month = date_month
    # Last calendar day of the requested month (handles leap years).
    endingInDays = monthrange(date_year, month)[1]
    startDate = str(year) + '-' + str(month) + '-01'
    endDate = str(year) + '-' + str(month) + '-' + str(endingInDays)
    # ------------------------------------------------------------- LST
    if satelite == "modis":
        collection = ee.ImageCollection('MODIS/006/MOD11A2').select('LST_Day_1km').filterDate(startDate, endDate).filterBounds(region_Gh.geometry())
        modLSTday = collection.map(lst_map)
        selected_LST = modLSTday.mean()
        min_LST = modLSTday.reduce(ee.Reducer.min())
        max_LST = modLSTday.reduce(ee.Reducer.max())
    else:
        l5images = ee.ImageCollection(nl5.combine(btl5).filterBounds(region_Gh.geometry())).map(cloudfunction).select(["B4", "B3", "B6", "SR"], ["nir", "red", "B6", "SR"]).map(lst5)
        l7images = ee.ImageCollection(nl7.combine(btl7).filterBounds(region_Gh.geometry())).map(cloudfunction).select(["B4", "B3", "B6_VCID_1", "SR"], ["nir", "red", "B6", "SR"]).map(lst7)
        l8images = ee.ImageCollection(nl8.combine(btl8).filterBounds(region_Gh.geometry())).map(cloudfunction).select(["B5", "B4", "B10", "SR"], ["nir", "red", "B10", "SR"]).map(lst8)
        # BUG FIX: "year>1999 & year<2015" parsed as
        # "year > (1999 & year) < 2015" because bitwise & binds tighter
        # than comparisons, so the Landsat-7 branch was chosen for almost
        # every year; the boundary years 1999 and 2015 also fell through
        # every branch, leaving total_col undefined (NameError).
        if 2000 <= year < 2015:
            total_col = ee.ImageCollection(l7images)
        elif year < 2000:
            total_col = ee.ImageCollection(l5images)
        else:
            total_col = ee.ImageCollection(l8images)
        lst_col = ee.ImageCollection(total_col).filterDate(startDate, endDate)
        selected_LST = lst_col.mean()
        min_LST = lst_col.reduce(ee.Reducer.min())
        max_LST = lst_col.reduce(ee.Reducer.max())
    # ------------------------------------------------------------ NDVI
    if satelite == "modis":
        MODIS_NDVI = ee.ImageCollection('MODIS/006/MOD13Q1').select('NDVI').filterDate(startDate, endDate).filterBounds(region_Gh.geometry())
        selected_NDVI = MODIS_NDVI
        # MOD13Q1 NDVI carries a 10000 scale factor.
        NDVI = selected_NDVI.mean().divide(10000).clip(region_Gh)
        median_NDVI = selected_NDVI.reduce(ee.Reducer.median())
    else:
        l5images = nl5.filterBounds(region_Gh.geometry()).map(cloudfunction).select(["B4", "B3"], ["nir", "red"]).map(ndvi)
        l7images = nl7.filterBounds(region_Gh.geometry()).map(cloudfunction).select(["B4", "B3"], ["nir", "red"]).map(ndvi)
        l8images = nl8.filterBounds(region_Gh.geometry()).map(cloudfunction).select(["B5", "B4"], ["nir", "red"]).map(ndvi)
        # Same precedence BUG FIX as the LST branch above.
        if 2000 <= year < 2015:
            total_col = ee.ImageCollection(l7images)
        elif year < 2000:
            total_col = ee.ImageCollection(l5images)
        else:
            total_col = ee.ImageCollection(l8images)
        selected_NDVI = ee.ImageCollection(total_col).filterDate(startDate, endDate)
        NDVI = selected_NDVI.mean().clip(region_Gh)
        median_NDVI = selected_NDVI.reduce(ee.Reducer.median())
    # ----------------- wet edge: regress min LST on NDVI -----------------
    y = ee.Image(min_LST)          # dependent: minimum LST
    x = ee.Image(median_NDVI)      # independent: median NDVI
    b = ee.Image(1).rename('b')    # constant band for the intercept
    reg_img = ee.Image.cat(b, x, y)
    fit = ee.Image(reg_img).reduceRegion(ee.Reducer.linearRegression(2, 1), region_Gh.geometry(), 3000)
    # Fallback coefficients in case the regression produced nothing.
    fit = fit.combine({"coefficients": ee.Array([[1], [1]])}, False)
    slo = (ee.Array(fit.get('coefficients')).get([1, 0])).getInfo()
    inte = (ee.Array(fit.get('coefficients')).get([0, 0])).getInfo()
    # ----------------- dry edge: regress max LST on NDVI -----------------
    y1 = max_LST
    x1 = median_NDVI
    b1 = ee.Image(1).rename('b')
    reg_img1 = ee.Image.cat(b1, x1, y1)
    fit1 = ee.Image(reg_img1).reduceRegion(ee.Reducer.linearRegression(2, 1), region_Gh.geometry(), 3000)
    fit1 = fit1.combine({"coefficients": ee.Array([[1], [1]])}, False)
    slo1 = (ee.Array(fit1.get('coefficients')).get([1, 0])).getInfo()
    int1 = (ee.Array(fit1.get('coefficients')).get([0, 0])).getInfo()
    # ------------------------------- SMI --------------------------------
    # LST_max = slo1 * NDVI + int1 (dry edge); LST_min = slo * NDVI + inte.
    LST_max = ee.Image(NDVI).multiply(ee.Number(slo1)).add(ee.Number(int1))
    LST_min = ee.Image(NDVI).multiply(ee.Number(slo)).add(ee.Number(inte))
    smi_img = ee.Image(LST_max).subtract(selected_LST).divide(ee.Image(LST_max).subtract(LST_min))
    if satelite == "modis":
        smi_max = ee.Image(smi_img).reduceRegion(ee.Reducer.max(), region_Gh, 3000).getInfo()['NDVI']
        smi_min = ee.Image(smi_img).reduceRegion(ee.Reducer.min(), region_Gh, 3000).getInfo()['NDVI']
    else:
        # Fixed stretch for Landsat, matching the original behaviour.
        smi_max = 2.3
        smi_min = -2.3
    vizAnomaly = {
        'min': smi_min, 'max': smi_max,
        'palette': ','.join(['#730000', '#E60000', '#FFAA00', '#FCD37F', '#FFFF00', '#FFFFFF', '#AAFF55', '#00FFFF', '#00AAFF', '#0000FF', '#0000AA'])
    }
    notes = "SOIL MOISTURE INDEX calculated" + " for " + str(date_year) + "-" + str(date_month)
    scale = 300
    name = notes
    mapid = ee.Image(smi_img).clip(region_Gh).getMapId(vizAnomaly)
    download = getData(smi_img, str(region_Gh.geometry().getInfo()['coordinates']), scale, "SMI" + str(date_year) + "-" + str(date_month))
    col = {'mapid': mapid['mapid'], 'token': mapid['token'], 'note': notes, 'type': 'smi', 'min': smi_min, 'max': smi_max}
    col['download_data'] = download
    return col
#===========================================
# COMPUTE INDICES
#===========================================
def indices(options):
    """Dispatch options["indices"] to the matching index computation.

    Each handler returns a result dict; ee.EEException failures are
    converted into {'error': ...} dicts so callers always receive a dict.
    Returns None for an unknown index name (as the original fall-through
    did).
    """
    global data
    # index key -> (handler, human-readable label used in error messages)
    handlers = {
        "ndvi anomaly": (ndvi_anomaly, "NORMALIZED DIFFERENCE VEGETATION INDEX ANOMALY"),
        "ndwi anomaly": (ndwi_anomaly, "NORMALIZED DIFFERENCE WATER INDEX ANOMALY"),
        "precipitation anom": (precipitation_anom, "Precipitation ANOMALY"),
        "smi": (SMI, "SOIL MOISTURE INDEX"),
        "lst": (LST, "LAND SURFACE TEMPERATURE"),
        "vhi": (VHI, "VEGETATION HEALTH INDEX"),
        "precipitation": (precipitation, "PRECIPITATION"),
        "spi": (spi, "STANDARDIZED PRECIPITATION INDEX"),
    }
    selected_indices = options["indices"]
    if selected_indices not in handlers:
        return None
    handler, label = handlers[selected_indices]
    # CONSISTENCY FIX: smi/lst/spi previously let ee.EEException propagate
    # while every other index returned an {'error': ...} dict (the
    # commented-out block that followed spi showed the same intent); all
    # handlers are now wrapped uniformly.
    try:
        data = handler(options)
    except ee.EEException as e:
        data = {'error': 'Failed to Compute ' + label + ' Data . Error Stated::, ' + str(e)}
    return data
def log(img):
    """Return the per-pixel natural logarithm of *img* as an ee.Image."""
    image = ee.Image(img)
    return image.log()
# calculate the monthly mean
def Cdf(x, shape, scale):
    """Gamma CDF building block: incomplete gamma of x/scale with *shape*.

    Evaluated per pixel via ee.Image.gammainc.
    """
    normalized = ee.Image(x).divide(ee.Image(scale))
    return ee.Image(shape).gammainc(normalized)
def norm_ppf(x,mean,std):
    # NOTE(review): named like a normal percent-point function (inverse CDF),
    # but the expression computes (mean - std) * sqrt(2) * erfcInv(2*x).
    # The textbook PPF is mean + std * sqrt(2) * erfInv(2x - 1)
    # = mean - std * sqrt(2) * erfcInv(2x), i.e. std should multiply the
    # erfcInv term, not be subtracted from mean first -- confirm the intended
    # formula before relying on this helper.
    return ee.Image(mean).subtract(ee.Image(std)).multiply(ee.Number(2).sqrt()).multiply(ee.Image(x).multiply(2).erfcInv())
#===========================================
# SPI
#===========================================
###### 3 MONTHS SELECTING
def hist_datelist(yearlist, selected):
    """Build the list of ee.Date month-starts for a 3-month SPI window.

    For each year in *yearlist* the window ends at month *selected*; for
    January/February the window reaches back into the previous year.
    Returns an ee.List of ee.Date objects (month starts).
    """
    dates = []
    if selected == 1:
        # Window: Nov/Dec of the previous year plus January itself.
        for yr in yearlist:
            prev = yr - 1
            dates.append(ee.Date.fromYMD(prev, 11, 1))
            dates.append(ee.Date.fromYMD(prev, 12, 1))
            dates.append(ee.Date.fromYMD(prev + 1, selected, 1))
    elif selected == 2:
        # Window: Dec of the previous year plus Jan/Feb.
        for yr in yearlist:
            prev = yr - 1
            dates.append(ee.Date.fromYMD(prev, 12, 1))
            # NOTE(review): the original appends the Jan/Feb pair as a
            # nested list rather than two flat entries; behaviour kept
            # as-is -- confirm downstream filtering handles it.
            dates.append([ee.Date.fromYMD(prev + 1, selected - 1, 1),
                          ee.Date.fromYMD(prev + 1, selected, 1)])
    elif selected >= 3:
        # Window: the three months ending at *selected*, same year.
        window = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12][selected - 3:selected]
        for yr in yearlist:
            for m in window:
                dates.append(ee.Date.fromYMD(yr, m, 1))
    return ee.List(dates)
def mlog(img):
    """Per-image natural logarithm, for use with ImageCollection.map."""
    casted = ee.Image(img)
    return casted.log()
def MLestimator(data,img):
    """Fit a gamma distribution to *data* per pixel and evaluate the
    incomplete gamma function of *img* under that fit.

    Uses the closed-form (Thom-style) maximum-likelihood approximation:
    A = ln(mean) - mean(ln x); shape B = (1 + sqrt(1 + 4A/3)) / (4A);
    scale a = mean / B.
    """
    # A = log of the sample mean minus the mean of the logs.
    x_ = ee.ImageCollection(data).reduce(ee.Reducer.mean()).log()
    y_ = ee.ImageCollection(data).map(mlog).reduce(ee.Reducer.mean())
    A = ee.Image(x_).subtract(y_)
    # Shape parameter of the fitted gamma distribution.
    B = ee.Image(1).add(ee.Image(1).add(A.multiply(4).divide(3)).sqrt()).divide(A.multiply(4))
    # Scale parameter: sample mean divided by the shape.
    a = ee.ImageCollection(data).reduce(ee.Reducer.mean()).divide(B)
    theta = ee.Image(img).divide(a)
    # NOTE(review): assumes ee's gammainc yields the regularized incomplete
    # gamma (i.e. the gamma CDF at img) -- confirm against the EE docs.
    return ee.Image(theta).gammainc(B)
def norm_inverseCDF(x):
    """Placeholder for a standard-normal inverse CDF; not implemented.

    The SPI pipeline currently inlines the Abramowitz & Stegun rational
    approximation directly in spi() instead of calling this.
    """
    pass
def getallnonzeros(img):
    """Binary mask: 1 where the pixel value is > 0, else 0."""
    image = ee.Image(img)
    return image.gt(0)
def getallzeros(img):
    """Binary mask: 1 where the pixel value is < 1, else 0."""
    image = ee.Image(img)
    return image.lt(1)
def spi(options):
    """Compute a 3-month Standardized Precipitation Index (SPI) map.

    Monthly CHIRPS precipitation totals (1981-2018) form the historical
    record; a gamma distribution is fitted per pixel (MLestimator) and the
    cumulative probability of the selected 3-month window is converted to
    a z-score with the Abramowitz & Stegun 26.2.23 rational approximation
    of the normal inverse CDF.

    Keys read from ``options``: date_month, date_year, region /
    region_selected.  Returns the usual dict: map tile id/token, note,
    type, min/max and a download URL.
    """
    date_month = int(options["date_month"])
    date_year = int(options["date_year"])
    region_selected = str(options["region_selected"])
    region = options["region"]
    global region_Gh, scale, name
    if region is not None:
        region_Gh = ee.Geometry.Polygon(region)
    else:
        if region_selected == "ghana":
            countries = ee.FeatureCollection('ft:1tdSwUL7MVpOauSgRzqVTOwdfy17KDbw-1d9omPw')
            region_Gh = countries.filter(ee.Filter.eq('Country', 'Ghana'))
        else:
            Ghana = ee.FeatureCollection('ft:1wF4uSA3CSYaCa9g93FRNXcL01-ThklMXRu92h-Vr')
            region_Gh = Ghana.filter(ee.Filter.eq('name', region_selected))
    # Historical record boundaries.
    startyear = 1981
    endyear = 2017
    startdate = ee.Date.fromYMD(startyear, 1, 1)
    enddate = ee.Date.fromYMD(endyear + 1, 12, 30)
    years = range(startyear, endyear + 1)
    # BUG FIX: the original used range(1, 12), so December monthly totals
    # were never produced and any 3-month window touching December
    # filtered down to an empty collection.
    months = range(1, 13)
    rainfal_data = ee.ImageCollection('UCSB-CHG/CHIRPS/DAILY').filterDate('1981-01-01', '2018-12-31').filterBounds(region_Gh)

    def calcMonthlyMean1(imageCollection):
        # Sum the daily rainfall into one image per (year, month).  The
        # ee.Date stored in 'system:time_start' is what hist_datelist's
        # output is matched against via Filter.inList.
        mylist = []
        for y in years:
            for m in months:
                w = imageCollection.filter(ee.Filter.calendarRange(y, y, 'year')).filter(ee.Filter.calendarRange(m, m, 'month')).sum()
                mylist.append(ee.Image(w.set('year', y).set('month', m).set('date', ee.Date.fromYMD(y, m, 1)).set('system:time_start', ee.Date.fromYMD(y, m, 1))))
        return ee.ImageCollection.fromImages(ee.List(mylist))

    monthlyPrecip = ee.ImageCollection(calcMonthlyMean1(rainfal_data))
    # Historical 3-month windows (1982-2016) and the target-year window.
    years_list = range(1982, 2017)
    date_hist = ee.List(hist_datelist(years_list, date_month))
    col_hist = ee.ImageCollection(monthlyPrecip).filter(ee.Filter.inList('system:time_start', date_hist))
    dates_year = ee.List(hist_datelist([date_year], date_month))
    col_year = monthlyPrecip.filter(ee.Filter.inList('system:time_start', dates_year))
    v_img = ee.ImageCollection(col_year).reduce(ee.Reducer.mean())
    # Probability of zero rainfall, q, for the zero-inflated gamma CDF.
    allnonz = ee.ImageCollection(col_hist).map(getallnonzeros)
    zero = ee.ImageCollection(col_hist).map(getallzeros)
    allnonzero = ee.Image(ee.ImageCollection(allnonz).reduce(ee.Reducer.sum())).reduceRegion(ee.Reducer.sum(), region_Gh, 30000).getInfo()['precipitation_sum']
    zero_p = ee.Image(ee.ImageCollection(zero).reduce(ee.Reducer.sum())).reduceRegion(ee.Reducer.sum(), region_Gh, 30000).getInfo()['precipitation_sum']
    q = ee.Number(zero_p).divide(allnonzero)
    # Gamma CDF of the current window under the historically-fitted gamma.
    Gx = MLestimator(col_hist, v_img)
    # Mixed CDF: H = q + (1 - q) * G(x).
    H = ee.Image(Gx).multiply(ee.Number(1).subtract(q)).add(q)
    # Abramowitz & Stegun 26.2.23 rational approximation, split into the
    # lower-half (H <= 0.5, negative SPI) and upper-half branches.
    SPI_exp = ee.Image(H).expression('(b <0 || b<= 0.5) ? -1*(t1- (c0+c1*t1+c2*t1**2)/(1+d1*t1+d2*t1**2+d3*t1**3)) ' + ':(b <0.5 || b<= 1)? (t2- (c0+c1*t2+c2*t2**2)/(1+d1*t2+d2*t2**2+d3*t2**3)) : null', {
        'b': H.select('precipitation_mean'),
        'null': ee.Image(0),
        't1': ee.Image(1).divide(ee.Image(H).pow(2)).log().sqrt(),
        't2': ee.Image(1).divide(ee.Image(1).subtract(H).pow(2)).log().sqrt(),
        'c0': 2.515517,
        'c1': 0.802583,
        'c2': 0.010328,
        'd1': 1.432788,
        'd2': 0.189269,
        'd3': 0.001308
    })
    spi_min = ee.Image(SPI_exp).reduceRegion(ee.Reducer.percentile([0]), region_Gh, 300000).getInfo()['constant']
    spi_max = ee.Image(SPI_exp).reduceRegion(ee.Reducer.percentile([100]), region_Gh, 300000).getInfo()['constant']
    vizAnomaly = {
        # Lower stretch bound pinned at -1 (drought colours) regardless of
        # the actual data minimum, which is still reported in the result.
        'min': -1, 'max': spi_max,
        'palette': ','.join(['#730000', '#E60000', '#FFAA00', '#FCD37F', '#FFFF00', '#FFFFFF', '#AAFF55', '#00FFFF', '#00AAFF', '#0000FF', '#0000AA'])
    }
    notes = "Standardized Precipitation Index calculated" + " for " + str(date_year) + "-" + str(date_month)
    scale = 1000
    name = notes
    download = getData(SPI_exp, str(region_Gh.geometry().getInfo()['coordinates']), scale, "SPI" + str(date_year) + "-" + str(date_month))
    mapid = ee.Image(SPI_exp).clip(region_Gh).getMapId(vizAnomaly)
    col = {'mapid': mapid['mapid'], 'token': mapid['token'], 'note': notes, 'type': 'spi', 'min': spi_min, 'max': spi_max}
    col['download_data'] = download
    return col
| 28.070876
| 202
| 0.667424
| 5,766
| 43,566
| 4.928894
| 0.076483
| 0.035961
| 0.02734
| 0.019388
| 0.813758
| 0.778712
| 0.743596
| 0.711295
| 0.686524
| 0.647326
| 0
| 0.038766
| 0.139673
| 43,566
| 1,551
| 203
| 28.088975
| 0.719485
| 0.081738
| 0
| 0.586819
| 0
| 0.002535
| 0.138823
| 0.027007
| 0.010139
| 0
| 0
| 0
| 0
| 1
| 0.038023
| false
| 0.001267
| 0.007605
| 0.007605
| 0.097592
| 0.010139
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c1751d36159c0c8d6820430acf74bee2dd4b8375
| 3,731
|
py
|
Python
|
tests/test_analysis.py
|
andrewheusser/quail
|
fce1152a3f7dc983f4a3143698fdc3e27f61d1d2
|
[
"MIT"
] | 17
|
2017-04-12T15:45:37.000Z
|
2021-07-12T21:25:50.000Z
|
tests/test_analysis.py
|
vishalbelsare/quail
|
6c847a49f31d953f3264294439576a23588b84d8
|
[
"MIT"
] | 80
|
2017-04-12T18:54:10.000Z
|
2021-06-05T17:28:33.000Z
|
tests/test_analysis.py
|
vishalbelsare/quail
|
6c847a49f31d953f3264294439576a23588b84d8
|
[
"MIT"
] | 8
|
2018-02-01T18:53:46.000Z
|
2020-01-12T17:36:33.000Z
|
# # -*- coding: utf-8 -*-
#
# from quail.analysis.analysis import analyze
# from quail.load import load_example_data
# from quail.egg import Egg
# import numpy as np
# import pytest
# import pandas as pd
#
# presented=[[['cat', 'bat', 'hat', 'goat'],['zoo', 'animal', 'zebra', 'horse']]]
# recalled=[[['bat', 'cat', 'goat', 'hat'],['animal', 'horse', 'zoo']]]
# egg = Egg(pres=presented,rec=recalled)
#
# def test_analysis_acc():
# print(analyze(egg, analysis='accuracy').data.values)
# assert np.array_equal(analyze(egg, analysis='accuracy').data.values,[np.array([1.]),np.array([.75])])
#
# def test_analysis_spc():
# assert np.array_equal(analyze(egg, analysis='spc').data.values,[np.array([ 1., 1., 1., 1.]),np.array([ 1., 1., 0., 1.])])
#
# def test_analysis_spc_listgroup():
# assert np.array_equal(analyze(egg, listgroup=[1,1], listname='Frank', analysis='spc').data.values,np.array([[ 1. , 1. , 0.5, 1. ]]))
#
# def test_analysis_pfr():
# assert np.array_equal(analyze(egg, analysis='pfr').data.values,[np.array([ 0., 1., 0., 0.]), np.array([ 0., 1., 0., 0.])])
#
# def test_analysis_pfr_listgroup():
# assert np.array_equal(analyze(egg, listgroup=['one','one'], analysis='pfr').data.values,np.array([[ 0., 1., 0., 0.]]))
#
# def test_analysis_lagcrp():
# # example from kahana lab lag-crp tutorial
# presented=[[['1', '2', '3', '4', '5', '6', '7', '8']]]
# recalled=[[['8', '7', '1', '2', '3', '5', '6', '4']]]
# egg = Egg(pres=presented,rec=recalled)
# assert np.allclose(analyze(egg, analysis='lagcrp').data.values,np.array([[0.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.333333, 0.333333, np.nan, 0.75, 0.333333, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]]), equal_nan=True)
#
# # MULTI SUBJECT
# presented=[[['cat', 'bat', 'hat', 'goat'],['zoo', 'animal', 'zebra', 'horse']],[['cat', 'bat', 'hat', 'goat'],['zoo', 'animal', 'zebra', 'horse']]]
# recalled=[[['bat', 'cat', 'goat', 'hat'],['animal', 'horse', 'zoo']],[['bat', 'cat', 'goat', 'hat'],['animal', 'horse', 'zoo']]]
# multisubj_egg = Egg(pres=presented,rec=recalled)
#
# def test_analysis_acc_multisubj():
# assert np.array_equal(analyze(multisubj_egg, analysis='accuracy').data.values,np.array([[ 1.],[ .75],[ 1.],[ .75]]))
#
# def test_analysis_spc_multisubj():
# assert np.array_equal(analyze(multisubj_egg, analysis='spc').data.values,np.array([[ 1., 1., 1., 1.],[ 1., 1., 0., 1.],[ 1., 1., 1., 1.],[ 1., 1., 0., 1.]]))
#
# def test_acc_best_euclidean():
# presented=[[[10, 20, 30, 40],[10, 20, 30, 40]]]
# recalled=[[[20, 10, 40, 30],[20, 40, 10]]]
# egg = Egg(pres=presented,rec=recalled)
# assert np.array_equal(egg.analyze('accuracy', match='best').data.values,[np.array([1.]),np.array([.75])])
#
# def test_acc_best_euclidean_3D():
# presented=[[[[10, 0, 0], [20, 0, 0], [30, 0, 0], [40, 0, 0]],
# [[10, 0, 0], [20, 0, 0], [30, 0, 0], [40, 0, 0]]]]
# recalled=[[[[20, 0, 0], [10, 0, 0], [40, 0, 0], [30, 0, 0]],
# [[20, 0, 0], [40, 0, 0], [10, 0, 0]]]]
# egg = Egg(pres=presented,rec=recalled)
# assert np.array_equal(egg.analyze('accuracy', match='best').data.values,[np.array([1.]),np.array([.75])])
#
# def test_acc_smooth_euclidean():
# presented=[[[10, 20, 30, 40],[10, 20, 30, 40]]]
# recalled=[[[20, 10, 40, 30],[20, 40, 10]]]
# egg = Egg(pres=presented,rec=recalled)
#
# def test_acc_smooth_euclidean_3d():
# presented=[[[[10, 0, 0], [20, 0, 0], [30, 0, 0], [40, 0, 0]],
# [[10, 0, 0], [20, 0, 0], [30, 0, 0], [40, 0, 0]]]]
# recalled=[[[[20, 0, 0], [10, 0, 0], [40, 0, 0], [30, 0, 0]],
# [[20, 0, 0], [40, 0, 0], [10, 0, 0]]]]
# egg = Egg(pres=presented,rec=recalled)
| 51.109589
| 202
| 0.555347
| 572
| 3,731
| 3.536713
| 0.132867
| 0.053386
| 0.026693
| 0.029659
| 0.814137
| 0.771132
| 0.744439
| 0.689076
| 0.578349
| 0.485418
| 0
| 0.098134
| 0.166979
| 3,731
| 72
| 203
| 51.819444
| 0.552767
| 0.960064
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c186f8db10e252fae1d1eeb80aa642502fa554d2
| 230
|
py
|
Python
|
napari_webcam/__init__.py
|
haesleinhuepf/napari-webcam
|
2557e7eab9b5b03e41780323f6aa90305eb87dd2
|
[
"BSD-3-Clause"
] | null | null | null |
napari_webcam/__init__.py
|
haesleinhuepf/napari-webcam
|
2557e7eab9b5b03e41780323f6aa90305eb87dd2
|
[
"BSD-3-Clause"
] | null | null | null |
napari_webcam/__init__.py
|
haesleinhuepf/napari-webcam
|
2557e7eab9b5b03e41780323f6aa90305eb87dd2
|
[
"BSD-3-Clause"
] | 1
|
2021-10-04T13:41:58.000Z
|
2021-10-04T13:41:58.000Z
|
try:
from ._version import version as __version__
except ImportError:
__version__ = "0.1.19"
from ._dock_widget import napari_experimental_provide_dock_widget
from ._function import napari_experimental_provide_function
| 23
| 65
| 0.821739
| 29
| 230
| 5.862069
| 0.551724
| 0.117647
| 0.282353
| 0.364706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020101
| 0.134783
| 230
| 9
| 66
| 25.555556
| 0.834171
| 0
| 0
| 0
| 0
| 0
| 0.026087
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c18a5bb536d95ac2bcd3027d4081293cdae48ffb
| 104
|
py
|
Python
|
script/forecasting/__init__.py
|
amogkam/noisepage
|
89b675e2c63e4b11c97ab800930ade7b0fd5b071
|
[
"MIT"
] | 1
|
2021-01-28T13:44:59.000Z
|
2021-01-28T13:44:59.000Z
|
script/forecasting/__init__.py
|
amogkam/noisepage
|
89b675e2c63e4b11c97ab800930ade7b0fd5b071
|
[
"MIT"
] | null | null | null |
script/forecasting/__init__.py
|
amogkam/noisepage
|
89b675e2c63e4b11c97ab800930ade7b0fd5b071
|
[
"MIT"
] | null | null | null |
import sys
from pathlib import Path
sys.path.insert(0, str((Path.cwd() / '..' / 'testing').absolute()))
| 26
| 67
| 0.663462
| 15
| 104
| 4.6
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01087
| 0.115385
| 104
| 3
| 68
| 34.666667
| 0.73913
| 0
| 0
| 0
| 0
| 0
| 0.086538
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c18c083d6109567fb74c6cd2bab480b9f9764df5
| 180
|
py
|
Python
|
iterdeciser/admin.py
|
mpavlase/responses-form-evaluator
|
d0066a44c078ece458ae44577afc207583116638
|
[
"MIT"
] | 1
|
2020-02-19T00:39:10.000Z
|
2020-02-19T00:39:10.000Z
|
iterdeciser/admin.py
|
mpavlase/responses-form-evaluator
|
d0066a44c078ece458ae44577afc207583116638
|
[
"MIT"
] | null | null | null |
iterdeciser/admin.py
|
mpavlase/responses-form-evaluator
|
d0066a44c078ece458ae44577afc207583116638
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from iterdeciser.models import Question, Answer, Response
admin.site.register(Question)
admin.site.register(Answer)
admin.site.register(Response)
| 25.714286
| 57
| 0.833333
| 24
| 180
| 6.25
| 0.5
| 0.18
| 0.34
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077778
| 180
| 6
| 58
| 30
| 0.903614
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c1a9ff194cf7cf378fbba704c4112ac5b9e541fd
| 139
|
py
|
Python
|
odoo-13.0/addons/l10n_latam_invoice_document/__init__.py
|
VaibhavBhujade/Blockchain-ERP-interoperability
|
b5190a037fb6615386f7cbad024d51b0abd4ba03
|
[
"MIT"
] | null | null | null |
odoo-13.0/addons/l10n_latam_invoice_document/__init__.py
|
VaibhavBhujade/Blockchain-ERP-interoperability
|
b5190a037fb6615386f7cbad024d51b0abd4ba03
|
[
"MIT"
] | null | null | null |
odoo-13.0/addons/l10n_latam_invoice_document/__init__.py
|
VaibhavBhujade/Blockchain-ERP-interoperability
|
b5190a037fb6615386f7cbad024d51b0abd4ba03
|
[
"MIT"
] | null | null | null |
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from . import models
from . import wizards
from . import report
| 27.8
| 74
| 0.776978
| 21
| 139
| 5.142857
| 0.809524
| 0.277778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.179856
| 139
| 4
| 75
| 34.75
| 0.947368
| 0.517986
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c1c78a6fc01e46a607b43dce7eb70dc69fb8700d
| 24
|
py
|
Python
|
earlier-2020/python_mod_tutorials/import_t/submodule/module_c.py
|
transcendentsky/py_tutorials
|
fed8e6c8d79f854a1cebcfd5c37297a163846208
|
[
"Apache-2.0"
] | 1
|
2018-06-18T12:09:33.000Z
|
2018-06-18T12:09:33.000Z
|
earlier-2020/python_mod_tutorials/import_t/submodule/module_c.py
|
transcendentsky/py_tutorials
|
fed8e6c8d79f854a1cebcfd5c37297a163846208
|
[
"Apache-2.0"
] | null | null | null |
earlier-2020/python_mod_tutorials/import_t/submodule/module_c.py
|
transcendentsky/py_tutorials
|
fed8e6c8d79f854a1cebcfd5c37297a163846208
|
[
"Apache-2.0"
] | 1
|
2018-06-18T12:13:21.000Z
|
2018-06-18T12:13:21.000Z
|
print("CCCCCCCCCCCCCCc")
| 24
| 24
| 0.833333
| 2
| 24
| 10
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 24
| 1
| 24
| 24
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
c1d9c78b259e34f8660a516cc16224eefd491f79
| 100
|
py
|
Python
|
Python/pythonHelloWorld.py
|
kennethsequeira/Hello-world
|
464227bc7d9778a4a2a4044fe415a629003ea77f
|
[
"MIT"
] | 1,428
|
2018-10-03T15:15:17.000Z
|
2019-03-31T18:38:36.000Z
|
Python/pythonHelloWorld.py
|
kennethsequeira/Hello-world
|
464227bc7d9778a4a2a4044fe415a629003ea77f
|
[
"MIT"
] | 1,162
|
2018-10-03T15:05:49.000Z
|
2018-10-18T14:17:52.000Z
|
Python/pythonHelloWorld.py
|
kennethsequeira/Hello-world
|
464227bc7d9778a4a2a4044fe415a629003ea77f
|
[
"MIT"
] | 3,909
|
2018-10-03T15:07:19.000Z
|
2019-03-31T18:39:08.000Z
|
print ("Welcome to Hacktober fest 2018")
print ("Hi ! I am Aman Singh Thakur")
print ("Hello World")
| 33.333333
| 40
| 0.71
| 16
| 100
| 4.4375
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047619
| 0.16
| 100
| 3
| 41
| 33.333333
| 0.797619
| 0
| 0
| 0
| 0
| 0
| 0.673267
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
c1f467b47564d873ec03de3c9ac93a97eb532f80
| 35
|
py
|
Python
|
dwio/nyu/graphs/hamilton.py
|
danielwalt-io/nyu-python
|
be706940eb06d69dd331cf96d3327bbd5963d28b
|
[
"MIT"
] | null | null | null |
dwio/nyu/graphs/hamilton.py
|
danielwalt-io/nyu-python
|
be706940eb06d69dd331cf96d3327bbd5963d28b
|
[
"MIT"
] | null | null | null |
dwio/nyu/graphs/hamilton.py
|
danielwalt-io/nyu-python
|
be706940eb06d69dd331cf96d3327bbd5963d28b
|
[
"MIT"
] | null | null | null |
class Hamilton(object):
pass
| 7
| 23
| 0.657143
| 4
| 35
| 5.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.257143
| 35
| 4
| 24
| 8.75
| 0.884615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
c1f514724bffefe4d19a258fc5ca15ef5aafef7b
| 31
|
py
|
Python
|
tests/ss_fake/services/awesome_service/two/model.py
|
jayvdb/steelscript
|
af4405c63f378db5466397fd566beb8226edf647
|
[
"MIT"
] | 10
|
2015-08-20T15:09:25.000Z
|
2021-07-05T00:25:19.000Z
|
tests/ss_fake/services/awesome_service/two/model.py
|
jayvdb/steelscript
|
af4405c63f378db5466397fd566beb8226edf647
|
[
"MIT"
] | 11
|
2015-04-16T21:55:06.000Z
|
2021-07-21T09:22:26.000Z
|
tests/ss_fake/services/awesome_service/two/model.py
|
jayvdb/steelscript
|
af4405c63f378db5466397fd566beb8226edf647
|
[
"MIT"
] | 11
|
2015-11-08T20:54:07.000Z
|
2021-07-21T16:23:48.000Z
|
class FooBar(object):
pass
| 10.333333
| 21
| 0.677419
| 4
| 31
| 5.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.225806
| 31
| 2
| 22
| 15.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
de0ebe783e76e158cd114ea0698737549b6001f7
| 89
|
py
|
Python
|
src/h_matchers/matcher/web/url/__init__.py
|
hypothesis/h-matcher
|
40a9a5577c33295f3ce651338df05a6814554568
|
[
"BSD-2-Clause"
] | null | null | null |
src/h_matchers/matcher/web/url/__init__.py
|
hypothesis/h-matcher
|
40a9a5577c33295f3ce651338df05a6814554568
|
[
"BSD-2-Clause"
] | 14
|
2019-10-31T17:24:09.000Z
|
2021-09-10T14:08:23.000Z
|
src/h_matchers/matcher/web/url/__init__.py
|
hypothesis/h-matchers
|
2544f2de107585f3964137d497a2349b689b1816
|
[
"BSD-2-Clause"
] | null | null | null |
"""A matcher that matches URLs."""
from h_matchers.matcher.web.url.fluent import AnyURL
| 22.25
| 52
| 0.764045
| 14
| 89
| 4.785714
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11236
| 89
| 3
| 53
| 29.666667
| 0.848101
| 0.314607
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a9ab1b00a28a9d61d0c70aed4ff07242848a1ef7
| 86
|
py
|
Python
|
task_latihan/models/__init__.py
|
komarr007/odoo-app
|
7888fbd299ea3eb18e0b0d1651bac21e98c935f3
|
[
"Unlicense"
] | null | null | null |
task_latihan/models/__init__.py
|
komarr007/odoo-app
|
7888fbd299ea3eb18e0b0d1651bac21e98c935f3
|
[
"Unlicense"
] | null | null | null |
task_latihan/models/__init__.py
|
komarr007/odoo-app
|
7888fbd299ea3eb18e0b0d1651bac21e98c935f3
|
[
"Unlicense"
] | null | null | null |
from . import service_team
from . import booking_order
from . import sale_work_order
| 28.666667
| 29
| 0.813953
| 13
| 86
| 5.076923
| 0.615385
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151163
| 86
| 3
| 29
| 28.666667
| 0.90411
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
a9ce326d3de307f5765cd92b8c6ec2fef069979c
| 27
|
py
|
Python
|
lru_cache/__init__.py
|
lceames/lru-cache
|
9812edf5372d49f3719368d9f9962e27e03f5953
|
[
"MIT"
] | null | null | null |
lru_cache/__init__.py
|
lceames/lru-cache
|
9812edf5372d49f3719368d9f9962e27e03f5953
|
[
"MIT"
] | null | null | null |
lru_cache/__init__.py
|
lceames/lru-cache
|
9812edf5372d49f3719368d9f9962e27e03f5953
|
[
"MIT"
] | null | null | null |
from .cache import LRUCache
| 27
| 27
| 0.851852
| 4
| 27
| 5.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 27
| 1
| 27
| 27
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
a9e5a929daeb637efbacb6ca12b0780473f751a7
| 173
|
py
|
Python
|
paperswithcode/__init__.py
|
Kabongosalomon/paperswithcode-client
|
3b85082aa897312424976577bcbc6305e64acac0
|
[
"Apache-2.0"
] | 78
|
2020-10-26T11:08:41.000Z
|
2022-03-31T18:38:40.000Z
|
paperswithcode/__init__.py
|
Kabongosalomon/paperswithcode-client
|
3b85082aa897312424976577bcbc6305e64acac0
|
[
"Apache-2.0"
] | 15
|
2020-10-31T11:46:07.000Z
|
2022-01-21T09:01:43.000Z
|
paperswithcode/__init__.py
|
Kabongosalomon/paperswithcode-client
|
3b85082aa897312424976577bcbc6305e64acac0
|
[
"Apache-2.0"
] | 13
|
2020-11-12T00:07:39.000Z
|
2022-01-14T03:07:45.000Z
|
# Explicitly declare the package's public API: the client class and the
# version identifiers re-exported from the two submodules below.
__all__ = ["PapersWithCodeClient", "version", "__version__"]
from paperswithcode.client import PapersWithCodeClient
from paperswithcode.version import version, __version__
| 34.6
| 60
| 0.83237
| 15
| 173
| 8.8
| 0.466667
| 0.212121
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086705
| 173
| 4
| 61
| 43.25
| 0.835443
| 0
| 0
| 0
| 0
| 0
| 0.219653
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e711aad87b80c0bcca6adf0a3a540f6b2887ffcf
| 92
|
py
|
Python
|
learning/__init__.py
|
ZJU-Robotics-Lab/CICT
|
ff873a03ab03d9113b8db96d26246939bb5da0d4
|
[
"MIT"
] | 12
|
2021-02-09T05:08:36.000Z
|
2022-02-24T07:51:30.000Z
|
learning/__init__.py
|
ZJU-Robotics-Lab/CICT
|
ff873a03ab03d9113b8db96d26246939bb5da0d4
|
[
"MIT"
] | null | null | null |
learning/__init__.py
|
ZJU-Robotics-Lab/CICT
|
ff873a03ab03d9113b8db96d26246939bb5da0d4
|
[
"MIT"
] | 6
|
2021-03-30T06:30:13.000Z
|
2022-03-01T14:15:00.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from .models import *
from .datasets import *
| 23
| 23
| 0.652174
| 13
| 92
| 4.615385
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025641
| 0.152174
| 92
| 4
| 24
| 23
| 0.74359
| 0.467391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e714ac0dfec1782f1503fa01ae559927b1f5a382
| 114
|
py
|
Python
|
main/admin.py
|
SubsTheTechnomancer/NewsDashboard
|
f86dada8fec038dac45591acb0ddffa67a0bb798
|
[
"MIT"
] | 2
|
2020-11-24T21:01:26.000Z
|
2022-02-21T07:11:17.000Z
|
main/admin.py
|
SubsTheTechnomancer/NewsDashboard
|
f86dada8fec038dac45591acb0ddffa67a0bb798
|
[
"MIT"
] | null | null | null |
main/admin.py
|
SubsTheTechnomancer/NewsDashboard
|
f86dada8fec038dac45591acb0ddffa67a0bb798
|
[
"MIT"
] | 1
|
2020-11-24T07:37:57.000Z
|
2020-11-24T07:37:57.000Z
|
from django.contrib import admin
from .models import Marks
# Make the Marks model manageable (create/edit/delete) through the
# Django admin interface.
admin.site.register(Marks)
| 28.5
| 32
| 0.815789
| 17
| 114
| 5.470588
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114035
| 114
| 4
| 33
| 28.5
| 0.920792
| 0.22807
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e738cb7a568a37e9f09ab4af31c748e05eef9296
| 307
|
py
|
Python
|
multi_objective/methods/__init__.py
|
nbingo/cosmos
|
5c323ac93e2f412f79ac3a65ddf2c3a25edc315e
|
[
"MIT"
] | 16
|
2021-03-26T07:07:58.000Z
|
2022-03-29T20:25:35.000Z
|
multi_objective/methods/__init__.py
|
nbingo/cosmos
|
5c323ac93e2f412f79ac3a65ddf2c3a25edc315e
|
[
"MIT"
] | 2
|
2021-07-22T12:42:42.000Z
|
2022-02-08T14:13:54.000Z
|
multi_objective/methods/__init__.py
|
nbingo/cosmos
|
5c323ac93e2f412f79ac3a65ddf2c3a25edc315e
|
[
"MIT"
] | 3
|
2021-03-30T05:19:47.000Z
|
2022-01-13T19:22:53.000Z
|
from methods.pareto_hypernet.phn_wrappers import HypernetMethod
from methods.pareto_mtl.pareto_mtl import ParetoMTLMethod
from methods.single_task import SingleTaskMethod
from methods.cosmos.cosmos import COSMOSMethod
from methods.mgda.mgda import MGDAMethod
from methods.uniform import UniformScalingMethod
| 51.166667
| 63
| 0.892508
| 39
| 307
| 6.897436
| 0.487179
| 0.245353
| 0.126394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074919
| 307
| 6
| 64
| 51.166667
| 0.947183
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e7536f585524393b1c5bd8671cf9282634cab550
| 52
|
py
|
Python
|
examples/dict/ex4.py
|
mcorne/python-by-example
|
15339c0909c84b51075587a6a66391100971c033
|
[
"MIT"
] | null | null | null |
examples/dict/ex4.py
|
mcorne/python-by-example
|
15339c0909c84b51075587a6a66391100971c033
|
[
"MIT"
] | null | null | null |
examples/dict/ex4.py
|
mcorne/python-by-example
|
15339c0909c84b51075587a6a66391100971c033
|
[
"MIT"
] | null | null | null |
# Demonstrate building a dict from a list of (key, value) tuples;
# insertion order is preserved, so this prints {'two': 2, 'one': 1, 'three': 3}.
pairs = [('two', 2), ('one', 1), ('three', 3)]
print(dict(pairs))
| 26
| 51
| 0.442308
| 8
| 52
| 2.875
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065217
| 0.115385
| 52
| 1
| 52
| 52
| 0.434783
| 0
| 0
| 0
| 0
| 0
| 0.211538
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
e7577d3aefc3e930ac2d7abc4e87a402faafc7ea
| 138
|
py
|
Python
|
declare_qtquick/widgets/api/qt/labs/lottieqt/__list__.py
|
likianta/declare-qtquick
|
93c2ce49d841ccdeb0272085c5f731139927f0d7
|
[
"MIT"
] | 3
|
2021-11-02T03:45:27.000Z
|
2022-03-27T05:33:36.000Z
|
declare_qtquick/widgets/api/qt/labs/lottieqt/__list__.py
|
likianta/declare-qtquick
|
93c2ce49d841ccdeb0272085c5f731139927f0d7
|
[
"MIT"
] | null | null | null |
declare_qtquick/widgets/api/qt/labs/lottieqt/__list__.py
|
likianta/declare-qtquick
|
93c2ce49d841ccdeb0272085c5f731139927f0d7
|
[
"MIT"
] | null | null | null |
from declare_qtquick.widgets.api.qtquick import Item
from .__base__ import *
class LottieAnimation(Item, W.PsLottieAnimation):
    # Declarative widget for the Qt.labs.lottieqt LottieAnimation QML type.
    # All behavior comes from the two bases — presumably Item supplies the
    # declare-qtquick widget machinery and W.PsLottieAnimation the generated
    # property set; TODO confirm against the base definitions.
    pass
| 19.714286
| 52
| 0.797101
| 17
| 138
| 6.176471
| 0.764706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 138
| 6
| 53
| 23
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
e786218c92afea622a42b1baa59be6f61125299b
| 71
|
py
|
Python
|
tokenkeycode.py
|
komine-m/yukarisan
|
6ec6ee2799207a3f3367821f8b3cf97e88e13ed3
|
[
"MIT"
] | null | null | null |
tokenkeycode.py
|
komine-m/yukarisan
|
6ec6ee2799207a3f3367821f8b3cf97e88e13ed3
|
[
"MIT"
] | null | null | null |
tokenkeycode.py
|
komine-m/yukarisan
|
6ec6ee2799207a3f3367821f8b3cf97e88e13ed3
|
[
"MIT"
] | null | null | null |
# NOTE(review): hardcoded credential committed to source — the format looks
# like a Discord bot token. It should be considered leaked: revoke it and
# load the value from an environment variable or a secrets store instead.
# Left byte-identical here to preserve behavior.
TOKEN = 'ODIwNjE3MDEyNTI2NTc5NzI1.YE3xJg.BOl92jVmh6r8Aw_HoBFHQiVdxCM'
| 35.5
| 70
| 0.873239
| 5
| 71
| 12.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134328
| 0.056338
| 71
| 1
| 71
| 71
| 0.776119
| 0
| 0
| 0
| 0
| 0
| 0.842857
| 0.842857
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e78b6c9ee240fc1a7045e0f257445e194c7041a0
| 1,124
|
py
|
Python
|
firmware_kmk/lib/kmk/modules/__init__.py
|
telzo2000/NumAtreus_pico
|
3c8558e80869eca3ee753de21a02afc8108e5fcf
|
[
"MIT"
] | 1
|
2022-01-21T06:09:18.000Z
|
2022-01-21T06:09:18.000Z
|
firmware_kmk/lib/kmk/modules/__init__.py
|
telzo2000/NumAtreus_pico
|
3c8558e80869eca3ee753de21a02afc8108e5fcf
|
[
"MIT"
] | 1
|
2021-09-07T21:42:32.000Z
|
2021-09-07T21:44:19.000Z
|
firmware_kmk/lib/kmk/modules/__init__.py
|
telzo2000/NumAtreus_pico
|
3c8558e80869eca3ee753de21a02afc8108e5fcf
|
[
"MIT"
] | 1
|
2021-09-07T21:37:16.000Z
|
2021-09-07T21:37:16.000Z
|
class InvalidExtensionEnvironment(Exception):
    # Custom exception type; not raised anywhere in this block — presumably
    # raised by extension-loading code elsewhere in the package. TODO confirm.
    pass
class Module:
    '''
    Modules differ from extensions in that they not only can read the state, but
    are allowed to modify the state. They will be loaded on boot, and are not
    allowed to be unloaded as they are required to continue functioning in a
    consistent manner.
    '''
    # The below methods should be implemented by subclasses
    def during_bootup(self, keyboard):
        '''Hook invoked during boot; subclasses must override.'''
        raise NotImplementedError
    def before_matrix_scan(self, keyboard):
        '''
        Return value will be injected as an extra matrix update
        '''
        raise NotImplementedError
    def after_matrix_scan(self, keyboard):
        '''
        Return value will replace the matrix update if supplied
        '''
        raise NotImplementedError
    def before_hid_send(self, keyboard):
        '''Hook invoked before each HID report is sent; subclasses must override.'''
        raise NotImplementedError
    def after_hid_send(self, keyboard):
        '''Hook invoked after each HID report is sent; subclasses must override.'''
        raise NotImplementedError
    def on_powersave_enable(self, keyboard):
        '''Hook invoked when powersave turns on; subclasses must override.'''
        raise NotImplementedError
    def on_powersave_disable(self, keyboard):
        '''Hook invoked when powersave turns off; subclasses must override.'''
        raise NotImplementedError
| 27.414634
| 80
| 0.691281
| 132
| 1,124
| 5.787879
| 0.515152
| 0.109948
| 0.212042
| 0.235602
| 0.353403
| 0.302356
| 0.302356
| 0.102094
| 0
| 0
| 0
| 0
| 0.259786
| 1,124
| 40
| 81
| 28.1
| 0.918269
| 0.3621
| 0
| 0.411765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.411765
| false
| 0.058824
| 0
| 0
| 0.529412
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
99b29744eddd45b4cc7a10de64d628528cb12373
| 50
|
py
|
Python
|
Loops/exercicio 17.py
|
SkaarlK/Learning-Python
|
bbf011182fb5bf876aa9a274400c41a266a0e8c7
|
[
"MIT"
] | 2
|
2022-01-01T19:31:56.000Z
|
2022-01-01T19:32:54.000Z
|
Loops/exercicio 17.py
|
SkaarlK/Learning-Python
|
bbf011182fb5bf876aa9a274400c41a266a0e8c7
|
[
"MIT"
] | null | null | null |
Loops/exercicio 17.py
|
SkaarlK/Learning-Python
|
bbf011182fb5bf876aa9a274400c41a266a0e8c7
|
[
"MIT"
] | null | null | null |
# Print a fixed Portuguese message ("The program returns '0 banknote(s) of R$50'").
message = "O programa retorna '0 cédula(s) de R$50'"
print(message)
| 25
| 49
| 0.68
| 10
| 50
| 3.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069767
| 0.14
| 50
| 1
| 50
| 50
| 0.72093
| 0
| 0
| 0
| 0
| 0
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
99c8df4bcea2c5deb8a629aa8942ba704e271874
| 1,912
|
py
|
Python
|
tests/ghost_machine.py
|
kdmukai/specterext-bitcoinreserve
|
e03b49c32452b4d435e55a4c5f399341ef0c5bf1
|
[
"MIT"
] | null | null | null |
tests/ghost_machine.py
|
kdmukai/specterext-bitcoinreserve
|
e03b49c32452b4d435e55a4c5f399341ef0c5bf1
|
[
"MIT"
] | null | null | null |
tests/ghost_machine.py
|
kdmukai/specterext-bitcoinreserve
|
e03b49c32452b4d435e55a4c5f399341ef0c5bf1
|
[
"MIT"
] | null | null | null |
import pytest
# Using https://iancoleman.io/bip39/ and https://jlopp.github.io/xpub-converter/
# mnemonic = "ghost ghost ghost ghost ghost ghost ghost ghost ghost ghost ghost machine"
# m/44'/0'/0'
@pytest.fixture
def ghost_machine_xpub_44():
    """Fixture: hardcoded m/44'/0'/0' xpub for the ghost-machine test mnemonic."""
    return "xpub6CGap5qbgNCEsvXg2gAjEho17zECMA9PbZa7QkrEWTPnPRaubE6qKots5pNwhyFtuYSPa9gQu4jTTZi8WPaXJhtCHrvHQaFRqayN1saQoWv"
# m/49'/0'/0'
@pytest.fixture
def ghost_machine_xpub_49():
    """Fixture: hardcoded m/49'/0'/0' xpub for the ghost-machine test mnemonic."""
    return "xpub6BtcNhqbaFaoC3oEfKky3Sm22pF48U2jmAf78cB3wdAkkGyAgmsVrgyt1ooSt3bHWgzsdUQh2pTJ867yTeUAMmFDKNSBp8J7WPmp7Df7zjv"
@pytest.fixture
def ghost_machine_ypub():
    """Fixture: hardcoded ypub for the ghost-machine test mnemonic."""
    return "ypub6WisgNWWiw8H3LzMVgYbFXrXCnPW562EgHBKv14wKdYdoNnPwS34Uke231m2sxFCvL7gNx1FVUor1NjYBLtB9zvpBi8cQ37bn7qTVqo3fjR"
@pytest.fixture
def ghost_machine_tpub_49():
    """Fixture: hardcoded testnet tpub (49' path) for the ghost-machine mnemonic."""
    return "tpubDC5CZBbVc15fpTeqkyUBKgHqYCqkeaUtPjvGz7RJEttndfcN29psPcxTSj5RNJaWYaRQq8kqovLBrZA2tju3ThSAP9fY1eiSvorchnseFZu"
@pytest.fixture
def ghost_machine_upub():
    """Fixture: hardcoded upub for the ghost-machine test mnemonic."""
    return "upub5DCn7wm4SgVmzmtdoi8DVVfxhBJkqL1L6mmKHNgVky1Fj5VyBxV6NzKD957sr5fWXkY5y8THtqSVWWpjLnomBYw4iXpxaPbkXg5Gn6s5tQf"
# m/84'/0'/0'
@pytest.fixture
def ghost_machine_xpub_84():
    """Fixture: hardcoded m/84'/0'/0' xpub for the ghost-machine test mnemonic."""
    return "xpub6CjsHfiuBnHMPBkxThQ4DDjTw2Qq3VMEVcPBoMBGejZGkj3WQR15LeJLmymPpSzYHX21C8SdFWHgMw2RUBdAQ2Aj4MMS93a68mxPQeS8oHr"
@pytest.fixture
def ghost_machine_zpub():
    """Fixture: hardcoded zpub for the ghost-machine test mnemonic."""
    return "zpub6rQPu14jV9NK5n9C8QyJdPvUGxhivjLEKqRdN8y3QkK2rvfxujLCamccpPgZpGJP6oFch5dkApzn8WFYuaTBzVXvo2kHJsD4gE5gBnCBYj1"
@pytest.fixture
def ghost_machine_tpub_84():
    """Fixture: hardcoded testnet tpub (84' path) for the ghost-machine mnemonic."""
    return "tpubDC4DsqH5rqHqipMNqUbDFtQT3AkKkUrvLsN6miySvortU3s1LGaNVAb7wX2No2VsuxQV82T8s3HJLv3kdx1CPjsJ3onC1Zo5mWCQzRVaWVX"
@pytest.fixture
def ghost_machine_vpub():
    """Fixture: hardcoded vpub for the ghost-machine test mnemonic."""
    return "vpub5Y24kG7ZrCFRkRnHia2sdnt5N7MmsrNry1jMrP8XptMEcZZqkjQA6bc1f52RGiEoJmdy1Vk9Qck9tAL1ohKvuq3oFXe3ADVse6UiTHzuyKx"
| 28.537313
| 124
| 0.838912
| 134
| 1,912
| 11.798507
| 0.276119
| 0.063251
| 0.085389
| 0.119545
| 0.235927
| 0.235927
| 0.195446
| 0.099304
| 0.034788
| 0.034788
| 0
| 0.107809
| 0.10251
| 1,912
| 67
| 125
| 28.537313
| 0.81352
| 0.105126
| 0
| 0.378378
| 0
| 0
| 0.58558
| 0.58558
| 0
| 1
| 0
| 0
| 0
| 1
| 0.243243
| false
| 0
| 0.027027
| 0
| 0.513514
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
99f0bf219a78427bdbd73793b94374deea1ef92a
| 52
|
py
|
Python
|
src/__init__.py
|
snakeneedy/template-python
|
1a809be2da5991799de87355688bb112267a8c54
|
[
"MIT"
] | 1
|
2020-03-04T14:29:00.000Z
|
2020-03-04T14:29:00.000Z
|
src/__init__.py
|
snakeneedy/template-python
|
1a809be2da5991799de87355688bb112267a8c54
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
snakeneedy/template-python
|
1a809be2da5991799de87355688bb112267a8c54
|
[
"MIT"
] | null | null | null |
def foo():
    """Placeholder function: performs no work and implicitly returns None."""
class Model(object):
    """Placeholder model class; inherits all behavior from ``object``."""
| 7.428571
| 20
| 0.576923
| 7
| 52
| 4.285714
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.307692
| 52
| 6
| 21
| 8.666667
| 0.833333
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.