hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d2b7a561e5b37a00ead0b9bd6fa843d7eb288a1b
| 397
|
py
|
Python
|
WhatsAppManifest/adb/__init__.py
|
riquedev/WhatsAppManifest
|
bcbbd48f6f9152024a54172886876d3a725a3a62
|
[
"MIT"
] | 15
|
2020-03-11T17:31:12.000Z
|
2021-11-19T03:26:09.000Z
|
WhatsAppManifest/adb/__init__.py
|
riquedev/WhatsAppManifest
|
bcbbd48f6f9152024a54172886876d3a725a3a62
|
[
"MIT"
] | 5
|
2021-03-31T19:43:15.000Z
|
2022-03-12T00:18:38.000Z
|
WhatsAppManifest/adb/__init__.py
|
riquedev/WhatsAppManifest
|
bcbbd48f6f9152024a54172886876d3a725a3a62
|
[
"MIT"
] | 4
|
2020-03-11T01:52:57.000Z
|
2021-03-16T04:14:33.000Z
|
"""
General purpose module for ADB control
"""
__author__ = 'Henrique da Silva Santos'
__copyright__ = 'Copyright 2020, WhatsAppManifest'
from WhatsAppManifest.adb.adb import ADB
from WhatsAppManifest.adb.base import WhatsAppManifest
from WhatsAppManifest.adb.device import Device
from WhatsAppManifest.adb.ui_automator_remote import UIAutomatorRemote
from WhatsAppManifest.adb.types import Log
| 30.538462
| 70
| 0.838791
| 47
| 397
| 6.87234
| 0.510638
| 0.309598
| 0.356037
| 0.241486
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011236
| 0.103275
| 397
| 12
| 71
| 33.083333
| 0.896067
| 0.095718
| 0
| 0
| 0
| 0
| 0.159544
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.714286
| 0
| 0.714286
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d2bac96495edb2186bd954c5806d879a711cb7ae
| 39
|
py
|
Python
|
serums/overbound_estimator.py
|
drjdlarson/serums
|
0d5b04a82d37733f9e64a3ec278cef5337d83af4
|
[
"MIT"
] | null | null | null |
serums/overbound_estimator.py
|
drjdlarson/serums
|
0d5b04a82d37733f9e64a3ec278cef5337d83af4
|
[
"MIT"
] | null | null | null |
serums/overbound_estimator.py
|
drjdlarson/serums
|
0d5b04a82d37733f9e64a3ec278cef5337d83af4
|
[
"MIT"
] | null | null | null |
"""For SERUMS Overbound Estimation."""
| 19.5
| 38
| 0.717949
| 4
| 39
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.8
| 0.820513
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d2d596f62db00d1a8f570139a7fa627c0b638735
| 7,335
|
py
|
Python
|
tests/api/v3_1_0/test_mdm.py
|
CiscoISE/ciscoisesdk
|
860b0fc7cc15d0c2a39c64608195a7ab3d5f4885
|
[
"MIT"
] | 36
|
2021-05-18T16:24:19.000Z
|
2022-03-05T13:44:41.000Z
|
tests/api/v3_1_0/test_mdm.py
|
CiscoISE/ciscoisesdk
|
860b0fc7cc15d0c2a39c64608195a7ab3d5f4885
|
[
"MIT"
] | 15
|
2021-06-08T19:03:37.000Z
|
2022-02-25T14:47:33.000Z
|
tests/api/v3_1_0/test_mdm.py
|
CiscoISE/ciscoisesdk
|
860b0fc7cc15d0c2a39c64608195a7ab3d5f4885
|
[
"MIT"
] | 6
|
2021-06-10T09:32:01.000Z
|
2022-01-12T08:34:39.000Z
|
# -*- coding: utf-8 -*-
"""IdentityServicesEngineAPI mdm API fixtures and tests.
Copyright (c) 2021 Cisco and/or its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import pytest
from fastjsonschema.exceptions import JsonSchemaException
from ciscoisesdk.exceptions import MalformedRequest
from ciscoisesdk.exceptions import ciscoisesdkException
from tests.environment import IDENTITY_SERVICES_ENGINE_VERSION
pytestmark = pytest.mark.skipif(IDENTITY_SERVICES_ENGINE_VERSION != '3.1.0', reason='version does not match')
def is_valid_get_endpoints(json_schema_validate, obj):
if not obj:
return False
assert hasattr(obj, 'headers')
assert hasattr(obj, 'content')
assert hasattr(obj, 'text')
assert hasattr(obj, 'response')
json_schema_validate('jsd_3f66874f1141550da6104eff5428d37a_v3_1_0').validate(obj.response)
return True
def get_endpoints(api):
endpoint_result = api.mdm.get_endpoints(
active_validation=False,
payload=None
)
return endpoint_result
@pytest.mark.mdm
def test_get_endpoints(api, validator):
try:
assert is_valid_get_endpoints(
validator,
get_endpoints(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest)):
print("ERROR: {error}".format(error=original_e))
raise original_e
def get_endpoints_default(api):
endpoint_result = api.mdm.get_endpoints(
active_validation=False,
payload=None
)
return endpoint_result
@pytest.mark.mdm
def test_get_endpoints_default(api, validator):
try:
assert is_valid_get_endpoints(
validator,
get_endpoints_default(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
raise original_e
def is_valid_get_endpoint_by_mac_address(json_schema_validate, obj):
if not obj:
return False
assert hasattr(obj, 'headers')
assert hasattr(obj, 'content')
assert hasattr(obj, 'text')
assert hasattr(obj, 'response')
json_schema_validate('jsd_6419150c239256209b64afc9e5522e11_v3_1_0').validate(obj.response)
return True
def get_endpoint_by_mac_address(api):
endpoint_result = api.mdm.get_endpoint_by_mac_address(
active_validation=False,
payload=None
)
return endpoint_result
@pytest.mark.mdm
def test_get_endpoint_by_mac_address(api, validator):
try:
assert is_valid_get_endpoint_by_mac_address(
validator,
get_endpoint_by_mac_address(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest)):
print("ERROR: {error}".format(error=original_e))
raise original_e
def get_endpoint_by_mac_address_default(api):
endpoint_result = api.mdm.get_endpoint_by_mac_address(
active_validation=False,
payload=None
)
return endpoint_result
@pytest.mark.mdm
def test_get_endpoint_by_mac_address_default(api, validator):
try:
assert is_valid_get_endpoint_by_mac_address(
validator,
get_endpoint_by_mac_address_default(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
raise original_e
def is_valid_get_endpoints_by_type(json_schema_validate, obj):
if not obj:
return False
assert hasattr(obj, 'headers')
assert hasattr(obj, 'content')
assert hasattr(obj, 'text')
assert hasattr(obj, 'response')
json_schema_validate('jsd_0239282311e55709895e677699dfc3f7_v3_1_0').validate(obj.response)
return True
def get_endpoints_by_type(api):
endpoint_result = api.mdm.get_endpoints_by_type(
active_validation=False,
payload=None
)
return endpoint_result
@pytest.mark.mdm
def test_get_endpoints_by_type(api, validator):
try:
assert is_valid_get_endpoints_by_type(
validator,
get_endpoints_by_type(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest)):
print("ERROR: {error}".format(error=original_e))
raise original_e
def get_endpoints_by_type_default(api):
endpoint_result = api.mdm.get_endpoints_by_type(
active_validation=False,
payload=None
)
return endpoint_result
@pytest.mark.mdm
def test_get_endpoints_by_type_default(api, validator):
try:
assert is_valid_get_endpoints_by_type(
validator,
get_endpoints_by_type_default(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
raise original_e
def is_valid_get_endpoints_by_os_type(json_schema_validate, obj):
if not obj:
return False
assert hasattr(obj, 'headers')
assert hasattr(obj, 'content')
assert hasattr(obj, 'text')
assert hasattr(obj, 'response')
json_schema_validate('jsd_339bf2a72bc05f5aabd3a79a5188d86a_v3_1_0').validate(obj.response)
return True
def get_endpoints_by_os_type(api):
endpoint_result = api.mdm.get_endpoints_by_os_type(
active_validation=False,
payload=None
)
return endpoint_result
@pytest.mark.mdm
def test_get_endpoints_by_os_type(api, validator):
try:
assert is_valid_get_endpoints_by_os_type(
validator,
get_endpoints_by_os_type(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest)):
print("ERROR: {error}".format(error=original_e))
raise original_e
def get_endpoints_by_os_type_default(api):
endpoint_result = api.mdm.get_endpoints_by_os_type(
active_validation=False,
payload=None
)
return endpoint_result
@pytest.mark.mdm
def test_get_endpoints_by_os_type_default(api, validator):
try:
assert is_valid_get_endpoints_by_os_type(
validator,
get_endpoints_by_os_type_default(api)
)
except Exception as original_e:
with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
raise original_e
| 30.690377
| 109
| 0.719564
| 925
| 7,335
| 5.427027
| 0.189189
| 0.078884
| 0.061355
| 0.03506
| 0.731275
| 0.726892
| 0.72251
| 0.712351
| 0.709562
| 0.706574
| 0
| 0.019357
| 0.211179
| 7,335
| 238
| 110
| 30.819328
| 0.848254
| 0.156646
| 0
| 0.689655
| 0
| 0
| 0.0581
| 0.027836
| 0
| 0
| 0
| 0
| 0.137931
| 1
| 0.114943
| false
| 0
| 0.028736
| 0
| 0.235632
| 0.022989
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d2db7739fa599f8b03ef86e06ea8ed1918b4c959
| 54
|
py
|
Python
|
frozenSpider/spiderNets.py
|
mrfrozen97/ML-Algorithms-Python3-Library
|
1d1fee8c4553f7d0c4c5339cef624e02eeb40650
|
[
"MIT"
] | 1
|
2021-01-18T17:48:16.000Z
|
2021-01-18T17:48:16.000Z
|
frozenSpider/spiderNets.py
|
mrfrozen97/frozenSpider-A-library-for-complete-machine-learning-algorithms-implementation-
|
1d1fee8c4553f7d0c4c5339cef624e02eeb40650
|
[
"MIT"
] | null | null | null |
frozenSpider/spiderNets.py
|
mrfrozen97/frozenSpider-A-library-for-complete-machine-learning-algorithms-implementation-
|
1d1fee8c4553f7d0c4c5339cef624e02eeb40650
|
[
"MIT"
] | null | null | null |
import numpy
import math
#Thsis is yet to be created
| 18
| 27
| 0.777778
| 10
| 54
| 4.2
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.203704
| 54
| 3
| 27
| 18
| 0.976744
| 0.481481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d2db9df1f1096f10642401560cf08a16356638aa
| 40
|
py
|
Python
|
bulk_sched/__init__.py
|
PrynsTag/oneBarangay
|
6a8d56003d85b8385e91f5c5d81208619023c1ee
|
[
"Apache-2.0"
] | null | null | null |
bulk_sched/__init__.py
|
PrynsTag/oneBarangay
|
6a8d56003d85b8385e91f5c5d81208619023c1ee
|
[
"Apache-2.0"
] | 96
|
2021-08-28T12:37:02.000Z
|
2022-03-23T04:25:12.000Z
|
bulk_sched/__init__.py
|
PrynsTag/oneBarangay
|
6a8d56003d85b8385e91f5c5d81208619023c1ee
|
[
"Apache-2.0"
] | null | null | null |
"""This is init file for bulk_sched."""
| 20
| 39
| 0.675
| 7
| 40
| 3.714286
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 40
| 1
| 40
| 40
| 0.764706
| 0.825
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d2e1530311ecfbebfdb8069ce23797a7c34e7cd1
| 89
|
py
|
Python
|
show/admin.py
|
Heart8reak/django-rest-show
|
067cce26b92cf9c9380b102853313fe3a872b05b
|
[
"MIT"
] | null | null | null |
show/admin.py
|
Heart8reak/django-rest-show
|
067cce26b92cf9c9380b102853313fe3a872b05b
|
[
"MIT"
] | null | null | null |
show/admin.py
|
Heart8reak/django-rest-show
|
067cce26b92cf9c9380b102853313fe3a872b05b
|
[
"MIT"
] | 1
|
2019-11-07T02:04:24.000Z
|
2019-11-07T02:04:24.000Z
|
from django.contrib import admin
from show.models import Show
admin.site.register(Show)
| 17.8
| 32
| 0.820225
| 14
| 89
| 5.214286
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11236
| 89
| 4
| 33
| 22.25
| 0.924051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d2f95b57b9a683977a9f353bab26cefcc62ba276
| 141
|
py
|
Python
|
django_ocr_server/tests/__init__.py
|
shmakovpn/django_ocr_server
|
4d694629c39c18a6c13bcdfafdb8258b78e5a859
|
[
"Apache-2.0"
] | 17
|
2019-12-04T03:14:56.000Z
|
2022-03-27T07:05:19.000Z
|
django_ocr_server/tests/__init__.py
|
shmakovpn/django_ocr_server
|
4d694629c39c18a6c13bcdfafdb8258b78e5a859
|
[
"Apache-2.0"
] | 1
|
2020-04-17T07:32:30.000Z
|
2020-04-17T07:32:30.000Z
|
django_ocr_server/tests/__init__.py
|
shmakovpn/django_ocr_server
|
4d694629c39c18a6c13bcdfafdb8258b78e5a859
|
[
"Apache-2.0"
] | 5
|
2020-03-16T10:43:03.000Z
|
2021-07-14T14:43:49.000Z
|
"""
django_ocr_server/tests/__init__.py
+++++++++++++++++++++++++++++++++++
| Author: shmakovpn <shmakovpn@yandex.ru>
| Date: 2021-01-07
"""
| 20.142857
| 41
| 0.539007
| 15
| 141
| 4.666667
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061538
| 0.078014
| 141
| 7
| 42
| 20.142857
| 0.476923
| 0.943262
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
960c058a776d8d1e458ea95382bfd46ffd491eb3
| 2,247
|
py
|
Python
|
stubs/3.2/time.py
|
zyga/mypy
|
5b7e222568cd20c31cde4e02adc9fd77d949197a
|
[
"PSF-2.0"
] | 1
|
2019-06-16T07:05:32.000Z
|
2019-06-16T07:05:32.000Z
|
stubs/3.2/time.py
|
zyga/mypy
|
5b7e222568cd20c31cde4e02adc9fd77d949197a
|
[
"PSF-2.0"
] | null | null | null |
stubs/3.2/time.py
|
zyga/mypy
|
5b7e222568cd20c31cde4e02adc9fd77d949197a
|
[
"PSF-2.0"
] | null | null | null |
# Stubs for time
# Ron Murawski <ron@horizonchess.com>
# based on: http://docs.python.org/3.2/library/time.html#module-time
# see: http://nullege.com/codes/search?cq=time
from typing import Undefined, Tuple, overload
# ----- variables and constants -----
accept2dyear = False
altzone = 0
daylight = 0
timezone = 0
tzname = Undefined(Tuple[str, str])
# ----- classes/methods -----
class struct_time:
# this is supposed to be a namedtuple object
# namedtuple is not yet implemented (see file: mypy/stubs/collections.py)
# see: http://docs.python.org/3.2/library/time.html#time.struct_time
# see: http://nullege.com/codes/search/time.struct_time
# TODO: namedtuple() object problem
#namedtuple __init__(self, int, int, int, int, int, int, int, int, int):
# pass
tm_year = 0
tm_mon = 0
tm_mday = 0
tm_hour = 0
tm_min = 0
tm_sec = 0
tm_wday = 0
tm_yday = 0
tm_isdst = 0
# ----- functions -----
@overload
def asctime() -> str: pass # return current time
@overload
def asctime(t: struct_time) -> str: pass
@overload
def asctime(t: Tuple[int, int, int, int, int, int, int, int, int]) -> str: pass
def clock() -> float: pass
@overload
def ctime() -> str: pass # return current time
@overload
def ctime(secs: float) -> str: pass
@overload
def gmtime() -> struct_time: pass # return current time
@overload
def gmtime(secs: float) -> struct_time: pass
@overload
def localtime() -> struct_time: pass # return current time
@overload
def localtime(secs: float) -> struct_time: pass
@overload
def mktime(t: struct_time) -> float: pass
@overload
def mktime(t: Tuple[int, int, int, int, int,
int, int, int, int]) -> float: pass
@overload
def sleep(secs: int) -> None: pass
@overload
def sleep(secs: float) -> None: pass
@overload
def strftime(format: str) -> str: pass # return current time
@overload
def strftime(format: str, t: struct_time) -> str: pass
@overload
def strftime(format: str, t: Tuple[int, int, int, int, int,
int, int, int, int]) -> str: pass
def strptime(string: str,
format: str = "%a %b %d %H:%M:%S %Y") -> struct_time: pass
def time() -> float: pass
def tzset() -> None: pass # Unix only
| 26.435294
| 79
| 0.650645
| 329
| 2,247
| 4.370821
| 0.31307
| 0.133519
| 0.175243
| 0.200278
| 0.511127
| 0.458275
| 0.412378
| 0.207232
| 0.148818
| 0.082754
| 0
| 0.009545
| 0.207388
| 2,247
| 84
| 80
| 26.75
| 0.797866
| 0.313752
| 0
| 0.290909
| 0
| 0
| 0.013167
| 0
| 0
| 0
| 0
| 0.011905
| 0
| 1
| 0.363636
| false
| 0.363636
| 0.018182
| 0
| 0.563636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
825d9fe5577f1ce26823d228cc5dd658b6a50f2e
| 30
|
py
|
Python
|
AttentionLayers/__init__.py
|
h-roy/Attentive-Group-Equivariant-Convolutional-Networks
|
6ff0757e594c070f42b81fdd0bf4a7d27740be7c
|
[
"MIT"
] | 53
|
2020-07-07T11:06:30.000Z
|
2022-03-26T02:42:49.000Z
|
AttentionLayers/__init__.py
|
h-roy/Attentive-Group-Equivariant-Convolutional-Networks
|
6ff0757e594c070f42b81fdd0bf4a7d27740be7c
|
[
"MIT"
] | null | null | null |
AttentionLayers/__init__.py
|
h-roy/Attentive-Group-Equivariant-Convolutional-Networks
|
6ff0757e594c070f42b81fdd0bf4a7d27740be7c
|
[
"MIT"
] | 2
|
2020-09-19T12:10:33.000Z
|
2020-10-29T19:37:08.000Z
|
from attgconv.layers import *
| 15
| 29
| 0.8
| 4
| 30
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 30
| 1
| 30
| 30
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
828f3c32ffdd22d02365f486b036739215fd0c7b
| 11,235
|
py
|
Python
|
python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_partition_backfill.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 4,606
|
2018-06-21T17:45:20.000Z
|
2022-03-31T23:39:42.000Z
|
python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_partition_backfill.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 6,221
|
2018-06-12T04:36:01.000Z
|
2022-03-31T21:43:05.000Z
|
python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_partition_backfill.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 619
|
2018-08-22T22:43:09.000Z
|
2022-03-31T22:48:06.000Z
|
import os
from dagster.core.execution.backfill import BulkActionStatus
from dagster.seven import get_system_temp_directory
from dagster_graphql.client.query import LAUNCH_PARTITION_BACKFILL_MUTATION
from dagster_graphql.test.utils import (
execute_dagster_graphql,
execute_dagster_graphql_and_finish_runs,
infer_repository_selector,
)
from .graphql_context_test_suite import ExecutingGraphQLContextTestMatrix
PARTITION_PROGRESS_QUERY = """
query PartitionProgressQuery($backfillId: String!) {
partitionBackfillOrError(backfillId: $backfillId) {
... on PartitionBackfill {
__typename
backfillId
status
numRequested
numTotal
fromFailure
reexecutionSteps
}
... on PythonError {
message
stack
}
}
}
"""
CANCEL_BACKFILL_MUTATION = """
mutation($backfillId: String!) {
cancelPartitionBackfill(backfillId: $backfillId) {
... on CancelBackfillSuccess {
__typename
backfillId
}
... on PythonError {
message
stack
}
}
}
"""
RESUME_BACKFILL_MUTATION = """
mutation($backfillId: String!) {
resumePartitionBackfill(backfillId: $backfillId) {
... on ResumeBackfillSuccess {
__typename
backfillId
}
... on PythonError {
message
stack
}
}
}
"""
class TestDaemonPartitionBackfill(ExecutingGraphQLContextTestMatrix):
def test_launch_full_pipeline_backfill(self, graphql_context):
repository_selector = infer_repository_selector(graphql_context)
result = execute_dagster_graphql(
graphql_context,
LAUNCH_PARTITION_BACKFILL_MUTATION,
variables={
"backfillParams": {
"selector": {
"repositorySelector": repository_selector,
"partitionSetName": "integer_partition",
},
"partitionNames": ["2", "3"],
}
},
)
assert not result.errors
assert result.data
assert result.data["launchPartitionBackfill"]["__typename"] == "LaunchBackfillSuccess"
backfill_id = result.data["launchPartitionBackfill"]["backfillId"]
result = execute_dagster_graphql(
graphql_context, PARTITION_PROGRESS_QUERY, variables={"backfillId": backfill_id}
)
assert not result.errors
assert result.data
assert result.data["partitionBackfillOrError"]["__typename"] == "PartitionBackfill"
assert result.data["partitionBackfillOrError"]["status"] == "REQUESTED"
assert result.data["partitionBackfillOrError"]["numRequested"] == 0
assert result.data["partitionBackfillOrError"]["numTotal"] == 2
def test_launch_partial_backfill(self, graphql_context):
# execute a full pipeline, without the failure environment variable
repository_selector = infer_repository_selector(graphql_context)
partition_set_selector = {
"repositorySelector": repository_selector,
"partitionSetName": "chained_integer_partition",
}
# reexecute a partial pipeline
partial_steps = ["after_failure"]
result = execute_dagster_graphql_and_finish_runs(
graphql_context,
LAUNCH_PARTITION_BACKFILL_MUTATION,
variables={
"backfillParams": {
"selector": partition_set_selector,
"partitionNames": ["2", "3"],
"reexecutionSteps": partial_steps,
}
},
)
assert not result.errors
assert result.data
assert result.data["launchPartitionBackfill"]["__typename"] == "LaunchBackfillSuccess"
backfill_id = result.data["launchPartitionBackfill"]["backfillId"]
result = execute_dagster_graphql(
graphql_context, PARTITION_PROGRESS_QUERY, variables={"backfillId": backfill_id}
)
assert not result.errors
assert result.data
assert result.data["partitionBackfillOrError"]["__typename"] == "PartitionBackfill"
assert result.data["partitionBackfillOrError"]["status"] == "REQUESTED"
assert result.data["partitionBackfillOrError"]["numRequested"] == 0
assert result.data["partitionBackfillOrError"]["numTotal"] == 2
assert result.data["partitionBackfillOrError"]["reexecutionSteps"] == ["after_failure"]
def test_cancel_backfill(self, graphql_context):
repository_selector = infer_repository_selector(graphql_context)
result = execute_dagster_graphql(
graphql_context,
LAUNCH_PARTITION_BACKFILL_MUTATION,
variables={
"backfillParams": {
"selector": {
"repositorySelector": repository_selector,
"partitionSetName": "integer_partition",
},
"partitionNames": ["2", "3"],
}
},
)
assert not result.errors
assert result.data
assert result.data["launchPartitionBackfill"]["__typename"] == "LaunchBackfillSuccess"
backfill_id = result.data["launchPartitionBackfill"]["backfillId"]
result = execute_dagster_graphql(
graphql_context, PARTITION_PROGRESS_QUERY, variables={"backfillId": backfill_id}
)
assert not result.errors
assert result.data
assert result.data["partitionBackfillOrError"]["__typename"] == "PartitionBackfill"
assert result.data["partitionBackfillOrError"]["status"] == "REQUESTED"
assert result.data["partitionBackfillOrError"]["numRequested"] == 0
assert result.data["partitionBackfillOrError"]["numTotal"] == 2
result = execute_dagster_graphql(
graphql_context, CANCEL_BACKFILL_MUTATION, variables={"backfillId": backfill_id}
)
assert result.data
assert result.data["cancelPartitionBackfill"]["__typename"] == "CancelBackfillSuccess"
result = execute_dagster_graphql(
graphql_context, PARTITION_PROGRESS_QUERY, variables={"backfillId": backfill_id}
)
assert not result.errors
assert result.data
assert result.data["partitionBackfillOrError"]["__typename"] == "PartitionBackfill"
assert result.data["partitionBackfillOrError"]["status"] == "CANCELED"
def test_resume_backfill(self, graphql_context):
repository_selector = infer_repository_selector(graphql_context)
result = execute_dagster_graphql(
graphql_context,
LAUNCH_PARTITION_BACKFILL_MUTATION,
variables={
"backfillParams": {
"selector": {
"repositorySelector": repository_selector,
"partitionSetName": "integer_partition",
},
"partitionNames": ["2", "3"],
}
},
)
assert not result.errors
assert result.data
assert result.data["launchPartitionBackfill"]["__typename"] == "LaunchBackfillSuccess"
backfill_id = result.data["launchPartitionBackfill"]["backfillId"]
result = execute_dagster_graphql(
graphql_context, PARTITION_PROGRESS_QUERY, variables={"backfillId": backfill_id}
)
assert not result.errors
assert result.data
assert result.data["partitionBackfillOrError"]["__typename"] == "PartitionBackfill"
assert result.data["partitionBackfillOrError"]["status"] == "REQUESTED"
assert result.data["partitionBackfillOrError"]["numRequested"] == 0
assert result.data["partitionBackfillOrError"]["numTotal"] == 2
# manually mark as failed
backfill = graphql_context.instance.get_backfill(backfill_id)
graphql_context.instance.update_backfill(backfill.with_status(BulkActionStatus.FAILED))
result = execute_dagster_graphql(
graphql_context, RESUME_BACKFILL_MUTATION, variables={"backfillId": backfill_id}
)
assert result.data
assert result.data["resumePartitionBackfill"]["__typename"] == "ResumeBackfillSuccess"
result = execute_dagster_graphql(
graphql_context, PARTITION_PROGRESS_QUERY, variables={"backfillId": backfill_id}
)
assert not result.errors
assert result.data
assert result.data["partitionBackfillOrError"]["__typename"] == "PartitionBackfill"
assert result.data["partitionBackfillOrError"]["status"] == "REQUESTED"
class TestLaunchDaemonBackfillFromFailure(ExecutingGraphQLContextTestMatrix):
def test_launch_from_failure(self, graphql_context):
repository_selector = infer_repository_selector(graphql_context)
partition_set_selector = {
"repositorySelector": repository_selector,
"partitionSetName": "chained_integer_partition",
}
# trigger failure in the conditionally_fail solid
output_file = os.path.join(
get_system_temp_directory(), "chained_failure_pipeline_conditionally_fail"
)
try:
with open(output_file, "w"):
result = execute_dagster_graphql_and_finish_runs(
graphql_context,
LAUNCH_PARTITION_BACKFILL_MUTATION,
variables={
"backfillParams": {
"selector": partition_set_selector,
"partitionNames": ["2", "3"],
}
},
)
finally:
os.remove(output_file)
assert not result.errors
assert result.data
assert result.data["launchPartitionBackfill"]["__typename"] == "LaunchBackfillSuccess"
# re-execute from failure (without the failure file)
result = execute_dagster_graphql_and_finish_runs(
graphql_context,
LAUNCH_PARTITION_BACKFILL_MUTATION,
variables={
"backfillParams": {
"selector": partition_set_selector,
"partitionNames": ["2", "3"],
"fromFailure": True,
}
},
)
assert not result.errors
assert result.data
assert result.data["launchPartitionBackfill"]["__typename"] == "LaunchBackfillSuccess"
backfill_id = result.data["launchPartitionBackfill"]["backfillId"]
result = execute_dagster_graphql(
graphql_context, PARTITION_PROGRESS_QUERY, variables={"backfillId": backfill_id}
)
assert not result.errors
assert result.data
assert result.data["partitionBackfillOrError"]["__typename"] == "PartitionBackfill"
assert result.data["partitionBackfillOrError"]["status"] == "REQUESTED"
assert result.data["partitionBackfillOrError"]["numRequested"] == 0
assert result.data["partitionBackfillOrError"]["numTotal"] == 2
assert result.data["partitionBackfillOrError"]["fromFailure"]
| 38.34471
| 95
| 0.631153
| 873
| 11,235
| 7.838488
| 0.130584
| 0.078913
| 0.11457
| 0.15198
| 0.764723
| 0.753032
| 0.723513
| 0.723513
| 0.723513
| 0.723513
| 0
| 0.002708
| 0.276814
| 11,235
| 292
| 96
| 38.476027
| 0.839508
| 0.019315
| 0
| 0.623016
| 0
| 0
| 0.289593
| 0.127951
| 0
| 0
| 0
| 0
| 0.246032
| 1
| 0.019841
| false
| 0
| 0.02381
| 0
| 0.051587
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
828f4bd4d27ef24e98cfb3b39d309e934042e897
| 42
|
py
|
Python
|
sasrl_env/__init__.py
|
sassoftware/sasrlenv
|
2c8039276fdfe8071582f1e5053f9cfcb4a194e9
|
[
"Apache-2.0"
] | 1
|
2021-04-23T15:10:58.000Z
|
2021-04-23T15:10:58.000Z
|
sasrl_env/__init__.py
|
sassoftware/sasrlenv
|
2c8039276fdfe8071582f1e5053f9cfcb4a194e9
|
[
"Apache-2.0"
] | null | null | null |
sasrl_env/__init__.py
|
sassoftware/sasrlenv
|
2c8039276fdfe8071582f1e5053f9cfcb4a194e9
|
[
"Apache-2.0"
] | null | null | null |
from sasrl_env.common import env_pb2_grpc
| 21
| 41
| 0.880952
| 8
| 42
| 4.25
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026316
| 0.095238
| 42
| 1
| 42
| 42
| 0.868421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
82b0f15038e9e437bc239d0a807039a5df58b5a0
| 305
|
py
|
Python
|
src/fusedwind/runiec_proto/testconvdriver.py
|
FUSED-Wind/fusedwind
|
5025b84f8bfb334b33bf172bf1a39e3abcadab15
|
[
"Apache-2.0"
] | 15
|
2015-01-19T18:20:35.000Z
|
2021-12-21T05:50:38.000Z
|
src/fusedwind/runiec_proto/testconvdriver.py
|
michaelXDzhang/fusedwind
|
5025b84f8bfb334b33bf172bf1a39e3abcadab15
|
[
"Apache-2.0"
] | 61
|
2015-01-05T02:47:35.000Z
|
2019-10-09T02:18:13.000Z
|
src/fusedwind/runiec_proto/testconvdriver.py
|
michaelXDzhang/fusedwind
|
5025b84f8bfb334b33bf172bf1a39e3abcadab15
|
[
"Apache-2.0"
] | 11
|
2015-01-16T03:05:49.000Z
|
2021-02-16T13:57:59.000Z
|
from openruniec import test_convergence
#test_convergence("dlcproto.5000samples.out", False, 100, 5000, 50)
#test_convergence("dlcproto.5148scan.out", True, 150, 5150, 50)
test_convergence("dlcproto.samp.out", False, 20,100, 5000, 50)
test_convergence("dlcproto.scan.out", True, 20,150, 5150, 50)
| 21.785714
| 67
| 0.75082
| 43
| 305
| 5.209302
| 0.44186
| 0.334821
| 0.410714
| 0.334821
| 0.285714
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0.175824
| 0.104918
| 305
| 13
| 68
| 23.461538
| 0.644689
| 0.419672
| 0
| 0
| 0
| 0
| 0.201183
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
82c9e32cfc504b975de63a874111883bbfca69bb
| 32
|
py
|
Python
|
common/dataset/__init__.py
|
AlexTaehwan/kgpolicy
|
de54cd877c4d3a0a0bfd51efd5202783dd8ff13a
|
[
"MIT"
] | 111
|
2020-03-10T06:55:58.000Z
|
2022-03-29T09:56:49.000Z
|
common/dataset/__init__.py
|
AlexTaehwan/kgpolicy
|
de54cd877c4d3a0a0bfd51efd5202783dd8ff13a
|
[
"MIT"
] | 3
|
2020-09-16T19:54:45.000Z
|
2021-12-26T13:35:41.000Z
|
common/dataset/__init__.py
|
AlexTaehwan/kgpolicy
|
de54cd877c4d3a0a0bfd51efd5202783dd8ff13a
|
[
"MIT"
] | 39
|
2020-03-11T02:27:59.000Z
|
2022-02-15T08:38:47.000Z
|
from .preprocess import CKGData
| 16
| 31
| 0.84375
| 4
| 32
| 6.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 32
| 1
| 32
| 32
| 0.964286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
7d79e3f8ceea6022877155b5271c1f211d71706e
| 224
|
py
|
Python
|
procuremonkey/procuremonkey/doctype/procurement_process/test_procurement_process.py
|
mbieker/ProcureMonkey
|
9426fcfc7cb373f3373c135755f9f6101b550aba
|
[
"MIT"
] | 1
|
2021-01-22T09:55:51.000Z
|
2021-01-22T09:55:51.000Z
|
procuremonkey/procuremonkey/doctype/procurement_process/test_procurement_process.py
|
mbieker/ProcureMonkey
|
9426fcfc7cb373f3373c135755f9f6101b550aba
|
[
"MIT"
] | null | null | null |
procuremonkey/procuremonkey/doctype/procurement_process/test_procurement_process.py
|
mbieker/ProcureMonkey
|
9426fcfc7cb373f3373c135755f9f6101b550aba
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2021, Martin Bieker and Contributors
# See license.txt
from __future__ import unicode_literals
# import frappe
import unittest
class TestProcurementProcess(unittest.TestCase):
pass
| 20.363636
| 52
| 0.772321
| 27
| 224
| 6.222222
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025907
| 0.138393
| 224
| 10
| 53
| 22.4
| 0.84456
| 0.455357
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
7dceb25ac4acf2d48baf3b7a280c609b94e142bc
| 1,051
|
py
|
Python
|
gunnery/account/urls.py
|
timgates42/gunnery
|
733a261cae6243a11883a40e18b14f57cf6e47b2
|
[
"Apache-2.0"
] | 314
|
2015-01-01T06:17:34.000Z
|
2022-03-10T03:34:02.000Z
|
gunnery/account/urls.py
|
timgates42/gunnery
|
733a261cae6243a11883a40e18b14f57cf6e47b2
|
[
"Apache-2.0"
] | 20
|
2015-04-03T13:34:59.000Z
|
2021-06-10T20:37:25.000Z
|
gunnery/account/urls.py
|
pkucmus/gunnery
|
30fce7f3fd74947621da6e91c1e872f383fc1e71
|
[
"Apache-2.0"
] | 57
|
2015-01-07T05:41:34.000Z
|
2021-10-31T19:56:50.000Z
|
from django.conf.urls import patterns, url
from views import modal_permissions, profile_page
urlpatterns = patterns('',
url(r'^account/profile/(?P<user_id>[\d]+)/$', profile_page, name='profile'),
url(r'^account/login/$', 'django.contrib.auth.views.login', {'template_name': 'page/login.html'}),
url(r'^account/logout/$', 'django.contrib.auth.views.logout_then_login', name='logout'),
url(r'^account/password_reset/$', 'django.contrib.auth.views.password_reset', name='password_reset'),
url(r'^account/password_reset_done$', 'django.contrib.auth.views.password_reset_done',
name='password_reset_done'),
url(r'^account/password_reset_confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>.+)/$',
'django.contrib.auth.views.password_reset_confirm', name='password_reset_confirm'),
url(r'^account/password_reset_complete$', 'django.contrib.auth.views.password_reset_complete',
name='password_reset_complete'),
url(r'^modal/permissions/(?P<group_id>[\d]+)/$', modal_permissions, name='modal_permissions'),
)
| 58.388889
| 105
| 0.716461
| 141
| 1,051
| 5.113475
| 0.276596
| 0.216366
| 0.106796
| 0.183079
| 0.327323
| 0.194175
| 0
| 0
| 0
| 0
| 0
| 0.00418
| 0.089439
| 1,051
| 18
| 106
| 58.388889
| 0.749216
| 0
| 0
| 0
| 0
| 0.066667
| 0.631179
| 0.513308
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.466667
| 0.133333
| 0
| 0.133333
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
7dd525ff289e01b8ec204570abcb59c7a1919f8a
| 35
|
py
|
Python
|
frugy/__main__.py
|
CAENels/frugy
|
0670c8d62a8e56b4c72cd4e7b0c7c7f46b86b67b
|
[
"BSD-3-Clause"
] | 2
|
2022-03-30T10:03:57.000Z
|
2022-03-31T09:54:33.000Z
|
frugy/__main__.py
|
CAENels/frugy
|
0670c8d62a8e56b4c72cd4e7b0c7c7f46b86b67b
|
[
"BSD-3-Clause"
] | null | null | null |
frugy/__main__.py
|
CAENels/frugy
|
0670c8d62a8e56b4c72cd4e7b0c7c7f46b86b67b
|
[
"BSD-3-Clause"
] | 2
|
2020-09-14T01:40:00.000Z
|
2021-03-26T09:39:59.000Z
|
from frugy.cli import main
main()
| 8.75
| 26
| 0.742857
| 6
| 35
| 4.333333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171429
| 35
| 3
| 27
| 11.666667
| 0.896552
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
815019660a73a94c8d4737cc80c245c73445fbfc
| 8,901
|
py
|
Python
|
code/orbsim/r4b_3d/equations_of_motion.py
|
GandalfSaxe/letomes
|
5f73a4066fcf69260cb538c105acf898b22e756d
|
[
"MIT"
] | null | null | null |
code/orbsim/r4b_3d/equations_of_motion.py
|
GandalfSaxe/letomes
|
5f73a4066fcf69260cb538c105acf898b22e756d
|
[
"MIT"
] | null | null | null |
code/orbsim/r4b_3d/equations_of_motion.py
|
GandalfSaxe/letomes
|
5f73a4066fcf69260cb538c105acf898b22e756d
|
[
"MIT"
] | null | null | null |
"""
Equations of motion for R4B-3D system (Restricted 4-Body Problem in 3 Dimensions),
derived via Hamiltons's equations.
Includes at least:
- 3 Qdot: coordinate differential equations in spherical coordinates.
- 3 Pdot: generalized momentum differential equations in spherical coordinates.
- 3 P: generalized momentum as function of Qdot.
- Hamiltonian
All non-dimensionalized and scaled with mass of spacecraft (see derivations in report)
"""
from math import pi
import numpy as np
from numpy import cos, sin, sqrt, tan
from orbsim.r4b_3d import EARTH_ETA, MARS_ETA, SUN_ETA
eta_ks = [SUN_ETA, EARTH_ETA, MARS_ETA]
# region Coodinate Derivatives: Qdot(Q, B)
def get_Rdot(B_R):
"""Rdot(R, theta, phi, B_R, B_theta, B_phi) from Hamilton's equations"""
return B_R
def get_thetadot(R, B_theta):
"""thetadot(R, theta, phi, B_R, B_theta, B_phi) from Hamilton's equations"""
if R <= 0:
raise ValueError("R must be positive.")
return B_theta / (R ** 2)
def get_phidot(R, theta, B_phi):
"""phidot(R, theta, phi, B_R, B_theta, B_phi) from Hamilton's equations"""
if R <= 0:
raise ValueError("R cannot be less than or equal to zero.")
elif theta <= 0 or theta >= pi:
raise ValueError("theta must be in range 0 < theta < pi.")
return B_phi / (R ** 2 * sin(theta) ** 2)
# endregion
# region Momentum Derivatives: Bdot(Q, B, Qk)
def get_Bdot_R(R, theta, phi, B_theta, B_phi, R_ks, theta_ks, phi_ks):
"""
Gives Bdot_R, i.e. the time derivative of the generalized momentum in R direction,
per unit mass and in chosen characteristic units.
Arguments:
R {float} -- R coordinate (AU)
theta {float} -- theta coordinate (rad)
phi {float} -- phi coordinate (rad)
B_theta {float} -- B_theta momentum (linear velocity in R direction, AU/y)
B_phi {float} -- B_theta (angular theta momentum per mass, AU^2/y)
R_ks {List(float)} -- Coordinates [R_sun, R_earth, R_mars], AU
theta_ks {List(float)} -- Coordinates [theta_sun, theta_earth, theta_mars], rad
phi_ks {List(float)} -- Coordinates [phi_sun, phi_earth, phi_mars], rad
Raises:
ValueError -- Out of range coordinates.
Returns:
float -- Bdot_R
"""
if R <= 0:
raise ValueError("R cannot be less than or equal to zero.")
if theta <= 0 or theta >= pi:
raise ValueError("theta must be in range 0 < theta < pi.")
if phi <= -pi or phi > pi:
raise ValueError("phi must be in range -pi < phi <= pi.")
for R_k in R_ks:
if R_k < 0:
raise ValueError("All R_k must zero or be positive (allow for SUN_R = 0)")
for theta_k in theta_ks:
if theta_k <= 0 or theta_k >= pi:
raise ValueError("theta_k must be in range 0 < theta_k < pi.")
for phi_k in phi_ks:
if phi_k <= -pi or phi_k > pi:
raise ValueError("phi_k must be in range -pi < phi_k <= pi.")
R_ks = np.array(R_ks)
theta_ks = np.array(theta_ks)
phi_ks = np.array(phi_ks)
numerators = eta_ks * (
-R
+ R_ks
* (cos(theta) * cos(theta_ks) + sin(theta) * sin(theta_ks) * cos(phi - phi_ks))
)
denominators_base = (
R ** 2
+ R_ks ** 2
- 2
* R
* R_ks
* (cos(theta) * cos(theta_ks) + sin(theta) * sin(theta_ks) * cos(phi - phi_ks))
)
denominators = denominators_base * sqrt(denominators_base)
summation = np.sum(numerators / denominators)
Bdot_R1 = B_theta ** 2 / (R ** 3)
Bdot_R2 = B_phi ** 2 / (R ** 3 * sin(theta) ** 2)
Bdot_R3 = summation
Bdot_R = Bdot_R1 + Bdot_R2 + Bdot_R3
return Bdot_R
def get_Bdot_theta(R, theta, phi, B_phi, R_ks, theta_ks, phi_ks):
"""
Gives Bdot_theta, i.e. the time derivative of the generalized momentum in theta
direction, per unit mass and in chosen characteristic units.
Arguments:
R {float} -- R coordinate (AU)
theta {float} -- theta coordinate (rad)
phi {float} -- phi coordinate (rad)
B_phi {float} -- B_theta (angular theta momentum per mass, AU^2/y)
R_ks {List(float)} -- Coordinates [R_sun, R_earth, R_mars], AU
theta_ks {List(float)} -- Coordinates [theta_sun, theta_earth, theta_mars], rad
phi_ks {List(float)} -- Coordinates [phi_sun, phi_earth, phi_mars], rad
Raises:
ValueError -- Out of range coordinates.
Returns:
float -- Bdot_theta
"""
if R <= 0:
raise ValueError("R cannot be less than or equal to zero.")
if theta <= 0 or theta >= pi:
raise ValueError("theta must be in range 0 < theta < pi.")
if phi <= -pi or phi > pi:
raise ValueError("phi must be in range -pi < phi <= pi.")
for R_k in R_ks:
if R_k < 0:
raise ValueError("All R_k must zero or be positive (allow for SUN_R = 0)")
for theta_k in theta_ks:
if theta_k <= 0 or theta_k >= pi:
raise ValueError("theta_k must be in range 0 < theta_k < pi.")
for phi_k in phi_ks:
if phi_k <= -pi or phi_k > pi:
raise ValueError("phi_k must be in range -pi < phi_k <= pi.")
R_ks = np.array(R_ks)
theta_ks = np.array(theta_ks)
phi_ks = np.array(phi_ks)
numerators = eta_ks * (
R
* R_ks
* (-sin(theta) * cos(theta_ks) + cos(theta) * sin(theta_ks) * cos(phi - phi_ks))
)
denominators_base = (
R ** 2
+ R_ks ** 2
- 2
* R
* R_ks
* (cos(theta) * cos(theta_ks) + sin(theta) * sin(theta_ks) * cos(phi - phi_ks))
)
denominators = denominators_base * sqrt(denominators_base)
summation = np.sum(numerators / denominators)
Bdot_theta1 = B_phi ** 2 / (R ** 2 * sin(theta) ** 2 * tan(theta))
Bdot_theta2 = summation
Bdot_theta = Bdot_theta1 + Bdot_theta2
return Bdot_theta
def get_Bdot_phi(R, theta, phi, R_ks, theta_ks, phi_ks):
"""
Gives Bdot_phi, i.e. the time derivative of the generalized momentum in theta
direction, per unit mass and in chosen characteristic units.
Arguments:
R {float} -- R coordinate (AU)
theta {float} -- theta coordinate (rad)
phi {float} -- phi coordinate (rad)
R_ks {List(float)} -- Coordinates [R_sun, R_earth, R_mars], AU
theta_ks {List(float)} -- Coordinates [theta_sun, theta_earth, theta_mars], rad
phi_ks {List(float)} -- Coordinates [phi_sun, phi_earth, phi_mars], rad
Raises:
ValueError -- Out of range coordinates.
Returns:
float -- Bdot_phi
"""
if R <= 0:
raise ValueError("R cannot be less than or equal to zero.")
if theta <= 0 or theta >= pi:
raise ValueError("theta must be in range 0 < theta < pi.")
if phi <= -pi or phi > pi:
raise ValueError("phi must be in range -pi < phi <= pi.")
for R_k in R_ks:
if R_k < 0:
raise ValueError("All R_k must zero or be positive (allow for SUN_R = 0)")
for theta_k in theta_ks:
if theta_k <= 0 or theta_k >= pi:
raise ValueError("theta_k must be in range 0 < theta_k < pi.")
for phi_k in phi_ks:
if phi_k <= -pi or phi_k > pi:
raise ValueError("phi_k must be in range -pi < phi_k <= pi.")
R_ks = np.array(R_ks)
theta_ks = np.array(theta_ks)
phi_ks = np.array(phi_ks)
numerators = eta_ks * (-R * R_ks * sin(theta) * sin(theta_ks) * sin(phi - phi_ks))
denominators_base = (
R ** 2
+ R_ks ** 2
- 2
* R
* R_ks
* (cos(theta) * cos(theta_ks) + sin(theta) * sin(theta_ks) * cos(phi - phi_ks))
)
denominators = denominators_base * sqrt(denominators_base)
summation = np.sum(numerators / denominators)
Bdot_phi = summation
return Bdot_phi
# endregion
# region Momenta B(Q, Qdot) - Derived from Qdot(Q, B)
def get_B_R(Rdot):
"""Get B_R from Q, Qdot"""
return Rdot
def get_B_theta(R, thetadot):
"""Get B_theta from Q, Qdot"""
if R <= 0:
raise ValueError("R cannot be less than or equal to zero.")
return R ** 2 * thetadot
def get_B_phi(R, theta, phidot):
"""Get B_phi from Q, Qdot"""
if R <= 0:
raise ValueError("R cannot be less than or equal to zero.")
if theta <= 0 or theta >= pi:
raise ValueError("theta must be in range 0 < theta < pi.")
return R ** 2 * sin(theta) ** 2 * phidot
# endregion
# if __name__ == "__main__":
# from pprint import pprint
# pprint(
# get_Bdot_R(
# 1.1,
# 3.1315926535897933,
# 3.141592653589793,
# 0.2,
# -0.1,
# [0.0, 0.983580560001, 1.470582878522],
# [0.013707783890401887, 1.1997429598510756, 1.264411333882953],
# [0.0, 2.0274978713480216, 6.283185307179586],
# )
# )
| 29.869128
| 88
| 0.600157
| 1,354
| 8,901
| 3.77031
| 0.108567
| 0.035651
| 0.046621
| 0.035651
| 0.749657
| 0.738492
| 0.719295
| 0.719295
| 0.719295
| 0.714006
| 0
| 0.035653
| 0.284687
| 8,901
| 297
| 89
| 29.969697
| 0.766138
| 0.365015
| 0
| 0.688406
| 0
| 0
| 0.178902
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.065217
| false
| 0
| 0.028986
| 0
| 0.15942
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
815e0c2f1cec380243ea196d8d8ff3df56e89959
| 95
|
py
|
Python
|
app/repository/manufacturers/__init__.py
|
pedrolp85/pydevice
|
39b961bb67f59ac9a9373ecc99748e07505b249e
|
[
"Apache-2.0"
] | null | null | null |
app/repository/manufacturers/__init__.py
|
pedrolp85/pydevice
|
39b961bb67f59ac9a9373ecc99748e07505b249e
|
[
"Apache-2.0"
] | null | null | null |
app/repository/manufacturers/__init__.py
|
pedrolp85/pydevice
|
39b961bb67f59ac9a9373ecc99748e07505b249e
|
[
"Apache-2.0"
] | null | null | null |
from .defaults import get_manufacturers_repository
__all__ = ["get_manufacturers_repository"]
| 23.75
| 50
| 0.852632
| 10
| 95
| 7.3
| 0.7
| 0.438356
| 0.712329
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084211
| 95
| 3
| 51
| 31.666667
| 0.83908
| 0
| 0
| 0
| 0
| 0
| 0.294737
| 0.294737
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
815e9c450c87268f41a55bec2e93279d6a75332c
| 146
|
py
|
Python
|
namespaces/exceptions.py
|
leobellaera/meetme-api-web
|
55e3127572f9ecd346a91d7eece4af08227c9eda
|
[
"MIT"
] | null | null | null |
namespaces/exceptions.py
|
leobellaera/meetme-api-web
|
55e3127572f9ecd346a91d7eece4af08227c9eda
|
[
"MIT"
] | null | null | null |
namespaces/exceptions.py
|
leobellaera/meetme-api-web
|
55e3127572f9ecd346a91d7eece4af08227c9eda
|
[
"MIT"
] | null | null | null |
class UserDoesNotExist(Exception):
pass
class PasswordDoesNotMatch(Exception):
pass
class EmailAlreadyRegistered(Exception):
pass
| 13.272727
| 40
| 0.767123
| 12
| 146
| 9.333333
| 0.5
| 0.348214
| 0.321429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171233
| 146
| 10
| 41
| 14.6
| 0.92562
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.666667
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
817f71a5c968e20d263b98657b76de41f8d4959c
| 171
|
py
|
Python
|
documented/error.py
|
python-platonic/documented
|
eac807f593a3e052a37b78ba74a79f8cff8d0047
|
[
"MIT"
] | null | null | null |
documented/error.py
|
python-platonic/documented
|
eac807f593a3e052a37b78ba74a79f8cff8d0047
|
[
"MIT"
] | 3
|
2020-09-28T14:31:49.000Z
|
2020-10-18T16:26:32.000Z
|
documented/error.py
|
python-platonic/documented
|
eac807f593a3e052a37b78ba74a79f8cff8d0047
|
[
"MIT"
] | null | null | null |
from documented.documented import Documented
class DocumentedError(Documented, Exception):
"""Exception with a templated error message provided as the docstring."""
| 28.5
| 77
| 0.795322
| 19
| 171
| 7.157895
| 0.789474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134503
| 171
| 5
| 78
| 34.2
| 0.918919
| 0.391813
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
81c0e538eb1a091f2e33ebaa27f900bf215a45fd
| 144
|
py
|
Python
|
sitecats_helpers/apps.py
|
bashu/django-sitecats-helpers
|
0937ea13243db110aba4f1e5247c3c61cf5c094d
|
[
"MIT"
] | 3
|
2016-09-30T14:01:04.000Z
|
2021-11-19T11:00:37.000Z
|
sitecats_helpers/apps.py
|
bashu/django-sitecats-helpers
|
0937ea13243db110aba4f1e5247c3c61cf5c094d
|
[
"MIT"
] | 1
|
2021-09-18T08:41:53.000Z
|
2021-09-18T08:42:51.000Z
|
sitecats_helpers/apps.py
|
bashu/django-sitecats-helpers
|
0937ea13243db110aba4f1e5247c3c61cf5c094d
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class SitecatsHelpersConfig(AppConfig):
name = "sitecats_helpers"
verbose_name = "Sitecats Helpers"
| 20.571429
| 39
| 0.770833
| 15
| 144
| 7.266667
| 0.733333
| 0.220183
| 0.348624
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159722
| 144
| 6
| 40
| 24
| 0.900826
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
81e228fd1b0429cd01d4ad14440763297b4f4dce
| 2,183
|
py
|
Python
|
storage/team11/AVLMode.py
|
webdev188/tytus
|
847071edb17b218f51bb969d335a8ec093d13f94
|
[
"MIT"
] | 35
|
2020-12-07T03:11:43.000Z
|
2021-04-15T17:38:16.000Z
|
storage/team11/AVLMode.py
|
webdev188/tytus
|
847071edb17b218f51bb969d335a8ec093d13f94
|
[
"MIT"
] | 47
|
2020-12-09T01:29:09.000Z
|
2021-01-13T05:37:50.000Z
|
storage/team11/AVLMode.py
|
webdev188/tytus
|
847071edb17b218f51bb969d335a8ec093d13f94
|
[
"MIT"
] | 556
|
2020-12-07T03:13:31.000Z
|
2021-06-17T17:41:10.000Z
|
from Manager import Manager
db = Manager()
def createDatabase(db_name):
return db.createDatabase(db_name)
def showDatabases():
return db.showDatabases()
def alterDatabase(old_db, new_db):
return db.alterDatabase(old_db, new_db)
def dropDatabase(name_db):
return db.dropDatabase(name_db)
def createTable(database, name_table, number_columns):
return db.createTable(database, name_table, number_columns)
def showTables(database):
return db.showTables(database)
def extractTable(database, name_table):
return db.extractTable(database, name_table)
def extractRangeTable(database, name_table, number_column, lower, upper):
return db.extractRangeTable(database, name_table, number_column, lower, upper)
def alterAddPK(database, name_table, columns):
return db.alterAddPK(database, name_table, columns)
def alterDropPK(database, name_table):
return db.alterDropPK(database, name_table)
def alterTable(database, old_table, new_table):
return db.alterTable(database, old_table, new_table)
def alterAddColumn(database, name_table, default):
return db.alterAddColumn(database, name_table, default)
def alterDropColumn(database, name_table, number_column):
return db.alterDropColumn(database, name_table, number_column)
def dropTable(database, name_table):
return db.dropTable(database, name_table)
def insert(database, name_table, register):
return db.insert(database, name_table, register)
def loadCSV(file, database, table_name):
return db.loadCSV(file, database, table_name)
def extractRow(database, name_table, columns):
return db.extractRow(database, name_table, columns)
def update(database, name_table, register, columns):
return db.update(database, name_table, register, columns)
def delete(database, name_table, columns):
return db.delete(database, name_table, columns)
def truncate(database, name_table):
return db.truncate(database, name_table)
def graficarRegistros(database, name_table):
return db.graficarRegistros(database, name_table)
def graficarTabla(database):
return db.graficarTabla(database)
def graficarDB():
return db.graficarDB()
| 22.739583
| 82
| 0.769583
| 273
| 2,183
| 5.978022
| 0.153846
| 0.205882
| 0.291667
| 0.084559
| 0.625
| 0.319853
| 0.068627
| 0.068627
| 0
| 0
| 0
| 0
| 0.137426
| 2,183
| 95
| 83
| 22.978947
| 0.866702
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.479167
| false
| 0
| 0.020833
| 0.479167
| 0.979167
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
c49c50e0a60b6d7f83ce8b17fca356094cd354d6
| 147
|
py
|
Python
|
bundyclock/__init__.py
|
dahallgren/bundyclock
|
0303ad7cef8a026bde796ad1386f856d674f0ac8
|
[
"MIT"
] | null | null | null |
bundyclock/__init__.py
|
dahallgren/bundyclock
|
0303ad7cef8a026bde796ad1386f856d674f0ac8
|
[
"MIT"
] | null | null | null |
bundyclock/__init__.py
|
dahallgren/bundyclock
|
0303ad7cef8a026bde796ad1386f856d674f0ac8
|
[
"MIT"
] | null | null | null |
import logging
import sys
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG, format='%(asctime)s %(name)s %(levelname)s: %(message)s')
| 29.4
| 119
| 0.741497
| 21
| 147
| 5.190476
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088435
| 147
| 4
| 120
| 36.75
| 0.813433
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c4e144a408bc6224440020cd1f65c80700f6d803
| 2,760
|
py
|
Python
|
temboo/core/Library/Fitbit/Activities/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 7
|
2016-03-07T02:07:21.000Z
|
2022-01-21T02:22:41.000Z
|
temboo/core/Library/Fitbit/Activities/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | null | null | null |
temboo/core/Library/Fitbit/Activities/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 8
|
2016-06-14T06:01:11.000Z
|
2020-04-22T09:21:44.000Z
|
from temboo.Library.Fitbit.Activities.AddFavoriteActivity import AddFavoriteActivity, AddFavoriteActivityInputSet, AddFavoriteActivityResultSet, AddFavoriteActivityChoreographyExecution
from temboo.Library.Fitbit.Activities.BrowseActivities import BrowseActivities, BrowseActivitiesInputSet, BrowseActivitiesResultSet, BrowseActivitiesChoreographyExecution
from temboo.Library.Fitbit.Activities.DeleteActivityLog import DeleteActivityLog, DeleteActivityLogInputSet, DeleteActivityLogResultSet, DeleteActivityLogChoreographyExecution
from temboo.Library.Fitbit.Activities.DeleteFavoriteActivity import DeleteFavoriteActivity, DeleteFavoriteActivityInputSet, DeleteFavoriteActivityResultSet, DeleteFavoriteActivityChoreographyExecution
from temboo.Library.Fitbit.Activities.GetActivities import GetActivities, GetActivitiesInputSet, GetActivitiesResultSet, GetActivitiesChoreographyExecution
from temboo.Library.Fitbit.Activities.GetActivity import GetActivity, GetActivityInputSet, GetActivityResultSet, GetActivityChoreographyExecution
from temboo.Library.Fitbit.Activities.GetActivityDailyGoals import GetActivityDailyGoals, GetActivityDailyGoalsInputSet, GetActivityDailyGoalsResultSet, GetActivityDailyGoalsChoreographyExecution
from temboo.Library.Fitbit.Activities.GetActivityWeeklyGoals import GetActivityWeeklyGoals, GetActivityWeeklyGoalsInputSet, GetActivityWeeklyGoalsResultSet, GetActivityWeeklyGoalsChoreographyExecution
from temboo.Library.Fitbit.Activities.GetFavoriteActivities import GetFavoriteActivities, GetFavoriteActivitiesInputSet, GetFavoriteActivitiesResultSet, GetFavoriteActivitiesChoreographyExecution
from temboo.Library.Fitbit.Activities.GetFrequentActivities import GetFrequentActivities, GetFrequentActivitiesInputSet, GetFrequentActivitiesResultSet, GetFrequentActivitiesChoreographyExecution
from temboo.Library.Fitbit.Activities.GetLatestActivity import GetLatestActivity, GetLatestActivityInputSet, GetLatestActivityResultSet, GetLatestActivityChoreographyExecution
from temboo.Library.Fitbit.Activities.GetRecentActivities import GetRecentActivities, GetRecentActivitiesInputSet, GetRecentActivitiesResultSet, GetRecentActivitiesChoreographyExecution
from temboo.Library.Fitbit.Activities.LogActivity import LogActivity, LogActivityInputSet, LogActivityResultSet, LogActivityChoreographyExecution
from temboo.Library.Fitbit.Activities.UpdateActivityDailyGoals import UpdateActivityDailyGoals, UpdateActivityDailyGoalsInputSet, UpdateActivityDailyGoalsResultSet, UpdateActivityDailyGoalsChoreographyExecution
from temboo.Library.Fitbit.Activities.UpdateActivityWeeklyGoals import UpdateActivityWeeklyGoals, UpdateActivityWeeklyGoalsInputSet, UpdateActivityWeeklyGoalsResultSet, UpdateActivityWeeklyGoalsChoreographyExecution
| 172.5
| 215
| 0.923913
| 165
| 2,760
| 15.454545
| 0.4
| 0.058824
| 0.1
| 0.135294
| 0.194118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038043
| 2,760
| 15
| 216
| 184
| 0.960452
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c4e871228c3fe778e513ae7687d9e80edfd29d91
| 122
|
py
|
Python
|
transliterator/admin.py
|
MapleCCC/Nixacernis-Keyboard-Backend
|
909f1731b301999b209c99a1f6320ad1a9dd2ec8
|
[
"WTFPL"
] | null | null | null |
transliterator/admin.py
|
MapleCCC/Nixacernis-Keyboard-Backend
|
909f1731b301999b209c99a1f6320ad1a9dd2ec8
|
[
"WTFPL"
] | 4
|
2019-05-06T16:06:57.000Z
|
2019-12-10T03:23:22.000Z
|
transliterator/admin.py
|
MapleCCC/Nixacernis-Keyboard-Backend
|
909f1731b301999b209c99a1f6320ad1a9dd2ec8
|
[
"WTFPL"
] | null | null | null |
from django.contrib import admin
from .models import UserDict
# Register your models here.
admin.site.register(UserDict)
| 20.333333
| 32
| 0.811475
| 17
| 122
| 5.823529
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122951
| 122
| 5
| 33
| 24.4
| 0.925234
| 0.213115
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4802d7126605b5ad02ac419dbb5c8e8ea5375bac
| 133
|
py
|
Python
|
test.py
|
valentinpel/calculator-python
|
c1a798124f1832037dc30b76661c741380cf8cef
|
[
"Apache-2.0"
] | null | null | null |
test.py
|
valentinpel/calculator-python
|
c1a798124f1832037dc30b76661c741380cf8cef
|
[
"Apache-2.0"
] | null | null | null |
test.py
|
valentinpel/calculator-python
|
c1a798124f1832037dc30b76661c741380cf8cef
|
[
"Apache-2.0"
] | null | null | null |
# Smoke-test script for the add() helper.
asdasdfgs = 1


def add(x, y):
    """Return the sum of x and y."""
    return x + y


# Print 1+2, 1+3 and 1+5, one result per line.
for bump in (2, 3, 5):
    print(add(asdasdfgs, bump))
| 12.090909
| 24
| 0.601504
| 21
| 133
| 3.809524
| 0.52381
| 0.3
| 0.6375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.039604
| 0.240602
| 133
| 11
| 25
| 12.090909
| 0.752475
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0
| 0.166667
| 0.333333
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
|
0
| 5
|
4844dae9f36721b657b735f78e1c3c48efe8ca1b
| 13
|
py
|
Python
|
Adora/hello1.py
|
adora2211/Python-BootCamp
|
55a2dc1244cd9627deeecb82f7159e23c32c47de
|
[
"MIT"
] | null | null | null |
Adora/hello1.py
|
adora2211/Python-BootCamp
|
55a2dc1244cd9627deeecb82f7159e23c32c47de
|
[
"MIT"
] | null | null | null |
Adora/hello1.py
|
adora2211/Python-BootCamp
|
55a2dc1244cd9627deeecb82f7159e23c32c47de
|
[
"MIT"
] | null | null | null |
# Fix: the original line was `print("HIii)` — an unterminated string
# literal, which is a SyntaxError. Close the quote so the script runs.
print("HIii")
| 6.5
| 12
| 0.692308
| 2
| 13
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 13
| 1
| 13
| 13
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
48671faffd7af14dfa3b98971834bed05b616293
| 5,157
|
py
|
Python
|
giung2/layers/linear.py
|
cs-giung/giung2-jax
|
4c340543e4ac5067fac52d3a1ad336a9cd0e14c3
|
[
"MIT"
] | 5
|
2022-02-08T05:47:35.000Z
|
2022-03-22T07:57:01.000Z
|
giung2/layers/linear.py
|
cs-giung/giung2-jax
|
4c340543e4ac5067fac52d3a1ad336a9cd0e14c3
|
[
"MIT"
] | null | null | null |
giung2/layers/linear.py
|
cs-giung/giung2-jax
|
4c340543e4ac5067fac52d3a1ad336a9cd0e14c3
|
[
"MIT"
] | null | null | null |
import jax
import jax.numpy as jnp
import flax.linen as nn
from typing import Any, Callable, Iterable, Optional, Tuple, Union
# Type aliases used in the initializer signatures of the layers below.
PRNGKey = Any
Shape = Iterable[int]
Array = Any
Dtype = Any

# Public API of this module.
__all__ = [
    "Linear",
    "Linear_Dropout",
    "Linear_GaussianDropout",
    "Linear_BatchEnsemble",
]
class Linear(nn.Module):
    """Plain fully-connected layer: y = x @ W (+ b)."""
    features: int
    use_bias: bool = True
    w_init: Callable[[PRNGKey, Shape, Dtype], Array] = jax.nn.initializers.kaiming_normal()
    b_init: Callable[[PRNGKey, Shape, Dtype], Array] = jax.nn.initializers.zeros

    @nn.compact
    def __call__(self, x, **kwargs):
        """
        Args:
            x (Array): An input array with shape [N, C1,].
        Returns:
            y (Array): An output array with shape [N, C2,].
        """
        in_dim = x.shape[-1]
        kernel = self.param('w', self.w_init, (in_dim, self.features,))
        out = jnp.dot(x, jnp.asarray(kernel, x.dtype))
        if not self.use_bias:
            return out
        bias = jnp.asarray(self.param('b', self.b_init, (self.features,)), x.dtype)
        return jnp.add(out, jnp.reshape(bias, (1, -1,)))
class Linear_BatchEnsemble(nn.Module):
    """Linear layer with BatchEnsemble-style rank-1 modulation: a single
    shared weight matrix `w` is scaled per ensemble member by input factors
    `r` and output factors `s`.
    """
    ensemble_size: int  # number of ensemble members packed along the batch axis
    features: int       # output feature dimension C2
    use_bias: bool = True
    w_init: Callable[[PRNGKey, Shape, Dtype], Array] = jax.nn.initializers.kaiming_normal()
    b_init: Callable[[PRNGKey, Shape, Dtype], Array] = jax.nn.initializers.zeros
    r_init: Callable[[PRNGKey, Shape, Dtype], Array] = jax.nn.initializers.ones
    s_init: Callable[[PRNGKey, Shape, Dtype], Array] = jax.nn.initializers.ones

    @nn.compact
    def __call__(self, x, **kwargs):
        """
        Args:
            x (Array): An input array with shape [N, C1,].
        Returns:
            y (Array): An output array with shape [N, C2,].
        """
        # Per-member input scaling factors, shape (ensemble_size, C1).
        r = jnp.asarray(self.param('r', self.r_init, (self.ensemble_size, x.shape[-1],)), x.dtype)
        # Split the batch into members: (E, N // E, C1).
        # Assumes N is a multiple of ensemble_size -- TODO confirm with callers.
        x = jnp.reshape(x, (self.ensemble_size, x.shape[0] // self.ensemble_size, -1,))
        x = jnp.multiply(x, jnp.reshape(r, (self.ensemble_size, 1, -1,)))
        # Shared weight matrix, shape (C1, C2).
        w = jnp.asarray(self.param('w', self.w_init, (x.shape[-1], self.features,)), x.dtype)
        # Contract x's axis 2 with w's axis 0; no batched dimensions.
        y = jax.lax.dot_general(x, w, (((2,), (0,),), ((), (),)))
        if self.use_bias:
            b = jnp.asarray(self.param('b', self.b_init, (self.features,)), x.dtype)
            y = jnp.add(y, jnp.reshape(b, (1, 1, -1,)))
        # Per-member output scaling, then flatten members back into [N, C2].
        s = jnp.asarray(self.param('s', self.s_init, (self.ensemble_size, y.shape[-1],)), x.dtype)
        y = jnp.multiply(y, jnp.reshape(s, (self.ensemble_size, 1, -1,)))
        y = jnp.reshape(y, (y.shape[0] * y.shape[1], -1,))
        return y
class Linear_Dropout(nn.Module):
    """Linear layer that applies inverted Bernoulli dropout to its input
    before the matrix multiply."""
    features: int
    use_bias: bool = True
    drop_rate: float = 0.5
    deterministic: Optional[bool] = None  # module-level override for kwargs

    w_init: Callable[[PRNGKey, Shape, Dtype], Array] = jax.nn.initializers.kaiming_normal()
    b_init: Callable[[PRNGKey, Shape, Dtype], Array] = jax.nn.initializers.zeros

    @nn.compact
    def __call__(self, x, **kwargs):
        """
        Args:
            x (Array): An input array with shape [N, C1,].
        Returns:
            y (Array): An output array with shape [N, C2,].
        """
        # Call-site 'deterministic' (default True, i.e. no dropout) is merged
        # with the module attribute; nn.merge_param rejects conflicting values.
        deterministic = kwargs.pop('deterministic', True)
        deterministic = nn.merge_param('deterministic', self.deterministic, deterministic)
        if not deterministic:
            rng = self.make_rng('dropout')
            keep = 1.0 - self.drop_rate
            # Inverted dropout: keep each element with probability `keep` and
            # rescale by 1/keep so the expected activation is unchanged.
            mask = jax.random.bernoulli(rng, p=keep, shape=x.shape)
            x = jax.lax.select(mask, x / keep, jnp.zeros_like(x))
        w = jnp.asarray(self.param('w', self.w_init, (x.shape[-1], self.features,)), x.dtype)
        y = jnp.dot(x, w)
        if self.use_bias:
            b = jnp.asarray(self.param('b', self.b_init, (self.features,)), x.dtype)
            y = jnp.add(y, jnp.reshape(b, (1, -1,)))
        return y
class Linear_GaussianDropout(nn.Module):
    """Linear layer that multiplies its input by Gaussian noise centered at
    1.0 before the matrix multiply (Gaussian dropout)."""
    features: int
    use_bias: bool = True
    drop_rate: float = 0.5
    deterministic: Optional[bool] = None  # module-level override for kwargs

    w_init: Callable[[PRNGKey, Shape, Dtype], Array] = jax.nn.initializers.kaiming_normal()
    b_init: Callable[[PRNGKey, Shape, Dtype], Array] = jax.nn.initializers.zeros

    @nn.compact
    def __call__(self, x, **kwargs):
        """
        Args:
            x (Array): An input array with shape [N, C1,].
        Returns:
            y (Array): An output array with shape [N, C2,].
        """
        # Call-site 'deterministic' (default True, i.e. no noise) is merged
        # with the module attribute.
        deterministic = kwargs.pop('deterministic', True)
        deterministic = nn.merge_param('deterministic', self.deterministic, deterministic)
        if not deterministic:
            rng = self.make_rng('dropout')
            keep = 1.0 - self.drop_rate
            # Multiplicative noise ~ N(1, keep / (1 - keep)).
            # NOTE(review): canonical Gaussian dropout (Srivastava et al.)
            # uses stddev sqrt(rate / (1 - rate)); here the ratio is
            # sqrt(keep / (1 - keep)), i.e. inverted -- confirm intentional.
            mask = jnp.ones_like(x) + jax.random.normal(
                rng, shape=x.shape
            ) * jnp.sqrt(keep / (1 - keep))
            x = jnp.multiply(x, mask)
        w = jnp.asarray(self.param('w', self.w_init, (x.shape[-1], self.features,)), x.dtype)
        y = jnp.dot(x, w)
        if self.use_bias:
            b = jnp.asarray(self.param('b', self.b_init, (self.features,)), x.dtype)
            y = jnp.add(y, jnp.reshape(b, (1, -1,)))
        return y
| 35.321918
| 98
| 0.578243
| 705
| 5,157
| 4.121986
| 0.131915
| 0.01927
| 0.065382
| 0.082588
| 0.779422
| 0.751893
| 0.74501
| 0.74501
| 0.742257
| 0.742257
| 0
| 0.011322
| 0.263525
| 5,157
| 145
| 99
| 35.565517
| 0.753818
| 0.092108
| 0
| 0.631579
| 0
| 0
| 0.030935
| 0.004932
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042105
| false
| 0
| 0.042105
| 0
| 0.410526
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6f9505d1499ea938828d5d01a97079db90ed1533
| 48,764
|
py
|
Python
|
tests/test_agent_db.py
|
johnblackford/agent
|
dd303f658d483317154aada1109ca5b742f8f094
|
[
"MIT"
] | 2
|
2019-04-27T14:13:42.000Z
|
2022-03-23T06:51:44.000Z
|
tests/test_agent_db.py
|
johnblackford/agent
|
dd303f658d483317154aada1109ca5b742f8f094
|
[
"MIT"
] | 1
|
2019-01-22T07:32:51.000Z
|
2019-03-01T08:59:47.000Z
|
tests/test_agent_db.py
|
johnblackford/agent
|
dd303f658d483317154aada1109ca5b742f8f094
|
[
"MIT"
] | 4
|
2018-01-25T19:41:47.000Z
|
2021-04-30T12:57:41.000Z
|
"""
Copyright (c) 2016 John Blackford
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
# File Name: test_agent_db.py
#
# Description: Unit tests for the Database Class
#
"""
import time
import datetime
import unittest.mock as mock
from agent import agent_db
def get_db_file_contents():
    """Return the JSON text of the mocked agent *database* file.

    Placeholders like __NUM_ENTRIES__, __UPTIME__ and __NextInstNum__ are
    presumably substituted by agent_db at load time -- verify against the
    Database implementation.
    """
    db_contents = """{
"Device.ControllerNumberOfEntries": "__NUM_ENTRIES__",
"Device.SubscriptionNumberOfEntries": "__NUM_ENTRIES__",
"Device.LocalAgent.Manufacturer": "ARRIS",
"Device.LocalAgent.ManufacturerOUI": "00D09E",
"Device.LocalAgent.ProductClass": "RPi_Camera",
"Device.LocalAgent.SerialNumber": "C0000000001",
"Device.LocalAgent.EndpointID": "usp.00D09E-RPi_Camera-C0000000001",
"Device.LocalAgent.ModelName": "PoC-USP-Agent-Camera",
"Device.LocalAgent.HardwareVersion": "RPi2-B",
"Device.LocalAgent.SoftwareVersion": "0.0.1-alpha",
"Device.LocalAgent.PeriodicInterval": 300,
"Device.LocalAgent.ProvisioningCode": "",
"Device.LocalAgent.SupportedProtocols": "STOMP",
"Device.LocalAgent.UpTime": "__UPTIME__",
"Device.LocalAgent.X_ARRIS-COM_IPAddr": "__IPADDR__",
"Device.Time.Enable" : true,
"Device.Time.Status" : "Synchronized",
"Device.Time.NTPServer1" : "ntp1.zzz.com",
"Device.Time.NTPServer2" : "ntp2.zzz.com",
"Device.Time.NTPServer3" : "ntp3.zzz.com",
"Device.Time.NTPServer4" : "",
"Device.Time.NTPServer5" : "",
"Device.Time.CurrentLocalTime" : "__CURR_TIME__",
"Device.Time.LocalTimeZone" : "CST6CDT,M3.2.0/2,M11.1.0",
"Device.Controller.1.Enable": true,
"Device.Controller.1.EndpointID": "usp.controller-stomp-johnb",
"Device.Controller.1.Protocol": "STOMP",
"Device.Controller.1.CoAP.Host": "",
"Device.Controller.1.CoAP.Port": 0,
"Device.Controller.1.STOMP.Host": "stomp.johnblackford.org",
"Device.Controller.1.STOMP.Port": 61613,
"Device.Controller.1.STOMP.Username": "jab",
"Device.Controller.1.STOMP.Password": "johnb23",
"Device.Controller.2.Enable": true,
"Device.Controller.2.EndpointID": "usp.controller-coap-johnb",
"Device.Controller.2.Protocol": "CoAP",
"Device.Controller.2.CoAP.Host": "localhost",
"Device.Controller.2.CoAP.Port": 15683,
"Device.Controller.2.STOMP.Host": "",
"Device.Controller.2.STOMP.Port": 0,
"Device.Controller.2.STOMP.Username": "",
"Device.Controller.2.STOMP.Password": "",
"Device.Subscription.1.Enable": true,
"Device.Subscription.1.ID": "sub-boot-stomp",
"Device.Subscription.1.NotifType": "Boot",
"Device.Subscription.1.ParamPath": "Device.LocalAgent.",
"Device.Subscription.1.Controller": "Device.Controller.1.",
"Device.Subscription.1.TimeToLive": -1,
"Device.Subscription.1.Persistent": true,
"Device.Subscription.2.Enable": true,
"Device.Subscription.2.ID": "sub-periodic-stomp",
"Device.Subscription.2.NotifType": "Periodic",
"Device.Subscription.2.ParamPath": "Device.LocalAgent.",
"Device.Subscription.2.Controller": "Device.Controller.1.",
"Device.Subscription.2.TimeToLive": -1,
"Device.Subscription.2.Persistent": true,
"Device.Subscription.3.Enable": true,
"Device.Subscription.3.ID": "sub-boot-coap",
"Device.Subscription.3.NotifType": "Boot",
"Device.Subscription.3.ParamPath": "Device.LocalAgent.",
"Device.Subscription.3.Controller": "Device.Controller.2.",
"Device.Subscription.3.TimeToLive": -1,
"Device.Subscription.3.Persistent": true,
"Device.Subscription.4.Enable": true,
"Device.Subscription.4.ID": "sub-periodic-coap",
"Device.Subscription.4.NotifType": "Periodic",
"Device.Subscription.4.ParamPath": "Device.LocalAgent.",
"Device.Subscription.4.Controller": "Device.Controller.2.",
"Device.Subscription.4.TimeToLive": -1,
"Device.Subscription.4.Persistent": true,
"Device.Services.HomeAutomationNumberOfEntries": "__NUM_ENTRIES__",
"Device.Services.HomeAutomation.1.CameraNumberOfEntries": "__NUM_ENTRIES__",
"Device.Services.HomeAutomation.1.Camera.1.MaxNumberOfPics": 30,
"Device.Services.HomeAutomation.1.Camera.1.PicNumberOfEntries": "__NUM_ENTRIES__",
"Device.Services.HomeAutomation.1.Camera.1.Pic.__NextInstNum__": 11,
"Device.Services.HomeAutomation.1.Camera.1.Pic.9.URL": "http://localhost:8080/pic1.png",
"Device.Services.HomeAutomation.1.Camera.1.Pic.10.URL": "http://localhost:8080/pic2.png",
"Device.Services.HomeAutomation.1.Camera.2.MaxNumberOfPics": 30,
"Device.Services.HomeAutomation.1.Camera.2.PicNumberOfEntries": "__NUM_ENTRIES__",
"Device.Services.HomeAutomation.1.Camera.2.Pic.__NextInstNum__": 11,
"Device.Services.HomeAutomation.1.Camera.2.Pic.10.URL": "http://localhost:8080/pic5.png",
"Device.Services.HomeAutomation.1.Camera.2.Pic.90.URL": "http://localhost:8080/pic9.png",
"Device.Services.HomeAutomation.1.Camera.2.Pic.100.URL": "http://localhost:8080/pic20.png"
}"""
    return db_contents
def get_dm_file_contents():
    """Return the JSON text of the mocked *data-model* file.

    Maps each supported parameter path to its access level ("readOnly" or
    "readWrite"); "{i}" marks multi-instance (table) path segments.
    """
    dm_contents = """{
"Device.ControllerNumberOfEntries": "readOnly",
"Device.SubscriptionNumberOfEntries": "readOnly",
"Device.LocalAgent.Manufacturer": "readOnly",
"Device.LocalAgent.ManufacturerOUI": "readOnly",
"Device.LocalAgent.ProductClass": "readOnly",
"Device.LocalAgent.SerialNumber": "readOnly",
"Device.LocalAgent.EndpointID": "readOnly",
"Device.LocalAgent.ModelName": "readOnly",
"Device.LocalAgent.HardwareVersion": "readOnly",
"Device.LocalAgent.SoftwareVersion": "readOnly",
"Device.LocalAgent.PeriodicInterval": "readWrite",
"Device.LocalAgent.ProvisioningCode": "readWrite",
"Device.LocalAgent.SupportedProtocols": "readOnly",
"Device.LocalAgent.UpTime": "readOnly",
"Device.LocalAgent.X_ARRIS-COM_IPAddr": "readOnly",
"Device.Time.Enable" : "readWrite",
"Device.Time.Status" : "readOnly",
"Device.Time.NTPServer1" : "readWrite",
"Device.Time.NTPServer2" : "readWrite",
"Device.Time.NTPServer3" : "readWrite",
"Device.Time.NTPServer4" : "readWrite",
"Device.Time.NTPServer5" : "readWrite",
"Device.Time.CurrentLocalTime" : "readOnly",
"Device.Time.LocalTimeZone" : "readWrite",
"Device.Controller.{i}.Enable": "readWrite",
"Device.Controller.{i}.EndpointID": "readWrite",
"Device.Controller.{i}.Protocol": "readWrite",
"Device.Controller.{i}.CoAP.Host": "readWrite",
"Device.Controller.{i}.CoAP.Port": "readWrite",
"Device.Controller.{i}.STOMP.Host": "readWrite",
"Device.Controller.{i}.STOMP.Port": "readWrite",
"Device.Controller.{i}.STOMP.Username": "readWrite",
"Device.Controller.{i}.STOMP.Password": "readWrite",
"Device.Subscription.{i}.Enable": "readWrite",
"Device.Subscription.{i}.ID": "readWrite",
"Device.Subscription.{i}.NotifType": "readWrite",
"Device.Subscription.{i}.ParamPath": "readWrite",
"Device.Subscription.{i}.Controller": "readWrite",
"Device.Subscription.{i}.TimeToLive": "readWrite",
"Device.Subscription.{i}.Persistent": "readWrite",
"Device.Services.HomeAutomationNumberOfEntries": "readOnly",
"Device.Services.HomeAutomation.{i}.CameraNumberOfEntries": "readOnly",
"Device.Services.HomeAutomation.{i}.Camera.{i}.TakePicture()": "readWrite",
"Device.Services.HomeAutomation.{i}.Camera.{i}.MaxNumberOfPics": "readWrite",
"Device.Services.HomeAutomation.{i}.Camera.{i}.PicNumberOfEntries": "readOnly",
"Device.Services.HomeAutomation.{i}.Camera.{i}.Pic.{i}.URL": "readOnly"
}"""
    return dm_contents
"""
Tests for find_params
"""

def _make_mock_db():
    """Build an agent_db.Database whose two backing files are in-memory mocks.

    `open` is patched so the constructor's first read returns the data-model
    file and the second returns the database file. (Every test previously
    duplicated this 5-line setup; it is factored out here.)
    """
    file_mock = mock.mock_open(read_data=get_dm_file_contents())
    db_mock = mock.mock_open(read_data=get_db_file_contents())
    file_mock.side_effect = [file_mock.return_value, db_mock.return_value]
    with mock.patch("builtins.open", file_mock):
        return agent_db.Database("mock_dm.json", "mock_db.json", "intf")

def test_find_param_static_path():
    """A fully-qualified parameter path matches exactly that parameter."""
    my_db = _make_mock_db()
    found_param_list1 = my_db.find_params("Device.ControllerNumberOfEntries")
    found_param_list2 = my_db.find_params("Device.LocalAgent.SupportedProtocols")
    assert len(found_param_list1) == 1
    assert "Device.ControllerNumberOfEntries" in found_param_list1
    assert len(found_param_list2) == 1
    assert "Device.LocalAgent.SupportedProtocols" in found_param_list2

def test_find_param_static_path_exception():
    """An unknown path raises NoSuchPathError."""
    my_db = _make_mock_db()
    try:
        my_db.find_params("Device.NoSuchParameter")
        assert False, "NoSuchPathError Expected"
    except agent_db.NoSuchPathError:
        pass

def test_find_param_partial_path():
    """A partial path (trailing dot) matches every parameter beneath it."""
    my_db = _make_mock_db()
    found_param_list1 = my_db.find_params("Device.LocalAgent.")
    found_param_list2 = my_db.find_params("Device.Services.")
    assert len(found_param_list1) == 13
    assert "Device.LocalAgent.Manufacturer" in found_param_list1
    assert "Device.LocalAgent.ManufacturerOUI" in found_param_list1
    assert "Device.LocalAgent.ProductClass" in found_param_list1
    assert "Device.LocalAgent.SerialNumber" in found_param_list1
    assert "Device.LocalAgent.EndpointID" in found_param_list1
    assert "Device.LocalAgent.ModelName" in found_param_list1
    assert "Device.LocalAgent.HardwareVersion" in found_param_list1
    assert "Device.LocalAgent.SoftwareVersion" in found_param_list1
    assert "Device.LocalAgent.PeriodicInterval" in found_param_list1
    assert "Device.LocalAgent.ProvisioningCode" in found_param_list1
    assert "Device.LocalAgent.SupportedProtocols" in found_param_list1
    assert "Device.LocalAgent.UpTime" in found_param_list1
    assert "Device.LocalAgent.X_ARRIS-COM_IPAddr" in found_param_list1
    assert len(found_param_list2) == 11
    assert "Device.Services.HomeAutomationNumberOfEntries" in found_param_list2
    assert "Device.Services.HomeAutomation.1.CameraNumberOfEntries" in found_param_list2
    assert "Device.Services.HomeAutomation.1.Camera.1.MaxNumberOfPics" in found_param_list2
    assert "Device.Services.HomeAutomation.1.Camera.1.PicNumberOfEntries" in found_param_list2
    assert "Device.Services.HomeAutomation.1.Camera.1.Pic.9.URL" in found_param_list2
    assert "Device.Services.HomeAutomation.1.Camera.1.Pic.10.URL" in found_param_list2
    assert "Device.Services.HomeAutomation.1.Camera.2.MaxNumberOfPics" in found_param_list2
    assert "Device.Services.HomeAutomation.1.Camera.2.PicNumberOfEntries" in found_param_list2
    assert "Device.Services.HomeAutomation.1.Camera.2.Pic.10.URL" in found_param_list2
    assert "Device.Services.HomeAutomation.1.Camera.2.Pic.90.URL" in found_param_list2
    assert "Device.Services.HomeAutomation.1.Camera.2.Pic.100.URL" in found_param_list2

def test_find_param_instance_number_addressing():
    """Concrete instance numbers in the path resolve to single parameters."""
    my_db = _make_mock_db()
    found_param_list1 = my_db.find_params("Device.Controller.1.Enable")
    found_param_list2 = my_db.find_params("Device.Controller.2.STOMP.Username")
    found_param_list3 = my_db.find_params("Device.Services.HomeAutomation.1.Camera.1.Pic.10.URL")
    assert len(found_param_list1) == 1
    assert "Device.Controller.1.Enable" in found_param_list1
    assert len(found_param_list2) == 1
    assert "Device.Controller.2.STOMP.Username" in found_param_list2
    assert len(found_param_list3) == 1
    assert "Device.Services.HomeAutomation.1.Camera.1.Pic.10.URL" in found_param_list3

def test_find_param_instance_number_addressing_no_instance():
    """A valid pattern whose instance does not exist matches nothing."""
    my_db = _make_mock_db()
    found_param_list1 = my_db.find_params("Device.Services.HomeAutomation.1.Camera.1.Pic.1.URL")
    assert len(found_param_list1) == 0

def test_find_param_wildcard_searching():
    """'*' in an instance position matches every existing instance."""
    my_db = _make_mock_db()
    found_param_list1 = my_db.find_params("Device.Controller.*.Enable")
    found_param_list2 = my_db.find_params("Device.Controller.*.STOMP.Username")
    found_param_list3 = my_db.find_params("Device.Services.HomeAutomation.1.Camera.1.Pic.*.URL")
    found_param_list4 = my_db.find_params("Device.Services.HomeAutomation.1.Camera.*.Pic.*.URL")
    assert len(found_param_list1) == 2
    assert "Device.Controller.1.Enable" in found_param_list1
    assert "Device.Controller.2.Enable" in found_param_list1
    assert len(found_param_list2) == 2
    assert "Device.Controller.1.STOMP.Username" in found_param_list2
    assert "Device.Controller.2.STOMP.Username" in found_param_list2
    assert len(found_param_list3) == 2
    assert "Device.Services.HomeAutomation.1.Camera.1.Pic.9.URL" in found_param_list3
    assert "Device.Services.HomeAutomation.1.Camera.1.Pic.10.URL" in found_param_list3
    assert len(found_param_list4) == 5
    assert "Device.Services.HomeAutomation.1.Camera.1.Pic.9.URL" in found_param_list4
    assert "Device.Services.HomeAutomation.1.Camera.1.Pic.10.URL" in found_param_list4
    assert "Device.Services.HomeAutomation.1.Camera.2.Pic.10.URL" in found_param_list4
    assert "Device.Services.HomeAutomation.1.Camera.2.Pic.90.URL" in found_param_list4
    assert "Device.Services.HomeAutomation.1.Camera.2.Pic.100.URL" in found_param_list4
"""
Tests for is_param_writable
"""

def _make_mock_db():
    """Build an agent_db.Database whose two backing files are in-memory mocks.

    `open` is patched so the constructor's first read returns the data-model
    file and the second returns the database file. (Every test previously
    duplicated this 5-line setup; it is factored out here.)
    """
    file_mock = mock.mock_open(read_data=get_dm_file_contents())
    db_mock = mock.mock_open(read_data=get_db_file_contents())
    file_mock.side_effect = [file_mock.return_value, db_mock.return_value]
    with mock.patch("builtins.open", file_mock):
        return agent_db.Database("mock_dm.json", "mock_db.json", "intf")

def test_is_param_writable_static_path():
    """Static paths report the access level from the data model."""
    my_db = _make_mock_db()
    is_param_writable1 = my_db.is_param_writable("Device.ControllerNumberOfEntries")
    is_param_writable2 = my_db.is_param_writable("Device.LocalAgent.PeriodicInterval")
    assert not is_param_writable1, "Attempted a 'readOnly' Parameter"
    assert is_param_writable2, "Attempted a 'readWrite' Parameter"

def test_is_param_writable_static_path_exception():
    """An unknown path raises NoSuchPathError."""
    my_db = _make_mock_db()
    try:
        my_db.is_param_writable("Device.NoSuchParameter")
        assert False, "NoSuchPathError Expected"
    except agent_db.NoSuchPathError:
        pass

def test_is_param_writable_instance_number_addressing():
    """Instance-numbered paths map onto the {i} entries in the data model."""
    my_db = _make_mock_db()
    is_param_writable1 = my_db.is_param_writable("Device.Controller.1.Enable")
    is_param_writable2 = my_db.is_param_writable("Device.Controller.2.STOMP.Username")
    is_param_writable3 = my_db.is_param_writable("Device.Services.HomeAutomation.1.Camera.1.Pic.10.URL")
    is_param_writable4 = my_db.is_param_writable("Device.Services.HomeAutomation.1.Camera.1.Pic.1.URL")
    assert is_param_writable1, "Attempted 'readWrite' with single instance number"
    assert is_param_writable2, "Attempted 'readWrite' with single instance number and sub-object"
    assert not is_param_writable3, "Attempted 'readOnly' with multiple instance numbers"
    assert not is_param_writable4, "Attempted 'readOnly' with multiple instance numbers, but instances don't exist"

def test_is_param_writable_wildcard_searching():
    """Wildcard instance positions also map onto the {i} data-model entries."""
    my_db = _make_mock_db()
    is_param_writable1 = my_db.is_param_writable("Device.Controller.*.Enable")
    is_param_writable2 = my_db.is_param_writable("Device.Controller.*.STOMP.Username")
    is_param_writable3 = my_db.is_param_writable("Device.Services.HomeAutomation.1.Camera.1.Pic.*.URL")
    is_param_writable4 = my_db.is_param_writable("Device.Services.HomeAutomation.1.Camera.*.Pic.*.URL")
    assert is_param_writable1, "Attempted 'readWrite' with wild-card for instance number"
    assert is_param_writable2, "Attempted 'readWrite' with wild-card for instance number and sub-object"
    assert not is_param_writable3, "Attempted 'readOnly' with multiple instance numbers and single wild-card"
    assert not is_param_writable4, "Attempted 'readOnly' with single instance number and multiple wild-cards"
"""
Tests for find_instances
"""

def _make_mock_db():
    """Build an agent_db.Database whose two backing files are in-memory mocks.

    `open` is patched so the constructor's first read returns the data-model
    file and the second returns the database file. (Every test previously
    duplicated this 5-line setup; it is factored out here.)
    """
    file_mock = mock.mock_open(read_data=get_dm_file_contents())
    db_mock = mock.mock_open(read_data=get_db_file_contents())
    file_mock.side_effect = [file_mock.return_value, db_mock.return_value]
    with mock.patch("builtins.open", file_mock):
        return agent_db.Database("mock_dm.json", "mock_db.json", "intf")

def test_find_instances_invalid_obj():
    """An unknown object path raises NoSuchPathError."""
    my_db = _make_mock_db()
    try:
        my_db.find_instances("Device.NoSuchObj.")
        assert False, "NoSuchPathError Expected"
    except agent_db.NoSuchPathError:
        pass

def test_find_instances_full_path():
    """A parameter (non-object) path raises NoSuchPathError."""
    my_db = _make_mock_db()
    try:
        my_db.find_instances("Device.ControllerNumberOfEntries")
        assert False, "NoSuchPathError Expected"
    except agent_db.NoSuchPathError:
        pass

def test_find_instances_static_path():
    """A non-table object path raises NoSuchPathError."""
    my_db = _make_mock_db()
    try:
        my_db.find_instances("Device.LocalAgent.")
        assert False, "NoSuchPathError Expected"
    except agent_db.NoSuchPathError:
        pass

def test_find_instances_static_table():
    """Table paths return each existing instance as a partial path."""
    my_db = _make_mock_db()
    found_instances_list1 = my_db.find_instances("Device.Controller.")
    found_instances_list2 = my_db.find_instances("Device.Subscription.")
    assert len(found_instances_list1) == 2
    assert "Device.Controller.1." in found_instances_list1
    assert "Device.Controller.2." in found_instances_list1
    assert len(found_instances_list2) == 4
    assert "Device.Subscription.1." in found_instances_list2
    assert "Device.Subscription.2." in found_instances_list2
    assert "Device.Subscription.3." in found_instances_list2
    assert "Device.Subscription.4." in found_instances_list2

def test_find_instances_instance_number_addressing():
    """Nested tables addressed by instance number list their instances."""
    my_db = _make_mock_db()
    found_instances_list1 = my_db.find_instances("Device.Services.HomeAutomation.1.Camera.2.Pic.")
    assert len(found_instances_list1) == 3
    assert "Device.Services.HomeAutomation.1.Camera.2.Pic.10." in found_instances_list1
    assert "Device.Services.HomeAutomation.1.Camera.2.Pic.90." in found_instances_list1
    assert "Device.Services.HomeAutomation.1.Camera.2.Pic.100." in found_instances_list1

def test_find_instances_instance_number_addressing_no_instance():
    """A table under a non-existent instance yields no instances."""
    my_db = _make_mock_db()
    found_instances_list1 = my_db.find_instances("Device.Services.HomeAutomation.1.Camera.3.Pic.")
    assert len(found_instances_list1) == 0

def test_find_instances_wildcard_searching():
    """'*' in an instance position expands across all matching tables."""
    my_db = _make_mock_db()
    found_instances_list1 = my_db.find_instances("Device.Services.HomeAutomation.1.Camera.*.Pic.")
    assert len(found_instances_list1) == 5
    assert "Device.Services.HomeAutomation.1.Camera.1.Pic.9." in found_instances_list1
    assert "Device.Services.HomeAutomation.1.Camera.1.Pic.10." in found_instances_list1
    assert "Device.Services.HomeAutomation.1.Camera.2.Pic.10." in found_instances_list1
    assert "Device.Services.HomeAutomation.1.Camera.2.Pic.90." in found_instances_list1
    assert "Device.Services.HomeAutomation.1.Camera.2.Pic.100." in found_instances_list1
"""
Tests for find_objects
"""
def test_find_objects_invalid_obj():
my_mock = dm_mock = mock.mock_open(read_data=get_dm_file_contents())
db_mock = mock.mock_open(read_data=get_db_file_contents())
my_mock.side_effect = [dm_mock.return_value, db_mock.return_value]
with mock.patch("builtins.open", my_mock):
my_db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
try:
my_db.find_objects("Device.NoSuchObj.")
assert False, "NoSuchPathError Expected"
except agent_db.NoSuchPathError:
pass
def test_find_objects_full_path():
my_mock = dm_mock = mock.mock_open(read_data=get_dm_file_contents())
db_mock = mock.mock_open(read_data=get_db_file_contents())
my_mock.side_effect = [dm_mock.return_value, db_mock.return_value]
with mock.patch("builtins.open", my_mock):
my_db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
try:
my_db.find_objects("Device.ControllerNumberOfEntries")
assert False, "NoSuchPathError Expected"
except agent_db.NoSuchPathError:
pass
def test_find_objects_static_path():
my_mock = dm_mock = mock.mock_open(read_data=get_dm_file_contents())
db_mock = mock.mock_open(read_data=get_db_file_contents())
my_mock.side_effect = [dm_mock.return_value, db_mock.return_value]
with mock.patch("builtins.open", my_mock):
my_db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
found_objects_list1 = my_db.find_objects("Device.LocalAgent.")
assert len(found_objects_list1) == 1
assert "Device.LocalAgent." in found_objects_list1
def test_find_objects_instance_number_addressing():
my_mock = dm_mock = mock.mock_open(read_data=get_dm_file_contents())
db_mock = mock.mock_open(read_data=get_db_file_contents())
my_mock.side_effect = [dm_mock.return_value, db_mock.return_value]
with mock.patch("builtins.open", my_mock):
my_db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
found_instances_list1 = my_db.find_objects("Device.Services.HomeAutomation.1.Camera.2.")
assert len(found_instances_list1) == 1
assert "Device.Services.HomeAutomation.1.Camera.2." in found_instances_list1
def test_find_objects_instance_number_addressing_no_instance():
my_mock = dm_mock = mock.mock_open(read_data=get_dm_file_contents())
db_mock = mock.mock_open(read_data=get_db_file_contents())
my_mock.side_effect = [dm_mock.return_value, db_mock.return_value]
with mock.patch("builtins.open", my_mock):
my_db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
found_instances_list1 = my_db.find_objects("Device.Services.HomeAutomation.1.Camera.3.")
assert len(found_instances_list1) == 0
def test_find_objects_wildcard_searching_and_instance_number():
    """A wildcard segment expands to every instance that has the trailing path."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        matches = db.find_objects("Device.Services.HomeAutomation.1.Camera.*.Pic.10.")
        assert len(matches) == 2
        for path in ("Device.Services.HomeAutomation.1.Camera.1.Pic.10.",
                     "Device.Services.HomeAutomation.1.Camera.2.Pic.10."):
            assert path in matches
def test_find_objects_multiple_wildcard_searching():
    """Multiple wildcard segments expand across every matching instance combination."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        matches = db.find_objects("Device.Services.HomeAutomation.1.Camera.*.Pic.*.")
        assert len(matches) == 5
        for path in ("Device.Services.HomeAutomation.1.Camera.1.Pic.9.",
                     "Device.Services.HomeAutomation.1.Camera.1.Pic.10.",
                     "Device.Services.HomeAutomation.1.Camera.2.Pic.10.",
                     "Device.Services.HomeAutomation.1.Camera.2.Pic.90.",
                     "Device.Services.HomeAutomation.1.Camera.2.Pic.100."):
            assert path in matches
"""
Tests for find_impl_objects
"""
def test_find_impl_objects_invalid_obj():
    """An unknown object path must raise NoSuchPathError (next_level=False)."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        raised = False
        try:
            db.find_impl_objects("Device.NoSuchObj.", False)
        except agent_db.NoSuchPathError:
            raised = True
        assert raised, "NoSuchPathError Expected"
def test_find_impl_objects_invalid_obj_next_level():
    """An unknown object path must raise NoSuchPathError (next_level=True)."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        raised = False
        try:
            db.find_impl_objects("Device.NoSuchObj.", True)
        except agent_db.NoSuchPathError:
            raised = True
        assert raised, "NoSuchPathError Expected"
def test_find_impl_objects_full_path():
    """A full parameter path must raise NoSuchPathError (next_level=False)."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        raised = False
        try:
            db.find_impl_objects("Device.ControllerNumberOfEntries", False)
        except agent_db.NoSuchPathError:
            raised = True
        assert raised, "NoSuchPathError Expected"
def test_find_impl_objects_full_path_next_level():
    """A full parameter path must raise NoSuchPathError (next_level=True)."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        raised = False
        try:
            db.find_impl_objects("Device.ControllerNumberOfEntries", True)
        except agent_db.NoSuchPathError:
            raised = True
        assert raised, "NoSuchPathError Expected"
def test_find_impl_objects_static_table():
    """Searching a static table reports the implemented sub-object templates."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        impl1 = db.find_impl_objects("Device.Controller.{i}.", False)
        impl2 = db.find_impl_objects("Device.Subscription.", False)
        impl3 = db.find_impl_objects("Device.Services.", False)
        assert len(impl1) == 2
        assert "Device.Controller.{i}.CoAP." in impl1
        assert "Device.Controller.{i}.STOMP." in impl1
        assert len(impl2) == 1
        assert "Device.Subscription.{i}." in impl2
        # Only table templates are reported, not the intermediate static
        # objects (e.g. "Device.Services.HomeAutomation." is absent).
        assert len(impl3) == 3
        for path in ("Device.Services.HomeAutomation.{i}.",
                     "Device.Services.HomeAutomation.{i}.Camera.{i}.",
                     "Device.Services.HomeAutomation.{i}.Camera.{i}.Pic.{i}."):
            assert path in impl3
def test_find_impl_objects_static_table_next_level():
    """next_level=True limits results to the immediate child objects."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        impl1 = db.find_impl_objects("Device.Controller.", True)
        impl2 = db.find_impl_objects("Device.Controller.{i}.", True)
        impl3 = db.find_impl_objects("Device.Services.", True)
        assert len(impl1) == 1
        assert "Device.Controller.{i}." in impl1
        assert len(impl2) == 2
        assert "Device.Controller.{i}.CoAP." in impl2
        assert "Device.Controller.{i}.STOMP." in impl2
        # Deeper templates such as "...HomeAutomation.{i}." are excluded.
        assert len(impl3) == 1
        assert "Device.Services.HomeAutomation." in impl3
def test_find_impl_objects_instance_number_addressing():
    """Instance-number paths report implemented sub-objects as templates."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        impl1 = db.find_impl_objects("Device.Controller.1.", False)
        impl2 = db.find_impl_objects("Device.Services.HomeAutomation.1.", False)
        assert len(impl1) == 2
        assert "Device.Controller.{i}.CoAP." in impl1
        assert "Device.Controller.{i}.STOMP." in impl1
        # Intermediate static objects (e.g. "...{i}.Camera.") are excluded.
        assert len(impl2) == 2
        assert "Device.Services.HomeAutomation.{i}.Camera.{i}." in impl2
        assert "Device.Services.HomeAutomation.{i}.Camera.{i}.Pic.{i}." in impl2
def test_find_impl_objects_instance_number_addressing_next_level():
    """Instance-number paths with next_level=True report only direct children."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        impl1 = db.find_impl_objects("Device.Controller.2.", True)
        impl2 = db.find_impl_objects("Device.Services.HomeAutomation.1.", True)
        impl3 = db.find_impl_objects("Device.Services.HomeAutomation.1.Camera.1.Pic.1.", True)
        assert len(impl1) == 2
        assert "Device.Controller.{i}.CoAP." in impl1
        assert "Device.Controller.{i}.STOMP." in impl1
        assert len(impl2) == 1
        assert "Device.Services.HomeAutomation.{i}.Camera." in impl2
        # A leaf instance has no implemented children.
        assert len(impl3) == 0
def test_find_impl_objects_instance_number_addressing_no_instance():
    """Templates are reported even when the addressed instance doesn't exist."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        impl1 = db.find_impl_objects("Device.Controller.5.", False)
        impl2 = db.find_impl_objects("Device.Services.HomeAutomation.2.", False)
        assert len(impl1) == 2
        assert "Device.Controller.{i}.CoAP." in impl1
        assert "Device.Controller.{i}.STOMP." in impl1
        # Intermediate static objects (e.g. "...{i}.Camera.") are excluded.
        assert len(impl2) == 2
        assert "Device.Services.HomeAutomation.{i}.Camera.{i}." in impl2
        assert "Device.Services.HomeAutomation.{i}.Camera.{i}.Pic.{i}." in impl2
def test_find_impl_objects_instance_number_addressing_no_instance_next_level():
    """Direct-child templates are reported even for a nonexistent instance."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        impl1 = db.find_impl_objects("Device.Controller.5.", True)
        impl2 = db.find_impl_objects("Device.Services.HomeAutomation.2.", True)
        assert len(impl1) == 2
        assert "Device.Controller.{i}.CoAP." in impl1
        assert "Device.Controller.{i}.STOMP." in impl1
        assert len(impl2) == 1
        assert "Device.Services.HomeAutomation.{i}.Camera." in impl2
def test_find_impl_objects_wildcard_searching():
    """Wildcard paths report the same templates as instance-number paths."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        impl1 = db.find_impl_objects("Device.Controller.*.", False)
        impl2 = db.find_impl_objects("Device.Services.HomeAutomation.*.", False)
        assert len(impl1) == 2
        assert "Device.Controller.{i}.CoAP." in impl1
        assert "Device.Controller.{i}.STOMP." in impl1
        # Intermediate static objects (e.g. "...{i}.Camera.") are excluded.
        assert len(impl2) == 2
        assert "Device.Services.HomeAutomation.{i}.Camera.{i}." in impl2
        assert "Device.Services.HomeAutomation.{i}.Camera.{i}.Pic.{i}." in impl2
def test_find_impl_objects_wildcard_searching_next_level():
    """Wildcard paths with next_level=True report only direct children."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        impl1 = db.find_impl_objects("Device.Controller.*.", True)
        impl2 = db.find_impl_objects("Device.Services.HomeAutomation.*.", True)
        assert len(impl1) == 2
        assert "Device.Controller.{i}.CoAP." in impl1
        assert "Device.Controller.{i}.STOMP." in impl1
        assert len(impl2) == 1
        assert "Device.Services.HomeAutomation.{i}.Camera." in impl2
"""
Tests for get
"""
def test_get_uptime():
    """UpTime is the difference in seconds between 'now' and agent start."""
    boot_epoch = time.mktime(datetime.datetime(2016, 9, 20, 20, 15, 10).timetuple())
    now_epoch = time.mktime(datetime.datetime(2016, 9, 21, 1, 16, 15).timetuple())
    # time.time() is consulted once at startup and once at get() time.
    time_mock = mock.Mock(side_effect=[boot_epoch, now_epoch])
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        with mock.patch("time.time", time_mock):
            db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
            # 5 hours, 1 minute, 5 seconds -> 18065 seconds
            assert db.get("Device.LocalAgent.UpTime") == 18065
def test_get_num_entries():
    """NumberOfEntries parameters report the instance counts of their tables."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        assert db.get("Device.ControllerNumberOfEntries") == 2
        assert db.get("Device.SubscriptionNumberOfEntries") == 4
        assert db.get("Device.Services.HomeAutomation.1.CameraNumberOfEntries") == 2
        assert db.get("Device.Services.HomeAutomation.1.Camera.2.PicNumberOfEntries") == 3
def test_get_ip_addr():
    """The IP-address parameter is served by the IPAddr utility helper."""
    ip_mock = mock.Mock(return_value="10.99.12.8")
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        with mock.patch("agent.utils.IPAddr.get_ip_addr", ip_mock):
            db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
            assert db.get("Device.LocalAgent.X_ARRIS-COM_IPAddr") == "10.99.12.8"
def test_get_currrent_local_time():
    """CurrentLocalTime is formatted as an ISO-8601 string with UTC offset."""
    time_mock = mock.Mock()
    time_mock.now.return_value = datetime.datetime(2016, 9, 20, 20, 15, 10)
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        with mock.patch("datetime.datetime", time_mock):
            db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
            assert db.get("Device.Time.CurrentLocalTime") == "2016-09-20T20:15:10-06:00"
def test_get_normal_param():
    """Plain parameters are read straight from the backing database file."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        assert db.get("Device.LocalAgent.Manufacturer") == "ARRIS"
        assert db.get("Device.Controller.1.Protocol") == "STOMP"
        assert db.get("Device.Subscription.3.ID") == "sub-boot-coap"
        assert db.get("Device.Services.HomeAutomation.1.Camera.2.MaxNumberOfPics") == 30
def test_get_no_such_path():
    """Getting an unknown parameter must raise NoSuchPathError."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        raised = False
        try:
            db.get("Device.NoSuchParam")
        except agent_db.NoSuchPathError:
            raised = True
        assert raised, "NoSuchPathError Expected"
"""
Tests for update
NOTE: Mocking the _save method
"""
def test_update_param():
    """Updating a parameter persists the database exactly once."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        with mock.patch.object(agent_db.Database, '_save') as save_mock:
            db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
            db.update("Device.LocalAgent.PeriodicInterval", 60)
            save_mock.assert_called_once_with()
def test_update_no_such_path():
    """Updating an unknown parameter must raise NoSuchPathError."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        # Stub persistence so no file writes occur.
        db._save = mock.MagicMock()
        raised = False
        try:
            db.update("Device.NoSuchParam", "ZZZ")
        except agent_db.NoSuchPathError:
            raised = True
        assert raised, "NoSuchPathError Expected"
"""
Tests for insert
NOTE: Mocking the _save method
"""
def test_insert_instance():
    """Inserting a table instance persists the database."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        with mock.patch.object(agent_db.Database, '_save') as save_mock:
            db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
            db.insert("Device.Services.HomeAutomation.1.Camera.2.Pic.")
            # _save gets called twice during an insert, so don't assert "once".
            save_mock.assert_called()
def test_insert_instance_no_such_path():
    """Inserting into an unknown table must raise NoSuchPathError."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        # Stub persistence so no file writes occur.
        db._save = mock.MagicMock()
        raised = False
        try:
            db.insert("Device.NoSuchPath.")
        except agent_db.NoSuchPathError:
            raised = True
        assert raised, "NoSuchPathError Expected"
"""
Tests for delete
NOTE: Mocking the _save method
"""
def test_delete_instance():
    """Deleting an existing instance persists the database exactly once."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        with mock.patch.object(agent_db.Database, '_save') as save_mock:
            db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
            db.delete("Device.Services.HomeAutomation.1.Camera.2.Pic.100.")
            save_mock.assert_called_once_with()
def test_delete_instance_no_such_path():
    """Deleting from an unknown table must raise NoSuchPathError."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        # Stub persistence so no file writes occur.
        db._save = mock.MagicMock()
        raised = False
        try:
            db.delete("Device.NoSuchPath.1.")
        except agent_db.NoSuchPathError:
            raised = True
        assert raised, "NoSuchPathError Expected"
def test_delete_instance_no_such_instance():
    """Deleting a missing instance is tolerated; an unknown path still raises."""
    dm_open = mock.mock_open(read_data=get_dm_file_contents())
    db_open = mock.mock_open(read_data=get_db_file_contents())
    # First open() serves the data-model file, the second the database file.
    dm_open.side_effect = [dm_open.return_value, db_open.return_value]
    with mock.patch("builtins.open", dm_open):
        db = agent_db.Database("mock_dm.json", "mock_db.json", "intf")
        # Stub persistence so no file writes occur.
        db._save = mock.MagicMock()
        raised = False
        try:
            # Known table, nonexistent instance: must not raise.
            db.delete("Device.Services.HomeAutomation.1.Camera.2.Pic.999.")
            # Unknown table: must raise.
            db.delete("Device.NoSuchPath.1.")
        except agent_db.NoSuchPathError:
            raised = True
        assert raised, "NoSuchPathError Expected"
| 46.93359
| 115
| 0.725228
| 6,789
| 48,764
| 4.886729
| 0.049345
| 0.049192
| 0.04657
| 0.047263
| 0.813027
| 0.78813
| 0.764951
| 0.724198
| 0.693212
| 0.670244
| 0
| 0.01826
| 0.153207
| 48,764
| 1,038
| 116
| 46.978805
| 0.785169
| 0.040624
| 0
| 0.517787
| 0
| 0.009223
| 0.366272
| 0.232004
| 0
| 0
| 0
| 0
| 0.230567
| 1
| 0.067194
| false
| 0.025033
| 0.00527
| 0
| 0.075099
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6f9d758a9564bd551091d79c5a4bcb4b82871f69
| 163
|
py
|
Python
|
maro/event_buffer/__init__.py
|
VinayaSathyanarayana/maro
|
0ba55f36d89c235ef3af04efbac78b3885d8695d
|
[
"MIT"
] | 1
|
2020-09-30T09:31:05.000Z
|
2020-09-30T09:31:05.000Z
|
maro/event_buffer/__init__.py
|
VinayaSathyanarayana/maro
|
0ba55f36d89c235ef3af04efbac78b3885d8695d
|
[
"MIT"
] | null | null | null |
maro/event_buffer/__init__.py
|
VinayaSathyanarayana/maro
|
0ba55f36d89c235ef3af04efbac78b3885d8695d
|
[
"MIT"
] | null | null | null |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from .event_buffer import EventBuffer, Event, EventState, EventCategory, DECISION_EVENT
| 27.166667
| 87
| 0.803681
| 19
| 163
| 6.789474
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128834
| 163
| 5
| 88
| 32.6
| 0.908451
| 0.417178
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
82f82300bd7c02ea99cb5deb6ff2f53f7e3de504
| 175
|
py
|
Python
|
dejgre2/magazine/tests/fixtures/producer_fixture.py
|
dontkillme/GameLender
|
db791a64633d6f7b84f626866a02f26b8c151d1e
|
[
"Apache-2.0"
] | null | null | null |
dejgre2/magazine/tests/fixtures/producer_fixture.py
|
dontkillme/GameLender
|
db791a64633d6f7b84f626866a02f26b8c151d1e
|
[
"Apache-2.0"
] | null | null | null |
dejgre2/magazine/tests/fixtures/producer_fixture.py
|
dontkillme/GameLender
|
db791a64633d6f7b84f626866a02f26b8c151d1e
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from magazine.tests.factory.producer_factory import ProducerFactory
@pytest.fixture(name="producer")
def create_producer():
    """Provide a freshly built Producer instance as the `producer` fixture."""
    producer = ProducerFactory.create()
    return producer
| 25
| 67
| 0.817143
| 20
| 175
| 7.05
| 0.65
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091429
| 175
| 7
| 68
| 25
| 0.886792
| 0
| 0
| 0
| 0
| 0
| 0.045455
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
d22270977a28102163edf78d838a2041e07bd184
| 109
|
py
|
Python
|
GIX_envtest.py
|
janice-lin/GIX_GIthub_Lab
|
d2b3e7866e5e193dbfb9e5b3e0431cb50d1e9a94
|
[
"CNRI-Python"
] | null | null | null |
GIX_envtest.py
|
janice-lin/GIX_GIthub_Lab
|
d2b3e7866e5e193dbfb9e5b3e0431cb50d1e9a94
|
[
"CNRI-Python"
] | null | null | null |
GIX_envtest.py
|
janice-lin/GIX_GIthub_Lab
|
d2b3e7866e5e193dbfb9e5b3e0431cb50d1e9a94
|
[
"CNRI-Python"
] | null | null | null |
import sys
import numpy as np
# Report the interpreter and NumPy versions for environment verification.
for label, version in (("Python", sys.version), ("Numpy", np.__version__)):
    print(label + " version: " + version)
| 18.166667
| 39
| 0.752294
| 16
| 109
| 4.875
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119266
| 109
| 5
| 40
| 21.8
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0.284404
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
d234a6be63100a491cc2a7306708a769d2f1cf61
| 180,343
|
py
|
Python
|
slingen/src/dsls/ll2sll.py
|
danielesgit/slingen
|
e7cfee7f6f2347b57eb61a077746c9309a85411c
|
[
"BSD-3-Clause"
] | 23
|
2018-03-13T07:52:26.000Z
|
2022-03-24T02:32:00.000Z
|
slingen/src/dsls/ll2sll.py
|
danielesgit/slingen
|
e7cfee7f6f2347b57eb61a077746c9309a85411c
|
[
"BSD-3-Clause"
] | 2
|
2018-09-28T18:29:25.000Z
|
2019-02-20T13:22:19.000Z
|
slingen/src/dsls/ll2sll.py
|
danielesgit/slingen
|
e7cfee7f6f2347b57eb61a077746c9309a85411c
|
[
"BSD-3-Clause"
] | 3
|
2018-06-13T13:51:57.000Z
|
2020-01-11T14:47:02.000Z
|
'''
Created on Jan 23, 2015
@author: danieles
'''
import re
from sympy import sympify
from islpy import Set, Map, format, Context, Space, align_spaces, Constraint, Aff, PwAff, dim_type
from src.dsls.ll import Assign, Matrix, ConstantMatrix, AllEntriesConstantMatrix, ZeroMatrix, Triangular, Sqrt, Add, Sub, Neg, Mul, LDiv, Div, T, Kro, G, S, HRed, PMul, Index, fHbs, globalSSAIndex,\
scalar_block, Quantity, Tile, LowerTriangular, UpperTriangular, llBlock, llFor, llIf, llStmt
from src.dsls.sigmall import sllProgram, parseSigmaLL, Sum
from src.dsls.processing import reorderIdxList
from copy import deepcopy
def rewriteToSigma_old(llprog, opts):
    """Translate an LL program to Sigma-LL one statement at a time (legacy path)."""
    rewriter = ToPolySigma(llprog)
    sllprog = sllProgram()
    for stmt in llprog.stmtList:
        sllprog.extend(rewriter.apply(stmt.eq, opts))
    return sllprog
def rewriteToSigma(llprog, opts):
    """Translate an LL program into a Sigma-LL program."""
    rewriter = ToPolySigma(llprog, opts)
    return sllProgram(rewriter.apply())
#-------------------- LL -> Sigma-LL ---------------------
class ToSigma(object):
    def __init__(self):
        # The translator keeps no state of its own; just chain to object.
        super(ToSigma, self).__init__()
def apply(self, root, opts):
return getattr(self, root.__class__.__name__)(root, opts)
def Assign(self, expr, opts):
lhs = getattr(self, expr.inexpr[0].__class__.__name__)(expr.inexpr[0], opts)
rhs = getattr(self, expr.inexpr[1].__class__.__name__)(expr.inexpr[1], opts)
# mLhs = lhs.getOut()
# mRhs = rhs.getOut()
#
# if mLhs.attr['o']:
# src = mRhs
# dst = mLhs
# sexpr = rhs
# else:
# src = mLhs
# dst = mRhs
# sexpr = lhs
#
# #Replace the PhysLayout of the destination with the one of the source
# if not sexpr.reqAss and not dst.attr['i']:
# srcPhys = ir.icode.bindingTable.getPhysicalLayout(src)
# dstPhys = ir.icode.bindingTable.getPhysicalLayout(dst)
# ir.icode.bindingTable.replaceConnectedPhysicalLayout(srcPhys, dstPhys, sexpr)
# if not ir.icode.bindingTable.existPhysicalLayout(srcPhys):
# ir.icode.declare.remove(srcPhys)
return Assign(lhs, rhs)
def Tile(self, expr, opts):
return getattr(self, expr.inexpr[0].__class__.__name__)(expr.inexpr[0], opts)
    def Scalar(self, expr, opts):
        # Scalars are translated exactly like generic matrices.
        return self.Matrix(expr, opts)
    def SquaredMatrix(self, expr, opts):
        # Square matrices need no special handling; defer to Matrix.
        return self.Matrix(expr, opts)
    def LowerTriangular(self, expr, opts):
        # Lower-triangular structure is not exploited here; defer to Matrix.
        return self.Matrix(expr, opts)
    def UpperTriangular(self, expr, opts):
        # Upper-triangular structure is not exploited here; defer to Matrix.
        return self.Matrix(expr, opts)
    def Symmetric(self, expr, opts):
        # Symmetry is not exploited here; defer to Matrix.
        return self.Matrix(expr, opts)
def Matrix(self, expr, opts):
sexpr = expr.duplicate()
# if not sexpr.attr['o'] and sexpr.isScalar():
# physLayout = Scalars(sexpr.name, sexpr.size, opts, isParam=True)
# else:
# physLayout = Array(sexpr.name, sexpr.size, opts, isOut=sexpr.attr['o'])
# if ir.icode.bindingTable.addBinding(sexpr, physLayout):
# ir.icode.signature += [physLayout]
return sexpr
    def T(self, expr, opts):
        """Translate a transpose into a Sigma-LL Sum of gather/transpose/scatter.

        Builds a loop-index nest (idxList) that walks the block structure of
        the operand down to the base level, then wraps a gathered tile in T()
        and scatters it back at the transposed position.
        """
        sub = getattr(self, expr.inexpr[0].__class__.__name__)(expr.inexpr[0], opts)
        subOut = sub.getOut()
        # A scalar is its own transpose: nothing to build.
        if subOut.isScalar():
            return sub
        blk = expr.getOut()
        M,N = sympify(blk.size[0]), sympify(blk.size[1])
        topSize = blk.getPartitionSize(0,0)
        SI,SJ = sympify(topSize[0]), sympify(topSize[1])
        # Top-level indices span the block grid of the result.
        I,J = Index("I", sympify(0), M, SI, True), Index("J", sympify(0), N, SJ, True)
        idxList = [I,J]
        uFactors = [sympify(2)]*2
        iPriority = { I.i: {'t': 0, 's': 0, 'i': 0}, J.i:{'t': 0, 's': 0, 'i': 0} }
        flatM,flatN = sympify(blk.getFlatSize()[0]), sympify(blk.getFlatSize()[1])
        lev = 1
        # Representative blocks: body (0,0), last row (M-1,0), last column (0,N-1)
        # capture the three tile shapes of a possibly non-uniform partition.
        bBlk, hBlk, vBlk = blk.getBlock(0,0), blk.getBlock(M-1,0), blk.getBlock(0,N-1)
        bFSize, hFSize, vFSize = sympify(bBlk.getFlatSize()), sympify(hBlk.getFlatSize()), sympify(vBlk.getFlatSize())
        iName, jName = "i"*lev, "j"*lev
        maxI, maxJ = SI*bFSize[0], SJ*bFSize[1]
        # Piecewise bounds/strides: the I.i/SI and J.i/SJ terms switch to the
        # boundary-tile sizes on the last block row/column.
        bi,ei,si = I.i*bFSize[0], maxI + I.i/SI*(flatM - maxI), bFSize[0] + I.i/SI*(hFSize[0] - bFSize[0])
        bj,ej,sj = J.i*bFSize[1], maxJ + J.i/SJ*(flatN - maxJ), bFSize[1] + J.i/SJ*(vFSize[1] - bFSize[1])
        i,j = Index(iName, bi, ei, si), Index(jName, bj, ej, sj)
        idxList += [i,j]
        uFactors += [sympify(1),sympify(1)]
        iPriority.update({ i.i: {'t': lev, 's': 0, 'i': 0}, j.i:{'t': lev, 's': 0, 'i': 0} })
        l,r = fHbs(si,flatM,i.i,1), fHbs(sj,flatN,j.i,1)
        lev+=1
        # With intrinsics the innermost (vector) level is left to codegen.
        baselevel = 2 if opts['useintrinsics'] else 1
        # Descend through nested block levels, composing the access functions.
        while bBlk.level > baselevel:
            bBlk, hBlk, vBlk = bBlk.getBlock(0,0), hBlk.getBlock(0,0), vBlk.getBlock(0,0)
            bFSize, hFSize, vFSize = bBlk.getFlatSize(), hBlk.getFlatSize(), vBlk.getFlatSize()
            iName, jName = "i"*lev, "j"*lev
            bi,ei,si = sympify(0), si, bFSize[0] + I.i/SI*(hFSize[0] - bFSize[0])
            bj,ej,sj = sympify(0), sj, bFSize[1] + J.i/SJ*(vFSize[1] - bFSize[1])
            i,j = Index(iName, bi, ei, si), Index(jName, bj, ej, sj)
            idxList += [i,j]
            uFactors += [sympify(1),sympify(1)]
            iPriority.update({ i.i: {'t': lev, 's': 0, 'i': 0}, j.i:{'t': lev, 's': 0, 'i': 0} })
            l,r = l.compose(fHbs(si,ei,i.i,1)), r.compose(fHbs(sj,ej,j.i,1))
            lev+=1
        # Reorder indices based on their priorities.
        ordIdxList = reorderIdxList(idxList, iPriority, opts)
        # Fully unroll the two innermost loops when requested.
        if opts['unrollinner']:
            for i in range(len(ordIdxList)-2, len(ordIdxList)):
                uFactors[i] = ordIdxList[i].e - ordIdxList[i].b
        # Gather with swapped access functions, transpose the tile, scatter back.
        gat = G(r, sub, l)
        sca = S(l, T(gat), r)
        sexpr = Sum([ sca ], ordIdxList, uFactors, outerIdx=[ I.i, J.i ], outDep=[ sca.fL.of(0), sca.fR.of(0) ], forceInitIdx=[ I.i, J.i ], iPriority=iPriority)
        return sexpr
    def Add(self, expr, opts):
        """Translate an addition into a Sigma-LL Sum of gather/add/scatter.

        Both operands are translated first; an outer loop nest spans the
        (possibly non-uniform) block structure so arbitrary sizes are handled.
        """
        lhs = getattr(self, expr.inexpr[0].__class__.__name__)(expr.inexpr[0], opts)
        rhs = getattr(self, expr.inexpr[1].__class__.__name__)(expr.inexpr[1], opts)
        blk = expr.getOut()
        M,N = sympify(blk.size[0]), sympify(blk.size[1])
        topSize = blk.getPartitionSize(0,0)
        SI,SJ = sympify(topSize[0]), sympify(topSize[1])
        # Top-level indices span the block grid of the result.
        I,J = Index("I", sympify(0), M, SI, True), Index("J", sympify(0), N, SJ, True)
        idxList = [I,J]
        uFactors = [sympify(2)]*2
        iPriority = { I.i: {'t': 0, 's': 0, 'i': 0}, J.i:{'t': 0, 's': 1, 'i': 0} }
        flatM,flatN = sympify(blk.getFlatSize()[0]), sympify(blk.getFlatSize()[1])
        lev = 1
        # Representative blocks: body (0,0), last row (M-1,0), last column (0,N-1)
        # capture the tile shapes of a possibly non-uniform partition.
        bBlk, hBlk, vBlk = blk.getBlock(0,0), blk.getBlock(M-1,0), blk.getBlock(0,N-1)
        bFSize, hFSize, vFSize = sympify(bBlk.getFlatSize()), sympify(hBlk.getFlatSize()), sympify(vBlk.getFlatSize())
        iName, jName = "i"*lev, "j"*lev
        maxI, maxJ = SI*bFSize[0], SJ*bFSize[1]
        # Piecewise bounds/strides: the I.i/SI and J.i/SJ terms switch to the
        # boundary-tile sizes on the last block row/column.
        bi,ei,si = I.i*bFSize[0], maxI + I.i/SI*(flatM - maxI), bFSize[0] + I.i/SI*(hFSize[0] - bFSize[0])
        bj,ej,sj = J.i*bFSize[1], maxJ + J.i/SJ*(flatN - maxJ), bFSize[1] + J.i/SJ*(vFSize[1] - bFSize[1])
        i,j = Index(iName, bi, ei, si), Index(jName, bj, ej, sj)
        idxList += [i,j]
        uFactors += [sympify(1),sympify(1)]
        iPriority.update({ i.i: {'t': lev, 's': 0, 'i': 0}, j.i:{'t': lev, 's': 1, 'i': 0} })
        l,r = fHbs(si,flatM,i.i,1), fHbs(sj,flatN,j.i,1)
        lev+=1
        # With intrinsics the innermost (vector) level is left to codegen.
        baselevel = 2 if opts['useintrinsics'] else 1
        # Descend through nested block levels, composing the access functions.
        while bBlk.level > baselevel:
            bBlk, hBlk, vBlk = bBlk.getBlock(0,0), hBlk.getBlock(0,0), vBlk.getBlock(0,0)
            bFSize, hFSize, vFSize = bBlk.getFlatSize(), hBlk.getFlatSize(), vBlk.getFlatSize()
            iName, jName = "i"*lev, "j"*lev
            bi,ei,si = sympify(0), si, bFSize[0] + I.i/SI*(hFSize[0] - bFSize[0])
            bj,ej,sj = sympify(0), sj, bFSize[1] + J.i/SJ*(vFSize[1] - bFSize[1])
            i,j = Index(iName, bi, ei, si), Index(jName, bj, ej, sj)
            idxList += [i,j]
            uFactors += [sympify(1),sympify(1)]
            iPriority.update({ i.i: {'t': lev, 's': 0, 'i': 0}, j.i:{'t': lev, 's': 1, 'i': 0} })
            l,r = l.compose(fHbs(si,ei,i.i,1)), r.compose(fHbs(sj,ej,j.i,1))
            lev+=1
        # Reorder indices based on their priorities.
        ordIdxList = reorderIdxList(idxList, iPriority, opts)
        # Fully unroll the two innermost loops when requested.
        if opts['unrollinner']:
            for i in range(len(ordIdxList)-2, len(ordIdxList)):
                uFactors[i] = ordIdxList[i].e - ordIdxList[i].b
        # Gather matching tiles from both operands, add, scatter the result.
        glhs = G(l, lhs, r)
        grhs = G(l, rhs, r)
        ssum = glhs + grhs
        ssca = S(l, ssum, r)
        sexpr = Sum([ ssca ], ordIdxList, uFactors, outerIdx=[ I.i, J.i ], outDep=[ ssca.fL.of(0), ssca.fR.of(0) ], forceInitIdx=[ I.i, J.i ], iPriority=iPriority)
        return sexpr
def Kro(self, expr, opts): # Temporarily only dealing with sca-mat mul
    """Lower a Kronecker product (currently scalar*matrix only) into a Sum of loops.

    Builds outer (I,J) loops over the top-level partition of the output, then
    descends the blocking hierarchy adding (i,j) loops for the scalar operand
    and (k,l) loops for the matrix operand, composing the gather/scatter index
    mappings (fHbs) level by level. Returns the resulting Sum expression.
    """
    lhs = getattr(self, expr.inexpr[0].__class__.__name__)(expr.inexpr[0], opts)
    rhs = getattr(self, expr.inexpr[1].__class__.__name__)(expr.inexpr[1], opts)
    lhsOut = lhs.getOut()
    rhsOut = rhs.getOut()
    # The operand with more than one element is the matrix; the other is the scalar.
    sca,mat,scaOut,scaLhs = (lhs, rhs, lhsOut, True) if rhsOut.size[0]*rhsOut.size[1] > 1 else (rhs, lhs, rhsOut, False)
    blk = expr.getOut()
    M,N = sympify(blk.size[0]), sympify(blk.size[1])
    baselevel = 2 if opts['useintrinsics'] else 1
    # jSlocPrior = lambda lev: 0 if lev == baselevel else 2
    # ijIlpPrior = lambda lev: 1 if lev == baselevel else 0
    topSize = blk.getPartitionSize(0,0)
    SI,SJ = sympify(topSize[0]), sympify(topSize[1])
    I,J = Index("I", 0, M, SI, True), Index("J", 0, N, SJ, True)
    iPriority = { I.i: {'t': 0, 's': 0, 'i': 0}, J.i:{'t': 0, 's': 1, 'i': 0} }
    idxList = [I,J]
    # Outer loops fully unrolled: iteration 0 -> main block, iteration 1 -> leftover block.
    uFactors = [sympify(2)]*2
    flatScaM,flatScaN = sympify(scaOut.getFlatSize()[0]), sympify(scaOut.getFlatSize()[1])
    flatM, flatN = sympify(blk.getFlatSize()[0]), sympify(blk.getFlatSize()[1])
    flatMatM,flatMatN = flatM,flatN # For now rM == M and rN == N
    lev = 1
    # Top-left, bottom-left and top-right blocks capture main vs leftover block shapes.
    bBlk, hBlk, vBlk = blk.getBlock(0,0), blk.getBlock(M-1,0), blk.getBlock(0,N-1)
    bFSize, hFSize, vFSize = sympify(bBlk.getFlatSize()), sympify(hBlk.getFlatSize()), sympify(vBlk.getFlatSize())
    iName, jName, kName, lName = "i", "j", "k", "l"
    maxI, maxJ = SI*bFSize[0], SJ*bFSize[1]
    # i,j scan the scalar operand; k,l scan the matrix operand.
    bi,ei,si = 0, flatScaM, 1
    bj,ej,sj = 0, flatScaN, 1
    # I.i/SI is 0 in the main block and 1 in the leftover block (same for J.i/SJ).
    bk,ek,sk = I.i*bFSize[0], maxI + I.i/SI*(flatMatM - maxI), bFSize[0] + I.i/SI*(hFSize[0] - bFSize[0])
    bl,el,sl = J.i*bFSize[1], maxJ + J.i/SJ*(flatMatN - maxJ), bFSize[1] + J.i/SJ*(vFSize[1] - bFSize[1])
    i,j = Index(iName, bi, ei, si), Index(jName, bj, ej, sj)
    k,l = Index(kName, bk, ek, sk), Index(lName, bl, el, sl)
    idxList += [i,j,k,l]
    uFactors += [sympify(1),sympify(1),sympify(1),sympify(1)]
    iPriority.update({ i.i: {'t': lev, 's': 0, 'i': 0}, j.i:{'t': lev, 's': 1, 'i': 0} , k.i: {'t': lev, 's': 2, 'i': 0}, l.i:{'t': lev, 's': 3, 'i': 0} })
    # Gather/scatter index mappings: ll/lr for the scalar, rl/rr for the matrix.
    ll,lr = fHbs(si,flatScaM,i.i,1), fHbs(sj,flatScaN,j.i,1)
    rl,rr = fHbs(sk,flatMatM,k.i,1), fHbs(sl,flatMatN,l.i,1)
    lev+=1
    # Descend the blocking hierarchy until the code-generation base level.
    while bBlk.level > baselevel:
        bBlk, hBlk, vBlk = bBlk.getBlock(0,0), hBlk.getBlock(0,0), vBlk.getBlock(0,0)
        bFSize, hFSize, vFSize = bBlk.getFlatSize(), hBlk.getFlatSize(), vBlk.getFlatSize()
        iName, jName = "i"*lev, "j"*lev
        kName, lName = "k"*lev, "l"*lev
        # End values of this level's loops are the step values of the enclosing level.
        bi,ei,si = 0, si, 1
        bj,ej,sj = 0, sj, 1
        bk,ek,sk = 0, sk, bFSize[0] + I.i/SI*(hFSize[0] - bFSize[0])
        bl,el,sl = 0, sl, bFSize[1] + J.i/SJ*(vFSize[1] - bFSize[1])
        i,j = Index(iName, bi, ei, si), Index(jName, bj, ej, sj)
        k,l = Index(kName, bk, ek, sk), Index(lName, bl, el, sl)
        idxList += [i,j,k,l]
        uFactors += [sympify(1),sympify(1),sympify(1),sympify(1)]
        iPriority.update({ i.i: {'t': lev, 's': 0, 'i': 0}, j.i:{'t': lev, 's': 1, 'i': 0} , k.i: {'t': lev, 's': 2, 'i': 0}, l.i:{'t': lev, 's': 3, 'i': 0} })
        ll,lr = ll.compose(fHbs(si,ei,i.i,1)), lr.compose(fHbs(sj,ej,j.i,1))
        rl,rr = rl.compose(fHbs(sk,ek,k.i,1)), rr.compose(fHbs(sl,el,l.i,1))
        lev+=1
    #Reorder ids based on priorities
    ordIdxList = reorderIdxList(idxList, iPriority, opts)
    # Unroll inner loops
    if opts['unrollinner']:
        # Fully unroll the two innermost loops (unroll factor = trip count).
        for i in range(len(ordIdxList)-2, len(ordIdxList)):
            uFactors[i] = ordIdxList[i].e - ordIdxList[i].b
    gsca = G(ll, sca, lr)
    gmat = G(rl, mat, rr)
    # Preserve the operand order of the original Kronecker product.
    if scaLhs:
        # smul = gsca * gmat
        smul = Kro(gsca, gmat)
    else:
        # smul = gmat * gsca
        smul = Kro(gmat, gsca)
    ssca = S(rl, smul, rr)
    sexpr = Sum([ ssca ], ordIdxList, uFactors, outerIdx=[ I.i, J.i ], outDep=[ ssca.fL.of(0), ssca.fR.of(0) ], forceInitIdx=[ I.i, J.i ], iPriority=iPriority)
    return sexpr
def Mul(self, expr, opts):
    """Lower a matrix multiplication into a Sum of nested (i,k,j) loop levels.

    Creates outer loops (I,K,J) over the top-level partitions of the operands,
    then descends the blocking hierarchy adding one (i,k,j) loop triple per
    level while composing the gather/scatter mappings fi,fk,fj. When K exceeds
    the base-block width, a second accumulating statement is appended.
    """
    lhs = getattr(self, expr.inexpr[0].__class__.__name__)(expr.inexpr[0], opts)
    rhs = getattr(self, expr.inexpr[1].__class__.__name__)(expr.inexpr[1], opts)
    #Accommodate arbitrary sizes by adding an outer loop to span across different homogeneous area of the matrix
    # lhsOut = lhs.getOut()
    # rhsOut = rhs.getOut()
    blkLhs = expr.getInexprMat(0)
    blkRhs = expr.getInexprMat(1)
    M,K,N = sympify(blkLhs.size[0]), sympify(blkLhs.size[1]), sympify(blkRhs.size[1])
    baselevel = 2 if opts['useintrinsics'] else 1
    # Index priority coefficients: t -> temporal locality, s -> spatial locality, i -> ILP.
    kTlocPrior = lambda lev: 0 if lev == baselevel else 2
    jSlocPrior = lambda lev: 0 if lev == baselevel else 2
    kSlocPrior = lambda lev: 0 if lev == baselevel else 1
    ijIlpPrior = lambda lev: 1 if lev == baselevel else 0
    topSizeLhs = blkLhs.getPartitionSize(0,0)
    topSizeRhs = blkRhs.getPartitionSize(0,0)
    SI,SK,SJ = sympify(topSizeLhs[0]), sympify(topSizeLhs[1]), sympify(topSizeRhs[1])
    I,iK,J = Index("I", sympify(0), M, SI, True), Index("K", sympify(0), K, SK, True), Index("J", sympify(0), N, SJ, True)
    idxList = [I,J,iK]
    # Outer loops fully unrolled: iteration 0 -> main block, iteration 1 -> leftover block.
    uFactors = [sympify(2)]*3
    iPriority = { I.i: {'t': (0,0), 's': 0, 'i': 0}, J.i:{'t': (0,1), 's': 2, 'i': 0}, iK.i:{'t': (0,2), 's': 1, 'i': 0} }
    flatM,flatK,flatN = sympify(blkLhs.getFlatSize()[0]), sympify(blkLhs.getFlatSize()[1]), sympify(blkRhs.getFlatSize()[1])
    # mvm case for SSE - doesn't work well together with sumExchange in compiler.Compiler.generateKernel()
    # if flatN == 1 and opts['vectorize'] and SSE in opts['isa']:
    if False:
        return self.HRed(HRed(PMul(expr.inexpr[0], expr.inexpr[1], opts['nu'])), opts)
    else:
        lev = 1
        # Top-left, bottom-left, top-right blocks of lhs (and top-left, top-right of rhs)
        # capture the main vs leftover block shapes.
        bLhs, hLhs, vLhs = blkLhs.getBlock(0,0), blkLhs.getBlock(M-1,0), blkLhs.getBlock(0,K-1)
        bLFSize, hLFSize, vLFSize = sympify(bLhs.getFlatSize()), sympify(hLhs.getFlatSize()), sympify(vLhs.getFlatSize())
        bRhs, vRhs = blkRhs.getBlock(0,0), blkRhs.getBlock(0,N-1)
        bRFSize, vRFSize = sympify(bRhs.getFlatSize()), sympify(vRhs.getFlatSize())
        iName, kName, jName = "i"*lev, "k"*lev, "j"*lev
        maxI, maxK, maxJ = SI*bLFSize[0], SK*bLFSize[1], SJ*bRFSize[1]
        # I.i/SI is 0 in the main block and 1 in the leftover block (same for K, J).
        bi,ei,si = I.i*bLFSize[0], maxI + I.i/SI*(flatM - maxI), bLFSize[0] + I.i/SI*(hLFSize[0] - bLFSize[0])
        bk,ek,sk = iK.i*bLFSize[1], maxK + iK.i/SK*(flatK - maxK), bLFSize[1] + iK.i/SK*(vLFSize[1] - bLFSize[1])
        bj,ej,sj = J.i*bRFSize[1], maxJ + J.i/SJ*(flatN - maxJ), bRFSize[1] + J.i/SJ*(vRFSize[1] - bRFSize[1])
        i,k,j = Index(iName, bi, ei, si), Index(kName, bk, ek, sk), Index(jName, bj, ej, sj)
        idxList += [i,j,k]
        uFactors += [sympify(1),sympify(1),sympify(1)]
        iPriority.update({ i.i: {'t': (lev, 0), 's': 1, 'i': ijIlpPrior(bLhs.level)}, j.i:{'t': (lev, 1), 's': jSlocPrior(bLhs.level), 'i': ijIlpPrior(bLhs.level)} , k.i: {'t': (lev, kTlocPrior(bLhs.level)), 's': kSlocPrior(bLhs.level), 'i': 0} })
        fi,fk,fj = fHbs(si,flatM,i.i,1), fHbs(sk,flatK,k.i,1), fHbs(sj,flatN,j.i,1)
        lev+=1
        # Descend the blocking hierarchy until the code-generation base level.
        while bLhs.level > baselevel:
            bLhs, hLhs, vLhs = bLhs.getBlock(0,0), hLhs.getBlock(0,0), vLhs.getBlock(0,0)
            bLFSize, hLFSize, vLFSize = sympify(bLhs.getFlatSize()), sympify(hLhs.getFlatSize()), sympify(vLhs.getFlatSize())
            bRhs, vRhs = bRhs.getBlock(0,0), vRhs.getBlock(0,0)
            bRFSize, vRFSize = sympify(bRhs.getFlatSize()), sympify(vRhs.getFlatSize())
            iName, kName, jName = "i"*lev, "k"*lev, "j"*lev
            # End values of this level's loops are the step values of the enclosing level.
            bi,ei,si = sympify(0), si, bLFSize[0] + I.i/SI*(hLFSize[0] - bLFSize[0])
            bk,ek,sk = sympify(0), sk, bLFSize[1] + iK.i/SK*(vLFSize[1] - bLFSize[1])
            bj,ej,sj = sympify(0), sj, bRFSize[1] + J.i/SJ*(vRFSize[1] - bRFSize[1])
            i,k,j = Index(iName, bi, ei, si), Index(kName, bk, ek, sk), Index(jName, bj, ej, sj)
            idxList += [i,j,k]
            uFactors += [sympify(1),sympify(1),sympify(1)]
            iPriority.update({ i.i: {'t': (lev, 0), 's': 1, 'i': ijIlpPrior(bLhs.level)}, j.i:{'t': (lev, 1), 's': jSlocPrior(bLhs.level), 'i': ijIlpPrior(bLhs.level)} , k.i: {'t': (lev, kTlocPrior(bLhs.level)), 's': kSlocPrior(bLhs.level), 'i': 0} })
            fi,fk,fj = fi.compose(fHbs(si,ei,i.i,1)), fk.compose(fHbs(sk,ek,k.i,1)), fj.compose(fHbs(sj,ej,j.i,1))
            lev+=1
        # # Changing order of the indices
        # innerk = idxList.pop()
        # idxList.insert(len(idxList)-2, innerk)
        #Reorder ids based on priorities
        ordIdxList = reorderIdxList(idxList, iPriority, opts)
        # Unrolling
        if opts['unrollinner']:
            # Fully unroll the three innermost loops (unroll factor = trip count).
            for i in range(len(ordIdxList)-3, len(ordIdxList)):
                uFactors[i] = ordIdxList[i].e - ordIdxList[i].b
                # uFactors[i] = sympify(8)
        glhs = G(fi, lhs, fk)
        grhs = G(fk, rhs, fj)
        smul0 = glhs * grhs
        ssca0 = S(fi, smul0, fj)
        acc=lhs.getOut().size[1] > bLhs.size[1] # Accumulation of products happens only when K > nu
        sexpr = Sum([ ssca0 ], ordIdxList, uFactors, acc=acc, outerIdx=[ I.i, J.i, iK.i ], outDep=[ ssca0.fL.of(0), ssca0.fR.of(0) ], forceInitIdx=[ I.i, J.i ], iPriority=iPriority)
        if acc: # This part can in principle be automated
            # Second statement accumulates new products onto the partial result.
            smul1 = glhs * grhs
            gTOut = G(fi, sexpr, fj)
            sadd = gTOut + smul1
            ssca1 = S(fi, sadd, fj)
            sexpr.inexpr.append(ssca1)
            sexpr.setAsPred()
        return sexpr
def PMul(self, expr, opts):
    """Lower a packed (nu-wide, vectorized) matrix product into a Sum of loops.

    Same loop construction as Mul, but the result is written in packs of width
    nu: gathers from the rhs use the source width (fjsrc) while scatters into
    the output use the pack width (fjdst). When K exceeds the base-block width
    a second, accumulating statement is appended to the Sum.
    """
    lhs = getattr(self, expr.inexpr[0].__class__.__name__)(expr.inexpr[0], opts)
    rhs = getattr(self, expr.inexpr[1].__class__.__name__)(expr.inexpr[1], opts)
    nu = expr.nu
    blkLhs = expr.getInexprMat(0)
    blkRhs = expr.getInexprMat(1)
    # get the size of the two matrix operands
    M,K,N = sympify(blkLhs.size[0]), sympify(blkLhs.size[1]), sympify(blkRhs.size[1])
    # level at which we stop creating sums
    baselevel = 2 if opts['useintrinsics'] else 1
    # index priority coefficients (Tloc -> temp. locality, Sloc -> spacial locality, Ilp -> instruction level parallelism)
    jSlocPrior = lambda lev: 0 if lev == baselevel else 2
    kSlocPrior = lambda lev: 1
    ijIlpPrior = lambda lev: 0
    # get the size of the main blocks of the two matrix operands
    topSizeLhs = blkLhs.getPartitionSize(0,0)
    topSizeRhs = blkRhs.getPartitionSize(0,0)
    # sympify the dimensions of the main blocks
    SI,SK,SJ = sympify(topSizeLhs[0]), sympify(topSizeLhs[1]), sympify(topSizeRhs[1])
    # create the indexes for the outer loop mentioned above
    I,iK,J = Index("I", sympify(0), M, SI, True), Index("K", sympify(0), K, SK, True), Index("J", sympify(0), N, SJ, True)
    idxList = [I,J,iK]
    # unrolling factor for each Index
    # in this case we want the outer loops fully unrolled (first iteration -> main block, second iteration -> leftover block)
    uFactors = [sympify(2)]*3
    iPriority = { I.i: {'t': (0,0), 's': 0, 'i': 0}, J.i:{'t': (0,1), 's': 2, 'i': 0}, iK.i:{'t': (0,2), 's': 1, 'i': 0} }
    # get the real sizes of the two matrix operands
    flatM,flatK,flatN = sympify(blkLhs.getFlatSize()[0]), sympify(blkLhs.getFlatSize()[1]), sympify(blkRhs.getFlatSize()[1])
    lev = 1
    # get topleft, bottom left and topright block of left operand and the corresponding block sizes
    bLhs, hLhs, vLhs = blkLhs.getBlock(0,0), blkLhs.getBlock(M-1,0), blkLhs.getBlock(0,K-1)
    bLFSize, hLFSize, vLFSize = sympify(bLhs.getFlatSize()), sympify(hLhs.getFlatSize()), sympify(vLhs.getFlatSize())
    # get topleft and topright block of right operand and the corresponding block sizes
    bRhs, vRhs = blkRhs.getBlock(0,0), blkRhs.getBlock(0,N-1)
    bRFSize, vRFSize = sympify(bRhs.getFlatSize()), sympify(vRhs.getFlatSize())
    # create the indexes for this level's loops
    iName, kName, jName = "i"*lev, "k"*lev, "j"*lev
    # these are the max flat values of i,j,k for the main block
    maxI, maxK, maxJ = SI*bLFSize[0], SK*bLFSize[1], SJ*bRFSize[1]
    # I.i/SI = 0 if we are in the main block / 1 if we are in the leftover block
    bi,ei,si = I.i*bLFSize[0], maxI + I.i/SI*(flatM - maxI), bLFSize[0] + I.i/SI*(hLFSize[0] - bLFSize[0])
    bk,ek,sk = iK.i*bLFSize[1], maxK + iK.i/SK*(flatK - maxK), bLFSize[1] + iK.i/SK*(vLFSize[1] - bLFSize[1])
    bj,ej,sj = J.i*bRFSize[1], maxJ + J.i/SJ*(flatN - maxJ), bRFSize[1] + J.i/SJ*(vRFSize[1] - bRFSize[1])
    i,k,j = Index(iName, bi, ei, si), Index(kName, bk, ek, sk), Index(jName, bj, ej, sj)
    idxList += [i,j,k]
    uFactors += [sympify(1),sympify(1),sympify(1)]
    iPriority.update({ i.i: {'t': (lev, 0), 's': 1, 'i': ijIlpPrior(bLhs.level)}, j.i:{'t': (lev, 1), 's': jSlocPrior(bLhs.level), 'i': ijIlpPrior(bLhs.level)} , k.i: {'t': (lev, 2), 's': kSlocPrior(bLhs.level), 'i': 0} })
    fi,fk,fj = fHbs(si, flatM, i.i, 1), fHbs(sk, flatK, k.i, 1), fHbs(sj, flatN, j.i, 1)
    lev+=1
    while bLhs.level > baselevel: # don't go under the baselevel (2 for vectorized code)
        # all three blocks bLhs, hLhs, vLhs are homogeneous, so no need to worry about leftovers at this point
        bLhs, hLhs, vLhs = bLhs.getBlock(0,0), hLhs.getBlock(0,0), vLhs.getBlock(0,0)
        bLFSize, hLFSize, vLFSize = sympify(bLhs.getFlatSize()), sympify(hLhs.getFlatSize()), sympify(vLhs.getFlatSize())
        bRhs, vRhs = bRhs.getBlock(0,0), vRhs.getBlock(0,0)
        bRFSize, vRFSize = sympify(bRhs.getFlatSize()), sympify(vRhs.getFlatSize())
        iName, kName, jName = "i"*lev, "k"*lev, "j"*lev
        # the end values of the loops now are the step values of the immediately outer loops
        bi,ei,si = sympify(0), si, bLFSize[0] + I.i/SI*(hLFSize[0] - bLFSize[0])
        bk,ek,sk = sympify(0), sk, bLFSize[1] + iK.i/SK*(vLFSize[1] - bLFSize[1])
        bj,ej,sj = sympify(0), sj, bRFSize[1] + J.i/SJ*(vRFSize[1] - bRFSize[1])
        i,k,j = Index(iName, bi, ei, si), Index(kName, bk, ek, sk), Index(jName, bj, ej, sj)
        idxList += [i,j,k]
        uFactors += [sympify(1),sympify(1),sympify(1)]
        iPriority.update({ i.i: {'t': (lev, 0), 's': 1, 'i': ijIlpPrior(bLhs.level)}, j.i:{'t': (lev, 1), 's': jSlocPrior(bLhs.level), 'i': ijIlpPrior(bLhs.level)} , k.i: {'t': (lev, 2), 's': kSlocPrior(bLhs.level), 'i': 0} })
        fi,fk,fj = fi.compose(fHbs(si,ei,i.i,1)), fk.compose(fHbs(sk,ek,k.i,1)), fj.compose(fHbs(sj,ej,j.i,1))
        lev+=1
    # reorder ids based on priorities
    ordIdxList = reorderIdxList(idxList, iPriority, opts)
    if opts['unrollinner']:
        # set unrolling factors of 3 innermost loops to the corresponding loop length in order to be eventually unrolled
        for i in range(len(ordIdxList)-3, len(ordIdxList)):
            uFactors[i] = ordIdxList[i].e - ordIdxList[i].b
    # Source mapping uses the actual column width sj; destination uses the pack width nu.
    fjsrc = fHbs(sj, sj, 0, 1)
    fjdst = fHbs(nu, nu, 0, 1)
    glhs = G(fi, lhs, fk)
    grhs = G(fk, rhs, fjsrc)
    smul0 = PMul(glhs, grhs, nu)
    ssca0 = S(fi, smul0, fjdst)
    acc = lhs.getOut().size[1] > bLhs.size[1] # Accumulation of products happens only when K > nu
    sexpr = Sum([ ssca0 ], ordIdxList, uFactors, acc=acc, outerIdx=[ I.i, J.i, iK.i ], outDep=[ ssca0.fL.of(0), ssca0.fR.of(0) ], forceInitIdx=[ I.i, J.i ], iPriority=iPriority)
    if acc:
        # Second statement accumulates new packed products onto the partial result.
        smul1 = PMul(glhs, grhs, nu)
        gTOut = G(fi, sexpr, fjdst)
        sadd = gTOut + smul1
        ssca1 = S(fi, sadd, fjdst)
        sexpr.inexpr.append(ssca1)
        sexpr.setAsPred()
    return sexpr
def HRed(self, expr, opts):
    """Lower a horizontal reduction (each row summed to a single element) into a Sum.

    Builds outer (I,J) loops over the top-level partition plus per-level (i,j)
    loops over the operand blocks; the result is scattered through a width-1
    mapping (fjdst) since each row reduces to one element.
    """
    rhs = getattr(self, expr.inexpr[0].__class__.__name__)(expr.inexpr[0], opts)
    mat = rhs
    blk = expr.getInexprMat(0)
    M,N = sympify(blk.size[0]), sympify(blk.size[1])
    baselevel = 2 if opts['useintrinsics'] else 1
    topSize = blk.getPartitionSize(0,0)
    SI,SJ = sympify(topSize[0]), sympify(topSize[1])
    I,J = Index("I", 0, M, SI, True), Index("J", 0, N, SJ, True)
    iPriority = { I.i: {'t': 0, 's': 0, 'i': 0}, J.i:{'t': 0, 's': 1, 'i': 0} }
    idxList = [I,J]
    # Outer loops fully unrolled: iteration 0 -> main block, iteration 1 -> leftover block.
    uFactors = [sympify(2)]*2
    flatM, flatN = sympify(blk.getFlatSize()[0]), sympify(blk.getFlatSize()[1])
    flatMatM,flatMatN = flatM,flatN
    lev = 1
    # Top-left, bottom-left and top-right blocks capture main vs leftover block shapes.
    bBlk, hBlk, vBlk = blk.getBlock(0,0), blk.getBlock(M-1,0), blk.getBlock(0,N-1)
    bFSize, hFSize, vFSize = sympify(bBlk.getFlatSize()), sympify(hBlk.getFlatSize()), sympify(vBlk.getFlatSize())
    kName, lName = "i", "j"
    maxI, maxJ = SI*bFSize[0], SJ*bFSize[1]
    # I.i/SI is 0 in the main block and 1 in the leftover block (same for J.i/SJ).
    bk,ek,sk = I.i*bFSize[0], maxI + I.i/SI*(flatMatM - maxI), bFSize[0] + I.i/SI*(hFSize[0] - bFSize[0])
    bl,el,sl = J.i*bFSize[1], maxJ + J.i/SJ*(flatMatN - maxJ), bFSize[1] + J.i/SJ*(vFSize[1] - bFSize[1])
    k,l = Index(kName, bk, ek, sk), Index(lName, bl, el, sl)
    idxList += [k,l]
    uFactors += [sympify(1),sympify(1)]
    iPriority.update({k.i: {'t': lev, 's': 2, 'i': 0}, l.i:{'t': lev, 's': 3, 'i': 0} })
    rl,rr = fHbs(sk,flatMatM,k.i,1), fHbs(sl,flatMatN,l.i,1)
    lev+=1
    # Descend the blocking hierarchy until the code-generation base level.
    while bBlk.level > baselevel:
        bBlk, hBlk, vBlk = bBlk.getBlock(0,0), hBlk.getBlock(0,0), vBlk.getBlock(0,0)
        bFSize, hFSize, vFSize = bBlk.getFlatSize(), hBlk.getFlatSize(), vBlk.getFlatSize()
        kName, lName = "i"*lev, "j"*lev
        # End values of this level's loops are the step values of the enclosing level.
        bk,ek,sk = 0, sk, bFSize[0] + I.i/SI*(hFSize[0] - bFSize[0])
        bl,el,sl = 0, sl, bFSize[1] + J.i/SJ*(vFSize[1] - bFSize[1])
        k,l = Index(kName, bk, ek, sk), Index(lName, bl, el, sl)
        idxList += [k,l]
        uFactors += [sympify(1),sympify(1)]
        iPriority.update({k.i: {'t': lev, 's': 2, 'i': 0}, l.i:{'t': lev, 's': 3, 'i': 0} })
        rl,rr = rl.compose(fHbs(sk,ek,k.i,1)), rr.compose(fHbs(sl,el,l.i,1))
        lev+=1
    #Reorder ids based on priorities
    ordIdxList = reorderIdxList(idxList, iPriority, opts)
    # Unroll inner loops
    if opts['unrollinner']:
        # Fully unroll the two innermost loops (unroll factor = trip count).
        for i in range(len(ordIdxList)-2, len(ordIdxList)):
            uFactors[i] = ordIdxList[i].e - ordIdxList[i].b
    fjdst = fHbs(1, 1, 0, 1)  # width-1 destination mapping: each row reduces to one element
    gmat = G(rl, mat, rr)
    hred = HRed(gmat)
    ssca = S(rl, hred, fjdst)
    sexpr = Sum([ ssca ], ordIdxList, uFactors, outerIdx=[ I.i, J.i ], outDep=[ ssca.fL.of(0), ssca.fR.of(0) ], forceInitIdx=[ I.i, J.i ], iPriority=iPriority)
    return sexpr
class StmtWrap(object):
    """Interleaves wrapper fragments with input expressions when rendered.

    Rendering yields wrap[0], then inputs[0], wrap[1], inputs[1], wrap[2], ...
    An instance with fewer than two wrapper fragments renders as the empty
    string.
    """
    def __init__(self, wrap, inputs):
        self.wrap = wrap      # list of surrounding text fragments
        self.inputs = inputs  # list of expressions placed between fragments
    def __repr__(self):
        return str(self)
    def __str__(self):
        if len(self.wrap) < 2:
            return ""
        parts = [str(self.wrap[0])]
        for pos, inp in enumerate(self.inputs):
            parts.append(str(inp))
            parts.append(str(self.wrap[pos + 1]))
        return "".join(parts)
class StmtExpr(object):
    """A renderable statement expression: inputs combined by an operator,
    optionally surrounded by a wrap pair.

    Binary form renders as  in0 <op> in1 ; unary form as  <op>(in0)  or just
    in0 when no operator is set. A non-empty wrap pair (w0, w1) renders as
    w0(...)w1, except that the pair ("(", ")") degenerates to plain
    parenthesization.
    """
    def __init__(self, inputs, op=None, wrap=None):
        self.inputs = inputs
        self.op = op
        self.wrap = ["",""] if wrap is None else wrap
    def removeWrap(self):
        # Drop any surrounding wrap; returns self for chaining.
        self.wrap = ["",""]
        return self
    def __repr__(self):
        return str(self)
    def __str__(self):
        if len(self.inputs) > 1:
            core = str(self.inputs[0]) + self.op.toPolySigma() + str(self.inputs[1])
        elif self.op is None:
            core = str(self.inputs[0])
        else:
            core = self.op.toPolySigma() + "(" + str(self.inputs[0]) + ")"
        left, right = str(self.wrap[0]), str(self.wrap[1])
        if left + right == "":
            return core
        if left + right == "()":
            left = right = ''
        return left + "(" + core + ")" + right
class ToPolySigma(object):
def __init__(self, llprog, opts):
super(ToPolySigma, self).__init__()
self.mDict = dict(llprog.mDict)
self.ops = llprog.getOps()
self.llprog = llprog
self.opts = opts
# def apply(self, root, opts):
def apply(self):
    """Run the whole LL -> Sigma-LL lowering pipeline.

    Computes the OpenScop iteration space, the poly-statements per equation,
    per-statement loop orders, unrolling factors, scattering names and
    schedules; writes the CLooG input file; generates the Sigma-LL source;
    and finally re-parses it. Returns the parsed semantic object.
    """
    self.eq_id = 0
    self.numStmts = 0
    self.stmts = []
    self.globalspace = None
    self.indices = []
    self.idxPriorityList = []
    # self.globIdxOrder = []
    self.subsWithOut = {}
    self.sigmaSource = ""
    self.nublac = self.opts['isaman'].getNuBLAC(self.opts['precision'], self.opts['nu'])
    self.baselevel = 3 if self.opts['useintrinsics'] else 2 # Mat level
    # if 'baseufs' not in self.opts:
    self.opts['baseufs'] = { }
    if 'unroll' not in self.opts:
        self.opts['unroll'] = { }
    self.opts['ufslist'] = [ ] # Currently unused
    #OLD
    # self.fillinSubsWithOut(root, opts)
    # self.computeOpenSCoPSpace(root, opts)
    # self.computeSumsOrderAndUnrolling(root, opts)
    # self.opOrder = 0
    # getattr(self, root.__class__.__name__)(root, opts)
    # self.correctDomains(root.getPolyStmts())
    # self.computeScheds(root)
    # self.addStmts(root.getPolyStmts())
    #NEW
    print "Computing OpenSCoP space.."
    self.computeOpenSCoPSpace()
    print "Computing indices pos e levels.."
    for eq_id, root in enumerate( self.llprog.getEqsList() ):
        root.computeIdxPosAndLevInfo()
        self.fillinSubsWithOut(eq_id, root)
    # self.opOrder = 0
    print "Starting polystatements computation.."
    # Dispatch on the program's class name (visitor-style lowering).
    getattr(self, self.llprog.__class__.__name__)(self.llprog)
    print "Fixing polystatements.."
    self.fix_ps_choice(self.llprog.stmtList)
    print "Ended polystatements computation.."
    self.opts['stmts_ids'] = { }
    print "Computing local sums and order.."
    self.computeLocalSumsOrder(self.llprog.stmtList)
    print "Computing Unrolling properties.."
    self.computeUnrollingAndProperties()
    self.scat_counter = -1
    self.scatnames = []
    print "Computing Scatnames expression tree.."
    self.computeScatNamesExprTree(self.llprog.stmtList, 0)
    print "Adding indices from polystatements.."
    self.computeScatNamesAddStmtsIds(self.llprog.stmtList)
    # self.computeSumsOrderAndUnrolling(root, opts)
    print "Computing schedules.."
    self.computeScheds(self.llprog.stmtList)
    print "Compiling CLooG statements.."
    for root in self.llprog.getEqsList():
        self.addStmts(root.getPolyStmts())
    print "Completing CLooG input.."
    self.createOpenSCoP()
    print "Creating Sigma-LL source.."
    self.createSigmaSource()
    # Optionally dump the generated Sigma-LL source next to the test results.
    if not self.opts.get('onlygen', False) and self.opts.get('savesigma', False):
        params = '-'.join(str(p) for p in self.opts['static_params'])
        destfile = '/%s_sigma-%s-%s.txt' % (self.opts['hfilebasename'], params, str(hash(self.llprog)))
        fname = self.opts['logroot'] + '/results/' + self.opts['testname'] + destfile
        sigmasrc = open(fname, 'w')
        sigmasrc.write("Sigma-LL src for :\n\n")
        sigmasrc.write(str(self.llprog) + "\n\n")
        sigmasrc.write("="*20 + "\n\n")
        sigmasrc.write(self.sigmaSource + "\n\n")
        sigmasrc.write("="*20 + "\n\n")
        sigmasrc.close()
    self.opts['stmts_ids'].clear()
    print "Parsing new Sigma-LL input.."
    sem = parseSigmaLL(self.sigmaSource, self.mDict, self.opts, {'indices': self.indices} )
    print "Parsing concluded."
    return sem
def fillinSubsWithOut(self, eq_id, root):
self.subsWithOut[ (eq_id, root.inexpr[1]) ] = root.inexpr[0]
def getEmptyDomain(self):
    """Return an empty isl Set embedded in the global iteration space."""
    emp = Set.universe(self.globalspace)
    # universe minus itself yields the empty set in the right space
    return emp-emp
def fix_ps_choice(self, expr):
    """Collapse each poly-statement's candidate lists down to a single choice.

    Walks the statement tree; for every llStmt, each poly-statement carries
    parallel candidate lists ('stmt', 'perm_oacc', 'domain', 'outinfo').
    A candidate requiring no output permutation (perm_oacc is None) is
    preferred; otherwise the first candidate is taken.
    """
    if isinstance(expr, llBlock):
        for child in expr:
            self.fix_ps_choice(child)
    elif isinstance(expr, llFor):
        self.fix_ps_choice(expr.body)
    elif isinstance(expr, llStmt):
        for ps in expr.eq.getPolyStmts():
            candidates = zip(ps['stmt'], ps['perm_oacc'], ps['domain'], ps['outinfo'])
            no_perm = [cand for cand in candidates if cand[1] is None]
            if no_perm:
                ps['stmt'], ps['perm_oacc'], ps['domain'], ps['outinfo'] = no_perm[0]
            else:
                for field in ('stmt', 'perm_oacc', 'domain', 'outinfo'):
                    ps[field] = ps[field][0]
    elif isinstance(expr, llIf):
        for branch in expr.bodys:
            self.fix_ps_choice(branch)
# def computeOpenSCoPSpace(self, root, opts):
def computeOpenSCoPSpace(self):
    """Collect all loop-index names of the program and build the global isl space."""
    self.opts['idsattr'] = {}
    # root.computeSpaceIdxNames(i='i',j='j', ipfix=str(globalSSAIndex()), jpfix=str(globalSSAIndex()), opts=opts, baselevel=self.baselevel)
    # l = [root]
    self.llprog.computeSpaceIdxNames(opts=self.opts, baselevel=self.baselevel)
    # l = self.llprog.getEqsList()
    # while l:
    #     tl = []
    #     for e in l:
    #         for ids in e.getOut().spaceIdxNames:
    #             for c in range(len(ids)):
    #                 prefix = self.opts['idsattr'].get(ids[c][0], None)
    #                 if prefix:
    #                     ids[c] = prefix + ids[c]
    #         if isinstance(e, Operator):
    #             tl += e.inexpr
    #     l = tl
    self.indices = list(self.llprog.getSpaceIdxSet())
    # One set dimension per index name; this space hosts all statement domains.
    self.globalspace = Space.create_from_names(Context(), set=self.indices)
def computeSumsOrder(self, iPriorityList, ids):
# def computeGlobalOrder(self):
# self.globIdxOrder = []
globIdxPriority = {}
for i in ids:
maxValue = (0,0,0)
for ip in iPriorityList:
if ip[i] > maxValue:
maxValue = ip[i]
globIdxPriority[i] = maxValue
ordPrioList = sorted(globIdxPriority.items(), key=lambda idxp: idxp[1])
ordSym = [ idxp[0] for idxp in ordPrioList ]
globIdxOrder = [None]*len(ids)
for idx in ids:
globIdxOrder[ordSym.index(idx)] = idx
return globIdxOrder
def computeLocalSumsOrder(self, expr):
    """Walk the statement tree and record, per llStmt, its local loop-index order.

    For each statement the equation's index priorities are gathered and the
    resulting order is stored in opts['stmts_ids'][stmt].
    """
    if isinstance(expr, llBlock):
        for child in expr:
            self.computeLocalSumsOrder(child)
    elif isinstance(expr, llFor):
        self.computeLocalSumsOrder(expr.body)
    elif isinstance(expr, llStmt):
        prio_list = []
        stmt_ids = list(expr.getSpaceIdxSet())
        # Statements that the nuBLAC backend cannot generate stop at level 2.
        lev = self.baselevel if expr.can_gen_with_nublac(self.nublac) else 2
        expr.eq.computeIdxPriority(prio_list, stmt_ids, self.opts['indexorder'], lev)
        self.opts['stmts_ids'][expr] = self.computeSumsOrder(prio_list, stmt_ids)
    elif isinstance(expr, llIf):
        for branch in expr.bodys:
            self.computeLocalSumsOrder(branch)
# def computeSumsOrderAndUnrolling(self):
# root.computeIdxPriority(self.idxPriorityList, self.indices, opts['indexorder'], self.baselevel)
# self.computeGlobalOrder()
def computeUnrollingAndProperties(self):
    """Compute per-index unroll factors and structural index properties.

    Unrolling is only attempted when the program's op count fits the L1
    I-cache budget. Each statement's per-index unroll factors are merged
    (max) into opts['unroll']; index properties (e.g. whether a Mul can be
    distributed over an index) are collected via markProperties.
    """
    baseUFs = { i : 1 for i in self.indices }
    # opts['baseufs'] = baseUFs
    # opts['ufslist'] = [ ]
    self.opts['baseufs'].update(baseUFs)
    # unroll = 1 if self.ops <= self.opts['icachel1'] else 0
    # uFs = { i : [unroll] for i in self.indices } # Using uFs only to mark whether we want to unroll over a specific dimension
    unroll = self.ops <= self.opts['icachel1']  # unroll only if code likely fits in L1 I-cache
    for stmt in self.opts['stmts_ids']:
        # ids = self.opts['stmts_ids'][stmt]
        baselevel = self.baselevel if stmt.can_gen_with_nublac(self.nublac) else 2
        uFs = { i : [0] for i in self.indices }
        if unroll:
            stmt.eq.computeUnrolling(uFs, self.indices, baselevel)
        # opts['unroll'] = { i : max(uFs[i]) for i in self.indices }
        self.opts['unroll'].update( { i : max(uFs[i]) for i in self.indices } )
        propDict = {i : set() for i in self.indices }
        # An index counts as a row (resp. column) index if any idxInfo maps it to dimension 0 (resp. 1).
        isRowIdx = lambda idx, idxInfoList, baselevel: any(map(lambda idxInfo: idx in idxInfo and idxInfo[idx][2] == 0, idxInfoList))
        isColIdx = lambda idx, idxInfoList, baselevel: any(map(lambda idxInfo: idx in idxInfo and idxInfo[idx][2] == 1, idxInfoList))
        canDistributeOverIdx = lambda op, idx, idxInfoList, baselevel: 'dist' if isinstance(op, Mul) and isRowIdx(idx, idxInfoList, baselevel) and isColIdx(idx, idxInfoList, baselevel) else None
        propList = [ canDistributeOverIdx ]
        stmt.eq.markProperties(propDict, propList, self.indices, baselevel)
        # NOTE(review): propDict is rebuilt and opts['idxProperties'] overwritten on every
        # iteration, so only the last statement's properties survive -- confirm intended.
        self.opts['idxProperties'] = propDict
    # uFs = { i : [1] for i in self.indices }
    # root.computeUnrolling(uFs, self.indices, self.baselevel)
    # uFs = { i : max(uFs[i]) for i in self.indices }
    # if uFs != baseUFs:
    #     for uf in range(2,4):
    #         t = { i : uf if uFs[i] > 1 else 1 for i in uFs }
    #         opts['ufslist'].append(t)
def computeScatNamesExprTree(self, expr, level):
    """Collect one scattering-dimension name per nesting level of the tree.

    Blocks contribute a fresh synthetic name ("b<counter>"); for-loops
    contribute their own loop index. A name is only appended the first time
    a given nesting depth is reached; llIf branches stay at the same level.
    """
    if isinstance(expr, llBlock):
        if len(self.scatnames) < level + 1:
            self.scat_counter += 1
            self.scatnames.append("b"+str(self.scat_counter))
        for child in expr:
            self.computeScatNamesExprTree(child, level + 1)
    elif isinstance(expr, llFor):
        if len(self.scatnames) < level + 1:
            self.scat_counter += 1
            self.scatnames.append(str(expr.idx))
        self.computeScatNamesExprTree(expr.body, level + 1)
    elif isinstance(expr, llIf):
        for branch in expr.bodys:
            self.computeScatNamesExprTree(branch, level)
def computeScatNamesAddStmtsIds(self, expr):
    """Append every statement's ordered loop indices to the scattering names."""
    if isinstance(expr, llBlock):
        for child in expr:
            self.computeScatNamesAddStmtsIds(child)
    elif isinstance(expr, llFor):
        self.computeScatNamesAddStmtsIds(expr.body)
    elif isinstance(expr, llStmt):
        self.scatnames += list(self.opts['stmts_ids'][expr])
    elif isinstance(expr, llIf):
        for branch in expr.bodys:
            self.computeScatNamesAddStmtsIds(branch)
def joinAlignedSets(self, set1, set2, space):
    """Pairwise-combine the basic sets of two aligned isl sets.

    For every pair (bs1, bs2) of basic sets, a new set in `space` is built
    from the union of their constraints; the result is the union of all
    such combinations.
    """
    lBsets1,lBsets2 = set1.get_basic_sets(), set2.get_basic_sets()
    genpair = ((bs1,bs2) for bs1 in lBsets1 for bs2 in lBsets2)
    set3 = Set.universe(space)
    set3 = set3-set3  # start from the empty set in `space`
    for bs1,bs2 in genpair:
        tempd = Set.universe(space)
        cs = bs1.get_constraints() + bs2.get_constraints()
        tempd = tempd.add_constraints(cs)
        set3 = set3.union(tempd)
    return set3
# def separateInitAccDomains(self, space, domain, flatStructInfo, accIdxList):
# tDomain = domain-domain
# bsets = domain.get_basic_sets()
# cs = []
# for sd in flatStructInfo:
# for sbs in sd.get_basic_sets():
# for c in sbs.get_constraints():
# goodc = True
# i=0
# while goodc and i < len(accIdxList):
# dimType, pos = space.get_var_dict()[accIdxList[i]]
# if c.is_equality():
# reva = c.get_aff().mul(Aff.read_from_str(c.get_ctx(),"{[]->[(-1)]}"))
# revc = Constraint.equality_from_aff(reva)
# goodc = c.involves_dims(dimType, pos, 1) and (c.is_lower_bound(dimType, pos) or revc.is_lower_bound(dimType, pos))
# else:
# goodc = c.involves_dims(dimType, pos, 1) and c.is_lower_bound(dimType, pos)
# i+=1
# if goodc:
# cs += [Constraint.equality_from_aff(c.get_aff())]
# for c in cs:
# for bs in bsets:
# tDomain = tDomain.union(bs.add_constraint(c))
# initDomain = domain.intersect(tDomain)
# return initDomain
def computeInitDomain(self, space, initDomain, domain, flatStructInfo, accIdxList):
    """Compute the sub-domain where accumulation targets must be initialized.

    For every basic set of `domain`, each accumulation index in accIdxList is
    pinned to its lower bound (multiple lower bounds are combined via a
    symbolic max through a piecewise affine), yielding the "first iteration"
    face of the domain. If a previous initDomain exists, the overlap (after
    projecting out the accumulation indices) is removed from the new one.
    """
    tDomain = domain-domain
    # bsets = domain.get_basic_sets()
    # for sd in flatStructInfo:
    #     for sbs in sd.get_basic_sets():
    for bs in domain.get_basic_sets():
        s_cpy = bs
        for idx in accIdxList:
            dimType, pos = space.get_var_dict()[idx]
            cs = []
            # Collect proper (non-equality) lower bounds on this index.
            for c in bs.get_constraints():
                if c.involves_dims(dimType, pos, 1) and c.is_lower_bound(dimType, pos) and not c.is_equality():
                    cs.append(c)
            if cs:
                if len(cs) > 1:
                    # Multiple lower bounds: pin idx to the max of all of them.
                    aff_re = re.compile("{ \[.*\] -> \[(.*)\] }")
                    max_args = []
                    for c in cs:
                        # Drop the idx coefficient and negate to obtain the bound expression.
                        saff = str(c.set_coefficient_val(dimType, pos, 0).get_aff().mul(Aff.read_from_str(c.get_ctx(),"{[]->[(-1)]}")) ) if False else str(c.set_coefficient_val(dimType, pos, 0).get_aff().mul(Aff.read_from_str(domain.get_ctx(),"{[]->[(-1)]}")) )
                        arg = aff_re.search(saff).group(1)
                        max_args.append( arg )
                    smax = "max(" + max_args[0] + ", " + max_args[1] + ")"
                    for arg in max_args[2:]:
                        smax = "max(" + smax + ", " + arg + ")"
                    pwaff = PwAff.read_from_str(domain.get_ctx(), "{["+(",".join(self.indices))+"]->[("+idx+" - "+smax+")]}")
                    s = s_cpy - s_cpy
                    for pc in pwaff.get_pieces():
                        s = s.union( pc[0].add_constraint(Constraint.equality_from_aff(pc[1])) )
                    s_cpy = s.intersect(s_cpy)
                else:
                    # Single lower bound: turn it into an equality (idx == lower bound).
                    s_cpy = s_cpy.add_constraint(Constraint.equality_from_aff(cs[0].get_aff()))
        tDomain = tDomain.union(s_cpy)
    # for bs in bsets:
    #     tDomain = tDomain.union(bs.add_constraints(cs))
    tInitDomain = tDomain.coalesce().remove_redundancies()
    # tInitDomain = domain.intersect(tDomain) # Is this necessary??
    if initDomain.is_empty():
        return tInitDomain
    #Compare intersection with previous initDomain projecting out the accIndices
    tPrjOut, iPrjOut = tInitDomain, initDomain
    tSpace = space
    for i in accIdxList:
        dimType, pos = tSpace.get_var_dict()[i]
        tPrjOut = tPrjOut.project_out(dimType, pos, 1)
        iPrjOut = iPrjOut.project_out(dimType, pos, 1)
        tSpace = tPrjOut.get_space()
    intPrjOut = tPrjOut.intersect(iPrjOut)
    if not intPrjOut.is_empty():
        # Overlap with the previous init domain: exclude it from the new one.
        initDomain = tInitDomain - tInitDomain.intersect(align_spaces(intPrjOut,domain))
    else:
        # initDomain = domain-domain
        initDomain = tInitDomain
    return initDomain
def compute_full_init_domain(self, space, domain, accIdxList):
    """Compute the full initialization domain for the accumulation indices.

    Each accumulation index is pinned to its lower bound in every basic set of
    `domain` (multiple lower bounds combined through a symbolic max), producing
    the "first iteration" face. When the resulting init domain has several
    basic sets, overlapping init areas (after projecting out the accumulation
    indices) are detected and the overlap is replaced by a single face (the
    min over the accumulation-index bounds), so no point is initialized twice.
    """
    tDomain = domain-domain
    aff_re = re.compile("{ \[.*\] -> \[(.*)\] }")
    for bs in domain.get_basic_sets():
        s_cpy = bs
        for idx in accIdxList:
            dimType, pos = space.get_var_dict()[idx]
            cs = []
            # Collect proper (non-equality) lower bounds on this index.
            for c in bs.get_constraints():
                if c.involves_dims(dimType, pos, 1) and c.is_lower_bound(dimType, pos) and not c.is_equality():
                    cs.append(c)
            if cs:
                if len(cs) > 1:
                    # Multiple lower bounds: pin idx to the max of all of them.
                    max_args = []
                    for c in cs:
                        saff = str(c.set_coefficient_val(dimType, pos, 0).get_aff().mul(Aff.read_from_str(domain.get_ctx(),"{[]->[(-1)]}")) )
                        arg = aff_re.search(saff).group(1)
                        max_args.append( arg )
                    smax = "max(" + max_args[0] + ", " + max_args[1] + ")"
                    for arg in max_args[2:]:
                        smax = "max(" + smax + ", " + arg + ")"
                    pwaff = PwAff.read_from_str(domain.get_ctx(), "{["+(",".join(self.indices))+"]->[("+idx+" - "+smax+")]}")
                    s = s_cpy - s_cpy
                    for pc in pwaff.get_pieces():
                        s = s.union( pc[0].add_constraint(Constraint.equality_from_aff(pc[1])) )
                    s_cpy = s.intersect(s_cpy)
                else:
                    # Single lower bound: turn it into an equality (idx == lower bound).
                    s_cpy = s_cpy.add_constraint(Constraint.equality_from_aff(cs[0].get_aff()))
        tDomain = tDomain.union(s_cpy)
    initDomain = tDomain.coalesce().remove_redundancies()
    #Now need to make sure we get rid of overlapping init areas across BSs (Following is meant for Set with 2 BSs)
    if len(initDomain.get_basic_sets()) > 1:
        # Empty set in the space without the accumulation indices; will collect overlaps.
        dom_wo_acc = initDomain.universe_like()
        for idx in accIdxList:
            dimType, pos = dom_wo_acc.get_space().get_var_dict()[idx]
            dom_wo_acc = dom_wo_acc.remove_dims(dimType, pos, 1)
        dom_wo_acc = dom_wo_acc-dom_wo_acc
        union_projected_out_bss = None
        for bs in initDomain.get_basic_sets():
            bs_wo_acc = bs
            for idx in accIdxList:
                dimType, pos = bs_wo_acc.get_space().get_var_dict()[idx]
                bs_wo_acc = bs_wo_acc.project_out(dimType, pos, 1)
            if union_projected_out_bss is None:
                union_projected_out_bss = bs_wo_acc
            else:
                intersection = union_projected_out_bss.intersect(bs_wo_acc)
                # BUGFIX: isl sets are immutable and union() returns a new set; the
                # original discarded the result, so the running union never grew past
                # the second basic set and overlaps with later ones were missed.
                union_projected_out_bss = union_projected_out_bss.union(bs_wo_acc)
                if not intersection.is_empty():
                    dom_wo_acc = dom_wo_acc.union(intersection)
        overlapping_area = initDomain.intersect(align_spaces(dom_wo_acc, initDomain))
        # Remove the overlapping area that will be replaced with one of its faces
        initDomain = initDomain - overlapping_area
        # Restrict the overlap to the accumulation dimensions only.
        overlapping_area_only_acc = overlapping_area
        for idx in self.indices:
            if idx not in accIdxList:
                dimType, pos = overlapping_area_only_acc.get_space().get_var_dict()[idx]
                overlapping_area_only_acc = overlapping_area_only_acc.project_out(dimType, pos, 1)
        for idx in accIdxList:
            dimType, pos = overlapping_area_only_acc.get_space().get_var_dict()[idx]
            cs = []
            for bs in overlapping_area_only_acc.get_basic_sets():
                for c in bs.get_constraints():
                    if c.involves_dims(dimType, pos, 1) and ( c.is_lower_bound(dimType, pos) or c.is_equality() ):
                        cs.append(c)
            if cs:
                if len(cs) > 1:
                    # Several bounds: keep the min face of the overlapping area.
                    min_args = []
                    for c in cs:
                        saff = str(c.set_coefficient_val(dimType, pos, 0).get_aff().mul(Aff.read_from_str(domain.get_ctx(),"{[]->[(-1)]}")) )
                        arg = aff_re.search(saff).group(1)
                        min_args.append( arg )
                    smin = "min(" + min_args[0] + ", " + min_args[1] + ")"
                    for arg in min_args[2:]:
                        smin = "min(" + smin + ", " + arg + ")"
                    pwaff = PwAff.read_from_str(domain.get_ctx(), "{["+(",".join(self.indices))+"]->[("+idx+" - "+smin+")]}")
                    s = overlapping_area - overlapping_area
                    for pc in pwaff.get_pieces():
                        s = s.union( pc[0].add_constraint(Constraint.equality_from_aff(pc[1])) )
                    overlapping_area = s.intersect(overlapping_area)
                else:
                    aff_out = aff_re.search(str(cs[0].get_aff())).group(1)
                    aff = Aff.read_from_str(domain.get_ctx(), "{["+(",".join(self.indices))+"]->[("+aff_out+")]}")
                    overlapping_area = overlapping_area.add_constraint(Constraint.equality_from_aff(aff))
        initDomain = initDomain.union(overlapping_area)
    return initDomain
def createOpenSCoP(self):
text = "<OpenScop>\n\nSIGMA\n\n"
#Context
text += "CONTEXT\n0 2 0 0 0 0\n\n0\n\n" # 2 columns e/i | 1 + Parameters are not provided
text += str(self.numStmts) + "\n\n"
for s in self.stmts:
text += s
text += "<scatnames>\n"
text += " ".join(self.scatnames)
text += "\n</scatnames>\n\n"
text += "</OpenScop>\n" # 2 columns e/i | 1 + Parameters are not provided
f = open("/tmp/temp.scop", 'w')
f.write(text)
f.close()
def addStmts(self, polyStmts):
self.numStmts += len(polyStmts)
for ps in polyStmts:
text = ""
text += "2\n\n" # Num. of relations describing the statement (now only domain+sched)
text += "#----------------------------------------------------------------\n"
text += "DOMAIN\n"
f = open("/tmp/temp.scop", 'w')
ps['domain'].print_(f, 0, format.EXT_POLYLIB)
f.close()
f = open("/tmp/temp.scop", 'r')
sdom = f.read()
f.close()
text += sdom
text += "SCATTERING\n"
f = open("/tmp/temp.scop", 'w')
ps['sched'].print_(f, 0, format.EXT_POLYLIB)
f.close()
f = open("/tmp/temp.scop", 'r')
ssched = f.read()
f.close()
text += ssched
text += "\n1\n" #Statement body is provided
text += "\n<body>\n"
idxList = ps['idxlist']
# idxList = self.indices
text += str(len(idxList)) + "\n"
text += " ".join(idxList) + "\n"
text += str(ps['stmt']) + "\n"
text += str(ps['eq_id']) + "\n"
text += ps['outinfo'][0] + "\n"
if len(ps['outinfo']) > 1:
# text += "1\n"
text += ps['outinfo'][1] + "\n"
text += "\n</body>\n"
text += "\n#----------------------------------------------------------------\n\n"
self.stmts.append(text)
def createSigmaSource(self):
# import example_cloog
import sigmacloog
s = sigmacloog.tosigma_str("/tmp/temp.scop")
self.sigmaSource += str(s)
# def computeScheds(self, root):
# for ps in root.getPolyStmts():
# ps['sched'] = self.computeSched(ps)
def computeScheds(self, expr, prefix=None):
prefix = [] if prefix is None else prefix
if isinstance(expr, llBlock):
i = 0
for s in expr:
new_prefix = prefix + [ str(i) ]
i += 1
self.computeScheds(s, new_prefix)
elif isinstance(expr, llFor):
new_prefix = prefix + [ str(expr.idx) ]
self.computeScheds(expr.body, new_prefix)
elif isinstance(expr, llStmt):
new_prefix = prefix + ['0']*(self.scat_counter-len(prefix)+1)
for ps in expr.eq.getPolyStmts():
ps['sched'] = self.computeSched(ps, self.opts['stmts_ids'][expr], new_prefix)
elif isinstance(expr, llIf):
for b in expr.bodys:
self.computeScheds(b, prefix)
def computeSched(self, polyStmt, ids_order, prefix):
varDict = polyStmt['domain'].get_space().get_var_dict()
schedList = list(prefix)
for i in ids_order:
if not i in varDict:
schedList.append('0')
else:
schedList.append(i)
## schedList.append(str(polyStmt['oporder']))
# schedList.extend( ['0']*(len(self.scatnames)-len(schedList)) )
varList = sorted(varDict.items(), key=lambda entry: entry[1][1])
idxList = [ e[0] for e in varList ]
polyStmt['idxlist'] = idxList
## m = Map("{ ["+",".join(self.indices)+"] -> ["+",".join(schedList)+"] }")
m = Map("{ ["+",".join(idxList)+"] -> ["+",".join(schedList)+"] }")
return m
def reorderIdxList(self, idxList, iPriority, opts):
order = opts['indexorder']
for idx in iPriority:
p = iPriority[idx]
t = ( p[order[0]], p[order[1]], p[order[2]] )
iPriority[idx] = t
ordPrioList = sorted(iPriority.items(), key=lambda idxp: idxp[1])
ordSym = [ idxp[0] for idxp in ordPrioList ]
ordIdxList = [None]*len(idxList)
for idx in idxList:
ordIdxList[ordSym.index(idx)] = idx
return ordIdxList
def correctDomains(self, polyStmts):
for ps in polyStmts:
ps['domain'] = self.correctDomain(ps['domain'])
ps['eq_id'] = self.eq_id
def correctDomain(self, domain_list, indices=None):
indices = self.indices if indices is None else indices
res = []
for tDomain in domain_list:
for i in self.indices:
vardict = tDomain.get_space().get_var_dict()
if i in vardict and not tDomain.involves_dims(vardict[i][0], vardict[i][1], 1):
tDomain = tDomain.project_out(vardict[i][0], vardict[i][1], 1)
res.append(tDomain)
del domain_list
return res
    def fuseStmtWithSub(self, domain, subexpr, mReduceDims, oAccMap, includeAccPss=False, pos=None, extra_ids=None):
        """Collect sub-expression poly-statements whose domains overlap `domain`.

        Returns a list of (intersected_domain, stmt, acc_flag) triples, one for
        each (domain, stmt) pair of each poly-statement of subexpr whose
        reduced footprint intersects the reduced footprint of `domain`.
        Accumulation poly-statements are skipped unless includeAccPss is True.
        Returns [] when subexpr is None. `oAccMap`, `pos` and `extra_ids` are
        currently unused (kept for the disabled code path below).
        """
        pss = []
        if subexpr is not None:
            # tdReduced = mReduceDims.intersect_domain(domain).range()
            # tdTransformedByOAcc = oAccMap.intersect_domain(tdReduced).range()
            for ps in subexpr.getPolyStmts():
                if includeAccPss or not ps['acc']:
                    # if extra_ids is not None and (extra_ids[0] or extra_ids[1]):
                    # if pos == 0: # To make it intersectable you extend one operand's touched with extra ids of the other
                    # ps_touched = ps['touched'].add_dims(dim_type.set, len(extra_ids[1]))
                    # else:
                    # ps_touched = ps['touched'].insert_dims(dim_type.set, 2, len(extra_ids[0]))
                    # else:
                    # ps_touched = ps['touched']
                    # touch_int = tdTransformedByOAcc.intersect(ps_touched)
                    # if touch_int.is_empty():
                    # continue
                    # The code above was probably introduced to handle same domains (like from
                    # different matrix partitions) touch different areas of a matrix. I think there was one
                    # major flaw: using the reduceDims map from current expression can be meaningless.
                    # Footprint of the caller's domain under this ps's own reduce map.
                    tdReduced = ps['reducedims'].intersect_domain(domain).range()
                    for d,s in zip(ps['domain'], ps['stmt']):
                        psTdReduced = ps['reducedims'].intersect_domain(d).range()
                        # Keep only pairs whose reduced footprints overlap AND
                        # whose raw domains intersect non-trivially.
                        if not tdReduced.intersect(psTdReduced).is_empty():
                            psDomInt = domain.intersect(d)
                            if not psDomInt.is_empty():
                                pss.append((psDomInt,s,ps['acc']))
        return pss
def genGSSeq(self, imfList, acc=False, accSign=None):
accSign = '' if not accSign else accSign
accPrefix = ("$" + accSign) if acc else ""
p = accPrefix + "[" + str(imfList[0][0]) + "," + str(imfList[0][1]) + "]"
for i in imfList[1:]:
p += accPrefix + "[" + str(i[0]) + "," + str(i[1]) + "]"
# if len(imfList)>1:
# p += ("$" if acc else "") + "[" + str(imfList[-1][0]) + "," + str(imfList[-1][1]) + "]"
return p
def getNonTileMatrix(self, expr):
if isinstance(expr, Quantity) or isinstance(expr, Tile):
return expr.getNonTileOut()
return None
def getUnusedIds(self, imfList):
usedIds = []
for imf in imfList:
usedIds.extend(imf.getAtoms())
unusedIdx = [ idx for idx in self.indices if idx not in usedIds ]
return unusedIdx
    def buildUnStmtLists(self, expr, mat, op, domain, pi, access, oAccSet, mReduceDims, oAccMap, includeAccPss=None, par=None, pos=None, acc_ids=None):
        """Build (domain, input, op) lists for a unary statement.

        When mat is None the operand is a sub-expression: its poly-statements
        overlapping the joined target domain are fused in (accumulation ones go
        to the acc* lists). Otherwise a single leaf access to mat is emitted.
        Returns (tDomainList, accDomainList, inputsList, accInputsList,
        opList, accOpList).
        """
        includeAccPss = False if includeAccPss is None else includeAccPss
        par = False if par is None else par
        def _buildRet(td, inp, domList, inList, opList):
            # Wrap in parentheses when requested, unless the access carries its
            # own permutation (access[3]), which takes precedence.
            inp = (StmtExpr([ inp ], wrap=['(',')']) if par else inp) if access[3] is None else StmtExpr([inp], access[3])
            domList.append(td)
            inList.append([inp])
            opList.append(op)
            # oTargetAccess = [ a for a in oAccess if not self.joinAlignedSets(oAccess[a], td, self.globalspace).is_empty() ]
            # oTargetList.append(oTargetAccess)
        tDomainList, inputsList, opList = [], [], []
        accDomainList, accInputsList, accOpList = [], [], []
        # Target domain: operand access set joined with output access set and
        # the caller's domain, all aligned on the global space.
        tDomain = self.joinAlignedSets(pi['access'][access], oAccSet, self.globalspace)
        tDomain = self.joinAlignedSets(tDomain, domain, self.globalspace)
        # mat = expr.getNonTileOut()
        if mat is None:
            pss = self.fuseStmtWithSub(tDomain, expr, mReduceDims, oAccMap, includeAccPss=includeAccPss, pos=pos, extra_ids=acc_ids)
            if pss:
                for ps in pss:
                    td = ps[0]
                    inp = ps[1].removeWrap()
                    # inp = ps[1]['stmt'].removeWrap()
                    # if ps[1]['acc']:
                    if ps[2]:
                        _buildRet(td, inp, accDomainList, accInputsList, accOpList)
                    else:
                        _buildRet(td, inp, tDomainList, inputsList, opList)
        else:
            # inp = StmtExpr([mat.name+self.genGSSeq(access[1])])
            # Leaf operand: matrix reference followed by its gather/scatter sequence.
            inp = StmtExpr([mat.toLL()+self.genGSSeq(access[1])])
            _buildRet(tDomain, inp, tDomainList, inputsList, opList)
        return (tDomainList, accDomainList, inputsList, accInputsList, opList, accOpList)
    def buildBinStmtLists(self, exprs, mats, op, domain, lpi, rpi, access, oAccSet, mReduceDims, oAccMap, includeAccPss=None, par=None, acc_ids=None):
        """Build (domain, [lhs, rhs], op) lists for a binary statement.

        Each operand is either a leaf matrix reference (mats[k] is not None) or
        a fused sub-expression (mats[k] is None), mirroring buildUnStmtLists.
        A pair lands in the acc* lists when either fused operand came from an
        accumulation poly-statement. Returns (tDomainList, accDomainList,
        inputsList, accInputsList, opList, accOpList).
        """
        includeAccPss = (False, )*2 if includeAccPss is None else includeAccPss
        par = (False, )*2 if par is None else par
        def _buildRet(td, lin, rin, domList, inList, opList):
            # Parenthesize each operand when requested, unless its access
            # carries its own permutation (access[k][3]).
            lin = (StmtExpr([ lin ], wrap=['(',')']) if par[0] else lin) if access[0][3] is None else StmtExpr([lin], access[0][3])
            rin = (StmtExpr([ rin ], wrap=['(',')']) if par[1] else rin) if access[1][3] is None else StmtExpr([rin], access[1][3])
            inputs = [lin, rin]
            domList.append(td)
            inList.append(inputs)
            opList.append(op)
        tDomainList, inputsList, opList = [], [], []
        accDomainList, accInputsList, accOpList = [], [], []
        # Target domain: both operand access sets joined with the output access
        # set and the caller's domain, aligned on the global space.
        tDomain = self.joinAlignedSets(lpi['access'][access[0]], rpi['access'][access[1]], self.globalspace)
        tDomain = self.joinAlignedSets(tDomain, oAccSet, self.globalspace)
        tDomain = self.joinAlignedSets(tDomain, domain, self.globalspace)
        # Four cases: (fused, fused), (fused, leaf), (leaf, fused), (leaf, leaf).
        if mats[0] is None:
            pssl = self.fuseStmtWithSub(tDomain, exprs[0], mReduceDims, oAccMap, includeAccPss=includeAccPss[0], pos=0, extra_ids=acc_ids)
            if pssl:
                for psl in pssl:
                    td = psl[0]
                    lin = psl[1].removeWrap()
                    if mats[1] is None:
                        # Note: the rhs is fused against the narrowed domain td.
                        pssr = self.fuseStmtWithSub(td, exprs[1], mReduceDims, oAccMap, includeAccPss=includeAccPss[1], pos=1, extra_ids=acc_ids)
                        if pssr:
                            for psr in pssr:
                                td = psr[0]
                                rin = psr[1].removeWrap()
                                if psl[2] or psr[2]:
                                    _buildRet(td, lin, rin, accDomainList, accInputsList, accOpList)
                                else:
                                    _buildRet(td, lin, rin, tDomainList, inputsList, opList)
                    else:
                        rin = StmtExpr([mats[1].toLL()+self.genGSSeq(access[1][1])])
                        if psl[2]:
                            _buildRet(td, lin, rin, accDomainList, accInputsList, accOpList)
                        else:
                            _buildRet(td, lin, rin, tDomainList, inputsList, opList)
        else:
            lin = StmtExpr([mats[0].toLL()+self.genGSSeq(access[0][1])])
            if mats[1] is None:
                pssr = self.fuseStmtWithSub(tDomain, exprs[1], mReduceDims, oAccMap, includeAccPss=includeAccPss[1], pos=1, extra_ids=acc_ids)
                if pssr:
                    for psr in pssr:
                        td = psr[0]
                        rin = psr[1].removeWrap()
                        if psr[2]:
                            _buildRet(td, lin, rin, accDomainList, accInputsList, accOpList)
                        else:
                            _buildRet(td, lin, rin, tDomainList, inputsList, opList)
            else:
                rin = StmtExpr([mats[1].toLL()+self.genGSSeq(access[1][1])])
                _buildRet(tDomain, lin, rin, tDomainList, inputsList, opList)
        return (tDomainList, accDomainList, inputsList, accInputsList, opList, accOpList)
def set_from_ctx(self, ctx):
for_ctx = filter(lambda c: isinstance(c, llFor), ctx)
if_ctx = filter(lambda c: isinstance(c, llIf), ctx)
constr_list = [ "(exists s: " + str(c.idx) + "="+str(c.s)+"s and " + str(c.lb) + " <= " + str(c.idx) + " <= " + str(c.ub) + ")" for c in for_ctx ]
constr_list.extend( [c.conds[0].getIslStr() for c in if_ctx] )
sCst = " and ".join(constr_list)
sIndices = ",".join(self.indices)
return Set("{["+sIndices+"] : "+sCst+"}")
def get_out_acc_set_and_map(self, expr, blk_out, ctx):
trailIds = [str(idx) for idx in expr.accIds]
trailIds.extend( [str(c.idx) for c in ctx if isinstance(c, llFor)] )
sTrailIds = ""
if expr.accIds or ctx:
sTrailIds = "," + ",".join(trailIds)
fullOAccSet = Set("{[i,j" + sTrailIds + "]: 1=0}")
oAccMap = blk_out.getFlatAccessMapND(trail=trailIds)
return (sTrailIds, fullOAccSet, oAccMap)
def llProgram(self, llprog):
getattr(self, llprog.stmtList.__class__.__name__)(llprog.stmtList, [])
def llBlock(self, blk, ctx):
for b in blk:
getattr(self, b.__class__.__name__)(b, ctx)
def llFor(self, llfor, ctx):
getattr(self, llfor.body.__class__.__name__)(llfor.body, ctx+[ llfor ] )
def llIf(self, llif, ctx):
getattr(self, llif.bodys[0].__class__.__name__)(llif.bodys[0], ctx+[ llif ] )
def llStmt(self, llstmt, ctx):
baselevel = self.baselevel
if not llstmt.can_gen_with_nublac(self.nublac):
self.baselevel = 2
#print "Entering llStmt..."
getattr(self, llstmt.eq.__class__.__name__)(llstmt.eq, ctx )
self.baselevel = baselevel
self.correctDomains(llstmt.eq.getPolyStmts())
self.eq_id += 1
    def Assign(self, expr, ctx):
        """Visit an assignment: emit copy poly-statements for a Tile RHS.

        The RHS sub-expression is visited first. When the RHS is a Tile, a copy
        statement is generated for every compatible (rhs, out) structure and
        access pair over the non-empty joined domain; otherwise the RHS's
        poly-statements are simply propagated to this expression.
        """
        getattr(self, expr.inexpr[1].__class__.__name__)(expr.inexpr[1], ctx)
        if isinstance(expr.inexpr[1], Tile):
            blkRhs = expr.getInexprMat(1)
            # Output block, possibly substituted for this equation id.
            blkOut = self.subsWithOut.get((self.eq_id, expr.inexpr[1]), expr.inexpr[1]).getOut()
            # blkOut = expr.getOut()
            set_ctx = self.set_from_ctx(ctx)
            rMat = self.getNonTileMatrix(expr.inexpr[1])
            oMat = self.subsWithOut.get((self.eq_id, expr.inexpr[1]), expr.inexpr[1]).getNonTileOut()
            rPInfo = blkRhs.getPolyInfo(self.indices, baselevel=self.baselevel)
            oPInfo = blkOut.getPolyInfo(self.indices, baselevel=self.baselevel)
            # Plain copies have no accumulation indices.
            expr.accIds = []
            sTrailIds, fullOAccSet, oAccMap = self.get_out_acc_set_and_map(expr, blkOut, ctx)
            polyStmts = []
            for r in range(len(rPInfo)):
                for c in range(len(rPInfo[0])):
                    rpi,opi = rPInfo[r][c], oPInfo[r][c]
                    # Cartesian pairing of rhs/out accesses and structures
                    # (constant output structures are excluded).
                    cpyAccess = [(rAccess,oAccess) for rAccess in rpi['access'] for oAccess in opi['access']]
                    cpyStruct = [(rStr,oStr) for rStr in rpi['struct'].items() for oStr in opi['struct'].items() if not issubclass(oStr[0], ConstantMatrix) ]
                    for cStr in cpyStruct:
                        cpyDomain = self.joinAlignedSets(cStr[0][1], cStr[1][1], self.globalspace)
                        cpyDomain = self.joinAlignedSets(cpyDomain, rpi['tiling'], self.globalspace)
                        cpyDomain = self.joinAlignedSets(cpyDomain, set_ctx, self.globalspace)
                        if cpyDomain.is_empty():
                            continue
                        for cAccess in cpyAccess:
                            oAccess = cAccess[1]
                            oAccSet = opi['access'][oAccess]
                            # Map from the full index space to the output's (i,j) plus trailing ids.
                            mReduceDims = Map("{["+(",".join(self.indices))+"]->[i,j"+sTrailIds+"]: " + str(oAccess[2][0].of(0)) + "=i and " + str(oAccess[2][1].of(0)) + "=j}")
                            tDomainList, inputsList, opList = [], [], []
                            tDomainList, _, inputsList, _, opList, _ = \
                                self.buildUnStmtLists(expr.inexpr[1], rMat, None, cpyDomain, rpi, cAccess[0], oAccSet, mReduceDims, oAccMap)
                            if tDomainList:
                                for td,inputs,op in zip(tDomainList,inputsList,opList):
                                    if not td.is_empty():
                                        tdReduced = mReduceDims.intersect_domain(td).range()
                                        tdTransformedByOAcc = oAccMap.intersect_domain(tdReduced).range()
                                        # if fullOAccSet.intersect(tdTransformedByOAcc).is_empty():
                                        # fullOAccSet = fullOAccSet.union(tdTransformedByOAcc)
                                        # polyStmt = {}
                                        # Reuse an existing poly-statement touching the same output area.
                                        # NOTE(review): relies on Python 2 filter() returning a
                                        # subscriptable list -- wrap in list(...) for Python 3.
                                        overlapping_ps = filter(lambda ps: tdTransformedByOAcc == ps['touched'], polyStmts)
                                        polyStmt, new_ps = (overlapping_ps[0], False) if overlapping_ps else ({}, True)
                                        if new_ps:
                                            polyStmt['touched'] = tdTransformedByOAcc
                                            polyStmt['stmt'] = []
                                            polyStmt['perm_oacc'] = []
                                            polyStmt['domain'] = []
                                            polyStmt['outinfo'] = []
                                        scat = self.genGSSeq(oAccess[1])
                                        oMat2ll = oMat.toLL(sep=True)
                                        if len(oMat2ll) > 1:
                                            wrap = [oMat2ll[1]+scat, ""]
                                        else:
                                            wrap = [scat, ""]
                                        finStmt = StmtExpr(inputs, op)
                                        # Undo the output permutation (if any) on the stored statement.
                                        finStmt = finStmt if oAccess[3] is None else StmtExpr([finStmt], oAccess[3].inverse())
                                        polyStmt['stmt'].append( StmtExpr([finStmt], None, wrap) )
                                        polyStmt['perm_oacc'].append( oAccess[3] )
                                        polyStmt['domain'].append( td )
                                        polyStmt['acc'] = False
                                        polyStmt['outinfo'].append( [oMat2ll[0], scat] )
                                        if new_ps:
                                            polyStmts.append(polyStmt)
            expr.setPolyStmts(polyStmts)
        else:
            # Non-Tile RHS: just propagate its poly-statements upward.
            expr.setPolyStmts(expr.inexpr[1].getPolyStmts())
    def Mul(self, expr, ctx):
        """Visit a multiplication: emit initialization and accumulation poly-statements.

        Both operands are visited first. The full iteration domain of the
        product is computed, its initialization sub-domain is separated from
        the accumulation sub-domain (compute_full_init_domain), and statements
        are generated per structure/access combination for each sub-domain:
        plain writes for the init part, "$"-prefixed accumulating writes for
        the rest.
        """
        getattr(self, expr.inexpr[0].__class__.__name__)(expr.inexpr[0], ctx)
        getattr(self, expr.inexpr[1].__class__.__name__)(expr.inexpr[1], ctx)
        # opOrder = self.opOrder
        # self.opOrder+=1
        blkLhs = expr.getInexprMat(0)
        blkRhs = expr.getInexprMat(1)
        blkOut = self.subsWithOut.get((self.eq_id, expr), expr).getOut()
        # blkOut = expr.getOut()
        set_ctx = self.set_from_ctx(ctx)
        lMat, rMat = self.getNonTileMatrix(expr.inexpr[0]), self.getNonTileMatrix(expr.inexpr[1])
        oMat = self.subsWithOut.get((self.eq_id, expr), expr).getNonTileOut()
        # Accumulation indices: level-1 indices of the lhs (the reduction dims).
        expr.accIds = [ i for i in self.indices if i in blkLhs.idxPosAndLevInfo and blkLhs.idxPosAndLevInfo[i][2] == 1 ]
        lPinfo = blkLhs.getPolyInfo(self.indices, baselevel=self.baselevel)
        rPInfo = blkRhs.getPolyInfo(self.indices, baselevel=self.baselevel)
        oPInfo = blkOut.getPolyInfo(self.indices, baselevel=self.baselevel)
        sTrailIds, fullOAccSet, oAccMap = self.get_out_acc_set_and_map(expr, blkOut, ctx)
        # First pass: union all non-zero structure domains into the full domain.
        fullDomain = self.getEmptyDomain()
        for r in range(len(lPinfo)):
            for c in range(len(rPInfo[0])):
                for kc in range(len(lPinfo[0])):
                    lpi,rpi = lPinfo[r][kc], rPInfo[kc][c]
                    prodStruct = [(lStr,rStr) for lStr in lpi['struct'].items() if lStr[0] is not ZeroMatrix for rStr in rpi['struct'].items() if rStr[0] is not ZeroMatrix ]
                    for pStr in prodStruct:
                        lTSDomain = self.joinAlignedSets(pStr[0][1], lpi['tiling'], self.globalspace)
                        rTSDomain = self.joinAlignedSets(pStr[1][1], rpi['tiling'], self.globalspace)
                        mulDomain = self.joinAlignedSets(lTSDomain, rTSDomain, self.globalspace)
                        mulDomain = self.joinAlignedSets(mulDomain, set_ctx, self.globalspace)
                        fullDomain = fullDomain.union(mulDomain).coalesce().remove_redundancies()
        # Sub-domain where the output is first written (no accumulation yet).
        initDomain = self.compute_full_init_domain(self.globalspace, fullDomain, expr.accIds)
        polyStmts = []
        # Second pass: generate statements per structure/access combination.
        for r in range(len(lPinfo)):
            for c in range(len(rPInfo[0])):
                for kc in range(len(lPinfo[0])):
                    lpi,rpi,opi = lPinfo[r][kc], rPInfo[kc][c], oPInfo[r][c]
                    prodAccess = [(lAccess,rAccess, oAccess) for lAccess in lpi['access'] for rAccess in rpi['access'] for oAccess in opi['access']]
                    prodStruct = [(lStr,rStr, oStr) for lStr in lpi['struct'].items() if lStr[0] is not ZeroMatrix for rStr in rpi['struct'].items() if rStr[0] is not ZeroMatrix \
                                  for oStr in opi['struct'].items() if not issubclass(oStr[0], ConstantMatrix)]
                    for pStr in prodStruct:
                        lTSDomain = self.joinAlignedSets(pStr[0][1], lpi['tiling'], self.globalspace)
                        rTSDomain = self.joinAlignedSets(pStr[1][1], rpi['tiling'], self.globalspace)
                        mulDomain = self.joinAlignedSets(lTSDomain, rTSDomain, self.globalspace)
                        mulDomain = self.joinAlignedSets(mulDomain, pStr[2][1], self.globalspace)
                        mulDomain = self.joinAlignedSets(mulDomain, set_ctx, self.globalspace)
                        # Split this combination's domain into init vs accumulation parts.
                        locInitDomain = initDomain.intersect(mulDomain)
                        accDomain = mulDomain - locInitDomain
                        for pAccess in prodAccess:
                            oAccess = pAccess[2]
                            oAccSet = opi['access'][oAccess]
                            # Map from the full index space to the output's (i,j) plus trailing ids.
                            mReduceDims = Map("{["+(",".join(self.indices))+"]->[i,j"+sTrailIds+"]: " + str(oAccess[2][0].of(0)) + "=i and " + str(oAccess[2][1].of(0)) + "=j}")
                            if not locInitDomain.is_empty():
                                # Initialization statements: plain (non-"$") writes.
                                tDomainList, opList, inputsList = [], [], []
                                tDomainList, _, inputsList, _, opList, _ = \
                                    self.buildBinStmtLists(expr.inexpr, (lMat, rMat), Mul, locInitDomain, lpi, rpi, pAccess, oAccSet, mReduceDims, oAccMap, par=(True,True), acc_ids=[expr.accIds,expr.accIds])
                                if tDomainList:
                                    for td, inputs, op in zip(tDomainList, inputsList, opList):
                                        if not td.is_empty():
                                            tdReduced = mReduceDims.intersect_domain(td).range()
                                            tdTransformedByOAcc = oAccMap.intersect_domain(tdReduced).range()
                                            # if fullOAccSet.intersect(tdTransformedByOAcc).is_empty():
                                            # fullOAccSet = fullOAccSet.union(tdTransformedByOAcc)
                                            # polyStmt = {}
                                            # Reuse an existing poly-statement touching the same output area.
                                            # NOTE(review): Python 2 filter()[0] idiom -- wrap in list(...) for Python 3.
                                            overlapping_ps = filter(lambda ps: tdTransformedByOAcc == ps['touched'], polyStmts)
                                            polyStmt, new_ps = (overlapping_ps[0], False) if overlapping_ps else ({}, True)
                                            if new_ps:
                                                polyStmt['touched'] = tdTransformedByOAcc
                                                polyStmt['reducedims'] = mReduceDims
                                                polyStmt['stmt'] = []
                                                polyStmt['perm_oacc'] = []
                                                polyStmt['domain'] = []
                                                polyStmt['outinfo'] = []
                                            scat = self.genGSSeq(oAccess[1])
                                            oMat2ll = oMat.toLL(sep=True)
                                            if len(oMat2ll) > 1:
                                                wrap = [oMat2ll[1]+scat, ""]
                                            else:
                                                wrap = [scat, ""]
                                            finStmt = StmtExpr(inputs, op)
                                            finStmt = finStmt if oAccess[3] is None else StmtExpr([finStmt], oAccess[3].inverse())
                                            polyStmt['stmt'].append( StmtExpr([finStmt], None, wrap) )
                                            polyStmt['perm_oacc'].append( oAccess[3] )
                                            polyStmt['domain'].append( td )
                                            polyStmt['acc'] = False
                                            polyStmt['outinfo'].append( [oMat2ll[0], scat] )
                                            if new_ps:
                                                polyStmts.append(polyStmt)
                            if not accDomain.is_empty():
                                # Accumulation statements: "$"-prefixed accumulating writes.
                                tDomainList, opList, inputsList = [], [], []
                                tDomainList, _, inputsList, _, opList, _ = \
                                    self.buildBinStmtLists(expr.inexpr, (lMat, rMat), Mul, accDomain, lpi, rpi, pAccess, oAccSet, mReduceDims, oAccMap, par=(True,True), acc_ids=[expr.accIds,expr.accIds])
                                if tDomainList:
                                    for td, inputs, op in zip(tDomainList, inputsList, opList):
                                        if not td.is_empty():
                                            tdReduced = mReduceDims.intersect_domain(td).range()
                                            tdTransformedByOAcc = oAccMap.intersect_domain(tdReduced).range()
                                            # if fullOAccSet.intersect(tdTransformedByOAcc).is_empty():
                                            # fullOAccSet = fullOAccSet.union(tdTransformedByOAcc)
                                            # polyStmt = {}
                                            # NOTE(review): Python 2 filter()[0] idiom -- wrap in list(...) for Python 3.
                                            overlapping_ps = filter(lambda ps: tdTransformedByOAcc == ps['touched'], polyStmts)
                                            polyStmt, new_ps = (overlapping_ps[0], False) if overlapping_ps else ({}, True)
                                            if new_ps:
                                                polyStmt['touched'] = tdTransformedByOAcc
                                                polyStmt['reducedims'] = mReduceDims
                                                polyStmt['stmt'] = []
                                                polyStmt['perm_oacc'] = []
                                                polyStmt['domain'] = []
                                                polyStmt['outinfo'] = []
                                            scat = self.genGSSeq(oAccess[1])
                                            oMat2ll = oMat.toLL(acc=True, sep=True)
                                            if len(oMat2ll) > 1:
                                                wrap = ["$"+oMat2ll[1]+self.genGSSeq(oAccess[1], acc=True), ""]
                                            else:
                                                wrap = [self.genGSSeq(oAccess[1], acc=True),""]
                                            finStmt = StmtExpr(inputs, op)
                                            finStmt = finStmt if oAccess[3] is None else StmtExpr([finStmt], oAccess[3].inverse())
                                            polyStmt['stmt'].append( StmtExpr([finStmt], None, wrap) )
                                            polyStmt['perm_oacc'].append( oAccess[3] )
                                            polyStmt['domain'].append( td )
                                            polyStmt['acc'] = True
                                            polyStmt['outinfo'].append( [oMat2ll[0], scat] )
                                            if new_ps:
                                                polyStmts.append(polyStmt)
        expr.setPolyStmts(polyStmts)
def LDiv(self, expr, ctx):
blkLhs = expr.getInexprMat(0)
if isinstance(blkLhs, LowerTriangular):
self.forward_sub(expr, ctx)
elif isinstance(blkLhs, UpperTriangular):
self.backward_sub(expr, ctx)
# self.LDiv2(expr, opts)
def forward_sub(self, expr, ctx):
getattr(self, expr.inexpr[0].__class__.__name__)(expr.inexpr[0], ctx)
getattr(self, expr.inexpr[1].__class__.__name__)(expr.inexpr[1], ctx)
# opOrder = self.opOrder
# self.opOrder+=1
blkLhs = expr.getInexprMat(0)
blkRhs = expr.getInexprMat(1)
blkOut = expr.getOut()
set_ctx = self.set_from_ctx(ctx)
expr.accIds = expr.inexpr[0].accIds + expr.inexpr[1].accIds
sTrailIds, fullOAccSet, oAccMap = self.get_out_acc_set_and_map(expr, blkOut, ctx)
polyStmts = []
# lMat, rMat = self.getNonTileMatrix(expr.inexpr[0]), self.getNonTileMatrix(self.subsWithOut.get(expr, expr))
#
# lPinfo = blkLhs.getPolyInfo(self.indices, baselevel=self.baselevel, extrainfo=['StrictLower', 'Diag', 'TopLeft'])
# rPInfo = blkOut.getPolyInfo(self.indices, baselevel=self.baselevel)
#
# accIdx = [ i for i in self.indices if i in blkLhs.idxPosAndLevInfo and blkLhs.idxPosAndLevInfo[i][2] == 1 ]
#
# polyStmts = []
#
# TList = [[],[]]
# for r in range(len(lPinfo)):
# for c in range(len(rPInfo[0])):
#
# # Temp = Matrix("P"+str(globalSSAIndex()), rPInfo[r][c]['topblk'], (1,1))
# Temp = Matrix("P"+str(globalSSAIndex()), rPInfo[r][c]['topblk'], tuple(blkOut.getPartitionSize(r,c)) )
# self.mDict[Temp.name] = Matrix(Temp.name, scalar_block(), Temp.getFlatSize(), attr={ 'o':True, 'i':True, 't':True })
# Temp.spaceIdxNames = [ deepcopy(s) for s in blkRhs.spaceIdxNames ]
# for d in range(2):
# if(Temp.size[d] == 1):
# Temp.spaceIdxNames[d][0] = '0'
# # Temp.spaceIdxNames[0][0] = '0'
# # Temp.spaceIdxNames[1][0] = '0'
# tPInfo = Temp.getPolyInfo(self.indices, baselevel=self.baselevel)
# TList[r].append((Temp, tPInfo))
#
# initDomain = self.getEmptyDomain()
#
# for kc in range(len(lPinfo[0])):
# lpi,rpi = lPinfo[r][kc], rPInfo[kc][c]
#
# # oAccess = tPInfo[0][0]['access']
# prodAccess = [(lAccess,rAccess,oAccess) for lAccess in lpi['access'] for rAccess in rpi['access'] for oAccess in tPInfo[0][0]['access']]
# prodStruct = [(lStr,rStr) for lStr in lpi['StrictLower'].items() if lStr[0] is Matrix for rStr in rpi['struct'].items() if rStr[0] is Matrix ]
# for pStr in prodStruct:
# lTSDomain = self.joinAlignedSets(pStr[0][1], lpi['tiling'], self.globalspace)
# rTSDomain = self.joinAlignedSets(pStr[1][1], rpi['tiling'], self.globalspace)
# mulDomain = self.joinAlignedSets(lTSDomain, rTSDomain, self.globalspace)
# # locInitDomain = self.computeInitDomain(self.globalspace, initDomain, mulDomain, [lpi['flatstruct'], rpi['flatstruct']], accIdx)
# locInitDomain = self.computeInitDomain(self.globalspace, initDomain, mulDomain, [lTSDomain.convex_hull(), rTSDomain.convex_hull()], accIdx)
# accDomain = mulDomain - locInitDomain
# initDomain = initDomain.union(locInitDomain).coalesce().remove_redundancies()
#
# for pAccess in prodAccess:
# # tDomain = self.joinAlignedSets(lpi['access'][pAccess[0]], rpi['access'][pAccess[1]], self.globalspace)
# # tDomain = self.joinAlignedSets(tDomain, mulDomain, self.globalspace)
# # if not tDomain.is_empty():
# # #Determine Init instances
# # locInitDomain = self.computeInitDomain(self.globalspace, initDomain, tDomain, [lpi['flatstruct'], rpi['flatstruct']], accIdx)
# # accDomain = tDomain - locInitDomain
# # initDomain = initDomain.union(locInitDomain).coalesce().remove_redundancies()
# oAccess = pAccess[2]
# oAccSet = tPInfo[0][0]['access'][oAccess]
# if not locInitDomain.is_empty():
# # pssl = self.fuseStmtWithSub(locInitDomain, expr.inexpr[0])
# # if pssl:
# # locInitDomain = pssl[0][0]
# # lin = pssl[0][1]['stmt'].removeWrap()
# # else:
# # lin = StmtExpr([lMat.name+self.genGSSeq(pAccess[0][1])])
# # # pssr = self.fuseStmtWithSub(locInitDomain, expr.inexpr[1])
# # # if pssr:
# # # locInitDomain = pssr[0][0]
# # # rin = pssr[0][1]['stmt'].removeWrap()
# # # else:
# # rin = StmtExpr([rMat.name+self.genGSSeq(pAccess[1][1])])
# #
# # lin = StmtExpr([ lin ], wrap=['(',')']) if pAccess[0][3] is None else StmtExpr([lin], pAccess[0][3])
# # rin = StmtExpr([ rin ], wrap=['(',')']) if pAccess[1][3] is None else StmtExpr([rin], pAccess[1][3])
# # inputs = [lin, rin]
# tDomainList, opList, inputsList = [], [], []
# tDomainList, _, inputsList, _, opList, _ = \
# self.buildBinStmtLists((expr.inexpr[0],None), (lMat, rMat), Mul, locInitDomain, lpi, rpi, pAccess, oAccSet, par=(True,True))
# if tDomainList:
# for td, inputs, op in zip(tDomainList, inputsList, opList):
# if not td.is_empty():
# # scat = self.genGSSeq(oAccess[0][1])
# scat = self.genGSSeq(oAccess[1])
# wrap = [scat,""]
# polyStmt = {}
# # polyStmt['stmt'] = StmtExpr(inputs, op, wrap)
# finStmt = StmtExpr(inputs, op)
# finStmt = finStmt if oAccess[3] is None else StmtExpr([finStmt], oAccess[3].inverse())
# polyStmt['stmt'] = StmtExpr([finStmt], None, wrap)
# polyStmt['outinfo'] = [Temp.name, scat]
# # polyStmt['domain'] = locInitDomain.coalesce().remove_redundancies()
# polyStmt['domain'] = td
# polyStmt['acc'] = False
# polyStmt['oporder'] = opOrder
# polyStmts.append(polyStmt)
# if not accDomain.is_empty():
# # pssl = self.fuseStmtWithSub(accDomain, expr.inexpr[0])
# # if pssl:
# # accDomain = pssl[0][0]
# # lin = pssl[0][1]['stmt'].removeWrap()
# # else:
# # lin = StmtExpr([lMat.name+self.genGSSeq(pAccess[0][1])])
# # rin = StmtExpr([rMat.name+self.genGSSeq(pAccess[1][1])])
# # lin = StmtExpr([ lin ], wrap=['(',')']) if pAccess[0][3] is None else StmtExpr([lin], pAccess[0][3])
# # rin = StmtExpr([ rin ], wrap=['(',')']) if pAccess[1][3] is None else StmtExpr([rin], pAccess[1][3])
# # inputs = [lin, rin]
# tDomainList, opList, inputsList = [], [], []
# tDomainList, _, inputsList, _, opList, _ = \
# self.buildBinStmtLists((expr.inexpr[0],None), (lMat, rMat), Mul, accDomain, lpi, rpi, pAccess, oAccSet, par=(True,True))
# if tDomainList:
# for td, inputs, op in zip(tDomainList, inputsList, opList):
# if not td.is_empty():
# # wrap = [self.genGSSeq(oAccess[0][1], acc=True),""]
# wrap = [self.genGSSeq(oAccess[1], acc=True),""]
# polyStmt = {}
# polyStmt['stmt'] = StmtExpr(inputs, op, wrap)
# polyStmt['outinfo'] = [Temp.name, self.genGSSeq(oAccess[1])]
# # polyStmt['domain'] = accDomain.coalesce().remove_redundancies()
# polyStmt['domain'] = td
# polyStmt['acc'] = True
# polyStmt['oporder'] = opOrder
# polyStmts.append(polyStmt)
lPinfo = blkLhs.getPolyInfo(self.indices, baselevel=self.baselevel, extrainfo=['StrictLower', 'Diag', 'TopLeft'])
# lPinfo = blkLhs.getPolyInfo(self.indices, baselevel=self.baselevel)
rPInfo = blkRhs.getPolyInfo(self.indices, baselevel=self.baselevel)
oPInfo = blkOut.getPolyInfo(self.indices, baselevel=self.baselevel)
lMat, rMat, oMat = self.getNonTileMatrix(expr.inexpr[0]), self.getNonTileMatrix(expr.inexpr[1]), self.subsWithOut.get((self.eq_id, expr), expr).getNonTileOut()
# for r in range(len(lPinfo)):
# for c in range(len(rPInfo[0])):
# Temp, tPInfo = TList[r][c]
# opi = oPInfo[r][c]
# for kc in range(len(lPinfo[0])):
# lpi,rpi = lPinfo[r][kc], rPInfo[kc][c]
# # oAccess = opi['access'].items()[0]
# # oAccess = opi['access']
# tAccess = tPInfo[0][0]['access'].items()[0]
# ldivAccess = [(lAccess,rAccess,oAccess) for lAccess in lpi['access'] for rAccess in rpi['access'] for oAccess in opi['access']]
# ldivStruct = [(lStr,rStr) for lStr in lpi['Diag'].items() for rStr in rpi['struct'].items() if rStr[0] is not ZeroMatrix ]
# tl = self.getEmptyDomain() if not lpi['TopLeft'] else lpi['TopLeft'].items()[0][1]
# for ldStr in ldivStruct:
# lTSDomain = self.joinAlignedSets(ldStr[0][1]-tl, lpi['tiling'], self.globalspace)
# rTSDomain = self.joinAlignedSets(ldStr[1][1], rpi['tiling'], self.globalspace)
# ldivDomain = self.joinAlignedSets(lTSDomain, rTSDomain, self.globalspace)
#
# for ldAccess in ldivAccess:
# # tDomain = self.joinAlignedSets(lpi['access'][ldAccess[0]], rpi['access'][ldAccess[1]], self.globalspace)
# # tDomain = self.joinAlignedSets(tDomain, ldivDomain, self.globalspace)
# # if not tDomain.is_empty():
# # polyStmt = {}
# # pssl = self.fuseStmtWithSub(tDomain, expr.inexpr[0])
# # if pssl:
# # tDomain = pssl[0][0]
# # lin = pssl[0][1]['stmt'].removeWrap()
# # else:
# # lin = StmtExpr([lMat.name+self.genGSSeq(ldAccess[0][1])])
# # pssr = self.fuseStmtWithSub(tDomain, expr.inexpr[1])
# # if pssr:
# # tDomain = pssr[0][0]
# # rin = pssr[0][1]['stmt'].removeWrap()
# # else:
# # rin = StmtExpr([rMat.name+self.genGSSeq(ldAccess[1][1])])
# # lin = StmtExpr([ lin ], wrap=['(',')']) if ldAccess[0][3] is None else StmtExpr([lin], ldAccess[0][3])
# # rin = StmtExpr([ rin ], wrap=['(',')']) if ldAccess[1][3] is None else StmtExpr([rin], ldAccess[1][3])
# # rin = StmtExpr([rin,StmtExpr([Temp.name+self.genGSSeq(tAccess[0][1])])], Sub, ["(",")"])
# # inputs = [lin, rin]
# oAccess = ldAccess[2]
# oAccSet = opi['access'][oAccess]
# tDomainList, opList, inputsList = [], [], []
# tDomainList, _, inputsList, _, opList, _ = \
# self.buildBinStmtLists(expr.inexpr, (lMat, rMat), LDiv, ldivDomain, lpi, rpi, ldAccess, oAccSet, par=(True,True))
# if tDomainList:
# for td, inputs, op in zip(tDomainList, inputsList, opList):
# if not td.is_empty():
# inputs[1] = StmtExpr([inputs[1],StmtExpr([Temp.name+self.genGSSeq(tAccess[0][1])])], Sub, ["(",")"])
# # scat = self.genGSSeq(oAccess[0][1])
# scat = self.genGSSeq(oAccess[1])
# wrap = [scat,""]
# # polyStmt['stmt'] = StmtExpr([StmtExpr([oMat.name]), StmtExpr(inputs, LDiv, wrap)], Assign)
# polyStmt = {}
# polyStmt['stmt'] = StmtExpr(inputs, op, wrap)
# polyStmt['outinfo'] = [oMat.name, scat]
# # polyStmt['domain'] = tDomain.coalesce().remove_redundancies()
# polyStmt['domain'] = td
# polyStmt['acc'] = False
# polyStmt['oporder'] = opOrder
# polyStmts.append(polyStmt)
for r in range(len(lPinfo)):
for c in range(len(rPInfo[0])):
opi = oPInfo[r][c]
for kc in range(len(lPinfo[0])):
lpi,rpi = lPinfo[r][kc], rPInfo[kc][c]
# oAccess = opi['access'].items()[0]
# oAccess = opi['access']
ldivAccess = [(lAccess,rAccess,oAccess) for lAccess in lpi['access'] for rAccess in rpi['access'] for oAccess in opi['access']]
ldivStruct = [(lStr,rStr) for lStr in lpi['TopLeft'].items() if issubclass(lStr[0], Triangular) for rStr in rpi['struct'].items() if rStr[0] is not ZeroMatrix ]
for ldStr in ldivStruct:
lTSDomain = self.joinAlignedSets(ldStr[0][1], lpi['tiling'], self.globalspace)
rTSDomain = self.joinAlignedSets(ldStr[1][1], rpi['tiling'], self.globalspace)
ldivDomain = self.joinAlignedSets(lTSDomain, rTSDomain, self.globalspace)
ldivDomain = self.joinAlignedSets(ldivDomain, set_ctx, self.globalspace)
for ldAccess in ldivAccess:
oAccess = ldAccess[2]
oAccSet = opi['access'][oAccess]
mReduceDims = Map("{["+(",".join(self.indices))+"]->[i,j"+sTrailIds+"]: " + str(oAccess[2][0].of(0)) + "=i and " + str(oAccess[2][1].of(0)) + "=j}")
tDomainList, inputsList, opList = [], [], []
tDomainList, _, inputsList, _, opList, _ = \
self.buildBinStmtLists(expr.inexpr, (lMat, rMat), LDiv, ldivDomain, lpi, rpi, ldAccess, oAccSet, par=(True,True))
if tDomainList:
for td,inputs,op in zip(tDomainList,inputsList,opList):
if not td.is_empty():
tdReduced = mReduceDims.intersect_domain(td).range()
tdTransformedByOAcc = oAccMap.intersect_domain(tdReduced).range()
if fullOAccSet.intersect(tdTransformedByOAcc).is_empty():
fullOAccSet = fullOAccSet.union(tdTransformedByOAcc)
scat = self.genGSSeq(oAccess[1])
oMat2ll = oMat.toLL(sep=True)
if len(oMat2ll) > 1:
wrap = [oMat2ll[1]+scat, ""]
else:
wrap = [scat, ""]
polyStmt = {}
finStmt = StmtExpr(inputs, op)
finStmt = finStmt if oAccess[3] is None else StmtExpr([finStmt], oAccess[3].inverse())
polyStmt['stmt'] = StmtExpr([finStmt], None, wrap)
polyStmt['domain'] = td
polyStmt['acc'] = False
polyStmt['outinfo'] = [oMat2ll[0], scat]
polyStmts.append(polyStmt)
# for td, inputs, op in zip(tDomainList, inputsList, opList):
# if not td.is_empty():
# scat = self.genGSSeq(oAccess[1])
# wrap = [scat,""]
# polyStmt = {}
# polyStmt['stmt'] = StmtExpr(inputs, op, wrap)
# polyStmt['outinfo'] = [oMat.name, scat]
# polyStmt['domain'] = td
# polyStmt['acc'] = False
# polyStmts.append(polyStmt)
#
expr.setPolyStmts(polyStmts)
# def LDiv2(self, expr, opts):
#
# getattr(self, expr.inexpr[0].__class__.__name__)(expr.inexpr[0], opts)
# getattr(self, expr.inexpr[1].__class__.__name__)(expr.inexpr[1], opts)
# opOrder = self.opOrder
# getattr(self, expr.suppExpr.__class__.__name__)(expr.suppExpr, opts)
# # self.opOrder+=1
#
# blkLhs = expr.getInexprMat(0)
# blkRhs = expr.getInexprMat(1)
# blkOut = expr.getOut()
# suppBlkLhs = expr.suppExpr.getInexprMat(0)
# suppBlkRhs = expr.suppExpr.getInexprMat(1)
#
# lPInfo = blkLhs.getPolyInfo(self.indices, baselevel=self.baselevel, extrainfo=['StrictLower', 'Diag', 'Tip'])
# rPInfo = blkRhs.getPolyInfo(self.indices, baselevel=self.baselevel)
# oPInfo = blkOut.getPolyInfo(self.indices, baselevel=self.baselevel, extrainfo=['LowerStrip'])
# suppLPInfo = suppBlkLhs.getPolyInfo(self.indices, baselevel=self.baselevel)
# suppRPInfo = suppBlkRhs.getPolyInfo(self.indices, baselevel=self.baselevel)
# lMat, rMat, oMat = expr.inexpr[0].getNonTileOut(), expr.inexpr[1].getNonTileOut(), self.subsWithOut.get(expr, expr).getNonTileOut()
#
# accIdx = [ i for i in self.indices if i in blkLhs.idxPosAndLevInfo and blkLhs.idxPosAndLevInfo[i][2] == 1 ]
#
# polyStmts = []
#
# for r in range(len(lPInfo)):
# for c in range(len(rPInfo[0])):
# opi = oPInfo[r][c]
# for kc in range(len(lPInfo[0])):
# lpi,rpi = lPInfo[r][kc], rPInfo[kc][c]
# oAccess = opi['access'].items()[0]
# ldivAccess = [(lAccess,rAccess) for lAccess in lpi['access'] for rAccess in rpi['access']]
# ldivStruct = [(lStr,rStr) for lStr in lpi['Tip'].items() if issubclass(lStr[0], Triangular) for rStr in rpi['struct'].items() if rStr[0] is not ZeroMatrix ]
# for ldStr in ldivStruct:
# lTSDomain = self.joinAlignedSets(ldStr[0][1], lpi['tiling'], self.globalspace)
# rTSDomain = self.joinAlignedSets(ldStr[1][1], rpi['tiling'], self.globalspace)
# ldivDomain = self.joinAlignedSets(lTSDomain, rTSDomain, self.globalspace)
#
# for ldAccess in ldivAccess:
# tDomain = self.joinAlignedSets(lpi['access'][ldAccess[0]], rpi['access'][ldAccess[1]], self.globalspace)
# tDomain = self.joinAlignedSets(tDomain, ldivDomain, self.globalspace)
# if not tDomain.is_empty():
# polyStmt = {}
# pssl = self.fuseStmtWithSub(tDomain, expr.inexpr[0])
# if pssl:
# tDomain = pssl[0][0]
# lin = pssl[0][1]['stmt'].removeWrap()
# else:
# lin = StmtExpr([lMat.name+self.genGSSeq(ldAccess[0][1])])
# pssr = self.fuseStmtWithSub(tDomain, expr.inexpr[1])
# if pssr:
# tDomain = pssr[0][0]
# rin = pssr[0][1]['stmt'].removeWrap()
# else:
# rin = StmtExpr([rMat.name+self.genGSSeq(ldAccess[1][1])])
# lin = StmtExpr([ lin ], wrap=['(',')']) if ldAccess[0][3] is None else StmtExpr([lin], ldAccess[0][3])
# rin = StmtExpr([ rin ], wrap=['(',')']) if ldAccess[1][3] is None else StmtExpr([rin], ldAccess[1][3])
# inputs = [lin, rin]
# scat = self.genGSSeq(oAccess[0][1])
# wrap = [scat,""]
# polyStmt['stmt'] = StmtExpr(inputs, LDiv, wrap)
# polyStmt['outinfo'] = [oMat.name, scat]
# polyStmt['domain'] = tDomain.coalesce().remove_redundancies()
# polyStmt['acc'] = False
# polyStmt['oporder'] = opOrder
# polyStmts.append(polyStmt)
#
#
#
# TList = [[],[]]
# for r in range(len(lPinfo)):
# for c in range(len(rPInfo[0])):
#
# ts = tuple(blkRhs.getPartitionSize(r,c))
# # ts = (ts[0]-1, ts[1])
# Temp = Matrix("P"+str(globalSSAIndex()), bPInfo[r][c]['topblk'], ts)
# self.mDict[Temp.name] = Matrix(Temp.name, scalar, Temp.getFlatSize(), attr={ 'o':True, 'i':True, 't':True })
# Temp.spaceIdxNames = [ deepcopy(s) for s in blkRhs.spaceIdxNames ]
# if ts == (1,1):
# Temp.spaceIdxNames[0][0] = '0'
# Temp.spaceIdxNames[1][0] = '0'
# tPInfo = Temp.getPolyInfo(self.indices, baselevel=self.baselevel)
# TList[r].append((Temp, tPInfo))
#
# initDomain = self.getEmptyDomain()
#
# for kc in range(len(lPinfo[0])):
# lpi,rpi = lPinfo[r][kc], rPInfo[kc][c]
# bAccess = bPInfo[r][c]['access'].items()[0]
# oAccess = tPInfo[0][0]['access'].items()[0]
# prodAccess = [(lAccess,rAccess) for lAccess in lpi['access'] for rAccess in rpi['access']]
# prodStruct = [(lStr,rStr) for lStr in lpi['StrictLower'].items() if lStr[0] is Matrix for rStr in rpi['struct'].items() if rStr[0] is Matrix ]
# lowStrip = self.getEmptyDomain() if not rpi['LowerStrip'] else rpi['LowerStrip'].items()[0][1]
# for pStr in prodStruct:
# lTSDomain = self.joinAlignedSets(pStr[0][1], lpi['tiling'], self.globalspace)
# rTSDomain = self.joinAlignedSets(pStr[1][1]-lowStrip, rpi['tiling'], self.globalspace)
# mulDomain = self.joinAlignedSets(lTSDomain, rTSDomain, self.globalspace)
#
# for pAccess in prodAccess:
# tDomain = self.joinAlignedSets(lpi['access'][pAccess[0]], rpi['access'][pAccess[1]], self.globalspace)
# tDomain = self.joinAlignedSets(tDomain, mulDomain, self.globalspace)
# if not tDomain.is_empty():
# #Determine Init instances
# locInitDomain = self.computeInitDomain(self.globalspace, initDomain, tDomain, [lpi['flatstruct'], rpi['flatstruct']], accIdx)
# accDomain = tDomain - locInitDomain
# initDomain = initDomain.union(locInitDomain).coalesce().remove_redundancies()
#
# if not locInitDomain.is_empty():
# polyStmt = {}
# pssl = self.fuseStmtWithSub(locInitDomain, expr.inexpr[0])
# if pssl:
# locInitDomain = pssl[0][0]
# lin = pssl[0][1]['stmt'].removeWrap()
# else:
# lin = StmtExpr([lMat.name+self.genGSSeq(pAccess[0][1])])
# # pssr = self.fuseStmtWithSub(locInitDomain, expr.inexpr[1])
# # if pssr:
# # locInitDomain = pssr[0][0]
# # rin = pssr[0][1]['stmt'].removeWrap()
# # else:
# rin = StmtExpr([rMat.name+self.genGSSeq(pAccess[1][1])])
# pssb = self.fuseStmtWithSub(locInitDomain, expr.inexpr[1])
# if pssb:
# locInitDomain = pssb[0][0]
# b_in = pssb[0][1]['stmt'].removeWrap()
# else:
# b_in = StmtExpr([bMat.name+self.genGSSeq(bAccess[0][1])])
#
#
# lin = StmtExpr([ lin ], wrap=['(',')']) if pAccess[0][3] is None else StmtExpr([lin], pAccess[0][3])
# rin = StmtExpr([ rin ], wrap=['(',')']) if pAccess[1][3] is None else StmtExpr([rin], pAccess[1][3])
# b_in = StmtExpr([ b_in ], wrap=['(',')']) if bAccess[0][3] is None else StmtExpr([b_in], bAccess[0][3])
# mulin = StmtExpr([lin,rin], Mul, ["(",")"])
# inputs = [b_in, mulin]
# scat = self.genGSSeq(oAccess[0][1])
# wrap = [scat,""]
# # polyStmt['stmt'] = StmtExpr([StmtExpr([Temp.name]), StmtExpr(inputs, Mul, wrap)], Assign)
# polyStmt['stmt'] = StmtExpr(inputs, Sub, wrap)
# polyStmt['outinfo'] = [Temp.name, scat]
# polyStmt['domain'] = locInitDomain.coalesce().remove_redundancies()
# polyStmt['acc'] = False
# polyStmt['oporder'] = opOrder
# polyStmts.append(polyStmt)
# if not accDomain.is_empty():
# polyStmt = {}
# pssl = self.fuseStmtWithSub(accDomain, expr.inexpr[0])
# if pssl:
# accDomain = pssl[0][0]
# lin = pssl[0][1]['stmt'].removeWrap()
# else:
# lin = StmtExpr([lMat.name+self.genGSSeq(pAccess[0][1])])
# # pssr = self.fuseStmtWithSub(accDomain, expr.inexpr[1])
# # if pssr:
# # accDomain = pssr[0][0]
# # rin = pssr[0][1]['stmt'].removeWrap()
# # else:
# rin = StmtExpr([rMat.name+self.genGSSeq(pAccess[1][1])])
#
# lin = StmtExpr([ lin ], wrap=['(',')']) if pAccess[0][3] is None else StmtExpr([lin], pAccess[0][3])
# rin = StmtExpr([ rin ], wrap=['(',')']) if pAccess[1][3] is None else StmtExpr([rin], pAccess[1][3])
# mulin = StmtExpr([lin,rin], Mul, ["(",")"])
# inputs = [StmtExpr([Temp.name+self.genGSSeq(oAccess[0][1])]), mulin]
# wrap = [self.genGSSeq(oAccess[0][1]),""]
# polyStmt['stmt'] = StmtExpr(inputs, Sub, wrap)
# polyStmt['outinfo'] = [Temp.name, self.genGSSeq(oAccess[0][1])]
# polyStmt['domain'] = accDomain.coalesce().remove_redundancies()
# polyStmt['acc'] = False
# polyStmt['oporder'] = opOrder
# polyStmts.append(polyStmt)
#
# # lPinfo = blkLhs.getPolyInfo(self.indices, baselevel=self.baselevel)
# rPInfo = blkRhs.getPolyInfo(self.indices, baselevel=self.baselevel)
# oPInfo = blkOut.getPolyInfo(self.indices, baselevel=self.baselevel)
# lMat, rMat, oMat = expr.inexpr[0].getNonTileOut(), expr.inexpr[1].getNonTileOut(), self.subsWithOut.get(expr, expr).getNonTileOut()
#
# for r in range(len(lPinfo)):
# for c in range(len(rPInfo[0])):
# Temp, tPInfo = TList[r][c]
# opi = oPInfo[r][c]
# for kc in range(len(lPinfo[0])):
# lpi,rpi = lPinfo[r][kc], rPInfo[kc][c]
# oAccess = opi['access'].items()[0]
# tAccess = tPInfo[0][0]['access'].items()[0]
# ldivAccess = [(lAccess,rAccess) for lAccess in lpi['access'] for rAccess in rpi['access']]
# ldivStruct = [(lStr,rStr) for lStr in lpi['Diag'].items() for rStr in rpi['struct'].items() if rStr[0] is not ZeroMatrix ]
# tip = self.getEmptyDomain() if not lpi['Tip'] else lpi['Tip'].items()[0][1]
# for ldStr in ldivStruct:
# lTSDomain = self.joinAlignedSets(ldStr[0][1]-tip, lpi['tiling'], self.globalspace)
# rTSDomain = self.joinAlignedSets(ldStr[1][1], rpi['tiling'], self.globalspace)
# ldivDomain = self.joinAlignedSets(lTSDomain, rTSDomain, self.globalspace)
#
# for ldAccess in ldivAccess:
# tDomain = self.joinAlignedSets(lpi['access'][ldAccess[0]], rpi['access'][ldAccess[1]], self.globalspace)
# tDomain = self.joinAlignedSets(tDomain, ldivDomain, self.globalspace)
# if not tDomain.is_empty():
# polyStmt = {}
# pssl = self.fuseStmtWithSub(tDomain, expr.inexpr[0])
# if pssl:
# tDomain = pssl[0][0]
# lin = pssl[0][1]['stmt'].removeWrap()
# else:
# lin = StmtExpr([lMat.name+self.genGSSeq(ldAccess[0][1])])
# # pssr = self.fuseStmtWithSub(tDomain, expr.inexpr[1])
# # if pssr:
# # tDomain = pssr[0][0]
# # rin = pssr[0][1]['stmt'].removeWrap()
# # else:
# rin = StmtExpr([Temp.name+self.genGSSeq(tAccess[0][1])])
# lin = StmtExpr([ lin ], wrap=['(',')']) if ldAccess[0][3] is None else StmtExpr([lin], ldAccess[0][3])
# # rin = StmtExpr([ rin ], wrap=['(',')']) if ldAccess[1][3] is None else StmtExpr([rin], ldAccess[1][3])
# # rin = StmtExpr([rin,StmtExpr([Temp.name+self.genGSSeq(tAccess[0][1])])], Sub, ["(",")"])
# inputs = [lin, rin]
# scat = self.genGSSeq(oAccess[0][1])
# wrap = [scat,""]
# # polyStmt['stmt'] = StmtExpr([StmtExpr([oMat.name]), StmtExpr(inputs, LDiv, wrap)], Assign)
# polyStmt['stmt'] = StmtExpr(inputs, LDiv, wrap)
# polyStmt['outinfo'] = [oMat.name, scat]
# polyStmt['domain'] = tDomain.coalesce().remove_redundancies()
# polyStmt['acc'] = False
# polyStmt['oporder'] = opOrder
# polyStmts.append(polyStmt)
#
# expr.setPolyStmts(polyStmts)
    def backward_sub(self, expr, opts):
        """Generate polyhedral statements for backward substitution
        (upper-triangular solve), mirroring forward_sub but sweeping the
        reduction dimension in reverse and anchoring on the BottomRight tile.

        Three phases: (1) accumulate strict-upper U*X partial products into
        a fresh temporary P matrix per output tile; (2) solve the diagonal
        tiles (BottomRight excluded) against (rhs - P); (3) solve the
        BottomRight triangular tip directly.  Results are attached to
        ``expr`` via ``setPolyStmts``.
        """
        # Lower both operand sub-expressions first.
        getattr(self, expr.inexpr[0].__class__.__name__)(expr.inexpr[0], opts)
        getattr(self, expr.inexpr[1].__class__.__name__)(expr.inexpr[1], opts)
        opOrder = self.opOrder
        self.opOrder+=1
        blkLhs = expr.getInexprMat(0)
        blkRhs = expr.getInexprMat(1)
        blkOut = expr.getOut()
        # rMat is the (possibly substituted) output: X itself appears on the
        # rhs of the U*X update during back-substitution.
        lMat, rMat = self.getNonTileMatrix(expr.inexpr[0]), self.getNonTileMatrix(self.subsWithOut.get((self.eq_id, expr), expr))
        # Backward traversal directions; tag strict-upper/diag/bottom-right tiles.
        lPinfo = blkLhs.getPolyInfo(self.indices, baselevel=self.baselevel, extrainfo=['StrictUpper', 'Diag', 'BottomRight'], directions=('b','b'))
        rPInfo = blkOut.getPolyInfo(self.indices, baselevel=self.baselevel, directions=('b','f'))
        # Indices acting as accumulation (reduction) dimensions.
        accIdx = [ i for i in self.indices if i in blkLhs.idxPosAndLevInfo and blkLhs.idxPosAndLevInfo[i][2] == 1 ]
        polyStmts = []
        TList = [[],[]]
        # --- Phase 1: accumulate strict-upper partial products into temporaries.
        for r in range(len(lPinfo)):
            for c in range(len(rPInfo[0])):
                # Fresh temporary P matrix sized to this output partition.
                Temp = Matrix("P"+str(globalSSAIndex()), rPInfo[r][c]['topblk'], tuple(blkOut.getPartitionSize(r,c)) )
                self.mDict[Temp.name] = Matrix(Temp.name, scalar_block(), Temp.getFlatSize(), attr={ 'o':True, 'i':True, 't':True })
                Temp.spaceIdxNames = [ deepcopy(s) for s in blkRhs.spaceIdxNames ]
                # Degenerate (size-1) dimensions get a constant '0' index.
                for d in range(2):
                    if(Temp.size[d] == 1):
                        Temp.spaceIdxNames[d][0] = '0'
                tPInfo = Temp.getPolyInfo(self.indices, baselevel=self.baselevel, directions=('b','f'))
                TList[r].append((Temp, tPInfo))
                initDomain = self.getEmptyDomain()
                # Reverse sweep over k: back-substitution consumes later
                # unknowns first.
                for kc in (range(len(lPinfo[0]))[::-1]):
                    lpi,rpi = lPinfo[r][kc], rPInfo[kc][c]
                    prodAccess = [(lAccess,rAccess,oAccess) for lAccess in lpi['access'] for rAccess in rpi['access'] for oAccess in tPInfo[0][0]['access']]
                    prodStruct = [(lStr,rStr) for lStr in lpi['StrictUpper'].items() if lStr[0] is Matrix for rStr in rpi['struct'].items() if rStr[0] is Matrix ]
                    for pStr in prodStruct:
                        lTSDomain = self.joinAlignedSets(pStr[0][1], lpi['tiling'], self.globalspace)
                        rTSDomain = self.joinAlignedSets(pStr[1][1], rpi['tiling'], self.globalspace)
                        mulDomain = self.joinAlignedSets(lTSDomain, rTSDomain, self.globalspace)
                        # locInitDomain = self.computeInitDomain(self.globalspace, initDomain, mulDomain, [lpi['flatstruct'], rpi['flatstruct']], accIdx)
                        # Split the domain into first-write (init) vs accumulate
                        # instances; initDomain threads across pStr iterations.
                        locInitDomain = self.computeInitDomain(self.globalspace, initDomain, mulDomain, [lTSDomain.convex_hull(), rTSDomain.convex_hull()], accIdx)
                        accDomain = mulDomain - locInitDomain
                        initDomain = initDomain.union(locInitDomain).coalesce().remove_redundancies()
                        for pAccess in prodAccess:
                            oAccess = pAccess[2]
                            oAccSet = tPInfo[0][0]['access'][oAccess]
                            if not locInitDomain.is_empty():
                                tDomainList, opList, inputsList = [], [], []
                                tDomainList, _, inputsList, _, opList, _ = \
                                self.buildBinStmtLists((expr.inexpr[0],None), (lMat, rMat), Mul, locInitDomain, lpi, rpi, pAccess, oAccSet, par=(True,True))
                                if tDomainList:
                                    for td, inputs, op in zip(tDomainList, inputsList, opList):
                                        if not td.is_empty():
                                            scat = self.genGSSeq(oAccess[1])
                                            wrap = [scat,""]
                                            polyStmt = {}
                                            finStmt = StmtExpr(inputs, op)
                                            # Undo any permutation carried by the output access.
                                            finStmt = finStmt if oAccess[3] is None else StmtExpr([finStmt], oAccess[3].inverse())
                                            polyStmt['stmt'] = StmtExpr([finStmt], None, wrap)
                                            polyStmt['outinfo'] = [Temp.name, scat]
                                            polyStmt['domain'] = td
                                            polyStmt['acc'] = False
                                            polyStmt['oporder'] = opOrder
                                            polyStmts.append(polyStmt)
                            if not accDomain.is_empty():
                                tDomainList, opList, inputsList = [], [], []
                                tDomainList, _, inputsList, _, opList, _ = \
                                self.buildBinStmtLists((expr.inexpr[0],None), (lMat, rMat), Mul, accDomain, lpi, rpi, pAccess, oAccSet, par=(True,True))
                                if tDomainList:
                                    for td, inputs, op in zip(tDomainList, inputsList, opList):
                                        if not td.is_empty():
                                            # acc=True: these statements accumulate into Temp.
                                            wrap = [self.genGSSeq(oAccess[1], acc=True),""]
                                            polyStmt = {}
                                            polyStmt['stmt'] = StmtExpr(inputs, op, wrap)
                                            polyStmt['outinfo'] = [Temp.name, self.genGSSeq(oAccess[1])]
                                            polyStmt['domain'] = td
                                            polyStmt['acc'] = True
                                            polyStmt['oporder'] = opOrder
                                            polyStmts.append(polyStmt)
        # --- Phase 2: solve diagonal tiles (minus BottomRight) against rhs - Temp.
        rPInfo = blkRhs.getPolyInfo(self.indices, baselevel=self.baselevel, directions=('b','f'))
        oPInfo = blkOut.getPolyInfo(self.indices, baselevel=self.baselevel, directions=('b','f'))
        lMat, rMat, oMat = self.getNonTileMatrix(expr.inexpr[0]), self.getNonTileMatrix(expr.inexpr[1]), self.subsWithOut.get((self.eq_id, expr), expr).getNonTileOut()
        for r in range(len(lPinfo)):
            for c in range(len(rPInfo[0])):
                Temp, tPInfo = TList[r][c]
                opi = oPInfo[r][c]
                for kc in range(len(lPinfo[0])):
                    lpi,rpi = lPinfo[r][kc], rPInfo[kc][c]
                    # NOTE: Python 2 idiom — dict.items() indexed directly.
                    tAccess = tPInfo[0][0]['access'].items()[0]
                    ldivAccess = [(lAccess,rAccess,oAccess) for lAccess in lpi['access'] for rAccess in rpi['access'] for oAccess in opi['access']]
                    ldivStruct = [(lStr,rStr) for lStr in lpi['Diag'].items() for rStr in rpi['struct'].items() if rStr[0] is not ZeroMatrix ]
                    # Exclude the BottomRight tile: phase 3 handles it directly.
                    br = self.getEmptyDomain() if not lpi['BottomRight'] else lpi['BottomRight'].items()[0][1]
                    for ldStr in ldivStruct:
                        lTSDomain = self.joinAlignedSets(ldStr[0][1]-br, lpi['tiling'], self.globalspace)
                        rTSDomain = self.joinAlignedSets(ldStr[1][1], rpi['tiling'], self.globalspace)
                        ldivDomain = self.joinAlignedSets(lTSDomain, rTSDomain, self.globalspace)
                        for ldAccess in ldivAccess:
                            oAccess = ldAccess[2]
                            oAccSet = opi['access'][oAccess]
                            tDomainList, opList, inputsList = [], [], []
                            tDomainList, _, inputsList, _, opList, _ = \
                            self.buildBinStmtLists(expr.inexpr, (lMat, rMat), LDiv, ldivDomain, lpi, rpi, ldAccess, oAccSet, par=(True,True))
                            if tDomainList:
                                for td, inputs, op in zip(tDomainList, inputsList, opList):
                                    if not td.is_empty():
                                        # Rewrite the rhs input as (rhs - Temp accumulator).
                                        inputs[1] = StmtExpr([inputs[1],StmtExpr([Temp.name+self.genGSSeq(tAccess[0][1])])], Sub, ["(",")"])
                                        scat = self.genGSSeq(oAccess[1])
                                        wrap = [scat,""]
                                        polyStmt = {}
                                        polyStmt['stmt'] = StmtExpr(inputs, op, wrap)
                                        polyStmt['outinfo'] = [oMat.name, scat]
                                        polyStmt['domain'] = td
                                        polyStmt['acc'] = False
                                        polyStmt['oporder'] = opOrder
                                        polyStmts.append(polyStmt)
        # --- Phase 3: solve the BottomRight triangular tip tile directly.
        for r in range(len(lPinfo)):
            for c in range(len(rPInfo[0])):
                opi = oPInfo[r][c]
                for kc in range(len(lPinfo[0])):
                    lpi,rpi = lPinfo[r][kc], rPInfo[kc][c]
                    ldivAccess = [(lAccess,rAccess,oAccess) for lAccess in lpi['access'] for rAccess in rpi['access'] for oAccess in opi['access']]
                    ldivStruct = [(lStr,rStr) for lStr in lpi['BottomRight'].items() if issubclass(lStr[0], Triangular) for rStr in rpi['struct'].items() if rStr[0] is not ZeroMatrix ]
                    for ldStr in ldivStruct:
                        lTSDomain = self.joinAlignedSets(ldStr[0][1], lpi['tiling'], self.globalspace)
                        rTSDomain = self.joinAlignedSets(ldStr[1][1], rpi['tiling'], self.globalspace)
                        ldivDomain = self.joinAlignedSets(lTSDomain, rTSDomain, self.globalspace)
                        for ldAccess in ldivAccess:
                            oAccess = ldAccess[2]
                            oAccSet = opi['access'][oAccess]
                            tDomainList, opList, inputsList = [], [], []
                            tDomainList, _, inputsList, _, opList, _ = \
                            self.buildBinStmtLists(expr.inexpr, (lMat, rMat), LDiv, ldivDomain, lpi, rpi, ldAccess, oAccSet, par=(True,True))
                            if tDomainList:
                                for td, inputs, op in zip(tDomainList, inputsList, opList):
                                    if not td.is_empty():
                                        scat = self.genGSSeq(oAccess[1])
                                        wrap = [scat,""]
                                        polyStmt = {}
                                        polyStmt['stmt'] = StmtExpr(inputs, op, wrap)
                                        polyStmt['outinfo'] = [oMat.name, scat]
                                        polyStmt['domain'] = td
                                        polyStmt['acc'] = False
                                        polyStmt['oporder'] = opOrder
                                        polyStmts.append(polyStmt)
        expr.setPolyStmts(polyStmts)
    def Add(self, expr, ctx):
        """Generate polyhedral statements for a matrix-addition node.

        Recursively visits both operands first, then for every combination of
        per-tile structure descriptors and access descriptors builds statement
        domains via buildBinStmtLists/buildUnStmtLists and merges the results
        into ``polyStmts`` entries, grouped by the output region they touch.
        Finally stores the list on ``expr`` via setPolyStmts.
        """
        # Dispatch on the concrete node class of each operand (visitor pattern).
        getattr(self, expr.inexpr[0].__class__.__name__)(expr.inexpr[0], ctx)
        getattr(self, expr.inexpr[1].__class__.__name__)(expr.inexpr[1], ctx)
        # opOrder = self.opOrder
        # self.opOrder+=1
        blkLhs = expr.getInexprMat(0)
        blkRhs = expr.getInexprMat(1)
        # Prefer a substituted output matrix for this equation, if one exists.
        blkOut = self.subsWithOut.get((self.eq_id, expr), expr).getOut()
        # blkOut = expr.getOut()
        set_ctx = self.set_from_ctx(ctx)
        lMat, rMat = self.getNonTileMatrix(expr.inexpr[0]), self.getNonTileMatrix(expr.inexpr[1])
        oMat = self.subsWithOut.get((self.eq_id, expr), expr).getNonTileOut()
        lPinfo = blkLhs.getPolyInfo(self.indices, baselevel=self.baselevel)
        rPInfo = blkRhs.getPolyInfo(self.indices, baselevel=self.baselevel)
        oPInfo = blkOut.getPolyInfo(self.indices, baselevel=self.baselevel)
        expr.accIds = expr.inexpr[0].accIds + expr.inexpr[1].accIds
        acc_ids=[expr.inexpr[0].accIds, expr.inexpr[1].accIds]
        sTrailIds, fullOAccSet, oAccMap = self.get_out_acc_set_and_map(expr, blkOut, ctx)
        polyStmts = []
        for r in range(len(lPinfo)):
            for c in range(len(lPinfo[0])):
                lpi,rpi,opi = lPinfo[r][c], rPInfo[r][c], oPInfo[r][c]
                # Cartesian product of lhs/rhs/out access descriptors for this tile.
                addAccess = [(lAccess,rAccess,oAccess) for lAccess in lpi['access'] for rAccess in rpi['access'] for oAccess in opi['access']]
                # Skip structure combos where both inputs are zero, or the output is constant.
                addStruct = [(lStr,rStr,oStr) for lStr in lpi['struct'].items() for rStr in rpi['struct'].items() if (lStr[0] is not ZeroMatrix or (lStr[0] is ZeroMatrix and rStr[0] is not ZeroMatrix)) \
                             for oStr in opi['struct'].items() if not issubclass(oStr[0], ConstantMatrix) ]
                for aStr in addStruct:
                    # Intersect the structure sets with the tiling and context to
                    # obtain the iteration domain where this combo applies.
                    addDomain = self.joinAlignedSets(aStr[0][1], aStr[1][1], self.globalspace)
                    addDomain = self.joinAlignedSets(addDomain, aStr[2][1], self.globalspace)
                    addDomain = self.joinAlignedSets(addDomain, lpi['tiling'], self.globalspace)
                    addDomain = self.joinAlignedSets(addDomain, set_ctx, self.globalspace)
                    if addDomain.is_empty():
                        continue
                    for aAccess in addAccess:
                        oAccess = aAccess[2]
                        oAccSet = opi['access'][oAccess]
                        # Map from the full index space onto the 2-D output footprint
                        # (i,j) plus any trailing ids; oAccess[2] presumably holds the
                        # row/col affine accesses -- TODO confirm.
                        mReduceDims = Map("{["+(",".join(self.indices))+"]->[i,j"+sTrailIds+"]: " + str(oAccess[2][0].of(0)) + "=i and " + str(oAccess[2][1].of(0)) + "=j}")
                        tDomainList, inputsList, opList = [], [], []
                        acc_data = []
                        tAccDomainList, accInputsList, accOpList = [], [], []
                        if aStr[0][0] is ZeroMatrix:
                            # 0 + rhs: reduces to a unary copy of the rhs.
                            tDomainList, tAccDomainList, inputsList, accInputsList, opList, accOpList = \
                                self.buildUnStmtLists(expr.inexpr[1], rMat, None, addDomain, rpi, aAccess[1], oAccSet, mReduceDims, oAccMap, includeAccPss=True)
                            acc_data.append( (tAccDomainList, accInputsList, accOpList) )
                        elif aStr[1][0] is ZeroMatrix:
                            # lhs + 0: reduces to a unary copy of the lhs.
                            tDomainList, tAccDomainList, inputsList, accInputsList, opList, accOpList = \
                                self.buildUnStmtLists(expr.inexpr[0], lMat, None, addDomain, lpi, aAccess[0], oAccSet, mReduceDims, oAccMap, includeAccPss=True)
                            acc_data.append( (tAccDomainList, accInputsList, accOpList) )
                        else:
                            tDomainList, _, inputsList, _, opList, _ = \
                                self.buildBinStmtLists(expr.inexpr, (lMat, rMat), Add, addDomain, lpi, rpi, aAccess, oAccSet, mReduceDims, oAccMap, acc_ids=acc_ids)
                            # We split the build as in some areas where l/r is not defined r/l may be.
                            _, tAccDomainList, _, accInputsList, _, accOpList = \
                                self.buildUnStmtLists(expr.inexpr[0], lMat, None, addDomain, lpi, aAccess[0], oAccSet, mReduceDims, oAccMap, includeAccPss=True, pos=0, acc_ids=acc_ids)
                            acc_data.append( (tAccDomainList, accInputsList, accOpList) )
                            _, tAccDomainList, _, accInputsList, _, accOpList = \
                                self.buildUnStmtLists(expr.inexpr[1], rMat, None, addDomain, rpi, aAccess[1], oAccSet, mReduceDims, oAccMap, includeAccPss=True, pos=1, acc_ids=acc_ids)
                            acc_data.append( (tAccDomainList, accInputsList, accOpList) )
                        if tDomainList:
                            for td,inputs,op in zip(tDomainList,inputsList,opList):
                                if not td.is_empty():
                                    tdReduced = mReduceDims.intersect_domain(td).range()
                                    tdTransformedByOAcc = oAccMap.intersect_domain(tdReduced).range()
                                    # NOTE(review): relies on Python 2 filter() returning a list;
                                    # under Python 3 the [0] indexing below would fail.
                                    overlapping_ps = filter(lambda ps: tdTransformedByOAcc == ps['touched'], polyStmts)
                                    polyStmt, new_ps = (overlapping_ps[0], False) if overlapping_ps else ({}, True)
                                    if new_ps:
                                        # Initialize a fresh poly-statement for this output region.
                                        polyStmt['touched'] = tdTransformedByOAcc
                                        polyStmt['reducedims'] = mReduceDims
                                        polyStmt['stmt'] = []
                                        polyStmt['perm_oacc'] = []
                                        polyStmt['domain'] = []
                                        polyStmt['outinfo'] = []
                                    scat = self.genGSSeq(oAccess[1])
                                    oMat2ll = oMat.toLL(sep=True)
                                    if len(oMat2ll) > 1:
                                        wrap = [oMat2ll[1]+scat, ""]
                                    else:
                                        wrap = [scat, ""]
                                    finStmt = StmtExpr(inputs, op)
                                    # oAccess[3] appears to be an optional output permutation;
                                    # undo it on the stored statement -- verify.
                                    finStmt = finStmt if oAccess[3] is None else StmtExpr([finStmt], oAccess[3].inverse())
                                    polyStmt['stmt'].append( StmtExpr([finStmt], None, wrap) )
                                    polyStmt['perm_oacc'].append( oAccess[3] )
                                    polyStmt['domain'].append( td )
                                    polyStmt['acc'] = False
                                    polyStmt['outinfo'].append( [oMat2ll[0], scat] )
                                    if new_ps:
                                        polyStmts.append(polyStmt)
                        # Emit accumulation ("+=" style) statements collected above.
                        while acc_data:
                            acc_triple = acc_data.pop()
                            if acc_triple[0]:
                                for td,inputs,op in zip(*acc_triple):
                                    if not td.is_empty():
                                        tdReduced = mReduceDims.intersect_domain(td).range()
                                        tdTransformedByOAcc = oAccMap.intersect_domain(tdReduced).range()
                                        overlapping_ps = filter(lambda ps: tdTransformedByOAcc == ps['touched'], polyStmts)
                                        polyStmt, new_ps = (overlapping_ps[0], False) if overlapping_ps else ({}, True)
                                        if new_ps:
                                            polyStmt['touched'] = tdTransformedByOAcc
                                            polyStmt['reducedims'] = mReduceDims
                                            polyStmt['stmt'] = []
                                            polyStmt['perm_oacc'] = []
                                            polyStmt['domain'] = []
                                            polyStmt['outinfo'] = []
                                        scat = self.genGSSeq(oAccess[1])
                                        oMat2ll = oMat.toLL(acc=True, sep=True)
                                        # "$" appears to mark an accumulation write for the
                                        # downstream consumer -- verify.
                                        if len(oMat2ll) > 1:
                                            wrap = ["$"+oMat2ll[1]+self.genGSSeq(oAccess[1], acc=True), ""]
                                        else:
                                            wrap = [self.genGSSeq(oAccess[1], acc=True),""]
                                        finStmt = StmtExpr(inputs, op)
                                        finStmt = finStmt if oAccess[3] is None else StmtExpr([finStmt], oAccess[3].inverse())
                                        polyStmt['stmt'].append( StmtExpr([finStmt], None, wrap) )
                                        polyStmt['perm_oacc'].append( oAccess[3] )
                                        polyStmt['domain'].append( td )
                                        polyStmt['acc'] = True
                                        polyStmt['outinfo'].append( [oMat2ll[0], scat] )
                                        if new_ps:
                                            polyStmts.append(polyStmt)
        expr.setPolyStmts(polyStmts)
    def Sub(self, expr, ctx):
        """Generate polyhedral statements for a matrix-subtraction node.

        Mirrors Add: visits both operands, enumerates structure/access
        combinations per tile, builds binary (Sub) statement lists plus
        per-operand accumulation lists (rhs negated via Neg), and merges
        everything into ``polyStmts`` grouped by touched output region.
        """
        getattr(self, expr.inexpr[0].__class__.__name__)(expr.inexpr[0], ctx)
        getattr(self, expr.inexpr[1].__class__.__name__)(expr.inexpr[1], ctx)
        blkLhs = expr.getInexprMat(0)
        blkRhs = expr.getInexprMat(1)
        # blkOut = expr.getOut()
        blkOut = self.subsWithOut.get((self.eq_id, expr), expr).getOut()
        set_ctx = self.set_from_ctx(ctx)
        lMat, rMat = self.getNonTileMatrix(expr.inexpr[0]), self.getNonTileMatrix(expr.inexpr[1])
        oMat = self.subsWithOut.get((self.eq_id, expr), expr).getNonTileOut()
        lPinfo = blkLhs.getPolyInfo(self.indices, baselevel=self.baselevel)
        rPInfo = blkRhs.getPolyInfo(self.indices, baselevel=self.baselevel)
        oPInfo = blkOut.getPolyInfo(self.indices, baselevel=self.baselevel)
        expr.accIds = expr.inexpr[0].accIds + expr.inexpr[1].accIds
        acc_ids=[expr.inexpr[0].accIds, expr.inexpr[1].accIds]
        sTrailIds, fullOAccSet, oAccMap = self.get_out_acc_set_and_map(expr, blkOut, ctx)
        polyStmts = []
        for r in range(len(lPinfo)):
            for c in range(len(lPinfo[0])):
                lpi,rpi,opi = lPinfo[r][c], rPInfo[r][c], oPInfo[r][c]
                subAccess = [(lAccess,rAccess,oAccess) for lAccess in lpi['access'] for rAccess in rpi['access'] for oAccess in opi['access']]
                # Exclude combos where both operands are zero or the output is constant.
                subStruct = [(lStr,rStr, oStr) for lStr in lpi['struct'].items() for rStr in rpi['struct'].items() for oStr in opi['struct'].items() \
                             if not (lStr[0] is ZeroMatrix and rStr[0] is ZeroMatrix or issubclass(oStr[0], ConstantMatrix)) ]
                for aStr in subStruct:
                    subDomain = self.joinAlignedSets(aStr[0][1], aStr[1][1], self.globalspace)
                    subDomain = self.joinAlignedSets(subDomain, aStr[2][1], self.globalspace)
                    subDomain = self.joinAlignedSets(subDomain, lpi['tiling'], self.globalspace)
                    subDomain = self.joinAlignedSets(subDomain, set_ctx, self.globalspace)
                    for sAccess in subAccess:
                        oAccess = sAccess[2]
                        oAccSet = opi['access'][oAccess]
                        # Map full index space onto the 2-D output footprint (see Add).
                        mReduceDims = Map("{["+(",".join(self.indices))+"]->[i,j"+sTrailIds+"]: " + str(oAccess[2][0].of(0)) + "=i and " + str(oAccess[2][1].of(0)) + "=j}")
                        tDomainList, inputsList, opList = [], [], []
                        acc_data = []
                        tAccDomainList, accInputsList, accOpList = [], [], []
                        tDomainList, _, inputsList, _, opList, _ = \
                            self.buildBinStmtLists(expr.inexpr, (lMat, rMat), Sub, subDomain, lpi, rpi, sAccess, oAccSet, mReduceDims, oAccMap, par=[False,True], acc_ids=acc_ids)
                        _, tAccDomainList, _, accInputsList, _, accOpList = \
                            self.buildUnStmtLists(expr.inexpr[0], lMat, None, subDomain, lpi, sAccess[0], oAccSet, mReduceDims, oAccMap, includeAccPss=True, pos=0, acc_ids=acc_ids)
                        acc_data.append( (tAccDomainList, accInputsList, accOpList) )
                        # The rhs accumulation contribution is negated (a - b == a + (-b)).
                        _, tAccDomainList, _, accInputsList, _, accOpList = \
                            self.buildUnStmtLists(expr.inexpr[1], rMat, Neg, subDomain, rpi, sAccess[1], oAccSet, mReduceDims, oAccMap, includeAccPss=True, par=True, pos=1, acc_ids=acc_ids)
                        acc_data.append( (tAccDomainList, accInputsList, accOpList) )
                        if tDomainList:
                            for td,inputs,op in zip(tDomainList,inputsList,opList):
                                if not td.is_empty():
                                    tdReduced = mReduceDims.intersect_domain(td).range()
                                    tdTransformedByOAcc = oAccMap.intersect_domain(tdReduced).range()
                                    # NOTE(review): Py2-only filter()[0] idiom (see Add).
                                    overlapping_ps = filter(lambda ps: tdTransformedByOAcc == ps['touched'], polyStmts)
                                    polyStmt, new_ps = (overlapping_ps[0], False) if overlapping_ps else ({}, True)
                                    if new_ps:
                                        polyStmt['touched'] = tdTransformedByOAcc
                                        polyStmt['reducedims'] = mReduceDims
                                        polyStmt['stmt'] = []
                                        polyStmt['perm_oacc'] = []
                                        polyStmt['domain'] = []
                                        polyStmt['outinfo'] = []
                                    scat = self.genGSSeq(oAccess[1])
                                    oMat2ll = oMat.toLL(sep=True)
                                    if len(oMat2ll) > 1:
                                        wrap = [oMat2ll[1]+scat, ""]
                                    else:
                                        wrap = [scat, ""]
                                    finStmt = StmtExpr(inputs, op)
                                    finStmt = finStmt if oAccess[3] is None else StmtExpr([finStmt], oAccess[3].inverse())
                                    polyStmt['stmt'].append( StmtExpr([finStmt], None, wrap) )
                                    polyStmt['perm_oacc'].append( oAccess[3] )
                                    polyStmt['domain'].append( td )
                                    polyStmt['acc'] = False
                                    polyStmt['outinfo'].append( [oMat2ll[0], scat] )
                                    if new_ps:
                                        polyStmts.append(polyStmt)
                        # Emit accumulation statements collected for each operand.
                        while acc_data:
                            acc_triple = acc_data.pop()
                            if acc_triple[0]:
                                for td,inputs,op in zip(*acc_triple):
                                    if not td.is_empty():
                                        tdReduced = mReduceDims.intersect_domain(td).range()
                                        tdTransformedByOAcc = oAccMap.intersect_domain(tdReduced).range()
                                        overlapping_ps = filter(lambda ps: tdTransformedByOAcc == ps['touched'], polyStmts)
                                        polyStmt, new_ps = (overlapping_ps[0], False) if overlapping_ps else ({}, True)
                                        if new_ps:
                                            polyStmt['touched'] = tdTransformedByOAcc
                                            polyStmt['reducedims'] = mReduceDims
                                            polyStmt['stmt'] = []
                                            polyStmt['perm_oacc'] = []
                                            polyStmt['domain'] = []
                                            polyStmt['outinfo'] = []
                                        scat = self.genGSSeq(oAccess[1])
                                        oMat2ll = oMat.toLL(acc=True, sep=True)
                                        if len(oMat2ll) > 1:
                                            wrap = ["$"+oMat2ll[1]+self.genGSSeq(oAccess[1], acc=True), ""]
                                        else:
                                            wrap = [self.genGSSeq(oAccess[1], acc=True),""]
                                        finStmt = StmtExpr(inputs, op)
                                        finStmt = finStmt if oAccess[3] is None else StmtExpr([finStmt], oAccess[3].inverse())
                                        polyStmt['stmt'].append( StmtExpr([finStmt], None, wrap) )
                                        polyStmt['perm_oacc'].append( oAccess[3] )
                                        polyStmt['domain'].append( td )
                                        polyStmt['acc'] = True
                                        polyStmt['outinfo'].append( [oMat2ll[0], scat] )
                                        if new_ps:
                                            polyStmts.append(polyStmt)
        expr.setPolyStmts(polyStmts)
    def Neg(self, expr, ctx):
        """Generate polyhedral statements for a unary negation node.

        Visits the single operand, enumerates non-zero structure and access
        combinations per tile, builds unary (Neg) statement lists plus
        accumulation variants, and merges them into ``polyStmts`` grouped
        by touched output region.
        """
        getattr(self, expr.inexpr[0].__class__.__name__)(expr.inexpr[0], ctx)
        blkSub = expr.getInexprMat(0)
        blkOut = self.subsWithOut.get((self.eq_id, expr), expr).getOut()
        # blkOut = expr.getOut()
        set_ctx = self.set_from_ctx(ctx)
        subMat = self.getNonTileMatrix(expr.inexpr[0])
        oMat = self.subsWithOut.get((self.eq_id, expr), expr).getNonTileOut()
        sPinfo = blkSub.getPolyInfo(self.indices, baselevel=self.baselevel)
        oPInfo = blkOut.getPolyInfo(self.indices, baselevel=self.baselevel)
        expr.accIds = expr.inexpr[0].accIds
        sTrailIds, fullOAccSet, oAccMap = self.get_out_acc_set_and_map(expr, blkOut, ctx)
        polyStmts = []
        for r in range(len(sPinfo)):
            for c in range(len(sPinfo[0])):
                spi,opi = sPinfo[r][c], oPInfo[r][c]
                negAccess = [(sAccess,oAccess) for sAccess in spi['access'] for oAccess in opi['access']]
                # Negating a zero block or writing into a constant output is pointless.
                negStruct = [ (sStr,oStr) for sStr in spi['struct'].items() if not (sStr[0] is ZeroMatrix) for oStr in opi['struct'].items() if not issubclass(oStr[0], ConstantMatrix) ]
                for nStr in negStruct:
                    negDomain = self.joinAlignedSets(nStr[0][1], spi['tiling'], self.globalspace)
                    negDomain = self.joinAlignedSets(negDomain, nStr[1][1], self.globalspace)
                    negDomain = self.joinAlignedSets(negDomain, set_ctx, self.globalspace)
                    for nAccess in negAccess:
                        oAccess = nAccess[1]
                        oAccSet = opi['access'][oAccess]
                        # Map full index space onto the 2-D output footprint (see Add).
                        mReduceDims = Map("{["+(",".join(self.indices))+"]->[i,j"+sTrailIds+"]: " + str(oAccess[2][0].of(0)) + "=i and " + str(oAccess[2][1].of(0)) + "=j}")
                        tDomainList, inputsList, opList = [], [], []
                        tAccDomainList, accInputsList, accOpList = [], [], []
                        tDomainList, tAccDomainList, inputsList, accInputsList, opList, accOpList = \
                            self.buildUnStmtLists(expr.inexpr[0], subMat, Neg, negDomain, spi, nAccess[0], oAccSet, mReduceDims, oAccMap, includeAccPss=True, par=True)
                        if tDomainList:
                            for td,inputs,op in zip(tDomainList,inputsList,opList):
                                if not td.is_empty():
                                    tdReduced = mReduceDims.intersect_domain(td).range()
                                    tdTransformedByOAcc = oAccMap.intersect_domain(tdReduced).range()
                                    # if fullOAccSet.intersect(tdTransformedByOAcc).is_empty():
                                    # fullOAccSet = fullOAccSet.union(tdTransformedByOAcc)
                                    # polyStmt = {}
                                    # NOTE(review): Py2-only filter()[0] idiom (see Add).
                                    overlapping_ps = filter(lambda ps: tdTransformedByOAcc == ps['touched'], polyStmts)
                                    polyStmt, new_ps = (overlapping_ps[0], False) if overlapping_ps else ({}, True)
                                    if new_ps:
                                        polyStmt['touched'] = tdTransformedByOAcc
                                        polyStmt['reducedims'] = mReduceDims
                                        polyStmt['stmt'] = []
                                        polyStmt['perm_oacc'] = []
                                        polyStmt['domain'] = []
                                        polyStmt['outinfo'] = []
                                    scat = self.genGSSeq(oAccess[1])
                                    oMat2ll = oMat.toLL(sep=True)
                                    if len(oMat2ll) > 1:
                                        wrap = [oMat2ll[1]+scat, ""]
                                    else:
                                        wrap = [scat, ""]
                                    finStmt = StmtExpr(inputs, op)
                                    finStmt = finStmt if oAccess[3] is None else StmtExpr([finStmt], oAccess[3].inverse())
                                    polyStmt['stmt'].append( StmtExpr([finStmt], None, wrap) )
                                    polyStmt['perm_oacc'].append( oAccess[3] )
                                    polyStmt['domain'].append( td )
                                    polyStmt['acc'] = False
                                    polyStmt['outinfo'].append( [oMat2ll[0], scat] )
                                    if new_ps:
                                        polyStmts.append(polyStmt)
                        # Accumulation variant of the same statements.
                        if tAccDomainList:
                            for td,inputs,op in zip(tAccDomainList, accInputsList, accOpList):
                                if not td.is_empty():
                                    tdReduced = mReduceDims.intersect_domain(td).range()
                                    tdTransformedByOAcc = oAccMap.intersect_domain(tdReduced).range()
                                    # if fullOAccSet.intersect(tdTransformedByOAcc).is_empty():
                                    # fullOAccSet = fullOAccSet.union(tdTransformedByOAcc)
                                    # polyStmt = {}
                                    overlapping_ps = filter(lambda ps: tdTransformedByOAcc == ps['touched'], polyStmts)
                                    polyStmt, new_ps = (overlapping_ps[0], False) if overlapping_ps else ({}, True)
                                    if new_ps:
                                        polyStmt['touched'] = tdTransformedByOAcc
                                        polyStmt['reducedims'] = mReduceDims
                                        polyStmt['stmt'] = []
                                        polyStmt['perm_oacc'] = []
                                        polyStmt['domain'] = []
                                        polyStmt['outinfo'] = []
                                    scat = self.genGSSeq(oAccess[1])
                                    oMat2ll = oMat.toLL(acc=True, sep=True)
                                    if len(oMat2ll) > 1:
                                        wrap = ["$"+oMat2ll[1]+self.genGSSeq(oAccess[1], acc=True), ""]
                                    else:
                                        wrap = [self.genGSSeq(oAccess[1], acc=True),""]
                                    finStmt = StmtExpr(inputs, op)
                                    finStmt = finStmt if oAccess[3] is None else StmtExpr([finStmt], oAccess[3].inverse())
                                    polyStmt['stmt'].append( StmtExpr([finStmt], None, wrap) )
                                    polyStmt['perm_oacc'].append( oAccess[3] )
                                    polyStmt['domain'].append( td )
                                    polyStmt['acc'] = True
                                    polyStmt['outinfo'].append( [oMat2ll[0], scat] )
                                    if new_ps:
                                        polyStmts.append(polyStmt)
        expr.setPolyStmts(polyStmts)
    def Sqrt(self, expr, ctx):
        """Generate polyhedral statements for a (scalar) square-root node.

        Only the [0][0] tile is considered -- presumably Sqrt applies to a
        scalar/1x1 operand at this level; TODO confirm.  Builds unary (Sqrt)
        statement lists and merges them into ``polyStmts`` grouped by touched
        output region.
        """
        getattr(self, expr.inexpr[0].__class__.__name__)(expr.inexpr[0], ctx)
        blkSub = expr.getInexprMat(0)
        blkOut = self.subsWithOut.get((self.eq_id, expr), expr).getOut()
        # blkOut = expr.getOut()
        set_ctx = self.set_from_ctx(ctx)
        subMat = self.getNonTileMatrix(expr.inexpr[0])
        oMat = self.subsWithOut.get((self.eq_id, expr), expr).getNonTileOut()
        subPinfo = blkSub.getPolyInfo(self.indices, baselevel=self.baselevel)
        oPInfo = blkOut.getPolyInfo(self.indices, baselevel=self.baselevel)
        expr.accIds = expr.inexpr[0].accIds
        sTrailIds, fullOAccSet, oAccMap = self.get_out_acc_set_and_map(expr, blkOut, ctx)
        polyStmts = []
        spi,opi = subPinfo[0][0], oPInfo[0][0]
        srAccess = [ (sAccess, oAccess) for sAccess in spi['access'] for oAccess in opi['access'] ]
        srStruct = [ (sStr,oStr) for sStr in spi['struct'].items() for oStr in opi['struct'].items() if not issubclass(oStr[0], ConstantMatrix) ]
        for srStr in srStruct:
            srDomain = self.joinAlignedSets(srStr[0][1], spi['tiling'], self.globalspace)
            srDomain = self.joinAlignedSets(srDomain, srStr[1][1], self.globalspace)
            srDomain = self.joinAlignedSets(srDomain, set_ctx, self.globalspace)
            for tAccess in srAccess:
                oAccess = tAccess[1]
                oAccSet = opi['access'][oAccess]
                # Map full index space onto the 2-D output footprint (see Add).
                mReduceDims = Map("{["+(",".join(self.indices))+"]->[i,j"+sTrailIds+"]: " + str(oAccess[2][0].of(0)) + "=i and " + str(oAccess[2][1].of(0)) + "=j}")
                tDomainList, inputsList, opList = [], [], []
                tDomainList, _, inputsList, _, opList, _ = \
                    self.buildUnStmtLists(expr.inexpr[0], subMat, Sqrt, srDomain, spi, tAccess[0], oAccSet, mReduceDims, oAccMap)
                if tDomainList:
                    for td,inputs,op in zip(tDomainList,inputsList,opList):
                        if not td.is_empty():
                            tdReduced = mReduceDims.intersect_domain(td).range()
                            tdTransformedByOAcc = oAccMap.intersect_domain(tdReduced).range()
                            # if fullOAccSet.intersect(tdTransformedByOAcc).is_empty():
                            # fullOAccSet = fullOAccSet.union(tdTransformedByOAcc)
                            # polyStmt = {}
                            # NOTE(review): Py2-only filter()[0] idiom (see Add).
                            overlapping_ps = filter(lambda ps: tdTransformedByOAcc == ps['touched'], polyStmts)
                            polyStmt, new_ps = (overlapping_ps[0], False) if overlapping_ps else ({}, True)
                            if new_ps:
                                polyStmt['touched'] = tdTransformedByOAcc
                                polyStmt['reducedims'] = mReduceDims
                                polyStmt['stmt'] = []
                                polyStmt['perm_oacc'] = []
                                polyStmt['domain'] = []
                                polyStmt['outinfo'] = []
                            scat = self.genGSSeq(oAccess[1])
                            oMat2ll = oMat.toLL(sep=True)
                            if len(oMat2ll) > 1:
                                wrap = [oMat2ll[1]+scat, ""]
                            else:
                                wrap = [scat, ""]
                            finStmt = StmtExpr(inputs, op)
                            finStmt = finStmt if oAccess[3] is None else StmtExpr([finStmt], oAccess[3].inverse())
                            polyStmt['stmt'].append( StmtExpr([finStmt], None, wrap) )
                            polyStmt['perm_oacc'].append( oAccess[3] )
                            polyStmt['domain'].append( td )
                            polyStmt['acc'] = False
                            polyStmt['outinfo'].append( [oMat2ll[0], scat] )
                            if new_ps:
                                polyStmts.append(polyStmt)
        expr.setPolyStmts(polyStmts)
    def Div(self, expr, ctx):
        """Generate polyhedral statements for a (scalar) division node.

        Only the [0][0] tiles are considered -- presumably Div applies to
        scalar/1x1 operands at this level; TODO confirm.  Builds binary (Div)
        statement lists and merges them into ``polyStmts`` grouped by touched
        output region.
        """
        getattr(self, expr.inexpr[0].__class__.__name__)(expr.inexpr[0], ctx)
        getattr(self, expr.inexpr[1].__class__.__name__)(expr.inexpr[1], ctx)
        blkLhs = expr.getInexprMat(0)
        blkRhs = expr.getInexprMat(1)
        blkOut = self.subsWithOut.get((self.eq_id, expr), expr).getOut()
        # blkOut = expr.getOut()
        set_ctx = self.set_from_ctx(ctx)
        lMat, rMat = self.getNonTileMatrix(expr.inexpr[0]), self.getNonTileMatrix(expr.inexpr[1])
        oMat = self.subsWithOut.get((self.eq_id, expr), expr).getNonTileOut()
        lPinfo = blkLhs.getPolyInfo(self.indices, baselevel=self.baselevel)
        rPInfo = blkRhs.getPolyInfo(self.indices, baselevel=self.baselevel)
        oPInfo = blkOut.getPolyInfo(self.indices, baselevel=self.baselevel)
        expr.accIds = expr.inexpr[0].accIds + expr.inexpr[1].accIds
        acc_ids = [expr.inexpr[0].accIds, expr.inexpr[1].accIds]
        sTrailIds, fullOAccSet, oAccMap = self.get_out_acc_set_and_map(expr, blkOut, ctx)
        polyStmts = []
        lpi,rpi,opi = lPinfo[0][0], rPInfo[0][0], oPInfo[0][0]
        divAccess = [(lAccess,rAccess,oAccess) for lAccess in lpi['access'] for rAccess in rpi['access'] for oAccess in opi['access']]
        # Skip zero numerator/denominator blocks and constant outputs.
        divStruct = [(lStr,rStr,oStr) for lStr in lpi['struct'].items() for rStr in rpi['struct'].items() if not (lStr[0] is ZeroMatrix or rStr[0] is ZeroMatrix) \
                     for oStr in opi['struct'].items() if not issubclass(oStr[0], ConstantMatrix) ]
        for dStr in divStruct:
            lTSDomain = self.joinAlignedSets(dStr[0][1], lpi['tiling'], self.globalspace)
            rTSDomain = self.joinAlignedSets(dStr[1][1], rpi['tiling'], self.globalspace)
            divDomain = self.joinAlignedSets(lTSDomain, rTSDomain, self.globalspace)
            divDomain = self.joinAlignedSets(divDomain, dStr[2][1], self.globalspace)
            divDomain = self.joinAlignedSets(divDomain, set_ctx, self.globalspace)
            for rdAccess in divAccess:
                oAccess = rdAccess[2]
                oAccSet = opi['access'][oAccess]
                # Map full index space onto the 2-D output footprint (see Add).
                mReduceDims = Map("{["+(",".join(self.indices))+"]->[i,j"+sTrailIds+"]: " + str(oAccess[2][0].of(0)) + "=i and " + str(oAccess[2][1].of(0)) + "=j}")
                tDomainList, inputsList, opList = [], [], []
                tDomainList, _, inputsList, _, opList, _ = \
                    self.buildBinStmtLists(expr.inexpr, (lMat, rMat), Div, divDomain, lpi, rpi, rdAccess, oAccSet, mReduceDims, oAccMap, par=(True,True), acc_ids=acc_ids)
                if tDomainList:
                    for td,inputs,op in zip(tDomainList,inputsList,opList):
                        if not td.is_empty():
                            tdReduced = mReduceDims.intersect_domain(td).range()
                            tdTransformedByOAcc = oAccMap.intersect_domain(tdReduced).range()
                            # if fullOAccSet.intersect(tdTransformedByOAcc).is_empty():
                            # fullOAccSet = fullOAccSet.union(tdTransformedByOAcc)
                            # polyStmt = {}
                            # NOTE(review): Py2-only filter()[0] idiom (see Add).
                            overlapping_ps = filter(lambda ps: tdTransformedByOAcc == ps['touched'], polyStmts)
                            polyStmt, new_ps = (overlapping_ps[0], False) if overlapping_ps else ({}, True)
                            if new_ps:
                                polyStmt['touched'] = tdTransformedByOAcc
                                polyStmt['reducedims'] = mReduceDims
                                polyStmt['stmt'] = []
                                polyStmt['perm_oacc'] = []
                                polyStmt['domain'] = []
                                polyStmt['outinfo'] = []
                            scat = self.genGSSeq(oAccess[1])
                            oMat2ll = oMat.toLL(sep=True)
                            if len(oMat2ll) > 1:
                                wrap = [oMat2ll[1]+scat, ""]
                            else:
                                wrap = [scat, ""]
                            finStmt = StmtExpr(inputs, op)
                            finStmt = finStmt if oAccess[3] is None else StmtExpr([finStmt], oAccess[3].inverse())
                            polyStmt['stmt'].append( StmtExpr([finStmt], None, wrap) )
                            polyStmt['perm_oacc'].append( oAccess[3] )
                            polyStmt['domain'].append( td )
                            polyStmt['acc'] = False
                            polyStmt['outinfo'].append( [oMat2ll[0], scat] )
                            if new_ps:
                                polyStmts.append(polyStmt)
        expr.setPolyStmts(polyStmts)
    def Kro(self, expr, ctx):
        """Generate polyhedral statements for a scalar-by-matrix (Kronecker)
        multiplication node.

        Detects which operand is the scalar, iterates the matrix operand's
        tiles, builds binary (Kro) statement lists with accumulation variants,
        and merges them into ``polyStmts`` grouped by touched output region.
        """
        getattr(self, expr.inexpr[0].__class__.__name__)(expr.inexpr[0], ctx)
        getattr(self, expr.inexpr[1].__class__.__name__)(expr.inexpr[1], ctx)
        blkLhs = expr.getInexprMat(0)
        blkRhs = expr.getInexprMat(1)
        # scaLhs records on which side the scalar operand sits.
        blkSca, blkMat, scaLhs = (blkLhs, blkRhs, True) if blkLhs.isScalar() else (blkRhs, blkLhs, False)
        blkOut = self.subsWithOut.get((self.eq_id, expr), expr).getOut()
        # blkOut = expr.getOut()
        set_ctx = self.set_from_ctx(ctx)
        lMat, rMat = self.getNonTileMatrix(expr.inexpr[0]), self.getNonTileMatrix(expr.inexpr[1])
        oMat = self.subsWithOut.get((self.eq_id, expr), expr).getNonTileOut()
        scaPinfo = blkSca.getPolyInfo(self.indices, baselevel=self.baselevel)
        matPInfo = blkMat.getPolyInfo(self.indices, baselevel=self.baselevel)
        oPInfo = blkOut.getPolyInfo(self.indices, baselevel=self.baselevel)
        expr.accIds = expr.inexpr[0].accIds + expr.inexpr[1].accIds
        acc_ids=[expr.inexpr[0].accIds, expr.inexpr[1].accIds]
        sTrailIds, fullOAccSet, oAccMap = self.get_out_acc_set_and_map(expr, blkOut, ctx)
        polyStmts = []
        spi = scaPinfo[0][0]
        for r in range(len(matPInfo)):
            for c in range(len(matPInfo[0])):
                mpi,opi = matPInfo[r][c], oPInfo[r][c]
                kroAccess = [(scaAccess,mAccess,oAccess) for scaAccess in spi['access'] for mAccess in mpi['access'] for oAccess in opi['access']]
                # Either operand being zero makes the product zero; skip those combos.
                scamulStruct = [ (scaStr,mStr,oStr) for scaStr in spi['struct'].items() for mStr in mpi['struct'].items() if scaStr[0] is not ZeroMatrix and mStr[0] is not ZeroMatrix \
                                 for oStr in opi['struct'].items() if not issubclass(oStr[0], ConstantMatrix) ]
                for smStr in scamulStruct:
                    scaTSDomain = self.joinAlignedSets(smStr[0][1], spi['tiling'], self.globalspace)
                    matTSDomain = self.joinAlignedSets(smStr[1][1], mpi['tiling'], self.globalspace)
                    scamulDomain = self.joinAlignedSets(scaTSDomain, matTSDomain, self.globalspace)
                    scamulDomain = self.joinAlignedSets(scamulDomain, smStr[2][1], self.globalspace)
                    scamulDomain = self.joinAlignedSets(scamulDomain, set_ctx, self.globalspace)
                    for tAccess in kroAccess:
                        # Restore the original lhs/rhs ordering of the accesses.
                        kAccess = (tAccess[0], tAccess[1]) if scaLhs else (tAccess[1], tAccess[0])
                        oAccess = tAccess[2]
                        oAccSet = opi['access'][oAccess]
                        lpi, rpi = (spi, mpi) if scaLhs else (mpi, spi)
                        # Map full index space onto the 2-D output footprint (see Add).
                        mReduceDims = Map("{["+(",".join(self.indices))+"]->[i,j"+sTrailIds+"]: " + str(oAccess[2][0].of(0)) + "=i and " + str(oAccess[2][1].of(0)) + "=j}")
                        tDomainList, inputsList, opList = [], [], []
                        tAccDomainList, accInputsList, accOpList = [], [], []
                        tDomainList, tAccDomainList, inputsList, accInputsList, opList, accOpList = \
                            self.buildBinStmtLists(expr.inexpr, (lMat, rMat), Kro, scamulDomain, lpi, rpi, kAccess, oAccSet, mReduceDims, oAccMap, includeAccPss=(True, True), par=(True,True), acc_ids=acc_ids)
                        if tDomainList:
                            for td,inputs,op in zip(tDomainList,inputsList,opList):
                                if not td.is_empty():
                                    tdReduced = mReduceDims.intersect_domain(td).range()
                                    tdTransformedByOAcc = oAccMap.intersect_domain(tdReduced).range()
                                    # NOTE(review): Py2-only filter()[0] idiom (see Add).
                                    overlapping_ps = filter(lambda ps: tdTransformedByOAcc == ps['touched'], polyStmts)
                                    polyStmt, new_ps = (overlapping_ps[0], False) if overlapping_ps else ({}, True)
                                    if new_ps:
                                        polyStmt['touched'] = tdTransformedByOAcc
                                        polyStmt['reducedims'] = mReduceDims
                                        polyStmt['stmt'] = []
                                        polyStmt['perm_oacc'] = []
                                        polyStmt['domain'] = []
                                        polyStmt['outinfo'] = []
                                    scat = self.genGSSeq(oAccess[1])
                                    oMat2ll = oMat.toLL(sep=True)
                                    if len(oMat2ll) > 1:
                                        wrap = [oMat2ll[1]+scat, ""]
                                    else:
                                        wrap = [scat, ""]
                                    finStmt = StmtExpr(inputs, op)
                                    finStmt = finStmt if oAccess[3] is None else StmtExpr([finStmt], oAccess[3].inverse())
                                    polyStmt['stmt'].append( StmtExpr([finStmt], None, wrap) )
                                    polyStmt['perm_oacc'].append( oAccess[3] )
                                    polyStmt['domain'].append( td )
                                    polyStmt['acc'] = False
                                    polyStmt['outinfo'].append( [oMat2ll[0], scat] )
                                    if new_ps:
                                        polyStmts.append(polyStmt)
                        # Accumulation variant of the same statements.
                        if tAccDomainList:
                            for td,inputs,op in zip(tAccDomainList, accInputsList, accOpList):
                                if not td.is_empty():
                                    tdReduced = mReduceDims.intersect_domain(td).range()
                                    tdTransformedByOAcc = oAccMap.intersect_domain(tdReduced).range()
                                    # if fullOAccSet.intersect(tdTransformedByOAcc).is_empty():
                                    # fullOAccSet = fullOAccSet.union(tdTransformedByOAcc)
                                    # polyStmt = {}
                                    overlapping_ps = filter(lambda ps: tdTransformedByOAcc == ps['touched'], polyStmts)
                                    polyStmt, new_ps = (overlapping_ps[0], False) if overlapping_ps else ({}, True)
                                    if new_ps:
                                        polyStmt['touched'] = tdTransformedByOAcc
                                        polyStmt['reducedims'] = mReduceDims
                                        polyStmt['stmt'] = []
                                        polyStmt['perm_oacc'] = []
                                        polyStmt['domain'] = []
                                        polyStmt['outinfo'] = []
                                    scat = self.genGSSeq(oAccess[1])
                                    oMat2ll = oMat.toLL(acc=True, sep=True)
                                    if len(oMat2ll) > 1:
                                        wrap = ["$"+oMat2ll[1]+self.genGSSeq(oAccess[1], acc=True), ""]
                                    else:
                                        wrap = [self.genGSSeq(oAccess[1], acc=True),""]
                                    finStmt = StmtExpr(inputs, op)
                                    finStmt = finStmt if oAccess[3] is None else StmtExpr([finStmt], oAccess[3].inverse())
                                    polyStmt['stmt'].append( StmtExpr([finStmt], None, wrap) )
                                    polyStmt['perm_oacc'].append( oAccess[3] )
                                    polyStmt['domain'].append( td )
                                    polyStmt['acc'] = True
                                    polyStmt['outinfo'].append( [oMat2ll[0], scat] )
                                    if new_ps:
                                        polyStmts.append(polyStmt)
        expr.setPolyStmts(polyStmts)
    def T(self, expr, ctx):
        """Generate polyhedral statements for a matrix-transpose node.

        Note the output poly-info is indexed transposed (``oPInfo[c][r]``)
        relative to the input tile (r, c).  Builds unary (T) statement lists
        with accumulation variants and merges them into ``polyStmts`` grouped
        by touched output region.
        """
        getattr(self, expr.inexpr[0].__class__.__name__)(expr.inexpr[0], ctx)
        blkSub = expr.getInexprMat(0)
        blkOut = self.subsWithOut.get((self.eq_id, expr), expr).getOut()
        # blkOut = expr.getOut()
        set_ctx = self.set_from_ctx(ctx)
        subMat = self.getNonTileMatrix(expr.inexpr[0])
        oMat = self.subsWithOut.get((self.eq_id, expr), expr).getNonTileOut()
        subPinfo = blkSub.getPolyInfo(self.indices, baselevel=self.baselevel)
        oPInfo = blkOut.getPolyInfo(self.indices, baselevel=self.baselevel)
        expr.accIds = expr.inexpr[0].accIds
        sTrailIds, fullOAccSet, oAccMap = self.get_out_acc_set_and_map(expr, blkOut, ctx)
        polyStmts = []
        for r in range(len(subPinfo)):
            for c in range(len(subPinfo[0])):
                # Output tile is the transpose of the input tile position.
                spi,opi = subPinfo[r][c], oPInfo[c][r]
                trAccess = [ (sAccess, oAccess) for sAccess in spi['access'] for oAccess in opi['access'] ]
                # Transposing an all-constant block or writing a constant output is skipped.
                trStruct = [ (sStr,oStr) for sStr in spi['struct'].items() if not issubclass(sStr[0], AllEntriesConstantMatrix) for oStr in opi['struct'].items() if not issubclass(oStr[0], ConstantMatrix) ]
                for trStr in trStruct:
                    trDomain = self.joinAlignedSets(trStr[0][1], spi['tiling'], self.globalspace)
                    trDomain = self.joinAlignedSets(trDomain, trStr[1][1], self.globalspace)
                    trDomain = self.joinAlignedSets(trDomain, set_ctx, self.globalspace)
                    if trDomain.is_empty():
                        continue
                    for tAccess in trAccess:
                        oAccess = tAccess[1]
                        oAccSet = opi['access'][oAccess]
                        # Map full index space onto the 2-D output footprint (see Add).
                        mReduceDims = Map("{["+(",".join(self.indices))+"]->[i,j"+sTrailIds+"]: " + str(oAccess[2][0].of(0)) + "=i and " + str(oAccess[2][1].of(0)) + "=j}")
                        tDomainList, inputsList, opList = [], [], []
                        tAccDomainList, accInputsList, accOpList = [], [], []
                        tDomainList, tAccDomainList, inputsList, accInputsList, opList, accOpList = \
                            self.buildUnStmtLists(expr.inexpr[0], subMat, T, trDomain, spi, tAccess[0], oAccSet, mReduceDims, oAccMap, includeAccPss=True, par=True)
                        if tDomainList:
                            for td,inputs,op in zip(tDomainList,inputsList,opList):
                                if not td.is_empty():
                                    tdReduced = mReduceDims.intersect_domain(td).range()
                                    tdTransformedByOAcc = oAccMap.intersect_domain(tdReduced).range()
                                    # if fullOAccSet.intersect(tdTransformedByOAcc).is_empty():
                                    # fullOAccSet = fullOAccSet.union(tdTransformedByOAcc)
                                    # polyStmt = {}
                                    # NOTE(review): Py2-only filter()[0] idiom (see Add).
                                    overlapping_ps = filter(lambda ps: tdTransformedByOAcc == ps['touched'], polyStmts)
                                    polyStmt, new_ps = (overlapping_ps[0], False) if overlapping_ps else ({}, True)
                                    if new_ps:
                                        polyStmt['touched'] = tdTransformedByOAcc
                                        polyStmt['reducedims'] = mReduceDims
                                        polyStmt['stmt'] = []
                                        polyStmt['perm_oacc'] = []
                                        polyStmt['domain'] = []
                                        polyStmt['outinfo'] = []
                                    scat = self.genGSSeq(oAccess[1])
                                    oMat2ll = oMat.toLL(sep=True)
                                    if len(oMat2ll) > 1:
                                        wrap = [oMat2ll[1]+scat, ""]
                                    else:
                                        wrap = [scat, ""]
                                    finStmt = StmtExpr(inputs, op)
                                    finStmt = finStmt if oAccess[3] is None else StmtExpr([finStmt], oAccess[3].inverse())
                                    polyStmt['stmt'].append( StmtExpr([finStmt], None, wrap) )
                                    polyStmt['perm_oacc'].append( oAccess[3] )
                                    polyStmt['domain'].append( td )
                                    polyStmt['acc'] = False
                                    polyStmt['outinfo'].append( [oMat2ll[0], scat] )
                                    if new_ps:
                                        polyStmts.append(polyStmt)
                        # Accumulation variant of the same statements.
                        if tAccDomainList:
                            for td,inputs,op in zip(tAccDomainList, accInputsList, accOpList):
                                if not td.is_empty():
                                    tdReduced = mReduceDims.intersect_domain(td).range()
                                    tdTransformedByOAcc = oAccMap.intersect_domain(tdReduced).range()
                                    # if fullOAccSet.intersect(tdTransformedByOAcc).is_empty():
                                    # fullOAccSet = fullOAccSet.union(tdTransformedByOAcc)
                                    # polyStmt = {}
                                    overlapping_ps = filter(lambda ps: tdTransformedByOAcc == ps['touched'], polyStmts)
                                    polyStmt, new_ps = (overlapping_ps[0], False) if overlapping_ps else ({}, True)
                                    if new_ps:
                                        polyStmt['touched'] = tdTransformedByOAcc
                                        polyStmt['reducedims'] = mReduceDims
                                        polyStmt['stmt'] = []
                                        polyStmt['perm_oacc'] = []
                                        polyStmt['domain'] = []
                                        polyStmt['outinfo'] = []
                                    scat = self.genGSSeq(oAccess[1])
                                    oMat2ll = oMat.toLL(acc=True, sep=True)
                                    if len(oMat2ll) > 1:
                                        wrap = ["$"+oMat2ll[1]+self.genGSSeq(oAccess[1], acc=True), ""]
                                    else:
                                        wrap = [self.genGSSeq(oAccess[1], acc=True),""]
                                    finStmt = StmtExpr(inputs, op)
                                    finStmt = finStmt if oAccess[3] is None else StmtExpr([finStmt], oAccess[3].inverse())
                                    polyStmt['stmt'].append( StmtExpr([finStmt], None, wrap) )
                                    polyStmt['perm_oacc'].append( oAccess[3] )
                                    polyStmt['domain'].append( td )
                                    polyStmt['acc'] = True
                                    polyStmt['outinfo'].append( [oMat2ll[0], scat] )
                                    if new_ps:
                                        polyStmts.append(polyStmt)
        expr.setPolyStmts(polyStmts)
def Tile(self, expr, opts):
pass
def G(self, expr, opts):
pass
def Scalar(self, expr, opts):
pass
def SquaredMatrix(self, expr, opts):
pass
def LowerTriangular(self, expr, opts):
pass
def LowerUnitTriangular(self, expr, opts):
pass
def UpperTriangular(self, expr, opts):
pass
def UpperUnitTriangular(self, expr, opts):
pass
def Symmetric(self, expr, opts):
pass
def Matrix(self, expr, opts):
pass
def IdentityMatrix(self, expr, opts):
pass
if __name__ == '__main__':
    # Ad-hoc driver: convert a SCoP dump into SigmaCloog form and print it.
    # (Removed a redundant leading `pass`; `print(s)` is equivalent in both
    # Python 2 — parenthesized single-argument print — and Python 3.)
    import sigmacloog
    # sigmacloog.tosigma("/tmp/temp.scop", "/tmp/temp.sigma")
    s = sigmacloog.tosigma_str("/tmp/temp.scop")
    print(s)
# Constraint.equality_from_aff(c.get_aff())
# reva = c.get_aff().mul(Aff.read_from_str(c.get_ctx(),"{[]->[(-1)]}"))
# revc = Constraint.equality_from_aff(reva)
# s1 = Set("{[i,k]: 0<=i<4 and i<=k<4}")
# s2 = Set("{[k,j]: 0<=k<4 and 0<=j<=k}")
# c1 = s1.get_basic_sets()[0].get_constraints()
# c2 = s2.get_basic_sets()[0].get_constraints()
# # print c2[0].is_lower_bound(dim_type.set, 0)
# # print c2[3].is_lower_bound(dim_type.set, 0)
# # e0 = Constraint.equality_from_aff(c2[0].get_aff())
# # e1 = Constraint.equality_from_aff(c2[3].get_aff())
# import re
# aff_re = re.compile("{ \[.*\] -> \[(.*)\] }")
# ctx = Context()
# args = [ aff_re.search(str(c1[2].set_coefficient_val(dim_type.set, 1, 0).get_aff().mul(Aff.read_from_str(ctx,"{[]->[(-1)]}")) )) ]
# args.append( aff_re.search(str(c2[0].set_coefficient_val(dim_type.set, 0, 0).get_aff().mul(Aff.read_from_str(ctx,"{[]->[(-1)]}")) )) )
# args.append( aff_re.search(str(c2[3].set_coefficient_val(dim_type.set, 0, 0).get_aff().mul(Aff.read_from_str(ctx,"{[]->[(-1)]}")) )) )
#
# maxs = "max(" + args[0].group(1) + ", " + args[1].group(1) + ")"
# maxs = "max(" + maxs + ", " + args[2].group(1) + ")"
# pwaff = PwAff.read_from_str(ctx, "{[i,j,k]->[(k - "+maxs+")]}")
# pcs = pwaff.get_pieces()
#
# s = Set("{ [i, j, k] : 1=0 }")
# for pc in pcs:
# s = s.union( pc[0].add_constraint(Constraint.equality_from_aff(pc[1])) )
# print s
# print s.add_constraints([])
# # print c1.get_aff().union_add(c2.get_aff())
# # print Set("{[i,j,k]: 0<=i<4 and 0<=j<=k and k=max(max(0,i),j)}")
# # print Set("{[i,j,k]: 0<=i<4 and 0<=j<=k and k=0}") == Set("{[i,j,k]: 0<=i<4 and 0<=j<=k and k=max(0,i)}")
# # print c1.set_coefficient_val(dim_type.set, 1, 0).get_aff()
# # print c2.set_coefficient_val(dim_type.set, 0, 0).get_aff()
#
# # coeff = c.get_coefficient_val(dim_type.set, 1)
# # print s2
| 59.245401
| 256
| 0.482364
| 18,071
| 180,343
| 4.741298
| 0.051076
| 0.018324
| 0.010528
| 0.012605
| 0.779925
| 0.752264
| 0.724592
| 0.708648
| 0.696043
| 0.680007
| 0
| 0.01673
| 0.388149
| 180,343
| 3,043
| 257
| 59.26487
| 0.759758
| 0.254559
| 0
| 0.654401
| 0
| 0
| 0.031686
| 0.001205
| 0.000497
| 0
| 0
| 0
| 0
| 0
| null | null | 0.005967
| 0.004475
| null | null | 0.008951
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
964cb5e2f56e00b7eee5645e317b827e2c9c4b43
| 204
|
py
|
Python
|
aldryn_forms/signals.py
|
zbohm/aldryn-forms
|
752657a3334a749bd67c1c29ebde23af435a610a
|
[
"BSD-3-Clause"
] | null | null | null |
aldryn_forms/signals.py
|
zbohm/aldryn-forms
|
752657a3334a749bd67c1c29ebde23af435a610a
|
[
"BSD-3-Clause"
] | null | null | null |
aldryn_forms/signals.py
|
zbohm/aldryn-forms
|
752657a3334a749bd67c1c29ebde23af435a610a
|
[
"BSD-3-Clause"
] | 1
|
2021-05-31T01:27:59.000Z
|
2021-05-31T01:27:59.000Z
|
# -*- coding: utf-8 -*-
from django.dispatch import Signal
form_pre_save = Signal(providing_args=['instance', 'form', 'request'])
form_post_save = Signal(providing_args=['instance', 'form', 'request'])
| 29.142857
| 71
| 0.710784
| 26
| 204
| 5.346154
| 0.615385
| 0.143885
| 0.273381
| 0.330935
| 0.604317
| 0.604317
| 0.604317
| 0
| 0
| 0
| 0
| 0.005464
| 0.102941
| 204
| 6
| 72
| 34
| 0.754098
| 0.102941
| 0
| 0
| 0
| 0
| 0.209945
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
968333e09ace8c8f3ba77e21b4b88e591f6d2fb6
| 211
|
py
|
Python
|
GPy/examples/__init__.py
|
strongh/GPy
|
775ce9e64c1e8f472083b8f2430134047d97b2fa
|
[
"BSD-3-Clause"
] | 1
|
2015-08-06T13:47:10.000Z
|
2015-08-06T13:47:10.000Z
|
GPy/examples/__init__.py
|
strongh/GPy
|
775ce9e64c1e8f472083b8f2430134047d97b2fa
|
[
"BSD-3-Clause"
] | null | null | null |
GPy/examples/__init__.py
|
strongh/GPy
|
775ce9e64c1e8f472083b8f2430134047d97b2fa
|
[
"BSD-3-Clause"
] | 1
|
2021-12-09T01:31:17.000Z
|
2021-12-09T01:31:17.000Z
|
# Copyright (c) 2012-2014, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import classification
import regression
import dimensionality_reduction
import non_gaussian
| 26.375
| 59
| 0.810427
| 29
| 211
| 5.827586
| 0.758621
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.048649
| 0.123223
| 211
| 7
| 60
| 30.142857
| 0.864865
| 0.535545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9699dd8b6246f727d98fd62fc9a71dc2e5c1a0cc
| 7,115
|
py
|
Python
|
preprocessing.py
|
627oldcat/CNN_Virus
|
d47aa36a691b1024963abe37af794602b66467d2
|
[
"MIT"
] | null | null | null |
preprocessing.py
|
627oldcat/CNN_Virus
|
d47aa36a691b1024963abe37af794602b66467d2
|
[
"MIT"
] | null | null | null |
preprocessing.py
|
627oldcat/CNN_Virus
|
d47aa36a691b1024963abe37af794602b66467d2
|
[
"MIT"
] | 1
|
2022-01-21T03:39:57.000Z
|
2022-01-21T03:39:57.000Z
|
import keras
import numpy as np
#dictionary for one-hot encoding
d_nucl={"A":0,"C":1,"G":2,"T":3,"N":4}
#get different learning weights for different classes
def get_learning_weights(filepath):
f=open(filepath,"r").readlines()
d_weights={}
for i in f:
i=i.strip().split("\t")
d_weights[float(i[0])]=float(i[1])
return d_weights
#set default params for generating batches of 50-mer
def get_params_50mer():
params = {'batch_size': 1024,
'n_classes': 187,
'shuffle': True}
return params
#set default params for generating batches of 100-mer
def get_params_150mer():
params = {'batch_size': 101,
'n_classes': 187,
'shuffle': False}
return params
#get k-mers, labels and locations for 50-mer
#default format for each line of training files: kmer+"\t"+label+"\t"+location
def get_kmer_from_50mer(filepath):
f=open(filepath,"r").readlines()
f_matrix=[]
f_labels=[]
f_pos=[]
for i in f:
i=i.strip().split("\t")
f_matrix.append(i[0])
f_labels.append(i[1])
f_pos.append(i[2])
return f_matrix,f_labels,f_pos
#get k-mers, labels and locations for 150-mer
#default format for each line of training files: kmer+"\t"+label+"\t"+location
def get_kmer_from_150mer(filepath):
f=open(filepath,"r").readlines()
f_matrix=[]
f_labels=[]
f_pos=[]
for line in f:
line=line.strip().split("\t")
f_labels.append(line[1])
f_pos.append(line[2])
for i in range(len(line[0])-49):
kmer=line[0][i:i+50]
f_matrix.append(kmer)
return f_matrix,f_labels,f_pos
#get k-mers from RNA-seq files of COV-ID-19 patients
#default format for each line of training files: kmer
def get_kmer_from_realdata(filepath):
f=open(filepath,"r").readlines()
lines=[]
for i in range(0,len(f),4):
lines.append(f[i+1].strip())
f_matrix=[]
f_index=[]
sum_loc=0
for line in lines:
line=line.strip()
length_of_read=len(line)
if length_of_read>=50:
for i in range(len(line)-49):
kmer=line[i:i+50]
f_matrix.append(kmer)
sum_loc+=1
f_index.append(sum_loc)
return f_matrix,f_index
# simulated hiv-1 reads using santa-sim
# input: fastq format
def get_kmer_from_santi(filepath):
f=open(filepath,"r").readlines()
lines=[]
for i in range(0,len(f),4):
lines.append(f[i+1].strip())
f_matrix=[]
f_index=[]
sum_loc=0
for line in lines:
line=line.strip()
length_of_read=len(line)
if length_of_read>=50:
for i in range(len(line)-49):
kmer=line[i:i+50]
f_matrix.append(kmer)
sum_loc+=1
f_index.append(sum_loc)
return f_matrix,f_index
#data generator for generating batches of data from 50-mers
class DataGenerator_from_50mer(keras.utils.Sequence):
def __init__(self, f_matrix, f_labels, f_pos, batch_size=1024,n_classes=187, shuffle=True):
self.batch_size = batch_size
self.labels = f_labels
self.matrix = f_matrix
self.pos = f_pos
self.n_classes = n_classes
self.shuffle = shuffle
self.on_epoch_end()
def __len__(self):
return int(np.ceil(len(self.labels) / self.batch_size))
def __getitem__(self, index):
indexes = self.indexes[index*self.batch_size:(index+1)*self.batch_size]
X, y= self.__data_generation(indexes)
return X,y
def on_epoch_end(self):
self.indexes = np.arange(len(self.labels))
if self.shuffle == True:
np.random.shuffle(self.indexes)
def __data_generation(self, index):
x_train=[]
for i in index:
seq=self.matrix[i]
seq_list=[j for j in seq]
x_train.append(seq_list)
x_train=np.array(x_train)
x_tensor=np.zeros(list(x_train.shape)+[5])
for row in range(len(x_train)):
for col in range(50):
x_tensor[row,col,d_nucl[x_train[row,col]]]=1
y_pos=[]
y_label=[self.labels[i] for i in index]
y_label=np.array(y_label)
y_label=keras.utils.to_categorical(y_label, num_classes=self.n_classes)
y_pos=[self.pos[i] for i in index]
y_pos=np.array(y_pos)
y_pos=keras.utils.to_categorical(y_pos, num_classes=10)
return x_tensor,{'output1': y_label, 'output2': y_pos}
#data generator for generating batches of data from 50-mers for testing
class DataGenerator_from_50mer_testing(keras.utils.Sequence):
def __init__(self, f_matrix, batch_size=1024,shuffle=False):
self.batch_size = batch_size
self.matrix = f_matrix
self.shuffle = shuffle
self.on_epoch_end()
def __len__(self):
return int(np.ceil(len(self.matrix) / self.batch_size))
def __getitem__(self, index):
indexes = self.indexes[index*self.batch_size:(index+1)*self.batch_size]
X = self.__data_generation(indexes)
return X
def on_epoch_end(self):
self.indexes = np.arange(len(self.matrix))
if self.shuffle == True:
np.random.shuffle(self.indexes)
def __data_generation(self, index):
x_train=[]
for i in index:
seq=self.matrix[i]
seq_list=[j for j in seq]
x_train.append(seq_list)
x_train=np.array(x_train)
x_tensor=np.zeros(list(x_train.shape)+[5])
for row in range(len(x_train)):
for col in range(50):
x_tensor[row,col,d_nucl[x_train[row,col]]]=1
return x_tensor
#data generator for generating batches of data from 100-mers
class DataGenerator_from_150mer(keras.utils.Sequence):
def __init__(self, f_matrix, batch_size=101,n_classes=187, shuffle=False):
self.batch_size = batch_size
self.matrix = f_matrix
self.n_classes = n_classes
self.shuffle = shuffle
self.on_epoch_end()
def __len__(self):
return int(np.ceil(len(self.matrix) / self.batch_size))
def __getitem__(self, index):
indexes = self.indexes[index*self.batch_size:(index+1)*self.batch_size]
X = self.__data_generation(indexes)
return X
def on_epoch_end(self):
self.indexes = np.arange(len(self.matrix))
if self.shuffle == True:
np.random.shuffle(self.indexes)
def __data_generation(self, index):
x_train=[]
for i in index:
seq=self.matrix[i]
seq_list=[j for j in seq]
x_train.append(seq_list)
x_train=np.array(x_train)
x_tensor=np.zeros(list(x_train.shape)+[5])
for row in range(len(x_train)):
for col in range(50):
x_tensor[row,col,d_nucl[x_train[row,col]]]=1
return x_tensor
#data generator for generating batches of data from real-world data
class DataGenerator_from_realdata(keras.utils.Sequence):
def __init__(self, f_matrix,index_list,batch_size=51,n_classes=187, shuffle=False):
self.batch_size = batch_size
self.matrix = f_matrix
self.index_list=index_list
self.n_classes = n_classes
self.shuffle = shuffle
self.on_epoch_end()
def __len__(self):
return len(self.index_list)
def __getitem__(self, index):
if index==0:
indexes = self.indexes[0:self.index_list[index]]
else:
indexes = self.indexes[self.index_list[index-1]:self.index_list[index]]
X = self.__data_generation(indexes)
return X
def on_epoch_end(self):
self.indexes = np.arange(len(self.matrix))
if self.shuffle == True:
np.random.shuffle(self.indexes)
def __data_generation(self, indexes):
x_train=[]
for i in indexes:
seq=self.matrix[i]
seq_list=[j for j in seq]
x_train.append(seq_list)
x_train=np.array(x_train)
x_tensor=np.zeros(list(x_train.shape)+[5])
for row in range(len(x_train)):
for col in range(50):
x_tensor[row,col,d_nucl[x_train[row,col]]]=1
return x_tensor
| 30.021097
| 92
| 0.718904
| 1,242
| 7,115
| 3.888084
| 0.115137
| 0.03479
| 0.016152
| 0.027335
| 0.784635
| 0.767861
| 0.738041
| 0.7076
| 0.670325
| 0.653759
| 0
| 0.022653
| 0.143781
| 7,115
| 236
| 93
| 30.148305
| 0.770026
| 0.118201
| 0
| 0.730769
| 0
| 0
| 0.013103
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.129808
| false
| 0
| 0.009615
| 0.019231
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
96c265740becb460f87686d7ce14bbf2ecf1a19f
| 20
|
py
|
Python
|
checkov/version.py
|
tophersmith/checkov
|
33902841477db80f383f3e3f6f5ae208a7412f38
|
[
"Apache-2.0"
] | null | null | null |
checkov/version.py
|
tophersmith/checkov
|
33902841477db80f383f3e3f6f5ae208a7412f38
|
[
"Apache-2.0"
] | null | null | null |
checkov/version.py
|
tophersmith/checkov
|
33902841477db80f383f3e3f6f5ae208a7412f38
|
[
"Apache-2.0"
] | null | null | null |
version = '2.0.538'
| 10
| 19
| 0.6
| 4
| 20
| 3
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.294118
| 0.15
| 20
| 1
| 20
| 20
| 0.411765
| 0
| 0
| 0
| 0
| 0
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
73b47b690fe7bdea9af81f8d6b4d6ebefc3437b5
| 10,235
|
py
|
Python
|
saleor/plugins/user_email/notify_events.py
|
felipearmat/saleor
|
34c01912fede74dae45edfd23c1bfdca8ad26e35
|
[
"CC-BY-4.0"
] | null | null | null |
saleor/plugins/user_email/notify_events.py
|
felipearmat/saleor
|
34c01912fede74dae45edfd23c1bfdca8ad26e35
|
[
"CC-BY-4.0"
] | 66
|
2021-08-30T04:27:20.000Z
|
2022-03-28T04:39:20.000Z
|
saleor/plugins/user_email/notify_events.py
|
felipearmat/saleor
|
34c01912fede74dae45edfd23c1bfdca8ad26e35
|
[
"CC-BY-4.0"
] | null | null | null |
from ..email_common import get_email_subject, get_email_template_or_default
from . import constants
from .tasks import (
send_account_confirmation_email_task,
send_account_delete_confirmation_email_task,
send_fulfillment_confirmation_email_task,
send_fulfillment_update_email_task,
send_invoice_email_task,
send_order_canceled_email_task,
send_order_confirmation_email_task,
send_order_confirmed_email_task,
send_order_refund_email_task,
send_password_reset_email_task,
send_payment_confirmation_email_task,
send_request_email_change_email_task,
send_set_user_password_email_task,
send_user_change_email_notification_task,
)
def send_account_password_reset_event(
payload: dict, config: dict, plugin_configuration: list
):
recipient_email = payload["recipient_email"]
template = get_email_template_or_default(
plugin_configuration,
constants.ACCOUNT_PASSWORD_RESET_TEMPLATE_FIELD,
constants.ACCOUNT_PASSWORD_RESET_DEFAULT_TEMPLATE,
constants.DEFAULT_EMAIL_TEMPLATES_PATH,
)
subject = get_email_subject(
plugin_configuration,
constants.ACCOUNT_PASSWORD_RESET_SUBJECT_FIELD,
constants.ACCOUNT_PASSWORD_RESET_DEFAULT_SUBJECT,
)
send_password_reset_email_task.delay(
recipient_email,
payload,
config,
subject,
template,
)
def send_account_confirmation(payload: dict, config: dict, plugin_configuration: list):
recipient_email = payload["recipient_email"]
template = get_email_template_or_default(
plugin_configuration,
constants.ACCOUNT_CONFIRMATION_TEMPLATE_FIELD,
constants.ACCOUNT_CONFIRMATION_DEFAULT_TEMPLATE,
constants.DEFAULT_EMAIL_TEMPLATES_PATH,
)
subject = get_email_subject(
plugin_configuration,
constants.ACCOUNT_CONFIRMATION_SUBJECT_FIELD,
constants.ACCOUNT_CONFIRMATION_DEFAULT_SUBJECT,
)
send_account_confirmation_email_task.delay(
recipient_email, payload, config, subject, template
)
def send_account_change_email_request(
payload: dict, config: dict, plugin_configuration: list
):
recipient_email = payload["recipient_email"]
template = get_email_template_or_default(
plugin_configuration,
constants.ACCOUNT_CHANGE_EMAIL_REQUEST_TEMPLATE_FIELD,
constants.ACCOUNT_CHANGE_EMAIL_REQUEST_DEFAULT_TEMPLATE,
constants.DEFAULT_EMAIL_TEMPLATES_PATH,
)
subject = get_email_subject(
plugin_configuration,
constants.ACCOUNT_CHANGE_EMAIL_REQUEST_SUBJECT_FIELD,
constants.ACCOUNT_CHANGE_EMAIL_REQUEST_DEFAULT_SUBJECT,
)
send_request_email_change_email_task.delay(
recipient_email, payload, config, subject, template
)
def send_account_change_email_confirm(
payload: dict, config: dict, plugin_configuration: list
):
recipient_email = payload["recipient_email"]
template = get_email_template_or_default(
plugin_configuration,
constants.ACCOUNT_CHANGE_EMAIL_CONFIRM_TEMPLATE_FIELD,
constants.ACCOUNT_CHANGE_EMAIL_CONFIRM_DEFAULT_TEMPLATE,
constants.DEFAULT_EMAIL_TEMPLATES_PATH,
)
subject = get_email_subject(
plugin_configuration,
constants.ACCOUNT_CHANGE_EMAIL_CONFIRM_SUBJECT_FIELD,
constants.ACCOUNT_CHANGE_EMAIL_CONFIRM_DEFAULT_SUBJECT,
)
send_user_change_email_notification_task.delay(
recipient_email, payload, config, subject, template
)
def send_account_delete(payload: dict, config: dict, plugin_configuration: list):
recipient_email = payload["recipient_email"]
template = get_email_template_or_default(
plugin_configuration,
constants.ACCOUNT_DELETE_TEMPLATE_FIELD,
constants.ACCOUNT_DELETE_DEFAULT_TEMPLATE,
constants.DEFAULT_EMAIL_TEMPLATES_PATH,
)
subject = get_email_subject(
plugin_configuration,
constants.ACCOUNT_DELETE_SUBJECT_FIELD,
constants.ACCOUNT_DELETE_DEFAULT_SUBJECT,
)
send_account_delete_confirmation_email_task.delay(
recipient_email, payload, config, subject, template
)
def send_account_set_customer_password(
payload: dict, config: dict, plugin_configuration: list
):
recipient_email = payload["recipient_email"]
template = get_email_template_or_default(
plugin_configuration,
constants.ACCOUNT_SET_CUSTOMER_PASSWORD_TEMPLATE_FIELD,
constants.ACCOUNT_SET_CUSTOMER_PASSWORD_DEFAULT_TEMPLATE,
constants.DEFAULT_EMAIL_TEMPLATES_PATH,
)
subject = get_email_subject(
plugin_configuration,
constants.ACCOUNT_SET_CUSTOMER_PASSWORD_SUBJECT_FIELD,
constants.ACCOUNT_SET_CUSTOMER_PASSWORD_DEFAULT_SUBJECT,
)
send_set_user_password_email_task.delay(
recipient_email, payload, config, subject, template
)
def send_invoice(payload: dict, config: dict, plugin_configuration: list):
recipient_email = payload["recipient_email"]
template = get_email_template_or_default(
plugin_configuration,
constants.INVOICE_READY_TEMPLATE_FIELD,
constants.INVOICE_READY_DEFAULT_TEMPLATE,
constants.DEFAULT_EMAIL_TEMPLATES_PATH,
)
subject = get_email_subject(
plugin_configuration,
constants.INVOICE_READY_SUBJECT_FIELD,
constants.INVOICE_READY_DEFAULT_SUBJECT,
)
send_invoice_email_task.delay(recipient_email, payload, config, subject, template)
def send_order_confirmation(payload: dict, config: dict, plugin_configuration: list):
recipient_email = payload["recipient_email"]
template = get_email_template_or_default(
plugin_configuration,
constants.ORDER_CONFIRMATION_TEMPLATE_FIELD,
constants.ORDER_CONFIRMATION_DEFAULT_TEMPLATE,
constants.DEFAULT_EMAIL_TEMPLATES_PATH,
)
subject = get_email_subject(
plugin_configuration,
constants.ORDER_CONFIRMATION_SUBJECT_FIELD,
constants.ORDER_CONFIRMATION_DEFAULT_SUBJECT,
)
send_order_confirmation_email_task.delay(
recipient_email, payload, config, subject, template
)
def send_fulfillment_confirmation(
payload: dict, config: dict, plugin_configuration: list
):
recipient_email = payload["recipient_email"]
template = get_email_template_or_default(
plugin_configuration,
constants.ORDER_FULFILLMENT_CONFIRMATION_TEMPLATE_FIELD,
constants.ORDER_FULFILLMENT_CONFIRMATION_DEFAULT_TEMPLATE,
constants.DEFAULT_EMAIL_TEMPLATES_PATH,
)
subject = get_email_subject(
plugin_configuration,
constants.ORDER_FULFILLMENT_CONFIRMATION_SUBJECT_FIELD,
constants.ORDER_FULFILLMENT_CONFIRMATION_DEFAULT_SUBJECT,
)
send_fulfillment_confirmation_email_task.delay(
recipient_email, payload, config, subject, template
)
def send_fulfillment_update(payload: dict, config: dict, plugin_configuration: list):
recipient_email = payload["recipient_email"]
template = get_email_template_or_default(
plugin_configuration,
constants.ORDER_FULFILLMENT_UPDATE_TEMPLATE_FIELD,
constants.ORDER_FULFILLMENT_UPDATE_DEFAULT_TEMPLATE,
constants.DEFAULT_EMAIL_TEMPLATES_PATH,
)
subject = get_email_subject(
plugin_configuration,
constants.ORDER_FULFILLMENT_UPDATE_SUBJECT_FIELD,
constants.ORDER_FULFILLMENT_UPDATE_DEFAULT_SUBJECT,
)
send_fulfillment_update_email_task.delay(
recipient_email, payload, config, subject, template
)
def send_payment_confirmation(payload: dict, config: dict, plugin_configuration: list):
recipient_email = payload["recipient_email"]
template = get_email_template_or_default(
plugin_configuration,
constants.ORDER_PAYMENT_CONFIRMATION_TEMPLATE_FIELD,
constants.ORDER_PAYMENT_CONFIRMATION_DEFAULT_TEMPLATE,
constants.DEFAULT_EMAIL_TEMPLATES_PATH,
)
subject = get_email_subject(
plugin_configuration,
constants.ORDER_PAYMENT_CONFIRMATION_SUBJECT_FIELD,
constants.ORDER_PAYMENT_CONFIRMATION_DEFAULT_SUBJECT,
)
send_payment_confirmation_email_task.delay(
recipient_email, payload, config, subject, template
)
def send_order_canceled(payload: dict, config: dict, plugin_configuration: list):
recipient_email = payload["recipient_email"]
template = get_email_template_or_default(
plugin_configuration,
constants.ORDER_CANCELED_TEMPLATE_FIELD,
constants.ORDER_CANCELED_DEFAULT_TEMPLATE,
constants.DEFAULT_EMAIL_TEMPLATES_PATH,
)
subject = get_email_subject(
plugin_configuration,
constants.ORDER_CANCELED_SUBJECT_FIELD,
constants.ORDER_CANCELED_DEFAULT_SUBJECT,
)
send_order_canceled_email_task.delay(
recipient_email, payload, config, subject, template
)
def send_order_refund(payload: dict, config: dict, plugin_configuration: list):
recipient_email = payload["recipient_email"]
template = get_email_template_or_default(
plugin_configuration,
constants.ORDER_REFUND_CONFIRMATION_TEMPLATE_FIELD,
constants.ORDER_REFUND_CONFIRMATION_DEFAULT_TEMPLATE,
constants.DEFAULT_EMAIL_TEMPLATES_PATH,
)
subject = get_email_subject(
plugin_configuration,
constants.ORDER_REFUND_CONFIRMATION_SUBJECT_FIELD,
constants.ORDER_REFUND_CONFIRMATION_DEFAULT_SUBJECT,
)
send_order_refund_email_task.delay(
recipient_email, payload, config, subject, template
)
def send_order_confirmed(payload: dict, config: dict, plugin_configuration: list):
recipient_email = payload["recipient_email"]
template = get_email_template_or_default(
plugin_configuration,
constants.ORDER_CONFIRMED_TEMPLATE_FIELD,
constants.ORDER_CONFIRMED_DEFAULT_TEMPLATE,
constants.DEFAULT_EMAIL_TEMPLATES_PATH,
)
subject = get_email_subject(
plugin_configuration,
constants.ORDER_CONFIRMED_SUBJECT_FIELD,
constants.ORDER_CONFIRMED_DEFAULT_SUBJECT,
)
send_order_confirmed_email_task.delay(
recipient_email, payload, config, subject, template
)
| 36.166078
| 87
| 0.760625
| 1,083
| 10,235
| 6.65097
| 0.043398
| 0.110787
| 0.081633
| 0.037484
| 0.943912
| 0.806331
| 0.700819
| 0.655977
| 0.655977
| 0.648202
| 0
| 0
| 0.185247
| 10,235
| 282
| 88
| 36.294326
| 0.863773
| 0
| 0
| 0.417323
| 0
| 0
| 0.020518
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055118
| false
| 0.055118
| 0.011811
| 0
| 0.066929
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
73ee97eaff126cec169072c566dcf3c4e954a650
| 446
|
py
|
Python
|
corehq/util/datadog/metrics.py
|
kkrampa/commcare-hq
|
d64d7cad98b240325ad669ccc7effb07721b4d44
|
[
"BSD-3-Clause"
] | 1
|
2020-05-05T13:10:01.000Z
|
2020-05-05T13:10:01.000Z
|
corehq/util/datadog/metrics.py
|
kkrampa/commcare-hq
|
d64d7cad98b240325ad669ccc7effb07721b4d44
|
[
"BSD-3-Clause"
] | 1
|
2019-12-09T14:00:14.000Z
|
2019-12-09T14:00:14.000Z
|
corehq/util/datadog/metrics.py
|
MaciejChoromanski/commcare-hq
|
fd7f65362d56d73b75a2c20d2afeabbc70876867
|
[
"BSD-3-Clause"
] | 5
|
2015-11-30T13:12:45.000Z
|
2019-07-01T19:27:07.000Z
|
from __future__ import unicode_literals
JSERROR_COUNT = 'commcare.jserror.count'
ERROR_COUNT = 'commcare.error.count'
REPEATER_ERROR_COUNT = 'commcare.repeaters.error'
REPEATER_SUCCESS_COUNT = 'commcare.repeaters.success'
MULTIMEDIA_SUBMISSION_ERROR_COUNT = 'commcare.corrupt-multimedia-submission.error.count'
DATE_OPENED_CASEBLOCK_ERROR_COUNT = 'commcare.date-opened-caseblock-bug.error.count'
XFORM_LOCKED_COUNT = 'commcare.xformlocked.count'
| 49.555556
| 88
| 0.849776
| 55
| 446
| 6.527273
| 0.4
| 0.253482
| 0.200557
| 0.167131
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056054
| 446
| 8
| 89
| 55.75
| 0.852732
| 0
| 0
| 0
| 0
| 0
| 0.479821
| 0.434978
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.125
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fb47c07aade898257b6644b60f28accbc0fcac0b
| 71
|
py
|
Python
|
backend/core/utils/__init__.py
|
iamnkc/tournesol
|
4a09985f494577917c357783a37dfae02c57fd82
|
[
"CC0-1.0"
] | null | null | null |
backend/core/utils/__init__.py
|
iamnkc/tournesol
|
4a09985f494577917c357783a37dfae02c57fd82
|
[
"CC0-1.0"
] | null | null | null |
backend/core/utils/__init__.py
|
iamnkc/tournesol
|
4a09985f494577917c357783a37dfae02c57fd82
|
[
"CC0-1.0"
] | null | null | null |
"""
Utils methjods for Tournesol's core app
"""
from .models import *
| 11.833333
| 39
| 0.690141
| 10
| 71
| 4.9
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183099
| 71
| 5
| 40
| 14.2
| 0.844828
| 0.549296
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fb4f404ec35766a39753ca9e4b48faec1cac01b1
| 292
|
py
|
Python
|
example/tests/checkout/test_checkout_utils.py
|
icvntechstudio/django-salesman
|
017dd31713e37a445500c18e0c7034608f4f62a7
|
[
"BSD-3-Clause"
] | 222
|
2020-02-03T16:58:56.000Z
|
2022-03-30T16:35:35.000Z
|
example/tests/checkout/test_checkout_utils.py
|
icvntechstudio/django-salesman
|
017dd31713e37a445500c18e0c7034608f4f62a7
|
[
"BSD-3-Clause"
] | 16
|
2020-03-17T12:38:27.000Z
|
2022-03-16T13:14:55.000Z
|
example/tests/checkout/test_checkout_utils.py
|
icvntechstudio/django-salesman
|
017dd31713e37a445500c18e0c7034608f4f62a7
|
[
"BSD-3-Clause"
] | 23
|
2020-08-28T04:46:33.000Z
|
2022-01-12T21:57:39.000Z
|
import pytest
from django.core.exceptions import ValidationError
from salesman.checkout import utils
def test_validate_address():
with pytest.raises(ValidationError):
assert utils.validate_address('', context={})
assert utils.validate_address('Test', context={}) == 'Test'
| 26.545455
| 63
| 0.75
| 33
| 292
| 6.515152
| 0.545455
| 0.209302
| 0.176744
| 0.24186
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140411
| 292
| 10
| 64
| 29.2
| 0.856574
| 0
| 0
| 0
| 0
| 0
| 0.027397
| 0
| 0
| 0
| 0
| 0
| 0.285714
| 1
| 0.142857
| true
| 0
| 0.428571
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fb89d8e600d06b4a83a701488ef8247431fedfdc
| 127
|
py
|
Python
|
pretty_tables/__init__.py
|
Justintime50/pretty-tables
|
d3a5d2864276225cff65b036a84bd32bb871266a
|
[
"MIT"
] | 6
|
2020-11-08T02:09:12.000Z
|
2021-11-23T07:40:28.000Z
|
pretty_tables/__init__.py
|
Justintime50/pretty-tables
|
d3a5d2864276225cff65b036a84bd32bb871266a
|
[
"MIT"
] | 2
|
2020-12-02T20:08:16.000Z
|
2021-11-23T05:49:09.000Z
|
pretty_tables/__init__.py
|
Justintime50/pretty-tables
|
d3a5d2864276225cff65b036a84bd32bb871266a
|
[
"MIT"
] | 2
|
2020-12-02T18:18:45.000Z
|
2020-12-30T11:17:44.000Z
|
from pretty_tables.formatting import Colors
from pretty_tables.tables import create
__all__ = [
'Colors',
'create',
]
| 15.875
| 43
| 0.732283
| 15
| 127
| 5.8
| 0.533333
| 0.229885
| 0.367816
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181102
| 127
| 7
| 44
| 18.142857
| 0.836538
| 0
| 0
| 0
| 0
| 0
| 0.094488
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
fbb5b63543a6069670d77281aa5c9d62ca9f9ce2
| 12,271
|
py
|
Python
|
pybind/slxos/v16r_1_00b/brocade_xstp_ext_rpc/get_stp_brief_info/output/spanning_tree_info/stp/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v16r_1_00b/brocade_xstp_ext_rpc/get_stp_brief_info/output/spanning_tree_info/stp/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v16r_1_00b/brocade_xstp_ext_rpc/get_stp_brief_info/output/spanning_tree_info/stp/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | 1
|
2021-11-05T22:15:42.000Z
|
2021-11-05T22:15:42.000Z
|
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import root_bridge
import bridge
import port
class stp(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-xstp-ext - based on the path /brocade_xstp_ext_rpc/get-stp-brief-info/output/spanning-tree-info/stp. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__root_bridge','__bridge','__port',)
_yang_name = 'stp'
_rest_name = 'stp'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__bridge = YANGDynClass(base=bridge.bridge, is_container='container', presence=False, yang_name="bridge", rest_name="bridge", parent=self, choice=(u'spanning-tree-mode', u'stp'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='container', is_config=True)
self.__root_bridge = YANGDynClass(base=root_bridge.root_bridge, is_container='container', presence=False, yang_name="root-bridge", rest_name="root-bridge", parent=self, choice=(u'spanning-tree-mode', u'stp'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='container', is_config=True)
self.__port = YANGDynClass(base=YANGListType(False,port.port, yang_name="port", rest_name="port", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='False', extensions=None, choice=(u'spanning-tree-mode', u'stp')), is_container='list', yang_name="port", rest_name="port", parent=self, choice=(u'spanning-tree-mode', u'stp'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='list', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'brocade_xstp_ext_rpc', u'get-stp-brief-info', u'output', u'spanning-tree-info', u'stp']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'get-stp-brief-info', u'output', u'spanning-tree-info', u'stp']
def _get_root_bridge(self):
"""
Getter method for root_bridge, mapped from YANG variable /brocade_xstp_ext_rpc/get_stp_brief_info/output/spanning_tree_info/stp/root_bridge (container)
"""
return self.__root_bridge
def _set_root_bridge(self, v, load=False):
"""
Setter method for root_bridge, mapped from YANG variable /brocade_xstp_ext_rpc/get_stp_brief_info/output/spanning_tree_info/stp/root_bridge (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_root_bridge is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_root_bridge() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=root_bridge.root_bridge, is_container='container', presence=False, yang_name="root-bridge", rest_name="root-bridge", parent=self, choice=(u'spanning-tree-mode', u'stp'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """root_bridge must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=root_bridge.root_bridge, is_container='container', presence=False, yang_name="root-bridge", rest_name="root-bridge", parent=self, choice=(u'spanning-tree-mode', u'stp'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='container', is_config=True)""",
})
self.__root_bridge = t
if hasattr(self, '_set'):
self._set()
def _unset_root_bridge(self):
self.__root_bridge = YANGDynClass(base=root_bridge.root_bridge, is_container='container', presence=False, yang_name="root-bridge", rest_name="root-bridge", parent=self, choice=(u'spanning-tree-mode', u'stp'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='container', is_config=True)
def _get_bridge(self):
"""
Getter method for bridge, mapped from YANG variable /brocade_xstp_ext_rpc/get_stp_brief_info/output/spanning_tree_info/stp/bridge (container)
"""
return self.__bridge
def _set_bridge(self, v, load=False):
"""
Setter method for bridge, mapped from YANG variable /brocade_xstp_ext_rpc/get_stp_brief_info/output/spanning_tree_info/stp/bridge (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_bridge is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_bridge() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=bridge.bridge, is_container='container', presence=False, yang_name="bridge", rest_name="bridge", parent=self, choice=(u'spanning-tree-mode', u'stp'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bridge must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=bridge.bridge, is_container='container', presence=False, yang_name="bridge", rest_name="bridge", parent=self, choice=(u'spanning-tree-mode', u'stp'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='container', is_config=True)""",
})
self.__bridge = t
if hasattr(self, '_set'):
self._set()
def _unset_bridge(self):
self.__bridge = YANGDynClass(base=bridge.bridge, is_container='container', presence=False, yang_name="bridge", rest_name="bridge", parent=self, choice=(u'spanning-tree-mode', u'stp'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='container', is_config=True)
def _get_port(self):
"""
Getter method for port, mapped from YANG variable /brocade_xstp_ext_rpc/get_stp_brief_info/output/spanning_tree_info/stp/port (list)
"""
return self.__port
def _set_port(self, v, load=False):
"""
Setter method for port, mapped from YANG variable /brocade_xstp_ext_rpc/get_stp_brief_info/output/spanning_tree_info/stp/port (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_port is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_port() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType(False,port.port, yang_name="port", rest_name="port", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='False', extensions=None, choice=(u'spanning-tree-mode', u'stp')), is_container='list', yang_name="port", rest_name="port", parent=self, choice=(u'spanning-tree-mode', u'stp'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """port must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType(False,port.port, yang_name="port", rest_name="port", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='False', extensions=None, choice=(u'spanning-tree-mode', u'stp')), is_container='list', yang_name="port", rest_name="port", parent=self, choice=(u'spanning-tree-mode', u'stp'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='list', is_config=True)""",
})
self.__port = t
if hasattr(self, '_set'):
self._set()
def _unset_port(self):
self.__port = YANGDynClass(base=YANGListType(False,port.port, yang_name="port", rest_name="port", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='False', extensions=None, choice=(u'spanning-tree-mode', u'stp')), is_container='list', yang_name="port", rest_name="port", parent=self, choice=(u'spanning-tree-mode', u'stp'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='list', is_config=True)
root_bridge = __builtin__.property(_get_root_bridge, _set_root_bridge)
bridge = __builtin__.property(_get_bridge, _set_bridge)
port = __builtin__.property(_get_port, _set_port)
__choices__ = {u'spanning-tree-mode': {u'stp': [u'root_bridge', u'bridge', u'port']}}
_pyangbind_elements = {'root_bridge': root_bridge, 'bridge': bridge, 'port': port, }
| 62.607143
| 610
| 0.731155
| 1,711
| 12,271
| 4.985389
| 0.099942
| 0.053927
| 0.054162
| 0.03388
| 0.777491
| 0.743962
| 0.728839
| 0.723329
| 0.717233
| 0.713482
| 0
| 0.000566
| 0.136419
| 12,271
| 195
| 611
| 62.928205
| 0.804379
| 0.148399
| 0
| 0.393939
| 0
| 0.022727
| 0.30998
| 0.120299
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0.083333
| 0
| 0.310606
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fbda83204228817f208a80aa260a51153578d723
| 220
|
py
|
Python
|
vt_graph_api/__init__.py
|
ianhelle/vt-graph-api
|
8f20031db833cb2cf122c7b9a0102624bc1cefa5
|
[
"Apache-2.0"
] | 28
|
2020-01-24T09:52:51.000Z
|
2022-02-23T19:22:49.000Z
|
vt_graph_api/__init__.py
|
ianhelle/vt-graph-api
|
8f20031db833cb2cf122c7b9a0102624bc1cefa5
|
[
"Apache-2.0"
] | 4
|
2021-01-21T11:46:18.000Z
|
2022-02-01T16:13:40.000Z
|
vt_graph_api/__init__.py
|
ianhelle/vt-graph-api
|
8f20031db833cb2cf122c7b9a0102624bc1cefa5
|
[
"Apache-2.0"
] | 8
|
2020-12-14T01:00:29.000Z
|
2022-03-02T10:07:43.000Z
|
"""vt_graph_api.
vt_graph_api package exports.
"""
from vt_graph_api.graph import VTGraph
from vt_graph_api.node import Node
from vt_graph_api.version import __version__
__all__ = ["Node", "VTGraph", "__version__"]
| 16.923077
| 44
| 0.777273
| 33
| 220
| 4.515152
| 0.333333
| 0.234899
| 0.33557
| 0.281879
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122727
| 220
| 12
| 45
| 18.333333
| 0.772021
| 0.2
| 0
| 0
| 0
| 0
| 0.130178
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8379ade5be84af88f8da089e25683418d056f774
| 529
|
py
|
Python
|
db_destroy.py
|
huntingtonwest/OFFICIAL
|
728e6d6ea35b9b334622f28fb6788020c60c1607
|
[
"MIT"
] | null | null | null |
db_destroy.py
|
huntingtonwest/OFFICIAL
|
728e6d6ea35b9b334622f28fb6788020c60c1607
|
[
"MIT"
] | null | null | null |
db_destroy.py
|
huntingtonwest/OFFICIAL
|
728e6d6ea35b9b334622f28fb6788020c60c1607
|
[
"MIT"
] | null | null | null |
#!flask/bin/python
from migrate.versioning import api
#from dev_config import SQLALCHEMY_DATABASE_URI
#from dev_config import SQLALCHEMY_MIGRATE_REPO
from server import db
import os.path
db.reflect()
db.drop_all()
# if not os.path.exists(SQLALCHEMY_MIGRATE_REPO):
# api.create(SQLALCHEMY_MIGRATE_REPO, 'database repository')
# api.version_control(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
# else:
# api.version_control(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, api.version(SQLALCHEMY_MIGRATE_REPO))
| 33.0625
| 113
| 0.820416
| 73
| 529
| 5.630137
| 0.39726
| 0.248175
| 0.306569
| 0.092457
| 0.428224
| 0.287105
| 0.287105
| 0.287105
| 0.287105
| 0
| 0
| 0
| 0.098299
| 529
| 15
| 114
| 35.266667
| 0.861635
| 0.778828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
839409077e8a3430e880848b62c92fd4e275fb2f
| 55
|
py
|
Python
|
urlshortener/__init__.py
|
RaRhAeu/URLShortener
|
d53025e47fe16c98cee69e5a3767e500e5ff40fa
|
[
"MIT"
] | 1
|
2020-05-27T06:44:20.000Z
|
2020-05-27T06:44:20.000Z
|
urlshortener/__init__.py
|
RaRhAeu/URLShortener
|
d53025e47fe16c98cee69e5a3767e500e5ff40fa
|
[
"MIT"
] | null | null | null |
urlshortener/__init__.py
|
RaRhAeu/URLShortener
|
d53025e47fe16c98cee69e5a3767e500e5ff40fa
|
[
"MIT"
] | null | null | null |
# flake8: noqa
from urlshortener.app import create_app
| 18.333333
| 39
| 0.818182
| 8
| 55
| 5.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020833
| 0.127273
| 55
| 2
| 40
| 27.5
| 0.895833
| 0.218182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
83a23c5453cd1f61bb96013b7364c977cc8822c1
| 893
|
py
|
Python
|
tests/auth/test_basic.py
|
d3no/mocean-sdk-python
|
cbc215a0eb8aa26c04afb940eab6482f23150c75
|
[
"MIT"
] | 2
|
2019-10-31T02:37:43.000Z
|
2021-07-25T02:45:27.000Z
|
tests/auth/test_basic.py
|
d3no/mocean-sdk-python
|
cbc215a0eb8aa26c04afb940eab6482f23150c75
|
[
"MIT"
] | 18
|
2019-05-30T01:09:34.000Z
|
2022-01-04T07:31:47.000Z
|
tests/auth/test_basic.py
|
d3no/mocean-sdk-python
|
cbc215a0eb8aa26c04afb940eab6482f23150c75
|
[
"MIT"
] | 4
|
2019-04-19T08:34:47.000Z
|
2021-07-21T02:02:07.000Z
|
from unittest import TestCase
from moceansdk import Basic
class TestBasic(TestCase):
def setUp(self):
self.basic = Basic()
def test_set_api_key(self):
self.basic.set_api_key('test api key')
self.assertEqual(self.basic.get_params()[
'mocean-api-key'], 'test api key')
def test_set_api_secret(self):
self.basic.set_api_secret('test api secret')
self.assertEqual(self.basic.get_params()[
'mocean-api-secret'], 'test api secret')
def test_get_params(self):
self.basic.set_api_key('test api key')
self.basic.set_api_secret('test api secret')
self.assertEqual(self.basic.get_params()[
'mocean-api-key'], 'test api key')
self.assertEqual(self.basic.get_params()[
'mocean-api-secret'], 'test api secret')
| 30.793103
| 65
| 0.601344
| 114
| 893
| 4.535088
| 0.175439
| 0.156673
| 0.10058
| 0.116054
| 0.711799
| 0.704062
| 0.704062
| 0.704062
| 0.704062
| 0.704062
| 0
| 0
| 0.278835
| 893
| 28
| 66
| 31.892857
| 0.802795
| 0
| 0
| 0.6
| 0
| 0
| 0.19037
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.2
| false
| 0
| 0.1
| 0
| 0.35
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
83c3a30f7a15396b3565ba4806a03c89516a5bb0
| 154
|
py
|
Python
|
salaryhedge/salaryhedge/salaryServer/admin.py
|
focalpointgit/djangoReactDemo
|
108cae09f1720179648c4e71192b62856ae6b5f0
|
[
"MIT"
] | null | null | null |
salaryhedge/salaryhedge/salaryServer/admin.py
|
focalpointgit/djangoReactDemo
|
108cae09f1720179648c4e71192b62856ae6b5f0
|
[
"MIT"
] | null | null | null |
salaryhedge/salaryhedge/salaryServer/admin.py
|
focalpointgit/djangoReactDemo
|
108cae09f1720179648c4e71192b62856ae6b5f0
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Address, Asset
# Register your models here.
admin.site.register(Address)
admin.site.register(Asset)
| 22
| 34
| 0.805195
| 22
| 154
| 5.636364
| 0.545455
| 0.145161
| 0.274194
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11039
| 154
| 6
| 35
| 25.666667
| 0.905109
| 0.168831
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
83eee495d3612007dd04fea9b8259448e1961070
| 112
|
py
|
Python
|
Mundo 1_Fundamentos/Desafio_30.py
|
VictorOliveira02/Desafios-Python3-Curso-em-Video
|
53ee8bd814b816f3a21936677ef3f155b582843f
|
[
"MIT"
] | null | null | null |
Mundo 1_Fundamentos/Desafio_30.py
|
VictorOliveira02/Desafios-Python3-Curso-em-Video
|
53ee8bd814b816f3a21936677ef3f155b582843f
|
[
"MIT"
] | null | null | null |
Mundo 1_Fundamentos/Desafio_30.py
|
VictorOliveira02/Desafios-Python3-Curso-em-Video
|
53ee8bd814b816f3a21936677ef3f155b582843f
|
[
"MIT"
] | null | null | null |
n = float(input('Digite um valor: '))
if n % 2 == 0:
print(f'{n} é PAR!')
else:
print(f'{n} é ÍMPAR!')
| 16
| 37
| 0.508929
| 21
| 112
| 2.714286
| 0.714286
| 0.210526
| 0.245614
| 0.280702
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02381
| 0.25
| 112
| 6
| 38
| 18.666667
| 0.654762
| 0
| 0
| 0
| 0
| 0
| 0.348214
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
83f5691ab7b7a74515c04fa1d0080efa5ff34a44
| 39
|
py
|
Python
|
VisualGraphTheory/__main__.py
|
iflor413/VisualGraphTheory
|
19f152a3e966a2e70f4a90bc4e3f410044f2c989
|
[
"MIT"
] | null | null | null |
VisualGraphTheory/__main__.py
|
iflor413/VisualGraphTheory
|
19f152a3e966a2e70f4a90bc4e3f410044f2c989
|
[
"MIT"
] | null | null | null |
VisualGraphTheory/__main__.py
|
iflor413/VisualGraphTheory
|
19f152a3e966a2e70f4a90bc4e3f410044f2c989
|
[
"MIT"
] | null | null | null |
from . import initialize
initialize()
| 9.75
| 24
| 0.769231
| 4
| 39
| 7.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 39
| 3
| 25
| 13
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
f7a200923f83a4483d4af6a75e97c7eddd5aa21c
| 247
|
py
|
Python
|
h5bpstrap/views.py
|
dherbst/h5bpstrap
|
d62775c3ea18e763414570db4e095c8497f0550d
|
[
"Apache-2.0"
] | 1
|
2016-02-07T03:06:18.000Z
|
2016-02-07T03:06:18.000Z
|
h5bpstrap/views.py
|
dherbst/h5bpstrap
|
d62775c3ea18e763414570db4e095c8497f0550d
|
[
"Apache-2.0"
] | null | null | null |
h5bpstrap/views.py
|
dherbst/h5bpstrap
|
d62775c3ea18e763414570db4e095c8497f0550d
|
[
"Apache-2.0"
] | null | null | null |
from django.shortcuts import render_to_response
from django.template import RequestContext
def home(request, template='index.html'):
return render_to_response(
template,
{ },
context_instance=RequestContext(request))
| 24.7
| 49
| 0.732794
| 27
| 247
| 6.518519
| 0.62963
| 0.113636
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190283
| 247
| 9
| 50
| 27.444444
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0.04065
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.285714
| 0.142857
| 0.571429
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
f7c537a49fe124ff30730f25acd4b1f7cc184945
| 14,799
|
py
|
Python
|
fexm/docker_scripts/afl_base_image/afl_utils/tests/test_afl_sync.py
|
fgsect/fexm
|
cf213c9dea3778c09c1d475e6a16b9db78a6f1e6
|
[
"Apache-2.0"
] | 105
|
2018-08-09T22:13:59.000Z
|
2022-03-26T23:24:20.000Z
|
fexm/docker_scripts/afl_base_image/afl_utils/tests/test_afl_sync.py
|
DeadManINDIA/fexm
|
ca6629bbcbf79639871d3ec52bc2a7de9ae453a4
|
[
"Apache-2.0"
] | 13
|
2018-08-23T13:40:04.000Z
|
2022-03-11T23:28:00.000Z
|
fexm/docker_scripts/afl_base_image/afl_utils/tests/test_afl_sync.py
|
DeadManINDIA/fexm
|
ca6629bbcbf79639871d3ec52bc2a7de9ae453a4
|
[
"Apache-2.0"
] | 25
|
2018-08-09T21:56:12.000Z
|
2022-03-22T22:08:12.000Z
|
from afl_utils import afl_sync
from afl_utils.afl_sync import AflRsync
import os
import shutil
import unittest
class AflSyncTestCase(unittest.TestCase):
def setUp(self):
# Use to set up test environment prior to test case
# invocation
os.makedirs('testdata/rsync_tmp_store', exist_ok=True)
os.makedirs('testdata/sync/fuzz000/crashes', exist_ok=True)
os.makedirs('testdata/sync/fuzz000/hangs', exist_ok=True)
os.makedirs('testdata/sync/fuzz000/.cur_input', exist_ok=True)
os.makedirs('testdata/sync/fuzz001/.cur_input', exist_ok=True)
os.makedirs('testdata/sync/fuzz002.sync', exist_ok=True)
os.makedirs('testdata/sync/invalid_fuzz000', exist_ok=True)
os.makedirs('testdata/sync/invalid_fuzz001', exist_ok=True)
# push
os.makedirs('testdata/rsync_output_push', exist_ok=True)
# pull
os.makedirs('testdata/rsync_output_pull/fuzz000.sync', exist_ok=True)
os.makedirs('testdata/rsync_output_pull/fuzz001.sync', exist_ok=True)
os.makedirs('testdata/rsync_output_pull/other_fuzz000.sync', exist_ok=True)
os.makedirs('testdata/rsync_output_pull/other_fuzz000.sync/.cur_input', exist_ok=True)
os.makedirs('testdata/rsync_output_pull/other_fuzz000.sync/crashes', exist_ok=True)
os.makedirs('testdata/rsync_output_pull/other_fuzz001.sync', exist_ok=True)
os.makedirs('testdata/rsync_output_pull/other_fuzz001.sync/.cur_input', exist_ok=True)
os.makedirs('testdata/rsync_output_pull/other_invalid_fuzz000.sync', exist_ok=True)
# sync
os.makedirs('testdata/rsync_output_sync/other_fuzz000.sync', exist_ok=True)
os.makedirs('testdata/rsync_output_sync/other_fuzz001.sync', exist_ok=True)
os.makedirs('testdata/rsync_output_sync/other_invalid_fuzz000.sync', exist_ok=True)
def tearDown(self):
# Use for clean up after tests have run
self.clean_remove_dir('testdata/rsync_tmp_store')
self.clean_remove_dir('testdata/sync/fuzz000/crashes')
self.clean_remove_dir('testdata/sync/fuzz000/hangs')
self.clean_remove_dir('testdata/sync/fuzz000/.cur_input')
self.clean_remove_dir('testdata/sync/fuzz001/.cur_input')
self.clean_remove_dir('testdata/sync/fuzz002.sync')
self.clean_remove_dir('testdata/sync/invalid_fuzz000')
self.clean_remove_dir('testdata/sync/invalid_fuzz001')
self.clean_remove_dir('testdata/sync/fuzz000.sync')
self.clean_remove_dir('testdata/sync/fuzz001.sync')
self.clean_remove_dir('testdata/sync/other_fuzz000.sync')
self.clean_remove_dir('testdata/sync/other_fuzz001.sync')
self.clean_remove_dir('testdata/sync/other_invalid_fuzz000.sync')
self.clean_remove_dir('testdata/rsync_output_push')
self.clean_remove_dir('testdata/rsync_output_pull')
self.clean_remove_dir('testdata/rsync_output_sync')
self.clean_remove_dir('testdata/new_sync')
def clean_remove(self, file):
if os.path.exists(file):
os.remove(file)
def clean_remove_dir(self, dir):
if os.path.exists(dir):
shutil.rmtree(dir)
def test_show_info(self):
self.assertIsNone(afl_sync.show_info())
def test_afl_rsync_init(self):
server_config = {
'remote_path': 'testdata/rsync_output',
}
fuzzer_config = {
'sync_dir': 'testdata/sync',
'session': 'fuzz',
'exclude_crashes': True,
'exclude_hangs': True,
}
afl_rsync = AflRsync(server_config, fuzzer_config)
self.assertDictEqual(server_config, afl_rsync.server_config)
self.assertDictEqual(fuzzer_config, afl_rsync.fuzzer_config)
def test_afl_rsync_prepare_sync_command(self):
afl_rsync = AflRsync(None, None)
expected_put_cmdline = [
'rsync',
afl_sync._rsync_default_options[0],
'--exclude=\"exclude\"',
'src/',
'dst.sync/'
]
expected_get_cmdline = [
'rsync',
afl_sync._rsync_default_options[0],
'--exclude=\"exclude\"',
'dst/*',
'src/'
]
self.assertListEqual(expected_put_cmdline, afl_rsync._AflRsync__prepare_rsync_commandline('src', 'dst',
rsync_excludes=[
'exclude']))
self.assertListEqual(expected_get_cmdline, afl_rsync._AflRsync__prepare_rsync_commandline('src', 'dst',
rsync_excludes=[
'exclude'],
rsync_get=True))
def test_afl_rsync_invoke_rsync(self):
rsync_cmdline = ['rsync', '--help']
afl_rsync = AflRsync(None, None)
self.assertTrue(afl_rsync._AflRsync__invoke_rsync(rsync_cmdline))
self.assertFalse(afl_rsync._AflRsync__invoke_rsync(['rsync']))
def test_afl_rsync_get_fuzzers(self):
fuzzer_config = {
'sync_dir': 'testdata/sync',
'session': 'fuzz',
'exclude_crashes': True,
'exclude_hangs': True,
}
expected_fuzzers = [
'fuzz000',
'fuzz001',
'invalid_fuzz000',
'invalid_fuzz001'
]
afl_rsync = AflRsync(None, fuzzer_config)
self.assertListEqual(sorted(expected_fuzzers), sorted(afl_rsync._AflRsync__get_fuzzers()))
def test_afl_rsync_put(self):
local_path = 'testdata/sync/fuzz000'
remote_path = 'testdata/rsync_tmp_store/fuzz000'
excludes = ['crashes*/', 'hangs*/']
afl_rsync = AflRsync(None, None)
self.assertTrue(afl_rsync.rsync_put(local_path, remote_path, rsync_excludes=excludes))
self.assertTrue(os.path.exists(remote_path + '.sync/fuzzer_stats'))
self.assertTrue(os.path.exists(remote_path + '.sync/.cur_input'))
self.assertFalse(os.path.exists(remote_path + '.sync/crashes'))
self.assertFalse(os.path.exists(remote_path + '.sync/hangs'))
def test_afl_rsync_get(self):
local_path = 'testdata/rsync_tmp_store/fuzz000_get'
remote_path = 'testdata/sync/fuzz000'
excludes = ['crashes*/', 'hangs*/']
afl_rsync = AflRsync(None, None)
self.assertTrue(afl_rsync.rsync_get(remote_path, local_path, rsync_excludes=excludes))
self.assertTrue(os.path.exists(local_path + '/fuzzer_stats'))
self.assertFalse(os.path.exists(local_path + '/crashes'))
self.assertFalse(os.path.exists(local_path + '/hangs'))
def test_afl_rsync_push(self):
server_config = {
'remote_path': 'testdata/rsync_output_push',
}
fuzzer_config = {
'sync_dir': 'testdata/sync',
'session': 'fuzz',
'exclude_crashes': True,
'exclude_hangs': True,
}
afl_rsync = AflRsync(server_config, fuzzer_config)
self.assertIsNone(afl_rsync.push())
self.assertTrue(os.path.exists('testdata/rsync_output_push/fuzz000.sync'))
self.assertFalse(os.path.exists('testdata/rsync_output_push/fuzz000.sync/.cur_input'))
self.assertTrue(os.path.exists('testdata/rsync_output_push/fuzz001.sync'))
self.assertFalse(os.path.exists('testdata/rsync_output_push/fuzz000.sync/.cur_input'))
self.assertFalse(os.path.exists('testdata/rsync_output_push/fuzz002.sync'))
self.assertFalse(os.path.exists('testdata/rsync_output_push/fuzz002.sync.sync'))
self.assertFalse(os.path.exists('testdata/rsync_output_push/invalid_fuzz000.sync'))
self.assertFalse(os.path.exists('testdata/rsync_output_push/invalid_fuzz001.sync'))
def test_afl_rsync_pull_session(self):
server_config = {
'remote_path': 'testdata/rsync_output_pull',
}
fuzzer_config = {
'sync_dir': 'testdata/sync',
'session': 'other_fuzz',
'exclude_crashes': True,
'exclude_hangs': True,
}
afl_rsync = AflRsync(server_config, fuzzer_config)
self.assertIsNone(afl_rsync.pull())
self.assertTrue(os.path.exists('testdata/sync/other_fuzz000.sync'))
self.assertTrue(os.path.exists('testdata/sync/other_fuzz000.sync/crashes'))
self.assertFalse(os.path.exists('testdata/sync/other_fuzz000.sync/.cur_input'))
self.assertTrue(os.path.exists('testdata/sync/other_fuzz001.sync'))
self.assertFalse(os.path.exists('testdata/sync/other_fuzz001.sync/.cur_input'))
self.assertFalse(os.path.exists('testdata/sync/other_invalid_fuzz000.sync'))
self.assertFalse(os.path.exists('testdata/sync/fuzz000.sync'))
self.assertFalse(os.path.exists('testdata/sync/fuzz001.sync'))
def test_afl_rsync_pull_all(self):
server_config = {
'remote_path': 'testdata/rsync_output_pull',
}
fuzzer_config = {
'sync_dir': 'testdata/sync',
'session': None,
'exclude_crashes': True,
'exclude_hangs': True,
}
afl_rsync = AflRsync(server_config, fuzzer_config)
self.assertIsNone(afl_rsync.pull())
self.assertTrue(os.path.exists('testdata/sync/other_fuzz000.sync'))
self.assertTrue(os.path.exists('testdata/sync/other_fuzz001.sync'))
self.assertFalse(os.path.exists('testdata/sync/other_fuzz000.sync/.cur_input'))
self.assertFalse(os.path.exists('testdata/sync/other_fuzz001.sync/.cur_input'))
self.assertTrue(os.path.exists('testdata/sync/other_invalid_fuzz000.sync'))
self.assertFalse(os.path.exists('testdata/sync/fuzz000.sync'))
self.assertFalse(os.path.exists('testdata/sync/fuzz001.sync'))
def test_afl_rsync_sync(self):
server_config = {
'remote_path': 'testdata/rsync_output_sync',
}
fuzzer_config = {
'sync_dir': 'testdata/sync',
'session': None,
'exclude_crashes': True,
'exclude_hangs': True,
}
afl_rsync = AflRsync(server_config, fuzzer_config)
self.assertIsNone(afl_rsync.sync())
# pull assertions
self.assertTrue(os.path.exists('testdata/sync/other_fuzz000.sync'))
self.assertTrue(os.path.exists('testdata/sync/other_fuzz001.sync'))
self.assertTrue(os.path.exists('testdata/sync/other_invalid_fuzz000.sync'))
self.assertFalse(os.path.exists('testdata/sync/fuzz000.sync'))
self.assertFalse(os.path.exists('testdata/sync/fuzz001.sync'))
# push assertions
self.assertTrue(os.path.exists('testdata/rsync_output_sync/fuzz000.sync'))
self.assertTrue(os.path.exists('testdata/rsync_output_sync/fuzz001.sync'))
self.assertFalse(os.path.exists('testdata/rsync_output_sync/fuzz002.sync'))
self.assertFalse(os.path.exists('testdata/rsync_output_sync/fuzz002.sync.sync'))
self.assertTrue(os.path.exists('testdata/rsync_output_sync/invalid_fuzz000.sync'))
self.assertTrue(os.path.exists('testdata/rsync_output_sync/invalid_fuzz001.sync'))
def test_main(self):
argv = [
'afl-sync'
]
with self.assertRaises(SystemExit):
self.assertIsNone(afl_sync.main(argv))
argv = [
'afl-sync',
'put',
'src',
'dst'
]
with self.assertRaises(SystemExit) as e:
afl_sync.main(argv)
self.assertEqual(1, e.exception.code)
argv = [
'afl-sync',
'push',
'testdata/new_sync',
'testdata/rsync_output_push'
]
with self.assertRaises(SystemExit) as e:
afl_sync.main(argv)
self.assertEqual(1, e.exception.code)
argv = [
'afl-sync',
'pull',
'testdata/new_sync',
'testdata/rsync_output_pull'
]
self.assertIsNone(afl_sync.main(argv))
argv = [
'afl-sync',
'push',
'testdata/sync',
'testdata/rsync_output_push'
]
self.assertIsNone(afl_sync.main(argv))
self.assertTrue(os.path.exists('testdata/rsync_output_push/fuzz000.sync'))
self.assertTrue(os.path.exists('testdata/rsync_output_push/fuzz001.sync'))
self.assertFalse(os.path.exists('testdata/rsync_output_push/fuzz002.sync'))
self.assertFalse(os.path.exists('testdata/rsync_output_push/fuzz002.sync.sync'))
self.assertTrue(os.path.exists('testdata/rsync_output_push/invalid_fuzz000.sync'))
self.assertTrue(os.path.exists('testdata/rsync_output_push/invalid_fuzz001.sync'))
argv = [
'afl-sync',
'pull',
'testdata/sync',
'testdata/rsync_output_pull'
]
self.assertIsNone(afl_sync.main(argv))
self.assertTrue(os.path.exists('testdata/sync/other_fuzz000.sync'))
self.assertTrue(os.path.exists('testdata/sync/other_fuzz001.sync'))
self.assertTrue(os.path.exists('testdata/sync/other_invalid_fuzz000.sync'))
self.assertFalse(os.path.exists('testdata/sync/fuzz000.sync'))
self.assertFalse(os.path.exists('testdata/sync/fuzz001.sync'))
argv = [
'afl-sync',
'sync',
'testdata/sync',
'testdata/rsync_output_sync'
]
self.assertIsNone(afl_sync.main(argv))
# pull assertions
self.assertTrue(os.path.exists('testdata/sync/other_fuzz000.sync'))
self.assertTrue(os.path.exists('testdata/sync/other_fuzz001.sync'))
self.assertTrue(os.path.exists('testdata/sync/other_invalid_fuzz000.sync'))
self.assertFalse(os.path.exists('testdata/sync/fuzz000.sync'))
self.assertFalse(os.path.exists('testdata/sync/fuzz001.sync'))
# push assertions
self.assertTrue(os.path.exists('testdata/rsync_output_sync/fuzz000.sync'))
self.assertTrue(os.path.exists('testdata/rsync_output_sync/fuzz001.sync'))
self.assertFalse(os.path.exists('testdata/rsync_output_sync/fuzz002.sync'))
self.assertFalse(os.path.exists('testdata/rsync_output_sync/fuzz002.sync.sync'))
self.assertTrue(os.path.exists('testdata/rsync_output_sync/invalid_fuzz000.sync'))
self.assertTrue(os.path.exists('testdata/rsync_output_sync/invalid_fuzz001.sync'))
| 43.27193
| 114
| 0.636124
| 1,706
| 14,799
| 5.267292
| 0.06272
| 0.043401
| 0.086802
| 0.124638
| 0.855998
| 0.826508
| 0.794235
| 0.73047
| 0.659025
| 0.617294
| 0
| 0.025454
| 0.24076
| 14,799
| 341
| 115
| 43.398827
| 0.774297
| 0.01196
| 0
| 0.507042
| 0
| 0
| 0.313942
| 0.257614
| 0
| 0
| 0
| 0
| 0.306338
| 1
| 0.056338
| false
| 0
| 0.017606
| 0
| 0.077465
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7915cff64745275a3956da0146279cb771b473cb
| 50
|
py
|
Python
|
src/app/coupon/coupon_not_found_exception.py
|
daniellima/desafio-lojaintegrada
|
3cb17f4ec9769472e111adfe6d550e668518aabd
|
[
"MIT"
] | null | null | null |
src/app/coupon/coupon_not_found_exception.py
|
daniellima/desafio-lojaintegrada
|
3cb17f4ec9769472e111adfe6d550e668518aabd
|
[
"MIT"
] | null | null | null |
src/app/coupon/coupon_not_found_exception.py
|
daniellima/desafio-lojaintegrada
|
3cb17f4ec9769472e111adfe6d550e668518aabd
|
[
"MIT"
] | null | null | null |
class CouponNotFoundException(Exception):
pass
| 25
| 41
| 0.82
| 4
| 50
| 10.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12
| 50
| 2
| 42
| 25
| 0.931818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
f70a7c97aa3c694dfc2fdc8eb7fb9de62211e209
| 119
|
py
|
Python
|
carbon/client/metrics/__init__.py
|
mosquito/carbonate
|
5eca69602b9fc03dc0b982f9104c7ebb04159059
|
[
"MIT"
] | 2
|
2017-12-21T15:40:12.000Z
|
2018-02-07T10:00:14.000Z
|
carbon/client/metrics/__init__.py
|
mosquito/carbonate
|
5eca69602b9fc03dc0b982f9104c7ebb04159059
|
[
"MIT"
] | 2
|
2016-12-02T08:53:48.000Z
|
2016-12-05T21:46:04.000Z
|
carbon/client/metrics/__init__.py
|
mosquito/carbonate
|
5eca69602b9fc03dc0b982f9104c7ebb04159059
|
[
"MIT"
] | 5
|
2015-07-22T14:31:28.000Z
|
2020-09-30T08:20:29.000Z
|
from .timer import Timer
from .simple import Counter
from .heartbeat import HeartBeat
from .collector import Collector
| 23.8
| 32
| 0.831933
| 16
| 119
| 6.1875
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134454
| 119
| 4
| 33
| 29.75
| 0.961165
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f7338eebb28d83e5ed91ee4013c8eac11bcbfae5
| 352
|
py
|
Python
|
utils.py
|
schorrm/arm2riscv
|
5fa28e28d920705b660874a03b9906fae710b442
|
[
"MIT"
] | 8
|
2020-07-07T13:08:26.000Z
|
2022-03-29T23:12:37.000Z
|
utils.py
|
schorrm/arm2riscv
|
5fa28e28d920705b660874a03b9906fae710b442
|
[
"MIT"
] | 2
|
2020-04-05T07:17:22.000Z
|
2021-06-27T22:33:25.000Z
|
utils.py
|
schorrm/arm2riscv
|
5fa28e28d920705b660874a03b9906fae710b442
|
[
"MIT"
] | 1
|
2021-06-19T12:38:45.000Z
|
2021-06-19T12:38:45.000Z
|
#!/usr/bin/python3
class InstructionNotRecognized(Exception):
''' Exception to throw when an instruction does not have defined conversion code '''
pass
reg_labels = """ .section .tdata
REG_BANK:
.dword 0
.dword 0
.dword 0
.dword 0
.dword 0
.dword 0
.dword 0
.dword 0
"""
| 19.555556
| 88
| 0.5625
| 41
| 352
| 4.780488
| 0.609756
| 0.244898
| 0.392857
| 0.428571
| 0.244898
| 0.244898
| 0.244898
| 0.244898
| 0.244898
| 0.244898
| 0
| 0.03913
| 0.346591
| 352
| 18
| 89
| 19.555556
| 0.813043
| 0.269886
| 0
| 0.615385
| 0
| 0
| 0.677291
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.076923
| 0
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
f746bad1d975630dd21180afa88f5600bd13a5f5
| 113
|
py
|
Python
|
openjij/__init__.py
|
y-yu/OpenJij
|
ed08460b7c9f8e553d4d33e08977d465472e9c44
|
[
"Apache-2.0"
] | null | null | null |
openjij/__init__.py
|
y-yu/OpenJij
|
ed08460b7c9f8e553d4d33e08977d465472e9c44
|
[
"Apache-2.0"
] | null | null | null |
openjij/__init__.py
|
y-yu/OpenJij
|
ed08460b7c9f8e553d4d33e08977d465472e9c44
|
[
"Apache-2.0"
] | null | null | null |
from .sampler import Sampler, SASampler, SQASampler
from .model import BinaryQuadraticModel
from .utils import *
| 28.25
| 51
| 0.823009
| 13
| 113
| 7.153846
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123894
| 113
| 3
| 52
| 37.666667
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f74e1dbac059ada44b9193a81557d3a22a5a92ba
| 1,673
|
py
|
Python
|
rest_api/migrations/0041_auto_20210304_1653.py
|
InspectorIncognito/gtfs-editor
|
4e3245f44ec44aeb2d28aa25786dc95a3193fb81
|
[
"MIT"
] | 2
|
2021-10-01T16:11:20.000Z
|
2022-01-15T10:55:40.000Z
|
rest_api/migrations/0041_auto_20210304_1653.py
|
InspectorIncognito/gtfs-editor
|
4e3245f44ec44aeb2d28aa25786dc95a3193fb81
|
[
"MIT"
] | 3
|
2021-06-10T19:17:55.000Z
|
2022-03-05T08:37:46.000Z
|
rest_api/migrations/0041_auto_20210304_1653.py
|
InspectorIncognito/gtfs-editor
|
4e3245f44ec44aeb2d28aa25786dc95a3193fb81
|
[
"MIT"
] | 1
|
2022-03-05T08:37:53.000Z
|
2022-03-05T08:37:53.000Z
|
# Generated by Django 3.1.3 on 2021-03-04 19:53
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rest_api', '0040_project_loading_gtfs_job_id'),
]
operations = [
migrations.AlterField(
model_name='calendar',
name='end_date',
field=models.DateField(default=False),
),
migrations.AlterField(
model_name='calendar',
name='friday',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='calendar',
name='monday',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='calendar',
name='saturday',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='calendar',
name='start_date',
field=models.DateField(default=False),
),
migrations.AlterField(
model_name='calendar',
name='sunday',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='calendar',
name='thursday',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='calendar',
name='tuesday',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='calendar',
name='wednesday',
field=models.BooleanField(default=False),
),
]
| 28.355932
| 57
| 0.548715
| 142
| 1,673
| 6.34507
| 0.330986
| 0.199778
| 0.249723
| 0.289678
| 0.761376
| 0.722531
| 0.677026
| 0.677026
| 0.677026
| 0.677026
| 0
| 0.017179
| 0.338912
| 1,673
| 58
| 58
| 28.844828
| 0.797468
| 0.026898
| 0
| 0.692308
| 1
| 0
| 0.110701
| 0.01968
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.019231
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f76708d7471aa5d570bd5ddef8657c98378d9b65
| 55
|
py
|
Python
|
oekaki/__init__.py
|
y-tetsu/oekaki
|
9b48d0e5ecc0f7dfc6cf49b569a2f42495658990
|
[
"MIT"
] | null | null | null |
oekaki/__init__.py
|
y-tetsu/oekaki
|
9b48d0e5ecc0f7dfc6cf49b569a2f42495658990
|
[
"MIT"
] | null | null | null |
oekaki/__init__.py
|
y-tetsu/oekaki
|
9b48d0e5ecc0f7dfc6cf49b569a2f42495658990
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
from oekaki.oekaki import Oekaki
| 18.333333
| 32
| 0.781818
| 9
| 55
| 4.777778
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109091
| 55
| 2
| 33
| 27.5
| 0.877551
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f76d5ef8075eb53c2cb8081f782722a8e8cd32a6
| 119
|
py
|
Python
|
services/web/test.py
|
PersonofNote/flask-react-postgres-boilerplate
|
f482df1923e6a31e862bad48c6d8727c3884d3ea
|
[
"MIT",
"PostgreSQL",
"Unlicense"
] | null | null | null |
services/web/test.py
|
PersonofNote/flask-react-postgres-boilerplate
|
f482df1923e6a31e862bad48c6d8727c3884d3ea
|
[
"MIT",
"PostgreSQL",
"Unlicense"
] | null | null | null |
services/web/test.py
|
PersonofNote/flask-react-postgres-boilerplate
|
f482df1923e6a31e862bad48c6d8727c3884d3ea
|
[
"MIT",
"PostgreSQL",
"Unlicense"
] | null | null | null |
import unittest
from . import *
class HomePageTest(unittest.TestCase):
def test_home_page(self):
pass
| 17
| 38
| 0.689076
| 14
| 119
| 5.714286
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.235294
| 119
| 7
| 39
| 17
| 0.879121
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.2
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
e3c75b876987290261f96570c6cdc98ce81f7fee
| 286
|
py
|
Python
|
lession_01/sorting.py
|
room89/algorithms
|
51b3256245cdaa227566402afee4078d9b3f8a1a
|
[
"MIT"
] | null | null | null |
lession_01/sorting.py
|
room89/algorithms
|
51b3256245cdaa227566402afee4078d9b3f8a1a
|
[
"MIT"
] | null | null | null |
lession_01/sorting.py
|
room89/algorithms
|
51b3256245cdaa227566402afee4078d9b3f8a1a
|
[
"MIT"
] | null | null | null |
def bubble_sort(iterable):
return sorted(iterable)
def selection_sort(iterable):
return sorted(iterable)
def insertion_sort(iterable):
return sorted(iterable)
def merge_sort(iterable):
return sorted(iterable)
def quicksort(iterable):
return sorted(iterable)
| 15.052632
| 29
| 0.744755
| 34
| 286
| 6.147059
| 0.294118
| 0.334928
| 0.478469
| 0.669856
| 0.669856
| 0.669856
| 0
| 0
| 0
| 0
| 0
| 0
| 0.167832
| 286
| 18
| 30
| 15.888889
| 0.878151
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
e3d3a63be79e3fec92c3ae4ec836e460108d1823
| 154
|
py
|
Python
|
mainflux/__init__.py
|
molodoj88/mainfluxpy
|
7895b2b76dabe232effe8a1350ca8c1a31bf050e
|
[
"MIT"
] | 1
|
2020-04-23T17:32:45.000Z
|
2020-04-23T17:32:45.000Z
|
mainflux/__init__.py
|
molodoj88/mainfluxpy
|
7895b2b76dabe232effe8a1350ca8c1a31bf050e
|
[
"MIT"
] | 1
|
2020-06-09T17:56:30.000Z
|
2020-06-09T17:56:30.000Z
|
mainflux/__init__.py
|
molodoj88/mainfluxpy
|
7895b2b76dabe232effe8a1350ca8c1a31bf050e
|
[
"MIT"
] | 1
|
2021-06-30T05:54:31.000Z
|
2021-06-30T05:54:31.000Z
|
from .app import MainfluxApp
from .thing import Thing
from .transport import MQTT, HTTP, COAP
__ALL__ = ['MainfluxApp', 'Thing', 'MQTT', 'HTTP', 'COAP']
| 25.666667
| 58
| 0.714286
| 20
| 154
| 5.3
| 0.5
| 0.150943
| 0.226415
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 154
| 5
| 59
| 30.8
| 0.80303
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e3f8d81e2c492135968930d47f0a2a32a96fc345
| 354
|
py
|
Python
|
src/homework/services/__init__.py
|
denkasyanov/education-backend
|
c796b6f2f1cc1cd09f83cab2ca0cc45344906ef5
|
[
"MIT"
] | 62
|
2021-09-22T18:38:26.000Z
|
2022-03-29T06:09:42.000Z
|
src/homework/services/__init__.py
|
denkasyanov/education-backend
|
c796b6f2f1cc1cd09f83cab2ca0cc45344906ef5
|
[
"MIT"
] | 50
|
2021-09-16T07:17:31.000Z
|
2022-03-26T12:06:58.000Z
|
src/homework/services/__init__.py
|
denkasyanov/education-backend
|
c796b6f2f1cc1cd09f83cab2ca0cc45344906ef5
|
[
"MIT"
] | 16
|
2021-10-17T17:43:31.000Z
|
2022-03-26T11:22:45.000Z
|
from homework.services.answer_crosscheck_dispatcher import AnswerCrossCheckDispatcher
from homework.services.new_answer_notifier import NewAnswerNotifier
from homework.services.question_crosscheck_dispatcher import QuestionCrossCheckDispatcher
__all__ = [
'AnswerCrossCheckDispatcher',
'NewAnswerNotifier',
'QuestionCrossCheckDispatcher',
]
| 35.4
| 89
| 0.858757
| 28
| 354
| 10.5
| 0.5
| 0.122449
| 0.204082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090395
| 354
| 9
| 90
| 39.333333
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0.200565
| 0.152542
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.375
| 0
| 0.375
| 0
| 1
| 0
| 1
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
5419165c6f2defa2c5f8e159ea42cdeddc8d5c77
| 1,306
|
py
|
Python
|
module1-introduction-to-sql/rpg_queries.py
|
adphelps/DS-Unit-3-Sprint-2-SQL-and-Databases
|
d8f2b6df9a20d4d140e23722f6f41968fc4813d8
|
[
"MIT"
] | null | null | null |
module1-introduction-to-sql/rpg_queries.py
|
adphelps/DS-Unit-3-Sprint-2-SQL-and-Databases
|
d8f2b6df9a20d4d140e23722f6f41968fc4813d8
|
[
"MIT"
] | null | null | null |
module1-introduction-to-sql/rpg_queries.py
|
adphelps/DS-Unit-3-Sprint-2-SQL-and-Databases
|
d8f2b6df9a20d4d140e23722f6f41968fc4813d8
|
[
"MIT"
] | null | null | null |
import sqlite3
conn = sqlite3.connect('rpg_db.sqlite3')
cur = conn.cursor()
cur.execute('SELECT * FROM charactercreator_character')
cur.fetchall()
cur.execute('SELECT * FROM charactercreator_cleric')
cur.fetchall()
cur.execute('SELECT * FROM charactercreator_fighter')
cur.fetchall()
cur.execute('SELECT * FROM charactercreator_mage')
cur.fetchall()
cur.execute('SELECT * FROM charactercreator_necromancer')
cur.fetchall()
cur.execute('SELECT * FROM charactercreator_thief')
cur.fetchall()
cur.execute('SELECT * FROM armory_item')
cur.fetchall()
cur.execute('SELECT * FROM armory_weapon')
cur.fetchall()
cur.execute('SELECT * FROM charactercreator_character_inventory LIMIT 20')
cur.fetchall()
query = """
SELECT * FROM charactercreator_character_inventory WHERE id IN (
SELECT item_ptr_id FROM armory_weapon
) LIMIT 20
"""
cur.execute(query)
cur.fetchall()
query = """
SELECT AVG(c)
FROM
(
SELECT COUNT(*) c
FROM charactercreator_character_inventory
GROUP BY character_id
)
"""
cur.execute(query)
cur.fetchall()
query = """
SELECT AVG(c)
FROM
(
SELECT COUNT(*) c
FROM charactercreator_character_inventory
WHERE id IN (
SELECT item_ptr_id
FROM armory_weapon
)
GROUP BY character_id
)
"""
cur.execute()
cur.fetchall()
| 22.912281
| 75
| 0.722818
| 160
| 1,306
| 5.74375
| 0.21875
| 0.130577
| 0.156692
| 0.195865
| 0.847661
| 0.831338
| 0.722524
| 0.335147
| 0.335147
| 0.335147
| 0
| 0.006399
| 0.162328
| 1,306
| 56
| 76
| 23.321429
| 0.833638
| 0
| 0
| 0.555556
| 0
| 0
| 0.612
| 0.2304
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.018519
| 0
| 0.018519
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5859867bfe543b89b7e78101cb9fb17f5a606426
| 17,328
|
py
|
Python
|
tick/survival/simu_coxreg.py
|
sumau/tick
|
1b56924a35463e12f7775bc0aec182364f26f2c6
|
[
"BSD-3-Clause"
] | 411
|
2017-03-30T15:22:05.000Z
|
2022-03-27T01:58:34.000Z
|
tick/survival/simu_coxreg.py
|
saurabhdash/tick
|
bbc561804eb1fdcb4c71b9e3e2d83a66e7b13a48
|
[
"BSD-3-Clause"
] | 345
|
2017-04-13T14:53:20.000Z
|
2022-03-26T00:46:22.000Z
|
tick/survival/simu_coxreg.py
|
saurabhdash/tick
|
bbc561804eb1fdcb4c71b9e3e2d83a66e7b13a48
|
[
"BSD-3-Clause"
] | 102
|
2017-04-25T11:47:53.000Z
|
2022-02-15T11:45:49.000Z
|
# License: BSD 3 clause
import numpy as np
from tick.base.simulation import SimuWithFeatures
from tick.preprocessing.features_binarizer import FeaturesBinarizer
# TODO: something better to tune the censoring level than this censoring factor
class SimuCoxReg(SimuWithFeatures):
"""Simulation of a Cox regression for proportional hazards
Parameters
----------
coeffs : `numpy.ndarray`, shape=(n_coeffs,)
The array of coefficients of the model
features : `numpy.ndarray`, shape=(n_samples, n_features), default=`None`
The features matrix to use. If None, it is simulated
n_samples : `int`, default=200
Number of samples
times_distribution : `str`, default="weibull"
The distrubution of times. Only ``"weibull"``
is implemented for now
scale : `float`, default=1.0
Scaling parameter to use in the distribution of times
shape : `float`, default=1.0
Shape parameter to use in the distribution of times
censoring_factor : `float`, default=2.0
Level of censoring. Increasing censoring_factor leads
to less censored times and conversely.
features_type : `str`, default="cov_toeplitz"
The type of features matrix to simulate
* If ``"cov_toeplitz"`` : a Gaussian distribution with
Toeplitz correlation matrix
* If ``"cov_uniform"`` : a Gaussian distribution with
correlation matrix given by O.5 * (U + U.T), where U is
uniform on [0, 1] and diagonal filled with ones.
cov_corr : `float`, default=0.5
Correlation to use in the Toeplitz correlation matrix
features_scaling : `str`, default="none"
The way the features matrix is scaled after simulation
* If ``"standard"`` : the columns are centered and
normalized
* If ``"min-max"`` : remove the minimum and divide by
max-min
* If ``"norm"`` : the columns are normalized but not centered
* If ``"none"`` : nothing is done to the features
seed : `int`, default=None
The seed of the random number generator. If `None` it is not
seeded
verbose : `bool`, default=True
If True, print things
Attributes
----------
features : `numpy.ndarray`, shape=(n_samples, n_features)
The simulated (or given) features matrix
times : `numpy.ndarray`, shape=(n_samples,)
Simulated times
censoring : `numpy.ndarray`, shape=(n_samples,)
Simulated censoring indicator, where ``censoring[i] == 1``
indicates that the time of the i-th individual is a failure
time, and where ``censoring[i] == 0`` means that the time of
the i-th individual is a censoring time
time_start : `str`
Start date of the simulation
time_elapsed : `int`
Duration of the simulation, in seconds
time_end : `str`
End date of the simulation
dtype : `{'float64', 'float32'}`, default='float64'
Type of the generated arrays.
Used in the case features is None
Notes
-----
There is no intercept in this model
"""
_attrinfos = {
"times": {
"writable": False
},
"censoring": {
"writable": False
},
"_times_distribution": {
"writable": False
},
"_scale": {
"writable": False
},
"_shape": {
"writable": False
}
}
def __init__(self, coeffs: np.ndarray,
features: np.ndarray = None, n_samples: int = 200,
times_distribution: str = "weibull",
shape: float = 1., scale: float = 1.,
censoring_factor: float = 2.,
features_type: str = "cov_toeplitz",
cov_corr: float = 0.5, features_scaling: str = "none",
seed: int = None, verbose: bool = True, dtype="float64"):
n_features = coeffs.shape[0]
# intercept=None in this model
SimuWithFeatures.__init__(self, None, features, n_samples,
n_features, features_type, cov_corr,
features_scaling, seed, verbose, dtype=dtype)
self.coeffs = coeffs
self.shape = shape
self.scale = scale
self.censoring_factor = censoring_factor
self.times_distribution = times_distribution
self.features = None
self.times = None
self.censoring = None
def simulate(self):
"""Launch simulation of the data
Returns
-------
features : `numpy.ndarray`, shape=(n_samples, n_features)
The simulated (or given) features matrix
times : `numpy.ndarray`, shape=(n_samples,)
Simulated times
censoring : `numpy.ndarray`, shape=(n_samples,)
Simulated censoring indicator, where ``censoring[i] == 1``
indicates that the time of the i-th individual is a failure
time, and where ``censoring[i] == 0`` means that the time of
the i-th individual is a censoring time
"""
return SimuWithFeatures.simulate(self)
@property
def times_distribution(self):
return self._times_distribution
@times_distribution.setter
def times_distribution(self, val):
if val != "weibull":
raise ValueError("``times_distribution`` was not "
"understood, try using 'weibull' instead")
self._set("_times_distribution", val)
@property
def shape(self):
return self._shape
@shape.setter
def shape(self, val):
if val <= 0:
raise ValueError("``shape`` must be strictly positive")
self._set("_shape", val)
@property
def scale(self):
return self._scale
@scale.setter
def scale(self, val):
if val <= 0:
raise ValueError("``scale`` must be strictly positive")
self._set("_scale", val)
def _simulate(self):
# The features matrix already exists, and is created by the
# super class
features = self.features
n_samples, n_features = features.shape
u = features.dot(self.coeffs)
# Simulation of true times
E = np.random.exponential(scale=1., size=n_samples)
E *= np.exp(-u)
scale = self.scale
shape = self.shape
if self.times_distribution == "weibull":
T = 1. / scale * E ** (1. / shape)
else:
# There is not point in this test, but let's do it like that
# since we're likely to implement other distributions
T = 1. / scale * E ** (1. / shape)
m = T.mean()
# Simulation of the censoring
c = self.censoring_factor
C = np.random.exponential(scale=c * m, size=n_samples)
# Observed time
self._set("times", np.minimum(T, C).astype(self.dtype))
# Censoring indicator: 1 if it is a time of failure, 0 if it's
# censoring. It is as int8 and not bool as we might need to
# construct a memory access on it later
censoring = (T <= C).astype(np.ushort)
self._set("censoring", censoring)
return self.features, self.times, self.censoring
def _as_dict(self):
dd = SimuWithFeatures._as_dict(self)
dd.pop("features", None)
dd.pop("times", None)
dd.pop("censoring", None)
return dd
class SimuCoxRegWithCutPoints(SimuWithFeatures):
"""Simulation of a Cox regression for proportional hazards with cut-points
effects in the features
Parameters
----------
features : `numpy.ndarray`, shape=(n_samples, n_features), default=`None`
The features matrix to use. If None, it is simulated
n_samples : `int`, default=200
Number of samples
n_features : `int`, default=5
Number of features
times_distribution : `str`, default="weibull"
The distrubution of times. Only ``"weibull"``
is implemented for now
scale : `float`, default=1.0
Scaling parameter to use in the distribution of times
shape : `float`, default=1.0
Shape parameter to use in the distribution of times
censoring_factor : `float`, default=2.0
Level of censoring. Increasing censoring_factor leads
to less censored times and conversely.
features_type : `str`, default="cov_toeplitz"
The type of features matrix to simulate
* If ``"cov_toeplitz"`` : a Gaussian distribution with
Toeplitz correlation matrix
* If ``"cov_uniform"`` : a Gaussian distribution with
correlation matrix given by O.5 * (U + U.T), where U is
uniform on [0, 1] and diagonal filled with ones.
cov_corr : `float`, default=0.5
Correlation to use in the Toeplitz correlation matrix
features_scaling : `str`, default="none"
The way the features matrix is scaled after simulation
* If ``"standard"`` : the columns are centered and
normalized
* If ``"min-max"`` : remove the minimum and divide by
max-min
* If ``"norm"`` : the columns are normalized but not centered
* If ``"none"`` : nothing is done to the features
seed : `int`, default=None
The seed of the random number generator. If `None` it is not
seeded
verbose : `bool`, default=True
If True, print things
n_cut_points : `int`, default="none"
Number of cut-points generated per feature. If `None` it is sampled from
a geometric distribution of parameter n_cut_points_factor.
n_cut_points_factor : `float`, default=0.7
Parameter of the geometric distribution used to generate the number of
cut-points when n_cut_points is `None`. Increasing n_cut_points_factor
leads to less cut-points per feature on average.
sparsity : `float`, default=0
Percentage of block sparsity induced in the coefficient vector. Must be
in [0, 1].
Attributes
----------
features : `numpy.ndarray`, shape=(n_samples, n_features)
The simulated (or given) features matrix
times : `numpy.ndarray`, shape=(n_samples,)
Simulated times
censoring : `numpy.ndarray`, shape=(n_samples,)
Simulated censoring indicator, where ``censoring[i] == 1``
indicates that the time of the i-th individual is a failure
time, and where ``censoring[i] == 0`` means that the time of
the i-th individual is a censoring time
Notes
-----
There is no intercept in this model
"""
_attrinfos = {
"times": {
"writable": False
},
"censoring": {
"writable": False
},
"_times_distribution": {
"writable": False
},
"_scale": {
"writable": False
},
"_shape": {
"writable": False
},
"_sparsity": {
"writable": False
}
}
def __init__(self, features: np.ndarray = None, n_samples: int = 200,
n_features: int = 5, n_cut_points: int = None,
n_cut_points_factor: float = .7,
times_distribution: str = "weibull",
shape: float = 1., scale: float = 1.,
censoring_factor: float = 2.,
features_type: str = "cov_toeplitz",
cov_corr: float = 0.5, features_scaling: str = "none",
seed: int = None, verbose: bool = True, sparsity=0):
# intercept=None in this model
SimuWithFeatures.__init__(self, None, features, n_samples,
n_features, features_type, cov_corr,
features_scaling, seed, verbose)
self.shape = shape
self.scale = scale
self.censoring_factor = censoring_factor
self.times_distribution = times_distribution
self.n_cut_points = n_cut_points
self.n_cut_points_factor = n_cut_points_factor
self.sparsity = sparsity
self.features = None
self.times = None
self.censoring = None
def simulate(self):
"""Launch simulation of the data
Returns
-------
features : `numpy.ndarray`, shape=(n_samples, n_features)
The simulated (or given) features matrix
times : `numpy.ndarray`, shape=(n_samples,)
Simulated times
censoring : `numpy.ndarray`, shape=(n_samples,)
Simulated censoring indicator, where ``censoring[i] == 1``
indicates that the time of the i-th individual is a failure
time, and where ``censoring[i] == 0`` means that the time of
the i-th individual is a censoring time
"""
return SimuWithFeatures.simulate(self)
@property
def times_distribution(self):
return self._times_distribution
@times_distribution.setter
def times_distribution(self, val):
if val != "weibull":
raise ValueError("``times_distribution`` was not "
"understood, try using 'weibull' instead")
self._set("_times_distribution", val)
@property
def shape(self):
return self._shape
@shape.setter
def shape(self, val):
if val <= 0:
raise ValueError("``shape`` must be strictly positive")
self._set("_shape", val)
@property
def scale(self):
return self._scale
@scale.setter
def scale(self, val):
if val <= 0:
raise ValueError("``scale`` must be strictly positive")
self._set("_scale", val)
@property
def sparsity(self):
return self._sparsity
@sparsity.setter
def sparsity(self, val):
if not 0 <= val <= 1:
raise ValueError("``sparsity`` must be in (0, 1)")
self._set("_sparsity", val)
def _simulate(self):
# The features matrix already exists, and is created by the
# super class
features = self.features
n_samples, n_features = features.shape
# Simulation of cut-points
n_cut_points = self.n_cut_points
n_cut_points_factor = self.n_cut_points_factor
sparsity = self.sparsity
s = round(n_features * sparsity)
# sparsity index set
S = np.random.choice(n_features, s, replace=False)
if n_cut_points is None:
n_cut_points = np.random.geometric(n_cut_points_factor, n_features)
else:
n_cut_points = np.repeat(n_cut_points, n_features)
cut_points = {}
coeffs_binarized = np.array([])
for j in range(n_features):
feature_j = features[:, j]
quantile_cuts = np.linspace(10, 90, 10)
candidates = np.percentile(feature_j, quantile_cuts,
interpolation="nearest")
cut_points_j = np.random.choice(candidates, n_cut_points[j],
replace=False)
cut_points_j = np.sort(cut_points_j)
cut_points_j = np.insert(cut_points_j, 0, -np.inf)
cut_points_j = np.append(cut_points_j, np.inf)
cut_points[str(j)] = cut_points_j
# generate beta star
if j in S:
coeffs_block = np.zeros(n_cut_points[j] + 1)
else:
coeffs_block = np.random.normal(1, .5, n_cut_points[j] + 1)
# make sure 2 consecutive coeffs are different enough
coeffs_block = np.abs(coeffs_block)
coeffs_block[::2] *= -1
# sum-to-zero constraint in each block
coeffs_block = coeffs_block - coeffs_block.mean()
coeffs_binarized = np.append(coeffs_binarized, coeffs_block)
binarizer = FeaturesBinarizer(method='given',
bins_boundaries=cut_points)
binarized_features = binarizer.fit_transform(features)
u = binarized_features.dot(coeffs_binarized)
# Simulation of true times
E = np.random.exponential(scale=1., size=n_samples)
E *= np.exp(-u)
scale = self.scale
shape = self.shape
if self.times_distribution == "weibull":
T = 1. / scale * E ** (1. / shape)
else:
# There is not point in this test, but let's do it like that
# since we're likely to implement other distributions
T = 1. / scale * E ** (1. / shape)
m = T.mean()
# Simulation of the censoring
c = self.censoring_factor
C = np.random.exponential(scale=c * m, size=n_samples)
# Observed time
self._set("times", np.minimum(T, C).astype(self.dtype))
# Censoring indicator: 1 if it is a time of failure, 0 if censoring.
censoring = (T <= C).astype(np.ushort)
self._set("censoring", censoring)
return self.features, self.times, self.censoring, cut_points, \
coeffs_binarized, S
def _as_dict(self):
dd = SimuWithFeatures._as_dict(self)
dd.pop("features", None)
dd.pop("times", None)
dd.pop("censoring", None)
return dd
| 33.84375
| 80
| 0.591009
| 2,080
| 17,328
| 4.794712
| 0.132692
| 0.036097
| 0.023062
| 0.027073
| 0.778101
| 0.755841
| 0.755841
| 0.754738
| 0.742304
| 0.722852
| 0
| 0.009089
| 0.314231
| 17,328
| 511
| 81
| 33.90998
| 0.830178
| 0.431614
| 0
| 0.669565
| 0
| 0
| 0.080674
| 0.004843
| 0
| 0
| 0
| 0.001957
| 0
| 1
| 0.095652
| false
| 0
| 0.013043
| 0.030435
| 0.182609
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5868eacfc40097456bf91fed7ab000d79bd948fc
| 664
|
py
|
Python
|
tests/test_ppjson.py
|
jiamo/ppjson
|
940b785b4ac51994990ad3bab20dc556fd523e73
|
[
"MIT"
] | 1
|
2019-04-21T17:59:18.000Z
|
2019-04-21T17:59:18.000Z
|
tests/test_ppjson.py
|
jiamo/ppjson
|
940b785b4ac51994990ad3bab20dc556fd523e73
|
[
"MIT"
] | 2
|
2018-05-16T00:54:32.000Z
|
2021-03-25T21:56:45.000Z
|
tests/test_ppjson.py
|
jiamo/ppjson
|
940b785b4ac51994990ad3bab20dc556fd523e73
|
[
"MIT"
] | null | null | null |
import simplejson as json
from ppjson import ppjson
# def test_array_null_empty():
# assert json.loads('[null, 1, "1", {}]') == ppjson.loads('[null, 1, "1", {}]')
def test_array_true():
assert json.loads('[true]') == ppjson.loads('[true]')
def test_array():
assert json.loads('[1]') == ppjson.loads('[1]')
def test_dict():
assert json.loads('{"1":1}') == ppjson.loads('{"1":1}')
def test_empty_dict():
assert json.loads('{ }') == ppjson.loads('{ }')
def test_number():
assert json.loads("1") == 1 == ppjson.loads("1")
assert json.loads('1') == 1 == ppjson.loads('1')
assert json.loads('1.1') == 1.1 == ppjson.loads('1.1')
| 24.592593
| 83
| 0.585843
| 96
| 664
| 3.947917
| 0.177083
| 0.05277
| 0.316623
| 0.171504
| 0.316623
| 0.316623
| 0.274406
| 0.274406
| 0.197889
| 0.197889
| 0
| 0.039711
| 0.165663
| 664
| 26
| 84
| 25.538462
| 0.644404
| 0.165663
| 0
| 0
| 0
| 0
| 0.087591
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.357143
| true
| 0
| 0.142857
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5893082d625877f8a82f2bc2567d4eeb93974755
| 376
|
py
|
Python
|
PurBeurre/tests/test_url.py
|
Jinr0h404/projet10
|
8d344c6257a7f9436d2ead0a7c12a4da05991fc1
|
[
"OML"
] | null | null | null |
PurBeurre/tests/test_url.py
|
Jinr0h404/projet10
|
8d344c6257a7f9436d2ead0a7c12a4da05991fc1
|
[
"OML"
] | null | null | null |
PurBeurre/tests/test_url.py
|
Jinr0h404/projet10
|
8d344c6257a7f9436d2ead0a7c12a4da05991fc1
|
[
"OML"
] | null | null | null |
from django.urls import resolve
def test_index_url():
"""Check if the name of the view is correct and that the URL matches the name of the view."""
assert resolve("/").view_name == "index"
def test_legal_url():
"""Check if the name of the view is correct and that the URL matches the name of the view."""
assert resolve("/legal").view_name == "home-legal"
| 31.333333
| 97
| 0.691489
| 63
| 376
| 4.031746
| 0.365079
| 0.110236
| 0.141732
| 0.188976
| 0.661417
| 0.661417
| 0.661417
| 0.661417
| 0.661417
| 0.661417
| 0
| 0
| 0.196809
| 376
| 11
| 98
| 34.181818
| 0.84106
| 0.465426
| 0
| 0
| 0
| 0
| 0.115789
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.4
| true
| 0
| 0.2
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
5466f880ad9065e9600a6a1d557d5270af5ecae3
| 241
|
py
|
Python
|
inscribete/queryset.py
|
Kiri23/DECE-Backend-Project
|
f488277bf294a4421c86efa512927e6d0f3255d6
|
[
"MIT"
] | null | null | null |
inscribete/queryset.py
|
Kiri23/DECE-Backend-Project
|
f488277bf294a4421c86efa512927e6d0f3255d6
|
[
"MIT"
] | 13
|
2019-05-24T21:13:59.000Z
|
2022-03-11T23:45:53.000Z
|
inscribete/queryset.py
|
Kiri23/DECE-Backend-Project
|
f488277bf294a4421c86efa512927e6d0f3255d6
|
[
"MIT"
] | null | null | null |
from django.db import models
class InscribeteQueryset(models.QuerySet):
def porEstudiante(self, estudianteId):
return self.filter(estudiante__pk=estudianteId)
def costoTotalPorEstudiante(self, estudianteId):
pass
| 21.909091
| 55
| 0.751037
| 24
| 241
| 7.458333
| 0.75
| 0.178771
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.178423
| 241
| 10
| 56
| 24.1
| 0.90404
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.166667
| 0.166667
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 5
|
546bbdae6d3c8fb58ade9578420855244c0799c6
| 369
|
py
|
Python
|
fnet/data/__init__.py
|
HelmholtzAI-Consultants-Munich/pytorch_fnet
|
879784bd0f8e76ab8f0ed8de4235180a316e12d8
|
[
"Unlicense"
] | null | null | null |
fnet/data/__init__.py
|
HelmholtzAI-Consultants-Munich/pytorch_fnet
|
879784bd0f8e76ab8f0ed8de4235180a316e12d8
|
[
"Unlicense"
] | null | null | null |
fnet/data/__init__.py
|
HelmholtzAI-Consultants-Munich/pytorch_fnet
|
879784bd0f8e76ab8f0ed8de4235180a316e12d8
|
[
"Unlicense"
] | null | null | null |
from fnet.data.czidataset import CziDataset
from fnet.data.bufferedpatchdataset import BufferedPatchDataset
from fnet.data.dummychunkdataset import DummyChunkDataset
from fnet.data.tiffdataset import TiffDataset
from fnet.data.nd2dataset import ND2Dataset
from fnet.data.allpatchesdataset import AllPatchesDataset
from fnet.data.combineddatasets import ConcatDataset
| 36.9
| 63
| 0.880759
| 42
| 369
| 7.738095
| 0.285714
| 0.172308
| 0.258462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0059
| 0.081301
| 369
| 9
| 64
| 41
| 0.952802
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
49aec3bdc7396453aa29ff51934a26882b79635e
| 112
|
py
|
Python
|
tests/test_nothing.py
|
jayvdb/tox-constraints
|
e8c4faa455e5adcba59cec978c88290600cd40b1
|
[
"MIT"
] | 1
|
2021-06-04T15:59:24.000Z
|
2021-06-04T15:59:24.000Z
|
tests/test_nothing.py
|
jayvdb/tox-constraints
|
e8c4faa455e5adcba59cec978c88290600cd40b1
|
[
"MIT"
] | 2
|
2019-04-04T16:06:47.000Z
|
2020-12-09T02:04:45.000Z
|
tests/test_nothing.py
|
jayvdb/tox-constraints
|
e8c4faa455e5adcba59cec978c88290600cd40b1
|
[
"MIT"
] | 1
|
2020-12-08T11:18:27.000Z
|
2020-12-08T11:18:27.000Z
|
"""pytest is unhappy if it finds no tests"""
def test_nothing():
    """Placeholder so the suite always collects at least one test."""
| 18.666667
| 44
| 0.660714
| 18
| 112
| 4.055556
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.205357
| 112
| 5
| 45
| 22.4
| 0.820225
| 0.651786
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| true
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
49c31b159a274a094b2c58b188ddb9938e49c1f9
| 224
|
py
|
Python
|
ex11_sd3.py
|
jlaw8504/lp3thw
|
e3e5c559a0d851547acda6387eb5ff9034549e48
|
[
"BSD-3-Clause"
] | null | null | null |
ex11_sd3.py
|
jlaw8504/lp3thw
|
e3e5c559a0d851547acda6387eb5ff9034549e48
|
[
"BSD-3-Clause"
] | null | null | null |
ex11_sd3.py
|
jlaw8504/lp3thw
|
e3e5c559a0d851547acda6387eb5ff9034549e48
|
[
"BSD-3-Clause"
] | null | null | null |
# Prompt the user for three answers (bridge-keeper style questionnaire).
name = input("What is your name?")
quest = input("What is your quest?")
color = input("What is your favorite color?")
# Echo all three answers back in one multi-line f-string.
print(f"So your name is {name}.\nYou seek to {quest}.\nYour favorite color is {color}.\nYou may cross!")
| 44.8
| 104
| 0.691964
| 38
| 224
| 4.078947
| 0.447368
| 0.174194
| 0.212903
| 0.290323
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151786
| 224
| 4
| 105
| 56
| 0.815789
| 0
| 0
| 0
| 0
| 0.25
| 0.709821
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
49d4256a04355282fd05393354f554e357746c98
| 287
|
py
|
Python
|
src/prefect/tasks/prefect/__init__.py
|
vnsn/prefect
|
972345597975155dba9e3232bcc430d0a6258a37
|
[
"Apache-2.0"
] | 1
|
2021-05-12T12:47:12.000Z
|
2021-05-12T12:47:12.000Z
|
src/prefect/tasks/prefect/__init__.py
|
vnsn/prefect
|
972345597975155dba9e3232bcc430d0a6258a37
|
[
"Apache-2.0"
] | 7
|
2021-06-26T08:05:20.000Z
|
2022-03-26T08:05:32.000Z
|
src/prefect/tasks/prefect/__init__.py
|
vnsn/prefect
|
972345597975155dba9e3232bcc430d0a6258a37
|
[
"Apache-2.0"
] | 2
|
2021-03-03T17:46:43.000Z
|
2021-03-05T15:39:35.000Z
|
"""
Tasks for interacting with the Prefect API
"""
from prefect.tasks.prefect.flow_run import FlowRunTask, StartFlowRun
from prefect.tasks.prefect.flow_run_rename import RenameFlowRunTask, RenameFlowRun
from prefect.tasks.prefect.flow_run_cancel import CancelFlowRunTask, CancelFlowRun
| 35.875
| 82
| 0.850174
| 36
| 287
| 6.638889
| 0.527778
| 0.138075
| 0.200837
| 0.288703
| 0.376569
| 0.376569
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087108
| 287
| 7
| 83
| 41
| 0.912214
| 0.146341
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
b7381bd689e01bed46655a5373354fbfe4b31b62
| 10,100
|
py
|
Python
|
tests/unit/search/test_search.py
|
dastra/hargreaves-sdk-python
|
8099d775c6a70ac415690c0322fe1b964356f6ff
|
[
"MIT"
] | null | null | null |
tests/unit/search/test_search.py
|
dastra/hargreaves-sdk-python
|
8099d775c6a70ac415690c0322fe1b964356f6ff
|
[
"MIT"
] | null | null | null |
tests/unit/search/test_search.py
|
dastra/hargreaves-sdk-python
|
8099d775c6a70ac415690c0322fe1b964356f6ff
|
[
"MIT"
] | null | null | null |
import http
from pathlib import Path
import pytest
from hargreaves.search.clients import parse_search_results, SecuritySearchClient, security_filter
from hargreaves.search.errors import SearchFilterError
from hargreaves.search.models import InvestmentTypes, SearchResult, InvestmentCategoryTypes
from hargreaves.utils import clock
from hargreaves.utils.logs import LogHelper
from requests_tracker.mocks import MockWebSession
# Module-level test setup: send logs to stdout and freeze the clock so the
# epoch-time-based request params below are deterministic under the mocks.
LogHelper.configure_std_out()
clock.freeze_time()
class SearchResultBuilder():
    """Fluent builder for SearchResult fixtures used by the filter tests.

    Call one of the ``with_*`` configurators, then ``build()``.
    """
    _stock_ticker: str
    _security_name: str
    _sedol_code: str
    _internet_allowed: bool
    _category: str

    def __init__(self):
        pass

    def _populate(self, stock_ticker: str, security_name: str, sedol_code: str, category: str):
        # Shared by the with_* configurators: only the category differs.
        self._stock_ticker = stock_ticker
        self._security_name = security_name
        self._sedol_code = sedol_code
        self._internet_allowed = True
        self._category = category
        return self

    def with_overseas(self, stock_ticker: str, security_name: str, sedol_code: str):
        """Configure an overseas security; returns self for chaining."""
        return self._populate(stock_ticker, security_name, sedol_code,
                              InvestmentCategoryTypes.OVERSEAS)

    def with_equity(self, stock_ticker: str, security_name: str, sedol_code: str):
        """Configure an equity security; returns self for chaining."""
        return self._populate(stock_ticker, security_name, sedol_code,
                              InvestmentCategoryTypes.EQUITIES)

    def build(self) -> SearchResult:
        """Materialise the configured SearchResult.

        Raises AttributeError if no ``with_*`` configurator was called first.
        """
        return SearchResult(
            stock_ticker=self._stock_ticker,
            security_name=self._security_name,
            sedol_code=self._sedol_code,
            internet_allowed=self._internet_allowed,
            category=self._category
        )
def test_submit_search_request_for_shares():
    """Searching for SHARES only sends every other investment type in the exclusion filter."""
    search_string = 'GOOG'
    investment_types = [InvestmentTypes.SHARES]
    # Everything except SHARES is joined into the 'filters' (exclusion) param.
    excl_investment_types = ",".join([InvestmentTypes.OVERSEAS, InvestmentTypes.FUNDS, InvestmentTypes.ETFS,
                                      InvestmentTypes.BONDS_AND_GILTS])
    search_results_found_jsonp = Path(Path(__file__).parent / 'files/search-results-found.jsonp').read_text()
    with MockWebSession() as web_session:
        # The mock responds only when the client sends exactly these
        # params/headers, so this doubles as a request-shape assertion.
        web_session.mock_get(
            url='https://online.hl.co.uk/ajaxx/stocks.php',
            params={
                # clock is frozen at module level, so both epoch values are deterministic.
                'callback': f"jsonp{clock.get_current_time_as_epoch_time(offset_seconds=-10)}",
                'pid': clock.get_current_time_as_epoch_time(),
                'sq': search_string,
                'filters': excl_investment_types,
                'offset': 0,
                'instance': '',
                'format': 'jsonp'
            },
            headers={
                'Referer': 'https://online.hl.co.uk/my-accounts/stock_and_fund_search/action/deal'
            },
            response_text=search_results_found_jsonp,
            status_code=http.HTTPStatus.OK
        )
        client = SecuritySearchClient()
        search_results = client.investment_search(
            web_session=web_session,
            search_string=search_string,
            investment_types=investment_types)
        # The canned fixture contains two results.
        assert len(search_results) == 2
def test_submit_search_request_for_all():
    """Searching with ALL investment types sends an empty exclusion filter."""
    search_string = 'GOOG'
    investment_types = InvestmentTypes.ALL
    search_results_found_jsonp = Path(Path(__file__).parent / 'files/search-results-found.jsonp').read_text()
    with MockWebSession() as web_session:
        # Identical request shape to the SHARES test except 'filters' is empty:
        # nothing is excluded when every type is requested.
        web_session.mock_get(
            url='https://online.hl.co.uk/ajaxx/stocks.php',
            params={
                'callback': f"jsonp{clock.get_current_time_as_epoch_time(offset_seconds=-10)}",
                'pid': clock.get_current_time_as_epoch_time(),
                'sq': search_string,
                'filters': '',
                'offset': 0,
                'instance': '',
                'format': 'jsonp'
            },
            headers={
                'Referer': 'https://online.hl.co.uk/my-accounts/stock_and_fund_search/action/deal'
            },
            response_text=search_results_found_jsonp,
            status_code=http.HTTPStatus.OK
        )
        client = SecuritySearchClient()
        search_results = client.investment_search(
            web_session=web_session,
            search_string=search_string,
            investment_types=investment_types)
        assert len(search_results) == 2
def test_parse_search_results_found():
    """parse_search_results maps each entry of the canned jsonp onto a result object."""
    search_results_found_jsonp = Path(Path(__file__).parent / 'files/search-results-found.jsonp').read_text()
    search_results = parse_search_results(search_results_found_jsonp)
    assert len(search_results) == 2
    # Spot-check every field of the first fixture entry (Alphabet).
    goog = search_results[0]
    assert goog.stock_ticker == 'GOOG'
    assert goog.security_name == 'Alphabet Inc NPV C'
    assert goog.sedol_code == 'BYY88Y7'
    assert goog.internet_allowed
    assert goog.category == 'O'
def test_parse_search_results_found_without_stock_ticker():
    """A fixture entry lacking an explicit ticker still parses (ticker resolved to 'PDG')."""
    search_results_found_jsonp = Path(Path(__file__).parent / 'files/search-results-found-without-stock-ticker.jsonp') \
        .read_text()
    search_results = parse_search_results(search_results_found_jsonp)
    assert len(search_results) == 1
    pdg = search_results[0]
    assert pdg.stock_ticker == 'PDG'
    assert pdg.security_name == 'Pendragon Ordinary 5p'
    assert pdg.sedol_code == 'B1JQBT1'
    assert pdg.internet_allowed
    assert pdg.category == 'E'
def test_parse_search_results_not_found():
    """A no-results payload parses to an empty list."""
    payload = Path(Path(__file__).parent / 'files/search-results-not-found.jsonp').read_text()
    assert len(parse_search_results(payload)) == 0
def test_security_filter_by_stock_ticker():
    """security_filter narrows three candidates to the one exact ticker match."""
    # Three securities whose tickers all contain 'FB' — only one matches exactly.
    search_results = [
        SearchResultBuilder().with_overseas(stock_ticker='FB',
                                            security_name='Meta Platforms Inc Com USD0.000006',
                                            sedol_code='B7TL820').build(),
        SearchResultBuilder().with_equity(stock_ticker='FBH',
                                          security_name='FBD Holdings plc Ordinary EUR0.60',
                                          sedol_code='0329028').build(),
        SearchResultBuilder().with_equity(stock_ticker='2FB',
                                          security_name='Leverage Shares Plc 2X Facebook ETP 03/04/67 GBP',
                                          sedol_code='BYX84Z1').build()
    ]
    assert security_filter(search_results=search_results, stock_ticker='FB').stock_ticker == 'FB'
    # NOTE(review): disabled assertion below expects None for a miss, but
    # test_security_filter_invalid_cases shows misses raise SearchFilterError.
    # assert security_filter(search_results=search_results, stock_ticker='BLAH') is None
def test_security_filter_by_sedol_code():
    """security_filter can also select by SEDOL code alone."""
    search_results = [
        SearchResultBuilder().with_overseas(stock_ticker='FB',
                                            security_name='Meta Platforms Inc Com USD0.000006',
                                            sedol_code='B7TL820').build(),
        SearchResultBuilder().with_equity(stock_ticker='FBH',
                                          security_name='FBD Holdings plc Ordinary EUR0.60',
                                          sedol_code='0329028').build(),
        SearchResultBuilder().with_equity(stock_ticker='2FB',
                                          security_name='Leverage Shares Plc 2X Facebook ETP 03/04/67 GBP',
                                          sedol_code='BYX84Z1').build()
    ]
    # '0329028' belongs to the FBH entry.
    assert security_filter(search_results=search_results, sedol_code='0329028').stock_ticker == 'FBH'
    # NOTE(review): disabled assertion expects None for a miss; current
    # behaviour (see invalid-cases test) is to raise SearchFilterError.
    # assert security_filter(search_results=search_results, sedol_code='BLAH') is None
def test_security_filter_by_stock_ticker_and_sedol_code():
    """Supplying both ticker and SEDOL filters to the single entry matching both."""
    search_results = [
        SearchResultBuilder().with_overseas(stock_ticker='FB',
                                            security_name='Meta Platforms Inc Com USD0.000006',
                                            sedol_code='B7TL820').build(),
        SearchResultBuilder().with_equity(stock_ticker='FBH',
                                          security_name='FBD Holdings plc Ordinary EUR0.60',
                                          sedol_code='0329028').build(),
        SearchResultBuilder().with_equity(stock_ticker='2FB',
                                          security_name='Leverage Shares Plc 2X Facebook ETP 03/04/67 GBP',
                                          sedol_code='BYX84Z1').build()
    ]
    assert security_filter(search_results=search_results, stock_ticker='FB', sedol_code='B7TL820').stock_ticker == 'FB'
def test_security_filter_invalid_cases():
    """security_filter raises SearchFilterError when it narrows to zero or to multiple results."""
    search_results = [
        SearchResultBuilder().with_overseas(stock_ticker='FB',
                                            security_name='Meta Platforms Inc Com USD0.000006',
                                            sedol_code='B7TL820').build(),
        SearchResultBuilder().with_equity(stock_ticker='FBH',
                                          security_name='FBD Holdings plc Ordinary EUR0.60',
                                          sedol_code='0329028').build(),
        SearchResultBuilder().with_equity(stock_ticker='2FB',
                                          security_name='Leverage Shares Plc 2X Facebook ETP 03/04/67 GBP',
                                          sedol_code='BYX84Z1').build(),
        # Deliberate duplicate ticker '2FB' to provoke the ambiguous-match case.
        SearchResultBuilder().with_overseas(stock_ticker='2FB',
                                            security_name='Dummy Duplicate',
                                            sedol_code='123456').build(),
    ]
    # Zero matches: ticker matches but SEDOL does not.
    with pytest.raises(SearchFilterError, match='Could not find security, results filtered to 0'):
        security_filter(search_results=search_results, stock_ticker='FB', sedol_code='XXX')
    # Zero matches: unknown ticker.
    with pytest.raises(SearchFilterError, match='Could not find security, results filtered to 0'):
        security_filter(search_results=search_results, stock_ticker='XXX')
    # Zero matches: unknown SEDOL.
    with pytest.raises(SearchFilterError, match='Could not find security, results filtered to 0'):
        security_filter(search_results=search_results, sedol_code='XXX')
    # Ambiguous: two entries share the '2FB' ticker.
    with pytest.raises(SearchFilterError, match='Could not find security, results filtered to 2'):
        security_filter(search_results=search_results, stock_ticker='2FB')
| 42.259414
| 120
| 0.626436
| 1,068
| 10,100
| 5.59176
| 0.166667
| 0.121902
| 0.048225
| 0.050067
| 0.782652
| 0.768754
| 0.730743
| 0.723208
| 0.703617
| 0.67649
| 0
| 0.023439
| 0.281881
| 10,100
| 238
| 121
| 42.436975
| 0.799945
| 0.016139
| 0
| 0.543011
| 0
| 0
| 0.153715
| 0.031307
| 0
| 0
| 0
| 0
| 0.096774
| 1
| 0.069892
| false
| 0.005376
| 0.048387
| 0.005376
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b73821d775466e5fce884a9393c703ba7fca3c11
| 1,213
|
py
|
Python
|
src/dcos_e2e_cli/common/options/cluster_size.py
|
jongiddy/dcos-e2e
|
b52ef9a1097a8fb328902064345cc6c8b0bf5779
|
[
"Apache-2.0"
] | 63
|
2018-05-17T21:02:14.000Z
|
2021-11-15T19:18:03.000Z
|
src/dcos_e2e_cli/common/options/cluster_size.py
|
jongiddy/dcos-e2e
|
b52ef9a1097a8fb328902064345cc6c8b0bf5779
|
[
"Apache-2.0"
] | 225
|
2017-09-08T02:24:58.000Z
|
2018-05-16T12:18:58.000Z
|
src/dcos_e2e_cli/common/options/cluster_size.py
|
jongiddy/dcos-e2e
|
b52ef9a1097a8fb328902064345cc6c8b0bf5779
|
[
"Apache-2.0"
] | 21
|
2018-06-14T21:58:24.000Z
|
2021-11-15T19:18:06.000Z
|
"""
Options for choosing the cluster size.
"""
from typing import Callable
import click
def _node_count_option(flag: str, help_text: str):
    """Build a click option decorator for an integer node-count flag (default 1, shown)."""
    def decorator(command: Callable[..., None]) -> Callable[..., None]:
        function = click.option(
            flag,
            type=click.INT,
            default=1,
            show_default=True,
            help=help_text,
        )(command)  # type: Callable[..., None]
        return function
    return decorator


def masters_option(command: Callable[..., None]) -> Callable[..., None]:
    """
    An option decorator for the number of masters.
    """
    return _node_count_option('--masters', 'The number of master nodes.')(command)


def agents_option(command: Callable[..., None]) -> Callable[..., None]:
    """
    An option decorator for the number of agents.
    """
    return _node_count_option('--agents', 'The number of agent nodes.')(command)


def public_agents_option(command: Callable[..., None]) -> Callable[..., None]:
    """
    An option decorator for the number of public agents.
    """
    # Fixed copy-paste docstring: this option counts *public* agents.
    return _node_count_option('--public-agents', 'The number of public agent nodes.')(command)
| 24.26
| 78
| 0.58615
| 135
| 1,213
| 5.214815
| 0.251852
| 0.153409
| 0.09375
| 0.106534
| 0.809659
| 0.809659
| 0.809659
| 0.809659
| 0.607955
| 0.607955
| 0
| 0.003382
| 0.268755
| 1,213
| 49
| 79
| 24.755102
| 0.790304
| 0.211047
| 0
| 0.62069
| 0
| 0
| 0.13082
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.103448
| false
| 0
| 0.068966
| 0
| 0.275862
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3f7838214bd435f76b66a4ee36131d51bfed7e9e
| 7,734
|
py
|
Python
|
tests/test_environment/test_nk_model.py
|
mesjou/human-frictions
|
2a5c919039bb29643a3e8dd36c0fa13ce7d93e0e
|
[
"MIT"
] | null | null | null |
tests/test_environment/test_nk_model.py
|
mesjou/human-frictions
|
2a5c919039bb29643a3e8dd36c0fa13ce7d93e0e
|
[
"MIT"
] | null | null | null |
tests/test_environment/test_nk_model.py
|
mesjou/human-frictions
|
2a5c919039bb29643a3e8dd36c0fa13ce7d93e0e
|
[
"MIT"
] | null | null | null |
import pytest
from human_friction.environment.new_keynes import NewKeynesMarket
def test_clear_labor_market():
    """Clearing the labor market pays wages into budgets, produces output and moves inflation."""
    config = {"episode_length": 20, "n_agents": 2, "init_budget": 0.0}
    # agent-0 asks a much higher wage than agent-1.
    wages = {"agent-0": 0.5627646, "agent-1": 0.0175226}
    env = NewKeynesMarket(config)
    env.reset()
    env.clear_labor_market(wages)
    # initial labor demand is 1 for each agent
    assert env.agents["agent-0"].budget > env.agents["agent-1"].budget
    for agent in env.agents.values():
        assert agent.labor == 1.0
    assert env.firm.production > 0.0
    assert env.inflation != 0.0
    assert env.unemployment == 0.0
def test_get_unemployment():
    """Unemployment tracks firm labor demand vs. the 4 agents; invalid demand raises."""
    config = {"episode_length": 20, "n_agents": 4, "init_budget": 0.0}
    env = NewKeynesMarket(config)
    env.reset()
    assert env.get_unemployment() == 0.0
    # Demand of 2 with 4 agents -> half unemployed.
    env.firm.labor_demand = 2.0
    assert env.get_unemployment() == 0.5
    with pytest.raises(Exception):
        # NOTE(review): if get_unemployment raises here the inner assert is
        # never evaluated; presumably zero demand is expected to raise —
        # confirm whether 0.0 should instead return 1.0.
        env.firm.labor_demand = 0.0
        assert env.get_unemployment() == 1.0
    with pytest.raises(Exception):
        # Negative labor demand is expected to raise.
        env.firm.labor_demand = -1.0
        env.get_unemployment()
def test_clear_goods_market():
    """Goods-market clearing over four scenarios: no supply, full supply, and two budget-constrained cases."""
    config = {"episode_length": 20, "n_agents": 2, "init_budget": 20.0}
    env = NewKeynesMarket(config)
    env.reset()
    env.firm.price = 1.0
    # agent-0 demands 0.5 units, agent-1 demands 0.3.
    demand = {"agent-0": 0.5, "agent-1": 0.3}
    # nothing to sell
    env.firm.production = 0.0
    env.firm.average_profit = 1.0
    env.clear_goods_market(demand)
    assert env.agents["agent-0"].budget == env.agents["agent-1"].budget
    assert env.agents["agent-0"].budget == 20.0
    assert env.firm.profit == 0.0
    # Labor demand shrinks by 1% when nothing sells.
    assert env.firm.labor_demand == 2 * 0.99
    # production of 1 to sell
    env.firm.production = 1.0
    env.clear_goods_market(demand)
    assert env.agents["agent-0"].budget < env.agents["agent-1"].budget
    assert env.agents["agent-0"].budget < 20.0
    assert env.agents["agent-1"].budget < 20.0
    assert env.agents["agent-0"].consumption > env.agents["agent-1"].consumption
    assert env.firm.profit == 0.8
    # Labor demand grows by 1% after sales.
    assert env.firm.labor_demand == 2 * 0.99 * 1.01
    # agent-0 has no budget
    env.agents["agent-0"].budget = 0.0
    env.firm.production = 1.0
    env.clear_goods_market(demand)
    # Budget can go negative: the full demanded quantity is still consumed.
    assert env.agents["agent-0"].budget == -0.5
    assert env.agents["agent-1"].budget < 20.0
    assert env.agents["agent-1"].budget > 0.0
    assert env.agents["agent-0"].consumption == 0.5
    assert env.agents["agent-1"].consumption == 0.3
    assert env.firm.profit == 0.8
    # agent-0 has not enough budget
    env.agents["agent-0"].budget = 0.22
    env.firm.price = 0.5
    env.firm.production = 1.0
    env.clear_goods_market(demand)
    # NOTE(review): exact float equality on -0.03 is fragile (0.22 - 0.25 is
    # not exactly representable); consider pytest.approx here.
    assert env.agents["agent-0"].budget == -0.03
    assert env.agents["agent-1"].budget < 20.0
    assert env.agents["agent-1"].budget > 0.0
    assert env.agents["agent-0"].consumption == 0.5
    assert env.agents["agent-1"].consumption == 0.3
    assert env.firm.profit == 0.4
def test_clear_dividends():
    """Positive profits are split equally per agent; zero/negative payouts leave budgets unchanged."""
    config = {"episode_length": 20, "n_agents": 2, "init_budget": 0.0}
    env = NewKeynesMarket(config)
    env.reset()
    # 10.0 profit over 2 agents -> 5.0 each.
    env.clear_dividends(10.0)
    for agent in env.agents.values():
        assert agent.budget == 5.0
    # Losses are not passed on to agents.
    env.clear_dividends(-10.0)
    for agent in env.agents.values():
        assert agent.budget == 0.0
    env.clear_dividends(0.0)
    for agent in env.agents.values():
        assert agent.budget == 0.0
    # Same split with 10 agents -> 1.0 each.
    config = {"episode_length": 20, "n_agents": 10, "init_budget": 0.0}
    env = NewKeynesMarket(config)
    env.reset()
    env.clear_dividends(10.0)
    for agent in env.agents.values():
        assert agent.budget == 1.0
def test_clear_capital_market():
    """Interest paid on budgets responds to inflation and unemployment deviations from target."""
    # target is reached and interest = 1.02
    config = {"episode_length": 20, "n_agents": 2, "init_budget": 2.0}
    env = NewKeynesMarket(config)
    env.reset()
    env.inflation = 0.02
    env.unemployment = 0.0
    env.clear_capital_market()
    # 2.0 budget at the baseline 1.02 gross rate -> 2.04.
    for agent in env.agents.values():
        assert agent.budget == 2.04
    # inflation too high thus interest > 1.02
    config = {"episode_length": 20, "n_agents": 2, "init_budget": 2.0}
    env = NewKeynesMarket(config)
    env.reset()
    env.inflation = 0.03
    env.unemployment = 0.0
    env.clear_capital_market()
    for agent in env.agents.values():
        assert agent.budget > 2.04
    # inflation too low: interest < 1.02
    config = {"episode_length": 20, "n_agents": 2, "init_budget": 2.0}
    env = NewKeynesMarket(config)
    env.reset()
    env.inflation = 0.01
    env.unemployment = 0.0
    env.clear_capital_market()
    for agent in env.agents.values():
        assert agent.budget < 2.04
    # unemployment too high thus interest < 1.02
    config = {"episode_length": 20, "n_agents": 2, "init_budget": 2.0}
    env = NewKeynesMarket(config)
    env.reset()
    env.inflation = 0.02
    env.unemployment = 0.5
    env.clear_capital_market()
    for agent in env.agents.values():
        assert agent.budget < 2.04
    # unemployment too low: interest > 1.02
    config = {"episode_length": 20, "n_agents": 2, "init_budget": 2.0}
    env = NewKeynesMarket(config)
    env.reset()
    # Unemployment below the bank's natural rate pushes the rate up.
    env.central_bank.natural_unemployment = 0.2
    env.inflation = 0.02
    env.unemployment = 0.1
    env.clear_capital_market()
    for agent in env.agents.values():
        assert agent.budget > 2.04
def test_compute_rewards():
    """Rewards fall with labor when labor is costly, and ignore labor when its weight is zero."""
    # labor comes with disutility
    config = {"episode_length": 20, "n_agents": 2, "labor_coefficient": 1.0}
    env = NewKeynesMarket(config)
    env.reset()
    for agent in env.agents.values():
        agent.labor = 1.0
        agent.consumption = 1.0
    rew = env.compute_rewards()
    # With labor_coefficient 1.0, disutility outweighs consumption utility.
    for agent_id in env.agents.keys():
        assert rew[agent_id] < 0.0
    for agent in env.agents.values():
        agent.labor = 0.0
        agent.consumption = 0.0
    rew = env.compute_rewards()
    for agent_id in env.agents.keys():
        assert rew[agent_id] == 0.0
    # Same consumption but increasing labor -> strictly lower reward.
    i = 1.0
    for agent in env.agents.values():
        agent.labor = i
        agent.consumption = 1.0
        i += 1.0
    rew = env.compute_rewards()
    agent_ids = list(env.agents.keys())
    assert rew[agent_ids[0]] > rew[agent_ids[1]]
    # labor is for free, no utility loss
    config = {"episode_length": 20, "n_agents": 2, "labor_weight": 0.0}
    env = NewKeynesMarket(config)
    env.reset()
    for agent in env.agents.values():
        agent.labor = 1.0
        agent.consumption = 1.0
    rew = env.compute_rewards()
    for agent_id in env.agents.keys():
        assert rew[agent_id] > 0.0
    # With zero labor weight, different labor at equal consumption ties.
    i = 1.0
    for agent in env.agents.values():
        agent.labor = i
        agent.consumption = 1.0
        i += 1.0
    rew = env.compute_rewards()
    agent_ids = list(env.agents.keys())
    assert rew[agent_ids[0]] == rew[agent_ids[1]]
def test_generate_observations():
    """Every agent's observation carries the same average_wage_increase derived from the actions."""
    config = {"episode_length": 20, "n_agents": 3}
    env = NewKeynesMarket(config)
    env.reset()
    actions = {"agent-0": 0, "agent-1": 5, "agent-2": 10}
    # take_actions returns the (wages, demand) pair fed into the observations.
    w, d = env.take_actions(actions)
    obs = env.generate_observations(w, d)
    for agent_id, agent_obs in obs.items():
        assert agent_obs["average_wage_increase"] == 0.0
    env.firm.labor_demand = 2.0
    actions = {"agent-0": 0, "agent-1": 1, "agent-2": 2}
    w, d = env.take_actions(actions)
    obs = env.generate_observations(w, d)
    # approx: the average is a float computed from the action encoding.
    for agent_id, agent_obs in obs.items():
        assert pytest.approx(agent_obs["average_wage_increase"]) == 0.025
    env.firm.labor_demand = 1.5
    # Action 49 appears to be the maximum wage-increase action — TODO confirm
    # against the environment's action-space definition.
    actions = {"agent-0": 49, "agent-1": 49, "agent-2": 49}
    w, d = env.take_actions(actions)
    obs = env.generate_observations(w, d)
    for agent_id, agent_obs in obs.items():
        assert pytest.approx(agent_obs["average_wage_increase"]) == 0.1
| 32.091286
| 80
| 0.637962
| 1,158
| 7,734
| 4.145941
| 0.096718
| 0.080608
| 0.067069
| 0.070819
| 0.84774
| 0.803374
| 0.76734
| 0.730473
| 0.66986
| 0.653406
| 0
| 0.057918
| 0.218645
| 7,734
| 240
| 81
| 32.225
| 0.736555
| 0.050297
| 0
| 0.631579
| 0
| 0
| 0.100927
| 0.008592
| 0
| 0
| 0
| 0
| 0.247368
| 1
| 0.036842
| false
| 0
| 0.010526
| 0
| 0.047368
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3fbfc50342b5648c491d87fd651cb95f815bd5eb
| 56
|
py
|
Python
|
war3structs/plaintext/__init__.py
|
sides/war3structs
|
171c91240346e610e22cf10bab0c6d526996f855
|
[
"MIT"
] | 10
|
2019-12-07T12:10:13.000Z
|
2022-02-24T12:45:32.000Z
|
war3structs/plaintext/__init__.py
|
warlockbrawl/war3structs
|
171c91240346e610e22cf10bab0c6d526996f855
|
[
"MIT"
] | null | null | null |
war3structs/plaintext/__init__.py
|
warlockbrawl/war3structs
|
171c91240346e610e22cf10bab0c6d526996f855
|
[
"MIT"
] | 3
|
2020-02-28T12:43:26.000Z
|
2020-06-08T23:31:29.000Z
|
from .jass import JassParser
from .txt import TxtParser
| 18.666667
| 28
| 0.821429
| 8
| 56
| 5.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 56
| 2
| 29
| 28
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3fe09559590399e4f76d9945f49d271c60794606
| 1,322
|
py
|
Python
|
setup.py
|
MojixCoder/python-jalali
|
e0e777f01587ed12e426bd64e70e1ec610aed6ce
|
[
"Python-2.0",
"OLDAP-2.4",
"OLDAP-2.7"
] | 235
|
2015-02-22T15:00:34.000Z
|
2022-03-22T16:55:05.000Z
|
setup.py
|
MojixCoder/python-jalali
|
e0e777f01587ed12e426bd64e70e1ec610aed6ce
|
[
"Python-2.0",
"OLDAP-2.4",
"OLDAP-2.7"
] | 91
|
2015-03-11T04:31:26.000Z
|
2022-03-24T01:35:38.000Z
|
setup.py
|
MojixCoder/python-jalali
|
e0e777f01587ed12e426bd64e70e1ec610aed6ce
|
[
"Python-2.0",
"OLDAP-2.4",
"OLDAP-2.7"
] | 53
|
2015-01-07T03:25:01.000Z
|
2022-02-19T05:44:37.000Z
|
"""Packaging configuration for the jdatetime distribution."""
from setuptools import setup

# Read the long description with a context manager so the file handle is
# closed deterministically (the original `open('README').read()` leaked it),
# and use setuptools.setup directly: distutils is deprecated (PEP 632) and
# removed in Python 3.12, while `import setuptools` was previously unused.
with open('README') as readme:
    long_description = readme.read()

setup(
    name='jdatetime',
    version='3.7.0',
    packages=['jdatetime', ],
    license='Python Software Foundation License',
    keywords='Jalali implementation of Python datetime',
    platforms='any',
    author='Milad Rastian',
    author_email='eslashmili@gmail.com',
    description=("Jalali datetime binding for python"),
    url="https://github.com/slashmili/python-jalali",
    long_description=long_description,
    classifiers=[
        "Intended Audience :: Developers",
        "Intended Audience :: System Administrators",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Topic :: Software Development",
    ],
)
| 35.72973
| 56
| 0.614977
| 131
| 1,322
| 6.19084
| 0.473282
| 0.304562
| 0.40074
| 0.288533
| 0.066584
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025896
| 0.240545
| 1,322
| 36
| 57
| 36.722222
| 0.781873
| 0
| 0
| 0
| 0
| 0
| 0.621785
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.058824
| 0
| 0.058824
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b75225b9d739c7614ebdffd1ffa74ad737312e87
| 143
|
py
|
Python
|
tests/conftest.py
|
yuvadm/amazon-transcribe-streaming-sdk
|
4b5742c5a0d4f73c221f191f07d484098d1ca2ef
|
[
"Apache-2.0"
] | 61
|
2020-07-30T15:29:22.000Z
|
2022-03-25T15:08:14.000Z
|
tests/conftest.py
|
yuvadm/amazon-transcribe-streaming-sdk
|
4b5742c5a0d4f73c221f191f07d484098d1ca2ef
|
[
"Apache-2.0"
] | 52
|
2020-07-29T19:12:46.000Z
|
2022-03-15T22:55:55.000Z
|
tests/conftest.py
|
yuvadm/amazon-transcribe-streaming-sdk
|
4b5742c5a0d4f73c221f191f07d484098d1ca2ef
|
[
"Apache-2.0"
] | 22
|
2020-10-07T15:04:31.000Z
|
2022-02-20T23:33:51.000Z
|
import pytest
from amazon_transcribe import AWSCRTEventLoop
@pytest.fixture
def default_eventloop():
    """Pytest fixture: the bootstrap of a freshly constructed AWSCRTEventLoop."""
    return AWSCRTEventLoop().bootstrap
| 15.888889
| 45
| 0.818182
| 15
| 143
| 7.666667
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125874
| 143
| 8
| 46
| 17.875
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
b7678573a6e2b4a738a856faa8c67ae16dcfd922
| 103
|
py
|
Python
|
elementalcms/extends/__init__.py
|
paranoid-software/elemental-cms
|
7f09f9cd5498577d23fa70d1a51497b9de232598
|
[
"MIT"
] | 3
|
2022-01-12T09:11:54.000Z
|
2022-02-24T22:39:11.000Z
|
elementalcms/extends/__init__.py
|
paranoid-software/elemental-cms
|
7f09f9cd5498577d23fa70d1a51497b9de232598
|
[
"MIT"
] | null | null | null |
elementalcms/extends/__init__.py
|
paranoid-software/elemental-cms
|
7f09f9cd5498577d23fa70d1a51497b9de232598
|
[
"MIT"
] | 1
|
2022-01-12T09:11:56.000Z
|
2022-01-12T09:11:56.000Z
|
from .controller import Controller
from .applet import Applet
from .actionsmapper import ActionsMapper
| 25.75
| 40
| 0.854369
| 12
| 103
| 7.333333
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116505
| 103
| 3
| 41
| 34.333333
| 0.967033
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b77a27e33c537905857e9c2596e11287a6b9c879
| 1,055
|
py
|
Python
|
tests/test_DataFrameSelector.py
|
messier16/m16_mlutils
|
868775f48106f2e3a2090e98b8508349ca278158
|
[
"MIT"
] | null | null | null |
tests/test_DataFrameSelector.py
|
messier16/m16_mlutils
|
868775f48106f2e3a2090e98b8508349ca278158
|
[
"MIT"
] | 9
|
2018-10-13T06:50:05.000Z
|
2021-06-01T23:07:42.000Z
|
tests/test_DataFrameSelector.py
|
messier16/m16_mlutils
|
868775f48106f2e3a2090e98b8508349ca278158
|
[
"MIT"
] | null | null | null |
import pandas as pd
from m16_mlutils.pipeline import DataFrameSelector
def _sample_data():
    """Return (full_frame, A, B, C): three aligned 10-element columns and the frame holding them.

    Shared by all three selector tests, which previously each rebuilt the
    same fixture inline.
    """
    A, B, C = list(range(10)), list(range(5, 15)), list(range(-1, 9))
    return pd.DataFrame({'A': A, 'B': B, 'C': C}), A, B, C


def test_selects_right_columns():
    """Selecting ['A', 'C'] keeps exactly those columns."""
    full, A, B, C = _sample_data()
    expected = pd.DataFrame({'A': A, 'C': C})
    selector = DataFrameSelector(['A', 'C'])
    actual = selector.fit_transform(full)
    pd.testing.assert_frame_equal(actual, expected)


def test_select_single_column():
    """A one-element column list still yields a DataFrame, not a Series."""
    full, A, B, C = _sample_data()
    expected = full[['A']].copy()
    selector = DataFrameSelector(['A'])
    actual = selector.fit_transform(full)
    pd.testing.assert_frame_equal(actual, expected)


def test_select_series():
    """A bare column label (no list) yields a Series."""
    full, A, B, C = _sample_data()
    expected = full['A'].copy()
    selector = DataFrameSelector('A')
    actual = selector.fit_transform(full)
    pd.testing.assert_series_equal(actual, expected)
| 28.513514
| 69
| 0.632227
| 154
| 1,055
| 4.214286
| 0.25974
| 0.124807
| 0.07396
| 0.080123
| 0.72265
| 0.72265
| 0.72265
| 0.72265
| 0.72265
| 0.690293
| 0
| 0.026498
| 0.177251
| 1,055
| 36
| 70
| 29.305556
| 0.721198
| 0
| 0
| 0.478261
| 0
| 0
| 0.016114
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 1
| 0.130435
| false
| 0
| 0.086957
| 0
| 0.217391
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b77e5e00d6816f6d3410f394ca73dc4a24c8ed04
| 186
|
py
|
Python
|
hylite/project/__init__.py
|
sandralorenz268/hylite
|
7a76132f80287917a28d0422c09c32dac8926465
|
[
"MIT"
] | null | null | null |
hylite/project/__init__.py
|
sandralorenz268/hylite
|
7a76132f80287917a28d0422c09c32dac8926465
|
[
"MIT"
] | null | null | null |
hylite/project/__init__.py
|
sandralorenz268/hylite
|
7a76132f80287917a28d0422c09c32dac8926465
|
[
"MIT"
] | null | null | null |
"""
Project points between 2D image coordinates and 3D world coordinates. Also includes related problems such as
camera localisation.
"""
from .basic import *
from .camera import Camera
| 26.571429
| 108
| 0.790323
| 25
| 186
| 5.88
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012658
| 0.150538
| 186
| 7
| 109
| 26.571429
| 0.917722
| 0.693548
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b78cb64d2b2b2ab8409ed0ce0062fdb4b108cdcd
| 60
|
py
|
Python
|
py2sql-orm_pkg/py2sql_orm/mapper/__init__.py
|
Pilipets/Metaprogramming
|
7659393bc504562b0f74c5a8cb5f4ffd39771e2f
|
[
"MIT"
] | null | null | null |
py2sql-orm_pkg/py2sql_orm/mapper/__init__.py
|
Pilipets/Metaprogramming
|
7659393bc504562b0f74c5a8cb5f4ffd39771e2f
|
[
"MIT"
] | 2
|
2020-11-29T16:24:46.000Z
|
2020-12-14T19:11:02.000Z
|
py2sql-orm_pkg/py2sql_orm/mapper/__init__.py
|
Pilipets/Metaprogramming
|
7659393bc504562b0f74c5a8cb5f4ffd39771e2f
|
[
"MIT"
] | null | null | null |
from .utils import *
from . import sql_mapping as mapping
| 20
| 36
| 0.75
| 9
| 60
| 4.888889
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 60
| 3
| 36
| 20
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b7aef7db5a5a02e4bb8e8e499af266d2b3b3bdb0
| 110
|
py
|
Python
|
playground/test/index.py
|
workofart/ml-trading-playground
|
1c619a1917574220798a749fbad2ba1782207362
|
[
"MIT"
] | 1
|
2019-08-31T13:52:46.000Z
|
2019-08-31T13:52:46.000Z
|
playground/test/index.py
|
workofart/ml-trading-playground
|
1c619a1917574220798a749fbad2ba1782207362
|
[
"MIT"
] | null | null | null |
playground/test/index.py
|
workofart/ml-trading-playground
|
1c619a1917574220798a749fbad2ba1782207362
|
[
"MIT"
] | null | null | null |
import sys
sys.path.insert(0,"/Users/Henry/Github/work-trader/playground")
from trading_env import TradingEnv
| 27.5
| 63
| 0.818182
| 17
| 110
| 5.235294
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009709
| 0.063636
| 110
| 4
| 64
| 27.5
| 0.854369
| 0
| 0
| 0
| 0
| 0
| 0.378378
| 0.378378
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4d34a40e2b9b11c81917dee60d05260a52e4af2c
| 39
|
py
|
Python
|
api/config.py
|
cedarmora/lifestyle-choice-emissions
|
77cace989b5ca9493a6fb98dcce4e1dd3d6b686f
|
[
"MIT"
] | null | null | null |
api/config.py
|
cedarmora/lifestyle-choice-emissions
|
77cace989b5ca9493a6fb98dcce4e1dd3d6b686f
|
[
"MIT"
] | null | null | null |
api/config.py
|
cedarmora/lifestyle-choice-emissions
|
77cace989b5ca9493a6fb98dcce4e1dd3d6b686f
|
[
"MIT"
] | null | null | null |
# All config is in docker-compose files
| 39
| 39
| 0.794872
| 7
| 39
| 4.428571
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 39
| 1
| 39
| 39
| 0.939394
| 0.948718
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4d86fc239efd34de388ae708ff436dc86270dfeb
| 48
|
py
|
Python
|
{{cookiecutter.repo_name}}/ml_service/pipelines/train_pipeline/run_pipeline.py
|
TessFerrandez/SampleCookieTemplate
|
4cd8d9dbfd42162ecd3bdab564b94de3d7821aa0
|
[
"MIT"
] | null | null | null |
{{cookiecutter.repo_name}}/ml_service/pipelines/train_pipeline/run_pipeline.py
|
TessFerrandez/SampleCookieTemplate
|
4cd8d9dbfd42162ecd3bdab564b94de3d7821aa0
|
[
"MIT"
] | null | null | null |
{{cookiecutter.repo_name}}/ml_service/pipelines/train_pipeline/run_pipeline.py
|
TessFerrandez/SampleCookieTemplate
|
4cd8d9dbfd42162ecd3bdab564b94de3d7821aa0
|
[
"MIT"
] | null | null | null |
# this would be filled out in the real template
| 24
| 47
| 0.770833
| 9
| 48
| 4.111111
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.208333
| 48
| 1
| 48
| 48
| 0.973684
| 0.9375
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4dce00c860a1c9e00d05d02db1bec3789694fba2
| 2,081
|
py
|
Python
|
examples/test_lsat6_2pl.py
|
binhetech/irt-rpy
|
4700cecb58e6ac855c4ed1ff604420634a98570d
|
[
"Apache-2.0"
] | 1
|
2021-06-07T09:13:09.000Z
|
2021-06-07T09:13:09.000Z
|
examples/test_lsat6_2pl.py
|
binhetech/irt-rpy
|
4700cecb58e6ac855c4ed1ff604420634a98570d
|
[
"Apache-2.0"
] | null | null | null |
examples/test_lsat6_2pl.py
|
binhetech/irt-rpy
|
4700cecb58e6ac855c4ed1ff604420634a98570d
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2021/4/16 15:45
# @Author : hebin
# @File : test_lsat6_2pl.py
import sys
import numpy as np
import pandas as pd
sys.path.append("../")
from src.irt import IRTModel
def test_lsat6():
# A data frame with the responses of 1000 individuals to 5 questions
data = open("../examples/lsat6.dat", "r", encoding="utf-8").readlines()
datas = pd.DataFrame()
for it, i in enumerate(data):
cols = i.strip().split("\t")
cols = [int(i) for i in cols]
datas = datas.append(pd.DataFrame([pd.Series(cols).add_prefix("item_")]), ignore_index=True)
print("data shape={}".format(datas.shape))
irt = IRTModel(5, model=None, model_save_path="./irt-model.jbl", item_para_save_path="./irt-item-coef.jbl")
# 试题、能力参数联合估计
irt.fit(datas, dims=1, itemtype="2PL", method="EM")
irt.calc_coef(IRTpars="F")
# 能力参数估计
scores = irt.calc_scores()
print("scores={}".format(scores))
for i, p in enumerate(irt.item_paras):
print("item {} paras: {}".format(i, p))
def test_lsat6_na():
# A data frame with the responses of 1000 individuals to 5 questions
data = open("../examples/lsat6.dat", "r", encoding="utf-8").readlines()
datas = pd.DataFrame()
for it, i in enumerate(data):
cols = i.strip().split("\t")
cols = [int(i) for i in cols]
if it == 0:
# 答题数据缺失时,用np.nan填充
cols = [0, 1, np.nan, 1, 1]
datas = datas.append(pd.DataFrame([pd.Series(cols).add_prefix("item_")]), ignore_index=True)
print("data shape={}".format(datas.shape))
# 答题数据缺失时
datas.iloc[0]["item_1"] = np.nan
datas.iloc[3]["item_2"] = np.nan
irt = IRTModel(5, model=None, model_save_path="./irt-model.jbl", item_para_save_path="./irt-item-coef.jbl")
# 试题、能力参数联合估计
irt.fit(datas, dims=1, itemtype="2PL", method="EM")
irt.calc_coef(IRTpars="F")
for i, p in enumerate(irt.item_paras):
print("item {} paras: {}".format(i, p))
if __name__ == "__main__":
test_lsat6()
test_lsat6_na()
| 32.515625
| 111
| 0.617492
| 314
| 2,081
| 3.968153
| 0.340764
| 0.036116
| 0.035313
| 0.022472
| 0.735152
| 0.735152
| 0.735152
| 0.735152
| 0.735152
| 0.735152
| 0
| 0.028365
| 0.203748
| 2,081
| 63
| 112
| 33.031746
| 0.723597
| 0.145123
| 0
| 0.6
| 0
| 0
| 0.135823
| 0.023769
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05
| false
| 0
| 0.1
| 0
| 0.15
| 0.125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4ddb2d1f4f5aedc1c54e3de158324a02bbb5fb4d
| 134
|
py
|
Python
|
jetstream/__main__.py
|
parallelworks/jetstream
|
ce2c0857a0b3201c739476be47de7570d267b877
|
[
"MIT"
] | 3
|
2020-09-28T16:43:52.000Z
|
2021-11-27T23:42:11.000Z
|
jetstream/__main__.py
|
parallelworks/jetstream
|
ce2c0857a0b3201c739476be47de7570d267b877
|
[
"MIT"
] | 42
|
2019-10-28T17:25:55.000Z
|
2022-03-05T21:53:10.000Z
|
jetstream/__main__.py
|
parallelworks/jetstream
|
ce2c0857a0b3201c739476be47de7570d267b877
|
[
"MIT"
] | 4
|
2020-05-03T06:46:02.000Z
|
2022-03-01T20:33:18.000Z
|
"""Allows the cli to be used with the python -m feature."""
import jetstream.cli
if __name__ == '__main__':
jetstream.cli.main()
| 22.333333
| 59
| 0.69403
| 20
| 134
| 4.25
| 0.75
| 0.282353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171642
| 134
| 5
| 60
| 26.8
| 0.765766
| 0.395522
| 0
| 0
| 0
| 0
| 0.106667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4de46a820772ac159f6ba169677aebeb6f9cb77f
| 90
|
py
|
Python
|
ppython/snippets/paint.py
|
ovolkovskyics/ppython
|
edad2f90638aa47557e3b75358dc0bd657134a00
|
[
"Apache-2.0"
] | 47
|
2019-08-15T05:49:34.000Z
|
2022-01-19T06:32:11.000Z
|
ppython/snippets/paint.py
|
ovolkovskyics/ppython
|
edad2f90638aa47557e3b75358dc0bd657134a00
|
[
"Apache-2.0"
] | 32
|
2019-08-15T20:00:42.000Z
|
2022-02-28T17:26:26.000Z
|
ppython/snippets/paint.py
|
ovolkovskyics/ppython
|
edad2f90638aa47557e3b75358dc0bd657134a00
|
[
"Apache-2.0"
] | 65
|
2019-08-15T14:08:16.000Z
|
2022-02-19T13:59:42.000Z
|
def setup():
pass
def draw():
fill(255, 0, 0)
ellipse(mouseX, mouseY, 20, 20)
| 15
| 35
| 0.566667
| 14
| 90
| 3.642857
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 0.266667
| 90
| 6
| 35
| 15
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| true
| 0.2
| 0
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
151d454274809c4514e48b0caadc088eab615cf6
| 62
|
py
|
Python
|
python/modules/deep_learning/network/__init__.py
|
darwinbeing/deepdriving-tensorflow
|
036a83871f3515b2c041bc3cd5e845f6d8f7b3b7
|
[
"MIT"
] | 1
|
2018-12-13T14:00:03.000Z
|
2018-12-13T14:00:03.000Z
|
python/modules/deep_learning/network/__init__.py
|
darwinbeing/deepdriving-tensorflow
|
036a83871f3515b2c041bc3cd5e845f6d8f7b3b7
|
[
"MIT"
] | null | null | null |
python/modules/deep_learning/network/__init__.py
|
darwinbeing/deepdriving-tensorflow
|
036a83871f3515b2c041bc3cd5e845f6d8f7b3b7
|
[
"MIT"
] | null | null | null |
from .CFactory import CFactory
from .CNetwork import CNetwork
| 31
| 31
| 0.83871
| 8
| 62
| 6.5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 62
| 2
| 32
| 31
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1278ac8df44d1d1fac576c006b4710f3ea2acea1
| 55
|
py
|
Python
|
lang/py/cookbook/v2/source/cb2_4_8_exm_1.py
|
ch1huizong/learning
|
632267634a9fd84a5f5116de09ff1e2681a6cc85
|
[
"MIT"
] | null | null | null |
lang/py/cookbook/v2/source/cb2_4_8_exm_1.py
|
ch1huizong/learning
|
632267634a9fd84a5f5116de09ff1e2681a6cc85
|
[
"MIT"
] | null | null | null |
lang/py/cookbook/v2/source/cb2_4_8_exm_1.py
|
ch1huizong/learning
|
632267634a9fd84a5f5116de09ff1e2681a6cc85
|
[
"MIT"
] | null | null | null |
import itertools
print map(list, itertools.izip(*arr))
| 18.333333
| 37
| 0.781818
| 8
| 55
| 5.375
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 55
| 2
| 38
| 27.5
| 0.86
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.5
| null | null | 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
1294a1980727aff02a8a936a8a1483dd37e4ac60
| 16,114
|
py
|
Python
|
src/python/bot/tokenizer/grammars/HTMLLexer.py
|
vschs007/clusterfuzz
|
4b5e825abcd80d81d734a69b7457df59a6a9aa6e
|
[
"Apache-2.0"
] | 3
|
2020-12-30T07:00:55.000Z
|
2021-03-16T10:55:05.000Z
|
src/python/bot/tokenizer/grammars/HTMLLexer.py
|
vschs007/clusterfuzz
|
4b5e825abcd80d81d734a69b7457df59a6a9aa6e
|
[
"Apache-2.0"
] | null | null | null |
src/python/bot/tokenizer/grammars/HTMLLexer.py
|
vschs007/clusterfuzz
|
4b5e825abcd80d81d734a69b7457df59a6a9aa6e
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Generated from HTMLLexer.g4 by ANTLR 4.7.1
# encoding: utf-8
from __future__ import print_function
from antlr4 import *
from io import StringIO
import sys
def serializedATN():
with StringIO() as buf:
buf.write(u"\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2")
buf.write(u"\31\u017d\b\1\b\1\b\1\b\1\b\1\4\2\t\2\4\3\t\3\4\4\t\4")
buf.write(u"\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13")
buf.write(u"\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4")
buf.write(u"\21\t\21\4\22\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26")
buf.write(u"\t\26\4\27\t\27\4\30\t\30\4\31\t\31\4\32\t\32\4\33\t")
buf.write(u"\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!")
buf.write(u"\t!\4\"\t\"\3\2\3\2\3\2\3\2\3\2\3\2\7\2P\n\2\f\2\16\2")
buf.write(u"S\13\2\3\2\3\2\3\2\3\2\3\3\3\3\3\3\3\3\3\3\7\3^\n\3\f")
buf.write(u"\3\16\3a\13\3\3\3\3\3\3\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4")
buf.write(u"\7\4m\n\4\f\4\16\4p\13\4\3\4\3\4\3\5\3\5\3\5\3\5\3\5")
buf.write(u"\3\5\3\5\3\5\3\5\3\5\3\5\7\5\177\n\5\f\5\16\5\u0082\13")
buf.write(u"\5\3\5\3\5\3\5\3\5\3\6\3\6\3\6\3\6\7\6\u008c\n\6\f\6")
buf.write(u"\16\6\u008f\13\6\3\6\3\6\3\7\3\7\3\7\3\7\7\7\u0097\n")
buf.write(u"\7\f\7\16\7\u009a\13\7\3\7\3\7\3\7\3\7\3\7\3\7\7\7\u00a2")
buf.write(u"\n\7\f\7\16\7\u00a5\13\7\3\7\3\7\5\7\u00a9\n\7\3\b\3")
buf.write(u"\b\5\b\u00ad\n\b\3\b\6\b\u00b0\n\b\r\b\16\b\u00b1\3\t")
buf.write(u"\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\7\t\u00bd\n\t\f\t\16")
buf.write(u"\t\u00c0\13\t\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\n\3\n")
buf.write(u"\3\n\3\n\7\n\u00ce\n\n\f\n\16\n\u00d1\13\n\3\n\3\n\3")
buf.write(u"\n\3\n\3\13\3\13\3\13\3\13\3\f\6\f\u00dc\n\f\r\f\16\f")
buf.write(u"\u00dd\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\16\3\16\3\17")
buf.write(u"\3\17\3\20\3\20\3\20\3\20\3\21\3\21\7\21\u00f1\n\21\f")
buf.write(u"\21\16\21\u00f4\13\21\3\22\3\22\3\22\3\22\3\23\3\23\3")
buf.write(u"\24\3\24\3\25\3\25\3\25\3\25\5\25\u0102\n\25\3\26\5\26")
buf.write(u"\u0105\n\26\3\27\7\27\u0108\n\27\f\27\16\27\u010b\13")
buf.write(u"\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27")
buf.write(u"\3\27\3\27\3\30\7\30\u011a\n\30\f\30\16\30\u011d\13\30")
buf.write(u"\3\30\3\30\3\30\3\30\3\30\3\30\3\31\7\31\u0126\n\31\f")
buf.write(u"\31\16\31\u0129\13\31\3\31\3\31\3\31\3\31\3\31\3\31\3")
buf.write(u"\31\3\31\3\31\3\31\3\31\3\32\7\32\u0137\n\32\f\32\16")
buf.write(u"\32\u013a\13\32\3\32\3\32\3\32\3\32\3\32\3\32\3\33\7")
buf.write(u"\33\u0143\n\33\f\33\16\33\u0146\13\33\3\33\3\33\3\33")
buf.write(u"\3\33\3\34\3\34\3\34\3\34\3\34\5\34\u0151\n\34\3\35\5")
buf.write(u"\35\u0154\n\35\3\36\6\36\u0157\n\36\r\36\16\36\u0158")
buf.write(u"\3\36\5\36\u015c\n\36\3\37\3\37\6\37\u0160\n\37\r\37")
buf.write(u"\16\37\u0161\3 \6 \u0165\n \r \16 \u0166\3 \5 \u016a")
buf.write(u"\n \3!\3!\7!\u016e\n!\f!\16!\u0171\13!\3!\3!\3\"\3\"")
buf.write(u"\7\"\u0177\n\"\f\"\16\"\u017a\13\"\3\"\3\"\17Q_n\u0080")
buf.write(u"\u008d\u0098\u00a3\u00be\u00cf\u0109\u011b\u0127\u0138")
buf.write(u"\2#\7\3\t\4\13\5\r\6\17\7\21\b\23\t\25\n\27\13\31\f\33")
buf.write(u"\r\35\16\37\17!\20#\21%\22\'\23)\2+\2-\2/\2\61\24\63")
buf.write(u"\25\65\26\67\279\30;\31=\2?\2A\2C\2E\2G\2\7\2\3\4\5\6")
buf.write(u"\16\4\2\13\13\"\"\3\2>>\5\2\13\f\17\17\"\"\5\2\62;CH")
buf.write(u"ch\3\2\62;\4\2/\60aa\5\2\u00b9\u00b9\u0302\u0371\u2041")
buf.write(u"\u2042\n\2<<C\\c|\u2072\u2191\u2c02\u2ff1\u3003\ud801")
buf.write(u"\uf902\ufdd1\ufdf2\uffff\3\2\"\"\t\2%%-=??AAC\\aac|\4")
buf.write(u"\2$$>>\4\2))>>\2\u0190\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3")
buf.write(u"\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2")
buf.write(u"\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2")
buf.write(u"\2\2\3\35\3\2\2\2\3\37\3\2\2\2\3!\3\2\2\2\3#\3\2\2\2")
buf.write(u"\3%\3\2\2\2\3\'\3\2\2\2\4\61\3\2\2\2\4\63\3\2\2\2\5\65")
buf.write(u"\3\2\2\2\5\67\3\2\2\2\69\3\2\2\2\6;\3\2\2\2\7I\3\2\2")
buf.write(u"\2\tX\3\2\2\2\13e\3\2\2\2\rs\3\2\2\2\17\u0087\3\2\2\2")
buf.write(u"\21\u00a8\3\2\2\2\23\u00af\3\2\2\2\25\u00b3\3\2\2\2\27")
buf.write(u"\u00c5\3\2\2\2\31\u00d6\3\2\2\2\33\u00db\3\2\2\2\35\u00df")
buf.write(u"\3\2\2\2\37\u00e3\3\2\2\2!\u00e8\3\2\2\2#\u00ea\3\2\2")
buf.write(u"\2%\u00ee\3\2\2\2\'\u00f5\3\2\2\2)\u00f9\3\2\2\2+\u00fb")
buf.write(u"\3\2\2\2-\u0101\3\2\2\2/\u0104\3\2\2\2\61\u0109\3\2\2")
buf.write(u"\2\63\u011b\3\2\2\2\65\u0127\3\2\2\2\67\u0138\3\2\2\2")
buf.write(u"9\u0144\3\2\2\2;\u0150\3\2\2\2=\u0153\3\2\2\2?\u0156")
buf.write(u"\3\2\2\2A\u015d\3\2\2\2C\u0164\3\2\2\2E\u016b\3\2\2\2")
buf.write(u"G\u0174\3\2\2\2IJ\7>\2\2JK\7#\2\2KL\7/\2\2LM\7/\2\2M")
buf.write(u"Q\3\2\2\2NP\13\2\2\2ON\3\2\2\2PS\3\2\2\2QR\3\2\2\2QO")
buf.write(u"\3\2\2\2RT\3\2\2\2SQ\3\2\2\2TU\7/\2\2UV\7/\2\2VW\7@\2")
buf.write(u"\2W\b\3\2\2\2XY\7>\2\2YZ\7#\2\2Z[\7]\2\2[_\3\2\2\2\\")
buf.write(u"^\13\2\2\2]\\\3\2\2\2^a\3\2\2\2_`\3\2\2\2_]\3\2\2\2`")
buf.write(u"b\3\2\2\2a_\3\2\2\2bc\7_\2\2cd\7@\2\2d\n\3\2\2\2ef\7")
buf.write(u">\2\2fg\7A\2\2gh\7z\2\2hi\7o\2\2ij\7n\2\2jn\3\2\2\2k")
buf.write(u"m\13\2\2\2lk\3\2\2\2mp\3\2\2\2no\3\2\2\2nl\3\2\2\2oq")
buf.write(u"\3\2\2\2pn\3\2\2\2qr\7@\2\2r\f\3\2\2\2st\7>\2\2tu\7#")
buf.write(u"\2\2uv\7]\2\2vw\7E\2\2wx\7F\2\2xy\7C\2\2yz\7V\2\2z{\7")
buf.write(u"C\2\2{|\7]\2\2|\u0080\3\2\2\2}\177\13\2\2\2~}\3\2\2\2")
buf.write(u"\177\u0082\3\2\2\2\u0080\u0081\3\2\2\2\u0080~\3\2\2\2")
buf.write(u"\u0081\u0083\3\2\2\2\u0082\u0080\3\2\2\2\u0083\u0084")
buf.write(u"\7_\2\2\u0084\u0085\7_\2\2\u0085\u0086\7@\2\2\u0086\16")
buf.write(u"\3\2\2\2\u0087\u0088\7>\2\2\u0088\u0089\7#\2\2\u0089")
buf.write(u"\u008d\3\2\2\2\u008a\u008c\13\2\2\2\u008b\u008a\3\2\2")
buf.write(u"\2\u008c\u008f\3\2\2\2\u008d\u008e\3\2\2\2\u008d\u008b")
buf.write(u"\3\2\2\2\u008e\u0090\3\2\2\2\u008f\u008d\3\2\2\2\u0090")
buf.write(u"\u0091\7@\2\2\u0091\20\3\2\2\2\u0092\u0093\7>\2\2\u0093")
buf.write(u"\u0094\7A\2\2\u0094\u0098\3\2\2\2\u0095\u0097\13\2\2")
buf.write(u"\2\u0096\u0095\3\2\2\2\u0097\u009a\3\2\2\2\u0098\u0099")
buf.write(u"\3\2\2\2\u0098\u0096\3\2\2\2\u0099\u009b\3\2\2\2\u009a")
buf.write(u"\u0098\3\2\2\2\u009b\u009c\7A\2\2\u009c\u00a9\7@\2\2")
buf.write(u"\u009d\u009e\7>\2\2\u009e\u009f\7\'\2\2\u009f\u00a3\3")
buf.write(u"\2\2\2\u00a0\u00a2\13\2\2\2\u00a1\u00a0\3\2\2\2\u00a2")
buf.write(u"\u00a5\3\2\2\2\u00a3\u00a4\3\2\2\2\u00a3\u00a1\3\2\2")
buf.write(u"\2\u00a4\u00a6\3\2\2\2\u00a5\u00a3\3\2\2\2\u00a6\u00a7")
buf.write(u"\7\'\2\2\u00a7\u00a9\7@\2\2\u00a8\u0092\3\2\2\2\u00a8")
buf.write(u"\u009d\3\2\2\2\u00a9\22\3\2\2\2\u00aa\u00b0\t\2\2\2\u00ab")
buf.write(u"\u00ad\7\17\2\2\u00ac\u00ab\3\2\2\2\u00ac\u00ad\3\2\2")
buf.write(u"\2\u00ad\u00ae\3\2\2\2\u00ae\u00b0\7\f\2\2\u00af\u00aa")
buf.write(u"\3\2\2\2\u00af\u00ac\3\2\2\2\u00b0\u00b1\3\2\2\2\u00b1")
buf.write(u"\u00af\3\2\2\2\u00b1\u00b2\3\2\2\2\u00b2\24\3\2\2\2\u00b3")
buf.write(u"\u00b4\7>\2\2\u00b4\u00b5\7u\2\2\u00b5\u00b6\7e\2\2\u00b6")
buf.write(u"\u00b7\7t\2\2\u00b7\u00b8\7k\2\2\u00b8\u00b9\7r\2\2\u00b9")
buf.write(u"\u00ba\7v\2\2\u00ba\u00be\3\2\2\2\u00bb\u00bd\13\2\2")
buf.write(u"\2\u00bc\u00bb\3\2\2\2\u00bd\u00c0\3\2\2\2\u00be\u00bf")
buf.write(u"\3\2\2\2\u00be\u00bc\3\2\2\2\u00bf\u00c1\3\2\2\2\u00c0")
buf.write(u"\u00be\3\2\2\2\u00c1\u00c2\7@\2\2\u00c2\u00c3\3\2\2\2")
buf.write(u"\u00c3\u00c4\b\t\2\2\u00c4\26\3\2\2\2\u00c5\u00c6\7>")
buf.write(u"\2\2\u00c6\u00c7\7u\2\2\u00c7\u00c8\7v\2\2\u00c8\u00c9")
buf.write(u"\7{\2\2\u00c9\u00ca\7n\2\2\u00ca\u00cb\7g\2\2\u00cb\u00cf")
buf.write(u"\3\2\2\2\u00cc\u00ce\13\2\2\2\u00cd\u00cc\3\2\2\2\u00ce")
buf.write(u"\u00d1\3\2\2\2\u00cf\u00d0\3\2\2\2\u00cf\u00cd\3\2\2")
buf.write(u"\2\u00d0\u00d2\3\2\2\2\u00d1\u00cf\3\2\2\2\u00d2\u00d3")
buf.write(u"\7@\2\2\u00d3\u00d4\3\2\2\2\u00d4\u00d5\b\n\3\2\u00d5")
buf.write(u"\30\3\2\2\2\u00d6\u00d7\7>\2\2\u00d7\u00d8\3\2\2\2\u00d8")
buf.write(u"\u00d9\b\13\4\2\u00d9\32\3\2\2\2\u00da\u00dc\n\3\2\2")
buf.write(u"\u00db\u00da\3\2\2\2\u00dc\u00dd\3\2\2\2\u00dd\u00db")
buf.write(u"\3\2\2\2\u00dd\u00de\3\2\2\2\u00de\34\3\2\2\2\u00df\u00e0")
buf.write(u"\7@\2\2\u00e0\u00e1\3\2\2\2\u00e1\u00e2\b\r\5\2\u00e2")
buf.write(u"\36\3\2\2\2\u00e3\u00e4\7\61\2\2\u00e4\u00e5\7@\2\2\u00e5")
buf.write(u"\u00e6\3\2\2\2\u00e6\u00e7\b\16\5\2\u00e7 \3\2\2\2\u00e8")
buf.write(u"\u00e9\7\61\2\2\u00e9\"\3\2\2\2\u00ea\u00eb\7?\2\2\u00eb")
buf.write(u"\u00ec\3\2\2\2\u00ec\u00ed\b\20\6\2\u00ed$\3\2\2\2\u00ee")
buf.write(u"\u00f2\5/\26\2\u00ef\u00f1\5-\25\2\u00f0\u00ef\3\2\2")
buf.write(u"\2\u00f1\u00f4\3\2\2\2\u00f2\u00f0\3\2\2\2\u00f2\u00f3")
buf.write(u"\3\2\2\2\u00f3&\3\2\2\2\u00f4\u00f2\3\2\2\2\u00f5\u00f6")
buf.write(u"\t\4\2\2\u00f6\u00f7\3\2\2\2\u00f7\u00f8\b\22\7\2\u00f8")
buf.write(u"(\3\2\2\2\u00f9\u00fa\t\5\2\2\u00fa*\3\2\2\2\u00fb\u00fc")
buf.write(u"\t\6\2\2\u00fc,\3\2\2\2\u00fd\u0102\5/\26\2\u00fe\u0102")
buf.write(u"\t\7\2\2\u00ff\u0102\5+\24\2\u0100\u0102\t\b\2\2\u0101")
buf.write(u"\u00fd\3\2\2\2\u0101\u00fe\3\2\2\2\u0101\u00ff\3\2\2")
buf.write(u"\2\u0101\u0100\3\2\2\2\u0102.\3\2\2\2\u0103\u0105\t\t")
buf.write(u"\2\2\u0104\u0103\3\2\2\2\u0105\60\3\2\2\2\u0106\u0108")
buf.write(u"\13\2\2\2\u0107\u0106\3\2\2\2\u0108\u010b\3\2\2\2\u0109")
buf.write(u"\u010a\3\2\2\2\u0109\u0107\3\2\2\2\u010a\u010c\3\2\2")
buf.write(u"\2\u010b\u0109\3\2\2\2\u010c\u010d\7>\2\2\u010d\u010e")
buf.write(u"\7\61\2\2\u010e\u010f\7u\2\2\u010f\u0110\7e\2\2\u0110")
buf.write(u"\u0111\7t\2\2\u0111\u0112\7k\2\2\u0112\u0113\7r\2\2\u0113")
buf.write(u"\u0114\7v\2\2\u0114\u0115\7@\2\2\u0115\u0116\3\2\2\2")
buf.write(u"\u0116\u0117\b\27\5\2\u0117\62\3\2\2\2\u0118\u011a\13")
buf.write(u"\2\2\2\u0119\u0118\3\2\2\2\u011a\u011d\3\2\2\2\u011b")
buf.write(u"\u011c\3\2\2\2\u011b\u0119\3\2\2\2\u011c\u011e\3\2\2")
buf.write(u"\2\u011d\u011b\3\2\2\2\u011e\u011f\7>\2\2\u011f\u0120")
buf.write(u"\7\61\2\2\u0120\u0121\7@\2\2\u0121\u0122\3\2\2\2\u0122")
buf.write(u"\u0123\b\30\5\2\u0123\64\3\2\2\2\u0124\u0126\13\2\2\2")
buf.write(u"\u0125\u0124\3\2\2\2\u0126\u0129\3\2\2\2\u0127\u0128")
buf.write(u"\3\2\2\2\u0127\u0125\3\2\2\2\u0128\u012a\3\2\2\2\u0129")
buf.write(u"\u0127\3\2\2\2\u012a\u012b\7>\2\2\u012b\u012c\7\61\2")
buf.write(u"\2\u012c\u012d\7u\2\2\u012d\u012e\7v\2\2\u012e\u012f")
buf.write(u"\7{\2\2\u012f\u0130\7n\2\2\u0130\u0131\7g\2\2\u0131\u0132")
buf.write(u"\7@\2\2\u0132\u0133\3\2\2\2\u0133\u0134\b\31\5\2\u0134")
buf.write(u"\66\3\2\2\2\u0135\u0137\13\2\2\2\u0136\u0135\3\2\2\2")
buf.write(u"\u0137\u013a\3\2\2\2\u0138\u0139\3\2\2\2\u0138\u0136")
buf.write(u"\3\2\2\2\u0139\u013b\3\2\2\2\u013a\u0138\3\2\2\2\u013b")
buf.write(u"\u013c\7>\2\2\u013c\u013d\7\61\2\2\u013d\u013e\7@\2\2")
buf.write(u"\u013e\u013f\3\2\2\2\u013f\u0140\b\32\5\2\u01408\3\2")
buf.write(u"\2\2\u0141\u0143\t\n\2\2\u0142\u0141\3\2\2\2\u0143\u0146")
buf.write(u"\3\2\2\2\u0144\u0142\3\2\2\2\u0144\u0145\3\2\2\2\u0145")
buf.write(u"\u0147\3\2\2\2\u0146\u0144\3\2\2\2\u0147\u0148\5;\34")
buf.write(u"\2\u0148\u0149\3\2\2\2\u0149\u014a\b\33\5\2\u014a:\3")
buf.write(u"\2\2\2\u014b\u0151\5E!\2\u014c\u0151\5G\"\2\u014d\u0151")
buf.write(u"\5?\36\2\u014e\u0151\5A\37\2\u014f\u0151\5C \2\u0150")
buf.write(u"\u014b\3\2\2\2\u0150\u014c\3\2\2\2\u0150\u014d\3\2\2")
buf.write(u"\2\u0150\u014e\3\2\2\2\u0150\u014f\3\2\2\2\u0151<\3\2")
buf.write(u"\2\2\u0152\u0154\t\13\2\2\u0153\u0152\3\2\2\2\u0154>")
buf.write(u"\3\2\2\2\u0155\u0157\5=\35\2\u0156\u0155\3\2\2\2\u0157")
buf.write(u"\u0158\3\2\2\2\u0158\u0156\3\2\2\2\u0158\u0159\3\2\2")
buf.write(u"\2\u0159\u015b\3\2\2\2\u015a\u015c\7\"\2\2\u015b\u015a")
buf.write(u"\3\2\2\2\u015b\u015c\3\2\2\2\u015c@\3\2\2\2\u015d\u015f")
buf.write(u"\7%\2\2\u015e\u0160\t\5\2\2\u015f\u015e\3\2\2\2\u0160")
buf.write(u"\u0161\3\2\2\2\u0161\u015f\3\2\2\2\u0161\u0162\3\2\2")
buf.write(u"\2\u0162B\3\2\2\2\u0163\u0165\t\6\2\2\u0164\u0163\3\2")
buf.write(u"\2\2\u0165\u0166\3\2\2\2\u0166\u0164\3\2\2\2\u0166\u0167")
buf.write(u"\3\2\2\2\u0167\u0169\3\2\2\2\u0168\u016a\7\'\2\2\u0169")
buf.write(u"\u0168\3\2\2\2\u0169\u016a\3\2\2\2\u016aD\3\2\2\2\u016b")
buf.write(u"\u016f\7$\2\2\u016c\u016e\n\f\2\2\u016d\u016c\3\2\2\2")
buf.write(u"\u016e\u0171\3\2\2\2\u016f\u016d\3\2\2\2\u016f\u0170")
buf.write(u"\3\2\2\2\u0170\u0172\3\2\2\2\u0171\u016f\3\2\2\2\u0172")
buf.write(u"\u0173\7$\2\2\u0173F\3\2\2\2\u0174\u0178\7)\2\2\u0175")
buf.write(u"\u0177\n\r\2\2\u0176\u0175\3\2\2\2\u0177\u017a\3\2\2")
buf.write(u"\2\u0178\u0176\3\2\2\2\u0178\u0179\3\2\2\2\u0179\u017b")
buf.write(u"\3\2\2\2\u017a\u0178\3\2\2\2\u017b\u017c\7)\2\2\u017c")
buf.write(u"H\3\2\2\2&\2\3\4\5\6Q_n\u0080\u008d\u0098\u00a3\u00a8")
buf.write(u"\u00ac\u00af\u00b1\u00be\u00cf\u00dd\u00f2\u0101\u0104")
buf.write(u"\u0109\u011b\u0127\u0138\u0144\u0150\u0153\u0158\u015b")
buf.write(u"\u0161\u0166\u0169\u016f\u0178\b\7\4\2\7\5\2\7\3\2\6")
buf.write(u"\2\2\7\6\2\b\2\2")
return buf.getvalue()
class HTMLLexer(Lexer):
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [DFA(ds, i) for i, ds in enumerate(atn.decisionToState)]
TAG = 1
SCRIPT = 2
STYLE = 3
ATTVALUE = 4
HTML_COMMENT = 1
HTML_CONDITIONAL_COMMENT = 2
XML_DECLARATION = 3
CDATA = 4
DTD = 5
SCRIPTLET = 6
SEA_WS = 7
SCRIPT_OPEN = 8
STYLE_OPEN = 9
TAG_OPEN = 10
HTML_TEXT = 11
TAG_CLOSE = 12
TAG_SLASH_CLOSE = 13
TAG_SLASH = 14
TAG_EQUALS = 15
TAG_NAME = 16
TAG_WHITESPACE = 17
SCRIPT_BODY = 18
SCRIPT_SHORT_BODY = 19
STYLE_BODY = 20
STYLE_SHORT_BODY = 21
ATTVALUE_VALUE = 22
ATTRIBUTE = 23
channelNames = [u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN"]
modeNames = [u"DEFAULT_MODE", u"TAG", u"SCRIPT", u"STYLE", u"ATTVALUE"]
literalNames = [u"<INVALID>", u"'<'", u"'>'", u"'/>'", u"'/'", u"'='"]
symbolicNames = [
u"<INVALID>", u"HTML_COMMENT", u"HTML_CONDITIONAL_COMMENT",
u"XML_DECLARATION", u"CDATA", u"DTD", u"SCRIPTLET", u"SEA_WS",
u"SCRIPT_OPEN", u"STYLE_OPEN", u"TAG_OPEN", u"HTML_TEXT", u"TAG_CLOSE",
u"TAG_SLASH_CLOSE", u"TAG_SLASH", u"TAG_EQUALS", u"TAG_NAME",
u"TAG_WHITESPACE", u"SCRIPT_BODY", u"SCRIPT_SHORT_BODY", u"STYLE_BODY",
u"STYLE_SHORT_BODY", u"ATTVALUE_VALUE", u"ATTRIBUTE"
]
ruleNames = [
u"HTML_COMMENT", u"HTML_CONDITIONAL_COMMENT", u"XML_DECLARATION",
u"CDATA", u"DTD", u"SCRIPTLET", u"SEA_WS", u"SCRIPT_OPEN", u"STYLE_OPEN",
u"TAG_OPEN", u"HTML_TEXT", u"TAG_CLOSE", u"TAG_SLASH_CLOSE", u"TAG_SLASH",
u"TAG_EQUALS", u"TAG_NAME", u"TAG_WHITESPACE", u"HEXDIGIT", u"DIGIT",
u"TAG_NameChar", u"TAG_NameStartChar", u"SCRIPT_BODY",
u"SCRIPT_SHORT_BODY", u"STYLE_BODY", u"STYLE_SHORT_BODY",
u"ATTVALUE_VALUE", u"ATTRIBUTE", u"ATTCHAR", u"ATTCHARS", u"HEXCHARS",
u"DECCHARS", u"DOUBLE_QUOTE_STRING", u"SINGLE_QUOTE_STRING"
]
grammarFileName = u"HTMLLexer.g4"
def __init__(self, input=None, output=sys.stdout):
super(HTMLLexer, self).__init__(input, output=output)
self.checkVersion("4.7.1")
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA,
PredictionContextCache())
self._actions = None
self._predicates = None
| 57.964029
| 80
| 0.623185
| 4,105
| 16,114
| 2.421194
| 0.137881
| 0.118523
| 0.074555
| 0.083711
| 0.28202
| 0.197102
| 0.170842
| 0.089647
| 0.084817
| 0.08039
| 0
| 0.329026
| 0.092404
| 16,114
| 277
| 81
| 58.173285
| 0.350564
| 0.037669
| 0
| 0
| 0
| 0.623482
| 0.659932
| 0.607449
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008097
| false
| 0
| 0.016194
| 0
| 0.174089
| 0.004049
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
12968568835ac7ee28d6477ff14b3feded11599a
| 80
|
py
|
Python
|
src/hcb/__init__.py
|
Strilanc/honeycomb-boundaries
|
cc33baac44c7831bd643db81d0053f8ec6eae9d8
|
[
"Apache-2.0"
] | null | null | null |
src/hcb/__init__.py
|
Strilanc/honeycomb-boundaries
|
cc33baac44c7831bd643db81d0053f8ec6eae9d8
|
[
"Apache-2.0"
] | 2
|
2022-02-25T22:28:24.000Z
|
2022-03-23T21:09:04.000Z
|
src/hcb/__init__.py
|
Strilanc/honeycomb-boundaries
|
cc33baac44c7831bd643db81d0053f8ec6eae9d8
|
[
"Apache-2.0"
] | null | null | null |
import stim
_f = stim.Circuit.__repr__
stim.Circuit.__repr__ = lambda e: _f(e)
| 16
| 39
| 0.75
| 13
| 80
| 3.846154
| 0.538462
| 0.44
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1375
| 80
| 4
| 40
| 20
| 0.724638
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
12d7cccb3a310d8d20beabf375d561e725871945
| 271
|
py
|
Python
|
kolibri/core/__init__.py
|
MBKayro/kolibri
|
0a38a5fb665503cf8f848b2f65938e73bfaa5989
|
[
"MIT"
] | 545
|
2016-01-19T19:26:55.000Z
|
2022-03-20T00:13:04.000Z
|
kolibri/core/__init__.py
|
MBKayro/kolibri
|
0a38a5fb665503cf8f848b2f65938e73bfaa5989
|
[
"MIT"
] | 8,329
|
2016-01-19T19:32:02.000Z
|
2022-03-31T21:23:12.000Z
|
kolibri/core/__init__.py
|
MBKayro/kolibri
|
0a38a5fb665503cf8f848b2f65938e73bfaa5989
|
[
"MIT"
] | 493
|
2016-01-19T19:26:48.000Z
|
2022-03-28T14:35:05.000Z
|
"""TODO: Write something about this module (everything in the docstring
enters the docs)
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
default_app_config = "kolibri.core.apps.KolibriCoreConfig"
| 27.1
| 71
| 0.830258
| 35
| 271
| 5.942857
| 0.771429
| 0.144231
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114391
| 271
| 9
| 72
| 30.111111
| 0.866667
| 0.313653
| 0
| 0
| 0
| 0
| 0.196629
| 0.196629
| 0
| 0
| 0
| 0.111111
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0.25
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
12ddfd3cf03abe6c3730669235e3cc9b8c9f646f
| 966
|
py
|
Python
|
stubs/ev3_pybricks_v1_0_0/pybricks/ev3brick.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
stubs/ev3_pybricks_v1_0_0/pybricks/ev3brick.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
stubs/ev3_pybricks_v1_0_0/pybricks/ev3brick.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
"""
Module: 'pybricks.ev3brick' on LEGO EV3 v1.0.0
"""
# MCU: sysname=ev3, nodename=ev3, release=('v1.0.0',), version=('0.0.0',), machine=ev3
# Stubber: 1.3.2 - updated
from typing import Any
class Display:
""""""
_font_height = 8
def _next_line(self, *argv) -> Any:
pass
def _reset_text_history(self, *argv) -> Any:
pass
_valid_devices = None
def clear(self, *argv) -> Any:
pass
def image(self, *argv) -> Any:
pass
def text(self, *argv) -> Any:
pass
class Speaker:
""""""
_valid_devices = None
def beep(self, *argv) -> Any:
pass
def beeps(self, *argv) -> Any:
pass
def file(self, *argv) -> Any:
pass
def speech(self, *argv) -> Any:
pass
def tune(self, *argv) -> Any:
pass
battery = None
def buttons():
pass
display = None
def exit():
pass
def light():
pass
sound = None
stderr = None
| 13.232877
| 86
| 0.541408
| 126
| 966
| 4.063492
| 0.428571
| 0.15625
| 0.214844
| 0.292969
| 0.246094
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027068
| 0.311594
| 966
| 72
| 87
| 13.416667
| 0.742857
| 0.162526
| 0
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.361111
| false
| 0.361111
| 0.027778
| 0
| 0.527778
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
421a4fa3495b1994db6724fde00bf5f2fc779171
| 235
|
py
|
Python
|
api/admin.py
|
satyap54/SplitBill
|
e2245ac01e586fee00ea0e278829408e975e243c
|
[
"MIT"
] | null | null | null |
api/admin.py
|
satyap54/SplitBill
|
e2245ac01e586fee00ea0e278829408e975e243c
|
[
"MIT"
] | null | null | null |
api/admin.py
|
satyap54/SplitBill
|
e2245ac01e586fee00ea0e278829408e975e243c
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from . import models
# Register your models here.
admin.site.register(models.Trip)
admin.site.register(models.UserAccount)
admin.site.register(models.GangMember)
admin.site.register(models.Transaction)
| 29.375
| 39
| 0.825532
| 32
| 235
| 6.0625
| 0.4375
| 0.185567
| 0.350515
| 0.474227
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07234
| 235
| 8
| 40
| 29.375
| 0.889908
| 0.110638
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
42205474ef327593a620ca5365bfcd1fd26d3b6e
| 7,156
|
py
|
Python
|
plgx-esp/migrations/versions/698f286777f0_.py
|
eclecticiq/eiq-er-ce
|
ebb12d5c4e0ee144f8166576924b8ce8dc5dfc94
|
[
"MIT"
] | null | null | null |
plgx-esp/migrations/versions/698f286777f0_.py
|
eclecticiq/eiq-er-ce
|
ebb12d5c4e0ee144f8166576924b8ce8dc5dfc94
|
[
"MIT"
] | null | null | null |
plgx-esp/migrations/versions/698f286777f0_.py
|
eclecticiq/eiq-er-ce
|
ebb12d5c4e0ee144f8166576924b8ce8dc5dfc94
|
[
"MIT"
] | 2
|
2021-11-12T10:25:02.000Z
|
2022-03-30T06:33:52.000Z
|
"""empty message
Revision ID: 698f286777f0
Revises: a76be8b92780
Create Date: 2018-09-10 15:11:38.552110
"""
# revision identifiers, used by Alembic.
revision = "698f286777f0"
down_revision = "a76be8b92780"
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
def upgrade():
    """Apply this revision's schema changes.

    Adds three new tables (alert_distributed_query, node_email,
    alert_email), new JSONB/string columns on alerts, rule and
    distributed_query(_task), and tightens several columns to NOT NULL
    while dropping three unique constraints.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Link table pairing an alert with the distributed query it spawned.
    op.create_table(
        "alert_distributed_query",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("alert_id", sa.String(), nullable=False),
        sa.Column("distributed_query_id", sa.String(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    # Per-node e-mail addresses, with verification state, FK'd to node.
    op.create_table(
        "node_email",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("email_id", sa.String(), nullable=False),
        sa.Column("status", sa.String(), nullable=True),
        sa.Column("node_id", sa.Integer(), nullable=False),
        sa.Column("email_verified", sa.Boolean(), nullable=False),
        sa.Column("verification_token", sa.String(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ["node_id"],
            ["node.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # E-mail bodies generated for an alert, FK'd to both alerts and node.
    op.create_table(
        "alert_email",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("alert_id", sa.Integer(), nullable=False),
        sa.Column("status", sa.String(), nullable=True),
        sa.Column("node_id", sa.Integer(), nullable=False),
        sa.Column("body", sa.String(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ["alert_id"],
            ["alerts.id"],
        ),
        sa.ForeignKeyConstraint(
            ["node_id"],
            ["node.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # New nullable columns on alerts.
    op.add_column(
        u"alerts", sa.Column("recon_queries", postgresql.JSONB(), nullable=True)
    )
    op.add_column(u"alerts", sa.Column("result_log_id", sa.String(), nullable=True))
    # Relax alerts.message; tighten query_name / rule_id to NOT NULL.
    op.alter_column(
        u"alerts", "message", existing_type=postgresql.JSONB(), nullable=True
    )
    op.alter_column(u"alerts", "query_name", existing_type=sa.VARCHAR(), nullable=False)
    op.alter_column(u"alerts", "rule_id", existing_type=sa.INTEGER(), nullable=False)
    op.alter_column(
        u"carve_session", "carve_guid", existing_type=sa.VARCHAR(), nullable=False
    )
    op.alter_column(
        u"carve_session", "session_id", existing_type=sa.VARCHAR(), nullable=False
    )
    # distributed_query now records which alert (if any) triggered it.
    op.add_column(
        u"distributed_query", sa.Column("alert_id", sa.Integer(), nullable=True)
    )
    op.create_foreign_key(None, "distributed_query", "alerts", ["alert_id"], ["id"])
    op.add_column(
        u"distributed_query_task", sa.Column("data", postgresql.JSONB(), nullable=True)
    )
    op.alter_column(
        u"email_recipient", "status", existing_type=sa.VARCHAR(), nullable=False
    )
    op.alter_column(
        u"email_recipient",
        "updated_at",
        existing_type=postgresql.TIMESTAMP(),
        nullable=False,
    )
    # Recipient addresses no longer need to be unique.
    op.drop_constraint(
        u"email_recipient_recipient_key", "email_recipient", type_="unique"
    )
    op.alter_column(
        u"node_config", "apply_by_default", existing_type=sa.BOOLEAN(), nullable=False
    )
    op.alter_column(
        u"node_config", "config", existing_type=sa.VARCHAR(), nullable=False
    )
    op.alter_column(u"node_config", "name", existing_type=sa.VARCHAR(), nullable=False)
    op.alter_column(
        u"node_config",
        "updated_at",
        existing_type=postgresql.TIMESTAMP(),
        nullable=False,
    )
    op.alter_column(
        u"node_data", "data", existing_type=postgresql.JSONB(), nullable=False
    )
    op.alter_column(u"options", "option", existing_type=sa.VARCHAR(), nullable=False)
    op.alter_column(
        u"options", "updated_at", existing_type=postgresql.TIMESTAMP(), nullable=False
    )
    op.drop_constraint(u"options_name_key", "options", type_="unique")
    op.add_column(
        u"rule", sa.Column("recon_queries", postgresql.JSONB(), nullable=True)
    )
    op.alter_column(u"settings", "setting", existing_type=sa.VARCHAR(), nullable=False)
    op.alter_column(
        u"settings", "updated_at", existing_type=postgresql.TIMESTAMP(), nullable=False
    )
    op.drop_constraint(u"settings_name_key", "settings", type_="unique")
    # ### end Alembic commands ###
def downgrade():
    """Revert this revision's schema changes.

    Mirror image of upgrade(), applied in reverse order: restores the
    three unique constraints, relaxes the NOT NULL changes, drops the
    added columns, and finally drops the three new tables.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Restore the unique constraints dropped in upgrade().
    op.create_unique_constraint(u"settings_name_key", "settings", ["name"])
    op.alter_column(
        u"settings", "updated_at", existing_type=postgresql.TIMESTAMP(), nullable=True
    )
    op.alter_column(u"settings", "setting", existing_type=sa.VARCHAR(), nullable=True)
    op.drop_column(u"rule", "recon_queries")
    op.create_unique_constraint(u"options_name_key", "options", ["name"])
    op.alter_column(
        u"options", "updated_at", existing_type=postgresql.TIMESTAMP(), nullable=True
    )
    op.alter_column(u"options", "option", existing_type=sa.VARCHAR(), nullable=True)
    op.alter_column(
        u"node_data", "data", existing_type=postgresql.JSONB(), nullable=True
    )
    op.alter_column(
        u"node_config",
        "updated_at",
        existing_type=postgresql.TIMESTAMP(),
        nullable=True,
    )
    op.alter_column(u"node_config", "name", existing_type=sa.VARCHAR(), nullable=True)
    op.alter_column(u"node_config", "config", existing_type=sa.VARCHAR(), nullable=True)
    op.alter_column(
        u"node_config", "apply_by_default", existing_type=sa.BOOLEAN(), nullable=True
    )
    op.create_unique_constraint(
        u"email_recipient_recipient_key", "email_recipient", ["recipient"]
    )
    op.alter_column(
        u"email_recipient",
        "updated_at",
        existing_type=postgresql.TIMESTAMP(),
        nullable=True,
    )
    op.alter_column(
        u"email_recipient", "status", existing_type=sa.VARCHAR(), nullable=True
    )
    op.drop_column(u"distributed_query_task", "data")
    # NOTE(review): the FK was created with name=None (auto-generated name);
    # dropping by None relies on the backend's naming convention — verify
    # this works on the target PostgreSQL setup.
    op.drop_constraint(None, "distributed_query", type_="foreignkey")
    op.drop_column(u"distributed_query", "alert_id")
    op.alter_column(
        u"carve_session", "session_id", existing_type=sa.VARCHAR(), nullable=True
    )
    op.alter_column(
        u"carve_session", "carve_guid", existing_type=sa.VARCHAR(), nullable=True
    )
    op.alter_column(u"alerts", "rule_id", existing_type=sa.INTEGER(), nullable=True)
    op.alter_column(u"alerts", "query_name", existing_type=sa.VARCHAR(), nullable=True)
    op.alter_column(
        u"alerts", "message", existing_type=postgresql.JSONB(), nullable=False
    )
    op.drop_column(u"alerts", "result_log_id")
    op.drop_column(u"alerts", "recon_queries")
    # Drop the three tables added by upgrade().
    op.drop_table("alert_email")
    op.drop_table("node_email")
    op.drop_table("alert_distributed_query")
    # ### end Alembic commands ###
| 35.251232
| 88
| 0.65232
| 872
| 7,156
| 5.130734
| 0.111239
| 0.065713
| 0.092982
| 0.100134
| 0.836612
| 0.815601
| 0.753017
| 0.722843
| 0.69468
| 0.652436
| 0
| 0.009731
| 0.19578
| 7,156
| 202
| 89
| 35.425743
| 0.76768
| 0.041224
| 0
| 0.404762
| 0
| 0
| 0.203107
| 0.021688
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011905
| false
| 0
| 0.017857
| 0
| 0.029762
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4251dc86c8f4641295207e7c22579c5441c12b97
| 84
|
py
|
Python
|
tests/unit/unit_tests.py
|
bigSAS/critical-usg-backend
|
b31749eaa9fba42f6302ccb8156513855c7abee9
|
[
"MIT"
] | null | null | null |
tests/unit/unit_tests.py
|
bigSAS/critical-usg-backend
|
b31749eaa9fba42f6302ccb8156513855c7abee9
|
[
"MIT"
] | 1
|
2020-08-02T19:15:40.000Z
|
2020-08-02T19:15:40.000Z
|
tests/unit/unit_tests.py
|
bigSAS/critical-usg-backend
|
b31749eaa9fba42f6302ccb8156513855c7abee9
|
[
"MIT"
] | null | null | null |
import pytest


@pytest.mark.unit
@pytest.mark.example
def test_example():
    """Placeholder unit test: always passes; demonstrates the markers."""
| 10.5
| 20
| 0.738095
| 12
| 84
| 5.083333
| 0.666667
| 0.327869
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.154762
| 84
| 7
| 21
| 12
| 0.859155
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0.2
| 0.2
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
4252b390b10fd2572826a6188bf40fec938ca2c6
| 621
|
py
|
Python
|
dnsimple/service/__init__.py
|
mherrmann/dnsimple-python
|
a89127f0bafb2a001c902206fba87cbc4f3bc2d1
|
[
"MIT"
] | 12
|
2020-06-18T17:16:03.000Z
|
2022-03-23T08:35:49.000Z
|
dnsimple/service/__init__.py
|
mherrmann/dnsimple-python
|
a89127f0bafb2a001c902206fba87cbc4f3bc2d1
|
[
"MIT"
] | 129
|
2020-06-25T12:15:51.000Z
|
2022-03-23T09:42:16.000Z
|
dnsimple/service/__init__.py
|
mherrmann/dnsimple-python
|
a89127f0bafb2a001c902206fba87cbc4f3bc2d1
|
[
"MIT"
] | 6
|
2020-07-03T09:34:01.000Z
|
2021-12-20T04:29:59.000Z
|
from dnsimple.service.accounts import Accounts
from dnsimple.service.certificates import Certificates
from dnsimple.service.contacts import Contacts
from dnsimple.service.domains import Domains
from dnsimple.service.identity import Identity
from dnsimple.service.oauth import Oauth
from dnsimple.service.registrar import Registrar
from dnsimple.service.services import Services
from dnsimple.service.templates import Templates
from dnsimple.service.tlds import Tlds
from dnsimple.service.vanity_name_servers import VanityNameServers
from dnsimple.service.webhooks import Webhooks
from dnsimple.service.zones import Zones
| 44.357143
| 66
| 0.874396
| 80
| 621
| 6.7625
| 0.25
| 0.288355
| 0.456562
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083736
| 621
| 13
| 67
| 47.769231
| 0.950791
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
426f7f7c31b824159f4cbb54014ff58a95917da9
| 401
|
py
|
Python
|
target/types/example.py
|
ap-research-captcha-efficacy/target
|
60e45791b584c347145a82333a2b78046e06d4af
|
[
"MIT"
] | null | null | null |
target/types/example.py
|
ap-research-captcha-efficacy/target
|
60e45791b584c347145a82333a2b78046e06d4af
|
[
"MIT"
] | null | null | null |
target/types/example.py
|
ap-research-captcha-efficacy/target
|
60e45791b584c347145a82333a2b78046e06d4af
|
[
"MIT"
] | null | null | null |
"""
the only requirements of a CAPTCHA module is that it's generate function returns a tuple of (challenge, solution) strings
and that it takes one argument -- the modifiers (if there are any)
the challenge string should be a data URI
https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
prototypical implementation below:
"""
def generate(mod):
return ("data:,lol", "swej")
| 40.1
| 121
| 0.765586
| 64
| 401
| 4.75
| 0.796875
| 0.039474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137157
| 401
| 10
| 122
| 40.1
| 0.878613
| 0.84788
| 0
| 0
| 1
| 0
| 0.240741
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
427f974d17dc08d9fced5d67289d445dbc82f5bb
| 38
|
py
|
Python
|
tests/__init__.py
|
zapp29/OOV
|
2d7fe7095a5fef0cb3dc2ac26bbef2e6054a41ed
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
zapp29/OOV
|
2d7fe7095a5fef0cb3dc2ac26bbef2e6054a41ed
|
[
"MIT"
] | 55
|
2021-07-30T06:18:19.000Z
|
2022-03-31T07:19:52.000Z
|
tests/__init__.py
|
zapp29/OOV
|
2d7fe7095a5fef0cb3dc2ac26bbef2e6054a41ed
|
[
"MIT"
] | null | null | null |
"""Test suite for the OOV package."""
| 19
| 37
| 0.657895
| 6
| 38
| 4.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 38
| 1
| 38
| 38
| 0.78125
| 0.815789
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
42a43baec61b2af903d72c193e21698e7f2651e2
| 61
|
py
|
Python
|
src/radical/pilot/db/__init__.py
|
karahbit/radical.pilot
|
c611e1df781749deef899dcf5815728e1d8a962e
|
[
"MIT"
] | 47
|
2015-03-16T01:08:11.000Z
|
2022-02-02T10:36:39.000Z
|
src/radical/pilot/db/__init__.py
|
karahbit/radical.pilot
|
c611e1df781749deef899dcf5815728e1d8a962e
|
[
"MIT"
] | 1,856
|
2015-01-02T09:32:20.000Z
|
2022-03-31T21:45:06.000Z
|
src/radical/pilot/db/__init__.py
|
karahbit/radical.pilot
|
c611e1df781749deef899dcf5815728e1d8a962e
|
[
"MIT"
] | 28
|
2015-06-10T18:15:14.000Z
|
2021-11-07T04:36:45.000Z
|
""" Database abstraction layer
"""
from .database import *
| 10.166667
| 30
| 0.688525
| 6
| 61
| 7
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.180328
| 61
| 5
| 31
| 12.2
| 0.84
| 0.42623
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.