hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
8805bc2ef3f38bf609ec1e25b51d2872d6b22fd6
190
py
Python
FileManager/ConvertFile.py
lorganthesorn/CryptoArb
292f41cc8fe96473df8c5f67f8e7a5abeadcd692
[ "MIT" ]
null
null
null
FileManager/ConvertFile.py
lorganthesorn/CryptoArb
292f41cc8fe96473df8c5f67f8e7a5abeadcd692
[ "MIT" ]
null
null
null
FileManager/ConvertFile.py
lorganthesorn/CryptoArb
292f41cc8fe96473df8c5f67f8e7a5abeadcd692
[ "MIT" ]
null
null
null
from GetHistory.CrpytoCompare import * #import pandas as pd def hdf5_to_csv(fsym, tsym, exchange, granularity): df = find_history_file(fsym, tsym, exchange, granularity) df.to_csv()
31.666667
61
0.757895
27
190
5.148148
0.703704
0.071942
0.230216
0.388489
0.417266
0
0
0
0
0
0
0.006173
0.147368
190
6
62
31.666667
0.851852
0.1
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.25
0
0.5
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
5
8811cac7088be85b523623f268311f530cb14dec
76
py
Python
build/lib/geonomics/demos/__init__.py
AnushaPB/geonomics-1
deee0c377e81f509463eaf6f9d0b2f0809f2ddc3
[ "MIT" ]
8
2020-08-27T17:06:04.000Z
2021-09-17T22:55:07.000Z
build/lib/geonomics/demos/__init__.py
AnushaPB/geonomics-1
deee0c377e81f509463eaf6f9d0b2f0809f2ddc3
[ "MIT" ]
null
null
null
build/lib/geonomics/demos/__init__.py
AnushaPB/geonomics-1
deee0c377e81f509463eaf6f9d0b2f0809f2ddc3
[ "MIT" ]
2
2020-08-28T23:45:28.000Z
2021-01-25T21:47:40.000Z
from . import _IBD_IBE from . import _simult_select from . import _yosemite
19
28
0.802632
11
76
5.090909
0.636364
0.535714
0
0
0
0
0
0
0
0
0
0
0.157895
76
3
29
25.333333
0.875
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
7151ee421aa4bcedc483815f675f880a930d6e7c
100
py
Python
app/admin/__init__.py
mworia-Br/super-sendit
1c7634e679c09fb9392dac9920f49d77f525f7d6
[ "MIT" ]
1
2021-05-22T09:48:30.000Z
2021-05-22T09:48:30.000Z
app/admin/__init__.py
mworia-Br/super-sendit
1c7634e679c09fb9392dac9920f49d77f525f7d6
[ "MIT" ]
3
2018-10-31T13:21:04.000Z
2021-06-01T23:02:47.000Z
app/admin/__init__.py
mworia-Br/super-sendit
1c7634e679c09fb9392dac9920f49d77f525f7d6
[ "MIT" ]
6
2018-11-12T15:33:29.000Z
2021-07-31T05:48:21.000Z
from flask import Blueprint from .admin_views import * admin_blueprint=Blueprint("admin", __name__)
25
44
0.82
13
100
5.846154
0.538462
0
0
0
0
0
0
0
0
0
0
0
0.1
100
4
44
25
0.844444
0
0
0
0
0
0.049505
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0.666667
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
1
0
5
716b2536c4e54b188bb932cd8d780d789ec61cf5
148
py
Python
latteys/latteys/doctype/discharge_flow/test_discharge_flow.py
hrgadesha/lattyeys
428b752ac99620ac7ad706fd305f07210bdcb315
[ "MIT" ]
1
2021-09-10T03:51:22.000Z
2021-09-10T03:51:22.000Z
latteys/latteys/doctype/discharge_flow/test_discharge_flow.py
hrgadesha/lattyeys
428b752ac99620ac7ad706fd305f07210bdcb315
[ "MIT" ]
null
null
null
latteys/latteys/doctype/discharge_flow/test_discharge_flow.py
hrgadesha/lattyeys
428b752ac99620ac7ad706fd305f07210bdcb315
[ "MIT" ]
null
null
null
# Copyright (c) 2021, B2Grow and Contributors # See license.txt # import frappe import unittest class TestDischargeFlow(unittest.TestCase): pass
16.444444
45
0.783784
18
148
6.444444
0.888889
0
0
0
0
0
0
0
0
0
0
0.03937
0.141892
148
8
46
18.5
0.874016
0.493243
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
0
0
0
5
716c179e3f07012cc320dbcfab00fc7ec6b5611a
188
py
Python
lrs/tests/testkafka.py
zsh-paradise/whty_ADL_LRS
c027f2c3fb8305cd8c037ff4449e34f4f340d81e
[ "Apache-2.0" ]
null
null
null
lrs/tests/testkafka.py
zsh-paradise/whty_ADL_LRS
c027f2c3fb8305cd8c037ff4449e34f4f340d81e
[ "Apache-2.0" ]
null
null
null
lrs/tests/testkafka.py
zsh-paradise/whty_ADL_LRS
c027f2c3fb8305cd8c037ff4449e34f4f340d81e
[ "Apache-2.0" ]
null
null
null
from kafka import KafkaClient, SimpleProducer, SimpleConsumer #kafka = KafkaClient("10.5.10.249:9092") #producer = SimpleProducer(kafka) #producer.send_messages("test110","Hello world!")
31.333333
61
0.781915
22
188
6.636364
0.727273
0
0
0
0
0
0
0
0
0
0
0.086705
0.079787
188
6
62
31.333333
0.757225
0.632979
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
7171471a60a9df215204b9e7d55246b57588ed3e
2,014
py
Python
Math Softwares/Softwares/vetores.py
artemis-fx/Math
ee4ad0855910592e706edc9366cee7e886448a25
[ "MIT" ]
null
null
null
Math Softwares/Softwares/vetores.py
artemis-fx/Math
ee4ad0855910592e706edc9366cee7e886448a25
[ "MIT" ]
null
null
null
Math Softwares/Softwares/vetores.py
artemis-fx/Math
ee4ad0855910592e706edc9366cee7e886448a25
[ "MIT" ]
null
null
null
import math from time import sleep while True: print('''[1] Para vetores no PLANO [2] Para vetores no ESPAÇO''') res = int(input('Qual a sua escolha? ')) if res == 1: v1x = float(input('Digite o Valor de X para o Primeiro vetor: ')) v1y = float(input('Digite o Valor de Y para o Primeiro vetor: ')) v2x = float(input('Digite o Valor de X para o Segundo vetor: ')) v2y = float(input('Digite o Valor de Y para o Segundo vetor: ')) modv1 = math.sqrt(v1x**2 + v1y**2) print(modv1) modv2 = math.sqrt(v2x**2 + v2y**2) print(modv2) mot = modv1 * modv2 mv = v1x*v2x + v1y*v2y print(mv) cosseno = mv/mot print(cosseno) coss = math.acos(cosseno) print(f'O Ângulo formado por esses 2 vetores é {math.degrees(coss):.1f}') elif res == 2: v1x = float(input('Digite o Valor de X para o Primeiro vetor: ')) v1y = float(input('Digite o Valor de Y para o Primeiro vetor: ')) v1z = float(input('Digite o Valor de Z para o Primeiro vetor: ')) v2x = float(input('Digite o Valor de X para o Segundo vetor: ')) v2y = float(input('Digite o Valor de Y para o Segundo vetor: ')) v2z = float(input('Digite o Valor de Z para o Segundo vetor: ')) modv1 = math.sqrt(v1x ** 2 + v1y ** 2 + v1z ** 2) print(modv1) modv2 = math.sqrt(v2x ** 2 + v2y ** 2 + v2z ** 2) print(modv2) mot = modv1 * modv2 mv = v1x * v2x + v1y * v2y + v1z * v2z print(mv) cosseno = mv / mot print(cosseno) coss = math.acos(cosseno) print(f'O Ângulo formado por esses 2 vetores é {math.degrees(coss):.1f}') else: print('TENTATIVA ÍNVALIDA TENTE NOVAMENTE!!') con = ' ' while con not in 'SN': con = str(input('Quer continuar? [S/N] ')).upper()[0] if con == 'N': break print('Foi bom tem a sua companhia!') print('ENCERRANDO.....') sleep(3) print('ATÉ MAIS... ')
38.730769
81
0.565045
299
2,014
3.80602
0.274247
0.087873
0.140598
0.149385
0.735501
0.735501
0.735501
0.735501
0.735501
0.691564
0
0.044034
0.300894
2,014
51
82
39.490196
0.764205
0
0
0.470588
0
0
0.367428
0.023833
0
0
0
0
0
1
0
false
0
0.039216
0
0.039216
0.294118
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
718407ea13f1cfd71fbec01a93ca779f26227baa
106
py
Python
egresos/admin.py
jmjacquet/IronWeb
974d7fca8db69ffcfec15325cdb641a1b4b2c526
[ "MIT" ]
null
null
null
egresos/admin.py
jmjacquet/IronWeb
974d7fca8db69ffcfec15325cdb641a1b4b2c526
[ "MIT" ]
9
2020-09-22T12:34:00.000Z
2021-09-10T16:32:04.000Z
egresos/admin.py
jmjacquet/IronWeb
974d7fca8db69ffcfec15325cdb641a1b4b2c526
[ "MIT" ]
null
null
null
from django.contrib import admin from ggcontable.settings import * from comprobantes.models import *
11.777778
33
0.792453
13
106
6.461538
0.692308
0
0
0
0
0
0
0
0
0
0
0
0.160377
106
8
34
13.25
0.94382
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
71927feee78032478023793c35c564e4fc4f6bec
109
py
Python
login.py
strivingwl/test27
32ca08994690fbcb351ee43a8f4759fe1ea59697
[ "MIT" ]
null
null
null
login.py
strivingwl/test27
32ca08994690fbcb351ee43a8f4759fe1ea59697
[ "MIT" ]
null
null
null
login.py
strivingwl/test27
32ca08994690fbcb351ee43a8f4759fe1ea59697
[ "MIT" ]
null
null
null
num=1 <<<<<<< HEAD num=2 num3=333333 ======= num2=2 num3=3 >>>>>>> 43ea9b4f1c40760264dc7c48a304676b9c4d8f23
10.9
48
0.66055
12
109
6
0.75
0.138889
0
0
0
0
0
0
0
0
0
0.408163
0.100917
109
9
49
12.111111
0.326531
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
5
71d339c99996f14b352012e4d3fa989a56d7b998
65
py
Python
bempy/__init__.py
svetlyak40wt/bempy
ad87982d17c2d14c344d9e3d91a48c37dfb72535
[ "BSD-3-Clause" ]
1
2015-04-29T15:19:45.000Z
2015-04-29T15:19:45.000Z
bempy/__init__.py
svetlyak40wt/bempy
ad87982d17c2d14c344d9e3d91a48c37dfb72535
[ "BSD-3-Clause" ]
null
null
null
bempy/__init__.py
svetlyak40wt/bempy
ad87982d17c2d14c344d9e3d91a48c37dfb72535
[ "BSD-3-Clause" ]
1
2019-06-10T16:08:54.000Z
2019-06-10T16:08:54.000Z
from .blocks import block, ImmediateResponse, b, context_blocks
21.666667
63
0.815385
8
65
6.5
0.875
0
0
0
0
0
0
0
0
0
0
0
0.123077
65
2
64
32.5
0.912281
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
e0a2c76421983c4525ce292a1268208a9d1a1b27
175
py
Python
qore/algorithms/__init__.py
HaoTy/qore
2d866615bb05c5b8a5d6f6c7a2c1ca1008e7851b
[ "BSD-3-Clause" ]
null
null
null
qore/algorithms/__init__.py
HaoTy/qore
2d866615bb05c5b8a5d6f6c7a2c1ca1008e7851b
[ "BSD-3-Clause" ]
null
null
null
qore/algorithms/__init__.py
HaoTy/qore
2d866615bb05c5b8a5d6f6c7a2c1ca1008e7851b
[ "BSD-3-Clause" ]
null
null
null
from .asp import ASP from .pseudoflow import Pseudoflow from qiskit.algorithms import QAOA, VQE, NumPyMinimumEigensolver as ExactDiagonalization from .peps_ite import PEPSITE
35
88
0.851429
22
175
6.727273
0.636364
0
0
0
0
0
0
0
0
0
0
0
0.114286
175
4
89
43.75
0.954839
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
e0b0017e45314c6504e272b696691e4e4fd1acfe
6,415
py
Python
tests/validate_config_test.py
bnemanich/open-ce
df79cdb0779814d6500cc1f7d19b376b7cce3c90
[ "Apache-2.0" ]
null
null
null
tests/validate_config_test.py
bnemanich/open-ce
df79cdb0779814d6500cc1f7d19b376b7cce3c90
[ "Apache-2.0" ]
null
null
null
tests/validate_config_test.py
bnemanich/open-ce
df79cdb0779814d6500cc1f7d19b376b7cce3c90
[ "Apache-2.0" ]
null
null
null
# ***************************************************************** # # Licensed Materials - Property of IBM # # (C) Copyright IBM Corp. 2020. All Rights Reserved. # # US Government Users Restricted Rights - Use, duplication or # disclosure restricted by GSA ADP Schedule Contract with IBM Corp. # # ***************************************************************** import sys import os import pathlib import pytest import imp test_dir = pathlib.Path(__file__).parent.absolute() sys.path.append(os.path.join(test_dir, '..', 'open-ce')) import helpers open_ce = imp.load_source('open_ce', os.path.join(test_dir, '..', 'open-ce', 'open-ce')) import validate_config from errors import OpenCEError def test_validate_config(mocker): ''' This is a complete test of `validate_config`. ''' dirTracker = helpers.DirTracker() mocker.patch( 'os.mkdir', return_value=0 #Don't worry about making directories. ) mocker.patch( 'os.system', return_value=0 ) mocker.patch( 'utils.run_command_capture', side_effect=(lambda x: helpers.validate_cli(x, expect=["conda create --dry-run", "upstreamdep1 2.3.*", "upstreamdep2 2.*"], reject=["package"], #No packages from the env files should show up in the create command. 
retval=[True, "", ""])) ) mocker.patch( 'os.getcwd', side_effect=dirTracker.mocked_getcwd ) mocker.patch( 'os.chdir', side_effect=dirTracker.validate_chdir ) package_deps = {"package11": ["package15"], "package12": ["package11"], "package13": ["package12", "package14"], "package14": ["package15", "package16"], "package15": [], "package16": ["package15"], "package21": ["package13"], "package22": ["package21"]} mocker.patch( 'conda_build.api.render', side_effect=(lambda path, *args, **kwargs: helpers.mock_renderer(os.getcwd(), package_deps)) ) env_file = os.path.join(test_dir, 'test-env2.yaml') open_ce._main(["validate", validate_config.COMMAND, "--conda_build_config", "./conda_build_config.yaml", env_file, "--python_versions", "3.6", "--build_types", "cuda"]) def test_validate_negative(mocker): ''' This is a negative test of `validate_config` where the dry-run fails. ''' dirTracker = helpers.DirTracker() mocker.patch( 'os.mkdir', return_value=0 #Don't worry about making directories. ) mocker.patch( 'os.system', return_value=0 ) mocker.patch( 'utils.run_command_capture', side_effect=(lambda x: helpers.validate_cli(x, expect=["conda create --dry-run", "upstreamdep1 2.3.*", #Checks that the value from the default config file is used. "external_dep1", # Checks that the external dependencies were used. "external_dep2 5.2.*", # Checks that the external dependencies were used. "external_dep3=5.6.*"], # Checks that the external dependencies were used. 
reject=["package"], retval=[False, "", ""])) ) mocker.patch( 'os.getcwd', side_effect=dirTracker.mocked_getcwd ) mocker.patch( 'os.chdir', side_effect=dirTracker.validate_chdir ) package_deps = {"package11": ["package15"], "package12": ["package11"], "package13": ["package12", "package14"], "package14": ["package15", "package16"], "package15": [], "package16": ["package15"], "package21": ["package13"], "package22": ["package21"]} mocker.patch( 'conda_build.api.render', side_effect=(lambda path, *args, **kwargs: helpers.mock_renderer(os.getcwd(), package_deps)) ) env_file = os.path.join(test_dir, 'test-env2.yaml') with pytest.raises(OpenCEError) as err: open_ce._main(["validate", validate_config.COMMAND, "--conda_build_config", "./conda_build_config.yaml", env_file, "--python_versions", "3.6", "--build_types", "cuda"]) assert "Error validating \"./conda_build_config.yaml\" for " in str(err.value) assert "Dependencies are not compatible.\nCommand:\nconda create" in str(err.value) def test_validate_bad_env(mocker): ''' This is a negative test of `validate_config` where the env file is bad. ''' dirTracker = helpers.DirTracker() mocker.patch( 'os.mkdir', return_value=0 #Don't worry about making directories. 
) mocker.patch( 'os.system', return_value=0 ) mocker.patch( 'os.getcwd', side_effect=dirTracker.mocked_getcwd ) mocker.patch( 'os.chdir', side_effect=dirTracker.validate_chdir ) package_deps = {"package11": ["package15"], "package12": ["package11"], "package13": ["package12", "package14"], "package14": ["package15", "package16"], "package15": [], "package16": ["package15"], "package21": ["package13"], "package22": ["package21"]} mocker.patch( 'conda_build.api.render', side_effect=(lambda path, *args, **kwargs: helpers.mock_renderer(os.getcwd(), package_deps)) ) env_file = os.path.join(test_dir, 'test-env-invalid1.yaml') with pytest.raises(OpenCEError) as err: open_ce._main(["validate", validate_config.COMMAND, "--conda_build_config", "./conda_build_config.yaml", env_file, "--python_versions", "3.6", "--build_types", "cuda"]) assert "Error validating \"./conda_build_config.yaml\" for " in str(err.value) assert "Unexpected key chnnels was found in " in str(err.value)
41.121795
176
0.541387
639
6,415
5.270736
0.251956
0.055523
0.046318
0.020784
0.764252
0.764252
0.764252
0.73842
0.709323
0.709323
0
0.030734
0.31021
6,415
155
177
41.387097
0.730395
0.143258
0
0.694656
0
0
0.244432
0.044359
0
0
0
0
0.030534
1
0.022901
false
0
0.061069
0
0.083969
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
e0fdb4a6ad1d5dc3b19163d56c0a53408951162f
178
py
Python
src/vimpdb/errors.py
dtrckd/vimpdb
1171938751127d23f66f6b750dd79166c64bdf20
[ "MIT" ]
110
2015-01-11T06:50:42.000Z
2021-07-07T20:08:39.000Z
src/vimpdb/errors.py
dtrckd/vimpdb
1171938751127d23f66f6b750dd79166c64bdf20
[ "MIT" ]
8
2015-06-03T10:23:41.000Z
2021-05-06T15:25:47.000Z
src/vimpdb/errors.py
dtrckd/vimpdb
1171938751127d23f66f6b750dd79166c64bdf20
[ "MIT" ]
24
2015-03-03T16:35:12.000Z
2022-01-19T16:24:06.000Z
class BadRCFile(Exception): pass class ReturnCodeError(Exception): pass class BrokenConfiguration(Exception): pass class RemoteUnavailable(Exception): pass
11.866667
37
0.741573
16
178
8.25
0.4375
0.393939
0.409091
0
0
0
0
0
0
0
0
0
0.191011
178
14
38
12.714286
0.916667
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
4606496c1323884b351817eaf53d52bcba080a88
138
py
Python
functions/notification/repositories.py
tomdewildt/ada
f9800c023ac8b70584b0d2a27a6b4f3b09acc3d9
[ "MIT" ]
null
null
null
functions/notification/repositories.py
tomdewildt/ada
f9800c023ac8b70584b0d2a27a6b4f3b09acc3d9
[ "MIT" ]
2
2022-03-19T20:42:43.000Z
2022-03-19T20:57:41.000Z
functions/notification/repositories.py
tomdewildt/ada
f9800c023ac8b70584b0d2a27a6b4f3b09acc3d9
[ "MIT" ]
1
2022-03-23T21:18:58.000Z
2022-03-23T21:18:58.000Z
class NotificationPrintRepository: def __init__(self): pass def send_notification(self, message): print(message)
19.714286
41
0.681159
13
138
6.846154
0.769231
0
0
0
0
0
0
0
0
0
0
0
0.246377
138
6
42
23
0.855769
0
0
0
0
0
0
0
0
0
0
0
0
1
0.4
false
0.2
0
0
0.6
0.2
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
5
460d2e52b2fb51da4b0162652003424975714004
66
py
Python
what_is_the_name_main_in_python/demo2.py
NightmareQAQ/python-notes
4e766be06073a495ff9654f0dd8c0bb03310c559
[ "MIT" ]
106
2017-05-02T10:25:50.000Z
2022-03-23T14:57:28.000Z
what_is_the_name_main_in_python/demo2.py
NightmareQAQ/python-notes
4e766be06073a495ff9654f0dd8c0bb03310c559
[ "MIT" ]
2
2021-01-14T15:07:15.000Z
2021-12-21T07:18:05.000Z
what_is_the_name_main_in_python/demo2.py
NightmareQAQ/python-notes
4e766be06073a495ff9654f0dd8c0bb03310c559
[ "MIT" ]
42
2017-07-31T07:07:38.000Z
2021-12-26T09:36:55.000Z
from demo1 import a1_func print('demo2.py is called') a1_func()
11
27
0.742424
12
66
3.916667
0.833333
0.255319
0
0
0
0
0
0
0
0
0
0.071429
0.151515
66
5
28
13.2
0.767857
0
0
0
0
0
0.276923
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0.333333
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
1ca7d0e16e76cb7f5b0b3c6c2f89b44dd4b87c01
15,245
py
Python
maskrcnn_benchmark/utils/visual.py
SIAAAAAA/MMT-PSM
0835c01c5010d3337778f452e9d96416e0f8a11a
[ "MIT" ]
41
2020-07-22T03:55:08.000Z
2022-02-27T12:04:41.000Z
maskrcnn_benchmark/utils/visual.py
SIAAAAAA/MMT-PSM
0835c01c5010d3337778f452e9d96416e0f8a11a
[ "MIT" ]
5
2020-11-08T08:47:34.000Z
2021-07-09T03:53:42.000Z
maskrcnn_benchmark/utils/visual.py
SIAAAAAA/MMT-PSM
0835c01c5010d3337778f452e9d96416e0f8a11a
[ "MIT" ]
5
2020-10-13T11:09:53.000Z
2021-07-28T12:41:53.000Z
import cv2 import sys import os sys.path.append('..') import numpy as np from maskrcnn_benchmark.structures.bounding_box import BoxList from preprocess.colors import get_colors from maskrcnn_benchmark.structures.image_list import ImageList from maskrcnn_benchmark.structures.segmentation_mask import SegmentationMask, Polygons from maskrcnn_benchmark.structures.bounding_box import BoxList from pycocotools import mask as maskUtils import openslide as ops import random import itertools from maskrcnn_benchmark.utils.miscellaneous import maskToPolygons import pdb def vis_bbox(bboxlist, imagelist, normalize = [102.9801, 115.9465, 122.7717] ): if isinstance(imagelist, ImageList): images = [] for i, bbox in enumerate(bboxlist): if bbox.mode != 'xyxy': bbox = bbox.convert('xyxy') image = imagelist.tensors[i].numpy() image = np.squeeze(image) image = np.transpose(image,(1,2,0)) image +=normalize image = image.copy() for j in range(bbox.bbox.shape[0]): box_coordinate = bbox.bbox[j].numpy().astype(np.int32) color = get_colors(j) image = cv2.rectangle(image,tuple(box_coordinate[:2]),tuple(box_coordinate[2:]), color=color.tuple(),thickness=3) images.append(image) else: bbox = bboxlist image = imagelist if bbox.mode != 'xyxy': bbox = bbox.convert('xyxy') image = image.copy() for j in range(bbox.bbox.shape[0]): box_coordinate = bbox.bbox[j].numpy().astype(np.int32) color = get_colors(j) image = cv2.rectangle(image, tuple(box_coordinate[:2]), tuple(box_coordinate[2:]), color=color.tuple(), thickness=3) images =cv2.cvtColor(image, cv2.COLOR_BGR2RGB) return images def vis_mask(masklist, image, normalize =[102.9801, 115.9465, 122.7717] ): if isinstance(masklist, SegmentationMask): for i, polygon in enumerate(SegmentationMask): poly = polygon[0].polygons mask = np.asarray(poly[0]) mask = np.reshape(mask, (int(len(mask) / 2), 2)).astype( np.int32) color = get_colors(i) image = np.asarray(image) cv2.polylines(np.asarray(image), [mask], 1, color.tuple(), 3) image = cv2.cvtColor(image, 
cv2.COLOR_BGR2RGB) else: for j, mask in enumerate(masklist): mask = np.asarray(mask[0]) mask = np.reshape(mask,(int(len(mask)/2),2)).astype(np.int32) color = get_colors(j) image = np.asarray(image) cv2.polylines(np.asarray(image),[mask], 1, color.tuple(),3) image =cv2.cvtColor(image, cv2.COLOR_BGR2RGB) return image def vis_predict(dataset, gt, dt, name, show_gt =True): # input: list of dicts def convert_to_np(x): rle = x['segmentation'] arr = maskUtils.decode(rle) return arr dt = map(convert_to_np, dt) name, w, h = name.split('~') # img = dataset._imgpath%name img = os.path.join(dataset.root, name + '.png' ) img = ops.open_slide(img) img = img.read_region((int(w),int(h)), 0, (dataset.maxWS, dataset.maxWS)).convert("RGB") img = np.asarray(img) canvas = np.zeros_like(img, dtype = np.uint8) for idx, d in enumerate(dt): if d.shape != (1000, 1000): import pdb; pdb.set_trace() r,g,b = get_colors(idx) canvas[:, :, 0] = canvas[:, :, 0] + b * d canvas[:, :, 1] = canvas[:, :, 1] + g * d canvas[:, :, 2] = canvas[:, :, 2] + r * d canvas2 = np.zeros_like(img, dtype = np.uint8) if show_gt: gt = map(convert_to_np, gt) for idx, ins in enumerate(gt): if ins.shape != (1000, 1000): import pdb; pdb.set_trace() r, g, b = get_colors(idx ) canvas2[:, :, 0] = canvas2[:, :, 0] + b * ins canvas2[:, :, 1] = canvas2[:, :, 1] + g * ins canvas2[:, :, 2] = canvas2[:, :, 2] + r * ins img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR) add_img = cv2.addWeighted(img,0.5, canvas,0.5,0 ) add_img2 = cv2.addWeighted(img,0.5, canvas2,0.5,0 ) return add_img,add_img2 # def vis_mask(masklist, imagelist, normalize =[102.9801, 115.9465, 122.7717] ): # if isinstance(masklist, SegmentationMask): # for i, polygon in enumerate(SegmentationMask): # poly = polygon[0].convert('mask') # # # else: # image = imagelist # for j, mask in enumerate(masklist): # mask = np.asarray(mask[0]) # mask = np.reshape(mask,(int(len(mask)/2),2)).astype(np.int32) # color = get_colors(j) # image = np.asarray(image) # 
cv2.polylines(np.asarray(image),[mask], 1, color.tuple(),3) # image =cv2.cvtColor(image, cv2.COLOR_BGR2RGB) # # return image def display_instance(dataset, image_name, gt, dt ,show_masks = False, show_bbox = True, show_gt = True, alpha = 0.5, show_caption = True ): ''' :param image: h,w,c :param dt, gt : dict :param title: (optional) Figure title :param figsize:(optional) the size of the image :param color: (optional) An array or colors to use with each object :param captions:(optional) A list of strings to use as captions for each object :return: ''' # input: list of dicts def convert_seg_to_np(x): rle = x['segmentation'] arr = maskUtils.decode(rle) return arr seg_dt = list(map(convert_seg_to_np, dt)) name, w, h = image_name.split('~') # img = dataset._imgpath%name try: img = os.path.join(dataset.root, name + '.png') img = ops.open_slide(img) except: img = os.path.join(dataset.root,'image', name + '.png') img = ops.open_slide(img) # pdb.set_trace() # img = img.read_region(0, 0, 0, (3152, 2760)).convert("RGB") img = img.read_region((int(w),int(h)), 0, (dataset.maxWS, dataset.maxWS)).convert("RGB") img = np.asarray(img) img1 = cv2.cvtColor(img, cv2.COLOR_RGB2BGR) img2 = img1.copy() # canvas = np.zeros_like(img, dtype = np.uint8) # 1. draw masks # pdb.set_trace() if show_masks: for idx, d in enumerate(seg_dt): r,g,b = get_colors(idx) # visualize masks # convert list to numpy img1[:, :, 0] = img1[:, :, 0] * ( d == 0 ) + ( d > 0 ) * ((b * d * alpha) + img1[:, :, 0] * (1 - alpha)) img1[:, :, 1] = img1[:, :, 1] * ( d == 0 ) + ( d > 0 ) * ((g * d * alpha) + img1[:, :, 1] * (1 - alpha)) img1[:, :, 2] = img1[:, :, 2] * ( d == 0 ) + ( d > 0 ) * ((r * d * alpha) + img1[:, :, 2] * (1 - alpha)) # 2. 
show others # pdb.set_trace() for idx, d in enumerate(seg_dt): r,g,b = get_colors(idx) # visualize masks contour_list = maskToPolygons(d) cv2.polylines(img1, contour_list, True, (b,g,r), thickness= 1) if show_bbox: bbox = dt[idx]['bbox'] cv2.rectangle(img1, (round(bbox[0]),round( bbox[1])), (round(bbox[2]), round(bbox[3])), (b,g,r), thickness= 1) # add information class_id = dt[idx]['category_id'][0] score = dt[idx]['score'] if show_caption: x = random.randint(int(bbox[1]), round((bbox[1] + bbox[3])/2)) caption = "{} {:.3f}".format(class_id, score) cv2.putText(img1, caption, (round(bbox[0]), x), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (b,g,r),1, cv2.LINE_AA ) # canvas2 = np.zeros_like(img, dtype = np.uint8) # img2 = None if show_gt: # 1. show masks pdb.set_trace() seg_gt = list(map(convert_seg_to_np, gt)) if show_masks: for idx, ins in enumerate(seg_gt): r, g, b = get_colors(idx ) img2[:, :, 0] =img2[:, :, 0] * ( ins == 0 ) + ( ins > 0 ) * ((b * ins * alpha) + img2[:, :, 0] * (1 - alpha)) img2[:, :, 1] =img2[:, :, 1] * ( ins == 0 ) + ( ins > 0 ) * ((g * ins * alpha) + img2[:, :, 1] * (1 - alpha)) img2[:, :, 2] =img2[:, :, 2] * ( ins == 0 ) + ( ins > 0 ) * ((r * ins * alpha) + img2[:, :, 2] * (1 - alpha)) # 2. 
show others for idx, ins in enumerate(seg_gt): r, g, b = get_colors(idx) contour_list = maskToPolygons(ins) cv2.polylines(img2, contour_list, True, (b,g,r), thickness=2) if show_bbox: bbox = gt[idx]['bbox'] cv2.rectangle(img2, (round(bbox[0]), round(bbox[1])), (round(bbox[2]), round(bbox[3])), (b,g,r), thickness=3) x = random.randint(int(bbox[1]), round((bbox[1] + bbox[3]) / 2)) class_id = gt[idx]['category_id'][0] caption = "{}".format(class_id) cv2.putText(img2, caption, (round(bbox[0]), x), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (b,g,r),2, cv2.LINE_AA) return img1,img2 def visualize_pseudo_label(mask, image, alpha = 0.5): RLES=[] for segm in mask.polygons: rles = maskUtils.frPyObjects( [p.numpy() for p in segm.polygons], 800, 800 ) rle = maskUtils.merge(rles) RLES.append(rle) for idx, cyto in enumerate(RLES): cyto_mask = maskUtils.decode(cyto) r, g, b = get_colors(int(2 * idx)) image[:, :, 0] = image[:, :, 0] * (cyto_mask == 0) + (cyto_mask > 0) * ( (b * alpha) + image[:, :, 0] * (1 - alpha)) image[:, :, 1] = image[:, :, 1] * (cyto_mask == 0) + (cyto_mask > 0) * ( (g * alpha) + image[:, :, 1] * (1 - alpha)) image[:, :, 2] = image[:, :, 2] * (cyto_mask == 0) + (cyto_mask > 0) * ( (r * alpha) + image[:, :, 2] * (1 - alpha)) return image def display_instance_gen_rle(image, cyto_list, nuclei_list, alpha = 0.5): h, w, _ = image.shape for idx, cyto in enumerate(cyto_list): cyto_mask = maskUtils.decode(cyto) r, g, b = get_colors(int(2 * idx)) image[:, :, 0] = image[:, :, 0] * (cyto_mask == 0) + (cyto_mask > 0) * ( (b * alpha) + image[:, :, 0] * (1 - alpha)) image[:, :, 1] = image[:, :, 1] * (cyto_mask == 0) + (cyto_mask > 0) * ( (g * alpha) + image[:, :, 1] * (1 - alpha)) image[:, :, 2] = image[:, :, 2] * (cyto_mask == 0) + (cyto_mask > 0) * ( (r * alpha) + image[:, :, 2] * (1 - alpha)) for idx, cyto in enumerate(nuclei_list): cyto_mask = maskUtils.decode(cyto) r, g, b = get_colors(int(2 * idx + 1)) image[:, :, 0] = image[:, :, 0] * (cyto_mask == 0) + (cyto_mask > 0) * ( (b * 
alpha) + image[:, :, 0] * (1 - alpha)) image[:, :, 1] = image[:, :, 1] * (cyto_mask == 0) + (cyto_mask > 0) * ( (g * alpha) + image[:, :, 1] * (1 - alpha)) image[:, :, 2] = image[:, :, 2] * (cyto_mask == 0) + (cyto_mask > 0) * ( (r * alpha) + image[:, :, 2] * (1 - alpha)) return image def display_instance_gen(image, cyto_list, nuclei_list, alpha = 0.5): h, w, _ = image.shape for idx, cyto in enumerate(cyto_list): cyto_mask = np.array(cyto, np.int) cyto_mask = [list(itertools.chain.from_iterable(cyto_mask.tolist()))] cyto_mask = maskUtils.frPyObjects(cyto_mask, h, w) cyto_mask = maskUtils.decode(cyto_mask[0]) r, g, b = get_colors(int(2 * idx)) image[:, :, 0] = image[:, :, 0] * (cyto_mask == 0) + (cyto_mask > 0) * ((b* alpha) + image[:, :, 0] * (1 - alpha)) image[:, :, 1] = image[:, :, 1] * (cyto_mask == 0) + (cyto_mask > 0) * ((g* alpha) + image[:, :, 1] * (1 - alpha)) image[:, :, 2] = image[:, :, 2] * (cyto_mask == 0) + (cyto_mask > 0) * ((r* alpha) + image[:, :, 2] * (1 - alpha)) for idx, cyto in enumerate(nuclei_list): cyto_mask = np.array(cyto, np.int) cyto_mask = [list(itertools.chain.from_iterable(cyto_mask.tolist()))] cyto_mask = maskUtils.frPyObjects(cyto_mask, h, w) cyto_mask = maskUtils.decode(cyto_mask[0]) r, g, b = get_colors(int(2 * idx + 1)) image[:, :, 0] = image[:, :, 0] * (cyto_mask == 0) + (cyto_mask > 0) * ((b* alpha) + image[:, :, 0] * (1 - alpha)) image[:, :, 1] = image[:, :, 1] * (cyto_mask == 0) + (cyto_mask > 0) * ((g* alpha) + image[:, :, 1] * (1 - alpha)) image[:, :, 2] = image[:, :, 2] * (cyto_mask == 0) + (cyto_mask > 0) * ((r* alpha) + image[:, :, 2] * (1 - alpha)) return image # for cyto in cyto_list: # cyto_mask = np.array(cyto, np.int) # cyto_mask = list(itertools.chain.from_iterable(cyto_mask.tolist())) # cyto_rle.append() def display_instance_dt(dataset, image_name, dt, show_masks=True, show_bbox=True, alpha=0.5, show_caption=True): ''' :param image: h,w,c :param dt, gt : dict :param title: (optional) Figure title :param 
figsize:(optional) the size of the image :param color: (optional) An array or colors to use with each object :param captions:(optional) A list of strings to use as captions for each object :return: ''' # input: list of dicts def convert_seg_to_np(x): rle = x['segmentation'] arr = maskUtils.decode(rle) return arr seg_dt = list(map(convert_seg_to_np, dt)) name, w, h = image_name.split('~') # img = dataset._imgpath%name img = os.path.join(dataset.root, name ) img1 = cv2.imread(img) # canvas = np.zeros_like(img, dtype = np.uint8) # 1. draw masks if show_masks: for idx, d in enumerate(seg_dt): r, g, b = get_colors(idx) # visualize masks # convert list to numpy img1[:, :, 0] = img1[:, :, 0] * (d == 0) + (d > 0) * ((b * d * alpha) + img1[:, :, 0] * (1 - alpha)) img1[:, :, 1] = img1[:, :, 1] * (d == 0) + (d > 0) * ((g * d * alpha) + img1[:, :, 1] * (1 - alpha)) img1[:, :, 2] = img1[:, :, 2] * (d == 0) + (d > 0) * ((r * d * alpha) + img1[:, :, 2] * (1 - alpha)) # 2. show others for idx, d in enumerate(seg_dt): r, g, b = get_colors(idx) # visualize masks contour_list = maskToPolygons(d) cv2.polylines(img1, contour_list, True, (b, g, r), thickness=1) if show_bbox: bbox = dt[idx]['bbox'] cv2.rectangle(img1, (round(bbox[0]), round(bbox[1])), (round(bbox[2]), round(bbox[3])), (b, g, r), thickness=1) # add information class_id = dt[idx]['category_id'][0] score = dt[idx]['score'] if show_caption: x = random.randint(int(bbox[1]), round((bbox[1] + bbox[3]) / 2)) caption = "{} {:.3f}".format(class_id, score) cv2.putText(img1, caption, (round(bbox[0]), x), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (b, g, r), 2, cv2.LINE_AA) # canvas2 = np.zeros_like(img, dtype = np.uint8) return img1
41.314363
139
0.529485
2,033
15,245
3.862273
0.099852
0.050942
0.036679
0.025471
0.793046
0.777382
0.759424
0.74809
0.741595
0.713067
0
0.044719
0.298852
15,245
368
140
41.42663
0.689868
0.133355
0
0.59127
0
0
0.011851
0
0
0
0
0
0
1
0.043651
false
0
0.06746
0
0.154762
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
1cbd51c57d7db2b5bd58d18dacdbcb877fb9b05f
175
py
Python
patronage/admin.py
phildini/django-patronage
b6262a359251a188809f977a3733f1663cb400b3
[ "Apache-2.0" ]
6
2018-08-21T04:03:25.000Z
2021-01-29T05:51:13.000Z
patronage/admin.py
phildini/django-patronage
b6262a359251a188809f977a3733f1663cb400b3
[ "Apache-2.0" ]
1
2021-06-01T22:40:57.000Z
2021-06-01T22:40:57.000Z
patronage/admin.py
phildini/django-patronage
b6262a359251a188809f977a3733f1663cb400b3
[ "Apache-2.0" ]
null
null
null
from django.contrib import admin from .models import Tier, UserTier, RemoteBenefit admin.site.register(Tier) admin.site.register(UserTier) admin.site.register(RemoteBenefit)
25
49
0.828571
23
175
6.304348
0.478261
0.186207
0.351724
0
0
0
0
0
0
0
0
0
0.08
175
6
50
29.166667
0.900621
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.4
0
0.4
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
1cdfbed2cfe0a5ff3494699f3cc851f4bb2dee87
386
py
Python
tasks/result.py
karthik25/easyApi
46e5c3ce1d4c86b7c06c1751688bb8648e00c4de
[ "MIT" ]
null
null
null
tasks/result.py
karthik25/easyApi
46e5c3ce1d4c86b7c06c1751688bb8648e00c4de
[ "MIT" ]
null
null
null
tasks/result.py
karthik25/easyApi
46e5c3ce1d4c86b7c06c1751688bb8648e00c4de
[ "MIT" ]
null
null
null
import result import json from tasks.task import Task class Result(Task): def __init__(self): pass @staticmethod def handles(): return "result" def run(self, args): if result.Result.result_type == "application/json": print(json.dumps(result.Result.last_result, indent=4)) else: print(result.Result.last_result)
21.444444
66
0.629534
47
386
5.021277
0.531915
0.20339
0.135593
0.186441
0
0
0
0
0
0
0
0.003546
0.26943
386
18
67
21.444444
0.833333
0
0
0
0
0
0.056848
0
0
0
0
0
0
1
0.214286
false
0.071429
0.214286
0.071429
0.571429
0.142857
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
5
1cfdd53cae50eca478a5d83a5d0aaeab820e9916
4,124
py
Python
RL/one_agent/pyscripts/trainDQN.py
ds-kiel/dimmer
506b3ae1143201cc76c463f140774febd9df4946
[ "BSD-3-Clause" ]
null
null
null
RL/one_agent/pyscripts/trainDQN.py
ds-kiel/dimmer
506b3ae1143201cc76c463f140774febd9df4946
[ "BSD-3-Clause" ]
null
null
null
RL/one_agent/pyscripts/trainDQN.py
ds-kiel/dimmer
506b3ae1143201cc76c463f140774febd9df4946
[ "BSD-3-Clause" ]
1
2022-02-20T07:42:30.000Z
2022-02-20T07:42:30.000Z
import sys, threading, multiprocessing, time, os import gym import gym_dimmer import gym_dimmer.envs.utils.glossai_utils as glossai_utils import gym_dimmer.envs.utils.dimmer_nn from gym_dimmer.envs.utils.Dispatcher import Dispatcher from baselines import deepq import numpy as np from baselines.common.models import register def train(testbed, instance_name, k_worst_nodes, history_size, instance_id): instance = f"{instance_name}_{testbed}_k_{k_worst_nodes}_history_{history_size}_reward_constant_30_instance_{instance_id}" dispatcher = Dispatcher(testbed, use_traces = True, one_agent_per_node = False, use_randomized_order = False, use_randomized_value_at_beginning_of_episode = False) time.sleep(2) dispatcher.daemon = True dispatcher.start() kwargs = {"testbed":testbed, "k_worst_nodes_len":k_worst_nodes, "history_len":history_size} env = gym.make(f"CentralizedControl-v0", **kwargs) act, _ = deepq.learn( env, network='dimmer_deepq_network', lr=5e-4, total_timesteps=200000, exploration_fraction=0.7, exploration_final_eps=0.01, print_freq=100, discount_factor=0.7, dueling=False, checkpoint_freq=1000, ) glossai_utils.log_warning("Saving model") act.save(f"../models/evaluation/{instance}.pkl") glossai_utils.log_success("Saved!") dispatcher.stop() if __name__ == '__main__': glossai_utils.log_warning("The K (worst_nodes) and History size must be set manually in gyms/[]...]/CentralizedControl !") # params testbed = "kiel" instance_name = "dqn" k_worst_nodes = 10 history_size = 5 # check all existing instance IDs, try to get a new one i = 0 found = True while found: found = os.path.exists(f"../models/evaluation/{instance_name}_{testbed}_k_{k_worst_nodes}_history_{history_size}_instance_{i}.pkl") i+=1 i -=1 instance_id = i glossai_utils.log_success(f"Training model named ../models/evaluation/{instance_name}_{testbed}_k_{k_worst_nodes}_history_{history_size}_instance_{i}.pkl") train(testbed, instance_name, k_worst_nodes, history_size, instance_id) # import sys # import threading # 
import multiprocessing # import time # import gym # import gym_dimmer # import gym_dimmer.envs.utils.glossai_utils as glossai_utils # import gym_dimmer.envs.utils.dimmer_nn # from gym_dimmer.envs.utils.Dispatcher import Dispatcher # from baselines import deepq # import numpy as np # from baselines.common.models import register # tot_rew = [] # def save_reward(lcl, _glb): # tot_rew.append(lcl['episode_rewards'][-1]) # return False # continue training # def main(input_size): # TESTBED = "kiel" # LEARNING_INSTANCE_ID = f"nn_centralized_control_{input_size}_inpt_20_neurons_relu_instance_17" # dispatcher = Dispatcher(TESTBED, # use_traces = True, # one_agent_per_node = False, # use_randomized_order = False, # use_randomized_value_at_beginning_of_episode = False) # time.sleep(2) # dispatcher.daemon = True # dispatcher.start() # kwargs = {"testbed":testbed, "k_worst_nodes_len":k_worst_nodes, "history_len":history_size} # env = gym.make(f"CentralizedControl-v0", **kwargs) # act, dbg = deepq.learn( # env, # network='dimmer_deepq_network', # lr=5e-4, # total_timesteps=200000, # exploration_fraction=0.7, # exploration_final_eps=0.01, # print_freq=100, # discount_factor=0.7, # dueling=False, # callback=save_reward, # checkpoint_freq=1000, # ) # glossai_utils.log_warning("Saving model") # act.save("../models/{}/{}/{}.pkl".format(TESTBED, LEARNING_INSTANCE_ID, TESTBED)) # glossai_utils.log_success("Saved!") # dispatcher.stop() # if __name__ == '__main__': # main(int(sys.argv[1]))
31.242424
159
0.663434
512
4,124
5.005859
0.292969
0.025751
0.04721
0.049161
0.719079
0.719079
0.719079
0.719079
0.719079
0.719079
0
0.01954
0.230601
4,124
131
160
31.480916
0.788213
0.4258
0
0
0
0.038462
0.247841
0.173575
0
0
0
0
0
1
0.019231
false
0
0.173077
0
0.192308
0.019231
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
e82d8f5dc89e59465033f369aee7a7cf942bd5cb
302
py
Python
ImageSaver/src/deserializer.py
SvenSommer/LegoSorter.BrickRecognition
1a653b597c3902f3c042182c414640eeaafcca87
[ "MIT" ]
null
null
null
ImageSaver/src/deserializer.py
SvenSommer/LegoSorter.BrickRecognition
1a653b597c3902f3c042182c414640eeaafcca87
[ "MIT" ]
null
null
null
ImageSaver/src/deserializer.py
SvenSommer/LegoSorter.BrickRecognition
1a653b597c3902f3c042182c414640eeaafcca87
[ "MIT" ]
null
null
null
from Models.messages.predictedImagesMessage import PredictedImagesMessage from RabbitMq.src.serializer import deserializeMessage def deserialize_predicted_images_message(body: str) -> PredictedImagesMessage: predicted_images_message = deserializeMessage(body) return predicted_images_message
37.75
78
0.864238
29
302
8.758621
0.586207
0.177165
0.259843
0
0
0
0
0
0
0
0
0
0.092715
302
7
79
43.142857
0.927007
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.4
0
0.8
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
e830080c62ce8e1ec28f0fa56e4a72c3b8620102
56
py
Python
hordak/models/__init__.py
audience-platform/django-hordak
aa3a18438136a020794b1c0b10603dd78fa7aa76
[ "MIT" ]
187
2016-12-12T10:58:11.000Z
2022-03-27T08:14:19.000Z
hordak/models/__init__.py
audience-platform/django-hordak
aa3a18438136a020794b1c0b10603dd78fa7aa76
[ "MIT" ]
62
2016-12-10T00:12:47.000Z
2022-03-16T09:23:05.000Z
hordak/models/__init__.py
audience-platform/django-hordak
aa3a18438136a020794b1c0b10603dd78fa7aa76
[ "MIT" ]
47
2016-12-12T11:07:31.000Z
2022-03-15T20:30:07.000Z
from .core import * from .statement_csv_import import *
18.666667
35
0.785714
8
56
5.25
0.625
0
0
0
0
0
0
0
0
0
0
0
0.142857
56
2
36
28
0.875
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
1c26248c9cf1b1d77b6c707e945c9ef088cf3407
149
py
Python
ErrorDefine.py
Wenqi-Zhu/Wavedance
039566360970581e70eabdce7410403cf88a8c20
[ "MIT" ]
1
2020-01-15T14:16:39.000Z
2020-01-15T14:16:39.000Z
ErrorDefine.py
Wenqi-Zhu/Wavedance
039566360970581e70eabdce7410403cf88a8c20
[ "MIT" ]
null
null
null
ErrorDefine.py
Wenqi-Zhu/Wavedance
039566360970581e70eabdce7410403cf88a8c20
[ "MIT" ]
null
null
null
class SimulatorError(Exception): pass class ConvergenceError(Exception): pass class CircuitParameterError(Exception): pass
13.545455
40
0.704698
12
149
8.75
0.5
0.371429
0.342857
0
0
0
0
0
0
0
0
0
0.234899
149
10
41
14.9
0.921053
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
1c27ed1ca879fe27a48d1fbcdec6584a889a51f4
78
py
Python
src/dssp/__init__.py
hassnabdl/Helix-Analysis-Program
6383b132aefb4f0f51965d1812b59625ba35dab2
[ "MIT" ]
1
2021-05-12T20:28:08.000Z
2021-05-12T20:28:08.000Z
src/dssp/__init__.py
hassnabdl/Helix-Analysis-Program
6383b132aefb4f0f51965d1812b59625ba35dab2
[ "MIT" ]
1
2021-05-14T09:12:29.000Z
2021-05-20T14:14:49.000Z
src/dssp/__init__.py
hassnabdl/Helix-Analysis-Program
6383b132aefb4f0f51965d1812b59625ba35dab2
[ "MIT" ]
1
2021-05-20T14:38:31.000Z
2021-05-20T14:38:31.000Z
from .dssp import DSSP from .hbonds import find_hbonds from .. import common
15.6
31
0.782051
12
78
5
0.5
0
0
0
0
0
0
0
0
0
0
0
0.166667
78
4
32
19.5
0.923077
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
1c407ce22b0188d626bd19f8a9dfb9016f55a632
652
py
Python
samples/iris/iris/evaluation/evaluation_result.py
katyamust/ml-expr-fw
5ede3ff1f777430cf25e8731e4798fc37387fb9d
[ "MIT" ]
1
2022-03-06T21:52:01.000Z
2022-03-06T21:52:01.000Z
samples/iris/iris/evaluation/evaluation_result.py
omri374/FabricML
a545f1ee907b1b89ca9766a873c5944ec88e54e9
[ "MIT" ]
null
null
null
samples/iris/iris/evaluation/evaluation_result.py
omri374/FabricML
a545f1ee907b1b89ca9766a873c5944ec88e54e9
[ "MIT" ]
null
null
null
from abc import abstractmethod from typing import Dict from iris import LoggableObject class EvaluationResult(LoggableObject): """ Class which holds the evaluation output for one model run. For example, precision or recall, MSE, accuracy etc. """ @abstractmethod def get_metrics(self) -> Dict: """ Return the evaluation result's metrics you wish to be stored in the experiment logging system :return: A dictionary with names of values of metrics to store """ pass def get_params(self): # Evaluation results are not likely to have params, just metrics return None
27.166667
101
0.684049
83
652
5.349398
0.698795
0.085586
0
0
0
0
0
0
0
0
0
0
0.263804
652
23
102
28.347826
0.925
0.509202
0
0
0
0
0
0
0
0
0
0
0
1
0.222222
false
0.111111
0.333333
0.111111
0.777778
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
1
1
1
0
0
5
1c547d20f9bbd2b2a23b1f4757f5e6b9ca20af3a
311
py
Python
order-1_voronoi/core/tree/LocationAwareEntry.py
bzliu94/algorithms
43ccefd7ea1fd88339bf2afa0b35b0a3bdf6acff
[ "MIT" ]
null
null
null
order-1_voronoi/core/tree/LocationAwareEntry.py
bzliu94/algorithms
43ccefd7ea1fd88339bf2afa0b35b0a3bdf6acff
[ "MIT" ]
null
null
null
order-1_voronoi/core/tree/LocationAwareEntry.py
bzliu94/algorithms
43ccefd7ea1fd88339bf2afa0b35b0a3bdf6acff
[ "MIT" ]
null
null
null
from Entry import * class LocationAwareEntry(Entry): def __init__(self, key, value, location = None): Entry.__init__(self, key, value) self.location = location def setLocation(self, location): self.location = location def getLocation(self): return self.location
16.368421
51
0.659164
34
311
5.794118
0.441176
0.243655
0.111675
0.162437
0
0
0
0
0
0
0
0
0.250804
311
18
52
17.277778
0.845494
0
0
0.222222
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.111111
0.111111
0.666667
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
c732db632beb53b99ba2aa3c5d84b09f3015ce96
41
py
Python
interview_tasks/4.py
borislavstoychev/SoftUni_Bootcamp
a86eee5fe14d7fa0df75fb34daea868585ac406e
[ "MIT" ]
null
null
null
interview_tasks/4.py
borislavstoychev/SoftUni_Bootcamp
a86eee5fe14d7fa0df75fb34daea868585ac406e
[ "MIT" ]
null
null
null
interview_tasks/4.py
borislavstoychev/SoftUni_Bootcamp
a86eee5fe14d7fa0df75fb34daea868585ac406e
[ "MIT" ]
null
null
null
print(sum(int(n) for n in list(input())))
41
41
0.658537
9
41
3
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.097561
41
1
41
41
0.72973
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
c73d65c5c310f7f6c791f712c60a6e49205d1f42
6,548
py
Python
backbones/resnet_blocks.py
edwardyehuang/iSeg
256b0f7fdb6e854fe026fa8df41d9a4a55db34d5
[ "MIT" ]
4
2021-12-13T09:49:26.000Z
2022-02-19T11:16:50.000Z
backbones/resnet_blocks.py
edwardyehuang/iSeg
256b0f7fdb6e854fe026fa8df41d9a4a55db34d5
[ "MIT" ]
1
2021-07-28T10:40:56.000Z
2021-08-09T07:14:06.000Z
backbones/resnet_blocks.py
edwardyehuang/iSeg
256b0f7fdb6e854fe026fa8df41d9a4a55db34d5
[ "MIT" ]
null
null
null
# ================================================================ # MIT License # Copyright (c) 2021 edwardyehuang (https://github.com/edwardyehuang) # ================================================================ # This code is motified from https://github.com/keras-team/keras/blob/master/keras/applications/resnet.py # The modifications are refer to https://github.com/tensorflow/models/blob/master/research/deeplab/core/resnet_v1_beta.py # and "Bag of Tricks for Image Classification with Convolutional Neural Networks", CVPR2019 import tensorflow as tf from iseg.layers.normalizations import normalization from tensorflow.python.keras.utils import conv_utils BN_EPSILON = 1.001e-5 def conv2d_same_fn(*args, **kwargs): return tf.keras.layers.Conv2D(*args, **kwargs) class BlockType1(tf.keras.Model): def __init__( self, filters, kernel_size=3, stride=1, conv_shortcut=True, use_bias=True, norm_method=None, name=None ): super(BlockType1, self).__init__(name=name) self.conv_shortcut = conv_shortcut if self.conv_shortcut: self.shortcut_conv = tf.keras.layers.Conv2D( 4 * filters, kernel_size=1, strides=stride, use_bias=use_bias, name=name + "_0_conv" ) self.shortcut_bn = normalization(epsilon=BN_EPSILON, method=norm_method, name=name + "_0_bn") self.conv1_conv = tf.keras.layers.Conv2D( filters, kernel_size=1, strides=stride, use_bias=use_bias, name=name + "_1_conv" ) self.conv1_bn = normalization(epsilon=BN_EPSILON, method=norm_method, name=name + "_1_bn") self.conv2_conv = conv2d_same_fn(filters, kernel_size, padding="SAME", use_bias=use_bias, name=name + "_2_conv") self.conv2_bn = normalization(epsilon=BN_EPSILON, method=norm_method, name=name + "_2_bn") self.conv3_conv = tf.keras.layers.Conv2D(4 * filters, kernel_size=1, use_bias=use_bias, name=name + "_3_conv") self.conv3_bn = normalization(epsilon=BN_EPSILON, method=norm_method, name=name + "_3_bn") @property def strides(self): return self.conv1_conv.strides[0] @strides.setter def strides(self, value): value = 
conv_utils.normalize_tuple(value, self.shortcut_conv.rank, "strides") self.conv1_conv.strides = value if self.conv_shortcut: self.shortcut_conv.strides = value @property def atrous_rates(self): return self.conv2_conv.dilation_rate[0] @atrous_rates.setter def atrous_rates(self, value): value = conv_utils.normalize_tuple(value, self.conv2_conv.rank, "dilation_rate") if self.conv2_conv.built: raise ValueError("conv has been built") self.conv2_conv.dilation_rate = value def call(self, inputs, training=None, **kwargs): if self.conv_shortcut: shortcut = self.shortcut_conv(inputs) shortcut = self.shortcut_bn(shortcut, training=training) else: shortcut = inputs x = self.conv1_conv(inputs) x = self.conv1_bn(x, training=training) x = tf.nn.relu(x) tf.assert_equal(x.shape.rank, 4) x = self.conv2_conv(x, training=training) x = self.conv2_bn(x, training=training) x = tf.nn.relu(x) x = self.conv3_conv(x) x = self.conv3_bn(x, training=training) x = tf.add(shortcut, x) x = tf.nn.relu(x) return x class BlockType2(tf.keras.Model): def __init__( self, filters, kernel_size=3, stride=1, conv_shortcut=True, use_bias=False, norm_method=None, downsample_method="avg", name=None, ): super(BlockType2, self).__init__(name=name) self.conv_shortcut = conv_shortcut self.downsample_method = downsample_method if self.conv_shortcut: self.shortcut_conv = tf.keras.layers.Conv2D( 4 * filters, kernel_size=1, strides=stride, use_bias=use_bias, name=name + "_0_conv" ) self.shortcut_bn = normalization(epsilon=BN_EPSILON, method=norm_method, name=name + "_0_bn") self.conv1_conv = tf.keras.layers.Conv2D(filters, kernel_size=1, use_bias=use_bias, name=name + "_1_conv") self.conv1_bn = normalization(epsilon=BN_EPSILON, method=norm_method, name=name + "_1_bn") self.conv2_conv = conv2d_same_fn( filters, kernel_size, strides=stride, padding="SAME", use_bias=use_bias, name=name + "_2_conv" ) self.conv2_bn = normalization(epsilon=BN_EPSILON, method=norm_method, name=name + "_2_bn") self.conv3_conv = 
tf.keras.layers.Conv2D(4 * filters, kernel_size=1, use_bias=use_bias, name=name + "_3_conv") self.conv3_bn = normalization(epsilon=BN_EPSILON, method=norm_method, name=name + "_3_bn") @property def strides(self): return self.conv2_conv.strides[0] @strides.setter def strides(self, value): if not isinstance(value, tuple): value = (value, value) self.conv2_conv.strides = value if self.conv_shortcut: self.shortcut_conv.strides = value @property def atrous_rates(self): return self.conv2_conv.dilation_rate[0] @atrous_rates.setter def atrous_rates(self, value): if not isinstance(value, tuple): value = (value, value) self.conv2_conv.dilation_rate = value def call(self, inputs, training=None, **kwargs): if self.conv_shortcut: shortcut = self.shortcut_conv(inputs) shortcut = self.shortcut_bn(shortcut, training=training) elif self.strides > 1: if "avg" in self.downsample_method: shortcut = tf.nn.avg_pool2d(inputs, self.conv2_conv.strides, self.conv2_conv.strides, "SAME") elif "max" in self.downsample_method: shortcut = tf.nn.max_pool2d(inputs, self.conv2_conv.strides, self.conv2_conv.strides, "SAME") else: raise ValueError("Only max or avg are supported") else: shortcut = inputs x = self.conv1_conv(inputs) x = self.conv1_bn(x, training=training) x = tf.nn.relu(x) x = self.conv2_conv(x, training=training) x = self.conv2_bn(x, training=training) x = tf.nn.relu(x) x = self.conv3_conv(x) x = self.conv3_bn(x, training=training) x = tf.add(shortcut, x) x = tf.nn.relu(x) return x
33.408163
121
0.634698
854
6,548
4.64637
0.156909
0.045363
0.052419
0.028226
0.771925
0.770665
0.770665
0.753528
0.753528
0.692792
0
0.020879
0.23931
6,548
195
122
33.579487
0.775748
0.079872
0
0.648855
0
0
0.031411
0
0
0
0
0
0.007634
1
0.099237
false
0
0.022901
0.038168
0.19084
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
c7437fcc84738acf85980a56bd20f63d528a5e38
140
py
Python
sicwebapp/page/cron.py
Dheerajdoppalapudi/Summer-Internship-Club-Website
9ffa0863d0c86ac895fd0900649c43cf56c8cb59
[ "MIT" ]
1
2022-01-19T10:51:51.000Z
2022-01-19T10:51:51.000Z
sicwebapp/page/cron.py
Dheerajdoppalapudi/Summer-Internship-Club-Website
9ffa0863d0c86ac895fd0900649c43cf56c8cb59
[ "MIT" ]
null
null
null
sicwebapp/page/cron.py
Dheerajdoppalapudi/Summer-Internship-Club-Website
9ffa0863d0c86ac895fd0900649c43cf56c8cb59
[ "MIT" ]
3
2022-01-18T18:30:35.000Z
2022-01-20T08:15:05.000Z
import logging

from django.core.management import call_command


def my_scheduled_job():
    """Run the periodic database backup via the ``dbbackup`` management command.

    Intended to be invoked by a cron scheduler. Failures are logged rather
    than raised, so a transient backup error does not kill the scheduler —
    this preserves the original best-effort intent of the ``try``/``except``.
    """
    try:
        call_command('dbbackup')
    except Exception:
        # Was a bare `except: pass`, which also swallowed SystemExit /
        # KeyboardInterrupt and hid every failure with no trace. Catch only
        # Exception and record the traceback so operators can see that a
        # scheduled backup failed.
        logging.getLogger(__name__).exception("dbbackup management command failed")
17.5
47
0.671429
17
140
5.294118
0.882353
0.244444
0
0
0
0
0
0
0
0
0
0
0.25
140
7
48
20
0.857143
0
0
0
0
0
0.057143
0
0
0
0
0
0
1
0.166667
true
0.166667
0.166667
0
0.333333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
c75921be2286a46a502c2af85795a2275c705dc8
769
py
Python
tests/data/program_analysis/derived-types/derived-types-07.py
rsulli55/automates
1647a8eef85c4f03086a10fa72db3b547f1a0455
[ "Apache-2.0" ]
17
2018-12-19T16:32:38.000Z
2021-10-05T07:58:15.000Z
tests/data/program_analysis/derived-types/derived-types-07.py
rsulli55/automates
1647a8eef85c4f03086a10fa72db3b547f1a0455
[ "Apache-2.0" ]
183
2018-12-20T17:03:01.000Z
2022-02-23T22:21:42.000Z
tests/data/program_analysis/derived-types/derived-types-07.py
rsulli55/automates
1647a8eef85c4f03086a10fa72db3b547f1a0455
[ "Apache-2.0" ]
5
2019-01-04T22:37:49.000Z
2022-01-19T17:34:16.000Z
# NOTE(review): this file looks like an auto-generated fixture for the
# automates for2py (Fortran-to-Python) test suite — presumably a translation
# of a Fortran derived-type program; confirm before hand-editing, since the
# program-analysis tests may compare against this exact structure.
import sys
import os
from typing import List
import math
from automates.program_analysis.for2py.format import *
from automates.program_analysis.for2py.arrays import *
from automates.program_analysis.for2py.static_save import *
from automates.program_analysis.for2py.strings import *
from automates.program_analysis.for2py import intrinsics
from dataclasses import dataclass
from automates.program_analysis.for2py.types_ext import Float32
# NOTE(review): this rebinds the name `math` (stdlib math imported above is
# shadowed from here on) — appears intentional in the generated code.
import automates.program_analysis.for2py.math_ext as math
from numbers import Real
from random import random


@dataclass
class mytype:
    # NOTE(review): no type annotation, so under @dataclass this is a class
    # attribute shared by all instances, not a dataclass field — likely the
    # generator's rendering of a Fortran CHARACTER(120) component; confirm.
    simcontrol = String(120, " ")


def main():
    # Local String(20) initialized to "hello world"; the value is unused —
    # the fixture only exercises construction.
    test = String(20, "hello world")


main()
30.76
136
0.708713
90
769
5.944444
0.411111
0.209346
0.314019
0.392523
0.426168
0.299065
0
0
0
0
0
0.02381
0.235371
769
24
137
32.041667
0.886054
0
0
0
0
0
0.149545
0
0
0
0
0
0
1
0.05
false
0
0.7
0
0.85
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
c78a091f432593413f1fe9de32f7d3c73a14955d
14
py
Python
todo_v3/__init__.py
ken-mathenge/personal_todo_list
3969e4b40a0d01e93d6fa8bc5612b0aa22798255
[ "MIT" ]
1
2020-05-06T12:00:12.000Z
2020-05-06T12:00:12.000Z
todo_v3/__init__.py
ken-mathenge/personal_todo_list
3969e4b40a0d01e93d6fa8bc5612b0aa22798255
[ "MIT" ]
7
2020-04-01T17:54:04.000Z
2020-04-05T16:37:03.000Z
todo_v3/__init__.py
ken-mathenge/personal_todo_list
3969e4b40a0d01e93d6fa8bc5612b0aa22798255
[ "MIT" ]
null
null
null
"""Config."""
# NOTE(review): module body is empty apart from the docstring — presumably a
# package initializer whose configuration lives elsewhere; confirm.
7
13
0.428571
1
14
6
1
0
0
0
0
0
0
0
0
0
0
0
0.071429
14
1
14
14
0.461538
0.5
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
c79b72a072ae47f2f6340c83798f36556053a144
87
py
Python
ch04/func_from.py
kxen42/Learn-Python-Programming-Third-Edition
851ddc5e6094fadd44f31a9ad1d3876456b04372
[ "MIT" ]
19
2021-11-05T22:54:09.000Z
2022-03-29T15:03:47.000Z
ch04/func_from.py
kxen42/Learn-Python-Programming-Third-Edition
851ddc5e6094fadd44f31a9ad1d3876456b04372
[ "MIT" ]
null
null
null
ch04/func_from.py
kxen42/Learn-Python-Programming-Third-Edition
851ddc5e6094fadd44f31a9ad1d3876456b04372
[ "MIT" ]
26
2021-11-12T17:04:50.000Z
2022-03-29T01:10:35.000Z
# func_from.py
"""Demonstrate importing specific names with `from ... import ...`."""
from lib.funcdef import square, cube

# Apply each imported function to the same sample value, in import order.
for func in (square, cube):
    print(func(10))
14.5
36
0.747126
15
87
4.266667
0.666667
0
0
0
0
0
0
0
0
0
0
0.051948
0.114943
87
5
37
17.4
0.779221
0.137931
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0.666667
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5
c7a9293d3aa5f4b4e27dccdb8ce1e4d14923c459
100
py
Python
test/__init__.py
sara-nl/iBridges
a630cde7e4cab455a41f41ab96c7a45434dbaf97
[ "Apache-2.0" ]
null
null
null
test/__init__.py
sara-nl/iBridges
a630cde7e4cab455a41f41ab96c7a45434dbaf97
[ "Apache-2.0" ]
null
null
null
test/__init__.py
sara-nl/iBridges
a630cde7e4cab455a41f41ab96c7a45434dbaf97
[ "Apache-2.0" ]
1
2018-08-28T13:38:26.000Z
2018-08-28T13:38:26.000Z
import os
import sys

# Make the project root (one level above this test package) importable by
# prepending it to the module search path.
_this_dir = os.path.dirname(__file__)
_project_root = os.path.dirname(_this_dir)
sys.path.insert(0, _project_root)
20
59
0.65
15
100
4.066667
0.533333
0.196721
0.42623
0
0
0
0
0
0
0
0
0.012821
0.22
100
4
60
25
0.769231
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
c7aa06a0b96068d98cbd90795fc4d0486d7fbd07
3,836
py
Python
stubs/loboris-v3_2_24-esp32/display.py
mattytrentini/micropython-stubs
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
[ "MIT" ]
null
null
null
stubs/loboris-v3_2_24-esp32/display.py
mattytrentini/micropython-stubs
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
[ "MIT" ]
null
null
null
stubs/loboris-v3_2_24-esp32/display.py
mattytrentini/micropython-stubs
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
[ "MIT" ]
null
null
null
# NOTE(review): auto-generated MicroPython firmware stub (produced by
# "Stubber 1.0.0" per the module docstring) describing the esp32_LoBo
# `display.TFT` API for IDE/type-checker use. Every method body is `pass`
# and every attribute is a bare constant (colors, fonts, orientations,
# controller IDs); do not hand-edit — regenerate from the device instead.
""" Module: 'display' on esp32_LoBo MCU: (sysname='esp32_LoBo', nodename='esp32_LoBo', release='3.2.24', version='ESP32_LoBo_v3.2.24 on 2018-09-06', machine='ESP32 board with ESP32') Stubber: 1.0.0 - updated """ from typing import Any class TFT: """""" BLACK = 0 BLUE = 255 BMP = 2 BOTTOM = -9004 CENTER = -9003 COLOR_BITS16 = 16 COLOR_BITS24 = 24 CYAN = 65535 DARKCYAN = 32896 DARKGREEN = 32768 DARKGREY = 8421504 FONT_7seg = 9 FONT_Comic = 4 FONT_Default = 0 FONT_DefaultSmall = 8 FONT_DejaVu18 = 1 FONT_DejaVu24 = 2 FONT_Minya = 5 FONT_Small = 7 FONT_Tooney = 6 FONT_Ubuntu = 3 GENERIC = 7 GREEN = 65280 GREENYELLOW = 11336748 HSPI = 1 ILI9341 = 0 ILI9488 = 1 JPG = 1 LANDSCAPE = 1 LANDSCAPE_FLIP = 3 LASTX = 7000 LASTY = 8000 LIGHTGREY = 12632256 M5STACK = 6 MAGENTA = 16515327 MAROON = 8388608 NAVY = 128 OLIVE = 8421376 ORANGE = 16557056 PINK = 16564426 PORTRAIT = 0 PORTRAIT_FLIP = 2 PURPLE = 8388736 RED = 16515072 RIGHT = -9004 ST7735 = 3 ST7735B = 5 ST7735R = 4 ST7789 = 2 TOUCH_NONE = 0 TOUCH_STMPE = 2 TOUCH_XPT = 1 VSPI = 2 WHITE = 16579836 YELLOW = 16579584 def arc(self, *args) -> Any: pass def attrib7seg(self, *args) -> Any: pass def backlight(self, *args) -> Any: pass def circle(self, *args) -> Any: pass def clear(self, *args) -> Any: pass def clearwin(self, *args) -> Any: pass def compileFont(self, *args) -> Any: pass def deinit(self, *args) -> Any: pass def ellipse(self, *args) -> Any: pass def font(self, *args) -> Any: pass def fontSize(self, *args) -> Any: pass def getCalib(self, *args) -> Any: pass def getTouchType(self, *args) -> Any: pass def get_bg(self, *args) -> Any: pass def get_fg(self, *args) -> Any: pass def gettouch(self, *args) -> Any: pass def hsb2rgb(self, *args) -> Any: pass def image(self, *args) -> Any: pass def init(self, *args) -> Any: pass def line(self, *args) -> Any: pass def lineByAngle(self, *args) -> Any: pass def orient(self, *args) -> Any: pass def pixel(self, *args) -> Any: pass def polygon(self, *args) -> Any: pass def 
readPixel(self, *args) -> Any: pass def readScreen(self, *args) -> Any: pass def rect(self, *args) -> Any: pass def resetwin(self, *args) -> Any: pass def restorewin(self, *args) -> Any: pass def roundrect(self, *args) -> Any: pass def savewin(self, *args) -> Any: pass def screensize(self, *args) -> Any: pass def setCalib(self, *args) -> Any: pass def set_bg(self, *args) -> Any: pass def set_fg(self, *args) -> Any: pass def setwin(self, *args) -> Any: pass def text(self, *args) -> Any: pass def textClear(self, *args) -> Any: pass def textWidth(self, *args) -> Any: pass def text_x(self, *args) -> Any: pass def text_y(self, *args) -> Any: pass def tft_deselect(self, *args) -> Any: pass def tft_readcmd(self, *args) -> Any: pass def tft_select(self, *args) -> Any: pass def tft_setspeed(self, *args) -> Any: pass def tft_writecmd(self, *args) -> Any: pass def tft_writecmddata(self, *args) -> Any: pass def triangle(self, *args) -> Any: pass def winsize(self, *args) -> Any: pass
17.925234
146
0.530761
475
3,836
4.214737
0.353684
0.195804
0.269231
0.367133
0.456543
0.15984
0
0
0
0
0
0.096177
0.35219
3,836
213
147
18.00939
0.709457
0.05292
0
0.316129
0
0
0
0
0
0
0
0
0
1
0.316129
false
0.316129
0.006452
0
0.683871
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
5
40031e7ac8bc5c0fda71d5bde3797b237df10346
16
py
Python
Dcom-KHU Week7/zz.py
Dcom-KHU/2021_Algorithm_basic
8207d9c2003ae4a5533c91b43992224457c8c023
[ "MIT" ]
3
2021-07-14T13:01:42.000Z
2021-12-28T15:10:12.000Z
Dcom-KHU Week7/zz.py
Dcom-KHU/2021_Algorithm_basic
8207d9c2003ae4a5533c91b43992224457c8c023
[ "MIT" ]
null
null
null
Dcom-KHU Week7/zz.py
Dcom-KHU/2021_Algorithm_basic
8207d9c2003ae4a5533c91b43992224457c8c023
[ "MIT" ]
null
null
null
# Placeholder exercise script: print the week label.
WEEK_LABEL = "Week 7"
print(WEEK_LABEL)
8
15
0.625
3
16
3.333333
1
0
0
0
0
0
0
0
0
0
0
0.071429
0.125
16
1
16
16
0.642857
0
0
0
0
0
0.375
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
4026aa87175cdaa4e99eade246912b10c5777fa3
83
py
Python
django/esite/auto/admin.py
vollov/django-template
ca904ace18919dbb557961acbb9959ffd48d4d20
[ "MIT" ]
null
null
null
django/esite/auto/admin.py
vollov/django-template
ca904ace18919dbb557961acbb9959ffd48d4d20
[ "MIT" ]
null
null
null
django/esite/auto/admin.py
vollov/django-template
ca904ace18919dbb557961acbb9959ffd48d4d20
[ "MIT" ]
null
null
null
"""Django admin registrations for this app's models."""
from django.contrib import admin

from models import Car

# Register Car with the default ModelAdmin so it appears in the admin site.
admin.site.register(Car)
13.833333
32
0.807229
13
83
5.153846
0.692308
0
0
0
0
0
0
0
0
0
0
0
0.13253
83
5
33
16.6
0.930556
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
40567c91fd3b667c9ee6a95fe3981a0dd7e9d1ad
32
py
Python
tfgpu/cli/__init__.py
2sang/oneshot-tfgpu
314d4ffcdf3dd19325988d35d2ab521da89eb9d9
[ "MIT" ]
null
null
null
tfgpu/cli/__init__.py
2sang/oneshot-tfgpu
314d4ffcdf3dd19325988d35d2ab521da89eb9d9
[ "MIT" ]
null
null
null
tfgpu/cli/__init__.py
2sang/oneshot-tfgpu
314d4ffcdf3dd19325988d35d2ab521da89eb9d9
[ "MIT" ]
null
null
null
# NOTE(review): imports `run` and `init` — presumably CLI subcommand modules
# re-exported for `tfgpu.cli` consumers (or imported for registration side
# effects); the names are otherwise unused here. Confirm against the package
# layout before removing.
from tfgpu.cli import run, init
16
31
0.78125
6
32
4.166667
1
0
0
0
0
0
0
0
0
0
0
0
0.15625
32
1
32
32
0.925926
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
406342ef6fff98df6161ed84537aa3056ab0e165
9,997
py
Python
dataset/cifar.py
aouedions11/SSFL-Benchmarking-Semi-supervised-Federated-Learning
78aec81919bf95ed4677d0e0a4ebbbe3be455742
[ "MIT" ]
1
2021-09-17T17:04:02.000Z
2021-09-17T17:04:02.000Z
dataset/cifar.py
aouedions11/SSFL-Benchmarking-Semi-supervised-Federated-Learning
78aec81919bf95ed4677d0e0a4ebbbe3be455742
[ "MIT" ]
null
null
null
dataset/cifar.py
aouedions11/SSFL-Benchmarking-Semi-supervised-Federated-Learning
78aec81919bf95ed4677d0e0a4ebbbe3be455742
[ "MIT" ]
null
null
null
import logging import numpy as np from PIL import Image from torchvision import datasets from torchvision import transforms import copy from .randaugment import RandAugmentMC logger = logging.getLogger(__name__) cifar10_mean = (0.4914, 0.4822, 0.4465) cifar10_std = (0.2471, 0.2435, 0.2616) cifar100_mean = (0.5071, 0.4867, 0.4408) cifar100_std = (0.2675, 0.2565, 0.2761) normal_mean = (0.5, 0.5, 0.5) normal_std = (0.5, 0.5, 0.5) normal_mean = (0.5, 0.5) normal_std = (0.5, 0.5) def get_cifar10(root, num_expand_x, num_expand_u,device_ids, server_idxs): root='./data' transform_labeled = transforms.Compose([ transforms.RandomHorizontalFlip(), transforms.RandomCrop(size=32, padding=int(32*0.125), padding_mode='reflect'), transforms.ToTensor(), transforms.Normalize(mean=cifar10_mean, std=cifar10_std) ]) transform_val = transforms.Compose([ transforms.ToTensor(), transforms.Normalize(mean=cifar10_mean, std=cifar10_std) ]) base_dataset = datasets.CIFAR10(root, train=True, download=False) train_labeled_idxs, train_unlabeled_idxs = x_u_split( base_dataset.targets, num_expand_x, num_expand_u, device_ids,server_idxs) train_labeled_dataset = CIFAR10SSL( root, train_labeled_idxs, train=True, transform=transform_labeled) train_unlabeled_dataset_list = [] train_unlabeled_idxs_tmp = copy.deepcopy(train_unlabeled_idxs[0]) import functools import operator for id in range(len(train_unlabeled_idxs)): train_unlabeled_dataset = CIFAR10SSL( root, train_unlabeled_idxs[id], train=True, transform=TransformFix(mean=cifar10_mean, std=cifar10_std)) train_unlabeled_dataset_list.append(train_unlabeled_dataset) test_dataset = datasets.CIFAR10( root, train=False, transform=transform_val, download=False) logger.info("Dataset: CIFAR10") return train_labeled_dataset, train_unlabeled_dataset_list, test_dataset def get_emnist(root, num_expand_x, num_expand_u,device_ids, server_idxs): root='./data' transform_labeled = transforms.Compose([ transforms.RandomHorizontalFlip(), transforms.RandomCrop(size=28, 
padding=int(28*0.125), padding_mode='reflect'), transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,)) ]) transform_val = transforms.Compose([ transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,)) ]) base_dataset = datasets.EMNIST(root, train=True,split='balanced', download=True) train_labeled_idxs, train_unlabeled_idxs = x_u_split( base_dataset.targets, num_expand_x, num_expand_u, device_ids,server_idxs) train_labeled_dataset = EMNIST( root, train_labeled_idxs, train=True, transform=transform_labeled) train_unlabeled_dataset_list = [] train_unlabeled_idxs_tmp = copy.deepcopy(train_unlabeled_idxs[0]) for id in range(len(train_unlabeled_idxs)): train_unlabeled_dataset = EMNIST( root, train_unlabeled_idxs[id], train=True, transform=TransformFix(size = 28, mean=(0.1307,), std=(0.3081,))) train_unlabeled_dataset_list.append(train_unlabeled_dataset) test_dataset = datasets.EMNIST( root, train=False,split='balanced', transform=transform_val, download=True) return train_labeled_dataset, train_unlabeled_dataset_list, test_dataset def get_svhn(root, num_expand_x, num_expand_u,device_ids, server_idxs): root='./data' transform_labeled = transforms.Compose([ transforms.RandomHorizontalFlip(), transforms.RandomCrop(size=32, padding=int(32*0.125), padding_mode='reflect'), transforms.ToTensor(), transforms.Normalize(mean=cifar10_mean, std=cifar10_std) ]) transform_val = transforms.Compose([ transforms.ToTensor(), transforms.Normalize(mean=cifar10_mean, std=cifar10_std) ]) base_dataset = datasets.SVHN(root, split='train', download=False) train_labeled_idxs, train_unlabeled_idxs = x_u_split( base_dataset.labels, num_expand_x, num_expand_u, device_ids,server_idxs) train_labeled_dataset = SVHNSSL( root, train_labeled_idxs, split='train', transform=transform_labeled) train_unlabeled_dataset_list = [] train_unlabeled_idxs_tmp = copy.deepcopy(train_unlabeled_idxs[0]) import functools import operator for id in range(len(train_unlabeled_idxs)): 
train_unlabeled_dataset = SVHNSSL( root, train_unlabeled_idxs[id], split='train', transform=TransformFix(mean=cifar10_mean, std=cifar10_std)) train_unlabeled_dataset_list.append(train_unlabeled_dataset) test_dataset = datasets.SVHN( root, split='train', transform=transform_val, download=False) logger.info("Dataset: SVHN") return train_labeled_dataset, train_unlabeled_dataset_list, test_dataset def x_u_split(labels, num_expand_x, num_expand_u, device_ids, server_idxs): labels = np.array(labels) labeled_idx = copy.deepcopy(server_idxs) unlabeled_idx = [] unlabeled_idx_list = [] for id in range(len(device_ids)): unlabeled_idx = device_ids[id] exapand_unlabeled = num_expand_u // len(device_ids[id]) // len(device_ids) unlabeled_idx = np.hstack( [unlabeled_idx for _ in range(exapand_unlabeled)]) if len(unlabeled_idx) < num_expand_u // len(device_ids): diff = num_expand_u // len(device_ids) - len(unlabeled_idx) unlabeled_idx = np.hstack( (unlabeled_idx, np.random.choice(unlabeled_idx, diff))) else: assert len(unlabeled_idx) == num_expand_u // len(device_ids) unlabeled_idx_list.append(unlabeled_idx) exapand_labeled = num_expand_x // len(labeled_idx) labeled_idx = np.hstack( [labeled_idx for _ in range(exapand_labeled)]) if len(labeled_idx) < num_expand_x: diff = num_expand_x - len(labeled_idx) labeled_idx = np.hstack( (labeled_idx, np.random.choice(labeled_idx, diff))) else: assert len(labeled_idx) == num_expand_x return labeled_idx, unlabeled_idx_list class TransformFix(object): def __init__(self, mean, std,size=32): self.weak = transforms.Compose([ transforms.RandomHorizontalFlip(), transforms.RandomCrop(size=size, padding=int(size*0.125), padding_mode='reflect')]) self.strong = transforms.Compose([ transforms.RandomHorizontalFlip(), transforms.RandomCrop(size=size, padding=int(size*0.125), padding_mode='reflect'), RandAugmentMC(n=2, m=10)]) self.normalize = transforms.Compose([ transforms.ToTensor(), transforms.Normalize(mean=mean, std=std)]) def __call__(self, x): weak 
= self.weak(x) strong = self.strong(x) return self.normalize(weak), self.normalize(strong) class CIFAR10SSL(datasets.CIFAR10): def __init__(self, root, indexs, train=True, transform=None, target_transform=None, download=False): super().__init__(root, train=train, transform=transform, target_transform=target_transform, download=download) if indexs is not None: self.data = self.data[indexs] self.targets = np.array(self.targets)[indexs] def __getitem__(self, index): img, target = self.data[index], self.targets[index] img = Image.fromarray(img) if self.transform is not None: img = self.transform(img) if self.target_transform is not None: target = self.target_transform(target) return img, target class EMNIST(datasets.EMNIST): def __init__(self, root, indexs, train=True, transform=None, target_transform=None, download=True,split='balanced'): super().__init__(root, train=train, transform=transform, target_transform=target_transform,split='balanced', download=download) if indexs is not None: self.data = self.data[indexs] self.targets = np.array(self.targets)[indexs] def __getitem__(self, index): img, target = self.data[index], self.targets[index] img = img.cpu().numpy() img = Image.fromarray(img) if self.transform is not None: img = self.transform(img) if self.target_transform is not None: target = target.cpu().numpy() target = self.target_transform(target) return img, target class SVHNSSL(datasets.SVHN): def __init__(self, root, indexs, split='train', transform=None, target_transform=None, download=False): super().__init__(root, split='train', transform=transform, target_transform=target_transform, download=download) if indexs is not None: self.data = self.data[indexs] self.labels = np.array(self.labels)[indexs] def __getitem__(self, index): img, target = self.data[index], int(self.labels[index]) img = Image.fromarray(np.transpose(img, (1, 2, 0))) if self.transform is not None: img = self.transform(img) if self.target_transform is not None: target = 
self.target_transform(target) return img, target
34.711806
84
0.638291
1,165
9,997
5.203433
0.111588
0.069284
0.04454
0.037116
0.80221
0.760145
0.726493
0.715275
0.684263
0.638733
0
0.027782
0.261879
9,997
287
85
34.832753
0.793739
0
0
0.59009
0
0
0.014404
0
0
0
0
0
0.009009
1
0.054054
false
0
0.04955
0
0.157658
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
4085c24ae9a233332c357205d844a96bc1d6a0c3
39,874
py
Python
lanxinplus_openapi/api/addrbk_staff_api.py
lanxinplus/lanxinplus-python-sdk
39ea9cb66a087df06e61ed4a2b473fb170a47f99
[ "MIT" ]
null
null
null
lanxinplus_openapi/api/addrbk_staff_api.py
lanxinplus/lanxinplus-python-sdk
39ea9cb66a087df06e61ed4a2b473fb170a47f99
[ "MIT" ]
null
null
null
lanxinplus_openapi/api/addrbk_staff_api.py
lanxinplus/lanxinplus-python-sdk
39ea9cb66a087df06e61ed4a2b473fb170a47f99
[ "MIT" ]
null
null
null
""" LanXin+ OpenAPI LanXin+ OpenAPI Platform # noqa: E501 Generated by: https://openapi.lanxin.cn """ import re # noqa: F401 import sys # noqa: F401 from lanxinplus_openapi.api_client import ApiClient, Endpoint as _Endpoint from lanxinplus_openapi.model_utils import ( # noqa: F401 check_allowed_values, check_validations, date, datetime, file_type, none_type, validate_and_convert_types ) from lanxinplus_openapi.model.v1_org_extra_field_ids_fetch_response import V1OrgExtraFieldIdsFetchResponse from lanxinplus_openapi.model.v1_staffs_create_request_body import V1StaffsCreateRequestBody from lanxinplus_openapi.model.v1_staffs_create_response import V1StaffsCreateResponse from lanxinplus_openapi.model.v1_staffs_delete_response import V1StaffsDeleteResponse from lanxinplus_openapi.model.v1_staffs_dept_ancestors_fetch_response import V1StaffsDeptAncestorsFetchResponse from lanxinplus_openapi.model.v1_staffs_fetch_response import V1StaffsFetchResponse from lanxinplus_openapi.model.v1_staffs_infor_fetch_response import V1StaffsInforFetchResponse from lanxinplus_openapi.model.v1_staffs_update_request_body import V1StaffsUpdateRequestBody from lanxinplus_openapi.model.v1_staffs_update_response import V1StaffsUpdateResponse from lanxinplus_openapi.model.v1_tags_fetch_request_body import V1TagsFetchRequestBody from lanxinplus_openapi.model.v1_tags_fetch_response import V1TagsFetchResponse from lanxinplus_openapi.model.v2_staffs_id_mapping_fetch_response import V2StaffsIdMappingFetchResponse from lanxinplus_openapi.model.v2_staffs_search_request_body import V2StaffsSearchRequestBody from lanxinplus_openapi.model.v2_staffs_search_response import V2StaffsSearchResponse class AddrbkStaffApi(object): """NOTE: This class is auto generated by LanXin+ Ref: https://openapi.lanxin.cn Do not edit the class manually. 
""" def __init__(self, api_client=None): if api_client is None: api_client = ApiClient() self.api_client = api_client self.v1_org_extra_field_ids_fetch_endpoint = _Endpoint( settings={ 'response_type': (V1OrgExtraFieldIdsFetchResponse,), 'auth': [], 'endpoint_path': '/v1/org/{orgid}/extrafieldids/fetch', 'operation_id': 'v1_org_extra_field_ids_fetch', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'app_token', 'orgid', 'user_token', 'page', 'page_size', ], 'required': [ 'app_token', 'orgid', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'app_token': (str,), 'orgid': (str,), 'user_token': (str,), 'page': (int,), 'page_size': (int,), }, 'attribute_map': { 'app_token': 'app_token', 'orgid': 'orgid', 'user_token': 'user_token', 'page': 'page', 'page_size': 'page_size', }, 'location_map': { 'app_token': 'query', 'orgid': 'path', 'user_token': 'query', 'page': 'query', 'page_size': 'query', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.v1_staffs_create_endpoint = _Endpoint( settings={ 'response_type': (V1StaffsCreateResponse,), 'auth': [], 'endpoint_path': '/v1/staffs/create', 'operation_id': 'v1_staffs_create', 'http_method': 'POST', 'servers': None, }, params_map={ 'all': [ 'app_token', 'v1_staffs_create_request_body', 'user_token', ], 'required': [ 'app_token', 'v1_staffs_create_request_body', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'app_token': (str,), 'v1_staffs_create_request_body': (V1StaffsCreateRequestBody,), 'user_token': (str,), }, 'attribute_map': { 'app_token': 'app_token', 'user_token': 'user_token', }, 'location_map': { 'app_token': 'query', 'v1_staffs_create_request_body': 'body', 'user_token': 'query', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 
'content_type': [ 'application/json' ] }, api_client=api_client ) self.v1_staffs_delete_endpoint = _Endpoint( settings={ 'response_type': (V1StaffsDeleteResponse,), 'auth': [], 'endpoint_path': '/v1/staffs/{staffid}/delete', 'operation_id': 'v1_staffs_delete', 'http_method': 'POST', 'servers': None, }, params_map={ 'all': [ 'app_token', 'staffid', 'user_token', ], 'required': [ 'app_token', 'staffid', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'app_token': (str,), 'staffid': (str,), 'user_token': (str,), }, 'attribute_map': { 'app_token': 'app_token', 'staffid': 'staffid', 'user_token': 'user_token', }, 'location_map': { 'app_token': 'query', 'staffid': 'path', 'user_token': 'query', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.v1_staffs_dept_ancestors_fetch_endpoint = _Endpoint( settings={ 'response_type': (V1StaffsDeptAncestorsFetchResponse,), 'auth': [], 'endpoint_path': '/v1/staffs/{staffid}/departmentancestors/fetch', 'operation_id': 'v1_staffs_dept_ancestors_fetch', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'app_token', 'staffid', 'user_token', ], 'required': [ 'app_token', 'staffid', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'app_token': (str,), 'staffid': (str,), 'user_token': (str,), }, 'attribute_map': { 'app_token': 'app_token', 'staffid': 'staffid', 'user_token': 'user_token', }, 'location_map': { 'app_token': 'query', 'staffid': 'path', 'user_token': 'query', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.v1_staffs_fetch_endpoint = _Endpoint( settings={ 'response_type': (V1StaffsFetchResponse,), 'auth': [], 'endpoint_path': '/v1/staffs/{staffid}/fetch', 'operation_id': 'v1_staffs_fetch', 
'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'app_token', 'staffid', 'user_token', ], 'required': [ 'app_token', 'staffid', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'app_token': (str,), 'staffid': (str,), 'user_token': (str,), }, 'attribute_map': { 'app_token': 'app_token', 'staffid': 'staffid', 'user_token': 'user_token', }, 'location_map': { 'app_token': 'query', 'staffid': 'path', 'user_token': 'query', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.v1_staffs_infor_fetch_endpoint = _Endpoint( settings={ 'response_type': (V1StaffsInforFetchResponse,), 'auth': [], 'endpoint_path': '/v1/staffs/{staffid}/infor/fetch', 'operation_id': 'v1_staffs_infor_fetch', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'app_token', 'staffid', 'user_token', ], 'required': [ 'app_token', 'staffid', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'app_token': (str,), 'staffid': (str,), 'user_token': (str,), }, 'attribute_map': { 'app_token': 'app_token', 'staffid': 'staffid', 'user_token': 'user_token', }, 'location_map': { 'app_token': 'query', 'staffid': 'path', 'user_token': 'query', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.v1_staffs_update_endpoint = _Endpoint( settings={ 'response_type': (V1StaffsUpdateResponse,), 'auth': [], 'endpoint_path': '/v1/staffs/{staffid}/update', 'operation_id': 'v1_staffs_update', 'http_method': 'POST', 'servers': None, }, params_map={ 'all': [ 'app_token', 'staffid', 'v1_staffs_update_request_body', 'user_token', ], 'required': [ 'app_token', 'staffid', 'v1_staffs_update_request_body', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 
'allowed_values': { }, 'openapi_types': { 'app_token': (str,), 'staffid': (str,), 'v1_staffs_update_request_body': (V1StaffsUpdateRequestBody,), 'user_token': (str,), }, 'attribute_map': { 'app_token': 'app_token', 'staffid': 'staffid', 'user_token': 'user_token', }, 'location_map': { 'app_token': 'query', 'staffid': 'path', 'v1_staffs_update_request_body': 'body', 'user_token': 'query', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [ 'application/json' ] }, api_client=api_client ) self.v1_tags_fetch_endpoint = _Endpoint( settings={ 'response_type': (V1TagsFetchResponse,), 'auth': [], 'endpoint_path': '/v1/tags/staffids/fetch', 'operation_id': 'v1_tags_fetch', 'http_method': 'POST', 'servers': None, }, params_map={ 'all': [ 'app_token', 'v1_tags_fetch_request_body', 'user_token', 'page', 'page_size', ], 'required': [ 'app_token', 'v1_tags_fetch_request_body', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'app_token': (str,), 'v1_tags_fetch_request_body': (V1TagsFetchRequestBody,), 'user_token': (str,), 'page': (int,), 'page_size': (int,), }, 'attribute_map': { 'app_token': 'app_token', 'user_token': 'user_token', 'page': 'page', 'page_size': 'page_size', }, 'location_map': { 'app_token': 'query', 'v1_tags_fetch_request_body': 'body', 'user_token': 'query', 'page': 'query', 'page_size': 'query', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [ 'application/json' ] }, api_client=api_client ) self.v2_staffs_id_mapping_fetch_endpoint = _Endpoint( settings={ 'response_type': (V2StaffsIdMappingFetchResponse,), 'auth': [], 'endpoint_path': '/v2/staffs/id_mapping/fetch', 'operation_id': 'v2_staffs_id_mapping_fetch', 'http_method': 'GET', 'servers': None, }, params_map={ 'all': [ 'app_token', 'org_id', 'id_type', 'id_value', 'user_token', ], 'required': [ 'app_token', 'org_id', 'id_type', 
'id_value', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'app_token': (str,), 'org_id': (str,), 'id_type': (str,), 'id_value': (str,), 'user_token': (str,), }, 'attribute_map': { 'app_token': 'app_token', 'org_id': 'org_id', 'id_type': 'id_type', 'id_value': 'id_value', 'user_token': 'user_token', }, 'location_map': { 'app_token': 'query', 'org_id': 'query', 'id_type': 'query', 'id_value': 'query', 'user_token': 'query', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [], }, api_client=api_client ) self.v2_staffs_search_endpoint = _Endpoint( settings={ 'response_type': (V2StaffsSearchResponse,), 'auth': [], 'endpoint_path': '/v2/staffs/search', 'operation_id': 'v2_staffs_search', 'http_method': 'POST', 'servers': None, }, params_map={ 'all': [ 'app_token', 'user_id', 'v2_staffs_search_request_body', 'user_token', ], 'required': [ 'app_token', 'user_id', 'v2_staffs_search_request_body', ], 'nullable': [ ], 'enum': [ ], 'validation': [ ] }, root_map={ 'validations': { }, 'allowed_values': { }, 'openapi_types': { 'app_token': (str,), 'user_id': (str,), 'v2_staffs_search_request_body': (V2StaffsSearchRequestBody,), 'user_token': (str,), }, 'attribute_map': { 'app_token': 'app_token', 'user_id': 'user_id', 'user_token': 'user_token', }, 'location_map': { 'app_token': 'query', 'user_id': 'query', 'v2_staffs_search_request_body': 'body', 'user_token': 'query', }, 'collection_format_map': { } }, headers_map={ 'accept': [ 'application/json' ], 'content_type': [ 'application/json' ] }, api_client=api_client ) def v1_org_extra_field_ids_fetch( self, app_token, orgid, **kwargs ): """获取人员信息扩展字段id列表 # noqa: E501 获取组织内人员信息的扩展字段ID列表 # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_org_extra_field_ids_fetch(app_token, orgid, async_req=True) >>> result = thread.get() Args: app_token (str): app_token orgid (str): orgid Keyword Args: user_token (str): user_token. [optional] page (int): 起始页码从1开始,默认值为1. [optional] page_size (int): 每页显示个数,默认值是1000,最大值是100000. [optional] Returns: V1OrgExtraFieldIdsFetchResponse If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_host_index'] = kwargs.get('_host_index') kwargs['app_token'] = \ app_token kwargs['orgid'] = \ orgid return self.v1_org_extra_field_ids_fetch_endpoint.call_with_http_info(**kwargs) def v1_staffs_create( self, app_token, v1_staffs_create_request_body, **kwargs ): """创建人员 # noqa: E501 通过此接口,可以创建人员。仅组织内应用经过授权可以调用该接口。 特别说明:目前蓝信不支持应用并发调用人员创建接口,否则会出现添加人员到部门的操作失败,应用需要保证串行化调用该接口 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_staffs_create(app_token, v1_staffs_create_request_body, async_req=True) >>> result = thread.get() Args: app_token (str): app_token v1_staffs_create_request_body (V1StaffsCreateRequestBody): Request Body Keyword Args: user_token (str): user_token. [optional] Returns: V1StaffsCreateResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_host_index'] = kwargs.get('_host_index') kwargs['app_token'] = \ app_token kwargs['v1_staffs_create_request_body'] = \ v1_staffs_create_request_body return self.v1_staffs_create_endpoint.call_with_http_info(**kwargs) def v1_staffs_delete( self, app_token, staffid, **kwargs ): """人员删除接口 # noqa: E501 通过此接口,删除人员 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_staffs_delete(app_token, staffid, async_req=True) >>> result = thread.get() Args: app_token (str): app_token staffid (str): 人员 id Keyword Args: user_token (str): user_token. [optional] Returns: V1StaffsDeleteResponse If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_host_index'] = kwargs.get('_host_index') kwargs['app_token'] = \ app_token kwargs['staffid'] = \ staffid return self.v1_staffs_delete_endpoint.call_with_http_info(**kwargs) def v1_staffs_dept_ancestors_fetch( self, app_token, staffid, **kwargs ): """获取人员分支祖先列表 # noqa: E501 获取某个人员所在的所有分支的祖先列表 # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_staffs_dept_ancestors_fetch(app_token, staffid, async_req=True) >>> result = thread.get() Args: app_token (str): app_token staffid (str): staffid Keyword Args: user_token (str): user_token. [optional] Returns: V1StaffsDeptAncestorsFetchResponse If the method is called asynchronously, returns the request thread. """ kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_host_index'] = kwargs.get('_host_index') kwargs['app_token'] = \ app_token kwargs['staffid'] = \ staffid return self.v1_staffs_dept_ancestors_fetch_endpoint.call_with_http_info(**kwargs) def v1_staffs_fetch( self, app_token, staffid, **kwargs ): """获取人员基本信息 # noqa: E501 可以获人员的基本信息 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_staffs_fetch(app_token, staffid, async_req=True) >>> result = thread.get() Args: app_token (str): app_token staffid (str): staffid Keyword Args: user_token (str): user_token. [optional] Returns: V1StaffsFetchResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_host_index'] = kwargs.get('_host_index') kwargs['app_token'] = \ app_token kwargs['staffid'] = \ staffid return self.v1_staffs_fetch_endpoint.call_with_http_info(**kwargs) def v1_staffs_infor_fetch( self, app_token, staffid, **kwargs ): """获取人员详细信息 # noqa: E501 通过此接口,可以获取人员详细信息。需要组织授权或者个人授权 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_staffs_infor_fetch(app_token, staffid, async_req=True) >>> result = thread.get() Args: app_token (str): app_token staffid (str): staffid Keyword Args: user_token (str): user_token. [optional] Returns: V1StaffsInforFetchResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_host_index'] = kwargs.get('_host_index') kwargs['app_token'] = \ app_token kwargs['staffid'] = \ staffid return self.v1_staffs_infor_fetch_endpoint.call_with_http_info(**kwargs) def v1_staffs_update( self, app_token, staffid, v1_staffs_update_request_body, **kwargs ): """更新人员 # noqa: E501 通过此接口,可以更新人员信息。仅组织内应用经过授权可以调用该接口。 特别说明:如果涉及人员的部门信息更新,目前蓝信不支持应用并发调用人员更新接口,否则会出现更新人员部门的操作失败,应用需要保证串行化调用该接口 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_staffs_update(app_token, staffid, v1_staffs_update_request_body, async_req=True) >>> result = thread.get() Args: app_token (str): app_token staffid (str): 人员 id v1_staffs_update_request_body (V1StaffsUpdateRequestBody): Request Body Keyword Args: user_token (str): user_token. [optional] Returns: V1StaffsUpdateResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_host_index'] = kwargs.get('_host_index') kwargs['app_token'] = \ app_token kwargs['staffid'] = \ staffid kwargs['v1_staffs_update_request_body'] = \ v1_staffs_update_request_body return self.v1_staffs_update_endpoint.call_with_http_info(**kwargs) def v1_tags_fetch( self, app_token, v1_tags_fetch_request_body, **kwargs ): """通过标签获取人员的id列表 # noqa: E501 在组织内,通过指定标签过滤规则来筛选目标人员。 EMC管理后台和开放平台接口都提供关于标签的创建、修改、删除、给人员添加标签等功能,开发人员可以调用开放平台接口获取到已创建的所有标签分组, 然后根据指定的分组ID再获取到该分组下的所有标签 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v1_tags_fetch(app_token, v1_tags_fetch_request_body, async_req=True) >>> result = thread.get() Args: app_token (str): app_token v1_tags_fetch_request_body (V1TagsFetchRequestBody): Request Body Keyword Args: user_token (str): user_token. [optional] page (int): 起始页码从1开始,默认值为1. [optional] page_size (int): 每页显示个数,默认值是1000,最大值是100000. [optional] Returns: V1TagsFetchResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_host_index'] = kwargs.get('_host_index') kwargs['app_token'] = \ app_token kwargs['v1_tags_fetch_request_body'] = \ v1_tags_fetch_request_body return self.v1_tags_fetch_endpoint.call_with_http_info(**kwargs) def v2_staffs_id_mapping_fetch( self, app_token, org_id, id_type, id_value, **kwargs ): """获取人员详细信息 # noqa: E501 通过此接口,可以获取人员详细信息。需要组织授权或者个人授权 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v2_staffs_id_mapping_fetch(app_token, org_id, id_type, id_value, async_req=True) >>> result = thread.get() Args: app_token (str): app_token org_id (str): 查询人员所在的组织Id id_type (str): employ_id/mobile/mail/login/external_id id_value (str): id_type 对应的值:人员编号,手机号... Keyword Args: user_token (str): user_token. [optional] Returns: V2StaffsIdMappingFetchResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_host_index'] = kwargs.get('_host_index') kwargs['app_token'] = \ app_token kwargs['org_id'] = \ org_id kwargs['id_type'] = \ id_type kwargs['id_value'] = \ id_value return self.v2_staffs_id_mapping_fetch_endpoint.call_with_http_info(**kwargs) def v2_staffs_search( self, app_token, user_id, v2_staffs_search_request_body, **kwargs ): """搜索人员 # noqa: E501 搜索人员 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.v2_staffs_search(app_token, user_id, v2_staffs_search_request_body, async_req=True) >>> result = thread.get() Args: app_token (str): app_token user_id (str): user_id v2_staffs_search_request_body (V2StaffsSearchRequestBody): Request Body Keyword Args: user_token (str): user_token. [optional] Returns: V2StaffsSearchResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['async_req'] = kwargs.get( 'async_req', False ) kwargs['_return_http_data_only'] = kwargs.get( '_return_http_data_only', True ) kwargs['_preload_content'] = kwargs.get( '_preload_content', True ) kwargs['_request_timeout'] = kwargs.get( '_request_timeout', None ) kwargs['_check_input_type'] = kwargs.get( '_check_input_type', True ) kwargs['_check_return_type'] = kwargs.get( '_check_return_type', True ) kwargs['_host_index'] = kwargs.get('_host_index') kwargs['app_token'] = \ app_token kwargs['user_id'] = \ user_id kwargs['v2_staffs_search_request_body'] = \ v2_staffs_search_request_body return self.v2_staffs_search_endpoint.call_with_http_info(**kwargs)
32.182405
141
0.452902
3,175
39,874
5.308346
0.066142
0.05696
0.0267
0.018987
0.837665
0.787825
0.71971
0.676813
0.653969
0.629643
0
0.011251
0.447209
39,874
1,238
142
32.208401
0.75338
0.161584
0
0.668012
1
0
0.229028
0.048743
0
0
0
0
0
1
0.0111
false
0
0.018163
0
0.040363
0
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
40888de51764fa7949653e4552df62b5d3315ddf
1,799
py
Python
main/migrations/0005_auto_20200331_2004.py
GDGVIT/hestia-report
5fedd89b9a8fbc32e4f81a7529f10a706b01fe6c
[ "MIT" ]
null
null
null
main/migrations/0005_auto_20200331_2004.py
GDGVIT/hestia-report
5fedd89b9a8fbc32e4f81a7529f10a706b01fe6c
[ "MIT" ]
1
2020-03-26T00:21:07.000Z
2020-03-26T00:21:07.000Z
main/migrations/0005_auto_20200331_2004.py
GDGVIT/hestia-report
5fedd89b9a8fbc32e4f81a7529f10a706b01fe6c
[ "MIT" ]
3
2020-03-25T18:59:03.000Z
2020-04-01T00:17:11.000Z
# Generated by Django 3.0.4 on 2020-03-31 20:04

from django.db import migrations, models


def _char_field_alteration(model, field, length, allow_blank=False):
    """Build an AlterField operation converting *field* to a CharField."""
    kwargs = {'max_length': length}
    if allow_blank:
        kwargs['blank'] = True
    return migrations.AlterField(
        model_name=model,
        name=field,
        field=models.CharField(**kwargs),
    )


class Migration(migrations.Migration):
    """Normalize CharField lengths on the recommendation and report models."""

    dependencies = [
        ('main', '0004_createshoprecommendation_item'),
    ]

    operations = [
        _char_field_alteration('createshoprecommendation', 'description_of_shop', 250),
        _char_field_alteration('createshoprecommendation', 'extra_instruction', 250, allow_blank=True),
        _char_field_alteration('createshoprecommendation', 'item', 100),
        _char_field_alteration('createshoprecommendation', 'landmark', 100),
        _char_field_alteration('createshoprecommendation', 'recommended_for', 250),
        _char_field_alteration('createshoprecommendation', 'user_id', 250),
        _char_field_alteration('reportuser', 'reason', 250),
        _char_field_alteration('reportuser', 'reported_by', 250),
        _char_field_alteration('reportuser', 'user_id', 250),
    ]
30.491525
63
0.578655
154
1,799
6.584416
0.318182
0.177515
0.221893
0.257396
0.740631
0.740631
0.684418
0.684418
0.684418
0.572978
0
0.037368
0.315731
1,799
58
64
31.017241
0.786353
0.025014
0
0.711538
1
0
0.174658
0.101598
0
0
0
0
0
1
0
false
0
0.019231
0
0.076923
0
0
0
0
null
0
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
4098f8ec6bf03e14e9f451af803e0acb99df889f
272
py
Python
tests/test_comments.py
codeacio/isort
314c6e70a93c37b065b7c0f4f4903097fbce4b36
[ "MIT" ]
1
2020-08-18T06:27:01.000Z
2020-08-18T06:27:01.000Z
tests/test_comments.py
codeacio/isort
314c6e70a93c37b065b7c0f4f4903097fbce4b36
[ "MIT" ]
null
null
null
tests/test_comments.py
codeacio/isort
314c6e70a93c37b065b7c0f4f4903097fbce4b36
[ "MIT" ]
1
2020-09-18T06:42:54.000Z
2020-09-18T06:42:54.000Z
from hypothesis_auto import auto_pytest_magic

from isort import comments

# Auto-generate property-based tests for the public comment helpers.
auto_pytest_magic(comments.parse)
auto_pytest_magic(comments.add_to_line)


def test_add_to_line():
    """A comment flagged as removed is stripped from the rebuilt line."""
    rebuilt = comments.add_to_line([], "import os # comment", removed=True)
    assert rebuilt.strip() == "import os"
24.727273
96
0.786765
41
272
4.878049
0.487805
0.15
0.225
0.23
0
0
0
0
0
0
0
0
0.113971
272
10
97
27.2
0.829876
0
0
0
0
0
0.106618
0
0
0
0
0
0.166667
1
0.166667
true
0
0.5
0
0.666667
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
40db41556d08ee16d3c4cd00e0626d43c2e3fcde
171
py
Python
sidpy/hdf/__init__.py
ondrejdyck/sidpy
779034440b8233e1dae609a58a64ce2d25ca41c0
[ "MIT" ]
5
2020-10-07T14:34:32.000Z
2021-11-17T11:25:06.000Z
sidpy/hdf/__init__.py
ondrejdyck/sidpy
779034440b8233e1dae609a58a64ce2d25ca41c0
[ "MIT" ]
94
2020-07-31T17:34:23.000Z
2022-02-11T21:57:09.000Z
sidpy/hdf/__init__.py
ondrejdyck/sidpy
779034440b8233e1dae609a58a64ce2d25ca41c0
[ "MIT" ]
15
2020-08-16T14:22:47.000Z
2021-08-20T18:15:37.000Z
""" Tools to read, write data in HDF5 files """ from . import hdf_utils, prov_utils, reg_ref, dtype_utils __all__ = ['hdf_utils', 'prov_utils', 'reg_ref', 'dtype_utils']
24.428571
63
0.71345
27
171
4.074074
0.62963
0.145455
0.218182
0.309091
0.6
0.6
0.6
0.6
0
0
0
0.006803
0.140351
171
6
64
28.5
0.741497
0.22807
0
0
0
0
0.298387
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
40dd60d3de69672dc089937002686b1a40fccfcf
161
py
Python
tests/test_module.py
stactools-packages/gap
32b56ad5d713e04d9c799afb9cb75210f9734cde
[ "Apache-2.0" ]
null
null
null
tests/test_module.py
stactools-packages/gap
32b56ad5d713e04d9c799afb9cb75210f9734cde
[ "Apache-2.0" ]
3
2021-06-18T17:52:38.000Z
2021-08-12T18:19:58.000Z
tests/test_module.py
stactools-packages/gap
32b56ad5d713e04d9c799afb9cb75210f9734cde
[ "Apache-2.0" ]
null
null
null
import unittest

import stactools.gap


class TestModule(unittest.TestCase):
    """Smoke tests for the stactools.gap package."""

    def test_version(self):
        """The package must expose a non-None version attribute."""
        version = stactools.gap.__version__
        self.assertIsNotNone(version)
17.888889
55
0.770186
18
161
6.611111
0.666667
0.201681
0
0
0
0
0
0
0
0
0
0
0.149068
161
8
56
20.125
0.868613
0
0
0
0
0
0
0
0
0
0
0
0.2
1
0.2
false
0
0.4
0
0.8
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
906277fd83605fc6cfa88d404f927ee180cef4f0
152
py
Python
vnpy/api/sgit/__init__.py
black0144/vnpy
0d0ea30dad14a0150f7500ff9a62528030321426
[ "MIT" ]
5
2019-01-17T12:14:14.000Z
2021-05-30T10:24:42.000Z
vnpy/api/sgit/__init__.py
black0144/vnpy
0d0ea30dad14a0150f7500ff9a62528030321426
[ "MIT" ]
null
null
null
vnpy/api/sgit/__init__.py
black0144/vnpy
0d0ea30dad14a0150f7500ff9a62528030321426
[ "MIT" ]
5
2019-03-26T03:17:45.000Z
2019-11-05T08:08:18.000Z
# encoding: UTF-8 from __future__ import absolute_import from .vnsgitmd import MdApi from .vnsgittd import TdApi from .sgit_data_type import defineDict
25.333333
38
0.835526
22
152
5.454545
0.681818
0
0
0
0
0
0
0
0
0
0
0.007519
0.125
152
6
39
25.333333
0.894737
0.098684
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
906362d5e7eb0e6fd7f30bb8f35f8c8abc91f916
82
py
Python
deepab/resnets/__init__.py
antonkulaga/DeepAb
51a32d06d19815705bdbfb35a8a9518c17ec313a
[ "RSA-MD" ]
67
2021-07-02T08:31:10.000Z
2022-03-30T01:25:11.000Z
deepab/resnets/__init__.py
antonkulaga/DeepAb
51a32d06d19815705bdbfb35a8a9518c17ec313a
[ "RSA-MD" ]
9
2021-08-18T10:32:27.000Z
2022-03-30T06:40:05.000Z
deepab/resnets/__init__.py
antonkulaga/DeepAb
51a32d06d19815705bdbfb35a8a9518c17ec313a
[ "RSA-MD" ]
16
2021-07-17T08:33:30.000Z
2022-03-29T07:36:34.000Z
from .ResNet1D import * from .ResNet2D import * from .CrissCrossResNet2D import *
20.5
33
0.780488
9
82
7.111111
0.555556
0.3125
0
0
0
0
0
0
0
0
0
0.042857
0.146341
82
3
34
27.333333
0.871429
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
907660e21404d974e862f6fbf154dba9af93ae45
50
py
Python
getArgs.py
xl3ehindTim/Code-buddy
e04b7b4327a0b3ff2790d22aef93dca6fce021f4
[ "MIT" ]
8
2019-11-29T09:20:11.000Z
2020-11-02T10:55:35.000Z
getArgs.py
xl3ehindTim/Code-buddy
e04b7b4327a0b3ff2790d22aef93dca6fce021f4
[ "MIT" ]
2
2019-12-02T13:48:01.000Z
2019-12-02T17:00:56.000Z
getArgs.py
xl3ehindTim/Code-buddy
e04b7b4327a0b3ff2790d22aef93dca6fce021f4
[ "MIT" ]
3
2019-11-29T10:03:44.000Z
2020-10-01T10:23:55.000Z
import sys


def getArgs(i):
    """Return the i-th command-line argument (``sys.argv[i]``).

    Raises IndexError when fewer than ``i + 1`` arguments were supplied.
    """
    arguments = sys.argv
    return arguments[i]
12.5
22
0.68
9
50
3.777778
0.777778
0
0
0
0
0
0
0
0
0
0
0
0.2
50
4
22
12.5
0.85
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
0
0
0
5
90f60561d05117cfe409611463d54509592c9cd9
76
py
Python
src/utils/__init__.py
VeerSinghCurl/signature-extraction
2e04af078432c8c0168a478bbd52a5985cb0d95e
[ "MIT" ]
12
2020-12-18T12:33:13.000Z
2021-08-20T09:44:57.000Z
src/utils/__init__.py
VeerSinghCurl/signature-extraction
2e04af078432c8c0168a478bbd52a5985cb0d95e
[ "MIT" ]
1
2021-08-20T09:44:26.000Z
2021-09-12T10:34:17.000Z
src/utils/__init__.py
VeerSinghCurl/signature-extraction
2e04af078432c8c0168a478bbd52a5985cb0d95e
[ "MIT" ]
4
2021-05-05T02:58:57.000Z
2021-08-17T11:21:26.000Z
from .metrics import jaccard_score, f1_score from .utils import list_images
25.333333
44
0.842105
12
76
5.083333
0.75
0
0
0
0
0
0
0
0
0
0
0.014925
0.118421
76
2
45
38
0.895522
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
291e3e3a6a43cd00d593c1c32746ec0a88acbff7
47
py
Python
scripts/clear_leds.py
tominovak33/blinkt-scripts
cca45ba5bbb839f41db886861b5d6e7efe978c51
[ "MIT" ]
null
null
null
scripts/clear_leds.py
tominovak33/blinkt-scripts
cca45ba5bbb839f41db886861b5d6e7efe978c51
[ "MIT" ]
null
null
null
scripts/clear_leds.py
tominovak33/blinkt-scripts
cca45ba5bbb839f41db886861b5d6e7efe978c51
[ "MIT" ]
null
null
null
# Turn off every LED on the Pimoroni Blinkt! strip.
from blinkt import show, clear

clear()  # zero the in-memory pixel buffer
show()   # push the cleared buffer out to the hardware
9.4
30
0.723404
7
47
4.857143
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.170213
47
4
31
11.75
0.871795
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
291edad572d94ca0963aefca646fcad106689f89
124
py
Python
tests/fixtures/unused_import_comment_5.py
cdce8p/python-typing-update
2ad78b9ce4b5e3d8e8ff5dd35474c8e214d69983
[ "MIT" ]
5
2021-03-17T16:12:09.000Z
2021-09-12T22:19:51.000Z
tests/fixtures/unused_import_comment_5.py
cdce8p/python-typing-update
2ad78b9ce4b5e3d8e8ff5dd35474c8e214d69983
[ "MIT" ]
10
2021-03-23T18:14:24.000Z
2022-03-28T03:05:18.000Z
tests/fixtures/unused_import_comment_5.py
cdce8p/python-typing-update
2ad78b9ce4b5e3d8e8ff5dd35474c8e214d69983
[ "MIT" ]
2
2021-03-20T08:47:52.000Z
2021-06-07T04:02:02.000Z
"""Test unused import retention.""" import logging # unused-import from typing import Any, List var1: List[str] var2: Any
17.714286
35
0.733871
18
124
5.055556
0.666667
0.263736
0
0
0
0
0
0
0
0
0
0.019048
0.153226
124
6
36
20.666667
0.847619
0.354839
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
46338fe3c8a73181e6a573bdaf51aae6bd16a3b4
298
py
Python
thefeck/rules/java.py
eoinjordan/thefeck
e04f50409ba3069ec6a9f7c0aab39ca835a41b68
[ "MIT" ]
null
null
null
thefeck/rules/java.py
eoinjordan/thefeck
e04f50409ba3069ec6a9f7c0aab39ca835a41b68
[ "MIT" ]
null
null
null
thefeck/rules/java.py
eoinjordan/thefeck
e04f50409ba3069ec6a9f7c0aab39ca835a41b68
[ "MIT" ]
null
null
null
"""Fixes common java command mistake

Example:
> java bar.java
Error: Could not find or load main class bar.java

"""
from thefeck.utils import for_app


@for_app('java')
def match(command):
    """Fire when a ``java`` invocation kept the ``.java`` file extension."""
    return command.script.endswith('.java')


def get_new_command(command):
    """Re-issue the command with the trailing ``.java`` (5 chars) removed."""
    corrected = command.script[:-5]
    return corrected
16.555556
49
0.728188
45
298
4.733333
0.644444
0.065728
0.187793
0.244131
0
0
0
0
0
0
0
0.003968
0.154362
298
17
50
17.529412
0.84127
0.365772
0
0
0
0
0.049724
0
0
0
0
0
0
1
0.333333
false
0
0.166667
0.333333
0.833333
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
46357bdf93e0b3a5d0fc80de7dd3ca46ac12d016
23
py
Python
winshlex/__init__.py
jdjebi/winshlex
2caea0c0da08f6605aace4a6a3ba39030a532158
[ "MIT" ]
null
null
null
winshlex/__init__.py
jdjebi/winshlex
2caea0c0da08f6605aace4a6a3ba39030a532158
[ "MIT" ]
2
2020-08-04T13:31:14.000Z
2021-11-10T22:45:46.000Z
winshlex/__init__.py
jdjebi/winshlex
2caea0c0da08f6605aace4a6a3ba39030a532158
[ "MIT" ]
1
2020-08-02T08:50:55.000Z
2020-08-02T08:50:55.000Z
from .lex import split
23
23
0.782609
4
23
4.5
1
0
0
0
0
0
0
0
0
0
0
0
0.173913
23
1
23
23
0.947368
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
463d3d217a57ffc0b89fbdd5181bd0738801b637
233
py
Python
yepes/context_processors.py
samuelmaudo/yepes
1ef9a42d4eaa70d9b3e6e7fa519396c1e1174fcb
[ "BSD-3-Clause" ]
null
null
null
yepes/context_processors.py
samuelmaudo/yepes
1ef9a42d4eaa70d9b3e6e7fa519396c1e1174fcb
[ "BSD-3-Clause" ]
null
null
null
yepes/context_processors.py
samuelmaudo/yepes
1ef9a42d4eaa70d9b3e6e7fa519396c1e1174fcb
[ "BSD-3-Clause" ]
null
null
null
# -*- coding:utf-8 -*-

from django.contrib.sites.shortcuts import get_current_site


def current_site(request):
    """
    Context processor exposing the current site as ``current_site``.
    """
    site = get_current_site(request)
    return {'current_site': site}
23.3
59
0.703863
30
233
5.266667
0.666667
0.348101
0.177215
0
0
0
0
0
0
0
0
0.005181
0.171674
233
9
60
25.888889
0.813472
0.287554
0
0
0
0
0.08
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0
1
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
5
467638c0b9dd839518ce5fb519537fd054c4952e
150
py
Python
curso em video - Phython/desafios/desafio 7.py
ThyagoHiggins/LP-Phython
78e84aa77e786cc33b7d91397d17e93c3d5a692a
[ "MIT" ]
null
null
null
curso em video - Phython/desafios/desafio 7.py
ThyagoHiggins/LP-Phython
78e84aa77e786cc33b7d91397d17e93c3d5a692a
[ "MIT" ]
null
null
null
curso em video - Phython/desafios/desafio 7.py
ThyagoHiggins/LP-Phython
78e84aa77e786cc33b7d91397d17e93c3d5a692a
[ "MIT" ]
null
null
null
# Read two grades and print their arithmetic mean to one decimal place.
first = float(input('Write the first note: '))
second = float(input('Write the second note: '))
average = (first + second) / 2
print(f'Your average is: {average:.1f} ')
21.428571
45
0.633333
25
150
3.8
0.68
0.210526
0.315789
0.378947
0
0
0
0
0
0
0
0.048
0.166667
150
6
46
25
0.712
0
0
0
0
0
0.506667
0
0
0
0
0
0
1
0
false
0
0
0
0
0.25
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
d3b8b0979313019eb8f4c794250d8b1aa5bba8cf
340
py
Python
boucanpy/cli/alembic/__init__.py
bbhunter/boucanpy
7d2fb105e7b1e90653a511534fb878bb62d02f17
[ "MIT" ]
34
2019-11-16T17:22:15.000Z
2022-02-11T23:12:46.000Z
boucanpy/cli/alembic/__init__.py
bbhunter/boucanpy
7d2fb105e7b1e90653a511534fb878bb62d02f17
[ "MIT" ]
1
2021-02-09T09:34:55.000Z
2021-02-10T21:46:20.000Z
boucanpy/cli/alembic/__init__.py
bbhunter/boucanpy
7d2fb105e7b1e90653a511534fb878bb62d02f17
[ "MIT" ]
9
2019-11-18T22:18:07.000Z
2021-02-08T13:23:51.000Z
from .alembic_current import AlembicCurrent from .alembic_downgrade import AlembicDowngrade from .alembic_history import AlembicHistory from .alembic_init import AlembicInit from .alembic_migrate import AlembicMigrate from .alembic_show import AlembicShow from .alembic_stamp import AlembicStamp from .alembic_upgrade import AlembicUpgrade
37.777778
47
0.882353
40
340
7.3
0.475
0.30137
0
0
0
0
0
0
0
0
0
0
0.094118
340
8
48
42.5
0.948052
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
31364f3a8fd79149cf3753e42953a2e3b6bf0464
173
py
Python
platform/core/polyaxon/crons/tasks/utils.py
hackerwins/polyaxon
ff56a098283ca872abfbaae6ba8abba479ffa394
[ "Apache-2.0" ]
null
null
null
platform/core/polyaxon/crons/tasks/utils.py
hackerwins/polyaxon
ff56a098283ca872abfbaae6ba8abba479ffa394
[ "Apache-2.0" ]
null
null
null
platform/core/polyaxon/crons/tasks/utils.py
hackerwins/polyaxon
ff56a098283ca872abfbaae6ba8abba479ffa394
[ "Apache-2.0" ]
null
null
null
from datetime import timedelta
from typing import Any

from django.utils.timezone import now


def get_date_check(days: int) -> Any:
    """Return the timezone-aware datetime *days* days before the current time."""
    lookback = timedelta(days=days)
    return now() - lookback
19.222222
39
0.757225
26
173
4.961538
0.653846
0
0
0
0
0
0
0
0
0
0
0
0.16185
173
8
40
21.625
0.889655
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.6
0.2
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
5
318794d8913be4f19ae120eaa178f6640f0f0de8
705
py
Python
tests/test_utils.py
schinckel/pipeline-runner
5642e3ce33ba21d42289bc6e3592e8286b7321d3
[ "MIT" ]
6
2021-04-23T20:28:24.000Z
2022-02-12T14:55:27.000Z
tests/test_utils.py
schinckel/pipeline-runner
5642e3ce33ba21d42289bc6e3592e8286b7321d3
[ "MIT" ]
1
2022-01-17T14:43:04.000Z
2022-01-17T14:43:04.000Z
tests/test_utils.py
schinckel/pipeline-runner
5642e3ce33ba21d42289bc6e3592e8286b7321d3
[ "MIT" ]
2
2022-01-16T23:32:11.000Z
2022-02-08T20:39:22.000Z
from pipeline_runner.utils import escape_shell_string


def test_escape_shell_string():
    """Shell metacharacters are rewritten as \\xNN hex escape sequences."""
    cases = [
        (r"echo \n", r"echo \x5cn"),
        ('echo ""', r"echo \x22\x22"),
        ("echo ''", r"echo \x27\x27"),
        ("echo $ENVVAR", r"echo \x24ENVVAR"),
        ("echo ${ENVVAR}", r"echo \x24\x7bENVVAR\x7d"),
        ("awk '(NR % 5 == 0)'", r"awk \x27(NR \x25 5 == 0)\x27"),
        (
            r"cat /proc/$$/environ | xargs -0 -n1 echo | tr '\n' ','",
            r"cat /proc/\x24\x24/environ | xargs -0 -n1 echo | tr \x27\x5cn\x27 \x27,\x27",
        ),
    ]
    for raw, expected in cases:
        assert escape_shell_string(raw) == expected
47
89
0.64539
107
705
4.065421
0.308411
0.227586
0.351724
0.370115
0.542529
0.418391
0.321839
0.174713
0
0
0
0.072539
0.178723
705
14
90
50.357143
0.678756
0
0
0
0
0.083333
0.421277
0.031206
0
0
0
0
0.583333
1
0.083333
true
0
0.083333
0
0.166667
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
1
0
0
0
0
0
0
5
31a0d94323b26a2d9a9191a7f28b56c54543b4bd
187
py
Python
tests/test_bot_twitch.py
c-py/cigargary-bot
ad78e10640cad462224ec995b9c4d377229f89d7
[ "MIT" ]
3
2021-09-22T23:37:47.000Z
2022-01-13T06:23:38.000Z
tests/test_bot_twitch.py
c-py/cigargary-bot
ad78e10640cad462224ec995b9c4d377229f89d7
[ "MIT" ]
4
2021-09-19T11:11:07.000Z
2021-10-03T10:06:37.000Z
tests/test_bot_twitch.py
ShivanS93/cigargary-bot
ad78e10640cad462224ec995b9c4d377229f89d7
[ "MIT" ]
null
null
null
import pytest

from bots.twitch import TwitchBot


@pytest.fixture
def default_bot():
    """Provide a freshly constructed TwitchBot for each test."""
    bot = TwitchBot()
    return bot


def test_exists(default_bot):
    """The fixture must yield a TwitchBot instance."""
    assert isinstance(default_bot, TwitchBot)
14.384615
45
0.770053
24
187
5.833333
0.625
0.214286
0
0
0
0
0
0
0
0
0
0
0.15508
187
12
46
15.583333
0.886076
0
0
0
0
0
0
0
0
0
0
0
0.142857
1
0.285714
false
0
0.285714
0.142857
0.714286
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
31e9d515337ef2fd74220fe7a6a86d334cb94e94
369
py
Python
cursesinquirer/question.py
Kemichal/cursesinquirer
48c36b50f51da1108dd634aeaaedba12edca098a
[ "MIT" ]
null
null
null
cursesinquirer/question.py
Kemichal/cursesinquirer
48c36b50f51da1108dd634aeaaedba12edca098a
[ "MIT" ]
1
2017-05-27T23:21:52.000Z
2017-05-27T23:21:52.000Z
cursesinquirer/question.py
Kemichal/cursesinquirer
48c36b50f51da1108dd634aeaaedba12edca098a
[ "MIT" ]
null
null
null
from abc import ABCMeta, abstractmethod


class Question(metaclass=ABCMeta):
    """Abstract interface that every interactive question type implements.

    Concrete subclasses must override all four methods; attempting to
    instantiate this class directly raises TypeError.
    """

    @abstractmethod
    def set_screen(self, screen):
        """Attach the screen object the question will draw on."""
        raise NotImplementedError

    @abstractmethod
    def input(self, c):
        """Handle a single input event ``c``."""
        raise NotImplementedError

    @abstractmethod
    def renderer(self):
        """Render the question's current state."""
        raise NotImplementedError

    @abstractmethod
    def answer(self):
        """Return the user's final answer."""
        raise NotImplementedError
21.705882
59
0.756098
36
369
7.722222
0.5
0.244604
0.410072
0.442446
0
0
0
0
0
0
0
0
0.181572
369
16
60
23.0625
0.92053
0
0
0.4
0
0
0
0
0
0
0
0
0
1
0.4
false
0
0.1
0
0.6
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
9ec48b8ff7b6af89b091b06d1ad21b412b353a69
141
py
Python
SadovaHW/HW5/CW_5.1.py
kolyasalubov/Lv-639.pythonCore
06f10669a188318884adb00723127465ebdf2907
[ "MIT" ]
null
null
null
SadovaHW/HW5/CW_5.1.py
kolyasalubov/Lv-639.pythonCore
06f10669a188318884adb00723127465ebdf2907
[ "MIT" ]
null
null
null
SadovaHW/HW5/CW_5.1.py
kolyasalubov/Lv-639.pythonCore
06f10669a188318884adb00723127465ebdf2907
[ "MIT" ]
null
null
null
def zero_fuel(distance_to_pump, mpg, fuel_left):
    """Return True when the remaining fuel can cover the distance to the pump.

    Args:
        distance_to_pump: Distance still to travel (same unit as mpg's distance).
        mpg: Fuel efficiency — distance covered per unit of fuel.
        fuel_left: Units of fuel remaining in the tank.

    Returns:
        bool: True if ``fuel_left * mpg >= distance_to_pump``.
    """
    # The comparison already yields a bool — no if/else-return-literal needed.
    return fuel_left * mpg >= distance_to_pump
28.2
48
0.673759
21
141
4.190476
0.619048
0.227273
0.318182
0
0
0
0
0
0
0
0
0
0.255319
141
5
49
28.2
0.838095
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0
0
0.6
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
5
9ec97c809f2594e23336a27bd9add30c8f7a2588
104
py
Python
example.py
gemetalreg/test
c680fcc92c7a7a37ba90a05c03e7a0d0175effd5
[ "MIT" ]
null
null
null
example.py
gemetalreg/test
c680fcc92c7a7a37ba90a05c03e7a0d0175effd5
[ "MIT" ]
null
null
null
example.py
gemetalreg/test
c680fcc92c7a7a37ba90a05c03e7a0d0175effd5
[ "MIT" ]
null
null
null
def git_operation():
    """Print a status message about adding example.py to the remote repository."""
    message = "I am adding example.py file to the remote repository."
    print(message)


git_operation()
26
66
0.740385
16
104
4.6875
0.875
0.32
0
0
0
0
0
0
0
0
0
0
0.153846
104
3
67
34.666667
0.852273
0
0
0
0
0
0.509615
0
0
0
0
0
0
1
0.333333
true
0
0
0
0.333333
0.333333
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
0
0
5
9ed0ccd573c4f8d88c598c7e233fd10ea5d999dd
5,132
py
Python
new/stage_two.py
shvdiwnkozbw/Multi-Source-Sound-Localization
de67ce37f34f776112cf9b3d61e105388afc4116
[ "BSD-3-Clause-Attribution" ]
38
2020-03-14T05:55:07.000Z
2022-03-16T12:18:37.000Z
new/stage_two.py
shvdiwnkozbw/Multi-Source-Sound-Localization
de67ce37f34f776112cf9b3d61e105388afc4116
[ "BSD-3-Clause-Attribution" ]
8
2020-07-16T10:33:28.000Z
2021-11-09T02:52:17.000Z
new/stage_two.py
shvdiwnkozbw/Multi-Source-Sound-Localization
de67ce37f34f776112cf9b3d61e105388afc4116
[ "BSD-3-Clause-Attribution" ]
9
2020-07-21T08:19:46.000Z
2022-03-07T12:58:11.000Z
import torch
import torch.nn as nn
import torch.nn.functional as F


def filter_prob(cls_a, cls_v, thres):
    """Build a pairwise audio/visual validity mask from class logits.

    Both inputs are sigmoid-activated and flattened; a pair (i, j) is kept
    (value 1.0) only when both probabilities exceed `thres`.
    Returns a float tensor on `cls_a`'s device.
    """
    assert cls_a.shape == cls_v.shape
    prob_a = F.sigmoid(cls_a).view(-1)
    prob_v = F.sigmoid(cls_v).view(-1)
    # Outer product of the two boolean threshold tests -> pairwise mask.
    eff_a = (prob_a.unsqueeze(1)>thres)
    eff_v = (prob_v.unsqueeze(0)>thres)
    eff = eff_a * eff_v
    eff = eff.type(torch.FloatTensor).to(cls_a.device)
    # eff = eff * (prob_a.unsqueeze(1) * prob_v.unsqueeze(0))
    return eff


def contrastive(fine_a, fine_v):
    """Pairwise similarity matrix between audio and visual embeddings.

    Expects both tensors with 128 channels in dim 1 (asserted); flattens
    batch/position into rows and returns the row-by-row dot-product matrix.
    """
    assert fine_a.shape == fine_v.shape
    assert fine_a.shape[1] == 128
    # Move the channel axis last, then collapse to (rows, 128).
    fine_a = fine_a.permute(0, 2, 1).contiguous().view(-1, 128)
    fine_v = fine_v.permute(0, 2, 1).contiguous().view(-1, 128)
    similarity = torch.mm(fine_a, fine_v.permute(1, 0).contiguous())
    return similarity


class Align(nn.Module):
    """Audio-visual alignment head over pretrained vision/audio backbones.

    Produces an audio-visual correspondence (AVC) score for every
    cross-batch pair, per-modality 7-class logits, and class-conditioned
    128-d embeddings for contrastive training.
    """

    def __init__(self, vision, audio):
        # `vision` and `audio` are backbone modules supplied by the caller;
        # both are assumed to output 512-channel feature maps (see Conv
        # in_channels below) — TODO confirm against the backbone definitions.
        super(Align, self).__init__()
        self.vision = vision
        self.audio = audio
        # Binary AVC classifier over concatenated pooled features (512+512).
        self.avc = nn.Sequential(
            nn.Linear(1024, 128),
            nn.ReLU(True),
            nn.Linear(128, 2)
        )
        # 1x1 Conv1d projections: 512 -> 1024 -> 128 embedding channels.
        self.project_a = nn.Sequential(
            nn.Conv1d(512, 1024, 1, bias=False),
            nn.ReLU(True),
            nn.Conv1d(1024, 128, 1, bias=False)
        )
        self.project_v = nn.Sequential(
            nn.Conv1d(512, 1024, 1, bias=False),
            nn.ReLU(True),
            nn.Conv1d(1024, 128, 1, bias=False)
        )
        # 1x1 conv classifiers: 512 channels -> 7 classes (spatial CAM maps).
        self.class_a = nn.Conv2d(512, 7, 1, bias=False)
        self.class_v = nn.Conv2d(512, 7, 1, bias=False)
        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))

    def forward(self, spec, img):
        N = spec.shape[0]
        feat_a = self.audio(spec)
        feat_v = self.vision(img)
        # Class activation maps; detached so CAM weighting does not
        # backpropagate into the classifiers here.
        cam_a = F.relu(self.class_a(feat_a)).detach()
        cam_v = F.relu(self.class_v(feat_v)).detach()
        # CAM-weighted features: broadcast (N, C, 1, H, W) * (N, 1, K, H, W).
        fine_a = feat_a.unsqueeze(2) * cam_a.unsqueeze(1)
        fine_v = feat_v.unsqueeze(2) * cam_v.unsqueeze(1)
        # Per-class spatial mass of each CAM, used to normalize the mean.
        weight_a = torch.sum(cam_a.view(*cam_a.shape[:2], -1), -1)
        weight_v = torch.sum(cam_v.view(*cam_v.shape[:2], -1), -1)
        fine_a = torch.mean(fine_a.view(*fine_a.shape[:3], -1), -1)
        fine_v = torch.mean(fine_v.view(*fine_v.shape[:3], -1), -1)
        # 1e-10 guards against division by an all-zero CAM.
        fine_a = fine_a / (weight_a.unsqueeze(1)+1e-10)
        fine_v = fine_v / (weight_v.unsqueeze(1)+1e-10)
        fine_a = self.project_a(fine_a)
        fine_v = self.project_v(fine_v)
        # L2-normalize embeddings along the channel axis.
        fine_a = F.normalize(fine_a, p=2, dim=1)
        fine_v = F.normalize(fine_v, p=2, dim=1)
        feat_a = self.avgpool(feat_a)
        feat_v = self.avgpool(feat_v)
        # All N x N audio/visual pairings, concatenated on the channel axis.
        fusion = torch.cat([feat_a.unsqueeze(1).repeat([1, N, 1, 1, 1]),
                            feat_v.unsqueeze(0).repeat([N, 1, 1, 1, 1])], 2)
        fusion = torch.flatten(fusion, 2)
        avc = self.avc(fusion)
        cls_a = self.class_a(feat_a)
        cls_v = self.class_v(feat_v)
        return avc, cls_a.flatten(1), cls_v.flatten(1), fine_a, fine_v


class Location(nn.Module):
    """Localization variant of Align.

    Same layer layout as Align, but the forward pass keeps gradients
    through the CAMs, preserves the visual feature map spatially (for
    localization), and additionally returns the raw CAMs plus pooled,
    projected alignment embeddings.
    """

    def __init__(self, vision, audio):
        # Layer configuration mirrors Align.__init__ exactly.
        super(Location, self).__init__()
        self.vision = vision
        self.audio = audio
        self.avc = nn.Sequential(
            nn.Linear(1024, 128),
            nn.ReLU(True),
            nn.Linear(128, 2)
        )
        self.project_a = nn.Sequential(
            nn.Conv1d(512, 1024, 1, bias=False),
            nn.ReLU(True),
            nn.Conv1d(1024, 128, 1, bias=False)
        )
        self.project_v = nn.Sequential(
            nn.Conv1d(512, 1024, 1, bias=False),
            nn.ReLU(True),
            nn.Conv1d(1024, 128, 1, bias=False)
        )
        self.class_a = nn.Conv2d(512, 7, 1, bias=False)
        self.class_v = nn.Conv2d(512, 7, 1, bias=False)
        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))

    def forward(self, spec, img):
        N = spec.shape[0]
        feat_a = self.audio(spec)
        feat_v = self.vision(img)
        # Unlike Align.forward, CAMs are NOT detached here — gradients flow.
        cam_a = F.relu(self.class_a(feat_a))
        cam_v = F.relu(self.class_v(feat_v))
        # Audio embedding: CAM-weighted mean, as in Align.forward.
        fine_a = feat_a.unsqueeze(2) * cam_a.unsqueeze(1)
        weight_a = torch.sum(cam_a.view(*cam_a.shape[:2], -1), -1)
        fine_a = torch.mean(fine_a.view(*fine_a.shape[:3], -1), -1)
        fine_a = fine_a / (weight_a.unsqueeze(1)+1e-10)
        fine_a = self.project_a(fine_a)
        # Visual embedding keeps every spatial position (flattened), so the
        # similarity against fine_a can be mapped back to image locations.
        fine_v = self.project_v(feat_v.view(*feat_v.shape[:2], -1))
        fine_a = F.normalize(fine_a, p=2, dim=1)
        fine_v = F.normalize(fine_v, p=2, dim=1)
        feat_a = self.avgpool(feat_a)
        feat_v = self.avgpool(feat_v)
        # Global (pooled) embeddings for the alignment objective.
        align_a = self.project_a(feat_a.flatten(2))
        align_v = self.project_v(feat_v.flatten(2))
        fusion = torch.cat([feat_a.unsqueeze(1).repeat([1, N, 1, 1, 1]),
                            feat_v.unsqueeze(0).repeat([N, 1, 1, 1, 1])], 2)
        fusion = torch.flatten(fusion, 2)
        avc = self.avc(fusion)
        cls_a = self.class_a(feat_a)
        cls_v = self.class_v(feat_v)
        return avc, cls_a.flatten(1), cls_v.flatten(1), fine_a, fine_v, cam_a, cam_v,\
            align_a, align_v
38.014815
86
0.563523
808
5,132
3.371287
0.096535
0.051395
0.044053
0.041116
0.756608
0.756608
0.748164
0.748164
0.701909
0.684288
0
0.062329
0.287217
5,132
135
87
38.014815
0.68234
0.010717
0
0.627119
0
0
0
0
0
0
0
0
0.025424
1
0.050847
false
0
0.025424
0
0.127119
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
9ef9c00f44485090be066349b460d46757009fd4
303
py
Python
conftest.py
pganssle/pytz-deprecation-shim
47bd4bdd9346cafa6c6d66817082ccce099890ad
[ "ECL-2.0", "Apache-2.0" ]
6
2020-06-15T20:23:16.000Z
2021-11-11T16:37:02.000Z
conftest.py
pganssle/pytz-deprecation-shim
47bd4bdd9346cafa6c6d66817082ccce099890ad
[ "ECL-2.0", "Apache-2.0" ]
10
2020-06-11T21:37:09.000Z
2021-11-15T17:47:36.000Z
conftest.py
pganssle/pytz-deprecation-shim
47bd4bdd9346cafa6c6d66817082ccce099890ad
[ "ECL-2.0", "Apache-2.0" ]
1
2022-03-12T11:19:07.000Z
2022-03-12T11:19:07.000Z
import os
from datetime import timedelta

import hypothesis

# "long": exhaustive local runs with many more generated examples.
hypothesis.settings.register_profile("long", max_examples=5000)
# "ci": more examples than default, with a generous 1-second deadline
# per example to tolerate slow CI machines.
hypothesis.settings.register_profile(
    "ci", max_examples=2000, deadline=timedelta(seconds=1)
)
# Select the active profile via the HYPOTHESIS_PROFILE env var;
# fall back to hypothesis's built-in "default" profile.
hypothesis.settings.load_profile(os.getenv(u"HYPOTHESIS_PROFILE", "default"))
25.25
77
0.811881
38
303
6.315789
0.578947
0.225
0.216667
0.275
0
0
0
0
0
0
0
0.032258
0.079208
303
11
78
27.545455
0.827957
0
0
0
0
0
0.10231
0
0
0
0
0
0
1
0
true
0
0.375
0
0.375
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
730f98c7fda27d07d26ce7549b35750f2aee7d00
119
py
Python
6.evenodd.py
shaunakganorkar/PythonMeetup-2014
a845b1612b5755eeb3b91ba34f3339327763fdfe
[ "MIT" ]
null
null
null
6.evenodd.py
shaunakganorkar/PythonMeetup-2014
a845b1612b5755eeb3b91ba34f3339327763fdfe
[ "MIT" ]
null
null
null
6.evenodd.py
shaunakganorkar/PythonMeetup-2014
a845b1612b5755eeb3b91ba34f3339327763fdfe
[ "MIT" ]
null
null
null
# Read an integer and report whether it is even or odd.
# NOTE(review): Python 2 syntax (raw_input, print statement) — will not run on Python 3.
num=int(raw_input("Enter a Number: "))
if num%2==0:
    print "Number is Even"
else:
    print"Number is Odd"
14.875
39
0.596639
20
119
3.5
0.75
0.314286
0.371429
0
0
0
0
0
0
0
0
0.022727
0.260504
119
7
40
17
0.772727
0
0
0
0
0
0.387387
0
0
0
0
0
0
0
null
null
0
0
null
null
0.4
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
5
73163bf3ed2d17bb9b82fede6b66ac4b13e2a359
62
py
Python
CovertMark/data/__init__.py
chongyangshi/CovertMark
a3156b45acceadf5fc1b9a56fa56550b4893c285
[ "MIT" ]
4
2021-01-04T09:00:33.000Z
2021-10-02T13:37:03.000Z
CovertMark/data/__init__.py
chongyangshi/CovertMark
a3156b45acceadf5fc1b9a56fa56550b4893c285
[ "MIT" ]
null
null
null
CovertMark/data/__init__.py
chongyangshi/CovertMark
a3156b45acceadf5fc1b9a56fa56550b4893c285
[ "MIT" ]
null
null
null
from . import constants, mongo, parser, retrieve, utils, plot
31
61
0.758065
8
62
5.875
1
0
0
0
0
0
0
0
0
0
0
0
0.145161
62
1
62
62
0.886792
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
b476e70912755a1b7dacf16afc4b4e447affa427
161
py
Python
src/datacatalog_custom_entries_manager/__init__.py
ricardolsmendes/datacatalog-custom-entries-manager
a9eba9bbc7663715dd4b5b60cde79088e1b5bf79
[ "MIT" ]
1
2020-09-04T11:25:08.000Z
2020-09-04T11:25:08.000Z
src/datacatalog_custom_entries_manager/__init__.py
ricardolsmendes/datacatalog-custom-types-manager
a9eba9bbc7663715dd4b5b60cde79088e1b5bf79
[ "MIT" ]
1
2020-12-26T21:21:11.000Z
2020-12-26T21:31:14.000Z
src/datacatalog_custom_entries_manager/__init__.py
ricardolsmendes/datacatalog-custom-entries-manager
a9eba9bbc7663715dd4b5b60cde79088e1b5bf79
[ "MIT" ]
null
null
null
from .custom_entries_synchronizer import CustomEntriesSynchronizer from .custom_entries_manager_cli import main __all__ = ('CustomEntriesSynchronizer', 'main')
32.2
66
0.857143
16
161
8.0625
0.625
0.155039
0.263566
0
0
0
0
0
0
0
0
0
0.080745
161
4
67
40.25
0.871622
0
0
0
0
0
0.180124
0.15528
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
c3088f4defa32b2b063b03144efeebdaf6964822
315
py
Python
test/data/testapp-v4/main/views.py
dpaola2/djangy
4b10e681cb49e5c16aba4429dfbfadfd9b512463
[ "NCSA" ]
15
2015-02-14T02:39:04.000Z
2021-12-13T14:17:15.000Z
test/data/testapp-v4/main/views.py
ojengwa/djangy
4b10e681cb49e5c16aba4429dfbfadfd9b512463
[ "NCSA" ]
null
null
null
test/data/testapp-v4/main/views.py
ojengwa/djangy
4b10e681cb49e5c16aba4429dfbfadfd9b512463
[ "NCSA" ]
11
2015-08-07T11:47:02.000Z
2021-04-29T08:08:24.000Z
import os from django.http import HttpResponse from main.models import * def index(request): return HttpResponse('testapp.main second edition') def add_foo(request): f = Foo(name="bar") f.save() return HttpResponse("bar") def count_rows(request): return HttpResponse(Foo.objects.all.count())
21
54
0.720635
43
315
5.232558
0.581395
0.24
0.222222
0
0
0
0
0
0
0
0
0
0.161905
315
14
55
22.5
0.852273
0
0
0
0
0
0.104762
0
0
0
0
0
0
1
0.272727
false
0
0.272727
0.181818
0.818182
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
c333c6982d1ecc0b96522bd60a386a9ec8fc7c17
11,336
py
Python
src/insulaudit/lib.py
bewest/insulaudit
2c0aa04a596775517a1e651723796dc19ea99ea7
[ "MIT" ]
22
2015-03-10T20:50:23.000Z
2020-11-28T13:23:54.000Z
src/insulaudit/lib.py
bewest/insulaudit
2c0aa04a596775517a1e651723796dc19ea99ea7
[ "MIT" ]
2
2016-03-13T12:56:34.000Z
2018-11-17T18:11:43.000Z
src/insulaudit/lib.py
bewest/insulaudit
2c0aa04a596775517a1e651723796dc19ea99ea7
[ "MIT" ]
10
2015-06-14T21:30:59.000Z
2018-09-13T19:01:43.000Z
""" This module provides some basic helper/formatting utilities. >>> hexdump( bytearray( [0x00] ) ) '0000 0x00 .' >>> 0x00 == HighByte( 0x0F ) True >>> 0x0F == LowByte( 0x0F ) True >>> CRC16CCITT.compute( bytearray( [ 2, 6, 6, 3 ] ) ) 16845 >>> CRC16CCITT.compute( bytearray( [ 0x02, 0x09, 0x00, ... 0x05, 0x0D, 0x02, 0x03 ] ) ) 29146 >>> BangInt( bytearray( [ 0x71, 0xDA ] ) ) 29146 >>> BangInt( bytearray( [ 0x62, 0xC2 ] ) ) == CRC16CCITT.compute( bytearray( [ 2, 0x06, 0x08, 3 ] ) ) True >>> CRC8.compute( bytearray( [ 0x00, 0xFF, 0x00 ] ) ) 177 >>> BangInt( bytearray( [ 0x02, 0X02 ] ) ) 514 >>> BangLong( bytearray( [ 0x0, 0X0, 0x02, 0x02 ] ) ) 514L """ import dateutil.parser def _fmt_hex( bytez ): return ' '.join( [ '%#04x' % x for x in list( bytez ) ] ) def _fmt_txt( bytez ): return ''.join( [ chr( x ) if 0x20 <= x < 0x7F else '.' \ for x in bytez ] ) class parse: @staticmethod def date( data ): """ >>> parse.date( '2010-11-10T01:46:00' ).isoformat( ) '2010-11-10T01:46:00' >>> parse.date( '2010-11-10 01:46:00' ).isoformat( ) '2010-11-10T01:46:00' >>> parse.date( '2010-11-10 01:46PM' ).isoformat( ) '2010-11-10T13:46:00' >>> parse.date( '2010-11-10 13:46' ).isoformat( ) '2010-11-10T13:46:00' >>> parse.date( '2010-11-10 1:46AM' ).isoformat( ) '2010-11-10T01:46:00' """ return dateutil.parser.parse( data ) def hexdump( src, length=8 ): """ Return a string representing the bytes in src, length bytes per line. """ if len( src ) == 0: return '' result = [ ] digits = 4 if isinstance( src, unicode ) else 2 for i in xrange( 0, len( src ), length ): s = src[i:i+length] hexa = ' '.join( [ '%#04x' % x for x in list( s ) ] ) text = ''.join( [ chr(x) if 0x20 <= x < 0x7F else '.' 
\ for x in s ] ) result.append( "%04X %-*s %s" % \ ( i, length * 5 , hexa, text ) ) return '\n'.join(result) def HighByte( arg ): return arg >> 8 & 0xFF def LowByte( arg ): return arg & 0xFF class CRC16CCITT: lookup = [ 0, 4129, 8258, 12387, 16516, 20645, 24774, 28903, 33032, 37161, 41290, 45419, 49548, 53677, 57806, 61935, 4657, 528, 12915, 8786, 21173, 17044, 29431, 25302, 37689, 33560, 45947, 41818, 54205, 50076, 62463, 58334, 9314, 13379, 1056, 5121, 25830, 29895, 17572, 21637, 42346, 46411, 34088, 38153, 58862, 62927, 50604, 54669, 13907, 9842, 5649, 1584, 30423, 26358, 22165, 18100, 46939, 42874, 38681, 34616, 63455, 59390, 55197, 51132, 18628, 22757, 26758, 30887, 2112, 6241, 10242, 14371, 51660, 55789, 59790, 63919, 35144, 39273, 43274, 47403, 23285, 19156, 31415, 27286, 6769, 2640, 14899, 10770, 56317, 52188, 64447, 60318, 39801, 35672, 47931, 43802, 27814, 31879, 19684, 23749, 11298, 15363, 3168, 7233, 60846, 64911, 52716, 56781, 44330, 48395, 36200, 40265, 32407, 28342, 24277, 20212, 15891, 11826, 7761, 3696, 65439, 61374, 57309, 53244, 48923, 44858, 40793, 36728, 37256, 33193, 45514, 41451, 53516, 49453, 61774, 57711, 4224, 161, 12482, 8419, 20484, 16421, 28742, 24679, 33721, 37784, 41979, 46042, 49981, 54044, 58239, 62302, 689, 4752, 8947, 13010, 16949, 21012, 25207, 29270, 46570, 42443, 38312, 34185, 62830, 58703, 54572, 50445, 13538, 9411, 5280, 1153, 29798, 25671, 21540, 17413, 42971, 47098, 34713, 38840, 59231, 63358, 50973, 55100, 9939, 14066, 1681, 5808, 26199, 30326, 17941, 22068, 55628, 51565, 63758, 59695, 39368, 35305, 47498, 43435, 22596, 18533, 30726, 26663, 6336, 2273, 14466, 10403, 52093, 56156, 60223, 64286, 35833, 39896, 43963, 48026, 19061, 23124, 27191, 31254, 2801, 6864, 10931, 14994, 64814, 60687, 56684, 52557, 48554, 44427, 40424, 36297, 31782, 27655, 23652, 19525, 15522, 11395, 7392, 3265, 61215, 65342, 53085, 57212, 44955, 49082, 36825, 40952, 28183, 32310, 20053, 24180, 11923, 16050, 3793, 7920 ] @classmethod def compute( klass, 
block ): result = 65535 #result = 0 for i in xrange( len( block ) ): tmp = block[ i ] ^ result >> 8 result = ( klass.lookup[ tmp ] ^ result << 8 ) & 0xFFFF return result class CRC8: lookup = [ 0, 155, 173, 54, 193, 90, 108, 247, 25, 130, 180, 47, 216, 67, 117, 238, 50, 169, 159, 4, 243, 104, 94, 197, 43, 176, 134, 29, 234, 113, 71, 220, 100, 255, 201, 82, 165, 62, 8, 147, 125, 230, 208, 75, 188, 39, 17, 138, 86, 205, 251, 96, 151, 12, 58, 161, 79, 212, 226, 121, 142, 21, 35, 184, 200, 83, 101, 254, 9, 146, 164, 63, 209, 74, 124, 231, 16, 139, 189, 38, 250, 97, 87, 204, 59, 160, 150, 13, 227, 120, 78, 213, 34, 185, 143, 20, 172, 55, 1, 154, 109, 246, 192, 91, 181, 46, 24, 131, 116, 239, 217, 66, 158, 5, 51, 168, 95, 196, 242, 105, 135, 28, 42, 177, 70, 221, 235, 112, 11, 144, 166, 61, 202, 81, 103, 252, 18, 137, 191, 36, 211, 72, 126, 229, 57, 162, 148, 15, 248, 99, 85, 206, 32, 187, 141, 22, 225, 122, 76, 215, 111, 244, 194, 89, 174, 53, 3, 152, 118, 237, 219, 64, 183, 44, 26, 129, 93, 198, 240, 107, 156, 7, 49, 170, 68, 223, 233, 114, 133, 30, 40, 179, 195, 88, 110, 245, 2, 153, 175, 52, 218, 65, 119, 236, 27, 128, 182, 45, 241, 106, 92, 199, 48, 171, 157, 6, 232, 115, 69, 222, 41, 178, 132, 31, 167, 60, 10, 145, 102, 253, 203, 80, 190, 37, 19, 136, 127, 228, 210, 73, 149, 14, 56, 163, 84, 207, 249, 98, 140, 23, 33, 186, 77, 214, 224, 123 ] @classmethod def compute( klass, block ): result = 0 for i in xrange( len( block ) ): result = klass.lookup[ ( result ^ block[ i ] & 0xFF ) ] return result def BangLong( bytez ): ( a, b, c, d ) = bytez l = a << 24 | b << 16 | c << 8 | d; return long( l ) def BangInt( ints ): ( x, y ) = ints return ( x & 0xFF ) << 8 | y & 0xFF; def makeByte(highNibble, lowNibble): """ 0 <= highNibble <= 15 0 <= lowNibble <= 15 0 <= result <= 255 """ result = highNibble << 4 | lowNibble & 0xF return result ENCODE_TABLE = [ 21, 49, 50, 35, 52, 37, 38, 22, 26, 25, 42, 11, 44, 13, 14, 28 ] _enc_test_1 = [ 0xA7, 0x47, 0x33, 0x62, 0x5D, 0x02, 0x01, 
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0C ] _enc_result_1 = [ 0xA9, 0x6D, 0x16, 0x8E, 0x39, 0xB2, 0x94, 0xD5, 0x72, 0x57, 0x15, 0x71, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x56, 0xC5 ] _enc_test_2 = [0xA7, 0x47, 0x33, 0x62, 0x8D, 0x00, 0xA6] _enc_result_2 = [0xA9, 0x6D, 0x16, 0x8E, 0x39, 0xB2, 0x68, 0xD5, 0x55, 0xAA, 0x65] def encodeDC(msg): """ >>> encodeDC(_enc_test_1) == bytearray(_enc_result_1) True >>> encodeDC(_enc_test_2) == bytearray(_enc_result_2) True """ msg = bytearray(msg) # realign bytes nibbles = [ ] result = [ ] # collect nibbles for b in msg: highNibble = b >> 4 & 0xF lowNibble = b & 0xF dcValue1 = ENCODE_TABLE[highNibble] dcValue2 = ENCODE_TABLE[lowNibble] nibbles.append(dcValue1 >> 2) high2Bits = dcValue1 & 0x3 low2Bits = dcValue2 >> 4 & 0x3 nibbles.append( high2Bits << 2 | low2Bits ) nibbles.append( dcValue2 & 0xF ) for i in xrange(0, len(nibbles), 2): # last item gets a padding terminator high, low = nibbles[i], 5 # most elide the next item if i < len(nibbles) - 1: low = nibbles[i+1] result.append(makeByte(high, low)) return bytearray(result) _decode_test_1 = [0xA9, 0x6D, 0x16, 0x8E, 0x39, 0xB2, 
0x68, 0xD5, 0x59, 0x56, 0x38, 0xD6, 0x8F, 0x28, 0xF2, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0xB3, 0x25] _decode_result_1 = [0xA7, 0x47, 0x33, 0x62, 0x8D, 0x09, 0x03, 0x37, 0x32, 0x32, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xC2] _decode_test_2 = [0xA9, 0x6D, 0x16, 0x8E, 0x39, 0xB2, 0x56, 0x65, 0x55, 0x56, 0x35] _decode_result_2 = [0xA7, 0x47, 0x33, 0x62, 0x06, 0x00, 0x03] def decodeDC(msg): """ >>> decodeDC(_decode_test_1) == bytearray(_decode_result_1) True >>> decodeDC(_decode_test_2) == bytearray(_decode_result_2) True """ msg = bytearray(msg) result = [ ] nibbleCount = 0 bitCount = 0 sixBitValue = 0 highValue = 0 highNibble = 0 # for B in msg: bP = 7 while bP >= 0: bitValue = B >> bP & 0x1 sixBitValue = sixBitValue << 1 | bitValue bitCount += 1 if bitCount != 6: bP -= 1 continue; # next nibbleCount += 1 if nibbleCount == 1: highNibble = decodeDCByte(sixBitValue) else: lowNibble = decodeDCByte(sixBitValue) byteValue = makeByte(highNibble, lowNibble) # append to result result.append(byteValue) nibbleCount = 0 sixBitValue = 0 bitCount = 0 bP -= 1 return bytearray(result) def decodeDCByte(B): # B should be 0 < B && B < 63 # look up in decode table return 
ENCODE_TABLE.index(B) if __name__ == '__main__': import doctest doctest.testmod( ) ##### # EOF
34.247734
101
0.563514
1,565
11,336
4.040895
0.450479
0.228969
0.339658
0.447818
0.30408
0.290323
0.262492
0.248577
0.229918
0.229918
0
0.426372
0.28952
11,336
330
102
34.351515
0.358828
0.146348
0
0.254902
0
0
0.004205
0
0
0
0.17145
0
0
1
0.068627
false
0
0.009804
0.02451
0.176471
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
c35c4b6b57d7432beb47ac9d0dc77353d71ed585
45
py
Python
maskrcnn_benchmark/data/datasets/file_name.py
meryusha/seeds_faster
a80cd144c2826cdee5dd929087005f57567ae367
[ "MIT" ]
1
2021-12-06T10:47:31.000Z
2021-12-06T10:47:31.000Z
maskrcnn_benchmark/data/datasets/file_name.py
SilvioGiancola/seeds_faster
4c6a1f1fa71beff7c9d0722d134eb1291f57983e
[ "MIT" ]
null
null
null
maskrcnn_benchmark/data/datasets/file_name.py
SilvioGiancola/seeds_faster
4c6a1f1fa71beff7c9d0722d134eb1291f57983e
[ "MIT" ]
1
2019-07-18T13:57:07.000Z
2019-07-18T13:57:07.000Z
import os for filename in os.listdir("xyz"):
22.5
35
0.733333
8
45
4.125
0.875
0
0
0
0
0
0
0
0
0
0
0
0.133333
45
2
35
22.5
0.846154
0
0
0
0
0
0.065217
0
0
0
0
0
0
0
null
null
0
0.5
null
null
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
5
5eeee9d991e2b0e9f3c3e13df443b021d94613ec
50
py
Python
simulation/common/__init__.py
LBNL-ETA/LPDM
3384a784b97e49cd7a801b758717a7107a51119f
[ "BSD-3-Clause-LBNL" ]
2
2019-01-05T02:33:38.000Z
2020-04-22T16:57:50.000Z
simulation/common/__init__.py
LBNL-ETA/LPDM
3384a784b97e49cd7a801b758717a7107a51119f
[ "BSD-3-Clause-LBNL" ]
3
2019-04-17T18:13:08.000Z
2021-04-23T22:40:23.000Z
simulation/common/__init__.py
LBNL-ETA/LPDM
3384a784b97e49cd7a801b758717a7107a51119f
[ "BSD-3-Clause-LBNL" ]
1
2019-01-31T08:37:44.000Z
2019-01-31T08:37:44.000Z
from device_class_loader import DeviceClassLoader
25
49
0.92
6
50
7.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.08
50
1
50
50
0.956522
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
6f2788a7e2c2f3c146e18084a5bf00d6f054331e
32
py
Python
rlberry/agents/jax/__init__.py
riccardodv/rlberry
8bb03772cda1e13c57de0e1da7bc7356a3014cfb
[ "MIT" ]
86
2020-11-20T21:02:27.000Z
2022-03-07T14:57:40.000Z
rlberry/agents/jax/__init__.py
riccardodv/rlberry
8bb03772cda1e13c57de0e1da7bc7356a3014cfb
[ "MIT" ]
103
2020-11-17T12:31:21.000Z
2022-03-28T13:46:16.000Z
rlberry/agents/jax/__init__.py
riccardodv/rlberry
8bb03772cda1e13c57de0e1da7bc7356a3014cfb
[ "MIT" ]
20
2020-11-23T01:47:50.000Z
2022-03-25T07:45:24.000Z
# from .dqn.dqn import DQNAgent
16
31
0.75
5
32
4.8
0.8
0
0
0
0
0
0
0
0
0
0
0
0.15625
32
1
32
32
0.888889
0.90625
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
6f28d62470f34e5453e7e8d6a3b217436edaadce
67
py
Python
element/utility.py
antopenrf/FLO
3183af8f4ee63d6ba2188551e322943a2874054a
[ "MIT" ]
3
2021-06-06T14:00:22.000Z
2021-06-07T12:48:19.000Z
element/utility.py
antopenrf/FLO
3183af8f4ee63d6ba2188551e322943a2874054a
[ "MIT" ]
3
2019-03-16T18:22:29.000Z
2021-06-06T14:03:07.000Z
element/utility.py
antopenrf/FLO
3183af8f4ee63d6ba2188551e322943a2874054a
[ "MIT" ]
1
2017-09-27T14:05:38.000Z
2017-09-27T14:05:38.000Z
def prompt_out(input_text, mode = 'term'): print(input_text)
13.4
42
0.686567
10
67
4.3
0.8
0.418605
0
0
0
0
0
0
0
0
0
0
0.179104
67
4
43
16.75
0.781818
0
0
0
0
0
0.061538
0
0
0
0
0
0
1
0.5
false
0
0
0
0.5
0.5
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
1
0
5
6f3182248b8260388b2540edd5d3d8139f1e9da8
95
py
Python
medical_prescription/dashboardHealthProfessional/views/__init__.py
ristovao/2017.2-Receituario-Medico
5387eb80dfb354e948abe64f7d8bbe087fc4f136
[ "MIT" ]
11
2017-09-19T00:29:40.000Z
2018-04-05T23:52:39.000Z
medical_prescription/dashboardHealthProfessional/views/__init__.py
ristovao/2017.2-Receituario-Medico
5387eb80dfb354e948abe64f7d8bbe087fc4f136
[ "MIT" ]
271
2017-09-09T00:07:28.000Z
2017-12-07T05:00:45.000Z
medical_prescription/dashboardHealthProfessional/views/__init__.py
ristovao/2017.2-Receituario-Medico
5387eb80dfb354e948abe64f7d8bbe087fc4f136
[ "MIT" ]
26
2017-08-31T20:48:49.000Z
2018-03-21T15:11:27.000Z
from .home_health_professional import HomeHealthProfessional from .chart_data import ChartData
31.666667
60
0.894737
11
95
7.454545
0.818182
0
0
0
0
0
0
0
0
0
0
0
0.084211
95
2
61
47.5
0.942529
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
6f4105bc155d3b1767291aa3047c974d64c78194
45
py
Python
prac_first.py
sandunijayasundara/IBM_Test
42847a8bc43b2642224c5dfbaed47f5235c5daaa
[ "MIT" ]
null
null
null
prac_first.py
sandunijayasundara/IBM_Test
42847a8bc43b2642224c5dfbaed47f5235c5daaa
[ "MIT" ]
null
null
null
prac_first.py
sandunijayasundara/IBM_Test
42847a8bc43b2642224c5dfbaed47f5235c5daaa
[ "MIT" ]
null
null
null
#### Print Hello World prinrt("Hello World")
15
22
0.688889
6
45
5.166667
0.666667
0.645161
0
0
0
0
0
0
0
0
0
0
0.133333
45
2
23
22.5
0.794872
0.377778
0
0
0
0
0.478261
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
6f5202804dda82caa07940aec05c9d9b89bb74b5
64
py
Python
train/torch/nlp/network/bert/seq_cls_ft.py
charliemorning/mlws
8e9bad59ca9f5e774cc1ae7fe454ff3b8a8e1784
[ "MIT" ]
null
null
null
train/torch/nlp/network/bert/seq_cls_ft.py
charliemorning/mlws
8e9bad59ca9f5e774cc1ae7fe454ff3b8a8e1784
[ "MIT" ]
null
null
null
train/torch/nlp/network/bert/seq_cls_ft.py
charliemorning/mlws
8e9bad59ca9f5e774cc1ae7fe454ff3b8a8e1784
[ "MIT" ]
null
null
null
import torch from transformers import BertForTokenClassification
32
51
0.921875
6
64
9.833333
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.078125
64
2
51
32
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
6f760495a3d4c23f364717c623ceba499af15c63
23
py
Python
src/__init__.py
jeffrylazo/javicho
9cdb4b9c016b7288eec5f8678e9dc347e810ce8a
[ "BSD-3-Clause" ]
null
null
null
src/__init__.py
jeffrylazo/javicho
9cdb4b9c016b7288eec5f8678e9dc347e810ce8a
[ "BSD-3-Clause" ]
null
null
null
src/__init__.py
jeffrylazo/javicho
9cdb4b9c016b7288eec5f8678e9dc347e810ce8a
[ "BSD-3-Clause" ]
null
null
null
from .core import Data
11.5
22
0.782609
4
23
4.5
1
0
0
0
0
0
0
0
0
0
0
0
0.173913
23
1
23
23
0.947368
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
6f7932a0a9d31e1e8b1c57ababa18389f480ab79
190
py
Python
backend/alexandria/modules/utils/vendor/flask_restplus_patched/__init__.py
oclay1st/Alexandria
9922cb5b2f8351ef8562bd4d45f56cec9a24d837
[ "MIT" ]
null
null
null
backend/alexandria/modules/utils/vendor/flask_restplus_patched/__init__.py
oclay1st/Alexandria
9922cb5b2f8351ef8562bd4d45f56cec9a24d837
[ "MIT" ]
1
2020-03-02T19:35:48.000Z
2020-03-02T19:35:48.000Z
backend/alexandria/modules/utils/vendor/flask_restplus_patched/__init__.py
oclay1st/alexandria
9922cb5b2f8351ef8562bd4d45f56cec9a24d837
[ "MIT" ]
null
null
null
from .api import Api from .namespace import Namespace from .parameters import Parameters, multi_params from .resource import Resource from .schema import Schema from .swagger import Swagger
27.142857
48
0.831579
26
190
6.038462
0.384615
0
0
0
0
0
0
0
0
0
0
0
0.131579
190
6
49
31.666667
0.951515
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
48b196b37d273912530403f0117ac0f5b9ff0905
11,250
py
Python
armi/physics/neutronics/energyGroups.py
ZanderUF/armi
c55ebe4d77821d3357ddd3326478ffaf44962c89
[ "Apache-2.0" ]
null
null
null
armi/physics/neutronics/energyGroups.py
ZanderUF/armi
c55ebe4d77821d3357ddd3326478ffaf44962c89
[ "Apache-2.0" ]
null
null
null
armi/physics/neutronics/energyGroups.py
ZanderUF/armi
c55ebe4d77821d3357ddd3326478ffaf44962c89
[ "Apache-2.0" ]
1
2020-08-26T09:02:06.000Z
2020-08-26T09:02:06.000Z
""" Energy group structures for multigroup neutronics calculations. """ import itertools import copy import math import numpy from armi import utils from armi import runLog from .const import ( FAST_FLUX_THRESHOLD_EV, MAXIMUM_XS_LIBRARY_ENERGY, ULTRA_FINE_GROUP_LETHARGY_WIDTH, HIGH_ENERGY_EV, ) def getFastFluxGroupCutoff(eGrpStruc): """ Given a constant "fast" energy threshold, return which ARMI energy group index contains this threshold. """ gThres = -1 for g, eV in enumerate(eGrpStruc): if eV < FAST_FLUX_THRESHOLD_EV: gThres = g break dE = eGrpStruc[gThres - 1] - eGrpStruc[gThres] # eV fastFluxFracInG = (eGrpStruc[gThres - 1] - FAST_FLUX_THRESHOLD_EV) / dE return gThres - 1, fastFluxFracInG def _flatten(*numbers): result = [] for item in numbers: if isinstance(item, int): result.append(item) else: result.extend(item) return result def _create_anl_energies_with_group_lethargies(*group_lethargies): anl_energy_max = MAXIMUM_XS_LIBRARY_ENERGY en = anl_energy_max energies = [] for ee in _flatten(*group_lethargies): energies.append(en) en *= math.e ** (-ee * ULTRA_FINE_GROUP_LETHARGY_WIDTH) return energies def getGroupStructure(name): """ Return descending neutron energy group upper bounds in eV for a given structure name. Notes ----- Copy of the group structure is return so that modifications of the energy bounds does not propagate back to the `GROUP_STRUCTURE` dictionary. """ try: return copy.copy(GROUP_STRUCTURE[name]) except KeyError as ke: runLog.error( 'Could not find groupStructure with the name "{}".\n' "Choose one of: {}".format(name, ", ".join(GROUP_STRUCTURE.keys())) ) raise ke def getGroupStructureType(neutronEnergyBoundsInEv): """ Return neutron energy group structure name for a given set of neutron energy group bounds in eV. 
""" neutronEnergyBoundsInEv = numpy.array(neutronEnergyBoundsInEv) for groupStructureType in GROUP_STRUCTURE: refNeutronEnergyBoundsInEv = numpy.array(getGroupStructure(groupStructureType)) if len(refNeutronEnergyBoundsInEv) != len(neutronEnergyBoundsInEv): continue if numpy.allclose(refNeutronEnergyBoundsInEv, neutronEnergyBoundsInEv, 1e-5): return groupStructureType raise ValueError( "Neutron energy group structure type does not exist for the given neutron energy bounds: {}".format( neutronEnergyBoundsInEv ) ) GROUP_STRUCTURE = {} """ Energy groups for use in multigroup neutronics. Values are the upper bound of each energy in eV from highest energy to lowest (because neutrons typically downscatter...) """ GROUP_STRUCTURE["2"] = [HIGH_ENERGY_EV, 6.25e-01] # Nuclear Reactor Engineering: Reactor Systems Engineering, Vol. 1 GROUP_STRUCTURE["4gGlasstoneSesonske"] = [HIGH_ENERGY_EV, 5.00e04, 5.00e02, 6.25e-01] # http://serpent.vtt.fi/mediawiki/index.php/CASMO_4-group_structure GROUP_STRUCTURE["CASMO4"] = [HIGH_ENERGY_EV, 8.21e05, 5.53e03, 6.25e-01] GROUP_STRUCTURE["CASMO12"] = [ HIGH_ENERGY_EV, 2.23e06, 8.21e05, 5.53e03, 4.81e01, 4.00e00, 6.25e-01, 3.50e-01, 2.80e-01, 1.40e-01, 5.80e-02, 3.00e-02, ] # For typically for use with MCNP will need conversion to MeV, # and ordering from low to high. 
GROUP_STRUCTURE["CINDER63"] = [ 2.5000e07, 2.0000e07, 1.6905e07, 1.4918e07, 1.0000e07, 6.0650e06, 4.9658e06, 3.6788e06, 2.8651e06, 2.2313e06, 1.7377e06, 1.3534e06, 1.1080e06, 8.2085e05, 6.3928e05, 4.9790e05, 3.8870e05, 3.0200e05, 1.8320e05, 1.1110e05, 6.7380e04, 4.0870e04, 2.5540e04, 1.9890e04, 1.5030e04, 9.1190e03, 5.5310e03, 3.3550e03, 2.8400e03, 2.4040e03, 2.0350e03, 1.2340e03, 7.4850e02, 4.5400e02, 2.7540e02, 1.6700e02, 1.0130e02, 6.1440e01, 3.7270e01, 2.2600e01, 1.3710e01, 8.3150e00, 5.0430e00, 3.0590e00, 1.8550e00, 1.1250e00, 6.8300e-01, 4.1400e-01, 2.5100e-01, 1.5200e-01, 1.0000e-01, 8.0000e-02, 6.7000e-02, 5.8000e-02, 5.0000e-02, 4.2000e-02, 3.5000e-02, 3.0000e-02, 2.5000e-02, 2.0000e-02, 1.5000e-02, 1.0000e-02, 5.0000e-03, ] # fmt: off # Group structures below here are derived from Appendix E in # https://www.osti.gov/biblio/1483949-mc2-multigroup-cross-section-generation-code-fast-reactor-analysis-nuclear GROUP_STRUCTURE["ANL9"] = _create_anl_energies_with_group_lethargies( 222, 120, itertools.repeat(180, 5), 540, 300 ) GROUP_STRUCTURE["ANL33"] = _create_anl_energies_with_group_lethargies( 42, itertools.repeat(60, 28), 90, 240, 29, 1 ) GROUP_STRUCTURE["ANL70"] = _create_anl_energies_with_group_lethargies( 42, itertools.repeat(30, 67), 29, 1 ) GROUP_STRUCTURE["ANL230"] = _create_anl_energies_with_group_lethargies( [ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 1, 1, 3, 3, 3, 3, 3, 6, 6, 6, 3, 3, 3, 3, 6, 6, 6, 6, 6, 6, 3, 3, 3, 3, 6, 6, 6, 6, 2, 2, 1, 1, 2, 2, 2, 6, 6, 3, 3, 3, 3, 6, 6, 3, 3, 3, 3, 6, 6, 6, 6, 3, 3, 6, 6, 6, 3, 2, 1, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 3, 3, 3, 3, 6, 6, 6, 6, 6, 6, 6, 6, 6, 3, 3, 3, 3, 3, 3, 6, 6, 6, 6, 6, 6, 6, 6, 6, 3, 3, 3, 3, 6, 6, 6, 6, 6, 6, 6, 15, 15, 15, 15, 9, 6, 6, 9, 15, 15, 15, 3, 3, 9, 15, 9, 6, 3, 3, 9, 3, 12, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 12, 12, 6, 6, 12, 12, 12, 7, 5, 6, 6, 12, 12, 12, 12, 6, 6, 12, 12, 6, 6, 6, 6, 6, 30, 30, 30, 30, 30, 30, 30, 30, 
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 6, 24, 10, 20, 29, 1, ] ) # Reactor agnostic. Similar to ANL1041 but with 6 UFGs grouped together. # More likely to not error out on memory than 703 GROUP_STRUCTURE["348"] = _create_anl_energies_with_group_lethargies( itertools.repeat(6, 346), 5, 1 ) # Note that at one point the MC2 manual was inconsistent with the code itself GROUP_STRUCTURE["ANL703"] = _create_anl_energies_with_group_lethargies( [ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 1, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 1, 1, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 1, ] ) GROUP_STRUCTURE["ANL1041"] = _create_anl_energies_with_group_lethargies( itertools.repeat(2, 1041) ) GROUP_STRUCTURE["ANL2082"] = _create_anl_energies_with_group_lethargies( itertools.repeat(1, 2082) ) # fmt: on def _create_anl_energies_with_group_energies(group_energy_bounds): """Set energy group bounds to the nearest ultra-fine group boundaries.""" ufgEnergies = _create_anl_energies_with_group_lethargies(itertools.repeat(1, 2082)) modifiedEnergyBounds = [] for energyBound in group_energy_bounds: modifiedEnergyBounds.append(utils.findNearestValue(ufgEnergies, energyBound)) return modifiedEnergyBounds # Energy bounds of ARMI33 and ARMI45 are modified to the nearest ultra-fine group boundaries GROUP_STRUCTURE["ARMI33"] = _create_anl_energies_with_group_energies( [ 1.4190e07, 1.0000e07, 6.0650e06, 3.6780e06, 2.2313e06, 1.3530e06, 8.2080e05, 4.9787e05, 3.0190e05, 1.8310e05, 1.1109e05, 6.7370e04, 4.0860e04, 2.4788e04, 1.5030e04, 9.1180e03, 5.5308e03, 3.3540e03, 2.0340e03, 1.2341e03, 7.4850e02, 4.5390e02, 3.0432e02, 1.4860e02, 9.1660e01, 6.7904e01, 4.0160e01, 2.2600e01, 1.3709e01, 8.3150e00, 4.0000e00, 5.4000e-01, 4.1400e-01, ] ) GROUP_STRUCTURE["ARMI45"] = _create_anl_energies_with_group_energies( [ 1.419e07, 1.000e07, 6.065e06, 4.966e06, 3.679e06, 2.865e06, 2.231e06, 1.738e06, 1.353e06, 1.108e06, 8.209e05, 6.393e05, 4.979e05, 3.887e05, 3.020e05, 1.832e05, 1.111e05, 6.738e04, 
4.087e04, 2.554e04, 1.989e04, 1.503e04, 9.119e03, 5.531e03, 3.355e03, 2.840e03, 2.404e03, 2.035e03, 1.234e03, 7.485e02, 4.540e02, 2.754e02, 1.670e02, 1.013e02, 6.144e01, 3.727e01, 2.260e01, 1.371e01, 8.315e00, 5.043e00, 3.059e00, 1.855e00, 1.125e00, 6.830e-01, 4.140e-01, ] )
30
112
0.534222
1,926
11,250
3.045171
0.195223
0.25098
0.366752
0.47809
0.28133
0.269906
0.246377
0.219437
0.201023
0.17954
0
0.2779
0.312622
11,250
374
113
30.080214
0.480538
0.113333
0
0.129568
0
0
0.026362
0
0
0
0
0
0
1
0.019934
false
0
0.023256
0
0.063123
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
48bc347c13d56e4d20523053a40914553240adc3
66
py
Python
generators/app/templates/package/__init__.py
thinkulum/generator-python-cmd
769a5854a30ecfe39e14caabb41dd1133ba47b7f
[ "MIT" ]
null
null
null
generators/app/templates/package/__init__.py
thinkulum/generator-python-cmd
769a5854a30ecfe39e14caabb41dd1133ba47b7f
[ "MIT" ]
3
2020-04-21T02:11:37.000Z
2021-05-06T20:17:31.000Z
generators/app/templates/package/__init__.py
thinkulum/generator-python-cmd
769a5854a30ecfe39e14caabb41dd1133ba47b7f
[ "MIT" ]
null
null
null
from . import cli from . import controller __version__ = '0.0.1'
13.2
24
0.712121
10
66
4.3
0.7
0.465116
0
0
0
0
0
0
0
0
0
0.055556
0.181818
66
4
25
16.5
0.740741
0
0
0
0
0
0.075758
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
48d56f1fffaaf825a021bb985d234363f13ce60a
347
py
Python
mak/libs/pyxx/cxx/grammar/statement/declaration.py
motor-dev/Motor
98cb099fe1c2d31e455ed868cc2a25eae51e79f0
[ "BSD-3-Clause" ]
null
null
null
mak/libs/pyxx/cxx/grammar/statement/declaration.py
motor-dev/Motor
98cb099fe1c2d31e455ed868cc2a25eae51e79f0
[ "BSD-3-Clause" ]
null
null
null
mak/libs/pyxx/cxx/grammar/statement/declaration.py
motor-dev/Motor
98cb099fe1c2d31e455ed868cc2a25eae51e79f0
[ "BSD-3-Clause" ]
null
null
null
""" declaration-statement: block-declaration """ import glrp from ...parser import cxx98 from motor_typing import TYPE_CHECKING @glrp.rule('declaration-statement : block-declaration') @cxx98 def declaration_statement(self, p): # type: (CxxParser, glrp.Production) -> None pass if TYPE_CHECKING: from ...parser import CxxParser
18.263158
55
0.73487
41
347
6.121951
0.512195
0.239044
0.199203
0.286853
0
0
0
0
0
0
0
0.013652
0.15562
347
19
56
18.263158
0.843003
0.253602
0
0
0
0
0.162698
0.083333
0
0
0
0
0
1
0.111111
false
0.111111
0.444444
0
0.555556
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
0
0
0
5
5b02965c389196734094576a9b0574ee5c8f5f87
256
py
Python
scrapy_jsonschema/draft.py
BurnzZ/scrapy-jsonschema
43dc70db23b4e68e4c4f8e4a1c8e091398daffbd
[ "BSD-3-Clause" ]
43
2017-01-21T09:47:13.000Z
2022-03-26T18:07:38.000Z
scrapy_jsonschema/draft.py
BurnzZ/scrapy-jsonschema
43dc70db23b4e68e4c4f8e4a1c8e091398daffbd
[ "BSD-3-Clause" ]
26
2017-01-20T13:34:03.000Z
2021-03-22T17:17:02.000Z
scrapy_jsonschema/draft.py
BurnzZ/scrapy-jsonschema
43dc70db23b4e68e4c4f8e4a1c8e091398daffbd
[ "BSD-3-Clause" ]
14
2017-01-20T13:30:23.000Z
2021-03-17T15:25:55.000Z
JSON_SCHEMA_DRAFT_3 = "http://json-schema.org/draft-03/schema#" JSON_SCHEMA_DRAFT_4 = "http://json-schema.org/draft-04/schema#" JSON_SCHEMA_DRAFT_6 = "http://json-schema.org/draft-06/schema#" JSON_SCHEMA_DRAFT_7 = "http://json-schema.org/draft-07/schema#"
51.2
63
0.765625
44
256
4.181818
0.295455
0.434783
0.326087
0.369565
0.478261
0
0
0
0
0
0
0.04918
0.046875
256
4
64
64
0.704918
0
0
0
0
0
0.609375
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
5b03b05410fbc7d0f9da83847cc99039846ca559
50
py
Python
Calculator/Subtraction.py
vk536/MiniProject-Calculator
eaae40343e260a718af72247e9115e2e386abf47
[ "MIT" ]
1
2020-11-08T02:31:21.000Z
2020-11-08T02:31:21.000Z
Calculator/Subtraction.py
Nithinreddy127/Calculator-MiniProjet
88ba92d160e2028ca98bafd872b4f2ea123862b3
[ "MIT" ]
13
2020-11-08T01:06:05.000Z
2020-11-09T04:03:59.000Z
Calculator/Subtraction.py
Nithinreddy127/Calculator-MiniProjet
88ba92d160e2028ca98bafd872b4f2ea123862b3
[ "MIT" ]
1
2020-11-09T04:19:09.000Z
2020-11-09T04:19:09.000Z
def subtract(a, b): return float(a) - float(b)
25
30
0.62
9
50
3.444444
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.2
50
2
30
25
0.775
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
5b05c603c4b2e009f301a031e6e9d30930f06638
3,484
py
Python
application/app/__init__.py
LucasAntognoni/JWT_Security_Tests
dc35b562c096c220cc12c3a71b83c76e2c995acf
[ "MIT" ]
null
null
null
application/app/__init__.py
LucasAntognoni/JWT_Security_Tests
dc35b562c096c220cc12c3a71b83c76e2c995acf
[ "MIT" ]
null
null
null
application/app/__init__.py
LucasAntognoni/JWT_Security_Tests
dc35b562c096c220cc12c3a71b83c76e2c995acf
[ "MIT" ]
null
null
null
""" +-----------------+------------------------------------------------------------------------+ | **Version** | 0.1 | +-----------------+------------------------------------------------------------------------+ | **Start** | 27 Nov 2018 | +-----------------+------------------------------------------------------------------------+ | **Platform** | Unix | +-----------------+------------------------------------------------------------------------+ | **Authors** | Lucas Antognoni | +-----------------+------------------------------------------------------------------------+ | **Description** | Security Tests for JWT authentication | +-----------------+------------------------------------------------------------------------+ | **Modifications** | +-----------------+-----------+------------------------------------------------------------+ | **Date** | **Author** | **Modification** | +-----------------+------------------------------------------------------------------------+ | 27 Nov 2018 | Lucas Antognoni | Base application structure | +-----------------+------------------------------------------------------------------------+ | 27 Nov 2018 | Lucas Antognoni | Organizing application structure | +-----------------+------------------------------------------------------------------------+ | 27 Nov 2018 | Lucas Antognoni | JWT tools | +-----------------+------------------------------------------------------------------------+ | 27 Nov 2018 | Lucas Antognoni | Started tests development | +-----------------+------------------------------------------------------------------------+ | 28 Nov 2018 | Lucas Antognoni | None & claims tests and started RSA to HMAC attack | +-----------------+------------------------------------------------------------------------+ | 29 Nov 2018 | Lucas Antognoni | Finished all tests and started code documentation | +-----------------+------------------------------------------------------------------------+ | 29 Nov 2018 | Lucas Antognoni | Upgrading 
tests robustness | +-----------------+------------------------------------------------------------------------+ | 29 Nov 2018 | Lucas Antognoni | Started documentation with Sphinx | +-----------------+------------------------------------------------------------------------+ | 03 Dec 2018 | Lucas Antognoni | Finished documentation | +-----------------+------------------------------------------------------------------------+ Implementation ============== """ import sys sys.path.extend(['/home/lucas/Git/JWT_Security_Tests']) from flask import Flask from flask_jwt_extended import JWTManager from config import config app = Flask(__name__) config_name = 'development' app.config.from_object(config[config_name]) instance_path = app.root_path jwt = JWTManager(app) from views.rest import restapi app.register_blueprint(restapi)
59.050847
92
0.258611
159
3,484
5.578616
0.440252
0.157835
0.182638
0.189402
0.24239
0.096956
0.096956
0
0
0
0
0.023728
0.25
3,484
59
93
59.050847
0.315729
0.889782
0
0
0
0
0.119363
0.090186
0
0
0
0
0
1
0
false
0
0.416667
0
0.416667
0.083333
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
961204b851c543ecf414af621ffd96df7b111cc1
7,872
py
Python
flowable_sdk/api/deployment/deployment_client.py
easyopsapis/easyops-api-python
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
[ "Apache-2.0" ]
5
2019-07-31T04:11:05.000Z
2021-01-07T03:23:20.000Z
flowable_sdk/api/deployment/deployment_client.py
easyopsapis/easyops-api-python
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
[ "Apache-2.0" ]
null
null
null
flowable_sdk/api/deployment/deployment_client.py
easyopsapis/easyops-api-python
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- import os import sys import flowable_sdk.api.deployment.delete_deployment_pb2 import google.protobuf.empty_pb2 import flowable_sdk.api.deployment.get_deployment_pb2 import flowable_sdk.model.flowable.deployment_pb2 import flowable_sdk.api.deployment.get_deployment_resource_pb2 import flowable_sdk.model.flowable.deployment_resource_pb2 import flowable_sdk.api.deployment.list_deployment_pb2 import flowable_sdk.utils.http_util import google.protobuf.json_format class DeploymentClient(object): def __init__(self, server_ip="", server_port=0, service_name="", host=""): """ 初始化client :param server_ip: 指定sdk请求的server_ip,为空时走名字服务路由 :param server_port: 指定sdk请求的server_port,与server_ip一起使用, 为空时走名字服务路由 :param service_name: 指定sdk请求的service_name, 为空时按契约名称路由。如果server_ip和service_name同时设置,server_ip优先级更高 :param host: 指定sdk请求服务的host名称, 如cmdb.easyops-only.com """ if server_ip == "" and server_port != 0 or server_ip != "" and server_port == 0: raise Exception("server_ip和server_port必须同时指定") self._server_ip = server_ip self._server_port = server_port self._service_name = service_name self._host = host def delete_deployment(self, request, org, user, timeout=10): # type: (flowable_sdk.api.deployment.delete_deployment_pb2.DeleteDeploymentRequest, int, str, int) -> google.protobuf.empty_pb2.Empty """ 删除部署 :param request: delete_deployment请求 :param org: 客户的org编号,为数字 :param user: 调用api使用的用户名 :param timeout: 调用超时时间,单位秒 :return: google.protobuf.empty_pb2.Empty """ headers = {"org": org, "user": user} route_name = "" server_ip = self._server_ip if self._service_name != "": route_name = self._service_name elif self._server_ip != "": route_name = "easyops.api.flowable.deployment.DeleteDeployment" uri = "/flowable-rest/service/repository/deployments/{deploymentId}".format( deploymentId=request.deploymentId, ) requestParam = request rsp_obj = flowable_sdk.utils.http_util.do_api_request( method="DELETE", src_name="logic.flowable_sdk", dst_name=route_name, server_ip=server_ip, 
server_port=self._server_port, host=self._host, uri=uri, params=google.protobuf.json_format.MessageToDict( requestParam, preserving_proto_field_name=True), headers=headers, timeout=timeout, ) rsp = google.protobuf.empty_pb2.Empty() google.protobuf.json_format.ParseDict(rsp_obj, rsp, ignore_unknown_fields=True) return rsp def get_deployment(self, request, org, user, timeout=10): # type: (flowable_sdk.api.deployment.get_deployment_pb2.GetDeploymentRequest, int, str, int) -> flowable_sdk.model.flowable.deployment_pb2.FlowableDeployment """ 获取部署详情 :param request: get_deployment请求 :param org: 客户的org编号,为数字 :param user: 调用api使用的用户名 :param timeout: 调用超时时间,单位秒 :return: flowable_sdk.model.flowable.deployment_pb2.FlowableDeployment """ headers = {"org": org, "user": user} route_name = "" server_ip = self._server_ip if self._service_name != "": route_name = self._service_name elif self._server_ip != "": route_name = "easyops.api.flowable.deployment.GetDeployment" uri = "/flowable-rest/service/repository/deployments/{deploymentId}".format( deploymentId=request.deploymentId, ) requestParam = request rsp_obj = flowable_sdk.utils.http_util.do_api_request( method="GET", src_name="logic.flowable_sdk", dst_name=route_name, server_ip=server_ip, server_port=self._server_port, host=self._host, uri=uri, params=google.protobuf.json_format.MessageToDict( requestParam, preserving_proto_field_name=True), headers=headers, timeout=timeout, ) rsp = flowable_sdk.model.flowable.deployment_pb2.FlowableDeployment() google.protobuf.json_format.ParseDict(rsp_obj, rsp, ignore_unknown_fields=True) return rsp def get_deployment_resource(self, request, org, user, timeout=10): # type: (flowable_sdk.api.deployment.get_deployment_resource_pb2.GetDeploymentResourceRequest, int, str, int) -> flowable_sdk.model.flowable.deployment_resource_pb2.FlowableDeploymentResource """ 获取部署资源 :param request: get_deployment_resource请求 :param org: 客户的org编号,为数字 :param user: 调用api使用的用户名 :param timeout: 调用超时时间,单位秒 :return: 
flowable_sdk.model.flowable.deployment_resource_pb2.FlowableDeploymentResource """ headers = {"org": org, "user": user} route_name = "" server_ip = self._server_ip if self._service_name != "": route_name = self._service_name elif self._server_ip != "": route_name = "easyops.api.flowable.deployment.GetDeploymentResource" uri = "/flowable-rest/service/repository/deployments/{deploymentId}/resources".format( deploymentId=request.deploymentId, ) requestParam = request rsp_obj = flowable_sdk.utils.http_util.do_api_request( method="GET", src_name="logic.flowable_sdk", dst_name=route_name, server_ip=server_ip, server_port=self._server_port, host=self._host, uri=uri, params=google.protobuf.json_format.MessageToDict( requestParam, preserving_proto_field_name=True), headers=headers, timeout=timeout, ) rsp = flowable_sdk.model.flowable.deployment_resource_pb2.FlowableDeploymentResource() google.protobuf.json_format.ParseDict(rsp_obj, rsp, ignore_unknown_fields=True) return rsp def list_deployment(self, request, org, user, timeout=10): # type: (flowable_sdk.api.deployment.list_deployment_pb2.ListDeploymentRequest, int, str, int) -> flowable_sdk.api.deployment.list_deployment_pb2.ListDeploymentResponse """ 部署列表 :param request: list_deployment请求 :param org: 客户的org编号,为数字 :param user: 调用api使用的用户名 :param timeout: 调用超时时间,单位秒 :return: flowable_sdk.api.deployment.list_deployment_pb2.ListDeploymentResponse """ headers = {"org": org, "user": user} route_name = "" server_ip = self._server_ip if self._service_name != "": route_name = self._service_name elif self._server_ip != "": route_name = "easyops.api.flowable.deployment.ListDeployment" uri = "/flowable-rest/service/repository/deployments" requestParam = request rsp_obj = flowable_sdk.utils.http_util.do_api_request( method="GET", src_name="logic.flowable_sdk", dst_name=route_name, server_ip=server_ip, server_port=self._server_port, host=self._host, uri=uri, params=google.protobuf.json_format.MessageToDict( requestParam, 
preserving_proto_field_name=True), headers=headers, timeout=timeout, ) rsp = flowable_sdk.api.deployment.list_deployment_pb2.ListDeploymentResponse() google.protobuf.json_format.ParseDict(rsp_obj, rsp, ignore_unknown_fields=True) return rsp
38.588235
199
0.643547
834
7,872
5.780576
0.142686
0.063887
0.031944
0.05476
0.830741
0.804605
0.785314
0.715827
0.616677
0.616677
0
0.006054
0.265625
7,872
203
200
38.778325
0.827884
0.215955
0
0.682171
0
0
0.097099
0.077474
0
0
0
0
0
1
0.03876
false
0
0.085271
0
0.162791
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
96127041dcb5733dddc2a0b39563bc65a3039e07
1,027
py
Python
CSI_Web/models.py
Chennai-Society-of-Inventors/CSI-Web
05f9cf14dafb87cd4b4bea54e2ba781904f53d26
[ "MIT" ]
null
null
null
CSI_Web/models.py
Chennai-Society-of-Inventors/CSI-Web
05f9cf14dafb87cd4b4bea54e2ba781904f53d26
[ "MIT" ]
null
null
null
CSI_Web/models.py
Chennai-Society-of-Inventors/CSI-Web
05f9cf14dafb87cd4b4bea54e2ba781904f53d26
[ "MIT" ]
null
null
null
from django.db import models class CarouselInfo(models.Model): image_link = models.CharField(max_length=100) image_header = models.CharField(max_length=100) image_description = models.CharField(max_length=500) def __str__(self): return self.image_header class ProblemInfo(models.Model): problem_category = models.CharField(max_length=100) problem_description = models.CharField(max_length=500) name = models.CharField(max_length=30) contact_number = models.CharField(max_length=15) contact_address = models.CharField(max_length=100) email_id = models.EmailField() def __str__(self): return self.problem_category + " by " + self.name class InventionInfo(models.Model): invention = models.CharField(max_length=100) abstract = models.CharField(max_length=500) team_details = models.CharField(max_length=200) contact_number = models.CharField(max_length=15) email_id = models.EmailField() def __str__(self): return self.invention
30.205882
58
0.738072
129
1,027
5.589147
0.310078
0.249653
0.299584
0.399445
0.599168
0.421637
0.227462
0.119279
0.119279
0
0
0.038551
0.166504
1,027
33
59
31.121212
0.803738
0
0
0.291667
0
0
0.003895
0
0
0
0
0
0
1
0.125
false
0
0.041667
0.125
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
5
824a53d32d7fb89cb4f3b641dfe3600f28e49e82
131
py
Python
astropy_healpix/tests/test_bench.py
astrofrog/testbatch
f6a80bed8aa6ebd7ca428d296a8420cd4f3cb92a
[ "BSD-3-Clause" ]
null
null
null
astropy_healpix/tests/test_bench.py
astrofrog/testbatch
f6a80bed8aa6ebd7ca428d296a8420cd4f3cb92a
[ "BSD-3-Clause" ]
2
2019-06-17T21:53:09.000Z
2020-10-29T19:51:53.000Z
astropy_healpix/tests/test_bench.py
astrofrog/testbatch
f6a80bed8aa6ebd7ca428d296a8420cd4f3cb92a
[ "BSD-3-Clause" ]
1
2019-06-17T21:48:26.000Z
2019-06-17T21:48:26.000Z
from __future__ import absolute_import, print_function, division from ..bench import main def test_bench(): main(fast=True)
16.375
64
0.770992
18
131
5.222222
0.722222
0
0
0
0
0
0
0
0
0
0
0
0.152672
131
7
65
18.714286
0.846847
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
true
0
0.5
0
0.75
0.25
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
0
0
0
5
8297dbd7279173ccd26ab137941c1905862ef099
173
py
Python
saleor/core/templatetags/urls.py
skazancev/saleor
42746ba00080ce36dedc0954be66b42f0e0a7499
[ "BSD-3-Clause" ]
1
2018-03-17T02:41:15.000Z
2018-03-17T02:41:15.000Z
saleor/core/templatetags/urls.py
skazancev/saleor
42746ba00080ce36dedc0954be66b42f0e0a7499
[ "BSD-3-Clause" ]
86
2018-03-08T14:19:19.000Z
2018-05-12T14:55:16.000Z
saleor/core/templatetags/urls.py
skazancev/saleor
42746ba00080ce36dedc0954be66b42f0e0a7499
[ "BSD-3-Clause" ]
2
2018-03-05T12:29:10.000Z
2018-09-28T12:40:52.000Z
from django import template register = template.Library() @register.simple_tag def build_absolute_uri(request, location): return request.build_absolute_uri(location)
19.222222
47
0.809249
22
173
6.136364
0.681818
0.192593
0.237037
0
0
0
0
0
0
0
0
0
0.115607
173
8
48
21.625
0.882353
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.2
0.2
0.6
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
5
829cb4ce6cfb92a8c46db8f8acf04f96daac9f2d
83
py
Python
paranormal-pioneers/project/__main__.py
python-discord/code-jam-6
a7eb3b1256ae113c93f0337892c667768e8bc199
[ "MIT" ]
76
2020-01-17T12:09:48.000Z
2022-03-26T19:17:26.000Z
paranormal-pioneers/project/__main__.py
1nf1del/code-jam-6
a7eb3b1256ae113c93f0337892c667768e8bc199
[ "MIT" ]
17
2020-01-21T23:13:34.000Z
2020-02-07T00:07:04.000Z
paranormal-pioneers/project/__main__.py
1nf1del/code-jam-6
a7eb3b1256ae113c93f0337892c667768e8bc199
[ "MIT" ]
91
2020-01-17T12:01:06.000Z
2022-03-22T20:38:59.000Z
from project.core.terminal import Terminal terminal = Terminal() terminal.start()
16.6
42
0.795181
10
83
6.6
0.6
0.727273
0.727273
0
0
0
0
0
0
0
0
0
0.108434
83
4
43
20.75
0.891892
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
82c3cd09e6d2b233f34fd6d9eb84ecb764617fd5
2,018
py
Python
tests/test_managers.py
incuna/django-user-deletion
5a19505accac0db8b4f49cbbe55eaadf14243595
[ "BSD-2-Clause" ]
2
2016-07-16T07:15:44.000Z
2020-07-29T14:35:34.000Z
tests/test_managers.py
incuna/django-user-deletion
5a19505accac0db8b4f49cbbe55eaadf14243595
[ "BSD-2-Clause" ]
13
2016-04-14T14:04:36.000Z
2021-06-10T19:09:07.000Z
tests/test_managers.py
incuna/django-user-deletion
5a19505accac0db8b4f49cbbe55eaadf14243595
[ "BSD-2-Clause" ]
null
null
null
from dateutil.relativedelta import relativedelta from django.apps import apps from django.test import TestCase from django.utils import timezone from .factories import UserFactory from .models import User user_deletion_config = apps.get_app_config('user_deletion') class TestUserDeletionManager(TestCase): def test_users_to_notify(self): last_login = timezone.now() - relativedelta( months=user_deletion_config.MONTH_NOTIFICATION, ) user = UserFactory.create(last_login=last_login, notified=False) users = User.objects.users_to_notify() self.assertCountEqual(users, [user]) def test_users_not_to_notify(self): user = UserFactory.create(last_login=timezone.now(), notified=False) users = User.objects.users_to_notify() self.assertNotIn(user, users) def test_users_already_notified(self): last_login = timezone.now() - relativedelta( months=user_deletion_config.MONTH_NOTIFICATION, ) user = UserFactory.create(last_login=last_login, notified=True) users = User.objects.users_to_notify() self.assertNotIn(user, users) def test_users_to_delete(self): last_login = timezone.now() - relativedelta( months=user_deletion_config.MONTH_DELETION, ) user = UserFactory.create(last_login=last_login, notified=True) users = User.objects.users_to_delete() self.assertCountEqual(users, [user]) def test_users_not_to_delete(self): user = UserFactory.create(last_login=timezone.now(), notified=False) users = User.objects.users_to_delete() self.assertNotIn(user, users) def test_users_to_delete_not_notified(self): last_login = timezone.now() - relativedelta( months=user_deletion_config.MONTH_DELETION, ) user = UserFactory.create(last_login=last_login, notified=False) users = User.objects.users_to_delete() self.assertNotIn(user, users)
33.081967
76
0.703667
238
2,018
5.693277
0.172269
0.092989
0.053137
0.088561
0.779336
0.779336
0.779336
0.774908
0.774908
0.681919
0
0
0.207631
2,018
60
77
33.633333
0.847405
0
0
0.590909
0
0
0.006442
0
0
0
0
0
0.136364
1
0.136364
false
0
0.136364
0
0.295455
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
82cbba23399ee357dd6994ceef2cbc0c7150173f
379
py
Python
protobuf_serialization/__init__.py
alvinchow86/protobuf-serialization-py
af856b7b872317917274f74bb69418d19dafc3fa
[ "MIT" ]
1
2020-05-17T04:26:52.000Z
2020-05-17T04:26:52.000Z
protobuf_serialization/__init__.py
alvinchow86/protobuf-serialization-py
af856b7b872317917274f74bb69418d19dafc3fa
[ "MIT" ]
null
null
null
protobuf_serialization/__init__.py
alvinchow86/protobuf-serialization-py
af856b7b872317917274f74bb69418d19dafc3fa
[ "MIT" ]
null
null
null
# flake8: noqa # Convenience imports from protobuf_serialization.deserialization import protobuf_to_dict from protobuf_serialization.serialization import ( ProtobufSerializer, ProtobufDictSerializer, serialize_to_protobuf, get_serializer_for_proto_cls ) from protobuf_serialization.serialization import fields from protobuf_serialization.serialization import serializer
34.454545
67
0.873351
39
379
8.179487
0.512821
0.15047
0.31348
0.357367
0.413793
0
0
0
0
0
0
0.002915
0.094987
379
10
68
37.9
0.927114
0.084433
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.571429
0
0.571429
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
82d64b3b9ee42b18dd2290a5e511f595ff433821
135
py
Python
occurrences_api/occurrences/admin.py
ruipedrodias94/django-rest-api
10cc39f2604c1a37d5d7d4aa4ed38ab3394624b8
[ "Apache-2.0" ]
1
2020-01-28T21:23:55.000Z
2020-01-28T21:23:55.000Z
occurrences_api/occurrences/admin.py
ruipedrodias94/django-rest-api
10cc39f2604c1a37d5d7d4aa4ed38ab3394624b8
[ "Apache-2.0" ]
7
2020-06-05T20:45:51.000Z
2021-09-22T18:29:16.000Z
occurrences_api/occurrences/admin.py
ruipedrodias94/django-rest-api
10cc39f2604c1a37d5d7d4aa4ed38ab3394624b8
[ "Apache-2.0" ]
1
2020-01-28T21:25:04.000Z
2020-01-28T21:25:04.000Z
from django.contrib import admin from .models import OccurrenceModel # Register your models here. admin.site.register(OccurrenceModel)
27
36
0.837037
17
135
6.647059
0.647059
0
0
0
0
0
0
0
0
0
0
0
0.103704
135
5
36
27
0.933884
0.192593
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
7d68ed7483649598ac323df6c489d266fde2b81a
137
py
Python
src/westpa/westext/adaptvoronoi/__init__.py
jdrusso/westpa_test
3383b59a5a6ec5401415e74eb5a7fc61e4b3abbc
[ "MIT" ]
1
2021-03-19T19:58:07.000Z
2021-03-19T19:58:07.000Z
src/westpa/westext/adaptvoronoi/__init__.py
jdrusso/westpa_test
3383b59a5a6ec5401415e74eb5a7fc61e4b3abbc
[ "MIT" ]
null
null
null
src/westpa/westext/adaptvoronoi/__init__.py
jdrusso/westpa_test
3383b59a5a6ec5401415e74eb5a7fc61e4b3abbc
[ "MIT" ]
1
2021-01-09T22:46:25.000Z
2021-01-09T22:46:25.000Z
from . import adaptVor_driver from .adaptVor_driver import AdaptiveVoronoiDriver __all__ = ['adaptVor_driver', 'AdaptiveVoronoiDriver']
27.4
54
0.832117
13
137
8.230769
0.461538
0.392523
0
0
0
0
0
0
0
0
0
0
0.094891
137
4
55
34.25
0.862903
0
0
0
0
0
0.262774
0.153285
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
7d8868b03ac19a42e42ead616eafc045edbc334a
73
py
Python
Visualization/__init__.py
jzw0025/Kyber
ce2069da469095e6a086f7bbf9cd980f10563b22
[ "Unlicense" ]
3
2017-02-20T18:18:27.000Z
2021-07-31T17:00:56.000Z
Visualization/__init__.py
jzw0025/Kyber
ce2069da469095e6a086f7bbf9cd980f10563b22
[ "Unlicense" ]
null
null
null
Visualization/__init__.py
jzw0025/Kyber
ce2069da469095e6a086f7bbf9cd980f10563b22
[ "Unlicense" ]
1
2016-12-16T17:51:32.000Z
2016-12-16T17:51:32.000Z
from DataVisu import DataVisulization from MarchCube import VolumeSurface
36.5
37
0.90411
8
73
8.25
0.75
0
0
0
0
0
0
0
0
0
0
0
0.09589
73
2
38
36.5
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
7d8d9d44a6e122db864e0ab16edcb751c60e588b
10,071
py
Python
test/local/test_config_elt.py
ros-windows/rosinstall
f4210e8cddbed9ced1581a7d048b645b97c6930e
[ "BSD-3-Clause" ]
1
2018-09-11T23:28:41.000Z
2018-09-11T23:28:41.000Z
test/local/test_config_elt.py
ros-windows/rosinstall
f4210e8cddbed9ced1581a7d048b645b97c6930e
[ "BSD-3-Clause" ]
null
null
null
test/local/test_config_elt.py
ros-windows/rosinstall
f4210e8cddbed9ced1581a7d048b645b97c6930e
[ "BSD-3-Clause" ]
null
null
null
# Software License Agreement (BSD License) # # Copyright (c) 2009, Willow Garage, Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # * Neither the name of Willow Garage, Inc. nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import unittest import rosinstall.config from rosinstall.common import MultiProjectException from . 
import mock_client class ConfigElements_Test(unittest.TestCase): def test_simple_config_element_API(self): path = "some/path" localname = "some/local/name" other1 = rosinstall.config_elements.ConfigElement(path, localname) self.assertEqual(path, other1.get_path()) self.assertEqual(localname, other1.get_local_name()) self.assertFalse(other1.is_vcs_element()) other1 = rosinstall.config_elements.OtherConfigElement(path, localname) self.assertEqual(path, other1.get_path()) self.assertEqual(localname, other1.get_local_name()) self.assertEqual({'other': {'local-name': 'some/local/name'}}, other1.get_path_spec().get_legacy_yaml()) self.assertFalse(other1.is_vcs_element()) other1 = rosinstall.config_elements.SetupConfigElement(path, localname) self.assertEqual(path, other1.get_path()) self.assertEqual(localname, other1.get_local_name()) self.assertEqual({'setup-file': {'local-name': 'some/local/name'}}, other1.get_path_spec().get_legacy_yaml()) self.assertFalse(other1.is_vcs_element()) other1 = rosinstall.config_elements.OtherConfigElement(path, localname, properties=[{}]) self.assertEqual(path, other1.get_path()) self.assertEqual(localname, other1.get_local_name()) self.assertEqual({'other': {'local-name': 'some/local/name'}}, other1.get_path_spec().get_legacy_yaml()) self.assertFalse(other1.is_vcs_element()) other1 = rosinstall.config_elements.OtherConfigElement(path, localname, properties=['meta']) self.assertEqual(path, other1.get_path()) self.assertEqual(localname, other1.get_local_name()) self.assertEqual({'other': {'local-name': 'some/local/name', 'meta': None}}, other1.get_path_spec().get_legacy_yaml()) self.assertFalse(other1.is_vcs_element()) other1 = rosinstall.config_elements.OtherConfigElement(path, localname, properties=[{'meta': {'repo-name': 'skynetish-ros-pkg'}}]) self.assertEqual(path, other1.get_path()) self.assertEqual(localname, other1.get_local_name()) self.assertEqual({'other': {'local-name': 'some/local/name', 'meta': {'repo-name': 
'skynetish-ros-pkg'}}}, other1.get_path_spec().get_legacy_yaml()) self.assertFalse(other1.is_vcs_element()) def test_mock_vcs_config_element_init(self): path = "some/path" localname = "some/local/name" try: rosinstall.config_elements.AVCSConfigElement("mock", None, None, None) self.fail("Exception expected") except MultiProjectException: pass try: rosinstall.config_elements.AVCSConfigElement("mock", "path", None, None) self.fail("Exception expected") except MultiProjectException: pass try: rosinstall.config_elements.AVCSConfigElement("mock", None, None, "some/uri") self.fail("Exception expected") except MultiProjectException: pass path = "some/path" localname = "some/local/name" uri = 'some/uri' version = 'some.version' vcsc = rosinstall.config_elements.AVCSConfigElement("mock", path, localname, uri, vcsc=mock_client.MockVcsClient()) self.assertEqual(path, vcsc.get_path()) self.assertEqual(localname, vcsc.get_local_name()) self.assertEqual(uri, vcsc.uri) self.assertTrue(vcsc.is_vcs_element()) self.assertEqual("mocktypemockdiffNone", vcsc.get_diff()) self.assertEqual("mocktype mockstatusNone,False", vcsc.get_status()) self.assertEqual({'mock': {'local-name': 'some/local/name', 'uri': 'some/uri'}}, vcsc.get_path_spec().get_legacy_yaml()) self.assertEqual({'mock': {'local-name': 'some/local/name', 'uri': 'some/uri', }}, vcsc.get_versioned_path_spec().get_legacy_yaml()) vcsc = rosinstall.config_elements.AVCSConfigElement("mock", path, localname, uri, None, vcsc=mock_client.MockVcsClient()) self.assertEqual(path, vcsc.get_path()) self.assertEqual(localname, vcsc.get_local_name()) self.assertEqual(uri, vcsc.uri) self.assertTrue(vcsc.is_vcs_element()) self.assertEqual("mocktypemockdiffNone", vcsc.get_diff()) self.assertEqual("mocktype mockstatusNone,False", vcsc.get_status()) self.assertEqual({'mock': {'local-name': 'some/local/name', 'uri': 'some/uri'}}, vcsc.get_path_spec().get_legacy_yaml()) self.assertEqual({'mock': {'local-name': 'some/local/name', 'uri': 'some/uri', 
}}, vcsc.get_versioned_path_spec().get_legacy_yaml()) vcsc = rosinstall.config_elements.AVCSConfigElement("mock", path, localname, uri, version, vcsc=mock_client.MockVcsClient()) self.assertEqual(path, vcsc.get_path()) self.assertEqual(localname, vcsc.get_local_name()) self.assertEqual(uri, vcsc.uri) self.assertTrue(vcsc.is_vcs_element()) self.assertEqual("mocktypemockdiffNone", vcsc.get_diff()) self.assertEqual("mocktype mockstatusNone,False", vcsc.get_status()) self.assertEqual({'mock': {'local-name': 'some/local/name', 'version': 'some.version', 'uri': 'some/uri'}}, vcsc.get_path_spec().get_legacy_yaml()) self.assertEqual({'mock': {'local-name': 'some/local/name', 'version': 'some.version', 'uri': 'some/uri'}}, vcsc.get_versioned_path_spec().get_legacy_yaml()) vcsc = rosinstall.config_elements.AVCSConfigElement( "mock", path, localname, uri, version, vcsc=mock_client.MockVcsClient(), properties=[{'meta': {'repo-name': 'skynetish-ros-pkg'}}]) self.assertEqual(path, vcsc.get_path()) self.assertEqual(localname, vcsc.get_local_name()) self.assertEqual(uri, vcsc.uri) self.assertTrue(vcsc.is_vcs_element()) self.assertEqual("mocktypemockdiffNone", vcsc.get_diff()) self.assertEqual("mocktype mockstatusNone,False", vcsc.get_status()) self.assertEqual({'mock': {'local-name': 'some/local/name', 'version': 'some.version', 'uri': 'some/uri', 'meta': {'repo-name': 'skynetish-ros-pkg'}}}, vcsc.get_path_spec().get_legacy_yaml()) self.assertEqual({'mock': {'local-name': 'some/local/name', 'version': 'some.version', 'uri': 'some/uri', 'meta': {'repo-name': 'skynetish-ros-pkg'}}}, vcsc.get_versioned_path_spec().get_legacy_yaml()) # this time using 'uri_shortcut' in mock_client.MockVcsClient, get special treatment un url_matches() uri2 = 'some/uri2' vcsc = rosinstall.config_elements.AVCSConfigElement( "mock", path, localname, uri2, version, vcsc=mock_client.MockVcsClient(url='url_shortcut'), properties=[{'meta': {'repo-name': 'skynetish-ros-pkg'}}]) self.assertEqual(path, 
vcsc.get_path()) self.assertEqual(localname, vcsc.get_local_name()) self.assertEqual(uri2, vcsc.uri) self.assertTrue(vcsc.is_vcs_element()) self.assertEqual("mocktypemockdiffNone", vcsc.get_diff()) self.assertEqual("mocktype mockstatusNone,False", vcsc.get_status()) self.assertEqual({'mock': {'local-name': 'some/local/name', 'version': 'some.version', 'uri': 'some/uri2', 'meta': {'repo-name': 'skynetish-ros-pkg'}}}, vcsc.get_path_spec().get_legacy_yaml()) self.assertEqual({'mock': {'local-name': 'some/local/name', 'version': 'some.version', 'uri': 'some/uri2', 'meta': {'repo-name': 'skynetish-ros-pkg'}}}, vcsc.get_versioned_path_spec().get_legacy_yaml()) def test_mock_install(self): path = "some/path" localname = "some/local/name" uri = 'some/uri' version = 'some.version' mockclient = mock_client.MockVcsClient(url=uri) vcsc = rosinstall.config_elements.AVCSConfigElement("mock", path, localname, uri, None, vcsc=mockclient) vcsc.install() self.assertTrue(mockclient.checkedout) self.assertFalse(mockclient.updated) # checkout failure mockclient = mock_client.MockVcsClient(url=uri, checkout_success=False) try: vcsc = rosinstall.config_elements.AVCSConfigElement("mock", path, localname, uri, None, vcsc=mockclient) vcsc.install() self.fail("should have raised Exception") except MultiProjectException: pass
58.213873
210
0.68871
1,190
10,071
5.684874
0.17395
0.115299
0.036511
0.039911
0.758463
0.746341
0.73082
0.722543
0.692683
0.683075
0
0.0047
0.17605
10,071
172
211
58.552326
0.810557
0.164631
0
0.6875
0
0
0.173648
0
0
0
0
0
0.507813
1
0.023438
false
0.03125
0.03125
0
0.0625
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
5
7da40f21370bc9393806cd5f2e7e0fb2dd21ecc7
3,784
py
Python
homes_to_let/tests/test_queryset.py
Xtuden-com/django-property
6656d469a5d06c103a34c2e68b9f1754413fb3ba
[ "MIT" ]
null
null
null
homes_to_let/tests/test_queryset.py
Xtuden-com/django-property
6656d469a5d06c103a34c2e68b9f1754413fb3ba
[ "MIT" ]
null
null
null
homes_to_let/tests/test_queryset.py
Xtuden-com/django-property
6656d469a5d06c103a34c2e68b9f1754413fb3ba
[ "MIT" ]
null
null
null
from datetime import datetime, timedelta

from django.test import TestCase

import pytz

from homes_to_let.factories.letting_factory import LettingFactory
from homes_to_let.models import Letting


def _days_from_now(days):
    """Return a UTC-aware datetime *days* days from now (negative = in the past)."""
    return datetime.utcnow().replace(tzinfo=pytz.UTC) + timedelta(days=days)


class SaleQuerySetTestCase(TestCase):
    """Exercise each custom ``Letting.filtered`` queryset method.

    Every test creates exactly two records — one that should match the
    filter under test and one that should not — and asserts that only the
    expected record is returned.
    """

    def _assert_single_result(self, results, expected):
        # Shared shape of every assertion: exactly one hit, matched by title.
        # NOTE: assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual.
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0].title, expected.title)

    def test_published_queryset(self):
        lettings = [
            LettingFactory(status=Letting.STATUS_CHOICE_ACTIVE),
            LettingFactory(status=Letting.STATUS_CHOICE_INACTIVE),
        ]
        self._assert_single_result(Letting.filtered.published(), lettings[0])

    def test_unpublished_queryset(self):
        lettings = [
            LettingFactory(status=Letting.STATUS_CHOICE_ACTIVE),
            LettingFactory(status=Letting.STATUS_CHOICE_INACTIVE),
        ]
        self._assert_single_result(Letting.filtered.unpublished(), lettings[1])

    def test_unexpired_queryset(self):
        lettings = [
            LettingFactory(expires_at=_days_from_now(30)),
            LettingFactory(expires_at=_days_from_now(-30)),
        ]
        self._assert_single_result(Letting.filtered.unexpired(), lettings[0])

    def test_expired_queryset(self):
        lettings = [
            LettingFactory(expires_at=_days_from_now(30)),
            LettingFactory(expires_at=_days_from_now(-30)),
        ]
        self._assert_single_result(Letting.filtered.expired(), lettings[1])

    def test_let_agreed_queryset(self):
        lettings = [
            LettingFactory(let_agreed=True),
            LettingFactory(let_agreed=False),
        ]
        self._assert_single_result(Letting.filtered.let_agreed(), lettings[0])

    def test_let_not_agreed_queryset(self):
        lettings = [
            LettingFactory(let_agreed=True),
            LettingFactory(let_agreed=False),
        ]
        self._assert_single_result(Letting.filtered.let_not_agreed(), lettings[1])

    def test_furnished_queryset(self):
        lettings = [
            LettingFactory(furnished=True),
            LettingFactory(furnished=False),
        ]
        self._assert_single_result(Letting.filtered.furnished(), lettings[0])

    def test_unfurnished_queryset(self):
        lettings = [
            LettingFactory(furnished=True),
            LettingFactory(furnished=False),
        ]
        self._assert_single_result(Letting.filtered.unfurnished(), lettings[1])

    def test_type_of_let_queryset(self):
        lettings = [
            LettingFactory(type_of_let=Letting.TYPE_OF_LET_LONG_TERM),
            LettingFactory(type_of_let=Letting.TYPE_OF_LET_SHORT_TERM),
        ]
        results = Letting.filtered.type_of_let(Letting.TYPE_OF_LET_LONG_TERM)
        self._assert_single_result(results, lettings[0])

    def test_house_share_queryset(self):
        lettings = [
            LettingFactory(house_share=True),
            LettingFactory(house_share=False),
        ]
        self._assert_single_result(Letting.filtered.house_share(), lettings[0])
37.84
103
0.665169
407
3,784
6.009828
0.142506
0.130826
0.081766
0.139002
0.793132
0.793132
0.793132
0.793132
0.771464
0.744481
0
0.013005
0.227801
3,784
100
104
37.84
0.824093
0
0
0.534884
0
0
0
0
0
0
0
0
0.232558
1
0.116279
false
0
0.05814
0
0.186047
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
816c667beb8ba3467f9d1c17a6cf9f1881b247af
108
py
Python
pyshley/lib/enum.py
IndiBowstring/pyshley
417976574833ffd1e2824e14d34c851cc238b2bc
[ "MIT" ]
null
null
null
pyshley/lib/enum.py
IndiBowstring/pyshley
417976574833ffd1e2824e14d34c851cc238b2bc
[ "MIT" ]
null
null
null
pyshley/lib/enum.py
IndiBowstring/pyshley
417976574833ffd1e2824e14d34c851cc238b2bc
[ "MIT" ]
null
null
null
# TODO: Currently unused """ from enum import Enum class ContainerType(Enum): PROD = 1 DEV = 2 """
12
26
0.62963
14
108
4.857143
0.857143
0
0
0
0
0
0
0
0
0
0
0.024691
0.25
108
9
27
12
0.814815
0.916667
0
null
0
null
0
0
null
0
0
0.111111
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
1
0
0
0
1
0
0
0
0
0
0
5
c4a312f4ddb6ec782af2e3e63e31638c73b054e8
29
py
Python
anitracker/sync/__init__.py
Phxntxm/AniTracker
522ece6cc41da3e2875907ff9dce82f31146d450
[ "MIT" ]
12
2021-06-27T23:59:14.000Z
2022-03-24T04:38:30.000Z
anitracker/sync/__init__.py
Phxntxm/AniTracker
522ece6cc41da3e2875907ff9dce82f31146d450
[ "MIT" ]
1
2022-03-24T04:53:28.000Z
2022-03-24T04:53:28.000Z
anitracker/sync/__init__.py
Phxntxm/AniTracker
522ece6cc41da3e2875907ff9dce82f31146d450
[ "MIT" ]
null
null
null
from .anilist import AniList
14.5
28
0.827586
4
29
6
0.75
0
0
0
0
0
0
0
0
0
0
0
0.137931
29
1
29
29
0.96
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
c4a591d909d0cc651ceab331f08fa1284071bdc2
84
py
Python
bot/utils/__init__.py
t3m8ch/holy-war-detector
bb23694fafc9276ee95c711f4354fba47c4b7e2c
[ "MIT" ]
null
null
null
bot/utils/__init__.py
t3m8ch/holy-war-detector
bb23694fafc9276ee95c711f4354fba47c4b7e2c
[ "MIT" ]
null
null
null
bot/utils/__init__.py
t3m8ch/holy-war-detector
bb23694fafc9276ee95c711f4354fba47c4b7e2c
[ "MIT" ]
null
null
null
"""This package contains modules to simplify the code""" from .router import Router
28
56
0.77381
12
84
5.416667
0.916667
0
0
0
0
0
0
0
0
0
0
0
0.142857
84
2
57
42
0.902778
0.595238
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
c4bf502ac0e64ee5644185505153c261889b728f
23
py
Python
atcoder/other/tkppc2016_a.py
knuu/competitive-programming
16bc68fdaedd6f96ae24310d697585ca8836ab6e
[ "MIT" ]
1
2018-11-12T15:18:55.000Z
2018-11-12T15:18:55.000Z
atcoder/other/tkppc2016_a.py
knuu/competitive-programming
16bc68fdaedd6f96ae24310d697585ca8836ab6e
[ "MIT" ]
null
null
null
atcoder/other/tkppc2016_a.py
knuu/competitive-programming
16bc68fdaedd6f96ae24310d697585ca8836ab6e
[ "MIT" ]
null
null
null
# Read two lines from stdin and print their concatenation.
first_line = input()
second_line = input()
print(first_line + second_line)
11.5
22
0.652174
3
23
5
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.043478
23
1
23
23
0.681818
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
48150448d7e00a4c2680cfd62a989ca8eb62bc8b
28
py
Python
mymldev/datasets/__init__.py
Suneel123/mymldev
d80826432f97c9004986cd5a625f74757cf362bb
[ "MIT" ]
null
null
null
mymldev/datasets/__init__.py
Suneel123/mymldev
d80826432f97c9004986cd5a625f74757cf362bb
[ "MIT" ]
null
null
null
mymldev/datasets/__init__.py
Suneel123/mymldev
d80826432f97c9004986cd5a625f74757cf362bb
[ "MIT" ]
null
null
null
# Datasets used for testing
14
27
0.785714
4
28
5.5
1
0
0
0
0
0
0
0
0
0
0
0
0.178571
28
1
28
28
0.956522
0.892857
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
481937c7bad4a83b06506c2ee130e8958fe88401
326
py
Python
is_core/utils/decorators.py
zzuzzy/django-is-core
3f87ec56a814738683c732dce5f07e0328c2300d
[ "BSD-3-Clause" ]
null
null
null
is_core/utils/decorators.py
zzuzzy/django-is-core
3f87ec56a814738683c732dce5f07e0328c2300d
[ "BSD-3-Clause" ]
null
null
null
is_core/utils/decorators.py
zzuzzy/django-is-core
3f87ec56a814738683c732dce5f07e0328c2300d
[ "BSD-3-Clause" ]
null
null
null
def short_description(description):
    """Decorator factory that tags a callable with ``short_description``.

    The attribute is read later (e.g. by exports) to generate a header
    name. When applied on top of a ``property``, the underlying getter
    function is tagged and returned in place of the property object.
    """
    def decorator(target):
        # Properties cannot carry arbitrary attributes; unwrap to the getter.
        fn = target.fget if isinstance(target, property) else target
        fn.short_description = description
        return fn
    return decorator
29.636364
94
0.656442
35
326
6.028571
0.571429
0.227488
0.255924
0
0
0
0
0
0
0
0
0
0.266871
326
10
95
32.6
0.882845
0.276074
0
0
1
0
0
0
0
0
0
0
0
1
0.285714
false
0
0
0
0.571429
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
48279507ad40b0a42340fa6407d525d089a3ac30
30
py
Python
corehq/celery_monitoring/models.py
dimagilg/commcare-hq
ea1786238eae556bb7f1cbd8d2460171af1b619c
[ "BSD-3-Clause" ]
471
2015-01-10T02:55:01.000Z
2022-03-29T18:07:18.000Z
corehq/celery_monitoring/models.py
dimagilg/commcare-hq
ea1786238eae556bb7f1cbd8d2460171af1b619c
[ "BSD-3-Clause" ]
14,354
2015-01-01T07:38:23.000Z
2022-03-31T20:55:14.000Z
corehq/celery_monitoring/models.py
dimagilg/commcare-hq
ea1786238eae556bb7f1cbd8d2460171af1b619c
[ "BSD-3-Clause" ]
175
2015-01-06T07:16:47.000Z
2022-03-29T13:27:01.000Z
# here so tasks get picked up
15
29
0.733333
6
30
3.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.233333
30
1
30
30
0.956522
0.9
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
484251431dcfe1d4f33dba2bd4fa4221759b2eb8
56
py
Python
utils/stringutils.py
74wny0wl/entusergenerator
45ae4dc9b8b8675454e3058708e004cfe5db5055
[ "MIT" ]
1
2020-08-26T08:10:08.000Z
2020-08-26T08:10:08.000Z
utils/stringutils.py
74wny0wl/entusergenerator
45ae4dc9b8b8675454e3058708e004cfe5db5055
[ "MIT" ]
null
null
null
utils/stringutils.py
74wny0wl/entusergenerator
45ae4dc9b8b8675454e3058708e004cfe5db5055
[ "MIT" ]
null
null
null
def prefix(string, length):
    """Return the first *length* characters of *string*."""
    return string[:length]
18.666667
27
0.714286
8
56
5
0.75
0
0
0
0
0
0
0
0
0
0
0.021277
0.160714
56
2
28
28
0.829787
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5