hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
291bfbb6b5b2b9e12bc73b5eb65200e4deaf9fa0
| 42
|
py
|
Python
|
All_Source_Code/VisionDeepLearning/VisionDeepLearning_2.py
|
APMonitor/pds
|
fa9a7ec920802de346dcdf7f5dd92d752142c16f
|
[
"MIT"
] | 11
|
2021-01-21T09:46:29.000Z
|
2022-03-16T19:33:10.000Z
|
All_Source_Code/VisionDeepLearning/VisionDeepLearning_2.py
|
the-mahapurush/pds
|
7cb4087dd8e75cb1e9b2a4283966c798175f61f7
|
[
"MIT"
] | 1
|
2022-03-16T19:47:09.000Z
|
2022-03-16T20:11:50.000Z
|
All_Source_Code/VisionDeepLearning/VisionDeepLearning_2.py
|
the-mahapurush/pds
|
7cb4087dd8e75cb1e9b2a4283966c798175f61f7
|
[
"MIT"
] | 12
|
2021-02-08T21:11:11.000Z
|
2022-03-20T12:42:49.000Z
|
for x in faces:
print(x['confidence'])
| 21
| 26
| 0.642857
| 7
| 42
| 3.857143
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 42
| 2
| 26
| 21
| 0.794118
| 0
| 0
| 0
| 0
| 0
| 0.232558
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
2931ae18f7ad0cdbb9d8ba4f1ad27c772f793843
| 115
|
py
|
Python
|
templates_advanced_lab/templates_advanced/profiles/apps.py
|
nrgxtra/web_framework
|
dd84968a77b84a03d66c5db190b28bffc479f05e
|
[
"MIT"
] | null | null | null |
templates_advanced_lab/templates_advanced/profiles/apps.py
|
nrgxtra/web_framework
|
dd84968a77b84a03d66c5db190b28bffc479f05e
|
[
"MIT"
] | null | null | null |
templates_advanced_lab/templates_advanced/profiles/apps.py
|
nrgxtra/web_framework
|
dd84968a77b84a03d66c5db190b28bffc479f05e
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class ProfilesConfig(AppConfig):
name = 'templates_advanced.profiles'
| 19.166667
| 41
| 0.756522
| 12
| 115
| 7.166667
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 115
| 5
| 42
| 23
| 0.905263
| 0
| 0
| 0
| 0
| 0
| 0.245455
| 0.245455
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
293366e3322897bfeea42e3b5ff97ac8d54c4038
| 152
|
py
|
Python
|
got/utils/remove_got.py
|
CircArgs/got
|
1f7194bb94511df0bb967cfa9dc17bb13d3907b7
|
[
"Apache-2.0"
] | null | null | null |
got/utils/remove_got.py
|
CircArgs/got
|
1f7194bb94511df0bb967cfa9dc17bb13d3907b7
|
[
"Apache-2.0"
] | 1
|
2020-06-08T23:32:48.000Z
|
2020-06-08T23:32:48.000Z
|
got/utils/remove_got.py
|
CircArgs/got
|
1f7194bb94511df0bb967cfa9dc17bb13d3907b7
|
[
"Apache-2.0"
] | null | null | null |
import os
import shutil
from . import is_got
def remove_got(path=os.getcwd()):
exists = is_got(path)
if exists:
shutil.rmtree(exists)
| 15.2
| 33
| 0.677632
| 23
| 152
| 4.347826
| 0.565217
| 0.1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.223684
| 152
| 9
| 34
| 16.888889
| 0.847458
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.428571
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
29520668b2876e36d587fc97775da9bcc6104b3d
| 87
|
py
|
Python
|
viable/__init__.py
|
otosense/viable
|
5ef9ab4edcb2bc878d6eb2374bd74852023fe93c
|
[
"Apache-2.0"
] | null | null | null |
viable/__init__.py
|
otosense/viable
|
5ef9ab4edcb2bc878d6eb2374bd74852023fe93c
|
[
"Apache-2.0"
] | 1
|
2021-08-31T22:20:07.000Z
|
2021-08-31T22:20:07.000Z
|
viable/__init__.py
|
otosense/viable
|
5ef9ab4edcb2bc878d6eb2374bd74852023fe93c
|
[
"Apache-2.0"
] | null | null | null |
"""Diagnose audio and other input data for ML pipelines
>>> print('hello')
hello
"""
| 12.428571
| 55
| 0.678161
| 12
| 87
| 4.916667
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172414
| 87
| 6
| 56
| 14.5
| 0.819444
| 0.896552
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
46293db2abf97e9b6e4e290bbab55c5e334d308c
| 216
|
py
|
Python
|
String_Immutable.py
|
BeenashPervaiz/Command_Line_Task
|
a603fbdd06717ff157ecd72881d08329413fd82c
|
[
"MIT"
] | null | null | null |
String_Immutable.py
|
BeenashPervaiz/Command_Line_Task
|
a603fbdd06717ff157ecd72881d08329413fd82c
|
[
"MIT"
] | null | null | null |
String_Immutable.py
|
BeenashPervaiz/Command_Line_Task
|
a603fbdd06717ff157ecd72881d08329413fd82c
|
[
"MIT"
] | null | null | null |
string = "Character"
print(string[2])
# string[2] = 'A' not change it
# string.replace('a','A') we can replace it but it cannot change the our string variables.
new_string = string.replace('a','A')
print(new_string)
| 30.857143
| 90
| 0.708333
| 36
| 216
| 4.194444
| 0.472222
| 0.092715
| 0.18543
| 0.198676
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010695
| 0.134259
| 216
| 7
| 91
| 30.857143
| 0.796791
| 0.546296
| 0
| 0
| 0
| 0
| 0.115789
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
466898a6b05a27d4468518147527eb379ac58424
| 161
|
py
|
Python
|
dwitter_app/apps.py
|
MMohan1/Dwitter
|
dacf36f2819a33f107fb33b3ded81bb33231edb5
|
[
"MIT"
] | null | null | null |
dwitter_app/apps.py
|
MMohan1/Dwitter
|
dacf36f2819a33f107fb33b3ded81bb33231edb5
|
[
"MIT"
] | 7
|
2017-09-09T07:11:56.000Z
|
2017-09-19T04:50:58.000Z
|
dwitter_app/apps.py
|
MMohan1/dwitter
|
dacf36f2819a33f107fb33b3ded81bb33231edb5
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class DwitterAppConfig(AppConfig):
name = 'dwitter_app'
| 17.888889
| 39
| 0.745342
| 19
| 161
| 6
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007353
| 0.15528
| 161
| 8
| 40
| 20.125
| 0.830882
| 0.130435
| 0
| 0
| 0
| 0
| 0.07971
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
467be5b671c01893bdd4fef6b573d53b011ecc78
| 137
|
py
|
Python
|
student_enrollment/models/database/__init__.py
|
Nicusor97/student_enrollment
|
d311dba2e77c7dc30df62f4ab48af7b6836e8e11
|
[
"MIT"
] | null | null | null |
student_enrollment/models/database/__init__.py
|
Nicusor97/student_enrollment
|
d311dba2e77c7dc30df62f4ab48af7b6836e8e11
|
[
"MIT"
] | null | null | null |
student_enrollment/models/database/__init__.py
|
Nicusor97/student_enrollment
|
d311dba2e77c7dc30df62f4ab48af7b6836e8e11
|
[
"MIT"
] | null | null | null |
from .Person import Person
from .Student import Student
from .Subject import Subject
from .User import User
from .teacher import Teacher
| 22.833333
| 28
| 0.817518
| 20
| 137
| 5.6
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145985
| 137
| 5
| 29
| 27.4
| 0.957265
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
469a60800b250ec7cc6e92e73abb146dd83ca7a7
| 511
|
py
|
Python
|
Section10_Facade/RegularFacade/Console.py
|
enriqueescobar-askida/Kinito.Python
|
e4c5521e771c4de0ceaf81776a4a61f7de01edb4
|
[
"MIT"
] | 1
|
2020-10-20T07:41:51.000Z
|
2020-10-20T07:41:51.000Z
|
Section10_Facade/RegularFacade/Console.py
|
enriqueescobar-askida/Kinito.Python
|
e4c5521e771c4de0ceaf81776a4a61f7de01edb4
|
[
"MIT"
] | null | null | null |
Section10_Facade/RegularFacade/Console.py
|
enriqueescobar-askida/Kinito.Python
|
e4c5521e771c4de0ceaf81776a4a61f7de01edb4
|
[
"MIT"
] | null | null | null |
from Section10_Facade.RegularFacade.Buffer import Buffer
from Section10_Facade.RegularFacade.Viewport import Viewport
class Console:
def __init__(self):
b = Buffer()
self.current_viewport: Viewport = Viewport(b)
self.buffers = [b]
self.viewports = [self.current_viewport]
# high-level
def write(self, text):
self.current_viewport.buffer.write(text)
# low-level
def get_char_at(self, index):
return self.current_viewport.get_char_at(index)
| 26.894737
| 60
| 0.694716
| 63
| 511
| 5.412698
| 0.412698
| 0.129032
| 0.222874
| 0.187683
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009975
| 0.215264
| 511
| 18
| 61
| 28.388889
| 0.840399
| 0.039139
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.166667
| 0.083333
| 0.583333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
46a0417ceb9d41ed51f5dd267b05e07f1b939d3f
| 164
|
py
|
Python
|
zad1_5.py
|
kamilhabrych/python-semestr5-lista1
|
65faeffe83bcc4706b2818e2e7802d986b19244b
|
[
"MIT"
] | null | null | null |
zad1_5.py
|
kamilhabrych/python-semestr5-lista1
|
65faeffe83bcc4706b2818e2e7802d986b19244b
|
[
"MIT"
] | null | null | null |
zad1_5.py
|
kamilhabrych/python-semestr5-lista1
|
65faeffe83bcc4706b2818e2e7802d986b19244b
|
[
"MIT"
] | null | null | null |
x = int(input('Podaj pierwsza liczbe calkowita: '))
y = int(input('Podaj druga liczbe calkowita: '))
print()
print(x%y)
print(y%x)
print()
print(x//y)
print(y//x)
| 16.4
| 51
| 0.664634
| 28
| 164
| 3.892857
| 0.357143
| 0.146789
| 0.238532
| 0.220183
| 0.348624
| 0.348624
| 0.348624
| 0
| 0
| 0
| 0
| 0
| 0.128049
| 164
| 10
| 52
| 16.4
| 0.762238
| 0
| 0
| 0.25
| 0
| 0
| 0.381818
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.75
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
46aeeb459a57fd1ef8b7eec747b21051df216020
| 141
|
py
|
Python
|
maedn/strategy.py
|
Daniel31415/maedn
|
98821402328c235f1e46f92a960880745a8a299b
|
[
"MIT"
] | null | null | null |
maedn/strategy.py
|
Daniel31415/maedn
|
98821402328c235f1e46f92a960880745a8a299b
|
[
"MIT"
] | null | null | null |
maedn/strategy.py
|
Daniel31415/maedn
|
98821402328c235f1e46f92a960880745a8a299b
|
[
"MIT"
] | null | null | null |
class BaseStrategy(object):
def get_next_move(self, board):
raise NotImplementedError
class MoveFirst(BaseStrategy):
pass
| 15.666667
| 35
| 0.723404
| 15
| 141
| 6.666667
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.205674
| 141
| 8
| 36
| 17.625
| 0.892857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.2
| 0
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
46b11f9bde3881456e71c2f22b6b91f9a898e9ae
| 106
|
py
|
Python
|
pkg/codegen/internal/test/testdata/random-pet-pp/python/random-pet.py
|
pcen/pulumi
|
1bb85ca98c90f2161fe915df083d47c56c135e4d
|
[
"Apache-2.0"
] | 1
|
2022-01-04T13:56:27.000Z
|
2022-01-04T13:56:27.000Z
|
pkg/codegen/internal/test/testdata/random-pet-pp/python/random-pet.py
|
pcen/pulumi
|
1bb85ca98c90f2161fe915df083d47c56c135e4d
|
[
"Apache-2.0"
] | 23
|
2020-12-08T02:44:30.000Z
|
2022-03-31T11:21:34.000Z
|
pkg/codegen/internal/test/testdata/random-pet-pp/python/random-pet.py
|
pcen/pulumi
|
1bb85ca98c90f2161fe915df083d47c56c135e4d
|
[
"Apache-2.0"
] | 1
|
2021-07-01T13:03:23.000Z
|
2021-07-01T13:03:23.000Z
|
import pulumi
import pulumi_random as random
random_pet = random.RandomPet("random_pet", prefix="doggo")
| 21.2
| 59
| 0.801887
| 15
| 106
| 5.466667
| 0.533333
| 0.292683
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103774
| 106
| 4
| 60
| 26.5
| 0.863158
| 0
| 0
| 0
| 0
| 0
| 0.141509
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
d3bc2cc4b3dc6d2774793a5646bd8b098a82a434
| 91
|
py
|
Python
|
django_on_cloudrun/basicapi/apps.py
|
louwjlabuschagne/basic-python-api-ci
|
db7804332859c0e0dc7529edeaeddc476f28be3c
|
[
"MIT"
] | null | null | null |
django_on_cloudrun/basicapi/apps.py
|
louwjlabuschagne/basic-python-api-ci
|
db7804332859c0e0dc7529edeaeddc476f28be3c
|
[
"MIT"
] | 4
|
2021-03-30T13:46:01.000Z
|
2021-09-22T19:20:14.000Z
|
django_on_cloudrun/basicapi/apps.py
|
louwjlabuschagne/basic-python-api-ci
|
db7804332859c0e0dc7529edeaeddc476f28be3c
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class BasicapiConfig(AppConfig):
name = 'basicapi'
| 15.166667
| 33
| 0.758242
| 10
| 91
| 6.9
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164835
| 91
| 5
| 34
| 18.2
| 0.907895
| 0
| 0
| 0
| 0
| 0
| 0.087912
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
310567293474013f3d4c87db9a3158c849329acd
| 14,188
|
py
|
Python
|
test/unit_test/controller/test_callback.py
|
ArnovanHilten/NVFlare
|
bb45e7d606849c6bc8f7542347459c6ba1be00c4
|
[
"Apache-2.0"
] | 155
|
2021-08-05T18:05:09.000Z
|
2022-03-27T15:32:56.000Z
|
test/unit_test/controller/test_callback.py
|
ArnovanHilten/NVFlare
|
bb45e7d606849c6bc8f7542347459c6ba1be00c4
|
[
"Apache-2.0"
] | 216
|
2021-12-01T06:07:12.000Z
|
2022-03-30T23:34:02.000Z
|
test/unit_test/controller/test_callback.py
|
ArnovanHilten/NVFlare
|
bb45e7d606849c6bc8f7542347459c6ba1be00c4
|
[
"Apache-2.0"
] | 44
|
2021-11-24T16:03:29.000Z
|
2022-03-24T23:28:39.000Z
|
# Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import threading
import time
import pytest
from nvflare.apis.controller_spec import ClientTask, Task, TaskCompletionStatus
from nvflare.apis.fl_context import FLContext
from nvflare.apis.shareable import Shareable
from .controller_test import TestController, create_client, create_task, get_ready, launch_task
def _get_task_done_callback_test_cases():
task_name = "__test_task"
def task_done_cb(task: Task, fl_ctx: FLContext):
client_names = [x.client.name for x in task.client_tasks]
expected_str = "_".join(client_names)
task.props[task_name] = expected_str
input_data = Shareable()
test_cases = [
[
"broadcast",
[create_client(f"__test_client{i}") for i in range(10)],
task_name,
input_data,
task_done_cb,
"_".join([f"__test_client{i}" for i in range(10)]),
],
[
"broadcast_and_wait",
[create_client(f"__test_client{i}") for i in range(10)],
task_name,
input_data,
task_done_cb,
"_".join([f"__test_client{i}" for i in range(10)]),
],
["send", [create_client("__test_client")], task_name, input_data, task_done_cb, "__test_client"],
["send_and_wait", [create_client("__test_client")], task_name, input_data, task_done_cb, "__test_client"],
["relay", [create_client("__test_client")], task_name, input_data, task_done_cb, "__test_client"],
["relay_and_wait", [create_client("__test_client")], task_name, input_data, task_done_cb, "__test_client"],
]
return test_cases
class TestCallback(TestController):
@pytest.mark.parametrize("method", TestController.ALL_APIS)
def test_before_task_sent_cb(self, method):
def before_task_sent_cb(client_task: ClientTask, fl_ctx: FLContext):
client_task.task.data["_test_data"] = client_task.client.name
client_name = "_test_client"
controller, fl_ctx = self.start_controller()
client = create_client(name=client_name)
task = create_task("__test_task", before_task_sent_cb=before_task_sent_cb)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client]},
},
)
get_ready(launch_thread)
task_name_out, _, data = controller.process_task_request(client, fl_ctx)
expected = Shareable()
expected["_test_data"] = client_name
assert data == expected
controller.cancel_task(task)
assert task.completion_status == TaskCompletionStatus.CANCELLED
launch_thread.join()
self.stop_controller(controller, fl_ctx)
@pytest.mark.parametrize("method", TestController.ALL_APIS)
def test_result_received_cb(self, method):
def result_received_cb(client_task: ClientTask, fl_ctx: FLContext):
client_task.result["_test_data"] = client_task.client.name
client_name = "_test_client"
input_data = Shareable()
input_data["_test_data"] = "_old_data"
controller, fl_ctx = self.start_controller()
client = create_client(name=client_name)
task = create_task("__test_task", data=input_data, result_received_cb=result_received_cb)
kwargs = {"targets": [client]}
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": kwargs,
},
)
get_ready(launch_thread)
task_name_out, client_task_id, data = controller.process_task_request(client, fl_ctx)
controller.process_submission(
client=client, task_name="__test_task", task_id=client_task_id, fl_ctx=fl_ctx, result=data
)
expected = Shareable()
expected["_test_data"] = client_name
assert task.last_client_task_map[client_name].result == expected
controller._check_tasks()
assert task.completion_status == TaskCompletionStatus.OK
launch_thread.join()
self.stop_controller(controller, fl_ctx)
@pytest.mark.parametrize("task_complete", ["normal", "timeout", "cancel"])
@pytest.mark.parametrize("method,clients,task_name,input_data,cb,expected", _get_task_done_callback_test_cases())
def test_task_done_cb(self, method, clients, task_name, input_data, cb, expected, task_complete):
controller, fl_ctx = self.start_controller()
timeout = 0 if task_complete != "timeout" else 1
task = create_task("__test_task", data=input_data, task_done_cb=cb, timeout=timeout)
kwargs = {"targets": clients}
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": kwargs,
},
)
get_ready(launch_thread)
client_task_ids = len(clients) * [None]
for i, client in enumerate(clients):
task_name_out, client_task_ids[i], _ = controller.process_task_request(client, fl_ctx)
if task_name_out == "":
client_task_ids[i] = None
# in here we make up client results:
result = Shareable()
result["result"] = "result"
for client, client_task_id in zip(clients, client_task_ids):
if client_task_id is not None:
if task_complete == "normal":
controller.process_submission(
client=client, task_name="__test_task", task_id=client_task_id, fl_ctx=fl_ctx, result=result
)
if task_complete == "timeout":
time.sleep(timeout)
assert task.completion_status == TaskCompletionStatus.TIMEOUT
elif task_complete == "cancel":
controller.cancel_task(task)
assert task.completion_status == TaskCompletionStatus.CANCELLED
controller._check_tasks()
assert task.props[task_name] == expected
assert controller.get_num_standing_tasks() == 0
launch_thread.join()
self.stop_controller(controller, fl_ctx)
@pytest.mark.parametrize("method", TestController.ALL_APIS)
def test_cancel_task_before_send_cb(self, method):
def before_task_sent_cb(client_task: ClientTask, fl_ctx: FLContext):
client_task.task.completion_status = TaskCompletionStatus.CANCELLED
controller, fl_ctx = self.start_controller()
client = create_client(name="__test_client")
task = create_task("__test_task", before_task_sent_cb=before_task_sent_cb)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client]},
},
)
get_ready(launch_thread)
task_name_out, client_task_id, data = controller.process_task_request(client, fl_ctx)
assert task_name_out == ""
assert client_task_id == ""
launch_thread.join()
assert task.completion_status == TaskCompletionStatus.CANCELLED
self.stop_controller(controller, fl_ctx)
@pytest.mark.parametrize("method", TestController.ALL_APIS)
def test_cancel_task_result_received_cb(self, method):
def result_received_cb(client_task: ClientTask, fl_ctx: FLContext):
client_task.task.completion_status = TaskCompletionStatus.CANCELLED
controller, fl_ctx = self.start_controller()
client1 = create_client(name="__test_client")
client2 = create_client(name="__another_client")
task = create_task("__test_task", result_received_cb=result_received_cb)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client1, client2]},
},
)
get_ready(launch_thread)
task_name_out, client_task_id, data = controller.process_task_request(client1, fl_ctx)
result = Shareable()
result["__result"] = "__test_result"
controller.process_submission(
client=client1, task_name="__test_task", task_id=client_task_id, fl_ctx=fl_ctx, result=result
)
assert task.last_client_task_map["__test_client"].result == result
task_name_out, client_task_id, data = controller.process_task_request(client2, fl_ctx)
assert task_name_out == ""
assert client_task_id == ""
launch_thread.join()
assert task.completion_status == TaskCompletionStatus.CANCELLED
self.stop_controller(controller, fl_ctx)
@pytest.mark.parametrize("method", TestController.ALL_APIS)
@pytest.mark.parametrize("method2", ["broadcast", "send", "relay"])
def test_schedule_task_before_send_cb(self, method, method2):
def before_task_sent_cb(client_task: ClientTask, fl_ctx: FLContext):
controller = fl_ctx.get_prop(key="controller")
new_task = create_task("__new_test_task")
inner_launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": new_task,
"method": method2,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client_task.client]},
},
)
inner_launch_thread.start()
inner_launch_thread.join()
controller, fl_ctx = self.start_controller()
fl_ctx.set_prop("controller", controller)
client = create_client(name="__test_client")
task = create_task("__test_task", before_task_sent_cb=before_task_sent_cb)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client]},
},
)
launch_thread.start()
task_name_out = ""
while task_name_out == "":
task_name_out, _, _ = controller.process_task_request(client, fl_ctx)
time.sleep(0.1)
assert task_name_out == "__test_task"
new_task_name_out = ""
while new_task_name_out == "":
new_task_name_out, _, _ = controller.process_task_request(client, fl_ctx)
time.sleep(0.1)
assert new_task_name_out == "__new_test_task"
controller.cancel_task(task)
assert task.completion_status == TaskCompletionStatus.CANCELLED
launch_thread.join()
self.stop_controller(controller, fl_ctx)
@pytest.mark.parametrize("method", TestController.ALL_APIS)
@pytest.mark.parametrize("method2", ["broadcast", "send", "relay"])
def test_schedule_task_result_received_cb(self, method, method2):
def result_received_cb(client_task: ClientTask, fl_ctx: FLContext):
controller = fl_ctx.get_prop(key="controller")
new_task = create_task("__new_test_task")
inner_launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": new_task,
"method": method2,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client_task.client]},
},
)
get_ready(inner_launch_thread)
inner_launch_thread.join()
controller, fl_ctx = self.start_controller()
fl_ctx.set_prop("controller", controller)
client = create_client(name="__test_client")
task = create_task("__test_task", result_received_cb=result_received_cb)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client]},
},
)
launch_thread.start()
task_name_out = ""
client_task_id = ""
data = None
while task_name_out == "":
task_name_out, client_task_id, data = controller.process_task_request(client, fl_ctx)
time.sleep(0.1)
assert task_name_out == "__test_task"
controller.process_submission(
client=client, task_name="__test_task", task_id=client_task_id, fl_ctx=fl_ctx, result=data
)
controller._check_tasks()
assert controller.get_num_standing_tasks() == 1
new_task_name_out = ""
while new_task_name_out == "":
new_task_name_out, _, _ = controller.process_task_request(client, fl_ctx)
time.sleep(0.1)
assert new_task_name_out == "__new_test_task"
launch_thread.join()
self.stop_controller(controller, fl_ctx)
| 40.537143
| 117
| 0.622145
| 1,597
| 14,188
| 5.13087
| 0.112085
| 0.037222
| 0.033561
| 0.015865
| 0.787772
| 0.757628
| 0.720039
| 0.70332
| 0.670491
| 0.657798
| 0
| 0.004392
| 0.27784
| 14,188
| 349
| 118
| 40.653295
| 0.795335
| 0.04391
| 0
| 0.667797
| 0
| 0
| 0.090554
| 0.003469
| 0
| 0
| 0
| 0
| 0.071186
| 1
| 0.050847
| false
| 0
| 0.023729
| 0
| 0.081356
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
31110171d2380c30f6ab3bf74e526110fcf98d31
| 3,412
|
py
|
Python
|
crawler/frontier.py
|
dangrasso/oda-crawler
|
bb8f58c27fe762d175677b14297b3be95f77f394
|
[
"MIT"
] | null | null | null |
crawler/frontier.py
|
dangrasso/oda-crawler
|
bb8f58c27fe762d175677b14297b3be95f77f394
|
[
"MIT"
] | null | null | null |
crawler/frontier.py
|
dangrasso/oda-crawler
|
bb8f58c27fe762d175677b14297b3be95f77f394
|
[
"MIT"
] | null | null | null |
from abc import ABC, abstractmethod
from collections import deque
from itertools import chain
from typing import Iterable
class Frontier(ABC):
    """
    This class abstracts away the idea of a frontier of urls for the crawler.
    It should be possible to add new urls to the frontier, either at low or high priority.
    """

    @abstractmethod
    def pop(self) -> str:
        """Extract the next url from the frontier"""
        pass

    @abstractmethod
    def enqueue_next(self, urls: Iterable[str]):
        """Add all given urls to the frontier with the highest priority"""
        pass

    @abstractmethod
    def enqueue_last(self, urls: Iterable[str]):
        """Add all given urls to the frontier with the lowest priority"""
        pass

    @abstractmethod
    def __len__(self) -> int:
        """Return how many urls are currently in the frontier"""
        pass

    @abstractmethod
    def __iter__(self):
        """Return an iterator over the urls currently in the frontier"""
        pass
# Round 1: a simple deque
class InMemoryDequeFrontier(Frontier):
    """
    A Frontier implementation that keeps urls in memory and works like a deque.

    The right end of the deque is the high-priority end: ``pop`` takes from
    the right, ``enqueue_next`` adds there, ``enqueue_last`` pushes onto the
    left (low-priority) end.
    """

    def __init__(self):
        self._deque: deque[str] = deque()

    def enqueue_next(self, urls: Iterable[str]):
        """Add all given urls at the high-priority (right) end."""
        for url in urls:
            self._deque.append(url)

    def enqueue_last(self, urls: Iterable[str]):
        """Push all given urls onto the low-priority (left) end."""
        for url in urls:
            self._deque.appendleft(url)

    def pop(self):
        """Remove and return the highest-priority url."""
        return self._deque.pop()

    def __len__(self) -> int:
        return len(self._deque)

    def __iter__(self):
        return iter(self._deque)
# Round 2: using both deque and set to avoid duplicates (at the expense of memory)
class InMemoryHybridFrontier(Frontier):
    """
    A Frontier implementation that keeps urls in memory and works like a deque but also filters out duplicates.

    Urls are deduplicated against ``_seen`` (everything currently enqueued);
    a popped url is removed from ``_seen`` again, so it can be re-enqueued later.
    """

    def __init__(self):
        self._seen: set[str] = set()
        self._deque: deque[str] = deque()

    def _new_urls(self, urls: Iterable[str]) -> list[str]:
        """Return the not-yet-seen urls (input order preserved) and mark them seen."""
        # The original used a set comprehension here, which scrambled the
        # caller's ordering nondeterministically before extending the deque;
        # an explicit loop keeps the enqueue order deterministic and also
        # dedupes repeats within the same batch.
        fresh = []
        for url in urls:
            if url not in self._seen:
                self._seen.add(url)
                fresh.append(url)
        return fresh

    def enqueue_next(self, urls: Iterable[str]):
        """Add the not-yet-seen urls with the highest priority."""
        self._deque.extend(self._new_urls(urls))

    def enqueue_last(self, urls: Iterable[str]):
        """Add the not-yet-seen urls with the lowest priority."""
        self._deque.extendleft(self._new_urls(urls))

    def pop(self):
        """Remove and return the highest-priority url."""
        popped = self._deque.pop()
        self._seen.discard(popped)  # this allows re-visiting
        return popped

    def __len__(self) -> int:
        return len(self._deque)

    def __iter__(self):
        return iter(self._deque)
# Round 3: based on 2 sets to avoid duplicates (within same prio)
class InMemorySetFrontier(Frontier):
    """
    A Frontier implementation that keeps urls in memory and works like a set, avoiding duplicates.
    There could still be duplicates between low and high prio.
    """

    def __init__(self):
        self._set_high_prio: set[str] = set()
        self._set_low_prio: set[str] = set()

    def enqueue_next(self, urls: Iterable[str]):
        """Merge the given urls into the high-priority bucket."""
        for url in urls:
            self._set_high_prio.add(url)

    def enqueue_last(self, urls: Iterable[str]):
        """Merge the given urls into the low-priority bucket."""
        for url in urls:
            self._set_low_prio.add(url)

    def pop(self):
        """Remove and return an arbitrary url, draining high priority first."""
        if self._set_high_prio:
            return self._set_high_prio.pop()
        return self._set_low_prio.pop()

    def __len__(self) -> int:
        return len(self._set_high_prio) + len(self._set_low_prio)

    def __iter__(self):
        return chain(self._set_high_prio, self._set_low_prio)
| 28.198347
| 110
| 0.655334
| 472
| 3,412
| 4.506356
| 0.235169
| 0.050776
| 0.060179
| 0.071462
| 0.503056
| 0.460743
| 0.430654
| 0.417489
| 0.348848
| 0.310296
| 0
| 0.001952
| 0.249414
| 3,412
| 120
| 111
| 28.433333
| 0.828583
| 0.26612
| 0
| 0.626866
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.343284
| false
| 0.074627
| 0.059701
| 0.119403
| 0.597015
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 4
|
31bb4479dcaf1d213c0d681c5670d68184910df0
| 55
|
py
|
Python
|
tests/row_generation/__init__.py
|
jamesscottbrown/wheatley
|
c1d83cedad542efeb259475c2c9ba88395aee715
|
[
"MIT"
] | 14
|
2020-08-16T21:41:13.000Z
|
2021-07-13T01:15:01.000Z
|
tests/row_generation/__init__.py
|
jamesscottbrown/wheatley
|
c1d83cedad542efeb259475c2c9ba88395aee715
|
[
"MIT"
] | 121
|
2020-08-13T16:54:46.000Z
|
2021-09-17T10:32:04.000Z
|
tests/row_generation/__init__.py
|
jamesscottbrown/wheatley
|
c1d83cedad542efeb259475c2c9ba88395aee715
|
[
"MIT"
] | 10
|
2020-12-20T03:52:47.000Z
|
2021-11-22T14:46:15.000Z
|
from .generator_test_helpers import gen_rows, as_bells
| 27.5
| 54
| 0.872727
| 9
| 55
| 4.888889
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 55
| 1
| 55
| 55
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
31c594a18823f70124013bf613a6d21ed20723ee
| 4,375
|
py
|
Python
|
tests/core/test_repeat.py
|
perillaroc/takler
|
607a64ff22b33d507f90acca4640963e69626879
|
[
"Apache-2.0"
] | null | null | null |
tests/core/test_repeat.py
|
perillaroc/takler
|
607a64ff22b33d507f90acca4640963e69626879
|
[
"Apache-2.0"
] | null | null | null |
tests/core/test_repeat.py
|
perillaroc/takler
|
607a64ff22b33d507f90acca4640963e69626879
|
[
"Apache-2.0"
] | null | null | null |
from datetime import date
from takler.core import Repeat, RepeatDate, Flow, NodeStatus, Parameter
import pytest
# RepeatDate
@pytest.fixture
def start_date_int():
    # first date of the repeat range, as a YYYYMMDD integer
    return 20220601
@pytest.fixture
def end_date_int():
    # last (inclusive) date of the repeat range, as a YYYYMMDD integer
    return 20220607
def test_repeat_date_create(start_date_int, end_date_int):
    """A fresh RepeatDate starts at ``start`` with a default step of 1."""
    r = RepeatDate("TAKLER_DATE", start_date_int, end_date_int)
    assert r.start == 20220601
    assert r.end == 20220607
    assert r.step == 1
    assert r.value == 20220601
def test_repeat_date_increment(start_date_int, end_date_int):
    """increment() advances the value by ``step`` until ``end``.

    Once ``end`` is reached, increment() returns False and the value sticks
    at the end date. Collapses the original copy-pasted assert ladder into
    loops over the expected dates.
    """
    # step = 1: visits every day up to and including the end date
    r = RepeatDate("TAKLER_DATE", start_date_int, end_date_int)
    for expected in (20220602, 20220603, 20220604, 20220605, 20220606, 20220607):
        assert r.increment()
        assert r.value == expected
    assert not r.increment()
    assert r.value == 20220607

    # step = 2: skips every other day
    r = RepeatDate("TAKLER_DATE", start_date_int, end_date_int, 2)
    for expected in (20220603, 20220605, 20220607):
        assert r.increment()
        assert r.value == expected
    assert not r.increment()
    assert r.value == 20220607
def test_repeat_date_reset(start_date_int, end_date_int):
    """reset() rewinds the repeat value back to the start date."""
    r = RepeatDate("TAKLER_DATE", start_date_int, end_date_int)
    assert r.increment()
    assert r.increment()
    assert r.increment()
    assert r.value == 20220604
    r.reset()
    assert r.value == 20220601
def test_repeat_date_change(start_date_int, end_date_int):
    """change() accepts int, str and datetime.date forms of a date.

    Values outside the start/end range — or, with step > 1, values not on a
    step boundary — raise ValueError.
    """
    r = RepeatDate("TAKLER_DATE", start_date_int, end_date_int)
    r.change(20220602)
    assert r.value == 20220602
    r.change("20220603")
    assert r.value == 20220603
    r.change(date(2022, 6, 4))
    assert r.value == 20220604
    # past the end date
    with pytest.raises(ValueError):
        r.change(date(2022, 6, 8))

    r = RepeatDate("TAKLER_DATE", start_date_int, end_date_int, 2)
    r.change(20220603)
    assert r.value == 20220603
    r.change("20220605")
    assert r.value == 20220605
    r.change(date(2022, 6, 7))
    assert r.value == 20220607
    # before the start date
    with pytest.raises(ValueError):
        r.change(date(2022, 5, 8))
    # past the end date
    with pytest.raises(ValueError):
        r.change(date(2022, 6, 8))
    # not reachable with step 2 from the start date
    with pytest.raises(ValueError):
        r.change(date(2022, 6, 2))
def test_repeat_date_generated_params(start_date_int, end_date_int):
    """generated_parameters() exposes the current value under the repeat name."""
    r = RepeatDate("TAKLER_DATE", start_date_int, end_date_int)
    assert r.generated_parameters() == {
        "TAKLER_DATE": Parameter("TAKLER_DATE", 20220601)
    }
    # the generated parameter tracks the value after an increment
    r.increment()
    assert r.generated_parameters() == {
        "TAKLER_DATE": Parameter("TAKLER_DATE", 20220602)
    }
def test_repeat_date_create_in_flow(start_date_int, end_date_int):
    """Attaching a RepeatDate to a task inside a flow should not raise."""
    flow1 = Flow("flow1")
    task1 = flow1.add_task("task1")
    task1.add_repeat(RepeatDate("YMD", start_date_int, end_date_int))
def test_repeat_date_run_in_flow(start_date_int, end_date_int):
    """A task with a RepeatDate reruns once per date, then completes for good.

    Collapses the original seven copy-pasted run/complete stanzas into one
    loop over the expected YMD values.
    """
    # create flow
    flow1 = Flow("flow1")
    task1 = flow1.add_task("task1")
    task1.add_repeat(RepeatDate("YMD", start_date_int, end_date_int))
    # requeue
    task1.requeue()

    # runs 1..7: each completion advances the generated YMD parameter
    expected_dates = (
        20220601, 20220602, 20220603, 20220604, 20220605, 20220606, 20220607,
    )
    for run_index, expected in enumerate(expected_dates):
        if run_index > 0:
            task1.complete()
        task1.resolve_dependencies()
        assert task1.find_generated_parameter("YMD").value == expected

    # full complete: the repeat is exhausted, the task stays complete
    task1.complete()
    task1.resolve_dependencies()
    assert task1.state.node_status == NodeStatus.complete
    assert task1.find_generated_parameter("YMD").value == 20220607
| 25.584795
| 71
| 0.689143
| 582
| 4,375
| 4.946735
| 0.113402
| 0.089962
| 0.083362
| 0.083362
| 0.78951
| 0.756165
| 0.746787
| 0.722473
| 0.632164
| 0.567211
| 0
| 0.111079
| 0.199543
| 4,375
| 170
| 72
| 25.735294
| 0.711022
| 0.023771
| 0
| 0.646018
| 0
| 0
| 0.043928
| 0
| 0
| 0
| 0
| 0
| 0.424779
| 1
| 0.079646
| false
| 0
| 0.026549
| 0.017699
| 0.123894
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
31cc85ecee66029e67ebd46865687e0489c078dd
| 165
|
py
|
Python
|
api_test/settings.py
|
spothero/api-test
|
377850c1d80c927c730875317ebf2b7f2391bf60
|
[
"MIT"
] | null | null | null |
api_test/settings.py
|
spothero/api-test
|
377850c1d80c927c730875317ebf2b7f2391bf60
|
[
"MIT"
] | 12
|
2016-09-10T00:41:28.000Z
|
2017-02-01T20:47:35.000Z
|
api_test/settings.py
|
spothero/api-test
|
377850c1d80c927c730875317ebf2b7f2391bf60
|
[
"MIT"
] | 1
|
2017-02-27T22:18:19.000Z
|
2017-02-27T22:18:19.000Z
|
from django.conf import settings
# Extend the host project's installed apps with django_hosts.
# NOTE(review): assumes settings.INSTALLED_APPS is a tuple — confirm; a
# project declaring it as a list would fail this tuple concatenation.
INSTALLED_APPS = settings.INSTALLED_APPS + (
    'django_hosts',
)

# Path to the API-test fixture file; empty string when not configured.
FIXTURE_FILE = getattr(settings, 'API_TEST_FIXTURE_FILE', '')
| 18.333333
| 61
| 0.751515
| 20
| 165
| 5.85
| 0.65
| 0.290598
| 0.358974
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139394
| 165
| 8
| 62
| 20.625
| 0.823944
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0.127273
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
31d77f5073f6e74a400b554e5e7a696f04a9c4f5
| 125
|
py
|
Python
|
users/forms/fields/__init__.py
|
edcilo/edc_service_users_flask
|
ccb19956d1f83baae582b083e5c976938f2d31cd
|
[
"MIT"
] | null | null | null |
users/forms/fields/__init__.py
|
edcilo/edc_service_users_flask
|
ccb19956d1f83baae582b083e5c976938f2d31cd
|
[
"MIT"
] | null | null | null |
users/forms/fields/__init__.py
|
edcilo/edc_service_users_flask
|
ccb19956d1f83baae582b083e5c976938f2d31cd
|
[
"MIT"
] | null | null | null |
from wtforms import Field
class ListField(Field):
    """WTForms field whose data is the full list of submitted values."""

    def process_formdata(self, valuelist):
        # Keep the raw list of submitted values as-is.
        self.data = valuelist
| 17.857143
| 42
| 0.72
| 15
| 125
| 5.933333
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.208
| 125
| 6
| 43
| 20.833333
| 0.89899
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
9edfc3dcf3dc241993c3eac1acb4da1059c6048a
| 447
|
py
|
Python
|
datahub_client/apis/__init__.py
|
amkimian/mimir_python
|
994c1542437fa6bd1d0e53b0c0c4c8f692575374
|
[
"Apache-2.0"
] | null | null | null |
datahub_client/apis/__init__.py
|
amkimian/mimir_python
|
994c1542437fa6bd1d0e53b0c0c4c8f692575374
|
[
"Apache-2.0"
] | null | null | null |
datahub_client/apis/__init__.py
|
amkimian/mimir_python
|
994c1542437fa6bd1d0e53b0c0c4c8f692575374
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import
# import apis into api package
from .admin_api import AdminApi
from .data_api import DataApi
from .dataset_api import DatasetApi
from .element_api import ElementApi
from .invoice_api import InvoiceApi
from .marketplace_api import MarketplaceApi
from .query_api import QueryApi
from .release_api import ReleaseApi
from .scheme_api import SchemeApi
from .user_api import UserApi
from .view_api import ViewApi
| 29.8
| 43
| 0.850112
| 65
| 447
| 5.6
| 0.461538
| 0.271978
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123043
| 447
| 14
| 44
| 31.928571
| 0.928571
| 0.06264
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
9ee3827d40bd3c2ef977372eb92c54f84febb5e5
| 17,983
|
py
|
Python
|
tests/test_utilities/test_state_machine.py
|
quest-gmulcahy/bacpypes
|
fd27c54ac7cf90078bd2cad3084242132311e3b6
|
[
"MIT"
] | null | null | null |
tests/test_utilities/test_state_machine.py
|
quest-gmulcahy/bacpypes
|
fd27c54ac7cf90078bd2cad3084242132311e3b6
|
[
"MIT"
] | null | null | null |
tests/test_utilities/test_state_machine.py
|
quest-gmulcahy/bacpypes
|
fd27c54ac7cf90078bd2cad3084242132311e3b6
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Test Utilities State Machine
----------------------------
"""
import unittest
from bacpypes.debugging import bacpypes_debugging, ModuleLogger
from ..state_machine import State, StateMachine, StateMachineGroup, match_pdu
from ..time_machine import reset_time_machine, run_time_machine
from ..trapped_classes import TrappedState, TrappedStateMachine
# some debugging
_debug = 0  # set nonzero to enable the _debug trace calls throughout this module
_log = ModuleLogger(globals())
@bacpypes_debugging
class TPDU:
    """Trivial test PDU: stores arbitrary keyword arguments as attributes."""

    def __init__(self, **kwargs):
        if _debug: TPDU._debug("__init__ %r", kwargs)
        self.__dict__.update(kwargs)

    def __repr__(self):
        # Fix: the original had a trailing comma after the unparenthesized
        # generator expression argument, which is a SyntaxError in Python 3
        # ("Generator expression must be parenthesized").
        return '<TPDU {}>'.format(', '.join(
            '{}={}'.format(k, v) for k, v in self.__dict__.items()
        ))
@bacpypes_debugging
class TestMatchPDU(unittest.TestCase):
    """Tests for the match_pdu() helper."""

    def test_match_pdu(self):
        """match_pdu filters by optional type (or tuple of types) and attribute values."""
        if _debug: TestMatchPDU._debug("test_match_pdu")

        tpdu = TPDU(x=1)
        Anon = type('Anon', (), {})
        anon = Anon()

        # no criteria passes
        assert match_pdu(tpdu)
        assert match_pdu(anon)

        # matching/not matching types
        assert match_pdu(tpdu, TPDU)
        assert not match_pdu(tpdu, Anon)
        assert match_pdu(tpdu, (TPDU, Anon))

        # matching/not matching attributes
        assert match_pdu(tpdu, x=1)
        assert not match_pdu(tpdu, x=2)
        assert not match_pdu(tpdu, y=1)
        assert not match_pdu(anon, x=1)

        # matching/not matching types and attributes
        assert match_pdu(tpdu, TPDU, x=1)
        assert not match_pdu(tpdu, TPDU, x=2)
        assert not match_pdu(tpdu, TPDU, y=1)
@bacpypes_debugging
class TestState(unittest.TestCase):
    """Tests for the State building block."""

    def test_state_doc(self):
        """doc() sets the doc string and returns the state for chaining."""
        if _debug: TestState._debug("test_state_doc")

        # change the doc string
        ts = State(None)
        ns = ts.doc("test state")
        assert ts.doc_string == "test state"
        assert ns is ts

        if _debug: TestState._debug("    - passed")

    def test_state_success(self):
        """success() marks the state terminal; re-flagging it raises."""
        if _debug: TestState._debug("test_state_success")

        # create a state and flag it success
        ts = State(None)
        ns = ts.success()
        assert ts.is_success_state
        assert ns is ts

        # a terminal state cannot be flagged again, either way
        with self.assertRaises(RuntimeError):
            ts.success()
        with self.assertRaises(RuntimeError):
            ts.fail()

        if _debug: TestState._debug("    - passed")

    def test_state_fail(self):
        """fail() marks the state terminal; re-flagging it raises."""
        if _debug: TestState._debug("test_state_fail")

        # create a state and flag it fail
        ts = State(None)
        ns = ts.fail()
        assert ts.is_fail_state
        assert ns is ts

        # a terminal state cannot be flagged again, either way
        with self.assertRaises(RuntimeError):
            ts.success()
        with self.assertRaises(RuntimeError):
            ts.fail()

        if _debug: TestState._debug("    - passed")

    def test_something_else(self):
        """Placeholder test; only exercises the debug trace."""
        if _debug: TestState._debug("test_something_else")

        if _debug: TestState._debug("    - passed")
@bacpypes_debugging
class TestStateMachine(unittest.TestCase):
    """Tests for StateMachine/TrappedStateMachine run, send, receive,
    call and multi-state loop behavior."""

    def test_state_machine_run(self):
        """A machine with no transitions keeps running in its start state."""
        if _debug: TestStateMachine._debug("test_state_machine_run")

        # create a state machine
        tsm = StateMachine()

        # run the machine
        tsm.run()

        # check for still running in the start state
        assert tsm.running
        assert tsm.current_state is tsm.start_state

        if _debug: TestStateMachine._debug("    - passed")

    def test_state_machine_success(self):
        """A success start state finishes the machine successfully."""
        if _debug: TestStateMachine._debug("test_state_machine_success")

        # create a trapped state machine
        tsm = TrappedStateMachine()
        assert isinstance(tsm.start_state, TrappedState)

        # make the start state a success
        tsm.start_state.success()

        # run the machine
        tsm.run()

        # check for success
        assert not tsm.running
        assert tsm.current_state.is_success_state

        if _debug: TestStateMachine._debug("    - passed")

    def test_state_machine_fail(self):
        """A fail start state finishes the machine in the fail state."""
        if _debug: TestStateMachine._debug("test_state_machine_fail")

        # create a trapped state machine
        tsm = TrappedStateMachine()
        assert isinstance(tsm.start_state, TrappedState)

        # make the start state a fail
        tsm.start_state.fail()

        # run the machine
        tsm.run()

        # check for success
        assert not tsm.running
        assert tsm.current_state.is_fail_state

        if _debug: TestStateMachine._debug("    - passed")

    def test_state_machine_send(self):
        """A send transition emits the pdu, triggers the callbacks and logs it."""
        if _debug: TestStateMachine._debug("test_state_machine_send")

        # create a trapped state machine
        tsm = TrappedStateMachine()

        # make pdu object
        pdu = TPDU()

        # make a send transition from start to success, run the machine
        tsm.start_state.send(pdu).success()
        tsm.run()

        # check for success
        assert not tsm.running
        assert tsm.current_state.is_success_state

        # check the callbacks
        assert tsm.start_state.before_send_pdu is pdu
        assert tsm.start_state.after_send_pdu is pdu
        assert tsm.before_send_pdu is pdu
        assert tsm.after_send_pdu is pdu

        # make sure the pdu was sent
        assert tsm.sent is pdu

        # check the transaction log
        assert len(tsm.transaction_log) == 1
        assert tsm.transaction_log[0][1] is pdu

        if _debug: TestStateMachine._debug("    - passed")

    def test_state_machine_receive(self):
        """A receive transition waits for a matching pdu, then succeeds."""
        if _debug: TestStateMachine._debug("test_state_machine_receive")

        # create a trapped state machine
        tsm = TrappedStateMachine()

        # make pdu object
        pdu = TPDU()

        # make a receive transition from start to success, run the machine
        tsm.start_state.receive(TPDU).success()
        tsm.run()

        # check for still running
        assert tsm.running

        # tell the machine it is receiving the pdu
        tsm.receive(pdu)

        # check for success
        assert not tsm.running
        assert tsm.current_state.is_success_state

        # check the callbacks
        assert tsm.start_state.before_receive_pdu is pdu
        assert tsm.start_state.after_receive_pdu is pdu
        assert tsm.before_receive_pdu is pdu
        assert tsm.after_receive_pdu is pdu

        # check the transaction log
        assert len(tsm.transaction_log) == 1
        assert tsm.transaction_log[0][1] is pdu

        if _debug: TestStateMachine._debug("    - passed")

    def test_state_machine_unexpected(self):
        """A non-matching pdu routes the machine to the unexpected-receive fail state."""
        if _debug: TestStateMachine._debug("test_state_machine_unexpected")

        # create a trapped state machine
        tsm = TrappedStateMachine()

        # make pdu object
        good_pdu = TPDU(a=1)
        bad_pdu = TPDU(b=2)

        # make a receive transition from start to success, run the machine
        tsm.start_state.receive(TPDU, a=1).success()
        tsm.run()

        # check for still running
        assert tsm.running

        # give the machine a bad pdu
        tsm.receive(bad_pdu)

        # check for fail
        assert not tsm.running
        assert tsm.current_state.is_fail_state
        assert tsm.current_state is tsm.unexpected_receive_state

        # check the callback
        assert tsm.unexpected_receive_pdu is bad_pdu

        # check the transaction log
        assert len(tsm.transaction_log) == 1
        assert tsm.transaction_log[0][1] is bad_pdu

        if _debug: TestStateMachine._debug("    - passed")

    def test_state_machine_call(self):
        """A call transition invokes the callable, then succeeds."""
        if _debug: TestStateMachine._debug("test_state_machine_call")

        # simple hook
        self._called = False

        # create a trapped state machine
        tsm = TrappedStateMachine()

        # make a send transition from start to success, run the machine
        tsm.start_state.call(setattr, self, '_called', True).success()
        tsm.run()

        # check for success
        assert not tsm.running
        # NOTE(review): this checks is_success_state on the machine itself,
        # unlike the other tests which check tsm.current_state — presumably
        # StateMachine proxies the flag; confirm this is intentional
        assert tsm.is_success_state

        # check for the call
        assert self._called

    def test_state_machine_call_exception(self):
        """A call transition whose callable raises puts the machine in a fail state."""
        if _debug: TestStateMachine._debug("test_state_machine_call_exception")

        # simple hook
        self._called = False

        def fn():
            self._called = True
            raise AssertionError("error")

        # create a trapped state machine
        tsm = TrappedStateMachine()

        # make a send transition from start to success, run the machine
        tsm.start_state.call(fn).success()
        tsm.run()

        # check for failed call
        assert not tsm.running
        assert tsm.is_fail_state

        # check for the call
        assert self._called

    def test_state_machine_loop_01(self):
        """Send first, then wait to receive: both legs logged in order."""
        if _debug: TestStateMachine._debug("test_state_machine_loop_01")

        # create a trapped state machine
        tsm = TrappedStateMachine()

        # make pdu object
        first_pdu = TPDU(a=1)
        if _debug: TestStateMachine._debug("    - first_pdu: %r", first_pdu)
        second_pdu = TPDU(a=2)
        if _debug: TestStateMachine._debug("    - second_pdu: %r", second_pdu)

        # after sending the first pdu, wait for the second
        s0 = tsm.start_state
        s1 = s0.send(first_pdu)
        s2 = s1.receive(TPDU, a=2)
        s2.success()

        # run the machine
        tsm.run()

        # check for still running and waiting
        assert tsm.running
        assert tsm.current_state is s1
        if _debug: TestStateMachine._debug("    - still running and waiting")

        # give the machine the second pdu
        tsm.receive(second_pdu)

        # check for success
        assert not tsm.running
        assert tsm.current_state.is_success_state
        if _debug: TestStateMachine._debug("    - success")

        # check the callbacks
        assert s0.before_send_pdu is first_pdu
        assert s0.after_send_pdu is first_pdu
        assert s1.before_receive_pdu is second_pdu
        assert s1.after_receive_pdu is second_pdu
        if _debug: TestStateMachine._debug("    - callbacks passed")

        # check the transaction log
        assert len(tsm.transaction_log) == 2
        assert tsm.transaction_log[0][1] is first_pdu
        assert tsm.transaction_log[1][1] is second_pdu
        if _debug: TestStateMachine._debug("    - transaction log passed")

    def test_state_machine_loop_02(self):
        """Receive first, then send: both legs logged in order."""
        if _debug: TestStateMachine._debug("test_state_machine_loop_02")

        # create a trapped state machine
        tsm = TrappedStateMachine()

        # make pdu object
        first_pdu = TPDU(a=1)
        second_pdu = TPDU(a=2)

        # when the first pdu is received, send the second
        s0 = tsm.start_state
        s1 = s0.receive(TPDU, a=1)
        s2 = s1.send(second_pdu)
        s2.success()

        # run the machine
        tsm.run()

        # check for still running
        assert tsm.running
        if _debug: TestStateMachine._debug("    - still running")

        # give the machine the first pdu
        tsm.receive(first_pdu)

        # check for success
        assert not tsm.running
        assert tsm.current_state.is_success_state
        if _debug: TestStateMachine._debug("    - success")

        # check the callbacks
        assert s0.before_receive_pdu is first_pdu
        assert s0.after_receive_pdu is first_pdu
        assert s1.before_send_pdu is second_pdu
        assert s1.after_send_pdu is second_pdu
        if _debug: TestStateMachine._debug("    - callbacks passed")

        # check the transaction log
        assert len(tsm.transaction_log) == 2
        assert tsm.transaction_log[0][1] is first_pdu
        assert tsm.transaction_log[1][1] is second_pdu
        if _debug: TestStateMachine._debug("    - transaction log passed")
@bacpypes_debugging
class TestStateMachineTimeout1(unittest.TestCase):
    """A timeout transition fires under the time machine."""

    def test_state_machine_timeout_1(self):
        """A 1s timeout from start to success completes within 60s of simulated time."""
        if _debug: TestStateMachineTimeout1._debug("test_state_machine_timeout_1")

        # create a trapped state machine
        tsm = TrappedStateMachine()

        # make a timeout transition from start to success
        tsm.start_state.timeout(1.0).success()

        reset_time_machine()
        if _debug: TestStateMachineTimeout1._debug("    - time machine reset")

        tsm.run()
        run_time_machine(60.0)
        if _debug: TestStateMachineTimeout1._debug("    - time machine finished")

        # check for success
        assert not tsm.running
        assert tsm.current_state.is_success_state

        # fix: final trace previously went through TestStateMachine's debugger
        if _debug: TestStateMachineTimeout1._debug("    - passed")
@bacpypes_debugging
class TestStateMachineTimeout2(unittest.TestCase):
    """Alternating send/timeout transitions under the time machine."""

    def test_state_machine_timeout_2(self):
        """Send, wait 1s, send, wait 1s, then succeed; both sends are logged."""
        if _debug: TestStateMachineTimeout2._debug("test_state_machine_timeout_2")

        # make some pdu's
        first_pdu = TPDU(a=1)
        second_pdu = TPDU(a=2)

        # create a trapped state machine
        tsm = TrappedStateMachine()
        s0 = tsm.start_state

        # send something, wait, send something, wait, success
        s1 = s0.send(first_pdu)
        s2 = s1.timeout(1.0)
        s3 = s2.send(second_pdu)
        # fix: dropped the unused `s4 =` binding of the final success state
        s3.timeout(1.0).success()

        reset_time_machine()
        if _debug: TestStateMachineTimeout2._debug("    - time machine reset")

        tsm.run()
        run_time_machine(60.0)
        if _debug: TestStateMachineTimeout2._debug("    - time machine finished")

        # check for success
        assert not tsm.running
        assert tsm.current_state.is_success_state

        # check the transaction log
        assert len(tsm.transaction_log) == 2
        assert tsm.transaction_log[0][1] is first_pdu
        assert tsm.transaction_log[1][1] is second_pdu

        # fix: final trace previously went through TestStateMachine's debugger
        if _debug: TestStateMachineTimeout2._debug("    - passed")
@bacpypes_debugging
class TestStateMachineGroup(unittest.TestCase):
    """A StateMachineGroup reflects the collective result of its machines."""

    def test_state_machine_group_success(self):
        """A group whose only machine succeeds is itself a success."""
        if _debug: TestStateMachineGroup._debug("test_state_machine_group_success")

        # create a state machine group
        smg = StateMachineGroup()

        # create a trapped state machine, start state is success
        tsm = TrappedStateMachine()
        tsm.start_state.success()

        # add it to the group
        smg.append(tsm)

        reset_time_machine()
        if _debug: TestStateMachineGroup._debug("    - time machine reset")

        # tell the group to run
        smg.run()
        run_time_machine(60.0)
        if _debug: TestStateMachineGroup._debug("    - time machine finished")

        # check for success
        assert not tsm.running
        assert tsm.current_state.is_success_state
        assert not smg.is_running
        assert smg.is_success_state

        # fix: final trace previously went through TestStateMachine's debugger
        if _debug: TestStateMachineGroup._debug("    - passed")

    def test_state_machine_group_fail(self):
        """A group whose only machine fails is itself a failure."""
        if _debug: TestStateMachineGroup._debug("test_state_machine_group_fail")

        # create a state machine group
        smg = StateMachineGroup()

        # create a trapped state machine, start state is fail
        tsm = TrappedStateMachine()
        tsm.start_state.fail()

        # add it to the group
        smg.append(tsm)

        reset_time_machine()
        if _debug: TestStateMachineGroup._debug("    - time machine reset")

        # tell the group to run
        smg.run()
        run_time_machine(60.0)
        if _debug: TestStateMachineGroup._debug("    - time machine finished")

        # check for success
        assert not tsm.running
        assert tsm.current_state.is_fail_state
        assert not smg.is_running
        assert smg.is_fail_state

        # fix: final trace previously went through TestStateMachine's debugger
        if _debug: TestStateMachineGroup._debug("    - passed")
@bacpypes_debugging
class TestStateMachineEvents(unittest.TestCase):
    """Events set by one machine in a group release another machine waiting on them."""

    def test_state_machine_event_01(self):
        """Setter machine appended before the waiter: both succeed."""
        if _debug: TestStateMachineEvents._debug("test_state_machine_event_01")

        # create a state machine group
        smg = StateMachineGroup()

        # create a trapped state machine, start state is success
        tsm1 = TrappedStateMachine()
        tsm1.start_state.set_event('e').success()
        smg.append(tsm1)

        # create another trapped state machine, waiting for the event
        tsm2 = TrappedStateMachine()
        tsm2.start_state.wait_event('e').success()
        smg.append(tsm2)

        reset_time_machine()
        if _debug: TestStateMachineEvents._debug("    - time machine reset")

        # tell the group to run
        smg.run()
        run_time_machine(60.0)
        if _debug: TestStateMachineEvents._debug("    - time machine finished")

        # check for success
        assert tsm1.current_state.is_success_state
        assert tsm2.current_state.is_success_state
        assert not smg.is_running
        assert smg.is_success_state

        if _debug: TestStateMachineEvents._debug("    - passed")

    def test_state_machine_event_02(self):
        """Waiter machine appended before the setter: both still succeed."""
        if _debug: TestStateMachineEvents._debug("test_state_machine_event_02")

        # create a state machine group
        smg = StateMachineGroup()

        # create a trapped state machine, waiting for an event
        tsm1 = TrappedStateMachine()
        tsm1.start_state.wait_event('e').success()
        smg.append(tsm1)

        # create another trapped state machine, start state is success
        tsm2 = TrappedStateMachine()
        tsm2.start_state.set_event('e').success()
        smg.append(tsm2)

        reset_time_machine()
        if _debug: TestStateMachineEvents._debug("    - time machine reset")

        # tell the group to run
        smg.run()
        run_time_machine(60.0)
        if _debug: TestStateMachineEvents._debug("    - time machine finished")

        # check for success
        assert tsm1.current_state.is_success_state
        assert tsm2.current_state.is_success_state
        assert not smg.is_running
        assert smg.is_success_state

        if _debug: TestStateMachineEvents._debug("    - passed")
| 30.223529
| 83
| 0.642551
| 2,186
| 17,983
| 5.048948
| 0.076395
| 0.038054
| 0.046389
| 0.076108
| 0.813627
| 0.74214
| 0.696385
| 0.646643
| 0.580774
| 0.513998
| 0
| 0.01103
| 0.279041
| 17,983
| 594
| 84
| 30.274411
| 0.840262
| 0.158928
| 0
| 0.563636
| 0
| 0
| 0.086102
| 0.028612
| 0
| 0
| 0
| 0
| 0.321212
| 0
| null | null | 0.060606
| 0.015152
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
9eeee591e15663733b5a5672013dabf998d02a06
| 71
|
py
|
Python
|
docs/bld/example/r/r_example/setup.py
|
hmgaudecker/econ-project-templates
|
0bf8c4701c112a96e2f2a2efe05be5ff39040111
|
[
"BSD-3-Clause"
] | 67
|
2015-01-19T17:41:02.000Z
|
2019-10-17T19:12:49.000Z
|
docs/bld/example/r/r_example/setup.py
|
hmgaudecker/econ-project-templates
|
0bf8c4701c112a96e2f2a2efe05be5ff39040111
|
[
"BSD-3-Clause"
] | 64
|
2015-02-23T15:18:24.000Z
|
2019-11-20T19:58:59.000Z
|
docs/bld/example/r/r_example/setup.py
|
hmgaudecker/econ-project-templates
|
0bf8c4701c112a96e2f2a2efe05be5ff39040111
|
[
"BSD-3-Clause"
] | 51
|
2015-01-15T16:10:33.000Z
|
2019-10-28T21:14:03.000Z
|
from setuptools import setup

# Minimal packaging stub for the r_example example project.
setup(name="r_example", version="0.0.1")
| 17.75
| 40
| 0.746479
| 12
| 71
| 4.333333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046875
| 0.098592
| 71
| 3
| 41
| 23.666667
| 0.765625
| 0
| 0
| 0
| 0
| 0
| 0.197183
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
9ef57c886ccbb47f3b0baa0c29d3eed412d64e20
| 253
|
py
|
Python
|
apis/schemas/users.py
|
Fusemachines/fuse-python-training
|
939bf935646a60817b1da329791e376c7e59464b
|
[
"FSFAP"
] | null | null | null |
apis/schemas/users.py
|
Fusemachines/fuse-python-training
|
939bf935646a60817b1da329791e376c7e59464b
|
[
"FSFAP"
] | null | null | null |
apis/schemas/users.py
|
Fusemachines/fuse-python-training
|
939bf935646a60817b1da329791e376c7e59464b
|
[
"FSFAP"
] | null | null | null |
from apis.models import User
from flask_marshmallow.schema import Schema
from flask_marshmallow.fields import fields
class UserSchema(Schema):
    """Marshmallow schema for user payloads: email and address are required."""

    email = fields.Email(required=True)
    name = fields.String()
    address = fields.String(required=True)
| 31.625
| 43
| 0.782609
| 33
| 253
| 5.939394
| 0.515152
| 0.091837
| 0.204082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13834
| 253
| 8
| 44
| 31.625
| 0.899083
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.428571
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
7341f85e584be4926fb6ddfbfa265e4fe91e6d8a
| 39
|
py
|
Python
|
rio_glui/__init__.py
|
vincentsarago/rio-glui
|
04b4d0fcfb3394aa2e994f5192e68aab6426dfcf
|
[
"MIT"
] | null | null | null |
rio_glui/__init__.py
|
vincentsarago/rio-glui
|
04b4d0fcfb3394aa2e994f5192e68aab6426dfcf
|
[
"MIT"
] | null | null | null |
rio_glui/__init__.py
|
vincentsarago/rio-glui
|
04b4d0fcfb3394aa2e994f5192e68aab6426dfcf
|
[
"MIT"
] | null | null | null |
"""rio_glui."""
__version__ = "1.0.5"
| 9.75
| 21
| 0.564103
| 6
| 39
| 2.833333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 0.128205
| 39
| 3
| 22
| 13
| 0.411765
| 0.230769
| 0
| 0
| 0
| 0
| 0.208333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b4167dda7be66a6b03fe974d8ae4fda8ea679a2f
| 1,169
|
py
|
Python
|
lingorm/drivers/query_builder_abstract.py
|
xiaolingzi/python-orm-lingorm
|
4b614bac1d6427010d7b355e1f67b0bbff52edbc
|
[
"MIT"
] | 2
|
2020-10-12T02:42:41.000Z
|
2020-11-29T07:41:40.000Z
|
lingorm/drivers/query_builder_abstract.py
|
xiaolingzi/lingorm-python
|
4b614bac1d6427010d7b355e1f67b0bbff52edbc
|
[
"MIT"
] | null | null | null |
lingorm/drivers/query_builder_abstract.py
|
xiaolingzi/lingorm-python
|
4b614bac1d6427010d7b355e1f67b0bbff52edbc
|
[
"MIT"
] | null | null | null |
from abc import ABCMeta, abstractmethod
class QueryBuilderAbstract(metaclass=ABCMeta):
    """Abstract interface for SQL query builders.

    Fix: the class imported ABCMeta but never used it as the metaclass, so
    every @abstractmethod decorator was silently unenforced and this
    "abstract" class could be instantiated directly. Declaring the metaclass
    makes instantiation of incomplete subclasses raise TypeError, as intended.
    """

    # NOTE(review): these are class-level attributes shared by every instance
    # until a subclass rebinds them; _param_dict in particular is a mutable
    # dict — confirm subclasses assign fresh instances before mutating it.
    _sql = ""
    _param_dict = {}
    _select_sql = ""
    _from_sql = ""
    _join_sql = ""
    _where_sql = ""
    _group_sql = ""
    _order_sql = ""
    _limit_sql = ""

    @abstractmethod
    def select(self, **kwargs):
        """Build the SELECT clause."""
        pass

    @abstractmethod
    def from_table(self, cls, on_expression):
        """Build the FROM clause for the given mapped class."""
        pass

    @abstractmethod
    def left_join(self, cls, on_expression):
        """Add a LEFT JOIN on the given mapped class."""
        pass

    @abstractmethod
    def right_join(self, cls, on_expression):
        """Add a RIGHT JOIN on the given mapped class."""
        pass

    @abstractmethod
    def inner_join(self, cls, on_expression):
        """Add an INNER JOIN on the given mapped class."""
        pass

    @abstractmethod
    def where(self, *args):
        """Add WHERE conditions."""
        pass

    @abstractmethod
    def order_by(self, *args):
        """Add ORDER BY terms."""
        pass

    @abstractmethod
    def group_by(self, **args):
        # NOTE(review): `**args` is inconsistent with order_by's `*args` and
        # looks like a typo, but the signature is preserved here because
        # subclasses must match it — confirm before changing.
        """Add GROUP BY terms."""
        pass

    @abstractmethod
    def limit(self, top_count):
        """Limit the result to the first ``top_count`` rows."""
        pass

    @abstractmethod
    def first(self, cls=None):
        """Execute and return the first result (or None)."""
        pass

    @abstractmethod
    def find(self, cls=None):
        """Execute and return all results."""
        pass

    @abstractmethod
    def find_page(self, page_index, page_size, cls=None):
        """Execute and return one page of results."""
        pass

    @abstractmethod
    def find_count(self):
        """Execute and return the result count."""
        pass
| 17.712121
| 57
| 0.597092
| 125
| 1,169
| 5.328
| 0.296
| 0.331832
| 0.378378
| 0.114114
| 0.551051
| 0.507508
| 0.366366
| 0.198198
| 0
| 0
| 0
| 0
| 0.310522
| 1,169
| 65
| 58
| 17.984615
| 0.826303
| 0
| 0
| 0.52
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.26
| false
| 0.26
| 0.02
| 0
| 0.48
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
b419ab0fa40796bc165ce31f6e38ed3e52cedc29
| 32
|
py
|
Python
|
tests/serialization/__init__.py
|
crleblanc/citrine-python
|
4e89e9efc498352b2eb6b9abd60c14835c3dd686
|
[
"Apache-2.0"
] | null | null | null |
tests/serialization/__init__.py
|
crleblanc/citrine-python
|
4e89e9efc498352b2eb6b9abd60c14835c3dd686
|
[
"Apache-2.0"
] | null | null | null |
tests/serialization/__init__.py
|
crleblanc/citrine-python
|
4e89e9efc498352b2eb6b9abd60c14835c3dd686
|
[
"Apache-2.0"
] | null | null | null |
"""Tests for Schema classes."""
| 16
| 31
| 0.65625
| 4
| 32
| 5.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 32
| 1
| 32
| 32
| 0.75
| 0.78125
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b41e7074e7bb2e46305d76c3aa329e044b2f1928
| 1,207
|
py
|
Python
|
tests.py
|
tektx/RSA-Encryption
|
3401bf5802c06804722184ebbffaad139fa0410d
|
[
"MIT"
] | 1
|
2021-11-20T11:51:24.000Z
|
2021-11-20T11:51:24.000Z
|
tests.py
|
tektx/RSA-Encryption
|
3401bf5802c06804722184ebbffaad139fa0410d
|
[
"MIT"
] | null | null | null |
tests.py
|
tektx/RSA-Encryption
|
3401bf5802c06804722184ebbffaad139fa0410d
|
[
"MIT"
] | null | null | null |
import main
import pytest
from unittest import mock
import sympy
def test_get_nth_prime():
    """Prime numbers are generated."""
    assert sympy.prime(4) == 7
    assert sympy.prime(9999999) == 179424671
    # Inside pytest.raises a bare call suffices; the original's `assert`
    # was dead code because the expected exception aborts the statement.
    with pytest.raises(ValueError):
        sympy.prime(0)
def test_get_totatives():
    """Totatives of n are calculated."""
    assert main.get_totatives(9) == [1, 2, 4, 5, 7, 8]
    assert main.get_totatives(15) == [1, 2, 4, 7, 8, 11, 13, 14]
    # Bare calls inside pytest.raises — the original wrapped them in
    # `assert`, which never executes once the exception is raised.
    with pytest.raises(ValueError):
        main.get_totatives(0)
    with pytest.raises(ValueError):
        main.get_totatives(50.5)
def test_get_encryption_key():
    """Encryption key is calculated."""
    assert main.get_encryption_key(14, [1, 3, 5, 9, 11, 13]) == 5
    assert main.get_encryption_key(10, [1, 3, 7, 9]) == 3
    # Bare calls inside pytest.raises — the redundant `assert` that
    # previously wrapped them was unreachable dead code.
    with pytest.raises(ValueError):
        main.get_encryption_key(6, [1, 5])
    with pytest.raises(ValueError):
        main.get_encryption_key(33.3, [])
def test_get_decryption_key():
    """Decryption key is calculated, with and without randomization."""
    assert main.get_decryption_key(5, 6) == 5
    # Pin the randomized branch by forcing random.randint to return 2.
    with mock.patch('random.randint', return_value=2):
        randomized_key = main.get_decryption_key(5, 6, True)
    assert randomized_key == 11
| 30.175
| 65
| 0.665286
| 179
| 1,207
| 4.329609
| 0.290503
| 0.129032
| 0.167742
| 0.167742
| 0.477419
| 0.385806
| 0.330323
| 0.258065
| 0.134194
| 0
| 0
| 0.07772
| 0.200497
| 1,207
| 39
| 66
| 30.948718
| 0.725389
| 0.095278
| 0
| 0.185185
| 1
| 0
| 0.013072
| 0
| 0
| 0
| 0
| 0
| 0.481481
| 1
| 0.148148
| true
| 0
| 0.148148
| 0
| 0.296296
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b4219764e7a8842e09d9e943d8cb62b801fa1f9c
| 1,996
|
py
|
Python
|
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/GL/AMD/framebuffer_sample_positions.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/GL/AMD/framebuffer_sample_positions.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/GL/AMD/framebuffer_sample_positions.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
'''OpenGL extension AMD.framebuffer_sample_positions
This module customises the behaviour of the
OpenGL.raw.GL.AMD.framebuffer_sample_positions to provide a more
Python-friendly API
Overview (from the spec)
In unextended GL, the sub-pixel locations of multisampled textures and
renderbuffers are generally determined in an implementation dependent
manner. Some algorithms -- in particular custom antialiasing functions --
depend on the knowledge of, or even require control over the positions of
samples within each pixel.
The AMD_sample_positions extension added some control over the positions
of samples within a single framebuffer. However, it forced all pixels
within a framebuffer to have the same set of sample positions.
This extension provides a mechanism to explicitly set sample positions for
a framebuffer object with multi-sampled attachments in a repeating pattern,
allowing different pixels to use different sub-pixel locations for their
samples. The sample locations used by the FBO can be fixed for all pixels
in the FBOs attachments or they can be fixed for a sampling pattern
comprised of multiple pixels, where the sampling pattern is repeated over
all pixels. The rate of repeat of this sampling pattern size itself is
fixed and is implementation-dependent.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/AMD/framebuffer_sample_positions.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.AMD.framebuffer_sample_positions import *
from OpenGL.raw.GL.AMD.framebuffer_sample_positions import _EXTENSION_NAME
def glInitFramebufferSamplePositionsAMD():
    '''Return boolean indicating whether this extension is available'''
    # Local import mirrors the autogenerated template; `extensions` is also
    # imported at module level, so this re-import is harmless.
    from OpenGL import extensions
    return extensions.hasGLExtension( _EXTENSION_NAME )
### END AUTOGENERATED SECTION
| 45.363636
| 77
| 0.799599
| 278
| 1,996
| 5.676259
| 0.467626
| 0.076046
| 0.063371
| 0.091888
| 0.136882
| 0.136882
| 0.136882
| 0.063371
| 0.063371
| 0
| 0
| 0
| 0.162826
| 1,996
| 44
| 78
| 45.363636
| 0.944345
| 0.828156
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| true
| 0
| 0.777778
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
b43256cea1b6d76e7ca9d9cb76207a7046e3a170
| 235
|
py
|
Python
|
comments/admin.py
|
KingAnduin/fitness
|
2e510ab0ce9fdb1a90e7a7470d56be5188835e06
|
[
"Apache-2.0"
] | null | null | null |
comments/admin.py
|
KingAnduin/fitness
|
2e510ab0ce9fdb1a90e7a7470d56be5188835e06
|
[
"Apache-2.0"
] | 4
|
2021-03-19T02:05:47.000Z
|
2021-09-22T18:53:30.000Z
|
comments/admin.py
|
KingAnduin/fitness
|
2e510ab0ce9fdb1a90e7a7470d56be5188835e06
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib import admin
from comments.models import CommentsInfo
class CommentAdmin(admin.ModelAdmin):
    """Admin options for CommentsInfo: list content, creation time and order."""
    list_display = ('comment_content', 'comment_create_time', 'order')

# Expose CommentsInfo in the Django admin with the options above.
admin.site.register(CommentsInfo, CommentAdmin)
| 23.5
| 70
| 0.8
| 27
| 235
| 6.814815
| 0.740741
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106383
| 235
| 9
| 71
| 26.111111
| 0.87619
| 0
| 0
| 0
| 0
| 0
| 0.165957
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
b43b23f6c9c290898367a52cb7732c09e5830c22
| 157
|
py
|
Python
|
mypy/test/data/fixtures/bool.py
|
silky/mypy
|
de6a8d3710df9f49109cb682f2092e4967bfb92c
|
[
"PSF-2.0"
] | 1
|
2019-06-27T11:34:27.000Z
|
2019-06-27T11:34:27.000Z
|
mypy/test/data/fixtures/bool.py
|
silky/mypy
|
de6a8d3710df9f49109cb682f2092e4967bfb92c
|
[
"PSF-2.0"
] | null | null | null |
mypy/test/data/fixtures/bool.py
|
silky/mypy
|
de6a8d3710df9f49109cb682f2092e4967bfb92c
|
[
"PSF-2.0"
] | null | null | null |
# builtins stub used in boolean-related test cases.
# These classes intentionally shadow the real builtins: the test fixture
# provides the minimal definitions the type checker needs, nothing more.
class object:
    def __init__(self) -> None: pass
class type: pass
class bool: pass
class int: pass
| 14.272727
| 51
| 0.719745
| 24
| 157
| 4.541667
| 0.75
| 0.247706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.203822
| 157
| 10
| 52
| 15.7
| 0.872
| 0.312102
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.8
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
b4449458d1af3ee20ba316c8a78eb5573a21dba8
| 97
|
py
|
Python
|
calculators/apps.py
|
hughest64/brew_app
|
f450e9923e6242b3c37dc3934af6ab540c39999f
|
[
"MIT"
] | null | null | null |
calculators/apps.py
|
hughest64/brew_app
|
f450e9923e6242b3c37dc3934af6ab540c39999f
|
[
"MIT"
] | null | null | null |
calculators/apps.py
|
hughest64/brew_app
|
f450e9923e6242b3c37dc3934af6ab540c39999f
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class CalculatorsConfig(AppConfig):
    """Django application configuration for the ``calculators`` app."""
    name = 'calculators'
| 16.166667
| 35
| 0.773196
| 10
| 97
| 7.5
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.154639
| 97
| 5
| 36
| 19.4
| 0.914634
| 0
| 0
| 0
| 0
| 0
| 0.113402
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
b468328061b42a1663be7a99054e6c61fa2c3dda
| 3,708
|
py
|
Python
|
tests/join_test.py
|
reuster986/arkouda
|
f1781e8a51c0d3d78a4d1753a6a70e7a41db8e7b
|
[
"MIT"
] | null | null | null |
tests/join_test.py
|
reuster986/arkouda
|
f1781e8a51c0d3d78a4d1753a6a70e7a41db8e7b
|
[
"MIT"
] | null | null | null |
tests/join_test.py
|
reuster986/arkouda
|
f1781e8a51c0d3d78a4d1753a6a70e7a41db8e7b
|
[
"MIT"
] | null | null | null |
import importlib
import numpy as np
import math
import gc
import sys
from base_test import ArkoudaTest
from context import arkouda as ak
'''
Encapsulates a variety of arkouda join_on_eq_with_dt test cases.
'''
class JoinTest(ArkoudaTest):
    """Encapsulates a variety of arkouda join_on_eq_with_dt test cases."""

    def setUp(self):
        ArkoudaTest.setUp(self)
        self.N = 1000
        # a1 is all ones, a2 is 0..N-1; t1/t2 are the timestamp arrays.
        self.a1 = ak.ones(self.N, dtype=np.int64)
        self.a2 = ak.arange(0, self.N, 1)
        self.t1 = self.a1
        self.t2 = self.a1 * 10
        self.dt = 10
        ak.verbose = False

    def test_join_on_eq_with_true_dt(self):
        I, J = ak.join_on_eq_with_dt(self.a2, self.a1, self.t1, self.t2, self.dt, "true_dt")
        # NOTE(review): expected size scales with the locale count —
        # presumably the join distributes matches across locales; confirm.
        nl = ak.get_config()['numLocales']
        self.assertEqual(self.N // nl, I.size)
        self.assertEqual(self.N // nl, J.size)

    def test_join_on_eq_with_true_dt_with_result_limit(self):
        nl = ak.get_config()['numLocales']
        # Limit is padded above N*N so every pairwise match is returned.
        lim = (self.N + nl) * self.N
        res_size = self.N * self.N
        I, J = ak.join_on_eq_with_dt(self.a1, self.a1, self.a1, self.a1, self.dt, "true_dt", result_limit=lim)
        self.assertEqual(res_size, I.size)
        self.assertEqual(res_size, J.size)

    def test_join_on_eq_with_abs_dt(self):
        I, J = ak.join_on_eq_with_dt(self.a2, self.a1, self.t1, self.t2, self.dt, "abs_dt")
        nl = ak.get_config()['numLocales']
        self.assertEqual(self.N // nl, I.size)
        self.assertEqual(self.N // nl, J.size)

    def test_join_on_eq_with_pos_dt(self):
        I, J = ak.join_on_eq_with_dt(self.a2, self.a1, self.t1, self.t2, self.dt, "pos_dt")
        nl = ak.get_config()['numLocales']
        self.assertEqual(self.N // nl, I.size)
        self.assertEqual(self.N // nl, J.size)

    def test_join_on_eq_with_abs_dt_outside_window(self):
        '''
        Should get 0 answers because N^2 matches but 0 within dt window
        '''
        dt = 8
        I, J = ak.join_on_eq_with_dt(self.a1, self.a1, self.t1, self.t1 * 10, dt, "abs_dt")
        self.assertEqual(0, I.size)
        self.assertEqual(0, J.size)
        I, J = ak.join_on_eq_with_dt(self.a2, self.a1, self.t1, self.t2, dt, "abs_dt")
        self.assertEqual(0, I.size)
        self.assertEqual(0, J.size)

    def test_join_on_eq_with_pos_dt_outside_window(self):
        '''
        Should get 0 answers because N matches but 0 within dt window
        '''
        dt = 8
        I, J = ak.join_on_eq_with_dt(self.a2, self.a1, self.t1, self.t2, dt, "pos_dt")
        self.assertEqual(0, I.size)
        self.assertEqual(0, J.size)

    def test_error_handling(self):
        """
        Tests error TypeError and ValueError handling
        """
        # Non-pdarray positional arguments must raise TypeError.
        with self.assertRaises(TypeError):
            ak.join_on_eq_with_dt([list(range(0, 11))],
                                  self.a1, self.t1, self.t2, 8, "pos_dt")
        with self.assertRaises(TypeError):
            ak.join_on_eq_with_dt([self.a1, list(range(0, 11))],
                                  self.t1, self.t2, 8, "pos_dt")
        with self.assertRaises(TypeError):
            ak.join_on_eq_with_dt([self.a1, self.a1, list(range(0, 11))],
                                  self.t2, 8, "pos_dt")
        with self.assertRaises(TypeError):
            ak.join_on_eq_with_dt([self.a1, self.a1, self.t1,
                                   list(range(0, 11))], 8, "pos_dt")
        with self.assertRaises(TypeError):
            ak.join_on_eq_with_dt(self.a1,
                                  self.a1, self.t1, self.t2, '8', "pos_dt")
        # Unknown dt-mode string and negative result limit raise ValueError.
        with self.assertRaises(ValueError):
            ak.join_on_eq_with_dt(self.a1, self.a1, self.t1, self.t1 * 10, 8, "ab_dt")
        with self.assertRaises(ValueError):
            ak.join_on_eq_with_dt(self.a1, self.a1, self.t1, self.t1 * 10, 8, "abs_dt", -1)
| 38.625
| 103
| 0.60329
| 595
| 3,708
| 3.544538
| 0.136134
| 0.073969
| 0.104315
| 0.119488
| 0.749644
| 0.726411
| 0.726411
| 0.709341
| 0.69559
| 0.69559
| 0
| 0.039912
| 0.263484
| 3,708
| 95
| 104
| 39.031579
| 0.732332
| 0.046386
| 0
| 0.347222
| 0
| 0
| 0.037124
| 0
| 0
| 0
| 0
| 0
| 0.291667
| 1
| 0.111111
| false
| 0
| 0.097222
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b4880b11f6591cd2aca8100f9d8bfa56b04f9c2f
| 188
|
py
|
Python
|
examples/reversed.py
|
Fogapod/pink-accents
|
deac9b4baf0a60c1d30fd56ff9529c03889b74c1
|
[
"MIT"
] | 1
|
2022-01-01T10:12:57.000Z
|
2022-01-01T10:12:57.000Z
|
examples/reversed.py
|
Fogapod/pink-accents
|
deac9b4baf0a60c1d30fd56ff9529c03889b74c1
|
[
"MIT"
] | null | null | null |
examples/reversed.py
|
Fogapod/pink-accents
|
deac9b4baf0a60c1d30fd56ff9529c03889b74c1
|
[
"MIT"
] | null | null | null |
from typing import Any
from pink_accents import Accent
class Reversed(Accent):
    """.txet sesreveR"""

    def apply(self, text: str, **kwargs: Any) -> str:
        # Walk the string back-to-front instead of slicing with [::-1].
        return "".join(reversed(text))
| 17.090909
| 53
| 0.638298
| 25
| 188
| 4.76
| 0.76
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006849
| 0.223404
| 188
| 10
| 54
| 18.8
| 0.808219
| 0.074468
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0.2
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
81f0d6e7da63e2eacacfeca251573a3d39051c96
| 1,055
|
py
|
Python
|
teleband/instruments/migrations/0002_data_migration_seed_transpositions.py
|
JMU-CIME/CPR-Music-Backend
|
b72b70ed8826595c96c028595181293edcf1e368
|
[
"MIT"
] | 2
|
2022-01-08T20:21:43.000Z
|
2022-03-18T03:31:30.000Z
|
teleband/instruments/migrations/0002_data_migration_seed_transpositions.py
|
JMU-CIME/CPR-Music-Backend
|
b72b70ed8826595c96c028595181293edcf1e368
|
[
"MIT"
] | 16
|
2022-01-08T02:12:54.000Z
|
2022-03-02T03:02:59.000Z
|
teleband/instruments/migrations/0002_data_migration_seed_transpositions.py
|
JMU-CIME/CPR-Music-Backend
|
b72b70ed8826595c96c028595181293edcf1e368
|
[
"MIT"
] | 2
|
2022-01-08T00:21:37.000Z
|
2022-01-18T05:33:15.000Z
|
# Generated by Django 3.2.11 on 2022-01-07 21:56
from django.db import migrations
def update_site_forward(apps, schema_editor):
    """Seed the Transposition table with the standard transposition names.

    The original docstring ("Set site domain and name.") was a copy-paste
    from Django's site-framework example and did not describe this code.
    ``schema_editor`` is unused but required by the RunPython signature.
    """
    Transposition = apps.get_model("instruments", "Transposition")
    # update_or_create keeps the migration idempotent if it is re-run.
    for name in (
        "Alto Clef",
        "Bb",
        "Concert Pitch BC",
        "Concert Pitch BC 8vb",
        "Concert Pitch TC",
        "Concert Pitch TC 8va",
        "Eb",
        "F",
    ):
        Transposition.objects.update_or_create(name=name)
class Migration(migrations.Migration):
    """Data migration: seeds the Transposition table after the initial schema."""

    dependencies = [
        ("instruments", "0001_initial"),
    ]

    # Reverse is a no-op: rolling back leaves the seeded rows in place.
    operations = [migrations.RunPython(update_site_forward, migrations.RunPython.noop)]
| 25.119048
| 87
| 0.669194
| 119
| 1,055
| 5.739496
| 0.445378
| 0.234261
| 0.304539
| 0.327965
| 0.527086
| 0.527086
| 0.304539
| 0.304539
| 0.304539
| 0
| 0
| 0.026797
| 0.221801
| 1,055
| 41
| 88
| 25.731707
| 0.805116
| 0.069194
| 0
| 0.25
| 1
| 0
| 0.13627
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03125
| false
| 0
| 0.03125
| 0
| 0.15625
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
81f24abb9c82f9cf1e1879fbe7195fc60a9ade2c
| 100
|
py
|
Python
|
config.py
|
DerWaldi/cat-dog-audio-classifier
|
697025d438b3dc45d441d5e34ec62a9041b68a36
|
[
"MIT"
] | null | null | null |
config.py
|
DerWaldi/cat-dog-audio-classifier
|
697025d438b3dc45d441d5e34ec62a9041b68a36
|
[
"MIT"
] | null | null | null |
config.py
|
DerWaldi/cat-dog-audio-classifier
|
697025d438b3dc45d441d5e34ec62a9041b68a36
|
[
"MIT"
] | null | null | null |
# Project-wide configuration constants.
from __future__ import absolute_import, division, print_function

# Hyper Parameters
BATCH_SIZE = 30  # number of examples per training batch
| 25
| 64
| 0.84
| 13
| 100
| 5.923077
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022727
| 0.12
| 100
| 4
| 65
| 25
| 0.852273
| 0.16
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 4
|
c321cba9b4ad5ef09033ba7630fa7d61ea70e07a
| 951
|
py
|
Python
|
robostat/web/views/timetable/renderers.py
|
teknologiakerho/robostat-web
|
10065c0bb9a387ba95e41d28c2f8911c22330372
|
[
"MIT"
] | null | null | null |
robostat/web/views/timetable/renderers.py
|
teknologiakerho/robostat-web
|
10065c0bb9a387ba95e41d28c2f8911c22330372
|
[
"MIT"
] | null | null | null |
robostat/web/views/timetable/renderers.py
|
teknologiakerho/robostat-web
|
10065c0bb9a387ba95e41d28c2f8911c22330372
|
[
"MIT"
] | null | null | null |
import flask
from robostat.rulesets.xsumo import XSumoRuleset
from robostat.rulesets.rescue import RescueRuleset
from robostat.rulesets.tanssi import DanceInterviewRuleset, DancePerformanceRuleset
from robostat.rulesets.haastattelu import HaastatteluRuleset
from robostat.web.views.timetable import event_renderer
@event_renderer.of(XSumoRuleset)
def render_xsumo_event(event):
    """Render an XSumo timetable event using its dedicated template."""
    return flask.render_template("timetable/event-xsumo.html", event=event)
@event_renderer.of(RescueRuleset)
def render_recue_event(event):
    """Render a Rescue timetable event using its dedicated template."""
    # NOTE(review): name looks like a typo for render_rescue_event; left
    # as-is because the function may be imported elsewhere by this name.
    return flask.render_template("timetable/event-rescue.html", event=event)
@event_renderer.of(DancePerformanceRuleset)
def render_dance_event(event):
    """Render a dance-performance timetable event using its template."""
    return flask.render_template("timetable/event-dance.html", event=event)
# Registered for both interview rulesets: they share one template.
@event_renderer.of(HaastatteluRuleset)
@event_renderer.of(DanceInterviewRuleset)
def render_haastattelu_event(event):
    """Render an interview (haastattelu) timetable event."""
    return flask.render_template("timetable/event-haastattelu.html", event=event)
| 39.625
| 83
| 0.840168
| 115
| 951
| 6.791304
| 0.234783
| 0.140845
| 0.096031
| 0.107554
| 0.362356
| 0.362356
| 0.25096
| 0.25096
| 0
| 0
| 0
| 0
| 0.071504
| 951
| 23
| 84
| 41.347826
| 0.884485
| 0
| 0
| 0
| 0
| 0
| 0.116719
| 0.116719
| 0
| 0
| 0
| 0
| 0
| 1
| 0.210526
| false
| 0
| 0.315789
| 0.210526
| 0.736842
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
c3309b6a724f763b6c0a1e25654b324f11e40da4
| 238
|
py
|
Python
|
codetools/util/tests/dict_test_case.py
|
braidedlogix/Codetools_wxPhoenix_py3
|
555e7e3e4895ef9d79716e0323db4445436d10dc
|
[
"BSD-3-Clause"
] | 1
|
2017-05-12T04:17:50.000Z
|
2017-05-12T04:17:50.000Z
|
codetools/util/tests/dict_test_case.py
|
braidedlogix/Codetools_wxPhoenix_py3
|
555e7e3e4895ef9d79716e0323db4445436d10dc
|
[
"BSD-3-Clause"
] | null | null | null |
codetools/util/tests/dict_test_case.py
|
braidedlogix/Codetools_wxPhoenix_py3
|
555e7e3e4895ef9d79716e0323db4445436d10dc
|
[
"BSD-3-Clause"
] | null | null | null |
import unittest
from traits.testing.api import doctest_for_module
import codetools.util.dict as dict
class DictDocTestCase(doctest_for_module(dict)):
    """Collects the doctests of codetools.util.dict (imported above as ``dict``)."""
    pass
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    import sys
    unittest.main(argv=sys.argv)
| 18.307692
| 49
| 0.768908
| 33
| 238
| 5.181818
| 0.636364
| 0.116959
| 0.187135
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151261
| 238
| 12
| 50
| 19.833333
| 0.846535
| 0
| 0
| 0
| 0
| 0
| 0.033613
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.125
| 0.5
| 0
| 0.625
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 4
|
c34d0a5423a6326d7300e7ec40960aa7ec8583c5
| 1,120
|
py
|
Python
|
actors/Big_brother.py
|
MaryanMorel/utopical-brotherhood
|
de80c0b38a2410fc4ebd765decdda738e8faf889
|
[
"MIT"
] | null | null | null |
actors/Big_brother.py
|
MaryanMorel/utopical-brotherhood
|
de80c0b38a2410fc4ebd765decdda738e8faf889
|
[
"MIT"
] | null | null | null |
actors/Big_brother.py
|
MaryanMorel/utopical-brotherhood
|
de80c0b38a2410fc4ebd765decdda738e8faf889
|
[
"MIT"
] | null | null | null |
#! /usr/bin/python2
# -*- coding: utf8 -*-
import pykka
import time
from Manager import Manager
class Big_Brother(pykka.ThreadingActor):
    """Supervisor actor owning a pool of Manager actor proxies.

    Managers are addressed by the 0-based index returned by
    :meth:`start_manager` and used by every other method.
    """

    def __init__(self):
        super(Big_Brother, self).__init__()
        self.pool = {'managers': []}

    def start_manager(self, token):
        """Start a new Manager actor; return its id (index in the pool).

        BUG FIX: the original returned ``len(self.pool['managers'])``, i.e.
        a 1-based count, while every other method indexes the list with the
        id (0-based).  Passing the old return value straight back would
        address the wrong manager or raise IndexError.
        """
        self.pool['managers'].append(Manager.start(token).proxy())
        return len(self.pool['managers']) - 1  # 0-based id of the manager

    def run_manager(self, manager_id):
        """Kick off the full pipeline on the given manager."""
        self.pool['managers'][manager_id].runAll()

    def learn(self, manager_id, k):
        """Run learning with parameter ``k`` on the given manager."""
        self.pool['managers'][manager_id].learn(k)

    def erase_manager_raw_data(self, manager_id):
        self.pool['managers'][manager_id].erase_raw_data()

    def erase_manager_parsed_data(self, manager_id):
        self.pool['managers'][manager_id].erase_parsed_data()

    def erase_manager_clusterings(self, manager_id):
        self.pool['managers'][manager_id].erase_clusterings()

    def stop_manager(self, manager_id):
        """Stop the manager's slaves, wait for completion, then stop it."""
        answer = self.pool['managers'][manager_id].stop_slaves()
        answer.get()  # block thread until the slaves have stopped
        self.pool['managers'][manager_id].stop()
| 31.111111
| 66
| 0.675
| 146
| 1,120
| 4.90411
| 0.30137
| 0.163408
| 0.223464
| 0.22486
| 0.360335
| 0.325419
| 0.244413
| 0.244413
| 0.191341
| 0.131285
| 0
| 0.002181
| 0.18125
| 1,120
| 35
| 67
| 32
| 0.778626
| 0.061607
| 0
| 0
| 0
| 0
| 0.076409
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.125
| 0
| 0.541667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
c34eb2cbe27121eb684d0b5496ff31a067a2d088
| 8,162
|
py
|
Python
|
k2/python/host/tests/properties_test.py
|
EmreOzkose/k2
|
818b138b33eabe440601df8910a2b97ac088594b
|
[
"Apache-2.0"
] | 491
|
2020-09-17T09:05:05.000Z
|
2022-03-31T13:38:18.000Z
|
k2/python/host/tests/properties_test.py
|
jimbozhang/k2
|
eeeabf187aae5fb4bb91dc66dada32a0e555db6c
|
[
"Apache-2.0"
] | 600
|
2020-09-17T13:55:06.000Z
|
2022-03-30T23:23:40.000Z
|
k2/python/host/tests/properties_test.py
|
jimbozhang/k2
|
eeeabf187aae5fb4bb91dc66dada32a0e555db6c
|
[
"Apache-2.0"
] | 121
|
2020-09-17T15:44:56.000Z
|
2022-03-23T13:22:52.000Z
|
#!/usr/bin/env python3
#
# Copyright 2020 Xiaomi Corporation (author: Haowen Qiu)
#
# See ../../../LICENSE for clarification regarding multiple authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# To run this single test, use
#
# ctest --verbose -R host_properties_test_py
#
import unittest
import torch
import k2host
class TestIsValid(unittest.TestCase):
    """k2host.is_valid on malformed and well-formed FSAs.

    FSA strings list one arc per line plus a trailing final-state line —
    presumably "src dst label score"; confirm against k2host.str_to_fsa.
    """

    def test_bad_case1(self):
        # fsa should contain at least two states
        array_size = k2host.IntArray2Size(1, 0)
        fsa = k2host.Fsa.create_fsa_with_size(array_size)
        self.assertFalse(k2host.is_valid(fsa))

    def test_bad_case2(self):
        # only kFinalSymbol arcs enter the final state
        s = r'''
        0 1 0 0
        0 2 1 0
        1 2 0 0
        2
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertFalse(k2host.is_valid(fsa))

    def test_bad_case3(self):
        # `arc_indexes` and `arcs` in this state are not consistent
        arc_indexes = torch.IntTensor([0, 2, 2, 2])
        arcs = torch.IntTensor([[0, 1, 0, 0], [0, 2, 1, 0], [1, 2, 0, 0]])
        fsa = k2host.Fsa(arc_indexes, arcs)
        self.assertFalse(k2host.is_valid(fsa))

    def test_good_cases1(self):
        # empty fsa is valid
        array_size = k2host.IntArray2Size(0, 0)
        fsa = k2host.Fsa.create_fsa_with_size(array_size)
        self.assertTrue(k2host.is_valid(fsa))

    def test_good_case2(self):
        s = r'''
        0 1 0 0
        0 2 0 0
        2 3 -1 0
        3
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertTrue(k2host.is_valid(fsa))

    def test_good_case3(self):
        s = r'''
        0 1 0 0
        0 2 -1 0
        1 2 -1 0
        2
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertTrue(k2host.is_valid(fsa))
class TestIsTopSorted(unittest.TestCase):
    """k2host.is_top_sorted: arcs must never go from a higher to a lower state."""

    def test_bad_cases1(self):
        # arc 2 -> 1 goes backwards, so the FSA is not top-sorted
        s = r'''
        0 1 0 0
        0 2 0 0
        2 1 0 0
        2
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertFalse(k2host.is_top_sorted(fsa))

    def test_good_cases1(self):
        # empty fsa
        array_size = k2host.IntArray2Size(0, 0)
        fsa = k2host.Fsa.create_fsa_with_size(array_size)
        self.assertTrue(k2host.is_top_sorted(fsa))

    def test_good_case2(self):
        s = r'''
        0 1 0 0
        0 2 0 0
        1 2 0 0
        3
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertTrue(k2host.is_top_sorted(fsa))
class TestIsArcSorted(unittest.TestCase):
    """k2host.is_arc_sorted: arcs leaving each state must be sorted."""

    def test_bad_cases1(self):
        # state 1's arcs have labels 2 then 1 — out of order
        s = r'''
        0 1 1 0
        0 2 2 0
        1 2 2 0
        1 3 1 0
        3
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertFalse(k2host.is_arc_sorted(fsa))

    def test_bad_cases2(self):
        # same label on two arcs
        s = r'''
        0 2 0 0
        0 1 0 0
        2
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertFalse(k2host.is_arc_sorted(fsa))

    def test_good_cases1(self):
        # empty fsa
        array_size = k2host.IntArray2Size(0, 0)
        fsa = k2host.Fsa.create_fsa_with_size(array_size)
        self.assertTrue(k2host.is_arc_sorted(fsa))

    def test_good_case2(self):
        s = r'''
        0 1 0 0
        0 2 0 0
        1 2 1 0
        1 3 2 0
        3
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertTrue(k2host.is_arc_sorted(fsa))
class TestHasSelfLoops(unittest.TestCase):
    """k2host.has_self_loops: detects arcs whose source equals destination."""

    def test_bad_cases1(self):
        s = r'''
        0 1 0 0
        0 2 0 0
        1 2 0 0
        2
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertFalse(k2host.has_self_loops(fsa))

    def test_bad_cases2(self):
        # empty fsa
        array_size = k2host.IntArray2Size(0, 0)
        fsa = k2host.Fsa.create_fsa_with_size(array_size)
        self.assertFalse(k2host.has_self_loops(fsa))

    def test_good_case2(self):
        # arc 1 -> 1 is a self loop
        s = r'''
        0 1 0 0
        1 2 0 0
        1 1 0 0
        2
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertTrue(k2host.has_self_loops(fsa))
class TestIsDeterministic(unittest.TestCase):
    """k2host.is_deterministic: no state may have two arcs with one label."""

    def test_bad_cases1(self):
        # state 1 has two arcs with label 0
        s = r'''
        0 1 2 0
        1 2 0 0
        1 3 0 0
        3
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertFalse(k2host.is_deterministic(fsa))

    def test_good_cases1(self):
        # empty fsa
        array_size = k2host.IntArray2Size(0, 0)
        fsa = k2host.Fsa.create_fsa_with_size(array_size)
        self.assertTrue(k2host.is_deterministic(fsa))

    def test_good_case2(self):
        s = r'''
        0 1 2 0
        1 2 0 0
        1 3 2 0
        3
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertTrue(k2host.is_deterministic(fsa))
class TestIsEpsilonFree(unittest.TestCase):
    """k2host.is_epsilon_free: no arc may carry label 0 (epsilon)."""

    def test_bad_cases1(self):
        # arc 0 -> 2 has label 0 (epsilon)
        s = r'''
        0 1 2 0
        0 2 0 0
        1 2 1 0
        2
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertFalse(k2host.is_epsilon_free(fsa))

    def test_good_cases1(self):
        # empty fsa
        array_size = k2host.IntArray2Size(0, 0)
        fsa = k2host.Fsa.create_fsa_with_size(array_size)
        self.assertTrue(k2host.is_epsilon_free(fsa))

    def test_good_case2(self):
        s = r'''
        0 1 2 0
        0 2 1 0
        1 2 1 0
        2
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertTrue(k2host.is_epsilon_free(fsa))
class TestIsConnected(unittest.TestCase):
    """k2host.is_connected: every state must lie on a start-to-final path."""

    def test_bad_cases1(self):
        # state 1 is unreachable from the start state
        s = r'''
        0 2 0 0
        2
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertFalse(k2host.is_connected(fsa))

    def test_bad_cases2(self):
        # state 1 cannot reach the final state
        s = r'''
        0 1 0 0
        0 2 0 0
        2
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertFalse(k2host.is_connected(fsa))

    def test_good_cases1(self):
        # empty fsa
        array_size = k2host.IntArray2Size(0, 0)
        fsa = k2host.Fsa.create_fsa_with_size(array_size)
        self.assertTrue(k2host.is_connected(fsa))

    def test_good_case2(self):
        s = r'''
        0 1 0 0
        0 3 0 0
        1 2 0 0
        2 3 0 0
        3
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertTrue(k2host.is_connected(fsa))

    def test_good_case3(self):
        # connected even with a cycle (3 -> 1) and a duplicate arc
        s = r'''
        0 3 0 0
        1 2 0 0
        2 3 0 0
        2 3 0 0
        2 4 0 0
        3 1 0 0
        4
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertTrue(k2host.is_connected(fsa))
class TestIsAcyclic(unittest.TestCase):
    """k2host.is_acyclic: the FSA must contain no cycles."""

    def test_bad_cases1(self):
        # cycle: 1 -> 2 -> 1
        s = r'''
        0 1 2 0
        0 4 0 0
        0 2 0 0
        1 2 1 0
        1 3 0 0
        2 1 0 0
        3
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertFalse(k2host.is_acyclic(fsa))

    def test_good_cases1(self):
        # empty fsa
        array_size = k2host.IntArray2Size(0, 0)
        fsa = k2host.Fsa.create_fsa_with_size(array_size)
        self.assertTrue(k2host.is_acyclic(fsa))

    def test_good_case2(self):
        s = r'''
        0 1 2 0
        0 2 1 0
        1 2 0 0
        1 3 5 0
        2 3 6 0
        3
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertTrue(k2host.is_acyclic(fsa))
class TestIsEmpty(unittest.TestCase):
    """k2host.is_empty: true only for an FSA with no states and no arcs."""

    def test_good_cases1(self):
        array_size = k2host.IntArray2Size(0, 0)
        fsa = k2host.Fsa.create_fsa_with_size(array_size)
        self.assertTrue(k2host.is_empty(fsa))

    def test_bad_case1(self):
        s = r'''
        0 1 2 0
        1
        '''
        fsa = k2host.str_to_fsa(s)
        self.assertFalse(k2host.is_empty(fsa))
# Allow running this file directly (normally driven via the ctest target
# named in the file header).
if __name__ == '__main__':
    unittest.main()
| 23.795918
| 74
| 0.561872
| 1,216
| 8,162
| 3.594572
| 0.125822
| 0.032487
| 0.019218
| 0.067262
| 0.744223
| 0.721803
| 0.704415
| 0.684969
| 0.661633
| 0.631434
| 0
| 0.08473
| 0.342073
| 8,162
| 342
| 75
| 23.865497
| 0.729237
| 0.122151
| 0
| 0.758893
| 0
| 0
| 0.210039
| 0
| 0
| 0
| 0
| 0
| 0.126482
| 1
| 0.126482
| false
| 0
| 0.011858
| 0
| 0.173913
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c35a1638fee74ca8bf00639684ca6fde0cde25da
| 6,034
|
py
|
Python
|
HW2-6/HW4/Code/CSCI567_hw4_fall16.py
|
suhail-ansari/Machine-Learning-Algortihms
|
e116c28848a2cb2132a09fcfdc0301ae89ebcf8b
|
[
"MIT"
] | null | null | null |
HW2-6/HW4/Code/CSCI567_hw4_fall16.py
|
suhail-ansari/Machine-Learning-Algortihms
|
e116c28848a2cb2132a09fcfdc0301ae89ebcf8b
|
[
"MIT"
] | null | null | null |
HW2-6/HW4/Code/CSCI567_hw4_fall16.py
|
suhail-ansari/Machine-Learning-Algortihms
|
e116c28848a2cb2132a09fcfdc0301ae89ebcf8b
|
[
"MIT"
] | null | null | null |
import hw_utils as ml_utils
from datetime import datetime
def main():
    # Entry point: loads the MiniBooNE dataset and times the run.  The full
    # experiment pipeline below is disabled — it is wrapped in a string
    # literal, so it is effectively commented out.
    start = datetime.now()
    print "Loading Data..."
    X_tr, y_tr, X_te, y_te = ml_utils.loaddata('./MiniBooNE_PID.txt')
    print X_tr.shape, y_tr.shape
    """
    print "Normalizing Data..."
    nX_tr, nX_te = ml_utils.normalize(X_tr, X_te)
    print "Starting Training..."
    linear_activations(nX_tr, y_tr, nX_te, y_te)
    sigmoid_activations(nX_tr, y_tr, nX_te, y_te)
    relu_activations(nX_tr, y_tr, nX_te, y_te)
    l2_regularization(nX_tr, y_tr, nX_te, y_te)
    best_reg_coeff = early_stopping_l2_regularization(nX_tr, y_tr, nX_te, y_te)
    print "\nbest_reg_coeff: {}\n".format(best_reg_coeff)
    best_decay = SGD_with_weight_decay(nX_tr, y_tr, nX_te, y_te, din=50, dout=2)
    print "\nbest_decay: {}\n".format(best_decay)
    best_momentum = momentum_fn(nX_tr, y_tr, nX_te, y_te, best_decay, din=50, dout=2)
    print "\nbest_momentum: {}\n".format(best_momentum)
    combination(nX_tr, y_tr, nX_te, y_te, best_reg_coeff, best_decay, best_momentum, din=50, dout=2)
    grid_search_with_cross_validation(nX_tr, y_tr, nX_te, y_te, din=50, dout=2)
    stop = datetime.now()
    print "Total Script Time: {}s".format((stop - start).total_seconds())
    """
def linear_activations(nX_tr, y_tr, nX_te, y_te, din=50, dout=2):
    """Benchmark linear-activation networks of increasing depth and width.

    Fix: converted Python 2 ``print`` statements to ``print()`` calls for
    Python 3 compatibility. Parameters are the normalized train/test splits
    plus input/output dimensions.
    """
    print("Linear Activations")
    # Depth sweep: fixed hidden width of 50, one to three hidden layers.
    archs_1 = [
        [din, dout],
        [din, 50, dout],
        [din, 50, 50, dout],
        [din, 50, 50, 50, dout],
    ]
    ml_utils.testmodels(nX_tr, y_tr, nX_te, y_te, archs_1, actfn='linear', sgd_lr=1e-3, verbose=0)
    # Width sweep: progressively larger hidden layers (default activation).
    archs_2 = [
        [din, 50, dout],
        [din, 500, dout],
        [din, 500, 300, dout],
        [din, 800, 500, 300, dout],
        [din, 800, 800, 500, 300, dout],
    ]
    ml_utils.testmodels(nX_tr, y_tr, nX_te, y_te, archs_2, sgd_lr=1e-3, verbose=0)
    print("Linear Activations - END")
def sigmoid_activations(nX_tr, y_tr, nX_te, y_te, din=50, dout=2):
    """Benchmark sigmoid-activation networks over a width/depth sweep.

    Fix: converted Python 2 ``print`` statements to ``print()`` calls for
    Python 3 compatibility.
    """
    print("Sigmoid Activations")
    archs = [
        [din, 50, dout],
        [din, 500, dout],
        [din, 500, 300, dout],
        [din, 800, 500, 300, dout],
        [din, 800, 800, 500, 300, dout],
    ]
    ml_utils.testmodels(nX_tr, y_tr, nX_te, y_te, archs, actfn='sigmoid', sgd_lr=1e-3, verbose=0)
    print("Sigmoid Activations - END")
def relu_activations(nX_tr, y_tr, nX_te, y_te, din=50, dout=2):
    """Benchmark ReLU-activation networks over a width/depth sweep.

    Fix: converted Python 2 ``print`` statements to ``print()`` calls for
    Python 3 compatibility. Uses a smaller learning rate (5e-4) than the
    linear/sigmoid runs.
    """
    print("ReLu Activations")
    archs = [
        [din, 50, dout],
        [din, 500, dout],
        [din, 500, 300, dout],
        [din, 800, 500, 300, dout],
        [din, 800, 800, 500, 300, dout],
    ]
    ml_utils.testmodels(nX_tr, y_tr, nX_te, y_te, archs, actfn='relu', sgd_lr=5e-4, verbose=0)
    print("ReLu Activations - END")
def l2_regularization(nX_tr, y_tr, nX_te, y_te, din=50, dout=2):
    """Sweep L2 regularization coefficients on a fixed ReLU architecture.

    Fix: converted Python 2 ``print`` statements to ``print()`` calls for
    Python 3 compatibility.
    """
    print("L2 Regularization")
    archs = [[din, 800, 500, 300, dout]]
    reg_coeffs = [1e-7, 5e-7, 1e-6, 5e-6, 1e-5]
    ml_utils.testmodels(nX_tr, y_tr, nX_te, y_te, archs, actfn='relu', reg_coeffs=reg_coeffs, sgd_lr=5e-4, verbose=0)
    print("L2 Regularization - END")
def early_stopping_l2_regularization(nX_tr, y_tr, nX_te, y_te, din=50, dout=2):
    """Sweep L2 coefficients with early stopping; return the best coefficient.

    Fix: converted Python 2 ``print`` statements to ``print()`` calls for
    Python 3 compatibility.

    Returns:
        The regularization coefficient selected by ml_utils.testmodels.
    """
    print("Early Stopping and L2-regularization")
    archs = [[din, 800, 500, 300, dout]]
    reg_coeffs = [1e-7, 5e-7, 1e-6, 5e-6, 1e-5]
    # Only the selected lambda is needed; the other returned fields are unused.
    architecture, _lambda, decay, momentum, actfn, best_acc = ml_utils.testmodels(
        nX_tr, y_tr, nX_te, y_te, archs, actfn='relu', reg_coeffs=reg_coeffs,
        sgd_lr=5e-4, EStop=True, verbose=0)
    print("Early Stopping and L2-regularization - END")
    return _lambda
def SGD_with_weight_decay(nX_tr, y_tr, nX_te, y_te, din=50, dout=2):
    """Sweep SGD learning-rate decay values; return the best decay.

    Fix: converted Python 2 ``print`` statements to ``print()`` calls for
    Python 3 compatibility.

    Returns:
        The decay value selected by ml_utils.testmodels.
    """
    print("SGD with weight decay")
    archs = [[din, 800, 500, 300, dout]]
    decays = [5e-5, 1e-4, 3e-4, 7e-4, 1e-3]
    # Momentum disabled (sgd_moms=[0.0]) so only the decay varies.
    architecture, _lambda, decay, momentum, actfn, best_acc = ml_utils.testmodels(
        nX_tr, y_tr, nX_te, y_te, archs,
        actfn='relu', last_act='softmax', reg_coeffs=[5e-7],
        num_epoch=100, batch_size=1000, sgd_lr=1e-5, sgd_decays=decays, sgd_moms=[0.0],
        sgd_Nesterov=False, EStop=False, verbose=0)
    print("SGD with weight decay - END")
    return decay
def momentum_fn(nX_tr, y_tr, nX_te, y_te, best_decay, din=50, dout=2):
    """Sweep Nesterov momentum values at a fixed decay; return the best one.

    Fix: converted Python 2 ``print`` statements to ``print()`` calls for
    Python 3 compatibility.

    Args:
        best_decay: decay selected by SGD_with_weight_decay.

    Returns:
        The momentum value selected by ml_utils.testmodels.
    """
    print("momentum")
    archs = [[din, 800, 500, 300, dout]]
    # NOTE(review): this decay list is unused — the call below fixes the decay
    # to [best_decay]; kept to preserve the original behavior.
    decays = [1e-5, 5e-5, 1e-4, 3e-4, 7e-4, 1e-3]
    architecture, _lambda, decay, momentum, actfn, best_acc = ml_utils.testmodels(
        nX_tr, y_tr, nX_te, y_te, archs, actfn='relu', last_act='softmax', reg_coeffs=[0.0],
        num_epoch=50, batch_size=1000, sgd_lr=1e-5, sgd_decays=[best_decay],
        sgd_moms=[0.99, 0.98, 0.95, 0.9, 0.85],
        sgd_Nesterov=True, EStop=False, verbose=0)
    print("momentum - END")
    return momentum
def combination(nX_tr, y_tr, nX_te, y_te, best_reg_coeff, best_decay, best_momentum, din=50, dout=2):
    """Train the fixed architecture with the best hyperparameters combined.

    Fix: converted Python 2 ``print`` statements to ``print()`` calls for
    Python 3 compatibility.
    """
    print("best combination")
    archs = [[din, 800, 500, 300, dout]]
    ml_utils.testmodels(
        nX_tr, y_tr, nX_te, y_te, archs, actfn='relu', last_act='softmax',
        reg_coeffs=[best_reg_coeff],
        num_epoch=100, batch_size=1000, sgd_lr=1e-5, sgd_decays=[best_decay],
        sgd_moms=[best_momentum],
        sgd_Nesterov=True, EStop=True, verbose=0)
    print("best combination - END")
def grid_search_with_cross_validation(nX_tr, y_tr, nX_te, y_te, din=50, dout=2):
    """Grid-search architectures x L2 coefficients x decays with early stopping.

    Fix: converted Python 2 ``print`` statements to ``print()`` calls for
    Python 3 compatibility.
    """
    print("Grid search with cross-validation")
    archs = [
        [din, 50, dout],
        [din, 500, dout],
        [din, 500, 300, dout],
        [din, 800, 500, 300, dout],
        [din, 800, 800, 500, 300, dout],
    ]
    reg_coeffs = [1e-7, 5e-7, 1e-6, 5e-6, 1e-5]
    decays = [1e-5, 5e-5, 1e-4]
    ml_utils.testmodels(
        nX_tr, y_tr, nX_te, y_te, archs, actfn='relu', last_act='softmax',
        reg_coeffs=reg_coeffs,
        num_epoch=100, batch_size=1000, sgd_lr=1e-5, sgd_decays=decays, sgd_moms=[0.99],
        sgd_Nesterov=True, EStop=True, verbose=0)
    print("Grid search with cross-validation - END")
# Script entry point: run the homework pipeline when executed directly.
if __name__ == "__main__":
    main()
| 32.793478
| 119
| 0.629102
| 1,001
| 6,034
| 3.532468
| 0.110889
| 0.025452
| 0.041007
| 0.05543
| 0.786765
| 0.744627
| 0.70362
| 0.661765
| 0.640837
| 0.640837
| 0
| 0.080171
| 0.226881
| 6,034
| 184
| 120
| 32.793478
| 0.677814
| 0
| 0
| 0.342857
| 0
| 0
| 0.10626
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.019048
| null | null | 0.190476
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c36132b473fdb5d9c9f231f589e9efd2ad61fbda
| 22
|
py
|
Python
|
fabric_cf/__init__.py
|
fabric-testbed/OrchestratorClient
|
ea8904d8c9d3e63f471b2a51ae5b5b4e5e0ca763
|
[
"MIT"
] | null | null | null |
fabric_cf/__init__.py
|
fabric-testbed/OrchestratorClient
|
ea8904d8c9d3e63f471b2a51ae5b5b4e5e0ca763
|
[
"MIT"
] | 9
|
2021-06-08T21:45:01.000Z
|
2021-12-03T15:39:48.000Z
|
fabric_cf/__init__.py
|
fabric-testbed/OrchestratorClient
|
ea8904d8c9d3e63f471b2a51ae5b5b4e5e0ca763
|
[
"MIT"
] | null | null | null |
__VERSION__ = "1.1b5"
| 11
| 21
| 0.681818
| 3
| 22
| 3.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 0.136364
| 22
| 1
| 22
| 22
| 0.421053
| 0
| 0
| 0
| 0
| 0
| 0.227273
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c382499ab445a5d8c442bd500300ae557a752b8f
| 3,212
|
py
|
Python
|
tests/integration/mci/test_dates_in_questions.py
|
qateam123/eq
|
704757952323647d659c49a71975c56406ff4047
|
[
"MIT"
] | null | null | null |
tests/integration/mci/test_dates_in_questions.py
|
qateam123/eq
|
704757952323647d659c49a71975c56406ff4047
|
[
"MIT"
] | 8
|
2020-03-24T15:24:18.000Z
|
2022-03-02T04:32:56.000Z
|
tests/integration/mci/test_dates_in_questions.py
|
qateam123/eq
|
704757952323647d659c49a71975c56406ff4047
|
[
"MIT"
] | null | null | null |
from tests.integration.create_token import create_token
from tests.integration.integration_test_case import IntegrationTestCase
class TestHappyPath(IntegrationTestCase):
    """Integration tests: the survey pages render the reporting-period dates.

    Refactor: ``try_date`` and ``try_another_date`` previously duplicated
    ~20 lines of identical navigation/assertion code; the shared flow now
    lives in ``_assert_dates_shown`` parameterized by the period dates.
    """

    def test_try_date_203(self):
        self.try_date('0203', '1')

    def test_try_another_data_203(self):
        # Name kept as-is ("data") so the test id does not change.
        self.try_another_date('0203', '1')

    def test_try_date_205(self):
        self.try_date('0205', '1')

    def test_try_another_date_205(self):
        self.try_another_date('0205', '1')

    def try_date(self, form_type_id, eq_id):
        # April 2016 reporting period.
        self._assert_dates_shown(form_type_id, eq_id,
                                 '2016-04-01', '2016-04-30',
                                 '1 April 2016', '30 April 2016')

    def try_another_date(self, form_type_id, eq_id):
        # August 2017 reporting period.
        self._assert_dates_shown(form_type_id, eq_id,
                                 '2017-08-01', '2017-08-31',
                                 '1 August 2017', '31 August 2017')

    def _assert_dates_shown(self, form_type_id, eq_id, start_date, end_date,
                            expected_start, expected_end):
        """Start a session, walk to the survey, and assert both dates render."""
        # Get a token and open the session.
        token = create_token(form_type_id, eq_id, start_date, end_date)
        resp = self.client.get('/session?token=' + token.decode(), follow_redirects=True)
        self.assertEqual(resp.status_code, 200)
        # We are on the landing page.
        content = resp.get_data(True)
        self.assertIn('<title>Introduction</title>', content)
        # We proceed to the questionnaire.
        post_data = {
            'action[start_questionnaire]': 'Start Questionnaire'
        }
        resp = self.client.post('/questionnaire/' + eq_id + '/' + form_type_id + '/789/introduction',
                                data=post_data, follow_redirects=False)
        self.assertEqual(resp.status_code, 302)
        block_one_url = resp.location
        resp = self.client.get(block_one_url, follow_redirects=False)
        self.assertEqual(resp.status_code, 200)
        # We are in the questionnaire.
        content = resp.get_data(True)
        self.assertIn('<title>Survey</title>', content)
        self.assertIn('>Monthly Business Survey - Retail Sales Index</', content)
        self.assertIn("What are the dates of the sales period you are reporting for?", content)
        self.assertIn(expected_start, content)
        self.assertIn(expected_end, content)
| 38.238095
| 141
| 0.657223
| 417
| 3,212
| 4.868106
| 0.206235
| 0.070936
| 0.074877
| 0.073892
| 0.8
| 0.755665
| 0.736946
| 0.715271
| 0.685714
| 0.685714
| 0
| 0.044409
| 0.228829
| 3,212
| 83
| 142
| 38.698795
| 0.775131
| 0.066625
| 0
| 0.566038
| 0
| 0
| 0.204819
| 0.050201
| 0
| 0
| 0
| 0
| 0.339623
| 1
| 0.113208
| false
| 0
| 0.037736
| 0
| 0.169811
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
6f0172d8d4a17e436cbbf3c046e97f48d0572542
| 138
|
py
|
Python
|
monasca_predictor/__init__.py
|
giacomolanciano/monasca-predictor
|
f0e07304d51cfa06b189ff1ea2350d26c4461329
|
[
"Apache-2.0"
] | null | null | null |
monasca_predictor/__init__.py
|
giacomolanciano/monasca-predictor
|
f0e07304d51cfa06b189ff1ea2350d26c4461329
|
[
"Apache-2.0"
] | null | null | null |
monasca_predictor/__init__.py
|
giacomolanciano/monasca-predictor
|
f0e07304d51cfa06b189ff1ea2350d26c4461329
|
[
"Apache-2.0"
] | null | null | null |
# Package initializer: configure logging first so every subsequently
# imported component inherits the "predictor" logging setup.
import monasca_predictor.common.util as util

# set up logging before importing any other components
util.initialize_logging("predictor")
| 27.6
| 54
| 0.833333
| 19
| 138
| 5.947368
| 0.789474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108696
| 138
| 4
| 55
| 34.5
| 0.918699
| 0.376812
| 0
| 0
| 0
| 0
| 0.107143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
6f1d6f25c88a4b1349fbb36a0a1b176732d6e506
| 215
|
py
|
Python
|
libs/SmartMeshSDK/HartMoteConnector/__init__.py
|
rgrr/smartmeshsdk
|
a95f3e4d9e2254d59d326428fef8c77319cd4373
|
[
"BSD-3-Clause"
] | 29
|
2015-02-17T14:22:14.000Z
|
2021-02-19T06:01:10.000Z
|
libs/SmartMeshSDK/HartMoteConnector/__init__.py
|
rgrr/smartmeshsdk
|
a95f3e4d9e2254d59d326428fef8c77319cd4373
|
[
"BSD-3-Clause"
] | 17
|
2017-02-10T09:43:13.000Z
|
2017-09-09T05:46:49.000Z
|
libs/SmartMeshSDK/HartMoteConnector/__init__.py
|
rgrr/smartmeshsdk
|
a95f3e4d9e2254d59d326428fef8c77319cd4373
|
[
"BSD-3-Clause"
] | 35
|
2015-07-10T18:58:15.000Z
|
2022-03-20T08:56:25.000Z
|
# Prefer the C implementation of the connector when the compiled
# extension is available; fall back to the pure-Python module otherwise.
# (Python 2 syntax — `print` statement.)
try:
    import HartMoteConnectorClib
    # Alias so callers always use the name HartMoteConnector.
    HartMoteConnector = HartMoteConnectorClib
    print 'Note: using the C implementation of the HartMoteConnector connector'
except ImportError:
    import HartMoteConnector
| 30.714286
| 79
| 0.795349
| 19
| 215
| 9
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176744
| 215
| 6
| 80
| 35.833333
| 0.966102
| 0
| 0
| 0
| 0
| 0
| 0.320574
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.5
| null | null | 0.166667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
6f2f9900fc1d3a7330abdfe0ffb0b148c2f42550
| 1,010
|
py
|
Python
|
tests/test_swf/test_utils.py
|
David-Wobrock/simpleflow
|
09f59105b48ae79aef37b506bbde0cd1f2c360d1
|
[
"MIT"
] | 69
|
2015-02-24T00:49:40.000Z
|
2022-02-05T02:35:04.000Z
|
tests/test_swf/test_utils.py
|
David-Wobrock/simpleflow
|
09f59105b48ae79aef37b506bbde0cd1f2c360d1
|
[
"MIT"
] | 295
|
2015-02-06T11:02:00.000Z
|
2022-03-21T11:01:34.000Z
|
tests/test_swf/test_utils.py
|
David-Wobrock/simpleflow
|
09f59105b48ae79aef37b506bbde0cd1f2c360d1
|
[
"MIT"
] | 27
|
2015-08-31T22:14:42.000Z
|
2022-02-08T07:25:01.000Z
|
# -*- coding:utf-8 -*-
import unittest
from swf.utils import *
class TestUtils(unittest.TestCase):
    """Tests for swf.utils.get_subkey traversal of nested dicts.

    Refactor: the identical ``base_dict`` fixture was duplicated in every
    test; it now lives in ``setUp``.
    """

    def setUp(self):
        # Shared fixture: one top-level key "a" mapping to two sub-keys.
        self.base_dict = {"a": {"1": 2, "2": 3}}

    def tearDown(self):
        pass

    def test_get_non_existent_subkey_from_first_level(self):
        self.assertIsNone(get_subkey(self.base_dict, "b"))

    def test_get_existent_subkey_from_first_level(self):
        self.assertEqual(get_subkey(self.base_dict, "a"), self.base_dict["a"])

    def test_get_non_existent_subkey_from_n_level(self):
        self.assertIsNone(get_subkey(self.base_dict, ["a", "3"]))

    def test_get_existent_subkey_from_n_level(self):
        self.assertEqual(get_subkey(self.base_dict, ["a", "1"]), 2)

    def test_get_existent_subkey_with_missing_parent_key(self):
        self.assertIsNone(get_subkey(self.base_dict, ["b", "1"]))
| 25.897436
| 68
| 0.609901
| 146
| 1,010
| 3.883562
| 0.253425
| 0.155203
| 0.142857
| 0.10582
| 0.756614
| 0.708995
| 0.673721
| 0.627866
| 0.627866
| 0.627866
| 0
| 0.031526
| 0.214851
| 1,010
| 38
| 69
| 26.578947
| 0.68348
| 0.019802
| 0
| 0.318182
| 0
| 0
| 0.024292
| 0
| 0
| 0
| 0
| 0
| 0.227273
| 1
| 0.318182
| false
| 0.090909
| 0.090909
| 0
| 0.454545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
6f469e4c931735950ab4050af39d78c8d1229fd2
| 67
|
py
|
Python
|
stock_alerter/__init__.py
|
rentes/test-driven-python-development
|
a0070b654e22349a54c5a6183446790f28239496
|
[
"MIT"
] | 1
|
2019-05-29T23:53:04.000Z
|
2019-05-29T23:53:04.000Z
|
stock_alerter/__init__.py
|
rentes/test-driven-python-development
|
a0070b654e22349a54c5a6183446790f28239496
|
[
"MIT"
] | null | null | null |
stock_alerter/__init__.py
|
rentes/test-driven-python-development
|
a0070b654e22349a54c5a6183446790f28239496
|
[
"MIT"
] | 2
|
2015-11-01T10:11:23.000Z
|
2021-04-19T21:32:25.000Z
|
# -*- coding: utf-8 -*-
"""Initialise the stock alerter package"""
| 22.333333
| 42
| 0.626866
| 8
| 67
| 5.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017544
| 0.149254
| 67
| 2
| 43
| 33.5
| 0.719298
| 0.880597
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
6f68faaf1a02cdb641a0d4707bb667b88b08e941
| 524
|
py
|
Python
|
tests/notebooks/mirror/ipynb_to_hydrogen/nteract_with_parameter.py
|
st--/jupytext
|
f8e8352859cc22e17b11154d0770fd946c4a430a
|
[
"MIT"
] | 5,378
|
2018-09-01T22:03:43.000Z
|
2022-03-31T06:51:42.000Z
|
tests/notebooks/mirror/ipynb_to_hydrogen/nteract_with_parameter.py
|
st--/jupytext
|
f8e8352859cc22e17b11154d0770fd946c4a430a
|
[
"MIT"
] | 812
|
2018-08-31T08:26:13.000Z
|
2022-03-30T18:12:11.000Z
|
tests/notebooks/mirror/ipynb_to_hydrogen/nteract_with_parameter.py
|
st--/jupytext
|
f8e8352859cc22e17b11154d0770fd946c4a430a
|
[
"MIT"
] | 380
|
2018-09-02T01:40:07.000Z
|
2022-03-25T13:57:23.000Z
|
# ---
# jupyter:
# kernel_info:
# name: python3
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# %% outputHidden=false inputHidden=false tags=["parameters"]
param = 4
# %% outputHidden=false inputHidden=false
import pandas as pd
# %% outputHidden=false inputHidden=false
df = pd.DataFrame({'A': [1, 2], 'B': [3 + param, 4]},
index=pd.Index(['x0', 'x1'], name='x'))
df
# %% outputHidden=false inputHidden=false
%matplotlib inline
df.plot(kind='bar')
| 20.96
| 61
| 0.624046
| 62
| 524
| 5.241935
| 0.580645
| 0.209231
| 0.344615
| 0.406154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023923
| 0.20229
| 524
| 24
| 62
| 21.833333
| 0.753589
| 0.589695
| 0
| 0
| 0
| 0
| 0.049751
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.142857
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
48b62ac233c4cf7e05b9bee2be74a30e789497ae
| 694
|
py
|
Python
|
scripts/patches/lightsail.py
|
compose-x/troposphere
|
9a94a8fafd8b4da1cd1f4239be0e7aa0681fd8d4
|
[
"BSD-2-Clause"
] | null | null | null |
scripts/patches/lightsail.py
|
compose-x/troposphere
|
9a94a8fafd8b4da1cd1f4239be0e7aa0681fd8d4
|
[
"BSD-2-Clause"
] | null | null | null |
scripts/patches/lightsail.py
|
compose-x/troposphere
|
9a94a8fafd8b4da1cd1f4239be0e7aa0681fd8d4
|
[
"BSD-2-Clause"
] | null | null | null |
# JSON-Patch-style operations applied to the AWS CloudFormation resource
# specification for Lightsail before troposphere class generation.
patches = [
    # Rename AWS::Lightsail::Instance.Disk to AWS::Lightsail::Instance.DiskProperty
    {
        "op": "move",
        "from": "/PropertyTypes/AWS::Lightsail::Instance.Disk",
        "path": "/PropertyTypes/AWS::Lightsail::Instance.DiskProperty",
    },
    # Point the Hardware.Disks item type at the renamed property.
    {
        "op": "replace",
        "path": "/PropertyTypes/AWS::Lightsail::Instance.Hardware/Properties/Disks/ItemType",
        "value": "DiskProperty",
    },
    # Remove Location and State attribute properties
    {
        "op": "remove",
        "path": "/PropertyTypes/AWS::Lightsail::Instance.Location",
    },
    {
        "op": "remove",
        "path": "/PropertyTypes/AWS::Lightsail::Instance.State",
    },
]
| 30.173913
| 93
| 0.583573
| 60
| 694
| 6.75
| 0.4
| 0.207407
| 0.345679
| 0.407407
| 0.523457
| 0.222222
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0.236311
| 694
| 22
| 94
| 31.545455
| 0.764151
| 0.178674
| 0
| 0.1
| 0
| 0
| 0.583774
| 0.463845
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
48c2f6b0b33d8352c130e2af01de0dbb1ffab2d1
| 22
|
py
|
Python
|
jenkins_job_wrecker/__init__.py
|
chenhuahuan/jenkins-job-wrecker
|
ebe639e1334e436a35a463118af35b9f5cecd655
|
[
"MIT"
] | 210
|
2017-12-18T09:53:04.000Z
|
2019-12-18T00:25:13.000Z
|
jenkins_job_wrecker/__init__.py
|
chenhuahuan/jenkins-job-wrecker
|
ebe639e1334e436a35a463118af35b9f5cecd655
|
[
"MIT"
] | 40
|
2015-08-14T23:11:08.000Z
|
2019-11-11T22:27:29.000Z
|
jenkins_job_wrecker/__init__.py
|
chenhuahuan/jenkins-job-wrecker
|
ebe639e1334e436a35a463118af35b9f5cecd655
|
[
"MIT"
] | 29
|
2018-02-05T18:55:57.000Z
|
2019-12-27T18:25:46.000Z
|
__version__ = '1.7.1'
| 11
| 21
| 0.636364
| 4
| 22
| 2.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 0.136364
| 22
| 1
| 22
| 22
| 0.368421
| 0
| 0
| 0
| 0
| 0
| 0.227273
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
48dc261d875e4e734311f7cf8fa99887055deb84
| 520
|
py
|
Python
|
xmatters/__init__.py
|
matthewhenry1/pyxmatters
|
9c376e370b30f0b8b2e589ba05fed26dad2814ae
|
[
"MIT"
] | 1
|
2021-08-09T17:33:38.000Z
|
2021-08-09T17:33:38.000Z
|
xmatters/__init__.py
|
xmatters/pyxmatters
|
ebe4a88558b7eb3408b59fbf9cabed3a748601a0
|
[
"MIT"
] | null | null | null |
xmatters/__init__.py
|
xmatters/pyxmatters
|
ebe4a88558b7eb3408b59fbf9cabed3a748601a0
|
[
"MIT"
] | 2
|
2020-04-03T22:30:32.000Z
|
2020-04-20T13:44:18.000Z
|
from .rest.api import *
from .rest.device import *
from .rest.group import *
from .rest.libraries import *
from .rest.person import *
from .rest.plans import *
from .rest.roster import *
from .rest.shift import *
from .rest.site import *
from .rest.dynamic_teams import *
from .rest.oncall import *
from .rest.event import *
from .rest.audit import *
# the collection must always be at the bottom since it references other modules
from .rest.collection import *
from .util.column import *
from .util.timecalc import *
| 26
| 79
| 0.751923
| 78
| 520
| 5
| 0.410256
| 0.287179
| 0.430769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 520
| 19
| 80
| 27.368421
| 0.886364
| 0.148077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
48dcc6c8f2212815b103d9818355fcb88d77f1e7
| 3,483
|
py
|
Python
|
ocdsmerge/fixtures/mergeid_example.py
|
kindly/ocds-merge
|
c8d446c67a149abe341722cb25624cb24cada644
|
[
"BSD-3-Clause"
] | null | null | null |
ocdsmerge/fixtures/mergeid_example.py
|
kindly/ocds-merge
|
c8d446c67a149abe341722cb25624cb24cada644
|
[
"BSD-3-Clause"
] | null | null | null |
ocdsmerge/fixtures/mergeid_example.py
|
kindly/ocds-merge
|
c8d446c67a149abe341722cb25624cb24cada644
|
[
"BSD-3-Clause"
] | null | null | null |
releases = [
{
"ocid": "A",
"id": "1",
"date": "2014-01-01",
"tag": ["tender"],
"tender": {
"items": [
{
"id": "1",
"description": "Item 1",
"quantity": 1
},
{
"id": "2",
"description": "Item 2",
"quantity": 1
}
]
}
},
{
"ocid": "A",
"id": "2",
"date": "2014-01-02",
"tag": ["tender"],
"tender": {
"items": [
{
"id": "1",
"description": "Item 1",
"quantity": 2
},
{
"id": "3",
"description": "Item 3",
"quantity": 1
}
]
}
}
]
compiledRelease = {
"ocid": "A",
"id": "2",
"date": "2014-01-02",
"tag": ["compiled"],
"tender": {
"items": [
{
"id": "1",
"description": "Item 1",
"quantity": 2
},
{
"id": "2",
"description": "Item 2",
"quantity": 1
},
{
"id": "3",
"description": "Item 3",
"quantity": 1
}
]
}
}
versionedRelease = {
"ocid": "A",
"tender": {
"items": [
{
"id": "1",
"description": [
{
"value": "Item 1",
"releaseDate": "2014-01-01",
"releaseTag": ["tender"],
"releaseID": "1"
}
],
"quantity": [
{
"value": 1,
"releaseDate": "2014-01-01",
"releaseTag": ["tender"],
"releaseID": "1"
},
{
"value": 2,
"releaseDate": "2014-01-02",
"releaseTag": ["tender"],
"releaseID": "2"
}
]
},
{
"id": "2",
"description": [
{
"value": "Item 2",
"releaseDate": "2014-01-01",
"releaseTag": ["tender"],
"releaseID": "1"
}
],
"quantity": [
{
"value": 1,
"releaseDate": "2014-01-01",
"releaseTag": ["tender"],
"releaseID": "1"
},
]
},
{
"id": "3",
"description": [
{
"value": "Item 3",
"releaseDate": "2014-01-02",
"releaseTag": ["tender"],
"releaseID": "2"
}
],
"quantity": [
{
"value": 1,
"releaseDate": "2014-01-02",
"releaseTag": ["tender"],
"releaseID": "2"
},
]
}
]
}
}
| 24.702128
| 52
| 0.231984
| 177
| 3,483
| 4.564972
| 0.135593
| 0.074257
| 0.147277
| 0.069307
| 0.831683
| 0.800743
| 0.783416
| 0.649752
| 0.482673
| 0.428218
| 0
| 0.090841
| 0.617571
| 3,483
| 140
| 53
| 24.878571
| 0.515766
| 0
| 0
| 0.554745
| 0
| 0
| 0.226307
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
48eed93ac827b534015eaac19161917a33513480
| 232
|
py
|
Python
|
wtl/streaming_log_response/urls.py
|
elegion/djangodash2013
|
3814123f9bff213a5d74db05db3caa83caea731c
|
[
"MIT"
] | null | null | null |
wtl/streaming_log_response/urls.py
|
elegion/djangodash2013
|
3814123f9bff213a5d74db05db3caa83caea731c
|
[
"MIT"
] | 1
|
2017-09-19T17:06:49.000Z
|
2017-09-19T17:06:49.000Z
|
wtl/streaming_log_response/urls.py
|
elegion/djangodash2013
|
3814123f9bff213a5d74db05db3caa83caea731c
|
[
"MIT"
] | null | null | null |
from django.conf.urls import patterns, url

# URL routes for the streaming-log-response test views. Uses the legacy
# Django patterns() prefix syntax (removed in Django 1.10).
urlpatterns = patterns(
    'wtl.streaming_log_response.views',
    url(r'^$', 'test', name='streamin_log_response_test'),
    # NOTE(review): this route reuses the same `name` as the one above, so
    # reverse() can only resolve one of them; "streamin" also looks like a
    # typo for "streaming". Renaming would change reverse-lookup behavior —
    # confirm intent before fixing.
    url(r'^2$', 'test2', name='streamin_log_response_test'),
)
| 23.2
| 60
| 0.698276
| 31
| 232
| 4.967742
| 0.612903
| 0.214286
| 0.194805
| 0.298701
| 0.350649
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00995
| 0.133621
| 232
| 9
| 61
| 25.777778
| 0.756219
| 0
| 0
| 0
| 0
| 0
| 0.422414
| 0.362069
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
d293f2fec1cd964529f199f2da44e5c02d163228
| 538
|
py
|
Python
|
4_Data_Driven_Testing/File_management/example6-fileobject.py
|
turovod/Otus
|
57433c6944bca155177b07ff361139ff30f7f692
|
[
"MIT"
] | null | null | null |
4_Data_Driven_Testing/File_management/example6-fileobject.py
|
turovod/Otus
|
57433c6944bca155177b07ff361139ff30f7f692
|
[
"MIT"
] | null | null | null |
4_Data_Driven_Testing/File_management/example6-fileobject.py
|
turovod/Otus
|
57433c6944bca155177b07ff361139ff30f7f692
|
[
"MIT"
] | null | null | null |
# Demonstration of the text-file object API: write, rewind, inspect, truncate.
# Fixes: removed the redundant f.close() (the `with` block already closes the
# file on exit) and collapsed five copy-pasted readline/print lines into a loop.
with open("mynewtextfile.txt", "w+") as f:
    f.writelines("\nOtus we are learning python\nOtus we are learning python\nOtus we are learning python")
    f.seek(0)
    print(f.readlines())
    print("Is readable:", f.readable())
    print("Is writeable:", f.writable())
    print("File no:", f.fileno())
    print("Is connected to tty-like device:", f.isatty())
    # Truncate to the first 20 bytes, then re-read line by line; after the
    # remaining content is exhausted, readline() returns empty strings.
    f.truncate(20)
    f.flush()
    f.seek(0)
    for _ in range(5):
        print(f.readline())
| 31.647059
| 107
| 0.624535
| 79
| 538
| 4.253165
| 0.443038
| 0.107143
| 0.208333
| 0.22619
| 0.47619
| 0.422619
| 0.422619
| 0.422619
| 0.422619
| 0.422619
| 0
| 0.009174
| 0.189591
| 538
| 17
| 108
| 31.647059
| 0.761468
| 0
| 0
| 0.411765
| 0
| 0
| 0.317254
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.588235
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
d2cd956f42c999fffe303ac09ceb7d170d1eb44b
| 805
|
py
|
Python
|
client/data/email_builder.py
|
jurandirjdsilva/pycryptomail
|
baa20875627470f68273a4224db493ddffd2b408
|
[
"MIT"
] | 1
|
2018-04-20T19:17:45.000Z
|
2018-04-20T19:17:45.000Z
|
client/data/email_builder.py
|
jurandirjdsilva/pycryptomail
|
baa20875627470f68273a4224db493ddffd2b408
|
[
"MIT"
] | null | null | null |
client/data/email_builder.py
|
jurandirjdsilva/pycryptomail
|
baa20875627470f68273a4224db493ddffd2b408
|
[
"MIT"
] | 1
|
2018-05-25T16:07:20.000Z
|
2018-05-25T16:07:20.000Z
|
class EmailBuilder:
    """Fluent builder that assembles an email message dict field by field."""

    def __init__(self):
        # Accumulates the message fields set so far.
        self.message = {}

    def set_from_email(self, email):
        """Set the sender address; returns self for chaining."""
        self.message['from'] = email
        return self

    def set_receiver_email(self, email):
        """Set the receiver address; returns self for chaining.

        Bug fix: the original omitted ``return self`` here (returning None),
        breaking the fluent interface every other setter provides.
        """
        self.message['receiver'] = email
        return self

    def set_cc_emails(self, emails):
        """Set the CC address list; returns self for chaining."""
        self.message['cc'] = emails
        return self

    def set_attachaments(self, attachments):
        """Set the attachment list; returns self for chaining.

        Method name kept as-is (including the spelling) for compatibility.
        """
        self.message['attachments'] = attachments
        return self

    def set_subject(self, subject):
        """Set the subject line; returns self for chaining."""
        self.message['subject'] = subject
        return self

    def set_msg(self, message):
        """Set the message body; returns self for chaining."""
        self.message['message'] = message
        return self

    def set_priority(self, priority):
        """Set the priority field; returns self for chaining."""
        self.message['priority'] = priority
        return self

    def build(self):
        """Return the assembled message dict."""
        return self.message
| 23.676471
| 49
| 0.613665
| 91
| 805
| 5.274725
| 0.208791
| 0.229167
| 0.1625
| 0.166667
| 0.104167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.284472
| 805
| 33
| 50
| 24.393939
| 0.833333
| 0
| 0
| 0.24
| 0
| 0
| 0.058385
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.36
| false
| 0
| 0
| 0.04
| 0.68
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
d2e0cdda7cda40e01166dad17e062c12c346e2cd
| 8,380
|
py
|
Python
|
suncasa/suncasatasks/calibeovsa.py
|
wyq24/suncasa
|
e6ed6d8b9bd2186c4af6d0354d03af5fff9aef7a
|
[
"BSD-2-Clause"
] | 2
|
2018-02-12T09:34:23.000Z
|
2019-07-16T18:25:12.000Z
|
suncasa/suncasatasks/calibeovsa.py
|
wulinhui1/suncasa-src
|
1f94aaabaf6a3911fa532648ec6676a221553436
|
[
"BSD-2-Clause"
] | 26
|
2016-11-09T17:11:45.000Z
|
2021-08-20T13:41:50.000Z
|
suncasa/suncasatasks/calibeovsa.py
|
wulinhui1/suncasa-src
|
1f94aaabaf6a3911fa532648ec6676a221553436
|
[
"BSD-2-Clause"
] | 17
|
2016-10-27T18:35:46.000Z
|
2021-08-03T05:33:57.000Z
|
##################### generated by xml-casa (v2) from calibeovsa.xml ################
##################### f295775a00a77dd77b81d6e69924145c ##############################
from __future__ import absolute_import
import numpy
from casatools.typecheck import CasaValidator as _val_ctor
_pc = _val_ctor( )
from casatools.coercetype import coerce as _coerce
from .private.task_calibeovsa import calibeovsa as _calibeovsa_t
from casatasks.private.task_logging import start_log as _start_log
from casatasks.private.task_logging import end_log as _end_log
class _calibeovsa:
"""
calibeovsa ---- Calibrating EOVSA one or more measurement sets using calibration products in the SQL database.
Calibrating EOVSA one or more measurement sets using calibration products in the SQL database. This task currently only works on pipeline.
--------- parameter descriptions ---------------------------------------------
vis input EOVSA (uncalibrated) measurement set(s).
caltype Types of calibrations to perform
caltbdir Directory to place calibration tables.
interp Temporal interpolation for phacal table(s) (nearest or linear)
docalib If False, only create the calibration tables but do not perform applycal.
doflag If true then perform flagging.
flagant Antennas to be flagged. Follow CASA syntax of "antenna".
doimage If True, produce a quicklook image after calibration (sunpy must be installed).
imagedir directory to place output images. Default current directory.
antenna antenna/baselines to be used for imaging. Follow CASA syntax of "antenna".
timerange Timerange to be imaged. Follow CASA syntax of "timerange". Default is the entire duration of the ms.
spw spectral windows to be imaged. Follow CASA syntax of "spw".
stokes stokes to be imaged. Follow CASA syntax of "stokes".
dosplit If True, plit the corrected data column as output visibility file.
outputvis Name of output visibility file. Default is the name of the first vis file ended with ".corrected.ms".
doconcat If True, and if more than one visibility dataset provided, concatenate all into one visibility.
concatvis Name of output visibility file. Default is the name of the first + last vis file ended with ".corrected.ms".
keep_orig_ms Keep the original seperated ms datasets after split?
--------- examples -----------------------------------------------------------
Calibrating EOVSA one or more measurement sets using calibration products in the SQL database.
Detailed Keyword arguments:
vis -- Name of input EOVSA measurement set dataset(s)
default: none. Must be supplied
example: vis = 'IDB20160524000518.ms'
example: vis = ['IDB20160524000518.ms','IDB20160524000528.ms']
caltype -- list. Type of calibrations to be applied.
'refpha': reference phase calibration
'refamp': reference amplitude calibration (not used anymore)
'phacal': daily phase calibration
'fluxcal': flux calibration based on total-power measurements
default value: ['refpha','phacal']
*** note fluxcal is already implemented in udb_corr when doing importeovsa, should not be used anymore ****
*** pipeline only uses ['refpha','phacal']
caltbdir -- string. Place to hold calibration tables. Default is current directory. Pipeline should use /data1/eovsa/caltable
interp -- string. How interpolation is done for phacal? 'nearest' or 'linear'
docalib -- boolean. Default True. If False, only create the calibration tables but do not perform applycal
doflag -- boolean. Default True. Peforming flags?
flagant -- string. Follow CASA antenna selection syntax. Default '13~15'.
doimage -- boolean. Default False. If true, make a quicklook image using the specified time range and specified spw range
imagedir -- string. Directory to place the output image.
antenna -- string. Default '0~12'. Antenna/baselines to be used for imaging. Follow CASA antenna selection syntax.
timerange -- string. Default '' (the whole duration of the visibility data). Follow CASA timerange syntax.
e.g., '2017/07/11/20:16:00~2017/07/11/20:17:00'
spw -- string. Default '1~3'. Follow CASA spw selection syntax.
stokes -- string. Which stokes for the quicklook image. CASA syntax. Default 'XX'
dosplit -- boolean. Split the corrected data column?
outputvis -- string. Output visibility file after split
doconcat -- boolean. If more than one visibility dataset provided, concatenate all into one or make separate outputs if True
concatvis -- string. Output visibility file after concatenation
keep_orig_ms -- boolean. Default True. Inherited from suncasa.eovsa.concateovsa.
Keep the original seperated ms datasets after concatenation?
"""
_info_group_ = """Calibration"""
_info_desc_ = """Calibrating EOVSA one or more measurement sets using calibration products in the SQL database."""
# NOTE(review): this wrapper is machine-generated (CASA-style task binding).
# The mutable default ``caltype=[ ]`` is only read, never mutated, so the
# shared-default pitfall does not bite here — but confirm before refactoring.
def __call__( self, vis='', caltype=[ ], caltbdir='', interp='nearest', docalib=True, doflag=True, flagant='13~15', doimage=False, imagedir='.', antenna='0~12', timerange='', spw='1~3', stokes='XX', dosplit=False, outputvis='', doconcat=False, concatvis='', keep_orig_ms=True ):
    """Validate the task parameters, log the call, and run the calibeovsa task.

    Each argument mirrors the task parameter of the same name documented in
    the class docstring. Values are coerced/validated against ``schema``
    before being forwarded to the underlying ``_calibeovsa_t`` implementation.
    Raises AssertionError when validation fails.
    """
    # Cerberus-style validation schema: 'anyof' entries accept either a single
    # string or a vector of strings (e.g. one or many visibility paths).
    schema = {'vis': {'anyof': [{'type': 'cStr', 'coerce': _coerce.to_str}, {'type': 'cStrVec', 'coerce': [_coerce.to_list,_coerce.to_strvec]}]}, 'caltype': {'anyof': [{'type': 'cStr', 'coerce': _coerce.to_str}, {'type': 'cStrVec', 'coerce': [_coerce.to_list,_coerce.to_strvec]}]}, 'caltbdir': {'type': 'cStr', 'coerce': _coerce.to_str}, 'interp': {'type': 'cStr', 'coerce': _coerce.to_str}, 'docalib': {'type': 'cBool'}, 'doflag': {'type': 'cBool'}, 'flagant': {'type': 'cStr', 'coerce': _coerce.to_str}, 'doimage': {'type': 'cBool'}, 'imagedir': {'type': 'cStr', 'coerce': _coerce.to_str}, 'antenna': {'type': 'cStr', 'coerce': _coerce.to_str}, 'timerange': {'type': 'cStr', 'coerce': _coerce.to_str}, 'spw': {'type': 'cStr', 'coerce': _coerce.to_str}, 'stokes': {'type': 'cStr', 'coerce': _coerce.to_str}, 'dosplit': {'type': 'cBool'}, 'outputvis': {'anyof': [{'type': 'cStr', 'coerce': _coerce.to_str}, {'type': 'cStrVec', 'coerce': [_coerce.to_list,_coerce.to_strvec]}]}, 'doconcat': {'type': 'cBool'}, 'concatvis': {'type': 'cStr', 'coerce': _coerce.to_str}, 'keep_orig_ms': {'type': 'cBool'}}
    # Bundle the raw arguments for validation; _pc.validate coerces them and
    # leaves the cleaned values in _pc.document.
    doc = {'vis': vis, 'caltype': caltype, 'caltbdir': caltbdir, 'interp': interp, 'docalib': docalib, 'doflag': doflag, 'flagant': flagant, 'doimage': doimage, 'imagedir': imagedir, 'antenna': antenna, 'timerange': timerange, 'spw': spw, 'stokes': stokes, 'dosplit': dosplit, 'outputvis': outputvis, 'doconcat': doconcat, 'concatvis': concatvis, 'keep_orig_ms': keep_orig_ms}
    assert _pc.validate(doc,schema), str(_pc.errors)
    # Record the (coerced) parameter values in the task log before execution.
    _logging_state_ = _start_log( 'calibeovsa', [ 'vis=' + repr(_pc.document['vis']), 'caltype=' + repr(_pc.document['caltype']), 'caltbdir=' + repr(_pc.document['caltbdir']), 'interp=' + repr(_pc.document['interp']), 'docalib=' + repr(_pc.document['docalib']), 'doflag=' + repr(_pc.document['doflag']), 'flagant=' + repr(_pc.document['flagant']), 'doimage=' + repr(_pc.document['doimage']), 'imagedir=' + repr(_pc.document['imagedir']), 'antenna=' + repr(_pc.document['antenna']), 'timerange=' + repr(_pc.document['timerange']), 'spw=' + repr(_pc.document['spw']), 'stokes=' + repr(_pc.document['stokes']), 'dosplit=' + repr(_pc.document['dosplit']), 'outputvis=' + repr(_pc.document['outputvis']), 'doconcat=' + repr(_pc.document['doconcat']), 'concatvis=' + repr(_pc.document['concatvis']), 'keep_orig_ms=' + repr(_pc.document['keep_orig_ms']) ] )
    # Run the actual task implementation and close out the log entry.
    return _end_log( _logging_state_, 'calibeovsa', _calibeovsa_t( _pc.document['vis'], _pc.document['caltype'], _pc.document['caltbdir'], _pc.document['interp'], _pc.document['docalib'], _pc.document['doflag'], _pc.document['flagant'], _pc.document['doimage'], _pc.document['imagedir'], _pc.document['antenna'], _pc.document['timerange'], _pc.document['spw'], _pc.document['stokes'], _pc.document['dosplit'], _pc.document['outputvis'], _pc.document['doconcat'], _pc.document['concatvis'], _pc.document['keep_orig_ms'] ) )
calibeovsa = _calibeovsa( )
| 74.159292
| 1,103
| 0.678998
| 1,031
| 8,380
| 5.384093
| 0.237633
| 0.064853
| 0.045397
| 0.043235
| 0.30481
| 0.269501
| 0.205909
| 0.176725
| 0.176725
| 0.160872
| 0
| 0.01612
| 0.163484
| 8,380
| 112
| 1,104
| 74.821429
| 0.775749
| 0.497017
| 0
| 0
| 1
| 0
| 0.2891
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 1
| 0.055556
| false
| 0
| 0.388889
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
d2f68391546f02bbc5d5716562ac27bb7086e6a3
| 61
|
py
|
Python
|
app/components/OpenLogger/__init__.py
|
Eksno/DevOps_Flask_Login
|
6d5034a3dd719a25ac410093628f033edbbde17c
|
[
"Apache-2.0"
] | 1
|
2022-01-06T09:21:49.000Z
|
2022-01-06T09:21:49.000Z
|
app/components/OpenLogger/__init__.py
|
Eksno/DevOps_Flask_Login
|
6d5034a3dd719a25ac410093628f033edbbde17c
|
[
"Apache-2.0"
] | null | null | null |
app/components/OpenLogger/__init__.py
|
Eksno/DevOps_Flask_Login
|
6d5034a3dd719a25ac410093628f033edbbde17c
|
[
"Apache-2.0"
] | null | null | null |
from .open_logger import configure_loggers, configure_logger
| 30.5
| 60
| 0.885246
| 8
| 61
| 6.375
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081967
| 61
| 1
| 61
| 61
| 0.910714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
960fa873b5a677c342e19b1a6148a00aaa1b07f4
| 112
|
py
|
Python
|
match_full_name.py
|
GYosifov88/Python-Fundamentals
|
b46ba2822bd2dac6ff46830c6a520e559b448442
|
[
"MIT"
] | null | null | null |
match_full_name.py
|
GYosifov88/Python-Fundamentals
|
b46ba2822bd2dac6ff46830c6a520e559b448442
|
[
"MIT"
] | null | null | null |
match_full_name.py
|
GYosifov88/Python-Fundamentals
|
b46ba2822bd2dac6ff46830c6a520e559b448442
|
[
"MIT"
] | null | null | null |
import re

# "Full name" means exactly two capitalised words (initial uppercase letter
# followed by lowercase letters) separated by a single space.
NAME_PATTERN = re.compile(r"\b[A-Z][a-z]+ [A-Z][a-z]+\b")

line = input()
full_names = NAME_PATTERN.findall(line)
print(' '.join(full_names))
| 14
| 58
| 0.571429
| 22
| 112
| 2.909091
| 0.545455
| 0.125
| 0.140625
| 0.1875
| 0.125
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133929
| 112
| 7
| 59
| 16
| 0.659794
| 0
| 0
| 0
| 0
| 0.25
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0.25
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
9623952d22c36d9e71c1be2649e9bfe8c76188ea
| 372
|
py
|
Python
|
cleanup/__main__.py
|
luist18/feup-pri-proj
|
ec9272edc27375cffab6387f116b602cd0231371
|
[
"MIT"
] | null | null | null |
cleanup/__main__.py
|
luist18/feup-pri-proj
|
ec9272edc27375cffab6387f116b602cd0231371
|
[
"MIT"
] | null | null | null |
cleanup/__main__.py
|
luist18/feup-pri-proj
|
ec9272edc27375cffab6387f116b602cd0231371
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from cleanup import cleaner


def main():
    """Run the scraper-output cleanup passes in order: books, sections, articles.

    Each ``cleaner.*`` call takes the path of a CSV file produced by the
    scraper; ``articles`` additionally receives the article-version CSV.
    """
    cleaner.books("data/scraper/book.csv")
    cleaner.sections("data/scraper/section.csv")
    cleaner.articles("data/scraper/article.csv",
                     "data/scraper/article_version.csv")
    # Intentionally disabled pass — presumably superseded by cleaner.articles
    # handling the version file itself; confirm before re-enabling.
    # cleaner.article_versions("data/scraper/article_version.csv")


if __name__ == '__main__':
    main()
| 23.25
| 66
| 0.66129
| 44
| 372
| 5.340909
| 0.5
| 0.234043
| 0.229787
| 0.212766
| 0.238298
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003289
| 0.182796
| 372
| 15
| 67
| 24.8
| 0.769737
| 0.22043
| 0
| 0
| 0
| 0
| 0.379791
| 0.351916
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| true
| 0
| 0.125
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
9624a7da670ac89ed23a5aca134434019a395a13
| 5,012
|
py
|
Python
|
app/models.py
|
CheropS/PitchPerfect
|
94841bf87f1f8efdc420f2d24415e197f46b5e8f
|
[
"MIT"
] | null | null | null |
app/models.py
|
CheropS/PitchPerfect
|
94841bf87f1f8efdc420f2d24415e197f46b5e8f
|
[
"MIT"
] | null | null | null |
app/models.py
|
CheropS/PitchPerfect
|
94841bf87f1f8efdc420f2d24415e197f46b5e8f
|
[
"MIT"
] | null | null | null |
from werkzeug.security import generate_password_hash,check_password_hash
from . import db
from flask_login import UserMixin
from . import login_manager
from datetime import datetime
from dataclasses import dataclass
@login_manager.user_loader
def load_user(pitch_id):
    """Flask-Login user loader: resolve a session id to a User row (or None)."""
    # NOTE(review): the parameter is named pitch_id but is used as a User
    # primary key — rename to user_id for clarity once callers are confirmed.
    return User.query.get(int(pitch_id))
# class Pitch(db.Model):
# __tablename__='pitch'
# '''This is a class that defines Pitch class
# '''
# id=db.Column(db.Integer, primary_key= True)
# category=db.Column(db.String(255))
# title=db.Column(db.String(255))
# description=db.Column(db.Text())
# pitch=db.Column(db.Text())
# publishedtime=db.Column(db.DateTime, default=datetime.utcnow)
# upvote=db.relationship('Upvote', backref='pitch', lazy='dynamic')
# downvote=db.relationship('Downvote', backref='pitch', lazy='dynamic')
# comment=db.relationship('Comment', backref='pitch', lazy='dynamic')
# pass_secure = db.Column(db.String(255))
# def save_pitch(self):
# db.session.add(self)
# db.session.commit()
# def delete_pitch(self):
# db.session.delete(self)
# db.session.commit()
# def __repr__(self):
# return f'Pitch {self.post}'
# @property
# def password(self):
# raise AttributeError('You cannot read the password attribute')
# @password.setter
# def password(self, password):
# self.pass_secure = generate_password_hash(password)
# def verify_password(self,password):
# return check_password_hash(self.pass_secure,password)
class User(UserMixin, db.Model):
    """An application account, with hashed-password handling via werkzeug."""
    __tablename__ = 'users'
    id = db.Column(db.Integer,primary_key = True)
    username = db.Column(db.String(255))
    bio=db.Column(db.String(255))
    pic=db.Column(db.String(255))
    email=db.Column(db.String(255), unique=True)
    # Foreign key into the 'pitch' table (see UPitch below).
    pitch_id = db.Column(db.Integer,db.ForeignKey('pitch.id'))
    upvote=db.relationship('Upvote', backref='user', lazy='dynamic')
    downvote=db.relationship('Downvotes', backref='user', lazy='dynamic')
    # comment=db.relationship('Comment', backref='pitch', lazy='dynamic')
    # password_secure=db.Column(db.String(255), nullable=False)
    password_hash = db.Column(db.String(255))

    @property
    def password(self):
        # The plaintext password is write-only by design.
        raise AttributeError('cannot be accessed')

    @password.setter
    def password(self, password):
        # Store only the salted hash, never the plaintext.
        self.password_hash=generate_password_hash(password)

    def verify_password(self, password):
        """Return True if *password* matches the stored hash."""
        return check_password_hash(self.password_hash, password)

    def __repr__(self):
        return f'User {self.username}'
class UPitch(db.Model):
    """A pitch posted by a user, with vote relationships and save/delete helpers."""
    __tablename__ = 'pitch'
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(255))
    category = db.Column(db.String(255))
    description = db.Column(db.String(255))
    pitch = db.Column(db.Text())
    publishedtime = db.Column(db.DateTime, default=datetime.utcnow)
    upvote = db.relationship('Upvote', backref='pitch', lazy='dynamic')
    downvote = db.relationship('Downvotes', backref='pitch', lazy='dynamic')
    # comment=db.relationship('Comment', backref='pitch', lazy='dynamic')
    pass_secure = db.Column(db.String(255))

    def save_pitch(self):
        """Persist this pitch to the database."""
        db.session.add(self)
        db.session.commit()

    def delete_pitch(self):
        """Remove this pitch from the database."""
        db.session.delete(self)
        db.session.commit()

    def __repr__(self):
        # BUG FIX: the original referenced self.post, an attribute this class
        # never defines, so repr() always raised AttributeError. Use the
        # pitch's title instead.
        return f'UPitch {self.title}'

    @property
    def password(self):
        # Write-only: plaintext passwords are never readable.
        raise AttributeError('You cannot read the password attribute')

    @password.setter
    def password(self, password):
        self.pass_secure = generate_password_hash(password)

    def verify_password(self, password):
        """Return True if *password* matches the stored hash."""
        return check_password_hash(self.pass_secure, password)
# NOTE(review): @dataclass on a SQLAlchemy model with no annotated fields is
# effectively a no-op (it generates nothing) — confirm whether it was meant
# to enable JSON serialization of the columns.
@dataclass
class Upvote(db.Model):
    """One upvote by a user on a pitch (user_id, pitch_id pair)."""
    __tablename__ = 'upvote'
    __table_args__ = {'extend_existing': True}
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    pitch_id = db.Column(db.Integer, db.ForeignKey('pitch.id'))

    def save(self):
        """Persist this upvote."""
        db.session.add(self)
        db.session.commit()

    @classmethod
    def get_upvotes(cls, id):
        """Return all upvotes recorded for the pitch with the given id."""
        return Upvote.query.filter_by(pitch_id=id).all()

    def __repr__(self):
        return f'{self.user_id}:{self.pitch_id}'
# NOTE(review): @dataclass here is effectively a no-op (no annotated fields),
# same as on Upvote — confirm intent.
@dataclass
class Downvotes(db.Model):
    """One downvote by a user on a pitch (user_id, pitch_id pair)."""
    __tablename__ = 'downvote'
    __table_args__ = {'extend_existing': True}
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    pitch_id = db.Column(db.Integer, db.ForeignKey('pitch.id'))

    def save(self):
        """Persist this downvote."""
        db.session.add(self)
        db.session.commit()

    @classmethod
    def get_downvotes(cls, id):
        """Return all downvotes recorded for the pitch with the given id."""
        return Downvotes.query.filter_by(pitch_id=id).all()

    def __repr__(self):
        return f'{self.user_id}:{self.pitch_id}'
'''
class user-bio, email, username,pic db.relationship(1:many)
comments-userid, pitchid, comment
upvote
downvote-id,pitch
'''
| 30.375758
| 76
| 0.673783
| 645
| 5,012
| 5.062016
| 0.16124
| 0.068606
| 0.085758
| 0.063706
| 0.762634
| 0.715161
| 0.673507
| 0.643185
| 0.612864
| 0.588361
| 0
| 0.009826
| 0.187749
| 5,012
| 165
| 77
| 30.375758
| 0.792189
| 0.276137
| 0
| 0.45977
| 1
| 0
| 0.093426
| 0.017301
| 0
| 0
| 0
| 0
| 0
| 1
| 0.195402
| false
| 0.183908
| 0.068966
| 0.103448
| 0.758621
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
|
0
| 4
|
824e29bb76c60efabcce0b1ec1e5588b7f020107
| 390
|
py
|
Python
|
nanome/_internal/_network/_commands/_serialization/_ui/_get_menu_transform.py
|
rramji/nanome-lib
|
2806598af31cfb4bb6e16366f0b300d2ddcc9c13
|
[
"MIT"
] | null | null | null |
nanome/_internal/_network/_commands/_serialization/_ui/_get_menu_transform.py
|
rramji/nanome-lib
|
2806598af31cfb4bb6e16366f0b300d2ddcc9c13
|
[
"MIT"
] | null | null | null |
nanome/_internal/_network/_commands/_serialization/_ui/_get_menu_transform.py
|
rramji/nanome-lib
|
2806598af31cfb4bb6e16366f0b300d2ddcc9c13
|
[
"MIT"
] | null | null | null |
from nanome._internal._util._serializers import _TypeSerializer
class _GetMenuTransform(_TypeSerializer):
    """Serializer for the GetMenuTransform command (wire version 0).

    Serializes the request as a single byte; the response carries no payload
    this class decodes (deserialize returns None).
    """
    def __init__(self):
        pass

    def version(self):
        # Wire-protocol version of this serializer.
        return 0

    def name(self):
        # Command name used by the serializer registry.
        return "GetMenuTransform"

    def serialize(self, version, value, context):
        # 'value' is written as one byte — presumably a menu index; confirm
        # against the caller before widening.
        context.write_byte(value)

    def deserialize(self, version, context):
        return None
| 22.941176
| 63
| 0.682051
| 41
| 390
| 6.219512
| 0.585366
| 0.078431
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003367
| 0.238462
| 390
| 17
| 64
| 22.941176
| 0.855219
| 0
| 0
| 0
| 0
| 0
| 0.040921
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.416667
| false
| 0.083333
| 0.083333
| 0.25
| 0.833333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 4
|
8267dde16e79ccf1ba05048eaf97384a0f6dd407
| 150
|
py
|
Python
|
verify/checker/abc150/a.py
|
naskya/testcase-generator
|
02765184a275152e1d8c177f2028ca8db315cfee
|
[
"MIT"
] | 4
|
2020-09-23T07:11:41.000Z
|
2022-02-02T09:08:21.000Z
|
verify/checker/abc150/a.py
|
naskya/testcase-generator
|
02765184a275152e1d8c177f2028ca8db315cfee
|
[
"MIT"
] | 5
|
2021-08-29T18:23:01.000Z
|
2021-11-20T03:53:19.000Z
|
verify/checker/abc150/a.py
|
naskya/testcase-generator
|
02765184a275152e1d8c177f2028ca8db315cfee
|
[
"MIT"
] | null | null | null |
def main() -> None:
    """Check one generated test case for ABC150-A against the constraints."""
    tokens = input().split()
    K, X = (int(tok) for tok in tokens)
    # Problem constraints: 1 <= K <= 100, 1 <= X <= 10^5.
    assert 1 <= K <= 100
    assert 1 <= X <= 10 ** 5


if __name__ == '__main__':
    main()
| 15
| 36
| 0.486667
| 22
| 150
| 2.954545
| 0.727273
| 0.215385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07619
| 0.3
| 150
| 9
| 37
| 16.666667
| 0.542857
| 0
| 0
| 0
| 0
| 0
| 0.053333
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.166667
| true
| 0
| 0
| 0
| 0.166667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
826f63b596c6cc25a010c826bb77a3ef1a64984c
| 106
|
py
|
Python
|
lib/PyAMF-0.7.2/pyamf/tests/remoting/__init__.py
|
MiCHiLU/google_appengine_sdk
|
3da9f20d7e65e26c4938d2c4054bc4f39cbc5522
|
[
"Apache-2.0"
] | 790
|
2015-01-03T02:13:39.000Z
|
2020-05-10T19:53:57.000Z
|
AppServer/lib/PyAMF-0.6.1/pyamf/tests/remoting/__init__.py
|
nlake44/appscale
|
6944af660ca4cb772c9b6c2332ab28e5ef4d849f
|
[
"Apache-2.0"
] | 1,361
|
2015-01-08T23:09:40.000Z
|
2020-04-14T00:03:04.000Z
|
AppServer/lib/PyAMF-0.6.1/pyamf/tests/remoting/__init__.py
|
nlake44/appscale
|
6944af660ca4cb772c9b6c2332ab28e5ef4d849f
|
[
"Apache-2.0"
] | 155
|
2015-01-08T22:59:31.000Z
|
2020-04-08T08:01:53.000Z
|
# Copyright (c) The PyAMF Project.
# See LICENSE.txt for details.
"""
Remoting tests.
@since: 0.1.0
"""
| 11.777778
| 34
| 0.650943
| 16
| 106
| 4.3125
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034483
| 0.179245
| 106
| 8
| 35
| 13.25
| 0.758621
| 0.877358
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
82771b01893f77029cb6d3ac928fce6deb02cfe4
| 1,362
|
py
|
Python
|
src/libs/baseview.py
|
Moniter123/websql
|
6510f9264526087d499ca690bdff63a630136d2d
|
[
"MIT"
] | 1
|
2021-08-16T14:52:50.000Z
|
2021-08-16T14:52:50.000Z
|
src/libs/baseview.py
|
Moniter123/websql
|
6510f9264526087d499ca690bdff63a630136d2d
|
[
"MIT"
] | null | null | null |
src/libs/baseview.py
|
Moniter123/websql
|
6510f9264526087d499ca690bdff63a630136d2d
|
[
"MIT"
] | null | null | null |
from django.views.generic.base import View
from django.utils.decorators import method_decorator
from rest_framework.decorators import api_view
from rest_framework.permissions import (
IsAdminUser,
AllowAny
)
from rest_framework.views import APIView
# Wrap dispatch() so DRF's api_view handles the four supported HTTP verbs.
@method_decorator(api_view(['DELETE', 'GET', 'POST', 'PUT']), 'dispatch')
class BaseView(View):
    """Base Django view: CRUD handler stubs for subclasses to override."""
    def dispatch(self, *args, **kwargs):
        # Delegate to Django's normal method routing (get/post/put/delete).
        return super(BaseView, self).dispatch(*args, **kwargs)

    def get(self, request, args: str = None):
        pass

    def post(self, request, args: str = None):
        pass

    def put(self, request, args: str = None):
        pass

    def delete(self, request, args: str = None):
        pass
class SuperUserpermissions(APIView):
    """DRF view base restricted to admin users; handler stubs to override."""
    # Only staff/admin accounts may access subclasses of this view.
    permission_classes = (IsAdminUser,)

    def get(self, request, args: str = None):
        pass

    def post(self, request, args: str = None):
        pass

    def put(self, request, args: str = None):
        pass

    def delete(self, request, args: str = None):
        pass
class AnyLogin(APIView):
    """DRF view base with authentication disabled (anonymous access allowed)."""
    # Empty tuples: no permission checks and no authentication backends.
    permission_classes = ()
    authentication_classes = ()

    def get(self, request, args: str = None):
        pass

    def post(self, request, args: str = None):
        pass

    def put(self, request, args: str = None):
        pass

    def delete(self, request, args: str = None):
        pass
| 22.327869
| 73
| 0.633627
| 167
| 1,362
| 5.107784
| 0.239521
| 0.154748
| 0.21102
| 0.253224
| 0.475967
| 0.475967
| 0.475967
| 0.475967
| 0.475967
| 0.475967
| 0
| 0
| 0.254038
| 1,362
| 60
| 74
| 22.7
| 0.839567
| 0
| 0
| 0.585366
| 0
| 0
| 0.017621
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.317073
| false
| 0.292683
| 0.121951
| 0.02439
| 0.609756
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
82b5548963c572c57584bf94afd5a4b3a94f7777
| 7,945
|
py
|
Python
|
tests/test_alt_extrapolation.py
|
DeNederlandscheBank/solvency2-rfr
|
61f192b98283274594f80605b11823a0a505d0f6
|
[
"MIT"
] | null | null | null |
tests/test_alt_extrapolation.py
|
DeNederlandscheBank/solvency2-rfr
|
61f192b98283274594f80605b11823a0a505d0f6
|
[
"MIT"
] | null | null | null |
tests/test_alt_extrapolation.py
|
DeNederlandscheBank/solvency2-rfr
|
61f192b98283274594f80605b11823a0a505d0f6
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for alternative extrapolation"""
import solvency2_data
import numpy as np
import pandas as pd
import unittest
from collections import OrderedDict
from datetime import datetime
class TestAltExtra(unittest.TestCase):
    """Regression tests for the alternative-extrapolation helpers in solvency2_data.

    Tests 1-5 pin exact outputs of DiscountedValue4par2forwards for fixed
    inputs; tests 6-8 check the full swap-struct -> forwards -> term-struct
    pipeline against 2021-12-31 EIOPA data (requires the downloaded dataset).
    """
    def test_1(self):
        assert solvency2_data.DiscountedValue4par2forwards(1, 2, 3, 4, 5) == (3.50016, -0.37503999999999993)

    def test_2(self):
        assert solvency2_data.DiscountedValue4par2forwards(10, 20, 30, 40, 50) == (314.0, -0.375)

    def test_3(self):
        assert solvency2_data.DiscountedValue4par2forwards(1, 0.01, 0.02, 0.03, 5) == (-0.9704579707187194, -0.04448945219269113)

    def test_4(self):
        assert solvency2_data.DiscountedValue4par2forwards(19.408164389475854, 0.9130243814567768, 0.00422, 0, 5) == (0.014191649629102854, -4.622916350630098)

    def test_5(self):
        assert solvency2_data.DiscountedValue4par2forwards(19.408164389475854, 0.9130243814567768, 0.00422, 0.003098358839788617, 5) == (1.1003249270058468e-08, -4.538134856300949)

    def test_6(self):
        # Build the swap structure from the 2021-12-31 no-VA Euro curve plus
        # four long-end swap quotes, and compare against the expected series.
        d = solvency2_data.read(str(datetime(2021, 12, 31).date()))
        actual_swap = solvency2_data.create_swap_struct(rfr = d['RFR_spot_no_VA']['Euro'],
            additional_swaps = {25: 0.00522, 30: 0.00476, 40: 0.00400, 50: 0.00340})
        expected_swap = pd.Series(index = [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
            18, 19, 20, 25, 30, 40, 50],
            data = [-0.00585 , -0.00394624, -0.00245477, -0.00144549, -0.00083689,
            -0.00025882, 0.00029833, 0.00087393, 0.00145754, 0.00202904,
            0.00249068, 0.00296985, 0.00339847, 0.0037291 , 0.00392453,
            0.00398574, 0.00401794, 0.0040783 , 0.00421415, 0.00448139,
            0.00422 , 0.00376 , 0.003 , 0.0024 ],
            dtype = 'float64')
        # self.assertEqual(actual_swap.values, expected_swap.values)
        # self.assertEqual(actual_swap.index, expected_swap.index)
        pd.testing.assert_series_equal(actual_swap, expected_swap)

    def test_7(self):
        # Convert the par swap structure to 1..120-year forward rates; beyond
        # the last liquid point the forwards repeat in blocks (flat segments).
        d = solvency2_data.read(str(datetime(2021, 12, 31).date()))
        actual_swap = solvency2_data.create_swap_struct(rfr = d['RFR_spot_no_VA']['Euro'],
            additional_swaps = {25: 0.00522, 30: 0.00476, 40: 0.00400, 50: 0.00340})
        actual_forwards = solvency2_data.FromParToForwards(term_struct = actual_swap)
        expected_forwards = pd.Series(index = range(1, 121),
            data = [-0.00585, -0.00204637, 0.00052669, 0.00158614, 0.00160373, 0.00264505,
            0.00366659, 0.00494943, 0.00620254, 0.00728514, 0.00723214, 0.00841583,
            0.00874508, 0.0082205, 0.00679419, 0.00495043, 0.00456012, 0.00516055,
            0.00680334, 0.00989486, 0.00309836, 0.00309836, 0.00309836, 0.00309836,
            0.00309836, 0.00130144, 0.00130144, 0.00130144, 0.00130144, 0.00130144,
            0.00057508, 0.00057508, 0.00057508, 0.00057508, 0.00057508, 0.00057508,
            0.00057508, 0.00057508, 0.00057508, 0.00057508, -0.00011927, -0.00011927,
            -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927,
            -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927,
            -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927,
            -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927,
            -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927,
            -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927,
            -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927,
            -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927,
            -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927,
            -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927,
            -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927,
            -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927,
            -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927, -0.00011927],
            dtype = 'float64')
        # self.assertEqual(actual_forwards.values, expected_forwards.values)
        # self.assertEqual(actual_forwards.index, expected_forwards.index)
        pd.testing.assert_series_equal(actual_forwards, expected_forwards)

    def test_8(self):
        # Full round trip: par swaps -> forwards -> reconstructed term
        # structure, checked over maturities 1..120.
        d = solvency2_data.read(str(datetime(2021, 12, 31).date()))
        actual_swap = solvency2_data.create_swap_struct(rfr = d['RFR_spot_no_VA']['Euro'],
            additional_swaps = {25: 0.00522, 30: 0.00476, 40: 0.00400, 50: 0.00340})
        actual_forwards = solvency2_data.FromParToForwards(term_struct = actual_swap)
        actual_term = solvency2_data.forwardstruct2termstruct(actual_forwards)
        expected_term = pd.Series(index = range(1, 121),
            data = [-0.00585 , -0.00395 , -0.00246 , -0.00145 , -0.00084 ,
            -0.00026 , 0.0003 , 0.00088 , 0.00147 , 0.00205 ,
            0.00252 , 0.00301 , 0.00345 , 0.00379 , 0.00399 ,
            0.00405 , 0.00408 , 0.00414 , 0.00428 , 0.00456 ,
            0.00449035, 0.00442704, 0.00436923, 0.00431625, 0.0042675 ,
            0.00415326, 0.00404749, 0.00394929, 0.00385787, 0.00377255,
            0.00366925, 0.00357241, 0.00348145, 0.00339585, 0.00331514,
            0.00323893, 0.00316684, 0.00309855, 0.00303377, 0.00297223,
            0.00289671, 0.00282479, 0.00275623, 0.00269079, 0.00262825,
            0.00256844, 0.00251118, 0.00245631, 0.00240368, 0.00235316,
            0.00230462, 0.00225796, 0.00221305, 0.00216981, 0.00212814,
            0.00208797, 0.0020492 , 0.00201177, 0.00197562, 0.00194067,
            0.00190686, 0.00187415, 0.00184248, 0.00181179, 0.00178206,
            0.00175322, 0.00172525, 0.0016981 , 0.00167174, 0.00164613,
            0.00162124, 0.00159705, 0.00157352, 0.00155062, 0.00152834,
            0.00150664, 0.00148551, 0.00146492, 0.00144485, 0.00142528,
            0.0014062 , 0.00138758, 0.00136941, 0.00135168, 0.00133436,
            0.00131745, 0.00130092, 0.00128477, 0.00126898, 0.00125355,
            0.00123845, 0.00122368, 0.00120923, 0.00119509, 0.00118125,
            0.00116769, 0.00115442, 0.00114141, 0.00112867, 0.00111618,
            0.00110394, 0.00109194, 0.00108018, 0.00106864, 0.00105732,
            0.00104621, 0.00103531, 0.00102461, 0.00101411, 0.0010038 ,
            0.00099368, 0.00098374, 0.00097397, 0.00096438, 0.00095495,
            0.00094568, 0.00093658, 0.00092762, 0.00091882, 0.00091017],
            dtype = 'float64')
        pd.testing.assert_series_equal(actual_term, expected_term)
| 72.889908
| 174
| 0.546381
| 924
| 7,945
| 4.616883
| 0.297619
| 0.168776
| 0.185185
| 0.333333
| 0.483826
| 0.45429
| 0.42241
| 0.405063
| 0.405063
| 0.383966
| 0
| 0.516183
| 0.32725
| 7,945
| 109
| 175
| 72.889908
| 0.281946
| 0.041032
| 0
| 0.285714
| 0
| 0
| 0.009857
| 0
| 0
| 0
| 0
| 0
| 0.087912
| 1
| 0.087912
| false
| 0
| 0.065934
| 0
| 0.164835
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
7da999f7e5298c7ad893f8e67ebaac3244daa810
| 617
|
py
|
Python
|
MinecraftStats/mcstats/stats/eat_rawmeat.py
|
jeffry1829/mcweb
|
8f736427d8adbba626194380e18f4a882ff31162
|
[
"CC-BY-3.0"
] | null | null | null |
MinecraftStats/mcstats/stats/eat_rawmeat.py
|
jeffry1829/mcweb
|
8f736427d8adbba626194380e18f4a882ff31162
|
[
"CC-BY-3.0"
] | null | null | null |
MinecraftStats/mcstats/stats/eat_rawmeat.py
|
jeffry1829/mcweb
|
8f736427d8adbba626194380e18f4a882ff31162
|
[
"CC-BY-3.0"
] | 1
|
2020-01-17T13:59:56.000Z
|
2020-01-17T13:59:56.000Z
|
from mcstats import mcstats

# Register the raw-meat statistic: sums the 'minecraft:used' counter of every
# raw-meat item. The title/desc values are user-facing Chinese strings
# (title ~ "raw food eater", desc ~ "amount of raw meat eaten") and must stay
# as-is; the unit is an integer count.
mcstats.registry.append(
    mcstats.MinecraftStat(
        'eat_rawmeat',
        {
            'title': '生食者',
            'desc': '吃過的生肉數量',
            'unit': 'int',
        },
        # One reader per raw-meat item; StatSumReader adds them together.
        mcstats.StatSumReader([
            mcstats.StatReader(['minecraft:used','minecraft:porkchop']),
            mcstats.StatReader(['minecraft:used','minecraft:beef']),
            mcstats.StatReader(['minecraft:used','minecraft:chicken']),
            mcstats.StatReader(['minecraft:used','minecraft:mutton']),
            mcstats.StatReader(['minecraft:used','minecraft:rabbit']),
        ])
    ))
| 32.473684
| 72
| 0.564019
| 49
| 617
| 7.081633
| 0.469388
| 0.244957
| 0.37464
| 0.432277
| 0.56196
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.272285
| 617
| 18
| 73
| 34.277778
| 0.772829
| 0
| 0
| 0
| 0
| 0
| 0.3047
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.058824
| 0
| 0.058824
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
7db78bab15c709b59d39940f56879c6edaa22548
| 4,223
|
py
|
Python
|
useintest/services/models.py
|
wtsi-hgi/startfortest
|
426343c0ff340d4d83575cdafe2c4184707e7693
|
[
"MIT"
] | 1
|
2019-06-18T20:56:42.000Z
|
2019-06-18T20:56:42.000Z
|
useintest/services/models.py
|
wtsi-hgi/useintest
|
426343c0ff340d4d83575cdafe2c4184707e7693
|
[
"MIT"
] | 3
|
2017-09-21T12:14:44.000Z
|
2018-02-19T11:18:47.000Z
|
useintest/services/models.py
|
wtsi-hgi/useintest
|
426343c0ff340d4d83575cdafe2c4184707e7693
|
[
"MIT"
] | null | null | null |
from typing import Set, Optional, Generic, TypeVar
from bidict import bidict
from docker.errors import NotFound
from docker.models.containers import Container
from useintest.common import UseInTestModel, docker_client
from useintest.services.exceptions import UnexpectedNumberOfPortsError
# Type variable for services parameterised over their user model.
UserType = TypeVar("UserType", bound="User")


class Service(UseInTestModel):
    """
    A service.
    """
    @property
    def port(self) -> int:
        """
        Gets the port on the host machine.
        :return: the exposed port
        :raises UnexpectedNumberOfPortsException: if there is more than one port exposed
        """
        # Unambiguous only when exactly one port is mapped.
        if len(self.ports) != 1:
            raise UnexpectedNumberOfPortsError(f"{len(self.ports)} ports are exposed (cannot use `port`)")
        return list(self.ports.values())[0]

    @property
    def url(self) -> str:
        """
        Gets base URL.
        :return: the base url (without trailing slash)
        :raises UnexpectedNumberOfPortsException: if there is more than one port exposed
        """
        return f"http://{self.host}:{self.port}"

    def __init__(self):
        """
        Constructor.
        """
        super().__init__()
        # FIXME: Assumption about where the Docker machine is accessible (i.e. it could be on a VM)
        self.host = "localhost"
        # Bidirectional map of external (host) port -> internal (container) port.
        self.ports = bidict()

    def get_external_port_mapping_to(self, port: int) -> int:
        """
        Gets the port on localhost to which the given port in the container maps to.
        :param port: the port inside the container
        :return: the port outside that maps to that in the container
        """
        # `.inv` is the bidict inverse view: internal port -> external port.
        return self.ports.inv[port]
class DockerisedService(Service):
    """
    A service running in a Docker container.
    """
    @property
    def container(self) -> Optional[Container]:
        """Look up the live container by id; None if unset or already removed."""
        if self.container_id is None:
            return None
        try:
            return docker_client.containers.get(self.container_id)
        except NotFound:
            # Container was removed out from under us — treat as "no container".
            return None

    @container.setter
    def container(self, container: Container):
        # Only the id is stored; the Container object is re-fetched on access.
        self.container_id = container.id

    @property
    def container_id(self) -> Optional[str]:
        return self._container_id

    @container_id.setter
    def container_id(self, container_id: str):
        self._container_id = container_id

    def __init__(self):
        super().__init__()
        self.name = None
        self._container_id: str = None
        self.controller = None
        # TODO: Not sure of the best way to specify the type as it could be that of a subclass...

    def __enter__(self):
        # Context-manager support: the service stops itself on exit.
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.controller.stop_service(self)
class User(UseInTestModel):
    """
    A user with an associated password.

    Instances are value objects: equality and hashing are defined over the
    (username, password) pair so users can be stored in sets/dicts.
    """
    def __init__(self, username: str, password: str = None):
        """
        Constructor.
        :param username: the user's name
        :param password: the user's password (may be None)
        """
        self.username = username
        self.password = password

    def __eq__(self, other) -> bool:
        return type(other) == type(self) \
               and other.username == self.username \
               and other.password == self.password

    def __hash__(self) -> int:
        # BUG FIX: the original concatenated username + password, which raised
        # TypeError whenever password was None (its default). Hashing the
        # tuple handles None and stays consistent with __eq__.
        # (Return annotation corrected: `hash` is a builtin, not a type.)
        return hash((self.username, self.password))
class ServiceWithUsers(Generic[UserType], Service):
    """
    A service that keeps track of a set of users, one of which may be
    designated as the privileged (root) user.
    """
    def __init__(self):
        super().__init__()
        self.users: Set[UserType] = set()
        self._root_user: Optional[UserType] = None

    @property
    def root_user(self) -> Optional[UserType]:
        """
        The user with privileged access to this service.
        :return: the privileged user (always a member of ``users``)
        """
        assert self._root_user in self.users
        return self._root_user

    @root_user.setter
    def root_user(self, user: Optional[UserType]):
        """
        Designate *user* as the privileged user, registering it in ``users``
        first if it is not already known.
        :param user: the privileged user (or None to clear)
        """
        if user is not None and user not in self.users:
            self.users.add(user)
        self._root_user = user
class DockerisedServiceWithUsers(Generic[UserType], DockerisedService, ServiceWithUsers[UserType]):
    """
    Service running on Docker with users.
    """
    # Pure mixin combination: all behaviour comes from the two bases.
| 29.326389
| 106
| 0.632252
| 503
| 4,223
| 5.151093
| 0.276342
| 0.050946
| 0.040525
| 0.027789
| 0.144346
| 0.101891
| 0.083366
| 0.083366
| 0.053261
| 0.053261
| 0
| 0.000653
| 0.274923
| 4,223
| 143
| 107
| 29.531469
| 0.845526
| 0.236798
| 0
| 0.175676
| 0
| 0
| 0.036005
| 0
| 0
| 0
| 0
| 0.013986
| 0.013514
| 1
| 0.22973
| false
| 0.054054
| 0.081081
| 0.054054
| 0.527027
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
7df02855c8c20e3ce9aae2101eb956085a791e19
| 304
|
py
|
Python
|
src/headers.py
|
ZuuZaa/defacto_price_parser
|
4cac5b5ba56ae113ecc4d0670f7e1ea1a3136f98
|
[
"MIT"
] | null | null | null |
src/headers.py
|
ZuuZaa/defacto_price_parser
|
4cac5b5ba56ae113ecc4d0670f7e1ea1a3136f98
|
[
"MIT"
] | null | null | null |
src/headers.py
|
ZuuZaa/defacto_price_parser
|
4cac5b5ba56ae113ecc4d0670f7e1ea1a3136f98
|
[
"MIT"
] | null | null | null |
#for request headers
headers = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'GET',
'Access-Control-Allow-Headers': 'Content-Type',
'Access-Control-Max-Age': '3600',
'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:52.0) Gecko/20100101 Firefox/52.0'
}
| 38
| 96
| 0.661184
| 42
| 304
| 4.761905
| 0.690476
| 0.26
| 0.27
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099617
| 0.141447
| 304
| 8
| 97
| 38
| 0.666667
| 0.0625
| 0
| 0
| 0
| 0.142857
| 0.740351
| 0.368421
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
814b3414baeae8ada72bb100f38af2e9c8d037ae
| 141
|
py
|
Python
|
src/run.py
|
colinnewell/Adventure-Insecure
|
46717dd14d88887559bb3a392c67b534c294edaa
|
[
"MIT"
] | 4
|
2016-09-24T19:46:12.000Z
|
2017-07-08T02:17:06.000Z
|
src/run.py
|
colinnewell/Adventure-Insecure
|
46717dd14d88887559bb3a392c67b534c294edaa
|
[
"MIT"
] | null | null | null |
src/run.py
|
colinnewell/Adventure-Insecure
|
46717dd14d88887559bb3a392c67b534c294edaa
|
[
"MIT"
] | null | null | null |
from app import app

if __name__ == '__main__':
    import logging
    # DEBUG-level logging for development runs.
    logging.basicConfig(level=logging.DEBUG)
    # NOTE(review): binding to 0.0.0.0 exposes the server on all network
    # interfaces -- presumably intentional for container use; confirm before
    # deploying outside a sandbox.
    app.run(host='0.0.0.0')
| 17.625
| 44
| 0.680851
| 21
| 141
| 4.190476
| 0.619048
| 0.068182
| 0.068182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034783
| 0.184397
| 141
| 7
| 45
| 20.142857
| 0.730435
| 0
| 0
| 0
| 0
| 0
| 0.106383
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
815337034d73063e67749cfde215e6f0e78751bc
| 551
|
py
|
Python
|
baleen/console/__init__.py
|
Rizwanabro/DataLabs
|
9e095bc4cac584e906dfd6e38eb77b1ef5afe107
|
[
"MIT"
] | null | null | null |
baleen/console/__init__.py
|
Rizwanabro/DataLabs
|
9e095bc4cac584e906dfd6e38eb77b1ef5afe107
|
[
"MIT"
] | null | null | null |
baleen/console/__init__.py
|
Rizwanabro/DataLabs
|
9e095bc4cac584e906dfd6e38eb77b1ef5afe107
|
[
"MIT"
] | null | null | null |
# baleen.console
# Implements the baleen console utility.
#
# Author: Benjamin Bengfort <benjamin@bengfort.com>
# Created: Wed Mar 02 10:52:36 2016 -0500
#
# Copyright (C) 2016 Bengfort.com
# For license information, see LICENSE.txt
#
# ID: __init__.py [] benjamin@bengfort.com $
"""
Implements the baleen console utility.
"""
##########################################################################
## Imports
##########################################################################
from .app import COMMANDS
from .app import BaleenUtility
| 25.045455
| 74
| 0.53176
| 53
| 551
| 5.45283
| 0.641509
| 0.134948
| 0.131488
| 0.179931
| 0.228374
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041237
| 0.119782
| 551
| 21
| 75
| 26.238095
| 0.554639
| 0.562613
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
8166af147e99853ff920fed42ceed2604492a1c4
| 320
|
py
|
Python
|
chemper/mol_toolkits/__init__.py
|
alexmalins/chemper
|
cc82c7d538a5157f90e3560cb2f4f6320038a80f
|
[
"MIT"
] | 10
|
2018-05-21T16:50:05.000Z
|
2021-12-25T02:30:07.000Z
|
chemper/mol_toolkits/__init__.py
|
alexmalins/chemper
|
cc82c7d538a5157f90e3560cb2f4f6320038a80f
|
[
"MIT"
] | 88
|
2018-04-12T01:34:18.000Z
|
2020-10-23T16:34:52.000Z
|
chemper/mol_toolkits/__init__.py
|
alexmalins/chemper
|
cc82c7d538a5157f90e3560cb2f4f6320038a80f
|
[
"MIT"
] | 10
|
2018-04-30T19:07:43.000Z
|
2021-12-25T02:30:08.000Z
|
from . import adapters
from . import mol_toolkit

# Toolkit-specific wrappers can only be imported when the backing
# cheminformatics library is installed.
if mol_toolkit.HAS_OE:
    from . import cp_openeye
if mol_toolkit.HAS_RDK:
    from . import cp_rdk

if not mol_toolkit.HAS_OE and not mol_toolkit.HAS_RDK:
    # Bug fix: the two adjacent string literals previously concatenated with no
    # separator, producing "...is installedChemPer requires...".
    raise Exception("Neither OpenEye or RDKit is installed. "
                    "ChemPer requires at least one of these toolkits")
| 24.615385
| 60
| 0.76875
| 53
| 320
| 4.433962
| 0.528302
| 0.212766
| 0.221277
| 0.12766
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175
| 320
| 12
| 61
| 26.666667
| 0.890152
| 0
| 0
| 0
| 0
| 0
| 0.2625
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.444444
| 0
| 0.444444
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
81b85c0cca0c578a8886560bf00e8bceada0758e
| 225
|
py
|
Python
|
rest_framework_security/brute_force_protection/exceptions.py
|
RubenEu/django-rest-framework-security
|
638cf271c51a5bafd434a6b6a9c25a7c4849b485
|
[
"MIT"
] | 7
|
2020-09-01T09:55:25.000Z
|
2021-11-04T06:59:04.000Z
|
rest_framework_security/brute_force_protection/exceptions.py
|
RubenEu/django-rest-framework-security
|
638cf271c51a5bafd434a6b6a9c25a7c4849b485
|
[
"MIT"
] | 32
|
2020-10-28T17:09:18.000Z
|
2022-03-12T00:55:09.000Z
|
rest_framework_security/brute_force_protection/exceptions.py
|
RubenEu/django-rest-framework-security
|
638cf271c51a5bafd434a6b6a9c25a7c4849b485
|
[
"MIT"
] | 2
|
2020-12-18T01:26:53.000Z
|
2021-11-04T06:59:07.000Z
|
class BruteForceProtectionException(Exception):
    """Base exception for the brute force protection package."""
    pass
class BruteForceProtectionBanException(BruteForceProtectionException):
    """Raised when a client is banned by the brute force protection."""
    pass
class BruteForceProtectionCaptchaException(BruteForceProtectionException):
    """Raised for captcha-related brute force protection failures."""
    pass
| 20.454545
| 74
| 0.848889
| 12
| 225
| 15.916667
| 0.5
| 0.094241
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 225
| 10
| 75
| 22.5
| 0.955
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
c4c350124d7a13ebe8dc6d46b3540225ccf95ce8
| 190
|
py
|
Python
|
tests/constants.py
|
fraudnet/kibana-docker
|
2551584f9ac88ad9bcaf19c342d52fd659434317
|
[
"Apache-2.0"
] | 256
|
2016-09-26T22:29:37.000Z
|
2021-11-08T09:39:33.000Z
|
tests/constants.py
|
fraudnet/kibana-docker
|
2551584f9ac88ad9bcaf19c342d52fd659434317
|
[
"Apache-2.0"
] | 97
|
2016-09-30T11:52:46.000Z
|
2019-06-25T18:11:53.000Z
|
tests/constants.py
|
fraudnet/kibana-docker
|
2551584f9ac88ad9bcaf19c342d52fd659434317
|
[
"Apache-2.0"
] | 150
|
2016-10-24T20:42:41.000Z
|
2021-12-30T05:43:27.000Z
|
import os
from subprocess import run, PIPE

# Prefer an explicit ELASTIC_VERSION override from the environment; otherwise
# ask the repository helper script for the version string.
version = os.environ.get('ELASTIC_VERSION')
if version is None:
    version = run('./bin/elastic-version', stdout=PIPE).stdout.decode().strip()
| 23.75
| 79
| 0.721053
| 25
| 190
| 5.44
| 0.64
| 0.205882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136842
| 190
| 7
| 80
| 27.142857
| 0.829268
| 0
| 0
| 0
| 0
| 0
| 0.189474
| 0.110526
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
c4d24c857ebdd84af0d0024be25ec9ad4621a64f
| 270
|
py
|
Python
|
blogging/migrations/0006_merge_20201125_0248.py
|
greeneyedsoandso/django
|
7b4c5c9ec75cdc34de0fdaf6c3c705539cea0dfc
|
[
"Unlicense"
] | null | null | null |
blogging/migrations/0006_merge_20201125_0248.py
|
greeneyedsoandso/django
|
7b4c5c9ec75cdc34de0fdaf6c3c705539cea0dfc
|
[
"Unlicense"
] | 2
|
2020-11-22T22:34:56.000Z
|
2020-11-25T03:59:30.000Z
|
blogging/migrations/0006_merge_20201125_0248.py
|
greeneyedsoandso/django-blog
|
7b4c5c9ec75cdc34de0fdaf6c3c705539cea0dfc
|
[
"Unlicense"
] | null | null | null |
# Generated by Django 3.1.3 on 2020-11-25 02:48
from django.db import migrations
class Migration(migrations.Migration):
    """Merge migration reconciling two divergent ``0005`` branches."""
    # Both parent migrations must be applied before this one; it performs no
    # schema changes of its own.
    dependencies = [
        ("blogging", "0005_auto_20201123_1846"),
        ("blogging", "0005_auto_20201123_0354"),
    ]
    operations = []
| 19.285714
| 48
| 0.662963
| 33
| 270
| 5.242424
| 0.757576
| 0.138728
| 0.184971
| 0.277457
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.221698
| 0.214815
| 270
| 13
| 49
| 20.769231
| 0.59434
| 0.166667
| 0
| 0
| 1
| 0
| 0.278027
| 0.206278
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
f2111b21f4e34c6cf964064e9b2b4acfa9583e06
| 198
|
py
|
Python
|
Functions.Templates/Templates/HttpTrigger-Python/run.py
|
jeffhollan/azure-functions-templates
|
4e202cf4712e3eb57e6270a48a439987bb1502d1
|
[
"MIT"
] | 1
|
2018-08-10T14:49:38.000Z
|
2018-08-10T14:49:38.000Z
|
Functions.Templates/Templates/HttpTrigger-Python/run.py
|
jeffhollan/azure-functions-templates
|
4e202cf4712e3eb57e6270a48a439987bb1502d1
|
[
"MIT"
] | null | null | null |
Functions.Templates/Templates/HttpTrigger-Python/run.py
|
jeffhollan/azure-functions-templates
|
4e202cf4712e3eb57e6270a48a439987bb1502d1
|
[
"MIT"
] | 1
|
2020-01-04T18:45:32.000Z
|
2020-01-04T18:45:32.000Z
|
import os
import json

# The legacy Azure Functions Python worker passes the request and response as
# file paths in the 'req' and 'res' environment variables.
# Bug fix: the request file was opened and never closed (handle leak); both
# files are now managed with context managers.
with open(os.environ['req']) as req_file:
    postreqdata = json.loads(req_file.read())

with open(os.environ['res'], 'w') as response:
    response.write("hello world from " + postreqdata['name'])
| 28.285714
| 57
| 0.69697
| 27
| 198
| 5.111111
| 0.666667
| 0.086957
| 0.188406
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 198
| 7
| 58
| 28.285714
| 0.784091
| 0
| 0
| 0
| 0
| 0
| 0.145078
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
f2121e6239d4fdf2a9e2f605f941869405a147d4
| 111
|
py
|
Python
|
python/protoconf/__init__.py
|
mcornea/protoconf
|
d929833e4594188732297160d4a2d48a688f31ae
|
[
"MIT"
] | 70
|
2019-12-05T11:24:27.000Z
|
2022-02-28T10:04:36.000Z
|
python/protoconf/__init__.py
|
mcornea/protoconf
|
d929833e4594188732297160d4a2d48a688f31ae
|
[
"MIT"
] | 171
|
2019-12-12T20:12:17.000Z
|
2022-03-28T16:26:47.000Z
|
python/protoconf/__init__.py
|
mcornea/protoconf
|
d929833e4594188732297160d4a2d48a688f31ae
|
[
"MIT"
] | 5
|
2021-01-02T04:55:53.000Z
|
2022-02-22T05:34:12.000Z
|
from .protoconf import (
Protoconf,
ProtoconfSync,
ProtoconfMutation,
ProtoconfMutationSync,
)
| 15.857143
| 26
| 0.711712
| 7
| 111
| 11.285714
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.225225
| 111
| 6
| 27
| 18.5
| 0.918605
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.166667
| 0
| 0.166667
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
1ef570d916755d71a737aa832b9e474bdb70e3cc
| 83
|
py
|
Python
|
activecampaign/admin/__init__.py
|
pmiguelima/django-activecampaign
|
2a0b7a3f852c84a1cb061d93bb21f4f74ad4a959
|
[
"BSD-3-Clause"
] | 1
|
2021-01-10T21:05:14.000Z
|
2021-01-10T21:05:14.000Z
|
activecampaign/admin/__init__.py
|
pmiguelima/django-activecampaign
|
2a0b7a3f852c84a1cb061d93bb21f4f74ad4a959
|
[
"BSD-3-Clause"
] | null | null | null |
activecampaign/admin/__init__.py
|
pmiguelima/django-activecampaign
|
2a0b7a3f852c84a1cb061d93bb21f4f74ad4a959
|
[
"BSD-3-Clause"
] | null | null | null |
from .list import List
from .message import Message
from .campaign import Campaign
| 20.75
| 30
| 0.819277
| 12
| 83
| 5.666667
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144578
| 83
| 3
| 31
| 27.666667
| 0.957746
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
1ef82b1c9f271fb64981a4c058776dab385fb086
| 334
|
py
|
Python
|
commands.py
|
tommyka/Lockscreen_button
|
c93305520b59d4a1b88791974b991d2f8b895e22
|
[
"MIT"
] | null | null | null |
commands.py
|
tommyka/Lockscreen_button
|
c93305520b59d4a1b88791974b991d2f8b895e22
|
[
"MIT"
] | null | null | null |
commands.py
|
tommyka/Lockscreen_button
|
c93305520b59d4a1b88791974b991d2f8b895e22
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, Tommy Kjær Andersen. All rights reserved.
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
import subprocess
def lockscreen():
    """
    Locks the screen on a mac
    """
    # Argument list with the default shell=False avoids spawning a shell for a
    # fixed command (safer and marginally cheaper than the previous
    # shell=True string form).
    subprocess.call(['pmset', 'displaysleepnow'])
| 23.857143
| 79
| 0.494012
| 32
| 334
| 5.15625
| 0.96875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020576
| 0.272455
| 334
| 13
| 80
| 25.692308
| 0.658436
| 0.625749
| 0
| 0
| 0
| 0
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
1efccfbe9bbb605fddab41d78fd983f6b502125f
| 105
|
py
|
Python
|
autograder/autograder/__init__.py
|
elihschiff/Submitty
|
8b980997b6f1dfcd73eb4cf4cca43398e67f96dc
|
[
"BSD-3-Clause"
] | 3
|
2020-07-27T16:23:09.000Z
|
2022-01-07T16:07:31.000Z
|
autograder/autograder/__init__.py
|
elihschiff/Submitty
|
8b980997b6f1dfcd73eb4cf4cca43398e67f96dc
|
[
"BSD-3-Clause"
] | 2
|
2021-05-10T14:33:39.000Z
|
2022-01-06T19:47:03.000Z
|
autograder/autograder/__init__.py
|
elihschiff/Submitty
|
8b980997b6f1dfcd73eb4cf4cca43398e67f96dc
|
[
"BSD-3-Clause"
] | 1
|
2020-06-25T22:45:25.000Z
|
2020-06-25T22:45:25.000Z
|
import os

# Absolute directory containing this module (symlinks resolved).
_PACKAGE_DIR = os.path.dirname(os.path.realpath(__file__))

# The installation's ``config`` directory lives two levels above this package.
CONFIG_PATH = os.path.join(_PACKAGE_DIR, '..', '..', 'config')
| 26.25
| 93
| 0.666667
| 15
| 105
| 4.333333
| 0.533333
| 0.276923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 105
| 3
| 94
| 35
| 0.677083
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
48154e7f1734f1ee7811b79aa0a65eb5a7bef6aa
| 97
|
py
|
Python
|
phd/__init__.py
|
jusjusjus/phase-dynamics
|
34af63c9d7514fe54cc9ef25ff01d3694f318923
|
[
"MIT"
] | 1
|
2019-01-15T12:03:03.000Z
|
2019-01-15T12:03:03.000Z
|
phd/__init__.py
|
jusjusjus/phase-dynamics
|
34af63c9d7514fe54cc9ef25ff01d3694f318923
|
[
"MIT"
] | null | null | null |
phd/__init__.py
|
jusjusjus/phase-dynamics
|
34af63c9d7514fe54cc9ef25ff01d3694f318923
|
[
"MIT"
] | null | null | null |
from . import matplotlib
from .core import mod, unmod, poincare_times, threshold_data, gradient
| 24.25
| 70
| 0.804124
| 13
| 97
| 5.846154
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134021
| 97
| 3
| 71
| 32.333333
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
4850344d2d95ae56440e0e29a7eda3a81af2d824
| 134
|
py
|
Python
|
socket 编程/__init__.py
|
Aloof-0/codesr
|
a8dc07af32be05d3166b0c869a24cb311a4c0d4e
|
[
"MIT"
] | 1
|
2021-02-23T07:07:21.000Z
|
2021-02-23T07:07:21.000Z
|
socket 编程/__init__.py
|
Aloof-0/codesr
|
a8dc07af32be05d3166b0c869a24cb311a4c0d4e
|
[
"MIT"
] | null | null | null |
socket 编程/__init__.py
|
Aloof-0/codesr
|
a8dc07af32be05d3166b0c869a24cb311a4c0d4e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
@Time : 2020/12/27 23:45
@Auth : 高冷Aloof
@File :__init__.py
@IDE :PyCharm
@Motto:ABC(Always Be Coding)
"""
| 16.75
| 28
| 0.619403
| 21
| 134
| 3.761905
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114035
| 0.149254
| 134
| 8
| 29
| 16.75
| 0.578947
| 0.932836
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
4879e65d62a90b06d091e261bc5b32f7b70880f2
| 192
|
py
|
Python
|
src/util.py
|
BobToninho/cybersecurity-project
|
019d9ec254a9f46a4fe728c95fe3c03b8df0efba
|
[
"MIT"
] | null | null | null |
src/util.py
|
BobToninho/cybersecurity-project
|
019d9ec254a9f46a4fe728c95fe3c03b8df0efba
|
[
"MIT"
] | null | null | null |
src/util.py
|
BobToninho/cybersecurity-project
|
019d9ec254a9f46a4fe728c95fe3c03b8df0efba
|
[
"MIT"
] | null | null | null |
# Taken from https://github.com/ojroques/garbled-circuit
import json
# HELPER FUNCTIONS
def parse_json(json_path):
    """Read the file at *json_path* and return the decoded JSON object."""
    with open(json_path) as handle:
        raw = handle.read()
    return json.loads(raw)
| 21.333333
| 56
| 0.744792
| 29
| 192
| 4.758621
| 0.724138
| 0.115942
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161458
| 192
| 8
| 57
| 24
| 0.857143
| 0.369792
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
6f8e4994895333e942cf67d64fafde8ca9e371dc
| 662
|
py
|
Python
|
hydra/plugins/launcher.py
|
Lysander6/hydra
|
3b24ee6472b1153d2f0f2622f3d2dee372ef17eb
|
[
"MIT"
] | 1
|
2019-12-29T17:58:59.000Z
|
2019-12-29T17:58:59.000Z
|
hydra/plugins/launcher.py
|
Lysander6/hydra
|
3b24ee6472b1153d2f0f2622f3d2dee372ef17eb
|
[
"MIT"
] | 6
|
2021-03-11T06:20:24.000Z
|
2022-02-27T10:43:29.000Z
|
hydra/plugins/launcher.py
|
Lysander6/hydra
|
3b24ee6472b1153d2f0f2622f3d2dee372ef17eb
|
[
"MIT"
] | 1
|
2020-11-19T06:31:52.000Z
|
2020-11-19T06:31:52.000Z
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
"""
Launcher plugin interface
"""
from abc import abstractmethod
from .plugin import Plugin
class Launcher(Plugin):
    """
    Abstract launcher

    Interface for plugins that launch batches of jobs; concrete launchers
    must implement `setup` and `launch`.
    """
    def __init__(self):
        # Abstract interface: direct instantiation is disallowed.
        raise NotImplementedError()

    @abstractmethod
    def setup(self, config, config_loader, task_function):
        """
        Sets this launcher instance up.
        """
        raise NotImplementedError()

    @abstractmethod
    def launch(self, job_overrides):
        """
        :param job_overrides: a batch of job arguments (list<list<string>>)
        """
        raise NotImplementedError()
| 22.066667
| 75
| 0.640483
| 66
| 662
| 6.30303
| 0.651515
| 0.173077
| 0.182692
| 0.197115
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.26284
| 662
| 29
| 76
| 22.827586
| 0.852459
| 0.321752
| 0
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0
| 0.181818
| 0
| 0.545455
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
6f9807ee99ea1c454cca40ee24e64998b880fdf1
| 4,426
|
py
|
Python
|
tests/issues/test_issue.py
|
mubashshirjamal/code
|
d9c7adf7efed8e9c1ab3ff8cdeb94e7eb1a45382
|
[
"BSD-3-Clause"
] | 1,582
|
2015-01-05T02:41:44.000Z
|
2022-03-30T20:03:22.000Z
|
tests/issues/test_issue.py
|
mubashshirjamal/code
|
d9c7adf7efed8e9c1ab3ff8cdeb94e7eb1a45382
|
[
"BSD-3-Clause"
] | 66
|
2015-01-23T07:58:04.000Z
|
2021-11-12T02:23:27.000Z
|
tests/issues/test_issue.py
|
mubashshirjamal/code
|
d9c7adf7efed8e9c1ab3ff8cdeb94e7eb1a45382
|
[
"BSD-3-Clause"
] | 347
|
2015-01-05T07:47:07.000Z
|
2021-09-20T21:22:32.000Z
|
# encoding: UTF-8
from tests.base import TestCase
from vilya.models.issue import Issue
from vilya.models.issue_comment import IssueComment
class TestIssue(TestCase):
    """Tests for `Issue` and `IssueComment` create/read/update/close behaviour."""

    def test_add_issue(self):
        # A newly added issue persists every constructor field verbatim.
        i = Issue.add('test', 'test description', 'test', 'assignee')
        assert isinstance(i, Issue)
        assert i.title == 'test'
        assert i.description == 'test description'
        assert i.creator_id == 'test'
        assert i.assignee_id == 'assignee'

    def test_update_issue(self):
        i = Issue.add('test', 'test description', 'test', 'assignee')
        i.update("test1", "test1 description")
        # Re-fetch to confirm the update was persisted, not just local state.
        i = Issue.get(i.id)
        assert i.title == 'test1'
        assert i.description == 'test1 description'
        assert i.creator_id == 'test'
        assert i.assignee_id == 'assignee'
        assert i.closer_id is None

    def test_close_issue(self):
        i = Issue.add('test', 'test description', 'test', 'assignee')
        i.close("test")
        # Closing records the closer while leaving the other fields untouched.
        i = Issue.get(i.id)
        assert i.title == 'test'
        assert i.description == 'test description'
        assert i.creator_id == 'test'
        assert i.closer_id == "test"
        assert i.assignee_id == 'assignee'

    def test_get_issue(self):
        issue1 = Issue.add('test1', 'test1 description', 'test', 'assignee')
        issue2 = Issue.add('test2', 'test2 description', 'test', 'assignee')
        issue2.close("test")
        i1 = Issue.get(issue1.id)
        assert isinstance(i1, Issue)
        assert i1.title == 'test1'
        assert i1.description == 'test1 description'
        assert i1.creator_id == 'test'
        assert i1.assignee_id == 'assignee'
        assert i1.closer_id is None
        i2 = Issue.get(issue2.id)
        assert isinstance(i2, Issue)
        assert i2.title == 'test2'
        assert i2.description == 'test2 description'
        assert i2.creator_id == 'test'
        assert i2.assignee_id == 'assignee'
        assert i2.closer_id == 'test'
        # Fetching issue1 again confirms issue2's close did not leak onto it.
        i1 = Issue.get(issue1.id)
        assert isinstance(i1, Issue)
        assert i1.title == 'test1'
        assert i1.description == 'test1 description'
        assert i1.creator_id == 'test'
        assert i1.assignee_id == 'assignee'
        assert i1.closer_id is None
        # NOTE(review): the expected counts below (4/2/6) exceed what this
        # method alone creates -- they appear to assume state accumulated from
        # earlier tests in this class; confirm how the TestCase base resets the
        # store between tests.
        iss = Issue.gets_by_creator_id("test")
        assert all([isinstance(i, Issue) for i in iss])
        assert len(iss) == 4
        iss = Issue.gets_by_creator_id("test", "open")
        assert all([isinstance(i, Issue) for i in iss])
        assert len(iss) == 4
        iss = Issue.gets_by_creator_id("test", "closed")
        assert all([isinstance(i, Issue) for i in iss])
        assert len(iss) == 2
        iss = Issue.gets_by_assignee_id("assignee")
        assert all([isinstance(i, Issue) for i in iss])
        assert len(iss) == 6
        iss = Issue.gets_by_assignee_id("assignee", "open")
        assert all([isinstance(i, Issue) for i in iss])
        assert len(iss) == 4
        iss = Issue.gets_by_assignee_id("assignee", "closed")
        assert all([isinstance(i, Issue) for i in iss])
        assert len(iss) == 2
        iss = Issue.gets_by_closer_id("test")
        assert all([isinstance(i, Issue) for i in iss])
        assert len(iss) == 2

    def test_add_comment(self):
        i = Issue.add('test', 'test description', 'test', 'assignee')
        c = IssueComment.add(i.id, 'content', 'test')
        assert isinstance(c, IssueComment)
        assert c.issue_id == i.id
        assert c.content == 'content'
        assert c.author_id == 'test'

    def test_get_comment(self):
        i = Issue.add('test', 'test description', 'test', 'assignee')
        c = IssueComment.add(i.id, 'content', 'test')
        c = IssueComment.get(c.id)
        assert isinstance(c, IssueComment)
        assert c.issue_id == i.id
        assert c.content == 'content'
        assert c.author_id == 'test'
        # A second comment on the same issue makes the per-issue listing 2 long.
        c = IssueComment.add(i.id, 'content', 'test')
        cs = IssueComment.gets_by_issue_id(i.id)
        assert all([isinstance(t, IssueComment) for t in cs])
        assert len(cs) == 2

    def test_update_comment(self):
        i = Issue.add('test', 'test description', 'test', 'assignee')
        c = IssueComment.add(i.id, 'content', 'test')
        c.update('content1')
        # Re-fetch to confirm the content change was persisted.
        c = IssueComment.get(c.id)
        assert c.issue_id == i.id
        assert c.content == 'content1'
        assert c.author_id == 'test'
| 34.850394
| 76
| 0.593538
| 576
| 4,426
| 4.453125
| 0.098958
| 0.037427
| 0.045614
| 0.051852
| 0.734893
| 0.721248
| 0.706823
| 0.660429
| 0.646394
| 0.616764
| 0
| 0.016144
| 0.272255
| 4,426
| 126
| 77
| 35.126984
| 0.780192
| 0.003389
| 0
| 0.598039
| 0
| 0
| 0.146065
| 0
| 0
| 0
| 0
| 0
| 0.588235
| 1
| 0.068627
| false
| 0
| 0.029412
| 0
| 0.107843
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
6fb12dcb199badef8fd62e3f2a07d596e18d25ce
| 339
|
py
|
Python
|
users/views.py
|
riggedCoinflip/mydjango
|
9f30effc0dccd95916f59a3b65d7e02bdd2827b5
|
[
"MIT"
] | null | null | null |
users/views.py
|
riggedCoinflip/mydjango
|
9f30effc0dccd95916f59a3b65d7e02bdd2827b5
|
[
"MIT"
] | 1
|
2021-02-26T02:13:35.000Z
|
2021-02-26T02:13:35.000Z
|
users/views.py
|
riggedCoinflip/mydjango
|
9f30effc0dccd95916f59a3b65d7e02bdd2827b5
|
[
"MIT"
] | null | null | null |
from django.shortcuts import get_object_or_404
from django.views import generic
from .models import User
class UserView(generic.DetailView):
    """Detail page for a single `User`, looked up by username."""

    template_name = 'users/user.html'
    model = User

    def get_object(self, queryset=None):
        """Return the user named in the ``username`` URL kwarg, or 404."""
        requested_username = self.kwargs['username']
        return get_object_or_404(User, username=requested_username)
# TODO passwordResetForm
| 24.214286
| 72
| 0.761062
| 46
| 339
| 5.434783
| 0.630435
| 0.108
| 0.088
| 0.112
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020979
| 0.156342
| 339
| 14
| 73
| 24.214286
| 0.853147
| 0.064897
| 0
| 0
| 0
| 0
| 0.072785
| 0
| 0
| 0
| 0
| 0.071429
| 0
| 1
| 0.125
| false
| 0
| 0.375
| 0.125
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
d200b5abad2f5d2d4884a423bd1145df06bca377
| 763
|
py
|
Python
|
rasa/shared/utils/cli.py
|
chaneyjd/rasa
|
104a9591fc10b96eaa7fe402b6d64ca652b7ebe2
|
[
"Apache-2.0"
] | 1
|
2020-10-14T18:09:10.000Z
|
2020-10-14T18:09:10.000Z
|
rasa/shared/utils/cli.py
|
chaneyjd/rasa
|
104a9591fc10b96eaa7fe402b6d64ca652b7ebe2
|
[
"Apache-2.0"
] | 187
|
2020-02-25T16:07:06.000Z
|
2022-03-01T13:42:41.000Z
|
rasa/shared/utils/cli.py
|
chaneyjd/rasa
|
104a9591fc10b96eaa7fe402b6d64ca652b7ebe2
|
[
"Apache-2.0"
] | null | null | null |
import sys
from typing import Any, Text, NoReturn
import rasa.shared.utils.io
def print_color(*args: Any, color: Text) -> None:
    """Print `args` to stdout wrapped in the given terminal color code."""
    print(rasa.shared.utils.io.wrap_with_color(*args, color=color))
def print_success(*args: Any) -> None:
    """Print `args` using the OKGREEN color code (success messages)."""
    print_color(*args, color=rasa.shared.utils.io.bcolors.OKGREEN)
def print_info(*args: Any) -> None:
    """Print `args` using the OKBLUE color code (informational messages)."""
    print_color(*args, color=rasa.shared.utils.io.bcolors.OKBLUE)
def print_warning(*args: Any) -> None:
    """Print `args` using the WARNING color code."""
    print_color(*args, color=rasa.shared.utils.io.bcolors.WARNING)
def print_error(*args: Any) -> None:
    """Print `args` using the FAIL color code (error messages)."""
    print_color(*args, color=rasa.shared.utils.io.bcolors.FAIL)
def print_error_and_exit(message: Text, exit_code: int = 1) -> NoReturn:
    """Print error message and exit the application.

    :param message: error message to print
    :param exit_code: process exit status (defaults to 1, i.e. failure)
    """
    print_error(message)
    sys.exit(exit_code)
| 23.84375
| 72
| 0.731324
| 117
| 763
| 4.623932
| 0.282051
| 0.110906
| 0.166359
| 0.18854
| 0.369686
| 0.369686
| 0.369686
| 0.369686
| 0.369686
| 0.369686
| 0
| 0.001511
| 0.132372
| 763
| 31
| 73
| 24.612903
| 0.81571
| 0.058978
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0
| 0.1875
| 0
| 0.5625
| 0.75
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
|
0
| 4
|
d203d60326cd778064c2b58fdfad02866e7047a3
| 404
|
py
|
Python
|
app/core/thermometer/models.py
|
FHellmann/MLWTF
|
582c3505d638907a848d5a6c739ee99981300f17
|
[
"Apache-2.0"
] | null | null | null |
app/core/thermometer/models.py
|
FHellmann/MLWTF
|
582c3505d638907a848d5a6c739ee99981300f17
|
[
"Apache-2.0"
] | null | null | null |
app/core/thermometer/models.py
|
FHellmann/MLWTF
|
582c3505d638907a848d5a6c739ee99981300f17
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
"""
Author: Fabio Hellmann <info@fabio-hellmann.de>
"""
from attr import s, ib
from attr.validators import instance_of
from datetime import datetime
@s(frozen=True)
class ThermometerEntry:
    """Immutable attrs record of a single thermometer reading."""
    # When the reading was taken.
    timestamp = ib(validator=instance_of(datetime), type=datetime)
    # NOTE(review): units (Celsius vs Fahrenheit, relative humidity %) are not
    # established by this module -- confirm against the sensor driver.
    temperature = ib(validator=instance_of(float), type=float)
    humidity = ib(validator=instance_of(float), type=float)
| 25.25
| 66
| 0.747525
| 54
| 404
| 5.518519
| 0.5
| 0.134228
| 0.191275
| 0.211409
| 0.234899
| 0.234899
| 0.234899
| 0
| 0
| 0
| 0
| 0
| 0.133663
| 404
| 15
| 67
| 26.933333
| 0.851429
| 0.158416
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.375
| 0
| 0.875
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
d20761dd5c9f1d0596917a6693751e8965e19d6e
| 115
|
py
|
Python
|
02_templates_advanced/templates_advanced-recources/templates_advanced/templates_advanced/pythons_app/apps.py
|
BoyanPeychinov/python_web_framework
|
bb3a78c36790821d8b3a2b847494a1138d063193
|
[
"MIT"
] | null | null | null |
02_templates_advanced/templates_advanced-recources/templates_advanced/templates_advanced/pythons_app/apps.py
|
BoyanPeychinov/python_web_framework
|
bb3a78c36790821d8b3a2b847494a1138d063193
|
[
"MIT"
] | null | null | null |
02_templates_advanced/templates_advanced-recources/templates_advanced/templates_advanced/pythons_app/apps.py
|
BoyanPeychinov/python_web_framework
|
bb3a78c36790821d8b3a2b847494a1138d063193
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class PythonsAppConfig(AppConfig):
    """Django application configuration for ``pythons_app``."""
    name = 'templates_advanced.pythons_app'
| 19.166667
| 43
| 0.8
| 13
| 115
| 6.923077
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 115
| 5
| 44
| 23
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0.26087
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
d21fcd966e7bf85f4693c4d656c53dc2bbd11651
| 22
|
py
|
Python
|
rayvision_api/tests/__init__.py
|
renderbus/rayvision_api
|
e9b0f0ad0e4c9c2c3d5ec6b2ce7f80a7c2cb0f42
|
[
"Apache-2.0"
] | 5
|
2019-11-15T10:51:03.000Z
|
2022-02-15T00:22:25.000Z
|
rayvision_api/tests/__init__.py
|
foxrenderfarm/rayvision_api
|
a41b8e03b9028fa7cd60f5ac5fbd0d33dbf19dea
|
[
"Apache-2.0"
] | 2
|
2020-04-16T08:03:03.000Z
|
2021-12-01T03:46:31.000Z
|
rayvision_api/tests/__init__.py
|
foxrenderfarm/rayvision_api
|
a41b8e03b9028fa7cd60f5ac5fbd0d33dbf19dea
|
[
"Apache-2.0"
] | 2
|
2020-01-02T10:37:21.000Z
|
2020-05-10T07:44:21.000Z
|
"""A test package."""
| 11
| 21
| 0.545455
| 3
| 22
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 22
| 1
| 22
| 22
| 0.631579
| 0.681818
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
d232d40a2f892cbeeb7b0eb4647e339870d56efa
| 89
|
py
|
Python
|
Website/AmazeSafe/AppHome/apps.py
|
jankit18/AmazeSafe
|
92b95220efc72a9375f8845d4c40a3647abddf7b
|
[
"MIT"
] | 1
|
2021-05-30T18:14:08.000Z
|
2021-05-30T18:14:08.000Z
|
Website/AmazeSafe/AppHome/apps.py
|
jankit18/HackOn-AmazeSafe
|
92b95220efc72a9375f8845d4c40a3647abddf7b
|
[
"MIT"
] | null | null | null |
Website/AmazeSafe/AppHome/apps.py
|
jankit18/HackOn-AmazeSafe
|
92b95220efc72a9375f8845d4c40a3647abddf7b
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class ApphomeConfig(AppConfig):
    """Django application configuration for ``AppHome``."""
    name = 'AppHome'
| 14.833333
| 33
| 0.752809
| 10
| 89
| 6.7
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168539
| 89
| 5
| 34
| 17.8
| 0.905405
| 0
| 0
| 0
| 0
| 0
| 0.078652
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
d2355a815facde0a8a0ded435a5651df455fbd72
| 273
|
py
|
Python
|
pyrolite/util/skl/__init__.py
|
JustinGOSSES/pyrolite
|
21eb5b28d9295625241b73b820fc8892b00fc6b0
|
[
"BSD-3-Clause"
] | 1
|
2020-03-13T07:11:47.000Z
|
2020-03-13T07:11:47.000Z
|
pyrolite/util/skl/__init__.py
|
JustinGOSSES/pyrolite
|
21eb5b28d9295625241b73b820fc8892b00fc6b0
|
[
"BSD-3-Clause"
] | null | null | null |
pyrolite/util/skl/__init__.py
|
JustinGOSSES/pyrolite
|
21eb5b28d9295625241b73b820fc8892b00fc6b0
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Utilities for use with scikit-learn.
"""
import logging

# Attach a do-nothing handler to this package's logger so importing it never
# triggers "No handler could be found" warnings; the embedding application
# decides how log records are actually emitted.
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
from .pipeline import *
from .vis import *
from .select import *
from .transform import *
from .impute import *
| 19.5
| 61
| 0.758242
| 33
| 273
| 6.030303
| 0.575758
| 0.201005
| 0.201005
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 273
| 13
| 62
| 21
| 0.836134
| 0.131868
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
d235f5872cca7ad61a4ff202d40d6a25085d79d3
| 736
|
py
|
Python
|
rlkit/torch/irl/encoders/aggregators.py
|
yifan-you-37/rl_swiss
|
8b0ee7caa5c1fa93860916004cf4fd970667764f
|
[
"MIT"
] | 56
|
2019-10-20T03:09:02.000Z
|
2022-03-25T09:21:40.000Z
|
rlkit/torch/irl/encoders/aggregators.py
|
yifan-you-37/rl_swiss
|
8b0ee7caa5c1fa93860916004cf4fd970667764f
|
[
"MIT"
] | 3
|
2020-10-01T07:33:51.000Z
|
2021-05-12T03:40:57.000Z
|
rlkit/torch/irl/encoders/aggregators.py
|
yifan-you-37/rl_swiss
|
8b0ee7caa5c1fa93860916004cf4fd970667764f
|
[
"MIT"
] | 10
|
2019-11-04T16:56:09.000Z
|
2022-03-25T09:21:41.000Z
|
'''
the r_tensor is N_tasks x N_samples x dim
the mask is N_tasks x N_samples x 1
'''
import torch
from torch.nn.functional import tanh
def sum_aggregator(r_tensor, mask):
    """Sum the masked per-sample representations over the samples axis.

    r_tensor is N_tasks x N_samples x dim; mask is N_tasks x N_samples x 1.
    Returns an N_tasks x dim tensor.
    """
    masked = r_tensor * mask
    return masked.sum(dim=1)
def mean_aggregator(r_tensor, mask):
    """Average the masked per-sample representations over the samples axis.

    The divisor is the per-task count of unmasked samples (mask sums),
    so padded samples do not dilute the mean.
    """
    counts = mask.sum(dim=1)
    masked_total = (r_tensor * mask).sum(dim=1)
    return masked_total / counts
def tanh_sum_aggregator(r_tensor, mask):
    """Sum the masked representations over the samples axis, then squash with tanh.

    Uses torch.tanh: torch.nn.functional.tanh is a deprecated alias of it.
    """
    return torch.tanh(torch.sum(r_tensor * mask, 1))
def sum_aggregator_unmasked(r_tensor):
    """Sum all per-sample representations over the samples axis (no mask)."""
    return r_tensor.sum(dim=1)
def mean_aggregator_unmasked(r_tensor):
    """Average all per-sample representations over the samples axis (no mask)."""
    n_samples = r_tensor.size(1)
    return r_tensor.sum(dim=1) / n_samples
def tanh_sum_aggregator_unmasked(r_tensor):
    """Sum all representations over the samples axis, then squash with tanh.

    Uses torch.tanh: torch.nn.functional.tanh is a deprecated alias of it.
    """
    return torch.tanh(torch.sum(r_tensor, 1))
| 22.30303
| 55
| 0.740489
| 132
| 736
| 3.818182
| 0.204545
| 0.194444
| 0.130952
| 0.178571
| 0.666667
| 0.664683
| 0.35119
| 0.126984
| 0.126984
| 0
| 0
| 0.01461
| 0.163043
| 736
| 32
| 56
| 23
| 0.803571
| 0.10462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0
| 0.125
| 0.25
| 0.875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
d26db0687e51360b0440cabdbe8201ef36bd05db
| 197
|
py
|
Python
|
datahub/company_referral/apps.py
|
Staberinde/data-hub-api
|
3d0467dbceaf62a47158eea412a3dba827073300
|
[
"MIT"
] | 6
|
2019-12-02T16:11:24.000Z
|
2022-03-18T10:02:02.000Z
|
datahub/company_referral/apps.py
|
Staberinde/data-hub-api
|
3d0467dbceaf62a47158eea412a3dba827073300
|
[
"MIT"
] | 1,696
|
2019-10-31T14:08:37.000Z
|
2022-03-29T12:35:57.000Z
|
datahub/company_referral/apps.py
|
Staberinde/data-hub-api
|
3d0467dbceaf62a47158eea412a3dba827073300
|
[
"MIT"
] | 9
|
2019-11-22T12:42:03.000Z
|
2021-09-03T14:25:05.000Z
|
from django.apps import AppConfig
class CompanyReferralConfig(AppConfig):
    """Django app configuration for the company-referral app."""

    # Dotted module path Django uses to locate this app.
    name = 'datahub.company_referral'
    # Human-readable name (shown e.g. in the admin).
    verbose_name = 'Company referral'
| 21.888889
| 43
| 0.741117
| 21
| 197
| 6.857143
| 0.761905
| 0.208333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.167513
| 197
| 8
| 44
| 24.625
| 0.878049
| 0.167513
| 0
| 0
| 0
| 0
| 0.253165
| 0.151899
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
9632e5d6abbcb9e5ea2787f0df804fe93c9675b2
| 179
|
py
|
Python
|
examples/script.py
|
jack1142/zxpy
|
fb527cc69169dc884e48bf194c77972a54d1123f
|
[
"MIT"
] | 418
|
2021-05-08T11:46:29.000Z
|
2022-03-31T07:28:37.000Z
|
examples/script.py
|
jack1142/zxpy
|
fb527cc69169dc884e48bf194c77972a54d1123f
|
[
"MIT"
] | 31
|
2021-05-10T07:58:57.000Z
|
2022-03-07T20:05:32.000Z
|
examples/script.py
|
jack1142/zxpy
|
fb527cc69169dc884e48bf194c77972a54d1123f
|
[
"MIT"
] | 11
|
2021-05-12T12:20:55.000Z
|
2022-03-17T22:02:34.000Z
|
#! /usr/bin/env zxpy
# NOTE: this is a zxpy script, not plain Python. zxpy rewrites the unary `~`
# applied to a string literal so the string runs as a shell command and its
# captured stdout is returned.
~'echo Hello world!'
def print_file_count():
    # `ls -1 | wc -l` counts the entries in the current directory.
    file_count = ~'ls -1 | wc -l'
    # echo -n: no trailing newline, so the count prints on the same line.
    ~"echo -n 'file count is: '"
    print(file_count)
print_file_count()
| 13.769231
| 33
| 0.608939
| 28
| 179
| 3.678571
| 0.607143
| 0.436893
| 0.407767
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007194
| 0.223464
| 179
| 12
| 34
| 14.916667
| 0.733813
| 0.106145
| 0
| 0
| 0
| 0
| 0.345912
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0
| 0
| 0.166667
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
9643216c8a9249fe50dc3e86c952e0e448650576
| 102
|
py
|
Python
|
tradearn/questions/apps.py
|
didils/tradmarkearn
|
a2a49314639a419db1b7b414319745c81fcc26e1
|
[
"MIT"
] | null | null | null |
tradearn/questions/apps.py
|
didils/tradmarkearn
|
a2a49314639a419db1b7b414319745c81fcc26e1
|
[
"MIT"
] | null | null | null |
tradearn/questions/apps.py
|
didils/tradmarkearn
|
a2a49314639a419db1b7b414319745c81fcc26e1
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class QuestionsConfig(AppConfig):
    """Django app configuration for the questions app."""

    # Dotted module path Django uses to locate this app.
    name = 'tradearn.questions'
| 17
| 33
| 0.77451
| 11
| 102
| 7.181818
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147059
| 102
| 5
| 34
| 20.4
| 0.908046
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
96660186e772461dc562759f49df5fce53174b64
| 203
|
py
|
Python
|
device_finder.py
|
Slathian/Virtual_Assistant
|
9cae28d0ace22332d4f68d4133e839ba3a3834e0
|
[
"MIT"
] | null | null | null |
device_finder.py
|
Slathian/Virtual_Assistant
|
9cae28d0ace22332d4f68d4133e839ba3a3834e0
|
[
"MIT"
] | null | null | null |
device_finder.py
|
Slathian/Virtual_Assistant
|
9cae28d0ace22332d4f68d4133e839ba3a3834e0
|
[
"MIT"
] | null | null | null |
import speech_recognition as sr
# List every microphone the speech_recognition backend can see, with the
# device index needed to select it via Microphone(device_index=...).
for index, name in enumerate(sr.Microphone.list_microphone_names()):
    message = "Microphone with name \"{1}\" found for `Microphone(device_index={0})`".format(index, name)
    print(message)
| 67.666667
| 102
| 0.758621
| 29
| 203
| 5.172414
| 0.689655
| 0.12
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010929
| 0.098522
| 203
| 3
| 102
| 67.666667
| 0.808743
| 0
| 0
| 0
| 0
| 0
| 0.308824
| 0.147059
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
967e112086f981ce610c67002072accb4f25f01e
| 156,160
|
py
|
Python
|
spytest/apis/routing/bgp.py
|
macikgozwa/sonic-mgmt
|
86338be8b2e55fd03d4913037d0e641e443762b0
|
[
"Apache-2.0"
] | null | null | null |
spytest/apis/routing/bgp.py
|
macikgozwa/sonic-mgmt
|
86338be8b2e55fd03d4913037d0e641e443762b0
|
[
"Apache-2.0"
] | 1
|
2021-02-24T13:48:41.000Z
|
2021-02-24T13:48:41.000Z
|
spytest/apis/routing/bgp.py
|
macikgozwa/sonic-mgmt
|
86338be8b2e55fd03d4913037d0e641e443762b0
|
[
"Apache-2.0"
] | null | null | null |
# This file contains the list of API's which performs BGP operations.
# Author : Chaitanya Vella (Chaitanya-vella.kumar@broadcom.com)
import re
import json
from spytest import st, putils
import apis.system.reboot as reboot
from apis.system.rest import config_rest, delete_rest
from utilities.utils import fail_on_error, get_interface_number_from_name, is_valid_ip_address
from utilities.common import filter_and_select
def get_forced_cli_type(cmd_type):
    """Return the UI type forced via SPYTEST_BGP_* environment variables.

    Falls back to the generic SPYTEST_BGP_API_UITYPE value when no
    command-specific override exists for *cmd_type* ("show" or "config").
    """
    default = st.getenv("SPYTEST_BGP_API_UITYPE", "")
    env_by_cmd = {
        "show": "SPYTEST_BGP_SHOW_API_UITYPE",
        "config": "SPYTEST_BGP_CFG_API_UITYPE",
    }
    env_name = env_by_cmd.get(cmd_type)
    return st.getenv(env_name, default) if env_name else default
def get_cfg_cli_type(dut, **kwargs):
    """Resolve the CLI type to use for BGP config commands on *dut*."""
    ui = get_forced_cli_type("config") or st.get_ui_type(dut, **kwargs)
    # click and vtysh are both driven through vtysh; everything else via klish.
    return "vtysh" if ui in ("click", "vtysh") else "klish"
def get_show_cli_type(dut, **kwargs):
    """Resolve the CLI type to use for BGP show commands on *dut*."""
    ui = get_forced_cli_type("show") or st.get_ui_type(dut, **kwargs)
    # click and vtysh are both driven through vtysh; everything else via klish.
    return "vtysh" if ui in ("click", "vtysh") else "klish"
def enable_docker_routing_config_mode(dut, **kwargs):
    """
    Set docker_routing_config_mode to "split" in DEVICE_METADATA and save config.

    :param dut: device under test
    :return: None
    """
    cli_type = get_cfg_cli_type(dut, **kwargs)
    if cli_type in ["click", "vtysh"]:
        data = {"DEVICE_METADATA": {"localhost": {"docker_routing_config_mode": "split"}}}
        split_config = json.dumps(data)
        # round-trip sanity check that the payload is valid JSON before applying
        json.loads(split_config)
        st.apply_json(dut, split_config)
        reboot.config_save(dut)
    elif cli_type == 'klish':
        # intentionally a no-op for klish
        pass
def enable_router_bgp_mode(dut, **kwargs):
    """
    Enter "router bgp" mode on the DUT, optionally with ASN, VRF and router-id.

    :param dut: device under test
    :param local_asn: optional local AS number (kwargs)
    :param vrf_name: optional VRF name; ignored when 'default-vrf' (kwargs)
    :param router_id: optional BGP router-id (kwargs)
    :return: True on success, False for an unsupported CLI type
    """
    cli_type = get_cfg_cli_type(dut, **kwargs)
    if cli_type in ['vtysh', 'click']:
        cli_type ='vtysh'
        st.log("Enabling router BGP mode ..")
        if 'local_asn' in kwargs:
            command = "router bgp {}".format(kwargs['local_asn'])
        else:
            command = "router bgp"
        if 'vrf_name' in kwargs and kwargs['vrf_name'] != 'default-vrf':
            command += ' vrf ' + kwargs['vrf_name']
        if 'router_id' in kwargs:
            command += '\n bgp router-id {}'.format(kwargs['router_id'])
    elif cli_type == 'klish':
        st.log("Enabling router BGP mode ..")
        # NOTE(review): this branch only assigns `command` when 'local_asn'
        # is passed — 'command' is unbound otherwise; confirm callers always
        # supply local_asn for klish.
        if 'local_asn' in kwargs:
            command = "router bgp {}".format(kwargs['local_asn'])
        if 'vrf_name' in kwargs and kwargs['vrf_name'] != 'default-vrf':
            command += ' vrf ' + kwargs['vrf_name']
        if 'router_id' in kwargs:
            command += ' router-id {}'.format(kwargs['router_id'])
    else:
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
    st.config(dut, command, type=cli_type)
    return True
def config_router_bgp_mode(dut, local_asn, config_mode='enable', vrf='default', cli_type="", skip_error_check=True):
    """
    Enter (or remove, when config_mode='disable') the "router bgp" instance.

    :param dut: device under test
    :param local_asn: local AS number
    :param config_mode: 'enable' to create, 'disable' to prefix with "no"
    :param vrf: VRF name; 'default' means the global instance
    :return: True on success, False for an unsupported CLI type
    """
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    st.log("Config router BGP mode .. {}".format(config_mode))
    mode = "no" if config_mode.lower() == 'disable' else ""
    if cli_type in ['vtysh', 'click']:
        cli_type = 'vtysh'
        if vrf.lower() == 'default':
            command = "{} router bgp {}".format(mode, local_asn)
        else:
            command = "{} router bgp {} vrf {}".format(mode, local_asn, vrf)
    elif cli_type == 'klish':
        # klish "no router bgp" takes no ASN, hence the separate forms.
        if vrf.lower() == 'default':
            if not mode:
                command = "router bgp {}".format(local_asn)
            else:
                command = "no router bgp"
        else:
            if not mode:
                command = "router bgp {} vrf {}".format(local_asn, vrf)
            else:
                command = "no router bgp vrf {}".format(vrf)
    else:
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
    st.config(dut, command, type=cli_type, skip_error_check=skip_error_check)
    return True
def unconfig_router_bgp(dut, **kwargs):
    """
    Remove the router bgp instance (optionally a specific ASN/VRF instance).

    :param dut: device under test
    :param local_asn: optional AS number (kwargs, vtysh path only)
    :param vrf_name: optional VRF name (kwargs)
    :return: True on success, False for an unsupported CLI type
    """
    st.log("Unconfiguring Bgp in {}".format(dut))
    cli_type = get_cfg_cli_type(dut, **kwargs)
    if cli_type in ['vtysh', 'click']:
        cli_type = 'vtysh'
        command = "no router bgp"
        # vtysh needs both ASN and VRF to target a VRF instance
        if 'vrf_name' in kwargs and 'local_asn' in kwargs:
            command += ' ' + kwargs['local_asn'] + ' vrf ' + kwargs['vrf_name']
    elif cli_type == 'klish':
        if kwargs.get("vrf_name"):
            command = "no router bgp vrf {}".format(kwargs.get("vrf_name"))
        else:
            command = "no router bgp"
    else:
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
    st.config(dut, command, type=cli_type)
    return True
def cleanup_router_bgp(dut_list, cli_type="", skip_error_check=True):
    """
    Remove the router bgp configuration from each DUT in *dut_list*.

    :param dut_list: a single DUT or a list of DUTs
    :return: True when all DUTs were cleaned; False on the first DUT with an
             unsupported CLI type (remaining DUTs are then skipped)
    """
    dut_li = list(dut_list) if isinstance(dut_list, list) else [dut_list]
    for dut in dut_li:
        cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
        if cli_type in ["vtysh", "klish"]:
            st.log("Cleanup BGP mode ..")
            command = "no router bgp"
            st.config(dut, command, type=cli_type, skip_error_check=skip_error_check)
        else:
            st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
            return False
    return True
def _cleanup_bgp_config(dut_list, cli_type=""):
    """
    Parse "show running bgp" and issue "no router bgp ..." for every instance.

    VRF instances are removed first (inserted at the front of the list) so
    they are deleted before the default instance.

    :param dut_list: a single DUT or a list of DUTs
    :return: True
    """
    dut_li = list(dut_list) if isinstance(dut_list, list) else [dut_list]
    for dut in dut_li:
        command = "show running bgp"
        output = st.show(dut, command, type="vtysh", skip_error_check=True)
        st.log("Cleanup BGP configuration on %s.." % dut)
        config = output.splitlines()
        line = 0
        count = len(config)
        bgp_inst = []
        cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
        while line < count:
            _str = config[line]
            if re.match(r'router bgp .*', _str, re.IGNORECASE):
                if cli_type =="klish":
                    # klish "no router bgp" takes no ASN: strip numeric tokens
                    _newstr =' '.join([i for i in _str.split(" ") if not i.isdigit()])
                    if "vrf" in _str:
                        bgp_inst.insert(0, _newstr)
                    else:
                        bgp_inst.append(_newstr)
                else:
                    if "vrf" in _str:
                        bgp_inst.insert(0, _str)
                    else:
                        bgp_inst.append(_str)
                # skip the body of this instance up to its terminating "!"
                while config[line] != "!":
                    line += 1
            line += 1
        for inst in bgp_inst:
            st.config(dut, "no {}".format(inst), type=cli_type)
    return True
def cleanup_bgp_config(dut_list, cli_type="", thread=True):
    """Run _cleanup_bgp_config on every DUT, optionally in parallel threads.

    :param dut_list: a single DUT or a list of DUTs
    :param thread: run the per-DUT cleanups concurrently when True
    :return: True when no DUT reported failure
    """
    duts = list(dut_list) if isinstance(dut_list, list) else [dut_list]
    out, _ = putils.exec_foreach(thread, duts, _cleanup_bgp_config, cli_type=cli_type)
    # Overall success only when no per-DUT call returned False.
    return False not in out
def config_bgp_router(dut, local_asn, router_id='', keep_alive=60, hold=180, config='yes',**kwargs):
    """
    Configure (config='yes') or remove (config='no') router-id and BGP timers.

    :param dut: device under test
    :param local_asn: local AS number
    :param router_id: BGP router-id to set/unset (skipped when empty)
    :param keep_alive: keepalive timer, seconds
    :param hold: hold timer, seconds
    :return: True on success, False for an unsupported CLI type
    """
    cli_type = get_cfg_cli_type(dut, **kwargs)
    command = "router bgp {}\n".format(local_asn)
    if cli_type == 'vtysh':
        if config == 'yes':
            if router_id:
                command += "\n bgp router-id {}".format(router_id)
            if keep_alive and hold:
                command += "\n timers bgp {} {}".format(keep_alive, hold)
        if config == 'no' and keep_alive:
            command += "\n no timers bgp\n"
        if config == 'no' and router_id:
            command += "\n no bgp router-id {}".format(router_id)
    elif cli_type == 'klish':
        if config == 'yes':
            if router_id:
                command += "router-id {}\n".format(router_id)
            if keep_alive and hold:
                command += "timers {} {}\n".format(keep_alive, hold)
        if config == 'no' and keep_alive:
            command += "no timers {} {}\n".format(keep_alive, hold)
        if config == 'no' and router_id:
            command += "no router-id \n"
        command += "exit"
    else:
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
    # klish expects one command per list element; vtysh takes the raw string
    st.config(dut, command.split("\n") if cli_type == 'klish' else command, type=cli_type)
    return True
def create_bgp_router(dut, local_asn, router_id='', keep_alive=60, hold=180, cli_type=""):
    """
    Create a BGP router instance and set its router-id and timers.

    :param dut: device under test
    :param local_asn: local AS number
    :param router_id: optional BGP router-id
    :param keep_alive: keepalive timer, seconds
    :param hold: hold timer, seconds
    :return: True on success, False for an unsupported CLI type
    """
    st.log("Creating BGP router ..")
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    if cli_type == 'vtysh':
        command = ""
        config_router_bgp_mode(dut, local_asn, cli_type=cli_type)
        # Add validation for IPV4 address
        if router_id:
            command = "bgp router-id {}\n".format(router_id)
        command += "timers bgp {} {}\n".format(keep_alive, hold)
    elif cli_type == 'klish':
        command = list()
        config_router_bgp_mode(dut, local_asn, cli_type=cli_type)
        # Add validation for IPV4 address
        if router_id:
            command.append("router-id {}".format(router_id))
        command.append("timers {} {}".format(keep_alive, hold))
        command.append("exit")
    else:
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
    st.config(dut, command, type=cli_type)
    return True
def create_bgp_neighbor(dut, local_asn, neighbor_ip, remote_asn, keep_alive=60, hold=180, password=None, family="ipv4",vrf='default', cli_type=""):
    """
    Create a BGP neighbor, set its timers/password and activate its family.

    :param dut: device under test
    :param local_asn: local AS number
    :param neighbor_ip: neighbor address
    :param remote_asn: neighbor's AS number
    :param keep_alive: keepalive timer, seconds
    :param hold: hold timer, seconds
    :param password: optional session password
    :param family: 'ipv4' or 'ipv6' address family to activate
    :return: True on success, False for an unsupported CLI type
    """
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    st.log("Creating BGP neighbor ..")
    config_router_bgp_mode(dut, local_asn, vrf=vrf, cli_type=cli_type)
    if cli_type == 'vtysh':
        command = "neighbor {} remote-as {}".format(neighbor_ip, remote_asn)
        st.config(dut, command, type='vtysh')
        command = "neighbor {} timers {} {}".format(neighbor_ip, keep_alive, hold)
        st.config(dut, command, type='vtysh')
        if password:
            command = " neighbor {} password {}".format(neighbor_ip, password)
            st.config(dut, command, type='vtysh')
        # Gather the IP type using the validation result
        # ipv6 = False
        if family == "ipv6":
            command = "address-family ipv6 unicast"
            st.config(dut, command, type='vtysh')
            command = "neighbor {} activate".format(neighbor_ip)
            st.config(dut, command, type='vtysh')
        if family == "ipv4":
            command = "address-family ipv4 unicast"
            st.config(dut, command, type='vtysh')
            command = "neighbor {} activate".format(neighbor_ip)
            st.config(dut, command, type='vtysh')
    elif cli_type == 'klish':
        commands = list()
        commands.append("neighbor {}".format(neighbor_ip))
        commands.append("remote-as {}".format(remote_asn))
        commands.append("timers {} {}".format(keep_alive, hold))
        if password:
            commands.append("password {}\n".format(password))
        if family:
            commands.append("address-family {} unicast".format(family))
            commands.append("activate")
            commands.append("exit")
        commands.append("exit")
        commands.append("exit")
        st.config(dut, commands, type=cli_type)
    else:
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
    return True
def config_bgp_neighbor(dut, local_asn, neighbor_ip, remote_asn, family="ipv4", keep_alive=60, hold=180, config='yes', vrf='default', cli_type="", skip_error_check=True, connect_retry=120):
    """
    Add (config='yes') or remove (otherwise) a BGP neighbor with timers.

    :param dut: device under test
    :param local_asn: local AS number
    :param neighbor_ip: neighbor address
    :param remote_asn: neighbor's AS number
    :param keep_alive: keepalive timer, seconds
    :param hold: hold timer, seconds
    :param family: 'ipv4' or 'ipv6'; anything else returns False
    :param connect_retry: connect retry timer, seconds
    :return: True on success, False for a bad family or unsupported CLI type
    """
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    cfgmode = 'no' if config != 'yes' else ''
    if family !='ipv4' and family != 'ipv6':
        return False
    if cli_type=="vtysh":
        if vrf.lower() == 'default':
            command = "router bgp {}".format(local_asn)
        else:
            command = "router bgp {} vrf {}".format(local_asn, vrf)
        command += "\n {} neighbor {} remote-as {}".format(cfgmode, neighbor_ip, remote_asn)
        if config == 'yes' :
            command += "\n neighbor {} timers {} {}".format(neighbor_ip, keep_alive, hold)
            command += "\n neighbor {} timers connect {}".format(neighbor_ip, connect_retry)
            command += "\n address-family {} unicast".format(family)
            command += "\n neighbor {} activate".format(neighbor_ip)
        st.config(dut, command, type=cli_type)
        return True
    elif cli_type=="klish":
        commands = list()
        commands.append("router bgp {}".format(local_asn) if vrf.lower() == 'default' else "router bgp {} vrf {}".format(local_asn, vrf))
        commands.append("{} neighbor {}".format(cfgmode, neighbor_ip))
        if config == "yes":
            commands.append("remote-as {}".format(remote_asn))
            commands.append("timers {} {}".format(keep_alive, hold))
            commands.append("timers connect {}".format(connect_retry))
            commands.append("address-family {} unicast".format(family))
            commands.append("activate")
        commands.append("exit")
        commands.append("exit") #exit neighbor
        commands.append("exit") #exit router-bgp
        st.config(dut, commands, type=cli_type, skip_error_check=skip_error_check)
        return True
    else:
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
def config_bgp_neighbor_properties(dut, local_asn, neighbor_ip, family=None, mode=None, **kwargs):
    """
    Configure per-neighbor properties (password, timers, shutdown, activate,
    default-originate, maximum-prefix), optionally within an address family.

    :param dut: device under test
    :param local_asn: local AS number
    :param neighbor_ip: neighbor address or peer-group name (with peergroup)
    :param family: optional address family (used with *mode*)
    :param mode: optional address-family mode (e.g. unicast)
    :param kwargs: properties to apply; no_form='no' negates each command
    :return: True on success, False for an unsupported CLI type
    """
    st.log("Configuring the BGP neighbor properties ..")
    properties = kwargs
    peergroup = properties.get('peergroup', None)
    cli_type = get_cfg_cli_type(dut, **kwargs)
    skip_error_check = kwargs.get("skip_error_check", True)
    # Add validation for IPV4 / IPV6 address
    config_router_bgp_mode(dut, local_asn, cli_type=cli_type)
    no_form = "no" if "no_form" in properties and properties["no_form"] == "no" else ""
    if cli_type == "vtysh":
        if "password" in properties:
            command = "{} neighbor {} password {}".format(no_form, neighbor_ip, properties["password"]).strip()
            st.config(dut, command, type=cli_type)
        if "keep_alive" in properties and "hold_time" in properties:
            command = "{} neighbor {} timers {} {}".format(no_form, neighbor_ip, properties["keep_alive"],
                                                           properties["hold_time"])
            st.config(dut, command, type=cli_type)
        if "neighbor_shutdown" in properties:
            command = "{} neighbor {} shutdown".format(no_form, neighbor_ip)
            st.config(dut, command, type=cli_type)
        if family and mode:
            command = "address-family {} {}".format(family, mode)
            st.config(dut, command, type=cli_type)
        if "activate" in properties:
            if properties["activate"]:
                command = "{} neighbor {} activate".format(no_form, neighbor_ip)
                st.config(dut, command, type=cli_type)
        if "default-originate" in properties:
            if properties["default-originate"]:
                command = "{} neighbor {} default-originate".format(no_form, neighbor_ip)
                st.config(dut, command, type=cli_type)
        if "maximum-prefix" in properties:
            command = "{} neighbor {} maximum-prefix {}".format(no_form, neighbor_ip, properties["maximum-prefix"])
            st.config(dut, command, type=cli_type)
        return True
    elif cli_type == "klish":
        commands = list()
        if not peergroup:
            # interface-based neighbors need "neighbor interface <type> <num>"
            neigh_name = get_interface_number_from_name(neighbor_ip)
            if isinstance(neigh_name, dict):
                commands.append("neighbor interface {} {}".format(neigh_name["type"], neigh_name["number"]))
            else:
                commands.append("neighbor {}".format(neigh_name))
        else:
            commands.append("peer-group {}".format(neighbor_ip))
        if "password" in properties:
            password = "" if no_form == 'no' else properties["password"]
            commands.append("{} password {}".format(no_form, password))
        if "keep_alive" in properties and "hold_time" in properties:
            commands.append("{} timers {} {}".format(no_form, properties["keep_alive"],properties["hold_time"]))
        if "neighbor_shutdown" in properties:
            commands.append("{} shutdown".format(no_form))
        if family and mode:
            commands.append("address-family {} {}".format(family, mode))
        if "activate" in properties:
            commands.append("{} activate".format(no_form))
        if "default-originate" in properties:
            commands.append("{} default-originate".format(no_form))
        if "maximum-prefix" in properties:
            commands.append("{} maximum-prefix {}".format(no_form, properties["maximum-prefix"]))
        commands.append("exit")
        st.config(dut, commands, type=cli_type, skip_error_check=skip_error_check)
        return True
    else:
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
def delete_bgp_neighbor(dut, local_asn, neighbor_ip, remote_asn, vrf='default', cli_type="", skip_error_check=True):
    """
    Delete a BGP neighbor from the router instance.

    :param dut: device under test
    :param local_asn: local AS number
    :param neighbor_ip: neighbor address to remove
    :param remote_asn: neighbor's AS number
    :param vrf: VRF name (default 'default')
    :return: True on success, False for an unsupported CLI type
    """
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    st.log("Deleting BGP neighbor ..")
    # Add validation for IPV4 / IPV6 address
    config_router_bgp_mode(dut, local_asn, vrf=vrf, cli_type=cli_type)
    if cli_type == "vtysh":
        command = "no neighbor {} remote-as {}".format(neighbor_ip, remote_asn)
        st.config(dut, command, type=cli_type, skip_error_check=skip_error_check)
    elif cli_type == "klish":
        commands = list()
        commands.append("neighbor {}".format(neighbor_ip))
        commands.append("no remote-as {}".format(remote_asn))
        commands.append("exit")
        commands.append("no neighbor {}".format(neighbor_ip))
        commands.append("exit")
        st.config(dut, commands, type=cli_type, skip_error_check=skip_error_check)
    else:
        # fixed typo ("UNSUPPORTE") to match every sibling API's message
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
    return True
def change_bgp_neighbor_admin_status(dut, local_asn, neighbor_ip, operation=1, cli_type=""):
    """
    Shut (operation=0) or no-shut (operation=1) a BGP neighbor.

    :param dut: device under test
    :param local_asn: local AS number
    :param neighbor_ip: neighbor address
    :param operation: 0 to shutdown, 1 to bring up; anything else fails
    :return: True on success, False on bad operation or unsupported CLI type
    """
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    st.log("Shut/no-shut BGP neighbor ..")
    config_router_bgp_mode(dut, local_asn)
    if cli_type == 'vtysh':
        if operation == 0:
            command = "neighbor {} shutdown".format(neighbor_ip)
            st.config(dut, command, type=cli_type)
        elif operation == 1:
            command = "no neighbor {} shutdown".format(neighbor_ip)
            st.config(dut, command, type=cli_type)
        else:
            st.error("Invalid operation provided.")
            return False
    elif cli_type == 'klish':
        command = list()
        command.append("neighbor {}".format(neighbor_ip))
        if operation == 0:
            command.append("shutdown")
        elif operation == 1:
            command.append("no shutdown")
        else:
            st.error("Invalid operation provided.")
            return False
        st.config(dut, command, type=cli_type)
    else:
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
    return True
def advertise_bgp_network(dut, local_asn, network, route_map='', config='yes', family='ipv4', cli_type="", skip_error_check=True, network_import_check=False):
    """
    Advertise (config='yes') or withdraw a network under the address family.

    :param dut: device under test
    :param local_asn: local AS number
    :param network: prefix to advertise/withdraw
    :param route_map: optional route-map applied to the network statement
    :param family: 'ipv4' or 'ipv6'
    :param network_import_check: klish only — disable network import-check first
    :return: True on success, False for an unsupported CLI type
    """
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    st.log("Advertise BGP network ..")
    # Add validation for IPV4 / IPV6 address
    config_router_bgp_mode(dut, local_asn, cli_type=cli_type)
    mode = "" if config.lower() == 'yes' else "no"
    # Gather IPv6 type using validation
    if cli_type == "vtysh":
        if family == 'ipv6':
            command = "address-family ipv6 unicast"
            st.config(dut, command, type=cli_type, skip_error_check=skip_error_check)
        if route_map.lower() == '':
            command = "{} network {}".format(mode, network)
        else:
            command = "{} network {} route-map {}".format(mode, network,route_map)
        st.config(dut, command, type=cli_type, skip_error_check=skip_error_check)
    elif cli_type == "klish":
        commands = list()
        if network_import_check:
            commands.append("no network import-check")
        commands.append("address-family {} unicast".format(family))
        if route_map.lower() == '':
            commands.append("{} network {}".format(mode, network))
            commands.append("exit")
            commands.append("exit")
        else:
            commands.append("{} network {} route-map {}".format(mode, network, route_map))
            commands.append("exit")
            commands.append("exit")
        st.config(dut, commands, type=cli_type, skip_error_check=skip_error_check)
    else:
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
    return True
def config_bgp_network_advertise(dut, local_asn, network, route_map='', addr_family='ipv4', config='yes', cli_type="",
                                 skip_error_check=True, network_import_check=False):
    """
    Configure (config='yes') or remove a network statement under an address family.

    :param dut: device under test
    :param local_asn: local AS number
    :param network: prefix for the network statement
    :param route_map: optional route-map name appended to the statement
    :param addr_family: address family ('ipv4'/'ipv6')
    :param network_import_check: klish only — disable network import-check first
    :return: True on success, False for an unsupported CLI type
    """
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    cfgmode = 'no' if config != 'yes' else ''
    if cli_type == "vtysh":
        command = "router bgp {}".format(local_asn)
        command += "\n address-family {} {}".format(addr_family, "unicast")
        command += "\n {} network {}".format(cfgmode, network)
        if route_map != '':
            # BUGFIX: leading space required — previously the keyword was fused
            # onto the prefix, producing "... network <net>route-map <map>".
            command += " route-map {}".format(route_map)
        st.config(dut, command, type=cli_type)
        return True
    elif cli_type == "klish":
        commands = list()
        commands.append("router bgp {}".format(local_asn))
        if network_import_check:
            commands.append("no network import-check")
        commands.append("address-family {} {}".format(addr_family, "unicast"))
        cmd = "route-map {}".format(route_map) if route_map else ""
        commands.append("{} network {} {}".format(cfgmode, network, cmd).strip())
        commands.append("exit")
        commands.append("exit")
        st.config(dut, commands, type=cli_type, skip_error_check=skip_error_check)
        return True
    else:
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
def show_bgp_ipv4_summary_vtysh(dut, vrf='default', **kwargs):
    """
    Show the IPv4 BGP summary, optionally for a specific VRF.

    :param dut: device under test
    :param vrf: VRF name; 'default' means the global table
    :return: parsed rows from st.show, or [] for an unsupported CLI type
    """
    cli_type = get_show_cli_type(dut, **kwargs)
    if cli_type == "vtysh":
        if vrf == 'default':
            command = "show ip bgp summary"
        else:
            command = "show ip bgp vrf {} summary".format(vrf)
        return st.show(dut, command, type='vtysh')
    elif cli_type == "klish":
        if vrf == 'default':
            command = "show bgp ipv4 unicast summary"
        else:
            command = "show bgp ipv4 unicast vrf {} summary".format(vrf)
        return st.show(dut, command, type=cli_type)
    else:
        st.log("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return []
def show_bgp_ipv6_summary_vtysh(dut, vrf='default', **kwargs):
    """
    Show the IPv6 BGP summary, optionally for a specific VRF.

    :param dut: device under test
    :param vrf: VRF name; 'default' means the global table
    :return: parsed rows from st.show, or [] for an unsupported CLI type
    """
    cli_type = get_show_cli_type(dut, **kwargs)
    if cli_type == "vtysh":
        if vrf == 'default':
            command = "show bgp ipv6 summary"
        else:
            command = "show bgp vrf {} ipv6 summary".format(vrf)
        return st.show(dut, command, type='vtysh')
    elif cli_type == "klish":
        if vrf == 'default':
            command = "show bgp ipv6 unicast summary"
        else:
            command = "show bgp ipv6 unicast vrf {} summary".format(vrf)
        return st.show(dut, command, type=cli_type)
    else:
        st.log("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return []
def show_bgp_ipv4_summary(dut, **kwargs):
    """
    Show the IPv4 BGP summary (forced to vtysh — see RFE note below).

    :param dut: device under test
    :return: parsed rows from st.show, or [] for an unsupported CLI type
    """
    #added kwargs.update() as Klish output currently does not list RIB entries. RFE SONIC-23559
    kwargs.update({"cli_type": "vtysh"})
    cli_type = get_show_cli_type(dut, **kwargs)
    if cli_type == "vtysh":
        command = "show bgp ipv4 summary"
    elif cli_type == "klish":
        command = 'show bgp ipv4 unicast summary'
    else:
        st.log("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return []
    return st.show(dut, command, type=cli_type)
def show_bgp_ipv6_summary(dut, **kwargs):
    """
    Show the IPv6 BGP summary (forced to vtysh — see RFE note below).

    :param dut: device under test
    :return: parsed rows from st.show, or [] for an unsupported CLI type
    """
    # added kwargs.update() as Klish output currently does not list RIB entries. RFE SONIC-23559
    kwargs.update({"cli_type": "vtysh"})
    cli_type = get_show_cli_type(dut, **kwargs)
    if cli_type == "vtysh":
        command = "show bgp ipv6 summary"
    elif cli_type == "klish":
        command = 'show bgp ipv6 unicast summary'
    else:
        st.log("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return []
    return st.show(dut, command, type=cli_type)
def get_bgp_nbr_count(dut, **kwargs):
    """
    Return the established-neighbor count from the BGP summary output.

    :param dut: device under test
    :param kwargs: vrf (default 'default'), family ('ipv4'/'ipv6'), cli_type
    :return: int count from the first row reporting estd_nbr, else 0
    """
    cli_type = get_show_cli_type(dut, **kwargs)
    vrf = kwargs.get('vrf', 'default')
    family = kwargs.get('family', 'ipv4')
    if family == 'ipv6':
        output = show_bgp_ipv6_summary_vtysh(dut, vrf=vrf, cli_type=cli_type)
    else:
        output = show_bgp_ipv4_summary_vtysh(dut, vrf=vrf, cli_type=cli_type)
    # iterate rows directly instead of indexing via range(len(...))
    for row in output:
        if row['estd_nbr'] != '':
            return int(row['estd_nbr'])
    return 0
def verify_ipv6_bgp_summary(dut, **kwargs):
    """
    Verify that every key/value in kwargs matches a row of the IPv6 BGP summary.

    :param dut: device under test
    :param kwargs: field=value pairs to match against the summary output
    :return: True when all pairs match, False on the first mismatch
    :rtype: bool
    EX; verify_ipv6_bgp_summary(vars.D1, 'neighbor'= '3341::2')
    """
    cli_type = get_show_cli_type(dut, **kwargs)
    # cli_type is consumed here; drop it so it isn't treated as a match field
    kwargs.pop("cli_type", None)
    output = show_bgp_ipv6_summary(dut,cli_type=cli_type)
    for each in kwargs.keys():
        match = {each: kwargs[each]}
        entries = filter_and_select(output, None, match)
        if not entries:
            st.log("{} and {} is not match ".format(each, kwargs[each]))
            return False
    return True
def show_bgp_neighbor(dut, neighbor_ip):
    """Show details for a single BGP neighbor.

    vtysh only — no klish variant was added because no script uses this.

    :param dut: device under test
    :param neighbor_ip: neighbor address to query
    :return: parsed rows from st.show
    """
    return st.show(dut, "show bgp neighbor {}".format(neighbor_ip))
def show_bgp_ipv4_neighbor_vtysh(dut, neighbor_ip=None, vrf='default', **kwargs):
    """
    Show IPv4 BGP neighbors, optionally filtered to one neighbor / VRF.

    :param dut: device under test
    :param neighbor_ip: optional single neighbor to query
    :param vrf: VRF name; 'default' means the global table
    :return: parsed rows from st.show, or [] for an unsupported CLI type
    """
    cli_type = get_show_cli_type(dut, **kwargs)
    if cli_type == 'vtysh':
        if vrf == 'default':
            command = "show ip bgp neighbors"
        else:
            command = "show ip bgp vrf {} neighbors".format(vrf)
        if neighbor_ip:
            command += " {}".format(neighbor_ip)
    elif cli_type == 'klish':
        if vrf == 'default':
            command = "show bgp ipv4 unicast neighbors"
        else:
            command = "show bgp ipv4 unicast vrf {} neighbors".format(vrf)
        if neighbor_ip:
            command += " {}".format(neighbor_ip)
    else:
        # defensive guard (matches the sibling show_* APIs); previously
        # an unexpected cli_type left `command` unbound
        st.log("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return []
    return st.show(dut, command, type=cli_type)
def show_bgp_ipv6_neighbor_vtysh(dut, neighbor_ip=None, vrf='default', **kwargs):
    """
    Show IPv6 BGP neighbors, optionally filtered to one neighbor / VRF.

    :param dut: device under test
    :param neighbor_ip: optional single neighbor to query
    :param vrf: VRF name; 'default' means the global table
    :return: parsed rows from st.show, or [] for an unsupported CLI type
    """
    cli_type = get_show_cli_type(dut, **kwargs)
    if cli_type == 'vtysh':
        if vrf == 'default':
            command = "show bgp ipv6 neighbors"
        else:
            command = "show bgp vrf {} ipv6 neighbors".format(vrf)
        if neighbor_ip:
            command += " {}".format(neighbor_ip)
    elif cli_type == 'klish':
        if vrf == 'default':
            command = "show bgp ipv6 unicast neighbors"
        else:
            command = "show bgp ipv6 unicast vrf {} neighbors".format(vrf)
        if neighbor_ip:
            command += " {}".format(neighbor_ip)
    else:
        # defensive guard (matches the sibling show_* APIs); previously
        # an unexpected cli_type left `command` unbound
        st.log("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return []
    return st.show(dut, command, type=cli_type)
def clear_ip_bgp(dut, **kwargs):
    """
    Clear all IPv4 BGP sessions on the DUT.

    :param dut: device under test
    :return: True on success, False for an unsupported CLI type
    """
    cli_type = get_cfg_cli_type(dut, **kwargs)
    if cli_type in ["click", "vtysh"]:
        # command = "sonic-clear ip bgp"
        command = "clear ip bgp *"
        st.config(dut, command, type=cli_type, conf=False)
    elif cli_type == 'klish':
        command = 'clear bgp ipv4 unicast *'
        st.config(dut, command, type=cli_type)
    else:
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
    return True
def clear_bgp_vtysh(dut, **kwargs):
    """
    Clear BGP sessions for one or both address families.

    :param dut: device under test
    :param address_family: ipv4|ipv6|all (kwargs; default 'all')
    :return: None
    """
    cli_type = get_cfg_cli_type(dut, **kwargs)
    address_family = kwargs.get('address_family', 'all')
    af_list = ['ipv4','ipv6']
    if address_family == 'ipv4':
        if cli_type == 'vtysh':
            af_list = ['ipv4']
        elif cli_type == 'klish':
            af_list = ['ipv4 unicast']
    elif address_family == 'ipv6':
        if cli_type == 'vtysh':
            af_list = ['ipv6']
        elif cli_type == 'klish':
            af_list = ['ipv6 unicast']
    else:
        if cli_type == "vtysh":
            af_list=["ipv4", "ipv6"]
        elif cli_type == "klish":
            af_list = ["ipv4 unicast", "ipv6 unicast"]
    for each_af in af_list:
        # NOTE(review): relies on get_cfg_cli_type returning only
        # 'vtysh'/'klish' — `command` would be unbound for any other value.
        if cli_type == 'vtysh':
            command = "clear ip bgp {} *".format(each_af)
        elif cli_type == 'klish':
            command = "clear bgp {} *".format(each_af)
        st.config(dut, command, type=cli_type, conf=False)
def clear_ip_bgp_vtysh(dut, value="*", **kwargs):
    """
    Clear IPv4 BGP sessions matching *value* (default '*', i.e. all peers).

    :param dut: device under test
    :param value: peer / wildcard argument passed to the clear command
    :return: False for an unsupported CLI type, otherwise None
    """
    cli_type = get_cfg_cli_type(dut, **kwargs)
    if cli_type == 'vtysh':
        st.config(dut, "clear ip bgp ipv4 {}".format(value), type='vtysh', conf=False)
    elif cli_type == 'klish':
        st.config(dut, "clear bgp ipv4 unicast {}".format(value), type='klish', conf=False)
    else:
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
def clear_ipv6_bgp_vtysh(dut, value="*", **kwargs):
    """
    Clear IPv6 BGP sessions matching *value* (default '*', i.e. all peers).

    :param dut: device under test
    :param value: peer / wildcard argument passed to the clear command
    :return: False for an unsupported CLI type, otherwise None
    """
    cli_type = get_cfg_cli_type(dut, **kwargs)
    if cli_type == 'vtysh':
        command = "clear ip bgp ipv6 {}".format(value)
    elif cli_type == 'klish':
        command = "clear bgp ipv6 unicast {}".format(value)
    else:
        # BUGFIX: previously fell through with 'command' unbound and raised a
        # NameError; report the unsupported CLI type like the sibling helpers do.
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
    st.config(dut, command, type=cli_type, conf=False)
def clear_ip_bgp_vrf_vtysh(dut,vrf,family='ipv4',value="*", **kwargs):
    """
    Clear BGP sessions for a specific VRF and address family.

    :param dut: device under test
    :param vrf: VRF name
    :param family: 'ipv4' or 'ipv6'
    :param value: peer / wildcard argument (default '*')
    :return: False for an unsupported CLI type, otherwise None
    """
    cli_type = get_cfg_cli_type(dut, **kwargs)
    if cli_type == 'vtysh':
        st.config(dut, "clear bgp vrf {} {} {}".format(vrf, family, value),
                  type='vtysh', conf=False)
    elif cli_type == 'klish':
        # klish wants the '<af> unicast' form of the address family.
        af = {'ipv4': 'ipv4 unicast', 'ipv6': 'ipv6 unicast'}.get(family, family)
        st.config(dut, "clear bgp {} vrf {} {}".format(af, vrf, value),
                  type='klish', conf=False)
    else:
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
def create_bgp_aggregate_address(dut, **kwargs):
    """
    API to create the BGP aggregate address
    Author: Chaitanya Vella (chaitanya-vella.kumar@broadcom.com)
    :param dut:
    :param local_asn: BGP AS number (mandatory)
    :param address_range: aggregate prefix (mandatory)
    :param family: address family, e.g. 'ipv4' (mandatory)
    :param config: 'add' | 'delete' (mandatory)
    :param as_set: if present, append 'as-set'
    :param summary: if present, append 'summary-only'
    :return: False on missing parameters or unsupported CLI type
    """
    cli_type = get_cfg_cli_type(dut, **kwargs)
    # BUGFIX: the guard used 'and', so it only fired when *every* mandatory key
    # was missing, then fell through to a KeyError; also add the missing return.
    if "local_asn" not in kwargs or "address_range" not in kwargs or "config" not in kwargs or "family" not in kwargs:
        st.error("Mandatory parameters not provided")
        return False
    skip_error_check = kwargs.get("skip_error_check", True)
    # cli_type=kwargs.get("cli_type","vtysh")
    config_router_bgp_mode(dut, kwargs["local_asn"], cli_type=cli_type)
    if cli_type == "vtysh":
        command = "address-family {}\n".format(kwargs["family"])
        if kwargs["config"] == "add":
            command += "aggregate-address {}".format(kwargs["address_range"])
        elif kwargs["config"] == "delete":
            command += "no aggregate-address {}".format(kwargs["address_range"])
        if "summary" in kwargs:
            command += " summary-only"
        if "as_set" in kwargs:
            command += " as-set"
        st.config(dut, command, type=cli_type)
    elif cli_type=="klish":
        commands = list()
        commands.append("address-family {} unicast".format(kwargs["family"]))
        if kwargs.get("config") == "add":
            command = "aggregate-address {}".format(kwargs["address_range"])
            if "summary" in kwargs:
                command += " summary-only"
            if "as_set" in kwargs:
                command += " as-set"
        else:
            command = "no aggregate-address {}".format(kwargs["address_range"])
        commands.append(command)
        commands.append("exit")
        commands.append("exit")
        st.config(dut, commands, type=cli_type, skip_error_check=skip_error_check)
    else:
        st.error("Unsupported CLI TYPE -- {}".format(cli_type))
        return False
def create_bgp_update_delay(dut, local_asn, time=0, cli_type="", skip_error_check=True):
    """
    Configure 'update-delay' under the router bgp context.

    :param dut: device under test
    :param local_asn: BGP AS number
    :param time: delay in seconds
    """
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    config_router_bgp_mode(dut, local_asn, cli_type=cli_type)
    st.config(dut, "update-delay {}".format(time), type=cli_type,
              skip_error_check=skip_error_check)
def create_bgp_always_compare_med(dut, local_asn):
    """
    Enable 'bgp always-compare-med' (vtysh only).

    :param dut: device under test
    :param local_asn: BGP AS number
    """
    # No usage in scripts
    config_router_bgp_mode(dut, local_asn)
    st.config(dut, "bgp always-compare-med", type='vtysh')
def create_bgp_best_path(dut, local_asn, user_command, cli_type=""):
    """
    Configure a BGP bestpath option.

    :param dut: device under test
    :param local_asn: BGP AS number
    :param user_command: text appended after the 'bestpath' keyword
    :return: False for an unsupported CLI type, otherwise None
    """
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    config_router_bgp_mode(dut, local_asn, cli_type=cli_type)
    if cli_type == 'vtysh':
        command = "bgp bestpath {}".format(user_command)
    elif cli_type == 'klish':
        command = ["bestpath {}".format(user_command), "exit"]
    else:
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
    st.config(dut, command, type=cli_type)
def create_bgp_client_to_client_reflection(dut, local_asn, config='yes', cli_type="", skip_error_check=True):
    """
    Enable or disable client-to-client route reflection.

    :param dut: device under test
    :param local_asn: BGP AS number
    :param config: 'yes' enables; anything else prefixes 'no'
    :return: True on success, False for an unsupported CLI type
    """
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    cfgmode = '' if config == 'yes' else 'no'
    if cli_type == "vtysh":
        command = "router bgp {}\n {} bgp client-to-client reflection".format(local_asn, cfgmode)
        st.config(dut, command, type=cli_type)
        return True
    elif cli_type == "klish":
        commands = [
            "router bgp {}".format(local_asn),
            "{} client-to-client reflection".format(cfgmode),
            "exit",
        ]
        st.config(dut, commands, type=cli_type, skip_error_check=skip_error_check)
        return True
    st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
    return False
def create_bgp_route_reflector_client(dut, local_asn, addr_family, nbr_ip, config='yes', cli_type="", skip_error_check=True):
    """
    Configure a neighbor / peer-group as a route-reflector client.

    :param dut: device under test
    :param local_asn: BGP AS number
    :param addr_family: address family; 'l2vpn' maps to 'evpn' on klish
    :param nbr_ip: neighbor IP, interface name, or peer-group name
    :param config: 'yes' applies; anything else prefixes 'no'
    :return: True on success, False for an unsupported CLI type
    """
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    cfgmode = '' if config == 'yes' else 'no'
    if cli_type == "vtysh":
        command = "router bgp {}".format(local_asn)
        command += "\n address-family {} {}".format(addr_family, "unicast")
        command += "\n {} neighbor {} route-reflector-client".format(cfgmode, nbr_ip)
        st.config(dut, command, type=cli_type)
        return True
    elif cli_type == "klish":
        addr_family_type = "unicast"
        neigh_name = nbr_ip
        commands = ["router bgp {}".format(local_asn)]
        # Interface-style neighbors need the 'neighbor interface' form.
        if re.findall(r'Ethernet|Vlan|PortChannel|Eth', nbr_ip):
            neigh_name = get_interface_number_from_name(nbr_ip)
            commands.append("neighbor interface {} {}".format(neigh_name["type"], neigh_name["number"]))
        elif addr_family == 'l2vpn':
            commands.append("neighbor {}".format(nbr_ip))
        elif is_valid_ip_address(neigh_name, addr_family):
            commands.append("neighbor {}".format(nbr_ip))
        else:
            commands.append("peer-group {}".format(nbr_ip))
        if addr_family == 'l2vpn':
            addr_family_type = "evpn"
        commands.append("address-family {} {}".format(addr_family, addr_family_type))
        commands.append("{} route-reflector-client".format(cfgmode))
        commands.extend(["exit", "exit", "exit"])
        st.config(dut, commands, type=cli_type, skip_error_check=skip_error_check)
        return True
    st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
    return False
def create_bgp_next_hop_self(dut, local_asn, addr_family, nbr_ip, force='no', config='yes', cli_type="", skip_error_check=True):
    """
    Configure next-hop-self for a neighbor / peer-group.

    :param dut: device under test
    :param local_asn: BGP AS number
    :param addr_family: address family ('ipv4'/'ipv6')
    :param nbr_ip: neighbor IP or peer-group name
    :param force: 'yes' appends the 'force' keyword
    :param config: 'yes' applies; anything else prefixes 'no'
    :return: True on success, False for an unsupported CLI type
    """
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    cfgmode = '' if config == 'yes' else 'no'
    if cli_type == "vtysh":
        command = "router bgp {}".format(local_asn)
        command += "\n address-family {} {}".format(addr_family, "unicast")
        command += "\n {} neighbor {} next-hop-self".format(cfgmode, nbr_ip)
        if force == 'yes':
            command += " force"
        st.config(dut, command, type=cli_type)
        return True
    elif cli_type == "klish":
        commands = ["router bgp {}".format(local_asn)]
        if is_valid_ip_address(nbr_ip, addr_family):
            commands.append("{} neighbor {}".format(cfgmode, nbr_ip))
        else:
            commands.append("{} peer-group {}".format(cfgmode, nbr_ip))
        # Only the add path enters the address family to set next-hop-self.
        if config == "yes":
            force_cmd = "force" if force == 'yes' else ""
            commands.append("address-family {} {}".format(addr_family, "unicast"))
            commands.append("next-hop-self {}".format(force_cmd))
            commands.append("exit")
        commands.extend(["exit", "exit"])
        st.config(dut, commands, type=cli_type, skip_error_check=skip_error_check)
        return True
    st.log("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
    return False
def create_bgp_cluster_id(dut, local_asn, cluster_id, cluster_ip):
    """
    Configure the BGP cluster-id, first with *cluster_id*, then with *cluster_ip*.

    NOTE(review): both commands set the same single-valued knob, so the second
    presumably overrides the first — confirm the intent with the original author.

    :param dut: device under test
    :param local_asn: BGP AS number
    """
    # No usage in test scripts
    config_router_bgp_mode(dut, local_asn)
    for value in (cluster_id, cluster_ip):
        st.config(dut, "bgp cluster-id {}".format(value), type='vtysh')
def create_bgp_confideration(dut, local_asn, confd_id_as, confd_peers_as):
    """
    Configure a BGP confederation identifier and its member peer AS list.

    :param dut: device under test
    :param local_asn: BGP AS number
    :param confd_id_as: confederation identifier AS
    :param confd_peers_as: confederation peer AS list
    """
    # No usage in test scripts
    config_router_bgp_mode(dut, local_asn)
    # BUGFIX: the FRR/vtysh keyword is 'confederation'; the misspelled
    # 'confideration' command is rejected by the shell.
    command = "bgp confederation identifier {}".format(confd_id_as)
    st.config(dut, command, type='vtysh')
    command = "bgp confederation peers {}".format(confd_peers_as)
    st.config(dut, command, type='vtysh')
def create_bgp_dampening(dut, local_asn, half_life_time, timer_start, timer_start_supress, max_duration):
    """
    Configure BGP route-flap dampening parameters (vtysh only).

    :param dut: device under test
    :param local_asn: BGP AS number
    :param half_life_time: dampening half-life
    :param timer_start: reuse threshold
    :param timer_start_supress: suppress threshold
    :param max_duration: maximum suppress duration
    """
    # No usage in test scripts
    config_router_bgp_mode(dut, local_asn)
    cmd = "bgp dampening {} {} {} {}".format(half_life_time, timer_start,
                                             timer_start_supress, max_duration)
    st.config(dut, cmd, type='vtysh')
def config_bgp_default(dut, local_asn, user_command, config='yes', cli_type="", skip_error_check=True):
    """
    Apply or remove a 'bgp default ...' option.

    :param dut: device under test
    :param local_asn: BGP AS number
    :param user_command: text appended after the 'default' keyword
    :param config: 'yes' applies; anything else prefixes 'no'
    :return: True on success, False for an unsupported CLI type
    """
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    cfgmode = '' if config == 'yes' else 'no'
    if cli_type == "vtysh":
        command = "router bgp {}\n {} bgp default {}".format(local_asn, cfgmode, user_command)
        st.config(dut, command, type=cli_type, skip_error_check=skip_error_check)
        return True
    elif cli_type == "klish":
        commands = ["router bgp {}".format(local_asn),
                    "{} default {}".format(cfgmode, user_command),
                    "exit"]
        st.config(dut, commands, type=cli_type, skip_error_check=skip_error_check)
        return True
    st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
    return False
def config_bgp_always_compare_med(dut, local_asn, config='yes', cli_type=""):
    """
    Enable or disable always-compare-med.

    :param dut: device under test
    :param local_asn: BGP AS number
    :param config: 'yes' enables; anything else prefixes 'no'
    :return: True on success, False for an unsupported CLI type
    """
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    config_router_bgp_mode(dut, local_asn, cli_type=cli_type)
    no_form = '' if config == 'yes' else 'no '
    if cli_type == "vtysh":
        command = "{}bgp always-compare-med".format(no_form)
    elif cli_type == 'klish':
        command = ["{}always-compare-med".format(no_form), 'exit']
    else:
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
    st.config(dut, command, type=cli_type)
    return True
def config_bgp_deterministic_med(dut, local_asn, config='yes',cli_type=''):
    """
    Enable or disable deterministic-med.

    :param dut: device under test
    :param local_asn: BGP AS number
    :param config: 'yes' enables; anything else prefixes 'no'
    :return: True on success, False for an unsupported CLI type
    """
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    config_router_bgp_mode(dut, local_asn, cli_type=cli_type)
    no_form = '' if config == 'yes' else 'no '
    if cli_type == "vtysh":
        command = "{}bgp deterministic-med".format(no_form)
    elif cli_type == "klish":
        command = ["{}deterministic-med".format(no_form), 'exit']
    else:
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
    st.config(dut, command, type=cli_type)
    return True
def config_bgp_disable_ebgp_connected_route_check(dut, local_asn):
    """
    Enable 'bgp disable-ebgp-connected-route-check' (vtysh only).

    :param dut: device under test
    :param local_asn: BGP AS number
    """
    # No script usage
    config_router_bgp_mode(dut, local_asn)
    st.config(dut, "bgp disable-ebgp-connected-route-check", type='vtysh')
def config_bgp_graceful_restart(dut, **kwargs):
    """
    Enable or disable BGP graceful restart.

    :param dut: device under test
    :param local_asn: BGP AS number (mandatory)
    :param config: 'add' | 'delete' (mandatory)
    :param preserve_state: if set, also toggle 'graceful-restart preserve-fw-state'
    :param vrf: VRF name (default 'default')
    :param user_command: extra text appended when adding
    :return: False on bad input, otherwise None
    """
    preserve_state = kwargs.get('preserve_state',None)
    vrf = kwargs.get('vrf', "default")
    skip_error_check = kwargs.get("skip_error_check", True)
    cli_type = get_cfg_cli_type(dut, **kwargs)
    # BUGFIX: the guard used 'and', so it only fired when *both* mandatory
    # keys were missing; a lone missing local_asn raised a KeyError below.
    if "local_asn" not in kwargs or "config" not in kwargs:
        st.error("Mandatory params not provided")
        return False
    if kwargs.get("config") not in ["add","delete"]:
        st.log("Unsupported ACTION")
        return False
    config_router_bgp_mode(dut, kwargs["local_asn"],vrf=vrf, cli_type=cli_type)
    mode = "no" if kwargs.get("config") != "add" else ""
    bgp_mode = "bgp" if cli_type == "vtysh" else ""
    if cli_type == 'vtysh':
        command = "{} {} graceful-restart\n".format(mode, bgp_mode)
    if cli_type == 'klish':
        command = "{} graceful-restart enable\n".format(mode)
    if preserve_state is not None:
        command += "{} {} graceful-restart preserve-fw-state\n".format(mode, bgp_mode)
    if cli_type == 'klish':
        command += "exit\n"
    # BUGFIX: the guard compared mode against 'no ' (trailing space), which
    # never matched the actual value 'no', so user_command was appended even
    # on vtysh delete.
    if not (mode == 'no' and cli_type == 'vtysh'):
        if "user_command" in kwargs:
            command += " {}".format(kwargs["user_command"])
    st.config(dut, command, type=cli_type, skip_error_check=skip_error_check)
def config_bgp_graceful_shutdown(dut, local_asn, config="add", cli_type="vtysh", skip_error_check=True):
    """
    Apply or remove BGP graceful-shutdown.

    :param dut: device under test
    :param local_asn: BGP AS number
    :param config: 'add' applies; anything else prefixes 'no'
    """
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    config_router_bgp_mode(dut, local_asn, cli_type=cli_type)
    no_form = "" if config == "add" else "no"
    # vtysh carries an extra leading 'bgp' keyword.
    keyword = "bgp" if cli_type == "vtysh" else ""
    st.config(dut, "{} {} graceful-shutdown".format(no_form, keyword),
              type=cli_type, skip_error_check=skip_error_check)
def config_bgp_listen(dut, local_asn, neighbor_address, subnet, peer_grp_name, limit, config='yes', cli_type="", skip_error_check=True):
    """
    Configure a BGP dynamic-neighbor listen range and/or listen limit.

    :param dut: device under test
    :param local_asn: BGP AS number
    :param neighbor_address: base address of the listen range (falsy to skip)
    :param subnet: prefix length of the listen range
    :param peer_grp_name: peer-group the dynamic neighbors join
    :param limit: maximum number of dynamic neighbors (falsy to skip)
    :param config: 'yes' applies; anything else prefixes 'no'
    :return: False for an unsupported CLI type, otherwise None
    """
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    config_router_bgp_mode(dut, local_asn, cli_type=cli_type)
    mode = "" if config.lower() == 'yes' else "no"
    if cli_type == "vtysh":
        if neighbor_address:
            st.config(dut, "{} bgp listen range {}/{} peer-group {}".format(
                mode, neighbor_address, subnet, peer_grp_name),
                type=cli_type, skip_error_check=skip_error_check)
        if limit:
            st.config(dut, "{} bgp listen limit {}".format(mode, limit),
                      type=cli_type, skip_error_check=skip_error_check)
    elif cli_type == "klish":
        if neighbor_address:
            # On add, ensure the peer-group exists before referencing it.
            cmd = [] if mode == 'no' else ['peer-group {}'.format(peer_grp_name), 'exit']
            cmd.append("{} listen range {}/{} peer-group {}".format(
                mode, neighbor_address, subnet, peer_grp_name))
            cmd.append("exit")
            st.config(dut, cmd, type=cli_type, skip_error_check=skip_error_check)
        if limit:
            st.config(dut, ["{} listen limit {}".format(mode, limit), 'exit'],
                      type=cli_type, skip_error_check=skip_error_check)
    else:
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
def config_bgp_listen_range(dut,local_asn,**kwargs):
    """
    Configure a BGP dynamic-neighbor listen range and/or listen limit.

    :param dut: device under test
    :param local_asn: BGP AS number
    :param neighbor_address: base address of the listen range ('' to skip)
    :param subnet: prefix length of the listen range
    :param peer_grp_name: peer-group the dynamic neighbors join
    :param limit: maximum number of dynamic neighbors ('' to skip)
    :param config: 'yes' to add (default), anything else to remove
    :param vrf: VRF name (default 'default')
    :return: True on success, False for an unsupported CLI type
    """
    cli_type = st.get_ui_type(dut, **kwargs)
    #cli_type = _get_cli_type(cli_type)
    neighbor_address = kwargs.get('neighbor_address', '')
    subnet = str(kwargs.get('subnet', ''))
    peer_grp_name = kwargs.get('peer_grp_name', '')
    limit = kwargs.get('limit', '')
    config = kwargs.get('config','yes')
    vrf = kwargs.get('vrf', 'default')
    skip_error_check = kwargs.get('skip_error_check', True)
    if config.lower() == 'yes':
        mode = ""
    else:
        mode = 'no'
    cmd = ''
    if cli_type == 'vtysh' or cli_type == 'click':
        if neighbor_address:
            if vrf != 'default':
                cmd = cmd + 'router bgp {} vrf {}\n'.format(local_asn, vrf)
            else:
                cmd = cmd + 'router bgp {}\n'.format(local_asn)
            cmd = cmd + "{} bgp listen range {}/{} peer-group {}\n".format(mode, neighbor_address, subnet, peer_grp_name)
        if limit:
            # Re-enter the router context so the limit applies even when the
            # range portion above was skipped.
            if vrf != 'default':
                cmd = cmd + 'router bgp {} vrf {}\n'.format(local_asn, vrf)
            else:
                cmd = cmd + 'router bgp {}\n'.format(local_asn)
            cmd = cmd + "{} bgp listen limit {}".format(mode, limit)
        st.config(dut, cmd, type= 'vtysh', skip_error_check=skip_error_check)
        return True
    elif cli_type == "klish":
        if neighbor_address:
            if vrf != 'default':
                cmd = cmd + 'router bgp {} vrf {}\n'.format(local_asn, vrf)
            else:
                cmd = cmd + 'router bgp {}\n'.format(local_asn)
            cmd = cmd + "{} listen range {}/{} peer-group {}\n".format(mode, neighbor_address, subnet, peer_grp_name)
            cmd = cmd + "exit\n"
        if limit:
            if vrf != 'default':
                cmd = cmd + 'router bgp {} vrf {}\n'.format(local_asn, vrf)
            else:
                cmd = cmd + 'router bgp {}\n'.format(local_asn)
            cmd = cmd + "{} listen limit {}\n".format(mode, limit)
            cmd = cmd + "exit\n"
        st.config(dut, cmd, type=cli_type, skip_error_check=skip_error_check, conf = True)
        return True
    elif cli_type in ['rest-patch','rest-put']:
        http_method = kwargs.pop('http_method',cli_type)
        rest_urls = st.get_datastore(dut,'rest_urls')
        if neighbor_address:
            dynamic_prefix = neighbor_address+'/'+subnet
            if mode == '':
                # Add: PATCH/PUT the dynamic-neighbor prefix with its peer-group.
                rest_url = rest_urls['bgp_dynamic_neigh_prefix'].format(vrf)
                ocdata = {"openconfig-network-instance:dynamic-neighbor-prefixes":{"dynamic-neighbor-prefix":[{"prefix": dynamic_prefix,"config":{"prefix": dynamic_prefix,"peer-group": peer_grp_name}}]}}
                response = config_rest(dut, http_method=http_method, rest_url=rest_url, json_data=ocdata)
            elif mode == 'no':
                # Delete: NOTE(review) this removes the whole dynamic-neighbor
                # prefix container, not just this prefix — confirm intent.
                rest_url = rest_urls['bgp_dynamic_neigh_prefix'].format(vrf)
                response = delete_rest(dut, rest_url=rest_url)
        if limit:
            if mode == '':
                rest_url = rest_urls['bgp_max_dynamic_neighbors'].format(vrf)
                ocdata = {"openconfig-bgp-ext:max-dynamic-neighbors":int(limit)}
                response = config_rest(dut, http_method=http_method, rest_url=rest_url, json_data=ocdata)
            elif mode == 'no':
                rest_url = rest_urls['bgp_max_dynamic_neighbors'].format(vrf)
                response = delete_rest(dut, rest_url=rest_url)
        # Only the last REST response is checked here.
        if not response:
            st.log(response)
            return False
        return True
    else:
        st.log("Unsupported CLI TYPE - {}".format(cli_type))
        return False
def config_bgp_log_neighbor_changes(dut, local_asn):
    """
    Enable 'bgp log-neighbor-changes' (vtysh only).

    :param dut: device under test
    :param local_asn: BGP AS number
    """
    # No script usage
    config_router_bgp_mode(dut, local_asn)
    st.config(dut, "bgp log-neighbor-changes", type='vtysh')
def config_bgp_max_med(dut, local_asn, config='yes',**kwargs):
    """
    Configure BGP max-med (on-startup and/or administrative).

    :param dut: device under test
    :param local_asn: BGP AS number
    :param config: 'yes' applies; anything else prefixes 'no'
    :param on_start_time: on-startup period in seconds
    :param on_start_med: MED advertised during the on-startup period
    :param administrative_med: administrative MED value
    :usage: config_bgp_max_med(dut=dut7,cli_type='klish',config="yes",local_asn="300", on_start_time=10,on_start_med=40,administrative_med=65)
    :usage: config_bgp_max_med(dut=dut7,cli_type='click',config="no",local_asn="300",administrative_med=65)
    :return: True
    """
    cli_type = get_cfg_cli_type(dut, **kwargs)
    command = ''
    if cli_type == 'vtysh' :
        config_router_bgp_mode(dut, local_asn, cli_type=cli_type)
        if config == 'yes' :
            if 'on_start_time' in kwargs and 'on_start_med' in kwargs:
                command += "bgp max-med on-startup {} {}\n".format(kwargs['on_start_time'],kwargs['on_start_med'])
            elif 'on_start_time' in kwargs:
                command += "bgp max-med on-startup {}\n".format(kwargs['on_start_time'])
            if 'administrative_med' in kwargs:
                command += "bgp max-med administrative {}\n".format(kwargs['administrative_med'])
        else :
            if 'on_start_time' in kwargs and 'on_start_med' in kwargs:
                command += "no bgp max-med on-startup {} {}\n".format(kwargs['on_start_time'],kwargs['on_start_med'])
            elif 'on_start_time' in kwargs:
                command += "no bgp max-med on-startup {}\n".format(kwargs['on_start_time'])
            if 'administrative_med' in kwargs:
                command += "no bgp max-med administrative {}\n".format(kwargs['administrative_med'])
        command += 'exit\n'
        st.config(dut, command.split("\n"), type=cli_type)
    elif cli_type == 'klish':
        config_router_bgp_mode(dut, local_asn,cli_type=cli_type)
        if config == 'yes' :
            # BUGFIX: original tested "'on_start_time' and 'on_start_med' in kwargs";
            # the first operand is a truthy string literal, so only on_start_med
            # was actually checked and a missing on_start_time raised a KeyError.
            if 'on_start_time' in kwargs and 'on_start_med' in kwargs:
                command += "max-med on-startup {} {}\n".format(kwargs['on_start_time'],kwargs['on_start_med'])
            elif 'on_start_time' in kwargs:
                command += "max-med on-startup {}\n".format(kwargs['on_start_time'])
            if 'administrative_med' in kwargs:
                command += "max-med administrative {}\n".format(kwargs['administrative_med'])
        else :
            if 'on_start_time' in kwargs and 'on_start_med' in kwargs:
                command += "no max-med on-startup {} {}\n".format(kwargs['on_start_time'],kwargs['on_start_med'])
            elif 'on_start_time' in kwargs:
                command += "no max-med on-startup {}\n".format(kwargs['on_start_time'])
            if 'administrative_med' in kwargs:
                command += "no max-med administrative {}\n".format(kwargs['administrative_med'])
        command += 'exit\n'
        st.config(dut, command.split("\n"), type=cli_type)
    return True
def config_route_map_delay_timer(dut, local_asn, timer):
    """
    Configure 'bgp route-map delay-timer' (vtysh only).

    :param dut: device under test
    :param local_asn: BGP AS number
    :param timer: delay in seconds
    """
    # No script usage
    config_router_bgp_mode(dut, local_asn)
    st.config(dut, "bgp route-map delay-timer {}".format(timer), type='vtysh')
def enable_address_family_mode(dut, local_asn, mode_type, mode,cli_type=''):
    """
    Enter an address-family context under router bgp.

    :param dut: device under test
    :param local_asn: BGP AS number
    :param mode_type: address family, e.g. 'ipv4'
    :param mode: sub-mode, e.g. 'unicast'
    """
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    config_router_bgp_mode(dut, local_asn, cli_type=cli_type)
    st.config(dut, "address-family {} {}".format(mode_type, mode), type=cli_type)
def config_address_family_neighbor_ip(dut, local_asn, mode_type, mode, neighbor_ip, user_command):
    """
    Apply a per-neighbor command inside an address-family context (vtysh only).

    :param dut: device under test
    :param local_asn: BGP AS number
    :param mode_type: address family, e.g. 'ipv4'
    :param mode: sub-mode, e.g. 'unicast'
    :param neighbor_ip: neighbor address
    :param user_command: text appended after 'neighbor <ip>'
    """
    # No script usage
    enable_address_family_mode(dut, local_asn, mode_type, mode)
    # Verify neighbor IP address
    st.config(dut, "neighbor {} {}".format(neighbor_ip, user_command), type='vtysh')
def create_bgp_peergroup(dut, local_asn, peer_grp_name, remote_asn, keep_alive=60, hold=180, password=None, vrf='default', family='ipv4', skip_error_check = True, **kwargs):
    """
    Create a BGP peer-group and optionally bind a neighbor to it.

    :param dut: device under test
    :param local_asn: BGP AS number
    :param peer_grp_name: peer-group name to create
    :param remote_asn: remote AS for the peer-group
    :param keep_alive: keepalive timer in seconds (default 60)
    :param hold: hold timer in seconds (default 180)
    :param password: optional MD5 password (vtysh/click path only)
    :param vrf: VRF name; 'default' omits the vrf keyword
    :param family: address family activated for the group (default 'ipv4')
    :param neighbor_ip: optional neighbor (IP or interface name) to join the group
    :param ebgp_multihop: optional ebgp-multihop TTL
    :param update_src: optional update-source address
    :param update_src_intf: optional update-source interface name
    :param connect: optional connect-retry timer
    :return: True on success, False for an unsupported CLI type or REST failure
    """
    cli_type = get_cfg_cli_type(dut, **kwargs)
    neighbor_ip = kwargs.get('neighbor_ip',None)
    ebgp_multihop = kwargs.get('ebgp_multihop',None)
    update_src = kwargs.get('update_src',None)
    update_src_intf = kwargs.get('update_src_intf',None)
    connect = kwargs.get('connect', None)
    st.log("Creating BGP peer-group ..")
    cmd = ''
    if cli_type == 'vtysh' or cli_type == 'click':
        if vrf.lower() != 'default':
            cmd = cmd + "router bgp {} vrf {}\n".format(local_asn, vrf)
        else:
            cmd = cmd + "router bgp {}\n".format(local_asn)
        cmd = cmd + "neighbor {} peer-group\n".format(peer_grp_name)
        cmd = cmd + "neighbor {} remote-as {}\n".format(peer_grp_name, remote_asn)
        cmd = cmd + "neighbor {} timers {} {}\n".format(peer_grp_name, keep_alive, hold)
        if password:
            cmd = cmd + " neighbor {} password {}\n".format(peer_grp_name, password)
        cmd = cmd + "\n address-family {} unicast\n".format(family)
        cmd = cmd + "\n neighbor {} activate\n".format(peer_grp_name)
        # NOTE(review): the commands below are issued while still in the
        # address-family context — confirm vtysh accepts them there.
        if connect != None:
            cmd = cmd + 'neighbor {} timers connect {}\n'.format(peer_grp_name, connect)
        if ebgp_multihop != None:
            cmd = cmd + 'neighbor {} ebgp-multihop {}\n'.format(peer_grp_name, ebgp_multihop)
        if update_src != None:
            cmd = cmd + 'neighbor {} update-source {}\n'.format(peer_grp_name, update_src)
        if update_src_intf != None:
            cmd = cmd + 'neighbor {} update-source {}\n'.format(peer_grp_name, update_src_intf)
        if neighbor_ip != None:
            cmd = cmd + 'neighbor {} peer-group {}\n'.format(neighbor_ip, peer_grp_name)
        st.config(dut, cmd, type='vtysh', skip_error_check=skip_error_check)
        return True
    elif cli_type == "klish":
        # Returns a {'type':..,'number':..} dict for interface-style names,
        # or the input unchanged for plain addresses.
        neigh_name = get_interface_number_from_name(neighbor_ip)
        if vrf != 'default':
            cmd = cmd + 'router bgp {} vrf {}\n'.format(local_asn, vrf)
        else:
            cmd = cmd + 'router bgp {}\n'.format(local_asn)
        cmd = cmd + "peer-group {}\n".format(peer_grp_name)
        if neighbor_ip != None:
            # Leave the peer-group context, select the neighbor, then rejoin
            # it to the group before configuring the group attributes.
            cmd = cmd + "exit\n"
            if neigh_name:
                if isinstance(neigh_name, dict):
                    cmd = cmd + 'neighbor interface {} {}\n'.format(neigh_name["type"], neigh_name["number"])
                else:
                    cmd = cmd + 'neighbor {}\n'.format(neigh_name)
            cmd = cmd + "peer-group {}\n".format(peer_grp_name)
        if connect != None:
            cmd = cmd + 'timers connect {}\n'.format(connect)
        if ebgp_multihop != None:
            cmd = cmd + 'ebgp-multihop {}\n'.format(ebgp_multihop)
        if update_src != None:
            cmd = cmd + 'update-source {}\n'.format(update_src)
        if update_src_intf != None:
            update_src_intf = get_interface_number_from_name(update_src_intf)
            if isinstance(update_src_intf, dict):
                cmd = cmd + 'update-source interface {} {}'.format(update_src_intf['type'],update_src_intf['number'])
        cmd = cmd + "remote-as {}\n".format(remote_asn)
        cmd = cmd + "address-family {} unicast\n".format(family)
        cmd = cmd + "activate\n"
        # NOTE(review): timers issued inside the address-family context —
        # confirm klish accepts them there.
        cmd = cmd + "timers {} {}\n".format(keep_alive, hold)
        cmd = cmd + "exit\n"
        cmd = cmd + "exit\n"
        cmd = cmd + "exit\n"
        st.config(dut, cmd, type=cli_type, skip_error_check=skip_error_check, conf = True)
        return True
    elif cli_type in ['rest-patch','rest-put']:
        # REST path: each attribute is pushed as a separate OpenConfig PATCH/PUT.
        http_method = kwargs.pop('http_method',cli_type)
        rest_urls = st.get_datastore(dut,'rest_urls')
        rest_url_peergroup = rest_urls['bgp_peergroup_config'].format(vrf)
        rest_url_neighbor = rest_urls['bgp_neighbor_config'].format(vrf)
        if peer_grp_name != None:
            ocdata = {"openconfig-network-instance:peer-groups":{"peer-group":[{"peer-group-name":peer_grp_name,"config":{"peer-group-name":peer_grp_name,"local-as":int(local_asn)}}]}}
            response = config_rest(dut, http_method=http_method, rest_url=rest_url_peergroup, json_data=ocdata)
            if not response:
                st.log('Peergroup config failed')
                st.log(response)
                return False
        if neighbor_ip != None:
            ocdata = {'openconfig-network-instance:neighbors':{"neighbor":[{'neighbor-address':neighbor_ip,'config':{'neighbor-address':neighbor_ip,'peer-group':peer_grp_name,'enabled': bool(1)}}]}}
            response = config_rest(dut, http_method=http_method, rest_url=rest_url_neighbor, json_data=ocdata)
            if not response:
                st.log('Peergroup config with Neighbor IP failed')
                st.log(response)
                return False
        if remote_asn != None:
            ocdata = {"openconfig-network-instance:peer-groups":{"peer-group":[{"peer-group-name":peer_grp_name,"config":{"peer-as":int(remote_asn)}}]}}
            response = config_rest(dut, http_method=http_method, rest_url=rest_url_peergroup, json_data=ocdata)
            if not response:
                st.log('Remote-as config in the Peergroup failed')
                st.log(response)
                return False
        if family != None:
            if family == 'ipv4':
                ocdata = {"openconfig-network-instance:peer-groups":{"peer-group":[{"peer-group-name":peer_grp_name,"afi-safis":{"afi-safi":[{"afi-safi-name":"IPV4_UNICAST","config":{"afi-safi-name":"IPV4_UNICAST","enabled": bool(1)}}]}}]}}
            elif family == 'ipv6':
                ocdata = {"openconfig-network-instance:peer-groups":{"peer-group":[{"peer-group-name":peer_grp_name,"afi-safis":{"afi-safi":[{"afi-safi-name":"IPV6_UNICAST","config":{"afi-safi-name":"IPV6_UNICAST","enabled": bool(1)}}]}}]}}
            response = config_rest(dut, http_method=http_method, rest_url=rest_url_peergroup, json_data=ocdata)
            if not response:
                st.log('Address family activation in the Peergroup failed')
                st.log(response)
                return False
        if keep_alive != None:
            ocdata = {"openconfig-network-instance:peer-groups":{"peer-group":[{"peer-group-name":peer_grp_name,"timers":{"config":{"hold-time":str(hold),"keepalive-interval":str(keep_alive)}}}]}}
            response = config_rest(dut, http_method=http_method, rest_url=rest_url_peergroup, json_data=ocdata)
            if not response:
                st.log('Keepalive and Hold timer config in the Peergroup failed')
                st.log(response)
                return False
        if ebgp_multihop != None:
            ocdata = {"openconfig-network-instance:peer-groups":{"peer-group":[{"peer-group-name":peer_grp_name,"ebgp-multihop":{"config":{"enabled":bool(1),"multihop-ttl":int(ebgp_multihop)}}}]}}
            response = config_rest(dut, http_method=http_method, rest_url=rest_url_peergroup, json_data=ocdata)
            if not response:
                st.log('EBGP multihop config in the peergroup failed')
                st.log(response)
                return False
        if update_src != None:
            ocdata = {"openconfig-network-instance:peer-groups":{"peer-group":[{"peer-group-name":peer_grp_name,"transport":{"config":{"local-address":update_src}}}]}}
            response = config_rest(dut, http_method=http_method, rest_url=rest_url_peergroup, json_data=ocdata)
            if not response:
                st.log('BGP update source config in the peergroup failed')
                st.log(response)
                return False
        if update_src_intf != None:
            ocdata = {"openconfig-network-instance:peer-groups":{"peer-group":[{"peer-group-name":peer_grp_name,"transport":{"config":{"local-address":update_src_intf}}}]}}
            response = config_rest(dut, http_method=http_method, rest_url=rest_url_peergroup, json_data=ocdata)
            if not response:
                st.log('BGP update source interface config in the peergroup failed')
                st.log(response)
                return False
        if connect != None:
            ocdata = {"openconfig-network-instance:peer-groups":{"peer-group":[{"peer-group-name":peer_grp_name,"timers":{"config":{"connect-retry":str(connect)}}}]}}
            response = config_rest(dut, http_method=http_method, rest_url=rest_url_peergroup, json_data=ocdata)
            if not response:
                st.log('BGP update source interface config in the peergroup failed')
                st.log(response)
                return False
        return True
    else:
        st.log("Unsupported CLI TYPE - {}".format(cli_type))
        return False
def remove_bgp_peergroup(dut, local_asn, peer_grp_name, remote_asn, vrf='default',**kwargs):
    """
    Remove a BGP peer-group (and, on klish, any neighbor membership first).

    :param dut: device under test
    :param local_asn: BGP AS number
    :param peer_grp_name: peer-group to remove
    :param remote_asn: remote AS previously bound to the group
    :param vrf: VRF name; 'default' omits the vrf keyword
    :param neighbor_ip: optional member neighbor to detach (klish path)
    :return: False for an unsupported CLI type, otherwise None
    """
    cli_type = get_cfg_cli_type(dut, **kwargs)
    neighbor_ip = kwargs.get('neighbor_ip',None)
    st.log("Removing BGP peer-group ..")
    if cli_type == 'vtysh' or cli_type == 'click':
        # Add validation for IPV4 / IPV6 address
        config_router_bgp_mode(dut, local_asn,vrf=vrf)
        st.config(dut, "no neighbor {} remote-as {}".format(peer_grp_name, remote_asn), type='vtysh')
        st.config(dut, "no neighbor {} peer-group".format(peer_grp_name), type='vtysh')
    elif cli_type == 'klish':
        neigh_name = get_interface_number_from_name(neighbor_ip)
        if vrf.lower() != 'default':
            lines = ["router bgp {} vrf {}\n".format(local_asn, vrf)]
        else:
            lines = ["router bgp {}\n".format(local_asn)]
        if neighbor_ip is not None:
            # Detach the neighbor from the group before deleting the group.
            if neigh_name:
                if isinstance(neigh_name, dict):
                    lines.append('neighbor interface {} {}\n'.format(neigh_name["type"], neigh_name["number"]))
                else:
                    lines.append('neighbor {}\n'.format(neigh_name))
            lines.append("no peer-group {}\n".format(peer_grp_name))
            lines.append("exit\n")
        lines.append("no peer-group {}\n".format(peer_grp_name))
        lines.append("exit\n")
        st.config(dut, ''.join(lines), type='klish')
    else:
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
def config_bgp_peer_group(dut, local_asn, peer_grp_name, config="yes", vrf="default", cli_type="'", skip_error_check=True):
    """
    Create or delete a BGP peer-group.

    NOTE(review): the cli_type default of "'" looks like a typo, but it is
    normalized by get_cfg_cli_type before use — confirm before changing it.

    :param dut: device under test
    :param local_asn: BGP AS number
    :param peer_grp_name: peer-group name
    :param config: 'yes' creates; anything else prefixes 'no'
    :return: True on success, False for an unsupported CLI type
    """
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    config_router_bgp_mode(dut, local_asn, vrf=vrf, cli_type=cli_type)
    no_form = "" if config == "yes" else "no"
    if cli_type == "klish":
        commands = ["{} peer-group {}".format(no_form, peer_grp_name)]
        # Creating the group enters its context; leave it again.
        if config == "yes":
            commands.append("exit")
        st.config(dut, commands, type=cli_type, skip_error_check=skip_error_check)
        return True
    elif cli_type == "vtysh":
        st.config(dut, "{} neighbor {} peer-group".format(no_form, peer_grp_name),
                  type=cli_type, skip_error_check=skip_error_check)
        return True
    st.log("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
    return False
def create_bgp_neighbor_use_peergroup(dut, local_asn, peer_grp_name, neighbor_ip, family="ipv4", vrf='default', cli_type="", skip_error_check=True):
    """
    Create a BGP neighbor that inherits its settings from an existing peer-group.

    :param dut: device under test
    :param local_asn: local BGP AS number
    :param peer_grp_name: peer-group the neighbor should join
    :param neighbor_ip: neighbor IPv4/IPv6 address
    :param family: address family to activate ("ipv4" or "ipv6")
    :param vrf: VRF name (default "default")
    :param cli_type: CLI flavour; resolved via get_cfg_cli_type when empty
    :param skip_error_check: passed through to st.config (klish path only)
    :return: True on success, False for unsupported CLI types
    """
    st.log("Creating BGP peer using peer-group ..")
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    # Add validation for IPV4 / IPV6 address
    config_router_bgp_mode(dut, local_asn, vrf=vrf, cli_type=cli_type)
    if cli_type == "vtysh":
        command = "neighbor {} peer-group {}".format(neighbor_ip, peer_grp_name)
        st.config(dut, command, type='vtysh')
        # Gather the IP type using the validation result
        if family == "ipv6":
            command = "address-family ipv6 unicast"
            st.config(dut, command, type=cli_type)
            command = "neighbor {} activate".format(neighbor_ip)
            st.config(dut, command, type=cli_type)
        # Fix: previously fell through and returned None on success while
        # the unsupported-CLI branch returned False; return True explicitly.
        return True
    elif cli_type == "klish":
        commands = list()
        # activate the requested family on the peer-group itself
        commands.append("peer-group {}".format(peer_grp_name))
        commands.append("address-family {} unicast".format(family))
        commands.append("activate")
        commands.append("exit")
        if family == "ipv6":
            # also activate ipv4 unicast so both families are usable on the group
            commands.append("address-family ipv4 unicast")
            commands.append("activate")
            commands.append("exit")
        commands.append("exit")
        # bind the neighbor to the peer-group, then unwind to config mode
        commands.append("neighbor {}".format(neighbor_ip))
        commands.append("peer-group {}".format(peer_grp_name))
        commands.append("exit")
        commands.append("exit")
        st.config(dut, commands, type=cli_type, skip_error_check=skip_error_check)
        # Fix: explicit success return (was implicit None)
        return True
    else:
        st.log("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
def create_bgp_neighbor_interface(dut, local_asn, interface_name, remote_asn, family, config='yes', cli_type=""):
    """
    Configure (or unconfigure) a BGP neighbor on an interface and activate
    the given address family.

    :param dut: device under test
    :param local_asn: local BGP AS number
    :param interface_name: interface acting as the BGP neighbor
    :param remote_asn: remote AS number for the neighbor
    :param family: address family to activate ("ipv4" or "ipv6")
    :param config: 'yes' to configure, anything else to remove (case-insensitive)
    :param cli_type: CLI flavour; resolved via get_cfg_cli_type when empty
    :return: True on success, False for unsupported CLI types or no commands
    """
    cli_type = get_cfg_cli_type(dut, cli_type=cli_type)
    st.log("Creating bgp neighbor on interface")
    # Fix: the original compared config.lower() here but the raw string below,
    # so e.g. config='Yes' produced the config-mode command without the
    # activate block or the trailing exits. One normalized flag is used now.
    is_config = config.lower() == 'yes'
    mode = "" if is_config else 'no'
    # Add validation for IPV4 / IPV6 address
    config_router_bgp_mode(dut, local_asn, cli_type=cli_type)
    commands = list()
    if cli_type == "vtysh":
        commands.append("{} neighbor {} interface remote-as {}".format(mode, interface_name, remote_asn))
        if is_config:
            commands.append("address-family {} unicast".format(family))
            commands.append("{} neighbor {} activate".format(mode, interface_name))
    elif cli_type == "klish":
        interface_data = get_interface_number_from_name(interface_name)
        if isinstance(interface_data, dict):
            commands.append("neighbor interface {} {}".format(interface_data["type"], interface_data["number"]))
        else:
            commands.append("neighbor {}".format(interface_data))
        commands.append("{} remote-as {}".format(mode, remote_asn))
        if is_config:
            commands.append("address-family {} unicast".format(family))
            commands.append('{} activate'.format(mode))
            commands.append("exit")  # leave address-family
            commands.append("exit")  # leave neighbor sub-mode
        else:
            commands.append("exit")  # leave neighbor sub-mode
    else:
        st.log("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
    if commands:
        if is_config:
            commands.append("exit")  # leave router-bgp context
        st.config(dut, commands, type=cli_type, skip_error_check=True)
        return True
    else:
        return False
def remove_bgp_neighbor_use_peergroup(dut, local_asn, peer_grp_name, neighbor_ip, family="ipv4", vrf='default'):
    """
    Remove a BGP neighbor that was bound to a peer-group (vtysh only).

    :param dut: device under test
    :param local_asn: local BGP AS number
    :param peer_grp_name: peer-group the neighbor was bound to
    :param neighbor_ip: neighbor IPv4/IPv6 address
    :param family: address family; ipv6 additionally deactivates the neighbor
    :param vrf: VRF name (default "default")
    :return: True (st.config surfaces device errors itself)
    """
    st.log("Removing BGP peer using peer-group ..")
    # Add validation for IPV4 / IPV6 address
    config_router_bgp_mode(dut, local_asn, vrf=vrf)
    command = "no neighbor {} peer-group {}".format(neighbor_ip, peer_grp_name)
    st.config(dut, command, type='vtysh')
    # Gather the IP type using the validation result
    if family == "ipv6":
        # Fix: enter the ipv6 address-family BEFORE issuing 'no ... activate'.
        # The original sent the deactivate in router context (where it would
        # not target ipv6) and only then entered the address-family mode.
        # This now mirrors the order used by create_bgp_neighbor_use_peergroup.
        command = "address-family ipv6 unicast"
        st.config(dut, command, type='vtysh')
        command = "no neighbor {} activate".format(neighbor_ip)
        st.config(dut, command, type='vtysh')
    return True
def config_bgp_multi_neigh_use_peergroup(dut, **kwargs):
    """
    Author: Prudvi Mangadu (prudvi.mangadu@broadcom.com)
    To config BGP peergroup with multi neighbours.

    Builds one multi-line CLI batch (vtysh or klish) that creates the
    peer-group, sets remote-as / timers / password, binds every neighbour in
    neigh_ip_list to the group, and optionally activates the address family,
    adds redistribution and a route-map.

    :param dut:
    :param local_asn: local BGP AS number (mandatory)
    :param peer_grp_name: peer-group name (mandatory)
    :param remote_asn: remote AS number set on the peer-group (mandatory)
    :param neigh_ip_list: one neighbour address or a list of them (mandatory)
    :param family: ipv4 | ipv6 | all (default ipv4)
    :param activate: True | False
    :param password:
    :param keep_alive: keepalive timer; only applied together with 'hold'
    :param hold: hold timer
    :param redistribute: protocol or list of protocols to redistribute
    :param routemap: route-map name; direction from 'routemap_dir' (default in)
    :param vrf: VRF name (default "default")
    :return: False if mandatory params missing or CLI type unsupported,
             True otherwise
    """
    cli_type = get_cfg_cli_type(dut, **kwargs)
    # all four of these are required; bail out early if any is absent
    if 'local_asn' not in kwargs or 'peer_grp_name' not in kwargs or 'remote_asn' not in kwargs \
            or 'neigh_ip_list' not in kwargs:
        st.error("Mandatory parameters are missing.")
        return False
    af = kwargs.get('family', 'ipv4')
    vrf = kwargs.get('vrf', 'default')
    # normalize neigh_ip_list to a list so a single address also works
    neigh_ip_li = list(kwargs['neigh_ip_list']) if isinstance(kwargs['neigh_ip_list'], list) else \
        [kwargs['neigh_ip_list']]
    config_router_bgp_mode(dut, kwargs['local_asn'], vrf=vrf)
    if cli_type == 'vtysh':
        # vtysh: neighbor-scoped commands carry the peer-group name directly
        command = "no bgp default ipv4-unicast \n"
        command += "neighbor {} peer-group \n".format(kwargs['peer_grp_name'])
        command += "neighbor {} remote-as {} \n".format(kwargs['peer_grp_name'], kwargs['remote_asn'])
        if 'keep_alive' in kwargs and 'hold' in kwargs:
            command += "neighbor {} timers {} {} \n".format(kwargs['peer_grp_name'], kwargs['keep_alive'], kwargs['hold'])
        if 'password' in kwargs:
            command += "neighbor {} password {} \n".format(kwargs['peer_grp_name'], kwargs['password'])
        for each_neigh in neigh_ip_li:
            command += "neighbor {} peer-group {} \n".format(each_neigh, kwargs['peer_grp_name'])
        # address-family block is only entered when something inside it is needed
        if 'activate' in kwargs or 'redistribute' in kwargs or 'routemap' in kwargs:
            command += "address-family {} unicast \n".format(af)
            if 'activate' in kwargs:
                command += "neighbor {} activate \n".format(kwargs['peer_grp_name'])
            if 'redistribute' in kwargs:
                redis_li = list(kwargs['redistribute']) if isinstance(kwargs['redistribute'], list) else [kwargs['redistribute']]
                for each_ in redis_li:
                    command += "redistribute {} \n".format(each_)
            if 'routemap' in kwargs:
                if 'routemap_dir' in kwargs:
                    command += "neighbor {} route-map {} {} \n".format(kwargs['peer_grp_name'], kwargs['routemap'], kwargs['routemap_dir'])
                else:
                    command += "neighbor {} route-map {} in \n".format(kwargs['peer_grp_name'], kwargs['routemap'])
            command += "exit\n"
        command += "exit\n"
        st.config(dut, command, type='vtysh')
    elif cli_type == 'klish':
        # klish: commands are issued inside the peer-group sub-mode instead
        cmd = "peer-group {} \n".format(kwargs['peer_grp_name'])
        cmd += "remote-as {} \n".format(kwargs['remote_asn'])
        if 'keep_alive' in kwargs and 'hold' in kwargs:
            cmd += "timers {} {} \n".format(kwargs['keep_alive'], kwargs['hold'])
        if 'password' in kwargs:
            cmd += 'password {} \n'.format(kwargs['password'])
        if 'activate' in kwargs:
            cmd += "address-family {} unicast \n".format(af)
            cmd += "activate \n"
            if 'redistribute' in kwargs:
                redis_li = list(kwargs['redistribute']) if isinstance(kwargs['redistribute'], list) else [kwargs['redistribute']]
                for each_ in redis_li:
                    cmd += "redistribute {} \n".format(each_)
            if 'routemap' in kwargs:
                if 'routemap_dir' in kwargs:
                    cmd += "route-map {} {} \n".format(kwargs['routemap'], kwargs['routemap_dir'])
                else:
                    cmd += "route-map {} in \n".format(kwargs['routemap'])
            cmd += "exit \n"
        # NOTE(review): only the first iteration's leading 'exit' leaves the
        # peer-group sub-mode; subsequent ones leave the previous neighbor
        # sub-mode — confirm nesting against the klish CLI tree.
        for each_neigh in neigh_ip_li:
            cmd += 'exit \n'
            cmd += 'neighbor {} \n'.format(each_neigh)
            cmd += 'peer-group {} \n'.format(kwargs['peer_grp_name'])
        cmd += 'exit \n'
        cmd += 'exit \n'
        st.config(dut, cmd, type='klish')
    else:
        st.error("UNSUPPORTED CLI TYPE -- {}".format(cli_type))
        return False
    return True
def verify_bgp_summary(dut, family='ipv4', shell="sonic", **kwargs):
    """
    Verify fields of the BGP summary output for the given address family.

    Fetches the summary via the legacy 'show bgp ... summary' command (for
    non-vtysh/klish/rest shells) or via show_bgp_ipv4/ipv6_summary_vtysh,
    then matches the remaining kwargs against the parsed rows.

    :param dut: device under test
    :param family: 'ipv4' or 'ipv6'
    :param shell: output source; one of sonic/vtysh/klish/rest-patch/rest-put
    :param kwargs: field/value pairs to match; the pair neighbor+state gets
                   special handling ('Established' also accepts a numeric
                   uptime column or 'ESTABLISHED'; 'Active' accepts 'ACTIVE')
    :return: True when all expectations match, False otherwise
    """
    if shell not in ["vtysh", "klish", "rest-patch", "rest-put"]:
        # legacy path: run the click/sonic show command directly
        if 'vrf' in kwargs and shell=='sonic':
            vrf = kwargs.pop('vrf')
            cmd = "show bgp vrf {} {} summary".format(vrf,family.lower())
        else:
            cmd = "show bgp {} summary".format(family.lower())
        if not st.is_feature_supported("show-bgp-summary-click-command", dut):
            # platform lacks the click command; fall back to vtysh execution
            output = st.show(dut,cmd, type="vtysh")
        else:
            output = st.show(dut,cmd)
    cli_type = get_show_cli_type(dut, **kwargs)
    if shell in ["vtysh", "klish", "rest-patch", "rest-put"]:
        vrf = kwargs.pop('vrf') if 'vrf' in kwargs else "default"
        if family.lower() == 'ipv4':
            output = show_bgp_ipv4_summary_vtysh(dut, vrf=vrf, cli_type=cli_type)
        elif family.lower() == 'ipv6':
            output = show_bgp_ipv6_summary_vtysh(dut, vrf=vrf, cli_type=cli_type)
        else:
            st.log("Invalid family {} or shell {}".format(family, cli_type))
            return False
    st.debug(output)
    # Specifically checking neighbor state
    if 'neighbor' in kwargs and 'state' in kwargs:
        neigh_li = list(kwargs['neighbor']) if isinstance(kwargs['neighbor'], list) else [kwargs['neighbor']]
        for each_neigh in neigh_li:
            #For dynamic neighbor, removing *, as it is not displayed in klish
            if shell == 'klish' or cli_type =='klish':
                st.log('For dynamic neighbor, removing *, as it is not displayed in klish')
                each_neigh = each_neigh.lstrip('*')
            match = {'neighbor': each_neigh}
            try:
                # [0] raises IndexError when the neighbor row is absent
                entries = filter_and_select(output, None, match)[0]
            except Exception as e:
                st.error(e)
                st.log("Neighbor {} given state {}, matching with {} ".format(each_neigh, kwargs['state'],
                                                                              "Not Found"))
                return False
            # NOTE(review): a falsy 'state' field silently skips the check
            # below instead of failing — confirm this is intentional.
            if entries['state']:
                if kwargs['state'] == 'Established':
                    # an established session shows uptime (digits) in vtysh,
                    # or the literal 'ESTABLISHED' in klish output
                    if entries['state'].isdigit() or entries['state'] == "ESTABLISHED":
                        st.log("Neighbor {} given state {}, matching with {} ".format(each_neigh,
                                                                                      kwargs['state'], entries['state']))
                    else:
                        st.error(
                            "Neighbor {} given state {}, matching with {} ".format(each_neigh,
                                                                                   kwargs['state'], entries['state']))
                        return False
                elif kwargs['state'] == 'Active':
                    if entries['state'] == "Active" or entries['state'] == "ACTIVE":
                        st.log("Neighbor {} given state {}, matching with {} ".format(each_neigh,
                                                                                      kwargs['state'], entries['state']))
                    else:
                        st.error(
                            "Neighbor {} given state {}, matching with {} ".format(each_neigh,
                                                                                   kwargs['state'], entries['state']))
                        return False
    # generic matching for every remaining expectation
    for each in kwargs.keys():
        if 'state' not in each and 'neighbor' not in each:
            match = {each: kwargs[each]}
            entries = filter_and_select(output, None, match)
            if not entries:
                st.log("{} and {} is not match ".format(each, kwargs[each]))
                return False
    return True
def verify_bgp_neighbor(dut, neighbor_ip, **kwargs):
    """
    Verify fields of the 'show bgp neighbor' output for a given neighbor.

    :param dut: device under test
    :param neighbor_ip: neighbor address whose details are fetched
    :param kwargs: field/value pairs expected in the parsed output
    :return: True when every pair matches, False otherwise
    """
    # No usage in scripts, so no klish support added
    output = show_bgp_neighbor(dut, neighbor_ip)
    st.debug(output)
    for key, expected in kwargs.items():
        if not filter_and_select(output, None, {key: expected}):
            st.log("{} and {} is not match ".format(key, expected))
            return False
    return True
def verify_bgp_ipv4_neighbor_vtysh(dut, neighbor_ip, **kwargs):
    """
    No usage in scripts. Template needs changes for this to work.

    Verify fields of the vtysh IPv4 BGP neighbor output.

    :param dut: device under test
    :param neighbor_ip: IPv4 neighbor address
    :param kwargs: field/value pairs expected in the parsed output
    :return: True when every pair matches, False otherwise
    """
    output = show_bgp_ipv4_neighbor_vtysh(dut, neighbor_ip)
    st.debug(output)
    for key, expected in kwargs.items():
        if not filter_and_select(output, None, {key: expected}):
            st.log("{} and {} is not match ".format(key, expected))
            return False
    return True
def verify_bgp_ipv6_neighbor_vtysh(dut, neighbor_ip, **kwargs):
    """
    No usage in scripts. Template needs changes for this to work.

    Verify fields of the vtysh IPv6 BGP neighbor output.

    :param dut: device under test
    :param neighbor_ip: IPv6 neighbor address
    :param kwargs: field/value pairs expected in the parsed output
    :return: True when every pair matches, False otherwise
    """
    output = show_bgp_ipv6_neighbor_vtysh(dut, neighbor_ip)
    st.debug(output)
    for key, expected in kwargs.items():
        if not filter_and_select(output, None, {key: expected}):
            st.log("{} and {} is not match ".format(key, expected))
            return False
    return True
def config_address_family_redistribute(dut, local_asn, mode_type, mode, value, config='yes',vrf='default',skip_error_check=True, **kwargs):
    """
    Configure or remove route redistribution under a BGP address family.

    :param dut: device under test
    :param local_asn: local BGP AS number
    :param mode_type: address family type (e.g. ipv4/ipv6)
    :param mode: address family mode (e.g. unicast)
    :param value: protocol to redistribute (connected/static/...)
    :param config: 'yes' to add, anything else to remove
    :param vrf: VRF name (default "default")
    :param skip_error_check: passed through to st.config
    :param kwargs: optional 'route_map' name; cli_type hints for get_cfg_cli_type
    :return: True on success, False for unsupported CLI types
    """
    cli_type = get_cfg_cli_type(dut, **kwargs)
    cfgmode = 'no' if config != 'yes' else ''
    route_map = kwargs.get('route_map')
    if cli_type in ('vtysh', 'click'):
        # vtysh batch: router context, address-family, then the redistribute line
        if vrf.lower() != 'default':
            pieces = ["router bgp {} vrf {}\n".format(local_asn, vrf)]
        else:
            pieces = ["router bgp {}\n".format(local_asn)]
        pieces.append("\n address-family {} {}".format(mode_type, mode))
        if route_map:
            pieces.append("\n {} redistribute {} route-map {}".format(cfgmode, value, route_map))
        else:
            pieces.append("\n {} redistribute {}".format(cfgmode, value))
        st.config(dut, "".join(pieces), type='vtysh', skip_error_check=skip_error_check)
        return True
    elif cli_type == "klish":
        # klish batch: same structure, but sub-modes need explicit exits
        if vrf != 'default':
            pieces = ['router bgp {} vrf {}\n'.format(local_asn, vrf)]
        else:
            pieces = ['router bgp {}\n'.format(local_asn)]
        pieces.append('address-family {} {}\n'.format(mode_type, mode))
        if route_map:
            pieces.append("{} redistribute {} route-map {}\n".format(cfgmode, value, route_map))
        else:
            pieces.append("{} redistribute {}\n".format(cfgmode, value))
        pieces.append('exit\nexit\n')
        st.config(dut, "".join(pieces), type=cli_type, skip_error_check=skip_error_check, conf=True)
        return True
    else:
        st.log("Unsupported CLI TYPE - {}".format(cli_type))
        return False
def config_bgp(dut, **kwargs):
"""
config_bgp(dut = DUT1, router_id = '9.9.9.9', local_as='100', neighbor ='192.168.3.2', remote_as='200', config = 'yes', config_type_list =["neighbor"])
config_bgp(dut = DUT1, local_as='100', remote_as='200', neighbor ='2001::2', config = 'yes', config_type_list =["neighbor"]
config_bgp(dut = DUT1, local_as='100',config = 'yes',config_type_list =["redist"], redistribute ='connected')
config_bgp(dut = DUT1, local_as='100', neighbor ='192.168.3.2',config = 'yes',config_type_list =["bfd"])
config_bgp(dut = DUT1, local_as='100', neighbor ='192.168.3.2',config = 'yes',config_type_list =["bfd","redist"], redistribute ='connected')
config_bgp(dut = DUT1, local_as='100', neighbor ='192.168.3.2', config = 'yes', password ='broadcom' ,config_type_list =["pswd"])
config_bgp(dut = DUT1, local_as='100', neighbor ='192.168.3.2',config = 'no', password ='broadcom' ,config_type_list =["pswd"])
config_bgp(dut = DUT1, local_as='100', neighbor ='192.168.3.2', config = 'yes', update_src ='2.2.2.1', config_type_list =["update_src"])
config_bgp(dut = DUT1, local_as='100', neighbor ='192.168.3.2', config = 'no', update_src ='2.2.2.1', config_type_list =["update_src"])
config_bgp(dut = DUT1, local_as='100',config = 'yes',config_type_list =["max_path_ibgp"], max_path_ibgp ='8')
config_bgp(dut = DUT1, local_as='100',config = 'no',config_type_list =["max_path_ibgp"], max_path_ibgp ='8')
config_bgp(dut = DUT1, local_as='100',config = 'yes',addr_family ='ipv6', config_type_list =["max_path_ibgp"], max_path_ibgp ='8')
config_bgp(dut = DUT1, local_as='100',config = 'no',addr_family ='ipv6', config_type_list =["max_path_ibgp"], max_path_ibgp ='8')
config_bgp(dut = DUT1, local_as='100',config = 'yes',addr_family ='ipv6', config_type_list =["max_path_ebgp"], max_path_ebgp ='20')
config_bgp(dut = DUT1, local_as='100', neighbor ='192.168.3.2',config ='yes', config_type_list =["routeMap"], routeMap ='map123', diRection='out')
config_bgp(dut = DUT1, local_as='100', neighbor ='192.168.3.2',config ='no', config_type_list =["routeMap"], routeMap ='map123', diRection='out')
config_bgp(dut = DUT1, local_as='100', neighbor ='2001::20', addr_family ='ipv6',config = 'yes', config_type_list =["routeMap"], routeMap ='map123', diRection='out')
config_bgp(dut = DUT1, local_as='100',config = 'no', removeBGP='yes', config_type_list =["removeBGP"])
config_bgp(dut = dut1,local_as = '100', neighbor = '20.20.20.2', config = 'yes', config_type_list =["nexthop_self"])
config_bgp(dut = dut1,local_as = '100', neighbor = '20.20.20.2', config = 'yes', config_type_list =["ebgp_mhop"],ebgp_mhop ='2')
"""
cli_type = get_cfg_cli_type(dut, **kwargs)
st.log('Configure BGP')
config = kwargs.get('config', "yes")
vrf_name = kwargs.get('vrf_name', "default")
router_id = kwargs.get('router_id','')
config_type_list = kwargs.get('config_type_list', None)
neighbor = kwargs.get('neighbor', None)
local_as = kwargs.get('local_as', None)
remote_as = kwargs.get('remote_as', None)
peergroup = kwargs.get('peergroup', '')
#pswd = kwargs.get('pswd', None)
#activate = kwargs.get('activate', None)
#nexthop_self = kwargs.get('nexthop_self', None)
addr_family = kwargs.get('addr_family', 'ipv4')
keepalive = kwargs.get('keepalive', '')
holdtime = kwargs.get('holdtime', '')
conf_peers = kwargs.get('conf_peers', '')
conf_identf = kwargs.get('conf_identf', '')
update_src = kwargs.get('update_src', None)
update_src_intf = kwargs.get("update_src_intf", "") if "update_src_intf" in config_type_list else ""
interface = kwargs.get('interface', None)
connect = kwargs.get('connect', None)
ebgp_mhop = kwargs.get('ebgp_mhop', None)
#failover = kwargs.get('failover', None)
shutdown = kwargs.get('shutdown', None)
#max_path = kwargs.get('max_path', None)
redistribute = kwargs.get('redistribute', None)
network = kwargs.get('network', None)
password = kwargs.get('password', None)
max_path_ibgp = kwargs.get('max_path_ibgp', None)
max_path_ebgp = kwargs.get('max_path_ebgp', None)
routeMap = kwargs.get('routeMap', None)
distribute_list = kwargs.get('distribute_list', None)
filter_list = kwargs.get('filter_list', None)
prefix_list = kwargs.get('prefix_list', None)
#import_vrf = kwargs.get('import_vrf', None)
import_vrf_name = kwargs.get('import_vrf_name', None)
#fast_external_failover = kwargs.get('fast_external_failover', None)
bgp_bestpath_selection = kwargs.get('bgp_bestpath_selection', None)
removeBGP = kwargs.get('removeBGP', 'no')
diRection = kwargs.get('diRection', 'in')
weight = kwargs.get('weight', None)
config_cmd = "" if config.lower() == 'yes' else "no"
my_cmd =''
if cli_type == "vtysh":
if 'local_as' in kwargs and removeBGP != 'yes':
if vrf_name != 'default':
my_cmd = 'router bgp {} vrf {}\n'.format(local_as, vrf_name)
else:
my_cmd = 'router bgp {}\n'.format(local_as)
if router_id != '':
my_cmd += '{} bgp router-id {}\n'.format(config_cmd, router_id)
if keepalive != '' and holdtime != '':
my_cmd += '{} timers bgp {} {}\n'.format(config_cmd, keepalive, holdtime)
if config_cmd == '':
if peergroup != '':
my_cmd += 'neighbor {} peer-group\n'.format(peergroup)
if conf_peers != '':
my_cmd += '{} bgp confederation peers {}\n'.format(config_cmd, conf_peers)
if conf_identf != '':
my_cmd += '{} bgp confederation identifier {}\n'.format(config_cmd, conf_identf)
for type1 in config_type_list:
if type1 == 'neighbor':
my_cmd += '{} neighbor {} remote-as {}\n'.format(config_cmd, neighbor, remote_as)
elif type1 == 'shutdown':
my_cmd += '{} neighbor {} shutdown\n'.format(config_cmd, neighbor)
elif type1 == 'failover':
my_cmd += '{} bgp fast-external-failover\n'.format(config_cmd)
elif type1 == 'router_id':
st.log("Configuring the router-id on the device")
elif type1 == 'fast_external_failover':
st.log("Configuring the fast_external_failover")
my_cmd += '{} bgp fast-external-failover\n'.format(config_cmd)
elif type1 == 'bgp_bestpath_selection':
st.log("Configuring bgp default bestpath selection")
my_cmd += '{} bgp bestpath {}\n'.format(config_cmd,bgp_bestpath_selection)
elif type1 == 'activate':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} neighbor {} activate\n'.format(config_cmd, neighbor)
elif type1 == 'nexthop_self':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} neighbor {} next-hop-self\n'.format(config_cmd, neighbor)
elif type1 == 'pswd':
my_cmd += '{} neighbor {} password {}\n'.format(config_cmd, neighbor, password)
elif type1 == 'update_src' or type1 == 'update_src_intf':
if update_src != None:
my_cmd += '{} neighbor {} update-source {}\n'.format(config_cmd, neighbor, update_src)
elif update_src_intf != None:
my_cmd += '{} neighbor {} update-source {}\n'.format(config_cmd, neighbor, update_src_intf)
elif type1 == 'interface':
my_cmd += '{} neighbor {} interface {}\n'.format(config_cmd, neighbor, interface)
elif type1 == 'connect':
my_cmd += '{} neighbor {} timers connect {}\n'.format(config_cmd, neighbor, connect)
elif type1 == 'ebgp_mhop':
my_cmd += '{} neighbor {} ebgp-multihop {}\n'.format(config_cmd, neighbor, ebgp_mhop)
elif type1 == 'peergroup':
my_cmd += '{} neighbor {} remote-as {}\n'.format(config_cmd, peergroup, remote_as)
if config_cmd == '':
if interface:
my_cmd += 'neighbor {} interface peer-group {}\n'.format(neighbor, peergroup)
else:
my_cmd += 'neighbor {} peer-group {}\n'.format(neighbor, peergroup)
if config_cmd == 'no':
my_cmd += '{} neighbor {} peer-group\n'.format(config_cmd, peergroup)
elif type1 == 'bfd':
if peergroup:
my_cmd += '{} neighbor {} bfd\n'.format(config_cmd, peergroup)
elif interface != '' and interface != None:
my_cmd += '{} neighbor {} bfd\n'.format(config_cmd, interface)
else:
my_cmd += '{} neighbor {} bfd\n'.format(config_cmd, neighbor)
elif type1 == 'max_path_ibgp':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} maximum-paths ibgp {}\n'.format(config_cmd, max_path_ibgp)
my_cmd += 'exit\n'
elif type1 == 'max_path_ebgp':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} maximum-paths {}\n'.format(config_cmd, max_path_ebgp)
my_cmd += 'exit\n'
elif type1 == 'redist':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} redistribute {}\n'.format(config_cmd, redistribute)
my_cmd += 'exit\n'
elif type1 == 'network':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} network {}\n'.format(config_cmd, network)
my_cmd += 'exit\n'
elif type1 == 'import-check':
my_cmd += '{} bgp network import-check\n'.format(config_cmd)
elif type1 == 'import_vrf':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} import vrf {} \n'.format(config_cmd, import_vrf_name)
my_cmd += 'exit\n'
elif type1 == 'routeMap':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} neighbor {} route-map {} {}\n'.format(config_cmd, neighbor, routeMap, diRection)
my_cmd += 'exit\n'
elif type1 == 'distribute_list':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} neighbor {} distribute-list {} {}\n'.format(config_cmd, neighbor, distribute_list, diRection)
my_cmd += 'exit\n'
elif type1 == 'filter_list':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} neighbor {} filter-list {} {}\n'.format(config_cmd, neighbor, filter_list, diRection)
my_cmd += 'exit\n'
elif type1 == 'prefix_list':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} neighbor {} prefix-list {} {}\n'.format(config_cmd, neighbor, prefix_list, diRection)
my_cmd += 'exit\n'
elif type1 == 'default_originate':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
if 'routeMap' in kwargs:
my_cmd += '{} neighbor {} default-originate route-map {}\n'.format(config_cmd, neighbor, routeMap)
else:
my_cmd += '{} neighbor {} default-originate\n'.format(config_cmd, neighbor)
my_cmd += 'exit\n'
elif type1 == 'removePrivateAs':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} neighbor {} remove-private-AS\n'.format(config_cmd, neighbor)
my_cmd += 'exit\n'
elif type1 == 'multipath-relax':
my_cmd += '{} bgp bestpath as-path multipath-relax \n'.format(config_cmd)
elif type1 == 'remote-as':
my_cmd += '{} neighbor {} interface remote-as {}\n'.format(config_cmd,interface,remote_as)
elif type1 == 'weight':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} neighbor {} weight {}\n'.format(config_cmd, neighbor, weight)
elif type1 == 'removeBGP':
st.log("Removing the bgp config from the device")
else:
st.log('Invalid BGP config parameter: {}'.format(type1))
output = st.config(dut, my_cmd, type=cli_type)
if "% Configure the peer-group first" in output:
st.error(output)
return False
if "% Specify remote-as or peer-group commands first" in output:
st.error(output)
return False
if vrf_name != 'default' and removeBGP == 'yes':
my_cmd = '{} router bgp {} vrf {}'.format(config_cmd, local_as, vrf_name)
st.config(dut, my_cmd, type=cli_type)
elif vrf_name == 'default' and removeBGP == 'yes':
if 'local_as' in kwargs:
my_cmd = '{} router bgp {}'.format(config_cmd,local_as)
else:
my_cmd = '{} router bgp'.format(config_cmd)
st.config(dut, my_cmd, type=cli_type)
elif cli_type == "klish":
commands = list()
neigh_name = get_interface_number_from_name(neighbor)
if interface:
intf_name = get_interface_number_from_name(interface)
shutdown = kwargs.get("shutdown", None) if "shutdown" in config_type_list else None
activate = kwargs.get("activate", None) if "activate" in config_type_list else None
nexthop_self = kwargs.get("nexthop_self", True) if "nexthop_self" in config_type_list else None
pswd = True if "pswd" in config_type_list else False
update_src = kwargs.get("update_src", "") if "update_src" in config_type_list else ""
update_src_intf = get_interface_number_from_name(update_src_intf)
bfd = True if "bfd" in config_type_list else False
route_map = True if "routeMap" in config_type_list else False
default_originate = True if "default_originate" in config_type_list else False
removePrivateAs = True if "removePrivateAs" in config_type_list else False
no_neighbor = "no" if kwargs.get("config") == "no" else ""
sub_list = ["neighbor", "routeMap", "shutdown", "activate", "nexthop_self", "pswd", "update_src",
"bfd", "default_originate", "removePrivateAs", "no_neigh","remote-as","filter_list",
"prefix_list", "distribute_list", "weight", "keepalive", "holdtime", "ebgp_mhop","peergroup","update_src_intf","connect"]
if 'local_as' in kwargs and removeBGP != 'yes':
if vrf_name != 'default':
my_cmd = 'router bgp {} vrf {}'.format(local_as, vrf_name)
else:
my_cmd = 'router bgp {}'.format(local_as)
commands.append(my_cmd)
if router_id:
my_cmd = '{} router-id {}'.format(config_cmd, router_id)
commands.append(my_cmd)
if peergroup:
my_cmd = '{} peer-group {}'.format(config_cmd, peergroup)
commands.append(my_cmd)
commands.append("exit")
# if conf_peers:
# my_cmd += '{} bgp confederation peers {}\n'.format(config_cmd, conf_peers)
# if conf_identf != '':
# my_cmd += '{} bgp confederation identifier {}\n'.format(config_cmd, conf_identf)
config_default_activate = True
config_remote_as = True
for type1 in config_type_list:
if type1 in sub_list:
if neigh_name and not peergroup:
if isinstance(neigh_name, dict):
my_cmd = "neighbor interface {} {}".format(neigh_name["type"],neigh_name["number"])
else:
my_cmd = "neighbor {}".format(neigh_name)
commands.append(my_cmd)
if peergroup:
my_cmd_peer = '{} peer-group {}'.format(config_cmd, peergroup)
if 'peergroup' in config_type_list:
if isinstance(neigh_name, dict):
my_cmd = "{} neighbor interface {} {}".format(no_neighbor, neigh_name["type"],
neigh_name["number"])
else:
my_cmd = "{} neighbor {}".format(no_neighbor, neigh_name)
if neigh_name:
commands.append(my_cmd)
commands.append(my_cmd_peer)
commands.append('exit')
neigh_name = None
activate = True
commands.append(my_cmd_peer)
if config_remote_as and remote_as:
if interface and not peergroup:
my_cmd = "neighbor interface {} {}".format(intf_name['type'], intf_name['number'])
commands.append(my_cmd)
my_cmd = '{} remote-as {}'.format(config_cmd, remote_as)
commands.append(my_cmd)
config_remote_as = False
if config_default_activate and (activate or neigh_name):
# show ip bgp summary will list
# v4 neighbor only if activate is done for v4 address family
# v6 neighbor only if activate is done for v4 address family
# both v4 and v6 neighbor only if activate is done for both address families
# There is a defect for this issue - 20468
if config_cmd == "":
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} activate'.format(config_cmd)
commands.append(my_cmd)
commands.append("exit")
if addr_family == "ipv6":
my_cmd = 'address-family ipv4 unicast'
commands.append(my_cmd)
my_cmd = '{} activate'.format(config_cmd)
commands.append(my_cmd)
commands.append("exit")
config_default_activate = False
# Avoid disable of neighbor unless config=no and config_type_list contains activate
elif activate and config_cmd == "no":
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} activate'.format(config_cmd)
commands.append(my_cmd)
commands.append("exit")
activate = None
if shutdown:
my_cmd = '{} shutdown'.format(config_cmd)
commands.append(my_cmd)
shutdown = None
elif route_map:
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} route-map {} {}'.format(config_cmd, routeMap, diRection)
commands.append(my_cmd)
commands.append("exit")
route_map = False
elif filter_list:
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} filter-list {} {}'.format(config_cmd, filter_list, diRection)
commands.append(my_cmd)
commands.append("exit")
filter_list = None
elif prefix_list:
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} prefix-list {} {}\n'.format(config_cmd, prefix_list, diRection)
commands.append(my_cmd)
commands.append("exit")
prefix_list = None
elif distribute_list:
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} prefix-list {} {}\n'.format(config_cmd, distribute_list, diRection)
commands.append(my_cmd)
commands.append("exit")
distribute_list = None
elif default_originate:
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
if 'routeMap' in kwargs:
my_cmd = '{} default-originate route-map {}'.format(config_cmd, routeMap)
else:
my_cmd = '{} default-originate'.format(config_cmd)
commands.append(my_cmd)
commands.append("exit")
default_originate = False
elif removePrivateAs:
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} remove-private-AS'.format(config_cmd)
commands.append(my_cmd)
commands.append("exit")
removePrivateAs = False
elif weight:
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} weight {}'.format(config_cmd, weight)
commands.append(my_cmd)
commands.append("exit")
weight = None
elif keepalive and holdtime:
if isinstance(neigh_name, dict):
my_cmd = "{} neighbor interface {} {}".format(no_neighbor, neigh_name["type"], neigh_name["number"])
else:
my_cmd = "{} neighbor {}".format(no_neighbor, neigh_name)
my_cmd = '{} timers {} {}'.format(config_cmd, keepalive, holdtime)
commands.append(my_cmd)
keepalive = 0
holdtime = 0
elif nexthop_self:
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} next-hop-self'.format(config_cmd)
commands.append(my_cmd)
commands.append("exit")
nexthop_self = None
elif pswd:
password = "" if config_cmd== 'no' else password
my_cmd = '{} password {}'.format(config_cmd, password)
commands.append(my_cmd)
pswd = False
elif update_src:
if isinstance(neigh_name, dict):
my_cmd = "{} neighbor interface {} {}".format(no_neighbor, neigh_name["type"], neigh_name["number"])
else:
my_cmd = "{} neighbor {}".format(no_neighbor, neigh_name)
my_cmd = '{} update-source {}'.format(config_cmd, update_src)
commands.append(my_cmd)
update_src = None
elif update_src_intf:
if isinstance(neigh_name, dict):
my_cmd = "{} neighbor interface {} {}".format(no_neighbor, neigh_name["type"], neigh_name["number"])
else:
my_cmd = "{} neighbor {}".format(no_neighbor, neigh_name)
if isinstance(update_src_intf, dict):
my_cmd = '{} update-source interface {} {}'.format(config_cmd, update_src_intf['type'],update_src_intf['number'])
commands.append(my_cmd)
update_src_intf = None
elif ebgp_mhop:
if isinstance(neigh_name, dict):
my_cmd = "{} neighbor interface {} {}".format(no_neighbor, neigh_name["type"], neigh_name["number"])
else:
my_cmd = "{} neighbor {}".format(no_neighbor, neigh_name)
my_cmd = '{} ebgp-multihop {}'.format(config_cmd, ebgp_mhop)
commands.append(my_cmd)
ebgp_mhop = None
elif bfd:
if interface and remote_as:
my_cmd = "neighbor interface {}".format(interface)
commands.append(my_cmd)
elif neighbor and not interface and remote_as:
my_cmd = "neighbor {}".format(neighbor)
commands.append(my_cmd)
my_cmd = "remote-as {}".format(remote_as)
commands.append(my_cmd)
my_cmd = '{} bfd'.format(config_cmd)
commands.append(my_cmd)
bfd = False
elif connect:
my_cmd = '{} timers connect {}'.format(config_cmd, connect)
commands.append(my_cmd)
connect = None
st.log('config_bgp command_list: {}'.format(commands))
#come back to router bgp context
commands.append("exit")
# elif type1 == 'failover':
# my_cmd += '{} bgp fast-external-failover\n'.format(config_cmd)
# elif type1 == 'router_id':
# st.log("Configuring the router-id on the device")
elif type1 == 'fast_external_failover':
st.log("Configuring the fast_external_failover")
my_cmd = '{} fast-external-failover'.format(config_cmd)
commands.append(my_cmd)
elif type1 == 'bgp_bestpath_selection':
st.log("Configuring bgp default bestpath selection")
my_cmd = '{} bestpath {}'.format(config_cmd, bgp_bestpath_selection)
commands.append(my_cmd)
# elif type1 == 'interface':
# my_cmd += '{} neighbor {} interface {}\n'.format(config_cmd, neighbor, interface)
# elif type1 == 'connect':
# my_cmd += '{} neighbor {} timers connect {}\n'.format(config_cmd, neighbor, connect)
elif type1 == 'max_path_ibgp':
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} maximum-paths ibgp {}'.format(config_cmd, max_path_ibgp)
commands.append(my_cmd)
commands.append("exit")
elif type1 == 'max_path_ebgp':
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
if config_cmd == '' or config_cmd == 'yes':
my_cmd = '{} maximum-paths {}'.format(config_cmd, max_path_ebgp)
else:
my_cmd = '{} maximum-paths'.format(config_cmd)
commands.append(my_cmd)
commands.append("exit")
elif type1 == 'redist':
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} redistribute {}'.format(config_cmd, redistribute)
commands.append(my_cmd)
commands.append("exit")
elif type1 == 'network':
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} network {}'.format(config_cmd, network)
commands.append(my_cmd)
commands.append("exit")
elif type1 == 'import-check':
my_cmd = '{} network import-check'.format(config_cmd)
commands.append(my_cmd)
elif type1 == 'import_vrf':
my_cmd = 'address-family {} unicast\n'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} import vrf {}'.format(config_cmd, import_vrf_name)
commands.append(my_cmd)
commands.append("exit")
elif type1 == 'multipath-relax':
my_cmd = '{} bestpath as-path multipath-relax'.format(config_cmd)
commands.append(my_cmd)
elif type1 == 'removeBGP':
st.log("Removing the bgp config from the device")
elif type1 == 'router_id':
st.log("Configuring the router-id on the device")
elif type1 == 'peer_group':
st.log("Configuring the peer_group on the device")
else:
st.log('Invalid BGP config parameter')
if config_cmd == 'no' and 'neighbor' in config_type_list and neigh_name and not peergroup:
if isinstance(neigh_name, dict):
my_cmd = "{} neighbor interface {} {}".format(config_cmd, neigh_name["type"],neigh_name["number"])
else:
my_cmd = "{} neighbor {}".format(config_cmd, neigh_name)
commands.append(my_cmd)
# commands.append("exit")
#go back to config terminal prompt
if removeBGP != 'yes':
commands.append('exit\n')
if commands:
cli_output = st.config(dut, commands, type=cli_type, skip_error_check=True)
fail_on_error(cli_output)
if vrf_name != 'default' and removeBGP == 'yes':
my_cmd = '{} router bgp vrf {}'.format(config_cmd, vrf_name)
cli_output = st.config(dut, my_cmd, type=cli_type, skip_error_check=True)
fail_on_error(cli_output)
elif vrf_name == 'default' and removeBGP == 'yes':
my_cmd = '{} router bgp'.format(config_cmd)
cli_output = st.config(dut, my_cmd, type=cli_type, skip_error_check=True)
fail_on_error(cli_output)
elif cli_type in ["rest-patch", "rest-put"]:
shutdown = kwargs.get("shutdown", None) if "shutdown" in config_type_list else None
activate = kwargs.get("activate", None) if "activate" in config_type_list else None
nexthop_self = kwargs.get("nexthop_self", True) if "nexthop_self" in config_type_list else None
pswd = True if "pswd" in config_type_list else False
update_src = kwargs.get("update_src", "") if "update_src" in config_type_list else ""
update_src_intf = get_interface_number_from_name(update_src_intf)
bfd = True if "bfd" in config_type_list else False
route_map = True if "routeMap" in config_type_list else False
default_originate = True if "default_originate" in config_type_list else False
removePrivateAs = True if "removePrivateAs" in config_type_list else False
#no_neighbor = "no" if kwargs.get("config") == "no" else ""
sub_list = ["neighbor", "routeMap", "shutdown", "activate", "nexthop_self", "pswd", "update_src",
"bfd", "default_originate", "removePrivateAs", "no_neigh", "remote-as", "filter_list",
"prefix_list", "distribute_list", "weight", "keepalive", "holdtime", "ebgp_mhop", "peergroup",
"update_src_intf", "connect"]
bgp_data = dict()
bgp_data["openconfig-network-instance:bgp"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["config"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"] = list()
bgp_data["openconfig-network-instance:bgp"]["global"]["route-selection-options"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["confederation"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["use-multiple-paths"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["dynamic-neighbor-prefixes"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["dynamic-neighbor-prefixes"][
"dynamic-neighbor-prefixe"] = list()
bgp_data["openconfig-network-instance:bgp"]["global"]["openconfig-bgp-ext:bgp-ext-route-reflector"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["openconfig-bgp-ext:global-defaults"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["openconfig-bgp-ext:update-delay"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["openconfig-bgp-ext:max-med"] = dict()
bgp_data["openconfig-network-instance:bgp"]["neighbors"] = dict()
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"] = list()
bgp_data["openconfig-network-instance:bgp"]["peer-groups"] = dict()
bgp_data["openconfig-network-instance:bgp"]["peer-groups"]["peer-group"] = list()
delete_urls = [] # All the delete URLS should be appended to this list
# neigh_name = get_interface_number_from_name(neighbor)
family = kwargs.get('family', None)
if family == "ipv4":
afi_safi_name = "openconfig-bgp-types:IPV4_UNICAST"
else:
afi_safi_name = "openconfig-bgp-types:IPV6_UNICAST"
if 'local_as' in kwargs and removeBGP != 'yes':
bgp_data["openconfig-network-instance:bgp"]["global"]["config"]["as"] = kwargs.get("local_as")
if router_id:
bgp_data["openconfig-network-instance:bgp"]["global"]["config"]["router-id"] = router_id
if peergroup:
# print(bgp_data)
peer_data = dict()
peer_data.update({'peer-group-address': peergroup})
bgp_data["openconfig-network-instance:bgp"]["peer-groups"]["peer-group"].append(peer_data)
peer_data['config'] = dict()
peer_data["config"].update({'peer-group-address': peergroup})
config_default_activate = True
config_remote_as = True
neigh_data_sub = dict()
for type1 in config_type_list:
if type1 in sub_list:
if neighbor and not peergroup:
neigh_data = dict()
neigh_data.update({"neighbor-address": neighbor})
neigh_data["config"] = dict()
neigh_data["config"].update({"neighbor-address": neighbor})
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
if peergroup:
peer_data = dict()
peer_data.update({'peer-group-address': peergroup})
bgp_data["openconfig-network-instance:bgp"]["peer-groups"]["peer-group"].append(peer_data)
peer_data['config'] = dict()
peer_data["config"].update({'peer-group-address': peergroup})
if 'peergroup' in config_type_list:
# if isinstance(neigh_name, dict):
if activate and config_cmd == "no":
url = st.get_datastore(dut, "rest_urls")['bgp_del_neighbor_config']
delete_urls.append(url.format("default", neighbor))
if delete_urls:
for url in delete_urls:
# delete_rest(dut, rest_url=url)
if not delete_rest(dut, rest_url=url):
st.error("neighbor is failed")
else:
neigh_data = dict()
neigh_data.update({"neighbor-address": neighbor})
neigh_data["config"] = dict()
neigh_data["config"].update({"neighbor-address": neighbor})
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
if neighbor:
neigh_data1 = dict()
neigh_data1["afi-safis"] = dict()
neigh_data1["afi-safis"]["afi-safi"] = list()
neigh_data1_sub = dict()
neigh_data1_sub.update({"afi-safi-name": afi_safi_name})
neigh_data1_sub["config"] = dict()
neigh_data1_sub["config"].update({"afi-safi-name": "afi-safi-name", "enabled": True})
neigh_data1.update(neigh_data1_sub)
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data1)
if config_remote_as and remote_as:
if interface and not peergroup:
neigh_data = dict()
neigh_data.update({"neighbor-address": neighbor})
neigh_data["config"] = dict()
neigh_data["config"].update({"neighbor-address": neighbor, "peer-type": remote_as})
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
config_remote_as = False
if config_default_activate and (activate or neighbor):
if config_cmd == "":
family = kwargs.get('family', None)
if family == "ipv4":
afi_safi_name = "openconfig-bgp-types:IPV4_UNICAST"
else:
afi_safi_name = "openconfig-bgp-types:IPV6_UNICAST"
afi_data = dict()
afi_data.update({"afi-safi-name": afi_safi_name})
bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
afi_data['config'] = dict()
afi_data.update({"afi-safi-name": afi_safi_name, "enabled": True})
bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
elif activate and config_cmd == "no":
url = st.get_datastore(dut, "rest_urls")['bgp_del_neighbor_config']
delete_urls.append(url.format("default", neighbor))
if delete_urls:
for url in delete_urls:
if not delete_rest(dut, rest_url=url.format("default", neighbor)):
st.error("neighbor is failed")
activate = None
if shutdown:
neigh_data["config"].update({"enabled": False})
shutdown = None
elif route_map:
neigh_data = dict()
neigh_data.update({"neighbor-address": neighbor})
neigh_data["config"] = dict()
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
neigh_data["afi-safis"] = dict()
neigh_data["afi-safis"]["afi-safi"] = list()
neigh_data_sub = dict()
neigh_data_sub.update({"afi-safi-name": "afi-safi-name"})
neigh_data_sub["config"] = dict()
neigh_data_sub["config"].update({"afi-safi-name": "afi-safi-name"})
neigh_data["afi-safis"]["afi-safi"].append(neigh_data_sub)
neigh_data_sub["apply-policy"] = dict()
neigh_data_sub["apply-policy"]["config"] = dict()
neigh_data_sub["apply-policy"]["config"].update({"import-policy": ["route-map"]})
route_map = False
elif filter_list:
neigh_data_sub["openconfig-bgp-ext:filter-list"] = dict()
neigh_data_sub["openconfig-bgp-ext:filter-list"]["config"] = dict()
neigh_data_sub["openconfig-bgp-ext:filter-list"]["config"].update({"import-policy": "filter-list"})
filter_list = None
elif prefix_list:
neigh_data_sub["openconfig-bgp-ext:prefix-list"] = dict()
neigh_data_sub["openconfig-bgp-ext:prefix-list"]["config"] = dict()
neigh_data_sub["openconfig-bgp-ext:prefix-list"]["config"].update({"import-policy": "prefix-list"})
prefix_list = None
elif distribute_list:
neigh_data_sub["openconfig-bgp-ext:prefix-list"] = dict()
neigh_data_sub["openconfig-bgp-ext:prefix-list"]["config"] = dict()
neigh_data_sub["openconfig-bgp-ext:prefix-list"]["config"].update({"import-policy": "prefix-list"})
distribute_list = None
elif default_originate:
afi_data = dict()
afi_data.update({"afi-safi-name": afi_safi_name})
bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
afi_data['config'] = dict()
afi_data["ipv4-unicast"] = dict()
afi_data["ipv4-unicast"]["config"] = dict()
afi_data.update({"afi-safi-name": afi_safi_name, "enabled": True})
if 'routeMap' in kwargs:
afi_data["ipv4-unicast"]["config"].update({"send-default-route": True, "openconfig-bgp-ext:default-policy-name": "routeMap"})
else:
afi_data["ipv4-unicast"]["config"].update({"send-default-route": True})
default_originate = False
elif removePrivateAs:
neigh_data_sub["openconfig-bgp-ext:remove-private-as"] = dict()
neigh_data_sub["openconfig-bgp-ext:remove-private-as"]["config"] = dict()
neigh_data_sub["openconfig-bgp-ext:remove-private-as"]["config"].update({"enabled": True})
removePrivateAs = False
elif weight:
afi_data = dict()
afi_data.update({"afi-safi-name": afi_safi_name})
bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
afi_data['config'] = dict()
afi_data.update({"afi-safi-name": afi_safi_name, "enabled": True})
neigh_data = dict()
neigh_data["afi-safis"] = dict()
neigh_data["afi-safis"]["afi-safi"] = list()
neigh_data_sub = dict()
neigh_data_sub.update({"afi-safi-name": afi_safi_name})
neigh_data["afi-safis"]["afi-safi"].append(neigh_data_sub)
neigh_data_sub['config'] = dict()
neigh_data_sub['config'].update(
{"afi-safi-name": afi_safi_name, "enabled": True, "openconfig-bgp-ext:weight": 0})
neigh_data["afi-safis"]["afi-safi"].append(neigh_data_sub)
bgp_data["openconfig-network-instance:bgp"]["global"]["neighbors"]["neighbor"].append(neigh_data)
weight = None
elif keepalive and holdtime:
if isinstance(neighbor, dict):
url = st.get_datastore(dut, "rest_urls")['bgp_del_neighbor_config']
delete_urls.append(url.format("default", neighbor))
if delete_urls:
for url in delete_urls:
if not delete_rest(dut, rest_url=url.format("default", neighbor)):
st.error("neighbor is failed")
else:
url = st.get_datastore(dut, "rest_urls")['bgp_del_neighbor_config']
delete_urls.append(url.format("default", neighbor))
if delete_urls:
for url in delete_urls:
if not delete_rest(dut, rest_url=url.format("default", neighbor)):
st.error("neighbor is failed")
neigh_data["timers"] = dict()
neigh_data["timers"]["config"] = dict()
neigh_data["timers"]["config"].update({"hold-time": holdtime, "keepalive-interval": keepalive})
keepalive = 0
holdtime = 0
elif nexthop_self:
afi_data = dict()
afi_data.update({"afi-safi-name": afi_safi_name})
bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
afi_data['config'] = dict()
afi_data.update({"afi-safi-name": afi_safi_name, "enabled": True})
neigh_data = dict()
neigh_data.update({"neighbor-address": "string"})
neigh_data["afi-safis"] = dict()
neigh_data["afi-safis"]["afi-safi"] = list()
neigh_data_sub = dict()
neigh_data_sub.update({"afi-safi-name": afi_safi_name})
neigh_data_sub["openconfig-bgp-ext:next-hop-self"] = dict()
neigh_data_sub["openconfig-bgp-ext:next-hop-self"]["config"] = dict()
neigh_data_sub["openconfig-bgp-ext:next-hop-self"]["config"].update(
{"enabled": True, "force": True})
neigh_data["afi-safis"]["afi-safi"].append(neigh_data_sub)
bgp_data["openconfig-network-instance:bgp"]["global"]["neighbors"]["neighbor"].append(neigh_data)
nexthop_self = None
elif pswd:
password = "" if config_cmd == 'no' else password
if password:
neigh_data["openconfig-bgp-ext:auth-password"] = dict()
neigh_data["openconfig-bgp-ext:auth-password"]["config"] = dict()
neigh_data["openconfig-bgp-ext:auth-password"]["config"].update({"password": password})
else:
url = st.get_datastore(dut, "rest_urls")['bgp_del_neighbor_config']
delete_urls.append(url.format("default", neighbor))
if delete_urls:
for url in delete_urls:
if not delete_rest(dut, rest_url=url.format("default", neighbor)):
st.error("neighbor is failed")
pswd = False
elif update_src:
neigh_data = dict()
neigh_data.update({"neighbor-address": neighbor})
neigh_data["config"] = dict()
neigh_data["config"].update({"neighbor-address": neighbor})
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
update_src = None
elif update_src_intf:
if neighbor:
neigh_data = dict()
neigh_data.update({"neighbor-address": neighbor})
neigh_data["config"] = dict()
neigh_data["config"].update({"neighbor-address": neighbor})
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
else:
url = st.get_datastore(dut, "rest_urls")['bgp_del_neighbor_config']
delete_urls.append(url.format("default", neighbor))
if delete_urls:
for url in delete_urls:
if not delete_rest(dut, rest_url=url.format("default", neighbor)):
st.error("neighbor is failed")
if update_src_intf:
neigh_data = dict()
neigh_data.update({"neighbor-address": neighbor})
neigh_data["config"] = dict()
neigh_data["config"].update({"neighbor-address": neighbor})
neigh_data["transport"] = dict()
neigh_data["transport"]["config"] = dict()
neigh_data.update({"local-address": update_src_intf})
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
update_src_intf = None
elif ebgp_mhop:
if neighbor:
neigh_data = dict()
neigh_data.update({"neighbor-address": neighbor})
neigh_data["config"] = dict()
neigh_data["config"].update({"neighbor-address": neighbor})
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
else:
url = st.get_datastore(dut, "rest_urls")['bgp_del_neighbor_config']
delete_urls.append(url.format("default", neighbor))
if delete_urls:
for url in delete_urls:
if not delete_rest(dut, rest_url=url.format("default", neighbor)):
st.error("neighbor is failed")
neigh_data = dict()
neigh_data.update({"neighbor-address": neighbor})
neigh_data["config"] = dict()
neigh_data["config"].update({"neighbor-address": neighbor})
neigh_data["ebgp-multihop"] = dict()
neigh_data["ebgp-multihop"]["config"] = dict()
neigh_data.update({"multihop-ttl": ebgp_mhop})
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
ebgp_mhop = None
elif bfd:
if interface and remote_as:
neigh_data = dict()
neigh_data.update({"neighbor-address": neighbor})
neigh_data["config"] = dict()
neigh_data["config"].update({"neighbor-address": neighbor})
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
elif neighbor and not interface and remote_as:
neigh_data = dict()
neigh_data.update({"neighbor-address": neighbor})
neigh_data["config"] = dict()
neigh_data["config"].update({"neighbor-address": neighbor})
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
neigh_data["config"].update({"neighbor-address": neighbor, "peer-type": remote_as})
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
neigh_data["openconfig-bfd:enable-bfd"] = dict()
neigh_data["openconfig-bfd:enable-bfd"]["config"] = dict()
neigh_data.update({"enabled": True})
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
bfd = False
elif connect:
peer_data = dict()
peer_data.update({'peer-group-address': peergroup})
bgp_data["openconfig-network-instance:bgp"]["peer-groups"]["peer-group"].append(peer_data)
peer_data['timers'] = dict()
peer_data['timers']["config"] = dict()
peer_data.update({"connect-retry": 10})
connect = None
elif type1 == 'fast_external_failover':
bgp_data["openconfig-network-instance:bgp"]["global"]["config"].update(
{"openconfig-bgp-ext:fast-external-failover": True})
elif type1 == 'bgp_bestpath_selection':
bgp_data["openconfig-network-instance:bgp"]["global"]["route-selection-options"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["route-selection-options"]["config"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["route-selection-options"]["config"].update(
{"external-compare-router-id": True})
elif type1 == 'max_path_ibgp':
afi_data = dict()
afi_data.update({"afi-safi-name": afi_safi_name})
bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
afi_data['config'] = dict()
afi_data.update({"afi-safi-name": afi_safi_name, "enabled": True})
afi_data["use-multiple-paths"] = dict()
afi_data["use-multiple-paths"]["ibgp"] = dict()
afi_data["use-multiple-paths"]["ibgp"]["config"] = dict()
afi_data["use-multiple-paths"]["ibgp"]["config"].update({"maximum-paths": type1})
bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
elif type1 == 'max_path_ebgp':
afi_data = dict()
afi_data.update({"afi-safi-name": afi_safi_name})
bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
afi_data['config'] = dict()
afi_data.update({"afi-safi-name": 'afi-safi-name'})
if config_cmd == '' or config_cmd == 'yes':
afi_data["use-multiple-paths"] = dict()
afi_data["use-multiple-paths"]["ebgp"] = dict()
afi_data["use-multiple-paths"]["ebgp"]["config"] = dict()
afi_data["use-multiple-paths"]["ebgp"]["config"].update({"maximum-paths": type1})
bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
else:
bgp_data["openconfig-network-instance:bgp"]["global"]["use-multiple-paths"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["use-multiple-paths"]["ebgp"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["use-multiple-paths"]["ebgp"][
"config"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["use-multiple-paths"]["ebgp"][
"config"].update({"allow-multiple-as": True})
elif type1 == 'redist':
afi_data = dict()
afi_data.update({"afi-safi-name": afi_safi_name})
bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
afi_data['config'] = dict()
afi_data.update({"afi-safi-name": afi_safi_name})
# my_cmd = '{} redistribute {}'.format(config_cmd, redistribute) # SW defect is there
elif type1 == 'network':
afi_data = dict()
afi_data.update({"afi-safi-name": afi_safi_name})
bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
afi_data['config'] = dict()
afi_data.update({"afi-safi-name": afi_safi_name})
afi_data["openconfig-bgp-ext:network-config"] = dict()
afi_data["openconfig-bgp-ext:network-config"]["network"] = list()
obe_data = dict()
obe_data.update({"prefix": "string"})
obe_data["config"] = dict()
obe_data.update({"prefix": "network"})
afi_data["openconfig-bgp-ext:network-config"]["network"].append(obe_data)
bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
elif type1 == 'import-check':
bgp_data["openconfig-network-instance:bgp"]["global"]["config"].update({"openconfig-bgp-ext:network-import-check": True})
elif type1 == 'import_vrf':
afi_data = dict()
afi_data.update({"afi-safi-name": afi_safi_name})
bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
afi_data['config'] = dict()
afi_data.update({"afi-safi-name": afi_safi_name})
afi_data["openconfig-bgp-ext:import-network-instance"] = dict()
afi_data["openconfig-bgp-ext:import-network-instance"]["config"] = dict()
afi_data["openconfig-bgp-ext:import-network-instance"]["config"].update({"name": "import_vrf"})
elif type1 == 'multipath-relax':
bgp_data["openconfig-network-instance:bgp"]["global"]["use-multiple-paths"] = dict()
ump_data = dict()
ump_data["ebpg"] = dict()
ump_data["ebpg"]["config"] = dict()
ump_data["ebpg"]["config"].update({"allow-multiple-as": True})
bgp_data["openconfig-network-instance:bgp"]["global"]["use-multiple-paths"].update(ump_data)
elif type1 == 'removeBGP':
st.log("Removing the bgp config from the device")
elif type1 == 'router_id':
st.log("Configuring the router-id on the device")
elif type1 == 'peer_group':
st.log("Configuring the peer_group on the device")
else:
st.log('Invalid BGP config parameter')
if config_cmd == 'no' and 'neighbor' in config_type_list and neighbor and not peergroup:
#if isinstance(neigh_name, dict):
if neighbor and config_cmd == "no":
url = st.get_datastore(dut, "rest_urls")['bgp_del_neighbor_config']
if delete_urls:
for url in delete_urls:
delete_rest(dut, rest_url=url.format("default", neighbor))
if not delete_rest(dut, rest_url=url.format("default", neighbor)):
st.error("neighbor is failed")
else:
neigh_data = dict()
neigh_data.update({"neighbor-address": neighbor})
neigh_data["config"] = dict()
neigh_data["config"].update({"neighbor-address": neighbor})
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
if neighbor:
neigh_data1 = dict()
neigh_data1["afi-safis"] = dict()
neigh_data1["afi-safis"]["afi-safi"] = list()
neigh_data1_sub = dict()
neigh_data1_sub.update({"afi-safi-name": afi_safi_name})
neigh_data1_sub["config"] = dict()
neigh_data1_sub["config"].update({"afi-safi-name": "afi-safi-name", "enabled": True})
neigh_data1.update(neigh_data1_sub)
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data1)
if vrf_name != 'default' and removeBGP == 'yes':
bgp_data["openconfig-network-instance:bgp"]["global"]["config"].update({"as": 0})
else:
st.log("Unsupported CLI TYPE - {}".format(cli_type))
return False
def verify_bgp_neighborship(dut, family='ipv4', shell="sonic", **kwargs):
    """
    Poll until the BGP neighborship reaches the expected state.
    Author: Chaitanya Vella (chaitanya-vella.kumar@broadcom.com)
    :param dut:
    :param family: address family to check ('ipv4'/'ipv6')
    :param shell: shell/backend used by verify_bgp_summary
    :param kwargs: neighbor, state, delay (seconds between polls, default 1),
                   iterations (extra retries after the first check, default 5)
    :return: True when the neighbor reaches the state, False otherwise
    """
    max_retries = kwargs.get("iterations", 5)
    poll_delay = kwargs.get("delay", 1)
    # Both the neighbor and the expected state are mandatory.
    if "neighbor" not in kwargs or "state" not in kwargs:
        st.log("Required values not found ....")
        return False
    # One initial check plus max_retries re-checks, waiting between attempts.
    for attempt in range(max_retries + 1):
        if verify_bgp_summary(dut, family, shell, neighbor=kwargs["neighbor"], state=kwargs["state"]):
            st.log("BGP neigborship found ....")
            return True
        if attempt < max_retries:
            st.wait(poll_delay)
    st.log("Reached max iteration count, Exiting ...")
    return False
def show_ip_bgp_route(dut, family='ipv4', **kwargs):
    """
    API for show ip bgp
    Author: Chaitanya Vella (chaitanya-vella.kumar@broadcom.com)
    :param dut:
    :param family: address family ('ipv4'/'ipv6')
    :param kwargs: may carry a cli_type override for get_show_cli_type
    :return: parsed 'show bgp' output, or [] for an unsupported cli_type
    """
    cli_type = get_show_cli_type(dut, **kwargs)
    if cli_type == 'vtysh':
        command = "show bgp {}".format(family)
    elif cli_type == 'klish':
        command = "show bgp {} unicast".format(family)
    else:
        # Bug fix: 'command' was previously unbound for any other cli_type,
        # raising UnboundLocalError at the st.show call; fail gracefully.
        st.log("Unsupported CLI TYPE - {}".format(cli_type))
        return []
    return st.show(dut, command, type=cli_type)
def fetch_ip_bgp_route(dut, family='ipv4', match=None, select=None, **kwargs):
    """
    Return 'show bgp' entries filtered by *match*, projected onto *select* columns.
    :param dut:
    :param family: address family ('ipv4'/'ipv6')
    :param match: dict of field/value pairs to filter on (e.g. {'network': net})
    :param select: list of columns to keep, or None for all
    :return: filtered entries
    """
    cli_type = get_show_cli_type(dut, **kwargs)
    route_output = show_ip_bgp_route(dut, family=family, cli_type=cli_type)
    return filter_and_select(route_output, select, match)
def get_ip_bgp_route(dut, family='ipv4', **kwargs):
    """
    Fetch the 'network' and 'as_path' fields of BGP routes matching each filter.
    :param dut:
    :param family: address family ('ipv4'/'ipv6')
    :param kwargs: field/value pairs to match one at a time (plus optional cli_type)
    :return: entries matching the last filter applied, False when any filter
             yields no entries
    """
    cli_type = get_show_cli_type(dut, **kwargs)
    output = show_ip_bgp_route(dut, family=family, cli_type=cli_type)
    st.debug(output)
    kwargs.pop("cli_type", None)
    # Bug fix: 'entries' was unbound (NameError on return) when no filter
    # kwargs were supplied.
    entries = []
    get_list = ["network", "as_path"]  # hoisted: loop-invariant
    for key, value in kwargs.items():
        entries = filter_and_select(output, get_list, {key: value})
        if not entries:
            st.log("Could not get bgp route info")
            return False
    return entries
def verify_ip_bgp_route(dut, family='ipv4', **kwargs):
    """
    Verify each given field/value pair appears in 'show bgp' output.
    EX; verify_ip_bgp_route(vars.D1, network= '11.2.1.2/24')
    :return: True when every pair matches, False on the first miss
    """
    cli_type = get_show_cli_type(dut, **kwargs)
    route_output = show_ip_bgp_route(dut, family=family, cli_type=cli_type)
    kwargs.pop("cli_type", None)
    for field, expected in kwargs.items():
        if not filter_and_select(route_output, None, {field: expected}):
            st.log("{} and {} is not match ".format(field, expected))
            return False
    return True
def verify_ip_bgp_route_network_list(dut, family='ipv4', nw_list=None, **kwargs):
    """
    Verify that every network in *nw_list* is present in 'show bgp' output.
    :param dut:
    :param family: address family ('ipv4'/'ipv6')
    :param nw_list: list of network prefixes to check (defaults to empty list)
    :return: True when all networks are found, False on the first miss
    """
    # Bug fix: previously used a mutable default argument (nw_list=[]);
    # replaced with the None-sentinel idiom (backward compatible).
    nw_list = nw_list if nw_list is not None else []
    cli_type = get_show_cli_type(dut, **kwargs)
    output = show_ip_bgp_route(dut, family=family, cli_type=cli_type)
    for network in nw_list:
        if not filter_and_select(output, None, {'network': network}):
            st.log("BGP Network {} is not matching ".format(network))
            return False
    return True
def check_bgp_config_in_startupconfig(dut, config_list):
    """
    API to check the configuration in startup config
    :param dut:
    :param config_list: list of configuration commands to check in statup config
    :return: True when every command is present, else False
    """
    raw_output = st.show(dut, "show startupconfiguration bgp", skip_error_check=True)
    present_lines = raw_output.splitlines()
    return all(config in present_lines for config in config_list)
def show_bgp_ipvx_prefix(dut, prefix, masklen, family='ipv4'):
    """
    API for show bgp ipv4 prefix
    :param dut:
    :param prefix: (ip address)
    :param masklen: length of mask (e.g. 24)
    :param family: ipv4/ipv6
    EX: show_bgp_ipvx_prefix(dut1, prefix="40.1.1.1", masklen=32, family='ipv4')
    :return: parsed entries from vtysh
    """
    # NOTE(review): could likely be replaced by get_ip_bgp_route/verify_ip_bgp_route.
    entries = st.show(dut, "show bgp {} {}/{}".format(family, prefix, masklen), type='vtysh')
    st.log(entries)
    return entries
def show_bgp_ip_prefix(dut, ip_prefix, family='ipv4'):
    """
    API for show bgp ipv4 prefix
    :param dut:
    :param ip_prefix: ip address with or without subnet <ip>/<mask>
    :param family: ipv4/ipv6
    EX: show_bgp_ipvx_prefix(dut1, prefix="40.1.1.1/32", family='ipv4')
    :return: parsed entries from vtysh, {} for an unsupported family
    """
    # NOTE(review): could likely be replaced by get_ip_bgp_route/verify_ip_bgp_route.
    if family not in ('ipv4', 'ipv6'):
        return {}
    return st.show(dut, "show bgp {} {}".format(family, ip_prefix), type='vtysh')
def activate_bgp_neighbor(dut, local_asn, neighbor_ip, family="ipv4", config='yes', vrf='default', **kwargs):
    """
    Activate (or deactivate) a BGP neighbor under the given address family.
    :param dut:
    :param local_asn: local AS number
    :param neighbor_ip: neighbor address or interface name
    :param family: 'ipv4' or 'ipv6' only
    :param config: 'yes' to activate, anything else to deactivate
    :param vrf: VRF name; 'default' targets the global BGP instance
    :param kwargs: skip_error_check (default True), remote_asn, cli_type override
    :return: True on success, False for unsupported family or cli_type
    """
    st.log("Activate BGP neigbor")
    cli_type = get_cfg_cli_type(dut, **kwargs)
    skip_error_check = kwargs.get('skip_error_check', True)
    remote_asn = kwargs.get('remote_asn', '')
    mode = "" if config.lower() == 'yes' else 'no'
    if family not in ('ipv4', 'ipv6'):
        return False
    if cli_type == 'vtysh':
        parts = []
        if vrf != 'default':
            parts.append('router bgp {} vrf {}\n'.format(local_asn, vrf))
        else:
            parts.append('router bgp {}\n'.format(local_asn))
        if remote_asn != '':
            parts.append('neighbor {} remote-as {}\n'.format(neighbor_ip, remote_asn))
        parts.append('address-family {} unicast\n'.format(family))
        parts.append('{} neighbor {} activate\n'.format(mode, neighbor_ip))
        parts.append('\n end')
        st.config(dut, ''.join(parts), type='vtysh', skip_error_check=skip_error_check)
        return True
    elif cli_type == "klish":
        # klish addresses the neighbor via a sub-mode, interface-style when possible.
        neigh_name = get_interface_number_from_name(neighbor_ip)
        parts = []
        if vrf != 'default':
            parts.append('router bgp {} vrf {}\n'.format(local_asn, vrf))
        else:
            parts.append('router bgp {}\n'.format(local_asn))
        if neigh_name:
            if isinstance(neigh_name, dict):
                parts.append('neighbor interface {} {}\n'.format(neigh_name["type"], neigh_name["number"]))
            else:
                parts.append('neighbor {}\n'.format(neigh_name))
            parts.append('remote-as {}\n'.format(remote_asn))
        parts.append('address-family {} unicast\n'.format(family))
        parts.append(' {} activate\n'.format(mode))
        parts.append('exit\nexit\nexit\n')
        st.config(dut, ''.join(parts), type=cli_type, skip_error_check=skip_error_check, conf=True)
        return True
    st.log("Unsupported CLI TYPE - {}".format(cli_type))
    return False
def bgp_debug_config(dut, **kwargs):
    """
    API to enable BGP zebra logs
    :param dut:
    :param prefix: (ip address)
    :param message: eg update
    """
    # Debug command, no klish supported needed for this API
    debug_lines = ["debug bgp zebra"]
    if "prefix" in kwargs:
        debug_lines.append("debug bgp zebra prefix {}".format(kwargs["prefix"]))
    if kwargs.get("message") == "updates":
        debug_lines.append("debug bgp updates")
        debug_lines.append("debug bgp update-groups")
        debug_lines.append("log stdout")
    st.config(dut, "".join(line + "\n" for line in debug_lines), type='vtysh')
class ASPathAccessList:
    """
    Builder for BGP AS-path access-list configuration commands.

    Usage:
    aspath_access_list = ASPathAccessList("testaspath")
    aspath_access_list.add_match_permit_sequence(['_65001', '65002', '65003'])
    aspath_access_list.add_match_deny_sequence(['_1^', '_2$', '_3*'])
    aspath_access_list.add_match_permit_sequence(['_65100^'])
    aspath_access_list.execute_command(dut, config='yes')
    cmd_str = aspath_access_list.config_command_string()
    aspath_access_list.execute_command(dut, config='no')
    """

    def __init__(self, name, cli_type=''):
        self.name = name
        self.cli_type = get_cfg_cli_type(None, cli_type=cli_type)
        self.match_sequence = []
        # Command keyword differs between the two supported CLIs; any other
        # cli_type leaves cmdkeyword unset (as in the original behavior).
        if self.cli_type == 'vtysh':
            self.cmdkeyword = 'bgp as-path access-list'
        elif self.cli_type == 'klish':
            self.cmdkeyword = 'bgp as-path-list'

    def add_match_permit_sequence(self, as_path_regex_list):
        # Record a permit rule with its AS-path regex list.
        self.match_sequence.append(('permit', as_path_regex_list))

    def add_match_deny_sequence(self, as_path_regex_list):
        # Record a deny rule with its AS-path regex list.
        self.match_sequence.append(('deny', as_path_regex_list))

    def config_command_string(self):
        """Render all recorded rules as CLI configuration text."""
        rendered = []
        if self.cli_type == 'vtysh':
            for action, regex_list in self.match_sequence:
                line = '{} {} {}'.format(self.cmdkeyword, self.name, action)
                line += ''.join(' {}'.format(regex) for regex in regex_list)
                rendered.append(line + '\n')
        elif self.cli_type == 'klish':
            for action, regex_list in self.match_sequence:
                rendered.append('{} {} {} [{}]\n'.format(
                    self.cmdkeyword, self.name, action, ','.join(regex_list)))
        return ''.join(rendered)

    def unconfig_command_string(self):
        """Render the single command that removes the whole list."""
        return 'no {} {}\n'.format(self.cmdkeyword, self.name)

    def execute_command(self, dut, config='yes'):
        """Apply (config='yes') or remove (config='no') the list on the DUT."""
        if config == 'no':
            command = self.unconfig_command_string()
        else:
            command = self.config_command_string()
        st.config(dut, command, type=self.cli_type)
| 45.316309
| 240
| 0.573719
| 18,499
| 156,160
| 4.629277
| 0.027028
| 0.046183
| 0.017469
| 0.015974
| 0.827423
| 0.775599
| 0.726684
| 0.68816
| 0.641043
| 0.608394
| 0
| 0.005536
| 0.292059
| 156,160
| 3,445
| 241
| 45.329463
| 0.769095
| 0.084631
| 0
| 0.662926
| 0
| 0
| 0.210448
| 0.034197
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033282
| false
| 0.010449
| 0.012384
| 0
| 0.104102
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
967ecd67e31aba03d61c20f92bc7ac41b6434102
| 455
|
py
|
Python
|
capitalize.py
|
hydrogeohc/Programming_tipsandnotes
|
266e7bc6f605ef16e89831bd95b86c60ddbe3a81
|
[
"MIT"
] | null | null | null |
capitalize.py
|
hydrogeohc/Programming_tipsandnotes
|
266e7bc6f605ef16e89831bd95b86c60ddbe3a81
|
[
"MIT"
] | null | null | null |
capitalize.py
|
hydrogeohc/Programming_tipsandnotes
|
266e7bc6f605ef16e89831bd95b86c60ddbe3a81
|
[
"MIT"
] | null | null | null |
## Capitalizes the first letter of a string.
## Capitalizes the first letter of the string and then concatenates it with the rest of the string. Omit the lower_rest parameter to keep the rest of the string intact, or set it to True to convert it to lowercase.
def capitalize(string, lower_rest=False):
    """Return *string* with its first character upper-cased.

    When lower_rest is True, the remainder is also lower-cased;
    otherwise it is left untouched. Safe on the empty string.
    """
    head = string[:1].upper()
    tail = string[1:]
    if lower_rest:
        tail = tail.lower()
    return head + tail
# capitalize('fooBar') # 'FooBar'
# capitalize('fooBar', True) # 'Foobar'
| 45.5
| 198
| 0.712088
| 71
| 455
| 4.521127
| 0.492958
| 0.046729
| 0.056075
| 0.093458
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008065
| 0.182418
| 455
| 9
| 199
| 50.555556
| 0.854839
| 0.67033
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
96a37a9d7d4699466df4f12916f37e85ae15ccaf
| 337
|
py
|
Python
|
projects/serializers.py
|
dhavall13/fehler_core
|
dd27802d5b227a32aebcc8bfde68e78a69a36d66
|
[
"MIT"
] | null | null | null |
projects/serializers.py
|
dhavall13/fehler_core
|
dd27802d5b227a32aebcc8bfde68e78a69a36d66
|
[
"MIT"
] | null | null | null |
projects/serializers.py
|
dhavall13/fehler_core
|
dd27802d5b227a32aebcc8bfde68e78a69a36d66
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
from .models import Project, Risk
class ProjectSerializer(serializers.ModelSerializer):
    """Serializes Project instances, exposing name, space and description."""

    class Meta:
        model = Project
        # Only these fields are exposed through the API.
        fields = ["name", "space", "description"]
class RiskSerializer(serializers.ModelSerializer):
    """Serializes Risk instances, exposing every model field."""

    class Meta:
        model = Risk
        # "__all__" includes every field on the Risk model.
        fields = "__all__"
| 21.0625
| 53
| 0.691395
| 32
| 337
| 7.125
| 0.59375
| 0.22807
| 0.27193
| 0.307018
| 0.350877
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.225519
| 337
| 15
| 54
| 22.466667
| 0.873563
| 0
| 0
| 0.2
| 0
| 0
| 0.080119
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
96a4e785762d420f3e2403085922366bad37f0d5
| 89
|
py
|
Python
|
myauths/apps.py
|
benoitboyer/DjangoBio
|
415e048e7207f4abc6ac9b6bde7b7c7043aab78a
|
[
"MIT"
] | null | null | null |
myauths/apps.py
|
benoitboyer/DjangoBio
|
415e048e7207f4abc6ac9b6bde7b7c7043aab78a
|
[
"MIT"
] | null | null | null |
myauths/apps.py
|
benoitboyer/DjangoBio
|
415e048e7207f4abc6ac9b6bde7b7c7043aab78a
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class MyauthsConfig(AppConfig):
    """Django application configuration for the 'myauths' app."""
    name = 'myauths'
| 14.833333
| 33
| 0.752809
| 10
| 89
| 6.7
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168539
| 89
| 5
| 34
| 17.8
| 0.905405
| 0
| 0
| 0
| 0
| 0
| 0.078652
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
96ad2561503ec1cf6a1a63efae5d0974b55518e6
| 210
|
py
|
Python
|
notice_comment/models.py
|
navigo/regulations-site
|
910c24e46f4e921210a40da452dff69feae692d4
|
[
"CC0-1.0"
] | 18
|
2016-09-22T05:05:16.000Z
|
2021-07-28T18:13:48.000Z
|
notice_comment/models.py
|
navigo/regulations-site
|
910c24e46f4e921210a40da452dff69feae692d4
|
[
"CC0-1.0"
] | 260
|
2016-04-05T22:06:10.000Z
|
2021-01-07T22:08:15.000Z
|
notice_comment/models.py
|
navigo/regulations-site
|
910c24e46f4e921210a40da452dff69feae692d4
|
[
"CC0-1.0"
] | 25
|
2016-04-06T03:26:42.000Z
|
2020-10-19T16:49:23.000Z
|
from django.db import models
class FailedCommentSubmission(models.Model):
    """ Model that holds comment submissions that failed when submitted to
    regulations.gov.
    """
    # Raw text of the failed comment, kept so it can be retried or inspected.
    body = models.TextField()
| 23.333333
| 74
| 0.728571
| 24
| 210
| 6.375
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 210
| 8
| 75
| 26.25
| 0.9
| 0.395238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
96aee6f49a25b93a369e1c4505d3f2315e9e5fad
| 2,491
|
py
|
Python
|
fitDataInfo.py
|
andreasbayer/AEGUIFit
|
6a1e31091b74d648d007c75c9fef6efae4086860
|
[
"BSD-3-Clause"
] | null | null | null |
fitDataInfo.py
|
andreasbayer/AEGUIFit
|
6a1e31091b74d648d007c75c9fef6efae4086860
|
[
"BSD-3-Clause"
] | null | null | null |
fitDataInfo.py
|
andreasbayer/AEGUIFit
|
6a1e31091b74d648d007c75c9fef6efae4086860
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
class fitDataInfo():
    """State holder for a single fit: raw data, fitted data, errors and
    bookkeeping flags. Several methods (progressUpdate, getName, getFitFunc,
    fitToFunction, get_meta_string) are stubs intended for subclasses.
    """

    # User-facing status messages for fit results.
    SUCCESS = "Fit succeeded."
    FAILURE = "Fit failed."

    def __init__(self, index):
        """Initialize empty fit state for the fit at position *index*."""
        self._fitData = None            # fitted points; None until a fit runs
        self._data = None               # raw input points; None until setData
        self._stdDev = 1
        self._stdErr = None
        self._fitFunction = None        # (was assigned twice; once suffices)
        self._msg = ""
        self._passProgressUpdate = None  # external progress callback
        self._fit_index = index
        self._isDisabled = False
        self._weighted = True
        self._shift = 0                 # cumulative x-shift from shift_fit()

    def is_initialized(self):
        """Return True once raw data has been assigned via setData()."""
        return self._data is not None

    def progressUpdate(self, relation, info):
        """Stub: subclasses report fit progress here."""
        pass

    def getName(self):
        """Stub: subclasses return a display name."""
        pass

    def isDisabled(self):
        return self._isDisabled

    def setDisabled(self, disabled):
        self._isDisabled = disabled

    def isFitted(self):
        """Return True if fit data is present (a fit has completed)."""
        return (self._getFitData() is not None)

    def is_weighted(self):
        return self._weighted

    def set_weighted(self, value):
        self._weighted = value

    def get_fit_index(self):
        return self._fit_index

    def set_fit_index(self, p_index):
        self._fit_index = p_index

    def setProgressUpdateFunction(self, updateFunction):
        """Register an external callback used to relay progress updates."""
        self._passProgressUpdate = updateFunction

    def getFitFunc(self):
        """Stub: subclasses return their fit function."""
        pass

    def get_msg(self):
        return self._msg

    def setData(self, data):
        self._data = data

    def setStdErr(self, std_err):
        self._stdErr = std_err

    def emitProgressUpdate(self, relation):
        # NOTE(review): self.progressUpdate is a plain method here, so
        # `.emit` raises AttributeError if this is ever called; presumably
        # progressUpdate is a Qt signal in subclasses -- confirm before use.
        self.progressUpdate.emit(relation)

    def reset(self):
        """Drop fit results and raw data (stdDev and flags are kept)."""
        self._fitData = None
        self._data = None
        self._stdErr = None

    def _getFitData(self):
        return self._fitData

    def _setFitData(self, newFitData):
        self._fitData = newFitData

    def getStdDeviation(self):
        return self._stdDev

    def getFitData(self):
        return self._fitData

    def get_data(self):
        return self._data

    def get_std_err(self):
        return self._stdErr

    def fitToFunction(self):
        """Stub: subclasses perform the actual fit."""
        pass

    def shift_fit(self, increment):
        """Shift both fitted and raw data along x by *increment*, in place.

        Accumulates the total offset in self._shift; only mutates point
        rows when a fit exists.
        """
        self._shift += increment
        if self.isFitted():
            # Renamed loop variable: original shadowed the builtin `set`.
            for row in self._fitData:
                row[0] += increment
            for row in self._data:
                row[0] += increment

    def get_meta_string(self):
        """Stub: subclasses return a metadata description string."""
        pass

    def get_shift(self):
        """Return the cumulative x-shift applied so far."""
        return self._shift
| 22.853211
| 56
| 0.575672
| 268
| 2,491
| 5.108209
| 0.25
| 0.087655
| 0.122717
| 0.027757
| 0.138787
| 0.099343
| 0.099343
| 0
| 0
| 0
| 0
| 0.00246
| 0.34725
| 2,491
| 109
| 57
| 22.853211
| 0.839483
| 0
| 0
| 0.217949
| 0
| 0
| 0.010032
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.358974
| false
| 0.089744
| 0.012821
| 0.153846
| 0.564103
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 4
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.