hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
90a9e55b40daa95603fdad1b1cdd9813a1176b63
| 87
|
py
|
Python
|
intermod.py
|
my0373/intermod
|
52efcf043c133825d7a7bfec0f04516b31161aa3
|
[
"MIT"
] | null | null | null |
intermod.py
|
my0373/intermod
|
52efcf043c133825d7a7bfec0f04516b31161aa3
|
[
"MIT"
] | null | null | null |
intermod.py
|
my0373/intermod
|
52efcf043c133825d7a7bfec0f04516b31161aa3
|
[
"MIT"
] | null | null | null |
import sys
from nixgeek.utils import *
if __name__ == '__main__':
exit_failure(5)
| 17.4
| 27
| 0.712644
| 12
| 87
| 4.416667
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014085
| 0.183908
| 87
| 5
| 28
| 17.4
| 0.732394
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
90c5790d164e29d68a6a76d9b4c4f93a3afab5ab
| 115
|
py
|
Python
|
examples/python/setup_cython.py
|
lanahuong/dubrayn.github.io
|
fd78edb38442966395161cfcf9dfd9b464703ddd
|
[
"MIT"
] | 7
|
2019-04-12T07:46:05.000Z
|
2022-03-30T06:11:47.000Z
|
examples/python/setup_cython.py
|
lanahuong/dubrayn.github.io
|
fd78edb38442966395161cfcf9dfd9b464703ddd
|
[
"MIT"
] | 3
|
2018-03-05T20:35:50.000Z
|
2022-03-28T13:09:44.000Z
|
examples/python/setup_cython.py
|
lanahuong/dubrayn.github.io
|
fd78edb38442966395161cfcf9dfd9b464703ddd
|
[
"MIT"
] | 10
|
2017-10-04T07:05:51.000Z
|
2020-12-15T12:08:32.000Z
|
from distutils.core import setup
from Cython.Build import cythonize
setup(
ext_modules=cythonize("calc.pyx"),
)
| 16.428571
| 36
| 0.782609
| 16
| 115
| 5.5625
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121739
| 115
| 6
| 37
| 19.166667
| 0.881188
| 0
| 0
| 0
| 0
| 0
| 0.069565
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
90da2c3c7d9d2618d0c267d0f673911baa130ec2
| 294
|
py
|
Python
|
domoji/__main__.py
|
M4cs/Domoji
|
7b1ade462bb189a2a480caee2366378bf72f3faf
|
[
"MIT"
] | 2
|
2019-08-20T21:37:26.000Z
|
2020-11-06T02:07:05.000Z
|
domoji/__main__.py
|
M4cs/Domoji
|
7b1ade462bb189a2a480caee2366378bf72f3faf
|
[
"MIT"
] | null | null | null |
domoji/__main__.py
|
M4cs/Domoji
|
7b1ade462bb189a2a480caee2366378bf72f3faf
|
[
"MIT"
] | null | null | null |
from domoji import *
from crayons import *
def start():
try:
menu()
except KeyboardInterrupt:
print(red('\nExiting...'))
exit(1)
if __name__ == "__main__":
try:
menu()
except KeyboardInterrupt:
print(red('\nExiting...'))
exit(1)
| 18.375
| 34
| 0.547619
| 29
| 294
| 5.275862
| 0.62069
| 0.091503
| 0.169935
| 0.392157
| 0.666667
| 0.666667
| 0.666667
| 0.666667
| 0.666667
| 0
| 0
| 0.009852
| 0.309524
| 294
| 16
| 35
| 18.375
| 0.743842
| 0
| 0
| 0.714286
| 0
| 0
| 0.108475
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| true
| 0
| 0.142857
| 0
| 0.214286
| 0.142857
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
90e9e3b393d5b8c48b0b91bfcf0b320f9648ff43
| 91
|
py
|
Python
|
phaseone/apps.py
|
tobyspark/orbit_data
|
7648b58a6f845554322d0b46ea34ac6674435c42
|
[
"MIT"
] | null | null | null |
phaseone/apps.py
|
tobyspark/orbit_data
|
7648b58a6f845554322d0b46ea34ac6674435c42
|
[
"MIT"
] | null | null | null |
phaseone/apps.py
|
tobyspark/orbit_data
|
7648b58a6f845554322d0b46ea34ac6674435c42
|
[
"MIT"
] | 1
|
2021-03-31T11:51:15.000Z
|
2021-03-31T11:51:15.000Z
|
from django.apps import AppConfig
class PhaseOneConfig(AppConfig):
name = 'phaseone'
| 15.166667
| 33
| 0.758242
| 10
| 91
| 6.9
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164835
| 91
| 5
| 34
| 18.2
| 0.907895
| 0
| 0
| 0
| 0
| 0
| 0.087912
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
90f43ee5adda427ec511145c02bb5fd52d363e08
| 97
|
py
|
Python
|
lesson1/first_turtle.py
|
tekichan/teach_kids_python
|
5393e7261c62211976a928501cb1aa4e25bcbeb9
|
[
"MIT"
] | null | null | null |
lesson1/first_turtle.py
|
tekichan/teach_kids_python
|
5393e7261c62211976a928501cb1aa4e25bcbeb9
|
[
"MIT"
] | null | null | null |
lesson1/first_turtle.py
|
tekichan/teach_kids_python
|
5393e7261c62211976a928501cb1aa4e25bcbeb9
|
[
"MIT"
] | null | null | null |
from turtle import *
circle(50) # Draw a circle
exitonclick() # Exit when you click the screen
| 32.333333
| 48
| 0.731959
| 15
| 97
| 4.733333
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025641
| 0.195876
| 97
| 3
| 48
| 32.333333
| 0.884615
| 0.453608
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
90ff541a81bfa2a5980a301a6f8f149b2f9aa214
| 104
|
py
|
Python
|
submission_form/apps.py
|
NAKKA-K/degifarm
|
63d74b1206860d0d2213efbc8a7969be7976c4fd
|
[
"MIT"
] | null | null | null |
submission_form/apps.py
|
NAKKA-K/degifarm
|
63d74b1206860d0d2213efbc8a7969be7976c4fd
|
[
"MIT"
] | 6
|
2018-02-18T08:38:46.000Z
|
2018-02-21T09:19:21.000Z
|
submission_form/apps.py
|
NAKKA-K/dw2018_server
|
63d74b1206860d0d2213efbc8a7969be7976c4fd
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class SubmissionFormConfig(AppConfig):
name = 'submission_form'
| 17.333333
| 38
| 0.788462
| 11
| 104
| 7.363636
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144231
| 104
| 5
| 39
| 20.8
| 0.910112
| 0
| 0
| 0
| 0
| 0
| 0.144231
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
294d74df0d02a55cccef69c8006472e34b7d94ec
| 1,729
|
py
|
Python
|
DailyProgrammer/DP20130530B.py
|
DayGitH/Python-Challenges
|
bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf
|
[
"MIT"
] | 2
|
2020-12-23T18:59:22.000Z
|
2021-04-14T13:16:09.000Z
|
DailyProgrammer/DP20130530B.py
|
DayGitH/Python-Challenges
|
bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf
|
[
"MIT"
] | null | null | null |
DailyProgrammer/DP20130530B.py
|
DayGitH/Python-Challenges
|
bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf
|
[
"MIT"
] | null | null | null |
"""
[05/30/13] Challenge #126 [Intermediate] Perfect P'th Powers
https://www.reddit.com/r/dailyprogrammer/comments/1fcpnx/053013_challenge_126_intermediate_perfect_pth/
# [](#IntermediateIcon) *(Intermediate)*: Perfect P'th Powers
An integer X is a "perfect square power" if there is some integer Y such that Y^2 = X. An integer X is a "perfect cube
power" if there is some integer Y such that Y^3 = X. We can extrapolate this where P is the power in question: an
integer X is a "perfect p'th power" if there is some integer Y such that Y^P = X.
Your goal is to find the highest value of P for a given X such that for some unknown integer Y, Y^P should equal X. You
can expect the given input integer X to be within the range of an unsigned 32-bit integer (0 to 4,294,967,295).
*Special thanks to the ACM collegiate programming challenges group for giving me the initial idea
[here](http://uva.onlinejudge.org/index.php?option=onlinejudge&page=show_problem&problem=1563).*
# Formal Inputs & Outputs
## Input Description
You will be given a single integer on a single line of text through standard console input. This integer will range
from 0 to 4,294,967,295 (the limits of a 32-bit unsigned integer).
## Output Description
You must print out to standard console the highest value P that fits the above problem description's requirements.
# Sample Inputs & Outputs
## Sample Input
*Note:* These are all considered separate input examples.
17
1073741824
25
## Sample Output
*Note:* The string following the result are notes to help with understanding the example; it is NOT expected of you to
write this out.
1 (17^1)
30 (2^30)
2 (5^2)
"""
def main():
pass
if __name__ == "__main__":
main()
| 42.170732
| 119
| 0.746096
| 299
| 1,729
| 4.267559
| 0.48495
| 0.025078
| 0.023511
| 0.028213
| 0.193574
| 0.149687
| 0.082288
| 0.082288
| 0.082288
| 0.082288
| 0
| 0.054073
| 0.176403
| 1,729
| 40
| 120
| 43.225
| 0.841994
| 0.958357
| 0
| 0
| 0
| 0
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
2967c93a59d62969ff829872f978c9d2ebcedf9f
| 241
|
py
|
Python
|
django_cradmin/apps/cradmin_register_account/urls.py
|
appressoas/django_cradmin
|
0f8715afdfe1ad32e46033f442e622aecf6a4dec
|
[
"BSD-3-Clause"
] | 11
|
2015-07-05T16:57:58.000Z
|
2020-11-24T16:58:19.000Z
|
django_cradmin/apps/cradmin_register_account/urls.py
|
appressoas/django_cradmin
|
0f8715afdfe1ad32e46033f442e622aecf6a4dec
|
[
"BSD-3-Clause"
] | 91
|
2015-01-08T22:38:13.000Z
|
2022-02-10T10:25:27.000Z
|
django_cradmin/apps/cradmin_register_account/urls.py
|
appressoas/django_cradmin
|
0f8715afdfe1ad32e46033f442e622aecf6a4dec
|
[
"BSD-3-Clause"
] | 3
|
2016-12-07T12:19:24.000Z
|
2018-10-03T14:04:18.000Z
|
from django.urls import path
from django_cradmin.apps.cradmin_register_account.views import register_account
urlpatterns = [
path('',
register_account.RegisterAccountView.as_view(),
name="cradmin-register-account"),
]
| 26.777778
| 79
| 0.746888
| 27
| 241
| 6.444444
| 0.555556
| 0.344828
| 0.252874
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157676
| 241
| 8
| 80
| 30.125
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0.099585
| 0.099585
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.285714
| 0
| 0.285714
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
4647ff040f613b9cd40aebe74c807458f5fcb450
| 665
|
py
|
Python
|
config.py
|
anderlli0053/godex
|
cb345c30b077c8a4847371d9d446729872e3385a
|
[
"MIT"
] | 576
|
2021-01-13T08:12:57.000Z
|
2022-03-29T14:21:50.000Z
|
config.py
|
anderlli0053/godex
|
cb345c30b077c8a4847371d9d446729872e3385a
|
[
"MIT"
] | 101
|
2021-01-14T14:19:25.000Z
|
2022-03-07T06:56:33.000Z
|
config.py
|
anderlli0053/godex
|
cb345c30b077c8a4847371d9d446729872e3385a
|
[
"MIT"
] | 41
|
2021-01-13T07:37:48.000Z
|
2022-03-24T07:14:47.000Z
|
def can_build(env, platform):
return True
def configure(env):
pass
def has_custom_iterator():
return True
def has_custom_physics_iterator():
return True
def has_custom_audio_iterator():
# TODO enable custom iterator once the audio process system is integrated
return False
def get_doc_path():
return "doc_classes"
def get_doc_classes():
return [
"Component",
"DynamicQuery",
"ECS",
# Disabled until only 'Entity' exists. 'doctool' will generate in 'godot/docs/classes' instead.
# "Entity2D",
# "Entity3D",
"PipelineECS",
"System",
"WorldECS",
]
| 17.5
| 103
| 0.628571
| 75
| 665
| 5.386667
| 0.613333
| 0.074257
| 0.096535
| 0.10396
| 0.148515
| 0.148515
| 0
| 0
| 0
| 0
| 0
| 0.004149
| 0.275188
| 665
| 37
| 104
| 17.972973
| 0.834025
| 0.284211
| 0
| 0.142857
| 0
| 0
| 0.127389
| 0
| 0
| 0
| 0
| 0.027027
| 0
| 1
| 0.333333
| false
| 0.047619
| 0
| 0.285714
| 0.619048
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
46586ea3bde47569399645cf7de44fa0a381acc0
| 708
|
py
|
Python
|
tests/conftest.py
|
puzzlluminati/padsniff
|
0e0c6589db8e207831fecd017eeaf6092b1aae04
|
[
"MIT"
] | 1
|
2018-08-07T03:29:43.000Z
|
2018-08-07T03:29:43.000Z
|
tests/conftest.py
|
puzzlluminati/padsniff
|
0e0c6589db8e207831fecd017eeaf6092b1aae04
|
[
"MIT"
] | 4
|
2018-04-16T22:45:40.000Z
|
2018-05-07T18:12:01.000Z
|
tests/conftest.py
|
puzzlluminati/padsniff
|
0e0c6589db8e207831fecd017eeaf6092b1aae04
|
[
"MIT"
] | null | null | null |
from mitmproxy.test.tflow import tflow as TestFlow
from pytest import fixture
from pytest_mock import mocker
class MockSocket:
"""A Mock `socket.socket` to prevent opening connections."""
def __init__(self, *args, **kwargs):
pass
def __getattr__(self, method_name):
# mock all method names to return called parameters
# prevents sockets from binding and connecting
return lambda *args, **kwargs: (args, kwargs)
@fixture(autouse=True)
def mock_proxy_server(monkeypatch):
"""Prevent networking libraries that use `socket.socket` from opening connections."""
monkeypatch.setattr('socket.socket', MockSocket)
@fixture
def flow():
return TestFlow()
| 26.222222
| 89
| 0.716102
| 86
| 708
| 5.755814
| 0.581395
| 0.072727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.19209
| 708
| 26
| 90
| 27.230769
| 0.865385
| 0.324859
| 0
| 0
| 0
| 0
| 0.027897
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0.071429
| 0.214286
| 0.142857
| 0.714286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 4
|
465c304e9434451978a2b63f2660a7ff5c53a69b
| 1,840
|
py
|
Python
|
stellar_sdk/network.py
|
bantalon/py-stellar-base
|
b452f0f92be0387c3e78c8149103978788d7ec0f
|
[
"Apache-2.0"
] | 1
|
2020-03-04T20:37:17.000Z
|
2020-03-04T20:37:17.000Z
|
stellar_sdk/network.py
|
bantalon/py-stellar-base
|
b452f0f92be0387c3e78c8149103978788d7ec0f
|
[
"Apache-2.0"
] | 1
|
2020-04-26T12:08:54.000Z
|
2020-04-26T12:08:54.000Z
|
stellar_sdk/network.py
|
bantalon/py-stellar-base
|
b452f0f92be0387c3e78c8149103978788d7ec0f
|
[
"Apache-2.0"
] | null | null | null |
from .utils import sha256
__all__ = ["Network"]
class Network:
"""The :class:`Network` object, which represents a Stellar network.
This class represents such a stellar network such as the Public network and the Test network.
:param str network_passphrase: The passphrase for the network.
(ex. 'Public Global Stellar Network ; September 2015')
"""
PUBLIC_NETWORK_PASSPHRASE: str = "Public Global Stellar Network ; September 2015"
"""Get the Public network passphrase."""
TESTNET_NETWORK_PASSPHRASE: str = "Test SDF Network ; September 2015"
"""Get the Test network passphrase."""
def __init__(self, network_passphrase: str) -> None:
self.network_passphrase: str = network_passphrase
def network_id(self) -> bytes:
"""Get the network ID of the network, which is an XDR hash of the
passphrase.
:returns: The network ID of the network.
"""
return sha256(self.network_passphrase.encode())
@classmethod
def public_network(cls) -> "Network":
"""Get the :class:`Network` object representing the PUBLIC Network.
:return: PUBLIC Network
"""
return cls(cls.PUBLIC_NETWORK_PASSPHRASE)
@classmethod
def testnet_network(cls) -> "Network":
"""Get the :class:`Network` object representing the TESTNET Network.
:return: TESTNET Network
"""
return cls(cls.TESTNET_NETWORK_PASSPHRASE)
def __eq__(self, other: object) -> bool:
if not isinstance(other, self.__class__):
return NotImplemented # pragma: no cover
return self.network_passphrase == other.network_passphrase
def __str__(self):
return "<Network [network_passphrase={network_passphrase}]>".format(
network_passphrase=self.network_passphrase
)
| 31.724138
| 97
| 0.669022
| 211
| 1,840
| 5.635071
| 0.274882
| 0.243061
| 0.08831
| 0.052986
| 0.227082
| 0.200168
| 0.094197
| 0.094197
| 0.094197
| 0.094197
| 0
| 0.012866
| 0.239674
| 1,840
| 57
| 98
| 32.280702
| 0.837026
| 0.325543
| 0
| 0.086957
| 0
| 0
| 0.141784
| 0.039437
| 0
| 0
| 0
| 0
| 0
| 1
| 0.26087
| false
| 0.434783
| 0.043478
| 0.043478
| 0.695652
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
466d68ae46a9f08291ab0c146f0323db9382d321
| 13,412
|
py
|
Python
|
plot/suppFig/fig5_time_gen_accross_conditions.py
|
romquentin/decod_WM_Selection_and_maintenance
|
fc1bf2f21959795fbea731f642cc750c2b61bce2
|
[
"BSD-3-Clause"
] | 7
|
2018-07-16T01:59:03.000Z
|
2021-07-28T09:48:13.000Z
|
plot/suppFig/fig5_time_gen_accross_conditions.py
|
romquentin/decod_WM_Selection_and_maintenance
|
fc1bf2f21959795fbea731f642cc750c2b61bce2
|
[
"BSD-3-Clause"
] | 1
|
2020-03-15T00:35:45.000Z
|
2020-04-17T09:54:38.000Z
|
plot/suppFig/fig5_time_gen_accross_conditions.py
|
romquentin/decod_WM_Selection_and_maintenance
|
fc1bf2f21959795fbea731f642cc750c2b61bce2
|
[
"BSD-3-Clause"
] | 4
|
2018-08-02T08:52:59.000Z
|
2021-12-17T11:43:47.000Z
|
"""
Figure 5
Plot time generalization across memory and visual conditions computed from
run_decoding_WM_across_epochs_and_conditions.py
"""
# Authors: Romain Quentin <rom.quentin@gmail.com>
# Jean-Remi King <jeanremi.king@gmail.com>
#
# License: BSD (3-clause)
import os.path as op
import numpy as np
import matplotlib.pyplot as plt
from config import subjects, path_data
from base import gat_stats
from copy import deepcopy
from webcolors import hex_to_rgb
from mpl_toolkits.axes_grid1 import make_axes_locatable
# Define colors
colors = ['#1f77b4', '#d62728', '#ff7f0e']
title_size = 12
legend_size = 16
ticks_size = 12
asterisk_size = 24
plt.rcParams['font.sans-serif'] = ['Arial']
plt.rcParams['font.family'] = 'sans-serif'
plt.rcParams['xtick.labelsize'] = ticks_size
plt.rcParams['ytick.labelsize'] = ticks_size
# Define pair of cross analyses
analyses = {'Tar_csf_cr_sf': ['target_sfreq_cue_left_sfreq_cross_left_sfreq',
'target_sfreq_cue_right_sfreq_cross_right_sfreq',
0.1, colors[1]],
'Sf_cr_tar_csf': ['left_sfreq_cross_target_sfreq_cue_left_sfreq',
'right_sfreq_cross_target_sfreq_cue_right_sfreq',
0.1, colors[2]],
'Tar_can_cr_an': ['target_angle_cue_left_angle_cross_left_angle',
'target_angle_cue_right_angle_cross_right_angle',
0.1, colors[1]],
'An_cr_tar_can': ['left_angle_cross_target_angle_cue_left_angle',
'right_angle_cross_target_angle_cue_right_angle',
0.1, colors[2]]
}
# Define results to plot
results_folder = 'sensors_accross_epochs_and_conditions_Kfold7'
# Define times
sfreq = 120
tmin = -.2
tmin_cue = 0.
tmax = .9
tmax_cue = 1.5
sample_times = np.linspace(0, (tmax-tmin)*sfreq, (tmax-tmin)*sfreq + 1)
sample_times_cue = np.linspace(0, (tmax_cue-tmin_cue)*sfreq,
(tmax_cue-tmin_cue)*sfreq + 1)
times = sample_times/sfreq + tmin
times_cue = sample_times_cue/sfreq + tmin_cue
chance = 0
# Loop across each pair of analyses
for analysis, sub_analysis in analyses.iteritems():
all_scores = list()
for subject in subjects:
fname0 = '%s_scores_%s.npy' % (subject, sub_analysis[0])
scores0 = np.load(op.join(path_data, 'results/', subject,
results_folder, fname0))
fname1 = '%s_scores_%s.npy' % (subject, sub_analysis[1])
scores1 = np.load(op.join(path_data, 'results/', subject,
results_folder, fname1))
scores = (scores0 + scores1)/2. # Mean cue left and cue right
all_scores.append(scores)
all_scores = np.array(all_scores)
ymax = sub_analysis[2]
color = sub_analysis[3]
color = np.array(hex_to_rgb(color))/255.
color = np.concatenate((color, [1]), axis=0)
cmap = deepcopy(plt.get_cmap('magma_r'))
cmap.colors = np.c_[np.linspace(1, color[0], 256),
np.linspace(1, color[1], 256),
np.linspace(1, color[2], 256)]
if 'Tar' in analysis[:4]:
borders = [[133, 314, 133, 314], [133, 314, 0, 133],
[0, 133, 133, 314]]
else:
borders = [[0, 133, 0, 133], [133, 314, 0, 133],
[0, 133, 133, 314]]
# Separate epoch times
for num, border in enumerate(borders):
all_scores_sub = all_scores[:, border[0]:border[1],
border[2]:border[3]]
gat_p_values = gat_stats(np.array(all_scores_sub))
sig = np.array(gat_p_values < 0.05)
if all_scores_sub.shape[1] == 133:
y_times = times
y_size = 3
else:
y_times = times_cue
y_size = 4.09
if all_scores_sub.shape[2] == 133:
x_times = times
x_size = 3
else:
x_times = times_cue
x_size = 4.09
# Plot mean subjects
fig_mean, axes = plt.subplots()
fig_mean.set_size_inches(x_size, y_size)
imshow = axes.imshow(np.mean((all_scores_sub), axis=0), origin='lower',
cmap=cmap,
extent=[x_times[0], x_times[-1], y_times[0], y_times[-1]], vmin=0, vmax=ymax)
axes_divider = make_axes_locatable(axes)
cax = axes_divider.append_axes("top", size="7%", pad="2%")
cbar = plt.colorbar(imshow, cax=cax, ticks=[0, ymax], orientation="horizontal")
cax.xaxis.set_ticks_position('top')
cbar.ax.set_xticklabels(['', ymax])
if 'Tar' in analysis[:4]:
if num == 0:
axes.set_xticks(np.linspace(0, 1.4, 8))
axes.tick_params(
axis='y',
which='both',
labelleft='off')
axes.fill_between(times_cue, times_cue[0], times_cue[-1],
where=(times_cue >= 0) & (times_cue <= 0.1),
alpha=0.2,
color='gray')
axes.fill_betweenx(times_cue, times_cue[0], times_cue[-1],
where=(times_cue >= 0) & (times_cue <= 0.1),
alpha=0.2,
color='gray')
elif num == 1:
axes.set_xticks(np.linspace(-.2, 0.8, 6))
axes.set_yticks(np.linspace(0, 1.4, 8))
axes.fill_between(times_cue, times_cue[0], times_cue[-1],
where=(times_cue >= 0) & (times_cue <= 0.1),
alpha=0.2,
color='gray')
axes.fill_betweenx(times, times[0], times[-1],
where=(times >= 0) & (times <= 0.1),
alpha=0.2,
color='gray')
elif num == 2:
axes.set_xticks(np.linspace(0, 1.4, 8))
axes.tick_params(
axis='y',
which='both',
labelleft='off')
axes.fill_between(times, times[0], times[-1],
where=(times >= 0) & (times <= 0.1),
alpha=0.2,
color='gray')
axes.fill_betweenx(times_cue, times_cue[0], times_cue[-1],
where=(times_cue >= 0) & (times_cue <= 0.1),
alpha=0.2,
color='gray')
else:
if num == 0:
axes.set_xticks(np.linspace(-.2, 0.8, 6))
axes.set_yticks(np.linspace(-.2, 0.8, 6))
axes.fill_between(times, times[0], times[-1],
where=(times >= 0) & (times <= 0.1),
alpha=0.2,
color='gray')
axes.fill_betweenx(times, times[0], times[-1],
where=(times >= 0) & (times <= 0.1),
alpha=0.2,
color='gray')
elif num == 1:
axes.set_xticks(np.linspace(-.2, 0.8, 6))
axes.set_yticks(np.linspace(0, 1.4, 8))
axes.fill_between(times_cue, times_cue[0], times_cue[-1],
where=(times_cue >= 0) & (times_cue <= 0.1),
alpha=0.2,
color='gray')
axes.fill_betweenx(times, times[0], times[-1],
where=(times >= 0) & (times <= 0.1),
alpha=0.2,
color='gray')
elif num == 2:
axes.set_xticks(np.linspace(0, 1.4, 8))
axes.tick_params(
axis='y',
which='both',
labelleft='off')
axes.fill_between(times, times[0], times[-1],
where=(times >= 0) & (times <= 0.1),
alpha=0.2,
color='gray')
axes.fill_betweenx(times_cue, times_cue[0], times_cue[-1],
where=(times_cue >= 0) & (times_cue <= 0.1),
alpha=0.2,
color='gray')
xx, yy = np.meshgrid(x_times, y_times,
copy=False, indexing='xy')
axes.contour(xx, yy, sig, colors='Gray', levels=[0],
linestyles='solid')
# Save cross analyses figure
plt.tight_layout()
fname = op.join(path_data, 'fig_supp/fig_supp_5/', analysis
+ str(num) + '.png')
plt.savefig(fname, transparent=True)
# # Define pair of analyses
analyses = {'target_sfreq': ['target_sfreq_cue_left_sfreq',
'target_sfreq_cue_right_sfreq',
0.1, colors[1]],
'stim_sfreq': ['left_sfreq',
'right_sfreq',
0.4, colors[2]],
'target_angle': ['target_angle_cue_left_angle',
'target_angle_cue_right_angle',
0.1, colors[1]],
'stim_angle': ['left_angle',
'right_angle',
0.1, colors[2]]
}
# Define results to plot
results_folder = 'sensors_accross_epochs_and_conditions'
# Loop across each pair of analyses
for analysis, sub_analysis in analyses.iteritems():
all_scores = list()
for subject in subjects:
fname0 = '%s_scores_%s.npy' % (subject, sub_analysis[0])
scores0 = np.load(op.join(path_data, 'results/', subject,
results_folder, fname0))
fname1 = '%s_scores_%s.npy' % (subject, sub_analysis[1])
scores1 = np.load(op.join(path_data, 'results/', subject,
results_folder, fname1))
scores = (scores0 + scores1)/2. # Mean cue left and cue right
all_scores.append(scores)
all_scores = np.array(all_scores)
ymax = sub_analysis[2]
color = sub_analysis[3]
color = np.array(hex_to_rgb(color))/255.
color = np.concatenate((color, [1]), axis=0)
cmap = deepcopy(plt.get_cmap('magma_r'))
cmap.colors = np.c_[np.linspace(1, color[0], 256),
np.linspace(1, color[1], 256),
np.linspace(1, color[2], 256)]
if 'tar' in analysis[:4]:
borders = [133, 314, 133, 314]
x_times = y_times = times_cue
x_size = y_size = 4.09
else:
borders = [0, 133, 0, 133]
x_times = y_times = times
x_size = y_size = 3
all_scores_sub = all_scores[:, borders[0]:borders[1], borders[2]:borders[3]]
gat_p_values = gat_stats(np.array(all_scores_sub))
sig = np.array(gat_p_values < 0.05)
# Plot mean subjects
fig_mean, axes = plt.subplots()
fig_mean.set_size_inches(x_size, y_size)
imshow = axes.imshow(np.mean((all_scores_sub), axis=0), origin='lower',
cmap=cmap,
extent=[x_times[0], x_times[-1], y_times[0], y_times[-1]], vmin=0, vmax=ymax)
axes_divider = make_axes_locatable(axes)
cax = axes_divider.append_axes("top", size="7%", pad="2%")
cbar = plt.colorbar(imshow, cax=cax, ticks=[ymax], orientation="horizontal")
cax.xaxis.set_ticks_position('top')
cbar.ax.set_yticklabels([ymax])
if 'tar' in analysis[:4]:
axes.set_xticks(np.linspace(0, 1.4, 8))
axes.tick_params(
axis='y',
which='both',
labelleft='off')
axes.fill_between(times_cue, times_cue[0], times_cue[-1],
where=(times_cue >= 0) & (times_cue <= 0.1),
alpha=0.2,
color='gray',
interpolate=True)
axes.fill_betweenx(times_cue, times_cue[0], times_cue[-1],
where=(times_cue >= 0) & (times_cue <= 0.1),
alpha=0.2,
color='gray')
else:
axes.set_xticks(np.linspace(-.2, 0.8, 6))
axes.set_yticks(np.linspace(-.2, 0.8, 6))
axes.fill_between(times, times[0], times[-1],
where=(times >= 0) & (times <= 0.1),
alpha=0.2,
color='gray',
interpolate=True)
axes.fill_betweenx(times, times[0], times[-1],
where=(times >= 0) & (times <= 0.1),
alpha=0.2,
color='gray')
xx, yy = np.meshgrid(x_times, y_times,
copy=False, indexing='xy')
axes.contour(xx, yy, sig, colors='Gray', levels=[0],
linestyles='solid')
# Save figure for non-crossed analysis
plt.tight_layout()
fname = op.join(path_data, 'fig_supp/fig_supp_5/', analysis
+ '.png')
plt.savefig(fname, transparent=True)
| 43.97377
| 106
| 0.495526
| 1,609
| 13,412
| 3.912989
| 0.142946
| 0.05845
| 0.034308
| 0.035578
| 0.794155
| 0.739199
| 0.709975
| 0.696633
| 0.670902
| 0.670902
| 0
| 0.051734
| 0.37884
| 13,412
| 304
| 107
| 44.118421
| 0.703997
| 0.046898
| 0
| 0.711111
| 0
| 0
| 0.087723
| 0.043195
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02963
| 0
| 0.02963
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
467a3719714bff4315d3f7ba2b4c43fdfd1edb24
| 68
|
py
|
Python
|
apps/responsys/__init__.py
|
gene1wood/webowonder
|
a173d5e9ccf6d15b02b48759efb9a625d84822dc
|
[
"BSD-3-Clause"
] | 2
|
2015-07-01T20:17:14.000Z
|
2021-03-26T06:02:13.000Z
|
apps/responsys/__init__.py
|
gene1wood/webowonder
|
a173d5e9ccf6d15b02b48759efb9a625d84822dc
|
[
"BSD-3-Clause"
] | 2
|
2019-02-17T17:28:04.000Z
|
2019-04-02T06:57:31.000Z
|
apps/responsys/__init__.py
|
gene1wood/webowonder
|
a173d5e9ccf6d15b02b48759efb9a625d84822dc
|
[
"BSD-3-Clause"
] | 2
|
2019-03-28T03:40:06.000Z
|
2019-11-25T17:35:08.000Z
|
""" This code was liberated from jlongster's fine zamboni fork. """
| 34
| 67
| 0.720588
| 10
| 68
| 4.9
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161765
| 68
| 1
| 68
| 68
| 0.859649
| 0.867647
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
467b3cbff1063bcf9949c589571987d47cdafa5d
| 158
|
py
|
Python
|
tests/processing/data/__init__.py
|
DHI-GRAS/atmcorr
|
55e584c7971009065b47ece9d3d215bfe8335d04
|
[
"MIT"
] | 5
|
2019-09-03T17:13:57.000Z
|
2021-12-01T03:22:11.000Z
|
tests/processing/data/__init__.py
|
DHI-GRAS/atmcorr
|
55e584c7971009065b47ece9d3d215bfe8335d04
|
[
"MIT"
] | 1
|
2021-04-28T08:11:37.000Z
|
2021-04-28T09:52:02.000Z
|
tests/processing/data/__init__.py
|
DHI-GRAS/atmcorr
|
55e584c7971009065b47ece9d3d215bfe8335d04
|
[
"MIT"
] | 1
|
2021-03-31T02:13:08.000Z
|
2021-03-31T02:13:08.000Z
|
import os
import glob
here = os.path.abspath(os.path.dirname(__file__))
MTDFILES = {os.path.basename(p): p for p in glob.glob(os.path.join(here, '*.imd'))}
| 22.571429
| 83
| 0.702532
| 28
| 158
| 3.821429
| 0.535714
| 0.224299
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113924
| 158
| 6
| 84
| 26.333333
| 0.764286
| 0
| 0
| 0
| 0
| 0
| 0.031646
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
46b6355cbc3694dea6587fc87608ab1c1e64836d
| 101
|
py
|
Python
|
__init__.py
|
rmtew/livecoding
|
9c5619c9653d4cd83977fc1f3aae51da004f1e8b
|
[
"BSD-3-Clause"
] | null | null | null |
__init__.py
|
rmtew/livecoding
|
9c5619c9653d4cd83977fc1f3aae51da004f1e8b
|
[
"BSD-3-Clause"
] | null | null | null |
__init__.py
|
rmtew/livecoding
|
9c5619c9653d4cd83977fc1f3aae51da004f1e8b
|
[
"BSD-3-Clause"
] | null | null | null |
# Python package structure shenanigans.
#
# http://docs.python.org/tutorial/modules.html#packages
#
| 20.2
| 56
| 0.762376
| 12
| 101
| 6.416667
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09901
| 101
| 4
| 57
| 25.25
| 0.846154
| 0.90099
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
d3b3d12b2fe6ddd9c56192a5479c7911ca5edf52
| 29,189
|
py
|
Python
|
nova/tests/unit/api/openstack/compute/test_simple_tenant_usage.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/api/openstack/compute/test_simple_tenant_usage.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/api/openstack/compute/test_simple_tenant_usage.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | 2
|
2017-07-20T17:31:34.000Z
|
2020-07-24T02:42:19.000Z
|
begin_unit
comment|'# Copyright 2011 OpenStack Foundation'
nl|'\n'
comment|'# All Rights Reserved.'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
name|'import'
name|'datetime'
newline|'\n'
nl|'\n'
name|'import'
name|'mock'
newline|'\n'
name|'from'
name|'oslo_policy'
name|'import'
name|'policy'
name|'as'
name|'oslo_policy'
newline|'\n'
name|'from'
name|'oslo_utils'
name|'import'
name|'timeutils'
newline|'\n'
name|'from'
name|'six'
op|'.'
name|'moves'
name|'import'
name|'range'
newline|'\n'
name|'import'
name|'webob'
newline|'\n'
nl|'\n'
name|'from'
name|'nova'
op|'.'
name|'api'
op|'.'
name|'openstack'
op|'.'
name|'compute'
name|'import'
name|'simple_tenant_usage'
name|'as'
name|'simple_tenant_usage_v21'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'compute'
name|'import'
name|'vm_states'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'context'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'db'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'exception'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'objects'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'policy'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'test'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'api'
op|'.'
name|'openstack'
name|'import'
name|'fakes'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
name|'import'
name|'fake_flavor'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
name|'import'
name|'uuidsentinel'
name|'as'
name|'uuids'
newline|'\n'
nl|'\n'
DECL|variable|SERVERS
name|'SERVERS'
op|'='
number|'5'
newline|'\n'
DECL|variable|TENANTS
name|'TENANTS'
op|'='
number|'2'
newline|'\n'
DECL|variable|HOURS
name|'HOURS'
op|'='
number|'24'
newline|'\n'
DECL|variable|ROOT_GB
name|'ROOT_GB'
op|'='
number|'10'
newline|'\n'
DECL|variable|EPHEMERAL_GB
name|'EPHEMERAL_GB'
op|'='
number|'20'
newline|'\n'
DECL|variable|MEMORY_MB
name|'MEMORY_MB'
op|'='
number|'1024'
newline|'\n'
DECL|variable|VCPUS
name|'VCPUS'
op|'='
number|'2'
newline|'\n'
DECL|variable|NOW
name|'NOW'
op|'='
name|'timeutils'
op|'.'
name|'utcnow'
op|'('
op|')'
newline|'\n'
DECL|variable|START
name|'START'
op|'='
name|'NOW'
op|'-'
name|'datetime'
op|'.'
name|'timedelta'
op|'('
name|'hours'
op|'='
name|'HOURS'
op|')'
newline|'\n'
DECL|variable|STOP
name|'STOP'
op|'='
name|'NOW'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|variable|FAKE_INST_TYPE
name|'FAKE_INST_TYPE'
op|'='
op|'{'
string|"'id'"
op|':'
number|'1'
op|','
nl|'\n'
string|"'vcpus'"
op|':'
name|'VCPUS'
op|','
nl|'\n'
string|"'root_gb'"
op|':'
name|'ROOT_GB'
op|','
nl|'\n'
string|"'ephemeral_gb'"
op|':'
name|'EPHEMERAL_GB'
op|','
nl|'\n'
string|"'memory_mb'"
op|':'
name|'MEMORY_MB'
op|','
nl|'\n'
string|"'name'"
op|':'
string|"'fakeflavor'"
op|','
nl|'\n'
string|"'flavorid'"
op|':'
string|"'foo'"
op|','
nl|'\n'
string|"'rxtx_factor'"
op|':'
number|'1.0'
op|','
nl|'\n'
string|"'vcpu_weight'"
op|':'
number|'1'
op|','
nl|'\n'
string|"'swap'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'created_at'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'updated_at'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'deleted_at'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'deleted'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'disabled'"
op|':'
name|'False'
op|','
nl|'\n'
string|"'is_public'"
op|':'
name|'True'
op|','
nl|'\n'
string|"'extra_specs'"
op|':'
op|'{'
string|"'foo'"
op|':'
string|"'bar'"
op|'}'
op|'}'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|get_fake_db_instance
name|'def'
name|'get_fake_db_instance'
op|'('
name|'start'
op|','
name|'end'
op|','
name|'instance_id'
op|','
name|'tenant_id'
op|','
nl|'\n'
name|'vm_state'
op|'='
name|'vm_states'
op|'.'
name|'ACTIVE'
op|')'
op|':'
newline|'\n'
indent|' '
name|'inst'
op|'='
name|'fakes'
op|'.'
name|'stub_instance'
op|'('
nl|'\n'
name|'id'
op|'='
name|'instance_id'
op|','
nl|'\n'
name|'uuid'
op|'='
name|'getattr'
op|'('
name|'uuids'
op|','
string|"'instance_%d'"
op|'%'
name|'instance_id'
op|')'
op|','
nl|'\n'
name|'image_ref'
op|'='
string|"'1'"
op|','
nl|'\n'
name|'project_id'
op|'='
name|'tenant_id'
op|','
nl|'\n'
name|'user_id'
op|'='
string|"'fakeuser'"
op|','
nl|'\n'
name|'display_name'
op|'='
string|"'name'"
op|','
nl|'\n'
name|'flavor_id'
op|'='
name|'FAKE_INST_TYPE'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
name|'launched_at'
op|'='
name|'start'
op|','
nl|'\n'
name|'terminated_at'
op|'='
name|'end'
op|','
nl|'\n'
name|'vm_state'
op|'='
name|'vm_state'
op|','
nl|'\n'
name|'memory_mb'
op|'='
name|'MEMORY_MB'
op|','
nl|'\n'
name|'vcpus'
op|'='
name|'VCPUS'
op|','
nl|'\n'
name|'root_gb'
op|'='
name|'ROOT_GB'
op|','
nl|'\n'
name|'ephemeral_gb'
op|'='
name|'EPHEMERAL_GB'
op|','
op|')'
newline|'\n'
name|'return'
name|'inst'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|fake_instance_get_active_by_window_joined
dedent|''
name|'def'
name|'fake_instance_get_active_by_window_joined'
op|'('
name|'context'
op|','
name|'begin'
op|','
name|'end'
op|','
nl|'\n'
name|'project_id'
op|','
name|'host'
op|','
name|'columns_to_join'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
op|'['
name|'get_fake_db_instance'
op|'('
name|'START'
op|','
nl|'\n'
name|'STOP'
op|','
nl|'\n'
name|'x'
op|','
nl|'\n'
name|'project_id'
name|'if'
name|'project_id'
name|'else'
nl|'\n'
string|'"faketenant_%s"'
op|'%'
op|'('
name|'x'
op|'/'
name|'SERVERS'
op|')'
op|')'
nl|'\n'
name|'for'
name|'x'
name|'in'
name|'range'
op|'('
name|'TENANTS'
op|'*'
name|'SERVERS'
op|')'
op|']'
newline|'\n'
nl|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'db'
op|','
string|"'instance_get_active_by_window_joined'"
op|','
nl|'\n'
name|'fake_instance_get_active_by_window_joined'
op|')'
newline|'\n'
DECL|class|SimpleTenantUsageTestV21
name|'class'
name|'SimpleTenantUsageTestV21'
op|'('
name|'test'
op|'.'
name|'TestCase'
op|')'
op|':'
newline|'\n'
DECL|variable|policy_rule_prefix
indent|' '
name|'policy_rule_prefix'
op|'='
string|'"os_compute_api:os-simple-tenant-usage"'
newline|'\n'
DECL|variable|controller
name|'controller'
op|'='
name|'simple_tenant_usage_v21'
op|'.'
name|'SimpleTenantUsageController'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'SimpleTenantUsageTestV21'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'admin_context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'fakeadmin_0'"
op|','
nl|'\n'
string|"'faketenant_0'"
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'True'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'user_context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'fakeadmin_0'"
op|','
nl|'\n'
string|"'faketenant_0'"
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'False'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'alt_user_context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'fakeadmin_0'"
op|','
nl|'\n'
string|"'faketenant_1'"
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_test_verify_index
dedent|''
name|'def'
name|'_test_verify_index'
op|'('
name|'self'
op|','
name|'start'
op|','
name|'stop'
op|')'
op|':'
newline|'\n'
indent|' '
name|'req'
op|'='
name|'fakes'
op|'.'
name|'HTTPRequest'
op|'.'
name|'blank'
op|'('
string|"'?start=%s&end=%s'"
op|'%'
nl|'\n'
op|'('
name|'start'
op|'.'
name|'isoformat'
op|'('
op|')'
op|','
name|'stop'
op|'.'
name|'isoformat'
op|'('
op|')'
op|')'
op|')'
newline|'\n'
name|'req'
op|'.'
name|'environ'
op|'['
string|"'nova.context'"
op|']'
op|'='
name|'self'
op|'.'
name|'admin_context'
newline|'\n'
name|'res_dict'
op|'='
name|'self'
op|'.'
name|'controller'
op|'.'
name|'index'
op|'('
name|'req'
op|')'
newline|'\n'
name|'usages'
op|'='
name|'res_dict'
op|'['
string|"'tenant_usages'"
op|']'
newline|'\n'
name|'for'
name|'i'
name|'in'
name|'range'
op|'('
name|'TENANTS'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'SERVERS'
op|'*'
name|'HOURS'
op|','
name|'int'
op|'('
name|'usages'
op|'['
name|'i'
op|']'
op|'['
string|"'total_hours'"
op|']'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'SERVERS'
op|'*'
op|'('
name|'ROOT_GB'
op|'+'
name|'EPHEMERAL_GB'
op|')'
op|'*'
name|'HOURS'
op|','
nl|'\n'
name|'int'
op|'('
name|'usages'
op|'['
name|'i'
op|']'
op|'['
string|"'total_local_gb_usage'"
op|']'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'SERVERS'
op|'*'
name|'MEMORY_MB'
op|'*'
name|'HOURS'
op|','
nl|'\n'
name|'int'
op|'('
name|'usages'
op|'['
name|'i'
op|']'
op|'['
string|"'total_memory_mb_usage'"
op|']'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'SERVERS'
op|'*'
name|'VCPUS'
op|'*'
name|'HOURS'
op|','
nl|'\n'
name|'int'
op|'('
name|'usages'
op|'['
name|'i'
op|']'
op|'['
string|"'total_vcpus_usage'"
op|']'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'usages'
op|'['
name|'i'
op|']'
op|'.'
name|'get'
op|'('
string|"'server_usages'"
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_verify_index
dedent|''
dedent|''
name|'def'
name|'test_verify_index'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_test_verify_index'
op|'('
name|'START'
op|','
name|'STOP'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_verify_index_future_end_time
dedent|''
name|'def'
name|'test_verify_index_future_end_time'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'future'
op|'='
name|'NOW'
op|'+'
name|'datetime'
op|'.'
name|'timedelta'
op|'('
name|'hours'
op|'='
name|'HOURS'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'_test_verify_index'
op|'('
name|'START'
op|','
name|'future'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_verify_show
dedent|''
name|'def'
name|'test_verify_show'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_test_verify_show'
op|'('
name|'START'
op|','
name|'STOP'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_verify_show_future_end_time
dedent|''
name|'def'
name|'test_verify_show_future_end_time'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'future'
op|'='
name|'NOW'
op|'+'
name|'datetime'
op|'.'
name|'timedelta'
op|'('
name|'hours'
op|'='
name|'HOURS'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'_test_verify_show'
op|'('
name|'START'
op|','
name|'future'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_get_tenant_usages
dedent|''
name|'def'
name|'_get_tenant_usages'
op|'('
name|'self'
op|','
name|'detailed'
op|'='
string|"''"
op|')'
op|':'
newline|'\n'
indent|' '
name|'req'
op|'='
name|'fakes'
op|'.'
name|'HTTPRequest'
op|'.'
name|'blank'
op|'('
string|"'?detailed=%s&start=%s&end=%s'"
op|'%'
nl|'\n'
op|'('
name|'detailed'
op|','
name|'START'
op|'.'
name|'isoformat'
op|'('
op|')'
op|','
name|'STOP'
op|'.'
name|'isoformat'
op|'('
op|')'
op|')'
op|')'
newline|'\n'
name|'req'
op|'.'
name|'environ'
op|'['
string|"'nova.context'"
op|']'
op|'='
name|'self'
op|'.'
name|'admin_context'
newline|'\n'
nl|'\n'
comment|'# Make sure that get_active_by_window_joined is only called with'
nl|'\n'
comment|"# expected_attrs=['flavor']."
nl|'\n'
name|'orig_get_active_by_window_joined'
op|'='
op|'('
nl|'\n'
name|'objects'
op|'.'
name|'InstanceList'
op|'.'
name|'get_active_by_window_joined'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake_get_active_by_window_joined
name|'def'
name|'fake_get_active_by_window_joined'
op|'('
name|'context'
op|','
name|'begin'
op|','
name|'end'
op|'='
name|'None'
op|','
nl|'\n'
name|'project_id'
op|'='
name|'None'
op|','
name|'host'
op|'='
name|'None'
op|','
nl|'\n'
name|'expected_attrs'
op|'='
name|'None'
op|','
nl|'\n'
name|'use_slave'
op|'='
name|'False'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'['
string|"'flavor'"
op|']'
op|','
name|'expected_attrs'
op|')'
newline|'\n'
name|'return'
name|'orig_get_active_by_window_joined'
op|'('
name|'context'
op|','
name|'begin'
op|','
name|'end'
op|','
nl|'\n'
name|'project_id'
op|','
name|'host'
op|','
nl|'\n'
name|'expected_attrs'
op|','
name|'use_slave'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'InstanceList'
op|','
nl|'\n'
string|"'get_active_by_window_joined'"
op|','
nl|'\n'
name|'side_effect'
op|'='
name|'fake_get_active_by_window_joined'
op|')'
op|':'
newline|'\n'
indent|' '
name|'res_dict'
op|'='
name|'self'
op|'.'
name|'controller'
op|'.'
name|'index'
op|'('
name|'req'
op|')'
newline|'\n'
name|'return'
name|'res_dict'
op|'['
string|"'tenant_usages'"
op|']'
newline|'\n'
nl|'\n'
DECL|member|test_verify_detailed_index
dedent|''
dedent|''
name|'def'
name|'test_verify_detailed_index'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'usages'
op|'='
name|'self'
op|'.'
name|'_get_tenant_usages'
op|'('
string|"'1'"
op|')'
newline|'\n'
name|'for'
name|'i'
name|'in'
name|'range'
op|'('
name|'TENANTS'
op|')'
op|':'
newline|'\n'
indent|' '
name|'servers'
op|'='
name|'usages'
op|'['
name|'i'
op|']'
op|'['
string|"'server_usages'"
op|']'
newline|'\n'
name|'for'
name|'j'
name|'in'
name|'range'
op|'('
name|'SERVERS'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'HOURS'
op|','
name|'int'
op|'('
name|'servers'
op|'['
name|'j'
op|']'
op|'['
string|"'hours'"
op|']'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_verify_simple_index
dedent|''
dedent|''
dedent|''
name|'def'
name|'test_verify_simple_index'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'usages'
op|'='
name|'self'
op|'.'
name|'_get_tenant_usages'
op|'('
name|'detailed'
op|'='
string|"'0'"
op|')'
newline|'\n'
name|'for'
name|'i'
name|'in'
name|'range'
op|'('
name|'TENANTS'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'usages'
op|'['
name|'i'
op|']'
op|'.'
name|'get'
op|'('
string|"'server_usages'"
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_verify_simple_index_empty_param
dedent|''
dedent|''
name|'def'
name|'test_verify_simple_index_empty_param'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|"# NOTE(lzyeval): 'detailed=&start=..&end=..'"
nl|'\n'
indent|' '
name|'usages'
op|'='
name|'self'
op|'.'
name|'_get_tenant_usages'
op|'('
op|')'
newline|'\n'
name|'for'
name|'i'
name|'in'
name|'range'
op|'('
name|'TENANTS'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'usages'
op|'['
name|'i'
op|']'
op|'.'
name|'get'
op|'('
string|"'server_usages'"
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_test_verify_show
dedent|''
dedent|''
name|'def'
name|'_test_verify_show'
op|'('
name|'self'
op|','
name|'start'
op|','
name|'stop'
op|')'
op|':'
newline|'\n'
indent|' '
name|'tenant_id'
op|'='
number|'1'
newline|'\n'
name|'req'
op|'='
name|'fakes'
op|'.'
name|'HTTPRequest'
op|'.'
name|'blank'
op|'('
string|"'?start=%s&end=%s'"
op|'%'
nl|'\n'
op|'('
name|'start'
op|'.'
name|'isoformat'
op|'('
op|')'
op|','
name|'stop'
op|'.'
name|'isoformat'
op|'('
op|')'
op|')'
op|')'
newline|'\n'
name|'req'
op|'.'
name|'environ'
op|'['
string|"'nova.context'"
op|']'
op|'='
name|'self'
op|'.'
name|'user_context'
newline|'\n'
nl|'\n'
name|'res_dict'
op|'='
name|'self'
op|'.'
name|'controller'
op|'.'
name|'show'
op|'('
name|'req'
op|','
name|'tenant_id'
op|')'
newline|'\n'
nl|'\n'
name|'usage'
op|'='
name|'res_dict'
op|'['
string|"'tenant_usage'"
op|']'
newline|'\n'
name|'servers'
op|'='
name|'usage'
op|'['
string|"'server_usages'"
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'TENANTS'
op|'*'
name|'SERVERS'
op|','
name|'len'
op|'('
name|'usage'
op|'['
string|"'server_usages'"
op|']'
op|')'
op|')'
newline|'\n'
name|'server_uuids'
op|'='
op|'['
name|'getattr'
op|'('
name|'uuids'
op|','
string|"'instance_%d'"
op|'%'
name|'x'
op|')'
nl|'\n'
name|'for'
name|'x'
name|'in'
name|'range'
op|'('
name|'SERVERS'
op|')'
op|']'
newline|'\n'
name|'for'
name|'j'
name|'in'
name|'range'
op|'('
name|'SERVERS'
op|')'
op|':'
newline|'\n'
indent|' '
name|'delta'
op|'='
name|'STOP'
op|'-'
name|'START'
newline|'\n'
comment|'# NOTE(javeme): cast seconds from float to int for clarity'
nl|'\n'
name|'uptime'
op|'='
name|'int'
op|'('
name|'delta'
op|'.'
name|'total_seconds'
op|'('
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'uptime'
op|','
name|'int'
op|'('
name|'servers'
op|'['
name|'j'
op|']'
op|'['
string|"'uptime'"
op|']'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'HOURS'
op|','
name|'int'
op|'('
name|'servers'
op|'['
name|'j'
op|']'
op|'['
string|"'hours'"
op|']'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'servers'
op|'['
name|'j'
op|']'
op|'['
string|"'instance_id'"
op|']'
op|','
name|'server_uuids'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_verify_show_cannot_view_other_tenant
dedent|''
dedent|''
name|'def'
name|'test_verify_show_cannot_view_other_tenant'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'req'
op|'='
name|'fakes'
op|'.'
name|'HTTPRequest'
op|'.'
name|'blank'
op|'('
string|"'?start=%s&end=%s'"
op|'%'
nl|'\n'
op|'('
name|'START'
op|'.'
name|'isoformat'
op|'('
op|')'
op|','
name|'STOP'
op|'.'
name|'isoformat'
op|'('
op|')'
op|')'
op|')'
newline|'\n'
name|'req'
op|'.'
name|'environ'
op|'['
string|"'nova.context'"
op|']'
op|'='
name|'self'
op|'.'
name|'alt_user_context'
newline|'\n'
nl|'\n'
name|'rules'
op|'='
op|'{'
nl|'\n'
name|'self'
op|'.'
name|'policy_rule_prefix'
op|'+'
string|'":show"'
op|':'
op|'['
nl|'\n'
op|'['
string|'"role:admin"'
op|']'
op|','
op|'['
string|'"project_id:%(project_id)s"'
op|']'
op|']'
nl|'\n'
op|'}'
newline|'\n'
name|'policy'
op|'.'
name|'set_rules'
op|'('
name|'oslo_policy'
op|'.'
name|'Rules'
op|'.'
name|'from_dict'
op|'('
name|'rules'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'try'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'PolicyNotAuthorized'
op|','
nl|'\n'
name|'self'
op|'.'
name|'controller'
op|'.'
name|'show'
op|','
name|'req'
op|','
string|"'faketenant_0'"
op|')'
newline|'\n'
dedent|''
name|'finally'
op|':'
newline|'\n'
indent|' '
name|'policy'
op|'.'
name|'reset'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_tenants_usage_with_bad_start_date
dedent|''
dedent|''
name|'def'
name|'test_get_tenants_usage_with_bad_start_date'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'future'
op|'='
name|'NOW'
op|'+'
name|'datetime'
op|'.'
name|'timedelta'
op|'('
name|'hours'
op|'='
name|'HOURS'
op|')'
newline|'\n'
name|'req'
op|'='
name|'fakes'
op|'.'
name|'HTTPRequest'
op|'.'
name|'blank'
op|'('
string|"'?start=%s&end=%s'"
op|'%'
nl|'\n'
op|'('
name|'future'
op|'.'
name|'isoformat'
op|'('
op|')'
op|','
name|'NOW'
op|'.'
name|'isoformat'
op|'('
op|')'
op|')'
op|')'
newline|'\n'
name|'req'
op|'.'
name|'environ'
op|'['
string|"'nova.context'"
op|']'
op|'='
name|'self'
op|'.'
name|'user_context'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPBadRequest'
op|','
nl|'\n'
name|'self'
op|'.'
name|'controller'
op|'.'
name|'show'
op|','
name|'req'
op|','
string|"'faketenant_0'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_tenants_usage_with_invalid_start_date
dedent|''
name|'def'
name|'test_get_tenants_usage_with_invalid_start_date'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'req'
op|'='
name|'fakes'
op|'.'
name|'HTTPRequest'
op|'.'
name|'blank'
op|'('
string|"'?start=%s&end=%s'"
op|'%'
nl|'\n'
op|'('
string|'"xxxx"'
op|','
name|'NOW'
op|'.'
name|'isoformat'
op|'('
op|')'
op|')'
op|')'
newline|'\n'
name|'req'
op|'.'
name|'environ'
op|'['
string|"'nova.context'"
op|']'
op|'='
name|'self'
op|'.'
name|'user_context'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPBadRequest'
op|','
nl|'\n'
name|'self'
op|'.'
name|'controller'
op|'.'
name|'show'
op|','
name|'req'
op|','
string|"'faketenant_0'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|_test_get_tenants_usage_with_one_date
dedent|''
name|'def'
name|'_test_get_tenants_usage_with_one_date'
op|'('
name|'self'
op|','
name|'date_url_param'
op|')'
op|':'
newline|'\n'
indent|' '
name|'req'
op|'='
name|'fakes'
op|'.'
name|'HTTPRequest'
op|'.'
name|'blank'
op|'('
string|"'?%s'"
op|'%'
name|'date_url_param'
op|')'
newline|'\n'
name|'req'
op|'.'
name|'environ'
op|'['
string|"'nova.context'"
op|']'
op|'='
name|'self'
op|'.'
name|'user_context'
newline|'\n'
name|'res'
op|'='
name|'self'
op|'.'
name|'controller'
op|'.'
name|'show'
op|'('
name|'req'
op|','
string|"'faketenant_0'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
string|"'tenant_usage'"
op|','
name|'res'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_tenants_usage_with_no_start_date
dedent|''
name|'def'
name|'test_get_tenants_usage_with_no_start_date'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_test_get_tenants_usage_with_one_date'
op|'('
nl|'\n'
string|"'end=%s'"
op|'%'
op|'('
name|'NOW'
op|'+'
name|'datetime'
op|'.'
name|'timedelta'
op|'('
number|'5'
op|')'
op|')'
op|'.'
name|'isoformat'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_tenants_usage_with_no_end_date
dedent|''
name|'def'
name|'test_get_tenants_usage_with_no_end_date'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_test_get_tenants_usage_with_one_date'
op|'('
nl|'\n'
string|"'start=%s'"
op|'%'
op|'('
name|'NOW'
op|'-'
name|'datetime'
op|'.'
name|'timedelta'
op|'('
number|'5'
op|')'
op|')'
op|'.'
name|'isoformat'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|SimpleTenantUsageControllerTestV21
dedent|''
dedent|''
name|'class'
name|'SimpleTenantUsageControllerTestV21'
op|'('
name|'test'
op|'.'
name|'TestCase'
op|')'
op|':'
newline|'\n'
DECL|variable|controller
indent|' '
name|'controller'
op|'='
name|'simple_tenant_usage_v21'
op|'.'
name|'SimpleTenantUsageController'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'SimpleTenantUsageControllerTestV21'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'fakeuser'"
op|','
string|"'fake-project'"
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'baseinst'
op|'='
name|'get_fake_db_instance'
op|'('
name|'START'
op|','
name|'STOP'
op|','
name|'instance_id'
op|'='
number|'1'
op|','
nl|'\n'
name|'tenant_id'
op|'='
name|'self'
op|'.'
name|'context'
op|'.'
name|'project_id'
op|','
nl|'\n'
name|'vm_state'
op|'='
name|'vm_states'
op|'.'
name|'DELETED'
op|')'
newline|'\n'
comment|'# convert the fake instance dict to an object'
nl|'\n'
name|'flavor'
op|'='
name|'fake_flavor'
op|'.'
name|'fake_flavor_obj'
op|'('
name|'self'
op|'.'
name|'context'
op|','
op|'**'
name|'FAKE_INST_TYPE'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'inst_obj'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'.'
name|'_from_db_object'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'objects'
op|'.'
name|'Instance'
op|'('
op|')'
op|','
name|'self'
op|'.'
name|'baseinst'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'inst_obj'
op|'.'
name|'flavor'
op|'='
name|'flavor'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.Instance.get_flavor'"
op|','
nl|'\n'
name|'side_effect'
op|'='
name|'exception'
op|'.'
name|'NotFound'
op|'('
op|')'
op|')'
newline|'\n'
DECL|member|test_get_flavor_from_non_deleted_with_id_fails
name|'def'
name|'test_get_flavor_from_non_deleted_with_id_fails'
op|'('
name|'self'
op|','
name|'fake_get_flavor'
op|')'
op|':'
newline|'\n'
comment|'# If an instance is not deleted and missing type information from'
nl|'\n'
comment|"# instance.flavor, then that's a bug"
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'NotFound'
op|','
nl|'\n'
name|'self'
op|'.'
name|'controller'
op|'.'
name|'_get_flavor'
op|','
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'self'
op|'.'
name|'inst_obj'
op|','
op|'{'
op|'}'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.Instance.get_flavor'"
op|','
nl|'\n'
name|'side_effect'
op|'='
name|'exception'
op|'.'
name|'NotFound'
op|'('
op|')'
op|')'
newline|'\n'
DECL|member|test_get_flavor_from_deleted_with_notfound
name|'def'
name|'test_get_flavor_from_deleted_with_notfound'
op|'('
name|'self'
op|','
name|'fake_get_flavor'
op|')'
op|':'
newline|'\n'
comment|'# If the flavor is not found from the instance and the instance is'
nl|'\n'
comment|"# deleted, attempt to look it up from the DB and if found we're OK."
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'inst_obj'
op|'.'
name|'deleted'
op|'='
number|'1'
newline|'\n'
name|'flavor'
op|'='
name|'self'
op|'.'
name|'controller'
op|'.'
name|'_get_flavor'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'self'
op|'.'
name|'inst_obj'
op|','
op|'{'
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'objects'
op|'.'
name|'Flavor'
op|','
name|'type'
op|'('
name|'flavor'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'FAKE_INST_TYPE'
op|'['
string|"'id'"
op|']'
op|','
name|'flavor'
op|'.'
name|'id'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.Instance.get_flavor'"
op|','
nl|'\n'
name|'side_effect'
op|'='
name|'exception'
op|'.'
name|'NotFound'
op|'('
op|')'
op|')'
newline|'\n'
DECL|member|test_get_flavor_from_deleted_with_id_of_deleted
name|'def'
name|'test_get_flavor_from_deleted_with_id_of_deleted'
op|'('
name|'self'
op|','
name|'fake_get_flavor'
op|')'
op|':'
newline|'\n'
comment|'# Verify the legacy behavior of instance_type_id pointing to a'
nl|'\n'
comment|'# missing type being non-fatal'
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'inst_obj'
op|'.'
name|'deleted'
op|'='
number|'1'
newline|'\n'
name|'self'
op|'.'
name|'inst_obj'
op|'.'
name|'instance_type_id'
op|'='
number|'99'
newline|'\n'
name|'flavor'
op|'='
name|'self'
op|'.'
name|'controller'
op|'.'
name|'_get_flavor'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'self'
op|'.'
name|'inst_obj'
op|','
op|'{'
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'flavor'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|SimpleTenantUsageUtilsV21
dedent|''
dedent|''
name|'class'
name|'SimpleTenantUsageUtilsV21'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|variable|simple_tenant_usage
indent|' '
name|'simple_tenant_usage'
op|'='
name|'simple_tenant_usage_v21'
newline|'\n'
nl|'\n'
DECL|member|test_valid_string
name|'def'
name|'test_valid_string'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'dt'
op|'='
name|'self'
op|'.'
name|'simple_tenant_usage'
op|'.'
name|'parse_strtime'
op|'('
nl|'\n'
string|'"2014-02-21T13:47:20.824060"'
op|','
string|'"%Y-%m-%dT%H:%M:%S.%f"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'microsecond'
op|'='
number|'824060'
op|','
name|'second'
op|'='
number|'20'
op|','
name|'minute'
op|'='
number|'47'
op|','
name|'hour'
op|'='
number|'13'
op|','
nl|'\n'
name|'day'
op|'='
number|'21'
op|','
name|'month'
op|'='
number|'2'
op|','
name|'year'
op|'='
number|'2014'
op|')'
op|','
name|'dt'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_invalid_string
dedent|''
name|'def'
name|'test_invalid_string'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidStrTime'
op|','
nl|'\n'
name|'self'
op|'.'
name|'simple_tenant_usage'
op|'.'
name|'parse_strtime'
op|','
nl|'\n'
string|'"2014-02-21 13:47:20.824060"'
op|','
nl|'\n'
string|'"%Y-%m-%dT%H:%M:%S.%f"'
op|')'
newline|'\n'
dedent|''
dedent|''
endmarker|''
end_unit
| 12.516724
| 88
| 0.60766
| 4,416
| 29,189
| 3.897418
| 0.07269
| 0.168729
| 0.069142
| 0.064261
| 0.794782
| 0.754227
| 0.705305
| 0.650224
| 0.615188
| 0.540236
| 0
| 0.004977
| 0.09829
| 29,189
| 2,331
| 89
| 12.522094
| 0.648936
| 0
| 0
| 0.920635
| 0
| 0
| 0.361951
| 0.054541
| 0
| 0
| 0
| 0
| 0.009867
| 0
| null | null | 0
| 0.007293
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
d3d5c7a6752012a506b2b4016dfc3d39596a8347
| 771
|
py
|
Python
|
tests/test_python_type_to_typescript.py
|
conanfanli/py2ts
|
8543ad03f19f094b0771c3b0cfc26a89eefd95ed
|
[
"MIT"
] | 3
|
2020-04-10T22:09:44.000Z
|
2020-11-29T07:19:28.000Z
|
tests/test_python_type_to_typescript.py
|
conanfanli/py2ts
|
8543ad03f19f094b0771c3b0cfc26a89eefd95ed
|
[
"MIT"
] | 1
|
2020-04-11T14:25:50.000Z
|
2020-04-11T14:25:50.000Z
|
tests/test_python_type_to_typescript.py
|
conanfanli/py2ts
|
8543ad03f19f094b0771c3b0cfc26a89eefd95ed
|
[
"MIT"
] | 1
|
2021-05-15T09:22:41.000Z
|
2021-05-15T09:22:41.000Z
|
from datetime import date, datetime
from decimal import Decimal
import unittest
from py2ts.python2ts import python_type_to_typescript
class Enum2TsTestCase(unittest.TestCase):
    """Tests for the primitive-type mapping of ``python_type_to_typescript``."""

    def test_python_type_to_typescript(self) -> None:
        """Each supported Python type maps to its TypeScript equivalent."""
        expected_mappings = {
            str: "string",
            int: "number",
            bool: "boolean",
            float: "number",
            datetime: "string",
            date: "string",
            Decimal: "string",
            dict: "{}",
            list: "Array<any>",
        }
        for python_type, ts_type in expected_mappings.items():
            assert python_type_to_typescript(python_type) == ts_type
| 40.578947
| 62
| 0.734112
| 94
| 771
| 5.659574
| 0.329787
| 0.206767
| 0.24812
| 0.454887
| 0.541353
| 0.383459
| 0
| 0
| 0
| 0
| 0
| 0.004724
| 0.176394
| 771
| 18
| 63
| 42.833333
| 0.833071
| 0
| 0
| 0
| 0
| 0
| 0.071336
| 0
| 0
| 0
| 0
| 0
| 0.6
| 1
| 0.066667
| false
| 0
| 0.266667
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
d3dd7c181e674859dbb88542378c383bba05d1ce
| 386
|
py
|
Python
|
forms_app/models.py
|
cs-fullstack-fall-2018/django-forms3-myiahm
|
63159ccf6cf0d2b387d78f8288e9506cf7046e55
|
[
"Apache-2.0"
] | null | null | null |
forms_app/models.py
|
cs-fullstack-fall-2018/django-forms3-myiahm
|
63159ccf6cf0d2b387d78f8288e9506cf7046e55
|
[
"Apache-2.0"
] | null | null | null |
forms_app/models.py
|
cs-fullstack-fall-2018/django-forms3-myiahm
|
63159ccf6cf0d2b387d78f8288e9506cf7046e55
|
[
"Apache-2.0"
] | null | null | null |
from django.db import models
from datetime import datetime
class NonProfit(models.Model):
    """Django model describing a non-profit organization.

    NOTE(review): operatingBudget and numberOfEmployees are stored as
    CharFields rather than numeric fields -- presumably free-form input;
    confirm before relying on them for arithmetic or ordering.
    """

    # All text fields share the same 200-character cap.
    name = models.CharField(max_length=200)
    address = models.CharField(max_length=200)
    establishedDate = models.DateField()
    operatingBudget = models.CharField(max_length=200)
    numberOfEmployees = models.CharField(max_length=200)

    def __str__(self):
        # Human-readable identification in the admin and shell.
        return self.name
| 25.733333
| 56
| 0.746114
| 46
| 386
| 6.086957
| 0.5
| 0.214286
| 0.257143
| 0.342857
| 0.385714
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037383
| 0.168394
| 386
| 15
| 57
| 25.733333
| 0.834891
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.2
| 0.1
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
d3dfebcf35bfb0cdb61eda4b4e913721fef194bb
| 6,543
|
py
|
Python
|
quedadas/controllers/trophyCtrl.py
|
fevsea/meet-Run-Server
|
48454a4665f55da019334271641c514df231f177
|
[
"MIT"
] | null | null | null |
quedadas/controllers/trophyCtrl.py
|
fevsea/meet-Run-Server
|
48454a4665f55da019334271641c514df231f177
|
[
"MIT"
] | null | null | null |
quedadas/controllers/trophyCtrl.py
|
fevsea/meet-Run-Server
|
48454a4665f55da019334271641c514df231f177
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
from quedadas.controllers import firebaseCtrl
class TrophySerializer(serializers.Serializer):
    """Read-only serializer exposing one boolean flag per trophy.

    Every ``SerializerMethodField`` named ``<category>_<threshold>`` is
    resolved by DRF through the matching ``get_<category>_<threshold>``
    static method, which compares a statistic on the serialized object
    against the threshold encoded in the field name.
    """

    def update(self, instance, validated_data):
        # Trophy flags are derived data; this serializer never writes.
        pass

    def create(self, validated_data):
        # See update(): read-only serializer.
        pass

    # Cumulative distance (obj.distance; presumably meters, since km_1
    # compares against 1 * 1000 -- TODO confirm unit).
    km_1 = serializers.SerializerMethodField()
    km_10 = serializers.SerializerMethodField()
    km_100 = serializers.SerializerMethodField()
    km_1000 = serializers.SerializerMethodField()
    # Cumulative time (obj.totalTimeMillis, milliseconds per its name).
    h_1 = serializers.SerializerMethodField()
    h_10 = serializers.SerializerMethodField()
    h_100 = serializers.SerializerMethodField()
    h_1000 = serializers.SerializerMethodField()
    # Completed meetings (obj.meetingsCompletats).
    meetings_1 = serializers.SerializerMethodField()
    meetings_5 = serializers.SerializerMethodField()
    meetings_10 = serializers.SerializerMethodField()
    meetings_20 = serializers.SerializerMethodField()
    meetings_50 = serializers.SerializerMethodField()
    # Profile level (obj.prof.level).
    level_1 = serializers.SerializerMethodField()
    level_5 = serializers.SerializerMethodField()
    level_10 = serializers.SerializerMethodField()
    level_25 = serializers.SerializerMethodField()
    level_40 = serializers.SerializerMethodField()
    level_50 = serializers.SerializerMethodField()
    # Longest single run (obj.maxDistance; same unit as obj.distance).
    max_distance_1 = serializers.SerializerMethodField()
    max_distance_5 = serializers.SerializerMethodField()
    max_distance_10 = serializers.SerializerMethodField()
    max_distance_21 = serializers.SerializerMethodField()
    max_distance_42 = serializers.SerializerMethodField()
    # Step count (obj.steps).
    steps_10000 = serializers.SerializerMethodField()
    steps_20000 = serializers.SerializerMethodField()
    steps_25000 = serializers.SerializerMethodField()
    steps_50000 = serializers.SerializerMethodField()
    steps_100000 = serializers.SerializerMethodField()
    # Completed challenges (obj.challenges).
    challenges_1 = serializers.SerializerMethodField()
    challenges_5 = serializers.SerializerMethodField()
    challenges_10 = serializers.SerializerMethodField()
    challenges_20 = serializers.SerializerMethodField()
    # Friend count (obj.prof.friend_number).
    friends_1 = serializers.SerializerMethodField()
    friends_5 = serializers.SerializerMethodField()
    friends_10 = serializers.SerializerMethodField()
    friends_20 = serializers.SerializerMethodField()

    @staticmethod
    def get_km_1(obj):
        return 1 * 1000 <= obj.distance

    @staticmethod
    def get_km_10(obj):
        return 10 * 1000 <= obj.distance

    @staticmethod
    def get_km_100(obj):
        return 100 * 1000 <= obj.distance

    @staticmethod
    def get_km_1000(obj):
        return 1000 * 1000 <= obj.distance

    @staticmethod
    def get_h_1(obj):
        return 1 * 1000 * 3600 <= obj.totalTimeMillis

    @staticmethod
    def get_h_10(obj):
        return 10 * 1000 * 3600 <= obj.totalTimeMillis

    @staticmethod
    def get_h_100(obj):
        return 100 * 1000 * 3600 <= obj.totalTimeMillis

    @staticmethod
    def get_h_1000(obj):
        return 1000 * 1000 * 3600 <= obj.totalTimeMillis

    @staticmethod
    def get_meetings_1(obj):
        return 1 <= obj.meetingsCompletats

    @staticmethod
    def get_meetings_5(obj):
        return 5 <= obj.meetingsCompletats

    @staticmethod
    def get_meetings_10(obj):
        return 10 <= obj.meetingsCompletats

    @staticmethod
    def get_meetings_20(obj):
        return 20 <= obj.meetingsCompletats

    @staticmethod
    def get_meetings_50(obj):
        return 50 <= obj.meetingsCompletats

    @staticmethod
    def get_level_1(obj):
        return 1 <= obj.prof.level

    @staticmethod
    def get_level_5(obj):
        return 5 <= obj.prof.level

    @staticmethod
    def get_level_10(obj):
        return 10 <= obj.prof.level

    @staticmethod
    def get_level_25(obj):
        return 25 <= obj.prof.level

    @staticmethod
    def get_level_40(obj):
        return 40 <= obj.prof.level

    @staticmethod
    def get_level_50(obj):
        return 50 <= obj.prof.level

    @staticmethod
    def get_max_distance_1(obj):
        return 1 * 1000 <= obj.maxDistance

    @staticmethod
    def get_max_distance_5(obj):
        return 5 * 1000 <= obj.maxDistance

    @staticmethod
    def get_max_distance_10(obj):
        return 10 * 1000 <= obj.maxDistance

    @staticmethod
    def get_max_distance_21(obj):
        return 21 * 1000 <= obj.maxDistance

    @staticmethod
    def get_max_distance_42(obj):
        return 42 * 1000 <= obj.maxDistance

    @staticmethod
    def get_steps_10000(obj):
        return 10000 <= obj.steps

    @staticmethod
    def get_steps_20000(obj):
        return 20000 <= obj.steps

    @staticmethod
    def get_steps_25000(obj):
        return 25000 <= obj.steps

    @staticmethod
    def get_steps_50000(obj):
        return 50000 <= obj.steps

    @staticmethod
    def get_steps_100000(obj):
        return 100000 <= obj.steps

    @staticmethod
    def get_challenges_1(obj):
        return 1 <= obj.challenges

    @staticmethod
    def get_challenges_5(obj):
        return 5 <= obj.challenges

    @staticmethod
    def get_challenges_10(obj):
        return 10 <= obj.challenges

    @staticmethod
    def get_challenges_20(obj):
        return 20 <= obj.challenges

    @staticmethod
    def get_friends_1(obj):
        return 1 <= obj.prof.friend_number

    @staticmethod
    def get_friends_5(obj):
        return 5 <= obj.prof.friend_number

    @staticmethod
    def get_friends_10(obj):
        return 10 <= obj.prof.friend_number

    @staticmethod
    def get_friends_20(obj):
        return 20 <= obj.prof.friend_number
def check_km(stats, old, new):
    """Report any km-distance trophies crossed between the old and new totals."""
    milestones = [1, 10, 100, 1000]
    check(stats, old, new, "km", milestones)
def check_h(stats, old, new):
    """Report hour trophies; old/new are in milliseconds, converted to hours."""
    ms_per_hour = 3600 * 1000
    check(stats, old / ms_per_hour, new / ms_per_hour, "h", [1, 10, 100, 1000])
def check_meetings(stats, old, new):
    """Report meeting-count trophies crossed between the old and new totals."""
    milestones = [1, 5, 10, 20, 50]
    check(stats, old, new, "meetings", milestones)
def check_level(stats, old, new):
    """Report level trophies crossed between the old and new levels."""
    # NOTE(review): 20 is listed here but TrophySerializer declares no
    # level_20 field -- confirm whether a "level_20" trophy is intended.
    milestones = [1, 5, 10, 20, 25, 40, 50]
    check(stats, old, new, "level", milestones)
def check_max_distance(stats, old, new):
    """Report longest-single-distance trophies crossed by the new record."""
    milestones = [1, 5, 10, 21, 42]
    check(stats, old, new, "max_distance", milestones)
def check_steps(stats, old, new):
    """Report step-count trophies crossed between the old and new totals."""
    milestones = [10000, 20000, 25000, 50000, 100000]
    check(stats, old, new, "steps", milestones)
def check_challenges(stats, old, new):
    """Report challenge-count trophies crossed between the old and new totals."""
    milestones = [1, 5, 10, 20]
    check(stats, old, new, "challenges", milestones)
def check_friends(user):
    """Report friend-count trophies after the latest friend was added.

    Assumes the friend counter was just incremented by one, so the previous
    value is current - 1.
    """
    current = user.prof.friend_number
    previous = current - 1
    check(user.prof.statistics, previous, current, "friends", [1, 5, 10, 20])
def check(stats, old, new, prefix, values):
    """Notify firebase once for every threshold in *values* that the
    statistic crossed when moving from *old* to *new*."""
    for threshold in values:
        # A trophy fires only on the transition: below before, at-or-above now.
        if old < threshold <= new:
            firebaseCtrl.trophy_obtained(stats, prefix + '_' + str(threshold))
| 27.607595
| 83
| 0.682714
| 748
| 6,543
| 5.783422
| 0.096257
| 0.273694
| 0.153953
| 0.036986
| 0.194637
| 0.132917
| 0.037448
| 0
| 0
| 0
| 0
| 0.082957
| 0.220694
| 6,543
| 236
| 84
| 27.724576
| 0.765444
| 0
| 0
| 0.221591
| 0
| 0
| 0.007795
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0.011364
| 0.011364
| 0.210227
| 0.710227
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
d3e7e8270f672cea41c27e7c90782dc7a79dfc56
| 40,208
|
py
|
Python
|
retirement/tests/tests_viewset_Reservation.py
|
MelanieFJNR/Blitz-API
|
9a6daecd158fe07a6aeb80cbf586781eb688f0f9
|
[
"MIT"
] | null | null | null |
retirement/tests/tests_viewset_Reservation.py
|
MelanieFJNR/Blitz-API
|
9a6daecd158fe07a6aeb80cbf586781eb688f0f9
|
[
"MIT"
] | null | null | null |
retirement/tests/tests_viewset_Reservation.py
|
MelanieFJNR/Blitz-API
|
9a6daecd158fe07a6aeb80cbf586781eb688f0f9
|
[
"MIT"
] | null | null | null |
import json
import pytz
import responses
from datetime import datetime
from rest_framework import status
from rest_framework.test import APIClient
from django.urls import reverse
from django.utils import timezone
from django.conf import settings
from django.core import mail
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth import get_user_model
from django.test.utils import override_settings
from unittest import mock
from blitz_api.factories import (
UserFactory,
AdminFactory,
)
from blitz_api.testing_tools import CustomAPITestCase
from log_management.models import EmailLog
from store.models import (
Order,
OrderLine,
)
from store.tests.paysafe_sample_responses import (
SAMPLE_REFUND_RESPONSE,
SAMPLE_NO_AMOUNT_TO_REFUND,
UNKNOWN_EXCEPTION,
)
from retirement.models import (
Retreat,
Reservation, RetreatType, RetreatDate,
)
User = get_user_model()
LOCAL_TIMEZONE = pytz.timezone(settings.TIME_ZONE)
TAX_RATE = settings.LOCAL_SETTINGS['SELLING_TAX']
@override_settings(
PAYSAFE={
'ACCOUNT_NUMBER': "0123456789",
'USER': "user",
'PASSWORD': "password",
'BASE_URL': "http://example.com/",
'VAULT_URL': "customervault/v1/",
'CARD_URL': "cardpayments/v1/"
}
)
class ReservationTests(CustomAPITestCase):
ATTRIBUTES = [
'id',
'url',
'inscription_date',
'is_active',
'is_present',
'user',
'cancelation_action',
'cancelation_date',
'cancelation_reason',
'refundable',
'exchangeable',
'retreat',
'order_line',
'invitation',
'post_event_send',
'pre_event_send',
'retreat_details',
'user_details',
]
def setUp(self):
self.client = APIClient()
self.user = UserFactory()
self.user2 = UserFactory()
self.admin = AdminFactory()
self.retreat_content_type = ContentType.objects.get_for_model(Retreat)
self.retreatType = RetreatType.objects.create(
name="Type 1",
minutes_before_display_link=10,
number_of_tomatoes=4,
)
self.retreat = Retreat.objects.create(
name="mega_retreat",
details="This is a description of the mega retreat.",
seats=400,
address_line1="123 random street",
postal_code="123 456",
state_province="Random state",
country="Random country",
price=199,
min_day_refund=7,
min_day_exchange=7,
refund_rate=50,
accessibility=True,
form_url="example.com",
carpool_url='example2.com',
review_url='example3.com',
has_shared_rooms=True,
toilet_gendered=False,
room_type=Retreat.SINGLE_OCCUPATION,
type=self.retreatType,
)
RetreatDate.objects.create(
start_time=LOCAL_TIMEZONE.localize(datetime(2130, 1, 15, 8)),
end_time=LOCAL_TIMEZONE.localize(datetime(2130, 1, 17, 12)),
retreat=self.retreat,
)
self.retreat.activate()
self.retreat.add_wait_queue_place(self.user, generate_cron=False)
self.retreat2 = Retreat.objects.create(
name="random_retreat",
details="This is a description of the retreat.",
seats=40,
address_line1="123 random street",
postal_code="123 456",
state_province="Random state",
country="Random country",
price=199,
min_day_refund=7,
min_day_exchange=7,
refund_rate=100,
accessibility=True,
form_url="example.com",
carpool_url='example2.com',
review_url='example3.com',
has_shared_rooms=True,
toilet_gendered=False,
room_type=Retreat.SINGLE_OCCUPATION,
type=self.retreatType,
)
RetreatDate.objects.create(
start_time=LOCAL_TIMEZONE.localize(datetime(2130, 2, 15, 8)),
end_time=LOCAL_TIMEZONE.localize(datetime(2130, 2, 17, 12)),
retreat=self.retreat2,
)
self.retreat_overlap = Retreat.objects.create(
name="ultra_retreat",
details="This is a description of the ultra retreat.",
seats=400,
address_line1="1234 random street",
postal_code="654 321",
state_province="Random state 2",
country="Random country 2",
price=199,
min_day_refund=7,
min_day_exchange=7,
refund_rate=50,
accessibility=True,
form_url="example.com",
carpool_url='example2.com',
review_url='example3.com',
has_shared_rooms=True,
toilet_gendered=False,
room_type=Retreat.SINGLE_OCCUPATION,
type=self.retreatType,
)
RetreatDate.objects.create(
start_time=LOCAL_TIMEZONE.localize(datetime(2130, 1, 15, 8)),
end_time=LOCAL_TIMEZONE.localize(datetime(2130, 1, 17, 12)),
retreat=self.retreat_overlap,
)
self.retreat_overlap.activate()
self.order = Order.objects.create(
user=self.user,
transaction_date=timezone.now(),
authorization_id=1,
settlement_id=1,
)
self.order_line = OrderLine.objects.create(
order=self.order,
quantity=1,
content_type=self.retreat_content_type,
object_id=self.retreat.id,
cost=self.retreat.price
)
self.reservation = Reservation.objects.create(
user=self.user,
retreat=self.retreat,
order_line=self.order_line,
is_active=True,
)
self.reservation_expected_payload = {
'id': self.reservation.id,
'is_active': True,
'is_present': False,
'retreat': 'http://testserver/retreat/retreats/' +
str(self.reservation.retreat.id),
'url': 'http://testserver/retreat/reservations/' +
str(self.reservation.id),
'user': 'http://testserver/users/' + str(self.user.id),
'order_line': 'http://testserver/order_lines/' +
str(self.order_line.id),
'cancelation_date': None,
'cancelation_action': None,
'cancelation_reason': None,
'refundable': True,
'exchangeable': True,
'invitation': None,
'post_event_send': False,
'pre_event_send': False,
}
self.reservation2 = Reservation.objects.create(
user=self.user2,
retreat=self.retreat,
is_active=True,
)
self.reservation2_expected_payload = {
'id': self.reservation2.id,
'is_active': True,
'is_present': False,
'retreat': 'http://testserver/retreat/retreats/' +
str(self.reservation2.retreat.id),
'url': 'http://testserver/retreat/reservations/' +
str(self.reservation2.id),
'user': 'http://testserver/users/' + str(self.user2.id),
'order_line': None,
'cancelation_date': None,
'cancelation_action': None,
'cancelation_reason': None,
'refundable': True,
'exchangeable': True,
'invitation': None,
'post_event_send': False,
'pre_event_send': False,
}
self.reservation_admin = Reservation.objects.create(
user=self.admin,
retreat=self.retreat2,
order_line=self.order_line,
is_active=True,
)
def test_create(self):
self.maxDiff = None
"""
Ensure we can create a reservation if user has permission.
It is possible to create reservations for INACTIVE retreats.
"""
self.client.force_authenticate(user=self.admin)
data = {
'retreat': reverse(
'retreat:retreat-detail', args=[self.retreat2.id]
),
'user': reverse('user-detail', args=[self.user.id]),
}
response = self.client.post(
reverse('retreat:reservation-list'),
data,
format='json',
)
self.assertEqual(
response.status_code,
status.HTTP_201_CREATED
)
content = json.loads(response.content)
self.assertCountEqual(
content['retreat_details']['users'],
[
'http://testserver/users/' + str(self.admin.id),
'http://testserver/users/' + str(self.user.id)
]
)
self.check_attributes(content)
def test_create_without_permission(self):
"""
Ensure we can't create a reservation if user has no permission.
"""
self.client.force_authenticate(user=self.user)
data = {
'retreat': reverse(
'retreat:retreat-detail', args=[self.retreat.id]
),
'user': reverse('user-detail', args=[self.user.id]),
'order_line': reverse(
'orderline-detail', args=[self.order_line.id]),
'is_active': True,
}
response = self.client.post(
reverse('retreat:reservation-list'),
data,
format='json',
)
content = {
'detail': 'You do not have permission to perform this action.'
}
self.assertEqual(json.loads(response.content), content)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_create_overlapping(self):
"""
Ensure we can't create reservations with overlapping retreat for the
same user.
"""
self.client.force_authenticate(user=self.admin)
data = {
'retreat': reverse(
'retreat:retreat-detail',
args=[self.retreat_overlap.id]
),
'user': reverse('user-detail', args=[self.user.id]),
'order_line': reverse(
'orderline-detail', args=[self.order_line.id]),
'is_active': True,
}
response = self.client.post(
reverse('retreat:reservation-list'),
data,
format='json',
)
content = {
'non_field_errors': [
'This reservation overlaps with another active reservations '
'for this user.'
]
}
self.assertEqual(json.loads(response.content), content)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_create_duplicate(self):
"""
Ensure we cannot create the same reservation multiple times.
Overlapping reservation error is sent
"""
self.client.force_authenticate(user=self.admin)
data = {
'retreat': reverse(
'retreat:retreat-detail', args=[self.retreat.id]
),
'user': reverse('user-detail', args=[self.user.id]),
'order_line': reverse(
'orderline-detail', args=[self.order_line.id]),
'is_active': True,
}
response = self.client.post(
reverse('retreat:reservation-list'),
data,
format='json',
)
content = {
'non_field_errors': [
'This reservation overlaps with another active reservations '
'for this user.'
]
}
self.assertEqual(json.loads(response.content), content)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_create_non_existent_period_user(self):
"""
Ensure we can't create a reservation with a non-existent retreat or
user.
"""
self.client.force_authenticate(user=self.admin)
data = {
'retreat': reverse('retreat:retreat-detail', args=[999]),
'user': reverse('user-detail', args=[999]),
'order_line': reverse('orderline-detail', args=[999]),
'is_active': True,
}
response = self.client.post(
reverse('retreat:reservation-list'),
data,
format='json',
)
content = {
'retreat': ['Invalid hyperlink - Object does not exist.'],
'user': ['Invalid hyperlink - Object does not exist.'],
'order_line': ['Invalid hyperlink - Object does not exist.']
}
self.assertEqual(json.loads(response.content), content)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_create_missing_field(self):
"""
Ensure we can't create a reservation when required field are missing.
"""
self.client.force_authenticate(user=self.admin)
data = {}
response = self.client.post(
reverse('retreat:reservation-list'),
data,
format='json',
)
content = {
'user': ['This field is required.'],
'retreat': ['This field is required.'],
}
self.assertEqual(json.loads(response.content), content)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_create_blank_field(self):
"""
Ensure we can't create a reservation when required field are blank.
"""
self.client.force_authenticate(user=self.admin)
data = {
'user': None,
'retreat': None,
'order_line': None,
'is_active': None,
}
response = self.client.post(
reverse('retreat:reservation-list'),
data,
format='json',
)
content = {
'user': ['This field may not be null.'],
'retreat': ['This field may not be null.'],
'is_active': ['This field may not be null.'],
}
self.assertEqual(json.loads(response.content), content)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_create_invalid_field(self):
"""
Ensure we can't create a reservation when required field are invalid.
"""
self.client.force_authenticate(user=self.admin)
data = {
'user': "invalid",
'retreat': "invalid",
'order_line': "invalid",
'is_active': "invalid",
}
response = self.client.post(
reverse('retreat:reservation-list'),
data,
format='json',
)
content = {
'user': ['Invalid hyperlink - No URL match.'],
'retreat': ['Invalid hyperlink - No URL match.'],
'order_line': ['Invalid hyperlink - No URL match.'],
'is_active': ['Must be a valid boolean.'],
}
self.assertEqual(json.loads(response.content), content)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_create_no_place_left(self):
"""
Ensure we can't create a reservation if there is no place left
"""
self.client.force_authenticate(user=self.admin)
self.retreat2.seats = 0
self.retreat2.save()
data = {
'retreat': reverse(
'retreat:retreat-detail', args=[self.retreat2.id]
),
'user': reverse('user-detail', args=[self.user.id]),
'order_line': reverse(
'orderline-detail', args=[self.order_line.id]),
'is_active': True,
}
response = self.client.post(
reverse('retreat:reservation-list'),
data,
format='json',
)
content = {
'non_field_errors': [
"This retreat doesn't have available places. Please "
'check number of seats available and reserved seats.'
]
}
self.assertEqual(json.loads(response.content), content)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_list(self):
"""
Ensure we can list reservations as an admin.
"""
self.client.force_authenticate(user=self.admin)
self.maxDiff = None
response = self.client.get(
reverse('retreat:reservation-list'),
format='json',
)
data = json.loads(response.content)
del data['results'][0]['user_details']
del data['results'][0]['retreat_details']
del data['results'][1]['user_details']
del data['results'][1]['retreat_details']
del data['results'][2]['user_details']
del data['results'][2]['retreat_details']
del data['results'][0]['inscription_date']
del data['results'][1]['inscription_date']
del data['results'][2]['inscription_date']
content = {
'count': 3,
'next': None,
'previous': None,
'results': [
self.reservation_expected_payload,
self.reservation2_expected_payload,
{
'id': self.reservation_admin.id,
'is_active': True,
'is_present': False,
'retreat': 'http://testserver/retreat/retreats/' +
str(self.retreat2.id),
'url': 'http://testserver/retreat/reservations/' +
str(self.reservation_admin.id),
'user': 'http://testserver/users/' + str(self.admin.id),
'order_line': 'http://testserver/order_lines/' +
str(self.order_line.id),
'cancelation_date': None,
'cancelation_action': None,
'cancelation_reason': None,
'refundable': True,
'exchangeable': True,
'invitation': None,
'post_event_send': False,
'pre_event_send': False,
}
]
}
self.assertEqual(data, content)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_list_as_non_admin(self):
"""
Ensure that a user can list its reservations.
Be wary: a user can see the list of user ID that are associated with
the reservation's retreat.
"""
self.client.force_authenticate(user=self.user)
response = self.client.get(
reverse('retreat:reservation-list'),
format='json',
)
data = json.loads(response.content)
del data['results'][0]['user_details']
del data['results'][0]['retreat_details']
del data['results'][0]['inscription_date']
content = {
'count': 1,
'next': None,
'previous': None,
'results': [self.reservation_expected_payload]
}
self.assertEqual(data, content)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_read(self):
"""
Ensure that a user can read one of his reservations.
"""
self.client.force_authenticate(user=self.user)
response = self.client.get(
reverse(
'retreat:reservation-detail',
kwargs={'pk': self.reservation.id},
),
)
response_data = json.loads(response.content)
del response_data['user_details']
del response_data['retreat_details']
del response_data['inscription_date']
self.assertEqual(response_data, self.reservation_expected_payload)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_read_inactive_non_admin(self):
"""
Ensure we can't read a reservation as non_admin if it is not owned.
"""
self.client.force_authenticate(user=self.user)
response = self.client.get(
reverse(
'retreat:reservation-detail',
kwargs={'pk': self.reservation_admin.id},
),
)
content = {'detail': 'Not found.'}
self.assertEqual(json.loads(response.content), content)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_read_non_existent(self):
"""
Ensure we get not found when asking for a period that doesn't exist.
"""
self.client.force_authenticate(user=self.admin)
response = self.client.get(
reverse(
'retreat:retreat-detail',
kwargs={'pk': 999},
),
)
content = {'detail': 'Not found.'}
self.assertEqual(json.loads(response.content), content)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
@responses.activate
def test_delete(self):
"""
Ensure that a user can cancel one of his retreat reservations.
By canceling 'min_day_refund' days or more before the event, the user
will be refunded 'refund_rate'% of the price paid.
The user will receive an email confirming the refund or inviting the
user to contact the support if payment informations are no longer
valid.
If the user cancels less than 'min_day_refund' days before the event,
no refund is made.
"""
self.client.force_authenticate(user=self.user)
responses.add(
responses.POST,
"http://example.com/cardpayments/v1/accounts/0123456789/"
"settlements/1/refunds",
json=SAMPLE_REFUND_RESPONSE,
status=200
)
FIXED_TIME = datetime(2018, 1, 1, tzinfo=LOCAL_TIMEZONE)
with mock.patch(
'django.utils.timezone.now', return_value=FIXED_TIME):
response = self.client.delete(
reverse(
'retreat:reservation-detail',
kwargs={'pk': self.reservation.id},
),
)
self.assertEqual(
response.status_code,
status.HTTP_204_NO_CONTENT,
response.content
)
self.reservation.refresh_from_db()
self.assertFalse(self.reservation.is_active)
self.assertEqual(self.reservation.cancelation_reason, 'U')
self.assertEqual(self.reservation.cancelation_action, 'R')
self.assertEqual(self.reservation.cancelation_date, FIXED_TIME)
self.reservation.is_active = True
self.reservation.cancelation_date = None
self.reservation.cancelation_reason = None
self.assertEqual(len(mail.outbox), 1)
@responses.activate
def test_delete_late(self):
"""
Ensure that a user can cancel one of his retreat reservations.
This cancelation does not respect 'min_day_refund', thus the user
will not be refunded.
The user won't receive any email.
"""
self.client.force_authenticate(user=self.user)
FIXED_TIME = datetime(2130, 1, 10, tzinfo=LOCAL_TIMEZONE)
with mock.patch(
'django.utils.timezone.now', return_value=FIXED_TIME):
response = self.client.delete(
reverse(
'retreat:reservation-detail',
kwargs={'pk': self.reservation.id},
),
)
self.assertEqual(
response.status_code,
status.HTTP_204_NO_CONTENT,
response.content
)
self.reservation.refresh_from_db()
self.assertFalse(self.reservation.is_active)
self.assertEqual(self.reservation.cancelation_reason, 'U')
self.assertEqual(self.reservation.cancelation_action, 'N')
self.assertEqual(self.reservation.cancelation_date, FIXED_TIME)
self.reservation.is_active = True
self.reservation.cancelation_date = None
self.reservation.cancelation_reason = None
self.assertEqual(len(mail.outbox), 0)
@responses.activate
def test_delete_non_refundable(self):
"""
Ensure that a user can cancel one of his retreat reservations.
This cancelation does not respect 'refundable', thus the user
will not be refunded.
The user won't receive any email.
"""
self.client.force_authenticate(user=self.user)
self.reservation.refundable = False
self.reservation.save()
FIXED_TIME = datetime(2000, 1, 10, tzinfo=LOCAL_TIMEZONE)
with mock.patch(
'django.utils.timezone.now', return_value=FIXED_TIME):
response = self.client.delete(
reverse(
'retreat:reservation-detail',
kwargs={'pk': self.reservation.pk},
),
)
self.assertEqual(
response.status_code,
status.HTTP_204_NO_CONTENT,
response.content
)
self.reservation.refresh_from_db()
self.assertFalse(self.reservation.is_active)
self.assertEqual(self.reservation.cancelation_reason, 'U')
self.assertEqual(self.reservation.cancelation_action, 'N')
self.assertEqual(self.reservation.cancelation_date, FIXED_TIME)
self.reservation.is_active = True
self.reservation.cancelation_date = None
self.reservation.cancelation_reason = None
self.assertEqual(len(mail.outbox), 0)
self.reservation.refundable = True
self.reservation.save()
@responses.activate
def test_delete_retirement_refundable_created_by_administrator(self):
"""
Ensure that a user can cancel one of his retreat reservations
created by an administrator.
Since the user didn't bought this reservation via the platform
via a manual administratior action he will not be automatically
refund.
The user won't receive any email.
Test when refundable is True, but we will not refund
"""
self.client.force_authenticate(user=self.user2)
FIXED_TIME = datetime(2000, 1, 10, tzinfo=LOCAL_TIMEZONE)
self.assertTrue(self.reservation2.refundable)
with mock.patch(
'django.utils.timezone.now', return_value=FIXED_TIME):
response = self.client.delete(
reverse(
'retreat:reservation-detail',
kwargs={'pk': self.reservation2.pk},
),
)
self.assertEqual(
response.status_code,
status.HTTP_204_NO_CONTENT,
response.content
)
self.reservation2.refresh_from_db()
self.assertFalse(self.reservation2.is_active)
self.assertEqual(self.reservation2.cancelation_reason, 'U')
self.assertEqual(self.reservation2.cancelation_action, 'N')
self.assertEqual(self.reservation2.cancelation_date, FIXED_TIME)
self.reservation2.is_active = True
self.reservation2.cancelation_date = None
self.reservation2.cancelation_reason = None
self.assertEqual(len(mail.outbox), 0)
@responses.activate
def test_delete_retirement_not_refundable_created_by_administrator(self):
"""
Ensure that a user can cancel one of his retreat reservations
created by an administrator.
Since the user didn't bought this reservation via the platform
via a manual administratior action he will not be automatically
refund.
The user won't receive any email.
Test when refundable is False, but we will not refund
"""
self.client.force_authenticate(user=self.user2)
FIXED_TIME = datetime(2000, 1, 10, tzinfo=LOCAL_TIMEZONE)
self.reservation2.refundable = False
self.reservation2.save()
self.reservation2.refresh_from_db()
self.assertFalse(self.reservation2.refundable)
with mock.patch(
'django.utils.timezone.now', return_value=FIXED_TIME):
response = self.client.delete(
reverse(
'retreat:reservation-detail',
kwargs={'pk': self.reservation2.pk},
),
)
self.assertEqual(
response.status_code,
status.HTTP_204_NO_CONTENT,
response.content
)
self.reservation2.refresh_from_db()
self.assertFalse(self.reservation2.is_active)
self.assertEqual(self.reservation2.cancelation_reason, 'U')
self.assertEqual(self.reservation2.cancelation_action, 'N')
self.assertEqual(self.reservation2.cancelation_date, FIXED_TIME)
self.reservation2.is_active = True
self.reservation2.cancelation_date = None
self.reservation2.cancelation_reason = None
self.assertEqual(len(mail.outbox), 0)
@responses.activate
def test_delete_scheduler_working(self):
"""
Ensure emails were sent to admins if the API fails to schedule
notifications.
"""
self.client.force_authenticate(user=self.admin)
self.retreat2.seats = self.retreat2.total_reservations
self.retreat2.save()
responses.add(
responses.POST,
"http://example.com/cardpayments/v1/accounts/0123456789/"
"settlements/1/refunds",
json=SAMPLE_REFUND_RESPONSE,
status=200
)
FIXED_TIME = datetime(2018, 1, 1, tzinfo=LOCAL_TIMEZONE)
with mock.patch(
'django.utils.timezone.now', return_value=FIXED_TIME):
response = self.client.delete(
reverse(
'retreat:reservation-detail',
kwargs={'pk': self.reservation_admin.id},
),
)
self.assertEqual(
response.status_code,
status.HTTP_204_NO_CONTENT,
response.content
)
self.reservation_admin.refresh_from_db()
self.assertFalse(self.reservation_admin.is_active)
self.assertEqual(self.reservation_admin.cancelation_reason, 'U')
self.assertEqual(self.reservation_admin.cancelation_action, 'R')
self.assertEqual(self.reservation_admin.cancelation_date,
FIXED_TIME)
self.reservation_admin.is_active = True
self.reservation_admin.cancelation_date = None
self.reservation_admin.cancelation_reason = None
self.retreat2.seats = 400
self.retreat2.save()
def test_delete_not_owner(self):
"""
Ensure that a user can't delete a reservation that he doesn't own.
"""
self.client.force_authenticate(user=self.user)
response = self.client.delete(
reverse(
'retreat:reservation-detail',
kwargs={'pk': self.reservation_admin.id},
),
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
@responses.activate
def test_delete_reservation_of_user_as_admin_no_refundable(self):
"""
Ensure that an admin can cancel the reservations of a user.
This cancelation does not respect 'refundable', the user
will not be refunded.
The user won't receive any email.
"""
self.client.force_authenticate(user=self.admin)
self.reservation.refundable = False
self.reservation.save()
FIXED_TIME = datetime(2000, 1, 10, tzinfo=LOCAL_TIMEZONE)
with mock.patch(
'django.utils.timezone.now', return_value=FIXED_TIME):
response = self.client.delete(
reverse(
'retreat:reservation-detail',
kwargs={'pk': self.reservation.pk},
),
)
self.assertEqual(
response.status_code,
status.HTTP_204_NO_CONTENT,
response.content
)
self.reservation.refresh_from_db()
self.assertFalse(self.reservation.is_active)
self.assertEqual(self.reservation.cancelation_reason, 'A')
self.assertEqual(self.reservation.cancelation_action, 'N')
self.assertEqual(self.reservation.cancelation_date, FIXED_TIME)
self.reservation.is_active = True
self.reservation.cancelation_date = None
self.reservation.cancelation_reason = None
self.assertEqual(len(mail.outbox), 0)
self.reservation.refundable = True
self.reservation.save()
def test_delete_orderline_quantity_too_big(self):
"""
Ensure that a user can't delete a reservation if the orderline
containing it has a quatity bigger than 1.
"""
self.client.force_authenticate(user=self.admin)
self.order_line.quantity = 2
self.order_line.save()
response = self.client.delete(
reverse(
'retreat:reservation-detail',
kwargs={'pk': self.reservation_admin.id},
),
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
content = {
'non_field_errors': [
"The order containing this reservation has a quantity "
"bigger than 1. Please contact the support team."
]
}
self.order_line.quantity = 1
self.order_line.save()
@responses.activate
def test_delete_twice(self):
"""
Ensure that a user can delete one of his reservations.
"""
self.client.force_authenticate(user=self.user)
responses.add(
responses.POST,
"http://example.com/cardpayments/v1/accounts/0123456789/"
"settlements/1/refunds",
json=SAMPLE_REFUND_RESPONSE,
status=200
)
response = self.client.delete(
reverse(
'retreat:reservation-detail',
kwargs={'pk': self.reservation.id},
),
)
self.assertEqual(
response.status_code,
status.HTTP_204_NO_CONTENT,
response.content
)
self.reservation.refresh_from_db()
self.assertFalse(self.reservation.is_active)
self.assertEqual(self.reservation.cancelation_reason, 'U')
self.assertEqual(self.reservation.cancelation_action, 'R')
self.reservation.is_active = True
self.reservation.cancelation_date = None
self.reservation.cancelation_reason = None
self.assertEqual(len(mail.outbox), 1)
@responses.activate
def test_delete_refund_too_fast(self):
"""
Ensure that a user can't get a refund if the order payment has not been
processed completely.
"""
self.client.force_authenticate(user=self.user)
responses.add(
responses.POST,
"http://example.com/cardpayments/v1/accounts/0123456789/"
"settlements/1/refunds",
json=SAMPLE_NO_AMOUNT_TO_REFUND,
status=400
)
FIXED_TIME = datetime(2018, 1, 1, tzinfo=LOCAL_TIMEZONE)
with mock.patch(
'django.utils.timezone.now', return_value=FIXED_TIME):
response = self.client.delete(
reverse(
'retreat:reservation-detail',
kwargs={'pk': self.reservation.id},
),
)
self.assertEqual(
response.status_code,
status.HTTP_400_BAD_REQUEST,
response.content
)
content = {
'non_field_errors': [
"The order has not been charged yet. Try again later."
]
}
self.assertEqual(
json.loads(response.content).get('non_field_errors'),
content.get('non_field_errors'))
self.reservation.refresh_from_db()
self.assertTrue(self.reservation.is_active)
self.assertEqual(self.reservation.cancelation_reason, None)
self.assertEqual(self.reservation.cancelation_action, None)
self.assertEqual(self.reservation.cancelation_date, None)
self.reservation.is_active = True
self.reservation.cancelation_date = None
self.reservation.cancelation_reason = None
self.assertEqual(len(mail.outbox), 0)
@responses.activate
def test_delete_refund_error(self):
"""
Ensure that a user can cancel one of his retreat reservations.
By canceling 'min_day_refund' days or more before the event, the user
will be refunded 'refund_rate'% of the price paid.
The user will receive an email confirming the refund or inviting the
user to contact the support if payment informations are no longer
valid.
If the user cancels less than 'min_day_refund' days before the event,
no refund is made.
"""
self.client.force_authenticate(user=self.user)
responses.add(
responses.POST,
"http://example.com/cardpayments/v1/accounts/0123456789/"
"settlements/1/refunds",
json=UNKNOWN_EXCEPTION,
status=400
)
FIXED_TIME = datetime(2018, 1, 1, tzinfo=LOCAL_TIMEZONE)
with mock.patch(
'django.utils.timezone.now', return_value=FIXED_TIME):
response = self.client.delete(
reverse(
'retreat:reservation-detail',
kwargs={'pk': self.reservation.id},
),
)
self.assertEqual(
response.status_code,
status.HTTP_400_BAD_REQUEST,
response.content
)
content = {
'message': "The request could not be processed."
}
# Receiving a 'bytes' object, which is probably wrong...
# self.assertEqual(json.dumps(response.content), content)
self.reservation.refresh_from_db()
self.assertTrue(self.reservation.is_active)
self.assertEqual(self.reservation.cancelation_reason, None)
self.assertEqual(self.reservation.cancelation_action, None)
self.assertEqual(self.reservation.cancelation_date, None)
self.reservation.is_active = True
self.reservation.cancelation_date = None
self.reservation.cancelation_reason = None
self.assertEqual(len(mail.outbox), 0)
@override_settings(
LOCAL_SETTINGS={
"EMAIL_SERVICE": True,
}
)
def test_remind_users(self):
self.client.force_authenticate(user=self.admin)
FIXED_TIME = datetime(2130, 1, 10, tzinfo=LOCAL_TIMEZONE)
with mock.patch(
'django.utils.timezone.now', return_value=FIXED_TIME):
response = self.client.get(
reverse(
'retreat:retreat-remind-users',
kwargs={'pk': self.retreat.id},
),
)
self.assertEqual(
response.status_code,
status.HTTP_200_OK,
response.content
)
MAIL_SERVICE = settings.ANYMAIL
template = MAIL_SERVICE["TEMPLATES"].get('REMINDER_PHYSICAL_RETREAT')
self.assertTrue(
EmailLog.objects.filter(
user_email=self.user.email,
type_email='Template #' + str(template)
)
)
self.assertEqual(
EmailLog.objects.filter(
user_email=self.user.email,
type_email='Template #' + str(template)
)[0].nb_email_sent,
1
)
| 32.217949
| 79
| 0.580506
| 4,135
| 40,208
| 5.486578
| 0.09867
| 0.06215
| 0.038965
| 0.032133
| 0.794552
| 0.765549
| 0.731564
| 0.706484
| 0.688324
| 0.667343
| 0
| 0.016572
| 0.320135
| 40,208
| 1,247
| 80
| 32.243785
| 0.81336
| 0.086873
| 0
| 0.628979
| 0
| 0
| 0.143114
| 0.033387
| 0
| 0
| 0
| 0
| 0.103183
| 1
| 0.030735
| false
| 0.001098
| 0.021954
| 0
| 0.054885
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
d3fab1e8ef3de9c3492e68d35b4d541a197efdee
| 6,300
|
py
|
Python
|
stage/configuration/test_opc_ua_client_origin.py
|
streamsets/datacollector-tests
|
6c3e908768e1d4a586e9183e2141096921ecd5be
|
[
"Apache-2.0"
] | 14
|
2019-03-04T10:12:39.000Z
|
2021-11-24T16:17:09.000Z
|
stage/configuration/test_opc_ua_client_origin.py
|
Pragatibs/datacollector-tests
|
aac53b2f0e056009ef0e437c8430651e3cf4d502
|
[
"Apache-2.0"
] | 48
|
2019-03-08T14:59:06.000Z
|
2021-08-13T14:49:56.000Z
|
stage/configuration/test_opc_ua_client_origin.py
|
Pragatibs/datacollector-tests
|
aac53b2f0e056009ef0e437c8430651e3cf4d502
|
[
"Apache-2.0"
] | 23
|
2018-09-24T20:49:17.000Z
|
2021-11-24T16:17:11.000Z
|
# Copyright 2021 StreamSets Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from streamsets.testframework.decorators import stub
@stub
@pytest.mark.parametrize('stage_attributes', [{'nodeid_fetch_mode': 'MANUAL'}])
def test_(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_application_name(sdc_builder, sdc_executor):
pass
@stub
def test_application_uri(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_default_cipher_suites': False, 'use_tls': True}])
def test_cipher_suites(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_client_private_key_alias(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_tls': True}])
def test_keystore_file(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_tls': True}])
def test_keystore_key_algorithm(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_tls': True}])
def test_keystore_password(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'keystore_type': 'JKS', 'use_tls': True},
{'keystore_type': 'PKCS12', 'use_tls': True}])
def test_keystore_type(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_max_array_length(sdc_builder, sdc_executor):
pass
@stub
def test_max_chunk_count(sdc_builder, sdc_executor):
pass
@stub
def test_max_chunk_size(sdc_builder, sdc_executor):
pass
@stub
def test_max_message_size(sdc_builder, sdc_executor):
pass
@stub
def test_max_string_length(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'nodeid_fetch_mode': 'BROWSE'},
{'nodeid_fetch_mode': 'FILE'},
{'nodeid_fetch_mode': 'MANUAL'}])
def test_nodeid_fetch_mode(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'nodeid_fetch_mode': 'FILE'}])
def test_nodeid_file_path(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'nodeid_fetch_mode': 'BROWSE'}])
def test_nodeid_refresh_interval_in_sec(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'on_record_error': 'DISCARD'},
{'on_record_error': 'STOP_PIPELINE'},
{'on_record_error': 'TO_ERROR'}])
def test_on_record_error(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'processing_mode': 'POLLING'}])
def test_polling_interval_in_ms(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'processing_mode': 'BROWSE_NODES'},
{'processing_mode': 'POLLING'},
{'processing_mode': 'SUBSCRIBE'}])
def test_processing_mode(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_request_timeout(sdc_builder, sdc_executor):
pass
@stub
def test_resource_url(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'nodeid_fetch_mode': 'BROWSE'}])
def test_root_node_identifier(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'nodeid_fetch_mode': 'BROWSE', 'root_node_identifier_type': 'NUMERIC'},
{'nodeid_fetch_mode': 'BROWSE', 'root_node_identifier_type': 'OPAQUE'},
{'nodeid_fetch_mode': 'BROWSE', 'root_node_identifier_type': 'STRING'},
{'nodeid_fetch_mode': 'BROWSE', 'root_node_identifier_type': 'UUID'}])
def test_root_node_identifier_type(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'nodeid_fetch_mode': 'BROWSE'}])
def test_root_node_namespace_index(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'security_policy': 'BASIC_128_RSA_15'},
{'security_policy': 'BASIC_256'},
{'security_policy': 'BASIC_256_SHA_256'},
{'security_policy': 'NONE'}])
def test_security_policy(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_session_timeout(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_default_protocols': False, 'use_tls': True}])
def test_transport_protocols(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_default_cipher_suites': False, 'use_tls': True},
{'use_default_cipher_suites': True, 'use_tls': True}])
def test_use_default_cipher_suites(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_default_protocols': False, 'use_tls': True},
{'use_default_protocols': True, 'use_tls': True}])
def test_use_default_protocols(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_tls': False}, {'use_tls': True}])
def test_use_tls(sdc_builder, sdc_executor, stage_attributes):
pass
| 30.288462
| 118
| 0.683968
| 756
| 6,300
| 5.333333
| 0.19709
| 0.14881
| 0.09995
| 0.161458
| 0.726935
| 0.712054
| 0.681052
| 0.664683
| 0.593998
| 0.576389
| 0
| 0.004775
| 0.202222
| 6,300
| 207
| 119
| 30.434783
| 0.797453
| 0.087619
| 0
| 0.523077
| 0
| 0
| 0.218559
| 0.041514
| 0
| 0
| 0
| 0
| 0
| 1
| 0.238462
| false
| 0.246154
| 0.015385
| 0
| 0.253846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
3124893d2203332b10f97267863ef9841e3a7b2d
| 3,635
|
py
|
Python
|
test/testOpSVM.py
|
burgerdev/hostload
|
93142628bb32923c5e6f3a8b791488d72a5c9077
|
[
"MIT"
] | null | null | null |
test/testOpSVM.py
|
burgerdev/hostload
|
93142628bb32923c5e6f3a8b791488d72a5c9077
|
[
"MIT"
] | null | null | null |
test/testOpSVM.py
|
burgerdev/hostload
|
93142628bb32923c5e6f3a8b791488d72a5c9077
|
[
"MIT"
] | null | null | null |
import unittest
import numpy as np
import vigra
from sklearn.svm import SVC
from sklearn.svm import SVR
from lazyflow.graph import Graph
from tsdl.classifiers import OpSVMTrain
from tsdl.classifiers import OpSVMPredict
class TestOpSVM(unittest.TestCase):
def setUp(self):
X = np.array([[-1, -1], [-2, -1], [1, 1], [2, 1]])
X = vigra.taggedView(X, axistags='tc')
y = np.array([[1, 0], [1, 0], [0, 1], [0, 1]])
y = vigra.taggedView(y, axistags='tc')
self.X = X
self.y = y
X = np.zeros((0, 2))
X = vigra.taggedView(X, axistags='tc')
y = np.zeros((0, 2))
y = vigra.taggedView(y, axistags='tc')
self.Xvalid = X
self.yvalid = y
def testTrain(self):
op = OpSVMTrain.build(dict(), graph=Graph())
op.Train.resize(2)
op.Train[0].setValue(self.X)
op.Train[1].setValue(self.y)
op.Valid.resize(2)
op.Valid[0].setValue(self.Xvalid)
op.Valid[1].setValue(self.yvalid)
svc = op.Classifier[0].wait()[0]
assert isinstance(svc, SVC), "was {}".format(type(svc))
def testPredict(self):
g = Graph()
op = OpSVMTrain(graph=g)
op.Train.resize(2)
op.Train[0].setValue(self.X)
op.Train[1].setValue(self.y)
op.Valid.resize(2)
op.Valid[0].setValue(self.Xvalid)
op.Valid[1].setValue(self.yvalid)
svc = op.Classifier[0].wait()[0]
assert isinstance(svc, SVC), "was {}".format(type(svc))
pred = OpSVMPredict.build(dict(), graph=g)
pred.Classifier.connect(op.Classifier)
pred.Input.setValue(self.X)
pred.Target.connect(op.Train[1])
res = pred.Output[...].wait()
np.testing.assert_array_equal(res, self.y.view(np.ndarray))
pred.Classifier.disconnect()
pred.Classifier.setValue([None])
pred.Input.setValue(None)
pred.Input.setValue(self.X)
with self.assertRaises(ValueError):
pred.Output[...].wait()
class TestOpSVR(unittest.TestCase):
def setUp(self):
n = 100
np.random.seed(1)
X = np.random.random(size=(n, 2))
X = vigra.taggedView(X, axistags='tc')
y = X.sum(axis=1).withAxes(*'tc')
self.X = X
self.y = y
X = np.zeros((0, 2))
X = vigra.taggedView(X, axistags='tc')
y = np.zeros((0, 1))
y = vigra.taggedView(y, axistags='tc')
self.Xvalid = X
self.yvalid = y
def testTrain(self):
op = OpSVMTrain(graph=Graph())
op.Train.resize(2)
op.Train[0].setValue(self.X)
op.Train[1].setValue(self.y)
op.Valid.resize(2)
op.Valid[0].setValue(self.Xvalid)
op.Valid[1].setValue(self.yvalid)
svr = op.Classifier[0].wait()[0]
assert isinstance(svr, SVR), "was {}".format(type(svr))
def testPredict(self):
g = Graph()
op = OpSVMTrain(graph=g)
op.Train.resize(2)
op.Train[0].setValue(self.X)
op.Train[1].setValue(self.y)
op.Valid.resize(2)
op.Valid[0].setValue(self.Xvalid)
op.Valid[1].setValue(self.yvalid)
svr = op.Classifier[0].wait()[0]
assert isinstance(svr, SVR), "was {}".format(type(svc))
pred = OpSVMPredict(graph=g)
pred.Classifier.connect(op.Classifier)
pred.Input.setValue(self.X)
pred.Target.connect(op.Train[1])
res = pred.Output[...].wait()
np.testing.assert_array_almost_equal(res, self.y.view(np.ndarray),
decimal=1)
| 26.925926
| 74
| 0.562586
| 491
| 3,635
| 4.154786
| 0.158859
| 0.111765
| 0.035294
| 0.033333
| 0.797549
| 0.743137
| 0.72598
| 0.70049
| 0.671569
| 0.657843
| 0
| 0.024724
| 0.276754
| 3,635
| 134
| 75
| 27.126866
| 0.751236
| 0
| 0
| 0.673469
| 0
| 0
| 0.011007
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 1
| 0.061224
| false
| 0
| 0.081633
| 0
| 0.163265
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
3128578becc637a0ea33040e2b904178757d4e33
| 175
|
py
|
Python
|
analysis/__init__.py
|
schurterb/kmeansconv
|
74912b9fdfc1e688be737ba0117461ef8959207b
|
[
"Unlicense"
] | 2
|
2016-12-08T02:37:00.000Z
|
2017-07-21T01:02:39.000Z
|
analysis/__init__.py
|
schurterb/kmeansconv
|
74912b9fdfc1e688be737ba0117461ef8959207b
|
[
"Unlicense"
] | null | null | null |
analysis/__init__.py
|
schurterb/kmeansconv
|
74912b9fdfc1e688be737ba0117461ef8959207b
|
[
"Unlicense"
] | null | null | null |
#Init for analysis functions
from .imageScan import display
from .visualizeStats import showStats
from .analyzer import Analyzer
__all__ = ['display','showStats','Analyzer']
| 25
| 44
| 0.8
| 20
| 175
| 6.8
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 175
| 7
| 44
| 25
| 0.877419
| 0.154286
| 0
| 0
| 0
| 0
| 0.162162
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
3149416ad77227acea7fefaacc2a13642f23922e
| 231
|
py
|
Python
|
cornflow-server/cornflow/schemas/__init__.py
|
ggsdc/corn
|
4c17c46a70f95b8882bcb6a55ef7daa1f69e0456
|
[
"MIT"
] | 2
|
2020-07-09T20:58:47.000Z
|
2020-07-20T20:40:46.000Z
|
cornflow-server/cornflow/schemas/__init__.py
|
baobabsoluciones/cornflow
|
bd7cae22107e5fe148704d5f41d4f58f9c410b40
|
[
"Apache-2.0"
] | 2
|
2022-03-31T08:42:10.000Z
|
2022-03-31T12:05:23.000Z
|
cornflow-server/cornflow/schemas/__init__.py
|
ggsdc/corn
|
4c17c46a70f95b8882bcb6a55ef7daa1f69e0456
|
[
"MIT"
] | null | null | null |
"""
Initialization file for the schemas module
"""
from .dag import DeployedDAGSchema
from .execution import ExecutionSchema
from .instance import InstanceSchema
from .user import UserSchema
# from .model_json import DataSchema
| 19.25
| 42
| 0.809524
| 27
| 231
| 6.888889
| 0.703704
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138528
| 231
| 11
| 43
| 21
| 0.934673
| 0.337662
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
315a6218af6a98ca3b85fcab51a792dfd448b872
| 116
|
py
|
Python
|
wp6-virtualfolder/VRE-master/addvagrantuser.py
|
TomasKulhanek/west-life-wp6
|
8ee704235b7b087c6a144f0cb582a77693690a7f
|
[
"MIT"
] | null | null | null |
wp6-virtualfolder/VRE-master/addvagrantuser.py
|
TomasKulhanek/west-life-wp6
|
8ee704235b7b087c6a144f0cb582a77693690a7f
|
[
"MIT"
] | 27
|
2018-06-11T09:13:03.000Z
|
2019-04-04T06:51:16.000Z
|
wp6-virtualfolder/VRE-master/addvagrantuser.py
|
h2020-westlife-eu/virtualfolder
|
8ee704235b7b087c6a144f0cb582a77693690a7f
|
[
"MIT"
] | null | null | null |
from django.contrib.auth.models import User
user=User.objects.create_user('vagrant',password='vagrant')
user.save()
| 29
| 59
| 0.801724
| 17
| 116
| 5.411765
| 0.705882
| 0.173913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.051724
| 116
| 3
| 60
| 38.666667
| 0.836364
| 0
| 0
| 0
| 0
| 0
| 0.12069
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.333333
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 4
|
316623c38e2a6620fe3ffa065667b4402467c2d7
| 733
|
py
|
Python
|
utils/__init__.py
|
XiYe20/VPTR
|
df01b60333975cd8c403c5b228689cbb5c763ae6
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
XiYe20/VPTR
|
df01b60333975cd8c403c5b228689cbb5c763ae6
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
XiYe20/VPTR
|
df01b60333975cd8c403c5b228689cbb5c763ae6
|
[
"MIT"
] | null | null | null |
from .dataset import KTHDataset, VidCenterCrop, VidPad, VidResize, BAIRDataset, VidCrop, MovingMNISTDataset, ClipDataset
from .dataset import VidRandomHorizontalFlip, VidRandomVerticalFlip
from .dataset import VidToTensor, VidNormalize, VidReNormalize, get_dataloader
from .misc import NestedTensor, set_seed
from .train_summary import save_ckpt, load_ckpt, init_loss_dict, write_summary, resume_training, write_code_files
from .train_summary import visualize_batch_clips, parameters_count, AverageMeters, init_loss_dict, write_summary, BatchAverageMeter, gather_AverageMeters
from .metrics import PSNR, SSIM, pred_ave_metrics, MSEScore
from .position_encoding import PositionEmbeddding2D, PositionEmbeddding1D, PositionEmbeddding3D
| 81.444444
| 153
| 0.869031
| 82
| 733
| 7.5
| 0.658537
| 0.053659
| 0.082927
| 0.071545
| 0.078049
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004458
| 0.081855
| 733
| 8
| 154
| 91.625
| 0.909361
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
316ae3d0cf0d520731316718547625557cca8e74
| 23
|
py
|
Python
|
mlspeclib/_version.py
|
mlspec/mlspec-lib
|
4d7fcca3067e228c9396bf5811f572310487cca0
|
[
"MIT"
] | 12
|
2020-04-22T02:41:17.000Z
|
2020-11-29T12:36:26.000Z
|
docassemble_webapp/docassemble/webapp/__init__.py
|
ttamg/docassemble
|
1429fbbddfeb60b9f8fe74c928a479236d6a6113
|
[
"MIT"
] | 12
|
2020-04-01T23:31:41.000Z
|
2020-11-19T01:32:11.000Z
|
docassemble_webapp/docassemble/webapp/__init__.py
|
ttamg/docassemble
|
1429fbbddfeb60b9f8fe74c928a479236d6a6113
|
[
"MIT"
] | 5
|
2020-03-23T16:32:36.000Z
|
2020-06-15T16:07:28.000Z
|
__version__ = "1.1.15"
| 11.5
| 22
| 0.652174
| 4
| 23
| 2.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0.130435
| 23
| 1
| 23
| 23
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
318df9f59aaadba77482a3529c1543fa8e307e59
| 178
|
py
|
Python
|
src/kuddl/version.py
|
ktbarrett/kuddl
|
1c8a3e436e7c38de82c3aaaec72463f54afd43bb
|
[
"MIT"
] | null | null | null |
src/kuddl/version.py
|
ktbarrett/kuddl
|
1c8a3e436e7c38de82c3aaaec72463f54afd43bb
|
[
"MIT"
] | null | null | null |
src/kuddl/version.py
|
ktbarrett/kuddl
|
1c8a3e436e7c38de82c3aaaec72463f54afd43bb
|
[
"MIT"
] | null | null | null |
major = "0"
minor = "0"
patch = "0"
release = "dev0"
if release != "":
__version__ = f"{major}.{minor}.{patch}.{release}"
else:
__version__ = f"{major}.{minor}.{patch}"
| 17.8
| 54
| 0.578652
| 22
| 178
| 4.318182
| 0.454545
| 0.168421
| 0.273684
| 0.378947
| 0.484211
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027397
| 0.179775
| 178
| 9
| 55
| 19.777778
| 0.623288
| 0
| 0
| 0
| 0
| 0
| 0.353933
| 0.314607
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
31957a235461e8e4c92245c63ab4f28c6fa14d21
| 183
|
py
|
Python
|
backend/src/baserow/contrib/database/api/rows/errors.py
|
orlandoblooms/baserow
|
79a77cad4dd05520339261d4f4c6440c8b04f9d0
|
[
"MIT"
] | null | null | null |
backend/src/baserow/contrib/database/api/rows/errors.py
|
orlandoblooms/baserow
|
79a77cad4dd05520339261d4f4c6440c8b04f9d0
|
[
"MIT"
] | null | null | null |
backend/src/baserow/contrib/database/api/rows/errors.py
|
orlandoblooms/baserow
|
79a77cad4dd05520339261d4f4c6440c8b04f9d0
|
[
"MIT"
] | null | null | null |
from rest_framework.status import HTTP_404_NOT_FOUND
ERROR_ROW_DOES_NOT_EXIST = (
"ERROR_ROW_DOES_NOT_EXIST",
HTTP_404_NOT_FOUND,
"The requested row does not exist.",
)
| 20.333333
| 52
| 0.770492
| 29
| 183
| 4.344828
| 0.517241
| 0.166667
| 0.238095
| 0.357143
| 0.31746
| 0
| 0
| 0
| 0
| 0
| 0
| 0.039216
| 0.163934
| 183
| 8
| 53
| 22.875
| 0.784314
| 0
| 0
| 0
| 0
| 0
| 0.311475
| 0.131148
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
31ad9dd56a8236ba8a1eb9e51e5cf9171ce56abf
| 135
|
py
|
Python
|
example/mqtt/testing_utils.py
|
lvijnck/tavern
|
067a75dd9e845136b461b2fc443be29ecee9273a
|
[
"MIT"
] | 889
|
2017-11-04T11:43:36.000Z
|
2022-03-31T11:37:31.000Z
|
example/mqtt/testing_utils.py
|
lvijnck/tavern
|
067a75dd9e845136b461b2fc443be29ecee9273a
|
[
"MIT"
] | 636
|
2017-11-04T11:43:02.000Z
|
2022-03-31T00:02:04.000Z
|
example/mqtt/testing_utils.py
|
lvijnck/tavern
|
067a75dd9e845136b461b2fc443be29ecee9273a
|
[
"MIT"
] | 181
|
2017-12-05T13:51:42.000Z
|
2022-03-25T11:34:58.000Z
|
def message_says_hello(msg):
"""Make sure that the response was friendly"""
assert msg.payload.get("message") == "hello world"
| 33.75
| 54
| 0.696296
| 19
| 135
| 4.842105
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162963
| 135
| 3
| 55
| 45
| 0.814159
| 0.296296
| 0
| 0
| 0
| 0
| 0.202247
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
31e70b1cada2a2a69cffad09500af31756774e49
| 210
|
py
|
Python
|
tests/data/credentials.py
|
Schveitzer/selenium-python-bdd-behave-example
|
2dc16006a27565b2aac3712292f4449f8d162c7d
|
[
"Apache-2.0"
] | null | null | null |
tests/data/credentials.py
|
Schveitzer/selenium-python-bdd-behave-example
|
2dc16006a27565b2aac3712292f4449f8d162c7d
|
[
"Apache-2.0"
] | null | null | null |
tests/data/credentials.py
|
Schveitzer/selenium-python-bdd-behave-example
|
2dc16006a27565b2aac3712292f4449f8d162c7d
|
[
"Apache-2.0"
] | null | null | null |
import os
import json
def load_credentials():
with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'credentials.json')) as data:
credentials = json.load(data)
return credentials
| 23.333333
| 100
| 0.714286
| 29
| 210
| 5
| 0.551724
| 0.124138
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161905
| 210
| 8
| 101
| 26.25
| 0.823864
| 0
| 0
| 0
| 0
| 0
| 0.07619
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
31e895549de4df404df8fee855b91312521a8f65
| 162
|
py
|
Python
|
AtCoder/ABC/A/page-2/078A.py
|
Nishi05/Competitive-programming
|
e59a6755b706d9d5c1f359f4511d92c114e6a94e
|
[
"MIT"
] | null | null | null |
AtCoder/ABC/A/page-2/078A.py
|
Nishi05/Competitive-programming
|
e59a6755b706d9d5c1f359f4511d92c114e6a94e
|
[
"MIT"
] | null | null | null |
AtCoder/ABC/A/page-2/078A.py
|
Nishi05/Competitive-programming
|
e59a6755b706d9d5c1f359f4511d92c114e6a94e
|
[
"MIT"
] | null | null | null |
# asciiコード変換では 文字->数字 = ord() 数字->文字 = chr()
a, b = map(str, input().split())
if a == b:
print("=")
elif ord(a) < ord(b):
print("<")
else:
print(">")
| 18
| 44
| 0.487654
| 25
| 162
| 3.16
| 0.6
| 0.050633
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.228395
| 162
| 8
| 45
| 20.25
| 0.632
| 0.259259
| 0
| 0
| 0
| 0
| 0.025424
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.428571
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
9ed276b79d60b4f7c00a32df1d0d275d29553a8c
| 116
|
py
|
Python
|
sysmanager/modules/comms/hw/__init__.py
|
yurkis/whitebox
|
b40e377ed9fc29d5f0c9c96677c190b520c9d188
|
[
"MIT"
] | null | null | null |
sysmanager/modules/comms/hw/__init__.py
|
yurkis/whitebox
|
b40e377ed9fc29d5f0c9c96677c190b520c9d188
|
[
"MIT"
] | null | null | null |
sysmanager/modules/comms/hw/__init__.py
|
yurkis/whitebox
|
b40e377ed9fc29d5f0c9c96677c190b520c9d188
|
[
"MIT"
] | null | null | null |
from . import *
from . import hw
commands={"hw":{"subcomms":
{"info":{"fn":hw.do_hw_info}}
},
}
| 16.571429
| 32
| 0.5
| 14
| 116
| 4
| 0.571429
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.275862
| 116
| 7
| 33
| 16.571429
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0.136752
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
73005b0829fb2bf45835104950fd1c44b79955f7
| 579
|
py
|
Python
|
runner_service/controllers/__init__.py
|
ktdreyer/ansible-runner-service
|
1a177b69c170b072328ffc365903812bc2ba7c3c
|
[
"Apache-2.0"
] | 13
|
2018-08-14T06:45:42.000Z
|
2022-02-05T14:57:26.000Z
|
runner_service/controllers/__init__.py
|
ktdreyer/ansible-runner-service
|
1a177b69c170b072328ffc365903812bc2ba7c3c
|
[
"Apache-2.0"
] | 52
|
2018-08-23T05:37:14.000Z
|
2019-01-22T20:44:19.000Z
|
runner_service/controllers/__init__.py
|
ktdreyer/ansible-runner-service
|
1a177b69c170b072328ffc365903812bc2ba7c3c
|
[
"Apache-2.0"
] | 9
|
2018-08-14T13:31:56.000Z
|
2021-04-30T05:06:57.000Z
|
from .playbooks import (ListPlaybooks, # noqa: F401
PlaybookState,
StartPlaybook,
StartTaggedPlaybook)
from .api import API # noqa: F401
from .hosts import Hosts, HostMgmt, HostDetails # noqa: F401
from .jobs import ListEvents, GetEvent # noqa: F401
from .groups import ListGroups, ManageGroups # noqa: F401
from .metrics import PrometheusMetrics # noqa: F401
from .login import Login # noqa: F401
| 48.25
| 64
| 0.53886
| 49
| 579
| 6.367347
| 0.44898
| 0.179487
| 0.192308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061404
| 0.409326
| 579
| 11
| 65
| 52.636364
| 0.850877
| 0.131261
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.7
| 0
| 0.7
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
730cbb26f93a3b47b31e8e20b57f3489592316e9
| 1,334
|
py
|
Python
|
lib/systems/beta-d-mannopyranose.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
lib/systems/beta-d-mannopyranose.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
lib/systems/beta-d-mannopyranose.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
import pulsar as psr
def load_ref_system():
    """Build and return a beta-d-mannopyranose system, as found in the
    IQMol fragment library.  All credit to https://github.com/nutjunkie/IQmol
    """
    # XYZ-style geometry: element symbol followed by Cartesian coordinates.
    geometry = """
      C         -0.4820         1.3845        -0.8945
      O         -1.2325         0.5004        -0.0247
      C         -0.8609        -0.8899        -0.0294
      C          0.6151        -1.0100         0.4077
      O          1.0718        -2.3614         0.2503
      C          1.5151        -0.2196        -0.5667
      C          1.0542         1.2450        -0.6995
      O          1.3308         1.9691         0.5036
      O          2.8385        -0.1002        -0.0241
      C         -1.8527        -1.5043         0.9740
      O         -3.1161        -1.7022         0.3397
      O         -0.9196         2.6486        -0.5014
      H         -1.0168        -1.3119        -1.0480
      H          0.7684        -0.6735         1.4565
      H          1.5812        -0.7409        -1.5491
      H          1.5981         1.7558        -1.5291
      H         -0.8210         1.2472        -1.9401
      H         -0.6294         2.8427         0.4368
      H          2.2368         1.7262         0.8338
      H          3.1837        -0.9973         0.2027
      H          0.6927        -2.9419         0.9418
      H         -1.9774        -0.8508         1.8591
      H         -1.5618        -2.5251         1.2818
      H         -3.4866        -0.8291         0.0639
      """
    return psr.make_system(geometry)
| 41.6875
| 76
| 0.401049
| 199
| 1,334
| 2.673367
| 0.532663
| 0.018797
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.535714
| 0.496252
| 1,334
| 31
| 77
| 43.032258
| 0.255952
| 0.087706
| 0
| 0
| 0
| 0
| 0.929825
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035714
| true
| 0
| 0.035714
| 0
| 0.107143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
732f7a8659797c2e33b9fa3ae23e882e14ba06a9
| 126
|
py
|
Python
|
terrascript/resource/poseidon/ct.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
terrascript/resource/poseidon/ct.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
terrascript/resource/poseidon/ct.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# terrascript/resource/poseidon/ct.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:14:51 UTC)
# The generator emitted no resource classes for this provider, so the
# explicit empty __all__ keeps `from ... import *` from exporting anything.
__all__ = []
| 25.2
| 73
| 0.753968
| 19
| 126
| 4.789474
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 0.111111
| 126
| 4
| 74
| 31.5
| 0.705357
| 0.849206
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
734f8bdb507183af4882f504ad4022bdf0195e39
| 177
|
py
|
Python
|
aiographfix/__init__.py
|
Yyonging/aiograph
|
78d291f9e1157720c949e336a9aa2711ad707285
|
[
"MIT"
] | 1
|
2020-06-16T03:06:40.000Z
|
2020-06-16T03:06:40.000Z
|
aiographfix/__init__.py
|
Yyonging/aiograph
|
78d291f9e1157720c949e336a9aa2711ad707285
|
[
"MIT"
] | null | null | null |
aiographfix/__init__.py
|
Yyonging/aiograph
|
78d291f9e1157720c949e336a9aa2711ad707285
|
[
"MIT"
] | null | null | null |
from . import types
from . import utils
from .api import Telegraph
from .utils import exceptions
# Public names re-exported by `from aiographfix import *`.
__all__ = ['Telegraph', 'types', 'utils', 'exceptions']
# Package version string.
__version__ = '0.2.2'
| 19.666667
| 55
| 0.717514
| 23
| 177
| 5.173913
| 0.478261
| 0.168067
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02
| 0.152542
| 177
| 8
| 56
| 22.125
| 0.773333
| 0
| 0
| 0
| 0
| 0
| 0.19209
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
735563e406a5ff6e89d4eb6c702591e20ed89727
| 33
|
py
|
Python
|
exercises/exc_02_01.py
|
deep-diver/test-course22
|
2d2668d3b9a54546c681bc27efbbc9b326af1ab1
|
[
"MIT"
] | null | null | null |
exercises/exc_02_01.py
|
deep-diver/test-course22
|
2d2668d3b9a54546c681bc27efbbc9b326af1ab1
|
[
"MIT"
] | null | null | null |
exercises/exc_02_01.py
|
deep-diver/test-course22
|
2d2668d3b9a54546c681bc27efbbc9b326af1ab1
|
[
"MIT"
] | 1
|
2020-07-01T21:46:44.000Z
|
2020-07-01T21:46:44.000Z
|
# Print "Hello" to the screen.  (Translated from Korean.)
# NOTE: `_____` is an intentional fill-in-the-blank exercise placeholder
# for the learner to replace — presumably with the built-in `print`.
_____('Hello')
| 16.5
| 18
| 0.69697
| 5
| 33
| 3.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151515
| 33
| 2
| 19
| 16.5
| 0.642857
| 0.484848
| 0
| 0
| 0
| 0
| 0.3125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b41495082ba1dd339ad1a3f9cf9700c363a4723c
| 6,413
|
py
|
Python
|
wplay/utils/helpers.py
|
olivier-j/whatsapp-play
|
fc97115125a1ab7f395d76c0414e4bbe56e59de7
|
[
"MIT"
] | 361
|
2019-06-08T05:10:18.000Z
|
2022-01-11T17:45:43.000Z
|
wplay/utils/helpers.py
|
olivier-j/whatsapp-play
|
fc97115125a1ab7f395d76c0414e4bbe56e59de7
|
[
"MIT"
] | 320
|
2019-06-01T07:42:30.000Z
|
2021-05-13T16:24:53.000Z
|
wplay/utils/helpers.py
|
olivier-j/whatsapp-play
|
fc97115125a1ab7f395d76c0414e4bbe56e59de7
|
[
"MIT"
] | 301
|
2019-06-24T13:27:33.000Z
|
2021-09-27T21:39:35.000Z
|
# region IMPORTS
from pathlib import Path
import signal
import psutil
from whaaaaat import style_from_dict, Token
# endregion
# region Whatsapp WEBSITES
# Base URLs: the WhatsApp Web client itself, and the deep-link form used to
# open a chat with a phone number (number is appended to 'wpp_unknown').
websites = {'whatsapp': 'https://web.whatsapp.com/', 'wpp_unknown': 'https://web.whatsapp.com/send?phone='}
# endregion
# region SELECTORS
whatsapp_selectors_dict = {
'login_area': '#app > div > div > div.landing-header',
'new_chat_button': '#side > header div[role="button"] span[data-icon="chat"]',
'search_contact_input_new_chat': '#app > div > div > div > div > span > div > span > div > div > div > label > div > div',
'contact_list_elements_filtered_new_chat': '#app > div > div > div > div > span > div > span > div > div > div > div > div > div > div > div > div > div > div > span > span[title][dir]',
'group_list_elements_filtered_new_chat': '#app > div > div > div > div > span > div > span > div > div > div > div > div > div > div > div > div > div > div > div > span[title][dir]',
'search_contact_input': '#side > div > div > label > div > div',
'chat_list_elements_filtered': '#pane-side > div > div > div > div > div > div > div > div > div > span > span[title][dir]',
'target_focused_title': '#main > header div > div > span[title]',
'message_area': '#main > footer div.selectable-text[contenteditable]',
'last_seen': '#main > header > div > div > span[title]',
'target_chat_header': '#main > header',
'contact_info_page_elements': '#app > div > div > div:nth-child(2) > div:last-of-type > span > div > span > div > div > div:first-child',
'contact_info_page_group_element_heading': '#app > div > div > div:nth-child(2) > div:last-of-type > '
'span > div > span > div > div:nth-child(5)>div>div>div>div:first-child>span',
'contact_info_page_group_elements': '#app > div > div > div:nth-child(2) > div:last-of-type > '
'span > div > span > div > div:nth-child(5)>div:nth-child(2)>div>div',
'contact_info_page_close_button': '#app > div > div > div > div > span > div > span > div > header > div > div > button',
'chat_or_message_search': '#side > div:nth-child(3) > div > label > div > div:last-child',
'chats_groups_messages_elements': '#side > div:last-child > div > div > div > div',
'contact_element': 'span > span > span[class^="matched-text"]',
'group_element': 'div:last-child > div:first-child > div:first-child > div > span > span[class^="matched-text"]',
'attach_file': '#main > header > div > div > div:nth-child(2) > div',
'choose_file': '#main > header > div > div > div > span > div > div > ul > li:nth-child(3) > button',
'send_file': '#app > div > div > div > div > span > div > span > div > div > div > span > div > div > span',
'profile_photo_element': '#side > header > div > div > img',
'about_edit_button_element': '#app > div > div > div > div > span > div > div > div > div:nth-child(4) > div > div > span > div > span',
'about_text_area': '#app > div > div > div > div > span > div > div > div > div:nth-child(4) > div > div > div > div',
'contact_info_page_target_group_name_element': 'div:nth-child(2)>div>div> div:last-of-type',
'contact_info_page_target_group_creation_info_element': ':scope > div:last-child > span',
'contact_info_page_target_group_description_element': ':scope > div:last-child span:first-of-type',
'contact_info_page_target_group_member_elements': ':scope > div:nth-child(4) > div > div',
'invalid_number_ok_button': '#app > div > span> div > span > div > div > div > div > div > div > div',
'target_name_selector': "#main > header > div > div > div > span",
'media_text': "#app > div > div > div > div > span > div > span > div > div > div > div > div > div > div > div > span",
'media_images': "#app > div > div > div > div > span > div > span > div > div > span > div > div > div > div > div > div",
'left_arrow_button': "#app > div > span > div > div > div > div > div > span",
'media_url_img': "#app > div > span:nth-child(3) > div > div > div > div > div > div > div > div > img",
'media_url_vid': "#app > div > span:nth-child(3) > div > div > div > div > div > div > div > div > video",
}
# endregion
# region PATHS
# All wplay state lives under ~/wplay; the subdirectories below are created
# on demand by create_dirs() in this module.
data_folder_path = Path.home() / 'wplay'
logs_path = data_folder_path / 'logs'
log_file_path = logs_path / 'wplay.log'
test_log_file_path = logs_path / 'testwplay.log'
user_data_folder_path = data_folder_path / '.userData'
profile_photos_path = data_folder_path / 'media' / 'profilePhotos'
tracking_folder_path = data_folder_path / 'trackingData'
messages_json_folder_path = data_folder_path / 'messagesJSON' / 'system'
messages_json_path = data_folder_path / 'messagesJSON' / 'messages.json'
open_messages_json_path = data_folder_path / 'messagesJSON' / 'system' / 'openMessages.json'
media_path = data_folder_path / 'media' / 'media'
save_chat_folder_path = data_folder_path / 'savedChats'
audio_file_folder_path = data_folder_path / 'audioFiles'
chatbot_image_folder_path = data_folder_path / 'ChatbotImage'
# endregion
# region MENU STYLES
# Color/style mapping for the whaaaaat interactive prompt components.
menu_style = style_from_dict({
    Token.Separator: '#6C6C6C',
    Token.QuestionMark: '#FF9D00 bold',
    Token.Selected: '#5F819D',
    Token.Pointer: '#FF9D00 bold',
    Token.Instruction: '', # default
    Token.Answer: '#5F819D bold',
    Token.Question: '',
})
# endregion
# region FUNCTIONS
def create_dirs():
    """Create every data directory wplay needs, if it does not exist yet.

    Idempotent: ``exist_ok=True`` makes repeated calls safe, and
    ``parents=True`` also creates missing intermediate directories
    (including ~/wplay itself).
    """
    # One mkdir per unique directory.  The original version called
    # tracking_folder_path and messages_json_folder_path twice each,
    # with inconsistent keyword spacing; the loop removes the duplicates.
    for directory in (
        logs_path,
        user_data_folder_path,
        profile_photos_path,
        tracking_folder_path,
        messages_json_folder_path,
        media_path,
        save_chat_folder_path,
        audio_file_folder_path,
        chatbot_image_folder_path,
    ):
        directory.mkdir(parents=True, exist_ok=True)
def kill_child_processes(parent_pid, sig=signal.SIGTERM):
    """Send *sig* (default SIGTERM) to every descendant of *parent_pid*.

    Does nothing if the parent process no longer exists.
    """
    try:
        parent_proc = psutil.Process(parent_pid)
    except psutil.NoSuchProcess:
        return
    descendants = parent_proc.children(recursive=True)
    # Announcement precedes the signals, mirroring the original behavior.
    print('Process Killed!')
    for child in descendants:
        child.send_signal(sig)
# endregion
| 56.254386
| 190
| 0.662093
| 905
| 6,413
| 4.461878
| 0.18674
| 0.215453
| 0.211738
| 0.181278
| 0.567608
| 0.439326
| 0.36949
| 0.303368
| 0.268945
| 0.260773
| 0
| 0.006151
| 0.188835
| 6,413
| 113
| 191
| 56.752212
| 0.770088
| 0.026976
| 0
| 0.044944
| 0
| 0.235955
| 0.599422
| 0.128333
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022472
| false
| 0
| 0.044944
| 0
| 0.078652
| 0.011236
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b43ebdccf4ca9387d56162b0308e2f6eb397961b
| 72,289
|
py
|
Python
|
python/models.py
|
stwisdom/sista-rnn
|
64ddea177b4bf3efc9504f2106e3ce3f6574b4e0
|
[
"MIT"
] | 10
|
2017-01-10T01:14:30.000Z
|
2020-07-02T19:24:47.000Z
|
python/models.py
|
stwisdom/sista-rnn
|
64ddea177b4bf3efc9504f2106e3ce3f6574b4e0
|
[
"MIT"
] | 1
|
2017-03-23T06:34:58.000Z
|
2017-03-23T06:34:58.000Z
|
python/models.py
|
stwisdom/sista-rnn
|
64ddea177b4bf3efc9504f2106e3ce3f6574b4e0
|
[
"MIT"
] | 9
|
2017-01-10T01:14:31.000Z
|
2019-11-25T08:25:39.000Z
|
import theano, cPickle
import theano.tensor as T
import numpy as np
from fftconv import cufft, cuifft
def initialize_matrix(n_in, n_out, name, rng, init='rand'):
    """Create a theano shared variable holding an (n_in x n_out) weight matrix.

    init='rand'      -- Glorot/Xavier uniform in [-sqrt(6/(n_in+n_out)), +bound]
    init='randSmall' -- the same, scaled by 0.01
    init='identity'  -- identity matrix, zero-padded to the requested shape

    Raises ValueError for any other `init` string.
    """
    if (init == 'rand') or (init == 'randSmall'):
        # Glorot/Xavier uniform bound.  (Originally named `bin`, which
        # shadowed the Python builtin.)
        bound = np.sqrt(6. / (n_in + n_out))
        values = np.asarray(rng.uniform(low=-bound,
                                        high=bound,
                                        size=(n_in, n_out)),
                            dtype=theano.config.floatX)
        if init == 'randSmall':
            values = np.float32(0.01) * values
    elif init == 'identity':
        # Pad with zero rows or columns depending on which dimension is larger.
        if n_in >= n_out:
            values = np.concatenate([np.eye(n_out).astype(theano.config.floatX),
                                     np.zeros((n_in - n_out, n_out)).astype(theano.config.floatX)],
                                    axis=0)
        else:
            values = np.concatenate([np.eye(n_in).astype(theano.config.floatX),
                                     np.zeros((n_in, n_out - n_in)).astype(theano.config.floatX)],
                                    axis=1)
    else:
        raise ValueError("Unknown initialization method ["+init+"]")
    return theano.shared(value=values, name=name)
def initialize_matrix_np(n_in, n_out, rng):
    """Return a Glorot/Xavier-uniform (n_in x n_out) numpy array
    (plain ndarray; no theano shared wrapper)."""
    # Renamed from `bin`, which shadowed the Python builtin.
    bound = np.sqrt(6. / (n_in + n_out))
    values = np.asarray(rng.uniform(low=-bound,
                                    high=bound,
                                    size=(n_in, n_out)),
                        dtype=theano.config.floatX)
    return values
def do_fft(input, n_hidden):
    """Unitary FFT of a batch of complex vectors stored in stacked
    Re/Im form: (I x 2*n_hidden) in, (I x 2*n_hidden) out."""
    # Split the flat Re/Im layout into (batch, n_hidden, 2) for cufft.
    stacked = T.reshape(input, (input.shape[0], 2, n_hidden)).dimshuffle(0, 2, 1)
    # 1/sqrt(n) normalization makes the transform unitary.
    transformed = cufft(stacked) / T.sqrt(n_hidden)
    flattened = T.reshape(transformed.dimshuffle(0, 2, 1),
                          (input.shape[0], 2 * n_hidden))
    return flattened
def do_ifft(input, n_hidden):
    """Unitary inverse FFT of a batch of complex vectors stored in stacked
    Re/Im form: (I x 2*n_hidden) in, (I x 2*n_hidden) out."""
    stacked = T.reshape(input, (input.shape[0], 2, n_hidden)).dimshuffle(0, 2, 1)
    # Same 1/sqrt(n) scaling as do_fft, keeping the pair unitary.
    transformed = cuifft(stacked) / T.sqrt(n_hidden)
    flattened = T.reshape(transformed.dimshuffle(0, 2, 1),
                          (input.shape[0], 2 * n_hidden))
    return flattened
def times_diag(input, n_hidden, diag, swap_re_im):
    """Multiply a batch of complex vectors by a unitary diagonal matrix.

    `input` is I x 2*n_hidden (I training examples, stacked Re/Im halves).
    `diag` is a real n_hidden vector; the diagonal entries are
    e^{j*diag} = cos(diag) + j*sin(diag).  `swap_re_im` is the index
    permutation that exchanges the Re and Im halves.
    """
    angles = T.concatenate([diag, -diag])           # length 2*n_hidden
    cos_row = T.cos(angles).dimshuffle('x', 0)
    sin_row = T.sin(angles).dimshuffle('x', 0)
    # (x * cos) + j-rotated (x * sin): the swap implements multiplication by j.
    return input * cos_row + (input * sin_row)[:, swap_re_im]
def vec_permutation(input, index_permute):
    """Reorder the columns of `input` according to `index_permute`."""
    permuted = input[:, index_permute]
    return permuted
def times_reflection(input, n_hidden, reflection):
    """Apply a complex Householder reflection H = I - (2/v*v) v v* to each
    row of `input`.

    `input` is I x 2*n_hidden (complex vectors in stacked Re/Im form);
    `reflection` is the length-2*n_hidden vector v, also Re/Im stacked.
    """
    input_re = input[:, :n_hidden]
    input_im = input[:, n_hidden:]
    reflect_re = reflection[:n_hidden]
    reflect_im = reflection[n_hidden:]
    # Squared norm v*v of the reflection vector.
    vstarv = (reflection**2).sum()
    # Inner products of each input row with the Re/Im parts of v.
    input_re_reflect_re = T.dot(input_re, reflect_re)
    input_re_reflect_im = T.dot(input_re, reflect_im)
    input_im_reflect_re = T.dot(input_im, reflect_re)
    input_im_reflect_im = T.dot(input_im, reflect_im)
    # Outer products assembling Re/Im parts of (v* x) v.
    a = T.outer(input_re_reflect_re - input_im_reflect_im, reflect_re)
    b = T.outer(input_re_reflect_im + input_im_reflect_re, reflect_im)
    c = T.outer(input_re_reflect_re - input_im_reflect_im, reflect_im)
    d = T.outer(input_re_reflect_im + input_im_reflect_re, reflect_re)
    output = input
    # Subtract (2/v*v) * (projection terms) from each half in place.
    output = T.inc_subtensor(output[:, :n_hidden], - 2. / vstarv * (a + b))
    output = T.inc_subtensor(output[:, n_hidden:], - 2. / vstarv * (d - c))
    return output
def times_reflection_sub(input, n_hidden, n_sub, reflection):
    """Like `times_reflection`, but the Householder reflection acts only on
    the last `n_sub` complex coordinates; the first n_hidden - n_sub
    coordinates pass through unchanged.
    """
    #print "n_hidden=%d, n_sub=%d" % (n_hidden,n_sub)
    input_re = input[:, :n_hidden]
    input_im = input[:, n_hidden:]
    # First coordinate index affected by the reflection.
    n_start=n_hidden-n_sub
    #print "n_start=%d" % n_start
    reflect_re = reflection[n_start:n_hidden]
    reflect_im = reflection[(n_hidden+n_start):]
    # Squared norm of the active (sub-vector) part of v only.
    vstarv = (reflect_re**2).sum() + (reflect_im**2).sum()
    # Inner products restricted to the affected columns.
    input_re_reflect_re = T.dot(input_re[:,n_start:], reflect_re)
    input_re_reflect_im = T.dot(input_re[:,n_start:], reflect_im)
    input_im_reflect_re = T.dot(input_im[:,n_start:], reflect_re)
    input_im_reflect_im = T.dot(input_im[:,n_start:], reflect_im)
    a = T.outer(input_re_reflect_re - input_im_reflect_im, reflect_re)
    b = T.outer(input_re_reflect_im + input_im_reflect_re, reflect_im)
    c = T.outer(input_re_reflect_re - input_im_reflect_im, reflect_im)
    d = T.outer(input_re_reflect_im + input_im_reflect_re, reflect_re)
    output = input
    # Update only the sub-range of each Re/Im half.
    output = T.inc_subtensor(output[:, n_start:n_hidden], - 2. / vstarv * (a + b))
    output = T.inc_subtensor(output[:, (n_hidden+n_start):], - 2. / vstarv * (d - c))
    return output
def times_givens(input,n_hidden,gparams,idx):
    # input is a Ix2n complex matrix in augmented ReIm form.
    # gparams is a 3-dim vector parameterizing a Givens rotation.
    # idx are two indices of the Givens rotation.
    #
    # output will be Ix2n complex matrix in augmented ReIm form
    # Givens rotation using gparams=(phi,psi,chi) is
    # [ cos(phi)*exp( j*psi), sin(phi)*exp( j*chi) ]
    # [-sin(phi)*exp(-j*chi), cos(phi)*exp(-j*psi) ]
    # (the original comment had psi and chi swapped in the second row;
    # the matrix above matches the G21/G22 expressions computed below)
    cos_phi=T.cos(gparams[0])
    sin_phi=T.sin(gparams[0])
    cos_psi=T.cos(gparams[1])
    sin_psi=T.sin(gparams[1])
    cos_chi=T.cos(gparams[2])
    sin_chi=T.sin(gparams[2])
    # Real/imaginary parts of the four rotation-matrix entries.
    G11_re=cos_phi*cos_psi
    G11_im=cos_phi*sin_psi
    G12_re=sin_phi*cos_chi
    G12_im=sin_phi*sin_chi
    G21_re=-sin_phi*cos_chi
    G21_im= sin_phi*sin_chi
    G22_re= cos_phi*cos_psi
    G22_im=-cos_phi*sin_psi
    idx=T.cast(idx,'int64')
    output=input
    # Re{y_{i1}}=Re{ G11*x_{i1}+G12*x_{i2} }
    #output[:,idx[0]]
    output = T.set_subtensor(output[:,idx[0]], \
                             (G11_re*input[:,idx[0]]-G11_im*input[:,idx[0]+n_hidden]) \
                             +(G12_re*input[:,idx[1]]-G12_im*input[:,idx[1]+n_hidden]))
    # Im{y_{i1}}=Im{ G11*x_{i1}+G12*x_{i2} }
    #output[:,idx[0]+n_hidden]
    output = T.set_subtensor(output[:,idx[0]+n_hidden], \
                             (G11_im*input[:,idx[0]]+G11_re*input[:,idx[0]+n_hidden]) \
                             +(G12_im*input[:,idx[1]]+G12_re*input[:,idx[1]+n_hidden]))
    # Re{y_{i2}}=Re{ G21*x_{i1}+G22*x_{i2} }
    #output[:,idx[1]]
    output = T.set_subtensor(output[:,idx[1]], \
                             (G21_re*input[:,idx[0]]-G21_im*input[:,idx[0]+n_hidden]) \
                             +(G22_re*input[:,idx[1]]-G22_im*input[:,idx[1]+n_hidden]))
    # Im{y_{i2}}=Im{ G21*x_{i1}+G22*x_{i2} }
    #output[:,idx[1]+n_hidden]
    output = T.set_subtensor(output[:,idx[1]+n_hidden], \
                             (G21_im*input[:,idx[0]]+G21_re*input[:,idx[0]+n_hidden]) \
                             +(G22_im*input[:,idx[1]]+G22_re*input[:,idx[1]+n_hidden]))
    return output
def compute_cost_t(lin_output, loss_function, y_t, ymask_t=None, z_t=None, lam=0.0):
    """Per-timestep cost and accuracy/diagnostic for the chosen loss.

    lin_output: pre-activation network output for this step.
    loss_function: one of 'CE', 'CE_of_sum', 'MSE', 'MSEplusL1',
        'g_loss', 'none_in_scan', 'd_loss'.
    y_t: target for this step.
    ymask_t: optional mask zeroing out padded entries.
    z_t: unused here (kept for signature compatibility with callers).
    lam: L1 weight, used only by 'MSEplusL1'.

    Returns (cost_t, acc_t); for 'd_loss', acc_t is a two-element list of
    mean fake/real cross-entropies.
    """
    if (loss_function == 'CE') or (loss_function == 'CE_of_sum'):
        RNN_output = T.nnet.softmax(lin_output)
        CE = T.nnet.categorical_crossentropy(RNN_output, y_t)
        if ymask_t is not None:
            RNN_output=RNN_output*ymask_t
            CE = CE*(ymask_t.dimshuffle(0,))
        cost_t = CE.mean()
        # Accuracy: fraction of argmax predictions matching the targets.
        acc_t =(T.eq(T.argmax(RNN_output, axis=-1), y_t)).mean(dtype=theano.config.floatX)
    elif loss_function == 'MSE':
        mse = (lin_output - y_t)**2
        if ymask_t is not None:
            mse = mse*((ymask_t[:,0]).dimshuffle(0,'x'))
            #mse = mse*ymask_t[:,0:1]
        cost_t = mse.mean()
        #acc_t = theano.shared(np.float32(0.0))
        # No separate accuracy for regression; report the cost itself.
        acc_t = cost_t
    elif loss_function == 'MSEplusL1':
        # MSE plus lam-weighted L1 penalty on the output magnitudes.
        mseOnly = (lin_output - y_t)**2
        L1 = T.sqrt(1e-5 + T.sum(lin_output**2,axis=1,keepdims=True))
        mse = mseOnly + lam*L1
        if ymask_t is not None:
            mse = mse*((ymask_t[:,0]).dimshuffle(0,'x'))
        cost_t = mse.mean()
        acc_t = mseOnly.mean()
    #elif loss_function == 'NMSE':
    #    err=(lin_output - y_t)**2
    #    err_sum=T.sum(err,axis=0)
    #    err_sum=T.sum(err_sum,axis=-1)
    #    ypow=y_t**2
    #    ypow_sum=T.sum(ypow,axis=0)
    #    ypow_sum=T.sum(ypow_sum,axis=-1)
    #    cost_t = (err_sum / (1e-5+ypow_sum)).mean()
    #    acc_t = theano.shared(np.float32(0.0))
    elif (loss_function == 'g_loss') or (loss_function == 'none_in_scan'):
        # Cost is computed elsewhere for these modes; return zeros.
        cost_t=theano.shared(np.float32(0.0))
        acc_t =theano.shared(np.float32(0.0))
    elif loss_function == 'd_loss':
        # Discriminator loss: binary cross-entropy on sigmoid outputs.
        RNN_output = T.nnet.sigmoid(lin_output)
        # clip the output of the sigmoid to avoid 0s, and thus NaNs in cross entropy:
        RNN_output_clip = T.clip(RNN_output,1e-7,1.0-1e-7)
        costs_t = T.nnet.binary_crossentropy(RNN_output_clip, y_t)
        if ymask_t is not None:
            costs_t = costs_t*(ymask_t.dimshuffle(0,))
        cost_t = costs_t.mean()
        # First half of the batch is fake, second half real.
        # NOTE(review): `/` here is Python-2 integer division (the module
        # imports cPickle, so it targets Py2) — confirm if porting to Py3.
        idx_half=costs_t.shape[0]/2
        costs_t_fake=costs_t[:idx_half]
        costs_t_real=costs_t[idx_half:]
        acc_t = [costs_t_fake.mean()/2,costs_t_real.mean()/2]
    return cost_t, acc_t
def initialize_data_nodes(loss_function, input_type, out_every_t):
    """Create the symbolic input node x and target node y.

    Real/complex inputs are n_frame x n_input x n_utt tensors; otherwise x
    is an int32 matrix of category indices.  Cross-entropy losses use int32
    targets; other losses use float tensors (per-step or sequence-level).
    """
    if input_type == 'real' or input_type == 'complex':
        x = T.tensor3()
    else:
        x = T.matrix(dtype='int32')
    if 'CE' in loss_function:
        y = T.matrix(dtype='int32') if out_every_t else T.vector(dtype='int32')
    else:
        # y will be n_fram x n_output x n_utt
        y = T.tensor3() if out_every_t else T.matrix()
    return x, y
def IRNN(n_input, n_hidden, n_output, input_type='real', out_every_t=False, loss_function='CE'):
    """Build an IRNN (ReLU RNN with identity-initialized recurrence).

    Returns (inputs, parameters, costs) where inputs are the symbolic
    x/y nodes, parameters the trainable shared variables, and costs
    either [cost, accuracy] (out_every_t) or the compute_cost_t tuple.
    """
    # NOTE(review): seeds global numpy RNG as a side effect.
    np.random.seed(1234)
    rng = np.random.RandomState(1234)
    x, y = initialize_data_nodes(loss_function, input_type, out_every_t)
    inputs = [x, y]
    h_0 = theano.shared(np.zeros((1, n_hidden), dtype=theano.config.floatX))
    V = initialize_matrix(n_input, n_hidden, 'V', rng)
    # Identity recurrence matrix is the defining trait of the IRNN.
    W = theano.shared(np.identity(n_hidden, dtype=theano.config.floatX))
    out_mat = initialize_matrix(n_hidden, n_output, 'out_mat', rng)
    hidden_bias = theano.shared(np.zeros((n_hidden,), dtype=theano.config.floatX))
    out_bias = theano.shared(np.zeros((n_output,), dtype=theano.config.floatX))
    parameters = [h_0, V, W, out_mat, hidden_bias, out_bias]
    def recurrence(x_t, y_t, h_prev, cost_prev, acc_prev, V, W, hidden_bias, out_mat, out_bias):
        # One RNN step: project input (embedding lookup for CE), ReLU update.
        if loss_function == 'CE':
            data_lin_output = V[x_t]
        else:
            data_lin_output = T.dot(x_t, V)
        h_t = T.nnet.relu(T.dot(h_prev, W) + data_lin_output + hidden_bias.dimshuffle('x', 0))
        if out_every_t:
            lin_output = T.dot(h_t, out_mat) + out_bias.dimshuffle('x', 0)
            cost_t, acc_t = compute_cost_t(lin_output, loss_function, y_t)
        else:
            cost_t = theano.shared(np.float32(0.0))
            acc_t = theano.shared(np.float32(0.0))
        return h_t, cost_t, acc_t
    non_sequences = [V, W, hidden_bias, out_mat, out_bias]
    h_0_batch = T.tile(h_0, [x.shape[1], 1])
    if out_every_t:
        sequences = [x, y]
    else:
        # Dummy y sequence so recurrence keeps a uniform signature.
        sequences = [x, T.tile(theano.shared(np.zeros((1,1), dtype=theano.config.floatX)), [x.shape[0], 1, 1])]
    outputs_info = [h_0_batch, theano.shared(np.float32(0.0)), theano.shared(np.float32(0.0))]
    [hidden_states, cost_steps, acc_steps], updates = theano.scan(fn=recurrence,
                                                                  sequences=sequences,
                                                                  non_sequences=non_sequences,
                                                                  outputs_info = outputs_info)
    if not out_every_t:
        # Sequence-level loss on the final hidden state only.
        lin_output = T.dot(hidden_states[-1,:,:], out_mat) + out_bias.dimshuffle('x', 0)
        costs = compute_cost_t(lin_output, loss_function, y)
    else:
        cost = cost_steps.mean()
        accuracy = acc_steps.mean()
        costs = [cost, accuracy]
    return inputs, parameters, costs
def tanhRNN(n_input, n_hidden, n_output, input_type='real', out_every_t=False, loss_function='CE'):
    """Build a vanilla tanh RNN.

    Identical in structure to IRNN above, except the nonlinearity is tanh
    and the recurrence matrix W is randomly initialized rather than identity.
    Returns (inputs, parameters, costs).
    """
    # NOTE(review): seeds global numpy RNG as a side effect.
    np.random.seed(1234)
    rng = np.random.RandomState(1234)
    x, y = initialize_data_nodes(loss_function, input_type, out_every_t)
    inputs = [x, y]
    h_0 = theano.shared(np.zeros((1, n_hidden), dtype=theano.config.floatX))
    V = initialize_matrix(n_input, n_hidden, 'V', rng)
    W = initialize_matrix(n_hidden, n_hidden, 'W', rng)
    out_mat = initialize_matrix(n_hidden, n_output, 'out_mat', rng)
    hidden_bias = theano.shared(np.zeros((n_hidden,), dtype=theano.config.floatX))
    out_bias = theano.shared(np.zeros((n_output,), dtype=theano.config.floatX))
    parameters = [h_0, V, W, out_mat, hidden_bias, out_bias]
    def recurrence(x_t, y_t, h_prev, cost_prev, acc_prev, V, W, hidden_bias, out_mat, out_bias):
        # One RNN step: project input (embedding lookup for CE), tanh update.
        if loss_function == 'CE':
            data_lin_output = V[x_t]
        else:
            data_lin_output = T.dot(x_t, V)
        h_t = T.tanh(T.dot(h_prev, W) + data_lin_output + hidden_bias.dimshuffle('x', 0))
        if out_every_t:
            lin_output = T.dot(h_t, out_mat) + out_bias.dimshuffle('x', 0)
            cost_t, acc_t = compute_cost_t(lin_output, loss_function, y_t)
        else:
            cost_t = theano.shared(np.float32(0.0))
            acc_t = theano.shared(np.float32(0.0))
        return h_t, cost_t, acc_t
    non_sequences = [V, W, hidden_bias, out_mat, out_bias]
    h_0_batch = T.tile(h_0, [x.shape[1], 1])
    if out_every_t:
        sequences = [x, y]
    else:
        # Dummy y sequence so recurrence keeps a uniform signature.
        sequences = [x, T.tile(theano.shared(np.zeros((1,1), dtype=theano.config.floatX)), [x.shape[0], 1, 1])]
    outputs_info = [h_0_batch, theano.shared(np.float32(0.0)), theano.shared(np.float32(0.0))]
    [hidden_states, cost_steps, acc_steps], updates = theano.scan(fn=recurrence,
                                                                  sequences=sequences,
                                                                  non_sequences=non_sequences,
                                                                  outputs_info=outputs_info)
    if not out_every_t:
        # Sequence-level loss on the final hidden state only.
        lin_output = T.dot(hidden_states[-1,:,:], out_mat) + out_bias.dimshuffle('x', 0)
        costs = compute_cost_t(lin_output, loss_function, y)
    else:
        cost = cost_steps.mean()
        accuracy = acc_steps.mean()
        costs = [cost, accuracy]
    return inputs, parameters, costs
def LSTM(n_input, n_hidden, n_output, input_type='real', out_every_t=False, loss_function='CE',flag_use_mask=False,flag_return_lin_output=False,flag_return_hidden_states=False,cost_weight=None,cost_transform=None,seed=1234):
    """Build an LSTM network graph.

    Returns (inputs, parameters, costs):
      inputs     -- [x, y] or [x, y, ymask] when flag_use_mask is True
      parameters -- all trainable shared variables
      costs      -- [cost, accuracy], optionally extended with lin_output,
                    hidden_states, a placeholder nmse, and cost_steps when
                    flag_return_lin_output is True.
    cost_transform/cost_weight optionally post-process per-step outputs
    (magnitude-times-phase, or an inverse-DFT matrix plus synthesis window).
    NOTE(review): `ymask` is only defined when flag_use_mask is True, but
    the cost_transform branches below reference it unconditionally — a
    latent NameError when cost_transform is used without the mask.
    """
    # NOTE(review): seeds global numpy RNG as a side effect.
    np.random.seed(seed)
    rng = np.random.RandomState(seed)
    # Input-to-hidden (W_*), hidden-to-hidden (U_*) and state-to-output-gate
    # (V_o) weights for the input/forget/candidate/output gates.
    W_i = initialize_matrix(n_input, n_hidden, 'W_i', rng)
    W_f = initialize_matrix(n_input, n_hidden, 'W_f', rng)
    W_c = initialize_matrix(n_input, n_hidden, 'W_c', rng)
    W_o = initialize_matrix(n_input, n_hidden, 'W_o', rng)
    U_i = initialize_matrix(n_hidden, n_hidden, 'U_i', rng)
    U_f = initialize_matrix(n_hidden, n_hidden, 'U_f', rng)
    U_c = initialize_matrix(n_hidden, n_hidden, 'U_c', rng)
    U_o = initialize_matrix(n_hidden, n_hidden, 'U_o', rng)
    V_o = initialize_matrix(n_hidden, n_hidden, 'V_o', rng)
    b_i = theano.shared(np.zeros((n_hidden,), dtype=theano.config.floatX))
    # Forget-gate bias initialized to ones (standard LSTM practice).
    b_f = theano.shared(np.ones((n_hidden,), dtype=theano.config.floatX))
    b_c = theano.shared(np.zeros((n_hidden,), dtype=theano.config.floatX))
    b_o = theano.shared(np.zeros((n_hidden,), dtype=theano.config.floatX))
    h_0 = theano.shared(np.zeros((1, n_hidden), dtype=theano.config.floatX))
    state_0 = theano.shared(np.zeros((1, n_hidden), dtype=theano.config.floatX))
    out_mat = initialize_matrix(n_hidden, n_output, 'out_mat', rng)
    out_bias = theano.shared(np.zeros((n_output,), dtype=theano.config.floatX))
    parameters = [W_i, W_f, W_c, W_o, U_i, U_f, U_c, U_o, V_o, b_i, b_f, b_c, b_o, h_0, state_0, out_mat, out_bias]
    x, y = initialize_data_nodes(loss_function, input_type, out_every_t)
    if flag_use_mask:
        if loss_function == 'CE':
            ymask = T.matrix(dtype='int8') if out_every_t else T.vector(dtype='int8')
        else:
            # y will be n_fram x n_output x n_utt
            ymask = T.tensor3(dtype='int8') if out_every_t else T.matrix(dtype='int8')
    def recurrence(x_t, y_t, ymask_t, h_prev, state_prev, cost_prev, acc_prev,
                   W_i, W_f, W_c, W_o, U_i, U_f, U_c, U_o, V_o, b_i, b_f, b_c, b_o, out_mat, out_bias):
        # One LSTM step; categorical CE inputs use embedding-style row lookup.
        if (loss_function == 'CE') and (input_type=='categorical'):
            x_t_W_i = W_i[x_t]
            x_t_W_c = W_c[x_t]
            x_t_W_f = W_f[x_t]
            x_t_W_o = W_o[x_t]
        else:
            x_t_W_i = T.dot(x_t, W_i)
            x_t_W_c = T.dot(x_t, W_c)
            x_t_W_f = T.dot(x_t, W_f)
            x_t_W_o = T.dot(x_t, W_o)
        # Standard LSTM gate equations (output gate also peeks at state_t).
        input_t = T.nnet.sigmoid(x_t_W_i + T.dot(h_prev, U_i) + b_i.dimshuffle('x', 0))
        candidate_t = T.tanh(x_t_W_c + T.dot(h_prev, U_c) + b_c.dimshuffle('x', 0))
        forget_t = T.nnet.sigmoid(x_t_W_f + T.dot(h_prev, U_f) + b_f.dimshuffle('x', 0))
        state_t = input_t * candidate_t + forget_t * state_prev
        output_t = T.nnet.sigmoid(x_t_W_o + T.dot(h_prev, U_o) + T.dot(state_t, V_o) + b_o.dimshuffle('x', 0))
        h_t = output_t * T.tanh(state_t)
        if out_every_t:
            lin_output = T.dot(h_t, out_mat) + out_bias.dimshuffle('x', 0)
            if flag_use_mask:
                cost_t, acc_t = compute_cost_t(lin_output, loss_function, y_t, ymask_t=ymask_t)
            else:
                cost_t, acc_t = compute_cost_t(lin_output, loss_function, y_t)
        else:
            cost_t = theano.shared(np.float32(0.0))
            acc_t = theano.shared(np.float32(0.0))
        return h_t, state_t, cost_t, acc_t
    non_sequences = [W_i, W_f, W_c, W_o, U_i, U_f, U_c, U_o, V_o, b_i, b_f, b_c, b_o, out_mat, out_bias]
    h_0_batch = T.tile(h_0, [x.shape[1], 1])
    state_0_batch = T.tile(state_0, [x.shape[1], 1])
    # Build the scan sequences; dummy y / all-ones mask tiles keep the
    # recurrence signature uniform across the four flag combinations.
    if out_every_t:
        if flag_use_mask:
            sequences = [x, y, ymask]
        else:
            sequences = [x, y, T.tile(theano.shared(np.ones((1,1),dtype=theano.config.floatX)), [x.shape[0], 1, 1])]
    else:
        if flag_use_mask:
            sequences = [x, T.tile(theano.shared(np.zeros((1,1), dtype=theano.config.floatX)), [x.shape[0], 1, 1]), T.tile(theano.shared(np.ones((1,1),dtype=theano.config.floatX)), [x.shape[0], 1, 1])]
        else:
            sequences = [x, T.tile(theano.shared(np.zeros((1,1), dtype=theano.config.floatX)), [x.shape[0], 1, 1]), T.tile(theano.shared(np.ones((1,1),dtype=theano.config.floatX)),[x.shape[0], 1, 1])]
    outputs_info = [h_0_batch, state_0_batch, theano.shared(np.float32(0.0)), theano.shared(np.float32(0.0))]
    [hidden_states, states, cost_steps, acc_steps], updates = theano.scan(fn=recurrence,
                                                                          sequences=sequences,
                                                                          non_sequences=non_sequences,
                                                                          outputs_info=outputs_info)
    if flag_return_lin_output:
        #if output_type=='complex':
        #    lin_output = T.dot(hidden_states, out_mat) + out_bias.dimshuffle('x',0)
        #elif output_type=='real':
        lin_output = T.dot(hidden_states, out_mat) + out_bias.dimshuffle('x',0)
    if not out_every_t:
        # Sequence-level loss on the final hidden state only.
        lin_output = T.dot(hidden_states[-1,:,:], out_mat) + out_bias.dimshuffle('x', 0)
        costs = compute_cost_t(lin_output, loss_function, y)
        cost=costs[0]
        accuracy=costs[1]
    else:
        if (cost_transform=='magTimesPhase'):
            # Interpret lin_output as phase; magnitude is recovered from the
            # (log-scaled) input x, then compare in the complex domain.
            cosPhase=T.cos(lin_output)
            sinPhase=T.sin(lin_output)
            linMag=np.sqrt(10**(x/10.0)-1e-5)
            yest_real=linMag*cosPhase
            yest_imag=linMag*sinPhase
            yest=T.concatenate([yest_real,yest_imag],axis=2)
            mse=(yest-y)**2
            cost_steps=T.mean(mse*ymask[:,:,0].dimshuffle(0,1,'x'),axis=2)
        elif cost_transform is not None:
            # assume that cost_transform is an inverse DFT followed by synthesis windowing
            # Rebuild conjugate-symmetric spectra for both estimate and target,
            # transform to time domain, window, and compare there.
            lin_output_real=lin_output[:,:,:n_output//2]
            lin_output_imag=lin_output[:,:,n_output//2:]
            lin_output_sym_real=T.concatenate([lin_output_real,lin_output_real[:,:,n_output//2-2:0:-1]],axis=2)
            lin_output_sym_imag=T.concatenate([-lin_output_imag,lin_output_imag[:,:,n_output//2-2:0:-1]],axis=2)
            lin_output_sym=T.concatenate([lin_output_sym_real,lin_output_sym_imag],axis=2)
            yest_xform=T.dot(lin_output_sym,cost_transform)
            # apply synthesis window
            yest_xform=yest_xform*cost_weight.dimshuffle('x','x',0)
            y_real=y[:,:,:n_output//2]
            y_imag=y[:,:,n_output//2:]
            y_sym_real=T.concatenate([y_real,y_real[:,:,n_output//2-2:0:-1]],axis=2)
            y_sym_imag=T.concatenate([-y_imag,y_imag[:,:,n_output//2-2:0:-1]],axis=2)
            y_sym=T.concatenate([y_sym_real,y_sym_imag],axis=2)
            y_xform=T.dot(y_sym,cost_transform)
            # apply synthesis window
            y_xform=y_xform*cost_weight.dimshuffle('x','x',0)
            mse=(y_xform-yest_xform)**2
            cost_steps=T.mean(mse*ymask[:,:,0].dimshuffle(0,1,'x'),axis=2)
        cost = cost_steps.mean()
        accuracy = acc_steps.mean()
        costs = [cost, accuracy]
    if (loss_function=='CE_of_sum'):
        yest = T.sum(lin_output,axis=0) #sum over time_steps, yest is Nseq x n_output
        yest_softmax = T.nnet.softmax(yest)
        cost = T.nnet.categorical_crossentropy(yest_softmax, y[0,:]).mean()
        accuracy = T.eq(T.argmax(yest, axis=-1), y[0,:]).mean(dtype=theano.config.floatX)
        costs = [cost,accuracy]
    if flag_return_lin_output:
        costs = [cost, accuracy, lin_output]
        if flag_return_hidden_states:
            costs = costs + [hidden_states]
        #nmse_local = ymask.dimshuffle(0,1)*( (lin_output-y)**2 )/( 1e-5 + y**2 )
        # Placeholder kept so downstream unpacking stays fixed-width.
        nmse_local = theano.shared(np.float32(0.0))
        costs = costs + [nmse_local]
        costs = costs + [cost_steps]
    if flag_use_mask:
        return [x, y, ymask], parameters, costs
    else:
        return [x, y], parameters, costs
def initialize_unitary(n, impl, rng, name_suffix='', n_Givens=0, init='rand'):
    """Initialize the parameterization of an n x n unitary matrix W.

    Parameters
    ----------
    n : int
        Complex dimension of the unitary matrix.
    impl : str
        Parameterization to use:
        'adhoc'  -- parameterization of Arjovsky, Shah, and Bengio 2015
                    (diagonals, reflections, FFTs, and a fixed permutation);
        'full'   -- a free 2n x 2n real-composite matrix [Re, Im; -Im, Re];
        'givens' -- a composition of n_Givens Givens rotations.
    rng : numpy.random.RandomState
        Source of randomness for the initialization.
    name_suffix : str
        Suffix appended to the names of the created Theano shared variables.
    n_Givens : int
        Number of Givens rotations (only used when impl == 'givens').
    init : str
        For impl == 'full': 'rand' initializes W to a random unitary matrix
        (built via the 'adhoc' parameterization), 'identity' starts W at I.

    Returns
    -------
    list
        Wparams, the parameter list for the chosen parameterization:
        [theta, reflection, index_permute_long] for 'adhoc',
        [Waug] for 'full', [gphipsi, gchi, gidx] for 'givens'.
    """
    if (impl == 'adhoc'):
        # ad-hoc parameterization of Arjovsky, Shah, and Bengio 2015
        reflection = initialize_matrix(2, 2*n, 'reflection'+name_suffix, rng)
        theta = theano.shared(np.asarray(rng.uniform(low=-np.pi,
                                                     high=np.pi,
                                                     size=(3, n)),
                                         dtype=theano.config.floatX),
                              name='theta'+name_suffix)
        index_permute = rng.permutation(n)
        # permutation acts identically on the real and imaginary halves
        index_permute_long = np.concatenate((index_permute, index_permute + n))
        Wparams = [theta, reflection, index_permute_long]
    elif (impl == 'full'):
        if (init == 'rand'):
            # use the ad-hoc parameterization to draw a random unitary matrix,
            # then freeze it into a full real-composite shared variable
            reflection = initialize_matrix(2, 2*n, 'reflection'+name_suffix, rng)
            theta = theano.shared(np.asarray(rng.uniform(low=-np.pi,
                                                         high=np.pi,
                                                         size=(3, n)),
                                             dtype=theano.config.floatX),
                                  name='theta'+name_suffix)
            index_permute = rng.permutation(n)
            index_permute_long = np.concatenate((index_permute, index_permute + n))
            WcRe = np.eye(n).astype(np.float32)
            WcIm = np.zeros((n, n)).astype(np.float32)
            # identity in real-composite form [I, 0; 0, I]
            Waug = np.concatenate([np.concatenate([WcRe, WcIm], axis=1),
                                   np.concatenate([WcIm, WcRe], axis=1)], axis=0)
            swap_re_im = np.concatenate((np.arange(n, 2*n), np.arange(n)))
            # multiply the identity by the random ad-hoc unitary to obtain W's value
            Waug_variable = times_unitary(Waug, n, swap_re_im,
                                          [theta, reflection, index_permute_long], 'adhoc')
            Waug = theano.shared(Waug_variable.eval().astype(np.float32),
                                 name='Waug'+name_suffix)
        elif (init == 'identity'):
            WcRe = np.eye(n).astype(np.float32)
            WcIm = np.zeros((n, n)).astype(np.float32)
            Waug_np = np.concatenate([np.concatenate([WcRe, WcIm], axis=1),
                                      np.concatenate([WcIm, WcRe], axis=1)], axis=0)
            Waug = theano.shared(Waug_np, name='Waug'+name_suffix)
        Wparams = [Waug]
    elif (impl == 'givens'):
        # composition of Givens rotations
        gphipsi = theano.shared(np.asarray(rng.uniform(low=-np.pi,
                                                       high=np.pi,
                                                       size=(n_Givens, 1, 2)),
                                           dtype=theano.config.floatX),
                                name='gphipsi')
        gchi = theano.shared(np.asarray(np.arccos(rng.uniform(low=0,
                                                              high=1,
                                                              size=(n_Givens, 1, 1))),
                                        dtype=theano.config.floatX),
                             name='gchi')
        # build indices for Givens rotations.
        # NOTE: integer division is required so Nchoose2 is an int; the old
        # '/' produced a float on Python 3, which rng.permutation rejects.
        Nchoose2 = (n * (n - 1)) // 2
        # generate a random permutation of 1:(N choose 2)
        Nchoose2perm = rng.permutation(Nchoose2)
        # take the first n_Givens values
        Nchoose2perm = Nchoose2perm[:n_Givens]
        # initialize Givens indices
        gidx_np = np.zeros((n_Givens, 1, 2), dtype=np.int32)
        ig = 0      # absolute Givens index over all (ig1, ig2) pairs
        ig_sel = 0  # index into the selected subset of Givens rotations
        for ig1 in range(0, n):
            for ig2 in range(ig1+1, n):
                if ig in Nchoose2perm:
                    ig_sel = np.where(Nchoose2perm == ig)
                    ig_sel = ig_sel[0][0]
                    gidx_np[ig_sel, 0, :] = np.reshape([ig1, ig2], (1, 1, 2))
                ig = ig + 1
        gidx = theano.shared(gidx_np)
        Wparams = [gphipsi, gchi, gidx]
    return Wparams
def initialize_complex_RNN_layer(n_hidden, Wimpl, rng, hidden_bias_mean, name_suffix='', n_Givens=0, hidden_bias_init='rand', h_0_init='rand', W_init='rand'):
    """Initialize the parameters of one complex-valued (unitary) RNN layer.

    Parameters
    ----------
    n_hidden : int
        Complex dimension of the hidden state.
    Wimpl : str
        Parameterization of the unitary transition matrix W; passed to
        initialize_unitary ('adhoc', 'full', or 'givens').
    rng : numpy.random.RandomState
        Source of randomness for the initialization.
    hidden_bias_mean : float
        Mean of the (uniform) random hidden-bias initialization.
    name_suffix : str
        Suffix appended to the names of the created shared variables.
    n_Givens : int
        Number of Givens rotations (only used when Wimpl == 'givens').
    hidden_bias_init, h_0_init : str
        'rand' or 'zero' initialization for the hidden bias and initial state.
    W_init : str
        Initialization mode forwarded to initialize_unitary.

    Returns
    -------
    tuple
        (hidden_bias, h_0, Wparams) where hidden_bias is (n_hidden,),
        h_0 is (1, 2*n_hidden) in real-composite form, and Wparams is the
        parameter list returned by initialize_unitary.

    Raises
    ------
    ValueError
        If hidden_bias_init or h_0_init is not a recognized mode.
    """
    # hidden bias
    if (hidden_bias_init == 'rand'):
        hidden_bias = theano.shared(np.asarray(hidden_bias_mean+rng.uniform(low=-0.01,
                                                                            high=0.01,
                                                                            size=(n_hidden,)),
                                               dtype=theano.config.floatX),
                                    name='hidden_bias'+name_suffix)
    elif (hidden_bias_init == 'zero'):
        hidden_bias = theano.shared(np.zeros((n_hidden,)).astype(theano.config.floatX),
                                    name='hidden_bias'+name_suffix)
    else:
        raise ValueError("Unknown initialization method %s for hidden_bias" % hidden_bias_init)
    # initial state h_0 (real and imaginary parts concatenated)
    h_0_size = (1, 2*n_hidden)
    if (h_0_init == 'rand'):
        # scale chosen so E[|h_0|^2] matches a unit-variance complex state
        bucket = np.sqrt(3. / 2 / n_hidden)
        h_0 = theano.shared(np.asarray(rng.uniform(low=-bucket,
                                                   high=bucket,
                                                   size=h_0_size),
                                       dtype=theano.config.floatX),
                            name='h_0'+name_suffix)
    elif (h_0_init == 'zero'):
        h_0 = theano.shared(np.zeros(h_0_size).astype(theano.config.floatX),
                            name='h_0'+name_suffix)
    else:
        raise ValueError("Unknown initialization method %s for h_0" % h_0_init)
    # unitary transition matrix W (delegated; a stale commented-out copy of
    # initialize_unitary that used to live here has been removed)
    Wparams = initialize_unitary(n_hidden, Wimpl, rng, name_suffix=name_suffix,
                                 n_Givens=n_Givens, init=W_init)
    return hidden_bias, h_0, Wparams
def times_unitary(x, n, swap_re_im, Wparams, Wimpl):
    """Multiply tensor x on the right by the unitary matrix W.

    Parameters
    ----------
    x : tensor
        Input in real-composite form; last dimension is 2*n
        (real part in the first n columns, imaginary part in the last n).
    n : int
        Complex dimension of W.
    swap_re_im : ndarray
        Index vector that swaps the real and imaginary halves of x.
    Wparams : list
        Parameters of W as produced by initialize_unitary.
    Wimpl : str
        Which parameterization Wparams uses: 'adhoc', 'full', or 'givens'.

    Returns
    -------
    tensor
        x @ W in real-composite form, same shape as x.
    """
    if (Wimpl == 'adhoc'):
        # W = D3 R2 F^-1 D2 Pi R1 F D1 (Arjovsky, Shah, and Bengio 2015)
        theta = Wparams[0]
        reflection = Wparams[1]
        index_permute_long = Wparams[2]
        step1 = times_diag(x, n, theta[0, :], swap_re_im)
        step2 = do_fft(step1, n)
        step3 = times_reflection(step2, n, reflection[0, :])
        step4 = vec_permutation(step3, index_permute_long)
        step5 = times_diag(step4, n, theta[1, :], swap_re_im)
        step6 = do_ifft(step5, n)
        step7 = times_reflection(step6, n, reflection[1, :])
        step8 = times_diag(step7, n, theta[2, :], swap_re_im)
        y = step8
    elif (Wimpl == 'full'):
        Waug = Wparams[0]
        y = T.dot(x, Waug)
    elif (Wimpl == 'givens'):
        gphipsi = Wparams[0]
        gchi = Wparams[1]
        gidx = Wparams[2]
        # scan over the sequence of Givens rotations, applying each one to
        # the running product; output of this inner scan is I x 2*n.
        # BUGFIX: the seed of the scan must be the input x; the original
        # referenced an undefined name h_prev here (NameError at graph build).
        givens_steps = x
        givens_outputs, updates = theano.scan(fn=lambda gphipsi,
                                                        gchi,
                                                        gidx,
                                                        Gh_prev:
                                              times_givens(Gh_prev,
                                                           n,
                                                           T.reshape(T.concatenate([gphipsi, gchi], axis=1), [3]),
                                                           T.reshape(gidx, [2])),
                                              sequences=[gphipsi, gchi, gidx],
                                              outputs_info=givens_steps)
        # output of composition of Givens rotations:
        y = T.reshape(givens_outputs[-1, :, :],
                      (givens_outputs.shape[1], givens_outputs.shape[2]))
    return y
def complex_RNN(n_input, n_hidden, n_output, input_type='real', out_every_t=False, loss_function='CE', output_type='real', fidx=None, flag_return_lin_output=False, name_suffix='', x_spec=None, flag_feed_forward=False, flag_use_mask=False, hidden_bias_mean=0.0, lam=0.0, Wimpl="adhoc", n_Givens=None, prng_Givens=np.random.RandomState(), Vnorm=0.0, Unorm=0.0, flag_return_hidden_states=False, n_layers=1, cost_weight=None, cost_transform=None, flag_noComplexConstraint=0, seed=1234, V_init='rand', U_init='rand', W_init='rand', h_0_init='rand', out_bias_init='rand', hidden_bias_init='rand', flag_add_input_to_output=False, flag_connect_input_to_layers=False, flag_broadcast_silo=False):
    """Build a (multi-layer) complex-valued unitary RNN Theano graph.

    Returns (inputs, parameters, costs), where inputs is [x, y] (plus ymask
    when flag_use_mask), parameters is the list of trainable shared
    variables, and costs is [cost, accuracy] optionally extended with
    lin_output, hidden_states, nmse_local, and cost_steps.

    Hidden states are kept in real-composite form: a layer state has
    2*n_hidden columns (real part first, imaginary part second).
    NOTE(review): when Wimpl == 'givens', the scan below references
    gphipsi/gchi/gidx, which are only bound when flag_feed_forward is
    False — confirm 'givens' is never combined with flag_feed_forward.
    """
    np.random.seed(seed)
    rng = np.random.RandomState(seed)

    # Initialize input and output parameters: V, U, out_bias
    # input matrix V
    if flag_noComplexConstraint and (input_type == 'complex'):
        V = initialize_matrix(2*n_input, 2*n_hidden, 'V'+name_suffix, rng, init=V_init)
        Vaug = V
    else:
        V = initialize_matrix(n_input, 2*n_hidden, 'V'+name_suffix, rng, init=V_init)
        if (Vnorm > 0.0):
            # normalize the rows of V by the L2 norm (note that the variable V here is
            # actually V^T, so we normalize the columns)
            Vr = V[:, :n_hidden]
            Vi = V[:, n_hidden:]
            Vnorms = T.sqrt(1e-5 + T.sum(Vr**2, axis=0, keepdims=True) + T.sum(Vi**2, axis=0, keepdims=True))
            Vn = T.concatenate([Vr/(1e-5 + Vnorms), Vi/(1e-5 + Vnorms)], axis=1)
            # scale so row norms are desired number
            Vn = V*T.sqrt(Vnorm)
        else:
            Vn = V
        if input_type == 'complex':
            Vim = T.concatenate([(-1)*Vn[:, n_hidden:], Vn[:, :n_hidden]], axis=1)  # [-V_I, V_R]
            Vaug = T.concatenate([Vn, Vim], axis=0)  # [V_R, V_I; -V_I, V_R]

    # output matrix U
    if flag_noComplexConstraint and (input_type == 'complex'):
        U = initialize_matrix(2*n_hidden, 2*n_output, 'U'+name_suffix, rng, init=U_init)
        Uaug = U
    else:
        U = initialize_matrix(2 * n_hidden, n_output, 'U'+name_suffix, rng, init=U_init)
        if (Unorm > 0.0):
            # normalize the cols of U by the L2 norm (note that the variable U here is
            # actually U^H, so we normalize the rows)
            Ur = U[:n_hidden, :]
            Ui = U[n_hidden:, :]
            Unorms = T.sqrt(1e-5 + T.sum(Ur**2, axis=1, keepdims=True) + T.sum(Ui**2, axis=1, keepdims=True))
            Un = T.concatenate([Ur/(1e-5 + Unorms), Ui/(1e-5 + Unorms)], axis=0)
            # scale so col norms are desired number
            Un = Un*T.sqrt(Unorm)
        else:
            Un = U
        if output_type == 'complex':
            Uim = T.concatenate([(-1)*Un[n_hidden:, :], Un[:n_hidden, :]], axis=0)  # [-U_I; U_R]
            Uaug = T.concatenate([Un, Uim], axis=1)  # [U_R, -U_I; U_I, U_R]
            # note that this is a little weird compared to the convention elsewhere in
            # this code that right-multiplication real-composite form is [A, B; -B, A].
            # The weirdness is because of the original implementation, which initialized
            # U for real-valued outputs as U=[A; B], which really should have been U=[A; -B]

    # output bias out_bias
    if output_type == 'complex':
        out_bias = theano.shared(np.zeros((2*n_output,), dtype=theano.config.floatX), name='out_bias'+name_suffix)
    else:
        out_bias = theano.shared(np.zeros((n_output,), dtype=theano.config.floatX), name='out_bias'+name_suffix)

    # initialize layer 1 parameters
    hidden_bias, h_0, Wparams = initialize_complex_RNN_layer(n_hidden, Wimpl, rng, hidden_bias_mean, name_suffix=name_suffix, n_Givens=n_Givens, hidden_bias_init=hidden_bias_init, h_0_init=h_0_init, W_init=W_init)

    # extract recurrent parameters into this namespace
    if flag_feed_forward:
        # just doing feed-forward, so remove any recurrent parameters
        if (Wimpl == 'adhoc'):
            h_0_size = (1, 2*n_hidden)
            h_0 = theano.shared(np.asarray(np.zeros(h_0_size), dtype=theano.config.floatX))
        parameters = [V, U, hidden_bias, out_bias]
    else:
        if (Wimpl == 'adhoc'):
            # ad-hoc parameterization of Arjovsky, Shah, and Bengio 2015
            theta = Wparams[0]
            reflection = Wparams[1]
            index_permute_long = Wparams[2]
            parameters = [V, U, hidden_bias, reflection, out_bias, theta, h_0]
        elif (Wimpl == 'full'):
            # fixed full unitary matrix
            Waug = Wparams[0]
            parameters = [V, U, hidden_bias, out_bias, h_0, Waug]
        elif (Wimpl == 'givens'):
            # composition of Givens rotations
            gphipsi = Wparams[0]
            gchi = Wparams[1]
            gidx = Wparams[2]
            parameters = [V, U, hidden_bias, out_bias, h_0, gphipsi, gchi]
    h_0_all_layers = h_0

    # initialize additional layer parameters
    addl_layers_params = []
    addl_layers_params_optim = []
    for i_layer in range(2, n_layers+1):
        betw_layer_suffix = '_L%d_to_L%d' % (i_layer-1, i_layer)
        layer_suffix = '_L%d' % i_layer
        Wvparams_cur = initialize_unitary(n_hidden, Wimpl, rng, name_suffix=(name_suffix+betw_layer_suffix), n_Givens=n_Givens, init=W_init)
        hidden_bias_cur, h_0_cur, Wparams_cur = initialize_complex_RNN_layer(n_hidden, Wimpl, rng, hidden_bias_mean, name_suffix=(name_suffix + layer_suffix), n_Givens=n_Givens, hidden_bias_init=hidden_bias_init, h_0_init=h_0_init, W_init=W_init)
        addl_layers_params = addl_layers_params + Wvparams_cur + [hidden_bias_cur, h_0_cur] + Wparams_cur
        if (Wimpl == 'adhoc'):
            # don't include permutation indices in the list of parameters to be optimized
            addl_layers_params_optim = addl_layers_params_optim + Wvparams_cur[0:2] + [hidden_bias_cur, h_0_cur] + Wparams_cur[0:2]
        else:
            addl_layers_params_optim = addl_layers_params
        if flag_connect_input_to_layers:
            Vk = initialize_matrix(n_input, 2*n_hidden, 'V'+name_suffix+layer_suffix, rng, init=V_init)
            if (Vnorm > 0.0):
                # normalize the rows of V by the L2 norm (note that the variable V here
                # is actually V^T, so we normalize the columns)
                Vkr = Vk[:, :n_hidden]
                Vki = Vk[:, n_hidden:]
                Vknorms = T.sqrt(1e-5 + T.sum(Vkr**2, axis=0, keepdims=True) + T.sum(Vki**2, axis=0, keepdims=True))
                Vkn = T.concatenate([Vkr/(1e-5 + Vknorms), Vki/(1e-5 + Vknorms)], axis=1)
                # scale so row norms are desired number
                # BUGFIX: was Vknorm (undefined name, NameError) -- the target
                # norm is Vnorm, mirroring the layer-1 code above
                Vkn = Vk*T.sqrt(Vnorm)
            else:
                Vkn = Vk
            if input_type == 'complex':
                Vkim = T.concatenate([(-1)*Vkn[:, n_hidden:], Vkn[:, :n_hidden]], axis=1)  # [-V_I, V_R]
                Vkaug = T.concatenate([Vkn, Vkim], axis=0)  # [V_R, V_I; -V_I, V_R]
                addl_layers_params = addl_layers_params + [Vkaug]
            else:
                addl_layers_params = addl_layers_params + [Vkn]
            addl_layers_params_optim = addl_layers_params_optim + [Vk]
        h_0_all_layers = T.concatenate([h_0_all_layers, h_0_cur], axis=1)
    parameters = parameters + addl_layers_params_optim

    # initialize data nodes
    x, y = initialize_data_nodes(loss_function, input_type, out_every_t)
    if flag_use_mask:
        if 'CE' in loss_function:
            ymask = T.matrix(dtype='int8') if out_every_t else T.vector(dtype='int8')
        else:
            # y will be n_fram x n_output x n_utt
            ymask = T.tensor3(dtype='int8') if out_every_t else T.matrix(dtype='int8')
    if x_spec is not None:
        # x is specified, set x to this:
        x = x_spec

    swap_re_im = np.concatenate((np.arange(n_hidden, 2*n_hidden), np.arange(n_hidden)))

    # define the recurrence used by theano.scan
    def recurrence(x_t, y_t, ymask_t, h_prev, cost_prev, acc_prev, V, hidden_bias, out_bias, U, *argv):
        # h_prev is of size n_batch x n_layers*2*n_hidden

        # strip W parameters off variable arguments list
        if (Wimpl == 'full'):
            Wparams = argv[0:1]
            argv = argv[1:]
        else:
            Wparams = argv[0:3]
            argv = argv[3:]

        if not flag_feed_forward:
            # Compute hidden linear transform: W h_{t-1}
            h_prev_layer1 = h_prev[:, 0:2*n_hidden]
            hidden_lin_output = times_unitary(h_prev_layer1, n_hidden, swap_re_im, Wparams, Wimpl)

        # Compute data linear transform
        if ('CE' in loss_function) and (input_type == 'categorical'):
            # inputs are categorical, so just use them as indices into V
            data_lin_output = V[T.cast(x_t, 'int32')]
        else:
            # second dimension of real-valued x_t should be of size n_input, first
            # dimension of V should be of size n_input (or augmented, where the
            # dimension of summation is 2*n_input and V is in real/imag. augmented form)
            data_lin_output = T.dot(x_t, V)

        # Total linear output
        if not flag_feed_forward:
            lin_output = hidden_lin_output + data_lin_output
        else:
            lin_output = data_lin_output

        # Apply non-linearity ----------------------------
        # scale RELU nonlinearity; add a little bit to the sqrt argument to ensure
        # stable gradients, since gradient of sqrt(x) is -0.5/sqrt(x)
        modulus = T.sqrt(1e-9+lin_output**2 + lin_output[:, swap_re_im]**2)
        rescale = T.maximum(modulus + T.tile(hidden_bias, [2]).dimshuffle('x', 0), 0.) / (modulus)
        h_t = lin_output * rescale
        h_t_all_layers = h_t

        # Compute additional recurrent layers
        for i_layer in range(2, n_layers+1):
            # strip Wv parameters off variable arguments list
            if (Wimpl == 'full'):
                Wvparams_cur = argv[0:1]
                argv = argv[1:]
            else:
                Wvparams_cur = argv[0:3]
                argv = argv[3:]
            # strip hidden_bias for this layer off argv
            hidden_bias_cur = argv[0]
            argv = argv[1:]
            # strip h_0 for this layer off argv (unused here, since
            # h_0_all_layers is all layers' h_0s concatenated)
            argv = argv[1:]
            # strip W parameters off variable arguments list
            if (Wimpl == 'full'):
                Wparams_cur = argv[0:1]
                argv = argv[1:]
            else:
                Wparams_cur = argv[0:3]
                argv = argv[3:]
            if flag_connect_input_to_layers:
                # strip layer-dependent input transforms off variable arguments list
                Vk = argv[0]
                argv = argv[1:]

            # Compute the linear parts of the layer ----------
            if not flag_feed_forward:
                # get previous hidden state h_{t-1} for this layer:
                if flag_broadcast_silo:
                    # use top of the previous iteration stack for h_{t-1}
                    h_prev_cur = h_prev[:, (n_layers-1)*2*n_hidden:n_layers*2*n_hidden]
                else:
                    h_prev_cur = h_prev[:, (i_layer-1)*2*n_hidden:i_layer*2*n_hidden]
                # Compute hidden linear transform: W h_{t-1}
                hidden_lin_output_cur = times_unitary(h_prev_cur, n_hidden, swap_re_im, Wparams_cur, Wimpl)
            # Compute "data linear transform", which for this intermediate layer is
            # the previous layer's h_t transformed by Wv
            data_lin_output_cur = times_unitary(h_t, n_hidden, swap_re_im, Wvparams_cur, Wimpl)
            # Total linear output
            if not flag_feed_forward:
                lin_output_cur = hidden_lin_output_cur + data_lin_output_cur
            else:
                lin_output_cur = data_lin_output_cur
            if flag_connect_input_to_layers:
                lin_output_cur = lin_output_cur + T.dot(x_t, Vk)

            # Apply non-linearity ----------------------------
            # scale RELU nonlinearity; add a little bit to the sqrt argument to
            # ensure stable gradients, since gradient of sqrt(x) is -0.5/sqrt(x)
            modulus = T.sqrt(1e-9+lin_output_cur**2 + lin_output_cur[:, swap_re_im]**2)
            rescale = T.maximum(modulus + T.tile(hidden_bias_cur, [2]).dimshuffle('x', 0), 0.) / (modulus)
            h_t = lin_output_cur * rescale
            h_t_all_layers = T.concatenate([h_t_all_layers, h_t], axis=1)

        # assume we aren't passing any preactivation to compute_cost
        z_t = None
        if loss_function == 'MSEplusL1':
            z_t = h_t

        if out_every_t:
            lin_output = T.dot(h_t, U) + out_bias.dimshuffle('x', 0)
            if flag_add_input_to_output:
                lin_output = lin_output + x_t
            if flag_use_mask:
                cost_t, acc_t = compute_cost_t(lin_output, loss_function, y_t, ymask_t=ymask_t, z_t=z_t, lam=lam)
            else:
                cost_t, acc_t = compute_cost_t(lin_output, loss_function, y_t, z_t=z_t, lam=lam)
        else:
            cost_t = theano.shared(np.float32(0.0))
            acc_t = theano.shared(np.float32(0.0))

        return h_t_all_layers, cost_t, acc_t

    def recurrence_Givens(x_t, y_t, ymask_t, h_prev, cost_prev, acc_prev, V, hidden_bias, out_bias, U, gphipsi, gchi, gidx):
        # single-layer recurrence specialized to the Givens parameterization
        if not flag_feed_forward:
            # scan method for composing Givens rotations
            givens_steps = h_prev  # output of this inner scan should be I x 2*n_hidden
            givens_outputs, updates = theano.scan(fn=lambda gphipsi,
                                                            gchi,
                                                            gidx,
                                                            Gh_prev:
                                                  times_givens(Gh_prev,
                                                               n_hidden,
                                                               T.reshape(T.concatenate([gphipsi, gchi], axis=1), [3]),
                                                               T.reshape(gidx, [2])),
                                                  sequences=[gphipsi, gchi, gidx],
                                                  outputs_info=givens_steps)
            # output of composition of Givens rotations:
            hidden_lin_output = T.reshape(givens_outputs[-1, :, :], (givens_outputs.shape[1], givens_outputs.shape[2]))

        # Compute data linear transform
        if ('CE' in loss_function) and (input_type == 'categorical'):
            # inputs are categorical, so just use them as indices into V
            data_lin_output = V[T.cast(x_t, 'int32')]
        else:
            # second dimension of real-valued x_t should be of size n_input, first
            # dimension of V should be of size n_input (or augmented form)
            data_lin_output = T.dot(x_t, V)

        # Total linear output
        if not flag_feed_forward:
            lin_output = hidden_lin_output + data_lin_output
        else:
            lin_output = data_lin_output

        # Apply non-linearity ----------------------------
        # scale RELU nonlinearity; small epsilon keeps sqrt gradients stable
        modulus = T.sqrt(1e-9+lin_output**2 + lin_output[:, swap_re_im]**2)
        rescale = T.maximum(modulus + T.tile(hidden_bias, [2]).dimshuffle('x', 0), 0.) / (modulus)
        h_t = lin_output * rescale

        # assume we aren't passing any preactivation to compute_cost
        z_t = None
        if loss_function == 'MSEplusL1':
            z_t = h_t

        if out_every_t:
            lin_output = T.dot(h_t, U) + out_bias.dimshuffle('x', 0)
            if flag_use_mask:
                cost_t, acc_t = compute_cost_t(lin_output, loss_function, y_t, ymask_t=ymask_t, z_t=z_t, lam=lam)
            else:
                cost_t, acc_t = compute_cost_t(lin_output, loss_function, y_t, z_t=z_t, lam=lam)
        else:
            cost_t = theano.shared(np.float32(0.0))
            acc_t = theano.shared(np.float32(0.0))

        return h_t, cost_t, acc_t

    # compute hidden states
    # h_0_batch should be n_utt x n_layers*2*n_hidden, since scan goes over first
    # dimension of x, which is the maximum STFT length in frames
    h_0_batch = T.tile(h_0_all_layers, [x.shape[1], 1])
    if (Wimpl == 'givens'):
        if input_type == 'complex' and output_type == 'complex':
            # pass in augmented input and output transformations
            non_sequences = [Vaug, hidden_bias, out_bias, Uaug, gphipsi, gchi, gidx]
        elif input_type == 'complex':
            non_sequences = [Vaug, hidden_bias, out_bias, Un, gphipsi, gchi, gidx]
        elif output_type == 'complex':
            non_sequences = [Vn, hidden_bias, out_bias, Uaug, gphipsi, gchi, gidx]
        else:
            non_sequences = [Vn, hidden_bias, out_bias, Un, gphipsi, gchi, gidx]
    else:
        if input_type == 'complex' and output_type == 'complex':
            # pass in augmented input and output transformations
            non_sequences = [Vaug, hidden_bias, out_bias, Uaug] + Wparams + addl_layers_params
        elif input_type == 'complex':
            non_sequences = [Vaug, hidden_bias, out_bias, Un] + Wparams + addl_layers_params
        elif output_type == 'complex':
            non_sequences = [Vn, hidden_bias, out_bias, Uaug] + Wparams + addl_layers_params
        else:
            non_sequences = [Vn, hidden_bias, out_bias, Un] + Wparams + addl_layers_params

    if out_every_t:
        if flag_use_mask:
            sequences = [x, y, ymask]
        else:
            sequences = [x, y, T.tile(theano.shared(np.ones((1, 1), dtype=theano.config.floatX)), [x.shape[0], 1, 1])]
    else:
        # dummy y/ymask sequences keep the scan signature uniform
        if flag_use_mask:
            sequences = [x, T.tile(theano.shared(np.zeros((1, 1), dtype=theano.config.floatX)), [x.shape[0], 1, 1]), T.tile(theano.shared(np.ones((1, 1), dtype=theano.config.floatX)), [x.shape[0], 1, 1])]
        else:
            sequences = [x, T.tile(theano.shared(np.zeros((1, 1), dtype=theano.config.floatX)), [x.shape[0], 1, 1]), T.tile(theano.shared(np.ones((1, 1), dtype=theano.config.floatX)), [x.shape[0], 1, 1])]

    outputs_info = [h_0_batch, theano.shared(np.float32(0.0)), theano.shared(np.float32(0.0))]

    if (Wimpl == 'givens'):
        [hidden_states_all_layers, cost_steps, acc_steps], updates = theano.scan(fn=recurrence_Givens,
                                                                                 sequences=sequences,
                                                                                 non_sequences=non_sequences,
                                                                                 outputs_info=outputs_info)
    else:
        [hidden_states_all_layers, cost_steps, acc_steps], updates = theano.scan(fn=recurrence,
                                                                                 sequences=sequences,
                                                                                 non_sequences=non_sequences,
                                                                                 outputs_info=outputs_info)

    # get hidden states of last layer
    hidden_states = hidden_states_all_layers[:, :, (n_layers-1)*2*n_hidden:]

    if flag_return_lin_output:
        if output_type == 'complex':
            lin_output = T.dot(hidden_states, Uaug) + out_bias.dimshuffle('x', 0)
        else:
            lin_output = T.dot(hidden_states, Un) + out_bias.dimshuffle('x', 0)
        if flag_add_input_to_output:
            lin_output = lin_output + x

    if not out_every_t:
        # TODO: here, if flag_use_mask is set, need to use a for-loop to select
        # the desired time-step for each utterance
        lin_output = T.dot(hidden_states[-1, :, :], Un) + out_bias.dimshuffle('x', 0)
        z_t = None
        if loss_function == 'MSEplusL1':
            z_t = hidden_states[-1, :, :]
        costs = compute_cost_t(lin_output, loss_function, y, z_t=z_t, lam=lam)
        cost = costs[0]
        accuracy = costs[1]
    else:
        if (cost_transform == 'magTimesPhase'):
            # network outputs phases; reconstruct complex spectra from the input
            # log-magnitudes and the estimated phases, then take MSE vs. y
            cosPhase = T.cos(lin_output)
            sinPhase = T.sin(lin_output)
            linMag = np.sqrt(10**(x/10.0)-1e-5)
            yest_real = linMag*cosPhase
            yest_imag = linMag*sinPhase
            yest = T.concatenate([yest_real, yest_imag], axis=2)
            mse = (yest-y)**2
            cost_steps = T.mean(mse*ymask[:, :, 0].dimshuffle(0, 1, 'x'), axis=2)
        elif cost_transform is not None:
            # assume that cost_transform is an inverse DFT followed by synthesis windowing
            lin_output_real = lin_output[:, :, :n_output]
            lin_output_imag = lin_output[:, :, n_output:]
            # rebuild the conjugate-symmetric full spectrum from the half-spectrum
            lin_output_sym_real = T.concatenate([lin_output_real, lin_output_real[:, :, n_output-2:0:-1]], axis=2)
            lin_output_sym_imag = T.concatenate([-lin_output_imag, lin_output_imag[:, :, n_output-2:0:-1]], axis=2)
            lin_output_sym = T.concatenate([lin_output_sym_real, lin_output_sym_imag], axis=2)
            yest_xform = T.dot(lin_output_sym, cost_transform)
            # apply synthesis window
            yest_xform = yest_xform*cost_weight.dimshuffle('x', 'x', 0)
            y_real = y[:, :, :n_output]
            y_imag = y[:, :, n_output:]
            y_sym_real = T.concatenate([y_real, y_real[:, :, n_output-2:0:-1]], axis=2)
            y_sym_imag = T.concatenate([-y_imag, y_imag[:, :, n_output-2:0:-1]], axis=2)
            y_sym = T.concatenate([y_sym_real, y_sym_imag], axis=2)
            y_xform = T.dot(y_sym, cost_transform)
            # apply synthesis window
            y_xform = y_xform*cost_weight.dimshuffle('x', 'x', 0)
            mse = (y_xform-yest_xform)**2
            cost_steps = T.mean(mse*ymask[:, :, 0].dimshuffle(0, 1, 'x'), axis=2)
        cost = cost_steps.mean()
        accuracy = acc_steps.mean()

    if (loss_function == 'CE_of_sum'):
        yest = T.sum(lin_output, axis=0)  # sum over time_steps, yest is Nseq x n_output
        yest_softmax = T.nnet.softmax(yest)
        cost = T.nnet.categorical_crossentropy(yest_softmax, y[0, :]).mean()
        accuracy = T.eq(T.argmax(yest, axis=-1), y[0, :]).mean(dtype=theano.config.floatX)

    if flag_return_lin_output:
        costs = [cost, accuracy, lin_output]
        if flag_return_hidden_states:
            costs = costs + [hidden_states]
        # placeholder for a local NMSE diagnostic (original computation disabled)
        nmse_local = theano.shared(np.float32(0.0))
        costs = costs + [nmse_local]
        costs = costs + [cost_steps]
    else:
        costs = [cost, accuracy]

    if flag_use_mask:
        return [x, y, ymask], parameters, costs
    else:
        return [x, y], parameters, costs
def Givens_RNN(n_input, n_hidden, n_output, input_type='real', out_every_t=False, loss_function='CE', output_type='real', fidx=None, flag_return_lin_output=False, flag_useGivensForLoop=False):
    """Build a uRNN whose transition matrix W is a full composition of Givens rotations.

    Composes {n_hidden choose 2} Givens rotations to form W, which is guaranteed
    to cover the entire unitary group U(n_hidden)
    [K. Zyczkowski, M. Kus, "Random Unitary Matrices", 1994].

    Returns (inputs, parameters, costs) in the same format as complex_RNN.
    """
    np.random.seed(1234)
    rng = np.random.RandomState(1234)

    # Initialize parameters: V, U, hidden_bias, out_bias, Givens params, h_0
    if input_type == 'complex':
        V = initialize_matrix(n_input, 2*n_hidden, 'V', rng)
        Vim = T.concatenate([(-1)*V[:, n_hidden:], V[:, :n_hidden]], axis=1)  # [-V_I, V_R]
        Vaug = T.concatenate([V, Vim], axis=0)  # [V_R, V_I; -V_I, V_R]
    else:
        V = initialize_matrix(n_input, 2*n_hidden, 'V', rng)
    if output_type == 'complex':
        U = initialize_matrix(2 * n_hidden, n_output, 'U', rng)
        Uim = T.concatenate([(-1)*U[n_hidden:, :], U[:n_hidden, :]], axis=0)  # [-U_I; U_R]
        Uaug = T.concatenate([U, Uim], axis=1)  # [U_R, U_I; -U_I, U_R]
    else:
        U = initialize_matrix(2 * n_hidden, n_output, 'U', rng)
    hidden_bias = theano.shared(np.asarray(rng.uniform(low=-0.01,
                                                       high=0.01,
                                                       size=(n_hidden,)),
                                           dtype=theano.config.floatX),
                                name='hidden_bias')
    if output_type == 'complex':
        out_bias = theano.shared(np.zeros((2*n_output,), dtype=theano.config.floatX), name='out_bias')
    else:
        out_bias = theano.shared(np.zeros((n_output,), dtype=theano.config.floatX), name='out_bias')

    # BUGFIX: use integer division; '/' yields a float on Python 3, which
    # numpy rejects in size=(...) and permutation(...)
    Nchoose2 = (n_hidden * (n_hidden - 1)) // 2
    gphipsi = theano.shared(np.asarray(rng.uniform(low=-np.pi,
                                                   high=np.pi,
                                                   size=(Nchoose2, 1, 2)),
                                       dtype=theano.config.floatX),
                            name='gphipsi')
    # chi drawn so the rotations are Haar-distributed on U(n_hidden)
    gchi = theano.shared(np.asarray(np.arccos(rng.uniform(low=0,
                                                          high=1,
                                                          size=(Nchoose2, 1, 1))),
                                    dtype=theano.config.floatX),
                         name='gchi')
    # build indices for Givens rotations: one (ig1, ig2) pair per rotation
    gidx = np.zeros((Nchoose2, 1, 2), dtype=np.int32)
    ig = 0
    for ig1 in range(0, n_hidden):
        for ig2 in range(ig1+1, n_hidden):
            gidx[ig, 0, :] = np.reshape([ig1, ig2], (1, 1, 2))
            ig = ig + 1

    bucket = np.sqrt(3. / 2 / n_hidden)
    h_0_size = (1, 2*n_hidden)
    h_0 = theano.shared(np.asarray(rng.uniform(low=-bucket,
                                               high=bucket,
                                               size=h_0_size),
                                   dtype=theano.config.floatX),
                        name='h_0')
    parameters = [V, U, hidden_bias, out_bias, h_0, gphipsi, gchi]

    x, y = initialize_data_nodes(loss_function, input_type, out_every_t)
    swap_re_im = np.concatenate((np.arange(n_hidden, 2*n_hidden), np.arange(n_hidden)))

    # define the recurrence used by theano.scan
    def recurrence(x_t, y_t, h_prev, cost_prev, acc_prev, V, hidden_bias, out_bias, U, gphipsi, gchi, gidx):
        # h_prev is nutt x 2*n_hidden
        # Compute hidden linear transform W h_{t-1} by applying each Givens
        # rotation in sequence
        if flag_useGivensForLoop:
            # for-loop method: apply each rotation to its 4 affected columns
            # (real and imaginary parts of coordinates ig1 and ig2)
            hidden_lin_output = h_prev
            ig = 0  # absolute matrix index
            for ig1 in range(0, n_hidden):
                for ig2 in range(ig1+1, n_hidden):
                    hidden_lin_output = T.set_subtensor(hidden_lin_output[:, [ig1, ig2, ig1+n_hidden, ig2+n_hidden]],
                                                        times_givens(hidden_lin_output[:, [ig1, ig2, ig1+n_hidden, ig2+n_hidden]],
                                                                     2,
                                                                     T.reshape(T.concatenate([gphipsi[ig, :], gchi[ig, :]], axis=1), [3]),
                                                                     np.asarray([1, 2], dtype=np.int32)))
                    # BUGFIX: advance to the next rotation's parameters; the
                    # original never incremented ig, so every rotation used
                    # gphipsi[0]/gchi[0]
                    ig = ig + 1
        else:
            # scan method for composing Givens rotations
            givens_steps = h_prev  # output of this inner scan should be I x 2*n_hidden
            givens_outputs, updates = theano.scan(fn=lambda gphipsi,
                                                            gchi,
                                                            gidx,
                                                            Gh_prev:
                                                  times_givens(Gh_prev,
                                                               n_hidden,
                                                               T.reshape(T.concatenate([gphipsi, gchi], axis=1), [3]),
                                                               T.reshape(gidx, [2])),
                                                  sequences=[gphipsi, gchi, gidx],
                                                  outputs_info=[givens_steps])
            # output of composition of Givens rotations:
            hidden_lin_output = T.reshape(givens_outputs[-1, :, :], (givens_outputs.shape[1], givens_outputs.shape[2]))

        # Compute data linear transform
        if loss_function == 'CE':
            # inputs are categorical, so just use them as indices into V
            data_lin_output = V[T.cast(x_t, 'int32')]
        else:
            # second dimension of real-valued x_t should be of size n_input, first
            # dimension of V should be of size n_input (or augmented form)
            data_lin_output = T.dot(x_t, V)

        # Total linear output
        lin_output = hidden_lin_output + data_lin_output

        # Apply non-linearity ----------------------------
        # scale RELU nonlinearity
        modulus = T.sqrt(lin_output**2 + lin_output[:, swap_re_im]**2)
        rescale = T.maximum(modulus + T.tile(hidden_bias, [2]).dimshuffle('x', 0), 0.) / (modulus + 1e-5)
        h_t = lin_output * rescale

        if out_every_t:
            lin_output = T.dot(h_t, U) + out_bias.dimshuffle('x', 0)
            cost_t, acc_t = compute_cost_t(lin_output, loss_function, y_t)
        else:
            cost_t = theano.shared(np.float32(0.0))
            acc_t = theano.shared(np.float32(0.0))

        return h_t, cost_t, acc_t

    # compute hidden states
    # h_0_batch should be n_utt x 2*n_hidden, since scan goes over first dimension
    # of x, which is the maximum STFT length in frames
    h_0_batch = T.tile(h_0, [x.shape[1], 1])
    if input_type == 'complex' and output_type == 'complex':
        # pass in augmented input and output transformations
        non_sequences = [Vaug, hidden_bias, out_bias, Uaug, gphipsi, gchi, gidx]
    elif input_type == 'complex':
        non_sequences = [Vaug, hidden_bias, out_bias, U, gphipsi, gchi, gidx]
    elif output_type == 'complex':
        non_sequences = [V, hidden_bias, out_bias, Uaug, gphipsi, gchi, gidx]
    else:
        non_sequences = [V, hidden_bias, out_bias, U, gphipsi, gchi, gidx]

    if out_every_t:
        sequences = [x, y]
    else:
        sequences = [x, T.tile(theano.shared(np.zeros((1, 1), dtype=theano.config.floatX)), [x.shape[0], 1, 1])]

    outputs_info = [h_0_batch, theano.shared(np.float32(0.0)), theano.shared(np.float32(0.0))]

    [hidden_states, cost_steps, acc_steps], updates = theano.scan(fn=recurrence,
                                                                  sequences=sequences,
                                                                  non_sequences=non_sequences,
                                                                  outputs_info=outputs_info)

    if not out_every_t:
        lin_output = T.dot(hidden_states[-1, :, :], U) + out_bias.dimshuffle('x', 0)
        costs = compute_cost_t(lin_output, loss_function, y)
        # BUGFIX: extract scalar cost/accuracy so the rebuild of `costs`
        # below does not raise NameError in this branch
        cost = costs[0]
        accuracy = costs[1]
    else:
        cost = cost_steps.mean()
        accuracy = acc_steps.mean()

    if flag_return_lin_output:
        if output_type == 'complex':
            lin_outputs = T.dot(hidden_states, Uaug) + out_bias.dimshuffle('x', 0)
        elif output_type == 'real':
            lin_outputs = T.dot(hidden_states, U) + out_bias.dimshuffle('x', 0)
        costs = [cost, accuracy, lin_outputs]
    else:
        costs = [cost, accuracy]

    return [x, y], parameters, costs
def cue_RNN(n_input, n_hidden, n_output, input_type='real', out_every_t=False, loss_function='CE', n_reflections=None, flag_telescope=True):
# composes n_reflections Householder reflection matrices to make W, defaults to telescoping
# Householder matrices, which is related to the subgroup algorithm.
if n_reflections is None:
# use n_hidden reflections by default (samples entire unitary group)
n_reflections=n_hidden
np.random.seed(1234)
rng = np.random.RandomState(1234)
# Initialize parameters: theta, V_re, V_im, hidden_bias, U, out_bias, h_0
V = initialize_matrix(n_input, 2*n_hidden, 'V', rng)
U = initialize_matrix(2 * n_hidden, n_output, 'U', rng)
hidden_bias = theano.shared(np.asarray(rng.uniform(low=-0.01,
high=0.01,
size=(n_hidden,)),
dtype=theano.config.floatX),
name='hidden_bias')
reflection = initialize_matrix(n_reflections, 2*n_hidden, 'reflection', rng)
out_bias = theano.shared(np.zeros((n_output,), dtype=theano.config.floatX), name='out_bias')
bucket = np.sqrt(3. / 2 / n_hidden)
h_0 = theano.shared(np.asarray(rng.uniform(low=-bucket,
high=bucket,
size=(1, 2 * n_hidden)),
dtype=theano.config.floatX),
name='h_0')
parameters = [V, U, hidden_bias, reflection, out_bias, h_0]
x, y = initialize_data_nodes(loss_function, input_type, out_every_t)
swap_re_im = np.concatenate((np.arange(n_hidden, 2*n_hidden), np.arange(n_hidden)))
# define the recurrence used by theano.scan
def recurrence(x_t, y_t, h_prev, cost_prev, acc_prev, V, hidden_bias, out_bias, U):
# Compute hidden linear transform
# def apply_reflection(ireflection, rinput, n_hidden, reflection):
# return times_reflection(rinput, n_hidden, reflection[ireflection,:])
#
# outputs_info=[h_prev]
# sequences=[np.arange(n_reflections)]
# non_sequences=[n_hidden,reflection]
#
# hidden_lin_output = theano.scan(fn=apply_reflection,
# outputs_info=outputs_info,
# sequences=sequences,
# non_sequences=non_sequences)
# hidden_lin_output = hidden_lin_output[-1]
step=h_prev
#for ii in range(0,n_reflections):
# step=times_reflection(step, n_hidden, reflection[ii,:])
for ii in range(n_hidden,n_hidden-n_reflections,-1):
if flag_telescope:
step=times_reflection_sub(step, n_hidden, ii, reflection[ii-1,:])
else:
step=times_reflection(step, n_hidden, reflection[ii-1,:])
hidden_lin_output = step
# Compute data linear transform
if loss_function == 'CE':
data_lin_output = V[T.cast(x_t, 'int32')]
else:
data_lin_output = T.dot(x_t, V)
# Total linear output
lin_output = hidden_lin_output + data_lin_output
# Apply non-linearity ----------------------------
# scale RELU nonlinearity
modulus = T.sqrt(lin_output**2 + lin_output[:, swap_re_im]**2)
rescale = T.maximum(modulus + T.tile(hidden_bias, [2]).dimshuffle('x', 0), 0.) / (modulus + 1e-5)
h_t = lin_output * rescale
if out_every_t:
lin_output = T.dot(h_t, U) + out_bias.dimshuffle('x', 0)
cost_t, acc_t = compute_cost_t(lin_output, loss_function, y_t)
else:
cost_t = theano.shared(np.float32(0.0))
acc_t = theano.shared(np.float32(0.0))
return h_t, cost_t, acc_t
# compute hidden states
h_0_batch = T.tile(h_0, [x.shape[1], 1])
non_sequences = [V, hidden_bias, out_bias, U]
if out_every_t:
sequences = [x, y]
else:
sequences = [x, T.tile(theano.shared(np.zeros((1,1), dtype=theano.config.floatX)), [x.shape[0], 1, 1])]
outputs_info=[h_0_batch, theano.shared(np.float32(0.0)), theano.shared(np.float32(0.0))]
[hidden_states, cost_steps, acc_steps], updates = theano.scan(fn=recurrence,
sequences=sequences,
non_sequences=non_sequences,
outputs_info=outputs_info)
if not out_every_t:
lin_output = T.dot(hidden_states[-1,:,:], U) + out_bias.dimshuffle('x', 0)
costs = compute_cost_t(lin_output, loss_function, y)
else:
cost = cost_steps.mean()
accuracy = acc_steps.mean()
costs = [cost, accuracy]
return [x, y], parameters, costs
| 47.124511
| 660
| 0.568551
| 10,057
| 72,289
| 3.842896
| 0.052401
| 0.034232
| 0.03224
| 0.033922
| 0.811581
| 0.768656
| 0.735433
| 0.695974
| 0.663061
| 0.642983
| 0
| 0.023977
| 0.309411
| 72,289
| 1,533
| 661
| 47.155251
| 0.75019
| 0.148847
| 0
| 0.619
| 0
| 0
| 0.018395
| 0
| 0
| 0
| 0
| 0.000652
| 0
| 1
| 0.027
| false
| 0
| 0.004
| 0.001
| 0.06
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
81eac552ae8c128bc27d4dfac1f43ad682e8de63
| 84
|
py
|
Python
|
booksapi/books/apps.py
|
Anujangalapalli/micropythonapi
|
2d5f779be2e00e3009ca0c4902d9cf50be1ecee5
|
[
"MIT"
] | null | null | null |
booksapi/books/apps.py
|
Anujangalapalli/micropythonapi
|
2d5f779be2e00e3009ca0c4902d9cf50be1ecee5
|
[
"MIT"
] | 5
|
2020-02-11T23:16:35.000Z
|
2020-07-17T20:20:45.000Z
|
booksapi/books/apps.py
|
Anujangalapalli/micropythonapi
|
2d5f779be2e00e3009ca0c4902d9cf50be1ecee5
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class BooksConfig(AppConfig):
name = 'books'
| 16.8
| 33
| 0.75
| 10
| 84
| 6.3
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 84
| 4
| 34
| 21
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0.059524
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
81f7572e6b63241d6dd04999ebc60e04614d802b
| 153
|
py
|
Python
|
pk1/clouds/pagination.py
|
cas-bigdatalab/scispace
|
5099c1b5716e21172cd2b914b0d9389b2a0415aa
|
[
"Apache-2.0"
] | 32
|
2019-07-08T06:09:13.000Z
|
2021-03-14T06:32:02.000Z
|
pk1/clouds/pagination.py
|
cas-bigdatalab/scispace
|
5099c1b5716e21172cd2b914b0d9389b2a0415aa
|
[
"Apache-2.0"
] | 12
|
2018-11-15T01:36:07.000Z
|
2019-01-22T04:37:29.000Z
|
pk1/clouds/pagination.py
|
cas-bigdatalab/scispace
|
5099c1b5716e21172cd2b914b0d9389b2a0415aa
|
[
"Apache-2.0"
] | 9
|
2019-07-12T09:01:08.000Z
|
2020-01-05T13:49:25.000Z
|
from rest_framework.pagination import PageNumberPagination
class PageSizeNumberPagination(PageNumberPagination):
page_size_query_param = 'page_size'
| 38.25
| 58
| 0.869281
| 15
| 153
| 8.533333
| 0.8
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084967
| 153
| 4
| 59
| 38.25
| 0.914286
| 0
| 0
| 0
| 0
| 0
| 0.058442
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
81f995f90dcd5fed4ac851a6c4459a629c04ecb9
| 638
|
py
|
Python
|
openslides_backend/shared/interfaces/services.py
|
FinnStutzenstein/openslides-backend
|
fffc152f79d3446591e07a6913d9fdf30b46f577
|
[
"MIT"
] | null | null | null |
openslides_backend/shared/interfaces/services.py
|
FinnStutzenstein/openslides-backend
|
fffc152f79d3446591e07a6913d9fdf30b46f577
|
[
"MIT"
] | null | null | null |
openslides_backend/shared/interfaces/services.py
|
FinnStutzenstein/openslides-backend
|
fffc152f79d3446591e07a6913d9fdf30b46f577
|
[
"MIT"
] | null | null | null |
from typing import Protocol
from ...services.auth.interface import AuthenticationService
from ...services.datastore.interface import DatastoreService
from ...services.media.interface import MediaService
from ...services.permission.interface import PermissionService
class Services(Protocol): # pragma: no cover
"""
Interface for service container used for dependency injection.
"""
def authentication(self) -> AuthenticationService:
pass
def permission(self) -> PermissionService:
pass
def datastore(self) -> DatastoreService:
pass
def media(self) -> MediaService:
pass
| 25.52
| 66
| 0.724138
| 62
| 638
| 7.451613
| 0.451613
| 0.103896
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.194357
| 638
| 24
| 67
| 26.583333
| 0.898833
| 0.125392
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0.285714
| 0.357143
| 0
| 0.714286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 4
|
c3068a0061f0156528f30bf54d9153beaaa8fd4c
| 15,701
|
py
|
Python
|
impc_etl/workflow/main.py
|
ficolo/impc-etl
|
3ca0fadaaa2b6e5d6fc424f949a9faa7680cd5f5
|
[
"Apache-2.0"
] | 4
|
2021-04-14T09:28:51.000Z
|
2022-02-07T10:52:14.000Z
|
impc_etl/workflow/main.py
|
ficolo/impc-etl
|
3ca0fadaaa2b6e5d6fc424f949a9faa7680cd5f5
|
[
"Apache-2.0"
] | 85
|
2018-10-30T10:49:28.000Z
|
2022-03-25T13:51:31.000Z
|
impc_etl/workflow/main.py
|
ficolo/impc-etl
|
3ca0fadaaa2b6e5d6fc424f949a9faa7680cd5f5
|
[
"Apache-2.0"
] | 7
|
2018-10-30T11:36:57.000Z
|
2021-07-15T15:36:14.000Z
|
from typing import Union
from luigi.contrib.hdfs import HdfsTarget
from impc_etl.jobs.load.impc_api.impc_api_mapper import (
ApiSpecimenMapper,
ApiExperimentMapper,
ApiObservationMapper,
)
from impc_etl.jobs.load.impc_api.impc_api_pg_loader import ApiPostgreSQLLoader
from impc_etl.workflow.load import *
from impc_etl.jobs.extract.colony_tracking_extractor import *
from impc_etl.jobs.extract.gene_production_status_extractor import (
GeneProductionStatusExtractor,
)
from impc_etl.jobs.load.impc_api.impc_gene_bundle_mapper import ImpcGeneBundleMapper
from impc_etl.jobs.load.impc_api.impc_statistical_results_bundle_mapper import (
ImpcStatsBundleMapper,
)
class ImpcEtl(luigi.Task):
dcc_xml_path = luigi.Parameter()
imits_colonies_tsv_path = luigi.Parameter()
imits_alleles_tsv_path = luigi.Parameter()
mgi_allele_input_path = luigi.Parameter()
mgi_strain_input_path = luigi.Parameter()
ontology_input_path = luigi.Parameter()
output_path = luigi.Parameter()
def requires(self):
return [
ObservationsMapper(
dcc_xml_path=self.dcc_xml_path,
imits_colonies_tsv_path=self.imits_colonies_tsv_path,
output_path=self.output_path,
mgi_strain_input_path=self.mgi_strain_input_path,
mgi_allele_input_path=self.mgi_allele_input_path,
ontology_input_path=self.ontology_input_path,
)
]
class ImpcSolrCores(luigi.Task):
openstats_jdbc_connection = luigi.Parameter()
openstats_db_user = luigi.Parameter()
openstats_db_password = luigi.Parameter()
data_release_version = luigi.Parameter()
use_cache = luigi.Parameter()
dcc_xml_path = luigi.Parameter()
imits_colonies_tsv_path = luigi.Parameter()
imits_alleles_tsv_path = luigi.Parameter()
imits_product_tsv_path = luigi.Parameter()
mgi_allele_input_path = luigi.Parameter()
mgi_strain_input_path = luigi.Parameter()
mgi_gene_pheno_input_path = luigi.Parameter()
mgi_homologene_input_path = luigi.Parameter()
mgi_mrk_list_input_path = luigi.Parameter()
ontology_input_path = luigi.Parameter()
emap_emapa_csv_path = luigi.Parameter()
emapa_metadata_csv_path = luigi.Parameter()
ma_metadata_csv_path = luigi.Parameter()
mpath_metadata_csv_path = luigi.Parameter()
impc_search_index_csv_path = luigi.Parameter()
mp_relation_augmented_metadata_table_csv_path = luigi.Parameter()
threei_stats_results_csv = luigi.Parameter()
embryo_data_json_path = luigi.Parameter()
omero_ids_csv_path = luigi.Parameter()
http_proxy = luigi.Parameter()
output_path = luigi.Parameter()
def requires(self):
return [
PipelineCoreLoader(
dcc_xml_path=self.dcc_xml_path,
imits_colonies_tsv_path=self.imits_colonies_tsv_path,
imits_alleles_tsv_path=self.imits_alleles_tsv_path,
output_path=self.output_path,
mgi_strain_input_path=self.mgi_strain_input_path,
mgi_allele_input_path=self.mgi_allele_input_path,
ontology_input_path=self.ontology_input_path,
emap_emapa_csv_path=self.emap_emapa_csv_path,
emapa_metadata_csv_path=self.emapa_metadata_csv_path,
ma_metadata_csv_path=self.ma_metadata_csv_path,
),
GenotypePhenotypeCoreLoader(
openstats_jdbc_connection=self.openstats_jdbc_connection,
openstats_db_user=self.openstats_db_user,
openstats_db_password=self.openstats_db_password,
data_release_version=self.data_release_version,
use_cache=self.use_cache,
dcc_xml_path=self.dcc_xml_path,
imits_colonies_tsv_path=self.imits_colonies_tsv_path,
imits_alleles_tsv_path=self.imits_alleles_tsv_path,
mgi_strain_input_path=self.mgi_strain_input_path,
mgi_allele_input_path=self.mgi_allele_input_path,
ontology_input_path=self.ontology_input_path,
emap_emapa_csv_path=self.emap_emapa_csv_path,
emapa_metadata_csv_path=self.emapa_metadata_csv_path,
ma_metadata_csv_path=self.ma_metadata_csv_path,
mpath_metadata_csv_path=self.mpath_metadata_csv_path,
threei_stats_results_csv=self.threei_stats_results_csv,
raw_data_in_output="exclude",
http_proxy=self.http_proxy,
output_path=self.output_path,
),
StatsResultsCoreLoader(),
MGIPhenotypeCoreLoader(
mgi_allele_input_path=self.mgi_allele_input_path,
mgi_gene_pheno_input_path=self.mgi_gene_pheno_input_path,
ontology_input_path=self.ontology_input_path,
output_path=self.output_path,
),
MPCoreLoader(
dcc_xml_path=self.dcc_xml_path,
imits_colonies_tsv_path=self.imits_colonies_tsv_path,
imits_alleles_tsv_path=self.imits_alleles_tsv_path,
mgi_strain_input_path=self.mgi_strain_input_path,
mgi_allele_input_path=self.mgi_allele_input_path,
ontology_input_path=self.ontology_input_path,
emap_emapa_csv_path=self.emap_emapa_csv_path,
emapa_metadata_csv_path=self.emapa_metadata_csv_path,
ma_metadata_csv_path=self.ma_metadata_csv_path,
impc_search_index_csv_path=self.impc_search_index_csv_path,
mp_relation_augmented_metadata_table_csv_path=self.mp_relation_augmented_metadata_table_csv_path,
output_path=self.output_path,
),
GeneCoreLoader(
imits_tsv_path=self.imits_alleles_tsv_path,
embryo_data_json_path=self.embryo_data_json_path,
mgi_homologene_input_path=self.mgi_homologene_input_path,
mgi_mrk_list_input_path=self.mgi_mrk_list_input_path,
output_path=self.output_path,
dcc_xml_path=self.dcc_xml_path,
mgi_strain_input_path=self.mgi_strain_input_path,
mgi_allele_input_path=self.mgi_allele_input_path,
ontology_input_path=self.ontology_input_path,
emap_emapa_csv_path=self.emap_emapa_csv_path,
emapa_metadata_csv_path=self.emapa_metadata_csv_path,
ma_metadata_csv_path=self.ma_metadata_csv_path,
mpath_metadata_csv_path=self.mpath_metadata_csv_path,
threei_stats_results_csv=self.threei_stats_results_csv,
openstats_jdbc_connection=self.openstats_jdbc_connection,
openstats_db_user=self.openstats_db_user,
openstats_db_password=self.openstats_db_password,
data_release_version=self.data_release_version,
use_cache=self.use_cache,
imits_colonies_tsv_path=self.imits_colonies_tsv_path,
imits_alleles_tsv_path=self.imits_alleles_tsv_path,
),
Allele2Extractor(
imits_tsv_path=self.imits_alleles_tsv_path, output_path=self.output_path
),
ProductExtractor(
imits_tsv_path=self.imits_product_tsv_path, output_path=self.output_path
),
ImpcImagesCoreLoader(
omero_ids_csv_path=self.omero_ids_csv_path,
dcc_xml_path=self.dcc_xml_path,
imits_colonies_tsv_path=self.imits_colonies_tsv_path,
imits_alleles_tsv_path=self.imits_alleles_tsv_path,
output_path=self.output_path,
mgi_strain_input_path=self.mgi_strain_input_path,
mgi_allele_input_path=self.mgi_allele_input_path,
ontology_input_path=self.ontology_input_path,
emap_emapa_csv_path=self.emap_emapa_csv_path,
emapa_metadata_csv_path=self.emapa_metadata_csv_path,
ma_metadata_csv_path=self.ma_metadata_csv_path,
),
]
class ImpcStatPacketLoader(luigi.Task):
openstats_jdbc_connection = luigi.Parameter()
openstats_db_user = luigi.Parameter()
openstats_db_password = luigi.Parameter()
data_release_version = luigi.Parameter()
use_cache = luigi.Parameter()
dcc_xml_path = luigi.Parameter()
imits_colonies_tsv_path = luigi.Parameter()
imits_alleles_tsv_path = luigi.Parameter()
mgi_allele_input_path = luigi.Parameter()
mgi_strain_input_path = luigi.Parameter()
ontology_input_path = luigi.Parameter()
emap_emapa_csv_path = luigi.Parameter()
emapa_metadata_csv_path = luigi.Parameter()
ma_metadata_csv_path = luigi.Parameter()
mpath_metadata_csv_path = luigi.Parameter()
threei_stats_results_csv = luigi.Parameter()
http_proxy = luigi.Parameter()
output_path = luigi.Parameter()
def requires(self):
return [StatsResultsCoreLoader()]
class ImpcWindowedDataLoader(luigi.Task):
openstats_jdbc_connection = luigi.Parameter()
openstats_db_user = luigi.Parameter()
openstats_db_password = luigi.Parameter()
data_release_version = luigi.Parameter()
use_cache = luigi.Parameter()
dcc_xml_path = luigi.Parameter()
imits_colonies_tsv_path = luigi.Parameter()
imits_alleles_tsv_path = luigi.Parameter()
mgi_allele_input_path = luigi.Parameter()
mgi_strain_input_path = luigi.Parameter()
ontology_input_path = luigi.Parameter()
emap_emapa_csv_path = luigi.Parameter()
emapa_metadata_csv_path = luigi.Parameter()
ma_metadata_csv_path = luigi.Parameter()
mpath_metadata_csv_path = luigi.Parameter()
threei_stats_results_csv = luigi.Parameter()
http_proxy = luigi.Parameter()
output_path = luigi.Parameter()
def requires(self):
return [
StatsResultsCoreLoader(
raw_data_in_output="include",
extract_windowed_data="true",
)
]
class ImpcDataDrivenAnnotationLoader(SparkSubmitTask):
app = "impc_etl/jobs/load/data_driven_annotation.py"
name = "IMPC_Data_Driven_Annotation_Loader"
output_path = luigi.Parameter()
def requires(self):
return [
ObservationsMapper(
dcc_xml_path=self.dcc_xml_path,
imits_colonies_tsv_path=self.imits_colonies_tsv_path,
output_path=self.output_path,
mgi_strain_input_path=self.mgi_strain_input_path,
mgi_allele_input_path=self.mgi_allele_input_path,
ontology_input_path=self.ontology_input_path,
)
]
def app_options(self):
return [self.input()[0].path, self.output().path]
def output(self):
self.output_path = (
self.output_path + "/"
if not self.output_path.endswith("/")
else self.output_path
)
return ImpcConfig().get_target(
f"{self.output_path}annotated_observations_parquet"
)
class ImpcIndexDaily(luigi.Task):
name = "IMPC_Index_Daily"
imits_product_tsv_path = luigi.Parameter()
parquet_path = luigi.Parameter()
solr_path = luigi.Parameter()
local_path = luigi.Parameter()
remote_host = luigi.Parameter()
def requires(self):
return [
ProductExtractor(
imits_tsv_path=self.imits_product_tsv_path,
output_path=self.parquet_path,
),
GeneCoreLoader(),
]
def run(self):
tasks = []
for dependency in self.input():
tasks.append(
ImpcMergeIndex(
remote_host=self.remote_host,
parquet_path=dependency.path,
solr_path=self.solr_path,
local_path=self.local_path,
)
)
yield tasks
class ImpcCleanDaily(luigi.Task):
name = "IMPC_Clean_Daily"
imits_product_tsv_path = luigi.Parameter()
parquet_path = luigi.Parameter()
solr_path = luigi.Parameter()
local_path = luigi.Parameter()
remote_host = luigi.Parameter()
def _delele_target_if_exists(
self, target: Union[luigi.LocalTarget, HdfsTarget], hdfs=False
):
if target.exists():
print(target.path)
if hdfs:
target.remove(skip_trash=True)
else:
target.remove()
def run(self):
index_daily_task = ImpcIndexDaily(
imits_product_tsv_path=self.imits_product_tsv_path,
remote_host=self.remote_host,
parquet_path=self.parquet_path,
solr_path=self.solr_path,
local_path=self.local_path,
)
for index_daily_dependency in index_daily_task.requires():
impc_merge_index_task = ImpcMergeIndex(
remote_host=self.remote_host,
parquet_path=index_daily_dependency.output().path,
solr_path=self.solr_path,
local_path=self.local_path,
)
impc_copy_index_task = impc_merge_index_task.requires()[0]
impc_parquet_to_solr_task = impc_copy_index_task.requires()[0]
self._delele_target_if_exists(index_daily_dependency.output(), hdfs=True)
self._delele_target_if_exists(impc_merge_index_task.output())
self._delele_target_if_exists(impc_copy_index_task.output())
self._delele_target_if_exists(impc_parquet_to_solr_task.output(), hdfs=True)
self._delele_target_if_exists(Allele2Extractor().output(), hdfs=True)
self._delele_target_if_exists(GeneExtractor().output(), hdfs=True)
self._delele_target_if_exists(AlleleExtractor().output(), hdfs=True)
class ImpcIndexDataRelease(luigi.Task):
name = "IMPC_Index_Data_Release"
dcc_xml_path = luigi.Parameter()
imits_colonies_tsv_path = luigi.Parameter()
output_path = luigi.Parameter()
mgi_strain_input_path = luigi.Parameter()
mgi_allele_input_path = luigi.Parameter()
ontology_input_path = luigi.Parameter()
parquet_path = luigi.Parameter()
solr_path = luigi.Parameter()
local_path = luigi.Parameter()
remote_host = luigi.Parameter()
def requires(self):
return [
ObservationsMapper(),
StatsResultsCoreLoader(),
GeneCoreLoader(),
Allele2Extractor(),
GenotypePhenotypeCoreLoader(),
MPCoreLoader(),
PipelineCoreLoader(),
ProductExtractor(),
MGIPhenotypeCoreLoader(),
ImpcImagesCoreLoader(),
]
def run(self):
tasks = []
for dependency in self.input():
tasks.append(
ImpcMergeIndex(
remote_host=self.remote_host,
parquet_path=dependency.path,
solr_path=self.solr_path,
local_path=self.local_path,
)
)
if "stats_results" in dependency.path:
tasks.append(
ImpcMergeIndex(
remote_host=self.remote_host,
parquet_path=dependency.path + "_raw_data",
solr_path=self.solr_path,
local_path=self.local_path,
)
)
yield tasks
class ImpcApiDatasource(luigi.Task):
name = "IMPC_Generate_API_Datasource"
def requires(self):
return [ApiPostgreSQLLoader()]
| 40.466495
| 113
| 0.663652
| 1,789
| 15,701
| 5.348239
| 0.090553
| 0.079431
| 0.124164
| 0.039507
| 0.782818
| 0.746237
| 0.736204
| 0.708821
| 0.674436
| 0.645067
| 0
| 0.00052
| 0.264697
| 15,701
| 387
| 114
| 40.571059
| 0.828237
| 0
| 0
| 0.639205
| 0
| 0
| 0.015986
| 0.011273
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039773
| false
| 0.014205
| 0.025568
| 0.025568
| 0.392045
| 0.002841
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c306e7064d943621b80fe537a57e8735fed2aacb
| 1,950
|
py
|
Python
|
data/users/apptoken.py
|
anwarchk/quay
|
23c5120790c619174e7d36784ca5aab7f4eece5c
|
[
"Apache-2.0"
] | 1
|
2019-11-22T21:10:08.000Z
|
2019-11-22T21:10:08.000Z
|
data/users/apptoken.py
|
anwarchk/quay
|
23c5120790c619174e7d36784ca5aab7f4eece5c
|
[
"Apache-2.0"
] | 20
|
2019-12-26T17:32:34.000Z
|
2022-03-21T22:18:06.000Z
|
data/users/apptoken.py
|
anwarchk/quay
|
23c5120790c619174e7d36784ca5aab7f4eece5c
|
[
"Apache-2.0"
] | 1
|
2020-05-31T16:28:40.000Z
|
2020-05-31T16:28:40.000Z
|
import logging
from data import model
from oauth.loginmanager import OAuthLoginManager
from oauth.oidc import PublicKeyLoadException
from util.security.jwtutil import InvalidTokenError
logger = logging.getLogger(__name__)
class AppTokenInternalAuth(object):
""" Forces all internal credential login to go through an app token, by disabling all other
access.
"""
@property
def supports_fresh_login(self):
# Since there is no password.
return False
@property
def federated_service(self):
return None
@property
def requires_distinct_cli_password(self):
# Since there is no supported "password".
return False
def has_password_set(self, username):
# Since there is no supported "password".
return False
@property
def supports_encrypted_credentials(self):
# Since there is no supported "password".
return False
def verify_credentials(self, username_or_email, id_token):
return (None, 'An application specific token is required to login')
def verify_and_link_user(self, username_or_email, password):
return self.verify_credentials(username_or_email, password)
def confirm_existing_user(self, username, password):
return self.verify_credentials(username, password)
def link_user(self, username_or_email):
return (None, 'Unsupported for this authentication system')
def get_and_link_federated_user_info(self, user_info):
return (None, 'Unsupported for this authentication system')
def query_users(self, query, limit):
return (None, '', '')
def check_group_lookup_args(self, group_lookup_args):
return (False, 'Not supported')
def iterate_group_members(self, group_lookup_args, page_size=None, disable_pagination=False):
return (None, 'Not supported')
def service_metadata(self):
return {}
def ping(self):
""" Always assumed to be working. If the DB is broken, other checks will handle it. """
return (True, None)
| 28.676471
| 95
| 0.748205
| 254
| 1,950
| 5.543307
| 0.42126
| 0.059659
| 0.034091
| 0.039773
| 0.31321
| 0.271307
| 0.171875
| 0.171875
| 0.069602
| 0.069602
| 0
| 0
| 0.172821
| 1,950
| 67
| 96
| 29.104478
| 0.872908
| 0.166667
| 0
| 0.243902
| 0
| 0
| 0.1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.365854
| false
| 0.146341
| 0.121951
| 0.341463
| 0.878049
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 4
|
c30ccff8f41257e20623606280dd0b400eb9389a
| 177
|
py
|
Python
|
docs_src/subcommands/tutorial001/main.py
|
madkinsz/typer
|
a1520dcda685220a9a796288f5eaaebd00d68845
|
[
"MIT"
] | 7,615
|
2019-12-24T13:08:20.000Z
|
2022-03-31T22:07:53.000Z
|
docs_src/subcommands/tutorial001/main.py
|
madkinsz/typer
|
a1520dcda685220a9a796288f5eaaebd00d68845
|
[
"MIT"
] | 351
|
2019-12-24T22:17:54.000Z
|
2022-03-31T15:35:08.000Z
|
docs_src/subcommands/tutorial001/main.py
|
jina-ai/typer
|
8b5e14b25ddf0dd777403015883301b17bedcee0
|
[
"MIT"
] | 360
|
2019-12-24T15:29:59.000Z
|
2022-03-30T20:33:10.000Z
|
import typer
import items
import users
app = typer.Typer()
app.add_typer(users.app, name="users")
app.add_typer(items.app, name="items")
if __name__ == "__main__":
app()
| 14.75
| 38
| 0.706215
| 27
| 177
| 4.259259
| 0.333333
| 0.208696
| 0.191304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141243
| 177
| 11
| 39
| 16.090909
| 0.756579
| 0
| 0
| 0
| 0
| 0
| 0.101695
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.375
| 0
| 0.375
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
c358f78b0d6d8d53a52db592c0540f16d66f164e
| 1,171
|
py
|
Python
|
gslib/discard_messages_queue.py
|
MingweiChen/gsutil
|
a760690c1e1b3244e59bbcaa14448a72d323f658
|
[
"Apache-2.0"
] | 1
|
2019-01-14T17:38:35.000Z
|
2019-01-14T17:38:35.000Z
|
gslib/discard_messages_queue.py
|
MingweiChen/gsutil
|
a760690c1e1b3244e59bbcaa14448a72d323f658
|
[
"Apache-2.0"
] | 1
|
2019-05-07T06:22:16.000Z
|
2019-05-07T07:03:24.000Z
|
gslib/discard_messages_queue.py
|
MingweiChen/gsutil
|
a760690c1e1b3244e59bbcaa14448a72d323f658
|
[
"Apache-2.0"
] | 1
|
2020-07-03T00:59:53.000Z
|
2020-07-03T00:59:53.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides a message queue that discards all messages."""
class DiscardMessagesQueue(object):
"""Emulates a Cloud API status queue but drops all messages.
This is useful when you want to perform some operations but not have the UI
thread display information about those ops (e.g. running a test or fetching
the public gsutil tarball object's metadata to perform a version check).
"""
# pylint: disable=invalid-name, unused-argument
def put(self, message=None, timeout=None):
pass
# pylint: enable=invalid-name, unused-argument
| 40.37931
| 77
| 0.75064
| 177
| 1,171
| 4.966102
| 0.689266
| 0.068259
| 0.029579
| 0.036405
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009288
| 0.172502
| 1,171
| 28
| 78
| 41.821429
| 0.897833
| 0.87105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.333333
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
c35b721b142fe929e8a073a4d9fc0e4bc4a575e0
| 73
|
py
|
Python
|
keras_contrib/regularizers/__init__.py
|
rgreenblatt/keras-contrib
|
46fcdb9384b3bc9399c651b2b43640aa54098e64
|
[
"MIT"
] | 11
|
2019-03-23T13:23:49.000Z
|
2022-01-20T07:57:56.000Z
|
keras_contrib/regularizers/__init__.py
|
rgreenblatt/keras-contrib
|
46fcdb9384b3bc9399c651b2b43640aa54098e64
|
[
"MIT"
] | 1
|
2021-06-18T23:07:54.000Z
|
2021-07-13T21:43:51.000Z
|
keras_contrib/regularizers/__init__.py
|
rgreenblatt/keras-contrib
|
46fcdb9384b3bc9399c651b2b43640aa54098e64
|
[
"MIT"
] | 11
|
2017-07-06T14:11:51.000Z
|
2021-08-21T23:18:20.000Z
|
from __future__ import absolute_import
from keras.regularizers import *
| 18.25
| 38
| 0.849315
| 9
| 73
| 6.333333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123288
| 73
| 3
| 39
| 24.333333
| 0.890625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
c35cb02e8dbd91aeee22cdaa4f79693a8724b5e4
| 789
|
py
|
Python
|
app/forms/venda_form.py
|
pedroferronato/gerenciamento-rural
|
5ed873caf9fdf1da2a26938b8cee57b55e7636f0
|
[
"MIT"
] | null | null | null |
app/forms/venda_form.py
|
pedroferronato/gerenciamento-rural
|
5ed873caf9fdf1da2a26938b8cee57b55e7636f0
|
[
"MIT"
] | null | null | null |
app/forms/venda_form.py
|
pedroferronato/gerenciamento-rural
|
5ed873caf9fdf1da2a26938b8cee57b55e7636f0
|
[
"MIT"
] | null | null | null |
from datetime import date
from flask_wtf import FlaskForm
from wtforms import FloatField, DateField, StringField
from wtforms.validators import DataRequired, Length
class VendaForm(FlaskForm):
data = StringField('Data da venda:', default=date.today().strftime('%d/%m/%Y'), validators=[DataRequired('Insira a data de venda')])
quantidade = FloatField('Quantidade:', validators=[DataRequired('Insira a quantidade vendida')])
valor_total = FloatField('Valor total:', validators=[DataRequired('Insira o valor total da venda, ou deixe o cálculo automático')])
desconto = StringField('Desconto:', validators=[Length(min=0, max=50, message="Por favor, não ultrapasse 50 caracteres")])
valor_unitario = FloatField('Valor unitário:', validators=[DataRequired('Insira ')])
| 60.692308
| 136
| 0.751584
| 94
| 789
| 6.276596
| 0.531915
| 0.149153
| 0.189831
| 0.098305
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007225
| 0.12294
| 789
| 13
| 137
| 60.692308
| 0.845376
| 0
| 0
| 0
| 0
| 0
| 0.283544
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.1
| 0.4
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 4
|
c35edfff29e3bec80a93f531afa7a709b20b6135
| 130
|
py
|
Python
|
geodeconstructor/__init__.py
|
ThomasVieth/geodeconstructor
|
934ab7c0a0f0b728111d33ac90812272c85310f2
|
[
"MIT"
] | null | null | null |
geodeconstructor/__init__.py
|
ThomasVieth/geodeconstructor
|
934ab7c0a0f0b728111d33ac90812272c85310f2
|
[
"MIT"
] | null | null | null |
geodeconstructor/__init__.py
|
ThomasVieth/geodeconstructor
|
934ab7c0a0f0b728111d33ac90812272c85310f2
|
[
"MIT"
] | null | null | null |
"""
"""
## library imports
from .components import Coordinate
from .history.json import *
from .history.locations import *
##
| 11.818182
| 34
| 0.7
| 14
| 130
| 6.5
| 0.642857
| 0.241758
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169231
| 130
| 11
| 35
| 11.818182
| 0.842593
| 0.115385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
c365fb437de58fbc6b4e7852be32ee1d05d33b2c
| 54
|
py
|
Python
|
modules/missing_species.py
|
axelthorstein/gene-matrix
|
0bd2bb40ead3b4109d9f407f908567b728bce56a
|
[
"Unlicense"
] | null | null | null |
modules/missing_species.py
|
axelthorstein/gene-matrix
|
0bd2bb40ead3b4109d9f407f908567b728bce56a
|
[
"Unlicense"
] | null | null | null |
modules/missing_species.py
|
axelthorstein/gene-matrix
|
0bd2bb40ead3b4109d9f407f908567b728bce56a
|
[
"Unlicense"
] | null | null | null |
def add_missing_species(missing_species, filenames):
| 27
| 52
| 0.851852
| 7
| 54
| 6.142857
| 0.714286
| 0.651163
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 54
| 2
| 53
| 27
| 0.86
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c37a3c991848c9fc0bf09028cf829e9ab77a38a2
| 4,937
|
py
|
Python
|
src/PythonMPIInterfaces/mpi_mpi4py.py
|
as1m0n/spheral
|
4d72822f56aca76d70724c543d389d15ff6ca48e
|
[
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | 19
|
2020-10-21T01:49:17.000Z
|
2022-03-15T12:29:17.000Z
|
src/PythonMPIInterfaces/mpi_mpi4py.py
|
markguozhiming/spheral
|
bbb982102e61edb8a1d00cf780bfa571835e1b61
|
[
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | 41
|
2020-09-28T23:14:27.000Z
|
2022-03-28T17:01:33.000Z
|
src/PythonMPIInterfaces/mpi_mpi4py.py
|
markguozhiming/spheral
|
bbb982102e61edb8a1d00cf780bfa571835e1b61
|
[
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | 5
|
2020-11-03T16:14:26.000Z
|
2022-01-03T19:07:24.000Z
|
#-------------------------------------------------------------------------------
# mpi
#
# This module reproduces the pyMPI interface using mpi4py.
#-------------------------------------------------------------------------------
import sys
from SpheralTestUtilities import globalFrame
# NOTE: this logic for disabling recv_mprobe seems to be necessary with newer
# mpi4py versions, since the LC MPI implementations apparently report matched_probes
# as supported, but seem to be broken.
import mpi4py
mpi4py.rc.recv_mprobe = False
# Now go on as usual...
from mpi4py import MPI
#-------------------------------------------------------------------------------
# Communicator and geometry.
#-------------------------------------------------------------------------------
comm = MPI.COMM_WORLD    # global communicator spanning every MPI rank
rank = comm.Get_rank()   # this process's rank within COMM_WORLD
procs = comm.Get_size()  # total number of MPI processes
#-------------------------------------------------------------------------------
# Define the operations.
#-------------------------------------------------------------------------------
# Re-export mpi4py's reduction operators under the pyMPI-style names
# used by reduce()/allreduce() below.
MIN = MPI.MIN
MAX = MPI.MAX
SUM = MPI.SUM
#-------------------------------------------------------------------------------
# Prepare files to keep the stdout and stderr streams in.
# The pyMPI defaults are only rank 0 writes stdout, but all
# processes write stderr.
#-------------------------------------------------------------------------------
globalscope = globalFrame().f_globals  # globals of the program's top-level frame
# Mirror the pyMPI default: only rank 0 writes stdout, so redirect every
# other rank's stdout to /dev/null inside the global scope.
if rank > 0:
    exec("""
import sys
__mpi_stdoutfile__ = open("/dev/null", "w")
sys.stdout = __mpi_stdoutfile__
""", globalscope)
#-------------------------------------------------------------------------------
# A common helper to convert vector_of_* types to lists for communication
#-------------------------------------------------------------------------------
def __listify(obj):
    """Return *obj* unchanged unless it looks like a ``vector_of_*``
    container (detected via its ``__qualname__``), in which case convert
    it to a plain list so mpi4py can pickle it for communication."""
    qualname = getattr(obj, "__qualname__", "")
    return list(obj) if "vector_of" in qualname else obj
#-------------------------------------------------------------------------------
# send
#-------------------------------------------------------------------------------
def send(obj, dest=0, tag=100):
    """Blocking send of ``obj`` to rank ``dest`` (pyMPI-style wrapper).

    ``vector_of_*`` containers are converted to lists first so they pickle.
    """
    comm.send(obj=__listify(obj), dest=dest, tag=tag)
#-------------------------------------------------------------------------------
# recv
#-------------------------------------------------------------------------------
def recv(source=0, tag=100):
    """Blocking receive from rank ``source``.

    Returns a 1-tuple ``(msg,)`` — presumably mimicking pyMPI's
    ``(message, status)`` return shape; confirm callers index ``[0]``.
    """
    return (comm.recv(source=source, tag=tag), )
#-------------------------------------------------------------------------------
# isend
#-------------------------------------------------------------------------------
def isend(obj, dest=0, tag=100):
    """Non-blocking send of ``obj`` to rank ``dest``; returns the
    mpi4py Request object for later wait/test."""
    return comm.isend(obj=__listify(obj), dest=dest, tag=tag)
#-------------------------------------------------------------------------------
# reduce
#-------------------------------------------------------------------------------
def reduce(obj, op=SUM, root=0):
    """Reduce ``obj`` across all ranks with ``op`` (default SUM).

    The result is returned only on ``root``; other ranks get None.
    """
    return comm.reduce(sendobj=__listify(obj), op=op, root=root)
#-------------------------------------------------------------------------------
# allreduce
#-------------------------------------------------------------------------------
def allreduce(obj, op=SUM):
    """Reduce ``obj`` across all ranks with ``op``; every rank gets the result."""
    return comm.allreduce(sendobj=__listify(obj), op=op)
#-------------------------------------------------------------------------------
# gather
#-------------------------------------------------------------------------------
def gather(obj, root=0):
    """Gather ``obj`` from every rank into a list on ``root``.

    Non-root ranks receive None.
    """
    return comm.gather(sendobj=__listify(obj), root=root)
#-------------------------------------------------------------------------------
# allgather
#-------------------------------------------------------------------------------
def allgather(obj):
    """Gather ``obj`` from every rank into a list available on all ranks."""
    return comm.allgather(sendobj=__listify(obj))
#-------------------------------------------------------------------------------
# bcast
#-------------------------------------------------------------------------------
def bcast(obj, root=0):
    """Broadcast ``obj`` from ``root`` to every rank; returns the value on all ranks."""
    return comm.bcast(__listify(obj), root=root)
#-------------------------------------------------------------------------------
# barrier
#-------------------------------------------------------------------------------
def barrier():
    """Block until every rank in the communicator has reached this call."""
    comm.barrier()
#-------------------------------------------------------------------------------
# synchronizeQueuedOutput
#-------------------------------------------------------------------------------
def synchronizeQueuedOutput(stdoutfile = None,
                            stderrfile = None):
    """Redirect the global stdout/stderr streams.

    Passing None for either argument restores the corresponding original
    stream; otherwise the stream is redirected to the named file.

    NOTE(review): the ``%s`` interpolation inserts the argument into the
    exec'd source *unquoted*, so callers must pass a pre-quoted string
    (e.g. ``'"out.txt"'``); a plain filename would raise NameError inside
    exec. Confirm this matches the pyMPI convention — ``%r`` would accept
    plain strings instead.
    """
    if stdoutfile == None:
        exec("import sys; sys.stdout = sys.__stdout__", globalscope)
    else:
        exec("__mpi_stdoutfile__ = open(%s, 'w'); sys.stdout = __mpi_stdoutfile__" % stdoutfile,
             globalscope)
    if stderrfile == None:
        exec("import sys; sys.stderr = sys.__stderr__", globalscope)
    else:
        exec("__mpi_stderrfile__ = open(%s, 'w'); sys.stderr = __mpi_stderrfile__" % stderrfile,
             globalscope)
    return
| 39.18254
| 96
| 0.354466
| 344
| 4,937
| 4.877907
| 0.348837
| 0.047676
| 0.040524
| 0.026818
| 0.162694
| 0.032181
| 0.032181
| 0
| 0
| 0
| 0
| 0.004965
| 0.102491
| 4,937
| 125
| 97
| 39.496
| 0.373731
| 0.607049
| 0
| 0.12963
| 0
| 0
| 0.170111
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.203704
| false
| 0
| 0.12963
| 0.12963
| 0.518519
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
5edf65ece4ba961ba4e38398140a1d65d5a1b405
| 30
|
py
|
Python
|
omnipresence/test/__init__.py
|
kxz/omnipresence
|
ffb3dbc30d36331a68e8dea3a85db6a4d2928cd7
|
[
"BSD-3-Clause"
] | null | null | null |
omnipresence/test/__init__.py
|
kxz/omnipresence
|
ffb3dbc30d36331a68e8dea3a85db6a4d2928cd7
|
[
"BSD-3-Clause"
] | 10
|
2016-04-05T04:36:15.000Z
|
2018-03-25T00:15:47.000Z
|
omnipresence/test/__init__.py
|
kxz/omnipresence
|
ffb3dbc30d36331a68e8dea3a85db6a4d2928cd7
|
[
"BSD-3-Clause"
] | null | null | null |
"""Tests for Omnipresence."""
| 15
| 29
| 0.666667
| 3
| 30
| 6.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 30
| 1
| 30
| 30
| 0.740741
| 0.766667
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
5ef3a1fe1bfc6eede8555f558976213f959cbd0b
| 32
|
py
|
Python
|
python-code.py
|
etonkou/repo-test
|
1bde3af592c8c5f5b4355086f96d7fda1ba2ecb8
|
[
"BSD-3-Clause"
] | null | null | null |
python-code.py
|
etonkou/repo-test
|
1bde3af592c8c5f5b4355086f96d7fda1ba2ecb8
|
[
"BSD-3-Clause"
] | null | null | null |
python-code.py
|
etonkou/repo-test
|
1bde3af592c8c5f5b4355086f96d7fda1ba2ecb8
|
[
"BSD-3-Clause"
] | null | null | null |
def bonjour(nom):
    """Return the given name unchanged (simple echo/identity helper).

    Fixes the original ``retrurn`` typo, which made the module a SyntaxError.
    """
    return nom
| 8
| 17
| 0.71875
| 5
| 32
| 4.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1875
| 32
| 3
| 18
| 10.666667
| 0.884615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
5efb1352515eff04c3ca1a4b03ed3883aaba8c9b
| 94
|
py
|
Python
|
controller_microservice/controller/apps.py
|
getnosleep/VirtualUnjam
|
bae08eec9756c963dab409c6e4e7397ef019cc8a
|
[
"MIT"
] | null | null | null |
controller_microservice/controller/apps.py
|
getnosleep/VirtualUnjam
|
bae08eec9756c963dab409c6e4e7397ef019cc8a
|
[
"MIT"
] | null | null | null |
controller_microservice/controller/apps.py
|
getnosleep/VirtualUnjam
|
bae08eec9756c963dab409c6e4e7397ef019cc8a
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class ControllerConfig(AppConfig):
    """Django application configuration for the ``controller`` app."""

    name = 'controller'
| 18.8
| 34
| 0.776596
| 10
| 94
| 7.3
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148936
| 94
| 4
| 35
| 23.5
| 0.9125
| 0
| 0
| 0
| 0
| 0
| 0.106383
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
6f0febeaa613a49ee08b87ee2ffa5d5dba08b84e
| 1,420
|
py
|
Python
|
FlaskORM2/app/models.py
|
lxdzz/item
|
1024c53baa51bcdc98ec7a987eb3433fc4478d00
|
[
"MIT"
] | null | null | null |
FlaskORM2/app/models.py
|
lxdzz/item
|
1024c53baa51bcdc98ec7a987eb3433fc4478d00
|
[
"MIT"
] | 3
|
2021-05-10T16:52:04.000Z
|
2022-02-13T15:33:12.000Z
|
FlaskORM2/app/models.py
|
lxdzz/item
|
1024c53baa51bcdc98ec7a987eb3433fc4478d00
|
[
"MIT"
] | null | null | null |
from app import models
class BaseModel(models.Model):
    """Common base for all models: integer primary key plus save/delete helpers.

    NOTE(review): the API used here (``Column``/``session``) looks
    SQLAlchemy-style even though it is accessed via ``models`` — confirm
    the ``models`` module actually re-exports these names.
    """
    __abstract__ = True  # declares this class abstract: inherited only, never mapped to its own table
    id = models.Column(models.Integer,primary_key = True,autoincrement=True)
    def save(self):
        """Persist this instance through a new session and commit."""
        db = models.session()
        db.add(self)
        db.commit()
    def delete(self):
        """Delete this instance through a new session and commit."""
        db = models.session()
        db.delete(self)
        db.commit()
# Table definitions
class Curriculum(BaseModel):
    """Course/curriculum record."""
    __tablename__ = "curriculum"
    c_id = models.Column(models.String(32))    # course identifier
    c_name = models.Column(models.String(32))  # course name
    c_time = models.Column(models.Date)        # course date
class User(BaseModel):
    """Application user account."""
    __tablename__="user"
    user_name=models.Column(models.String(32))      # display/login name
    user_email=models.Column(models.String(32))     # email address
    # NOTE(review): password stored as a plain String(32) — confirm it is hashed upstream.
    user_password=models.Column(models.String(32))
class Leave(BaseModel):
    """Leave (absence) request record.

    Status codes (``leave_status``):
        0 - requested
        1 - approved
        2 - rejected
        3 - leave ended (returned to work)
    """
    __tablename__="leave"
    leave_id=models.Column(models.Integer)             # id of the requesting person
    leave_name=models.Column(models.String(32))        # name of the requesting person
    leave_type=models.Column(models.String(32))        # type of leave
    leave_start_time=models.Column(models.String(32))  # start time
    leave_end_time=models.Column(models.String(32))    # end time
    leave_description=models.Column(models.Text)       # reason for the leave
    leave_phone=models.Column(models.String(32))       # contact phone number
    leave_status=models.Column(models.String(32))      # request status (see codes above)
class Picture(BaseModel):
    """Stored picture record."""
    name=models.Column(models.String(32))     # picture name
    picture=models.Column(models.String(32))  # picture path/reference — presumably; confirm against upload code
| 30.212766
| 76
| 0.690845
| 188
| 1,420
| 5.042553
| 0.329787
| 0.21519
| 0.322785
| 0.329114
| 0.493671
| 0.255274
| 0
| 0
| 0
| 0
| 0
| 0.025751
| 0.179577
| 1,420
| 47
| 77
| 30.212766
| 0.787983
| 0.056338
| 0
| 0.114286
| 0
| 0
| 0.014571
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057143
| false
| 0.028571
| 0.028571
| 0
| 0.828571
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
6f28a497436864c004c8d2b2a6820592cf9d81e2
| 93
|
py
|
Python
|
backstage/apps.py
|
JiajiaHuang/smonus
|
95ec209ae3562ea73ee9ce4c22a0d3a3f0975210
|
[
"Unlicense"
] | 45
|
2019-03-22T23:01:45.000Z
|
2021-11-09T01:32:12.000Z
|
EMS/backstage/apps.py
|
Carlyx/2019-Software-Engineering-Curriculum-Design
|
213336540c58f4b1dbcc3656c7178e9b37e6cff4
|
[
"MIT"
] | 9
|
2019-03-25T03:27:57.000Z
|
2021-06-10T21:27:21.000Z
|
EMS/backstage/apps.py
|
Carlyx/2019-Software-Engineering-Curriculum-Design
|
213336540c58f4b1dbcc3656c7178e9b37e6cff4
|
[
"MIT"
] | 13
|
2019-03-28T13:44:05.000Z
|
2021-05-23T06:45:03.000Z
|
from django.apps import AppConfig
class BackstageConfig(AppConfig):
    """Django application configuration for the ``backstage`` app."""

    name = 'backstage'
| 15.5
| 33
| 0.763441
| 10
| 93
| 7.1
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 93
| 5
| 34
| 18.6
| 0.910256
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
6f487afa7ad5fb1d9fd56bfd83ff36f59149f441
| 129
|
py
|
Python
|
Chapter09/text_write.py
|
add54/ADMIN_SYS_PYTHON
|
5a6d9705537c8663c8f7b0f45d29ccc87b6096e7
|
[
"MIT"
] | 116
|
2018-12-21T01:05:47.000Z
|
2022-03-23T21:41:41.000Z
|
Chapter09/text_write.py
|
add54/ADMIN_SYS_PYTHON
|
5a6d9705537c8663c8f7b0f45d29ccc87b6096e7
|
[
"MIT"
] | 2
|
2021-03-31T19:36:19.000Z
|
2021-06-10T22:29:26.000Z
|
Chapter09/text_write.py
|
add54/ADMIN_SYS_PYTHON
|
5a6d9705537c8663c8f7b0f45d29ccc87b6096e7
|
[
"MIT"
] | 147
|
2018-12-19T14:10:32.000Z
|
2022-03-20T11:03:20.000Z
|
# Write the days of the week, one per line, to test.txt.
# Using a context manager guarantees the file is flushed and closed even
# if the write raises (the original left the handle open on error).
with open("test.txt", "w") as text_file:
    text_file.write("Monday\nTuesday\nWednesday\nThursday\nFriday\nSaturday\n")
| 25.8
| 75
| 0.767442
| 19
| 129
| 5.052632
| 0.789474
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054264
| 129
| 4
| 76
| 32.25
| 0.786885
| 0
| 0
| 0
| 0
| 0
| 0.507813
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
6f4bc6e26adf10e740f93a66e244b0513b8e039b
| 239
|
py
|
Python
|
pyclerk/endpoints/__init__.py
|
rgioai/caselaw-access-project
|
31275e3af89a9f7702b11a91ba712f3c542dc015
|
[
"MIT"
] | 2
|
2020-04-28T14:14:50.000Z
|
2020-05-12T16:50:45.000Z
|
pyclerk/endpoints/__init__.py
|
rgioai/caselaw-access-project
|
31275e3af89a9f7702b11a91ba712f3c542dc015
|
[
"MIT"
] | null | null | null |
pyclerk/endpoints/__init__.py
|
rgioai/caselaw-access-project
|
31275e3af89a9f7702b11a91ba712f3c542dc015
|
[
"MIT"
] | null | null | null |
from ._endpoint import *
from .bulk import *
from .cases import *
from .citations import *
from .courts import *
from .jurisdictions import *
from .ngrams import *
from .reporters import *
from .user_history import *
from .volumes import *
| 23.9
| 28
| 0.753138
| 31
| 239
| 5.741935
| 0.419355
| 0.505618
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16318
| 239
| 10
| 29
| 23.9
| 0.89
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
6f4d168a478d77b8c7b778b61126d70232090145
| 487
|
py
|
Python
|
django_event/backends/base/client.py
|
ailove-dev/django-event
|
2d82cee0b3b86209850cbb6e382d597d2624251d
|
[
"MIT"
] | 3
|
2015-08-31T00:46:12.000Z
|
2017-12-13T01:32:32.000Z
|
django_event/backends/base/client.py
|
ailove-dev/django-event
|
2d82cee0b3b86209850cbb6e382d597d2624251d
|
[
"MIT"
] | 8
|
2015-01-20T12:27:24.000Z
|
2015-05-29T12:29:53.000Z
|
django_event/backends/base/client.py
|
ailove-dev/django-event
|
2d82cee0b3b86209850cbb6e382d597d2624251d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Backend base client module.
"""
from __future__ import unicode_literals
class BaseClient(object):
    """Abstract base for backend clients.

    Subclasses must provide concrete ``connect``/``disconnect``
    implementations; the base versions always raise.
    """

    def connect(self):
        """Establish the backend connection.

        Raises:
            NotImplementedError: always, in this base class.
        """
        raise NotImplementedError

    def disconnect(self):
        """Tear down the backend connection.

        Raises:
            NotImplementedError: always, in this base class.
        """
        raise NotImplementedError
| 15.21875
| 45
| 0.570842
| 37
| 487
| 7.378378
| 0.675676
| 0.080586
| 0.21978
| 0.25641
| 0.395604
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002976
| 0.310062
| 487
| 32
| 46
| 15.21875
| 0.809524
| 0.38193
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
6f6465003d65ed58802dda51b92ff286e8009cc3
| 174
|
py
|
Python
|
helm/dagster/schema/schema/charts/dagster/subschema/global_.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 4,606
|
2018-06-21T17:45:20.000Z
|
2022-03-31T23:39:42.000Z
|
helm/dagster/schema/schema/charts/dagster/subschema/global_.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 6,221
|
2018-06-12T04:36:01.000Z
|
2022-03-31T21:43:05.000Z
|
helm/dagster/schema/schema/charts/dagster/subschema/global_.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 619
|
2018-08-22T22:43:09.000Z
|
2022-03-31T22:48:06.000Z
|
from pydantic import BaseModel # pylint: disable=no-name-in-module
class Global(BaseModel):
    """Chart-wide values shared across the Dagster Helm subcharts.

    NOTE(review): field names presumably mirror keys under ``global:`` in
    the chart's values.yaml — confirm against the schema there.
    """

    postgresqlSecretName: str  # name of the secret holding Postgres credentials — presumably; verify
    dagsterHome: str           # value for the DAGSTER_HOME directory — presumably; verify
    serviceAccountName: str    # Kubernetes service account to run under — presumably; verify
| 21.75
| 67
| 0.758621
| 19
| 174
| 6.947368
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172414
| 174
| 7
| 68
| 24.857143
| 0.916667
| 0.189655
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.2
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
6f6fea304dc512956790042062c28a5a7469c7c6
| 200
|
py
|
Python
|
onadata/apps/export/__init__.py
|
gushil/kobocat
|
5ce27ed5fbf969b2ce68e8a59dd97ced74686711
|
[
"BSD-2-Clause"
] | 1
|
2018-07-15T10:37:41.000Z
|
2018-07-15T10:37:41.000Z
|
onadata/apps/export/__init__.py
|
gushil/kobocat
|
5ce27ed5fbf969b2ce68e8a59dd97ced74686711
|
[
"BSD-2-Clause"
] | 48
|
2019-03-18T09:26:31.000Z
|
2019-05-27T08:12:03.000Z
|
onadata/apps/export/__init__.py
|
gushil/kobocat
|
5ce27ed5fbf969b2ce68e8a59dd97ced74686711
|
[
"BSD-2-Clause"
] | 1
|
2020-03-03T15:50:24.000Z
|
2020-03-03T15:50:24.000Z
|
#################################################
# THIS APP IS DEAD CODE AND SHOULD BE EXCISED #
# EVERY SINGLE ENDPOINT 500s EXCEPT export_menu #
#################################################
| 40
| 49
| 0.375
| 16
| 200
| 4.625
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016949
| 0.115
| 200
| 4
| 50
| 50
| 0.40113
| 0.46
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
489d6f4d40fb42cdab157aaebde171cd04713835
| 46
|
py
|
Python
|
data/studio21_generated/introductory/4271/starter_code.py
|
vijaykumawat256/Prompt-Summarization
|
614f5911e2acd2933440d909de2b4f86653dc214
|
[
"Apache-2.0"
] | null | null | null |
data/studio21_generated/introductory/4271/starter_code.py
|
vijaykumawat256/Prompt-Summarization
|
614f5911e2acd2933440d909de2b4f86653dc214
|
[
"Apache-2.0"
] | null | null | null |
data/studio21_generated/introductory/4271/starter_code.py
|
vijaykumawat256/Prompt-Summarization
|
614f5911e2acd2933440d909de2b4f86653dc214
|
[
"Apache-2.0"
] | null | null | null |
def roman_fractions(integer, fraction=None):
    """Starter stub — presumably meant to render *integer* (and an optional
    *fraction*) as a Roman numeral string; the body is intentionally left
    for the solver. TODO: implement."""
| 23
| 44
| 0.804348
| 6
| 46
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 46
| 2
| 45
| 23
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
48ac73d1d9c5ae408cf5d7443f177a048d51baf3
| 213
|
py
|
Python
|
dwetl/writer/writer.py
|
ThisIsNima/dwetl
|
12e06148929ec3ff5946345251c955cb4277d167
|
[
"Apache-2.0"
] | 1
|
2021-04-08T11:58:51.000Z
|
2021-04-08T11:58:51.000Z
|
dwetl/writer/writer.py
|
tsboom/dwetl
|
b137b8ad3fa36fcabb6a0de33c23e1328b6e3a19
|
[
"Apache-2.0"
] | 1
|
2019-12-17T16:41:25.000Z
|
2019-12-17T16:41:25.000Z
|
dwetl/writer/writer.py
|
ThisIsNima/dwetl
|
12e06148929ec3ff5946345251c955cb4277d167
|
[
"Apache-2.0"
] | 3
|
2019-05-09T17:27:48.000Z
|
2019-10-02T17:58:53.000Z
|
class Writer:
    """Abstract base for writers that emit one row at a time to an output.

    Concrete subclasses must override ``write_row``; the base version
    always raises.
    """

    def write_row(self):
        """Write a single row to the output (must be overridden)."""
        raise NotImplementedError
| 23.666667
| 66
| 0.680751
| 26
| 213
| 5.5
| 0.807692
| 0.111888
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.253521
| 213
| 9
| 67
| 23.666667
| 0.899371
| 0.539906
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
48da26e52f25a53e20690fcc3011f8b221894957
| 24
|
py
|
Python
|
irco/__init__.py
|
GaretJax/irco
|
e5df3cf1a608dc813011a1ee7e920637e5bd155c
|
[
"MIT"
] | null | null | null |
irco/__init__.py
|
GaretJax/irco
|
e5df3cf1a608dc813011a1ee7e920637e5bd155c
|
[
"MIT"
] | null | null | null |
irco/__init__.py
|
GaretJax/irco
|
e5df3cf1a608dc813011a1ee7e920637e5bd155c
|
[
"MIT"
] | 1
|
2015-12-17T19:18:28.000Z
|
2015-12-17T19:18:28.000Z
|
# Package version string.
__version__ = '0.10.2'
| 8
| 22
| 0.625
| 4
| 24
| 2.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0.166667
| 24
| 2
| 23
| 12
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
5b0c1b2c0919a4333fb1ee5606bcc2d98af20ed6
| 346
|
py
|
Python
|
notebooks/Skew_T/solutions/skewt_get_data.py
|
DEVESHTARASIA/unidata-python-workshop
|
6ce194a0515effbd0cddb50c2302d5160494747e
|
[
"MIT"
] | 1
|
2020-01-18T20:34:33.000Z
|
2020-01-18T20:34:33.000Z
|
notebooks/Skew_T/solutions/skewt_get_data.py
|
DEVESHTARASIA/unidata-python-workshop
|
6ce194a0515effbd0cddb50c2302d5160494747e
|
[
"MIT"
] | null | null | null |
notebooks/Skew_T/solutions/skewt_get_data.py
|
DEVESHTARASIA/unidata-python-workshop
|
6ce194a0515effbd0cddb50c2302d5160494747e
|
[
"MIT"
] | 1
|
2020-11-07T12:42:54.000Z
|
2020-11-07T12:42:54.000Z
|
# Fetch the 2017-09-10 00Z sounding for station 'KEY' from Wyoming's archive.
df = WyomingUpperAir.request_data(datetime(2017, 9, 10, 0), 'KEY')

# Pull each profile column out of the DataFrame and attach its units.
p, T, Td, u, v = (
    df[col].values * units(df.units[col])
    for col in ('pressure', 'temperature', 'dewpoint', 'u_wind', 'v_wind')
)
| 49.428571
| 66
| 0.67052
| 54
| 346
| 4.203704
| 0.388889
| 0.242291
| 0.286344
| 0.396476
| 0.193833
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025478
| 0.092486
| 346
| 7
| 67
| 49.428571
| 0.697452
| 0
| 0
| 0
| 0
| 0
| 0.233429
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
5b14bd7758eb780eb14ad51f9e8a701073439bad
| 163
|
py
|
Python
|
colt/obj.py
|
xiki-tempula/colt
|
dbd6e7e329787c2b92dc7d69b89df8680f10b738
|
[
"Apache-2.0"
] | 1
|
2021-11-07T12:06:54.000Z
|
2021-11-07T12:06:54.000Z
|
colt/obj.py
|
xiki-tempula/colt
|
dbd6e7e329787c2b92dc7d69b89df8680f10b738
|
[
"Apache-2.0"
] | 1
|
2021-11-07T13:34:16.000Z
|
2021-11-07T13:34:16.000Z
|
colt/obj.py
|
xiki-tempula/colt
|
dbd6e7e329787c2b92dc7d69b89df8680f10b738
|
[
"Apache-2.0"
] | 1
|
2021-10-31T10:39:37.000Z
|
2021-10-31T10:39:37.000Z
|
from .colt import Colt
class NoFurtherQuestions(Colt):
    """Sentinel Colt subclass for cases where no further questions
    should be asked."""
    __slots__ = ()  # no instance attributes; keeps instances minimal
| 18.111111
| 62
| 0.693252
| 21
| 163
| 5.190476
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.226994
| 163
| 8
| 63
| 20.375
| 0.865079
| 0.435583
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
d2a834e2510d369733b1848fde49bf41e0f5edab
| 94
|
py
|
Python
|
koocook_core/apps.py
|
KooCook/koocook-dj
|
33bfaf48e8363013ddd083d5d8542496c50fd5d3
|
[
"BSD-3-Clause"
] | 1
|
2020-10-19T04:44:49.000Z
|
2020-10-19T04:44:49.000Z
|
koocook_core/apps.py
|
KooCook/koocook-dj
|
33bfaf48e8363013ddd083d5d8542496c50fd5d3
|
[
"BSD-3-Clause"
] | 26
|
2019-11-11T03:37:03.000Z
|
2019-12-15T23:18:18.000Z
|
koocook_core/apps.py
|
KooCook/koocook-dj
|
33bfaf48e8363013ddd083d5d8542496c50fd5d3
|
[
"BSD-3-Clause"
] | 1
|
2020-11-08T14:36:21.000Z
|
2020-11-08T14:36:21.000Z
|
from django.apps import AppConfig
class KooCookConfig(AppConfig):
    """Django application configuration for the ``koocook_core`` app."""

    name = 'koocook_core'
| 15.666667
| 33
| 0.765957
| 11
| 94
| 6.454545
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159574
| 94
| 5
| 34
| 18.8
| 0.898734
| 0
| 0
| 0
| 0
| 0
| 0.12766
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
d2d0c22191e1bf0bb4a08fd8be6359bfc6d71a2d
| 203
|
py
|
Python
|
back/matchmaker/admin.py
|
HaeSe0ng/SWPP
|
3bfc95edd2b341283d8d63d398b05e74605d54af
|
[
"Apache-2.0"
] | 2
|
2019-09-16T08:06:45.000Z
|
2019-12-17T14:35:51.000Z
|
back/matchmaker/admin.py
|
HaeSe0ng/SWPP
|
3bfc95edd2b341283d8d63d398b05e74605d54af
|
[
"Apache-2.0"
] | 43
|
2019-10-05T02:45:23.000Z
|
2020-07-18T11:15:00.000Z
|
back/matchmaker/admin.py
|
eodmsabc/SNU-SWPP
|
3a2453b6747e9e198fda5174a208ca1f2f3e6cd3
|
[
"Apache-2.0"
] | 3
|
2019-12-16T05:48:11.000Z
|
2019-12-17T14:43:09.000Z
|
'''
matchmaker admin
'''
from django.contrib import admin

from .models import Category, Match, Participation

# Expose the matchmaker models through the Django admin site.
for _model in (Category, Match, Participation):
    admin.site.register(_model)
| 18.454545
| 50
| 0.79803
| 25
| 203
| 6.48
| 0.48
| 0.166667
| 0.314815
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093596
| 203
| 10
| 51
| 20.3
| 0.880435
| 0.078818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
96121a488874857004fbed7e249c5a6a79b6bcdf
| 264
|
py
|
Python
|
Dataset/Leetcode/test/58/482.py
|
kkcookies99/UAST
|
fff81885aa07901786141a71e5600a08d7cb4868
|
[
"MIT"
] | null | null | null |
Dataset/Leetcode/test/58/482.py
|
kkcookies99/UAST
|
fff81885aa07901786141a71e5600a08d7cb4868
|
[
"MIT"
] | null | null | null |
Dataset/Leetcode/test/58/482.py
|
kkcookies99/UAST
|
fff81885aa07901786141a71e5600a08d7cb4868
|
[
"MIT"
] | null | null | null |
class Solution:
    def XXX(self, s: str) -> int:
        """Return the length of the last space-separated word in *s*.

        Splits on single spaces only (matching ``str.split(' ')``) and
        ignores empty fields produced by leading/trailing/repeated spaces.
        """
        words = [word for word in s.split(' ') if word != '']
        return len(words[-1])
undefined
for (i = 0; i < document.getElementsByTagName("code").length; i++) { console.log(document.getElementsByTagName("code")[i].innerText); }
| 29.333333
| 139
| 0.613636
| 36
| 264
| 4.5
| 0.722222
| 0.345679
| 0.395062
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009346
| 0.189394
| 264
| 8
| 140
| 33
| 0.747664
| 0
| 0
| 0
| 0
| 0
| 0.034351
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
82506975d054669f34f5755d8a6288b5f89d9c19
| 79
|
py
|
Python
|
node_modules/python-shell/test/python/echo_binary.py
|
harsh424jan/Gazeplayer
|
8654ac3ba6d94700d2c96a12c67e78b24d685a08
|
[
"MIT"
] | 2
|
2018-04-20T15:50:32.000Z
|
2020-04-17T06:43:57.000Z
|
node_modules/python-shell/test/python/echo_binary.py
|
harsh424jan/Gazeplayer
|
8654ac3ba6d94700d2c96a12c67e78b24d685a08
|
[
"MIT"
] | 1
|
2018-04-20T17:30:50.000Z
|
2018-05-28T14:14:03.000Z
|
ThrotaleMLSystem/example/node_modules/python-shell/test/python/echo_binary.py
|
RavinduHasithanjana/Throtale---Expert-System-for-Automating-API-Throttling
|
7e35b8de437ca24759234274722565201f949f9f
|
[
"Apache-2.0"
] | null | null | null |
import sys
# simple binary echo script: copy everything from stdin to stdout in one read.
# NOTE(review): this uses the *text* streams; truly binary data would need
# sys.stdin.buffer / sys.stdout.buffer — confirm what the callers pipe in.
sys.stdout.write(sys.stdin.read())
| 15.8
| 35
| 0.721519
| 12
| 79
| 4.75
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164557
| 79
| 4
| 36
| 19.75
| 0.863636
| 0.316456
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
82573dfe69ff19c994102ed1457dfddd215d0c9e
| 98
|
py
|
Python
|
image_viewer/apps.py
|
lucasLB7/Zoomin-Photos-
|
0382a7c7c3854901c9ff4de742062c1cc9a706fa
|
[
"Unlicense"
] | null | null | null |
image_viewer/apps.py
|
lucasLB7/Zoomin-Photos-
|
0382a7c7c3854901c9ff4de742062c1cc9a706fa
|
[
"Unlicense"
] | null | null | null |
image_viewer/apps.py
|
lucasLB7/Zoomin-Photos-
|
0382a7c7c3854901c9ff4de742062c1cc9a706fa
|
[
"Unlicense"
] | null | null | null |
from django.apps import AppConfig
class ImageViewerConfig(AppConfig):
    """Django application configuration for the ``image_viewer`` app."""

    name = 'image_viewer'
| 16.333333
| 35
| 0.77551
| 11
| 98
| 6.818182
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153061
| 98
| 5
| 36
| 19.6
| 0.903614
| 0
| 0
| 0
| 0
| 0
| 0.122449
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
8267978a51e5763d4f0421e9b0d8adc5b8bd4b49
| 249
|
py
|
Python
|
graphene_django_jwt/signals.py
|
Speedy1991/graphene-django-jwt
|
d5a09785fdda31328e6a6dbdbbdf3436c9275435
|
[
"MIT"
] | null | null | null |
graphene_django_jwt/signals.py
|
Speedy1991/graphene-django-jwt
|
d5a09785fdda31328e6a6dbdbbdf3436c9275435
|
[
"MIT"
] | null | null | null |
graphene_django_jwt/signals.py
|
Speedy1991/graphene-django-jwt
|
d5a09785fdda31328e6a6dbdbbdf3436c9275435
|
[
"MIT"
] | null | null | null |
from django.dispatch import Signal

# NOTE(review): ``providing_args`` is purely documentational; it was
# deprecated in Django 3.0 and removed in 4.0 — confirm the project's
# Django version before upgrading, as these calls will raise on 4.0+.

# Fired after a refresh token has been revoked.
refresh_token_revoked = Signal(providing_args=['refresh_token'])
# Fired after a refresh token has been rotated (old token replaced by a new one).
refresh_token_rotated = Signal(providing_args=['refresh_token', 'new_refresh_token'])
# Fired when a token refresh has completed.
refresh_finished = Signal(providing_args=['request', 'user'])
| 41.5
| 85
| 0.811245
| 31
| 249
| 6.129032
| 0.483871
| 0.315789
| 0.3
| 0.273684
| 0.326316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064257
| 249
| 5
| 86
| 49.8
| 0.815451
| 0
| 0
| 0
| 0
| 0
| 0.216867
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
828d480a21c1680492961a129640011cfed39bcc
| 498
|
py
|
Python
|
tests/test_format_others.py
|
movermeyer/kaviar
|
77ab934a3dd7b1cfabc0ec96acc0b8ed26edcb3f
|
[
"MIT"
] | 3
|
2015-01-09T12:03:19.000Z
|
2015-11-23T22:43:00.000Z
|
tests/test_format_others.py
|
movermeyer/kaviar
|
77ab934a3dd7b1cfabc0ec96acc0b8ed26edcb3f
|
[
"MIT"
] | 1
|
2018-03-04T20:08:08.000Z
|
2018-03-04T20:08:08.000Z
|
tests/test_format_others.py
|
movermeyer/kaviar
|
77ab934a3dd7b1cfabc0ec96acc0b8ed26edcb3f
|
[
"MIT"
] | 3
|
2015-08-21T11:48:10.000Z
|
2019-12-05T09:30:10.000Z
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
from datetime import datetime
from decimal import Decimal
from kaviar import kv_format
def test_decimal():
    """Decimal values are rendered verbatim (trailing zeros kept, no quoting)."""
    assert kv_format(delta=Decimal('4.50')) == 'delta=4.50'
def test_datetime():
    """Datetime values contain a space, so kv_format wraps them in quotes."""
    assert (kv_format(date=datetime(2013, 9, 23, 11, 11, 11))
            == 'date="2013-09-23 11:11:11"')
def test_boolean():
assert kv_format(success=True, fail=False) == 'fail=False success=True'
| 23.714286
| 77
| 0.696787
| 73
| 498
| 4.561644
| 0.452055
| 0.096096
| 0.126126
| 0.048048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081928
| 0.166667
| 498
| 20
| 78
| 24.9
| 0.720482
| 0.042169
| 0
| 0
| 0
| 0
| 0.136842
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 1
| 0.272727
| true
| 0
| 0.363636
| 0
| 0.636364
| 0.090909
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
82bdaf6d670fed0c211985d05f6a444f3697c883
| 116
|
py
|
Python
|
app/main/admin.py
|
lenaunderwood22/django-forum
|
9d739166029197dcd7256d1250641928cff01251
|
[
"MIT"
] | null | null | null |
app/main/admin.py
|
lenaunderwood22/django-forum
|
9d739166029197dcd7256d1250641928cff01251
|
[
"MIT"
] | null | null | null |
app/main/admin.py
|
lenaunderwood22/django-forum
|
9d739166029197dcd7256d1250641928cff01251
|
[
"MIT"
] | null | null | null |
from django.contrib import admin

from .models import Topic

# Make the Topic model manageable through the Django admin site.
admin.site.register(Topic)
| 19.333333
| 32
| 0.801724
| 17
| 116
| 5.470588
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12931
| 116
| 6
| 33
| 19.333333
| 0.920792
| 0.224138
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
82c9bd9e0536855f0c90ef033f0fcd2349b13631
| 1,019
|
py
|
Python
|
hello_python/chapter02/del_blank.py
|
zachard/python-parent
|
374ab3fc3d0584adb26db1ff7c124665468d76df
|
[
"Apache-2.0"
] | null | null | null |
hello_python/chapter02/del_blank.py
|
zachard/python-parent
|
374ab3fc3d0584adb26db1ff7c124665468d76df
|
[
"Apache-2.0"
] | null | null | null |
hello_python/chapter02/del_blank.py
|
zachard/python-parent
|
374ab3fc3d0584adb26db1ff7c124665468d76df
|
[
"Apache-2.0"
] | null | null | null |
favorite_languages = ' I like python ' # create a variable with leading and trailing spaces
print("\n")
favorite_languages # without print(), a bare expression statement in a script displays nothing
print('前面有空格' + favorite_languages + '后面有空格')
print("\n")
# rstrip() removes trailing whitespace, including effects of \t and \n
print('前面有空格' + favorite_languages.rstrip() + '后面没空格')
print('前面有空格' + favorite_languages + '后面有空格') # the original variable is unchanged
print("\n")
print('前面有空格' + favorite_languages + '后面有空格')
# lstrip() removes leading whitespace, including effects of \t and \n
print('前面没空格' + favorite_languages.lstrip() + '后面有空格')
print('前面有空格' + favorite_languages + '后面有空格') # the original variable is unchanged
print('\n')
print('前面有空格' + favorite_languages + '后面有空格')
# strip() removes leading and trailing whitespace (interior spaces kept), including \t and \n
print('前面没空格' + favorite_languages.strip() + '后面没空格')
print('前面有空格' + favorite_languages + '后面有空格') # the original variable is unchanged
print('\n')
print('前面有空格' + favorite_languages + '后面有空格')
# replace(' ', '') removes every space, but does NOT touch \t or \n
print('前面没空格' + favorite_languages.replace(' ', '') + '后面没空格')
print('前面有空格' + favorite_languages + '后面有空格') # the original variable is unchanged
| 39.192308
| 73
| 0.698724
| 106
| 1,019
| 6.575472
| 0.254717
| 0.341463
| 0.232425
| 0.348637
| 0.578192
| 0.532281
| 0.420373
| 0.354376
| 0.354376
| 0.354376
| 0
| 0
| 0.13052
| 1,019
| 26
| 74
| 39.192308
| 0.786682
| 0.251227
| 0
| 0.684211
| 0
| 0
| 0.193891
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.894737
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
82d7dcac93d2b5d38fa8225f595bd9e8f72782fd
| 125
|
py
|
Python
|
src/captcha/urls.py
|
hohner36/hexchan-engine
|
8edf155e8fe64936dfe428a6e9bac69705934b1f
|
[
"MIT"
] | 2
|
2021-04-16T10:11:18.000Z
|
2022-03-15T15:16:14.000Z
|
src/captcha/urls.py
|
hohner36/hexchan-engine
|
8edf155e8fe64936dfe428a6e9bac69705934b1f
|
[
"MIT"
] | 2
|
2019-09-02T18:39:51.000Z
|
2019-09-02T18:43:34.000Z
|
src/captcha/urls.py
|
hohner36/hexchan-engine
|
8edf155e8fe64936dfe428a6e9bac69705934b1f
|
[
"MIT"
] | 2
|
2019-09-02T18:37:25.000Z
|
2022-02-20T19:19:40.000Z
|
from django.urls import path

from . import views

# Single route: the application root serves the captcha view.
urlpatterns = [
    path('', views.captcha_view, name='captcha_view'),
]
| 13.888889
| 54
| 0.696
| 16
| 125
| 5.3125
| 0.625
| 0.258824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176
| 125
| 8
| 55
| 15.625
| 0.825243
| 0
| 0
| 0
| 0
| 0
| 0.096
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
82dc1428a931411b621bc83fd97ec835ea345637
| 1,637
|
py
|
Python
|
tests/unit/test_uiautomator_device_wrapper.py
|
tksn/phoneauto
|
9b92226c5c5eeb606f4b3c462a8b654454eb203d
|
[
"MIT"
] | null | null | null |
tests/unit/test_uiautomator_device_wrapper.py
|
tksn/phoneauto
|
9b92226c5c5eeb606f4b3c462a8b654454eb203d
|
[
"MIT"
] | null | null | null |
tests/unit/test_uiautomator_device_wrapper.py
|
tksn/phoneauto
|
9b92226c5c5eeb606f4b3c462a8b654454eb203d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Unit tests for DeviceWrapper, a thin wrapper around a uiautomator device.

``mocks`` is a fixture defined elsewhere in the test suite; ``mocks.device``
is a mock of the underlying uiautomator device object — TODO confirm its
definition in the shared conftest.
"""
from __future__ import unicode_literals
import pytest
from phoneauto.helpers.uiautomator_device_wrapper import DeviceWrapper
@pytest.fixture
def wdev(mocks):
    # Build a DeviceWrapper around the mocked device for each test.
    device_wrap = DeviceWrapper(mocks.device)
    return device_wrap
def test_selector(mocks, wdev):
    # Calling the wrapper forwards selector kwargs to the device.
    wdev(text='abc', className='def')
    mocks.device.assert_called_with(text='abc', className='def')
def test_click(mocks, wdev):
    # click() forwards the coordinates and then waits for idle/update.
    wdev.click(123, 456)
    mocks.device.click.assert_called_with(123, 456)
    assert mocks.device.wait.idle.called
    assert mocks.device.wait.update.called
def test_press_home(mocks, wdev):
    # press.home() reaches the device and also triggers both wait calls.
    wdev.press.home()
    assert mocks.device.press.home.called
    assert mocks.device.wait.idle.called
    assert mocks.device.wait.update.called
def test_press_with_keycode(mocks, wdev):
    # press(keycode, meta) forwards the positional args and waits.
    wdev.press(4, 2)
    mocks.device.press.assert_called_with(4, 2)
    assert mocks.device.wait.idle.called
    assert mocks.device.wait.update.called
def test_screen_eq(mocks, wdev):
    # Equality comparison is delegated to the device's screen object.
    wdev.screen == 'on'
    assert mocks.device.screen.__eq__.called
def test_screen_ne(mocks, wdev):
    # Inequality comparison is delegated to the device's screen object.
    wdev.screen != 'on'
    assert mocks.device.screen.__ne__.called
def test_wait_idle(mocks, wdev):
    # wait.idle() alone must not imply a wait.update() call.
    wdev.wait.idle()
    assert mocks.device.wait.idle.called
    assert not mocks.device.wait.update.called
def test_set_orientation(mocks, wdev):
    # Setting orientation writes through to the device and waits.
    wdev.orientation = 'left'
    assert mocks.device.wait.idle.called
    assert mocks.device.wait.update.called
    assert mocks.device.orientation == 'left'
def test_get_orientation(mocks, wdev):
    # Reading orientation reads straight from the device.
    mocks.device.orientation = 'right'
    assert wdev.orientation == 'right'
| 24.432836
| 70
| 0.73427
| 229
| 1,637
| 5.074236
| 0.218341
| 0.189329
| 0.190189
| 0.162651
| 0.388985
| 0.383821
| 0.383821
| 0.322719
| 0.322719
| 0.246988
| 0
| 0.012248
| 0.152108
| 1,637
| 66
| 71
| 24.80303
| 0.824928
| 0.012828
| 0
| 0.209302
| 0
| 0
| 0.021066
| 0
| 0
| 0
| 0
| 0
| 0.418605
| 1
| 0.232558
| false
| 0
| 0.069767
| 0
| 0.325581
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
82e88441a070b06174c1692e3b1c2395dc4fe881
| 104
|
py
|
Python
|
logiccircuit/__main__.py
|
TINYT1ME/LogicCircuit
|
0a497d84a606c672a8bb3e7d55951835576a13e7
|
[
"MIT"
] | 5
|
2021-11-16T04:12:35.000Z
|
2022-01-02T22:57:42.000Z
|
logiccircuit/__main__.py
|
TINYT1ME/LogicCircuit
|
0a497d84a606c672a8bb3e7d55951835576a13e7
|
[
"MIT"
] | null | null | null |
logiccircuit/__main__.py
|
TINYT1ME/LogicCircuit
|
0a497d84a606c672a8bb3e7d55951835576a13e7
|
[
"MIT"
] | null | null | null |
# Package entry point: delegates to logiccircuit.main when executed with
# `python -m logiccircuit`.
from logiccircuit import main
if __name__ == "__main__":
    main()
| 14.857143
| 33
| 0.721154
| 13
| 104
| 5.153846
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.192308
| 104
| 6
| 34
| 17.333333
| 0.797619
| 0.298077
| 0
| 0
| 0
| 0
| 0.112676
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
7d656d7daa1ad6e9d689017298c3ddb221cc11f7
| 205
|
py
|
Python
|
dataset_src/controllers/default.py
|
uwdata/termite-data-server
|
1085571407c627bdbbd21c352e793fed65d09599
|
[
"BSD-3-Clause"
] | 97
|
2015-01-17T09:41:57.000Z
|
2022-03-15T11:39:03.000Z
|
dataset_src/controllers/default.py
|
afcarl/termite-data-server
|
1085571407c627bdbbd21c352e793fed65d09599
|
[
"BSD-3-Clause"
] | 12
|
2015-02-01T02:59:56.000Z
|
2021-06-09T02:31:34.000Z
|
dataset_src/controllers/default.py
|
afcarl/termite-data-server
|
1085571407c627bdbbd21c352e793fed65d09599
|
[
"BSD-3-Clause"
] | 35
|
2015-01-25T04:48:37.000Z
|
2021-01-29T20:32:26.000Z
|
#!/usr/bin/env python
import os
import utils.uploads as uploads


def index():
    """List available corpora: uploaded spreadsheets minus their .csv suffix.

    Scans the per-request upload directory (``request`` is the
    framework-injected web2py global — TODO confirm) and returns the
    context dict for the default view.

    Returns:
        dict: {"corpora": [<filename without its .csv suffix>, ...]}
    """
    # Bug fix: the original `fname[:-len(".csv")]` sliced four characters off
    # EVERY filename regardless of its extension; only strip a real suffix.
    corpora = [
        fname[:-len(".csv")] if fname.endswith(".csv") else fname
        for fname in os.listdir(uploads.spreadsheet_dir(request))
    ]
    return {"corpora": corpora}
| 25.625
| 94
| 0.702439
| 29
| 205
| 4.931034
| 0.758621
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141463
| 205
| 7
| 95
| 29.285714
| 0.8125
| 0.097561
| 0
| 0
| 0
| 0
| 0.059783
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
7d679d6da083221701f90755f8328c511b695beb
| 268
|
py
|
Python
|
authentication/authentication.py
|
damiclem/django-rest-tutorial
|
a8bfce3e94cd8c8d7b1bc2d8ed851980e38b86fa
|
[
"MIT"
] | null | null | null |
authentication/authentication.py
|
damiclem/django-rest-tutorial
|
a8bfce3e94cd8c8d7b1bc2d8ed851980e38b86fa
|
[
"MIT"
] | null | null | null |
authentication/authentication.py
|
damiclem/django-rest-tutorial
|
a8bfce3e94cd8c8d7b1bc2d8ed851980e38b86fa
|
[
"MIT"
] | null | null | null |
# Token-based authentication from Django REST framework.
from rest_framework.authentication import TokenAuthentication


class BearerAuthentication(TokenAuthentication):
    """TokenAuthentication variant that expects the ``Bearer`` keyword.

    Clients authenticate with ``Authorization: Bearer <token>`` instead of
    the DRF default ``Authorization: Token <token>``.
    """

    # Keyword checked at the start of the Authorization header.
    keyword = 'Bearer'
| 26.8
| 69
| 0.817164
| 27
| 268
| 8.074074
| 0.703704
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145522
| 268
| 9
| 70
| 29.777778
| 0.951965
| 0.447761
| 0
| 0
| 0
| 0
| 0.041667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
7d98611a708ad17440f9d1f3053e8de0a906f534
| 91
|
py
|
Python
|
enthought/enable/primitives/line.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/enable/primitives/line.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/enable/primitives/line.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# Proxy module: re-exports everything from enable.primitives.line so the
# legacy `enthought.enable` import path keeps working — presumably for
# backward compatibility with pre-ETS-4 code (verify against etsproxy docs).
from __future__ import absolute_import
from enable.primitives.line import *
| 22.75
| 38
| 0.835165
| 12
| 91
| 5.916667
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120879
| 91
| 3
| 39
| 30.333333
| 0.8875
| 0.131868
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
7da795a345955219a607cb5d921acdd27f245e9c
| 138
|
py
|
Python
|
core/logger/logger/__main__.py
|
NLeSC/LIEStudio
|
03c163b4a2590b4e2204621e1c941c28a9624887
|
[
"Apache-2.0"
] | 10
|
2017-09-14T07:26:15.000Z
|
2021-04-01T09:33:03.000Z
|
core/logger/logger/__main__.py
|
NLeSC/LIEStudio
|
03c163b4a2590b4e2204621e1c941c28a9624887
|
[
"Apache-2.0"
] | 117
|
2017-09-13T08:09:48.000Z
|
2019-10-03T12:19:13.000Z
|
core/logger/logger/__main__.py
|
NLeSC/LIEStudio
|
03c163b4a2590b4e2204621e1c941c28a9624887
|
[
"Apache-2.0"
] | 1
|
2018-09-26T09:40:51.000Z
|
2018-09-26T09:40:51.000Z
|
# Package entry point: start the logger service inside the MDStudio runner.
from lie_logger.application import LoggerComponent
from mdstudio.runner import main
if __name__ == '__main__':
    main(LoggerComponent)
| 23
| 50
| 0.804348
| 16
| 138
| 6.375
| 0.6875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 138
| 5
| 51
| 27.6
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0.057971
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
7db5415edd039b7c3ed6ed69f12194d828cc728d
| 98
|
py
|
Python
|
Lesson4/solution-02.py
|
AnnTka/Lesson2
|
d748553401599b7ec3c95a8a4d71ded501086377
|
[
"BSD-3-Clause"
] | null | null | null |
Lesson4/solution-02.py
|
AnnTka/Lesson2
|
d748553401599b7ec3c95a8a4d71ded501086377
|
[
"BSD-3-Clause"
] | null | null | null |
Lesson4/solution-02.py
|
AnnTka/Lesson2
|
d748553401599b7ec3c95a8a4d71ded501086377
|
[
"BSD-3-Clause"
] | null | null | null |
# Read one line from stdin and report whether it is a palindrome.
text = input()
print("Yes" if text == text[::-1] else "No")
| 12.25
| 21
| 0.561224
| 15
| 98
| 3.533333
| 0.6
| 0.264151
| 0.415094
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013158
| 0.22449
| 98
| 7
| 22
| 14
| 0.684211
| 0
| 0
| 0
| 0
| 0
| 0.05102
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
7dbb96bc6ad360e1a0d47866c3f56295f87e8110
| 169
|
py
|
Python
|
evotor/util/utils.py
|
trukanduk/evotor_hackathon
|
5aeec1886c9ca5dbb2d08d535885701062464fb0
|
[
"MIT"
] | null | null | null |
evotor/util/utils.py
|
trukanduk/evotor_hackathon
|
5aeec1886c9ca5dbb2d08d535885701062464fb0
|
[
"MIT"
] | null | null | null |
evotor/util/utils.py
|
trukanduk/evotor_hackathon
|
5aeec1886c9ca5dbb2d08d535885701062464fb0
|
[
"MIT"
] | null | null | null |
import string
import random

# Alphabet used for generated identifiers: lowercase ASCII letters and digits.
_ID_ALPHABET = string.ascii_lowercase + string.digits


def get_new_id(length=20):
    """Return a pseudo-random identifier of lowercase letters and digits.

    Args:
        length: Number of characters to generate.  Defaults to 20, which
            matches the original fixed-size behaviour.

    Returns:
        str: The generated identifier.  Uses the ``random`` module, so it
        is NOT suitable for security tokens — use ``secrets`` for those.
    """
    return ''.join(random.choice(_ID_ALPHABET) for _ in range(length))
| 18.777778
| 58
| 0.680473
| 24
| 169
| 4.666667
| 0.791667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015038
| 0.213018
| 169
| 8
| 59
| 21.125
| 0.827068
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
7dd42b3d07677a49b5f87cac006770f8643e19a2
| 15
|
py
|
Python
|
data/studio21_generated/introductory/4455/starter_code.py
|
vijaykumawat256/Prompt-Summarization
|
614f5911e2acd2933440d909de2b4f86653dc214
|
[
"Apache-2.0"
] | null | null | null |
data/studio21_generated/introductory/4455/starter_code.py
|
vijaykumawat256/Prompt-Summarization
|
614f5911e2acd2933440d909de2b4f86653dc214
|
[
"Apache-2.0"
] | null | null | null |
data/studio21_generated/introductory/4455/starter_code.py
|
vijaykumawat256/Prompt-Summarization
|
614f5911e2acd2933440d909de2b4f86653dc214
|
[
"Apache-2.0"
] | null | null | null |
def sumin(n):
| 7.5
| 13
| 0.6
| 3
| 15
| 3
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 15
| 2
| 14
| 7.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
7df00d8784e7debf9770ab0bf36c12af5f7e5545
| 8,846
|
py
|
Python
|
tohocd/services/songService.py
|
mokusen/django_app
|
19f43b4d675a7f0d10e4a12cf558b3c6c100dd27
|
[
"MIT"
] | null | null | null |
tohocd/services/songService.py
|
mokusen/django_app
|
19f43b4d675a7f0d10e4a12cf558b3c6c100dd27
|
[
"MIT"
] | 9
|
2019-01-22T12:17:55.000Z
|
2021-06-10T17:36:49.000Z
|
tohocd/services/songService.py
|
mokusen/toho-music-history
|
19f43b4d675a7f0d10e4a12cf558b3c6c100dd27
|
[
"MIT"
] | null | null | null |
from ..models import Song
from django.db.models import Q
from django.db.models.functions import Lower
def __check_param(param):
    """Translate a ``sort`` request parameter into an ``order_by`` argument.

    Parameters
    ----------
    param : str
        The value of ``request.GET['sort']``; a ``_d`` suffix marks the
        descending variant of the same column.

    Returns
    -------
    order_param
        ``Lower(<field>)`` for text columns (case-insensitive ordering),
        the raw field name for the release date, or ``"pk"`` when the
        parameter is not recognised.
    """
    base_fields = {
        "song": "song_name",
        "cd": "cd__cd_name",
        "release": "cd__release_on",
        "circle": "cd__circle__circle_name",
        "vocal": "song_info__vocal__vocal_name",
        "lyric": "song_info__lyric__lyric_name",
        "arrange": "song_info__arrange__arrange_name",
        "ori_song": "original_info__original_song__original_name",
        "ori_work": "original_info__original_song__original_work__original_work_name",
    }
    # Both "x" and "x_d" map to the same column; the direction itself is
    # applied later by __reverse().
    lookup = {}
    for key, field in base_fields.items():
        lookup[key] = field
        lookup[key + "_d"] = field
    if param not in lookup:
        return "pk"
    if "release" in param:
        # Dates order correctly as-is; Lower() only applies to text fields.
        return lookup[param]
    return Lower(lookup[param])
def __reverse(song, param, order_param):
    """Reverse the queryset when a descending (``_d``) sort was requested.

    Parameters
    ----------
    song : queryset of models.Song
    param : str
        The raw ``request.GET['sort']`` value.
    order_param
        The value previously produced by ``__check_param``.

    Returns
    -------
    song
        The queryset, reversed only when ``param`` carries a ``_d`` marker
        and a real ordering column was resolved (``"pk"`` never reverses).
    """
    wants_descending = order_param != "pk" and "_d" in param
    return song.reverse() if wants_descending else song
def get_songs_byOR(word, param):
    """Fuzzy OR search: Songs where ANY searchable column contains *word*.

    Parameters
    ----------
    word : str
        Search word, matched with ``__contains`` against every column.
    param : str
        The ``request.GET['sort']`` value used to order the result.

    Returns
    -------
    song
        A distinct, ordered queryset of models.Song.
    """
    order_param = __check_param(param)
    song = Song.objects.select_related().filter(
        Q(song_name__contains=word) |
        Q(cd__cd_name__contains=word) |
        Q(cd__release_on__contains=word) |
        Q(cd__circle__circle_name__contains=word) |
        Q(song_info__vocal__vocal_name__contains=word) |
        Q(song_info__lyric__lyric_name__contains=word) |
        Q(song_info__arrange__arrange_name__contains=word) |
        Q(original_info__original_song__original_name__contains=word) |
        Q(original_info__original_song__original_work__original_work_name__contains=word)
    ).order_by(order_param).distinct()
    song = __reverse(song, param, order_param)
    return song
def get_songs_byAND(word_dict, param):
    """Fuzzy AND search: Songs where EVERY per-column word matches its column.

    Parameters
    ----------
    word_dict : dict
        Keys: song, cd, release, circle, vocal, lyric, arrange, ori_song,
        ori_work; each value is matched with ``__contains`` against its own
        column (empty strings match everything).
    param : str
        The ``request.GET['sort']`` value used to order the result.

    Returns
    -------
    song
        A distinct, ordered queryset of models.Song.
    """
    order_param = __check_param(param)
    song = Song.objects.select_related().filter(
        Q(song_name__contains=word_dict['song']),
        Q(cd__cd_name__contains=word_dict['cd']),
        Q(cd__release_on__contains=word_dict['release']),
        Q(cd__circle__circle_name__contains=word_dict['circle']),
        Q(song_info__vocal__vocal_name__contains=word_dict['vocal']),
        Q(song_info__lyric__lyric_name__contains=word_dict['lyric']),
        Q(song_info__arrange__arrange_name__contains=word_dict['arrange']),
        Q(original_info__original_song__original_name__contains=word_dict['ori_song']),
        Q(original_info__original_song__original_work__original_work_name__contains=word_dict['ori_work'])
    ).order_by(order_param).distinct()
    song = __reverse(song, param, order_param)
    return song
def get_song_byCd(id, param):
    """Fetch the Songs belonging to the CD with exactly this id.

    Parameters
    ----------
    id : int or str
        Primary key of the models.Cd record.
    param : str
        The ``request.GET['sort']`` value used to order the result.

    Returns
    -------
    song
        An ordered queryset of models.Song.
    """
    order_param = __check_param(param)
    song = Song.objects.select_related().filter(cd__id=id).order_by(order_param)
    song = __reverse(song, param, order_param)
    return song
def get_song_byVocal(id, word, param):
    """Fetch Songs by exact vocalist id, fuzzily filtered by *word*.

    Parameters
    ----------
    id : int or str
        Primary key of the models.Vocal_master record (the original
        docstring said "CD ID" — a copy-paste slip).
    word : str
        Further OR-matched with ``__contains`` against every column.
    param : str
        The ``request.GET['sort']`` value used to order the result.

    Returns
    -------
    song
        A distinct, ordered queryset of models.Song.
    """
    order_param = __check_param(param)
    song = Song.objects.select_related().filter(
        Q(song_info__vocal__id=id),
        Q(song_name__contains=word) |
        Q(cd__cd_name__contains=word) |
        Q(cd__release_on__contains=word) |
        Q(cd__circle__circle_name__contains=word) |
        Q(song_info__vocal__vocal_name__contains=word) |
        Q(song_info__lyric__lyric_name__contains=word) |
        Q(song_info__arrange__arrange_name__contains=word) |
        Q(original_info__original_song__original_name__contains=word) |
        Q(original_info__original_song__original_work__original_work_name__contains=word)
    ).order_by(order_param).distinct()
    song = __reverse(song, param, order_param)
    return song
def get_song_byLyric(id, word, param):
    """Fetch Songs by exact lyricist id, fuzzily filtered by *word*.

    Parameters
    ----------
    id : int or str
        Primary key of the models.Lyric_master record.
    word : str
        Further OR-matched with ``__contains`` against every column.
    param : str
        The ``request.GET['sort']`` value used to order the result.

    Returns
    -------
    song
        A distinct, ordered queryset of models.Song.
    """
    order_param = __check_param(param)
    song = Song.objects.select_related().filter(
        Q(song_info__lyric__id=id),
        Q(song_name__contains=word) |
        Q(cd__cd_name__contains=word) |
        Q(cd__release_on__contains=word) |
        Q(cd__circle__circle_name__contains=word) |
        Q(song_info__vocal__vocal_name__contains=word) |
        Q(song_info__lyric__lyric_name__contains=word) |
        Q(song_info__arrange__arrange_name__contains=word) |
        Q(original_info__original_song__original_name__contains=word) |
        Q(original_info__original_song__original_work__original_work_name__contains=word)
    ).order_by(order_param).distinct()
    song = __reverse(song, param, order_param)
    return song
def get_song_byArrange(id, word, param):
    """Fetch Songs by exact arranger id, fuzzily filtered by *word*.

    Parameters
    ----------
    id : int or str
        Primary key of the models.Arrange_master record.
    word : str
        Further OR-matched with ``__contains`` against every column.
    param : str
        The ``request.GET['sort']`` value used to order the result.

    Returns
    -------
    song
        A distinct, ordered queryset of models.Song.
    """
    order_param = __check_param(param)
    song = Song.objects.select_related().filter(
        Q(song_info__arrange__id=id),
        Q(song_name__contains=word) |
        Q(cd__cd_name__contains=word) |
        Q(cd__release_on__contains=word) |
        Q(cd__circle__circle_name__contains=word) |
        Q(song_info__vocal__vocal_name__contains=word) |
        Q(song_info__lyric__lyric_name__contains=word) |
        Q(song_info__arrange__arrange_name__contains=word) |
        Q(original_info__original_song__original_name__contains=word) |
        Q(original_info__original_song__original_work__original_work_name__contains=word)
    ).order_by(order_param).distinct()
    song = __reverse(song, param, order_param)
    return song
def get_song_byOrisong(id, word, param):
    """Fetch Songs by exact original-song id, fuzzily filtered by *word*.

    Parameters
    ----------
    id : int or str
        Primary key of the models.Original_song record.
    word : str
        Further OR-matched with ``__contains`` against every column.
    param : str
        The ``request.GET['sort']`` value used to order the result.

    Returns
    -------
    song
        A distinct, ordered queryset of models.Song.
    """
    order_param = __check_param(param)
    song = Song.objects.select_related().filter(
        Q(original_info__original_song__id=id),
        Q(song_name__contains=word) |
        Q(cd__cd_name__contains=word) |
        Q(cd__release_on__contains=word) |
        Q(cd__circle__circle_name__contains=word) |
        Q(song_info__vocal__vocal_name__contains=word) |
        Q(song_info__lyric__lyric_name__contains=word) |
        Q(song_info__arrange__arrange_name__contains=word) |
        Q(original_info__original_song__original_name__contains=word) |
        Q(original_info__original_song__original_work__original_work_name__contains=word)
    ).order_by(order_param).distinct()
    song = __reverse(song, param, order_param)
    return song
| 30.8223
| 107
| 0.653177
| 1,035
| 8,846
| 4.951691
| 0.07343
| 0.126439
| 0.149854
| 0.116098
| 0.792195
| 0.747317
| 0.72722
| 0.705951
| 0.684683
| 0.654829
| 0
| 0
| 0.244517
| 8,846
| 286
| 108
| 30.93007
| 0.766871
| 0.188334
| 0
| 0.6
| 0
| 0
| 0.107354
| 0.067622
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.022222
| 0
| 0.155556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
814bcade9f339fb3a99bdf9081464c620a6be3d9
| 107
|
py
|
Python
|
tests/core/__init__.py
|
mlund/scipp
|
26648fdcda49b21a7aacdafd58625fab7ee3403b
|
[
"BSD-3-Clause"
] | null | null | null |
tests/core/__init__.py
|
mlund/scipp
|
26648fdcda49b21a7aacdafd58625fab7ee3403b
|
[
"BSD-3-Clause"
] | null | null | null |
tests/core/__init__.py
|
mlund/scipp
|
26648fdcda49b21a7aacdafd58625fab7ee3403b
|
[
"BSD-3-Clause"
] | null | null | null |
# SPDX-License-Identifier: BSD-3-Clause
# Copyright (c) 2022 Scipp contributors (https://github.com/scipp)
| 35.666667
| 66
| 0.757009
| 15
| 107
| 5.4
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.051546
| 0.093458
| 107
| 2
| 67
| 53.5
| 0.783505
| 0.953271
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
8178eaed81e06194905ad95a61b0ff4eb3e3b43e
| 114
|
py
|
Python
|
22. What Do You Know So Far/ex22.py
|
vishalnarnaware/Learn-PYTHON-the-HARD-WAY
|
392bae04c686c4a1076144f5dd295c7533e71163
|
[
"MIT"
] | null | null | null |
22. What Do You Know So Far/ex22.py
|
vishalnarnaware/Learn-PYTHON-the-HARD-WAY
|
392bae04c686c4a1076144f5dd295c7533e71163
|
[
"MIT"
] | null | null | null |
22. What Do You Know So Far/ex22.py
|
vishalnarnaware/Learn-PYTHON-the-HARD-WAY
|
392bae04c686c4a1076144f5dd295c7533e71163
|
[
"MIT"
] | null | null | null |
# Print a multi-line banner for the exercise; the quoted text is part of the
# program's output and is left byte-for-byte unchanged.
print('''
WARNING! The most important thing when doing this exercise is:
“There is no failure, only trying.”
''')
| 22.8
| 62
| 0.719298
| 17
| 114
| 4.823529
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 114
| 4
| 63
| 28.5
| 0.854167
| 0
| 0
| 0
| 0
| 0
| 0.877193
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0.25
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
8179390cd3d5f53e2d7856de0b95be381a7c16f3
| 169
|
py
|
Python
|
manage.py
|
sam-xif/Spade
|
e8c501580c5825fb3c461714f806aa4163123025
|
[
"MIT"
] | null | null | null |
manage.py
|
sam-xif/Spade
|
e8c501580c5825fb3c461714f806aa4163123025
|
[
"MIT"
] | 5
|
2018-01-29T15:34:05.000Z
|
2018-02-28T02:20:14.000Z
|
manage.py
|
sam-xif/Spade
|
e8c501580c5825fb3c461714f806aa4163123025
|
[
"MIT"
] | 2
|
2018-01-26T14:03:09.000Z
|
2018-01-29T01:58:21.000Z
|
#!/usr/bin/env python
# Entry point for sqlalchemy-migrate's versioning shell, pre-bound to the
# local `migrate_repo` repository and the sqlite database `spade.db`.
from migrate.versioning.shell import main
if __name__ == '__main__':
    main(debug='False', repository='migrate_repo', url='sqlite:///spade.db')
| 28.166667
| 76
| 0.715976
| 23
| 169
| 4.869565
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106509
| 169
| 5
| 77
| 33.8
| 0.741722
| 0.118343
| 0
| 0
| 0
| 0
| 0.290541
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
81a0b1a88a4c33a670c755939e33b1812445af31
| 315
|
py
|
Python
|
generated-libraries/python/netapp/cluster_peer/peer_ping_protocol.py
|
radekg/netapp-ontap-lib-get
|
6445ebb071ec147ea82a486fbe9f094c56c5c40d
|
[
"MIT"
] | 2
|
2017-03-28T15:31:26.000Z
|
2018-08-16T22:15:18.000Z
|
generated-libraries/python/netapp/cluster_peer/peer_ping_protocol.py
|
radekg/netapp-ontap-lib-get
|
6445ebb071ec147ea82a486fbe9f094c56c5c40d
|
[
"MIT"
] | null | null | null |
generated-libraries/python/netapp/cluster_peer/peer_ping_protocol.py
|
radekg/netapp-ontap-lib-get
|
6445ebb071ec147ea82a486fbe9f094c56c5c40d
|
[
"MIT"
] | null | null | null |
class PeerPingProtocol(basestring):
    """
    The network protocol to use when performing the ping operation.
    Possible values:
    <ul>
    <li> "data" - Data Ping,
    <li> "icmp" - ICMP Ping
    </ul>
    NOTE(review): subclassing ``basestring`` is Python-2-only; this module
    will raise NameError under Python 3 — confirm the supported runtime.
    """
    @staticmethod
    def get_api_name():
        # Wire name of this enum-like type in the ONTAP API.
        return "peer-ping-protocol"
| 21
| 67
| 0.568254
| 34
| 315
| 5.205882
| 0.735294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.320635
| 315
| 14
| 68
| 22.5
| 0.827103
| 0.469841
| 0
| 0
| 0
| 0
| 0.137405
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0
| 0.25
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
81cb625b32112423a5d0cd05909c375b10ac6822
| 91,026
|
py
|
Python
|
cohesity_management_sdk/controllers/protection_sources_controller.py
|
nick6655/management-sdk-python
|
88e792cb83e5c24a22af495b220c145d0c45841d
|
[
"Apache-2.0"
] | null | null | null |
cohesity_management_sdk/controllers/protection_sources_controller.py
|
nick6655/management-sdk-python
|
88e792cb83e5c24a22af495b220c145d0c45841d
|
[
"Apache-2.0"
] | null | null | null |
cohesity_management_sdk/controllers/protection_sources_controller.py
|
nick6655/management-sdk-python
|
88e792cb83e5c24a22af495b220c145d0c45841d
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2021 Cohesity Inc.
import logging
from cohesity_management_sdk.api_helper import APIHelper
from cohesity_management_sdk.configuration import Configuration
from cohesity_management_sdk.controllers.base_controller import BaseController
from cohesity_management_sdk.http.auth.auth_manager import AuthManager
from cohesity_management_sdk.models.upgrade_physical_agents_message import UpgradePhysicalAgentsMessage
from cohesity_management_sdk.models.protection_source_node import ProtectionSourceNode
from cohesity_management_sdk.models.registered_application_server import RegisteredApplicationServer
from cohesity_management_sdk.models.protection_source import ProtectionSource
from cohesity_management_sdk.models.protected_vm_info import ProtectedVmInfo
from cohesity_management_sdk.models.run_diagnostics_message import RunDiagnosticsMessage
from cohesity_management_sdk.models.get_registration_info_response import GetRegistrationInfoResponse
from cohesity_management_sdk.models.sql_aag_host_and_databases import SqlAagHostAndDatabases
from cohesity_management_sdk.exceptions.request_error_error_exception import RequestErrorErrorException
from cohesity_management_sdk.models.exchange_dag_hosts_response import ExchangeDagHostsResponse
from cohesity_management_sdk.models.download_cft_response import DownloadCftResponse
class ProtectionSourcesController(BaseController):
"""A Controller to access Endpoints in the cohesity_management_sdk API."""
    def __init__(self, config=None, client=None, call_back=None):
        """Initialize the controller.

        Args:
            config: Configuration object; provides the base URI and is
                passed to AuthManager for request authentication.
            client: HTTP client, forwarded to BaseController.
            call_back: Optional callback, forwarded to BaseController.
        """
        super(ProtectionSourcesController, self).__init__(client, call_back)
        self.logger = logging.getLogger(__name__)
        self.config = config
def get_download_physical_agent(self,
host_type,
pkg_type=None,
agent_type=None):
"""Does a GET request to /public/physicalAgents/download.
Host type could be Linux, Windows, AIX.
Args:
host_type (HostTypeDownloadPhysicalAgentEnum): Specifies the host
type for which user wants to download the physical agent.
'kLinux' indicates the Linux operating system. 'kWindows'
indicates the Microsoft Windows operating system. 'kAix'
indicates the IBM AIX operating system. 'kSolaris' indicates
the Oracle Solaris operating system. 'kSapHana' indicates the
Sap Hana database system developed by SAP SE. 'kOther'
indicates the other types of operating system.
pkg_type (PkgTypeEnum, optional): Specifies the Linux installer
package type applicable only to Linux OS and the value can be
any of ("kScript","kRPM", "kSuseRPM", "kDEB") 'kScript'
indicates a script based agent installer. 'kRPM' indicates a
RPM agent installer. 'kSuseRPM' indicates a Open Suse RPM
installer. 'kDEB' indicates a Debian agent installer.
agent_type (AgentTypeEnum, optional): Specifies agent type. Can be
"kGo" for go agent and "kJava" for java agent and "kCpp" for
c++ agent. 'kCpp' indicates a c++ agent. 'kJava' indicates a
java agent. 'kGo' indicates a go agent.
Returns:
list of int: Response from the API. Success
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
try:
self.logger.info('get_download_physical_agent called.')
# Validate required parameters
self.logger.info(
'Validating required parameters for get_download_physical_agent.'
)
self.validate_parameters(host_type=host_type)
# Prepare query URL
self.logger.info(
'Preparing query URL for get_download_physical_agent.')
_url_path = '/public/physicalAgents/download'
_query_builder = self.config.get_base_uri()
_query_builder += _url_path
_query_parameters = {
'hostType': host_type,
'pkgType': pkg_type,
'agentType': agent_type
}
_query_builder = APIHelper.append_url_with_query_parameters(
_query_builder, _query_parameters,
Configuration.array_serialization)
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
self.logger.info(
'Preparing headers for get_download_physical_agent.')
_headers = {'accept': 'application/json'}
# Prepare and execute request
self.logger.info(
'Preparing and executing request for get_download_physical_agent.'
)
_request = self.http_client.get(_query_url, headers=_headers)
AuthManager.apply(_request, self.config)
_context = self.execute_request(_request,
name='get_download_physical_agent')
# Endpoint and global error handling using HTTP status codes.
self.logger.info(
'Validating response for get_download_physical_agent.')
if _context.response.status_code == 0:
raise RequestErrorErrorException('Error', _context)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body)
except Exception as e:
self.logger.error(e, exc_info=True)
raise
def create_upgrade_physical_agents(self, body=None):
"""Does a POST request to /public/physicalAgents/upgrade.
If the request is successful, the Cohesity agents on the specified
Physical Servers are upgraded to the agent release
currently available from this Cohesity Cluster.
For example if the Cluster is upgraded from 3.7.1 to 4.0,
the agents on the specified Physical Servers can be upgraded to 4.0
using
this POST operation.
To get the agentIds to pass into this operation, call
GET /public/protectionSources with the environment set to
'KPhysical'.
In addition this GET operation returns the agentUpgradability field,
that
indicates if an agent can be upgraded. Use the agentUpgradability
field
to determine which Physical Servers to upgrade using this
POST /public/physicalAgents/upgrade operation.
WARNING: Only agents at a particular Cohesity release can be
upgraded using this operation.
See the Cohesity online help for details.
Returns the status of the upgrade initiation.
Args:
body (UpgradePhysicalServerAgents, optional): Request to upgrade
agents on Physical Servers.
Returns:
UpgradePhysicalAgentsMessage: Response from the API. Success
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
try:
self.logger.info('create_upgrade_physical_agents called.')
# Prepare query URL
self.logger.info(
'Preparing query URL for create_upgrade_physical_agents.')
_url_path = '/public/physicalAgents/upgrade'
_query_builder = self.config.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
self.logger.info(
'Preparing headers for create_upgrade_physical_agents.')
_headers = {
'accept': 'application/json',
'content-type': 'application/json; charset=utf-8'
}
# Prepare and execute request
self.logger.info(
'Preparing and executing request for create_upgrade_physical_agents.'
)
_request = self.http_client.post(
_query_url,
headers=_headers,
parameters=APIHelper.json_serialize(body))
AuthManager.apply(_request, self.config)
_context = self.execute_request(
_request, name='create_upgrade_physical_agents')
# Endpoint and global error handling using HTTP status codes.
self.logger.info(
'Validating response for create_upgrade_physical_agents.')
if _context.response.status_code == 0:
raise RequestErrorErrorException('Error', _context)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(
_context.response.raw_body,
UpgradePhysicalAgentsMessage.from_dictionary)
except Exception as e:
self.logger.error(e, exc_info=True)
raise
def list_protection_sources(self,
after_cursor_entity_id=None,
before_cursor_entity_id=None,
node_id=None,
page_size=None,
has_valid_mailbox=None,
has_valid_onedrive=None,
id=None,
num_levels=None,
exclude_types=None,
exclude_aws_types=None,
include_datastores=None,
include_networks=None,
include_vm_folders=None,
include_system_v_apps=None,
environments=None,
environment=None,
include_entity_permission_info=None,
sids=None,
include_source_credentials=None,
encryption_key=None,
tenant_ids=None,
all_under_hierarchy=None):
"""Does a GET request to /public/protectionSources.
If no parameters are specified, all Protection Sources that are
registered
on the Cohesity Cluster are returned.
In addition, an Object subtree gathered from each Source is returned.
For example, the Cohesity Cluster interrogates a Source VMware vCenter
Server
and creates an hierarchical Object subtree that mirrors the
Inventory tree on vCenter Server.
The contents of the Object tree are returned as a "nodes" hierarchy
of "protectionSource"s.
Specifying parameters can alter the results that are returned.
Args:
after_cursor_entity_id (long|int, optional): Specifies the entity
id starting from which the items are to be returned.
before_cursor_entity_id (long|int, optional): Specifies the entity
id upto which the items are to be returned.
node_id (long|int, optional): Specifies the entity id for the Node
at any level within the Source entity hierarchy whose children
are to be paginated.
page_size (long|int, optional): Specifies the maximum number of
entities to be returned within the page.
has_valid_mailbox (bool, optional): If set to true, users with
valid mailbox will be returned.
has_valid_onedrive (bool, optional): If set to true, users with
valid onedrive will be returned.
id (long|int, optional): Return the Object subtree for the passed
in Protection Source id.
num_levels (int, optional): Specifies the expected number of levels
from the root node to be returned in the entity hierarchy
response.
exclude_types (list of ExcludeTypeEnum, optional): Filter out the
Object types (and their subtrees) that match the passed in
types such as 'kVCenter', 'kFolder', 'kDatacenter',
'kComputeResource', 'kResourcePool', 'kDatastore',
'kHostSystem', 'kVirtualMachine', etc. For example, set this
parameter to 'kResourcePool' to exclude Resource Pool Objects
from being returned.
exclude_aws_types (list of ExcludeAwsTypeEnum, optional): Specifies
the Object types to be filtered out for AWS that match the
passed in types such as 'kEC2Instance', 'kRDSInstance' etc.
For example, set this parameter to 'kEC2Instance' to exclude
ec2 instance from being returned.
include_datastores (bool, optional): Set this parameter to true to
also return kDatastore object types found in the Source in
addition to their Object subtrees. By default, datastores are
not returned.
include_networks (bool, optional): Set this parameter to true to
also return kNetwork object types found in the Source in
addition to their Object subtrees. By default, network objects
are not returned.
include_vm_folders (bool, optional): Set this parameter to true to
also return kVMFolder object types found in the Source in
addition to their Object subtrees. By default, VM folder
objects are not returned.
include_system_v_apps (bool, optional): Set this parameter to true
to also return system VApp object types found in the Source in
addition to their Object subtrees. By default, VM folder
objects are not returned.
environments (list of EnvironmentListProtectionSourcesEnum,
optional): Return only Protection Sources that match the
passed in environment type such as 'kVMware', 'kSQL', 'kView'
'kPhysical', 'kPuppeteer', 'kPure', 'kNetapp', 'kGenericNas',
'kHyperV', 'kAcropolis', or 'kAzure'. For example, set this
parameter to 'kVMware' to only return the Sources (and their
Object subtrees) found in the 'kVMware' (VMware vCenter
Server) environment. NOTE: 'kPuppeteer' refers to Cohesity's
Remote Adapter.
environment (string, optional): This field is deprecated. Use
environments instead. deprecated: true
include_entity_permission_info (bool, optional): If specified,
then a list of entites with permissions assigned to them are
returned.
sids (list of string, optional): Filter the object subtree for the
sids given in the list.
include_source_credentials (bool, optional): If specified, then
crednetial for the registered sources will be included.
Credential is first encrypted with internal key and then
reencrypted with user supplied 'encryption_key'.
encryption_key (string, optional): Key to be used to encrypt the
source credential. If include_source_credentials is set to true
this key must be specified.
tenant_ids (list of string, optional): TenantIds contains ids of
the tenants for which objects are to be returned.
all_under_hierarchy (bool, optional): AllUnderHierarchy specifies
if objects of all the tenants under the hierarchy of the
logged in user's organization should be returned.
Returns:
list of ProtectionSourceNode: Response from the API. Success
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
try:
self.logger.info('list_protection_sources called.')
# Prepare query URL
self.logger.info(
'Preparing query URL for list_protection_sources.')
_url_path = '/public/protectionSources'
_query_builder = self.config.get_base_uri()
_query_builder += _url_path
_query_parameters = {
'afterCursorEntityId': after_cursor_entity_id,
'beforeCursorEntityId': before_cursor_entity_id,
'nodeId': node_id,
'pageSize': page_size,
'hasValidMailbox': has_valid_mailbox,
'hasValidOnedrive': has_valid_onedrive,
'id': id,
'numLevels': num_levels,
'excludeTypes': exclude_types,
'excludeAwsTypes': exclude_aws_types,
'includeDatastores': include_datastores,
'includeNetworks': include_networks,
'includeVMFolders': include_vm_folders,
'includeSystemVApps': include_system_v_apps,
'environments': environments,
'environment': environment,
'includeEntityPermissionInfo': include_entity_permission_info,
'sids': sids,
'includeSourceCredentials': include_source_credentials,
'encryptionKey': encryption_key,
'tenantIds': tenant_ids,
'allUnderHierarchy': all_under_hierarchy
}
_query_builder = APIHelper.append_url_with_query_parameters(
_query_builder, _query_parameters,
Configuration.array_serialization)
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
self.logger.info('Preparing headers for list_protection_sources.')
_headers = {'accept': 'application/json'}
# Prepare and execute request
self.logger.info(
'Preparing and executing request for list_protection_sources.')
_request = self.http_client.get(_query_url, headers=_headers)
AuthManager.apply(_request, self.config)
_context = self.execute_request(_request,
name='list_protection_sources')
# Endpoint and global error handling using HTTP status codes.
self.logger.info(
'Validating response for list_protection_sources.')
if _context.response.status_code == 0:
raise RequestErrorErrorException('Error', _context)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(
_context.response.raw_body,
ProtectionSourceNode.from_dictionary)
except Exception as e:
self.logger.error(e, exc_info=True)
raise
def list_application_servers(self,
protection_sources_root_node_id=None,
environment=None,
protection_source_id=None,
application=None):
"""Does a GET request to /public/protectionSources/applicationServers.
Given the root node id of a Protection Source tree, returns the list
of
Application Servers registered under that tree based on the filters.
Args:
protection_sources_root_node_id (long|int, optional): Specifies
the Protection Source Id of the root node of a Protection
Sources tree. A root node represents a registered Source on
the Cohesity Cluster, such as a vCenter Server.
environment (EnvironmentListApplicationServersEnum, optional):
Specifies the environment such as 'kPhysical' or 'kVMware' of
the Protection Source tree. overrideDescription: true
Supported environment types such as 'kView', 'kSQL',
'kVMware', etc. NOTE: 'kPuppeteer' refers to Cohesity's Remote
Adapter. 'kVMware' indicates the VMware Protection Source
environment. 'kHyperV' indicates the HyperV Protection Source
environment. 'kSQL' indicates the SQL Protection Source
environment. 'kView' indicates the View Protection Source
environment. 'kPuppeteer' indicates the Cohesity's Remote
Adapter. 'kPhysical' indicates the physical Protection Source
environment. 'kPure' indicates the Pure Storage Protection
Source environment. 'Nimble' indicates the Nimble Storage
Protection Source environment. 'kAzure' indicates the
Microsoft's Azure Protection Source environment. 'kNetapp'
indicates the Netapp Protection Source environment. 'kAgent'
indicates the Agent Protection Source environment.
'kGenericNas' indicates the Generic Network Attached Storage
Protection Source environment. 'kAcropolis' indicates the
Acropolis Protection Source environment. 'kPhsicalFiles'
indicates the Physical Files Protection Source environment.
'kIsilon' indicates the Dell EMC's Isilon Protection Source
environment. 'kGPFS' indicates IBM's GPFS Protection Source
environment. 'kKVM' indicates the KVM Protection Source
environment. 'kAWS' indicates the AWS Protection Source
environment. 'kExchange' indicates the Exchange Protection
Source environment. 'kHyperVVSS' indicates the HyperV VSS
Protection Source environment. 'kOracle' indicates the Oracle
Protection Source environment. 'kGCP' indicates the Google
Cloud Platform Protection Source environment. 'kFlashBlade'
indicates the Flash Blade Protection Source environment.
'kAWSNative' indicates the AWS Native Protection Source
environment. 'kO365' indicates the Office 365 Protection Source
environment. 'kO365Outlook' indicates Office 365 outlook
Protection Source environment. 'kHyperFlex' indicates the Hyper
Flex Protection Source environment. 'kGCPNative' indicates the
GCP Native Protection Source environment. 'kAzureNative'
indicates the Azure Native Protection Source environment.
'kKubernetes' indicates a Kubernetes Protection Source
environment. 'kElastifile' indicates Elastifile Protection
Source environment. 'kAD' indicates Active Directory
Protection Source environment. 'kRDSSnapshotManager'
indicates AWS RDS Protection Source environment. 'kCassandra'
indicates Cassandra Protection Source environment. 'kMongoDB'
indicates MongoDB Protection Source environment. 'kCouchbase'
indicates Couchbase Protection Source environment. 'kHdfs'
indicates Hdfs Protection Source environment. 'kHive'
indicates Hive Protection Source environment. 'kHBase'
indicates HBase Protection Source environment.
protection_source_id (long|int, optional): Specifies the
Protection Source Id of the 'kPhysical' or 'kVMware' entity in
the Protection Source tree hosting the applications.
application (ApplicationEnum, optional): Specifies the application
such as 'kSQL', 'kExchange' running on the Protection Source.
overrideDescription: true Supported environment types such as
'kView', 'kSQL', 'kVMware', etc. NOTE: 'kPuppeteer' refers to
Cohesity's Remote Adapter. 'kVMware' indicates the VMware
Protection Source environment. 'kHyperV' indicates the HyperV
Protection Source environment. 'kSQL' indicates the SQL
Protection Source environment. 'kView' indicates the View
Protection Source environment. 'kPuppeteer' indicates the
Cohesity's Remote Adapter. 'kPhysical' indicates the physical
Protection Source environment. 'kPure' indicates the Pure
Storage Protection Source environment. 'Nimble' indicates the
Nimble Storage Protection Source environment. 'kAzure'
indicates the Microsoft's Azure Protection Source environment.
'kNetapp' indicates the Netapp Protection Source environment.
'kAgent' indicates the Agent Protection Source environment.
'kGenericNas' indicates the Generic Network Attached Storage
Protection Source environment. 'kAcropolis' indicates the
Acropolis Protection Source environment. 'kPhsicalFiles'
indicates the Physical Files Protection Source environment.
'kIsilon' indicates the Dell EMC's Isilon Protection Source
environment. 'kGPFS' indicates IBM's GPFS Protection Source
environment. 'kKVM' indicates the KVM Protection Source
environment. 'kAWS' indicates the AWS Protection Source
environment. 'kExchange' indicates the Exchange Protection
Source environment. 'kHyperVVSS' indicates the HyperV VSS
Protection Source environment. 'kOracle' indicates the Oracle
Protection Source environment. 'kGCP' indicates the Google
Cloud Platform Protection Source environment. 'kFlashBlade'
indicates the Flash Blade Protection Source environment.
'kAWSNative' indicates the AWS Native Protection Source
environment. 'kO365' indicates the
Office 365 Protection Source environment. 'kO365Outlook'
indicates Office 365 outlook Protection Source environment.
'kHyperFlex' indicates the Hyper Flex Protection Source
environment. 'kGCPNative' indicates the GCP Native Protection
Source environment. 'kAzureNative' indicates the Azure Native
Protection Source environment. 'kKubernetes' indicates a
Kubernetes Protection Source environment. 'kElastifile'
indicates Elastifile Protection Source environment.
'kAD' indicates Active Directory Protection Source environment.
'kRDSSnapshotManager' indicates AWS RDS Protection Source
environment. 'kCassandra' indicates Cassandra Protection Source
environment. 'kMongoDB' indicates MongoDB Protection Source
environment. 'kCouchbase' indicates Couchbase Protection Source
environment. 'kHdfs' indicates Hdfs Protection Source
environment. 'kHive' indicates Hive Protection Source
environment. 'kHBase' indicates HBase Protection Source
environment.
Returns:
list of RegisteredApplicationServer: Response from the API.
Success
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
try:
self.logger.info('list_application_servers called.')
# Prepare query URL
self.logger.info(
'Preparing query URL for list_application_servers.')
_url_path = '/public/protectionSources/applicationServers'
_query_builder = self.config.get_base_uri()
_query_builder += _url_path
_query_parameters = {
'protectionSourcesRootNodeId': protection_sources_root_node_id,
'environment': environment,
'protectionSourceId': protection_source_id,
'application': application
}
_query_builder = APIHelper.append_url_with_query_parameters(
_query_builder, _query_parameters,
Configuration.array_serialization)
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
self.logger.info('Preparing headers for list_application_servers.')
_headers = {'accept': 'application/json'}
# Prepare and execute request
self.logger.info(
'Preparing and executing request for list_application_servers.'
)
_request = self.http_client.get(_query_url, headers=_headers)
AuthManager.apply(_request, self.config)
_context = self.execute_request(_request,
name='list_application_servers')
# Endpoint and global error handling using HTTP status codes.
self.logger.info(
'Validating response for list_application_servers.')
if _context.response.status_code == 0:
raise RequestErrorErrorException('Error', _context)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(
_context.response.raw_body,
RegisteredApplicationServer.from_dictionary)
except Exception as e:
self.logger.error(e, exc_info=True)
raise
def create_register_application_servers(self, body):
"""Does a POST request to /public/protectionSources/applicationServers.
Registering Application Servers will help Cohesity Cluster such that
any
application specific data can be backed up.
Returns the Protection Source registered upon success.
Args:
body (RegisterApplicationServersParameters): Request to register
Application Servers in a Protection Source.
Returns:
ProtectionSource: Response from the API. Success
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
try:
self.logger.info('create_register_application_servers called.')
# Validate required parameters
self.logger.info(
'Validating required parameters for create_register_application_servers.'
)
self.validate_parameters(body=body)
# Prepare query URL
self.logger.info(
'Preparing query URL for create_register_application_servers.')
_url_path = '/public/protectionSources/applicationServers'
_query_builder = self.config.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
self.logger.info(
'Preparing headers for create_register_application_servers.')
_headers = {
'accept': 'application/json',
'content-type': 'application/json; charset=utf-8'
}
# Prepare and execute request
self.logger.info(
'Preparing and executing request for create_register_application_servers.'
)
_request = self.http_client.post(
_query_url,
headers=_headers,
parameters=APIHelper.json_serialize(body))
AuthManager.apply(_request, self.config)
_context = self.execute_request(
_request, name='create_register_application_servers')
# Endpoint and global error handling using HTTP status codes.
self.logger.info(
'Validating response for create_register_application_servers.')
if _context.response.status_code == 0:
raise RequestErrorErrorException('Error', _context)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body,
ProtectionSource.from_dictionary)
except Exception as e:
self.logger.error(e, exc_info=True)
raise
def update_application_servers(self, body):
"""Does a PUT request to /public/protectionSources/applicationServers.
Returns the Protection Source whose registration parameters of its
Application Servers are modified upon success.
Args:
body (UpdateApplicationServerParameters): Request to modify the
Application Servers registration of a Protection Source.
Returns:
ProtectionSource: Response from the API. Success
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
try:
self.logger.info('update_application_servers called.')
# Validate required parameters
self.logger.info(
'Validating required parameters for update_application_servers.'
)
self.validate_parameters(body=body)
# Prepare query URL
self.logger.info(
'Preparing query URL for update_application_servers.')
_url_path = '/public/protectionSources/applicationServers'
_query_builder = self.config.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
self.logger.info(
'Preparing headers for update_application_servers.')
_headers = {
'accept': 'application/json',
'content-type': 'application/json; charset=utf-8'
}
# Prepare and execute request
self.logger.info(
'Preparing and executing request for update_application_servers.'
)
_request = self.http_client.put(
_query_url,
headers=_headers,
parameters=APIHelper.json_serialize(body))
AuthManager.apply(_request, self.config)
_context = self.execute_request(_request,
name='update_application_servers')
# Endpoint and global error handling using HTTP status codes.
self.logger.info(
'Validating response for update_application_servers.')
if _context.response.status_code == 0:
raise RequestErrorErrorException('Error', _context)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body,
ProtectionSource.from_dictionary)
except Exception as e:
self.logger.error(e, exc_info=True)
raise
def delete_unregister_application_servers(self, body, id):
"""Does a DELETE request to /public/protectionSources/applicationServers/{id}.
Unregistering Application Servers will fail if the Protection Source
is
currently being backed up.
Returns the Protection Source whose Application Servers are
unregistered upon
success.
Args:
body (UnRegisterApplicationServersParameters): Request to register
a protection source.
id (long|int): Specifies a unique id of the Protection Source to
unregister the Application Servers. If the Protection Source
is currently being backed up, unregister operation will fail.
Returns:
ProtectionSource: Response from the API. Success
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
try:
self.logger.info('delete_unregister_application_servers called.')
# Validate required parameters
self.logger.info(
'Validating required parameters for delete_unregister_application_servers.'
)
self.validate_parameters(body=body, id=id)
# Prepare query URL
self.logger.info(
'Preparing query URL for delete_unregister_application_servers.'
)
_url_path = '/public/protectionSources/applicationServers/{id}'
_url_path = APIHelper.append_url_with_template_parameters(
_url_path, {'id': id})
_query_builder = self.config.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
self.logger.info(
'Preparing headers for delete_unregister_application_servers.')
_headers = {
'accept': 'application/json',
'content-type': 'application/json; charset=utf-8'
}
# Prepare and execute request
self.logger.info(
'Preparing and executing request for delete_unregister_application_servers.'
)
_request = self.http_client.delete(
_query_url,
headers=_headers,
parameters=APIHelper.json_serialize(body))
AuthManager.apply(_request, self.config)
_context = self.execute_request(
_request, name='delete_unregister_application_servers')
# Endpoint and global error handling using HTTP status codes.
self.logger.info(
'Validating response for delete_unregister_application_servers.'
)
if _context.response.status_code == 0:
raise RequestErrorErrorException('Error', _context)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body,
ProtectionSource.from_dictionary)
except Exception as e:
self.logger.error(e, exc_info=True)
raise
def list_data_store_information(self, source_id):
"""Does a GET request to /public/protectionSources/datastores.
Returns the datastore information in VMware environment.
Args:
source_id (long|int): Specifies the id of the virtual machine in
vmware environment.
Returns:
list of ProtectionSource: Response from the API. Success
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
try:
self.logger.info('list_data_store_information called.')
# Validate required parameters
self.logger.info(
'Validating required parameters for list_data_store_information.'
)
self.validate_parameters(source_id=source_id)
# Prepare query URL
self.logger.info(
'Preparing query URL for list_data_store_information.')
_url_path = '/public/protectionSources/datastores'
_query_builder = self.config.get_base_uri()
_query_builder += _url_path
_query_parameters = {'sourceId': source_id}
_query_builder = APIHelper.append_url_with_query_parameters(
_query_builder, _query_parameters,
Configuration.array_serialization)
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
self.logger.info(
'Preparing headers for list_data_store_information.')
_headers = {'accept': 'application/json'}
# Prepare and execute request
self.logger.info(
'Preparing and executing request for list_data_store_information.'
)
_request = self.http_client.get(_query_url, headers=_headers)
AuthManager.apply(_request, self.config)
_context = self.execute_request(_request,
name='list_data_store_information')
# Endpoint and global error handling using HTTP status codes.
self.logger.info(
'Validating response for list_data_store_information.')
if _context.response.status_code == 0:
raise RequestErrorErrorException('Error', _context)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body,
ProtectionSource.from_dictionary)
except Exception as e:
self.logger.error(e, exc_info=True)
raise
def run_diagnostics(self, id):
"""Does a POST request to /public/protectionSources/diagnostics/{id}
If the request is successful, the diagnostics script is triggered on
Cohesity
agent which generates a tarball containing various diagnostics and
uploads it
to the Cohesity cluster. Host type could be Linux, Windows.
Args:
id (int): Specifies the entity id.
Returns:
RunDiagnosticsMessage: Response from the API. Success
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
try:
self.logger.info('run_diagnostics called.')
# Validate required parameters
self.logger.info(
'Validating required parameters for run_diagnostics.'
)
self.validate_parameters(id=id)
# Prepare query URL
_url_path = '/public/protectionSources/diagnostics/{id}'
_url_path = APIHelper.append_url_with_template_parameters(
_url_path, {'id': id})
_query_builder = self.config.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
self.logger.info(
'Preparing headers for run_diagnostics.')
_headers = {
'accept': 'application/json'}
# Prepare and execute request
self.logger.info(
'Preparing and executing request for run_diagnostics.'
)
_request = self.http_client.post(_query_url, headers=_headers)
AuthManager.apply(_request, self.config)
_context = self.execute_request(_request, name='run_diagnostics')
# Endpoint and global error handling using HTTP status codes.
self.logger.info(
'Validating response for run_diagnostics.')
if _context.response.status_code == 0:
raise RequestErrorErrorException('Error', _context)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body,
RunDiagnosticsMessage.from_dictionary)
except Exception as e:
self.logger.error(e, exc_info=True)
raise
def download_cft_file(self, body=None):
    """Does a GET request to /public/protectionSources/downloadCftFile.

    Args:
        body (DownloadCftParams, optional): Specifies the request to
            download CFT.
            NOTE(review): this argument is accepted but never serialized
            or transmitted by the current implementation — confirm the
            intended contract against the API specification.

    Returns:
        DownloadCftResponse: Response from the API. Success

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API. This exception includes the HTTP Response
            code, an error message, and the HTTP body that was received
            in the request.
    """
    try:
        self.logger.info('download_cft_file called.')

        # Assemble the full request URL (no query or template params).
        self.logger.info('Preparing query URL for download_cft_file.')
        request_url = APIHelper.clean_url(
            self.config.get_base_uri()
            + '/public/protectionSources/downloadCftFile')

        # Request headers.
        self.logger.info('Preparing headers for download_cft_file.')
        request_headers = {'accept': 'application/json'}

        # Build, authenticate, and execute the GET request.
        self.logger.info(
            'Preparing and executing request for download_cft_file.')
        request = self.http_client.get(request_url, headers=request_headers)
        AuthManager.apply(request, self.config)
        context = self.execute_request(request, name='download_cft_file')

        # Endpoint and global error handling using HTTP status codes.
        self.logger.info('Validating response for download_cft_file.')
        if context.response.status_code == 0:
            raise RequestErrorErrorException('Error', context)
        self.validate_response(context)

        # Deserialize the response body into the model type.
        return APIHelper.json_deserialize(
            context.response.raw_body,
            DownloadCftResponse.from_dictionary)
    except Exception as e:
        self.logger.error(e, exc_info=True)
        raise
def list_exchange_dag_hosts(self, endpoint=None, protection_source_id=None):
    """Does a GET request to /public/protectionSources/exchangeDagHosts.

    Returns information about all the exchange hosts that belong to an
    Exchange DAG.

    Args:
        endpoint (string, optional): Specifies the endpoint of Exchange
            DAG or a host which is member of Exchange DAG or a
            standalone exchange server.
        protection_source_id (int, optional): Specifies the Protection
            Source Id of the Exchange DAG source.

    Returns:
        ExchangeDagHostsResponse: Response from the API. Success

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API. This exception includes the HTTP Response
            code, an error message, and the HTTP body that was received
            in the request.
    """
    try:
        self.logger.info('list_exchange_dag_hosts called.')

        # Assemble the full request URL with query parameters.
        self.logger.info('Preparing query URL for list_exchange_dag_hosts.')
        query_params = {
            'endpoint': endpoint,
            'protectionSourceId': protection_source_id,
        }
        built_url = APIHelper.append_url_with_query_parameters(
            self.config.get_base_uri()
            + '/public/protectionSources/exchangeDagHosts',
            query_params,
            Configuration.array_serialization)
        request_url = APIHelper.clean_url(built_url)

        # Request headers.
        self.logger.info('Preparing headers for list_exchange_dag_hosts.')
        request_headers = {'accept': 'application/json'}

        # Build, authenticate, and execute the GET request.
        self.logger.info(
            'Preparing and executing request for list_exchange_dag_hosts.')
        request = self.http_client.get(request_url, headers=request_headers)
        AuthManager.apply(request, self.config)
        context = self.execute_request(
            request, name='list_exchange_dag_hosts')

        # Endpoint and global error handling using HTTP status codes.
        self.logger.info('Validating response for list_exchange_dag_hosts.')
        if context.response.status_code == 0:
            raise RequestErrorErrorException('Error', context)
        self.validate_response(context)

        # Deserialize the response body into the model type.
        return APIHelper.json_deserialize(
            context.response.raw_body,
            ExchangeDagHostsResponse.from_dictionary)
    except Exception as e:
        self.logger.error(e, exc_info=True)
        raise
def get_protection_sources_objects(self, object_ids=None):
    """Does a GET request to /public/protectionSources/objects.

    Returns the Protection Source objects corresponding to the specified
    ids.

    Args:
        object_ids (list of long|int, optional): Specifies the ids of
            the Protection Source objects to return.

    Returns:
        list of ProtectionSource: Response from the API. Success

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API. This exception includes the HTTP Response
            code, an error message, and the HTTP body that was received
            in the request.
    """
    try:
        self.logger.info('get_protection_sources_objects called.')

        # Assemble the full request URL with query parameters.
        self.logger.info(
            'Preparing query URL for get_protection_sources_objects.')
        built_url = APIHelper.append_url_with_query_parameters(
            self.config.get_base_uri() + '/public/protectionSources/objects',
            {'objectIds': object_ids},
            Configuration.array_serialization)
        request_url = APIHelper.clean_url(built_url)

        # Request headers.
        self.logger.info(
            'Preparing headers for get_protection_sources_objects.')
        request_headers = {'accept': 'application/json'}

        # Build, authenticate, and execute the GET request.
        self.logger.info(
            'Preparing and executing request for get_protection_sources_objects.'
        )
        request = self.http_client.get(request_url, headers=request_headers)
        AuthManager.apply(request, self.config)
        context = self.execute_request(
            request, name='get_protection_sources_objects')

        # Endpoint and global error handling using HTTP status codes.
        self.logger.info(
            'Validating response for get_protection_sources_objects.')
        if context.response.status_code == 0:
            raise RequestErrorErrorException('Error', context)
        self.validate_response(context)

        # Deserialize the response body into the model type.
        return APIHelper.json_deserialize(
            context.response.raw_body,
            ProtectionSource.from_dictionary)
    except Exception as e:
        self.logger.error(e, exc_info=True)
        raise
def get_protection_sources_object_by_id(self, id):
    """Does a GET request to /public/protectionSources/objects/{id}.

    Returns the Protection Source object corresponding to the specified
    id.

    Args:
        id (long|int): Specifies a unique id of the Protection Source to
            return.

    Returns:
        ProtectionSource: Response from the API. Success

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API. This exception includes the HTTP Response
            code, an error message, and the HTTP body that was received
            in the request.
    """
    try:
        self.logger.info('get_protection_sources_object_by_id called.')

        # Required-parameter validation.
        self.logger.info(
            'Validating required parameters for get_protection_sources_object_by_id.'
        )
        self.validate_parameters(id=id)

        # Expand the {id} template and assemble the full request URL.
        self.logger.info(
            'Preparing query URL for get_protection_sources_object_by_id.')
        resolved_path = APIHelper.append_url_with_template_parameters(
            '/public/protectionSources/objects/{id}', {'id': id})
        request_url = APIHelper.clean_url(
            self.config.get_base_uri() + resolved_path)

        # Request headers.
        self.logger.info(
            'Preparing headers for get_protection_sources_object_by_id.')
        request_headers = {'accept': 'application/json'}

        # Build, authenticate, and execute the GET request.
        self.logger.info(
            'Preparing and executing request for get_protection_sources_object_by_id.'
        )
        request = self.http_client.get(request_url, headers=request_headers)
        AuthManager.apply(request, self.config)
        context = self.execute_request(
            request, name='get_protection_sources_object_by_id')

        # Endpoint and global error handling using HTTP status codes.
        self.logger.info(
            'Validating response for get_protection_sources_object_by_id.')
        if context.response.status_code == 0:
            raise RequestErrorErrorException('Error', context)
        self.validate_response(context)

        # Deserialize the response body into the model type.
        return APIHelper.json_deserialize(
            context.response.raw_body,
            ProtectionSource.from_dictionary)
    except Exception as e:
        self.logger.error(e, exc_info=True)
        raise
def list_protected_objects(self,
                           environment,
                           id,
                           all_under_hierarchy=None,
                           include_rpo_snapshots=None):
    """Does a GET request to /public/protectionSources/protectedObjects.

    Returns the list of protected Objects in a registered Protection
    Source.

    Args:
        environment (EnvironmentListProtectedObjectsEnum): Specifies the
            environment type of the registered Protection Source, such
            as 'kVMware', 'kSQL', 'kView', 'kPhysical', 'kPuppeteer',
            'kPure', 'kNetapp', 'kGenericNas', 'kHyperV', 'kAcropolis',
            'kAzure', and the other supported 'k*' environment values
            (e.g. 'kIsilon', 'kGPFS', 'kKVM', 'kAWS', 'kExchange',
            'kOracle', 'kGCP', 'kFlashBlade', 'kO365', 'kHyperFlex',
            'kKubernetes', 'kElastifile', 'kAD', 'kRDSSnapshotManager',
            'kCassandra', 'kMongoDB', 'kCouchbase', 'kHdfs', 'kHive',
            'kHBase'). For example, set this parameter to 'kVMware' if
            the registered Protection Source is of 'kVMware'
            environment type. NOTE: 'kPuppeteer' refers to Cohesity's
            Remote Adapter.
        id (long|int): Specifies the Id of a registered Protection
            Source of the type given in environment.
        all_under_hierarchy (bool, optional): AllUnderHierarchy
            specifies if objects of all the tenants under the hierarchy
            of the logged in user's organization should be returned.
        include_rpo_snapshots (bool, optional): If true, then the
            Protected Objects protected by RPO policies will also be
            returned.

    Returns:
        list of ProtectedVmInfo: Response from the API. Success

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API. This exception includes the HTTP Response
            code, an error message, and the HTTP body that was received
            in the request.
    """
    try:
        self.logger.info('list_protected_objects called.')

        # Required-parameter validation.
        self.logger.info(
            'Validating required parameters for list_protected_objects.')
        self.validate_parameters(environment=environment, id=id)

        # Assemble the full request URL with query parameters.
        self.logger.info('Preparing query URL for list_protected_objects.')
        query_params = {
            'environment': environment,
            'id': id,
            'allUnderHierarchy': all_under_hierarchy,
            'includeRpoSnapshots': include_rpo_snapshots,
        }
        built_url = APIHelper.append_url_with_query_parameters(
            self.config.get_base_uri()
            + '/public/protectionSources/protectedObjects',
            query_params,
            Configuration.array_serialization)
        request_url = APIHelper.clean_url(built_url)

        # Request headers.
        self.logger.info('Preparing headers for list_protected_objects.')
        request_headers = {'accept': 'application/json'}

        # Build, authenticate, and execute the GET request.
        self.logger.info(
            'Preparing and executing request for list_protected_objects.')
        request = self.http_client.get(request_url, headers=request_headers)
        AuthManager.apply(request, self.config)
        context = self.execute_request(
            request, name='list_protected_objects')

        # Endpoint and global error handling using HTTP status codes.
        self.logger.info('Validating response for list_protected_objects.')
        if context.response.status_code == 0:
            raise RequestErrorErrorException('Error', context)
        self.validate_response(context)

        # Deserialize the response body into the model type.
        return APIHelper.json_deserialize(
            context.response.raw_body,
            ProtectedVmInfo.from_dictionary)
    except Exception as e:
        self.logger.error(e, exc_info=True)
        raise
def create_refresh_protection_source_by_id(self, id):
    """Does a POST request to /public/protectionSources/refresh/{id}.

    Force an immediate refresh between the specified Protection Source
    tree on the Cohesity Cluster and the Inventory tree in the
    associated vCenter Server. For example, if a new VM is added to the
    vCenter Server, after a refresh a new Protection Source node for
    this VM is added to the Protection Sources tree.

    Success indicates the forced refresh has been completed. For larger
    sources it is possible for the operation to timeout before the
    force refresh has been completed. This timeout can be increased by
    modifying the 'iris_post_timeout_msecs_to_magneto' gflag on the
    Iris service.

    Args:
        id (long|int): Id of the root node of the Protection Sources
            tree to refresh. Force a refresh of the Object hierarchy
            for the passed in Protection Sources Id.

    Returns:
        void: Response from the API. No Content

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API. This exception includes the HTTP Response
            code, an error message, and the HTTP body that was received
            in the request.
    """
    try:
        self.logger.info('create_refresh_protection_source_by_id called.')

        # Required-parameter validation.
        self.logger.info(
            'Validating required parameters for create_refresh_protection_source_by_id.'
        )
        self.validate_parameters(id=id)

        # Expand the {id} template and assemble the full request URL.
        self.logger.info(
            'Preparing query URL for create_refresh_protection_source_by_id.'
        )
        resolved_path = APIHelper.append_url_with_template_parameters(
            '/public/protectionSources/refresh/{id}', {'id': id})
        request_url = APIHelper.clean_url(
            self.config.get_base_uri() + resolved_path)

        # Build, authenticate, and execute the POST request. No headers
        # are sent because the endpoint returns no content.
        self.logger.info(
            'Preparing and executing request for create_refresh_protection_source_by_id.'
        )
        request = self.http_client.post(request_url)
        AuthManager.apply(request, self.config)
        context = self.execute_request(
            request, name='create_refresh_protection_source_by_id')

        # Endpoint and global error handling using HTTP status codes.
        self.logger.info(
            'Validating response for create_refresh_protection_source_by_id.'
        )
        if context.response.status_code == 0:
            raise RequestErrorErrorException('Error', context)
        self.validate_response(context)
    except Exception as e:
        self.logger.error(e, exc_info=True)
        raise
def create_register_protection_source(self, body):
    """Does a POST request to /public/protectionSources/register.

    Register a Protection Source on the Cohesity Cluster. It could be
    the root node of a vCenter Server or a physical server. Returns the
    newly registered Protection Source upon success.

    Args:
        body (RegisterProtectionSourceParameters): Request to register a
            protection source.

    Returns:
        ProtectionSource: Response from the API. Success

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API. This exception includes the HTTP Response
            code, an error message, and the HTTP body that was received
            in the request.
    """
    try:
        self.logger.info('create_register_protection_source called.')

        # Required-parameter validation.
        self.logger.info(
            'Validating required parameters for create_register_protection_source.'
        )
        self.validate_parameters(body=body)

        # Assemble the full request URL.
        self.logger.info(
            'Preparing query URL for create_register_protection_source.')
        request_url = APIHelper.clean_url(
            self.config.get_base_uri() + '/public/protectionSources/register')

        # Request headers; the body is sent as JSON.
        self.logger.info(
            'Preparing headers for create_register_protection_source.')
        request_headers = {
            'accept': 'application/json',
            'content-type': 'application/json; charset=utf-8'
        }

        # Build, authenticate, and execute the POST request with the
        # serialized body as payload.
        self.logger.info(
            'Preparing and executing request for create_register_protection_source.'
        )
        request = self.http_client.post(
            request_url,
            headers=request_headers,
            parameters=APIHelper.json_serialize(body))
        AuthManager.apply(request, self.config)
        context = self.execute_request(
            request, name='create_register_protection_source')

        # Endpoint and global error handling using HTTP status codes.
        self.logger.info(
            'Validating response for create_register_protection_source.')
        if context.response.status_code == 0:
            raise RequestErrorErrorException('Error', context)
        self.validate_response(context)

        # Deserialize the response body into the model type.
        return APIHelper.json_deserialize(
            context.response.raw_body,
            ProtectionSource.from_dictionary)
    except Exception as e:
        self.logger.error(e, exc_info=True)
        raise
def list_protection_sources_registration_info(
        self,
        environments=None,
        ids=None,
        include_entity_permission_info=None,
        sids=None,
        include_source_credentials=None,
        encryption_key=None,
        include_applications_tree_info=None,
        tenant_ids=None,
        all_under_hierarchy=None):
    """Does a GET request to /public/protectionSources/registrationInfo.

    Returns the registration and protection information of the
    registered Protection Sources.

    Args:
        environments (list of
            EnvironmentListProtectionSourcesRegistrationInfoEnum,
            optional): Return only Protection Sources that match the
            passed in environment type such as 'kVMware', 'kSQL',
            'kView', 'kPhysical', 'kPuppeteer', 'kPure', 'kNetapp',
            'kGenericNas', 'kHyperV', 'kAcropolis', or 'kAzure'. For
            example, set this parameter to 'kVMware' to only return the
            Sources (and their Object subtrees) found in the 'kVMware'
            (VMware vCenter Server) environment. NOTE: 'kPuppeteer'
            refers to Cohesity's Remote Adapter.
        ids (list of long|int, optional): Return only the registered
            root nodes whose Ids are given in the list.
        include_entity_permission_info (bool, optional): If specified,
            then a list of entities with permissions assigned to them
            are returned.
        sids (list of string, optional): Filter the registered root
            nodes for the sids given in the list.
        include_source_credentials (bool, optional): If specified, then
            credential for the registered sources will be included.
            Credential is first encrypted with internal key and then
            reencrypted with user supplied 'encryption_key'.
        encryption_key (string, optional): Key to be used to encrypt
            the source credential. If include_source_credentials is set
            to true this key must be specified.
        include_applications_tree_info (bool, optional): Specifies
            whether to return applications tree info or not.
        tenant_ids (list of string, optional): TenantIds contains ids
            of the tenants for which objects are to be returned.
        all_under_hierarchy (bool, optional): AllUnderHierarchy
            specifies if objects of all the tenants under the hierarchy
            of the logged in user's organization should be returned.

    Returns:
        GetRegistrationInfoResponse: Response from the API. Success

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API. This exception includes the HTTP Response
            code, an error message, and the HTTP body that was received
            in the request.
    """
    try:
        self.logger.info(
            'list_protection_sources_registration_info called.')

        # Assemble the full request URL with query parameters.
        self.logger.info(
            'Preparing query URL for list_protection_sources_registration_info.'
        )
        query_params = {
            'environments': environments,
            'ids': ids,
            'includeEntityPermissionInfo': include_entity_permission_info,
            'sids': sids,
            'includeSourceCredentials': include_source_credentials,
            'encryptionKey': encryption_key,
            'includeApplicationsTreeInfo': include_applications_tree_info,
            'tenantIds': tenant_ids,
            'allUnderHierarchy': all_under_hierarchy,
        }
        built_url = APIHelper.append_url_with_query_parameters(
            self.config.get_base_uri()
            + '/public/protectionSources/registrationInfo',
            query_params,
            Configuration.array_serialization)
        request_url = APIHelper.clean_url(built_url)

        # Request headers.
        self.logger.info(
            'Preparing headers for list_protection_sources_registration_info.'
        )
        request_headers = {'accept': 'application/json'}

        # Build, authenticate, and execute the GET request.
        self.logger.info(
            'Preparing and executing request for list_protection_sources_registration_info.'
        )
        request = self.http_client.get(request_url, headers=request_headers)
        AuthManager.apply(request, self.config)
        context = self.execute_request(
            request, name='list_protection_sources_registration_info')

        # Endpoint and global error handling using HTTP status codes.
        self.logger.info(
            'Validating response for list_protection_sources_registration_info.'
        )
        if context.response.status_code == 0:
            raise RequestErrorErrorException('Error', context)
        self.validate_response(context)

        # Deserialize the response body into the model type.
        return APIHelper.json_deserialize(
            context.response.raw_body,
            GetRegistrationInfoResponse.from_dictionary)
    except Exception as e:
        self.logger.error(e, exc_info=True)
        raise
def list_protection_sources_root_nodes(self,
                                       id=None,
                                       environments=None,
                                       environment=None):
    """Does a GET request to /public/protectionSources/rootNodes.

    Returns the root Protection Sources and the registration
    information for each of these Sources.

    Args:
        id (long|int, optional): Return the registration information
            for the Protection Source id.
        environments (list of
            EnvironmentListProtectionSourcesRootNodesEnum, optional):
            Return only the root Protection Sources that match the
            passed in environment type such as 'kVMware', 'kSQL',
            'kView', 'kPuppeteer', 'kPhysical', 'kPure', 'kNetapp',
            'kGenericNas', 'kHyperV', 'kAcropolis' 'kAzure'. For
            example, set this parameter to 'kVMware' to only return the
            root Protection Sources found in the 'kVMware' (VMware
            vCenter) environment. In addition, the registration
            information for each Source is returned. NOTE: 'kPuppeteer'
            refers to Cohesity's Remote Adapter.
        environment (string, optional): This field is deprecated. Use
            environments instead. deprecated: true

    Returns:
        list of ProtectionSourceNode: Response from the API. Success

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API. This exception includes the HTTP Response
            code, an error message, and the HTTP body that was received
            in the request.
    """
    try:
        self.logger.info('list_protection_sources_root_nodes called.')

        # Assemble the full request URL with query parameters.
        self.logger.info(
            'Preparing query URL for list_protection_sources_root_nodes.')
        query_params = {
            'id': id,
            'environments': environments,
            'environment': environment,
        }
        built_url = APIHelper.append_url_with_query_parameters(
            self.config.get_base_uri()
            + '/public/protectionSources/rootNodes',
            query_params,
            Configuration.array_serialization)
        request_url = APIHelper.clean_url(built_url)

        # Request headers.
        self.logger.info(
            'Preparing headers for list_protection_sources_root_nodes.')
        request_headers = {'accept': 'application/json'}

        # Build, authenticate, and execute the GET request.
        self.logger.info(
            'Preparing and executing request for list_protection_sources_root_nodes.'
        )
        request = self.http_client.get(request_url, headers=request_headers)
        AuthManager.apply(request, self.config)
        context = self.execute_request(
            request, name='list_protection_sources_root_nodes')

        # Endpoint and global error handling using HTTP status codes.
        self.logger.info(
            'Validating response for list_protection_sources_root_nodes.')
        if context.response.status_code == 0:
            raise RequestErrorErrorException('Error', context)
        self.validate_response(context)

        # Deserialize the response body into the model type.
        return APIHelper.json_deserialize(
            context.response.raw_body,
            ProtectionSourceNode.from_dictionary)
    except Exception as e:
        self.logger.error(e, exc_info=True)
        raise
def list_sql_aag_hosts_and_databases(self, sql_protection_source_ids):
    """Does a GET request to /public/protectionSources/sqlAagHostsAndDatabases.

    Given a list of Protection Source Ids registered as SQL servers,
    returns AAGs found and the databases in AAG (Always on Availability
    Group).

    Args:
        sql_protection_source_ids (list of long|int): Specifies a list
            of Ids of Protection Sources registered as SQL servers.
            These sources may have one or more SQL databases in them.
            Some of them may be part of AAGs (Always on Availability
            Group).

    Returns:
        list of SqlAagHostAndDatabases: Response from the API. List SQL
            AAG hosts and databases response. Success

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API. This exception includes the HTTP Response
            code, an error message, and the HTTP body that was received
            in the request.
    """
    try:
        self.logger.info('list_sql_aag_hosts_and_databases called.')

        # Required-parameter validation.
        self.logger.info(
            'Validating required parameters for list_sql_aag_hosts_and_databases.'
        )
        self.validate_parameters(
            sql_protection_source_ids=sql_protection_source_ids)

        # Assemble the full request URL with query parameters.
        self.logger.info(
            'Preparing query URL for list_sql_aag_hosts_and_databases.')
        built_url = APIHelper.append_url_with_query_parameters(
            self.config.get_base_uri()
            + '/public/protectionSources/sqlAagHostsAndDatabases',
            {'sqlProtectionSourceIds': sql_protection_source_ids},
            Configuration.array_serialization)
        request_url = APIHelper.clean_url(built_url)

        # Request headers.
        self.logger.info(
            'Preparing headers for list_sql_aag_hosts_and_databases.')
        request_headers = {'accept': 'application/json'}

        # Build, authenticate, and execute the GET request.
        self.logger.info(
            'Preparing and executing request for list_sql_aag_hosts_and_databases.'
        )
        request = self.http_client.get(request_url, headers=request_headers)
        AuthManager.apply(request, self.config)
        context = self.execute_request(
            request, name='list_sql_aag_hosts_and_databases')

        # Endpoint and global error handling using HTTP status codes.
        self.logger.info(
            'Validating response for list_sql_aag_hosts_and_databases.')
        if context.response.status_code == 0:
            raise RequestErrorErrorException('Error', context)
        self.validate_response(context)

        # Deserialize the response body into the model type.
        return APIHelper.json_deserialize(
            context.response.raw_body,
            SqlAagHostAndDatabases.from_dictionary)
    except Exception as e:
        self.logger.error(e, exc_info=True)
        raise
def list_virtual_machines(self,
                          v_center_id=None,
                          names=None,
                          uuids=None,
                          protected=None):
    """Does a GET request to /public/protectionSources/virtualMachines.

    Returns all Virtual Machines found in all the vCenter Servers
    registered on the Cohesity Cluster that match the filter criteria
    specified using parameters. If an id is specified, only VMs found
    in the specified vCenter Server are returned. Only VM Objects are
    returned; other VMware Objects such as datacenters are not.

    Args:
        v_center_id (long|int, optional): Limit the VMs returned to the
            set of VMs found in a specific vCenter Server. Pass in the
            root Protection Source id for the vCenter Server to search
            for VMs.
        names (list of string, optional): Limit the returned VMs to
            those that exactly match the passed in VM name. To match
            multiple VM names, specify multiple "names" parameters that
            each specify a single VM name. The string must exactly
            match the passed in VM name and wild cards are not
            supported.
        uuids (list of string, optional): Limit the returned VMs to
            those that exactly match the passed in UUIDs.
        protected (bool, optional): Limit the returned VMs to those
            that have been protected by a Protection Job. By default,
            both protected and unprotected VMs are returned.

    Returns:
        list of ProtectionSource: Response from the API. Success

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API. This exception includes the HTTP Response
            code, an error message, and the HTTP body that was received
            in the request.
    """
    try:
        self.logger.info('list_virtual_machines called.')

        # Assemble the full request URL with query parameters.
        self.logger.info('Preparing query URL for list_virtual_machines.')
        query_params = {
            'vCenterId': v_center_id,
            'names': names,
            'uuids': uuids,
            'protected': protected,
        }
        built_url = APIHelper.append_url_with_query_parameters(
            self.config.get_base_uri()
            + '/public/protectionSources/virtualMachines',
            query_params,
            Configuration.array_serialization)
        request_url = APIHelper.clean_url(built_url)

        # Request headers.
        self.logger.info('Preparing headers for list_virtual_machines.')
        request_headers = {'accept': 'application/json'}

        # Build, authenticate, and execute the GET request.
        self.logger.info(
            'Preparing and executing request for list_virtual_machines.')
        request = self.http_client.get(request_url, headers=request_headers)
        AuthManager.apply(request, self.config)
        context = self.execute_request(
            request, name='list_virtual_machines')

        # Endpoint and global error handling using HTTP status codes.
        self.logger.info('Validating response for list_virtual_machines.')
        if context.response.status_code == 0:
            raise RequestErrorErrorException('Error', context)
        self.validate_response(context)

        # Deserialize the response body into the model type.
        return APIHelper.json_deserialize(
            context.response.raw_body,
            ProtectionSource.from_dictionary)
    except Exception as e:
        self.logger.error(e, exc_info=True)
        raise
def delete_unregister_protection_source(self, id):
    """Does a DELETE request to /public/protectionSources/{id}.

    Unregister a previously registered Protection Source.

    Args:
        id (long|int): Specifies a unique id of the Protection Source
            to unregister. If the Protection Source is currently being
            backed up, unregister operation will fail.

    Returns:
        void: Response from the API. No Content

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API. This exception includes the HTTP Response
            code, an error message, and the HTTP body that was received
            in the request.
    """
    try:
        self.logger.info('delete_unregister_protection_source called.')

        # Required-parameter validation.
        self.logger.info(
            'Validating required parameters for delete_unregister_protection_source.'
        )
        self.validate_parameters(id=id)

        # Expand the {id} template and assemble the full request URL.
        self.logger.info(
            'Preparing query URL for delete_unregister_protection_source.')
        resolved_path = APIHelper.append_url_with_template_parameters(
            '/public/protectionSources/{id}', {'id': id})
        request_url = APIHelper.clean_url(
            self.config.get_base_uri() + resolved_path)

        # Build, authenticate, and execute the DELETE request. No
        # headers are sent because the endpoint returns no content.
        self.logger.info(
            'Preparing and executing request for delete_unregister_protection_source.'
        )
        request = self.http_client.delete(request_url)
        AuthManager.apply(request, self.config)
        context = self.execute_request(
            request, name='delete_unregister_protection_source')

        # Endpoint and global error handling using HTTP status codes.
        self.logger.info(
            'Validating response for delete_unregister_protection_source.')
        if context.response.status_code == 0:
            raise RequestErrorErrorException('Error', context)
        self.validate_response(context)
    except Exception as e:
        self.logger.error(e, exc_info=True)
        raise
def update_protection_source(self, id, body=None):
    """Update a previously registered Protection Source with new details.

    Issues a PATCH request to /public/protectionSources/{id}.

    Args:
        id (long|int): Unique id of the Protection Source to update.
        body (UpdateProtectionSourceParameters, optional): Request to
            update protection source.

    Returns:
        ProtectionSourceNode: Deserialized response body on success.

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API; includes the HTTP status code, an error
            message, and the HTTP body received in the request.
    """
    try:
        self.logger.info('update_protection_source called.')

        # Required-parameter validation.
        self.logger.info(
            'Validating required parameters for update_protection_source.')
        self.validate_parameters(id=id)

        # Build the request URL from the base URI and the templated path.
        self.logger.info(
            'Preparing query URL for update_protection_source.')
        _path = APIHelper.append_url_with_template_parameters(
            '/public/protectionSources/{id}', {'id': id})
        _url = APIHelper.clean_url(self.config.get_base_uri() + _path)

        # JSON request/response headers.
        self.logger.info('Preparing headers for update_protection_source.')
        _headers = {
            'accept': 'application/json',
            'content-type': 'application/json; charset=utf-8'
        }

        # Execute the PATCH request with authentication applied.
        self.logger.info(
            'Preparing and executing request for update_protection_source.'
        )
        _req = self.http_client.patch(
            _url,
            headers=_headers,
            parameters=APIHelper.json_serialize(body))
        AuthManager.apply(_req, self.config)
        _ctx = self.execute_request(_req, name='update_protection_source')

        # Endpoint and global error handling using HTTP status codes.
        self.logger.info(
            'Validating response for update_protection_source.')
        if _ctx.response.status_code == 0:
            raise RequestErrorErrorException('Error', _ctx)
        self.validate_response(_ctx)

        # Deserialize into the SDK model type.
        return APIHelper.json_deserialize(
            _ctx.response.raw_body, ProtectionSourceNode.from_dictionary)
    except Exception as e:
        self.logger.error(e, exc_info=True)
        raise
| 46.042489
| 103
| 0.615451
| 9,119
| 91,026
| 5.951749
| 0.068977
| 0.057781
| 0.030954
| 0.026698
| 0.783561
| 0.747761
| 0.728673
| 0.711243
| 0.698824
| 0.692634
| 0
| 0.001295
| 0.329697
| 91,026
| 1,976
| 104
| 46.065789
| 0.888224
| 0.42222
| 0
| 0.623318
| 0
| 0
| 0.199716
| 0.105707
| 0
| 0
| 0
| 0.000506
| 0
| 1
| 0.025785
| false
| 0
| 0.017937
| 0
| 0.067265
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
81d0684917809043c4dc315fe7e33c71a8ad8fe3
| 126
|
py
|
Python
|
project_name/tests/tests.py
|
wesleykendall/django-app-template
|
1a740bfe8eb31b04bc275aa98639d565dbddaca1
|
[
"MIT"
] | 10
|
2015-11-01T00:47:31.000Z
|
2021-04-07T12:20:50.000Z
|
project_name/tests/tests.py
|
wesleykendall/django-app-template
|
1a740bfe8eb31b04bc275aa98639d565dbddaca1
|
[
"MIT"
] | 1
|
2015-04-09T17:33:50.000Z
|
2015-04-09T18:48:54.000Z
|
project_name/tests/tests.py
|
wesleykendall/django-app-template
|
1a740bfe8eb31b04bc275aa98639d565dbddaca1
|
[
"MIT"
] | 4
|
2015-04-03T16:26:23.000Z
|
2019-04-01T16:45:02.000Z
|
from django.test import TestCase
class SampleTest(TestCase):
    """Smoke test confirming the Django test harness runs."""

    def test_1_equals_1(self):
        # assertEquals is a long-deprecated alias; use assertEqual.
        self.assertEqual(1, 1)
| 18
| 32
| 0.722222
| 18
| 126
| 4.888889
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.039216
| 0.190476
| 126
| 6
| 33
| 21
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
c4aa7bdc73431aebfcb9ea86b54dd35584f296d4
| 38
|
py
|
Python
|
foiamachine/local/lib/python2.7/encodings/koi8_r.py
|
dwillis/foiamachine
|
26d3b02870227696cdaab639c39d47b2a7a42ae5
|
[
"Unlicense",
"MIT"
] | 3
|
2021-08-07T04:01:55.000Z
|
2021-08-07T05:12:11.000Z
|
foiamachine/local/lib/python2.7/encodings/koi8_r.py
|
dwillis/foiamachine
|
26d3b02870227696cdaab639c39d47b2a7a42ae5
|
[
"Unlicense",
"MIT"
] | null | null | null |
foiamachine/local/lib/python2.7/encodings/koi8_r.py
|
dwillis/foiamachine
|
26d3b02870227696cdaab639c39d47b2a7a42ae5
|
[
"Unlicense",
"MIT"
] | 1
|
2021-08-05T22:51:14.000Z
|
2021-08-05T22:51:14.000Z
|
/usr/lib/python2.7/encodings/koi8_r.py
| 38
| 38
| 0.815789
| 8
| 38
| 3.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 0
| 38
| 1
| 38
| 38
| 0.710526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c4b2eb00b2bae5168631127ef99d49ca253512f1
| 98
|
py
|
Python
|
invoices_app/apps.py
|
xmudrii/django-invoices
|
25ac6b73e217d6d38bd91e541134acbf7e9bd0a4
|
[
"Apache-2.0"
] | null | null | null |
invoices_app/apps.py
|
xmudrii/django-invoices
|
25ac6b73e217d6d38bd91e541134acbf7e9bd0a4
|
[
"Apache-2.0"
] | null | null | null |
invoices_app/apps.py
|
xmudrii/django-invoices
|
25ac6b73e217d6d38bd91e541134acbf7e9bd0a4
|
[
"Apache-2.0"
] | null | null | null |
from django.apps import AppConfig
class InvoicesAppConfig(AppConfig):
    """Django application configuration for the invoices_app package."""

    # Dotted module path Django uses to locate this application.
    name = 'invoices_app'
| 16.333333
| 35
| 0.77551
| 11
| 98
| 6.818182
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153061
| 98
| 5
| 36
| 19.6
| 0.903614
| 0
| 0
| 0
| 0
| 0
| 0.122449
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
c4b9092a24308587200142b42826a6e3399ad613
| 285
|
py
|
Python
|
contentagregator/modules/cnn/controllers.py
|
Czembri/contentAgregator
|
d77408283344ea45d902ccd54cb4cac91edba9a8
|
[
"Unlicense"
] | 2
|
2020-12-26T09:05:37.000Z
|
2021-01-08T00:08:46.000Z
|
contentagregator/modules/cnn/controllers.py
|
Czembri/contentAgregator
|
d77408283344ea45d902ccd54cb4cac91edba9a8
|
[
"Unlicense"
] | 10
|
2021-01-15T22:53:18.000Z
|
2021-06-10T21:45:16.000Z
|
contentagregator/modules/cnn/controllers.py
|
Czembri/contentAgregator
|
d77408283344ea45d902ccd54cb4cac91edba9a8
|
[
"Unlicense"
] | null | null | null |
from contentagregator import app, db
from flask import render_template, Blueprint
# Flask blueprint for the CNN news section; serves templates/static from the
# module's own folders under the /news/cnn URL prefix.
cnn_module = Blueprint('cnn', __name__, url_prefix='/news/cnn', template_folder='templates', static_folder='static')


# NOTE(review): the route below is registered on the global `app`, not on
# `cnn_module` — the blueprint above is defined but not used by this view.
# Confirm whether `@cnn_module.route` was intended.
@app.route('/news/cnn')
def cnn_get_view():
    # Render the CNN landing-page template.
    return render_template('cnn.html')
| 31.666667
| 117
| 0.764912
| 39
| 285
| 5.282051
| 0.615385
| 0.135922
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101754
| 285
| 9
| 118
| 31.666667
| 0.804688
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0.166667
| 0.666667
| 0.333333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
c4d845e75c3f45ee6ab0eb7f95c5a65e810cdca9
| 639
|
py
|
Python
|
logicallake/ui/logicallake.qrc.py
|
stewarg9/logicallake
|
a5ac4d172b94a4bb8130545b6d41eebad60eb4b4
|
[
"BSD-2-Clause"
] | null | null | null |
logicallake/ui/logicallake.qrc.py
|
stewarg9/logicallake
|
a5ac4d172b94a4bb8130545b6d41eebad60eb4b4
|
[
"BSD-2-Clause"
] | null | null | null |
logicallake/ui/logicallake.qrc.py
|
stewarg9/logicallake
|
a5ac4d172b94a4bb8130545b6d41eebad60eb4b4
|
[
"BSD-2-Clause"
] | null | null | null |
<!DOCTYPE RCC><RCC version="1.0">
<qresource>
<file>images/pointer.png</file>
<file>images/linepointer.png</file>
<file>images/textpointer.png</file>
<file>images/bold.png</file>
<file>images/italic.png</file>
<file>images/underline.png</file>
<file>images/floodfill.png</file>
<file>images/bringtofront.png</file>
<file>images/delete.png</file>
<file>images/sendtoback.png</file>
<file>images/linecolor.png</file>
<file>images/background1.png</file>
<file>images/background2.png</file>
<file>images/background3.png</file>
<file>images/background4.png</file>
</qresource>
</RCC>
| 31.95
| 40
| 0.682316
| 84
| 639
| 5.190476
| 0.285714
| 0.344037
| 0.353211
| 0.545872
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010753
| 0.126761
| 639
| 19
| 41
| 33.631579
| 0.770609
| 0
| 0
| 0
| 0
| 0
| 0.004695
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c4d9b7b46468f363614630bc2312adb75f7650ef
| 1,549
|
py
|
Python
|
python/configWriter.py
|
jbeirer/histfitter
|
44fc5e56ca6a5878db89489542d0bdb0a88e9ada
|
[
"BSD-2-Clause"
] | 5
|
2021-06-22T23:31:08.000Z
|
2021-07-19T13:08:23.000Z
|
python/configWriter.py
|
HistFitter/HistFitter
|
f661a0ed9d52d648014ebe3575af1b0b833b41ce
|
[
"BSD-2-Clause"
] | 94
|
2021-06-22T23:06:21.000Z
|
2022-01-25T09:48:46.000Z
|
python/configWriter.py
|
HistFitter/HistFitter
|
f661a0ed9d52d648014ebe3575af1b0b833b41ce
|
[
"BSD-2-Clause"
] | 5
|
2021-07-24T08:49:58.000Z
|
2021-11-25T10:21:39.000Z
|
"""
**********************************************************************************
* Project: HistFitter - A ROOT-based package for statistical data analysis *
* Package: HistFitter *
* Script : configWriter.py *
* Created: November 2012 *
* *
* Description: *
* Only kept for back-compatibility. *
* *
* Authors: *
* HistFitter group, CERN, Geneva *
* *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted according to the terms listed in the file *
* LICENSE. *
**********************************************************************************
"""
from fitConfig import fitConfig
from measurement import Measurement
from channel import Channel
from sample import Sample
#from channelxml import ChannelXML
#from topLevelxml import TopLevelXML
| 55.321429
| 83
| 0.306649
| 76
| 1,549
| 6.25
| 0.697368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005789
| 0.553906
| 1,549
| 27
| 84
| 57.37037
| 0.681621
| 0.901227
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
c4e195b00a1db26cfe6858f13bb89505c9bc7f62
| 572
|
py
|
Python
|
ingestion/src/metadata/generated/schema/entity/policies/lifecycle/rule.py
|
naveen09/OpenMetadata
|
e4fa0247f5db8094dfd156f13fdcc5ffcd120e74
|
[
"Apache-2.0"
] | null | null | null |
ingestion/src/metadata/generated/schema/entity/policies/lifecycle/rule.py
|
naveen09/OpenMetadata
|
e4fa0247f5db8094dfd156f13fdcc5ffcd120e74
|
[
"Apache-2.0"
] | null | null | null |
ingestion/src/metadata/generated/schema/entity/policies/lifecycle/rule.py
|
naveen09/OpenMetadata
|
e4fa0247f5db8094dfd156f13fdcc5ffcd120e74
|
[
"Apache-2.0"
] | null | null | null |
# generated by datamodel-codegen:
# filename: schema/entity/policies/lifecycle/rule.json
# timestamp: 2021-11-20T15:09:34+00:00
from __future__ import annotations
from typing import List, Union
from pydantic import BaseModel, Field
from .. import filters
from . import deleteAction, moveAction
class LifecycleRule(BaseModel):
    """Generated pydantic model for a lifecycle policy rule (do not edit by hand)."""

    # Entity filter this rule applies to.
    filters: filters.Filters1
    # Actions executed on matching entities (delete and/or move).
    actions: List[
        Union[deleteAction.LifecycleDeleteAction, moveAction.LifecycleMoveAction]
    ] = Field(
        # NOTE(review): `min_length` on a list-typed field — pydantic v1
        # constrains collection size with `min_items`; confirm this matches
        # the pydantic version in use.
        ..., description='A set of actions to take on the entities.', min_length=1
    )
| 26
| 82
| 0.73951
| 67
| 572
| 6.238806
| 0.731343
| 0.043062
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042283
| 0.173077
| 572
| 21
| 83
| 27.238095
| 0.841438
| 0.22028
| 0
| 0
| 1
| 0
| 0.09276
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.416667
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
f200065816a5c0600008e99d9610360192c00418
| 1,144
|
py
|
Python
|
courses/admin.py
|
manisharmagarg/oddnary
|
e2dea772d44d72773aa63c449d4082a9bf07dfe1
|
[
"Apache-2.0"
] | null | null | null |
courses/admin.py
|
manisharmagarg/oddnary
|
e2dea772d44d72773aa63c449d4082a9bf07dfe1
|
[
"Apache-2.0"
] | null | null | null |
courses/admin.py
|
manisharmagarg/oddnary
|
e2dea772d44d72773aa63c449d4082a9bf07dfe1
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib import admin
from .models import (
Course, CourseSection,
CourseFile, CourseDetailTab,
CourseDetailTabList,
UserMyCourseLibrary,
Category,
CategoryCourseRelation,
)
from utils.admin import CustomAdminFormMixin
# Register your models here.
@admin.register(Course)
class CourseAdmin(CustomAdminFormMixin, admin.ModelAdmin):
    """Admin for Course records."""
    pass


@admin.register(CourseSection)
class CourseSectionAdmin(CustomAdminFormMixin, admin.ModelAdmin):
    """Admin for CourseSection records."""
    pass


@admin.register(CourseFile)
class CourseFileAdmin(CustomAdminFormMixin, admin.ModelAdmin):
    """Admin for CourseFile records."""
    pass


@admin.register(CourseDetailTab)
class CourseDetailTabAdmin(CustomAdminFormMixin, admin.ModelAdmin):
    """Admin for CourseDetailTab records."""
    pass


@admin.register(CourseDetailTabList)
class CourseDetailTabListAdmin(CustomAdminFormMixin, admin.ModelAdmin):
    """Admin for CourseDetailTabList records."""
    pass


@admin.register(UserMyCourseLibrary)
class UserMyCourseLibraryAdmin(CustomAdminFormMixin, admin.ModelAdmin):
    """Admin for UserMyCourseLibrary records."""
    pass


# Fix: the next two classes were both named CourseAdmin, silently shadowing
# the Course admin above at module level; give each its own name.
@admin.register(Category)
class CategoryAdmin(CustomAdminFormMixin, admin.ModelAdmin):
    """Admin for Category records."""
    pass


@admin.register(CategoryCourseRelation)
class CategoryCourseRelationAdmin(CustomAdminFormMixin, admin.ModelAdmin):
    """Admin for CategoryCourseRelation records."""
    pass
| 22.431373
| 71
| 0.804196
| 97
| 1,144
| 9.484536
| 0.28866
| 0.113043
| 0.304348
| 0.33913
| 0.490217
| 0.490217
| 0.147826
| 0.147826
| 0
| 0
| 0
| 0
| 0.119755
| 1,144
| 51
| 72
| 22.431373
| 0.913605
| 0.022727
| 0
| 0.323529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.235294
| 0.088235
| 0
| 0.323529
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.