hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
bdb2fcfea13efbaa78b623ef214228a582b42d47
| 415
|
py
|
Python
|
Dataset/Leetcode/train/58/565.py
|
kkcookies99/UAST
|
fff81885aa07901786141a71e5600a08d7cb4868
|
[
"MIT"
] | null | null | null |
Dataset/Leetcode/train/58/565.py
|
kkcookies99/UAST
|
fff81885aa07901786141a71e5600a08d7cb4868
|
[
"MIT"
] | null | null | null |
Dataset/Leetcode/train/58/565.py
|
kkcookies99/UAST
|
fff81885aa07901786141a71e5600a08d7cb4868
|
[
"MIT"
] | null | null | null |
class Solution:
def XXX(self, s: str) -> int:
blank_count = 0
start = 0
for index in range(len(s)):
if s[index] != " ":
if blank_count == 0:
continue
else:
blank_count = 0
start = index
else:
blank_count += 1
return len(s) - blank_count - start
| 24.411765
| 43
| 0.39759
| 43
| 415
| 3.72093
| 0.511628
| 0.3125
| 0.20625
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025
| 0.518072
| 415
| 16
| 44
| 25.9375
| 0.775
| 0
| 0
| 0.285714
| 0
| 0
| 0.002421
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0
| 0
| 0.214286
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
bdb4b68b6995ef89dcfde4f4694668c0819fabb8
| 38
|
py
|
Python
|
nanome_minimization/__init__.py
|
nanome-ai/plugin-minimization
|
2ded49f83e4d6e0443a962a8a9b79f7dd530e3d3
|
[
"MIT"
] | null | null | null |
nanome_minimization/__init__.py
|
nanome-ai/plugin-minimization
|
2ded49f83e4d6e0443a962a8a9b79f7dd530e3d3
|
[
"MIT"
] | null | null | null |
nanome_minimization/__init__.py
|
nanome-ai/plugin-minimization
|
2ded49f83e4d6e0443a962a8a9b79f7dd530e3d3
|
[
"MIT"
] | 1
|
2021-05-18T09:01:03.000Z
|
2021-05-18T09:01:03.000Z
|
__version__ = "0.4.0"
from . import *
| 12.666667
| 21
| 0.631579
| 6
| 38
| 3.333333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 0.184211
| 38
| 3
| 22
| 12.666667
| 0.548387
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
bdead3926629853c3ac12111fa063002a4f4ebc9
| 184
|
py
|
Python
|
spa_todo_list/main/models.py
|
theodor85/spa_todo_list
|
75fc50de9aa071b7ba50c280866c24efbbd07557
|
[
"MIT"
] | null | null | null |
spa_todo_list/main/models.py
|
theodor85/spa_todo_list
|
75fc50de9aa071b7ba50c280866c24efbbd07557
|
[
"MIT"
] | 4
|
2021-03-10T12:41:05.000Z
|
2022-02-26T20:46:35.000Z
|
spa_todo_list/main/models.py
|
theodor85/spa_todo_list
|
75fc50de9aa071b7ba50c280866c24efbbd07557
|
[
"MIT"
] | null | null | null |
from django.db import models
class Task(models.Model):
text = models.CharField(max_length=250)
def __str__(self):
return 'Задача № ' + str(self.id) + ' ' + self.text
| 23
| 59
| 0.646739
| 26
| 184
| 4.461538
| 0.769231
| 0.12069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020833
| 0.217391
| 184
| 8
| 59
| 23
| 0.770833
| 0
| 0
| 0
| 0
| 0
| 0.054054
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.2
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
bdf5d1d28853bdab73a45018798433c5f483fd2f
| 17,533
|
py
|
Python
|
testGen.py
|
kmirzavaziri/hAssembler
|
66479daefa9f46702bd0328f24e9e6a8da9daaba
|
[
"MIT"
] | null | null | null |
testGen.py
|
kmirzavaziri/hAssembler
|
66479daefa9f46702bd0328f24e9e6a8da9daaba
|
[
"MIT"
] | null | null | null |
testGen.py
|
kmirzavaziri/hAssembler
|
66479daefa9f46702bd0328f24e9e6a8da9daaba
|
[
"MIT"
] | null | null | null |
import sys
if len(sys.argv) < 2:
case = -1
else:
case = int(sys.argv[1])
f = open("testGened.asm", "w")
unO = [
"dec",
"inc",
]
binO = [
"add",
"adc",
"sub",
"sbb",
"and",
"or",
"xor",
"cmp",
]
for op in unO:
# op reg
if case < 0 or case == 20:
f.write(op + ' al\n')
f.write(op + ' ah\n')
f.write(op + ' ax\n')
f.write(op + ' eax\n')
f.write(op + ' rax\n')
f.write(op + ' r8\n')
f.write(op + ' r8d\n')
f.write(op + ' r8w\n')
f.write(op + ' bl\n')
f.write(op + ' bh\n')
f.write(op + ' bx\n')
f.write(op + ' ebx\n')
f.write(op + ' rbx\n')
f.write(op + ' r9\n')
f.write(op + ' r9d\n')
f.write(op + ' r9w\n')
# op mem
if case < 0 or case == 21:
f.write(op + 'b [r13d + 0x99 + 2 * 4 * r12d]\n')
f.write(op + 'b [r8d + 0b1010 - 0xA + r12d]\n')
f.write(op + 'w [r8 + r12 + 0xDAF]\n')
f.write(op + 'w [r13d + 0b00111 * 3 - 2 + r8d]\n')
f.write(op + 'd [rax + rbp + 0xDAF]\n')
f.write(op + 'd [rsp + 0b00111 * 3 - 2 + rax]\n')
f.write(op + 'q [r12 + 42 + 0x42 + r13]\n')
f.write(op + 'q [r8d + 8 * r8d + 0b1001]\n')
for op in binO:
# ---------------------------------- op reg, reg ----------------------------------
if case < 0 or case == 0:
f.write(op + ' al, al\n')
f.write(op + ' ah, ah\n')
f.write(op + ' cl, cl\n')
f.write(op + ' ch, ch\n')
f.write(op + ' ax, bx\n')
f.write(op + ' cx, dx\n')
f.write(op + ' eax, ebx\n')
f.write(op + ' ecx, edx\n')
f.write(op + ' rax, rbx\n')
f.write(op + ' rcx, rdx\n')
f.write(op + ' r10, rbx\n')
f.write(op + ' r12d, edx\n')
f.write(op + ' r9w, bx\n')
# ---------------------------------- op reg, mem ----------------------------------
# op reg, [base]
if case < 0 or case == 1:
f.write(op + ' ax, [r11d + 0]\n')
f.write(op + ' r8w, [edx]\n')
f.write(op + ' ebx, [eax]\n')
f.write(op + ' esi, [42 + ecx + 0 - 21 * 2]\n')
f.write(op + ' r9d, [esp]\n')
f.write(op + ' r8d, [ebp]\n')
f.write(op + ' esi, [esp]\n')
f.write(op + ' esi, [ebp]\n')
# 64-bits
f.write(op + ' ebx, [rax]\n')
f.write(op + ' rdx, [ebx + 0]\n')
f.write(op + ' cx, [r14d]\n')
f.write(op + ' rbx, [eax]\n')
f.write(op + ' esi, [42 + r11 + 0 - 21 * 2]\n')
f.write(op + ' esi, [rsp]\n')
f.write(op + ' rsi, [rbp]\n')
f.write(op + ' r11d, [r12]\n')
f.write(op + ' r15w, [r13]\n')
f.write(op + ' r9, [r12d]\n')
f.write(op + ' r15, [r13d]\n')
# op reg, [disp]
if case < 0 or case == 2:
f.write(op + ' ax, [0x1]\n')
f.write(op + ' al, [0x0]\n')
f.write(op + ' cx, [0b10]\n')
f.write(op + ' ebx, [10 - 20]\n')
f.write(op + ' esi, [0x77]\n')
f.write(op + ' esi, [0 - 0x1377]\n')
f.write(op + ' esi, [0x13771999]\n')
# 64-bits
f.write(op + ' rax, [0x1]\n')
f.write(op + ' rbx, [0x1000 - 0x2000]\n')
f.write(op + ' rsi, [0x99]\n')
f.write(op + ' r8d, [0x1]\n')
f.write(op + ' r12, [0x1000 - 0x2000]\n')
f.write(op + ' r13w, [0x99]\n')
# op reg, [scale * index]
if case < 0 or case == 3:
f.write(op + ' ax, [eax * 2]\n')
f.write(op + ' ax, [ebx * 2]\n')
f.write(op + ' cx, [2 * edx]\n')
f.write(op + ' ebx, [2 * eax * 2]\n')
f.write(op + ' esi, [4 * ecx]\n')
f.write(op + ' esi, [8 * ebp]\n')
# 64-bits
f.write(op + ' ax, [rax * 2]\n')
f.write(op + ' ax, [rbx * 2]\n')
f.write(op + ' cx, [2 * rdx]\n')
f.write(op + ' rbx, [2 * rax * 2]\n')
f.write(op + ' rsi, [4 * rcx]\n')
f.write(op + ' rsi, [8 * rbp]\n')
f.write(op + ' eax, [r8d * 2]\n')
f.write(op + ' rsp, [r8 * 2]\n')
f.write(op + ' r12, [2 * r11]\n')
f.write(op + ' r13w, [2 * r12 * 2]\n')
f.write(op + ' r14, [4 * r13]\n')
# op reg, [base + disp]
if case < 0 or case == 4:
f.write(op + ' ax, [ebx + 0x42]\n')
f.write(op + ' eax, [ebx + 0x99]\n')
f.write(op + ' eax, [ebx + 0 - 0x99]\n')
f.write(op + ' eax, [ebx + 0 - 0x42]\n')
f.write(op + ' ebx, [ebx + 0x1999]\n')
f.write(op + ' ch, [eax + 0x42135678]\n')
f.write(op + ' ecx, [eax + 0x1234]\n')
f.write(op + ' edx, [esp + 0xA]\n')
f.write(op + ' al, [esp + 0xABCDE]\n')
f.write(op + ' ah, [ebp + 0xB]\n')
f.write(op + ' bl, [ebp + 0xA0C]\n')
# 64-bits
f.write(op + ' ax, [rbx + 0x42]\n')
f.write(op + ' rax, [rbx + 0x99]\n')
f.write(op + ' rax, [ebx + 0 - 0x99]\n')
f.write(op + ' ecx, [rax + 0x1234]\n')
f.write(op + ' rcx, [rsp + 0xA]\n')
f.write(op + ' al, [rsp + 0xABCDE]\n')
f.write(op + ' rdx, [rbp + 0xB]\n')
f.write(op + ' bl, [rbp + 0xA0C]\n')
f.write(op + ' ax, [r8 + 0x42]\n')
f.write(op + ' rax, [r8d + 0x99]\n')
f.write(op + ' r10d, [ebx + 0 - 0x99]\n')
f.write(op + ' r11w, [r12d + 0x1234]\n')
f.write(op + ' r12, [r13d + 0xA]\n')
f.write(op + ' al, [r13d + 0xABCDE]\n')
f.write(op + ' r13d, [r12d + 0xB]\n')
f.write(op + ' r9w, [r12d + 0xA0C]\n')
# op reg, [base + scale * index]
if case < 0 or case == 5:
f.write(op + ' ax, [ebx + ebx * 2]\n')
f.write(op + ' cx, [ecx + 2 * edx]\n')
f.write(op + ' ebx, [esp + 2 * eax * 2]\n')
f.write(op + ' eax, [ebp + 4 * ecx]\n')
f.write(op + ' ebx, [esp + 2 * 4 * ebp]\n')
f.write(op + ' ecx, [ebp + ebp]\n')
f.write(op + ' dx, [eax + ebp]\n')
f.write(op + ' bp, [ebp + eax]\n')
f.write(op + ' al, [ebp + esp]\n')
f.write(op + ' ah, [eax + 8 * eax]\n')
# 64-bits
f.write(op + ' ax, [rbx + rbx * 2]\n')
f.write(op + ' cx, [rcx + 2 * rdx]\n')
f.write(op + ' rbx, [rsp + 2 * rax * 2]\n')
f.write(op + ' eax, [rbp + 4 * rcx]\n')
f.write(op + ' rbx, [rsp + 2 * 4 * rbp]\n')
f.write(op + ' ecx, [rbp + rbp]\n')
f.write(op + ' dx, [rax + rbp]\n')
f.write(op + ' bp, [rbp + rax]\n')
f.write(op + ' rdx, [rbp + rsp]\n')
f.write(op + ' ah, [rax + 8 * rax]\n')
f.write(op + ' r8, [r11 + r11 * 2]\n')
f.write(op + ' cx, [r11 + 2 * rdx]\n')
f.write(op + ' r15, [r13 + 2 * r8 * 2]\n')
f.write(op + ' eax, [r12 + 4 * r11]\n')
f.write(op + ' r10, [r13 + 2 * 4 * r12]\n')
f.write(op + ' r11d, [r12 + r12]\n')
f.write(op + ' r11w, [r8d + r12d]\n')
f.write(op + ' r12w, [r12 + r8]\n')
f.write(op + ' r11, [r12d + r13d]\n')
f.write(op + ' r11, [r12 + r13]\n')
f.write(op + ' r9, [r8d + 8 * r8d]\n')
# op reg, [scale * index + disp]
if case < 0 or case == 6:
f.write(op + ' ax, [eax * 2 + 7]\n')
f.write(op + ' ax, [ebx * 2 + 0x77]\n')
f.write(op + ' cx, [2 * edx + 0x99]\n')
f.write(op + ' ebx, [2 * eax * 2 + 0x1999]\n')
f.write(op + ' esi, [4 * ecx + 0x00 - 0x42]\n')
f.write(op + ' esi, [8 * ebp + 0x00 - 0xBB]\n')
# 64-bits
f.write(op + ' ax, [rax * 2 + 0b0 - 0b1000000]\n')
f.write(op + ' ax, [0x777 + rbx * 2 + 0x666]\n')
f.write(op + ' cx, [2 * rdx + 0x66]\n')
f.write(op + ' rbx, [2 * rax * 2 + 0b10]\n')
f.write(op + ' rsi, [4 * rcx + 0x77]\n')
f.write(op + ' rsi, [0x99 + 8 * rbp]\n')
f.write(op + ' ax, [r8 * 2 + 0b0 - 0b1000000]\n')
f.write(op + ' ax, [r8d * 2 + 0b0 - 0b1000000]\n')
f.write(op + ' r11, [r8 * 2 + 0b0 - 0b1000000]\n')
f.write(op + ' r9w, [r8d * 2 + 0b0 - 0b1000000]\n')
f.write(op + ' rsi, [0 - 0x99 + 8 * r12 + 0b1]\n')
f.write(op + ' rsi, [0 - 0x99 + 8 * r12d + 0b1]\n')
# op reg, [base + scale * index + disp]
if case < 0 or case == 7:
f.write(op + ' ax, [ebx + ebx * 2 + 0x42]\n')
f.write(op + ' cx, [ecx + 2 * edx + 0xABCDE]\n')
f.write(op + ' ebx, [0x42 - 0x77 + esp + 2 * eax * 2]\n')
f.write(op + ' eax, [0x21 + ebp + 4 * ecx]\n')
f.write(op + ' eax, [0x4221 + ebp + 4 * ecx]\n')
f.write(op + ' ebx, [esp + 0x99 + 2 * 4 * ebp]\n')
f.write(op + ' ebx, [esp + 0x13771217 + 2 * 4 * ebp]\n')
f.write(op + ' ecx, [ebp + ebp + 0xA]\n')
f.write(op + ' ecx, [ebp + ebp + 0xAFF]\n')
f.write(op + ' dx, [eax + 0b1010 - 0xA + ebp]\n')
f.write(op + ' dx, [eax + ebp + 0xDAF]\n')
f.write(op + ' bp, [ebp + 0b00111 * 3 - 2 + eax]\n')
f.write(op + ' al, [ebp + 42 + 0x42 + esp]\n')
f.write(op + ' ah, [eax + 8 * eax + 0b1001]\n')
# op reg, [base + scale * index + disp] (64-bits)
if case < 0 or case == 8:
f.write(op + ' ax, [rbx + rbx * 2 + 0x42]\n')
f.write(op + ' rbx, [0x42 - 0x77 + rsp + 2 * rax * 2]\n')
f.write(op + ' rax, [0x21 + rbp + 4 * rcx]\n')
f.write(op + ' rax, [0x4221 + rbp + 4 * rcx]\n')
f.write(op + ' ebx, [rsp + 0x99 + 2 * 4 * rbp]\n')
f.write(op + ' dx, [rax + 0b1010 - 0xA + rbp]\n')
f.write(op + ' dx, [rax + rbp + 0xDAF]\n')
f.write(op + ' bp, [rsp + 0b00111 * 3 - 2 + rax]\n')
f.write(op + ' al, [rbp + 42 + 0x42 + rsp]\n')
f.write(op + ' ah, [rax + 8 * rax + 0b1001]\n')
f.write(op + ' ax, [r10 + rbx * 2 + 0x42]\n')
f.write(op + ' r12w, [0x42 - 0x77 + r13 + 2 * rax * 2]\n')
f.write(op + ' rax, [0x21 + r12 + 4 * r10]\n')
f.write(op + ' r15w, [0x4221 + r12 + 4 * rcx]\n')
f.write(op + ' ebx, [r13d + 0x99 + 2 * 4 * r12d]\n')
f.write(op + ' dx, [r8d + 0b1010 - 0xA + r12d]\n')
f.write(op + ' r13w, [r8 + r12 + 0xDAF]\n')
f.write(op + ' bp, [r13d + 0b00111 * 3 - 2 + r8d]\n')
f.write(op + ' al, [r12 + 42 + 0x42 + r13]\n')
f.write(op + ' r11w, [r8d + 8 * r8d + 0b1001]\n')
# ---------------------------------- op mem, reg ----------------------------------
# op [base], reg
if case < 0 or case == 9:
f.write(op + ' [ebx + 0], ax\n')
f.write(op + ' [edx], cx\n')
f.write(op + ' [eax], ebx\n')
f.write(op + ' [42 + ecx + 0 - 21 * 2], esi\n')
f.write(op + ' [esp], ah\n')
f.write(op + ' [ebp], ax\n')
# 64-bits
f.write(op + ' [rax], ebx\n')
f.write(op + ' [ebx + 0], rdx\n')
f.write(op + ' [rdx], rcx\n')
f.write(op + ' [eax], rbx\n')
f.write(op + ' [42 + rcx + 0 - 21 * 2], esi\n')
f.write(op + ' [esp], esi\n')
f.write(op + ' [rsp], esi\n')
f.write(op + ' [esp], rsi\n')
f.write(op + ' [rbp], rsi\n')
f.write(op + ' [r12], r11d\n')
f.write(op + ' [r13], r15w\n')
f.write(op + ' [r12d], r9\n')
f.write(op + ' [r13d], r15\n')
# op [disp], reg
if case < 0 or case == 10:
f.write(op + ' [0x1], ax\n')
f.write(op + ' [0x0], al\n')
f.write(op + ' [0b10], cx\n')
f.write(op + ' [10 - 20], ebx\n')
f.write(op + ' [0x77], esi\n')
f.write(op + ' [0 - 0x1377], ax\n')
f.write(op + ' [0x13771999], bl\n')
# 64-bits
f.write(op + ' rax, [0x1]\n')
f.write(op + ' rbx, [0x1000 - 0x2000]\n')
f.write(op + ' rsi, [0x99]\n')
f.write(op + ' [0x1], r8d\n')
f.write(op + ' [0x1000 - 0x2000], r12\n')
f.write(op + ' [0x99], r13w\n')
# op [scale * index], reg
if case < 0 or case == 11:
f.write(op + ' [eax * 2], ax\n')
f.write(op + ' [ebx * 2], ax\n')
f.write(op + ' [2 * edx], cx\n')
f.write(op + ' [2 * eax * 2], ebx\n')
f.write(op + ' [4 * ecx], esi\n')
f.write(op + ' [8 * ebp], esi\n')
# 64-bits
f.write(op + ' ax, [rax * 2]\n')
f.write(op + ' ax, [rbx * 2]\n')
f.write(op + ' cx, [2 * rdx]\n')
f.write(op + ' rbx, [2 * rax * 2]\n')
f.write(op + ' rsi, [4 * rcx]\n')
f.write(op + ' rsi, [8 * rbp]\n')
f.write(op + ' [r8d * 2], eax\n')
f.write(op + ' [r8 * 2], rsp\n')
f.write(op + ' [2 * r11], r12\n')
f.write(op + ' [2 * r12 * 2], r13w\n')
f.write(op + ' [4 * r13], r14\n')
# op [base + disp], reg
if case < 0 or case == 12:
f.write(op + ' [ebx + 0x77], ax\n')
f.write(op + ' [ebx + 0x99], eax\n')
f.write(op + ' [ebx + 0x1999], ebx\n')
f.write(op + ' [eax + 0x42], ch\n')
f.write(op + ' [eax + 0x1234], ecx\n')
f.write(op + ' [esp + 0xA], edx\n')
f.write(op + ' [esp + 0xABCDE], al\n')
f.write(op + ' [ebp + 0xB], ah\n')
f.write(op + ' [ebp + 0xA0C], bl\n')
# 64-bits
f.write(op + ' [rbx + 0x42], ax\n')
f.write(op + ' [rbx + 0x99], rax\n')
f.write(op + ' [ebx + 0 - 0x99], rax\n')
f.write(op + ' [rax + 0x1234], ecx\n')
f.write(op + ' [rsp + 0xA], rcx\n')
f.write(op + ' [rsp + 0xABCDE], al\n')
f.write(op + ' [rbp + 0xB], rdx\n')
f.write(op + ' [rbp + 0xA0C], bl\n')
f.write(op + ' [r8 + 0x42], ax\n')
f.write(op + ' [r8d + 0x99], rax\n')
f.write(op + ' [ebx + 0 - 0x99], r10d\n')
f.write(op + ' [r12d + 0x1234], r11w\n')
f.write(op + ' [r13d + 0xA], r12\n')
f.write(op + ' [r13d + 0xABCDE], al\n')
f.write(op + ' [r12d + 0xB], r13d\n')
f.write(op + ' [r12d + 0xA0C], r9w\n')
# op [base + scale * index], reg
if case < 0 or case == 13:
f.write(op + ' [ebx + ebx * 2], ax\n')
f.write(op + ' [ecx + 2 * edx], cx\n')
f.write(op + ' [esp + 2 * eax * 2], edx\n')
f.write(op + ' [ebp + 4 * ecx], eax\n')
f.write(op + ' [esp + 2 * 4 * ebp], ebx\n')
f.write(op + ' [ebp + ebp], ecx\n')
f.write(op + ' [eax + ebp], dx\n')
f.write(op + ' [ebp + eax], bp\n')
f.write(op + ' [ebp + esp], al\n')
f.write(op + ' [eax + 8 * eax], ah\n')
# 64-bits
f.write(op + ' [rbx + rbx * 2], ax\n')
f.write(op + ' [rcx + 2 * rdx], cx\n')
f.write(op + ' [rsp + 2 * rax * 2], rbx\n')
f.write(op + ' [rbp + 4 * rcx], eax\n')
f.write(op + ' [rsp + 2 * 4 * rbp], rbx\n')
f.write(op + ' [rbp + rbp], ecx\n')
f.write(op + ' [rax + rbp], dx\n')
f.write(op + ' [rbp + rax], bp\n')
f.write(op + ' [rbp + rsp], rdx\n')
f.write(op + ' [rax + 8 * rax], ah\n')
f.write(op + ' [r11 + r11 * 2], r8\n')
f.write(op + ' [r11 + 2 * rdx], cx\n')
f.write(op + ' [r13 + 2 * r8 * 2], r15\n')
f.write(op + ' [r12 + 4 * r11], eax\n')
f.write(op + ' [r13 + 2 * 4 * r12], r10\n')
f.write(op + ' [r12 + r12], r11d\n')
f.write(op + ' [r8d + r12d], r11w\n')
f.write(op + ' [r12 + r8], r12w\n')
f.write(op + ' [r12d + r13d], r11\n')
f.write(op + ' [r12 + r13], r11\n')
f.write(op + ' [r8d + 8 * r8d], r9\n')
# op [scale * index + disp], reg
if case < 0 or case == 14:
f.write(op + ' [eax * 2 + 7], ax\n')
f.write(op + ' [ebx * 2 + 0x77], ax\n')
f.write(op + ' [2 * edx + 0x99], cx\n')
f.write(op + ' [2 * eax * 2 + 0x1999], ebx\n')
f.write(op + ' [4 * ecx + 0x00 - 0x42], esi\n')
f.write(op + ' [8 * ebp + 0x00 - 0xBB], esi\n')
# 64-bits
f.write(op + ' [rax * 2 + 0b0 - 0b1000000], ax\n')
f.write(op + ' [0x777 + rbx * 2 + 0x666], ax\n')
f.write(op + ' [2 * rdx + 0x66], cx\n')
f.write(op + ' [2 * rax * 2 + 0b10], rbx\n')
f.write(op + ' [4 * rcx + 0x77], rsi\n')
f.write(op + ' [0x99 + 8 * rbp], rsi\n')
f.write(op + ' [r8 * 2 + 0b0 - 0b1000000], ax\n')
f.write(op + ' [r8d * 2 + 0b0 - 0b1000000], ax\n')
f.write(op + ' [r8 * 2 + 0b0 - 0b1000000], r11\n')
f.write(op + ' [r8d * 2 + 0b0 - 0b1000000], r9w\n')
f.write(op + ' [0 - 0x99 + 8 * r12 + 0b1], rsi\n')
f.write(op + ' [0 - 0x99 + 8 * r12d + 0b1], rsi\n')
# op [base + scale * index + disp], reg
if case < 0 or case == 15:
f.write(op + ' [ebx + ebx * 2 + 0x42], ax\n')
f.write(op + ' [ecx + 2 * edx + 0xABCDE], cx\n')
f.write(op + ' [0x42 - 0x77 + esp + 2 * eax * 2], ebx\n')
f.write(op + ' [0x21 + ebp + 4 * ecx], eax\n')
f.write(op + ' [0x4221 + ebp + 4 * ecx], eax\n')
f.write(op + ' [esp + 0x99 + 2 * 4 * ebp], ebx\n')
f.write(op + ' [esp + 0x13771217 + 2 * 4 * ebp], ebx\n')
f.write(op + ' [ebp + ebp + 0xA], ecx\n')
f.write(op + ' [ebp + ebp + 0xAFF], ecx\n')
f.write(op + ' [eax + 0b1010 - 0xA + ebp], dx\n')
f.write(op + ' [eax + ebp + 0xDAF], dx\n')
f.write(op + ' [ebp + 0b00111 * 3 - 2 + eax], bp\n')
f.write(op + ' [ebp + 42 + 0x42 + esp], al\n')
# op [base + scale * index + disp], reg (64-bits)
if case < 0 or case == 16:
f.write(op + ' [rbx + rbx * 2 + 0x42], ax\n')
f.write(op + ' [0x42 - 0x77 + rsp + 2 * rax * 2], rbx\n')
f.write(op + ' [0x21 + rbp + 4 * rcx], rax\n')
f.write(op + ' [0x4221 + rbp + 4 * rcx], rax\n')
f.write(op + ' [rsp + 0x99 + 2 * 4 * rbp], ebx\n')
f.write(op + ' [rax + 0b1010 - 0xA + rbp], dx\n')
f.write(op + ' [rax + rbp + 0xDAF], dx\n')
f.write(op + ' [rsp + 0b00111 * 3 - 2 + rax], bp\n')
f.write(op + ' [rbp + 42 + 0x42 + rsp], al\n')
f.write(op + ' [rax + 8 * rax + 0b1001], ah\n')
f.write(op + ' [eax + 8 * eax + 0b1001], ah\n')
f.write(op + ' [r10 + rbx * 2 + 0x42], ax\n')
f.write(op + ' [0x42 - 0x77 + r13 + 2 * rax * 2], r12w\n')
f.write(op + ' [0x21 + r12 + 4 * r10], rax\n')
f.write(op + ' [0x4221 + r12 + 4 * rcx], r15w\n')
f.write(op + ' [r13d + 0x99 + 2 * 4 * r12d], ebx\n')
f.write(op + ' [r8d + 0b1010 - 0xA + r12d], dx\n')
f.write(op + ' [r8 + r12 + 0xDAF], r13w\n')
f.write(op + ' [r13d + 0b00111 * 3 - 2 + r8d], bp\n')
f.write(op + ' [r12 + 42 + 0x42 + r13], al\n')
f.write(op + ' [r8d + 8 * r8d + 0b1001], r11w\n')
# ---------------------------------- op reg, imd ----------------------------------
# op reg, imd
if case < 0 or case == 17:
f.write(op + ' ebx, 8 * 42 + 0b1001 - 0x13771999\n')
f.write(op + ' rbx, 8 * 42 + 0b1001 - 0x13771999\n')
f.write(op + ' rbx, 0 - 0x13771999\n')
f.write(op + ' r12, 0 + 0x13771999\n')
f.write(op + ' r8d, 8 * 7 + 0x13771999\n')
f.write(op + ' r8b, 0 - 128\n')
f.write(op + ' r8b, 0 - 127\n')
f.write(op + ' r8b, 0\n')
f.write(op + ' r8b, 0 + 127\n')
f.write(op + ' r8b, 0 + 128\n')
f.write(op + ' r8w, 0 - 128\n')
f.write(op + ' r8w, 0 - 127\n')
f.write(op + ' r8w, 0\n')
f.write(op + ' r8w, 0 + 127\n')
f.write(op + ' r8w, 0 + 128\n')
f.write(op + ' r8, 0 - 128\n')
f.write(op + ' r8, 0 - 127\n')
f.write(op + ' r8, 0\n')
f.write(op + ' r8, 0 + 127\n')
f.write(op + ' r8, 0 + 128\n')
# op (al | ax | eax | rax), imd
if case < 0 or case == 18:
f.write(op + ' al, 0x1377\n')
f.write(op + ' ax, 0x1377\n')
f.write(op + ' eax, 0x1377\n')
f.write(op + ' rax, 0x1377\n')
# op mem, imd
if case < 0 or case == 19:
f.write(op + 'b [2 * rax], 0 + 127\n')
f.write(op + 'w [eax], 0 + 128\n')
f.write(op + 'q [2 * eax], 0 - 128\n')
f.write(op + 'd [r8], 0 - 127\n')
f.write(op + 'b [r8d], 0\n')
f.write(op + 'w [0x99], 0 + 127\n')
f.write(op + 'q [r12], 0 + 128\n')
f.write(op + 'b [r8d], 32 * 32 * 32\n')
f.close()
| 36.679916
| 84
| 0.470028
| 3,217
| 17,533
| 2.561703
| 0.046316
| 0.280306
| 0.373741
| 0.383327
| 0.871253
| 0.637544
| 0.446548
| 0.285766
| 0.174251
| 0.093314
| 0
| 0.125219
| 0.250727
| 17,533
| 477
| 85
| 36.756813
| 0.502093
| 0.053556
| 0
| 0.04186
| 0
| 0
| 0.503473
| 0
| 0
| 0
| 0.052371
| 0
| 0
| 1
| 0
| false
| 0
| 0.002326
| 0
| 0.002326
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
da19ddf84fad7dc4c2fc9b3481faaa9115e9c0df
| 786
|
py
|
Python
|
apps/products/migrations/0010_auto_20180608_2237.py
|
xeroz/gshop
|
d1bd0920d8ffaae9e7f52fcf8d60dd2d009cde2a
|
[
"MIT"
] | null | null | null |
apps/products/migrations/0010_auto_20180608_2237.py
|
xeroz/gshop
|
d1bd0920d8ffaae9e7f52fcf8d60dd2d009cde2a
|
[
"MIT"
] | 4
|
2020-02-11T21:31:46.000Z
|
2020-06-05T00:43:08.000Z
|
apps/products/migrations/0010_auto_20180608_2237.py
|
xeroz/gshop
|
d1bd0920d8ffaae9e7f52fcf8d60dd2d009cde2a
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.0.1 on 2018-06-08 22:37
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('products', '0009_auto_20180520_2113'),
]
operations = [
migrations.RemoveField(
model_name='cart',
name='products',
),
migrations.RemoveField(
model_name='cart',
name='user',
),
migrations.RemoveField(
model_name='productcart',
name='cart',
),
migrations.RemoveField(
model_name='productcart',
name='product',
),
migrations.DeleteModel(
name='Cart',
),
migrations.DeleteModel(
name='ProductCart',
),
]
| 21.833333
| 48
| 0.516539
| 64
| 786
| 6.234375
| 0.515625
| 0.210526
| 0.260652
| 0.300752
| 0.41604
| 0.41604
| 0
| 0
| 0
| 0
| 0
| 0.062626
| 0.370229
| 786
| 35
| 49
| 22.457143
| 0.743434
| 0.057252
| 0
| 0.551724
| 1
| 0
| 0.133965
| 0.031123
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.034483
| 0
| 0.137931
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
da354b814bb434acd2b2b734822373cf8041a0e4
| 165
|
py
|
Python
|
mmdet2trt/models/detectors/__init__.py
|
tehkillerbee/mmdetection-to-tensorrt
|
b1532465ab1c6617b350981bbda2bc361fe291a6
|
[
"Apache-2.0"
] | 496
|
2020-07-16T08:37:02.000Z
|
2022-03-31T01:13:45.000Z
|
mmdet2trt/models/detectors/__init__.py
|
tehkillerbee/mmdetection-to-tensorrt
|
b1532465ab1c6617b350981bbda2bc361fe291a6
|
[
"Apache-2.0"
] | 98
|
2020-07-30T02:14:41.000Z
|
2022-03-21T08:58:12.000Z
|
mmdet2trt/models/detectors/__init__.py
|
tehkillerbee/mmdetection-to-tensorrt
|
b1532465ab1c6617b350981bbda2bc361fe291a6
|
[
"Apache-2.0"
] | 80
|
2020-08-06T03:52:11.000Z
|
2022-03-23T11:41:46.000Z
|
from .single_stage import SingleStageDetectorWraper
from .two_stage import TwoStageDetectorWraper
__all__ = ['SingleStageDetectorWraper', 'TwoStageDetectorWraper']
| 33
| 65
| 0.860606
| 13
| 165
| 10.461538
| 0.615385
| 0.161765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078788
| 165
| 4
| 66
| 41.25
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0.284848
| 0.284848
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
da45a683cd9931e29b662eab5e1cb9091d092f1e
| 16,447
|
py
|
Python
|
GPy/util/datasets/data_resources_create.py
|
rokroskar/GPy
|
0f8dbba56d480902c86cfe8bad9e79d9eabae009
|
[
"BSD-3-Clause"
] | 1
|
2016-08-04T21:28:11.000Z
|
2016-08-04T21:28:11.000Z
|
GPy/util/datasets/data_resources_create.py
|
rokroskar/GPy
|
0f8dbba56d480902c86cfe8bad9e79d9eabae009
|
[
"BSD-3-Clause"
] | null | null | null |
GPy/util/datasets/data_resources_create.py
|
rokroskar/GPy
|
0f8dbba56d480902c86cfe8bad9e79d9eabae009
|
[
"BSD-3-Clause"
] | null | null | null |
import json
# Base URLs for the dataset mirrors referenced by entries below.
neil_url = 'http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/'
sam_url = 'http://www.cs.nyu.edu/~roweis/data/'
# NOTE(review): cmu_url is not referenced by any entry in data_resources
# below (cmu_mocap_full spells its URL out inline) — possibly vestigial.
cmu_url = 'http://mocap.cs.cmu.edu/subjects/'
# Registry of downloadable datasets. Maps dataset name -> metadata dict with:
# 'urls' (download locations), 'files' (one list of filenames per url),
# 'citation', 'details', 'license', and optionally 'size' (bytes) and
# 'suffices' (query-string suffixes appended per file).
data_resources = {'ankur_pose_data' : {'urls' : [neil_url + 'ankur_pose_data/'],
'files' : [['ankurDataPoseSilhouette.mat']],
'license' : None,
'citation' : """3D Human Pose from Silhouettes by Relevance Vector Regression (In CVPR'04). A. Agarwal and B. Triggs.""",
'details' : """Artificially generated data of silhouettes given poses. Note that the data does not display a left/right ambiguity because across the entire data set one of the arms sticks out more the the other, disambiguating the pose as to which way the individual is facing."""},
'boston_housing' : {'urls' : ['http://archive.ics.uci.edu/ml/machine-learning-databases/housing/'],
'files' : [['Index', 'housing.data', 'housing.names']],
'citation' : """Harrison, D. and Rubinfeld, D.L. 'Hedonic prices and the demand for clean air', J. Environ. Economics & Management, vol.5, 81-102, 1978.""",
'details' : """The Boston Housing data relates house values in Boston to a range of input variables.""",
'license' : None,
'size' : 51276
},
'brendan_faces' : {'urls' : [sam_url],
'files': [['frey_rawface.mat']],
'citation' : 'Frey, B. J., Colmenarez, A and Huang, T. S. Mixtures of Local Linear Subspaces for Face Recognition. Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition 1998, 32-37, June 1998. Computer Society Press, Los Alamitos, CA.',
'details' : """A video of Brendan Frey's face popularized as a benchmark for visualization by the Locally Linear Embedding.""",
'license': None,
'size' : 1100584},
'cmu_mocap_full' : {'urls' : ['http://mocap.cs.cmu.edu'],
'files' : [['allasfamc.zip']],
'citation' : """Please include this in your acknowledgements: The data used in this project was obtained from mocap.cs.cmu.edu.'
'The database was created with funding from NSF EIA-0196217.""",
'details' : """CMU Motion Capture data base. Captured by a Vicon motion capture system consisting of 12 infrared MX-40 cameras, each of which is capable of recording at 120 Hz with images of 4 megapixel resolution. Motions are captured in a working volume of approximately 3m x 8m. The capture subject wears 41 markers and a stylish black garment.""",
'license' : """From http://mocap.cs.cmu.edu. This data is free for use in research projects. You may include this data in commercially-sold products, but you may not resell this data directly, even in converted form. If you publish results obtained using this data, we would appreciate it if you would send the citation to your published paper to jkh+mocap@cs.cmu.edu, and also would add this text to your acknowledgments section: The data used in this project was obtained from mocap.cs.cmu.edu. The database was created with funding from NSF EIA-0196217.""",
'size' : None},
'creep_rupture' : {'urls' : ['http://www.msm.cam.ac.uk/map/data/tar/'],
'files' : [['creeprupt.tar']],
'citation' : 'Materials Algorithms Project Data Library: MAP_DATA_CREEP_RUPTURE. F. Brun and T. Yoshida.',
'details' : """Provides 2066 creep rupture test results of steels (mainly of two kinds of steels: 2.25Cr and 9-12 wt% Cr ferritic steels). See http://www.msm.cam.ac.uk/map/data/materials/creeprupt-b.html.""",
'license' : None,
'size' : 602797},
'della_gatta' : {'urls' : [neil_url + 'della_gatta/'],
'files': [['DellaGattadata.mat']],
'citation' : 'Direct targets of the TRP63 transcription factor revealed by a combination of gene expression profiling and reverse engineering. Giusy Della Gatta, Mukesh Bansal, Alberto Ambesi-Impiombato, Dario Antonini, Caterina Missero, and Diego di Bernardo, Genome Research 2008',
'details': "The full gene expression data set from della Gatta et al (http://www.ncbi.nlm.nih.gov/pmc/articles/PMC2413161/) processed by RMA.",
'license':None,
'size':3729650},
'epomeo_gpx' : {'urls' : [neil_url + 'epomeo_gpx/'],
'files': [['endomondo_1.gpx', 'endomondo_2.gpx', 'garmin_watch_via_endomondo.gpx','viewranger_phone.gpx','viewranger_tablet.gpx']],
'citation' : '',
'details': "Five different GPS traces of the same run up Mount Epomeo in Ischia. The traces are from different sources. endomondo_1 and endomondo_2 are traces from the mobile phone app Endomondo, with a split in the middle. garmin_watch_via_endomondo is the trace from a Garmin watch, with a segment missing about 4 kilometers in. viewranger_phone and viewranger_tablet are traces from a phone and a tablet through the viewranger app. The viewranger_phone data comes from the same mobile phone as the Endomondo data (i.e. there are 3 GPS devices, but one device recorded two traces).",
'license':None,
'size': 2031872},
'three_phase_oil_flow': {'urls' : [neil_url + 'three_phase_oil_flow/'],
'files' : [['DataTrnLbls.txt', 'DataTrn.txt', 'DataTst.txt', 'DataTstLbls.txt', 'DataVdn.txt', 'DataVdnLbls.txt']],
'citation' : 'Bishop, C. M. and G. D. James (1993). Analysis of multiphase flows using dual-energy gamma densitometry and neural networks. Nuclear Instruments and Methods in Physics Research A327, 580-593',
'details' : """The three phase oil data used initially for demonstrating the Generative Topographic mapping.""",
'license' : None,
'size' : 712796},
'rogers_girolami_data' : {'urls' : ['https://www.dropbox.com/sh/7p6tu1t29idgliq/_XqlH_3nt9/'],
'files' : [['firstcoursemldata.tar.gz']],
'suffices' : [['?dl=1']],
'citation' : 'A First Course in Machine Learning. Simon Rogers and Mark Girolami: Chapman & Hall/CRC, ISBN-13: 978-1439824146',
'details' : """Data from the textbook 'A First Course in Machine Learning'. Available from http://www.dcs.gla.ac.uk/~srogers/firstcourseml/.""",
'license' : None,
'size' : 21949154},
'olivetti_faces' : {'urls' : [neil_url + 'olivetti_faces/', sam_url],
'files' : [['att_faces.zip'], ['olivettifaces.mat']],
'citation' : 'Ferdinando Samaria and Andy Harter, Parameterisation of a Stochastic Model for Human Face Identification. Proceedings of 2nd IEEE Workshop on Applications of Computer Vision, Sarasota FL, December 1994',
'details' : """Olivetti Research Labs Face data base, acquired between December 1992 and December 1994 in the Olivetti Research Lab, Cambridge (which later became AT&T Laboratories, Cambridge). When using these images please give credit to AT&T Laboratories, Cambridge. """,
'license': None,
'size' : 8561331},
'olympic_marathon_men' : {'urls' : [neil_url + 'olympic_marathon_men/'],
'files' : [['olympicMarathonTimes.csv']],
'citation' : None,
'details' : """Olympic mens' marathon gold medal winning times from 1896 to 2012. Time given in pace (minutes per kilometer). Data is originally downloaded and collated from Wikipedia, we are not responsible for errors in the data""",
'license': None,
'size' : 584},
'osu_run1' : {'urls': ['http://accad.osu.edu/research/mocap/data/', neil_url + 'stick/'],
'files': [['run1TXT.ZIP'],['connections.txt']],
'details' : "Motion capture data of a stick man running from the Open Motion Data Project at Ohio State University.",
'citation' : 'The Open Motion Data Project by The Ohio State University Advanced Computing Center for the Arts and Design, http://accad.osu.edu/research/mocap/mocap_data.htm.',
'license' : 'Data is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License (http://creativecommons.org/licenses/by-nc-sa/3.0/).',
'size': 338103},
'osu_accad' : {'urls': ['http://accad.osu.edu/research/mocap/data/', neil_url + 'stick/'],
'files': [['swagger1TXT.ZIP','handspring1TXT.ZIP','quickwalkTXT.ZIP','run1TXT.ZIP','sprintTXT.ZIP','dogwalkTXT.ZIP','camper_04TXT.ZIP','dance_KB3_TXT.ZIP','per20_TXT.ZIP','perTWO07_TXT.ZIP','perTWO13_TXT.ZIP','perTWO14_TXT.ZIP','perTWO15_TXT.ZIP','perTWO16_TXT.ZIP'],['connections.txt']],
'details' : "Motion capture data of different motions from the Open Motion Data Project at Ohio State University.",
'citation' : 'The Open Motion Data Project by The Ohio State University Advanced Computing Center for the Arts and Design, http://accad.osu.edu/research/mocap/mocap_data.htm.',
'license' : 'Data is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License (http://creativecommons.org/licenses/by-nc-sa/3.0/).',
'size': 15922790},
'pumadyn-32nm' : {'urls' : ['ftp://ftp.cs.toronto.edu/pub/neuron/delve/data/tarfiles/pumadyn-family/'],
'files' : [['pumadyn-32nm.tar.gz']],
'details' : """Pumadyn non linear 32 input data set with moderate noise. See http://www.cs.utoronto.ca/~delve/data/pumadyn/desc.html for details.""",
'citation' : """Created by Zoubin Ghahramani using the Matlab Robotics Toolbox of Peter Corke. Corke, P. I. (1996). A Robotics Toolbox for MATLAB. IEEE Robotics and Automation Magazine, 3 (1): 24-32.""",
'license' : """Data is made available by the Delve system at the University of Toronto""",
'size' : 5861646},
'robot_wireless' : {'urls' : [neil_url + 'robot_wireless/'],
'files' : [['uw-floor.txt']],
'citation' : """WiFi-SLAM using Gaussian Process Latent Variable Models by Brian Ferris, Dieter Fox and Neil Lawrence in IJCAI'07 Proceedings pages 2480-2485. Data used in A Unifying Probabilistic Perspective for Spectral Dimensionality Reduction: Insights and New Models by Neil D. Lawrence, JMLR 13 pg 1609--1638, 2012.""",
'details' : """Data created by Brian Ferris and Dieter Fox. Consists of WiFi access point strengths taken during a circuit of the Paul Allen building at the University of Washington.""",
'license' : None,
'size' : 284390},
'swiss_roll' : {'urls' : ['http://isomap.stanford.edu/'],
'files' : [['swiss_roll_data.mat']],
'details' : """Swiss roll data made available by Tenenbaum, de Silva and Langford to demonstrate isomap, available from http://isomap.stanford.edu/datasets.html.""",
'citation' : 'A Global Geometric Framework for Nonlinear Dimensionality Reduction, J. B. Tenenbaum, V. de Silva and J. C. Langford, Science 290 (5500): 2319-2323, 22 December 2000',
'license' : None,
'size' : 800256},
'ripley_prnn_data' : {'urls' : ['http://www.stats.ox.ac.uk/pub/PRNN/'],
'files' : [['Cushings.dat', 'README', 'crabs.dat', 'fglass.dat', 'fglass.grp', 'pima.te', 'pima.tr', 'pima.tr2', 'synth.te', 'synth.tr', 'viruses.dat', 'virus3.dat']],
'details' : """Data sets from Brian Ripley's Pattern Recognition and Neural Networks""",
'citation': """Pattern Recognition and Neural Networks by B.D. Ripley (1996) Cambridge University Press ISBN 0 521 46986 7""",
'license' : None,
'size' : 93565},
'isomap_face_data' : {'urls' : [neil_url + 'isomap_face_data/'],
'files' : [['face_data.mat']],
'details' : """Face data made available by Tenenbaum, de Silva and Langford to demonstrate isomap, available from http://isomap.stanford.edu/datasets.html.""",
'citation' : 'A Global Geometric Framework for Nonlinear Dimensionality Reduction, J. B. Tenenbaum, V. de Silva and J. C. Langford, Science 290 (5500): 2319-2323, 22 December 2000',
'license' : None,
'size' : 24229368},
'xw_pen' : {'urls' : [neil_url + 'xw_pen/'],
'files' : [['xw_pen_15.csv']],
'details' : """Accelerometer pen data used for robust regression by Tipping and Lawrence.""",
'citation' : 'Michael E. Tipping and Neil D. Lawrence. Variational inference for Student-t models: Robust Bayesian interpolation and generalised component analysis. Neurocomputing, 69:123--141, 2005',
'license' : None,
'size' : 3410},
'hapmap3' : {'urls' : ['http://hapmap.ncbi.nlm.nih.gov/downloads/genotypes/latest_phaseIII_ncbi_b36/plink_format/'],
'files' : [['hapmap3_r2_b36_fwd.consensus.qc.poly.map.bz2', 'hapmap3_r2_b36_fwd.consensus.qc.poly.ped.bz2', 'relationships_w_pops_121708.txt']],
'details' : """HapMap Project: Single Nucleotide Polymorphism sequenced in all human populations. See http://www.nature.com/nature/journal/v426/n6968/abs/nature02168.html for details.""",
'citation': """Gibbs, Richard A., et al. "The international HapMap project." Nature 426.6968 (2003): 789-796.""",
'license' : """International HapMap Project Public Access License (http://hapmap.ncbi.nlm.nih.gov/cgi-perl/registration#licence)""",
'size' : 2*1729092237 + 62265},
}
# Serialize the registry to JSON next to this script.
with open('data_resources.json', 'w') as f:
    # print as a function call: valid in both Python 2 and Python 3.
    # The original statement form (`print "..."`) is a SyntaxError on Python 3.
    print("writing data_resources")
    json.dump(data_resources, f)
| 121.82963
| 620
| 0.544963
| 1,779
| 16,447
| 4.975267
| 0.40416
| 0.018642
| 0.023726
| 0.008813
| 0.2141
| 0.200429
| 0.188679
| 0.1819
| 0.166761
| 0.166761
| 0
| 0.041651
| 0.34748
| 16,447
| 134
| 621
| 122.738806
| 0.783079
| 0
| 0
| 0.161538
| 0
| 0.269231
| 0.624308
| 0.030401
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.007692
| null | null | 0.015385
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
da58bf7d0bb3773c04e9a9676a1d5824d2235e62
| 1,093
|
py
|
Python
|
GDLC/tests/helper_default_copy.py
|
ptoche/Gran-Diccionari-de-la-llengua-catalana-Kindle-Edition-
|
a31412d2a6f05a6c1a9bd9854cdd6fee8abd65f4
|
[
"BSD-3-Clause"
] | 1
|
2022-02-01T16:08:03.000Z
|
2022-02-01T16:08:03.000Z
|
GDLC/tests/helper_default_copy.py
|
ptoche/Gran-Diccionari-de-la-llengua-catalana-Kindle-Edition-
|
a31412d2a6f05a6c1a9bd9854cdd6fee8abd65f4
|
[
"BSD-3-Clause"
] | null | null | null |
GDLC/tests/helper_default_copy.py
|
ptoche/Gran-Diccionari-de-la-llengua-catalana-Kindle-Edition-
|
a31412d2a6f05a6c1a9bd9854cdd6fee8abd65f4
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Makes a list of the files to be copied from source to destination.
Notes: Couldn't make this doctest work, so useful only as a visual test.
>>> from GDLC.GDLC import *
# Copy a list of files to the default directory:
>>> template_copy(dir='~/GDLC/source/GDLC_unpacked') # doctest:+ELLIPSIS
[PosixPath('.../mobi7/Images/author_footer.jpeg'),
PosixPath('.../mobi7/Images/author_image.jpeg'),
PosixPath('.../mobi7/Images/cover_image.jpeg'),
PosixPath('.../mobi7/Images/cover_logo.jpeg'),
PosixPath('.../mobi7/Images/cover_thumb.jpeg'),
PosixPath('.../mobi8/mimetype'),
PosixPath('.../mobi8/META-INF/container.xml'),
PosixPath('.../mobi8/OEBPS/content.opf'),
PosixPath('.../mobi8/OEBPS/toc.ncx'),
PosixPath('.../mobi8/OEBPS/Styles/style0001.css'),
PosixPath('.../mobi8/OEBPS/Styles/style0002.css'),
PosixPath('.../mobi8/OEBPS/Images/author_footer.jpeg'),
PosixPath('.../mobi8/OEBPS/Images/author_image.jpeg'),
PosixPath('.../mobi8/OEBPS/Images/cover_image.jpeg'),
PosixPath('.../mobi8/OEBPS/Images/cover_logo.jpeg'),
PosixPath('.../mobi8/OEBPS/Text/cover_page.xhtml')]
"""
| 39.035714
| 72
| 0.711802
| 145
| 1,093
| 5.282759
| 0.427586
| 0.201044
| 0.223238
| 0.125326
| 0.406005
| 0.190601
| 0.101828
| 0
| 0
| 0
| 0
| 0.023762
| 0.075938
| 1,093
| 27
| 73
| 40.481481
| 0.734653
| 0.989936
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
da5965efd18efcbcc1054a341ce522c266eb499c
| 1,603
|
py
|
Python
|
tests/integration/test_path.py
|
bengabay11/chromesy
|
03328f7330c4e119e7f6a5bc2b75575eb47e4e61
|
[
"MIT"
] | null | null | null |
tests/integration/test_path.py
|
bengabay11/chromesy
|
03328f7330c4e119e7f6a5bc2b75575eb47e4e61
|
[
"MIT"
] | 23
|
2021-08-05T17:38:02.000Z
|
2022-02-14T08:28:15.000Z
|
tests/integration/test_path.py
|
bengabay11/chromesy
|
03328f7330c4e119e7f6a5bc2b75575eb47e4e61
|
[
"MIT"
] | null | null | null |
import os
import pytest
from chpass.exceptions.operating_system_not_supported import OperatingSystemNotSupported
from chpass.exceptions.user_not_found_exception import UserNotFoundException
from chpass.services.path import get_home_directory, get_chrome_user_folder
@pytest.fixture(scope="module")
def invalid_os() -> int:
    """Fixture: an integer that is not a valid operating-system identifier."""
    return -1
@pytest.fixture(scope="module")
def os_not_exist() -> str:
    """Fixture: a platform name that chpass does not support."""
    return "ChromeOS"
def test_get_home_directory(connected_user):
    """The connected user's home directory exists and is named after the user."""
    home_dir = get_home_directory(connected_user)
    assert os.path.basename(home_dir) == connected_user
    assert os.path.exists(home_dir)
@pytest.fixture(scope="module")
def user_not_exist() -> str:
    """Fixture: a username that does not exist on this machine.

    Module-scoped for consistency with the other constant-value fixtures
    in this module; the returned string is immutable, so sharing is safe.
    """
    return "not_exist"
def test_get_home_directory_user_not_exist(user_not_exist):
    """get_home_directory raises UserNotFoundException for an unknown user."""
    with pytest.raises(UserNotFoundException):
        get_home_directory(user_not_exist)
@pytest.fixture(scope="module")
def invalid_user() -> int:
    """Fixture: a value of the wrong type (int) to pass as a username.

    Module-scoped for consistency with the other constant-value fixtures.
    """
    return -1
def test_get_home_directory_invalid_user(invalid_user):
    """A non-string username is rejected with a TypeError."""
    with pytest.raises(TypeError):
        get_home_directory(invalid_user)
def test_get_chrome_user_folder(connected_user):
    """The Chrome profile folder resolved for the connected user exists."""
    folder = get_chrome_user_folder(connected_user)
    assert os.path.exists(folder)
def test_get_chrome_user_folder_os_not_exist(connected_user, os_not_exist):
    """An unsupported platform name raises OperatingSystemNotSupported."""
    with pytest.raises(OperatingSystemNotSupported):
        get_chrome_user_folder(connected_user, platform=os_not_exist)
def test_get_chrome_user_folder_invalid_os(connected_user, invalid_os):
    """A non-string platform value also raises OperatingSystemNotSupported."""
    with pytest.raises(OperatingSystemNotSupported):
        get_chrome_user_folder(connected_user, platform=invalid_os)
| 27.169492
| 88
| 0.805365
| 217
| 1,603
| 5.525346
| 0.193548
| 0.108424
| 0.1201
| 0.110926
| 0.545455
| 0.371143
| 0.201835
| 0.138449
| 0.138449
| 0.138449
| 0
| 0.001418
| 0.120399
| 1,603
| 58
| 89
| 27.637931
| 0.848936
| 0
| 0
| 0.222222
| 0
| 0
| 0.018091
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 1
| 0.277778
| false
| 0.083333
| 0.138889
| 0.111111
| 0.527778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 4
|
e506e440b9944e3663077c3d5d48f8e3e702deb4
| 16
|
py
|
Python
|
tailhead/version.py
|
moisesluza/tailhead
|
262e14a1a9a928fa26094906939a0d9c521431c9
|
[
"MIT"
] | 12
|
2015-10-20T20:48:46.000Z
|
2020-07-07T18:39:51.000Z
|
tailhead/version.py
|
moisesluza/tailhead
|
262e14a1a9a928fa26094906939a0d9c521431c9
|
[
"MIT"
] | 3
|
2016-09-13T01:51:15.000Z
|
2021-12-30T21:46:25.000Z
|
tailhead/version.py
|
moisesluza/tailhead
|
262e14a1a9a928fa26094906939a0d9c521431c9
|
[
"MIT"
] | 4
|
2015-11-04T18:31:11.000Z
|
2021-11-10T19:01:55.000Z
|
# Package version string (PEP 8 spacing around the assignment).
VERSION = '1.0.2'
| 8
| 15
| 0.625
| 4
| 16
| 2.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0.0625
| 16
| 1
| 16
| 16
| 0.466667
| 0
| 0
| 0
| 0
| 0
| 0.3125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
e512933c921eeae4ae1e67cc732283d409bf706e
| 100
|
py
|
Python
|
db_adapter/apps.py
|
weynelucas/django-db-editor
|
92893cf471ac4d0988b47e10b82c43907006fe2a
|
[
"MIT"
] | 6
|
2018-04-18T14:55:00.000Z
|
2021-04-27T22:31:47.000Z
|
db_adapter/apps.py
|
weynelucas/django-db-editor
|
92893cf471ac4d0988b47e10b82c43907006fe2a
|
[
"MIT"
] | 2
|
2021-02-16T20:52:24.000Z
|
2021-02-18T14:53:26.000Z
|
db_adapter/apps.py
|
weynelucas/django-db-editor
|
92893cf471ac4d0988b47e10b82c43907006fe2a
|
[
"MIT"
] | 2
|
2019-01-22T17:34:25.000Z
|
2019-08-25T12:37:23.000Z
|
from django.apps import AppConfig
class DatabaseAdapterConfig(AppConfig):
    """Django app configuration for the db_adapter application."""
    # App label used by Django's app registry and migrations.
    name = 'db_adapter'
| 16.666667
| 39
| 0.78
| 11
| 100
| 7
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 100
| 5
| 40
| 20
| 0.905882
| 0
| 0
| 0
| 0
| 0
| 0.1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
e52bc635762b36d8f3c7c89d30a236bb115e6d28
| 61
|
py
|
Python
|
web/views/auth/__init__.py
|
qingqingcaoyuanlin/atxserver2
|
f3aefcf501f4cbb67441e58e7e223d70e74971b1
|
[
"MIT"
] | 4
|
2019-12-10T14:41:45.000Z
|
2019-12-29T03:23:24.000Z
|
web/views/auth/__init__.py
|
qingqingcaoyuanlin/atxserver2
|
f3aefcf501f4cbb67441e58e7e223d70e74971b1
|
[
"MIT"
] | null | null | null |
web/views/auth/__init__.py
|
qingqingcaoyuanlin/atxserver2
|
f3aefcf501f4cbb67441e58e7e223d70e74971b1
|
[
"MIT"
] | 1
|
2021-09-03T08:26:59.000Z
|
2021-09-03T08:26:59.000Z
|
# coding: utf-8
#
from .openid import OpenIdMixin, AuthError
| 15.25
| 42
| 0.754098
| 8
| 61
| 5.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019231
| 0.147541
| 61
| 4
| 42
| 15.25
| 0.865385
| 0.213115
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
e552a9bb832d0fb5e0cdbc503fa4cfb27a8eb16b
| 513
|
py
|
Python
|
test.py
|
LRHammond/pv4dsrl
|
7053ea392f9ddbf3672a0ac348d61abe6b7264d9
|
[
"MIT"
] | null | null | null |
test.py
|
LRHammond/pv4dsrl
|
7053ea392f9ddbf3672a0ac348d61abe6b7264d9
|
[
"MIT"
] | 2
|
2020-03-24T16:31:16.000Z
|
2020-03-31T00:56:57.000Z
|
test.py
|
LRHammond/pv4dsrl
|
7053ea392f9ddbf3672a0ac348d61abe6b7264d9
|
[
"MIT"
] | null | null | null |
# TEST
# Script for running tests of other functions
import lern
import veri
import main
# main.run(name="one", mode="vgdl", numEpisodes=2, numSteps=50, numSamples=50, discount=0.95, horizon=5, manual_episodes=1)
# main.run(name="two", mode="vgdl", numEpisodes=1, numSteps=250, numSamples=50, discount=0.95, horizon=5, manual_episodes=0)
# Run the "us_c_test" experiment via main.run (VGDL mode, safe=True,
# deterministic, 5 episodes x 25 steps). Parameter semantics are defined by
# main.run — presumably a train/evaluate loop; confirm against main module.
main.run(name="us_c_test", mode="vgdl", safe=True, numEpisodes=5, numSteps=25, numSamples=50, discount=0.95, horizon=5, deterministic=True, manual_episodes=0, epsilon=0.25)
| 46.636364
| 172
| 0.752437
| 83
| 513
| 4.590361
| 0.46988
| 0.055118
| 0.086614
| 0.165354
| 0.317585
| 0.317585
| 0.317585
| 0.23622
| 0.23622
| 0
| 0
| 0.072961
| 0.091618
| 513
| 10
| 173
| 51.3
| 0.744635
| 0.57115
| 0
| 0
| 0
| 0
| 0.060465
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
e55f13dc7177e0ccd39ddbd7acfc8ae237814ab0
| 26
|
py
|
Python
|
homeassistant/components/ping/__init__.py
|
domwillcode/home-assistant
|
f170c80bea70c939c098b5c88320a1c789858958
|
[
"Apache-2.0"
] | 23
|
2017-11-15T21:03:53.000Z
|
2021-03-29T21:33:48.000Z
|
homeassistant/components/ping/__init__.py
|
domwillcode/home-assistant
|
f170c80bea70c939c098b5c88320a1c789858958
|
[
"Apache-2.0"
] | 47
|
2020-07-23T07:13:11.000Z
|
2022-03-31T06:01:46.000Z
|
homeassistant/components/ping/__init__.py
|
klauern/home-assistant-core
|
c18ba6aec0627e6afb6442c678edb5ff2bb17db6
|
[
"Apache-2.0"
] | 10
|
2018-01-01T00:12:51.000Z
|
2021-12-21T23:08:05.000Z
|
"""The ping component."""
| 13
| 25
| 0.615385
| 3
| 26
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 26
| 1
| 26
| 26
| 0.695652
| 0.730769
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
e5648e4114e86d6ce849eaaf4d090c50acfa355e
| 175
|
py
|
Python
|
problem0553.py
|
kmarcini/Project-Euler-Python
|
d644e8e1ec4fac70a9ab407ad5e1f0a75547c8d3
|
[
"BSD-3-Clause"
] | null | null | null |
problem0553.py
|
kmarcini/Project-Euler-Python
|
d644e8e1ec4fac70a9ab407ad5e1f0a75547c8d3
|
[
"BSD-3-Clause"
] | null | null | null |
problem0553.py
|
kmarcini/Project-Euler-Python
|
d644e8e1ec4fac70a9ab407ad5e1f0a75547c8d3
|
[
"BSD-3-Clause"
] | null | null | null |
###########################
#
# #553 Power sets of power sets - Project Euler
# https://projecteuler.net/problem=553
#
# Code by Kevin Marciniak
#
###########################
| 19.444444
| 47
| 0.485714
| 17
| 175
| 5
| 0.823529
| 0.211765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.039216
| 0.125714
| 175
| 8
| 48
| 21.875
| 0.51634
| 0.6
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
e57861b5e22bf7c73710ae2a9b27d80858c1d34a
| 90
|
py
|
Python
|
sparklingpandas/test/__init__.py
|
MLnick/sparklingpandas
|
4a8daefd6bd6c51741b0b849d41d9189b7787023
|
[
"Apache-2.0"
] | 2
|
2015-05-17T11:21:10.000Z
|
2017-01-09T08:57:12.000Z
|
sparklingpandas/test/__init__.py
|
MLnick/sparklingpandas
|
4a8daefd6bd6c51741b0b849d41d9189b7787023
|
[
"Apache-2.0"
] | null | null | null |
sparklingpandas/test/__init__.py
|
MLnick/sparklingpandas
|
4a8daefd6bd6c51741b0b849d41d9189b7787023
|
[
"Apache-2.0"
] | null | null | null |
"""
Tests for the PandaSpark module; you probably don't want to import this
directly.
"""
| 18
| 71
| 0.733333
| 14
| 90
| 4.714286
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 90
| 4
| 72
| 22.5
| 0.88
| 0.9
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
e59498afb5a86d822863138dc496fa7e78e493a4
| 158
|
py
|
Python
|
python/lib/Lib/site-packages/django/contrib/admindocs/tests/fields.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 790
|
2015-01-03T02:13:39.000Z
|
2020-05-10T19:53:57.000Z
|
django/contrib/admindocs/tests/fields.py
|
mradziej/django
|
5d38965743a369981c9a738a298f467f854a2919
|
[
"BSD-3-Clause"
] | 1,361
|
2015-01-08T23:09:40.000Z
|
2020-04-14T00:03:04.000Z
|
django/contrib/admindocs/tests/fields.py
|
mradziej/django
|
5d38965743a369981c9a738a298f467f854a2919
|
[
"BSD-3-Clause"
] | 155
|
2015-01-08T22:59:31.000Z
|
2020-04-08T08:01:53.000Z
|
from django.db import models
class CustomField(models.Field):
    """Test model field that carries an explicit admindocs description."""
    description = "A custom field type"
class DescriptionLackingField(models.Field):
    # Deliberately defines no `description` attribute, to exercise the
    # admindocs fallback path for fields without one.
    pass
| 19.75
| 44
| 0.765823
| 19
| 158
| 6.368421
| 0.736842
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.158228
| 158
| 7
| 45
| 22.571429
| 0.909774
| 0
| 0
| 0
| 0
| 0
| 0.120253
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.2
| 0.2
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
e59792d62a407cd1ceff533d64444f1fe93ec4ac
| 282
|
py
|
Python
|
Connector.py
|
kithsirij/NLP-based-Syllabus-Coverage-Exam-paper-checker-Tool
|
b7b38a7b7c6d0a2ad5264df32acd75cdef552bd0
|
[
"MIT"
] | 1
|
2019-07-17T09:08:41.000Z
|
2019-07-17T09:08:41.000Z
|
Connector.py
|
kithsirij/NLP-based-Syllabus-Coverage-Exam-paper-checker-Tool
|
b7b38a7b7c6d0a2ad5264df32acd75cdef552bd0
|
[
"MIT"
] | null | null | null |
Connector.py
|
kithsirij/NLP-based-Syllabus-Coverage-Exam-paper-checker-Tool
|
b7b38a7b7c6d0a2ad5264df32acd75cdef552bd0
|
[
"MIT"
] | null | null | null |
import MySQLdb
class DBConnection:
    """Thin wrapper that opens a MySQL connection on construction."""
    # Class-level placeholder; shadowed by the instance attribute set in
    # __init__. NOTE(review): an empty string is a misleading sentinel —
    # consider None, but leaving as-is since DBConnection.db is public.
    db = ""
    # Open database connection (hard-coded local dev credentials:
    # root with empty password against the 'new_pyproject' schema).
    def __init__(self):
        self.db = MySQLdb.connect('localhost', 'root', '', 'new_pyproject')
    # Return DB Connection
    def getConnection(self):
        return self.db
| 15.666667
| 76
| 0.585106
| 28
| 282
| 5.714286
| 0.642857
| 0.1625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.312057
| 282
| 17
| 77
| 16.588235
| 0.824742
| 0.159574
| 0
| 0
| 0
| 0
| 0.12037
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0.142857
| 0.857143
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
e5f3c32525bb8e784e7d769175cb8c5e4190d96d
| 49
|
py
|
Python
|
pdf_to_json/__init__.py
|
antoinecarme/pdf_to_json
|
b04b264738de3207248eaa17897fc276d19ad663
|
[
"BSD-3-Clause"
] | null | null | null |
pdf_to_json/__init__.py
|
antoinecarme/pdf_to_json
|
b04b264738de3207248eaa17897fc276d19ad663
|
[
"BSD-3-Clause"
] | null | null | null |
pdf_to_json/__init__.py
|
antoinecarme/pdf_to_json
|
b04b264738de3207248eaa17897fc276d19ad663
|
[
"BSD-3-Clause"
] | null | null | null |
from . import pdf_to_json
__version__ = '0.1'
| 8.166667
| 25
| 0.693878
| 8
| 49
| 3.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.051282
| 0.204082
| 49
| 5
| 26
| 9.8
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0.06383
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
e5fe551e4a16a55180c24cd7d58a38d89bb68ac8
| 65
|
py
|
Python
|
scraper/errors.py
|
earaujoassis/apps-scraper
|
921e96423ce4f74cdb51e6f8854963a8dc36c093
|
[
"MIT"
] | null | null | null |
scraper/errors.py
|
earaujoassis/apps-scraper
|
921e96423ce4f74cdb51e6f8854963a8dc36c093
|
[
"MIT"
] | null | null | null |
scraper/errors.py
|
earaujoassis/apps-scraper
|
921e96423ce4f74cdb51e6f8854963a8dc36c093
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
class ScraperError(Exception):
    """Base exception raised for scraper failures."""
| 13
| 30
| 0.615385
| 7
| 65
| 5.714286
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019231
| 0.2
| 65
| 4
| 31
| 16.25
| 0.75
| 0.323077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
f91fd87304c5f04c2a7fa4de6baaa4632865260f
| 60
|
py
|
Python
|
python/loki/environment.py
|
agu3rra/loki
|
0c6e30516f087113340d3f396c13650ca0bd095b
|
[
"MIT"
] | null | null | null |
python/loki/environment.py
|
agu3rra/loki
|
0c6e30516f087113340d3f396c13650ca0bd095b
|
[
"MIT"
] | 7
|
2020-05-09T10:48:07.000Z
|
2020-05-30T14:00:00.000Z
|
python/loki/environment.py
|
agu3rra/goss
|
0c6e30516f087113340d3f396c13650ca0bd095b
|
[
"MIT"
] | null | null | null |
import os
# GitHub personal access token; None when the variable is unset
# (os.environ.get already defaults to None, so no explicit default needed).
GITHUB_PAT = os.environ.get('GITHUB_PAT')
| 12
| 47
| 0.733333
| 10
| 60
| 4.2
| 0.7
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 60
| 4
| 48
| 15
| 0.807692
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
f9226230e3a207800bf375f8d56f825ff54389ab
| 2,721
|
py
|
Python
|
apps/taxonomy/admin.py
|
iamjdcollins/districtwebsite
|
89e2aea47ca3d221665bc23586a4374421be5800
|
[
"MIT"
] | null | null | null |
apps/taxonomy/admin.py
|
iamjdcollins/districtwebsite
|
89e2aea47ca3d221665bc23586a4374421be5800
|
[
"MIT"
] | null | null | null |
apps/taxonomy/admin.py
|
iamjdcollins/districtwebsite
|
89e2aea47ca3d221665bc23586a4374421be5800
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from guardian.admin import GuardedModelAdmin
from mptt.admin import MPTTModelAdmin
from apps.pages.admin import PrecinctMapInline
import apps.common.functions as commonfunctions
from .models import Location, City, State, Zipcode, Language, TranslationType, SchoolType, OpenEnrollmentStatus, BoardPrecinct, BoardMeetingType, BoardPolicySection, DistrictCalendarEventCategory, DistrictLogoGroup, DistrictLogoStyleVariation, SchoolOption
# Plain tree-aware admins with object-level permissions; no customization.
class LocationAdmin(MPTTModelAdmin, GuardedModelAdmin):
    """Admin for Location."""

class CityAdmin(MPTTModelAdmin, GuardedModelAdmin):
    """Admin for City."""

class StateAdmin(MPTTModelAdmin, GuardedModelAdmin):
    """Admin for State."""

class ZipcodeAdmin(MPTTModelAdmin, GuardedModelAdmin):
    """Admin for Zipcode."""

class LanguageAdmin(MPTTModelAdmin, GuardedModelAdmin):
    """Admin for Language."""

class TranslationTypeAdmin(MPTTModelAdmin, GuardedModelAdmin):
    """Admin for TranslationType."""

class SchoolTypeAdmin(MPTTModelAdmin, GuardedModelAdmin):
    """Admin for SchoolType."""

class OpenEnrollmentStatusAdmin(MPTTModelAdmin, GuardedModelAdmin):
    """Admin for OpenEnrollmentStatus."""
class BoardPrecinctAdmin(MPTTModelAdmin, GuardedModelAdmin):
    """Admin for BoardPrecinct.

    Edits precinct maps inline and delegates permission checks and the
    save/response hooks to the shared helpers in apps.common.functions.
    """

    inlines = [PrecinctMapInline]

    # Permission hooks delegated to the project-wide helper functions.
    has_add_permission = commonfunctions.has_add_permission
    has_change_permission = commonfunctions.has_change_permission
    has_delete_permission = commonfunctions.has_delete_permission

    # Persistence and response hooks delegated likewise.
    save_model = commonfunctions.save_model
    save_formset = commonfunctions.save_formset
    response_change = commonfunctions.response_change
class BoardMeetingTypeAdmin(MPTTModelAdmin, GuardedModelAdmin):
    """Tree-aware admin with per-object permissions for BoardMeetingType."""


class BoardPolicySectionAdmin(MPTTModelAdmin, GuardedModelAdmin):
    """Tree-aware admin with per-object permissions for BoardPolicySection."""


class DistrictCalendarEventCategoryAdmin(MPTTModelAdmin, GuardedModelAdmin):
    """Tree-aware admin with per-object permissions for DistrictCalendarEventCategory."""


class DistrictLogoGroupAdmin(MPTTModelAdmin, GuardedModelAdmin):
    """Tree-aware admin with per-object permissions for DistrictLogoGroup."""


class DistrictLogoStyleVariationAdmin(MPTTModelAdmin, GuardedModelAdmin):
    """Tree-aware admin with per-object permissions for DistrictLogoStyleVariation."""


class SchoolOptionAdmin(MPTTModelAdmin, GuardedModelAdmin):
    """Tree-aware admin with per-object permissions for SchoolOption."""
# Register every taxonomy model with its matching admin class.
for _model, _model_admin in (
    (Location, LocationAdmin),
    (City, CityAdmin),
    (State, StateAdmin),
    (Zipcode, ZipcodeAdmin),
    (Language, LanguageAdmin),
    (TranslationType, TranslationTypeAdmin),
    (SchoolType, SchoolTypeAdmin),
    (OpenEnrollmentStatus, OpenEnrollmentStatusAdmin),
    (BoardPrecinct, BoardPrecinctAdmin),
    (BoardMeetingType, BoardMeetingTypeAdmin),
    (BoardPolicySection, BoardPolicySectionAdmin),
    (DistrictCalendarEventCategory, DistrictCalendarEventCategoryAdmin),
    (DistrictLogoGroup, DistrictLogoGroupAdmin),
    (DistrictLogoStyleVariation, DistrictLogoStyleVariationAdmin),
    (SchoolOption, SchoolOptionAdmin),
):
    admin.site.register(_model, _model_admin)
| 36.28
| 256
| 0.860345
| 232
| 2,721
| 10.012931
| 0.262931
| 0.200172
| 0.109772
| 0.223848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076075
| 2,721
| 74
| 257
| 36.77027
| 0.924025
| 0
| 0
| 0.245614
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.245614
| 0.105263
| 0
| 0.491228
| 0
| 0
| 0
| 1
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
0065973e8bbfb235aba6736953cf7870a116c556
| 9,648
|
py
|
Python
|
equations.py
|
The0nix/PerceptionProject
|
86b69ba0b7473417fd5bab7d5ef0b608964d7abe
|
[
"MIT"
] | null | null | null |
equations.py
|
The0nix/PerceptionProject
|
86b69ba0b7473417fd5bab7d5ef0b608964d7abe
|
[
"MIT"
] | null | null | null |
equations.py
|
The0nix/PerceptionProject
|
86b69ba0b7473417fd5bab7d5ef0b608964d7abe
|
[
"MIT"
] | null | null | null |
# import numpy as np
import jax.numpy as np
from jax import jacfwd
from jax.ops import index_update
def C_b_v(angles):
    """Build the transition (direction-cosine) matrix from b-frame to v-frame.

    :param angles: Euler angles (phi, theta, psi), np.ndarray, shape: (3,1)
    :return: transition matrix from b-frame to v-frame, np.ndarray, shape: (3,3)

    Note: the original implementation filled a zero matrix entry-by-entry via
    ``jax.ops.index_update``, which was removed from JAX; the matrix is now
    constructed directly, producing the same values.
    """
    phi, theta, psi = angles.flatten()
    # Hoist the six trig terms so each is evaluated once.
    cphi, sphi = np.cos(phi), np.sin(phi)
    cth, sth = np.cos(theta), np.sin(theta)
    cpsi, spsi = np.cos(psi), np.sin(psi)
    return np.array([
        [cpsi * cth, cpsi * sth * sphi - spsi * cphi, cpsi * sth * cphi + spsi * sphi],
        [spsi * cth, spsi * sth * sphi + cpsi * cphi, spsi * sth * cphi - cpsi * sphi],
        [-sth,       cth * sphi,                      cth * cphi],
    ])
def f_euler_update(x, u, w, delta_t):
    """Compute the Euler-angle increments over one time step.

    :param x: state vector, np.ndarray, shape: (15,1); only x[:3] (angles) is read
    :param u: measurements vector, np.ndarray, shape: (6,1); only u[:3] (body rates) is read
    :param w: noise vector, np.ndarray, shape: (6,1) -- accepted for signature
        compatibility but unused in this function
    :param delta_t: time step, scalar
    :return: deltas of Euler angles, np.ndarray, shape: (3,1)

    Note: rewritten without ``jax.ops.index_update`` (removed from JAX);
    the rate vector is built directly instead.
    """
    omega_x, omega_y, omega_z = u.flatten()[:3]
    phi, theta, psi = x.flatten()[:3]
    # Standard body-rate -> Euler-rate kinematics.
    rates = np.array([
        (omega_y * np.sin(phi) + omega_z * np.cos(phi)) * np.tan(theta) + omega_x,
        omega_y * np.cos(phi) - omega_z * np.sin(phi),
        (omega_y * np.sin(phi) + omega_z * np.cos(phi)) / np.cos(theta),
    ])
    return rates.reshape(-1, 1) * delta_t
def omega_unbiased(omega, bias, noise):
    """Remove the estimated bias and noise from a gyroscope measurement."""
    corrected = omega - bias.ravel()
    return corrected - noise.ravel()


def acc_unbiased(acc, bias, noise):
    """Remove the estimated bias and noise from an accelerometer measurement."""
    corrected = acc.ravel() - bias.ravel()
    return corrected - noise.ravel()
def f(x, u, w, delta_t, g_v=None):
    """State-transition function: propagate the state one time step.

    :param x: state vector, np.ndarray, shape: (15,1)
        layout: [angles(3), position(3), velocity(3), gyro bias(3), acc bias(3)]
    :param u: measurements vector, np.ndarray, shape: (6,1)
        layout: [gyroscope(3), accelerometer(3)]
    :param w: noise vector, np.ndarray, shape: (6,1)
    :param delta_t: time step, scalar
    :param g_v: acceleration of gravity, np.ndarray: shape: (3,)
    :return: state vector at the next time step, np.ndarray, shape: (15,1)

    Note: rewritten with the ``.at[...].set(...)`` API because
    ``jax.ops.index_update`` was removed from JAX.
    """
    if g_v is None:
        g_v = np.zeros(3)
    angles = x.flatten()[:3]
    pose_coordinates = x.flatten()[3:6]   # x, y, z
    velocity = x.flatten()[6:9]           # v_x, v_y, v_z
    bias_omega = x.flatten()[9:12]        # bias in gyroscope
    bias_acc = x.flatten()[12:]           # bias in accelerometer
    omega_imu = u.flatten()[:3]           # measurements from gyroscope
    acc_imu = u.flatten()[3:]             # measurements from accelerometer
    noise_omega = w.flatten()[:3]         # omega noise
    noise_acc = w.flatten()[3:]           # acceleration noise
    u_unbiased = np.hstack((
        omega_unbiased(omega=omega_imu, bias=bias_omega, noise=noise_omega),
        acc_unbiased(acc=acc_imu, bias=bias_acc, noise=noise_acc),
    ))
    trans_matrix = C_b_v(angles)
    # Specific force in the v-frame plus gravity; hoisted because it feeds
    # both the position and the velocity updates.
    accel_v = trans_matrix @ u_unbiased[3:] + g_v
    result = np.zeros(15)
    result = result.at[0:3].set(
        angles + f_euler_update(x=x, u=u_unbiased, w=w, delta_t=delta_t).flatten())
    result = result.at[3:6].set(
        pose_coordinates + velocity * delta_t + 0.5 * delta_t ** 2 * accel_v)
    result = result.at[6:9].set(velocity + delta_t * accel_v)
    # Biases follow a random-walk-free model here: carried over unchanged.
    result = result.at[9:12].set(bias_omega)
    result = result.at[12:15].set(bias_acc)
    return result.reshape(-1, 1)
def jac_f_euler_angles(x, u, delta_t):
    """Jacobian of f_euler_update with respect to the Euler angles.

    :param x: state vector, np.ndarray, shape: (15,1)
    :param u: measurements vector, np.ndarray, shape: (6,1)
    :param delta_t: time step, scalar
    :return: derivative of f_euler_update by (phi, theta, psi),
        np.ndarray, shape: (3, 3)

    Note: rewritten with the ``.at[...].set(...)`` API because
    ``jax.ops.index_update`` was removed from JAX.
    """
    phi, theta, psi = x.flatten()[:3]
    omega_x, omega_y, omega_z = u.flatten()[:3]
    result = np.zeros((3, 3))
    # first row (d/dphi, d/dtheta)
    result = result.at[0, 0].set(
        (omega_y * np.cos(phi) - omega_z * np.sin(phi)) * np.tan(theta))
    result = result.at[0, 1].set(
        (omega_y * np.sin(phi) + omega_z * np.cos(phi)) / np.cos(theta) ** 2)
    # second row (d/dphi)
    result = result.at[1, 0].set(-omega_y * np.sin(phi) - omega_z * np.cos(phi))
    # third row (d/dphi, d/dtheta)
    result = result.at[2, 0].set(
        (omega_y * np.cos(phi) - omega_z * np.sin(phi)) / np.cos(theta))
    result = result.at[2, 1].set(
        (omega_y * np.sin(phi) + omega_z * np.cos(phi)) * np.sin(theta) / np.cos(theta) ** 2)
    return result * delta_t
def h(x, v):
    """Measurement model: the first six state components plus measurement noise *v*."""
    return x[:6].ravel() + v.ravel()
def c_b_v_angles(angles, acc):
    """Rotate *acc* from the b-frame into the v-frame using the transition
    matrix built from *angles*."""
    return C_b_v(angles) @ acc
def jac_c_b_v_angles(angles, acc):  # uff...
    """Jacobian of ``C_b_v(angles) @ acc`` with respect to the Euler angles.

    Column j holds the derivative with respect to (phi, theta, psi)[j].

    :param angles: Euler angles, np.ndarray, shape: (3,1)
    :param acc: accelerations, np.ndarray, shape: (3, 1)
    :return: derivative of C_b_v @ acc by angles, np.ndarray, shape: (3, 3)

    Notes:
    - rewritten with ``.at[...].set(...)`` because ``jax.ops.index_update``
      was removed from JAX;
    - BUG FIX: entry (2,0) previously used cos(psi). The third row of C_b_v
      contains no psi, and d/dphi of (-a_x*sin(th) + a_y*cos(th)*sin(phi)
      + a_z*cos(th)*cos(phi)) is a_y*cos(th)*cos(phi) - a_z*cos(th)*sin(phi),
      so the factor must be cos(phi).
    """
    phi, theta, psi = angles.flatten()
    a_x, a_y, a_z = acc.flatten()
    cphi, sphi = np.cos(phi), np.sin(phi)
    cth, sth = np.cos(theta), np.sin(theta)
    cpsi, spsi = np.cos(psi), np.sin(psi)
    result = np.zeros((3, 3))
    # first row
    result = result.at[0, 0].set(
        a_y * (cpsi * sth * cphi + spsi * sphi)
        + a_z * (-cpsi * sth * sphi + spsi * cphi))
    result = result.at[0, 1].set(
        a_x * (-cpsi * sth) + a_y * (cpsi * cth * sphi) + a_z * (cpsi * cth * cphi))
    result = result.at[0, 2].set(
        a_x * (-spsi * cth)
        + a_y * (-spsi * sth * sphi - cpsi * cphi)
        + a_z * (-spsi * sth * cphi + cpsi * sphi))
    # second row
    result = result.at[1, 0].set(
        a_y * (spsi * sth * cphi - cpsi * sphi)
        + a_z * (-spsi * sth * sphi - cpsi * cphi))
    result = result.at[1, 1].set(
        a_x * (-spsi * sth) + a_y * (spsi * cth * sphi) + a_z * (spsi * cth * cphi))
    result = result.at[1, 2].set(
        a_x * (cpsi * cth)
        + a_y * (cpsi * sth * sphi - spsi * cphi)
        + a_z * (cpsi * sth * cphi + spsi * sphi))
    # third row (fixed: cphi, not cpsi; (2,2) stays zero -- row 3 has no psi)
    result = result.at[2, 0].set(a_y * (cth * cphi) + a_z * (-cth * sphi))
    result = result.at[2, 1].set(
        a_x * (-cth) + a_y * (-sth * sphi) + a_z * (-sth * cphi))
    return result
def jac_f_x(x, u, w, delta_t):
    """Jacobian of the transition function f with respect to the state.

    :param x: state vector, np.ndarray, shape: (15,1)
    :param u: measurements vector, np.ndarray, shape: (6,1)
    :param w: noise vector, np.ndarray, shape: (6,1)
    :param delta_t: time step, scalar
    :return: jacobian of transition function with respect to state,
        np.ndarray, shape: (15, 15)

    Note: rewritten with ``.at[...].set(...)`` because ``jax.ops.index_update``
    was removed from JAX; the shared acceleration Jacobian is now computed once.
    """
    angles = x.flatten()[:3]
    bias_omega = x.flatten()[9:12]  # bias in gyroscope
    bias_acc = x.flatten()[12:]     # bias in accelerometer
    omega_imu = u.flatten()[:3]     # measurements from gyroscope
    acc_imu = u.flatten()[3:]       # measurements from accelerometer
    noise_omega = w.flatten()[:3]   # omega noise
    noise_acc = w.flatten()[3:]     # acceleration noise
    u_unbiased = np.hstack((
        omega_unbiased(omega=omega_imu, bias=bias_omega, noise=noise_omega),
        acc_unbiased(acc=acc_imu, bias=bias_acc, noise=noise_acc),
    ))
    # d(C_b_v @ acc)/d(angles): used by both the position and velocity rows.
    acc_jac = jac_c_b_v_angles(angles=angles, acc=u_unbiased.flatten()[3:])
    result = np.zeros((15, 15))
    result = result.at[0:3, 0:3].set(jac_f_euler_angles(x=x, u=u_unbiased, delta_t=delta_t))
    result = result.at[3:6, 0:3].set(0.5 * delta_t ** 2 * acc_jac)
    result = result.at[3:6, 3:6].set(np.identity(3))
    result = result.at[3:6, 6:9].set(delta_t * np.identity(3))
    result = result.at[6:9, 0:3].set(delta_t * acc_jac)
    result = result.at[6:9, 6:9].set(np.identity(3))
    result = result.at[9:12, 9:12].set(np.identity(3))
    result = result.at[12:15, 12:15].set(np.identity(3))
    return result
# Jacobian factories: each forward-mode autodiff transform is built once at
# import time; the wrappers reshape its output into a 2-D matrix per call.
_F_fwd = jacfwd(f, argnums=0)


def make_F(x, u, w, delta_t):
    """Jacobian of f w.r.t. the state x, shape (x_dim, x_dim)."""
    return _F_fwd(x, u, w, delta_t).reshape(x.shape[0], x.shape[0])


_W_fwd = jacfwd(f, argnums=1)


def make_W(x, u, w, delta_t):
    """Jacobian of f w.r.t. the measurement u, reshaped to (x_dim, w_dim)."""
    return _W_fwd(x, u, w, delta_t).reshape(x.shape[0], w.shape[0])


_H_fwd = jacfwd(h, argnums=0)


def make_H(x, v):
    """Jacobian of h w.r.t. the state x, shape (6, x_dim)."""
    return _H_fwd(x, v).reshape(6, x.shape[0])


_V_fwd = jacfwd(h, argnums=1)


def make_V(x, v):
    """Jacobian of h w.r.t. the measurement noise v, shape (6, 6)."""
    return _V_fwd(x, v).reshape(6, 6)
| 40.033195
| 142
| 0.600435
| 1,596
| 9,648
| 3.490602
| 0.065163
| 0.05385
| 0.119009
| 0.161012
| 0.810806
| 0.749237
| 0.720337
| 0.646204
| 0.584814
| 0.522527
| 0
| 0.030733
| 0.217558
| 9,648
| 240
| 143
| 40.2
| 0.707246
| 0.189262
| 0
| 0.246032
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.02381
| 0.047619
| 0.246032
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
006b715cfa05d58a5bdcaf74dfb0a2b2b82f27b5
| 5,633
|
py
|
Python
|
tests/Object/test_rotatingObjects.py
|
nd9600/arc
|
f6573898a37fa9395e673b80c25102fcc20e09a1
|
[
"MIT"
] | null | null | null |
tests/Object/test_rotatingObjects.py
|
nd9600/arc
|
f6573898a37fa9395e673b80c25102fcc20e09a1
|
[
"MIT"
] | null | null | null |
tests/Object/test_rotatingObjects.py
|
nd9600/arc
|
f6573898a37fa9395e673b80c25102fcc20e09a1
|
[
"MIT"
] | null | null | null |
import unittest
import src.Runtime.GeometryRuntime as GeometryRuntime
import src.UsefulFunctions as UsefulFunctions
from src.FrameModel.Object import Object
class TestRotatingObjects(unittest.TestCase):
    """Rotation behaviour of Objects under the GeometryRuntime helpers."""

    @staticmethod
    def _l_shape(top_left_offset):
        """Build a fresh colour-1 L-shaped Object at the given offset.

        A new positions list is created on every call so tests cannot share
        mutable state.
        """
        return Object(1, top_left_offset, [(0, 0), (1, 0), (2, 0), (2, 1)])

    def test_rotating_line_90_degrees(self):
        line = Object(1, (1, 0), [(0, 0), (1, 0), (2, 0)])
        rotated_obj: Object = GeometryRuntime.relatively_rotate_object_90(line)
        self.assertEqual(
            [(0, 0), (0, 1), (0, 2)],
            rotated_obj.relative_positions
        )

    def test_relatively_rotating_l_shape_270_degrees(self):
        rotated_obj: Object = UsefulFunctions.compose([
            GeometryRuntime.relatively_rotate_object_270,
        ])(self._l_shape((1, 0)))
        self.assertEqual((1, 0), rotated_obj.top_left_offset)
        self.assertEqual(
            [(0, 2), (0, 1), (0, 0), (1, 0)],
            rotated_obj.relative_positions
        )

    def test_absolutely_rotating_l_shape_270_degrees_when_it_would_go_off_top_of_grid(self):
        rotated_obj: Object = GeometryRuntime.absolutely_rotate_object_270(
            (1, 0), self._l_shape((1, 0))
        )
        self.assertEqual((1, 0), rotated_obj.top_left_offset)
        self.assertEqual(
            [(0, 2), (0, 1), (0, 0), (1, 0)],
            rotated_obj.relative_positions
        )

    def test_absolutely_rotating_l_shape_270_degrees_when_it_wouldnt_go_off_top_of_grid(self):
        rotated_obj: Object = GeometryRuntime.absolutely_rotate_object_270(
            (5, 5), self._l_shape((5, 5))
        )
        self.assertEqual((5, 3), rotated_obj.top_left_offset)
        self.assertEqual(
            [(0, 2), (0, 1), (0, 0), (1, 0)],
            rotated_obj.relative_positions
        )

    def test_relatively_rotating_l_shape_270_degrees_when_it_wouldnt_go_off_top_of_grid(self):
        rotated_obj: Object = GeometryRuntime.relatively_rotate_object_270(
            self._l_shape((5, 5))
        )
        self.assertEqual((5, 3), rotated_obj.top_left_offset)
        self.assertEqual(
            [(0, 2), (0, 1), (0, 0), (1, 0)],
            rotated_obj.relative_positions
        )

    def test_rotating_l_shape_90_degrees_when_it_wouldnt_go_off_grid(self):
        rotated_obj: Object = UsefulFunctions.compose(
            [GeometryRuntime.relatively_rotate_object_90]
        )(self._l_shape((9, 3)))
        # the top left offset had to be shifted to accommodate the L being 2 squares wide
        self.assertEqual((8, 3), rotated_obj.top_left_offset)
        self.assertEqual(
            [(1, 0), (1, 1), (1, 2), (0, 2)],
            rotated_obj.relative_positions
        )

    def test_rotating_l_shape_360_degrees_doesnt_move_it_at_all(self):
        obj = self._l_shape((1, 0))
        rotated_obj: Object = GeometryRuntime.relatively_rotate_object(360, obj)
        self.assertEqual(obj.top_left_offset, rotated_obj.top_left_offset)
        self.assertEqual(obj.relative_positions, rotated_obj.relative_positions)

    def test_rotating_l_shape_90_degrees_4_times_doesnt_change_its_relative_positions(self):
        """
        rotating an object can make it go off the grid; to prevent that, we snap it back onto the grid
        :return:
        """
        obj = self._l_shape((1, 0))
        rotated_obj = UsefulFunctions.compose(
            [GeometryRuntime.relatively_rotate_object_90] * 4
        )(obj)
        self.assertEqual(obj.relative_positions, rotated_obj.relative_positions)
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
    unittest.main()
| 25.721461
| 105
| 0.43831
| 528
| 5,633
| 4.352273
| 0.153409
| 0.022628
| 0.022193
| 0.022628
| 0.781114
| 0.773281
| 0.762402
| 0.734987
| 0.687554
| 0.618364
| 0
| 0.065706
| 0.470442
| 5,633
| 218
| 106
| 25.83945
| 0.70466
| 0.032665
| 0
| 0.602041
| 0
| 0
| 0.001477
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 1
| 0.040816
| false
| 0
| 0.020408
| 0
| 0.066327
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
006f6aa9aa5d552154797cfab32e0b36af76ed1c
| 185
|
py
|
Python
|
src/sentry/eventstore/snuba/backend.py
|
lauryndbrown/sentry
|
c5304e303966566386f5e61df1b72624a30803b4
|
[
"BSD-3-Clause"
] | 1
|
2020-02-27T02:46:25.000Z
|
2020-02-27T02:46:25.000Z
|
src/sentry/eventstore/snuba/backend.py
|
lauryndbrown/sentry
|
c5304e303966566386f5e61df1b72624a30803b4
|
[
"BSD-3-Clause"
] | null | null | null |
src/sentry/eventstore/snuba/backend.py
|
lauryndbrown/sentry
|
c5304e303966566386f5e61df1b72624a30803b4
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import absolute_import
from sentry.eventstore.base import EventStorage
class SnubaEventStorage(EventStorage):
    """
    Eventstore backend backed by Snuba.

    NOTE(review): the class body is empty here -- any concrete behaviour is
    inherited from EventStorage or defined elsewhere; confirm against the
    base class before relying on specific methods.
    """
| 16.818182
| 47
| 0.762162
| 19
| 185
| 7.157895
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.178378
| 185
| 10
| 48
| 18.5
| 0.894737
| 0.183784
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
00862602d196f5756d79418d7f00555e0b6c4b02
| 37
|
py
|
Python
|
tests/unit/sim_proxy/sim_proxy_test.py
|
rkm/bluebird
|
2325ebb151724d4444c092c095a040d7365dda79
|
[
"MIT"
] | 8
|
2019-01-29T15:19:39.000Z
|
2020-07-16T03:55:36.000Z
|
tests/unit/sim_proxy/sim_proxy_test.py
|
rkm/bluebird
|
2325ebb151724d4444c092c095a040d7365dda79
|
[
"MIT"
] | 46
|
2019-02-08T14:23:11.000Z
|
2021-04-06T13:45:10.000Z
|
tests/unit/sim_proxy/sim_proxy_test.py
|
rkm/bluebird
|
2325ebb151724d4444c092c095a040d7365dda79
|
[
"MIT"
] | 3
|
2019-05-06T14:18:07.000Z
|
2021-06-17T10:39:59.000Z
|
"""
Tests for the SimProxy class
"""
| 9.25
| 28
| 0.648649
| 5
| 37
| 4.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189189
| 37
| 3
| 29
| 12.333333
| 0.8
| 0.756757
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
0094ebc62228c1949a2d2e31feb71f5ee12e3fa2
| 118
|
py
|
Python
|
ARC085/ARC085.py
|
VolgaKurvar/AtCoder
|
21acb489f1594bbb1cdc64fbf8421d876b5b476d
|
[
"Unlicense"
] | null | null | null |
ARC085/ARC085.py
|
VolgaKurvar/AtCoder
|
21acb489f1594bbb1cdc64fbf8421d876b5b476d
|
[
"Unlicense"
] | null | null | null |
ARC085/ARC085.py
|
VolgaKurvar/AtCoder
|
21acb489f1594bbb1cdc64fbf8421d876b5b476d
|
[
"Unlicense"
] | null | null | null |
# Read n, z, w and the sequence a from stdin, then print the larger of
# |a[n-1] - w| and |a[n-1] - a[n-2]|.
n, z, w = map(int, input().split())
a = list(map(int, input().split()))
last = a[n - 1]
answer = max(abs(last - w), abs(last - a[n - 2]))
print(answer)
| 29.5
| 45
| 0.542373
| 27
| 118
| 2.37037
| 0.518519
| 0.09375
| 0.34375
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028302
| 0.101695
| 118
| 3
| 46
| 39.333333
| 0.575472
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
00a26356724be93efd653f154c207b25d7f0234e
| 108
|
py
|
Python
|
Module 1/Chapter4/4_10_defining_an_exception.py
|
real-slim-chadi/Python_Master-the-Art-of-Design-Patterns
|
95ec92272374e330b04d931208abbb184c7c7908
|
[
"MIT"
] | 73
|
2016-09-15T23:07:04.000Z
|
2022-03-05T15:09:48.000Z
|
Module 1/Chapter4/4_10_defining_an_exception.py
|
real-slim-chadi/Python_Master-the-Art-of-Design-Patterns
|
95ec92272374e330b04d931208abbb184c7c7908
|
[
"MIT"
] | null | null | null |
Module 1/Chapter4/4_10_defining_an_exception.py
|
real-slim-chadi/Python_Master-the-Art-of-Design-Patterns
|
95ec92272374e330b04d931208abbb184c7c7908
|
[
"MIT"
] | 51
|
2016-10-07T20:47:51.000Z
|
2021-12-22T21:00:24.000Z
|
class InvalidWithdrawal(Exception):
    """Raised when an account lacks the funds for a requested withdrawal."""


# Demonstrates raising the custom exception (this line raises on import).
raise InvalidWithdrawal("You don't have $50 in your account")
| 21.6
| 61
| 0.768519
| 14
| 108
| 5.928571
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021739
| 0.148148
| 108
| 4
| 62
| 27
| 0.880435
| 0
| 0
| 0
| 0
| 0
| 0.314815
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
00ba7858f65ef7b0e56f27897b52da91dfe1f640
| 23
|
py
|
Python
|
p0/elftools/__init__.py
|
wrestle/CMU-15410
|
9a2d67acd95917a82fa4f2125c5b9568127a00ae
|
[
"MIT"
] | null | null | null |
p0/elftools/__init__.py
|
wrestle/CMU-15410
|
9a2d67acd95917a82fa4f2125c5b9568127a00ae
|
[
"MIT"
] | null | null | null |
p0/elftools/__init__.py
|
wrestle/CMU-15410
|
9a2d67acd95917a82fa4f2125c5b9568127a00ae
|
[
"MIT"
] | 1
|
2019-07-17T16:49:14.000Z
|
2019-07-17T16:49:14.000Z
|
# Package version string for the elftools package.
__version__ = '0.10'
| 5.75
| 20
| 0.608696
| 3
| 23
| 3.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0.217391
| 23
| 3
| 21
| 7.666667
| 0.388889
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
00d4d55e3a2948f88a5acab543d101a7b19eb876
| 80
|
py
|
Python
|
examples/p2/main.py
|
djpeach/pygamer
|
77a0cdab58bc29d06cc88c8cc823850794fe0bf0
|
[
"MIT"
] | null | null | null |
examples/p2/main.py
|
djpeach/pygamer
|
77a0cdab58bc29d06cc88c8cc823850794fe0bf0
|
[
"MIT"
] | null | null | null |
examples/p2/main.py
|
djpeach/pygamer
|
77a0cdab58bc29d06cc88c8cc823850794fe0bf0
|
[
"MIT"
] | null | null | null |
from Pong import Pong
if __name__ == "__main__":
    # NOTE(review): argument meanings depend on Pong's signature -- confirm.
    game = Pong((5, 5), 7, 20)
    game.run()
| 16
| 29
| 0.6
| 13
| 80
| 3.076923
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.079365
| 0.2125
| 80
| 4
| 30
| 20
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0.1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
00d528bccf5d71e31b9419e0d20e07e19e9f6325
| 158
|
py
|
Python
|
Curso em Vídeo/Mundo 3 Estruturas Compostas/Aulas/file033.py
|
henriqueumeda/-Estudo-python
|
28e93a377afa4732037a29eb74d4bc7c9e24b62f
|
[
"MIT"
] | null | null | null |
Curso em Vídeo/Mundo 3 Estruturas Compostas/Aulas/file033.py
|
henriqueumeda/-Estudo-python
|
28e93a377afa4732037a29eb74d4bc7c9e24b62f
|
[
"MIT"
] | null | null | null |
Curso em Vídeo/Mundo 3 Estruturas Compostas/Aulas/file033.py
|
henriqueumeda/-Estudo-python
|
28e93a377afa4732037a29eb74d4bc7c9e24b62f
|
[
"MIT"
] | null | null | null |
def print_sum(*values):
    """Print the sum of all positional arguments in a readable sentence."""
    total = sum(values, 0)
    print(f'The sum of {values} is {total}')
# Demonstrate the helper with two and three addends.
for demo_args in ((5, 2), (2, 9, 4)):
    print_sum(*demo_args)
| 15.8
| 40
| 0.56962
| 29
| 158
| 3
| 0.586207
| 0.275862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053097
| 0.28481
| 158
| 9
| 41
| 17.555556
| 0.716814
| 0
| 0
| 0
| 0
| 0
| 0.164557
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0
| 0
| 0.142857
| 0.571429
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
00e13cbb5491a30cc22bbb968747a7bd81e19f8d
| 310
|
py
|
Python
|
frontier/news/translation.py
|
mcmaxwell/frontier
|
d1f59154108566c652965a43c4b999de33c05c58
|
[
"MIT"
] | null | null | null |
frontier/news/translation.py
|
mcmaxwell/frontier
|
d1f59154108566c652965a43c4b999de33c05c58
|
[
"MIT"
] | null | null | null |
frontier/news/translation.py
|
mcmaxwell/frontier
|
d1f59154108566c652965a43c4b999de33c05c58
|
[
"MIT"
] | null | null | null |
from modeltranslation.translator import translator, TranslationOptions
from .models import News
from common.translation import CommonPostTranslationOptions
class NewsTranslationOptions(TranslationOptions):
    """Declares which News fields get per-language translation columns."""
    fields = ('title', 'text_preview', 'text')


# Hook News into django-modeltranslation with the options above.
translator.register(News, NewsTranslationOptions)
| 34.444444
| 71
| 0.822581
| 27
| 310
| 9.407407
| 0.62963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106452
| 310
| 9
| 72
| 34.444444
| 0.916968
| 0
| 0
| 0
| 0
| 0
| 0.069307
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
00f04d7bfa1967b4519329e824c4d4b976e4b796
| 1,166
|
py
|
Python
|
core/recc/database/interfaces/db_interface.py
|
bogonets/answer
|
57f892a9841980bcbc35fa1e27521b34cd94bc25
|
[
"MIT"
] | 3
|
2021-06-20T02:24:10.000Z
|
2022-01-26T23:55:33.000Z
|
core/recc/database/interfaces/db_interface.py
|
bogonets/answer
|
57f892a9841980bcbc35fa1e27521b34cd94bc25
|
[
"MIT"
] | null | null | null |
core/recc/database/interfaces/db_interface.py
|
bogonets/answer
|
57f892a9841980bcbc35fa1e27521b34cd94bc25
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from abc import ABCMeta
from recc.database.interfaces.db_daemon import DbDaemon
from recc.database.interfaces.db_group import DbGroup
from recc.database.interfaces.db_group_member import DbGroupMember
from recc.database.interfaces.db_info import DbInfo
from recc.database.interfaces.db_layout import DbLayout
from recc.database.interfaces.db_misc import DbMisc
from recc.database.interfaces.db_open import DbOpen
from recc.database.interfaces.db_permission import DbPermission
from recc.database.interfaces.db_port import DbPort
from recc.database.interfaces.db_project import DbProject
from recc.database.interfaces.db_project_member import DbProjectMember
from recc.database.interfaces.db_task import DbTask
from recc.database.interfaces.db_user import DbUser
from recc.database.interfaces.db_utils import DbUtils
from recc.database.interfaces.db_widget import DbWidget
class DbInterface(
    # Base order is preserved exactly -- it defines the MRO.
    DbDaemon, DbGroup, DbGroupMember, DbInfo, DbLayout,
    DbMisc, DbOpen, DbPermission, DbPort, DbProject,
    DbProjectMember, DbTask, DbUser, DbUtils, DbWidget,
    metaclass=ABCMeta,
):
    """Aggregate abstract database interface.

    Combines every per-area Db* mixin into a single contract; ABCMeta keeps
    the class abstract so only concrete backends are instantiated.
    """
| 29.15
| 70
| 0.799314
| 149
| 1,166
| 6.14094
| 0.295302
| 0.131148
| 0.262295
| 0.42623
| 0.485246
| 0.148634
| 0
| 0
| 0
| 0
| 0
| 0.000994
| 0.137221
| 1,166
| 39
| 71
| 29.897436
| 0.908549
| 0.01801
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.028571
| 0.457143
| 0
| 0.485714
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
da9ed86ad17680316066312f964ef138658005fa
| 159
|
py
|
Python
|
labgraph/loggers/__init__.py
|
Yunusbcr/labgraph
|
a00ae7098b7b0e0eda8ce2e7e62dae86854616fb
|
[
"MIT"
] | 124
|
2021-07-14T21:25:59.000Z
|
2022-03-08T20:40:16.000Z
|
labgraph/loggers/__init__.py
|
VanEdward/labgraph
|
9488feac59f9ef86091befdeaddb69d84e4d6fb3
|
[
"MIT"
] | 46
|
2021-07-16T18:41:11.000Z
|
2022-03-31T20:53:00.000Z
|
labgraph/loggers/__init__.py
|
VanEdward/labgraph
|
9488feac59f9ef86091befdeaddb69d84e4d6fb3
|
[
"MIT"
] | 22
|
2021-07-16T18:34:56.000Z
|
2022-03-31T15:12:06.000Z
|
#!/usr/bin/env python3
# Copyright 2004-present Facebook. All Rights Reserved.
from .logger import Logger, LoggerConfig

# Public API of the loggers package.
__all__ = ["Logger", "LoggerConfig"]
| 22.714286
| 55
| 0.754717
| 19
| 159
| 6.105263
| 0.789474
| 0.310345
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035971
| 0.125786
| 159
| 6
| 56
| 26.5
| 0.798561
| 0.471698
| 0
| 0
| 0
| 0
| 0.219512
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
dabff3814b1e0a8bfef90216863e20208efdbefa
| 332
|
py
|
Python
|
python/tests/conftest.py
|
bakdata/common-kafka-streams-demo
|
c893cf0cb13aa71ab9cb00812437038c2808bb00
|
[
"MIT"
] | 4
|
2021-02-21T05:49:45.000Z
|
2021-08-01T06:08:55.000Z
|
python/tests/conftest.py
|
bakdata/common-kafka-streams-demo
|
c893cf0cb13aa71ab9cb00812437038c2808bb00
|
[
"MIT"
] | null | null | null |
python/tests/conftest.py
|
bakdata/common-kafka-streams-demo
|
c893cf0cb13aa71ab9cb00812437038c2808bb00
|
[
"MIT"
] | 1
|
2021-06-11T20:25:39.000Z
|
2021-06-11T20:25:39.000Z
|
import pytest
from spacy_lemmatizer.models import Text, LemmaText
@pytest.fixture
def test_text():
    """Text fixture holding a fixed sample sentence."""
    sample = 'One morning, when Gregor Samsa woke from troubled dreams'
    return Text(content=sample)
@pytest.fixture()
def test_lemma_text():
    """LemmaText fixture with the expected lemmas for the sample sentence."""
    return LemmaText(['morning', 'gregor', 'samsa', 'wake', 'troubled', 'dream'])
| 20.75
| 83
| 0.71988
| 42
| 332
| 5.595238
| 0.595238
| 0.110638
| 0.13617
| 0.170213
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153614
| 332
| 15
| 84
| 22.133333
| 0.836299
| 0
| 0
| 0
| 0
| 0
| 0.274096
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.222222
| 0.111111
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
97009a4399198dc718bfe87718aac3e7295b2990
| 867
|
py
|
Python
|
job_snapshotting/snapshot.py
|
justinholmes/job_snapshotting
|
c66358fe93cf09f9e17e28cb587d5f96e2b7f249
|
[
"Apache-2.0"
] | null | null | null |
job_snapshotting/snapshot.py
|
justinholmes/job_snapshotting
|
c66358fe93cf09f9e17e28cb587d5f96e2b7f249
|
[
"Apache-2.0"
] | null | null | null |
job_snapshotting/snapshot.py
|
justinholmes/job_snapshotting
|
c66358fe93cf09f9e17e28cb587d5f96e2b7f249
|
[
"Apache-2.0"
] | null | null | null |
from job_snapshotting.google_cloud_storage import GoogleCloudStorage
from job_snapshotting.internal_model import SnapshotSchema
class Snapshot:
    """Wraps a snapshot record persisted in Google Cloud Storage and tracks
    which stage of a job run has been completed."""

    def __init__(self, name, job_run_timestamp, bucket_id):
        self._snapshot_schema = SnapshotSchema()
        self._storage = GoogleCloudStorage(bucket_id, self._snapshot_schema)
        self._snapshot_model = self._storage.load(name, job_run_timestamp)

    def get_schema(self):
        """Serialize the in-memory snapshot model via its schema."""
        return self._snapshot_schema.dump(self._snapshot_model)

    def complete_stage(self):
        """Advance to the next stage and persist the updated model."""
        self._snapshot_model.stage += 1
        self._storage.save(self._snapshot_model)

    def get_current_stage(self):
        """Re-load the snapshot from storage and report its stage."""
        model = self._snapshot_model
        return self._storage.load(model.name, model.job_run_timestamp).stage

    def finished_job(self):
        """Delete the stored snapshot once the job has finished."""
        self._storage.delete(self._snapshot_model)
| 37.695652
| 106
| 0.763552
| 109
| 867
| 5.623853
| 0.302752
| 0.215334
| 0.22186
| 0.06199
| 0.084829
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001368
| 0.156863
| 867
| 22
| 107
| 39.409091
| 0.837209
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3125
| false
| 0
| 0.125
| 0.125
| 0.625
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 4
|
9727220cb857dfab8cc4ebe234ead20e65916730
| 128
|
py
|
Python
|
cellxgene_gateway/flask_util.py
|
fidelram/cellxgene-gateway
|
11bbc1aba8509b02865c145be78b021b70e22fda
|
[
"Apache-2.0"
] | null | null | null |
cellxgene_gateway/flask_util.py
|
fidelram/cellxgene-gateway
|
11bbc1aba8509b02865c145be78b021b70e22fda
|
[
"Apache-2.0"
] | null | null | null |
cellxgene_gateway/flask_util.py
|
fidelram/cellxgene-gateway
|
11bbc1aba8509b02865c145be78b021b70e22fda
|
[
"Apache-2.0"
] | null | null | null |
from flask import request
def querystring():
    """Return the current request's query string with a leading '?', or '' when empty."""
    raw = request.query_string.decode()
    return '?' + raw if raw else ''
| 21.333333
| 42
| 0.65625
| 19
| 128
| 4.368421
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009804
| 0.203125
| 128
| 5
| 43
| 25.6
| 0.803922
| 0
| 0
| 0
| 0
| 0
| 0.039063
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
973dd9d9bbc2e1ec6f0c559673182023104f2c5c
| 158
|
py
|
Python
|
PyTMX-3.19.0/pytmx/constants.py
|
unmonk/Project-Infinity
|
63c22fcf4086adf868967cb172434e3ceba4bac5
|
[
"CC0-1.0"
] | null | null | null |
PyTMX-3.19.0/pytmx/constants.py
|
unmonk/Project-Infinity
|
63c22fcf4086adf868967cb172434e3ceba4bac5
|
[
"CC0-1.0"
] | null | null | null |
PyTMX-3.19.0/pytmx/constants.py
|
unmonk/Project-Infinity
|
63c22fcf4086adf868967cb172434e3ceba4bac5
|
[
"CC0-1.0"
] | null | null | null |
# Internal transform flags (bitmask used by pytmx itself).
TRANS_FLIPX = 0x1
TRANS_FLIPY = 0x2
TRANS_ROT = 0x4

# Tiled gid flags: the top three bits of a 32-bit gid encode the transform.
GID_TRANS_FLIPX = 0x80000000  # == 1 << 31
GID_TRANS_FLIPY = 0x40000000  # == 1 << 30
GID_TRANS_ROT = 0x20000000    # == 1 << 29
| 15.8
| 25
| 0.708861
| 29
| 158
| 3.551724
| 0.482759
| 0.23301
| 0.213592
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 0.202532
| 158
| 9
| 26
| 17.555556
| 0.722222
| 0.189873
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
9758a11e32e0635899d16ee5a175acb7eff5a58a
| 86
|
py
|
Python
|
__init__.py
|
arfon/Qlunc
|
2b65f52fc6367ee7c5e4484c60ac8325ec625f54
|
[
"BSD-3-Clause"
] | 7
|
2021-03-19T14:37:47.000Z
|
2022-03-03T16:20:53.000Z
|
__init__.py
|
arfon/Qlunc
|
2b65f52fc6367ee7c5e4484c60ac8325ec625f54
|
[
"BSD-3-Clause"
] | 31
|
2021-03-03T02:00:03.000Z
|
2022-01-10T19:40:37.000Z
|
__init__.py
|
arfon/Qlunc
|
2b65f52fc6367ee7c5e4484c60ac8325ec625f54
|
[
"BSD-3-Clause"
] | 11
|
2021-03-03T01:57:32.000Z
|
2021-05-25T09:45:04.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 17 17:05:02 2020
@author: fcosta
"""
| 10.75
| 35
| 0.569767
| 14
| 86
| 3.5
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.191176
| 0.209302
| 86
| 7
| 36
| 12.285714
| 0.529412
| 0.872093
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
975af57f0deb6dd0008aa298e55bbcc1bc895e76
| 41
|
py
|
Python
|
bot/config.py
|
lacmus-foundation/openstack-bot
|
7621419ac735de9e06b95d93661242db0475d1b5
|
[
"MIT"
] | 2
|
2020-06-17T20:12:29.000Z
|
2020-07-03T11:43:17.000Z
|
bot/config.py
|
lacmus-foundation/openstack-bot
|
7621419ac735de9e06b95d93661242db0475d1b5
|
[
"MIT"
] | null | null | null |
bot/config.py
|
lacmus-foundation/openstack-bot
|
7621419ac735de9e06b95d93661242db0475d1b5
|
[
"MIT"
] | null | null | null |
# Bot configuration keys.
# NOTE(review): values equal their own names -- presumably placeholders meant
# to be overridden with real credentials; confirm before deploying.
channel_id = "channel_id"
token = "token"
| 20.5
| 25
| 0.731707
| 6
| 41
| 4.666667
| 0.5
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121951
| 41
| 2
| 26
| 20.5
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
976e182add006f77b0aaa215c110fbfca071378c
| 195
|
py
|
Python
|
utils/validators.py
|
niomwungeri-fabrice/questionnaire-api
|
bd280a30d12daf61c403ae3a58df7027398e80c2
|
[
"MIT"
] | null | null | null |
utils/validators.py
|
niomwungeri-fabrice/questionnaire-api
|
bd280a30d12daf61c403ae3a58df7027398e80c2
|
[
"MIT"
] | 9
|
2021-03-19T03:26:15.000Z
|
2022-03-11T23:58:06.000Z
|
utils/validators.py
|
niomwungeri-fabrice/questionnaire-api
|
bd280a30d12daf61c403ae3a58df7027398e80c2
|
[
"MIT"
] | null | null | null |
def validate_empty(field, name=None):
    """Raise ValueError when *field* is empty/falsy.

    Args:
        field: the value to validate; any falsy value is rejected.
        name: optional field name used to build a specific error message.

    Raises:
        ValueError: when *field* is falsy. The message names the field when
            *name* is given, otherwise uses the generic message.

    Bug fixed: the original second branch was unreachable (the first
    ``if not field`` always raised first) and did ``raise validate_empty(...)``,
    which raises the *result* of a recursive call (None -> TypeError) instead
    of an exception.
    """
    if not field:
        if name:
            raise ValueError('{} is Required'.format(name))
        raise ValueError('This field is Required.')
| 32.5
| 59
| 0.666667
| 27
| 195
| 4.740741
| 0.555556
| 0.203125
| 0.203125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.225641
| 195
| 5
| 60
| 39
| 0.847682
| 0
| 0
| 0
| 0
| 0
| 0.189744
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0
| 0
| 0.2
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
97b19d7904c859762ef95786c5c194983de850a6
| 36
|
py
|
Python
|
python/snr/__init__.py
|
gromitsun/sim-xrf
|
e0009b969551e5a03c3b14e4d02153be234b7ca5
|
[
"MIT"
] | 2
|
2019-01-09T12:37:48.000Z
|
2019-01-09T12:37:55.000Z
|
python/snr/__init__.py
|
gromitsun/sim-xrf
|
e0009b969551e5a03c3b14e4d02153be234b7ca5
|
[
"MIT"
] | null | null | null |
python/snr/__init__.py
|
gromitsun/sim-xrf
|
e0009b969551e5a03c3b14e4d02153be234b7ca5
|
[
"MIT"
] | 1
|
2021-05-23T23:07:12.000Z
|
2021-05-23T23:07:12.000Z
|
# Package metadata: author and the public API exported by ``import *``.
__author__ = 'Yue'
__all__ = ['snr']
| 18
| 18
| 0.638889
| 4
| 36
| 3.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138889
| 36
| 2
| 19
| 18
| 0.483871
| 0
| 0
| 0
| 0
| 0
| 0.162162
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
8ade83a7364c7c65bd275df9be51cb99fe80b69a
| 214
|
py
|
Python
|
yawlib/__main__.py
|
letuananh/yawlib
|
01cac5ec1eda82d131f00431391594dd52ba1a98
|
[
"MIT"
] | 1
|
2018-10-18T15:42:02.000Z
|
2018-10-18T15:42:02.000Z
|
yawlib/__main__.py
|
letuananh/yawlib
|
01cac5ec1eda82d131f00431391594dd52ba1a98
|
[
"MIT"
] | 3
|
2017-01-10T03:39:14.000Z
|
2021-07-05T07:41:40.000Z
|
yawlib/__main__.py
|
letuananh/yawlib
|
01cac5ec1eda82d131f00431391594dd52ba1a98
|
[
"MIT"
] | 1
|
2017-07-16T08:30:45.000Z
|
2017-07-16T08:30:45.000Z
|
# This code is a part of yawlib library: https://github.com/letuananh/yawlib
# :copyright: (c) 2014 Le Tuan Anh <tuananh.ke@gmail.com>
# :license: MIT, see LICENSE for more details.
from . import wntk

# Guard the entry point: previously wntk.main() ran as an import side effect.
# Under ``python -m yawlib`` this module is named "__main__", so the intended
# behavior is preserved while a plain import no longer launches the CLI.
if __name__ == "__main__":
    wntk.main()
| 30.571429
| 76
| 0.724299
| 35
| 214
| 4.428571
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021978
| 0.149533
| 214
| 6
| 77
| 35.666667
| 0.82967
| 0.817757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
8aea11ed66cf4249228a08654b372c3b7c58ed01
| 5,520
|
py
|
Python
|
lib/app/dash_app.py
|
andreassiskos/unnamed_project
|
74caeee201888937ba01e6ec9775f9e99ba17373
|
[
"MIT"
] | null | null | null |
lib/app/dash_app.py
|
andreassiskos/unnamed_project
|
74caeee201888937ba01e6ec9775f9e99ba17373
|
[
"MIT"
] | null | null | null |
lib/app/dash_app.py
|
andreassiskos/unnamed_project
|
74caeee201888937ba01e6ec9775f9e99ba17373
|
[
"MIT"
] | null | null | null |
import dash
import dash_cytoscape as cyto
import dash_html_components as html
import dash_bootstrap_components as dbc
import dash_core_components as dcc
from dash.dependencies import Input, Output
elements = [{'data': {'id': 'prb_id_12642', 'label': 'probe 12642'}}, {'data': {'id': '10.139.128.1', 'label': '10.139.128.1'}}, {'data': {'id': '85.112.112.225', 'label': '85.112.112.225'}}, {'data': {'id': '85.112.127.97', 'label': '85.112.127.97'}}, {'data': {'id': '85.112.127.245', 'label': '85.112.127.245'}}, {'data': {'id': '85.112.122.13', 'label': '85.112.122.13'}}, {'data': {'id': '5.143.250.113', 'label': '5.143.250.113'}}, {'data': {'id': '178.34.130.99', 'label': '178.34.130.99'}}, {'data': {'id': '185.109.23.1', 'label': '185.109.23.1'}}, {'data': {'source': 'prb_id_12642', 'target': '10.139.128.1'}}, {'data': {'source': '10.139.128.1', 'target': '85.112.112.225'}}, {'data': {'source': '85.112.112.225', 'target': '85.112.127.97'}}, {'data': {'source': '85.112.127.97', 'target': '85.112.127.245'}}, {'data': {'source': '85.112.127.245', 'target': '85.112.122.13'}}, {'data': {'source': '85.112.122.13', 'target': '5.143.250.113'}}, {'data': {'source': '5.143.250.113', 'target': '178.34.130.99'}}, {'data': {'source': '178.34.130.99', 'target': '185.109.23.1'}},{'data': {'id': 'prb_id_17718', 'label': 'probe 17718'}}, {'data': {'id': '213.208.191.121', 'label': '213.208.191.121'}}, {'data': {'id': '77.94.163.1', 'label': '77.94.163.1'}}, {'data': {'id': '77.94.160.75', 'label': '77.94.160.75'}}, {'data': {'id': '95.167.38.237', 'label': '95.167.38.237'}}, {'data': {'id': '178.34.130.99', 'label': '178.34.130.99'}}, {'data': {'id': '185.109.23.1', 'label': '185.109.23.1'}}, {'data': {'source': 'prb_id_17718', 'target': '213.208.191.121'}}, {'data': {'source': '213.208.191.121', 'target': '77.94.163.1'}}, {'data': {'source': '77.94.163.1', 'target': '77.94.160.75'}}, {'data': {'source': '77.94.160.75', 'target': '95.167.38.237'}}, {'data': {'source': '95.167.38.237', 'target': '178.34.130.99'}}, {'data': {'source': '178.34.130.99', 'target': '185.109.23.1'}}, {'data': {'id': 'prb_id_22690', 'label': 'probe 22690'}}, {'data': {'id': '93.178.232.217', 'label': 
'93.178.232.217'}}, {'data': {'id': '213.130.29.26', 'label': '213.130.29.26'}}, {'data': {'id': '92.50.209.93', 'label': '92.50.209.93'}}, {'data': {'id': '178.34.130.99', 'label': '178.34.130.99'}}, {'data': {'id': '185.109.23.1', 'label': '185.109.23.1'}}, {'data': {'source': 'prb_id_22690', 'target': '93.178.232.217'}}, {'data': {'source': '93.178.232.217', 'target': '213.130.29.26'}}, {'data': {'source': '213.130.29.26', 'target': '92.50.209.93'}}, {'data': {'source': '92.50.209.93', 'target': '178.34.130.99'}}, {'data': {'source': '178.34.130.99', 'target': '185.109.23.1'}}, {'data': {'id': 'prb_id_26656', 'label': 'probe 26656'}}, {'data': {'id': '192.168.98.1', 'label': '192.168.98.1'}}, {'data': {'id': '89.106.171.133', 'label': '89.106.171.133'}}, {'data': {'id': '217.67.176.249', 'label': '217.67.176.249'}}, {'data': {'id': '217.67.176.54', 'label': '217.67.176.54'}}, {'data': {'id': '90.154.110.165', 'label': '90.154.110.165'}}, {'data': {'id': '178.34.129.99', 'label': '178.34.129.99'}}, {'data': {'id': '185.109.23.1', 'label': '185.109.23.1'}}, {'data': {'source': 'prb_id_26656', 'target': '192.168.98.1'}}, {'data': {'source': '192.168.98.1', 'target': '89.106.171.133'}}, {'data': {'source': '89.106.171.133', 'target': '217.67.176.249'}}, {'data': {'source': '217.67.176.249', 'target': '217.67.176.54'}}, {'data': {'source': '217.67.176.54', 'target': '90.154.110.165'}}, {'data': {'source': '90.154.110.165', 'target': '178.34.129.99'}}, {'data': {'source': '178.34.129.99', 'target': '185.109.23.1'}},{'data': {'id': 'prb_id_31463', 'label': 'probe 31463'}}, {'data': {'id': '192.168.118.1', 'label': '192.168.118.1'}}, {'data': {'id': '192.168.101.4', 'label': '192.168.101.4'}}, {'data': {'id': '185.214.184.193', 'label': '185.214.184.193'}}, {'data': {'id': '95.167.75.178', 'label': '95.167.75.178'}}, {'data': {'id': '95.167.75.177', 'label': '95.167.75.177'}}, {'data': {'id': '178.34.129.99', 'label': '178.34.129.99'}}, {'data': {'id': '185.109.23.1', 
'label': '185.109.23.1'}}, {'data': {'source': 'prb_id_31463', 'target': '192.168.118.1'}}, {'data': {'source': '192.168.118.1', 'target': '192.168.101.4'}}, {'data': {'source': '192.168.101.4', 'target': '185.214.184.193'}}, {'data': {'source': '185.214.184.193', 'target': '95.167.75.178'}}, {'data': {'source': '95.167.75.178', 'target': '95.167.75.177'}}, {'data': {'source': '95.167.75.177', 'target': '178.34.129.99'}}, {'data': {'source': '178.34.129.99', 'target': '185.109.23.1'}}]
# Bootstrap card holding a single (placeholder) dropdown control.
# NOTE(review): option label/value and the "sepal length (cm)" default look
# like leftovers from an example -- confirm they are intentional.
controls = dbc.Card(
    [
        dbc.FormGroup(
            [
                dbc.Label("X variable"),
                dcc.Dropdown(
                    id="x-variable",
                    options=[
                        {"label": "col", "value": "hi"}
                    ],
                    value="sepal length (cm)",
                ),
            ]
        )
    ],
    body=True,  # render children inside the card body
)
# Dash application styled with the Bootstrap theme.
app = dash.Dash(external_stylesheets=[dbc.themes.BOOTSTRAP])
# Single-page layout: a Cytoscape graph of the traceroute elements defined
# above, drawn breadth-first with each probe node as a tree root.
app.layout = html.Div([
    cyto.Cytoscape(
        id='cytoscape-layout-6',
        elements=elements,
        style={'width': '100%', 'height': '600px'},
        layout={'name': 'breadthfirst',
                'roots': '#prb_id_12642, #prb_id_17718, #prb_id_22690, #prb_id_26656, #prb_id_31463'}
    )
])
if __name__ == '__main__':
    # Serve on every interface, fixed port, debugger off.
    app.run_server(debug=False, host="0.0.0.0", port=12345)
| 125.454545
| 4,486
| 0.536594
| 877
| 5,520
| 3.323831
| 0.153934
| 0.078216
| 0.041166
| 0.046312
| 0.504288
| 0.231218
| 0.231218
| 0.231218
| 0.231218
| 0.231218
| 0
| 0.287045
| 0.124638
| 5,520
| 43
| 4,487
| 128.372093
| 0.316225
| 0
| 0
| 0.055556
| 0
| 0
| 0.538406
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c127dce92a796f9db79314fa23e05cd5410c7f00
| 5,309
|
py
|
Python
|
src/integrations/etl/integration_xls.py
|
jQSfire125/price-transparency-tx
|
f9abe613acace8f9c819696c061a0aa79fcef553
|
[
"Apache-2.0"
] | null | null | null |
src/integrations/etl/integration_xls.py
|
jQSfire125/price-transparency-tx
|
f9abe613acace8f9c819696c061a0aa79fcef553
|
[
"Apache-2.0"
] | null | null | null |
src/integrations/etl/integration_xls.py
|
jQSfire125/price-transparency-tx
|
f9abe613acace8f9c819696c061a0aa79fcef553
|
[
"Apache-2.0"
] | null | null | null |
# import libraries
import os
import pandas as pd
import numpy as np
import datetime as dt
# must match a reference ontology
concept = pd.read_csv('/opt/data/dim/CONCEPT.csv.gz',
                      header=0, sep='\t', low_memory=False)
# Keep only the code -> concept_id mapping; 'concept_code' is renamed to
# 'cpt' so it can be merged against the parsed hospital data below.
concept = concept[['concept_code', 'concept_id']]
concept.drop_duplicates(inplace=True)
concept.rename(columns= {'concept_code': 'cpt'}, inplace=True)
# make a control file to iterate through
path = '/opt/data/raw/'
files = os.listdir(path)
csv_files = [x for x in files if x.endswith(".xlsx")]
# removes '.xlsx' (5 characters); filenames are assumed to be '<hospital_id>.xlsx'
hospital_ids = [int(x[:-5]) for x in csv_files]
control = pd.DataFrame({'file': csv_files, 'hospital_id': hospital_ids})
#cross reference the control file with the static dimension table
dim = pd.read_csv('/opt/data/dim/hospital.csv',
                  usecols=['hospital_id', 'affiliation'])
control = control.merge(dim, how= 'left', on='hospital_id')
control.sort_values(by='hospital_id', inplace=True, ignore_index=True)
# Ascencion is a group of hospitals (ids 2-7 plus 32 and 33)
Ascencion = list(range(2,8)) + [32, 33]
# For every hospital workbook in the control table: parse it with a
# hospital-specific column mapping, normalise the codes against the reference
# ontology, melt to long (hospital_id, concept_id, variable, value) rows, and
# write one CSV per hospital for the postgres load step.
for i in range(control.shape[0]):
    out = None
    # print file you are working on now
    print(dt.datetime.now().strftime("%Y-%m-%d %H:%M:%S") +
          ' - parsing hospital - ' + str(control.hospital_id[i]))
    if control.hospital_id[i] in Ascencion:
        # read in the data
        df = pd.read_excel('/opt/data/raw/' + control.file[i],
                           sheet_name=2, skiprows=2)
        # hospital specific configuration
        out = pd.DataFrame({
            'cpt': df['CPT'].astype(str),
            'max': df['Max_Negotiated_Rate'],
            'min': df['Min_Negotiated_Rate']
        })
        out.drop_duplicates(inplace=True)
        # codes read as floats gain a trailing '.0'; strip it literally
        out['cpt'] = out['cpt'].str.replace('.0', '', regex=False)
    elif control.hospital_id[i] in [54]:
        # read in the data
        df = pd.read_excel('/opt/data/raw/' + control.file[i],
                           skiprows=3)
        # hospital specific configuration
        out = pd.DataFrame({
            'cpt': df['CPT/HCPCS Code'],
            'gross': df['Inpatient Default Gross Charge'],
            'cash': df['Discounted Cash Price']
        })
        out.drop_duplicates(inplace=True)
    elif control.hospital_id[i] in [59]:
        # read in the data
        df = pd.read_excel('/opt/data/raw/' + control.file[i],
                           sheet_name=2, skiprows=2)
        # hospital specific configuration
        out = pd.DataFrame({
            'cpt': df['Procedure Code'],
            'gross': df['Gross Charges'],
            'max': df['De-identified Maximum'],
            'min': df['De-identified Minimum']
        })
        out.drop_duplicates(inplace=True)
    elif control.hospital_id[i] in [74]:
        # read in the data
        df = pd.read_excel('/opt/data/raw/' + control.file[i])
        # hospital specific configuration
        out = pd.DataFrame({
            'cpt': df['CPT'],
            'gross': df['GROSS CHARGES'],
            'cash': df['SELF PAY CASH PRICE'],
            'max': df['MAX NEGOTIATED RATE'],
            'min': df['MIN NEGOTIATED RATE']
        })
        out.drop_duplicates(inplace=True)
    elif control.hospital_id[i] in [75]:
        # read in the data
        df = pd.read_excel('/opt/data/raw/' + control.file[i])
        # hospital specific configuration
        out = pd.DataFrame({
            'cpt': df['HCPCS_CODE'],
            'gross': df['CHARGES'],
            'cash': df['CASH_AMOUNT']
        })
        out.drop_duplicates(inplace=True)
    # Check if out was created in the previous step
    if out is not None:
        # uniform
        out.dropna(subset=['cpt'], inplace=True)
        out = out[out.cpt != '']
        out = out[out.cpt != '*']
        # must match a reference code, usually CPT or HCPCS
        out = out.merge(concept, on='cpt', sort=True)
        out = out.drop('cpt', axis=1)
        # melt from wide to long, and clean
        long = pd.melt(out, id_vars='concept_id')
        # skips transformation if values are already float
        if long.value.dtypes not in ('float64', 'int64'):
            long['value'] = long['value'].str.replace('[,-]', '', regex=True)
            long['value'] = long['value'].str.replace('[$]', '', regex=True)
            long['value'] = long['value'].str.replace('[A-Za-z]', '', regex=True)
            long['value'] = long['value'].str.strip()
            # some values end up being an empty string
            long.loc[long['value'] == '', 'value'] = np.nan
            long['value'] = long.value.astype(float)
        long.dropna(subset=['value'], inplace=True)
        long = long[long.value > 0]
        # Add hospital id and order columns
        long['hospital_id'] = control.hospital_id[i]
        long = long[long.columns[[3,0,1,2]]]
        # write the data to a flatfile for postgres
        if long.shape[0] > 0:
            # BUG FIX: path previously read 'opt/data/transformed/' (no
            # leading '/'), unlike every other absolute path in this script.
            out_path = '/opt/data/transformed/' + str(control.hospital_id[i]) + '.csv'
            long.to_csv(out_path, header=False, index=None)
        # clear variables from last iteration (inside the loop)
        # BUG FIX: moved inside this branch -- 'long' only exists when a
        # parser ran, so the old unconditional 'del long' raised NameError
        # for hospitals with no matching branch.
        del out
        del long
| 5,309
| 5,309
| 0.558486
| 674
| 5,309
| 4.324926
| 0.290801
| 0.048027
| 0.046655
| 0.0494
| 0.371527
| 0.340652
| 0.319383
| 0.309091
| 0.309091
| 0.289537
| 0
| 0.009626
| 0.295536
| 5,309
| 1
| 5,309
| 5,309
| 0.769786
| 0.994538
| 0
| 0.236559
| 0
| 0
| 0.175248
| 0.016938
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.043011
| 0
| 0.043011
| 0.010753
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c1514c3123eb57bfeb5b6332c82b533e93b9cd6e
| 137
|
py
|
Python
|
autocode/__main__.py
|
HashCode55/AutoCode
|
9af63e3bf5abf6520baf31c914073e179849cccf
|
[
"MIT"
] | 2
|
2017-06-14T23:38:14.000Z
|
2017-06-15T10:47:27.000Z
|
autocode/__main__.py
|
HashCode55/AutoCode
|
9af63e3bf5abf6520baf31c914073e179849cccf
|
[
"MIT"
] | null | null | null |
autocode/__main__.py
|
HashCode55/AutoCode
|
9af63e3bf5abf6520baf31c914073e179849cccf
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""autotype.__main__: executed when bootstrap directory is called as script."""
from .run import main

# Guard the entry point: previously main() ran as an import side effect.
# ``python -m autocode`` still executes main() (this module is "__main__"),
# while importing the module no longer launches the program.
if __name__ == "__main__":
    main()
| 15.222222
| 79
| 0.678832
| 18
| 137
| 4.944444
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008772
| 0.167883
| 137
| 8
| 80
| 17.125
| 0.77193
| 0.70073
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
c169dddca2834bc77ab2bc5b99532c0190e237b9
| 23
|
py
|
Python
|
docassemble_webapp/docassemble/webapp/__init__.py
|
britadeiro/docassemble
|
25f9d572d9e7ba85a16b6db313027a3a85002b2e
|
[
"MIT"
] | null | null | null |
docassemble_webapp/docassemble/webapp/__init__.py
|
britadeiro/docassemble
|
25f9d572d9e7ba85a16b6db313027a3a85002b2e
|
[
"MIT"
] | null | null | null |
docassemble_webapp/docassemble/webapp/__init__.py
|
britadeiro/docassemble
|
25f9d572d9e7ba85a16b6db313027a3a85002b2e
|
[
"MIT"
] | null | null | null |
# Package version string (PEP 396 style attribute).
__version__ = '1.1.32'
| 11.5
| 22
| 0.652174
| 4
| 23
| 2.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0.130435
| 23
| 1
| 23
| 23
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c19832dbb51844b8989a35b5abd9cc588de1175c
| 23
|
py
|
Python
|
min_codemirror/__init__.py
|
django-min/django-min-codemirror
|
ca02905cf90549044488bc76e65261ee0bb22538
|
[
"MIT"
] | null | null | null |
min_codemirror/__init__.py
|
django-min/django-min-codemirror
|
ca02905cf90549044488bc76e65261ee0bb22538
|
[
"MIT"
] | null | null | null |
min_codemirror/__init__.py
|
django-min/django-min-codemirror
|
ca02905cf90549044488bc76e65261ee0bb22538
|
[
"MIT"
] | null | null | null |
# Package version string (PEP 396 style attribute).
__version__ = "3.2rc0"
| 11.5
| 22
| 0.695652
| 3
| 23
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 0.130435
| 23
| 1
| 23
| 23
| 0.45
| 0
| 0
| 0
| 0
| 0
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c1a910e46070bcfc76cbf59b3270c72ae08c94ab
| 54
|
py
|
Python
|
abc/020/B.py
|
tonko2/AtCoder
|
5d617072517881d226d7c8af09cb88684d41af7e
|
[
"Xnet",
"X11",
"CECILL-B"
] | 2
|
2022-01-22T07:56:58.000Z
|
2022-01-24T00:29:37.000Z
|
abc/020/B.py
|
tonko2/AtCoder
|
5d617072517881d226d7c8af09cb88684d41af7e
|
[
"Xnet",
"X11",
"CECILL-B"
] | null | null | null |
abc/020/B.py
|
tonko2/AtCoder
|
5d617072517881d226d7c8af09cb88684d41af7e
|
[
"Xnet",
"X11",
"CECILL-B"
] | null | null | null |
# Read two space-separated tokens, concatenate them as strings, and print
# double the resulting integer. str.split() already yields str objects, so
# the original map(str, ...) wrapper was a no-op and has been removed.
A, B = input().split()
print(int(A + B) * 2)
| 27
| 32
| 0.537037
| 11
| 54
| 2.636364
| 0.818182
| 0.137931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022222
| 0.166667
| 54
| 2
| 33
| 27
| 0.622222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
c1acac6a1c63a6aa8a0d28ec6595b75d68da192a
| 139
|
py
|
Python
|
tests/app_b/models.py
|
emihir0/django-schema-graph
|
0c8280c035f47a7c736b8feb05d4dd1084b27979
|
[
"MIT"
] | 316
|
2020-02-17T13:47:47.000Z
|
2022-03-29T20:25:36.000Z
|
tests/app_b/models.py
|
emihir0/django-schema-graph
|
0c8280c035f47a7c736b8feb05d4dd1084b27979
|
[
"MIT"
] | 20
|
2020-02-23T07:59:36.000Z
|
2021-05-11T03:39:17.000Z
|
tests/app_b/models.py
|
emihir0/django-schema-graph
|
0c8280c035f47a7c736b8feb05d4dd1084b27979
|
[
"MIT"
] | 11
|
2020-03-02T17:31:31.000Z
|
2022-03-17T05:55:50.000Z
|
from django.db import models
class InterAppForeignKey(models.Model):
    """Model whose foreign key targets a model in another app (auth.User)."""

    # Target declared by "app_label.Model" string; CASCADE deletes these rows
    # when the referenced user is deleted.
    user = models.ForeignKey("auth.User", on_delete=models.CASCADE)
| 23.166667
| 67
| 0.776978
| 18
| 139
| 5.944444
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115108
| 139
| 5
| 68
| 27.8
| 0.869919
| 0
| 0
| 0
| 0
| 0
| 0.064748
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
c1d4402fd03ac2496bc808b43f69f030d9d7be01
| 103
|
py
|
Python
|
apps/forum_core/apps.py
|
WebPres-org/webpres-forum
|
acb89a474404fd2bb06168aa40fe5a2d321f45be
|
[
"Apache-2.0"
] | 1
|
2021-12-27T10:03:30.000Z
|
2021-12-27T10:03:30.000Z
|
apps/forum_core/apps.py
|
WebPres-org/webpres-forum
|
acb89a474404fd2bb06168aa40fe5a2d321f45be
|
[
"Apache-2.0"
] | null | null | null |
apps/forum_core/apps.py
|
WebPres-org/webpres-forum
|
acb89a474404fd2bb06168aa40fe5a2d321f45be
|
[
"Apache-2.0"
] | null | null | null |
from django.apps import AppConfig
class ProfileOfUserConfig(AppConfig):
    """Django AppConfig for the 'apps.forum_core' application.

    NOTE(review): the class name says "ProfileOfUser" but it configures
    forum_core -- likely a copy-paste leftover; confirm nothing references
    this name before renaming.
    """

    # Dotted module path Django uses to locate the application.
    name = 'apps.forum_core'
| 17.166667
| 37
| 0.776699
| 12
| 103
| 6.583333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145631
| 103
| 5
| 38
| 20.6
| 0.897727
| 0
| 0
| 0
| 0
| 0
| 0.145631
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
c1d9d5a116cd6ba56364dbf076ca75a1a827a386
| 160
|
py
|
Python
|
main/tests/conftest.py
|
keystro/Solaris-Systems
|
09cdaf124e1f0061bc46faa6c5277a0574a9902c
|
[
"Apache-2.0"
] | null | null | null |
main/tests/conftest.py
|
keystro/Solaris-Systems
|
09cdaf124e1f0061bc46faa6c5277a0574a9902c
|
[
"Apache-2.0"
] | null | null | null |
main/tests/conftest.py
|
keystro/Solaris-Systems
|
09cdaf124e1f0061bc46faa6c5277a0574a9902c
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from main import main as flask_app
@pytest.fixture
def app():
    """Yield the Flask application object to tests.

    Bug fixed: the original read ``yeild flask_app`` -- a typo that made the
    whole conftest a SyntaxError.
    """
    yield flask_app
@pytest.fixture
def client(app):
    """Return a Flask test client built from the ``app`` fixture."""
    return app.test_client()
| 14.545455
| 34
| 0.74375
| 25
| 160
| 4.64
| 0.52
| 0.137931
| 0.241379
| 0.362069
| 0.413793
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175
| 160
| 10
| 35
| 16
| 0.878788
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.25
| null | null | 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c1daea9ed17402365b9af08d108b781aaba3dca7
| 177
|
py
|
Python
|
main/forms.py
|
elhamrazi/ElhamBlog
|
e10240f16770b3aec505dda27cb0456be05bdddc
|
[
"MIT"
] | 5
|
2021-10-19T06:23:23.000Z
|
2022-03-09T13:57:06.000Z
|
main/forms.py
|
elhamrazi/ElhamBlog
|
e10240f16770b3aec505dda27cb0456be05bdddc
|
[
"MIT"
] | null | null | null |
main/forms.py
|
elhamrazi/ElhamBlog
|
e10240f16770b3aec505dda27cb0456be05bdddc
|
[
"MIT"
] | null | null | null |
from django import forms
from .models import Post, Comment
class CommentForm(forms.ModelForm):
    """ModelForm for creating a Comment, exposing author and content."""

    class Meta:
        # Build the form from the Comment model, limited to these two fields.
        model = Comment
        fields = ('author', 'content',)
| 19.666667
| 39
| 0.666667
| 20
| 177
| 5.9
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.237288
| 177
| 9
| 39
| 19.666667
| 0.874074
| 0
| 0
| 0
| 0
| 0
| 0.073034
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
de14b84004ad55f23945c351e7aab5a09f4ef010
| 34,786
|
py
|
Python
|
src/trunk/libs/swig/Seismology.py
|
yannikbehr/seiscomp3
|
ebb44c77092555eef7786493d00ac4efc679055f
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
src/trunk/libs/swig/Seismology.py
|
yannikbehr/seiscomp3
|
ebb44c77092555eef7786493d00ac4efc679055f
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
src/trunk/libs/swig/Seismology.py
|
yannikbehr/seiscomp3
|
ebb44c77092555eef7786493d00ac4efc679055f
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 1
|
2021-09-15T08:13:27.000Z
|
2021-09-15T08:13:27.000Z
|
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.2
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
"""
Codes for various seismological computations
"""
from sys import version_info
# SWIG bootstrap (auto-generated): locate and load the compiled _Seismology
# extension that lives next to this file. Do not hand-edit; regenerate via SWIG.
if version_info >= (2,6,0):
    def swig_import_helper():
        # Look for the extension in this package's directory first; fall back
        # to a plain import if imp.find_module cannot locate it.
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_Seismology', [dirname(__file__)])
        except ImportError:
            import _Seismology
            return _Seismology
        if fp is not None:
            try:
                _mod = imp.load_module('_Seismology', fp, pathname, description)
            finally:
                # Always close the file handle opened by find_module.
                fp.close()
        return _mod
    _Seismology = swig_import_helper()
    del swig_import_helper
else:
    import _Seismology
del version_info
# Compatibility shim: alias the 'property' builtin when it exists.
try:
    _swig_property = property
except NameError:
    pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
    """SWIG-generated attribute setter.

    Routes writes through the class's __swig_setmethods__ table, with
    special handling for 'thisown'/'this'; when static=1, unknown attribute
    names are rejected with AttributeError.
    """
    if (name == "thisown"): return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name,None)
    if method: return method(self,value)
    if (not static):
        self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
    # Non-static variant: unknown attribute names fall through to __dict__.
    return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
    # SWIG-generated attribute getter: 'thisown' is special-cased, everything
    # else is looked up in the class's __swig_getmethods__ table.
    if (name == "thisown"): return self.this.own()
    method = class_type.__swig_getmethods__.get(name,None)
    if method: return method(self)
    raise AttributeError(name)
def _swig_repr(self):
    # Build a repr like "<Module.Class; proxy of ... >"; falls back to an
    # empty proxy description when self.this.__repr__() raises.
    try: strthis = "proxy of " + self.this.__repr__()
    except: strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Old/new-style class compatibility: use a classic class as the base when
# the 'object' builtin is unavailable (ancient Python).
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object : pass
    _newclass = 0
# weakref may be missing on some builds; degrade proxying to the identity.
try:
    import weakref
    weakref_proxy = weakref.proxy
except:
    weakref_proxy = lambda x: x
class SwigPyIterator(_object):
    """SWIG-generated abstract proxy for the C++ iterator interface.

    All methods delegate to the compiled _Seismology extension; the class
    cannot be instantiated directly (see __init__).
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SwigPyIterator, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _Seismology.delete_SwigPyIterator
    __del__ = lambda self : None;
    def value(self): return _Seismology.SwigPyIterator_value(self)
    def incr(self, n=1): return _Seismology.SwigPyIterator_incr(self, n)
    def decr(self, n=1): return _Seismology.SwigPyIterator_decr(self, n)
    def distance(self, *args): return _Seismology.SwigPyIterator_distance(self, *args)
    def equal(self, *args): return _Seismology.SwigPyIterator_equal(self, *args)
    def copy(self): return _Seismology.SwigPyIterator_copy(self)
    def next(self): return _Seismology.SwigPyIterator_next(self)
    def __next__(self): return _Seismology.SwigPyIterator___next__(self)
    def previous(self): return _Seismology.SwigPyIterator_previous(self)
    def advance(self, *args): return _Seismology.SwigPyIterator_advance(self, *args)
    def __eq__(self, *args): return _Seismology.SwigPyIterator___eq__(self, *args)
    def __ne__(self, *args): return _Seismology.SwigPyIterator___ne__(self, *args)
    def __iadd__(self, *args): return _Seismology.SwigPyIterator___iadd__(self, *args)
    def __isub__(self, *args): return _Seismology.SwigPyIterator___isub__(self, *args)
    def __add__(self, *args): return _Seismology.SwigPyIterator___add__(self, *args)
    def __sub__(self, *args): return _Seismology.SwigPyIterator___sub__(self, *args)
    def __iter__(self): return self
SwigPyIterator_swigregister = _Seismology.SwigPyIterator_swigregister
SwigPyIterator_swigregister(SwigPyIterator)
import IO
import Core
import DataModel
import Config
class TravelTimeList_internal(_object):
    """SWIG-generated proxy exposing a C++ list of travel times as a Python
    sequence (iteration, indexing, slicing, push/pop at both ends).

    All methods delegate to the compiled _Seismology extension.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, TravelTimeList_internal, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, TravelTimeList_internal, name)
    __repr__ = _swig_repr
    def iterator(self): return _Seismology.TravelTimeList_internal_iterator(self)
    def __iter__(self): return self.iterator()
    def __nonzero__(self): return _Seismology.TravelTimeList_internal___nonzero__(self)
    def __bool__(self): return _Seismology.TravelTimeList_internal___bool__(self)
    def __len__(self): return _Seismology.TravelTimeList_internal___len__(self)
    def pop(self): return _Seismology.TravelTimeList_internal_pop(self)
    def __getslice__(self, *args): return _Seismology.TravelTimeList_internal___getslice__(self, *args)
    def __setslice__(self, *args): return _Seismology.TravelTimeList_internal___setslice__(self, *args)
    def __delslice__(self, *args): return _Seismology.TravelTimeList_internal___delslice__(self, *args)
    def __delitem__(self, *args): return _Seismology.TravelTimeList_internal___delitem__(self, *args)
    def __getitem__(self, *args): return _Seismology.TravelTimeList_internal___getitem__(self, *args)
    def __setitem__(self, *args): return _Seismology.TravelTimeList_internal___setitem__(self, *args)
    def append(self, *args): return _Seismology.TravelTimeList_internal_append(self, *args)
    def empty(self): return _Seismology.TravelTimeList_internal_empty(self)
    def size(self): return _Seismology.TravelTimeList_internal_size(self)
    def clear(self): return _Seismology.TravelTimeList_internal_clear(self)
    def swap(self, *args): return _Seismology.TravelTimeList_internal_swap(self, *args)
    def get_allocator(self): return _Seismology.TravelTimeList_internal_get_allocator(self)
    def begin(self): return _Seismology.TravelTimeList_internal_begin(self)
    def end(self): return _Seismology.TravelTimeList_internal_end(self)
    def rbegin(self): return _Seismology.TravelTimeList_internal_rbegin(self)
    def rend(self): return _Seismology.TravelTimeList_internal_rend(self)
    def pop_back(self): return _Seismology.TravelTimeList_internal_pop_back(self)
    def erase(self, *args): return _Seismology.TravelTimeList_internal_erase(self, *args)
    def __init__(self, *args):
        # Construct the underlying C++ object, then attach its pointer.
        this = _Seismology.new_TravelTimeList_internal(*args)
        try: self.this.append(this)
        except: self.this = this
    def push_back(self, *args): return _Seismology.TravelTimeList_internal_push_back(self, *args)
    def front(self): return _Seismology.TravelTimeList_internal_front(self)
    def back(self): return _Seismology.TravelTimeList_internal_back(self)
    def assign(self, *args): return _Seismology.TravelTimeList_internal_assign(self, *args)
    def resize(self, *args): return _Seismology.TravelTimeList_internal_resize(self, *args)
    def insert(self, *args): return _Seismology.TravelTimeList_internal_insert(self, *args)
    def pop_front(self): return _Seismology.TravelTimeList_internal_pop_front(self)
    def push_front(self, *args): return _Seismology.TravelTimeList_internal_push_front(self, *args)
    def reverse(self): return _Seismology.TravelTimeList_internal_reverse(self)
    __swig_destroy__ = _Seismology.delete_TravelTimeList_internal
    __del__ = lambda self : None;
TravelTimeList_internal_swigregister = _Seismology.TravelTimeList_internal_swigregister
TravelTimeList_internal_swigregister(TravelTimeList_internal)
class Regions(_object):
    """SWIG-generated proxy for the C++ Regions utility.

    Exposes the static entry points load(), getRegionName() and
    polyRegions() from the _Seismology extension module.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, Regions, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, Regions, name)
    __repr__ = _swig_repr
    def __init__(self):
        # Construct the underlying C++ object; ownership is tracked via .this.
        this = _Seismology.new_Regions()
        try: self.this.append(this)
        except: self.this = this
    # Static methods published both through the getter table (old-style
    # classes) and as staticmethod attributes (new-style classes).
    __swig_getmethods__["load"] = lambda x: _Seismology.Regions_load
    if _newclass:load = staticmethod(_Seismology.Regions_load)
    __swig_getmethods__["getRegionName"] = lambda x: _Seismology.Regions_getRegionName
    if _newclass:getRegionName = staticmethod(_Seismology.Regions_getRegionName)
    __swig_getmethods__["polyRegions"] = lambda x: _Seismology.Regions_polyRegions
    if _newclass:polyRegions = staticmethod(_Seismology.Regions_polyRegions)
    __swig_destroy__ = _Seismology.delete_Regions
    __del__ = lambda self : None;
# Register the proxy with the C extension's type system.
Regions_swigregister = _Seismology.Regions_swigregister
Regions_swigregister(Regions)
# Module-level shims for the Regions static methods. Note each def is
# immediately shadowed by rebinding the name to the raw C function, so the
# Python wrappers below exist only for introspection/doc purposes.
def Regions_load():
    return _Seismology.Regions_load()
Regions_load = _Seismology.Regions_load
def Regions_getRegionName(*args):
    return _Seismology.Regions_getRegionName(*args)
Regions_getRegionName = _Seismology.Regions_getRegionName
def Regions_polyRegions():
    return _Seismology.Regions_polyRegions()
Regions_polyRegions = _Seismology.Regions_polyRegions
class SensorLocationDelegate(Core.BaseObject):
    """Abstract SWIG proxy: resolves sensor locations for a locator.

    Direct construction is disabled; subclasses implement
    getSensorLocation().
    """
    __swig_setmethods__ = {}
    for _s in [Core.BaseObject]: __swig_setmethods__.update(getattr(_s,'__swig_setmethods__',{}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SensorLocationDelegate, name, value)
    __swig_getmethods__ = {}
    for _s in [Core.BaseObject]: __swig_getmethods__.update(getattr(_s,'__swig_getmethods__',{}))
    __getattr__ = lambda self, name: _swig_getattr(self, SensorLocationDelegate, name)
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    def getSensorLocation(self, *args): return _Seismology.SensorLocationDelegate_getSensorLocation(self, *args)
    __swig_destroy__ = _Seismology.delete_SensorLocationDelegate
    __del__ = lambda self : None;
# Register the proxy with the C extension's type system.
SensorLocationDelegate_swigregister = _Seismology.SensorLocationDelegate_swigregister
SensorLocationDelegate_swigregister(SensorLocationDelegate)
class LocatorInterface(Core.BaseObject):
    """Abstract SWIG proxy for the C++ locator plugin interface.

    Concrete locators (e.g. LocSAT below) implement locate()/relocate();
    instances are obtained via the static Create() factory, not by direct
    construction.
    """
    __swig_setmethods__ = {}
    for _s in [Core.BaseObject]: __swig_setmethods__.update(getattr(_s,'__swig_setmethods__',{}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, LocatorInterface, name, value)
    __swig_getmethods__ = {}
    for _s in [Core.BaseObject]: __swig_getmethods__.update(getattr(_s,'__swig_getmethods__',{}))
    __getattr__ = lambda self, name: _swig_getattr(self, LocatorInterface, name)
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    # Capability flags and message levels mirrored from the C extension.
    NoCapability = _Seismology.LocatorInterface_NoCapability
    InitialLocation = _Seismology.LocatorInterface_InitialLocation
    FixedDepth = _Seismology.LocatorInterface_FixedDepth
    DistanceCutOff = _Seismology.LocatorInterface_DistanceCutOff
    IgnoreInitialLocation = _Seismology.LocatorInterface_IgnoreInitialLocation
    CapQuantity = _Seismology.LocatorInterface_CapQuantity
    Log = _Seismology.LocatorInterface_Log
    Warning = _Seismology.LocatorInterface_Warning
    __swig_destroy__ = _Seismology.delete_LocatorInterface
    __del__ = lambda self : None;
    __swig_getmethods__["Create"] = lambda x: _Seismology.LocatorInterface_Create
    if _newclass:Create = staticmethod(_Seismology.LocatorInterface_Create)
    def name(self): return _Seismology.LocatorInterface_name(self)
    def setSensorLocationDelegate(self, *args): return _Seismology.LocatorInterface_setSensorLocationDelegate(self, *args)
    def init(self, *args): return _Seismology.LocatorInterface_init(self, *args)
    def parameters(self): return _Seismology.LocatorInterface_parameters(self)
    def parameter(self, *args): return _Seismology.LocatorInterface_parameter(self, *args)
    def setParameter(self, *args): return _Seismology.LocatorInterface_setParameter(self, *args)
    def profiles(self): return _Seismology.LocatorInterface_profiles(self)
    def setProfile(self, *args): return _Seismology.LocatorInterface_setProfile(self, *args)
    def capabilities(self): return _Seismology.LocatorInterface_capabilities(self)
    def locate(self, *args): return _Seismology.LocatorInterface_locate(self, *args)
    def relocate(self, *args): return _Seismology.LocatorInterface_relocate(self, *args)
    def lastMessage(self, *args): return _Seismology.LocatorInterface_lastMessage(self, *args)
    def supports(self, *args): return _Seismology.LocatorInterface_supports(self, *args)
    def setFixedDepth(self, *args): return _Seismology.LocatorInterface_setFixedDepth(self, *args)
    def useFixedDepth(self, use=True): return _Seismology.LocatorInterface_useFixedDepth(self, use)
    def usingFixedDepth(self): return _Seismology.LocatorInterface_usingFixedDepth(self)
    def releaseDepth(self): return _Seismology.LocatorInterface_releaseDepth(self)
    def setDistanceCutOff(self, *args): return _Seismology.LocatorInterface_setDistanceCutOff(self, *args)
    def releaseDistanceCutOff(self): return _Seismology.LocatorInterface_releaseDistanceCutOff(self)
    def isInitialLocationIgnored(self): return _Seismology.LocatorInterface_isInitialLocationIgnored(self)
    def setIgnoreInitialLocation(self, *args): return _Seismology.LocatorInterface_setIgnoreInitialLocation(self, *args)
    def getPick(self, *args): return _Seismology.LocatorInterface_getPick(self, *args)
    def getSensorLocation(self, *args): return _Seismology.LocatorInterface_getSensorLocation(self, *args)
# Register the proxy with the C extension's type system.
LocatorInterface_swigregister = _Seismology.LocatorInterface_swigregister
LocatorInterface_swigregister(LocatorInterface)
# Module-level shim; the name is immediately rebound to the raw C function,
# so the def exists only for introspection/doc purposes.
def LocatorInterface_Create(*args):
    return _Seismology.LocatorInterface_Create(*args)
LocatorInterface_Create = _Seismology.LocatorInterface_Create
class PickNotFoundException(Core.GeneralException):
    """SWIG proxy exception; presumably raised when a referenced pick cannot
    be resolved -- confirm against the C++ headers."""
    __swig_setmethods__ = {}
    for _s in [Core.GeneralException]: __swig_setmethods__.update(getattr(_s,'__swig_setmethods__',{}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, PickNotFoundException, name, value)
    __swig_getmethods__ = {}
    for _s in [Core.GeneralException]: __swig_getmethods__.update(getattr(_s,'__swig_getmethods__',{}))
    __getattr__ = lambda self, name: _swig_getattr(self, PickNotFoundException, name)
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _Seismology.new_PickNotFoundException(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _Seismology.delete_PickNotFoundException
    __del__ = lambda self : None;
# Register the proxy with the C extension's type system.
PickNotFoundException_swigregister = _Seismology.PickNotFoundException_swigregister
PickNotFoundException_swigregister(PickNotFoundException)
class LocatorException(Core.GeneralException):
    """SWIG proxy exception; presumably raised for locator failures --
    confirm against the C++ headers."""
    __swig_setmethods__ = {}
    for _s in [Core.GeneralException]: __swig_setmethods__.update(getattr(_s,'__swig_setmethods__',{}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, LocatorException, name, value)
    __swig_getmethods__ = {}
    for _s in [Core.GeneralException]: __swig_getmethods__.update(getattr(_s,'__swig_getmethods__',{}))
    __getattr__ = lambda self, name: _swig_getattr(self, LocatorException, name)
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _Seismology.new_LocatorException(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _Seismology.delete_LocatorException
    __del__ = lambda self : None;
# Register the proxy with the C extension's type system.
LocatorException_swigregister = _Seismology.LocatorException_swigregister
LocatorException_swigregister(LocatorException)
class StationNotFoundException(Core.GeneralException):
    """SWIG proxy exception; presumably raised when a station lookup fails --
    confirm against the C++ headers."""
    __swig_setmethods__ = {}
    for _s in [Core.GeneralException]: __swig_setmethods__.update(getattr(_s,'__swig_setmethods__',{}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, StationNotFoundException, name, value)
    __swig_getmethods__ = {}
    for _s in [Core.GeneralException]: __swig_getmethods__.update(getattr(_s,'__swig_getmethods__',{}))
    __getattr__ = lambda self, name: _swig_getattr(self, StationNotFoundException, name)
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _Seismology.new_StationNotFoundException(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _Seismology.delete_StationNotFoundException
    __del__ = lambda self : None;
# Register the proxy with the C extension's type system.
StationNotFoundException_swigregister = _Seismology.StationNotFoundException_swigregister
StationNotFoundException_swigregister(StationNotFoundException)
# Module-level constants mirrored from the C extension. The LP_* names
# appear to be LocSAT locator parameter keys -- confirm against the C++
# headers before relying on their exact semantics.
ARRIVAL_TIME_ERROR = _Seismology.ARRIVAL_TIME_ERROR
LP_NUM_DEG_FREEDOM = _Seismology.LP_NUM_DEG_FREEDOM
LP_EST_STD_ERROR = _Seismology.LP_EST_STD_ERROR
LP_CONF_LEVEL = _Seismology.LP_CONF_LEVEL
LP_DAMPING = _Seismology.LP_DAMPING
LP_MAX_ITERATIONS = _Seismology.LP_MAX_ITERATIONS
LP_FIX_DEPTH = _Seismology.LP_FIX_DEPTH
LP_FIXING_DEPTH = _Seismology.LP_FIXING_DEPTH
LP_LAT_INIT = _Seismology.LP_LAT_INIT
LP_LONG_INIT = _Seismology.LP_LONG_INIT
LP_DEPTH_INIT = _Seismology.LP_DEPTH_INIT
LP_USE_LOCATION = _Seismology.LP_USE_LOCATION
LP_VERBOSE = _Seismology.LP_VERBOSE
LP_COR_LEVEL = _Seismology.LP_COR_LEVEL
LP_OUT_FILENAME = _Seismology.LP_OUT_FILENAME
LP_PREFIX = _Seismology.LP_PREFIX
LP_MIN_ARRIVAL_WEIGHT = _Seismology.LP_MIN_ARRIVAL_WEIGHT
LP_RMS_AS_TIME_ERROR = _Seismology.LP_RMS_AS_TIME_ERROR
class LocSATErrorEllipsoid(_object):
    """SWIG proxy struct holding LocSAT error-ellipsoid fields.

    Each field is exposed as a read/write property backed by _get/_set
    functions in the C extension (covariance terms sxx..stz, plus sdobs,
    smajax, sminax, strike, sdepth, stime, conf).
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, LocSATErrorEllipsoid, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, LocSATErrorEllipsoid, name)
    __repr__ = _swig_repr
    def __init__(self):
        # Construct the underlying C++ struct; ownership is tracked via .this.
        this = _Seismology.new_LocSATErrorEllipsoid()
        try: self.this.append(this)
        except: self.this = this
    __swig_setmethods__["sxx"] = _Seismology.LocSATErrorEllipsoid_sxx_set
    __swig_getmethods__["sxx"] = _Seismology.LocSATErrorEllipsoid_sxx_get
    if _newclass:sxx = _swig_property(_Seismology.LocSATErrorEllipsoid_sxx_get, _Seismology.LocSATErrorEllipsoid_sxx_set)
    __swig_setmethods__["syy"] = _Seismology.LocSATErrorEllipsoid_syy_set
    __swig_getmethods__["syy"] = _Seismology.LocSATErrorEllipsoid_syy_get
    if _newclass:syy = _swig_property(_Seismology.LocSATErrorEllipsoid_syy_get, _Seismology.LocSATErrorEllipsoid_syy_set)
    __swig_setmethods__["szz"] = _Seismology.LocSATErrorEllipsoid_szz_set
    __swig_getmethods__["szz"] = _Seismology.LocSATErrorEllipsoid_szz_get
    if _newclass:szz = _swig_property(_Seismology.LocSATErrorEllipsoid_szz_get, _Seismology.LocSATErrorEllipsoid_szz_set)
    __swig_setmethods__["stt"] = _Seismology.LocSATErrorEllipsoid_stt_set
    __swig_getmethods__["stt"] = _Seismology.LocSATErrorEllipsoid_stt_get
    if _newclass:stt = _swig_property(_Seismology.LocSATErrorEllipsoid_stt_get, _Seismology.LocSATErrorEllipsoid_stt_set)
    __swig_setmethods__["sxy"] = _Seismology.LocSATErrorEllipsoid_sxy_set
    __swig_getmethods__["sxy"] = _Seismology.LocSATErrorEllipsoid_sxy_get
    if _newclass:sxy = _swig_property(_Seismology.LocSATErrorEllipsoid_sxy_get, _Seismology.LocSATErrorEllipsoid_sxy_set)
    __swig_setmethods__["sxz"] = _Seismology.LocSATErrorEllipsoid_sxz_set
    __swig_getmethods__["sxz"] = _Seismology.LocSATErrorEllipsoid_sxz_get
    if _newclass:sxz = _swig_property(_Seismology.LocSATErrorEllipsoid_sxz_get, _Seismology.LocSATErrorEllipsoid_sxz_set)
    __swig_setmethods__["syz"] = _Seismology.LocSATErrorEllipsoid_syz_set
    __swig_getmethods__["syz"] = _Seismology.LocSATErrorEllipsoid_syz_get
    if _newclass:syz = _swig_property(_Seismology.LocSATErrorEllipsoid_syz_get, _Seismology.LocSATErrorEllipsoid_syz_set)
    __swig_setmethods__["stx"] = _Seismology.LocSATErrorEllipsoid_stx_set
    __swig_getmethods__["stx"] = _Seismology.LocSATErrorEllipsoid_stx_get
    if _newclass:stx = _swig_property(_Seismology.LocSATErrorEllipsoid_stx_get, _Seismology.LocSATErrorEllipsoid_stx_set)
    __swig_setmethods__["sty"] = _Seismology.LocSATErrorEllipsoid_sty_set
    __swig_getmethods__["sty"] = _Seismology.LocSATErrorEllipsoid_sty_get
    if _newclass:sty = _swig_property(_Seismology.LocSATErrorEllipsoid_sty_get, _Seismology.LocSATErrorEllipsoid_sty_set)
    __swig_setmethods__["stz"] = _Seismology.LocSATErrorEllipsoid_stz_set
    __swig_getmethods__["stz"] = _Seismology.LocSATErrorEllipsoid_stz_get
    if _newclass:stz = _swig_property(_Seismology.LocSATErrorEllipsoid_stz_get, _Seismology.LocSATErrorEllipsoid_stz_set)
    __swig_setmethods__["sdobs"] = _Seismology.LocSATErrorEllipsoid_sdobs_set
    __swig_getmethods__["sdobs"] = _Seismology.LocSATErrorEllipsoid_sdobs_get
    if _newclass:sdobs = _swig_property(_Seismology.LocSATErrorEllipsoid_sdobs_get, _Seismology.LocSATErrorEllipsoid_sdobs_set)
    __swig_setmethods__["smajax"] = _Seismology.LocSATErrorEllipsoid_smajax_set
    __swig_getmethods__["smajax"] = _Seismology.LocSATErrorEllipsoid_smajax_get
    if _newclass:smajax = _swig_property(_Seismology.LocSATErrorEllipsoid_smajax_get, _Seismology.LocSATErrorEllipsoid_smajax_set)
    __swig_setmethods__["sminax"] = _Seismology.LocSATErrorEllipsoid_sminax_set
    __swig_getmethods__["sminax"] = _Seismology.LocSATErrorEllipsoid_sminax_get
    if _newclass:sminax = _swig_property(_Seismology.LocSATErrorEllipsoid_sminax_get, _Seismology.LocSATErrorEllipsoid_sminax_set)
    __swig_setmethods__["strike"] = _Seismology.LocSATErrorEllipsoid_strike_set
    __swig_getmethods__["strike"] = _Seismology.LocSATErrorEllipsoid_strike_get
    if _newclass:strike = _swig_property(_Seismology.LocSATErrorEllipsoid_strike_get, _Seismology.LocSATErrorEllipsoid_strike_set)
    __swig_setmethods__["sdepth"] = _Seismology.LocSATErrorEllipsoid_sdepth_set
    __swig_getmethods__["sdepth"] = _Seismology.LocSATErrorEllipsoid_sdepth_get
    if _newclass:sdepth = _swig_property(_Seismology.LocSATErrorEllipsoid_sdepth_get, _Seismology.LocSATErrorEllipsoid_sdepth_set)
    __swig_setmethods__["stime"] = _Seismology.LocSATErrorEllipsoid_stime_set
    __swig_getmethods__["stime"] = _Seismology.LocSATErrorEllipsoid_stime_get
    if _newclass:stime = _swig_property(_Seismology.LocSATErrorEllipsoid_stime_get, _Seismology.LocSATErrorEllipsoid_stime_set)
    __swig_setmethods__["conf"] = _Seismology.LocSATErrorEllipsoid_conf_set
    __swig_getmethods__["conf"] = _Seismology.LocSATErrorEllipsoid_conf_get
    if _newclass:conf = _swig_property(_Seismology.LocSATErrorEllipsoid_conf_get, _Seismology.LocSATErrorEllipsoid_conf_set)
    __swig_destroy__ = _Seismology.delete_LocSATErrorEllipsoid
    __del__ = lambda self : None;
# Register the proxy with the C extension's type system.
LocSATErrorEllipsoid_swigregister = _Seismology.LocSATErrorEllipsoid_swigregister
LocSATErrorEllipsoid_swigregister(LocSATErrorEllipsoid)
class LocSAT(LocatorInterface):
    """SWIG proxy for the concrete LocSAT locator implementation.

    Overrides the LocatorInterface entry points (init, parameters,
    profiles, locate, relocate, ...) and additionally exposes
    errorEllipsoid() and static default-profile management.
    """
    __swig_setmethods__ = {}
    for _s in [LocatorInterface]: __swig_setmethods__.update(getattr(_s,'__swig_setmethods__',{}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, LocSAT, name, value)
    __swig_getmethods__ = {}
    for _s in [LocatorInterface]: __swig_getmethods__.update(getattr(_s,'__swig_getmethods__',{}))
    __getattr__ = lambda self, name: _swig_getattr(self, LocSAT, name)
    __repr__ = _swig_repr
    def __init__(self):
        # Construct the underlying C++ locator; ownership is tracked via .this.
        this = _Seismology.new_LocSAT()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _Seismology.delete_LocSAT
    __del__ = lambda self : None;
    def init(self, *args): return _Seismology.LocSAT_init(self, *args)
    def parameters(self): return _Seismology.LocSAT_parameters(self)
    def parameter(self, *args): return _Seismology.LocSAT_parameter(self, *args)
    def setParameter(self, *args): return _Seismology.LocSAT_setParameter(self, *args)
    def profiles(self): return _Seismology.LocSAT_profiles(self)
    def setProfile(self, *args): return _Seismology.LocSAT_setProfile(self, *args)
    __swig_getmethods__["setDefaultProfile"] = lambda x: _Seismology.LocSAT_setDefaultProfile
    if _newclass:setDefaultProfile = staticmethod(_Seismology.LocSAT_setDefaultProfile)
    __swig_getmethods__["currentDefaultProfile"] = lambda x: _Seismology.LocSAT_currentDefaultProfile
    if _newclass:currentDefaultProfile = staticmethod(_Seismology.LocSAT_currentDefaultProfile)
    def setNewOriginID(self, *args): return _Seismology.LocSAT_setNewOriginID(self, *args)
    def capabilities(self): return _Seismology.LocSAT_capabilities(self)
    def locate(self, *args): return _Seismology.LocSAT_locate(self, *args)
    def relocate(self, *args): return _Seismology.LocSAT_relocate(self, *args)
    def errorEllipsoid(self): return _Seismology.LocSAT_errorEllipsoid(self)
# Register the proxy with the C extension's type system.
LocSAT_swigregister = _Seismology.LocSAT_swigregister
LocSAT_swigregister(LocSAT)
# Module-level shims for the LocSAT static methods; each name is
# immediately rebound to the raw C function, so the defs exist only for
# introspection/doc purposes.
def LocSAT_setDefaultProfile(*args):
    return _Seismology.LocSAT_setDefaultProfile(*args)
LocSAT_setDefaultProfile = _Seismology.LocSAT_setDefaultProfile
def LocSAT_currentDefaultProfile():
    return _Seismology.LocSAT_currentDefaultProfile()
LocSAT_currentDefaultProfile = _Seismology.LocSAT_currentDefaultProfile
class FileNotFoundError(Core.GeneralException):
    """SWIG proxy exception for a missing file.

    NOTE(review): this shadows Python 3's builtin FileNotFoundError within
    this module -- generated name, kept as-is for binding compatibility.
    """
    __swig_setmethods__ = {}
    for _s in [Core.GeneralException]: __swig_setmethods__.update(getattr(_s,'__swig_setmethods__',{}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, FileNotFoundError, name, value)
    __swig_getmethods__ = {}
    for _s in [Core.GeneralException]: __swig_getmethods__.update(getattr(_s,'__swig_getmethods__',{}))
    __getattr__ = lambda self, name: _swig_getattr(self, FileNotFoundError, name)
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _Seismology.new_FileNotFoundError(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _Seismology.delete_FileNotFoundError
    __del__ = lambda self : None;
# Register the proxy with the C extension's type system.
FileNotFoundError_swigregister = _Seismology.FileNotFoundError_swigregister
FileNotFoundError_swigregister(FileNotFoundError)
class MultipleModelsError(Core.GeneralException):
    """SWIG proxy exception; presumably raised when more than one velocity
    model matches a request -- confirm against the C++ headers."""
    __swig_setmethods__ = {}
    for _s in [Core.GeneralException]: __swig_setmethods__.update(getattr(_s,'__swig_setmethods__',{}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, MultipleModelsError, name, value)
    __swig_getmethods__ = {}
    for _s in [Core.GeneralException]: __swig_getmethods__.update(getattr(_s,'__swig_getmethods__',{}))
    __getattr__ = lambda self, name: _swig_getattr(self, MultipleModelsError, name)
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _Seismology.new_MultipleModelsError(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _Seismology.delete_MultipleModelsError
    __del__ = lambda self : None;
# Register the proxy with the C extension's type system.
MultipleModelsError_swigregister = _Seismology.MultipleModelsError_swigregister
MultipleModelsError_swigregister(MultipleModelsError)
class NoPhaseError(Core.GeneralException):
    """SWIG proxy exception; presumably raised when a requested seismic
    phase is unavailable -- confirm against the C++ headers. Takes no
    constructor arguments."""
    __swig_setmethods__ = {}
    for _s in [Core.GeneralException]: __swig_setmethods__.update(getattr(_s,'__swig_setmethods__',{}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, NoPhaseError, name, value)
    __swig_getmethods__ = {}
    for _s in [Core.GeneralException]: __swig_getmethods__.update(getattr(_s,'__swig_getmethods__',{}))
    __getattr__ = lambda self, name: _swig_getattr(self, NoPhaseError, name)
    __repr__ = _swig_repr
    def __init__(self):
        this = _Seismology.new_NoPhaseError()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _Seismology.delete_NoPhaseError
    __del__ = lambda self : None;
# Register the proxy with the C extension's type system.
NoPhaseError_swigregister = _Seismology.NoPhaseError_swigregister
NoPhaseError_swigregister(NoPhaseError)
class TravelTime(_object):
    """SWIG proxy for a single travel-time record.

    Fields exposed as properties: phase, time, dtdd, dtdh, dddp, takeoff.
    Supports == and < comparisons via the C extension (the latter enables
    sorting, see TravelTimeList.sortByTime).
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, TravelTime, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, TravelTime, name)
    __repr__ = _swig_repr
    def __init__(self, *args):
        # Construct the underlying C++ object; ownership is tracked via .this.
        this = _Seismology.new_TravelTime(*args)
        try: self.this.append(this)
        except: self.this = this
    def __eq__(self, *args): return _Seismology.TravelTime___eq__(self, *args)
    def __lt__(self, *args): return _Seismology.TravelTime___lt__(self, *args)
    __swig_setmethods__["phase"] = _Seismology.TravelTime_phase_set
    __swig_getmethods__["phase"] = _Seismology.TravelTime_phase_get
    if _newclass:phase = _swig_property(_Seismology.TravelTime_phase_get, _Seismology.TravelTime_phase_set)
    __swig_setmethods__["time"] = _Seismology.TravelTime_time_set
    __swig_getmethods__["time"] = _Seismology.TravelTime_time_get
    if _newclass:time = _swig_property(_Seismology.TravelTime_time_get, _Seismology.TravelTime_time_set)
    __swig_setmethods__["dtdd"] = _Seismology.TravelTime_dtdd_set
    __swig_getmethods__["dtdd"] = _Seismology.TravelTime_dtdd_get
    if _newclass:dtdd = _swig_property(_Seismology.TravelTime_dtdd_get, _Seismology.TravelTime_dtdd_set)
    __swig_setmethods__["dtdh"] = _Seismology.TravelTime_dtdh_set
    __swig_getmethods__["dtdh"] = _Seismology.TravelTime_dtdh_get
    if _newclass:dtdh = _swig_property(_Seismology.TravelTime_dtdh_get, _Seismology.TravelTime_dtdh_set)
    __swig_setmethods__["dddp"] = _Seismology.TravelTime_dddp_set
    __swig_getmethods__["dddp"] = _Seismology.TravelTime_dddp_get
    if _newclass:dddp = _swig_property(_Seismology.TravelTime_dddp_get, _Seismology.TravelTime_dddp_set)
    __swig_setmethods__["takeoff"] = _Seismology.TravelTime_takeoff_set
    __swig_getmethods__["takeoff"] = _Seismology.TravelTime_takeoff_get
    if _newclass:takeoff = _swig_property(_Seismology.TravelTime_takeoff_get, _Seismology.TravelTime_takeoff_set)
    __swig_destroy__ = _Seismology.delete_TravelTime
    __del__ = lambda self : None;
# Register the proxy with the C extension's type system.
TravelTime_swigregister = _Seismology.TravelTime_swigregister
TravelTime_swigregister(TravelTime)
class TravelTimeList(TravelTimeList_internal):
    """SWIG proxy extending the internal container with convenience helpers
    (isEmpty, sortByTime) and the depth/delta properties of the list."""
    __swig_setmethods__ = {}
    for _s in [TravelTimeList_internal]: __swig_setmethods__.update(getattr(_s,'__swig_setmethods__',{}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, TravelTimeList, name, value)
    __swig_getmethods__ = {}
    for _s in [TravelTimeList_internal]: __swig_getmethods__.update(getattr(_s,'__swig_getmethods__',{}))
    __getattr__ = lambda self, name: _swig_getattr(self, TravelTimeList, name)
    __repr__ = _swig_repr
    def isEmpty(self): return _Seismology.TravelTimeList_isEmpty(self)
    def sortByTime(self): return _Seismology.TravelTimeList_sortByTime(self)
    __swig_setmethods__["depth"] = _Seismology.TravelTimeList_depth_set
    __swig_getmethods__["depth"] = _Seismology.TravelTimeList_depth_get
    if _newclass:depth = _swig_property(_Seismology.TravelTimeList_depth_get, _Seismology.TravelTimeList_depth_set)
    __swig_setmethods__["delta"] = _Seismology.TravelTimeList_delta_set
    __swig_getmethods__["delta"] = _Seismology.TravelTimeList_delta_get
    if _newclass:delta = _swig_property(_Seismology.TravelTimeList_delta_get, _Seismology.TravelTimeList_delta_set)
    def __init__(self):
        # Construct the underlying C++ list; ownership is tracked via .this.
        this = _Seismology.new_TravelTimeList()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _Seismology.delete_TravelTimeList
    __del__ = lambda self : None;
# Register the proxy with the C extension's type system.
TravelTimeList_swigregister = _Seismology.TravelTimeList_swigregister
TravelTimeList_swigregister(TravelTimeList)
class TravelTimeTableInterface(Core.BaseObject):
    """Abstract SWIG proxy for travel-time table backends.

    Instances come from the static Create() factory; implementations
    provide setModel/model/compute/computeFirst.
    """
    __swig_setmethods__ = {}
    for _s in [Core.BaseObject]: __swig_setmethods__.update(getattr(_s,'__swig_setmethods__',{}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, TravelTimeTableInterface, name, value)
    __swig_getmethods__ = {}
    for _s in [Core.BaseObject]: __swig_getmethods__.update(getattr(_s,'__swig_getmethods__',{}))
    __getattr__ = lambda self, name: _swig_getattr(self, TravelTimeTableInterface, name)
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _Seismology.delete_TravelTimeTableInterface
    __del__ = lambda self : None;
    __swig_getmethods__["Create"] = lambda x: _Seismology.TravelTimeTableInterface_Create
    if _newclass:Create = staticmethod(_Seismology.TravelTimeTableInterface_Create)
    def setModel(self, *args): return _Seismology.TravelTimeTableInterface_setModel(self, *args)
    def model(self): return _Seismology.TravelTimeTableInterface_model(self)
    def compute(self, *args): return _Seismology.TravelTimeTableInterface_compute(self, *args)
    def computeFirst(self, *args): return _Seismology.TravelTimeTableInterface_computeFirst(self, *args)
# Register the proxy with the C extension's type system.
TravelTimeTableInterface_swigregister = _Seismology.TravelTimeTableInterface_swigregister
TravelTimeTableInterface_swigregister(TravelTimeTableInterface)
# Module-level shim; the name is immediately rebound to the raw C function,
# so the def exists only for introspection/doc purposes.
def TravelTimeTableInterface_Create(*args):
    return _Seismology.TravelTimeTableInterface_Create(*args)
TravelTimeTableInterface_Create = _Seismology.TravelTimeTableInterface_Create
class TravelTimeTable(TravelTimeTableInterface):
    """SWIG proxy for the default concrete travel-time table, implementing
    the TravelTimeTableInterface entry points."""
    __swig_setmethods__ = {}
    for _s in [TravelTimeTableInterface]: __swig_setmethods__.update(getattr(_s,'__swig_setmethods__',{}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, TravelTimeTable, name, value)
    __swig_getmethods__ = {}
    for _s in [TravelTimeTableInterface]: __swig_getmethods__.update(getattr(_s,'__swig_getmethods__',{}))
    __getattr__ = lambda self, name: _swig_getattr(self, TravelTimeTable, name)
    __repr__ = _swig_repr
    def __init__(self):
        # Construct the underlying C++ table; ownership is tracked via .this.
        this = _Seismology.new_TravelTimeTable()
        try: self.this.append(this)
        except: self.this = this
    def setModel(self, *args): return _Seismology.TravelTimeTable_setModel(self, *args)
    def model(self): return _Seismology.TravelTimeTable_model(self)
    def compute(self, *args): return _Seismology.TravelTimeTable_compute(self, *args)
    def computeFirst(self, *args): return _Seismology.TravelTimeTable_computeFirst(self, *args)
    __swig_destroy__ = _Seismology.delete_TravelTimeTable
    __del__ = lambda self : None;
# Register the proxy with the C extension's type system.
TravelTimeTable_swigregister = _Seismology.TravelTimeTable_swigregister
TravelTimeTable_swigregister(TravelTimeTable)
# Free-function shims; each name is immediately rebound to the raw C
# function from the extension, so the defs exist only for
# introspection/doc purposes.
def ellipcorr(*args):
    return _Seismology.ellipcorr(*args)
ellipcorr = _Seismology.ellipcorr
def getPhase(*args):
    return _Seismology.getPhase(*args)
getPhase = _Seismology.getPhase
def firstArrivalP(*args):
    return _Seismology.firstArrivalP(*args)
firstArrivalP = _Seismology.firstArrivalP
# This file is compatible with both classic and new-style classes.
| 57.119869
| 130
| 0.789053
| 3,674
| 34,786
| 6.801851
| 0.079477
| 0.037455
| 0.048019
| 0.0509
| 0.487995
| 0.332453
| 0.277031
| 0.262425
| 0.229692
| 0.216967
| 0
| 0.00046
| 0.125798
| 34,786
| 608
| 131
| 57.213816
| 0.821309
| 0.009774
| 0
| 0.251799
| 1
| 0
| 0.029567
| 0.00061
| 0
| 0
| 0
| 0
| 0
| 1
| 0.232014
| false
| 0.003597
| 0.02518
| 0.194245
| 0.544964
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
a9b55d250272274527fcf9914fdab2d6b6ceb7cd
| 2,497
|
py
|
Python
|
resources/currencies.py
|
axonepro/sdk-ooti
|
146ba758f571352d02daa56349e8b3affd8ca5a9
|
[
"Unlicense"
] | 1
|
2021-03-13T16:04:54.000Z
|
2021-03-13T16:04:54.000Z
|
resources/currencies.py
|
axonepro/sdk-ooti
|
146ba758f571352d02daa56349e8b3affd8ca5a9
|
[
"Unlicense"
] | 7
|
2021-07-21T12:42:39.000Z
|
2022-01-06T10:34:04.000Z
|
resources/currencies.py
|
axonepro/sdk-ooti
|
146ba758f571352d02daa56349e8b3affd8ca5a9
|
[
"Unlicense"
] | 2
|
2021-06-22T08:10:48.000Z
|
2021-09-01T09:16:41.000Z
|
import requests
import json
from .helper import Helper
class Currencies(Helper):
    """REST resource wrapper for the currencies endpoints.

    Every method builds a relative route, sends it through the inherited
    ``process_request`` helper, and normalizes the result with
    ``process_response``.
    """

    def __init__(self, base_url, org_pk, teams_pk, access_token, _csrf_token, headers, pagination):
        super().__init__(base_url, org_pk, teams_pk, access_token, _csrf_token, headers, pagination)

    def get_currencies_list(self, page=1):
        """Return one page of the currencies list (page size comes from
        ``self.pagination``)."""
        endpoint = 'v1/currencies/list/?page_size={0}&page={1}'.format(self.pagination, page)
        raw = self.process_request(requests, 'GET', self.base_url, endpoint, self.headers, None, None)
        # Second argument enables paginated-response handling.
        return self.process_response(raw, True)

    def create_currency(self, data):
        """Create a currency.

        Keyword arguments:
        data -- payload dict; required keys:
            {
                "name": "string",
                "longname": "string",
                "decimal_points": 0,
                "symbol": "string"
            }
        """
        endpoint = 'v1/currencies/list/'
        raw = self.process_request(requests, 'POST', self.base_url, endpoint, self.headers, None, json.dumps(data))
        return self.process_response(raw)

    # TODO POST on /api/v1/currencies/list/actions/{org_pk}/

    def get_currency_details(self, pk):
        """Fetch a single currency.

        Keyword arguments:
        pk -- the pk of the currency
        """
        endpoint = 'v1/currencies/{0}/'.format(pk)
        raw = self.process_request(requests, 'GET', self.base_url, endpoint, self.headers, None, None)
        return self.process_response(raw)

    def update_currency(self, pk, data):
        """Partially update a currency.

        Keyword arguments:
        pk -- the pk of the currency
        data -- payload dict with any of the keys accepted by
            create_currency (name, longname, decimal_points, symbol)
        """
        endpoint = 'v1/currencies/{0}/'.format(pk)
        raw = self.process_request(requests, 'PATCH', self.base_url, endpoint, self.headers, None, json.dumps(data))
        return self.process_response(raw)

    def delete_currency(self, pk):
        """Delete a currency.

        Keyword arguments:
        pk -- the pk of the currency
        """
        endpoint = 'v1/currencies/{0}/'.format(pk)
        raw = self.process_request(requests, 'DELETE', self.base_url, endpoint, self.headers, None, None)
        return self.process_response(raw)
| 32.012821
| 118
| 0.598318
| 285
| 2,497
| 5.080702
| 0.217544
| 0.075967
| 0.04558
| 0.089779
| 0.709945
| 0.686464
| 0.68232
| 0.68232
| 0.68232
| 0.68232
| 0
| 0.007799
| 0.281137
| 2,497
| 78
| 119
| 32.012821
| 0.798886
| 0.261514
| 0
| 0.346154
| 0
| 0
| 0.08442
| 0.026071
| 0
| 0
| 0
| 0.012821
| 0
| 1
| 0.230769
| false
| 0
| 0.115385
| 0
| 0.576923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
a9e851f0023dbcc96c72c1aa543f71ed695808d7
| 34
|
py
|
Python
|
python/testData/codeInsight/smartEnter/withTargetOmitted_after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/codeInsight/smartEnter/withTargetOmitted_after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/codeInsight/smartEnter/withTargetOmitted_after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
with open('file.txt'):
<caret>
| 17
| 22
| 0.588235
| 5
| 34
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 34
| 2
| 23
| 17
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0.228571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
a9eac88e0189d0573a6cfa619dd8e384cf753400
| 6,505
|
py
|
Python
|
tests/test_users.py
|
benranderson/demo
|
b27834c79b19b478c917edced8e170122a0f7113
|
[
"MIT"
] | 1
|
2019-11-01T09:43:19.000Z
|
2019-11-01T09:43:19.000Z
|
tests/test_users.py
|
benranderson/demo
|
b27834c79b19b478c917edced8e170122a0f7113
|
[
"MIT"
] | 12
|
2019-09-30T22:35:20.000Z
|
2019-10-12T23:39:01.000Z
|
tests/test_users.py
|
benranderson/demo
|
b27834c79b19b478c917edced8e170122a0f7113
|
[
"MIT"
] | 1
|
2019-11-13T12:19:17.000Z
|
2019-11-13T12:19:17.000Z
|
import json
from tests.utils import add_user, recreate_db
class TestUsersList:
def test_add_user(self, client, test_db):
response = client.post(
"/users",
data=json.dumps({"username": "ben", "email": "ben@email.com"}),
content_type="application/json",
)
assert response.status_code == 201
assert "ben@email.com was added!" in response.json["message"]
assert "success" in response.json["status"]
def test_add_user_invalid_json(self, client, test_db):
resp = client.post(
"/users", data=json.dumps({}), content_type="application/json"
)
data = json.loads(resp.data.decode())
assert resp.status_code == 400
assert "Invalid payload." in data["message"]
assert "fail" in data["status"]
def test_add_user_invalid_json_keys(self, client, test_db):
resp = client.post(
"/users",
data=json.dumps({"email": "abi@email.com"}),
content_type="application/json",
)
data = json.loads(resp.data.decode())
assert resp.status_code == 400
assert "Invalid payload." in data["message"]
assert "fail" in data["status"]
def test_add_user_duplicate_email(self, client, test_db):
client.post(
"/users",
data=json.dumps({"username": "ben", "email": "ben@email.com"}),
content_type="application/json",
)
resp = client.post(
"/users",
data=json.dumps({"username": "ben", "email": "ben@email.com"}),
content_type="application/json",
)
data = json.loads(resp.data.decode())
assert resp.status_code == 400
assert "Sorry. That email already exists." in data["message"]
assert "fail" in data["status"]
class TestUsers:
def test_single_user(self, client, test_db):
user = add_user("tom", "tom@email.com")
resp = client.get(f"/users/{user.id}")
data = json.loads(resp.data.decode())
assert resp.status_code == 200
assert "tom" in data["data"]["username"]
assert "tom@email.com" in data["data"]["email"]
assert "success" in data["status"]
def test_single_user_no_id(self, client, test_db):
resp = client.get("/users/blah")
data = json.loads(resp.data.decode())
assert resp.status_code == 404
assert "User does not exist" in data["message"]
assert "fail" in data["status"]
def test_single_user_incorrect_id(self, client, test_db):
resp = client.get("/users/999")
data = json.loads(resp.data.decode())
assert resp.status_code == 404
assert "User does not exist" in data["message"]
assert "fail" in data["status"]
def test_all_users(self, client, test_db):
recreate_db()
add_user("michael", "michael@mherman.org")
add_user("fletcher", "fletcher@notreal.com")
resp = client.get("/users")
data = json.loads(resp.data.decode())
assert resp.status_code == 200
assert len(data["data"]["users"]) == 2
assert "michael" in data["data"]["users"][0]["username"]
assert "michael@mherman.org" in data["data"]["users"][0]["email"]
assert "fletcher" in data["data"]["users"][1]["username"]
assert "fletcher@notreal.com" in data["data"]["users"][1]["email"]
assert "success" in data["status"]
def test_remove_user(self, client, test_db):
recreate_db()
user = add_user("user-to-be-removed", "remove-me@email.io")
resp_one = client.get("/users")
data = json.loads(resp_one.data.decode())
assert resp_one.status_code == 200
assert len(data["data"]["users"]) == 1
resp_two = client.delete(f"/users/{user.id}")
data = json.loads(resp_two.data.decode())
assert resp_two.status_code == 200
assert "remove-me@email.io was removed!" in data["message"]
assert "success" in data["status"]
resp_three = client.get("/users")
data = json.loads(resp_three.data.decode())
assert resp_three.status_code == 200
assert len(data["data"]["users"]) == 0
def test_remove_user_incorrect_id(self, client, test_db):
resp = client.delete("/users/999")
data = json.loads(resp.data.decode())
assert resp.status_code == 404
assert "User does not exist" in data["message"]
assert "fail" in data["status"]
def test_update_user(self, client, test_db):
user = add_user("user-to-be-updated", "update-me@email.io")
resp_one = client.put(
f"/users/{user.id}",
data=json.dumps({"username": "me", "email": "me@email.io"}),
content_type="application/json",
)
data = json.loads(resp_one.data.decode())
assert resp_one.status_code == 200
assert f"{user.id} was updated!" in data["message"]
assert "success" in data["status"]
resp_two = client.get(f"/users/{user.id}")
data = json.loads(resp_two.data.decode())
assert resp_two.status_code == 200
assert "me" in data["data"]["username"]
assert "me@email.io" in data["data"]["email"]
assert "success" in data["status"]
def test_update_user_invalid_json(self, client, test_db):
resp = client.put(
"/users/1", data=json.dumps({}), content_type="application/json"
)
data = json.loads(resp.data.decode())
assert resp.status_code == 400
assert "Invalid payload." in data["message"]
assert "fail" in data["status"]
def test_update_user_invalid_json_keys(self, client, test_db):
resp = client.put(
"/users/1",
data=json.dumps({"email": "me@email.io"}),
content_type="application/json",
)
data = json.loads(resp.data.decode())
assert resp.status_code == 400
assert "Invalid payload." in data["message"]
assert "fail" in data["status"]
def test_update_user_does_not_exist(self, client, test_db):
resp = client.put(
"/users/999",
data=json.dumps({"username": "me", "email": "me@email.io"}),
content_type="application/json",
)
data = json.loads(resp.data.decode())
assert resp.status_code == 404
assert "User does not exist" in data["message"]
assert "fail" in data["status"]
| 39.664634
| 76
| 0.591699
| 830
| 6,505
| 4.501205
| 0.106024
| 0.052998
| 0.055675
| 0.072805
| 0.821734
| 0.785332
| 0.758565
| 0.725375
| 0.663544
| 0.599036
| 0
| 0.01428
| 0.257187
| 6,505
| 163
| 77
| 39.907975
| 0.758899
| 0
| 0
| 0.520548
| 0
| 0
| 0.20661
| 0
| 0
| 0
| 0
| 0
| 0.376712
| 1
| 0.09589
| false
| 0
| 0.013699
| 0
| 0.123288
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
a9efbb63d42b1349a82591b414f4a79e8ee2ba8d
| 578
|
py
|
Python
|
json_to_dict.py
|
MattTurnock/PlanetarySciencesMatt
|
81954d2182d9577bd7327a98c45963ae42968df4
|
[
"MIT"
] | null | null | null |
json_to_dict.py
|
MattTurnock/PlanetarySciencesMatt
|
81954d2182d9577bd7327a98c45963ae42968df4
|
[
"MIT"
] | null | null | null |
json_to_dict.py
|
MattTurnock/PlanetarySciencesMatt
|
81954d2182d9577bd7327a98c45963ae42968df4
|
[
"MIT"
] | null | null | null |
from astropy import units as u
import json
import os
AE4878_path = os.path.dirname(os.path.realpath(__file__))
with open(os.path.join(AE4878_path, 'constants_main.json')) as handle:
course_constants = json.loads(handle.read())
constants = {}
for constant in course_constants:
#print(course_constants[constant]['val'], type(course_constants[constant]['val']))
#print(u.Unit(course_constants[constant]['unit']), type(u.Unit(course_constants[constant]['unit'])))
constants[constant] = course_constants[constant]['val'] * u.Unit(course_constants[constant]['unit'])
| 48.166667
| 104
| 0.747405
| 79
| 578
| 5.278481
| 0.367089
| 0.28777
| 0.330935
| 0.18705
| 0.230216
| 0.230216
| 0
| 0
| 0
| 0
| 0
| 0.015267
| 0.093426
| 578
| 12
| 105
| 48.166667
| 0.780534
| 0.311419
| 0
| 0
| 0
| 0
| 0.065491
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
e708a6b06ab68f928f24386e935f0b0dcd753034
| 66
|
py
|
Python
|
task_scheduler/fetchdata/__init__.py
|
pedrolp85/pydevice
|
39b961bb67f59ac9a9373ecc99748e07505b249e
|
[
"Apache-2.0"
] | null | null | null |
task_scheduler/fetchdata/__init__.py
|
pedrolp85/pydevice
|
39b961bb67f59ac9a9373ecc99748e07505b249e
|
[
"Apache-2.0"
] | null | null | null |
task_scheduler/fetchdata/__init__.py
|
pedrolp85/pydevice
|
39b961bb67f59ac9a9373ecc99748e07505b249e
|
[
"Apache-2.0"
] | null | null | null |
from .fetchdata import get_fetchdata
__all__ = ["get_fetchdata"]
| 16.5
| 36
| 0.787879
| 8
| 66
| 5.75
| 0.625
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 66
| 3
| 37
| 22
| 0.793103
| 0
| 0
| 0
| 0
| 0
| 0.19697
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
e75e0aa687d118230ce7f0f7979c63964ccebf40
| 53
|
py
|
Python
|
templates/cli_project/hooks/post_gen_project.py
|
cmstead/python-automation
|
4238ebf76b2e0ef62e34f360c47447a775264b73
|
[
"MIT"
] | 1
|
2020-10-30T14:44:54.000Z
|
2020-10-30T14:44:54.000Z
|
templates/library_project/hooks/post_gen_project.py
|
cmstead/python-automation
|
4238ebf76b2e0ef62e34f360c47447a775264b73
|
[
"MIT"
] | null | null | null |
templates/library_project/hooks/post_gen_project.py
|
cmstead/python-automation
|
4238ebf76b2e0ef62e34f360c47447a775264b73
|
[
"MIT"
] | null | null | null |
from os import system
system("pipenv install --dev")
| 17.666667
| 30
| 0.754717
| 8
| 53
| 5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132075
| 53
| 3
| 30
| 17.666667
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0.37037
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
e78531b89d0b0db0837619fe73753748b9106325
| 1,689
|
py
|
Python
|
answers.py
|
Tskken/classification
|
83918a522eaa0f882464fc0c98dbbdc8a20f7467
|
[
"MIT"
] | null | null | null |
answers.py
|
Tskken/classification
|
83918a522eaa0f882464fc0c98dbbdc8a20f7467
|
[
"MIT"
] | null | null | null |
answers.py
|
Tskken/classification
|
83918a522eaa0f882464fc0c98dbbdc8a20f7467
|
[
"MIT"
] | null | null | null |
"""Answer to Question 3 goes here.
Author: Dylan Blanchard, Sloan Anderson, and Stephen Johnson
Class: CSI-480-01
Assignment: PA 5 -- Supervised Learning
Due Date: Nov 30, 2018 11:59 PM
Certification of Authenticity:
I certify that this is entirely my own work, except where I have given
fully-documented references to the work of others. I understand the definition
and consequences of plagiarism and acknowledge that the assessor of this
assignment may, for the purpose of assessing this assignment:
- Reproduce this assignment and provide a copy to another member of academic
- staff; and/or Communicate a copy of this assignment to a plagiarism checking
- service (which may then retain a copy of this assignment on its database for
- the purpose of future plagiarism checking)
Champlain College CSI-480, Fall 2018
The following code was adapted by Joshua Auerbach (jauerbach@champlain.edu)
from the UC Berkeley Pacman Projects (see license and attribution below).
----------------------
Licensing Information: You are free to use or extend these projects for
educational purposes provided that (1) you do not distribute or publish
solutions, (2) you retain this notice, and (3) you provide clear
attribution to UC Berkeley, including a link to http://ai.berkeley.edu.
Attribution Information: The Pacman AI projects were developed at UC Berkeley.
The core projects and autograders were primarily created by John DeNero
(denero@cs.berkeley.edu) and Dan Klein (klein@cs.berkeley.edu).
Student side autograding was added by Brad Miller, Nick Hay, and
Pieter Abbeel (pabbeel@cs.berkeley.edu).
"""
def q3():
"""Answer question 3."""
# *** YOUR CODE HERE ***
return "a"
| 42.225
| 78
| 0.769686
| 259
| 1,689
| 5.019305
| 0.590734
| 0.053846
| 0.036923
| 0.023077
| 0.032308
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020423
| 0.159266
| 1,689
| 39
| 79
| 43.307692
| 0.89507
| 0.970397
| 0
| 0
| 0
| 0
| 0.026316
| 0
| 0
| 0
| 0
| 0.025641
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
e796cd0e82ff7259d676b4d6baae3afcd3cee738
| 62,580
|
py
|
Python
|
A_SHERIFS_CAD/lib/hm_visual/Participation_rates.py
|
fault2shaESCWG/CentralApenninesLabFAULT2RISK
|
362cbc8b8dda0c2b5ba1e0ef5c9144fb6acb2ed3
|
[
"BSD-3-Clause"
] | null | null | null |
A_SHERIFS_CAD/lib/hm_visual/Participation_rates.py
|
fault2shaESCWG/CentralApenninesLabFAULT2RISK
|
362cbc8b8dda0c2b5ba1e0ef5c9144fb6acb2ed3
|
[
"BSD-3-Clause"
] | null | null | null |
A_SHERIFS_CAD/lib/hm_visual/Participation_rates.py
|
fault2shaESCWG/CentralApenninesLabFAULT2RISK
|
362cbc8b8dda0c2b5ba1e0ef5c9144fb6acb2ed3
|
[
"BSD-3-Clause"
] | 2
|
2020-10-30T16:39:30.000Z
|
2020-11-27T17:12:43.000Z
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""SHERIFS
Seismic Hazard and Earthquake Rates In Fault Systems
Version 1.0
@author: thomas
"""
import numpy as np
import os
import matplotlib.pyplot as plt
from matplotlib.path import Path
import matplotlib.patches as patches
import xml.etree.ElementTree as ET
import pandas as pd
import matplotlib.path as mplPath
from math import pi, cos, radians
def Geom_bg(Model_name,File_bg):
Lon_bg = []
Lat_bg = []
# manually defined in the file Background geometry
geom_bg = np.genfromtxt(File_bg,dtype=[('U100'),('f8'),('f8')],skip_header = 1)
column_model = list(map(lambda i : geom_bg[i][0],range(len(geom_bg))))
index_model = np.where(np.array(column_model) == Model_name)[0]
Lon_bg = list(map(lambda i : geom_bg[i][1],index_model))
Lat_bg = list(map(lambda i : geom_bg[i][2],index_model))
return Lon_bg, Lat_bg
def FaultGeometry(File_geom,model):
#CritereDistance = 3.
NomFichier_InfosZonage = File_geom
InfosZonage = np.genfromtxt(NomFichier_InfosZonage,dtype=[('U100'),('U100'),('f8'),('f8')],skip_header = 1)
Column_model_name = list(map(lambda i : InfosZonage[i][0],range(len(InfosZonage))))
index_model = np.where(np.array(Column_model_name) == model)
Column_Fault_name = list(map(lambda i : InfosZonage[i][1],index_model[0]))
Longitudes = list(map(lambda i : InfosZonage[i][2],index_model[0]))
Latitudes = list(map(lambda i : InfosZonage[i][3],index_model[0]))
return Column_Fault_name,Longitudes,Latitudes
def reproject(latitude, longitude):
"""Returns the x & y coordinates in meters using a sinusoidal projection"""
earth_radius = 6371009 # in meters
lat_dist = pi * earth_radius / 180.0
y = [lat * lat_dist for lat in latitude]
x = [long * lat_dist * cos(radians(lat))
for lat, long in zip(latitude, longitude)]
return x, y
def area_of_polygon(x, y):
"""Calculates the area of an arbitrary polygon given its verticies"""
area = 0.0
for i in range(-1, len(x)-1):
area += x[i] * (y[i+1] - y[i-1])
return abs(area) / 2.0
def do_the_plots(mfd_X,mega_bining_in_mag,xmin,xmax,ymin,ymax,Run_name,
path_for_boxplot,title_for_boxplot,data_on_fault_available,
data_M,data_sig_M,data_rate,data_sig_rate,data_type,sub_area_flag):
for i in range(len(mfd_X)):
plt.scatter(mega_bining_in_mag,mfd_X[i], c='darkcyan', s=50, edgecolor='',marker = '_',alpha = 0.5)
axes = plt.gca()
axes.set_xlim([xmin,xmax])
axes.set_ylim([ymin,ymax])
for index_mag in range(len(mega_bining_in_mag)):
rate_plus = np.percentile(mfd_X,84,axis=0)[index_mag]
rate_minus = np.percentile(mfd_X,16,axis=0)[index_mag]
mag = mega_bining_in_mag[index_mag]
mag_plus = mag+0.05
mag_minus = mag-0.05
#print(rate_plus,rate_minus,mag_plus,mag_minus)
verts = [(mag_minus, rate_minus ),
(mag_minus, rate_plus),
(mag_plus, rate_plus),
(mag_plus, rate_minus),
(mag_minus, rate_minus)]
codes = [Path.MOVETO,
Path.LINETO,
Path.LINETO,
Path.LINETO,
Path.CLOSEPOLY]
path_poly = Path(verts, codes)
patch = patches.PathPatch(path_poly,facecolor = '#598556', lw = 0., alpha = 0.15)
axes.add_patch(patch)
plt.scatter(mega_bining_in_mag,np.percentile(mfd_X,50,axis=0),
c='darkgreen', s=25, edgecolor='',marker = 'o',alpha = 0.8)
plt.scatter(mega_bining_in_mag,np.percentile(mfd_X,16,axis=0),
c='darkgreen', s=60, edgecolor='',marker = '_',alpha = 0.8)
plt.scatter(mega_bining_in_mag,np.percentile(mfd_X,84,axis=0),
c='darkgreen', s=60, edgecolor='',marker = '_',alpha = 0.8)
plt.plot(mega_bining_in_mag,np.array(mfd_X).mean(axis=0),
color='darkgreen', linewidth = 2)
plt.grid()
#print('in Participation rates')
#draw the data on the plot if they are availabla and the plot is cumulative
if data_on_fault_available == True and sub_area_flag == False:
for i in range(len(data_M)):
verts = [(data_M[i] - data_sig_M[i]-0.02, data_rate[i] - data_sig_rate[i]-0.01 * data_rate[i]),
(data_M[i] - data_sig_M[i]-0.02, data_rate[i] + data_sig_rate[i]+0.01 * data_rate[i]),
(data_M[i] + data_sig_M[i]+0.02, data_rate[i] + data_sig_rate[i]+0.01 * data_rate[i]),
(data_M[i] + data_sig_M[i]+0.02, data_rate[i] - data_sig_rate[i]-0.01 * data_rate[i]),
(data_M[i] - data_sig_M[i]-0.02, data_rate[i] - data_sig_rate[i]-0.01 * data_rate[i])]
codes = [Path.MOVETO,
Path.LINETO,
Path.LINETO,
Path.LINETO,
Path.CLOSEPOLY]
path = Path(verts, codes)
#print(verts)
if data_type[i] == 'cat' :
patch = patches.PathPatch(path,facecolor = 'red', lw = 0.3, alpha = 0.2)
plt.scatter(data_M[i],data_rate[i],color='red',s=4,marker = 'o',alpha=0.6)
if data_type[i] == 'pal' :
patch = patches.PathPatch(path,facecolor = 'indigo', lw = 0.3, alpha = 0.2)
plt.scatter(data_M[i],data_rate[i],color='indigo',s=4,marker = 'o',alpha=0.6)
axes.add_patch(patch)
plt.yscale('log')
plt.title(title_for_boxplot)
plt.savefig(path_for_boxplot ,dpi = 100, transparent=True)
plt.close()
file_eq_rate = open(path_for_boxplot[:-3]+'txt','w')
index_mag=0
for mag in mega_bining_in_mag:
file_eq_rate.write(str(mag)+'\t'+str(np.percentile(mfd_X,16,axis=0)[index_mag])+'\t'
+str(np.percentile(mfd_X,50,axis=0)[index_mag])+'\t'
+str(np.array(mfd_X).mean(axis=0)[index_mag])+'\t'
+str(np.percentile(mfd_X,84,axis=0)[index_mag])+'\n')
index_mag+=1
file_eq_rate.close()
def plt_EQ_rates(Run_name,mega_MFD,df_mega_MFD, scenarios_names_list, ScL_complet_list, ScL_list, Model_list,BG_hyp_list,
dimension_used_list,faults_name_list,sample_list,b_value_list,MFD_type_list,m_Mmax,
mega_bining_in_mag,a_s_model,b_sample,sm_sample,Mt_sample,plot_mfd,plot_as_rep,plot_Mmax,xmin,xmax,ymin,ymax,
file_faults_data,File_bg,File_geom,sub_area_file,File_prop):
#extract the faults data
faults_data = np.genfromtxt(file_faults_data,dtype=[('U100000'),('U100000'),('U100000'),('f8'),('f8'),('f8'),('f8')]
,delimiter = '\t',skip_header = 1)
#print faults_data
print(faults_data)
data_model = list(map(lambda i : faults_data[i][0], range(len(faults_data))))
data_fault_name =list( map(lambda i : faults_data[i][1], range(len(faults_data))))
data_type =list( map(lambda i : faults_data[i][2], range(len(faults_data))))
data_M =list( map(lambda i : float(faults_data[i][3]), range(len(faults_data))))
data_sig_M =list( map(lambda i : float(faults_data[i][4]), range(len(faults_data))))
data_rate = list(map(lambda i : float(faults_data[i][5]), range(len(faults_data))))
data_sig_rate =list( map(lambda i : float(faults_data[i][6]), range(len(faults_data))))
magnitude_groups = np.linspace(xmin,xmax,int((xmax-xmin)*10)+1)
#print df_mega_MFD
'''############################################
###############################################
# for each model extract the data
###############################################
############################################'''
for model in Model_list:
i_mfd = 0
while mega_MFD[i_mfd][3] != model :
i_mfd += 1
# input_faults_file = (str(Run_name) + '/' + str(mega_MFD[i_mfd][3]) + '/' + 'bg_' + str(mega_MFD[i_mfd][4])
# + '/' + str(mega_MFD[i_mfd][0]) + '_' + str(mega_MFD[i_mfd][1]) + '_' + str(mega_MFD[i_mfd][2])
# + '/sc_' + str(mega_MFD[i_mfd][8]) + '/faults_n_scenarios.txt')
#
#
# fault_names = np.genfromtxt(input_faults_file,dtype=[('U1000000')],delimiter = '\n')
# #extract from the text file
# if np.size(fault_names) == 1 :
# list_fault_names = str(fault_names)[2::]
# list_fault_names = list_fault_names[:-3]
# list_fault_names = list_fault_names.split(' ')
# else :
# list_fault_names = str(np.array(fault_names[0]))[2::]
# list_fault_names = list_fault_names[:-3]
# list_fault_names = list_fault_names.split(' ') #adapt format to be usable (there is probably a better way to do that)
Prop = np.genfromtxt(File_prop,
dtype=[('U100'),('U100'),('f8'),('U100'),('U100'),('f8'),('f8'),('f8'),
('f8'),('f8'),('U100'),('f8')],skip_header = 1)
Column_model_name = list(map(lambda i : Prop[i][0],range(len(Prop))))
Column_fault_name = list(map(lambda i : Prop[i][1],range(len(Prop))))
index_model = np.where(np.array(Column_model_name) == str(mega_MFD[i_mfd][3]))[0]
Prop = np.take(Prop,index_model)
faults_names = np.array(Column_fault_name[index_model[0]:index_model[-1]+1])
faults_names = list(faults_names)
for fault_name in faults_names:
plot_for_all_faults = True
if fault_name in data_fault_name or plot_for_all_faults == True:
label_for_boxplot = []
#data_for_boxplot = []
data_for_boxplot_cum = []
#find if there is data conserning that fault
self_data_on_fault_available = False
self_data_type = []
self_data_M = []
self_data_sig_M = []
self_data_rate = []
self_data_sig_rate = []
if fault_name in data_fault_name:
index_fault_in_data = np.where(np.array(data_fault_name)==fault_name)[0]
for index_i in index_fault_in_data:
if data_model[index_i] == model :
self_data_on_fault_available = True
self_data_type.append(data_type[index_i])
self_data_M.append(data_M[index_i])
self_data_sig_M.append(data_sig_M[index_i])
self_data_rate.append(data_rate[index_i])
self_data_sig_rate.append(data_sig_rate[index_i])
df_fault_mfd = df_mega_MFD[(df_mega_MFD.Model == model) & (df_mega_MFD.source.str.contains(fault_name))]
if df_fault_mfd.empty == False:
df_fault_mfd.columns = ['selected_ScL','dim_used','str_all_data','Model','BG_hyp',
'b_min','b_max','MFD_type','scenario_set','sample','source',
'4.0','4.1','4.2','4.3','4.4','4.5','4.6','4.7','4.8','4.9',
'5.0','5.1','5.2','5.3','5.4','5.5','5.6','5.7','5.8','5.9',
'6.0','6.1','6.2','6.3','6.4','6.5','6.6','6.7','6.8','6.9',
'7.0','7.1','7.2','7.3','7.4','7.5','7.6','7.7','7.8','7.9',
'8.0','8.1','8.2','8.3','8.4','8.5','8.6','8.7','8.8','8.9',
'9.0','9.1','9.2','9.3','9.4','9.5','9.6','9.7','9.8','9.9']
grouped_df_mfd = df_fault_mfd.groupby(['selected_ScL','dim_used','str_all_data','BG_hyp',
'b_min','b_max','MFD_type','scenario_set','sample']).sum()
# #print 'grouped_df_mfd',grouped_df_mfd
# index_mag = 0
# for group in magnitude_groups: #loop on the magnitudes
#
# reccurence_cum_fault_mag = [] #frequency this fault produce this magnitude cumulative
# rec_cum_fault_in_model = grouped_df_mfd[str(round(group,1))].tolist()
# reccurence_cum_fault_mag.append(rec_cum_fault_in_model)
#
#
# if str(group)[-1] == '0' or str(group)[-1] == '5' :
# label_for_boxplot.append(str(group))
# else :
# label_for_boxplot.append(' ')
#
# data_for_boxplot_cum.append(reccurence_cum_fault_mag)
#
# index_mag += 1
if not os.path.exists(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum'):
os.makedirs(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum')
if not os.path.exists(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/' + fault_name):
os.makedirs(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/' + fault_name)
#data_for_boxplot = data_for_boxplot_cum
path_for_boxplot = str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/' + fault_name + '/' + 'all_' + fault_name +'.png'
title_for_boxplot = 'Frequency of rupture '+ model +' ' +'all_ ' +fault_name+' cumulative rate'
# box_plot_log(data_for_boxplot,label_for_boxplot,title_for_boxplot,self_data_on_fault_available,
# self_data_M,self_data_sig_M,self_data_rate,self_data_sig_rate,self_data_type,path_for_boxplot)
grouped_df_mfd=grouped_df_mfd.drop('source',1)
grouped_df_mfd=grouped_df_mfd.drop('Model',1)
mfd_X = grouped_df_mfd.values
mfd_X = np.array(mfd_X)
do_the_plots(mfd_X,mega_bining_in_mag,xmin,xmax,ymin,ymax,Run_name,
path_for_boxplot,title_for_boxplot,self_data_on_fault_available,
self_data_M,self_data_sig_M,self_data_rate,self_data_sig_rate,self_data_type,False)
#for the background
label_for_boxplot = []
#data_for_boxplot = []
data_for_boxplot_cum = []
#find if there is data conserning that fault
self_data_on_fault_available = False
self_data_type = []
self_data_M = []
self_data_sig_M = []
self_data_rate = []
self_data_sig_rate = []
df_fault_mfd = df_mega_MFD[(df_mega_MFD.Model == model) & (df_mega_MFD.source.str.contains('Background') )]
if df_fault_mfd.empty == False:
df_fault_mfd.columns = ['selected_ScL','dim_used','str_all_data','Model','BG_hyp',
'b_min','b_max','MFD_type','scenario_set','sample','source',
'4.0','4.1','4.2','4.3','4.4','4.5','4.6','4.7','4.8','4.9',
'5.0','5.1','5.2','5.3','5.4','5.5','5.6','5.7','5.8','5.9',
'6.0','6.1','6.2','6.3','6.4','6.5','6.6','6.7','6.8','6.9',
'7.0','7.1','7.2','7.3','7.4','7.5','7.6','7.7','7.8','7.9',
'8.0','8.1','8.2','8.3','8.4','8.5','8.6','8.7','8.8','8.9',
'9.0','9.1','9.2','9.3','9.4','9.5','9.6','9.7','9.8','9.9']
grouped_df_mfd = df_fault_mfd.groupby(['selected_ScL','dim_used','str_all_data','BG_hyp',
'b_min','b_max','MFD_type','scenario_set','sample']).sum()
# #print 'grouped_df_mfd',grouped_df_mfd
# index_mag = 0
# for group in magnitude_groups: #loop on the magnitudes
#
# reccurence_cum_fault_mag = [] #frequency this fault produce this magnitude cumulative
# rec_cum_fault_in_model = grouped_df_mfd[str(round(group,1))].tolist()
#
#
# reccurence_cum_fault_mag.append(rec_cum_fault_in_model)
#
#
# if str(group)[-1] == '0' or str(group)[-1] == '5' :
# label_for_boxplot.append(str(group))
# else :
# label_for_boxplot.append(' ')
#
# data_for_boxplot_cum.append(reccurence_cum_fault_mag)
#
#
# index_mag += 1
if not os.path.exists(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum'):
os.makedirs(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum')
if not os.path.exists(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/Background'):
os.makedirs(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/Background')
#data_for_boxplot = data_for_boxplot_cum
path_for_boxplot = str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/Background/' + 'all_Background.png'
title_for_boxplot = 'Frequency of rupture '+ model +' ' +'all_ Background cumulative rate'
# box_plot_log(data_for_boxplot,label_for_boxplot,title_for_boxplot,self_data_on_fault_available,
# self_data_M,self_data_sig_M,self_data_rate,self_data_sig_rate,self_data_type,path_for_boxplot)
grouped_df_mfd=grouped_df_mfd.drop('source',1)
grouped_df_mfd=grouped_df_mfd.drop('Model',1)
mfd_X = grouped_df_mfd.values
mfd_X = np.array(mfd_X)
do_the_plots(mfd_X,mega_bining_in_mag,xmin,xmax,ymin,ymax,Run_name,
path_for_boxplot,title_for_boxplot,self_data_on_fault_available,
self_data_M,self_data_sig_M,self_data_rate,self_data_sig_rate,self_data_type,False)
'''###############################
##################################
# # for each scenario
# ##################################
# ###############################'''
for model in Model_list:
#this file contains the participation rate for each of the fault with paleo for each sample
file_for_comparison = open(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/file_for_comparison.txt','w')
for MFD_type in MFD_type_list :
for scenario in scenarios_names_list :
for fault_name in faults_names:
if fault_name in data_fault_name:
label_for_boxplot = []
#data_for_boxplot = []
data_for_boxplot_cum = []
#find if there is data conserning that fault
self_data_on_fault_available = False
self_data_type = []
self_data_M = []
self_data_sig_M = []
self_data_rate = []
self_data_sig_rate = []
index_fault_in_data = np.where(np.array(data_fault_name)==fault_name)[0]
for index_i in index_fault_in_data:
if data_model[index_i] == model :
self_data_on_fault_available = True
self_data_type.append(data_type[index_i])
self_data_M.append(data_M[index_i])
self_data_sig_M.append(data_sig_M[index_i])
self_data_rate.append(data_rate[index_i])
self_data_sig_rate.append(data_sig_rate[index_i])
df_fault_mfd = df_mega_MFD[(df_mega_MFD.Model == model)
& (df_mega_MFD.source.str.contains(fault_name))
& (df_mega_MFD.scenario_set.str.contains(scenario))
& (df_mega_MFD.MFD_type.str.contains(MFD_type))]
df_fault_mfd.columns = ['selected_ScL','dim_used','str_all_data','Model','BG_hyp',
'b_min','b_max','MFD_type','scenario_set','sample','source',
'4.0','4.1','4.2','4.3','4.4','4.5','4.6','4.7','4.8','4.9',
'5.0','5.1','5.2','5.3','5.4','5.5','5.6','5.7','5.8','5.9',
'6.0','6.1','6.2','6.3','6.4','6.5','6.6','6.7','6.8','6.9',
'7.0','7.1','7.2','7.3','7.4','7.5','7.6','7.7','7.8','7.9',
'8.0','8.1','8.2','8.3','8.4','8.5','8.6','8.7','8.8','8.9',
'9.0','9.1','9.2','9.3','9.4','9.5','9.6','9.7','9.8','9.9']
grouped_df_mfd = df_fault_mfd.groupby(['selected_ScL','dim_used','str_all_data','BG_hyp',
'b_min','b_max','MFD_type','scenario_set','sample'],as_index=False).sum()
# index_mag = 0
# for group in magnitude_groups: #loop on the magnitudes
#
# reccurence_cum_fault_mag = [] #frequency this fault produce this magnitude cumulative
# rec_cum_fault_in_model = grouped_df_mfd[str(round(group,1))].tolist()
#
#
# reccurence_cum_fault_mag.append(rec_cum_fault_in_model)
#
#
# if str(group)[-1] == '0' or str(group)[-1] == '5' :
# label_for_boxplot.append(str(group))
# else :
# label_for_boxplot.append(' ')
#
# data_for_boxplot_cum.append(reccurence_cum_fault_mag)
#
#
# index_mag += 1
if not os.path.exists(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/' + fault_name + '/scenario_set'):
os.makedirs(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/' + fault_name + '/scenario_set')
#data_for_boxplot = data_for_boxplot_cum
path_for_boxplot = str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/' + fault_name + '/scenario_set/'+'MFD_'+MFD_type +'_' + scenario + '_' + fault_name +'.png'
title_for_boxplot = 'Frequency of rupture '+ model +' ' +MFD_type+' '+ scenario+' ' +fault_name+' cumulative rate'
# box_plot_log(data_for_boxplot,label_for_boxplot,title_for_boxplot,self_data_on_fault_available,
# self_data_M,self_data_sig_M,self_data_rate,self_data_sig_rate,self_data_type,path_for_boxplot)
grouped_df_mfd=grouped_df_mfd.drop('source',1)
grouped_df_mfd=grouped_df_mfd.drop('Model',1)
list_selected_ScL= grouped_df_mfd['selected_ScL']
grouped_df_mfd=grouped_df_mfd.drop('selected_ScL',1)
list_dim_used= grouped_df_mfd['dim_used']
grouped_df_mfd=grouped_df_mfd.drop('dim_used',1)
list_str_all_data= grouped_df_mfd['str_all_data']
grouped_df_mfd=grouped_df_mfd.drop('str_all_data',1)
list_BG= grouped_df_mfd['BG_hyp']
grouped_df_mfd=grouped_df_mfd.drop('BG_hyp',1)
list_b_min= grouped_df_mfd['b_min']
grouped_df_mfd=grouped_df_mfd.drop('b_min',1)
list_b_max= grouped_df_mfd['b_max']
grouped_df_mfd=grouped_df_mfd.drop('b_max',1)
grouped_df_mfd=grouped_df_mfd.drop('MFD_type',1)
grouped_df_mfd=grouped_df_mfd.drop('scenario_set',1)
indexes_for_print = np.argsort(np.array(grouped_df_mfd['sample']).astype(int))
list_samples= grouped_df_mfd['sample']
grouped_df_mfd=grouped_df_mfd.drop('sample',1)
mfd_X = grouped_df_mfd.values
mfd_X = np.array(mfd_X)
do_the_plots(mfd_X,mega_bining_in_mag,xmin,xmax,ymin,ymax,Run_name,
path_for_boxplot,title_for_boxplot,self_data_on_fault_available,
self_data_M,self_data_sig_M,self_data_rate,self_data_sig_rate,self_data_type,False)
for index__i_for_print in indexes_for_print :#zip(mfd_X,list_samples):
file_for_comparison.write(model+'\t'+MFD_type+'\t'+scenario
+'\t'+list_BG[index__i_for_print]
+'\t'+list_selected_ScL[index__i_for_print]+'_'+list_dim_used[index__i_for_print]+'_'+list_str_all_data[index__i_for_print]
+'\tbmin_'+str(list_b_min[index__i_for_print])+'_bmax_'+str(list_b_max[index__i_for_print])
+'\t'+list_samples[index__i_for_print]
+'\t'+fault_name)
for mfd_ii in mfd_X[index__i_for_print] :
file_for_comparison.write('\t'+str(mfd_ii))
file_for_comparison.write('\n')
file_for_comparison.close()
'''###############################
##################################
# for each BG
##################################
###############################'''
for model in Model_list:
if len(BG_hyp_list) > 1:
for BG_hyp in BG_hyp_list :
for fault_name in faults_names:
if fault_name in data_fault_name:
label_for_boxplot = []
#data_for_boxplot = []
data_for_boxplot_cum = []
#find if there is data conserning that fault
self_data_on_fault_available = False
self_data_type = []
self_data_M = []
self_data_sig_M = []
self_data_rate = []
self_data_sig_rate = []
index_fault_in_data = np.where(np.array(data_fault_name)==fault_name)[0]
for index_i in index_fault_in_data:
if data_model[index_i] == model :
self_data_on_fault_available = True
self_data_type.append(data_type[index_i])
self_data_M.append(data_M[index_i])
self_data_sig_M.append(data_sig_M[index_i])
self_data_rate.append(data_rate[index_i])
self_data_sig_rate.append(data_sig_rate[index_i])
df_fault_mfd = df_mega_MFD[(df_mega_MFD.Model == model)
& (df_mega_MFD.source.str.contains(fault_name))
& (df_mega_MFD.BG_hyp.str.contains(BG_hyp))]
df_fault_mfd.columns = ['selected_ScL','dim_used','str_all_data','Model','BG_hyp',
'b_min','b_max','MFD_type','scenario_set','sample','source',
'4.0','4.1','4.2','4.3','4.4','4.5','4.6','4.7','4.8','4.9',
'5.0','5.1','5.2','5.3','5.4','5.5','5.6','5.7','5.8','5.9',
'6.0','6.1','6.2','6.3','6.4','6.5','6.6','6.7','6.8','6.9',
'7.0','7.1','7.2','7.3','7.4','7.5','7.6','7.7','7.8','7.9',
'8.0','8.1','8.2','8.3','8.4','8.5','8.6','8.7','8.8','8.9',
'9.0','9.1','9.2','9.3','9.4','9.5','9.6','9.7','9.8','9.9']
grouped_df_mfd = df_fault_mfd.groupby(['selected_ScL','dim_used','str_all_data','BG_hyp',
'b_min','b_max','MFD_type','scenario_set','sample']).sum()
# index_mag = 0
# for group in magnitude_groups: #loop on the magnitudes
#
# reccurence_cum_fault_mag = [] #frequency this fault produce this magnitude cumulative
# rec_cum_fault_in_model = grouped_df_mfd[str(round(group,1))].tolist()
#
#
# reccurence_cum_fault_mag.append(rec_cum_fault_in_model)
#
#
# if str(group)[-1] == '0' or str(group)[-1] == '5' :
# label_for_boxplot.append(str(group))
# else :
# label_for_boxplot.append(' ')
#
# data_for_boxplot_cum.append(reccurence_cum_fault_mag)
#
#
# index_mag += 1
if not os.path.exists(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/' + fault_name + '/BG'):
os.makedirs(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/' + fault_name + '/BG')
#data_for_boxplot = data_for_boxplot_cum
path_for_boxplot = str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/' + fault_name + '/BG/' + BG_hyp + '_' + fault_name +'.png'
title_for_boxplot = 'Frequency of rupture '+ model +' ' + BG_hyp+' ' +fault_name+' cumulative rate'
# box_plot_log(data_for_boxplot,label_for_boxplot,title_for_boxplot,self_data_on_fault_available,
# self_data_M,self_data_sig_M,self_data_rate,self_data_sig_rate,self_data_type,path_for_boxplot)
grouped_df_mfd=grouped_df_mfd.drop('source',1)
grouped_df_mfd=grouped_df_mfd.drop('Model',1)
mfd_X = grouped_df_mfd.values
mfd_X = np.array(mfd_X)
do_the_plots(mfd_X,mega_bining_in_mag,xmin,xmax,ymin,ymax,Run_name,
path_for_boxplot,title_for_boxplot,self_data_on_fault_available,
self_data_M,self_data_sig_M,self_data_rate,self_data_sig_rate,self_data_type,False)
df_fault_mfd = df_mega_MFD[(df_mega_MFD.Model == model)
& (df_mega_MFD.source.str.contains('Background'))
& (df_mega_MFD.BG_hyp.str.contains(BG_hyp))]
label_for_boxplot = []
#data_for_boxplot = []
data_for_boxplot_cum = []
#find if there is data conserning that fault
self_data_on_fault_available = False
self_data_type = []
self_data_M = []
self_data_sig_M = []
self_data_rate = []
self_data_sig_rate = []
if df_fault_mfd.empty == False:
df_fault_mfd.columns = ['selected_ScL','dim_used','str_all_data','Model','BG_hyp',
'b_min','b_max','MFD_type','scenario_set','sample','source',
'4.0','4.1','4.2','4.3','4.4','4.5','4.6','4.7','4.8','4.9',
'5.0','5.1','5.2','5.3','5.4','5.5','5.6','5.7','5.8','5.9',
'6.0','6.1','6.2','6.3','6.4','6.5','6.6','6.7','6.8','6.9',
'7.0','7.1','7.2','7.3','7.4','7.5','7.6','7.7','7.8','7.9',
'8.0','8.1','8.2','8.3','8.4','8.5','8.6','8.7','8.8','8.9',
'9.0','9.1','9.2','9.3','9.4','9.5','9.6','9.7','9.8','9.9']
grouped_df_mfd = df_fault_mfd.groupby(['selected_ScL','dim_used','str_all_data','BG_hyp',
'b_min','b_max','MFD_type','scenario_set','sample']).sum()
# #print 'grouped_df_mfd',grouped_df_mfd
# index_mag = 0
# for group in magnitude_groups: #loop on the magnitudes
#
# reccurence_cum_fault_mag = [] #frequency this fault produce this magnitude cumulative
# rec_cum_fault_in_model = grouped_df_mfd[str(round(group,1))].tolist()
#
#
# reccurence_cum_fault_mag.append(rec_cum_fault_in_model)
#
#
# if str(group)[-1] == '0' or str(group)[-1] == '5' :
# label_for_boxplot.append(str(group))
# else :
# label_for_boxplot.append(' ')
#
# data_for_boxplot_cum.append(reccurence_cum_fault_mag)
#
#
# index_mag += 1
if not os.path.exists(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum'):
os.makedirs(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum')
if not os.path.exists(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/Background'+ '/BG'):
os.makedirs(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/Background'+ '/BG')
#data_for_boxplot = data_for_boxplot_cum
path_for_boxplot = str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/Background'+ '/BG/' + BG_hyp + '_' +'_Background.png'
title_for_boxplot = 'Frequency of rupture '+ model+' ' + BG_hyp+' ' +'_ Background cumulative rate'
# box_plot_log(data_for_boxplot,label_for_boxplot,title_for_boxplot,self_data_on_fault_available,
# self_data_M,self_data_sig_M,self_data_rate,self_data_sig_rate,self_data_type,path_for_boxplot)
grouped_df_mfd=grouped_df_mfd.drop('source',1)
grouped_df_mfd=grouped_df_mfd.drop('Model',1)
mfd_X = grouped_df_mfd.values
mfd_X = np.array(mfd_X)
do_the_plots(mfd_X,mega_bining_in_mag,xmin,xmax,ymin,ymax,Run_name,
path_for_boxplot,title_for_boxplot,self_data_on_fault_available,
self_data_M,self_data_sig_M,self_data_rate,self_data_sig_rate,self_data_type,False)
'''###############################
##################################
# plot for the sub areas
##################################
###############################'''
for model in Model_list:
#extract the name and geometry of the faults
Column_Fault_name,Longitudes,Latitudes = FaultGeometry(File_geom,model)
fault_names = []
Lon = []
Lat = []
for fault_name in faults_names:
fault_names.append(fault_name)
index_fault = np.where(np.array(Column_Fault_name)== fault_name)[0]
Lon.append(np.take(Longitudes,index_fault))
Lat.append(np.take(Latitudes,index_fault))
# extract the geometry of the zone ( geometry of the background)
Lon_bg, Lat_bg = Geom_bg(model,File_bg)
#calculate the area of the background
x,y = reproject(Lat_bg,Lon_bg)
area_of_the_bg = area_of_polygon( x,y) #to be verified!!
#extract the name of the sources
df_sources_names = df_mega_MFD[(df_mega_MFD.Model == model)]
df_sources_names.columns = ['selected_ScL','dim_used','str_all_data','Model','BG_hyp',
'b_min','b_max','MFD_type','scenario_set','sample','source',
'4.0','4.1','4.2','4.3','4.4','4.5','4.6','4.7','4.8','4.9',
'5.0','5.1','5.2','5.3','5.4','5.5','5.6','5.7','5.8','5.9',
'6.0','6.1','6.2','6.3','6.4','6.5','6.6','6.7','6.8','6.9',
'7.0','7.1','7.2','7.3','7.4','7.5','7.6','7.7','7.8','7.9',
'8.0','8.1','8.2','8.3','8.4','8.5','8.6','8.7','8.8','8.9',
'9.0','9.1','9.2','9.3','9.4','9.5','9.6','9.7','9.8','9.9']
source_names = np.array(df_sources_names.source.unique())
bbPath_sub_areas = []
if os.path.exists(sub_area_file):
read_sub_area_file = open(sub_area_file,'rU')
lines_sub_area = read_sub_area_file.readlines()
sub_area_names = []
sub_area_coord = []
sub_area_lon = []
sub_area_lat = []
for line in lines_sub_area:
model_sub_area = line.split('\t')[0]
if model == model_sub_area:
sub_area_names.append(line.split('\t')[1])
sub_area_coord.append(line.split('\t')[2:])
sub_area_lon_i = []
sub_area_lat_i = []
for sub_area_coord_i in line.split('\t')[2:]:
if not '\n' in sub_area_coord_i.split(','):
if not '' in sub_area_coord_i.split(','):
sub_area_lon_i.append(float(sub_area_coord_i.split(',')[1]))
sub_area_lat_i.append(float(sub_area_coord_i.split(',')[0]))
sub_area_lon.append(sub_area_lon_i)
sub_area_lat.append(sub_area_lat_i)
if not os.path.exists(str(Run_name) + '/analysis/figures/catalogue/sub_area'):
os.makedirs(str(Run_name) + '/analysis/figures/catalogue/sub_area')
if not os.path.exists(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum'):
os.makedirs(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum')
if not os.path.exists(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/'+sub_area_names[-1]):
os.makedirs(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/'+sub_area_names[-1])
Poly_sub = []
for x1,y1 in zip(sub_area_lon_i,sub_area_lat_i): # creation du polygon de la zone
Poly_sub.append((x1,y1))
bbPath_sub_area_i = mplPath.Path(Poly_sub)
bbPath_sub_areas.append(bbPath_sub_area_i)
# calculate the area of the sub_area
x,y = reproject(sub_area_lat_i,sub_area_lon_i)
area_of_the_sub_area = area_of_polygon( x,y) #to be verified!!
faults_in_sub_area = []
index_fault = 0
for fault_name in fault_names:
nb_point_in_sub_area = 0
for lon_i,lat_i in zip(Lon[index_fault],Lat[index_fault]):
if bbPath_sub_area_i.contains_point((lon_i,lat_i)) == 1: #test to know if the point is in the sub_area
nb_point_in_sub_area += 1
if nb_point_in_sub_area > len(Lon[index_fault])/2.: #if more than half the points of the trace are in the sub area
faults_in_sub_area.append(fault_name) #the fault is in the sub area
index_fault +=1
sources_in_sub_area = []
ratio_in_subarea = []
for source_name_i in source_names:
#print 'source_name_i',source_name_i
nb_faults_in_source_n_area = 0.
for fault_name in faults_in_sub_area:
if fault_name in source_name_i:
nb_faults_in_source_n_area += 1.
if not ']_f_' in source_name_i : #it's a single fault
if nb_faults_in_source_n_area >= 1.:
sources_in_sub_area.append(source_name_i)
ratio_in_subarea.append(1.)
#print source_name_i
else :
nb_faults_in_source = len(source_name_i.split(']_f_'))
if nb_faults_in_source_n_area >= 1.:
sources_in_sub_area.append(source_name_i)
ratio_in_subarea.append(nb_faults_in_source_n_area/nb_faults_in_source)
#print source_name_i,nb_faults_in_source_n_area/nb_faults_in_source
df_subarea_mfd = pd.DataFrame(columns=['selected_ScL','dim_used','str_all_data','Model','BG_hyp',
'b_min','b_max','MFD_type','scenario_set','sample','source',
'4.0','4.1','4.2','4.3','4.4','4.5','4.6','4.7','4.8','4.9',
'5.0','5.1','5.2','5.3','5.4','5.5','5.6','5.7','5.8','5.9',
'6.0','6.1','6.2','6.3','6.4','6.5','6.6','6.7','6.8','6.9',
'7.0','7.1','7.2','7.3','7.4','7.5','7.6','7.7','7.8','7.9',
'8.0','8.1','8.2','8.3','8.4','8.5','8.6','8.7','8.8','8.9',
'9.0','9.1','9.2','9.3','9.4','9.5','9.6','9.7','9.8','9.9'], index = range(len(sources_in_sub_area)*10000))
index_source = 0
#print sources_in_sub_area
for source in sources_in_sub_area:
df_source_i_mfd = df_mega_MFD[(df_mega_MFD.Model == model) & (df_mega_MFD.source == source)]
if df_source_i_mfd.empty == False:
df_source_i_mfd.columns = ['selected_ScL','dim_used','str_all_data','Model','BG_hyp',
'b_min','b_max','MFD_type','scenario_set','sample','source',
'4.0','4.1','4.2','4.3','4.4','4.5','4.6','4.7','4.8','4.9',
'5.0','5.1','5.2','5.3','5.4','5.5','5.6','5.7','5.8','5.9',
'6.0','6.1','6.2','6.3','6.4','6.5','6.6','6.7','6.8','6.9',
'7.0','7.1','7.2','7.3','7.4','7.5','7.6','7.7','7.8','7.9',
'8.0','8.1','8.2','8.3','8.4','8.5','8.6','8.7','8.8','8.9',
'9.0','9.1','9.2','9.3','9.4','9.5','9.6','9.7','9.8','9.9']
# print
# print
# print source,df_source_i_mfd['5.0']
# print 'ratio',ratio_in_subarea[index_source]
for magnitude in ['4.0','4.1','4.2','4.3','4.4','4.5','4.6','4.7','4.8','4.9',
'5.0','5.1','5.2','5.3','5.4','5.5','5.6','5.7','5.8','5.9',
'6.0','6.1','6.2','6.3','6.4','6.5','6.6','6.7','6.8','6.9',
'7.0','7.1','7.2','7.3','7.4','7.5','7.6','7.7','7.8','7.9',
'8.0','8.1','8.2','8.3','8.4','8.5','8.6','8.7','8.8','8.9',
'9.0','9.1','9.2','9.3','9.4','9.5','9.6','9.7','9.8','9.9']:
df_source_i_mfd[magnitude] = df_source_i_mfd[magnitude].astype(float)*ratio_in_subarea[index_source]
#print source,df_source_i_mfd['5.0']
df_subarea_mfd = pd.concat([df_subarea_mfd,df_source_i_mfd])
index_source+=1
#extract the background
df_source_i_mfd = df_mega_MFD[(df_mega_MFD.Model == model) & (df_mega_MFD.source == 'Background')]
if df_source_i_mfd.empty == False:
df_source_i_mfd.columns = ['selected_ScL','dim_used','str_all_data','Model','BG_hyp',
'b_min','b_max','MFD_type','scenario_set','sample','source',
'4.0','4.1','4.2','4.3','4.4','4.5','4.6','4.7','4.8','4.9',
'5.0','5.1','5.2','5.3','5.4','5.5','5.6','5.7','5.8','5.9',
'6.0','6.1','6.2','6.3','6.4','6.5','6.6','6.7','6.8','6.9',
'7.0','7.1','7.2','7.3','7.4','7.5','7.6','7.7','7.8','7.9',
'8.0','8.1','8.2','8.3','8.4','8.5','8.6','8.7','8.8','8.9',
'9.0','9.1','9.2','9.3','9.4','9.5','9.6','9.7','9.8','9.9']
for magnitude in ['4.0','4.1','4.2','4.3','4.4','4.5','4.6','4.7','4.8','4.9',
'5.0','5.1','5.2','5.3','5.4','5.5','5.6','5.7','5.8','5.9',
'6.0','6.1','6.2','6.3','6.4','6.5','6.6','6.7','6.8','6.9',
'7.0','7.1','7.2','7.3','7.4','7.5','7.6','7.7','7.8','7.9',
'8.0','8.1','8.2','8.3','8.4','8.5','8.6','8.7','8.8','8.9',
'9.0','9.1','9.2','9.3','9.4','9.5','9.6','9.7','9.8','9.9']:
df_source_i_mfd[magnitude] = df_source_i_mfd[magnitude].astype(float)*float(area_of_the_sub_area)/float(area_of_the_bg)
#print source,df_source_i_mfd['5.0']
df_subarea_mfd = pd.concat([df_subarea_mfd,df_source_i_mfd])
#print in a file for later comparisons
grouped_df_mfd_for_print = df_subarea_mfd.groupby(['selected_ScL','dim_used','str_all_data','BG_hyp',
'b_min','b_max','MFD_type','scenario_set','sample'], as_index = False).sum()
file_rate_sub_area = (str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/'
+ model + '/'+sub_area_names[-1]+
'/eq_rate_all_'+model+'_'+sub_area_names[-1]+'.txt')
grouped_df_mfd_for_print.to_csv(path_or_buf = file_rate_sub_area ,sep = '\t',index=False)
del grouped_df_mfd_for_print
grouped_df_mfd = df_subarea_mfd.groupby(['selected_ScL','dim_used','str_all_data','BG_hyp',
'b_min','b_max','MFD_type','scenario_set','sample']).sum()
# data_for_boxplot = data_for_boxplot_cum
path_for_boxplot = str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/'+sub_area_names[-1]+'/' + sub_area_names[-1]+'.png'
title_for_boxplot = 'Frequency of rupture '+ model +' ' +'all_ ' + sub_area_names[-1]+' cumulative rate'
# box_plot_log(data_for_boxplot,label_for_boxplot,title_for_boxplot,self_data_on_fault_available,
# self_data_M,self_data_sig_M,self_data_rate,self_data_sig_rate,self_data_type,path_for_boxplot,xmin,xmax,ymin,ymax)
mfd_X = grouped_df_mfd.values
do_the_plots(mfd_X,mega_bining_in_mag,xmin,xmax,ymin,ymax,Run_name,
path_for_boxplot,title_for_boxplot,self_data_on_fault_available,
self_data_M,self_data_sig_M,self_data_rate,self_data_sig_rate,self_data_type,True)
#for a bit more detail
for MFD_type in MFD_type_list :
for scenario in scenarios_names_list :
if not os.path.exists(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/'+sub_area_names[-1]+ '/' + MFD_type):
os.makedirs(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/'+sub_area_names[-1]+ '/' + MFD_type)
if not os.path.exists(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/'+sub_area_names[-1]+ '/' + MFD_type+ '/' +scenario):
os.makedirs(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/'+sub_area_names[-1]+ '/' + MFD_type+ '/' +scenario)
df_source_i_mfd = df_subarea_mfd[(df_subarea_mfd.scenario_set == scenario) & (df_subarea_mfd.MFD_type == MFD_type)]
if df_source_i_mfd.empty == False:
df_source_i_mfd.columns = ['selected_ScL','dim_used','str_all_data','Model','BG_hyp',
'b_min','b_max','MFD_type','scenario_set','sample','source',
'4.0','4.1','4.2','4.3','4.4','4.5','4.6','4.7','4.8','4.9',
'5.0','5.1','5.2','5.3','5.4','5.5','5.6','5.7','5.8','5.9',
'6.0','6.1','6.2','6.3','6.4','6.5','6.6','6.7','6.8','6.9',
'7.0','7.1','7.2','7.3','7.4','7.5','7.6','7.7','7.8','7.9',
'8.0','8.1','8.2','8.3','8.4','8.5','8.6','8.7','8.8','8.9',
'9.0','9.1','9.2','9.3','9.4','9.5','9.6','9.7','9.8','9.9']
grouped_df_mfd = df_source_i_mfd.groupby(['selected_ScL','dim_used','str_all_data','BG_hyp',
'b_min','b_max','MFD_type','scenario_set','sample']).sum()
# data_for_boxplot = data_for_boxplot_cum
path_for_boxplot = str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/'+sub_area_names[-1]+ '/' + MFD_type+ '/' +scenario+'/' + sub_area_names[-1]+'.png'
title_for_boxplot = 'Frequency of rupture '+ model +' ' +scenario+' ' +MFD_type+' ' + sub_area_names[-1]+' cumulative rate'
# box_plot_log(data_for_boxplot,label_for_boxplot,title_for_boxplot,self_data_on_fault_available,
# self_data_M,self_data_sig_M,self_data_rate,self_data_sig_rate,self_data_type,path_for_boxplot,xmin,xmax,ymin,ymax)
mfd_X = grouped_df_mfd.values
do_the_plots(mfd_X,mega_bining_in_mag,xmin,xmax,ymin,ymax,Run_name,
path_for_boxplot,title_for_boxplot,self_data_on_fault_available,
self_data_M,self_data_sig_M,self_data_rate,self_data_sig_rate,self_data_type,True)
'''###############################
##################################
# plot the median moment rate magnitude for each fault
this is the magnitude for which the fault has half or more of its moment rate
spend on this magnitude of large magnitudes.
(see Shaw et all 2018)
##################################
###############################'''
#print('magnitude of median moment rate\nfor the whole logic tree')
for model in Model_list:
#print(model,'median moment mag')
file_Mmmr = open(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/'+model+'/Mmmr_' + model +'.txt','w')
for fault_name in faults_names:
file_rates = str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/' + fault_name + '/all_'+fault_name +'.txt'
rates_data = np.genfromtxt(file_rates,dtype=[('f8'),('f8'),('f8'),('f8'),('f8')])
mag = list(map(lambda i : rates_data[i][0],range(len(rates_data))))
rate_mean = list(map(lambda i : rates_data[i][3],range(len(rates_data))))
rate_inc = [] #incremental rate
for i in range(len(rate_mean)-1):
rate_inc.append(rate_mean[i]-rate_mean[i+1])
rate_inc.append(rate_mean[-1])
moment_rate=[]
for mag_i,rate_i in zip(mag,rate_inc):
moment_rate.append(10. ** (1.5 * mag_i + 9.1) * rate_i)
i=0
while sum(moment_rate[:i+1])<0.5*sum(moment_rate):
i+=1
file_Mmmr.write(fault_name+'\t'+str(round(mag[i],1))+'\n')
#print(fault_name, mag[i])
file_Mmmr.close()
#print('for more specific branches')
for model in Model_list:
#print(model)
for MFD_type in MFD_type_list :
#print('\t',MFD_type)
for scenario in scenarios_names_list :
#print('\t','\t',scenario)
file_Mmmr = open(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/'+model+'/Mmmr_' + model +'_'+MFD_type+'_'+scenario+'.txt','w')
for fault_name in faults_names:
label_for_boxplot = []
#data_for_boxplot = []
data_for_boxplot_cum = []
#find if there is data conserning that fault
self_data_on_fault_available = False
self_data_type = []
self_data_M = []
self_data_sig_M = []
self_data_rate = []
self_data_sig_rate = []
index_fault_in_data = np.where(np.array(data_fault_name)==fault_name)[0]
for index_i in index_fault_in_data:
if data_model[index_i] == model :
self_data_on_fault_available = True
self_data_type.append(data_type[index_i])
self_data_M.append(data_M[index_i])
self_data_sig_M.append(data_sig_M[index_i])
self_data_rate.append(data_rate[index_i])
self_data_sig_rate.append(data_sig_rate[index_i])
df_fault_mfd = df_mega_MFD[(df_mega_MFD.Model == model)
& (df_mega_MFD.source.str.contains(fault_name))
& (df_mega_MFD.scenario_set.str.contains(scenario))
& (df_mega_MFD.MFD_type.str.contains(MFD_type))]
df_fault_mfd.columns = ['selected_ScL','dim_used','str_all_data','Model','BG_hyp',
'b_min','b_max','MFD_type','scenario_set','sample','source',
'4.0','4.1','4.2','4.3','4.4','4.5','4.6','4.7','4.8','4.9',
'5.0','5.1','5.2','5.3','5.4','5.5','5.6','5.7','5.8','5.9',
'6.0','6.1','6.2','6.3','6.4','6.5','6.6','6.7','6.8','6.9',
'7.0','7.1','7.2','7.3','7.4','7.5','7.6','7.7','7.8','7.9',
'8.0','8.1','8.2','8.3','8.4','8.5','8.6','8.7','8.8','8.9',
'9.0','9.1','9.2','9.3','9.4','9.5','9.6','9.7','9.8','9.9']
grouped_df_mfd = df_fault_mfd.groupby(['selected_ScL','dim_used','str_all_data','BG_hyp',
'b_min','b_max','MFD_type','scenario_set','sample']).sum()
index_mag = 0
for group in magnitude_groups: #loop on the magnitudes
reccurence_cum_fault_mag = [] #frequency this fault produce this magnitude cumulative
rec_cum_fault_in_model = grouped_df_mfd[str(round(group,1))].tolist()
reccurence_cum_fault_mag.append(rec_cum_fault_in_model)
if str(group)[-1] == '0' or str(group)[-1] == '5' :
label_for_boxplot.append(str(group))
else :
label_for_boxplot.append(' ')
data_for_boxplot_cum.append(reccurence_cum_fault_mag)
index_mag += 1
if not os.path.exists(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/' + fault_name + '/scenario_set'):
os.makedirs(str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/' + fault_name + '/scenario_set')
#data_for_boxplot = data_for_boxplot_cum
path_for_boxplot = str(Run_name) + '/analysis/figures/rupture_rate_for_each_fault_cum/' + model + '/' + fault_name + '/scenario_set/'+'MFD_'+MFD_type +'_' + scenario + '_' + fault_name +'.png'
title_for_boxplot = 'Frequency of rupture '+ model +' ' +MFD_type+' '+ scenario+' ' +fault_name+' cumulative rate'
# box_plot_log(data_for_boxplot,label_for_boxplot,title_for_boxplot,self_data_on_fault_available,
# self_data_M,self_data_sig_M,self_data_rate,self_data_sig_rate,self_data_type,path_for_boxplot)
grouped_df_mfd=grouped_df_mfd.drop('source',1)
grouped_df_mfd=grouped_df_mfd.drop('Model',1)
mfd_X = grouped_df_mfd.values
mfd_X = np.array(mfd_X)
#do_the_plots(mfd_X,mega_bining_in_mag,xmin,xmax,ymin,ymax,Run_name,path_for_boxplot,title_for_boxplot,self_data_on_fault_available,self_data_M,self_data_sig_M,self_data_rate,self_data_sig_rate,self_data_type,False)
mag = mega_bining_in_mag
rate_mean = np.array(mfd_X).mean(axis=0)
rate_inc = [] #incremental rate
for i in range(len(rate_mean)-1):
rate_inc.append(rate_mean[i]-rate_mean[i+1])
rate_inc.append(rate_mean[-1])
moment_rate=[]
for mag_i,rate_i in zip(mag,rate_inc):
moment_rate.append(10. ** (1.5 * mag_i + 9.1) * rate_i)
i=0
while sum(moment_rate[:i+1])<0.5*sum(moment_rate):
i+=1
#print('\t','\t','\t',fault_name, mag[i])
file_Mmmr.write(fault_name+'\t'+str(round(mag[i],1))+'\n')
file_Mmmr.close()
| 62.894472
| 235
| 0.479786
| 8,079
| 62,580
| 3.381607
| 0.045922
| 0.045681
| 0.035578
| 0.027013
| 0.791837
| 0.743045
| 0.729832
| 0.704319
| 0.684883
| 0.66552
| 0
| 0.049767
| 0.369703
| 62,580
| 994
| 236
| 62.957746
| 0.642861
| 0.167322
| 0
| 0.553968
| 0
| 0
| 0.140858
| 0.040172
| 0.006349
| 0
| 0
| 0
| 0
| 1
| 0.009524
| false
| 0
| 0.014286
| 0
| 0.030159
| 0.01746
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
e796eee6602913a59c89b070e74131654ed33809
| 615
|
py
|
Python
|
manage.py
|
morianemo/ecflow_django
|
4cf7341e5d02b428e8d008d7e2bb9a0546bda97e
|
[
"Apache-2.0"
] | 1
|
2022-02-11T07:51:11.000Z
|
2022-02-11T07:51:11.000Z
|
manage.py
|
morianemo/ecflow_django
|
4cf7341e5d02b428e8d008d7e2bb9a0546bda97e
|
[
"Apache-2.0"
] | 5
|
2020-02-17T20:39:22.000Z
|
2021-12-13T20:34:01.000Z
|
manage.py
|
morianemo/ecflow_django
|
4cf7341e5d02b428e8d008d7e2bb9a0546bda97e
|
[
"Apache-2.0"
] | 1
|
2022-02-11T07:51:11.000Z
|
2022-02-11T07:51:11.000Z
|
#!/usr/bin/env python
import os
import sys
import django
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "flow.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
"""
python manage.py supervisor --daemonize
python manage.py supervisor stop all
python manage.py supervisor start all
python ./manage.py runserver 0.0.0.0:8001
firefox http://localhost:8001
python manage.py runserver 1000
mysite/
manage.py
mysite/
__init__.py
settings.py
urls.py
wsgi.py
"""
| 21.964286
| 72
| 0.702439
| 83
| 615
| 4.963855
| 0.493976
| 0.116505
| 0.169903
| 0.174757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032854
| 0.20813
| 615
| 27
| 73
| 22.777778
| 0.813142
| 0.03252
| 0
| 0
| 0
| 0
| 0.174797
| 0.089431
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.571429
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
e7c56676517084d0b372b89434490cc0d0029537
| 80
|
py
|
Python
|
helloflask/helloflask/static/__init__.py
|
omegaml/apps
|
5d4ad4b31b934363acaa29aed6f5a59e40d12fc7
|
[
"Apache-2.0"
] | null | null | null |
helloflask/helloflask/static/__init__.py
|
omegaml/apps
|
5d4ad4b31b934363acaa29aed6f5a59e40d12fc7
|
[
"Apache-2.0"
] | 1
|
2022-03-12T01:02:39.000Z
|
2022-03-12T01:02:39.000Z
|
helloflask/helloflask/templates/__init__.py
|
omegaml/apps
|
5d4ad4b31b934363acaa29aed6f5a59e40d12fc7
|
[
"Apache-2.0"
] | null | null | null |
# placeholder to make setup(..., include_package_data=True) include this folder
| 40
| 79
| 0.7875
| 11
| 80
| 5.545455
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1125
| 80
| 1
| 80
| 80
| 0.859155
| 0.9625
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
e7c5d37e0936fe67d0e7a21da90e8b585ced12a7
| 161
|
py
|
Python
|
venv/Scripts/django-admin.py
|
gting/HttpRunnerManager-master
|
2f1514da0dfbfe00556ac984471513cadd20c73f
|
[
"MIT"
] | null | null | null |
venv/Scripts/django-admin.py
|
gting/HttpRunnerManager-master
|
2f1514da0dfbfe00556ac984471513cadd20c73f
|
[
"MIT"
] | null | null | null |
venv/Scripts/django-admin.py
|
gting/HttpRunnerManager-master
|
2f1514da0dfbfe00556ac984471513cadd20c73f
|
[
"MIT"
] | null | null | null |
#!D:\HttpRunnerManager-master\venv\Scripts\python3.exe
from django.core import management
if __name__ == "__main__":
management.execute_from_command_line()
| 26.833333
| 54
| 0.795031
| 20
| 161
| 5.85
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006849
| 0.093168
| 161
| 5
| 55
| 32.2
| 0.794521
| 0.329193
| 0
| 0
| 0
| 0
| 0.074766
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
e7da28e6cc9aed34fa6c8985bcf133c6180ef936
| 29
|
py
|
Python
|
scrapy_patterns/spiderlings/__init__.py
|
oliverdozsa/scrapy-patterns
|
52ec8b65edfc2046a189cbf437854bced2dd2ce4
|
[
"MIT"
] | 2
|
2019-12-28T01:04:19.000Z
|
2019-12-29T07:18:13.000Z
|
scrapy_patterns/spiderlings/__init__.py
|
oliverdozsa/scrapy-patterns
|
52ec8b65edfc2046a189cbf437854bced2dd2ce4
|
[
"MIT"
] | 3
|
2019-12-29T18:07:32.000Z
|
2020-04-04T20:26:28.000Z
|
scrapy_patterns/spiderlings/__init__.py
|
oliverdozsa/scrapy-patterns
|
52ec8b65edfc2046a189cbf437854bced2dd2ce4
|
[
"MIT"
] | 1
|
2019-12-28T01:04:23.000Z
|
2019-12-28T01:04:23.000Z
|
"""
Contains spiderlings.
"""
| 9.666667
| 21
| 0.655172
| 2
| 29
| 9.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 29
| 3
| 22
| 9.666667
| 0.730769
| 0.724138
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
e7e18f58791a61196de568301ecbab96ce7ba39a
| 142
|
py
|
Python
|
utils/userinput.py
|
Nobodybe/HANDS
|
b853591f529444e162b944065e0ed36d8665d93c
|
[
"Apache-2.0"
] | null | null | null |
utils/userinput.py
|
Nobodybe/HANDS
|
b853591f529444e162b944065e0ed36d8665d93c
|
[
"Apache-2.0"
] | null | null | null |
utils/userinput.py
|
Nobodybe/HANDS
|
b853591f529444e162b944065e0ed36d8665d93c
|
[
"Apache-2.0"
] | 1
|
2021-05-17T14:53:59.000Z
|
2021-05-17T14:53:59.000Z
|
from pymouse import PyMouse
from pykeyboard import PyKeyboard
import time
k = PyKeyboard()
m = PyMouse()
time.sleep(5)
k.tap_key(k.right_key)
| 17.75
| 33
| 0.78169
| 23
| 142
| 4.73913
| 0.521739
| 0.293578
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008065
| 0.126761
| 142
| 7
| 34
| 20.285714
| 0.870968
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.428571
| 0
| 0.428571
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
99b516ce18fe8e2616ddcf449bf24be562f68fe7
| 12,059
|
py
|
Python
|
2021/day1.py
|
boisvert42/advent-of-code
|
3e146d95048bfe3b48e15b85e82a567905226813
|
[
"MIT"
] | 1
|
2021-12-01T18:25:43.000Z
|
2021-12-01T18:25:43.000Z
|
2021/day1.py
|
boisvert42/advent-of-code
|
3e146d95048bfe3b48e15b85e82a567905226813
|
[
"MIT"
] | null | null | null |
2021/day1.py
|
boisvert42/advent-of-code
|
3e146d95048bfe3b48e15b85e82a567905226813
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 30 22:35:24 2021
@author: aboisvert
"""
#%% Part 1
"""
As the submarine drops below the surface of the ocean, it automatically performs a sonar sweep of the nearby sea floor. On a small screen, the sonar sweep report (your puzzle input) appears: each line is a measurement of the sea floor depth as the sweep looks further and further away from the submarine.
For example, suppose you had the following report:
199
200
208
210
200
207
240
269
260
263
This report indicates that, scanning outward from the submarine, the sonar sweep found depths of 199, 200, 208, 210, and so on.
The first order of business is to figure out how quickly the depth increases, just so you know what you're dealing with - you never know if the keys will get carried into deeper water by an ocean current or a fish or something.
To do this, count the number of times a depth measurement increases from the previous measurement. (There is no measurement before the first measurement.)
"""
a = '''
190
168
166
163
170
160
171
166
161
167
175
178
193
189
188
191
193
192
193
180
177
178
176
177
196
203
211
209
210
209
225
219
229
214
202
205
208
207
208
204
208
206
205
208
209
219
236
241
243
239
251
278
279
284
283
287
244
245
257
253
272
276
287
288
304
306
313
312
284
293
288
289
290
283
298
309
300
298
297
291
292
304
301
303
306
280
286
266
270
272
274
269
270
271
270
266
274
275
278
280
283
284
283
267
282
283
285
286
302
307
309
311
309
315
314
317
319
318
314
313
330
333
335
365
370
361
362
367
358
361
359
358
349
352
367
355
359
360
355
357
359
368
375
383
385
399
410
413
414
418
417
418
419
420
421
422
434
439
438
440
441
443
445
444
446
450
452
463
466
476
464
467
470
474
460
473
475
476
482
481
489
495
505
503
508
493
508
509
508
513
538
536
521
526
523
536
537
558
557
563
557
560
561
560
575
578
582
583
587
574
576
581
580
579
581
599
601
602
606
607
606
605
606
607
611
607
606
605
603
597
598
593
584
585
587
619
614
616
596
627
628
630
631
634
633
631
632
641
637
647
643
651
625
593
601
602
599
592
593
597
601
592
612
611
612
616
620
622
627
626
629
632
642
654
655
657
658
661
662
653
655
658
665
674
664
694
689
692
693
681
686
687
698
701
706
687
689
706
710
716
723
733
735
734
733
727
729
744
750
736
739
745
743
742
743
741
739
742
748
755
752
762
768
767
760
758
756
765
779
796
797
798
796
797
775
779
780
781
784
794
799
793
797
795
793
802
800
809
814
826
830
820
804
805
809
816
817
845
850
861
857
858
863
868
873
880
891
865
876
890
925
927
926
932
941
942
943
942
943
942
947
938
940
943
949
953
954
966
970
967
963
961
964
966
973
983
975
974
980
977
988
989
991
989
990
1000
1001
1006
996
998
997
996
979
981
1014
1018
1023
996
995
1006
1000
994
996
999
1001
999
1000
1002
1001
1003
1005
1006
1014
1037
1039
1040
1066
1059
1081
1085
1072
1069
1070
1069
1080
1079
1106
1105
1118
1127
1150
1151
1147
1133
1134
1135
1136
1137
1138
1140
1138
1149
1150
1149
1143
1136
1137
1140
1141
1140
1141
1142
1155
1182
1173
1167
1163
1172
1186
1180
1176
1179
1178
1189
1201
1205
1201
1200
1209
1207
1218
1226
1232
1234
1233
1225
1228
1229
1219
1221
1220
1199
1201
1204
1212
1224
1208
1209
1210
1220
1219
1215
1231
1240
1246
1249
1281
1247
1251
1255
1263
1260
1292
1293
1305
1311
1316
1319
1323
1307
1326
1319
1316
1327
1328
1333
1342
1337
1338
1335
1353
1368
1371
1386
1382
1381
1378
1374
1391
1396
1397
1399
1388
1380
1406
1396
1398
1397
1401
1402
1401
1369
1372
1373
1370
1371
1362
1375
1378
1389
1400
1402
1404
1420
1425
1429
1425
1406
1412
1425
1421
1401
1402
1399
1400
1412
1387
1386
1390
1392
1396
1402
1404
1405
1406
1379
1384
1419
1420
1422
1427
1423
1426
1421
1420
1428
1424
1428
1436
1432
1439
1444
1457
1458
1460
1464
1459
1460
1493
1489
1510
1503
1505
1506
1492
1515
1517
1506
1488
1485
1488
1471
1473
1496
1499
1497
1501
1475
1465
1460
1458
1462
1467
1466
1483
1471
1476
1471
1493
1492
1494
1503
1504
1505
1506
1507
1510
1492
1493
1500
1498
1528
1533
1538
1539
1575
1576
1557
1555
1557
1579
1589
1594
1599
1600
1598
1609
1619
1618
1617
1629
1613
1619
1620
1631
1627
1628
1630
1631
1616
1631
1651
1657
1658
1637
1642
1641
1651
1652
1647
1649
1658
1649
1629
1631
1632
1619
1617
1620
1634
1632
1633
1636
1634
1641
1639
1642
1647
1640
1609
1611
1607
1597
1601
1606
1635
1637
1647
1640
1650
1654
1656
1636
1626
1624
1625
1615
1620
1621
1618
1614
1615
1616
1622
1623
1625
1627
1626
1627
1632
1644
1659
1668
1671
1672
1673
1676
1684
1709
1735
1734
1731
1738
1740
1748
1751
1741
1740
1739
1741
1739
1750
1762
1766
1765
1764
1775
1774
1775
1776
1777
1778
1757
1755
1756
1757
1756
1776
1780
1781
1790
1792
1791
1803
1810
1801
1800
1831
1834
1818
1823
1806
1801
1812
1814
1837
1842
1843
1842
1841
1836
1828
1835
1846
1845
1855
1854
1858
1856
1865
1866
1865
1850
1859
1872
1868
1866
1867
1862
1869
1870
1882
1896
1901
1903
1917
1939
1942
1945
1955
1956
1954
1974
1944
1943
1942
1956
1961
1957
1953
1954
1955
1958
1961
1948
1947
1959
1960
1961
1969
1996
1992
1993
1988
1994
2021
2020
2003
1999
1995
2006
2002
1994
1995
1998
1995
1988
1992
1997
2003
2006
2007
2010
2006
2015
2000
1998
1993
2003
2004
2006
2005
2008
2019
2020
2030
2031
2036
2037
2038
2037
2036
2038
2031
2043
2044
2047
2050
2049
2027
2053
2057
2058
2034
2038
2033
2034
2038
2045
2031
2037
2053
2061
2058
2062
2063
2051
2046
2045
2042
2043
2042
2037
2053
2047
2059
2058
2057
2058
2069
2070
2073
2086
2098
2116
2110
2108
2109
2111
2112
2114
2116
2102
2108
2110
2111
2115
2114
2123
2124
2121
2144
2167
2170
2167
2135
2141
2145
2152
2166
2159
2160
2133
2132
2133
2100
2104
2101
2102
2109
2108
2109
2112
2114
2113
2114
2123
2121
2122
2116
2115
2126
2133
2135
2136
2137
2138
2137
2140
2156
2148
2144
2143
2142
2145
2161
2160
2162
2154
2155
2136
2134
2132
2138
2140
2143
2135
2136
2143
2130
2127
2158
2161
2167
2169
2186
2191
2185
2193
2192
2205
2199
2198
2199
2201
2200
2201
2225
2234
2232
2241
2230
2227
2218
2228
2241
2242
2243
2258
2251
2242
2226
2218
2223
2222
2225
2247
2246
2243
2245
2238
2239
2237
2255
2257
2258
2251
2267
2268
2269
2283
2264
2266
2264
2265
2293
2310
2327
2334
2354
2353
2358
2354
2359
2353
2362
2363
2364
2362
2366
2362
2356
2355
2351
2385
2386
2387
2389
2391
2400
2411
2373
2343
2332
2331
2335
2346
2343
2357
2365
2366
2373
2374
2368
2363
2362
2366
2365
2369
2340
2341
2337
2357
2358
2353
2355
2374
2373
2379
2382
2390
2388
2387
2388
2387
2388
2389
2399
2400
2398
2406
2409
2414
2416
2425
2434
2424
2418
2420
2413
2418
2419
2427
2428
2429
2428
2431
2430
2418
2430
2434
2442
2443
2410
2409
2416
2400
2398
2402
2403
2405
2440
2433
2439
2435
2414
2431
2440
2443
2442
2445
2449
2448
2444
2443
2450
2432
2431
2436
2460
2467
2471
2470
2473
2477
2486
2487
2482
2487
2488
2490
2493
2465
2463
2471
2467
2466
2473
2469
2471
2483
2484
2496
2494
2497
2493
2521
2522
2500
2504
2512
2521
2522
2529
2524
2521
2528
2529
2544
2521
2532
2525
2530
2524
2523
2505
2498
2499
2494
2487
2484
2487
2488
2491
2498
2497
2486
2478
2484
2488
2489
2491
2489
2493
2496
2499
2496
2495
2498
2499
2498
2497
2503
2500
2534
2535
2546
2588
2616
2642
2641
2640
2654
2663
2659
2663
2664
2674
2678
2680
2683
2692
2698
2703
2704
2707
2704
2735
2710
2728
2730
2739
2734
2741
2738
2760
2744
2745
2746
2747
2750
2744
2746
2759
2761
2762
2763
2764
2785
2795
2818
2820
2824
2828
2831
2833
2837
2838
2840
2839
2847
2844
2835
2834
2829
2835
2837
2805
2832
2833
2832
2833
2822
2823
2826
2835
2823
2825
2828
2829
2828
2850
2851
2853
2862
2864
2877
2871
2874
2886
2921
2895
2899
2887
2898
2897
2899
2922
2929
2935
2937
2938
2935
2939
2944
2945
2981
2982
2986
3002
3003
3010
3009
3020
3021
3029
3028
3019
3020
3021
2996
2993
2999
3009
3013
2996
2988
2990
3032
3031
3023
3037
3027
2994
2988
2991
3023
3025
3029
3033
3043
3052
3053
3052
3054
3055
3045
3044
3053
3056
3077
3078
3079
3073
3099
3093
3097
3100
3064
3076
3082
3095
3091
3095
3101
3102
3099
3100
3095
3097
3093
3102
3105
3143
3160
3157
3156
3158
3150
3151
3150
3157
3184
3179
3181
3173
3179
3184
3187
3197
3198
3199
3208
3214
3221
3205
3209
3210
3214
3215
3214
3216
3234
3237
3246
3254
3255
3278
3277
3278
3281
3299
3298
3306
3305
3304
3315
3323
3330
3331
3323
3326
3329
3330
3336
3326
3324
3320
3318
3317
3319
3318
3320
3321
3322
3331
3322
3315
3326
3325
3317
3316
3312
3323
3322
3329
3328
3323
3324
3332
3331
3329
3294
3293
3290
3283
3278
3281
3290
3291
3265
3268
3271
3255
3248
3249
3250
3253
3254
3276
3277
3274
3279
3281
3295
3290
3306
3309
3300
3306
3317
3316
3325
3322
3343
3346
3355
3356
3370
3371
3372
3373
3377
3378
3390
3397
3399
3433
3454
3468
3474
3472
3475
3473
3471
3489
3490
3495
3510
3505
3514
3516
3528
3529
3545
3544
3549
3547
3555
3556
3555
3546
3555
3556
3566
3553
3552
3578
3565
3566
3558
3544
3540
3539
3534
3535
3522
3523
3512
3508
3548
3559
3566
3561
3581
3584
3616
3624
3623
3626
3625
3624
3630
3627
3631
3618
3632
3631
3641
3648
3645
3649
3653
3663
3672
3677
3678
3686
3688
3687
3694
3684
3688
3690
3699
3703
3706
3707
3711
3718
3715
3716
3717
3718
3724
3723
3733
3737
3738
3739
3738
3741
3769
3770
3767
3770
3769
3768
3771
3743
3744
3749
3752
3755
3756
3755
3750
3751
3750
3745
3755
3757
3756
3758
3759
3769
3768
3776
3766
3774
3776
3775
3780
3801
3776
3775
3776
3789
3794
3793
3790
3773
3769
3770
3781
3782
3781
3782
3755
3762
3763
3768
3765
3780
3789
3790
3792
3795
3796
3797
3796
3789
3788
3791
3786
3785
3784
3764
3767
3771
3774
3782
3794
3811
3802
3808
3819
3806
3826
3818
3808
3809
3824
3844
3833
3834
3825
3830
3835
3839
3841
3828
3826
3835
3841
3849
3858
3851
3846
3850
3853
3863
3862
3866
3867
3873
3871
3859
3892
3887
3905
3914
3915
3909
3910
3926
3925
3933
3930
3933
3930
3920
3919
3923
3924
3920
3922
3935
3938
3940
3935
3947
3948
3952
3953
3962
3985
3984
3987
4005
4006
4007
4027
4031
4043
4042
4037
4038
4034
4037
4044
4054
4063
4081
4078
4090
4114
4125
4122
4130
4137
4142
4140
4139
4140
4139
4143
4158
4155
4151
4154
4153
4152
4148
4144
4145
4157
4145
4143
4144
4142
4143
4144
4150
4152
4153
4176
4165
4168
4175
4178
4184
4183
4182
4164
4163
4164
4177
4192
4190
4197
4216
4217
4214
4192
4193
4189
4191
4171
4172
4177
4194
4189
4187
4192
4198
4200
4210
4222
4243
4252
4253
4258
4264
4260
4261
4259
4272
4290
4318
4320
4334
4335
4336
4352
4356
4380
4400
4396
4389
4390
4393
4392
4397
4399
4397
4395
4400
4394
4403
4400
4416
4417
4424
4431
4437
4424
4422
4421
4422
4410
4417
4420
4421
4428
4439
4449
4448
4437
4414
4429
4440
4417
4418
4422
4401
4393
4402
4400
4414
4419
4392
4395
4406
4428
4429
4442
4413
4437
4447
4444
4430
4429
4438
4434
4433
4436
4422
4423
4422
4413
4415
4403
4416
4420
4386
4379
4371
4378
4381
4404
4417
4416
4418
4412
4416
4407
4400
4408
4412
4423
4421
4412
4409
4426
4424
4429
4420
4398
4404
4403
4409
4410
4418
4417
4426
4428
4447
4459
4461
4465
4478
4479
4450
4452
4454
4445
4447
4459
4482'''.strip().split('\n')
a = [int(line) for line in a]
# Part 1: count how many measurements are strictly larger than the one
# immediately before them. Comparing consecutive pairs directly removes
# the need for the original magic sentinel (prev_val = 99999999), which
# would have miscounted if a depth ever reached that value.
ctr = sum(1 for prev, cur in zip(a, a[1:]) if cur > prev)
print(ctr)
#%% Part 2
"""
Considering every single measurement isn't as useful as you expected: there's just too much noise in the data.
Instead, consider sums of a three-measurement sliding window. Again considering the above example:
199 A
200 A B
208 A B C
210 B C D
200 E C D
207 E F D
240 E F G
269 F G H
260 G H
263 H
Start by comparing the first and second three-measurement windows. The measurements in the first window are marked A (199, 200, 208); their sum is 199 + 200 + 208 = 607. The second window is marked B (200, 208, 210); its sum is 618. The sum of measurements in the second window is larger than the sum of the first, so this first comparison increased.
Your goal now is to count the number of times the sum of measurements in this sliding window increases from the previous sum. So, compare A with B, then compare B with C, then C with D, and so on. Stop when there aren't enough measurements left to create a new three-measurement sum.
"""
# Part 2: count increases between sums of consecutive 3-element windows.
# Adjacent windows share two elements, so the comparison reduces to the
# two elements that differ:
#   sum(a[i-2:i+1]) > sum(a[i-3:i])  <=>  a[i] > a[i-3]
# This also fixes a latent bug in the original warm-up check
# ("if 0 not in prev_vals"), which would have skipped comparisons
# whenever a real measurement of 0 appeared in the data.
ctr = 0
for i in range(3, len(a)):
    if a[i] > a[i - 3]:
        ctr += 1
print(ctr)
| 5.80597
| 350
| 0.773613
| 2,446
| 12,059
| 3.809076
| 0.674162
| 0.005152
| 0.003864
| 0.002576
| 0.00923
| 0
| 0
| 0
| 0
| 0
| 0
| 0.81352
| 0.212455
| 12,059
| 2,076
| 351
| 5.808767
| 0.167527
| 0.009702
| 0
| 0.458911
| 0
| 0
| 0.955919
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.00099
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
99b85f49f23084afac47f925be35e4533571ccd5
| 70
|
py
|
Python
|
stubs/six/__init__.py
|
jcollado/rabbithole
|
fdddde25a3c8f1e2f78bdeedcb3fc282a92c36d2
|
[
"MIT"
] | 1
|
2018-07-13T22:29:35.000Z
|
2018-07-13T22:29:35.000Z
|
stubs/six/__init__.py
|
jcollado/rabbithole
|
fdddde25a3c8f1e2f78bdeedcb3fc282a92c36d2
|
[
"MIT"
] | 274
|
2016-11-25T00:58:22.000Z
|
2021-11-15T17:47:17.000Z
|
stubs/six/__init__.py
|
jcollado/rabbithole
|
fdddde25a3c8f1e2f78bdeedcb3fc282a92c36d2
|
[
"MIT"
] | null | null | null |
def iteritems(dictonary):
    """No-op stub of ``six.iteritems``; always returns ``None``."""
    # NOTE(review): the parameter name "dictonary" is a typo for
    # "dictionary", but renaming it would change the keyword-argument
    # interface for any caller, so it is documented rather than fixed.
    pass
class StringIO(object):
    """Empty stub standing in for ``six.StringIO``; defines no behaviour."""
    pass
| 10
| 25
| 0.685714
| 8
| 70
| 6
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.228571
| 70
| 6
| 26
| 11.666667
| 0.888889
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
99e8ee8b3d1140ebaeed07c6cd92bbbbe18757b5
| 142
|
py
|
Python
|
Mundo01/Python/aula10-031.py
|
molonti/CursoemVideo---Python
|
4f6a7af648f7f619d11e95fa3dc7a33b28fcfa11
|
[
"MIT"
] | null | null | null |
Mundo01/Python/aula10-031.py
|
molonti/CursoemVideo---Python
|
4f6a7af648f7f619d11e95fa3dc7a33b28fcfa11
|
[
"MIT"
] | null | null | null |
Mundo01/Python/aula10-031.py
|
molonti/CursoemVideo---Python
|
4f6a7af648f7f619d11e95fa3dc7a33b28fcfa11
|
[
"MIT"
] | null | null | null |
# Exercise: ticket price by trip distance.
# Reads the distance in km and charges R$0.45/km when the trip is
# 200 km or more, otherwise R$0.50/km.
dis = int(input('Qual a Distância da Viagem: '))
# Choose the per-km rate once so the message is printed in a single place
# instead of two near-duplicate print() branches.
taxa = 0.45 if dis >= 200 else 0.50
# Fix: the original message misspelled "Você" as "Vocâ".
print(f'Você pagará {dis*taxa}')
| 23.666667
| 48
| 0.626761
| 26
| 142
| 3.423077
| 0.692308
| 0.134831
| 0.224719
| 0.359551
| 0.449438
| 0.449438
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 0.176056
| 142
| 5
| 49
| 28.4
| 0.683761
| 0
| 0
| 0
| 0
| 0
| 0.507042
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
8206987696c19a02d7b194876609090f0d950a90
| 44,620
|
py
|
Python
|
pysnmp_mibs/IP-FORWARD-MIB.py
|
jackjack821/pysnmp-mibs
|
9835ea0bb2420715caf4ee9aaa07d59bb263acd6
|
[
"BSD-2-Clause"
] | 6
|
2017-04-21T13:48:08.000Z
|
2022-01-06T19:42:52.000Z
|
pysnmp_mibs/IP-FORWARD-MIB.py
|
jackjack821/pysnmp-mibs
|
9835ea0bb2420715caf4ee9aaa07d59bb263acd6
|
[
"BSD-2-Clause"
] | 1
|
2020-05-05T16:42:25.000Z
|
2020-05-05T16:42:25.000Z
|
pysnmp_mibs/IP-FORWARD-MIB.py
|
jackjack821/pysnmp-mibs
|
9835ea0bb2420715caf4ee9aaa07d59bb263acd6
|
[
"BSD-2-Clause"
] | 6
|
2020-02-08T20:28:49.000Z
|
2021-09-14T13:36:46.000Z
|
#
# PySNMP MIB module IP-FORWARD-MIB (http://pysnmp.sf.net)
# ASN.1 source http://mibs.snmplabs.com:80/asn1/IP-FORWARD-MIB
# Produced by pysmi-0.0.7 at Sun Feb 14 00:17:45 2016
# On host bldfarm platform Linux version 4.1.13-100.fc21.x86_64 by user goose
# Using Python version 3.5.0 (default, Jan 5 2016, 17:11:52)
#
( Integer, ObjectIdentifier, OctetString, ) = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
( NamedValues, ) = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
( ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection, SingleValueConstraint, ConstraintsUnion, ) = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "SingleValueConstraint", "ConstraintsUnion")
( IANAipRouteProtocol, ) = mibBuilder.importSymbols("IANA-RTPROTO-MIB", "IANAipRouteProtocol")
( InterfaceIndexOrZero, ) = mibBuilder.importSymbols("IF-MIB", "InterfaceIndexOrZero")
( InetAutonomousSystemNumber, InetAddressPrefixLength, InetAddressType, InetAddress, ) = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAutonomousSystemNumber", "InetAddressPrefixLength", "InetAddressType", "InetAddress")
( ip, ) = mibBuilder.importSymbols("IP-MIB", "ip")
( ModuleCompliance, ObjectGroup, NotificationGroup, ) = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
( Unsigned32, ModuleIdentity, Gauge32, Counter64, MibIdentifier, iso, Counter32, NotificationType, ObjectIdentity, Bits, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, Integer32, ) = mibBuilder.importSymbols("SNMPv2-SMI", "Unsigned32", "ModuleIdentity", "Gauge32", "Counter64", "MibIdentifier", "iso", "Counter32", "NotificationType", "ObjectIdentity", "Bits", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "Integer32")
( RowStatus, TextualConvention, DisplayString, ) = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "TextualConvention", "DisplayString")
ipForward = ModuleIdentity((1, 3, 6, 1, 2, 1, 4, 24)).setRevisions(("2006-02-01 00:00", "1996-09-19 00:00", "1992-07-02 21:56",))
if mibBuilder.loadTexts: ipForward.setLastUpdated('200602010000Z')
if mibBuilder.loadTexts: ipForward.setOrganization('IETF IPv6 Working Group\n http://www.ietf.org/html.charters/ipv6-charter.html')
if mibBuilder.loadTexts: ipForward.setContactInfo('Editor:\n Brian Haberman\n Johns Hopkins University - Applied Physics Laboratory\n Mailstop 17-S442\n 11100 Johns Hopkins Road\n Laurel MD, 20723-6099 USA\n\n Phone: +1-443-778-1319\n Email: brian@innovationslab.net\n\n Send comments to <ipv6@ietf.org>')
if mibBuilder.loadTexts: ipForward.setDescription('The MIB module for the management of CIDR multipath IP\n Routes.\n\n Copyright (C) The Internet Society (2006). This version\n of this MIB module is a part of RFC 4292; see the RFC\n itself for full legal notices.')
inetCidrRouteNumber = MibScalar((1, 3, 6, 1, 2, 1, 4, 24, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: inetCidrRouteNumber.setDescription('The number of current inetCidrRouteTable entries that\n are not invalid.')
inetCidrRouteDiscards = MibScalar((1, 3, 6, 1, 2, 1, 4, 24, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: inetCidrRouteDiscards.setDescription('The number of valid route entries discarded from the\n inetCidrRouteTable. Discarded route entries do not\n appear in the inetCidrRouteTable. One possible reason\n for discarding an entry would be to free-up buffer space\n for other route table entries.')
inetCidrRouteTable = MibTable((1, 3, 6, 1, 2, 1, 4, 24, 7), )
if mibBuilder.loadTexts: inetCidrRouteTable.setDescription("This entity's IP Routing table.")
inetCidrRouteEntry = MibTableRow((1, 3, 6, 1, 2, 1, 4, 24, 7, 1), ).setIndexNames((0, "IP-FORWARD-MIB", "inetCidrRouteDestType"), (0, "IP-FORWARD-MIB", "inetCidrRouteDest"), (0, "IP-FORWARD-MIB", "inetCidrRoutePfxLen"), (0, "IP-FORWARD-MIB", "inetCidrRoutePolicy"), (0, "IP-FORWARD-MIB", "inetCidrRouteNextHopType"), (0, "IP-FORWARD-MIB", "inetCidrRouteNextHop"))
if mibBuilder.loadTexts: inetCidrRouteEntry.setDescription('A particular route to a particular destination, under a\n particular policy (as reflected in the\n inetCidrRoutePolicy object).\n\n Dynamically created rows will survive an agent reboot.\n\n Implementers need to be aware that if the total number\n of elements (octets or sub-identifiers) in\n inetCidrRouteDest, inetCidrRoutePolicy, and\n inetCidrRouteNextHop exceeds 111, then OIDs of column\n instances in this table will have more than 128 sub-\n identifiers and cannot be accessed using SNMPv1,\n SNMPv2c, or SNMPv3.')
inetCidrRouteDestType = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 7, 1, 1), InetAddressType())
if mibBuilder.loadTexts: inetCidrRouteDestType.setDescription('The type of the inetCidrRouteDest address, as defined\n in the InetAddress MIB.\n\n Only those address types that may appear in an actual\n routing table are allowed as values of this object.')
inetCidrRouteDest = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 7, 1, 2), InetAddress())
if mibBuilder.loadTexts: inetCidrRouteDest.setDescription('The destination IP address of this route.\n\n The type of this address is determined by the value of\n the inetCidrRouteDestType object.\n\n The values for the index objects inetCidrRouteDest and\n inetCidrRoutePfxLen must be consistent. When the value\n of inetCidrRouteDest (excluding the zone index, if one\n is present) is x, then the bitwise logical-AND\n of x with the value of the mask formed from the\n corresponding index object inetCidrRoutePfxLen MUST be\n equal to x. If not, then the index pair is not\n consistent and an inconsistentName error must be\n returned on SET or CREATE requests.')
inetCidrRoutePfxLen = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 7, 1, 3), InetAddressPrefixLength())
if mibBuilder.loadTexts: inetCidrRoutePfxLen.setDescription('Indicates the number of leading one bits that form the\n mask to be logical-ANDed with the destination address\n before being compared to the value in the\n inetCidrRouteDest field.\n The values for the index objects inetCidrRouteDest and\n inetCidrRoutePfxLen must be consistent. When the value\n of inetCidrRouteDest (excluding the zone index, if one\n is present) is x, then the bitwise logical-AND\n of x with the value of the mask formed from the\n corresponding index object inetCidrRoutePfxLen MUST be\n equal to x. If not, then the index pair is not\n consistent and an inconsistentName error must be\n returned on SET or CREATE requests.')
inetCidrRoutePolicy = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 7, 1, 4), ObjectIdentifier())
if mibBuilder.loadTexts: inetCidrRoutePolicy.setDescription('This object is an opaque object without any defined\n semantics. Its purpose is to serve as an additional\n index that may delineate between multiple entries to\n the same destination. The value { 0 0 } shall be used\n as the default value for this object.')
inetCidrRouteNextHopType = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 7, 1, 5), InetAddressType())
if mibBuilder.loadTexts: inetCidrRouteNextHopType.setDescription('The type of the inetCidrRouteNextHop address, as\n defined in the InetAddress MIB.\n\n Value should be set to unknown(0) for non-remote\n routes.\n\n Only those address types that may appear in an actual\n routing table are allowed as values of this object.')
inetCidrRouteNextHop = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 7, 1, 6), InetAddress())
if mibBuilder.loadTexts: inetCidrRouteNextHop.setDescription('On remote routes, the address of the next system en\n route. For non-remote routes, a zero length string.\n The type of this address is determined by the value of\n the inetCidrRouteNextHopType object.')
inetCidrRouteIfIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 7, 1, 7), InterfaceIndexOrZero()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: inetCidrRouteIfIndex.setDescription('The ifIndex value that identifies the local interface\n through which the next hop of this route should be\n reached. A value of 0 is valid and represents the\n scenario where no interface is specified.')
inetCidrRouteType = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 7, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5,))).clone(namedValues=NamedValues(("other", 1), ("reject", 2), ("local", 3), ("remote", 4), ("blackhole", 5),))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: inetCidrRouteType.setDescription('The type of route. Note that local(3) refers to a\n route for which the next hop is the final destination;\n remote(4) refers to a route for which the next hop is\n not the final destination.\n\n Routes that do not result in traffic forwarding or\n rejection should not be displayed, even if the\n implementation keeps them stored internally.\n\n reject(2) refers to a route that, if matched, discards\n the message as unreachable and returns a notification\n (e.g., ICMP error) to the message sender. This is used\n in some protocols as a means of correctly aggregating\n routes.\n\n blackhole(5) refers to a route that, if matched,\n discards the message silently.')
inetCidrRouteProto = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 7, 1, 9), IANAipRouteProtocol()).setMaxAccess("readonly")
if mibBuilder.loadTexts: inetCidrRouteProto.setDescription('The routing mechanism via which this route was learned.\n Inclusion of values for gateway routing protocols is\n not intended to imply that hosts should support those\n protocols.')
inetCidrRouteAge = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 7, 1, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: inetCidrRouteAge.setDescription("The number of seconds since this route was last updated\n or otherwise determined to be correct. Note that no\n semantics of 'too old' can be implied, except through\n knowledge of the routing protocol by which the route\n was learned.")
inetCidrRouteNextHopAS = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 7, 1, 11), InetAutonomousSystemNumber()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: inetCidrRouteNextHopAS.setDescription("The Autonomous System Number of the Next Hop. The\n semantics of this object are determined by the routing-\n protocol specified in the route's inetCidrRouteProto\n value. When this object is unknown or not relevant, its\n value should be set to zero.")
inetCidrRouteMetric1 = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 7, 1, 12), Integer32().clone(-1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: inetCidrRouteMetric1.setDescription("The primary routing metric for this route. The\n semantics of this metric are determined by the routing-\n protocol specified in the route's inetCidrRouteProto\n value. If this metric is not used, its value should be\n set to -1.")
inetCidrRouteMetric2 = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 7, 1, 13), Integer32().clone(-1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: inetCidrRouteMetric2.setDescription("An alternate routing metric for this route. The\n semantics of this metric are determined by the routing-\n protocol specified in the route's inetCidrRouteProto\n value. If this metric is not used, its value should be\n set to -1.")
inetCidrRouteMetric3 = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 7, 1, 14), Integer32().clone(-1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: inetCidrRouteMetric3.setDescription("An alternate routing metric for this route. The\n semantics of this metric are determined by the routing-\n protocol specified in the route's inetCidrRouteProto\n value. If this metric is not used, its value should be\n set to -1.")
inetCidrRouteMetric4 = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 7, 1, 15), Integer32().clone(-1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: inetCidrRouteMetric4.setDescription("An alternate routing metric for this route. The\n semantics of this metric are determined by the routing-\n protocol specified in the route's inetCidrRouteProto\n value. If this metric is not used, its value should be\n set to -1.")
inetCidrRouteMetric5 = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 7, 1, 16), Integer32().clone(-1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: inetCidrRouteMetric5.setDescription("An alternate routing metric for this route. The\n semantics of this metric are determined by the routing-\n protocol specified in the route's inetCidrRouteProto\n value. If this metric is not used, its value should be\n set to -1.")
inetCidrRouteStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 7, 1, 17), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: inetCidrRouteStatus.setDescription('The row status variable, used according to row\n installation and removal conventions.\n\n A row entry cannot be modified when the status is\n marked as active(1).')
ipForwardConformance = MibIdentifier((1, 3, 6, 1, 2, 1, 4, 24, 5))
ipForwardGroups = MibIdentifier((1, 3, 6, 1, 2, 1, 4, 24, 5, 1))
ipForwardCompliances = MibIdentifier((1, 3, 6, 1, 2, 1, 4, 24, 5, 2))
ipForwardFullCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 4, 24, 5, 2, 3)).setObjects(*(("IP-FORWARD-MIB", "inetForwardCidrRouteGroup"),))
if mibBuilder.loadTexts: ipForwardFullCompliance.setDescription('When this MIB is implemented for read-create, the\n implementation can claim full compliance.\n\n There are a number of INDEX objects that cannot be\n represented in the form of OBJECT clauses in SMIv2,\n but for which there are compliance requirements,\n expressed in OBJECT clause form in this description:\n\n -- OBJECT inetCidrRouteDestType\n -- SYNTAX InetAddressType (ipv4(1), ipv6(2),\n -- ipv4z(3), ipv6z(4))\n -- DESCRIPTION\n -- This MIB requires support for global and\n -- non-global ipv4 and ipv6 addresses.\n --\n -- OBJECT inetCidrRouteDest\n -- SYNTAX InetAddress (SIZE (4 | 8 | 16 | 20))\n -- DESCRIPTION\n -- This MIB requires support for global and\n -- non-global IPv4 and IPv6 addresses.\n --\n -- OBJECT inetCidrRouteNextHopType\n -- SYNTAX InetAddressType (unknown(0), ipv4(1),\n -- ipv6(2), ipv4z(3)\n -- ipv6z(4))\n -- DESCRIPTION\n -- This MIB requires support for global and\n -- non-global ipv4 and ipv6 addresses.\n --\n -- OBJECT inetCidrRouteNextHop\n -- SYNTAX InetAddress (SIZE (0 | 4 | 8 | 16 | 20))\n -- DESCRIPTION\n -- This MIB requires support for global and\n -- non-global IPv4 and IPv6 addresses.\n ')
ipForwardReadOnlyCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 4, 24, 5, 2, 4)).setObjects(*(("IP-FORWARD-MIB", "inetForwardCidrRouteGroup"),))
if mibBuilder.loadTexts: ipForwardReadOnlyCompliance.setDescription('When this MIB is implemented without support for read-\n create (i.e., in read-only mode), the implementation can\n claim read-only compliance.')
inetForwardCidrRouteGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 4, 24, 5, 1, 4)).setObjects(*(("IP-FORWARD-MIB", "inetCidrRouteDiscards"), ("IP-FORWARD-MIB", "inetCidrRouteIfIndex"), ("IP-FORWARD-MIB", "inetCidrRouteType"), ("IP-FORWARD-MIB", "inetCidrRouteProto"), ("IP-FORWARD-MIB", "inetCidrRouteAge"), ("IP-FORWARD-MIB", "inetCidrRouteNextHopAS"), ("IP-FORWARD-MIB", "inetCidrRouteMetric1"), ("IP-FORWARD-MIB", "inetCidrRouteMetric2"), ("IP-FORWARD-MIB", "inetCidrRouteMetric3"), ("IP-FORWARD-MIB", "inetCidrRouteMetric4"), ("IP-FORWARD-MIB", "inetCidrRouteMetric5"), ("IP-FORWARD-MIB", "inetCidrRouteStatus"), ("IP-FORWARD-MIB", "inetCidrRouteNumber"),))
if mibBuilder.loadTexts: inetForwardCidrRouteGroup.setDescription('The IP version-independent CIDR Route Table.')
ipCidrRouteNumber = MibScalar((1, 3, 6, 1, 2, 1, 4, 24, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipCidrRouteNumber.setDescription('The number of current ipCidrRouteTable entries that are\n not invalid. This object is deprecated in favor of\n inetCidrRouteNumber and the inetCidrRouteTable.')
ipCidrRouteTable = MibTable((1, 3, 6, 1, 2, 1, 4, 24, 4), )
if mibBuilder.loadTexts: ipCidrRouteTable.setDescription("This entity's IP Routing table. This table has been\n deprecated in favor of the IP version neutral\n inetCidrRouteTable.")
ipCidrRouteEntry = MibTableRow((1, 3, 6, 1, 2, 1, 4, 24, 4, 1), ).setIndexNames((0, "IP-FORWARD-MIB", "ipCidrRouteDest"), (0, "IP-FORWARD-MIB", "ipCidrRouteMask"), (0, "IP-FORWARD-MIB", "ipCidrRouteTos"), (0, "IP-FORWARD-MIB", "ipCidrRouteNextHop"))
if mibBuilder.loadTexts: ipCidrRouteEntry.setDescription('A particular route to a particular destination, under a\n particular policy.')
ipCidrRouteDest = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 4, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipCidrRouteDest.setDescription('The destination IP address of this route.\n\n This object may not take a Multicast (Class D) address\n value.\n\n Any assignment (implicit or otherwise) of an instance\n of this object to a value x must be rejected if the\n bitwise logical-AND of x with the value of the\n corresponding instance of the ipCidrRouteMask object is\n not equal to x.')
ipCidrRouteMask = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 4, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipCidrRouteMask.setDescription('Indicate the mask to be logical-ANDed with the\n destination address before being compared to the value\n in the ipCidrRouteDest field. For those systems that\n do not support arbitrary subnet masks, an agent\n constructs the value of the ipCidrRouteMask by\n reference to the IP Address Class.\n\n Any assignment (implicit or otherwise) of an instance\n of this object to a value x must be rejected if the\n bitwise logical-AND of x with the value of the\n corresponding instance of the ipCidrRouteDest object is\n not equal to ipCidrRouteDest.')
ipCidrRouteTos = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 4, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0,2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipCidrRouteTos.setDescription('The policy specifier is the IP TOS Field. The encoding\n of IP TOS is as specified by the following convention.\n Zero indicates the default path if no more specific\n policy applies.\n\n +-----+-----+-----+-----+-----+-----+-----+-----+\n | | | |\n | PRECEDENCE | TYPE OF SERVICE | 0 |\n | | | |\n +-----+-----+-----+-----+-----+-----+-----+-----+\n\n IP TOS IP TOS\n Field Policy Field Policy\n Contents Code Contents Code\n 0 0 0 0 ==> 0 0 0 0 1 ==> 2\n 0 0 1 0 ==> 4 0 0 1 1 ==> 6\n 0 1 0 0 ==> 8 0 1 0 1 ==> 10\n 0 1 1 0 ==> 12 0 1 1 1 ==> 14\n 1 0 0 0 ==> 16 1 0 0 1 ==> 18\n 1 0 1 0 ==> 20 1 0 1 1 ==> 22\n 1 1 0 0 ==> 24 1 1 0 1 ==> 26\n 1 1 1 0 ==> 28 1 1 1 1 ==> 30')
ipCidrRouteNextHop = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 4, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipCidrRouteNextHop.setDescription('On remote routes, the address of the next system en\n route; Otherwise, 0.0.0.0.')
ipCidrRouteIfIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 4, 1, 5), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ipCidrRouteIfIndex.setDescription('The ifIndex value that identifies the local interface\n through which the next hop of this route should be\n reached.')
ipCidrRouteType = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 4, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4,))).clone(namedValues=NamedValues(("other", 1), ("reject", 2), ("local", 3), ("remote", 4),))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ipCidrRouteType.setDescription('The type of route. Note that local(3) refers to a\n route for which the next hop is the final destination;\n remote(4) refers to a route for which the next hop is\n not the final destination.\n\n Routes that do not result in traffic forwarding or\n rejection should not be displayed, even if the\n implementation keeps them stored internally.\n\n reject (2) refers to a route that, if matched,\n discards the message as unreachable. This is used in\n some protocols as a means of correctly aggregating\n routes.')
ipCidrRouteProto = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 4, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,))).clone(namedValues=NamedValues(("other", 1), ("local", 2), ("netmgmt", 3), ("icmp", 4), ("egp", 5), ("ggp", 6), ("hello", 7), ("rip", 8), ("isIs", 9), ("esIs", 10), ("ciscoIgrp", 11), ("bbnSpfIgp", 12), ("ospf", 13), ("bgp", 14), ("idpr", 15), ("ciscoEigrp", 16),))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipCidrRouteProto.setDescription('The routing mechanism via which this route was learned.\n Inclusion of values for gateway routing protocols is\n not intended to imply that hosts should support those\n protocols.')
ipCidrRouteAge = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 4, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipCidrRouteAge.setDescription("The number of seconds since this route was last updated\n or otherwise determined to be correct. Note that no\n semantics of `too old' can be implied, except through\n knowledge of the routing protocol by which the route\n was learned.")
ipCidrRouteInfo = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 4, 1, 9), ObjectIdentifier()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ipCidrRouteInfo.setDescription("A reference to MIB definitions specific to the\n particular routing protocol that is responsible for\n this route, as determined by the value specified in the\n route's ipCidrRouteProto value. If this information is\n not present, its value should be set to the OBJECT\n IDENTIFIER { 0 0 }, which is a syntactically valid\n object identifier, and any implementation conforming to\n ASN.1 and the Basic Encoding Rules must be able to\n generate and recognize this value.")
ipCidrRouteNextHopAS = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 4, 1, 10), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ipCidrRouteNextHopAS.setDescription("The Autonomous System Number of the Next Hop. The\n semantics of this object are determined by the routing-\n protocol specified in the route's ipCidrRouteProto\n value. When this object is unknown or not relevant, its\n value should be set to zero.")
ipCidrRouteMetric1 = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 4, 1, 11), Integer32().clone(-1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ipCidrRouteMetric1.setDescription("The primary routing metric for this route. The\n semantics of this metric are determined by the routing-\n protocol specified in the route's ipCidrRouteProto\n value. If this metric is not used, its value should be\n set to -1.")
ipCidrRouteMetric2 = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 4, 1, 12), Integer32().clone(-1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ipCidrRouteMetric2.setDescription("An alternate routing metric for this route. The\n semantics of this metric are determined by the routing-\n protocol specified in the route's ipCidrRouteProto\n value. If this metric is not used, its value should be\n set to -1.")
ipCidrRouteMetric3 = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 4, 1, 13), Integer32().clone(-1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ipCidrRouteMetric3.setDescription("An alternate routing metric for this route. The\n semantics of this metric are determined by the routing-\n protocol specified in the route's ipCidrRouteProto\n value. If this metric is not used, its value should be\n set to -1.")
ipCidrRouteMetric4 = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 4, 1, 14), Integer32().clone(-1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ipCidrRouteMetric4.setDescription("An alternate routing metric for this route. The\n semantics of this metric are determined by the routing-\n protocol specified in the route's ipCidrRouteProto\n value. If this metric is not used, its value should be\n set to -1.")
ipCidrRouteMetric5 = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 4, 1, 15), Integer32().clone(-1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ipCidrRouteMetric5.setDescription("An alternate routing metric for this route. The\n semantics of this metric are determined by the routing-\n protocol specified in the route's ipCidrRouteProto\n value. If this metric is not used, its value should be\n set to -1.")
ipCidrRouteStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 4, 1, 16), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ipCidrRouteStatus.setDescription('The row status variable, used according to row\n installation and removal conventions.')
ipForwardCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 4, 24, 5, 2, 1)).setObjects(*(("IP-FORWARD-MIB", "ipForwardCidrRouteGroup"),))
if mibBuilder.loadTexts: ipForwardCompliance.setDescription('The compliance statement for SNMPv2 entities that\n implement the ipForward MIB.\n\n This compliance statement has been deprecated and\n replaced with ipForwardFullCompliance and\n ipForwardReadOnlyCompliance.')
ipForwardCidrRouteGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 4, 24, 5, 1, 3)).setObjects(*(("IP-FORWARD-MIB", "ipCidrRouteNumber"), ("IP-FORWARD-MIB", "ipCidrRouteDest"), ("IP-FORWARD-MIB", "ipCidrRouteMask"), ("IP-FORWARD-MIB", "ipCidrRouteTos"), ("IP-FORWARD-MIB", "ipCidrRouteNextHop"), ("IP-FORWARD-MIB", "ipCidrRouteIfIndex"), ("IP-FORWARD-MIB", "ipCidrRouteType"), ("IP-FORWARD-MIB", "ipCidrRouteProto"), ("IP-FORWARD-MIB", "ipCidrRouteAge"), ("IP-FORWARD-MIB", "ipCidrRouteInfo"), ("IP-FORWARD-MIB", "ipCidrRouteNextHopAS"), ("IP-FORWARD-MIB", "ipCidrRouteMetric1"), ("IP-FORWARD-MIB", "ipCidrRouteMetric2"), ("IP-FORWARD-MIB", "ipCidrRouteMetric3"), ("IP-FORWARD-MIB", "ipCidrRouteMetric4"), ("IP-FORWARD-MIB", "ipCidrRouteMetric5"), ("IP-FORWARD-MIB", "ipCidrRouteStatus"),))
if mibBuilder.loadTexts: ipForwardCidrRouteGroup.setDescription('The CIDR Route Table.\n\n This group has been deprecated and replaced with\n inetForwardCidrRouteGroup.')
ipForwardNumber = MibScalar((1, 3, 6, 1, 2, 1, 4, 24, 1), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipForwardNumber.setDescription('The number of current ipForwardTable entries that are\n not invalid.')
ipForwardTable = MibTable((1, 3, 6, 1, 2, 1, 4, 24, 2), )
if mibBuilder.loadTexts: ipForwardTable.setDescription("This entity's IP Routing table.")
ipForwardEntry = MibTableRow((1, 3, 6, 1, 2, 1, 4, 24, 2, 1), ).setIndexNames((0, "IP-FORWARD-MIB", "ipForwardDest"), (0, "IP-FORWARD-MIB", "ipForwardProto"), (0, "IP-FORWARD-MIB", "ipForwardPolicy"), (0, "IP-FORWARD-MIB", "ipForwardNextHop"))
if mibBuilder.loadTexts: ipForwardEntry.setDescription('A particular route to a particular destination, under a\n particular policy.')
ipForwardDest = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 2, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipForwardDest.setDescription('The destination IP address of this route. An entry\n with a value of 0.0.0.0 is considered a default route.\n\n This object may not take a Multicast (Class D) address\n value.\n\n Any assignment (implicit or otherwise) of an instance\n of this object to a value x must be rejected if the\n bitwise logical-AND of x with the value of the\n corresponding instance of the ipForwardMask object is\n not equal to x.')
ipForwardMask = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 2, 1, 2), IpAddress().clone(hexValue="00000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ipForwardMask.setDescription('Indicate the mask to be logical-ANDed with the\n destination address before being compared to the value\n in the ipForwardDest field. For those systems that do\n not support arbitrary subnet masks, an agent constructs\n the value of the ipForwardMask by reference to the IP\n Address Class.\n\n Any assignment (implicit or otherwise) of an instance\n of this object to a value x must be rejected if the\n bitwise logical-AND of x with the value of the\n corresponding instance of the ipForwardDest object is\n not equal to ipForwardDest.')
ipForwardPolicy = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0,2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipForwardPolicy.setDescription("The general set of conditions that would cause\n the selection of one multipath route (set of\n next hops for a given destination) is referred\n to as 'policy'.\n\n Unless the mechanism indicated by ipForwardProto\n specifies otherwise, the policy specifier is\n the IP TOS Field. The encoding of IP TOS is as\n specified by the following convention. Zero\n indicates the default path if no more specific\n policy applies.\n\n +-----+-----+-----+-----+-----+-----+-----+-----+\n | | | |\n | PRECEDENCE | TYPE OF SERVICE | 0 |\n | | | |\n +-----+-----+-----+-----+-----+-----+-----+-----+\n\n IP TOS IP TOS\n Field Policy Field Policy\n Contents Code Contents Code\n 0 0 0 0 ==> 0 0 0 0 1 ==> 2\n 0 0 1 0 ==> 4 0 0 1 1 ==> 6\n 0 1 0 0 ==> 8 0 1 0 1 ==> 10\n 0 1 1 0 ==> 12 0 1 1 1 ==> 14\n 1 0 0 0 ==> 16 1 0 0 1 ==> 18\n 1 0 1 0 ==> 20 1 0 1 1 ==> 22\n 1 1 0 0 ==> 24 1 1 0 1 ==> 26\n 1 1 1 0 ==> 28 1 1 1 1 ==> 30\n\n Protocols defining 'policy' otherwise must either\n define a set of values that are valid for\n this object or must implement an integer-instanced\n policy table for which this object's\n value acts as an index.")
ipForwardNextHop = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 2, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipForwardNextHop.setDescription('On remote routes, the address of the next system en\n route; otherwise, 0.0.0.0.')
ipForwardIfIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 2, 1, 5), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ipForwardIfIndex.setDescription('The ifIndex value that identifies the local interface\n through which the next hop of this route should be\n reached.')
ipForwardType = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4,))).clone(namedValues=NamedValues(("other", 1), ("invalid", 2), ("local", 3), ("remote", 4),)).clone('invalid')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ipForwardType.setDescription('The type of route. Note that local(3) refers to a\n route for which the next hop is the final destination;\n remote(4) refers to a route for which the next hop is\n not the final destination.\n\n Setting this object to the value invalid(2) has the\n effect of invalidating the corresponding entry in the\n ipForwardTable object. That is, it effectively\n disassociates the destination identified with said\n entry from the route identified with said entry. It is\n an implementation-specific matter as to whether the\n agent removes an invalidated entry from the table.\n Accordingly, management stations must be prepared to\n receive tabular information from agents that\n corresponds to entries not currently in use. Proper\n interpretation of such entries requires examination of\n the relevant ipForwardType object.')
ipForwardProto = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 2, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,))).clone(namedValues=NamedValues(("other", 1), ("local", 2), ("netmgmt", 3), ("icmp", 4), ("egp", 5), ("ggp", 6), ("hello", 7), ("rip", 8), ("is-is", 9), ("es-is", 10), ("ciscoIgrp", 11), ("bbnSpfIgp", 12), ("ospf", 13), ("bgp", 14), ("idpr", 15),))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipForwardProto.setDescription('The routing mechanism via which this route was learned.\n Inclusion of values for gateway routing protocols is\n not intended to imply that hosts should support those\n protocols.')
ipForwardAge = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 2, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipForwardAge.setDescription("The number of seconds since this route was last updated\n or otherwise determined to be correct. Note that no\n semantics of `too old' can be implied except through\n knowledge of the routing protocol by which the route\n was learned.")
ipForwardInfo = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 2, 1, 9), ObjectIdentifier()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ipForwardInfo.setDescription("A reference to MIB definitions specific to the\n particular routing protocol that is responsible for\n this route, as determined by the value specified in the\n route's ipForwardProto value. If this information is\n not present, its value should be set to the OBJECT\n IDENTIFIER { 0 0 }, which is a syntactically valid\n object identifier, and any implementation conforming to\n ASN.1 and the Basic Encoding Rules must be able to\n generate and recognize this value.")
ipForwardNextHopAS = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 2, 1, 10), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ipForwardNextHopAS.setDescription('The Autonomous System Number of the Next Hop. When\n this is unknown or not relevant to the protocol\n indicated by ipForwardProto, zero.')
ipForwardMetric1 = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 2, 1, 11), Integer32().clone(-1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ipForwardMetric1.setDescription("The primary routing metric for this route. The\n semantics of this metric are determined by the routing-\n protocol specified in the route's ipForwardProto value.\n If this metric is not used, its value should be set to\n -1.")
ipForwardMetric2 = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 2, 1, 12), Integer32().clone(-1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ipForwardMetric2.setDescription("An alternate routing metric for this route. The\n semantics of this metric are determined by the routing-\n protocol specified in the route's ipForwardProto value.\n If this metric is not used, its value should be set to\n -1.")
ipForwardMetric3 = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 2, 1, 13), Integer32().clone(-1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ipForwardMetric3.setDescription("An alternate routing metric for this route. The\n semantics of this metric are determined by the routing-\n protocol specified in the route's ipForwardProto value.\n If this metric is not used, its value should be set to\n -1.")
ipForwardMetric4 = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 2, 1, 14), Integer32().clone(-1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ipForwardMetric4.setDescription("An alternate routing metric for this route. The\n semantics of this metric are determined by the routing-\n protocol specified in the route's ipForwardProto value.\n If this metric is not used, its value should be set to\n -1.")
ipForwardMetric5 = MibTableColumn((1, 3, 6, 1, 2, 1, 4, 24, 2, 1, 15), Integer32().clone(-1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ipForwardMetric5.setDescription("An alternate routing metric for this route. The\n semantics of this metric are determined by the routing-\n protocol specified in the route's ipForwardProto value.\n If this metric is not used, its value should be set to\n -1.")
ipForwardOldCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 4, 24, 5, 2, 2)).setObjects(*(("IP-FORWARD-MIB", "ipForwardMultiPathGroup"),))
if mibBuilder.loadTexts: ipForwardOldCompliance.setDescription('The compliance statement for SNMP entities that\n implement the ipForward MIB.')
ipForwardMultiPathGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 4, 24, 5, 1, 2)).setObjects(*(("IP-FORWARD-MIB", "ipForwardNumber"), ("IP-FORWARD-MIB", "ipForwardDest"), ("IP-FORWARD-MIB", "ipForwardMask"), ("IP-FORWARD-MIB", "ipForwardPolicy"), ("IP-FORWARD-MIB", "ipForwardNextHop"), ("IP-FORWARD-MIB", "ipForwardIfIndex"), ("IP-FORWARD-MIB", "ipForwardType"), ("IP-FORWARD-MIB", "ipForwardProto"), ("IP-FORWARD-MIB", "ipForwardAge"), ("IP-FORWARD-MIB", "ipForwardInfo"), ("IP-FORWARD-MIB", "ipForwardNextHopAS"), ("IP-FORWARD-MIB", "ipForwardMetric1"), ("IP-FORWARD-MIB", "ipForwardMetric2"), ("IP-FORWARD-MIB", "ipForwardMetric3"), ("IP-FORWARD-MIB", "ipForwardMetric4"), ("IP-FORWARD-MIB", "ipForwardMetric5"),))
if mibBuilder.loadTexts: ipForwardMultiPathGroup.setDescription('IP Multipath Route Table.')
mibBuilder.exportSymbols("IP-FORWARD-MIB", inetCidrRouteAge=inetCidrRouteAge, ipForwardFullCompliance=ipForwardFullCompliance, inetCidrRouteDest=inetCidrRouteDest, ipCidrRouteInfo=ipCidrRouteInfo, ipForwardNextHop=ipForwardNextHop, ipForwardConformance=ipForwardConformance, PYSNMP_MODULE_ID=ipForward, inetCidrRouteMetric5=inetCidrRouteMetric5, inetCidrRouteEntry=inetCidrRouteEntry, inetCidrRouteIfIndex=inetCidrRouteIfIndex, ipForwardNextHopAS=ipForwardNextHopAS, ipCidrRouteIfIndex=ipCidrRouteIfIndex, inetCidrRouteMetric2=inetCidrRouteMetric2, ipForwardInfo=ipForwardInfo, ipForwardDest=ipForwardDest, inetCidrRouteMetric1=inetCidrRouteMetric1, ipForwardEntry=ipForwardEntry, inetCidrRouteNextHop=inetCidrRouteNextHop, ipCidrRouteNextHopAS=ipCidrRouteNextHopAS, ipCidrRouteNumber=ipCidrRouteNumber, ipForwardMask=ipForwardMask, ipForwardPolicy=ipForwardPolicy, ipCidrRouteProto=ipCidrRouteProto, ipCidrRouteMetric2=ipCidrRouteMetric2, ipForward=ipForward, ipCidrRouteMetric4=ipCidrRouteMetric4, inetForwardCidrRouteGroup=inetForwardCidrRouteGroup, ipForwardMetric3=ipForwardMetric3, ipForwardCompliances=ipForwardCompliances, ipForwardReadOnlyCompliance=ipForwardReadOnlyCompliance, ipCidrRouteMask=ipCidrRouteMask, ipForwardIfIndex=ipForwardIfIndex, ipForwardCompliance=ipForwardCompliance, ipCidrRouteMetric3=ipCidrRouteMetric3, ipForwardProto=ipForwardProto, ipForwardMetric4=ipForwardMetric4, inetCidrRouteDiscards=inetCidrRouteDiscards, ipCidrRouteNextHop=ipCidrRouteNextHop, ipForwardTable=ipForwardTable, ipCidrRouteAge=ipCidrRouteAge, inetCidrRouteNextHopAS=inetCidrRouteNextHopAS, inetCidrRouteTable=inetCidrRouteTable, inetCidrRoutePolicy=inetCidrRoutePolicy, ipCidrRouteType=ipCidrRouteType, ipForwardOldCompliance=ipForwardOldCompliance, ipForwardGroups=ipForwardGroups, ipForwardMetric1=ipForwardMetric1, ipForwardNumber=ipForwardNumber, inetCidrRouteProto=inetCidrRouteProto, ipForwardMetric2=ipForwardMetric2, ipForwardAge=ipForwardAge, inetCidrRouteMetric3=inetCidrRouteMetric3, 
inetCidrRoutePfxLen=inetCidrRoutePfxLen, ipCidrRouteEntry=ipCidrRouteEntry, ipCidrRouteMetric5=ipCidrRouteMetric5, ipForwardType=ipForwardType, ipCidrRouteTable=ipCidrRouteTable, ipForwardMultiPathGroup=ipForwardMultiPathGroup, inetCidrRouteStatus=inetCidrRouteStatus, ipCidrRouteDest=ipCidrRouteDest, ipForwardMetric5=ipForwardMetric5, ipCidrRouteTos=ipCidrRouteTos, inetCidrRouteType=inetCidrRouteType, inetCidrRouteNumber=inetCidrRouteNumber, ipCidrRouteStatus=ipCidrRouteStatus, ipForwardCidrRouteGroup=ipForwardCidrRouteGroup, ipCidrRouteMetric1=ipCidrRouteMetric1, inetCidrRouteMetric4=inetCidrRouteMetric4, inetCidrRouteNextHopType=inetCidrRouteNextHopType, inetCidrRouteDestType=inetCidrRouteDestType)
| 284.203822
| 2,709
| 0.663312
| 5,575
| 44,620
| 5.308341
| 0.11139
| 0.005812
| 0.007096
| 0.009326
| 0.570487
| 0.532912
| 0.508481
| 0.497263
| 0.467156
| 0.460059
| 0
| 0.046025
| 0.230143
| 44,620
| 156
| 2,710
| 286.025641
| 0.815493
| 0.006813
| 0
| 0
| 0
| 0.38255
| 0.6241
| 0.016453
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.067114
| 0
| 0.067114
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
82085fa23cf14d32be7fb8226beba3c6f462d6de
| 182
|
py
|
Python
|
ocean_provider/exceptions.py
|
mariacarmina/provider
|
7b757a0255c1337ecf4a092c4c461654f60a666c
|
[
"Apache-2.0"
] | null | null | null |
ocean_provider/exceptions.py
|
mariacarmina/provider
|
7b757a0255c1337ecf4a092c4c461654f60a666c
|
[
"Apache-2.0"
] | null | null | null |
ocean_provider/exceptions.py
|
mariacarmina/provider
|
7b757a0255c1337ecf4a092c4c461654f60a666c
|
[
"Apache-2.0"
] | null | null | null |
class InvalidSignatureError(Exception):
    """Raised when the signature supplied by the user fails validation."""
class BadRequestError(Exception):
    """Raised for a malformed request or when required parameters are absent."""
| 20.222222
| 63
| 0.725275
| 18
| 182
| 7.333333
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164835
| 182
| 8
| 64
| 22.75
| 0.868421
| 0.445055
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
822a325d815f65888dc5191687145c941033e331
| 83
|
py
|
Python
|
test/test__name__main__.py
|
BriteLight/sandbox
|
bf45ccf2dc649043a501e3752f836725ff0baf5e
|
[
"Unlicense"
] | null | null | null |
test/test__name__main__.py
|
BriteLight/sandbox
|
bf45ccf2dc649043a501e3752f836725ff0baf5e
|
[
"Unlicense"
] | null | null | null |
test/test__name__main__.py
|
BriteLight/sandbox
|
bf45ccf2dc649043a501e3752f836725ff0baf5e
|
[
"Unlicense"
] | null | null | null |
def multi(a, b):
    """Return the product of *a* and *b*."""
    product = a * b
    return product
# Script entry point: quick smoke check of multi() when run directly.
if __name__ == '__main__':
    print(multi(2,3))
| 10.375
| 26
| 0.578313
| 14
| 83
| 2.857143
| 0.785714
| 0.1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.031746
| 0.240964
| 83
| 7
| 27
| 11.857143
| 0.603175
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.25
| 0.5
| 0.25
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 4
|
41422fccfc1186c89970ecea8060f0ef9cf79de4
| 376
|
py
|
Python
|
biodb/api/views/__init__.py
|
juby-gif/BiodB-back
|
1e126e329dc97c020033354e6bd2ea0d9264bf8f
|
[
"BSD-3-Clause"
] | null | null | null |
biodb/api/views/__init__.py
|
juby-gif/BiodB-back
|
1e126e329dc97c020033354e6bd2ea0d9264bf8f
|
[
"BSD-3-Clause"
] | null | null | null |
biodb/api/views/__init__.py
|
juby-gif/BiodB-back
|
1e126e329dc97c020033354e6bd2ea0d9264bf8f
|
[
"BSD-3-Clause"
] | null | null | null |
from api.views.gateway.views import RegisterAPI, LogoutAPI
from api.views.uploads.views import AppleHealthKitListUploadAPI,AppleHealthKitUploadAPI,AppleHealthKitV2UploadAPI
from api.views.dashboard.views import AppleHealthKitListDataAPI,TimeSeriesDataStatisticsAPI,TimeSeriesDataFilteredAPI
from api.views.user_profile.views import UserprofileRetrieveAPI,UserprofileUpdateAPI
| 75.2
| 117
| 0.906915
| 35
| 376
| 9.714286
| 0.542857
| 0.082353
| 0.141176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002786
| 0.045213
| 376
| 4
| 118
| 94
| 0.94429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
415c9eb8da865828b152c94aa325fd2dba68b129
| 142
|
py
|
Python
|
produto/templatestags/filtros.py
|
SricardoSdSouza/Sistema_Delivery
|
40425498b4a5c2bd3a174b1019e8c18827cf4142
|
[
"MIT"
] | 1
|
2021-07-11T22:36:13.000Z
|
2021-07-11T22:36:13.000Z
|
produto/templatestags/filtros.py
|
SricardoSdSouza/Sistema_Delivery
|
40425498b4a5c2bd3a174b1019e8c18827cf4142
|
[
"MIT"
] | null | null | null |
produto/templatestags/filtros.py
|
SricardoSdSouza/Sistema_Delivery
|
40425498b4a5c2bd3a174b1019e8c18827cf4142
|
[
"MIT"
] | null | null | null |
from django import template
register = template.Library()
# Template filter exposing Python's built-in enumerate() to Django templates,
# registered under the template-visible name 'enumerate'. The def is spelled
# 'enumerat' so the builtin is not shadowed at module level.
@register.filter(name='enumerate')
def enumerat(valor):
    # 'valor' (Portuguese for "value") is the iterable passed by the template.
    return enumerate(valor)
| 23.666667
| 34
| 0.774648
| 17
| 142
| 6.470588
| 0.764706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112676
| 142
| 6
| 35
| 23.666667
| 0.873016
| 0
| 0
| 0
| 0
| 0
| 0.062937
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.2
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
4194ef724b926a3193c7465471eaacbde6c2bd3e
| 89
|
py
|
Python
|
mirrors/apps.py
|
likeyiyy/mirrorweb
|
a1544fd76db4da6365caa820a4d4c4216835cd34
|
[
"Apache-2.0"
] | null | null | null |
mirrors/apps.py
|
likeyiyy/mirrorweb
|
a1544fd76db4da6365caa820a4d4c4216835cd34
|
[
"Apache-2.0"
] | null | null | null |
mirrors/apps.py
|
likeyiyy/mirrorweb
|
a1544fd76db4da6365caa820a4d4c4216835cd34
|
[
"Apache-2.0"
] | null | null | null |
from django.apps import AppConfig
class MirrorsConfig(AppConfig):
    """Django application configuration for the 'mirrors' app."""
    # App label/path registered with Django's app registry.
    name = 'mirrors'
| 14.833333
| 33
| 0.752809
| 10
| 89
| 6.7
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168539
| 89
| 5
| 34
| 17.8
| 0.905405
| 0
| 0
| 0
| 0
| 0
| 0.078652
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
68e06f4fbcf5e9a949b2b02af9cbbf0d4e6e7826
| 428
|
py
|
Python
|
l0bnb/relaxation/__init__.py
|
hazimehh/l0bnb
|
37e9faed866c2eba3a68072a4545bc8362bff0e0
|
[
"MIT"
] | null | null | null |
l0bnb/relaxation/__init__.py
|
hazimehh/l0bnb
|
37e9faed866c2eba3a68072a4545bc8362bff0e0
|
[
"MIT"
] | null | null | null |
l0bnb/relaxation/__init__.py
|
hazimehh/l0bnb
|
37e9faed866c2eba3a68072a4545bc8362bff0e0
|
[
"MIT"
] | null | null | null |
# Suppress Numba warning chatter (deprecation, pending-deprecation and
# performance warnings) that would otherwise be emitted when the JIT-compiled
# relaxation solvers are imported and compiled.
import warnings
from numba.errors import NumbaDeprecationWarning, \
    NumbaPendingDeprecationWarning, NumbaPerformanceWarning
# NOTE(review): 'numba.errors' was relocated to 'numba.core.errors' in newer
# Numba releases -- confirm the pinned Numba version still exposes this path.
warnings.simplefilter('ignore', category=NumbaDeprecationWarning)
warnings.simplefilter('ignore', category=NumbaPendingDeprecationWarning)
warnings.simplefilter('ignore', category=NumbaPerformanceWarning)
# Public re-exports for the relaxation subpackage.
from .core import solve as cd_solve
from .mosek import l0mosek
from .gurobi import l0gurobi
| 32.923077
| 72
| 0.850467
| 39
| 428
| 9.307692
| 0.487179
| 0.165289
| 0.214876
| 0.280992
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005102
| 0.084112
| 428
| 12
| 73
| 35.666667
| 0.920918
| 0
| 0
| 0
| 0
| 0
| 0.042056
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.555556
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
68e1f5458f1b66726c67ad7d13ea67e5c2444e3a
| 3,842
|
py
|
Python
|
python/test_raytracing.py
|
troiwill/ray_tracing
|
200a41557eaa04f5e1daddf7e2a75d8060c270e4
|
[
"MIT"
] | 3
|
2021-03-06T08:48:42.000Z
|
2022-01-10T01:42:28.000Z
|
python/test_raytracing.py
|
troiwill/ray_tracing
|
200a41557eaa04f5e1daddf7e2a75d8060c270e4
|
[
"MIT"
] | 4
|
2019-08-26T13:15:53.000Z
|
2021-12-18T18:33:27.000Z
|
python/test_raytracing.py
|
troiwill/ray_tracing
|
200a41557eaa04f5e1daddf7e2a75d8060c270e4
|
[
"MIT"
] | 1
|
2021-05-31T14:45:02.000Z
|
2021-05-31T14:45:02.000Z
|
#!/usr/bin/env python
import numpy as np
import pytest
import raytracing as rt
def test_ray_penetrates_grid_1d_pos_x():
    """A +x ray entering before and exiting past the 1-D grid misses every cell."""
    start = np.array([[-1.23]])
    end = np.array([[6.78]])
    shape = np.array([5])
    size = np.array([0.45])
    map_gt = rt.gridmap(shape, size)
    map_gt.misses[:5] = 1
    # 'grid' rather than 'map', so the builtin map() is not shadowed.
    grid = rt.gridmap(shape, size)
    rt.trace1d(start, end, grid)
    assert map_gt == grid
def test_ray_penetrates_grid_1d_neg_x():
    """A -x ray entering past and exiting before the 1-D grid misses every cell."""
    start = np.array([[1.98]])
    end = np.array([[-6.71]])
    shape = np.array([100])
    size = np.array([0.01])
    map_gt = rt.gridmap(shape, size)
    map_gt.misses = np.ones(100)
    # 'grid' rather than 'map', so the builtin map() is not shadowed.
    grid = rt.gridmap(shape, size)
    rt.trace1d(start, end, grid)
    assert map_gt == grid
def test_ray_starts_and_ends_in_grid_1d_pos_x():
    """A fully interior +x ray misses traversed cells and hits the end cell."""
    start = np.array([[0.671]])
    end = np.array([[0.985]])
    shape = np.array([100])
    size = np.array([0.01])
    map_gt = rt.gridmap(shape, size)
    map_gt.misses[67:98] = 1
    map_gt.hits[98] = 1
    # 'grid' rather than 'map', so the builtin map() is not shadowed.
    grid = rt.gridmap(shape, size)
    rt.trace1d(start, end, grid)
    assert map_gt == grid
def test_ray_starts_and_ends_in_grid_1d_neg_x():
    """A fully interior -x ray misses traversed cells and hits the end cell."""
    start = np.array([[0.985]])
    end = np.array([[0.671]])
    shape = np.array([100])
    size = np.array([0.01])
    map_gt = rt.gridmap(shape, size)
    map_gt.misses[98:67:-1] = 1
    map_gt.hits[67] = 1
    # 'grid' rather than 'map', so the builtin map() is not shadowed.
    grid = rt.gridmap(shape, size)
    rt.trace1d(start, end, grid)
    assert map_gt == grid
def test_ray_starts_in_grid_and_ends_outside_1d_pos_x():
    """A +x ray leaving the grid marks only misses from its start cell onward."""
    start = np.array([[12.1]])
    end = np.array([[1009.7]])
    shape = np.array([51])
    size = np.array([3.0])
    map_gt = rt.gridmap(shape, size)
    map_gt.misses[4:51] = 1
    # 'grid' rather than 'map', so the builtin map() is not shadowed.
    grid = rt.gridmap(shape, size)
    rt.trace1d(start, end, grid)
    assert map_gt == grid
def test_ray_starts_in_grid_and_ends_outside_1d_neg_x():
    """A -x ray leaving the grid marks only misses from its start cell downward."""
    start = np.array([[109.7]])
    end = np.array([[-12.1]])
    shape = np.array([51])
    size = np.array([3.0])
    map_gt = rt.gridmap(shape, size)
    map_gt.misses[:37] = 1
    # 'grid' rather than 'map', so the builtin map() is not shadowed.
    grid = rt.gridmap(shape, size)
    rt.trace1d(start, end, grid)
    assert map_gt == grid
def test_identical_start_and_end_2d_in_cell():
    """A zero-length ray inside a cell registers a single hit in that cell."""
    point = np.array([[0.23, 0.25]])
    shape = np.array([50, 50])
    size = np.array([1.0, 1.0])
    map_gt = rt.gridmap(shape, size)
    map_gt.hits[0,0] = 1
    # 'grid' rather than 'map', so the builtin map() is not shadowed.
    grid = rt.gridmap(shape, size)
    rt.trace2d(point, point, grid)
    assert map_gt == grid
def test_identical_start_and_end_2d_on_grid_line():
    """A zero-length ray exactly on a grid line registers a hit in cell (0, 0)."""
    point = np.array([[0.0, 0.0]])
    shape = np.array([50, 50])
    size = np.array([1.0, 1.0])
    map_gt = rt.gridmap(shape, size)
    map_gt.hits[0,0] = 1
    # 'grid' rather than 'map', so the builtin map() is not shadowed.
    grid = rt.gridmap(shape, size)
    rt.trace2d(point, point, grid)
    assert map_gt == grid
def test_identical_start_and_end_2d_outside_grid():
    """A zero-length ray entirely outside the grid leaves the map
    untouched (no hits, no misses)."""
    point = np.array([[100.0, 100.0]])
    shape = np.array([50, 50])
    size = np.array([1.0, 1.0])
    expected = rt.gridmap(shape, size)
    observed = rt.gridmap(shape, size)
    rt.trace2d(point, point, observed)
    assert expected == observed
def test_parallel_ray_tracing_2d():
    """Trace six rays in one call and compare the accumulated hit/miss
    counters against a hand-built expected map."""
    starts = np.array(
        [[-1.5, +1.5],
         [+1.0, -2.0],
         [+3.5, -1.0],
         [+7.5, +1.0],
         [+5.5, +4.5],
         [-0.5, +2.0]])
    ends = np.array(
        [[+2.5, +1.5],
         [+1.0, -0.5],
         [+3.5, +1.5],
         [+4.5, +0.5],
         [+5.5, +2.5],
         [+1.0, +3.5]])
    shape = np.array([6, 3])
    size = np.array([1, 1])
    # Expected counters, accumulated ray by ray.
    expected = rt.gridmap(shape, size)
    expected.misses[:2, 1] += 1
    expected.hits[2, 1] += 1
    expected.misses[3, 0] += 1
    expected.hits[3, 1] += 1
    expected.misses[5, 0] += 1
    expected.hits[4, 0] += 1
    expected.hits[5, 2] += 1
    expected.misses[0, 2] += 1
    observed = rt.gridmap(shape, size)
    rt.trace2d(starts, ends, observed)
    assert expected == observed
if __name__ == '__main__':
    # Allow running this test module directly instead of via the pytest CLI.
    pytest.main()
| 21.226519
| 56
| 0.567153
| 655
| 3,842
| 3.140458
| 0.114504
| 0.092368
| 0.136121
| 0.175012
| 0.78561
| 0.733593
| 0.703452
| 0.654351
| 0.639281
| 0.606223
| 0
| 0.084798
| 0.244925
| 3,842
| 180
| 57
| 21.344444
| 0.624267
| 0.005206
| 0
| 0.467213
| 0
| 0
| 0.002094
| 0
| 0
| 0
| 0
| 0
| 0.081967
| 1
| 0.081967
| false
| 0
| 0.02459
| 0
| 0.106557
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
68e77d108a7a2bee88503ecddf1919311196b584
| 178
|
py
|
Python
|
strack_connect/config/strack_error.py
|
zuokangbo/strack-connect
|
8dff543d118ee827d1c5b96f749cf43121fd8cfb
|
[
"Apache-2.0"
] | null | null | null |
strack_connect/config/strack_error.py
|
zuokangbo/strack-connect
|
8dff543d118ee827d1c5b96f749cf43121fd8cfb
|
[
"Apache-2.0"
] | null | null | null |
strack_connect/config/strack_error.py
|
zuokangbo/strack-connect
|
8dff543d118ee827d1c5b96f749cf43121fd8cfb
|
[
"Apache-2.0"
] | 2
|
2022-01-04T09:22:37.000Z
|
2022-03-20T19:56:18.000Z
|
# :coding: utf-8
# :copyright: Copyright (c) 2021 strack
class StrackError(RuntimeError):
    """Custom error class for Strack-specific failures.

    BUG FIX: the original ``__init__`` assigned ``self.args = arg``
    directly.  ``BaseException.args`` coerces its value to a tuple, so a
    string message became a tuple of single characters and ``str(e)``
    printed e.g. ``('o', 'o', 'p', 's')``.  Delegating to
    ``super().__init__`` stores ``args`` as ``(arg,)`` so ``str(e)``
    returns the message itself.
    """

    def __init__(self, arg):
        # Let RuntimeError store the argument: args == (arg,).
        super().__init__(arg)
| 17.8
| 39
| 0.629213
| 21
| 178
| 5.142857
| 0.809524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.036496
| 0.230337
| 178
| 9
| 40
| 19.777778
| 0.751825
| 0.41573
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
ec1e4aa042407bcd6f980ea9d824a458ba588668
| 138
|
py
|
Python
|
test.py
|
Zuckonit/QPS
|
fba9b8e8bb4fc38c58c2a6ae2a3a64569aefc48d
|
[
"MIT"
] | 1
|
2015-03-13T06:27:47.000Z
|
2015-03-13T06:27:47.000Z
|
test.py
|
Zuckonit/QPS
|
fba9b8e8bb4fc38c58c2a6ae2a3a64569aefc48d
|
[
"MIT"
] | null | null | null |
test.py
|
Zuckonit/QPS
|
fba9b8e8bb4fc38c58c2a6ae2a3a64569aefc48d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
from qps import QPS
def add(a, b):
    """Return the sum ``a + b``."""
    total = a + b
    return total
# Drive `add(1, 2)` through the QPS harness.
# NOTE(review): the meaning of the first argument (presumably a target
# rate of 1000) is defined by qps.QPS, which is not visible here — confirm.
with QPS(1000, add, 1, 2) as qps:
    pass
| 12.545455
| 33
| 0.608696
| 27
| 138
| 3.111111
| 0.777778
| 0.047619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067961
| 0.253623
| 138
| 10
| 34
| 13.8
| 0.747573
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.2
| 0.2
| 0.2
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 4
|
ec4116a7c60e4d7be673bd15304979f80f77d1b8
| 598
|
py
|
Python
|
web/transiq/customer/views_booking.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | null | null | null |
web/transiq/customer/views_booking.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | 14
|
2020-06-05T23:06:45.000Z
|
2022-03-12T00:00:18.000Z
|
web/transiq/customer/views_booking.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | null | null | null |
from api.decorators import api_post
from customer.booking_helper import do_booking_vendor_request, do_booking_save, do_add_vendor, do_delete_vendor
from customer.decorators import authenticated_user
@api_post
@authenticated_user
def booking_vendor_request(request):
    """POST endpoint (authenticated): delegate to the booking-vendor-request helper."""
    return do_booking_vendor_request(request)
@api_post
@authenticated_user
def booking_save(request):
    """POST endpoint (authenticated): delegate to the booking-save helper."""
    return do_booking_save(request)
@api_post
@authenticated_user
def add_vendor(request):
    """POST endpoint (authenticated): delegate to the add-vendor helper."""
    return do_add_vendor(request)
@api_post
@authenticated_user
def delete_vendor(request):
    """POST endpoint (authenticated): delegate to the delete-vendor helper."""
    return do_delete_vendor(request)
| 20.62069
| 111
| 0.83612
| 84
| 598
| 5.547619
| 0.214286
| 0.195279
| 0.171674
| 0.206009
| 0.306867
| 0.306867
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107023
| 598
| 28
| 112
| 21.357143
| 0.872659
| 0
| 0
| 0.421053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.210526
| false
| 0
| 0.157895
| 0.210526
| 0.578947
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
ec499e7ae67a8dabc5cda32aeaaffca332fed2be
| 319
|
py
|
Python
|
config.py
|
gkswjdzz/word_cloud
|
45e23ed498471e0aad73b60b71781aa0e3d6f1e1
|
[
"MIT"
] | 1
|
2020-04-12T11:24:14.000Z
|
2020-04-12T11:24:14.000Z
|
config.py
|
gkswjdzz/word_cloud
|
45e23ed498471e0aad73b60b71781aa0e3d6f1e1
|
[
"MIT"
] | 1
|
2020-03-18T06:53:15.000Z
|
2020-03-18T06:53:15.000Z
|
config.py
|
gkswjdzz/word_cloud
|
45e23ed498471e0aad73b60b71781aa0e3d6f1e1
|
[
"MIT"
] | null | null | null |
class Config(object):
    """Endpoint URLs for the external model services.

    The commented-out values point at local development instances.
    """
    # DETECTRON_URL = 'http://localhost/predictions'
    DETECTRON_URL = 'https://master-ainized-detectron2-gkswjdzz.endpoint.ainize.ai/predictions'
    # STANFORDNLP_URL = 'http://localhost:81/analyze'
    STANFORDNLP_URL = 'https://master-ainized-stanfordnlp-gkswjdzz.endpoint.ainize.ai/analyze'
| 53.166667
| 95
| 0.758621
| 36
| 319
| 6.611111
| 0.527778
| 0.10084
| 0.134454
| 0.176471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010417
| 0.097179
| 319
| 5
| 96
| 63.8
| 0.815972
| 0.291536
| 0
| 0
| 0
| 0
| 0.638393
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
6ba473384b79dd196101b1500a8cb241b57336b8
| 1,965
|
py
|
Python
|
utils/boilerplate/test_ox.py
|
cfginn/sap-simulation-package
|
73314e5380cec5c61a9fe5ff5fbafa25b9e2beac
|
[
"MIT"
] | null | null | null |
utils/boilerplate/test_ox.py
|
cfginn/sap-simulation-package
|
73314e5380cec5c61a9fe5ff5fbafa25b9e2beac
|
[
"MIT"
] | null | null | null |
utils/boilerplate/test_ox.py
|
cfginn/sap-simulation-package
|
73314e5380cec5c61a9fe5ff5fbafa25b9e2beac
|
[
"MIT"
] | null | null | null |
import unittest
from pysapets.ox import Ox
from pysapets.animal import Animal
import pysapets.constants as constants
from unittest.mock import patch
from io import StringIO
from copy import deepcopy
class OxTest(unittest.TestCase):
    """Unit tests for the Ox animal."""

    def setUp(self):
        self.ox = Ox()
        # The ox plus four generic (2 attack / 2 health) friends.
        self.friends = [self.ox] + [Animal(2, 2) for _ in range(4)]

    def test_get_type(self):
        # get_type must report the OX constant.
        self.assertEqual(constants.OX, self.ox.get_type())

    def test_get_health(self):
        # Base health is 4.
        self.assertEqual(4, self.ox.get_health())

    def test_get_attack(self):
        # Base attack is 1.
        self.assertEqual(1, self.ox.get_attack())

    def test_init_add_health(self):
        # addHealth is stacked on top of the base health.
        buffed = Ox(addHealth=3)
        self.assertEqual(4 + 3, buffed.get_health())

    def test_init_add_attack(self):
        # addAttack is stacked on top of the base attack.
        buffed = Ox(addAttack=3)
        self.assertEqual(1 + 3, buffed.get_attack())

    def test_init_add_health_attack(self):
        # Both stats can be boosted at construction time.
        buffed = Ox(addHealth=3, addAttack=3)
        self.assertEqual(4 + 3, buffed.get_health())
        self.assertEqual(1 + 3, buffed.get_attack())

    def test_get_ability_trigger(self):
        # The ability fires on the FAINT trigger.
        self.assertEqual(constants.FAINT, self.ox.get_ability_trigger())

    def test_get_ability_triggeredBy(self):
        # The trigger source is the friend directly ahead.
        self.assertEqual(constants.FRIEND_AHEAD, self.ox.get_ability_triggeredBy())

    # TODO add relevant tests for ox ability
    def test_run_ability(self):
        pass

    def test_run_ability_level_1(self):
        pass

    def test_run_ability_level_2(self):
        pass

    def test_run_ability_level_3(self):
        pass
| 29.328358
| 91
| 0.725191
| 294
| 1,965
| 4.680272
| 0.214286
| 0.061047
| 0.036337
| 0.083576
| 0.518895
| 0.441134
| 0.298692
| 0.12718
| 0.081395
| 0.05814
| 0
| 0.016928
| 0.188295
| 1,965
| 67
| 92
| 29.328358
| 0.845768
| 0.242239
| 0
| 0.205128
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014925
| 0.230769
| 1
| 0.333333
| false
| 0.102564
| 0.179487
| 0
| 0.538462
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
6bab4997522cb5b5ede35439faba7950d984bd5f
| 45
|
py
|
Python
|
Flask-todolist-Sqlite3-master/venv/lib/python3.6/hmac.py
|
IncredibleDraco/MyScholar
|
272aafa33f7227d1bc0d937d046788cbabede453
|
[
"Apache-2.0"
] | null | null | null |
Flask-todolist-Sqlite3-master/venv/lib/python3.6/hmac.py
|
IncredibleDraco/MyScholar
|
272aafa33f7227d1bc0d937d046788cbabede453
|
[
"Apache-2.0"
] | null | null | null |
Flask-todolist-Sqlite3-master/venv/lib/python3.6/hmac.py
|
IncredibleDraco/MyScholar
|
272aafa33f7227d1bc0d937d046788cbabede453
|
[
"Apache-2.0"
] | 1
|
2019-11-25T10:25:21.000Z
|
2019-11-25T10:25:21.000Z
|
/home/sheldon/anaconda3/lib/python3.6/hmac.py
| 45
| 45
| 0.822222
| 8
| 45
| 4.625
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 0
| 45
| 1
| 45
| 45
| 0.755556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
6bb3371c6c89480d900704a9a862879d1de374b3
| 107
|
py
|
Python
|
INF101/TP/TP1/2.1.2.py
|
Marshellson/UGA_IMF
|
eb293deabcc5ef6e45617d8c5bb6268b63b34f21
|
[
"MIT"
] | 1
|
2021-09-21T21:53:17.000Z
|
2021-09-21T21:53:17.000Z
|
INF101/TP/TP1/2.1.2.py
|
Marshellson/UGA_INF
|
eb293deabcc5ef6e45617d8c5bb6268b63b34f21
|
[
"MIT"
] | null | null | null |
INF101/TP/TP1/2.1.2.py
|
Marshellson/UGA_INF
|
eb293deabcc5ef6e45617d8c5bb6268b63b34f21
|
[
"MIT"
] | null | null | null |
# Minimal interactive greeting script (prompts and output are in French).
print("Premier programme")
nom = input("Donnez votre nom : ")
greeting = "Bonjour %s, comment vas-tu? " % nom
print(greeting)
| 21.4
| 43
| 0.663551
| 15
| 107
| 4.733333
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.158879
| 107
| 4
| 44
| 26.75
| 0.788889
| 0
| 0
| 0
| 0
| 0
| 0.603774
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
6bba14d12f21cdb973c99d3cb027c91f1c037281
| 217
|
py
|
Python
|
watson_bots_communicator/__init__.py
|
cesarbruschetta/bot_telegran_watson
|
803750ace6641a56c108ed26e8320cfcca0869c8
|
[
"Apache-2.0"
] | null | null | null |
watson_bots_communicator/__init__.py
|
cesarbruschetta/bot_telegran_watson
|
803750ace6641a56c108ed26e8320cfcca0869c8
|
[
"Apache-2.0"
] | null | null | null |
watson_bots_communicator/__init__.py
|
cesarbruschetta/bot_telegran_watson
|
803750ace6641a56c108ed26e8320cfcca0869c8
|
[
"Apache-2.0"
] | null | null | null |
import sys
from os import path

# IDIOM FIX: the original `from os import sys, path` relied on the `os`
# module's accidental re-export of `sys`; import `sys` directly instead.
# When executed as a standalone script (not imported as part of the
# package), add the project root to sys.path so the absolute package
# import below resolves.
if __name__ == '__main__' and __package__ is None:
    sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
    from watson_bots_communicator.processor import main
    main()
| 27.125
| 71
| 0.746544
| 30
| 217
| 4.8
| 0.666667
| 0.097222
| 0.208333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152074
| 217
| 7
| 72
| 31
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0.036866
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
6be3c955ce224ecbe8a509961ec94c87082c5939
| 15,844
|
py
|
Python
|
tests/test_azure_2_sas.py
|
OLC-LOC-Bioinformatics/AzureStorage
|
ac4dbd83e307a5b8d3fd3b77103ec837b821c564
|
[
"MIT"
] | null | null | null |
tests/test_azure_2_sas.py
|
OLC-LOC-Bioinformatics/AzureStorage
|
ac4dbd83e307a5b8d3fd3b77103ec837b821c564
|
[
"MIT"
] | null | null | null |
tests/test_azure_2_sas.py
|
OLC-LOC-Bioinformatics/AzureStorage
|
ac4dbd83e307a5b8d3fd3b77103ec837b821c564
|
[
"MIT"
] | null | null | null |
from azure_storage.methods import extract_account_name, sas_prep, write_sas
from azure_storage.azure_sas import AzureContainerSAS, AzureSAS, cli, container_sas, file_sas, folder_sas
from unittest.mock import patch
import argparse
import pathlib
import pytest
import os
@pytest.fixture(name='variables', scope='module')
def setup():
    # Module-scoped bag of shared mutable state; later tests attach extra
    # attributes (sas_urls, connection_string, ...) to the same instance.
    class Variables:
        def __init__(self):
            # Extract the account name and connection string from the system keyring prior to running tests
            self.passphrase = 'AzureStorage'
            self.account_name = extract_account_name(passphrase=self.passphrase)
            self.container_name = '00000container'
            # Paths to the local fixture files and the shared output file.
            self.test_path = os.path.abspath(os.path.dirname(__file__))
            self.file_path = os.path.join(self.test_path, 'files')
            self.output_file = os.path.join(self.file_path, 'sas_urls.txt')
    return Variables()
def delete_output_file(output_file):
    """Remove *output_file* from disk, then verify the removal succeeded."""
    os.remove(output_file)
    still_present = os.path.isfile(output_file)
    assert not still_present
def read_contents(output_file):
    """Return all lines of *output_file* as a list of strings.

    BUG FIX: the original used ``open(output_file, 'r').readlines()``
    without closing the handle; a ``with`` block guarantees the file is
    closed even if reading raises.
    """
    with open(output_file, 'r') as handle:
        return handle.readlines()
def test_sas_prep(variables):
    """sas_prep returns clients plus a valid connection string for the account."""
    (variables.container_name,
     variables.connection_string,
     variables.account_key,
     variables.blob_service_client,
     variables.container_client) = sas_prep(
        container_name=variables.container_name,
        passphrase=variables.passphrase,
        account_name=variables.account_name)
    assert variables.connection_string.startswith('DefaultEndpointsProtocol')
@pytest.mark.parametrize(
    'file_name',
    ['file_1.txt',
     'nested_folder/nested_file_2.txt',
     'nested_folder/nested_folder_2/nested_folder_test_1.txt'])
def test_file_sas(variables, file_name):
    """A SAS URL is generated for each requested file, keyed by basename."""
    variables.sas_urls = AzureSAS.file_sas(
        container_client=variables.container_client,
        account_name=variables.account_name,
        container_name=variables.container_name,
        object_name=file_name,
        account_key=variables.account_key,
        expiry=10,
        sas_urls=dict())
    expected_prefix = (
        f'https://{variables.account_name}.blob.core.windows.net/'
        f'{variables.container_name}/{file_name}?')
    assert variables.sas_urls[os.path.basename(file_name)].startswith(expected_prefix)
def test_file_sas_invalid_expiry(variables):
    """An expiry above the allowed maximum aborts with SystemExit."""
    with pytest.raises(SystemExit):
        AzureSAS(object_name='file_1.txt',
                 container_name=variables.container_name,
                 output_file=variables.output_file,
                 account_name=variables.account_name,
                 passphrase=variables.passphrase,
                 expiry=700,
                 verbosity='info',
                 category='file').main()


def test_file_sas_invalid_category(variables):
    """Requesting category 'container' through the file entry point aborts."""
    with pytest.raises(SystemExit):
        AzureSAS(object_name='file_1.txt',
                 container_name=variables.container_name,
                 output_file=variables.output_file,
                 account_name=variables.account_name,
                 passphrase=variables.passphrase,
                 expiry=7,
                 verbosity='info',
                 category='container').main()


def test_file_sas_invalid_path(variables):
    """An unwritable output location aborts (skipped on CircleCI, where
    root paths may behave differently)."""
    bad_output = os.path.join('/invalid', 'sas_urls.txt')
    if not os.environ.get('CIRCLECI'):
        with pytest.raises(SystemExit):
            AzureSAS(object_name='file_1.txt',
                     container_name=variables.container_name,
                     output_file=bad_output,
                     account_name=variables.account_name,
                     passphrase=variables.passphrase,
                     expiry=7,
                     verbosity='info',
                     category='file').main()
def test_file_sas_tilde(variables):
    """Output paths written with a leading ~ are expanded correctly."""
    relative = pathlib.Path(variables.file_path).relative_to(pathlib.Path.home())
    tilde_path = f'~{os.sep}{relative}'
    sas_file = AzureSAS(object_name='file_1.txt',
                        container_name=variables.container_name,
                        output_file=os.path.join(tilde_path, 'sas_urls.txt'),
                        account_name=variables.account_name,
                        passphrase=variables.passphrase,
                        expiry=7,
                        verbosity='info',
                        category='file')
    sas_file.main()
    contents = read_contents(output_file=variables.output_file)
    prefix = (f'https://{variables.account_name}.blob.core.windows.net/'
              f'{variables.container_name}/')
    assert contents[0].startswith(prefix)
    delete_output_file(output_file=variables.output_file)


@pytest.mark.parametrize(
    'file_name',
    ['file_3.txt',
     'nested_folder/nested_file_1.txt',
     'nested_folder_3/nested_folder_2/nested_folder_test_1.txt'])
def test_file_sas_invalid(variables, file_name):
    """Requesting a SAS URL for a blob that does not exist aborts."""
    with pytest.raises(SystemExit):
        AzureSAS.file_sas(
            container_client=variables.container_client,
            account_name=variables.account_name,
            container_name=variables.container_name,
            object_name=file_name,
            account_key=variables.account_key,
            expiry=10,
            sas_urls=dict())
def test_sas_urls_output_exists(variables):
    """write_sas persists the generated URLs to the output file."""
    write_sas(output_file=variables.output_file, sas_urls=variables.sas_urls)
    assert os.path.isfile(variables.output_file)


def test_sas_urls_output_contents(variables):
    """The persisted file begins with the expected blob-endpoint URL."""
    first_line = read_contents(output_file=variables.output_file)[0]
    prefix = (f'https://{variables.account_name}.blob.core.windows.net/'
              f'{variables.container_name}/')
    assert first_line.startswith(prefix)
@pytest.mark.parametrize('output_file,expiry',
                         [('sas_urls.txt', 0),
                          ('sas_urls.txt', 500),
                          ('', 1),
                          ('folder/', 1)])
@patch('argparse.ArgumentParser.parse_args')
def test_file_sas_integration_invalid(mock_args, output_file, expiry, variables):
    """Out-of-range expiry values or bad output paths make the CLI exit."""
    target_file = os.path.join(variables.file_path, output_file)
    with pytest.raises(SystemExit):
        mock_args.return_value = argparse.Namespace(
            passphrase=variables.passphrase,
            account_name=variables.account_name,
            container_name=variables.container_name,
            verbosity='info',
            file='file_1.txt',
            output_file=target_file,
            expiry=expiry)
        file_sas(args=cli())


@patch('argparse.ArgumentParser.parse_args')
def test_file_sas_integration(mock_args, variables):
    """End-to-end CLI run writes a SAS URL for a single file."""
    delete_output_file(output_file=variables.output_file)
    mock_args.return_value = argparse.Namespace(
        passphrase=variables.passphrase,
        account_name=variables.account_name,
        container_name=variables.container_name,
        verbosity='info',
        file='file_1.txt',
        output_file=variables.output_file,
        expiry=1)
    file_sas(args=cli())
    contents = read_contents(output_file=variables.output_file)
    prefix = (f'https://{variables.account_name}.blob.core.windows.net/'
              f'{variables.container_name}/')
    assert contents[0].startswith(prefix)
    delete_output_file(output_file=variables.output_file)
@pytest.mark.parametrize('folder_name,expected_dictionary_length',
                         [('nested_folder', 3),
                          ('nested_folder/nested_folder_2/', 1)])
def test_folder_sas(variables, folder_name, expected_dictionary_length):
    """folder_sas yields one URL per file below the requested folder."""
    variables.sas_urls = AzureSAS.folder_sas(
        container_client=variables.container_client,
        account_name=variables.account_name,
        container_name=variables.container_name,
        object_name=folder_name,
        account_key=variables.account_key,
        expiry=10,
        sas_urls=dict())
    assert len(variables.sas_urls) == expected_dictionary_length
@pytest.mark.parametrize('folder_name',
                         ['nested3',
                          '',
                          'nested_folder/nested_folder_2/nested_folder_test_1.txt',
                          'nested_folder/nested_folder_1/',
                          'nested_folder/nested_folder_2/nested_folder_3'])
def test_folder_sas_invalid(variables, folder_name):
    """Missing folders — or a file path where a folder is expected — abort.

    BUG FIX: the parametrize list was missing a comma after the
    '...nested_folder_test_1.txt' entry, so implicit string concatenation
    fused it with 'nested_folder/nested_folder_1/' into one bogus
    parameter; the intended five cases only ran as four.
    """
    with pytest.raises(SystemExit):
        variables.sas_urls = AzureSAS.folder_sas(container_client=variables.container_client,
                                                 account_name=variables.account_name,
                                                 container_name=variables.container_name,
                                                 object_name=folder_name,
                                                 account_key=variables.account_key,
                                                 expiry=10,
                                                 sas_urls=dict())
@patch('argparse.ArgumentParser.parse_args')
def test_folder_sas_integration(mock_args, variables):
    """CLI folder run writes the SAS URLs for every file in the folder."""
    mock_args.return_value = argparse.Namespace(
        passphrase=variables.passphrase,
        account_name=variables.account_name,
        container_name=variables.container_name,
        verbosity='info',
        folder='nested_folder',
        output_file=variables.output_file,
        expiry=1)
    folder_sas(args=cli())
    contents = read_contents(output_file=variables.output_file)
    prefix = (f'https://{variables.account_name}.blob.core.windows.net/'
              f'{variables.container_name}/')
    assert contents[0].startswith(prefix)


def test_folder_sas_urls_integration_output_length(variables):
    """The previous integration run produced exactly three URLs."""
    assert len(read_contents(output_file=variables.output_file)) == 3
    delete_output_file(output_file=variables.output_file)
def test_container_sas(variables):
    """container_sas yields a URL for every blob in the container (ten)."""
    variables.sas_urls = AzureContainerSAS.container_sas(
        container_client=variables.container_client,
        account_name=variables.account_name,
        container_name=variables.container_name,
        account_key=variables.account_key,
        expiry=1,
        sas_urls=dict())
    assert len(variables.sas_urls) == 10


def test_container_sas_tilde(variables):
    """Container-level SAS generation also accepts ~-prefixed output paths."""
    relative = pathlib.Path(variables.file_path).relative_to(pathlib.Path.home())
    tilde_path = f'~{os.sep}{relative}'
    sas = AzureContainerSAS(container_name=variables.container_name,
                            output_file=os.path.join(tilde_path, 'sas_urls.txt'),
                            account_name=variables.account_name,
                            passphrase=variables.passphrase,
                            expiry=1,
                            verbosity='info')
    sas.main()
    first_line = read_contents(output_file=variables.output_file)[0]
    prefix = (f'https://{variables.account_name}.blob.core.windows.net/'
              f'{variables.container_name}/')
    assert first_line.startswith(prefix)
    delete_output_file(output_file=variables.output_file)
def test_container_invalid_name(variables):
    """An unknown container name aborts with SystemExit."""
    with pytest.raises(SystemExit):
        AzureContainerSAS(container_name='000000000container',
                          output_file=variables.output_file,
                          account_name=variables.account_name,
                          passphrase=variables.passphrase,
                          expiry=1,
                          verbosity='info').main()


def test_container_invalid_path(variables):
    """An unwritable output location aborts (skipped on CircleCI)."""
    if not os.environ.get('CIRCLECI'):
        with pytest.raises(SystemExit):
            AzureContainerSAS(container_name=variables.container_name,
                              output_file=os.path.join('/invalid', 'sas_urls.txt'),
                              account_name=variables.account_name,
                              passphrase=variables.passphrase,
                              expiry=1,
                              verbosity='info').main()


def test_container_folder_provided(variables):
    """Passing a directory as the output file aborts with SystemExit."""
    with pytest.raises(SystemExit):
        AzureContainerSAS(container_name=variables.container_name,
                          output_file=variables.file_path,
                          account_name=variables.account_name,
                          passphrase=variables.passphrase,
                          expiry=1,
                          verbosity='info').main()


def test_container_invalid_expiry(variables):
    """An expiry above the allowed maximum aborts with SystemExit."""
    with pytest.raises(SystemExit):
        AzureContainerSAS(container_name=variables.container_name,
                          output_file=variables.output_file,
                          account_name=variables.account_name,
                          passphrase=variables.passphrase,
                          expiry=500,
                          verbosity='info').main()
@patch('argparse.ArgumentParser.parse_args')
def test_container_sas_integration(mock_args, variables):
    """CLI container run writes one URL per blob (ten in the fixture)."""
    mock_args.return_value = argparse.Namespace(
        passphrase=variables.passphrase,
        account_name=variables.account_name,
        container_name=variables.container_name,
        verbosity='info',
        output_file=variables.output_file,
        expiry=1)
    container_sas(args=cli())
    assert len(read_contents(output_file=variables.output_file)) == 10


def test_delete_output_file(variables):
    """Final clean-up: remove the shared output file."""
    delete_output_file(output_file=variables.output_file)
| 48.304878
| 119
| 0.546011
| 1,447
| 15,844
| 5.670352
| 0.085695
| 0.087751
| 0.067032
| 0.063985
| 0.795491
| 0.720658
| 0.710786
| 0.69153
| 0.671298
| 0.641926
| 0
| 0.007991
| 0.376041
| 15,844
| 327
| 120
| 48.452599
| 0.82197
| 0.00587
| 0
| 0.652015
| 0
| 0
| 0.101025
| 0.041045
| 0
| 0
| 0
| 0
| 0.047619
| 1
| 0.098901
| false
| 0.058608
| 0.025641
| 0
| 0.135531
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
6bfdc4352f7391d8dc960603f34dc993b5664c14
| 160
|
py
|
Python
|
tests/__init__.py
|
uTest/appthwack-python
|
ba6872a63a03d3aca5ac538dd0bf3dcc641d67b3
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
uTest/appthwack-python
|
ba6872a63a03d3aca5ac538dd0bf3dcc641d67b3
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
uTest/appthwack-python
|
ba6872a63a03d3aca5ac538dd0bf3dcc641d67b3
|
[
"MIT"
] | 1
|
2020-04-28T11:46:17.000Z
|
2020-04-28T11:46:17.000Z
|
"""
appthwack.tests
~~~~~~~~~~~~~~~
Package which contains tests for the AppThwack client.
"""
# Package author / contact metadata.
__author__ = 'Andrew Hawker <andrew@appthwack.com>'
| 20
| 58
| 0.625
| 16
| 160
| 6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1875
| 160
| 7
| 59
| 22.857143
| 0.738462
| 0.54375
| 0
| 0
| 0
| 0
| 0.679245
| 0.415094
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
d40fc2fbf0f1bb11fa294941e284a9a49933924a
| 497
|
py
|
Python
|
metaopt/concurrent/worker/thread.py
|
cigroup-ol/metaopt
|
6dfd5105d3c6eaf00f96670175cae16021069514
|
[
"BSD-3-Clause"
] | 8
|
2015-02-02T21:42:23.000Z
|
2019-06-30T18:12:43.000Z
|
metaopt/concurrent/worker/thread.py
|
cigroup-ol/metaopt
|
6dfd5105d3c6eaf00f96670175cae16021069514
|
[
"BSD-3-Clause"
] | 4
|
2015-09-24T14:12:38.000Z
|
2021-12-08T22:42:52.000Z
|
metaopt/concurrent/worker/thread.py
|
cigroup-ol/metaopt
|
6dfd5105d3c6eaf00f96670175cae16021069514
|
[
"BSD-3-Clause"
] | 6
|
2015-02-27T12:35:33.000Z
|
2020-10-15T21:04:02.000Z
|
# -*- coding: utf-8 -*-
"""
Worker implementation that executes objective functions in Python threads.
"""
# Future
from __future__ import absolute_import, division, print_function, \
unicode_literals, with_statement
# Standard Library
from threading import Thread
# First Party
from metaopt.worker.worker import Worker
class ThreadWorker(Thread, Worker):
    """
    Worker implementation that executes objective functions in Python threads.

    BUG FIX: the original ``__init__`` body was ``pass``, so
    ``Thread.__init__`` never ran and calling ``start()`` on an instance
    would raise ``RuntimeError("thread.__init__() not called")``.
    """
    def __init__(self):
        # Initialize the Thread machinery.  (Py2-compatible super call,
        # matching the __future__ imports at the top of this module.)
        # NOTE(review): Worker's own initializer — project code not visible
        # here — is still not invoked; confirm it requires none.
        super(ThreadWorker, self).__init__()
| 21.608696
| 78
| 0.736419
| 56
| 497
| 6.321429
| 0.642857
| 0.112994
| 0.135593
| 0.180791
| 0.367232
| 0.367232
| 0.367232
| 0.367232
| 0.367232
| 0
| 0
| 0.002463
| 0.183099
| 497
| 22
| 79
| 22.590909
| 0.869458
| 0.418511
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0.142857
| 0.428571
| 0
| 0.714286
| 0.142857
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 4
|
d477e02cd5f88311d8ce8e85d18cee9ce5e8289e
| 868
|
py
|
Python
|
scenario_builder/builder/compose_runner.py
|
ameserole/Forensic-Scenario-Builder
|
d700f2453cdf8dbf87cdfad5ee53c0a467163186
|
[
"MIT"
] | null | null | null |
scenario_builder/builder/compose_runner.py
|
ameserole/Forensic-Scenario-Builder
|
d700f2453cdf8dbf87cdfad5ee53c0a467163186
|
[
"MIT"
] | 17
|
2018-05-07T14:56:55.000Z
|
2018-05-16T15:16:35.000Z
|
scenario_builder/builder/compose_runner.py
|
ameserole/Forensic-Scenario-Builder
|
d700f2453cdf8dbf87cdfad5ee53c0a467163186
|
[
"MIT"
] | null | null | null |
from .. import utils
class ComposeRunner:
    """
    Wrapper class for running docker-compose commands.
    """
    def __init__(self):
        pass

    def _run(self, subcommand, *extra):
        # Assemble and execute `docker-compose <subcommand> [extra...]`.
        cmd = ['docker-compose', subcommand]
        cmd.extend(extra)
        utils.run_cmd(cmd)

    def build(self):
        """Run ``docker-compose build``."""
        self._run('build')

    def up(self, detach=False):
        """Run ``docker-compose up``, optionally detached.

        NOTE(review): the original passed '--d' for detach; docker-compose's
        documented flag is '-d'/'--detach' — preserved here, but confirm.
        """
        if detach:
            self._run('up', '--d')
        else:
            self._run('up')

    def down(self):
        """Run ``docker-compose down``."""
        self._run('down')

    def stop(self):
        """Run ``docker-compose stop``.

        BUG FIX: the original appended to ``cdm`` (a typo for ``cmd``),
        which raised NameError whenever stop() was called.
        """
        self._run('stop')

    def pause(self):
        """Run ``docker-compose pause``."""
        self._run('pause')

    def unpause(self):
        """Run ``docker-compose unpause``."""
        self._run('unpause')
| 20.666667
| 53
| 0.529954
| 101
| 868
| 4.455446
| 0.287129
| 0.202222
| 0.213333
| 0.186667
| 0.502222
| 0.257778
| 0
| 0
| 0
| 0
| 0
| 0
| 0.326037
| 868
| 41
| 54
| 21.170732
| 0.769231
| 0.056452
| 0
| 0.4
| 0
| 0
| 0.141968
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.233333
| false
| 0.033333
| 0.033333
| 0
| 0.3
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
d483922a7f150605be33959997d1bd486a3f0bc4
| 55
|
py
|
Python
|
Flask/Flask.py
|
96486d9b/jianshu
|
b7c50b0c5bbfd09810177cf7f561e6d04da3d8c6
|
[
"Apache-2.0"
] | 3
|
2018-01-25T10:34:11.000Z
|
2018-12-27T09:04:23.000Z
|
Flask/Flask.py
|
ttcqaq/jianshu
|
b7c50b0c5bbfd09810177cf7f561e6d04da3d8c6
|
[
"Apache-2.0"
] | null | null | null |
Flask/Flask.py
|
ttcqaq/jianshu
|
b7c50b0c5bbfd09810177cf7f561e6d04da3d8c6
|
[
"Apache-2.0"
] | 1
|
2018-09-11T00:04:47.000Z
|
2018-09-11T00:04:47.000Z
|
# Entry point: serve the Flask application on all interfaces, port 8000.
from app import app

app.run(host='0.0.0.0', port=8000)
| 18.333333
| 34
| 0.690909
| 13
| 55
| 2.923077
| 0.615385
| 0.157895
| 0.157895
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163265
| 0.109091
| 55
| 3
| 34
| 18.333333
| 0.612245
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
d4867a9a9b6dd0cfadcdb744ffc21ab3a658a958
| 88
|
py
|
Python
|
code/__init__.py
|
akimaleo/raspberry-PiLight
|
7e22db065c05e42f02944a89c9c3f5643c748b78
|
[
"MIT"
] | null | null | null |
code/__init__.py
|
akimaleo/raspberry-PiLight
|
7e22db065c05e42f02944a89c9c3f5643c748b78
|
[
"MIT"
] | null | null | null |
code/__init__.py
|
akimaleo/raspberry-PiLight
|
7e22db065c05e42f02944a89c9c3f5643c748b78
|
[
"MIT"
] | null | null | null |
# NOTE(review): `bl_` and `lighton` look like implicit relative imports
# (Python 2 style); under Python 3 these would need to be absolute or
# explicit-relative — confirm the target runtime.
import bl_
import lighton
if __name__ == '__main__':
    # Initialize the backlight module, then hand control to the light loop.
    bl_.init()
    lighton.main()
| 12.571429
| 26
| 0.659091
| 11
| 88
| 4.363636
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.215909
| 88
| 6
| 27
| 14.666667
| 0.695652
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
2e1b32b97221c922f6cfcf11cc4df270f1f82741
| 89
|
py
|
Python
|
aiomatrix/dispatcher/storage/internal_data/models/internal_data_pair.py
|
Forden/aiomatrix
|
d258076bae8eb776495b92be46ee9f4baec8d9a6
|
[
"MIT"
] | 2
|
2021-10-29T18:07:08.000Z
|
2021-11-19T00:25:43.000Z
|
aiomatrix/dispatcher/storage/internal_data/models/internal_data_pair.py
|
Forden/aiomatrix
|
d258076bae8eb776495b92be46ee9f4baec8d9a6
|
[
"MIT"
] | 1
|
2022-03-06T11:17:43.000Z
|
2022-03-06T11:17:43.000Z
|
aiomatrix/dispatcher/storage/internal_data/models/internal_data_pair.py
|
Forden/aiomatrix
|
d258076bae8eb776495b92be46ee9f4baec8d9a6
|
[
"MIT"
] | null | null | null |
import pydantic
class InternalDataPair(pydantic.BaseModel):
    """A key/value pair held in the dispatcher's internal-data storage."""
    # Lookup key for the stored value.
    key: str
    # Payload associated with the key.
    data: str
| 12.714286
| 43
| 0.730337
| 10
| 89
| 6.5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.202247
| 89
| 6
| 44
| 14.833333
| 0.915493
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
2e3f72361f861bfd710235d3a5d39f37ff2ee968
| 751
|
py
|
Python
|
bbc/apps/accounts/migrations/0002_auto_20171006_1044.py
|
blopker/bluebutton-sample-client-django
|
92bc221016d7ab3eed995bf2a8b3256f706f18c2
|
[
"Apache-2.0"
] | 4
|
2017-12-06T07:34:17.000Z
|
2018-12-17T19:33:16.000Z
|
bbc/apps/accounts/migrations/0002_auto_20171006_1044.py
|
blopker/bluebutton-sample-client-django
|
92bc221016d7ab3eed995bf2a8b3256f706f18c2
|
[
"Apache-2.0"
] | 7
|
2018-02-11T06:06:17.000Z
|
2019-04-08T14:40:10.000Z
|
bbc/apps/accounts/migrations/0002_auto_20171006_1044.py
|
blopker/bluebutton-sample-client-django
|
92bc221016d7ab3eed995bf2a8b3256f706f18c2
|
[
"Apache-2.0"
] | 6
|
2018-02-22T22:19:41.000Z
|
2021-04-13T20:25:57.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-10-06 10:44
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('accounts', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='userprofile',
name='access_key_id',
),
migrations.RemoveField(
model_name='userprofile',
name='access_key_reset',
),
migrations.RemoveField(
model_name='userprofile',
name='access_key_secret',
),
migrations.RemoveField(
model_name='userprofile',
name='organization_name',
),
]
| 23.46875
| 48
| 0.573901
| 69
| 751
| 6
| 0.550725
| 0.202899
| 0.251208
| 0.289855
| 0.5
| 0.5
| 0.391304
| 0.391304
| 0
| 0
| 0
| 0.040936
| 0.316911
| 751
| 31
| 49
| 24.225806
| 0.766082
| 0.090546
| 0
| 0.5
| 1
| 0
| 0.186765
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.208333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
2e6b7a2cabb2d47b1045ddb290c9ef42d0d2b556
| 95
|
py
|
Python
|
snac/main.py
|
eric-lee-wise/SNAC
|
3529e404dd3fdf17c3a48df54523407bbdf1e84d
|
[
"MIT"
] | null | null | null |
snac/main.py
|
eric-lee-wise/SNAC
|
3529e404dd3fdf17c3a48df54523407bbdf1e84d
|
[
"MIT"
] | null | null | null |
snac/main.py
|
eric-lee-wise/SNAC
|
3529e404dd3fdf17c3a48df54523407bbdf1e84d
|
[
"MIT"
] | null | null | null |
from SitePinger import SitePinger
google_site = SitePinger("www.google.com")
google_site.run()
| 23.75
| 42
| 0.810526
| 13
| 95
| 5.769231
| 0.615385
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084211
| 95
| 4
| 43
| 23.75
| 0.862069
| 0
| 0
| 0
| 0
| 0
| 0.145833
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
2e76a8afd2e9684f8f5e2381a2889acb2b1df563
| 1,526
|
py
|
Python
|
tests/conftest.py
|
paloth/aws-accesskey-manager
|
46c420621063e314b3455ccf0a31a8c6954f251c
|
[
"MIT"
] | 1
|
2020-08-12T11:27:37.000Z
|
2020-08-12T11:27:37.000Z
|
tests/conftest.py
|
paloth/aws-accesskey-manager
|
46c420621063e314b3455ccf0a31a8c6954f251c
|
[
"MIT"
] | 5
|
2020-06-24T12:50:47.000Z
|
2020-08-11T13:49:25.000Z
|
tests/conftest.py
|
paloth/aws-accesskey-manager
|
46c420621063e314b3455ccf0a31a8c6954f251c
|
[
"MIT"
] | null | null | null |
from pytest import fixture
from datetime import datetime
from dateutil.tz import tzutc
@fixture()
def fake_profile():
return "[test]\n" + "aws_access_key_id = AKEXAMPLE\n" + "aws_secret_access_key = SKEXAMPLE"
@fixture()
def fake_config():
return {"test": {"aws_access_key_id": "", "aws_secret_access_key": "", "aws_default_region": ""}}
@fixture()
def sts_get_session_response():
return {"Credentials": {"AccessKeyId": "accesskey", "SecretAccessKey": "secretkey", "SessionToken": "sessiontoken"}}
@fixture()
def sts_get_caller_id_response():
return {"UserId": "string", "Account": "000000000000", "Arn": "string"}
@fixture()
def iam_list_ak_response():
return {"AccessKeyMetadata": [{"AccessKeyId": "AKIA111111111EXAMPLE"}, {"AccessKeyId": "AKIA222222222EXAMPLE"}]}
@fixture()
def iam_create_access_key_return():
return {"AccessKey": {"AccessKeyId": "accesskey", "SecretAccessKey": "secretkey"}}
@fixture()
def boto_standard_error():
return {"Error": {"Code": "WhatEver", "Message": "Error"}}
@fixture()
def sts_get_session_error():
return {"Error": {"Code": "WhatEver", "Message": "Error"}}
@fixture()
def iam_limit_exceeded_exception():
return {"Error": {"Code": "LimitExceededException", "Message": "Error"}}
@fixture()
def current_mocked_date():
return datetime(2020, 7, 1, 10, 00, 00, tzinfo=tzutc())
@fixture()
def creation_mocked_date():
return datetime(2020, 5, 1, 10, 00, 00, tzinfo=tzutc())
| 25.864407
| 121
| 0.664482
| 166
| 1,526
| 5.855422
| 0.409639
| 0.113169
| 0.040123
| 0.049383
| 0.234568
| 0.139918
| 0.102881
| 0.102881
| 0.102881
| 0
| 0
| 0.042254
| 0.162516
| 1,526
| 58
| 122
| 26.310345
| 0.71831
| 0
| 0
| 0.361111
| 0
| 0
| 0.3297
| 0.043597
| 0
| 0
| 0
| 0
| 0
| 1
| 0.305556
| true
| 0
| 0.083333
| 0.305556
| 0.694444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
2e8cf3d0b9ecc8d6fd5094f8377b58aa49c46888
| 118
|
py
|
Python
|
tasks.py
|
santhoshbhumireddy/nbviewer
|
3247939c92876db015b6ac3e98f1a29cce21bb6b
|
[
"BSD-3-Clause-Clear",
"Apache-2.0"
] | 1
|
2015-09-07T22:00:04.000Z
|
2015-09-07T22:00:04.000Z
|
tasks.py
|
Parsely/nbviewer
|
18a58efffe8392d71ec043f74458d4963ec707f5
|
[
"BSD-3-Clause-Clear",
"Apache-2.0"
] | null | null | null |
tasks.py
|
Parsely/nbviewer
|
18a58efffe8392d71ec043f74458d4963ec707f5
|
[
"BSD-3-Clause-Clear",
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import invoke
@invoke.task
def test():
invoke.run("nosetests -v")
| 13.111111
| 30
| 0.618644
| 17
| 118
| 4.294118
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010204
| 0.169492
| 118
| 8
| 31
| 14.75
| 0.734694
| 0.355932
| 0
| 0
| 0
| 0
| 0.162162
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
5cef7766a5d037d36aa8f122a7b29b450c774618
| 4,686
|
py
|
Python
|
resources/2020/TSGCTF2020/modulus-amittendus/solve.py
|
S3v3ru5/S3v3ru5.github.io
|
c40ee77a722b1a9ca3ce669a3b692b141d1c19ff
|
[
"MIT"
] | 2
|
2020-08-11T19:01:45.000Z
|
2021-07-03T17:36:04.000Z
|
resources/2020/TSGCTF2020/modulus-amittendus/solve.py
|
S3v3ru5/S3v3ru5.github.io
|
c40ee77a722b1a9ca3ce669a3b692b141d1c19ff
|
[
"MIT"
] | null | null | null |
resources/2020/TSGCTF2020/modulus-amittendus/solve.py
|
S3v3ru5/S3v3ru5.github.io
|
c40ee77a722b1a9ca3ce669a3b692b141d1c19ff
|
[
"MIT"
] | 1
|
2020-09-14T10:38:55.000Z
|
2020-09-14T10:38:55.000Z
|
from Crypto.Util.number import *
from math import gcd
import json
ct = 17320751473362084127402636657144071375427833219607663443601124449781249403644322557541872089652267070211212915903557690040206709235417332498271540915493529128300376560226137139676145984352993170584208658625255938806836396696141456961179529532070976247738546045494839964768476955634323305122778089058798906645471526156569091101098698045293624474978286797899191202843389249922173166570341752053592397746313995966365207638042347023262633148306194888008613632757146845037310325643855138147271259215908333877374609302786041209284422691820450450982123612630485471082506484250009427242444806889873164459216407213750735305784
pubkey = json.loads(open("pubkey.json").read())
e = pubkey['e']
d = pubkey['n']
cf = pubkey['cf']
upper_lim = min(e, d)
ks = []
for k in range(2, upper_lim):
if (e * d - 1) % k == 0 and ((e * d - 1) // k).bit_length() <= 2048:
ks.append(k)
# print("[*] Possible number of k values = ", len(ks)) # 1
k = ks[0]
phi = (e * d - 1) // k
pmul = cf * phi - cf + 1
p = pmul
i = 2
while not isPrime(p):
pmuli = pow(i, phi, p) - 1
p = gcd(p, pmuli)
i += 1
print("[*] p = ", p)
assert isPrime(p)
q = inverse(cf, p)
while not isPrime(q):
q += p
print("[+] q = ", q)
n = p*q
flag = pow(ct, d, n)
print("[*] flag = ", long_to_bytes(flag).decode())
# TSGCTF{Okay_this_flag_will_be_quite_long_so_listen_carefully_Happiness_is_our_bodys_default_setting_Please_dont_feel_SAd_in_all_sense_Be_happy!_Anyway_this_challenge_is_simple_rewrite_of_HITCON_CTF_2019_Lost_Modulus_Again_so_Im_very_thankful_to_the_author}
# cf * q - 1 = 0 % p
# e*d - 1 = k*phi(n) = k * (n - p - q + 1)
# (k * cf * n - cf*p - cf*q + k*cf) % p = 0 - 0 - 1 + k*cf
# => cf*(e*d - 1) % p
# => k*cf*(-q + 1) % p
# => k*(-q*cf + 1) % p
# => k*(-1 + 1) % p
# => % p
# (p - 1) * (q - 1) = p*q - p - q + 1
# (cf * n - cf*p - cf*q + cf) % p
# (cf*(e*d-1) - k*cf + k) % p
# pmul = 203924475685273125673924567120722211197775789263823099609868179699015077739937525153867786646043922585435823756174102804827621224012186527737220778697750686298504260946611508174775095028972083466656969206793950062958025993036588234037930950865051977169712023795454717172724050588927197417362483522665713180702064548911358519148833656893687268847981618012321143608846445283212339025960431983853360060365733268970028150731318222987414098112588917736299798600733169255883039636648735455383984164507504457126131525930193208740160169062153623683889489951639660237228935581524527590348429642329760869581365117284215618981383483485976418787736969369390525017827257117474256133845432195634375183086883483821098636303137578274580400857016015168040903407435592004125171338858221961907733370940358870810621175748650252715949875073790463288302867296608163915781016208936301706216678625383778953978965425482339341899846735007166492941035127380
# pmul = 52871625506863469419970250719490016323438261079632535659972196074062281756032418324707994774780978077881125703592494150857770444275957060015005543070749389508699526195699950562256489171658321739432399251541222401652762587029604658228120911819200296203337399741328902417078454470021218211752170832535207176943477551815420586375552412656031261323416935867750802948919763817913904619636387757456676028579830241631471496193442177537000367043380226834382432547665765302379769017120323576169609864531412731871066142913798852039237313106964440793824133838447453168959734590724595814427780125968288230737653460603699573421755813765784342172444219065557777861491096411736518672838886951216381604510758433645866840793473370665218390455588303248265131410645514979767065151380724369822120823837398339402126341715195298675794664717874103594705706809110678031532356225963575832673789299449695140173957530720061459
# k = 62676
# phi = 28704238313373626957214994960008954139581982756499074110405661944616161563699953881355718340419202795817270395957729473476747145698006830528333554418856853696099479893465653196878573256509001276103164075993991197413993941054447827049697897358634408174823100151899093845913695607502289118576896877429840052069202417729078184494305213296459526263835596373320000276434715668950539321926894606721051470912695132078472801786561226166468695792501860509172562449551353991095017747320372209557378661393466880782009425950486774637282820715849947612074722195167230790483523030570166160601490546009595990365306583485864701801668
# TSGCTF{Okay_this_flag_will_be_quite_long_so_listen_carefully_Happiness_is_our_bodys_default_setting_Please_dont_feel_SAd_in_all_sense_Be_happy!_Anyway_this_challenge_is_simple_rewrite_of_HITCON_CTF_2019_Lost_Modulus_Again_so_Im_very_thankful_to_the_author}
| 61.657895
| 939
| 0.869825
| 311
| 4,686
| 12.807074
| 0.315113
| 0.004017
| 0.004519
| 0.005021
| 0.109465
| 0.109465
| 0.104946
| 0.104946
| 0.104946
| 0.104946
| 0
| 0.711798
| 0.068502
| 4,686
| 75
| 940
| 62.48
| 0.200687
| 0.715109
| 0
| 0
| 0
| 0
| 0.031939
| 0
| 0
| 1
| 0
| 0
| 0.032258
| 1
| 0
| false
| 0
| 0.096774
| 0
| 0.096774
| 0.096774
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
cf0b9b89ceb5214a8b032d89d0708d9b9ea82190
| 110
|
py
|
Python
|
src/sentdex/elif.py
|
mketiku/python-tutorials
|
57f05ed78d5391b0c551c7e064a1b2f4304a3c82
|
[
"MIT"
] | 1
|
2022-03-30T00:22:21.000Z
|
2022-03-30T00:22:21.000Z
|
src/sentdex/elif.py
|
mketiku/python-tutorials
|
57f05ed78d5391b0c551c7e064a1b2f4304a3c82
|
[
"MIT"
] | null | null | null |
src/sentdex/elif.py
|
mketiku/python-tutorials
|
57f05ed78d5391b0c551c7e064a1b2f4304a3c82
|
[
"MIT"
] | null | null | null |
x = 5
y = 10
z = 22
if x > y :
print ('x is greater than y')
elif x < z :
print (' x is lesser than z')
| 12.222222
| 30
| 0.518182
| 24
| 110
| 2.375
| 0.541667
| 0.210526
| 0.280702
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069444
| 0.345455
| 110
| 8
| 31
| 13.75
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0.345455
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.285714
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
cf170db9e5e32cbb3beafae6c32485457137d559
| 595
|
py
|
Python
|
workerInfra/domain/__init__.py
|
jtom38/newsbot.worker
|
6f5d93c474d21542f1af20e3b2537f26e2bcbbc3
|
[
"MIT"
] | 1
|
2021-09-23T16:19:46.000Z
|
2021-09-23T16:19:46.000Z
|
workerInfra/domain/__init__.py
|
jtom38/newsbot.worker
|
6f5d93c474d21542f1af20e3b2537f26e2bcbbc3
|
[
"MIT"
] | 10
|
2021-09-26T05:53:11.000Z
|
2022-01-07T00:38:46.000Z
|
workerInfra/domain/__init__.py
|
jtom38/newsbot.worker
|
6f5d93c474d21542f1af20e3b2537f26e2bcbbc3
|
[
"MIT"
] | null | null | null |
from .dbApiInterface import DbApiTableInterface
# from .dbApiTableInterface import DbApiTableInterface
from .cacheInterface import CacheInterface
from .outputInterface import OutputInterface
from .outputFormatterInterface import OutputFormatterInterface
from .loggerInterface import LoggerInterface
from .envReaderInterface import EnvReaderInterface
from .sourcesInterface import SourcesInterface
from .driverInterface import DriverInterface
from .sourceParseInterface import SourceParseInterface
from .rssFeedInterface import RssFeedInterface
from .rssHelperInterface import RssHelperInterface
| 45.769231
| 62
| 0.895798
| 48
| 595
| 11.104167
| 0.291667
| 0.093809
| 0.108818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082353
| 595
| 12
| 63
| 49.583333
| 0.97619
| 0.087395
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.