hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1ec5acb6cddde1e1c45143866f925a23902a44c9
| 261
|
py
|
Python
|
basic_grammar/list_usecase.py
|
OnoYuta/python_programing
|
5d191bef5666c0a826f6daa0bd45bc9dd6603d59
|
[
"MIT"
] | null | null | null |
basic_grammar/list_usecase.py
|
OnoYuta/python_programing
|
5d191bef5666c0a826f6daa0bd45bc9dd6603d59
|
[
"MIT"
] | null | null | null |
basic_grammar/list_usecase.py
|
OnoYuta/python_programing
|
5d191bef5666c0a826f6daa0bd45bc9dd6603d59
|
[
"MIT"
] | null | null | null |
seat = []
min = 0
max = 5
print(min <= len(seat) < max)
# True
seat.append('person')
seat.append('person')
seat.append('person')
seat.append('person')
seat.append('person')
print(min <= len(seat) < max)
# False
seat.pop()
print(min <= len(seat) < max)
# True
| 14.5
| 29
| 0.636015
| 40
| 261
| 4.15
| 0.3
| 0.301205
| 0.481928
| 0.481928
| 0.855422
| 0.746988
| 0.481928
| 0.481928
| 0.481928
| 0.481928
| 0
| 0.008969
| 0.145594
| 261
| 18
| 30
| 14.5
| 0.735426
| 0.057471
| 0
| 0.666667
| 0
| 0
| 0.123457
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9491785f5843edade9ff6c276358ed5d23d11458
| 42
|
py
|
Python
|
connect_extras/tests/__init__.py
|
lpatmo/actionify_the_news
|
998d8ca6b35d0ef1b16efca70f50e59503f5a62d
|
[
"MIT"
] | null | null | null |
connect_extras/tests/__init__.py
|
lpatmo/actionify_the_news
|
998d8ca6b35d0ef1b16efca70f50e59503f5a62d
|
[
"MIT"
] | null | null | null |
connect_extras/tests/__init__.py
|
lpatmo/actionify_the_news
|
998d8ca6b35d0ef1b16efca70f50e59503f5a62d
|
[
"MIT"
] | null | null | null |
"""Tests for 3rd party Connect helpers"""
| 21
| 41
| 0.714286
| 6
| 42
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027778
| 0.142857
| 42
| 1
| 42
| 42
| 0.805556
| 0.833333
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
94aa1923e155c54770574e5edede898b7a79083b
| 95
|
py
|
Python
|
mmdet3d/models/utils/__init__.py
|
gopi231091/mmdetection3d
|
1b2e64cd75c8d1c238c61a3bc1e3c62a7d403b53
|
[
"Apache-2.0"
] | 217
|
2021-12-10T09:44:33.000Z
|
2022-03-31T16:17:35.000Z
|
mmdet3d/models/utils/__init__.py
|
gopi231091/mmdetection3d
|
1b2e64cd75c8d1c238c61a3bc1e3c62a7d403b53
|
[
"Apache-2.0"
] | 22
|
2021-12-29T08:57:31.000Z
|
2022-03-31T11:21:53.000Z
|
mmdet3d/models/utils/__init__.py
|
gopi231091/mmdetection3d
|
1b2e64cd75c8d1c238c61a3bc1e3c62a7d403b53
|
[
"Apache-2.0"
] | 23
|
2021-12-13T06:56:38.000Z
|
2022-03-28T02:02:13.000Z
|
from .clip_sigmoid import clip_sigmoid
from .mlp import MLP
__all__ = ['clip_sigmoid', 'MLP']
| 19
| 38
| 0.757895
| 14
| 95
| 4.642857
| 0.428571
| 0.507692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136842
| 95
| 4
| 39
| 23.75
| 0.792683
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
94f0e815742b183791270712c5a45d7f720eedea
| 13,211
|
py
|
Python
|
Deque_Hanoi_&_DelimiterChecks/DSQ_Test.py
|
rockgard3n/Data-Structures
|
93b6b5aaf74cb0f39be233d0f80fd881e27969af
|
[
"MIT"
] | null | null | null |
Deque_Hanoi_&_DelimiterChecks/DSQ_Test.py
|
rockgard3n/Data-Structures
|
93b6b5aaf74cb0f39be233d0f80fd881e27969af
|
[
"MIT"
] | null | null | null |
Deque_Hanoi_&_DelimiterChecks/DSQ_Test.py
|
rockgard3n/Data-Structures
|
93b6b5aaf74cb0f39be233d0f80fd881e27969af
|
[
"MIT"
] | null | null | null |
import unittest
from Deque_Generator import get_deque, LL_DEQUE_TYPE, ARR_DEQUE_TYPE
from Stack import Stack
from Queue import Queue
class DSQTester(unittest.TestCase):
def setUp(self):
# Run your tests with each deque type to ensure that
# they behave identically.
self.__deque = get_deque(LL_DEQUE_TYPE)
self.__stack = Stack()
self.__queue = Queue()
#Empty Tests
def test_empty_deque(self):
self.assertEqual('[ ]', str(self.__deque), 'Empty deque should print as "[ ]"')
def test_empty_stack(self):
self.assertEqual('[ ]', str(self.__stack), 'Empty stack should print as "[ ]"')
def test_empty_queue(self):
self.assertEqual('[ ]', str(self.__queue), 'Empty queue should print as "[ ]"')
#Deque Tests
##push tests starting with empty deque up to two, tests to make sure push
##functions create proper deques with proper string format and len
def test_push_front_empty_deque(self):
self.__deque.push_front('Victory')
self.assertEqual('[ Victory ]', str(self.__deque))
self.assertEqual(1, len(self.__deque))
def test_push_front_empty_deque_length(self):
self.__deque.push_front('Victory')
self.assertEqual(1, len(self.__deque))
def test_push_front_single_deque(self):
self.__deque.push_front('Victory')
self.__deque.push_front('have')
self.assertEqual('[ have, Victory ]', str(self.__deque))
def test_push_front_single_deque_length(self):
self.__deque.push_front('Victory')
self.__deque.push_front('have')
self.assertEqual(2, len(self.__deque))
def test_push_front_double_deque(self):
self.__deque.push_front('Victory')
self.__deque.push_front('have')
self.__deque.push_front('we')
self.assertEqual('[ we, have, Victory ]', str(self.__deque))
self.assertEqual(3, len(self.__deque))
def test_push_front_double_deque_length(self):
self.__deque.push_front('Victory')
self.__deque.push_front('have')
self.__deque.push_front('we')
self.assertEqual(3, len(self.__deque))
##pop front tests starting with empty deque working up to two, tests make
##sure pop functions create proper deques with proper string format and len
def test_pop_front_empty_deque(self):
with self.assertRaises(IndexError):
pop = self.__deque.pop_front()
self.assertEqual('[ ]', str(self.__deque))
def test_pop_front_single_deque_string(self):
self.__deque.push_front('Liam')
pop = self.__deque.pop_front()
self.assertEqual('[ ]', str(self.__deque))
def test_pop_front_single_deque_popvalue(self):
self.__deque.push_front('Liam')
pop = self.__deque.pop_front()
self.assertEqual('Liam', pop)
def test_pop_front_single_deque_length(self):
self.__deque.push_front('Liam')
pop = self.__deque.pop_front()
self.assertEqual(0, len(self.__deque))
def test_pop_front_double_deque_string(self):
self.__deque.push_front('Liam')
self.__deque.push_front('Coolguy')
pop = self.__deque.pop_front()
self.assertEqual('[ Liam ]', str(self.__deque))
def test_pop_front_double_deque_popvalue(self):
self.__deque.push_front('Liam')
self.__deque.push_front('Coolguy')
pop = self.__deque.pop_front()
self.assertEqual('Coolguy', pop)
def test_pop_front_double_deque_length(self):
self.__deque.push_front('Liam')
self.__deque.push_front('Coolguy')
pop = self.__deque.pop_front()
self.assertEqual(1, len(self.__deque))
##peek front tests starting with empty deque working up to deque with 2
##entries. Test makes sure peek function working correctly without affecting
##entries in deque
def test_peek_front_empty_deque(self):
with self.assertRaises(IndexError):
peek = self.__deque.peek_front()
self.assertEqual('[ ]', str(self.__deque))
def test_peek_front_single_deque_string(self):
self.__deque.push_front('Liam')
peek = self.__deque.peek_front()
self.assertEqual('[ Liam ]', str(self.__deque))
def test_peek_front_single_deque_peekvalue(self):
self.__deque.push_front('Liam')
peek = self.__deque.peek_front()
self.assertEqual('Liam', peek)
def test_peek_front_single_deque_length(self):
self.__deque.push_front('Liam')
peek = self.__deque.peek_front()
self.assertEqual(1, len(self.__deque))
def test_peek_front_double_deque_string(self):
self.__deque.push_front('Victory')
self.__deque.push_front('have')
peek = self.__deque.peek_front()
self.assertEqual('[ have, Victory ]', str(self.__deque))
def test_peek_front_double_deque(self):
self.__deque.push_front('Liam')
self.__deque.push_front('Coolguy')
peek = self.__deque.peek_front()
self.assertEqual('Coolguy', peek)
def test_peek_front_double_deque(self):
self.__deque.push_front('Liam')
self.__deque.push_front('Coolguy')
self.assertEqual(2, len(self.__deque))
##push back tests starting with empty deque working up to deque with 2
##entries. Test ensures push back function creating correct deque structure
##with proper str format and len value.
def test_push_back_on_empty_deque_string(self):
self.__deque.push_back('Liam')
self.assertEqual('[ Liam ]', str(self.__deque))
def test_push_back_on_empty_deque_length(self):
self.__deque.push_back('Liam')
self.assertEqual(1, len(self.__deque))
def test_push_back_twice_on_deque_string(self):
self.__deque.push_back('Coolguy')
self.__deque.push_back('Liam')
self.assertEqual('[ Coolguy, Liam ]', str(self.__deque))
def test_push_back_twice_on_deque_length(self):
self.__deque.push_back('Coolguy')
self.__deque.push_back('Liam')
self.assertEqual(2, len(self.__deque))
def test_push_back_thrice_on_deque_string(self):
self.__deque.push_back('Coolguy')
self.__deque.push_back('Liam')
self.__deque.push_back('duh')
self.assertEqual('[ Coolguy, Liam, duh ]', str(self.__deque))
def test_push_back_thrice_on_deque_length(self):
self.__deque.push_back('Coolguy')
self.__deque.push_back('Liam')
self.__deque.push_back('duh')
self.assertEqual(3, len(self.__deque))
##pop back tests starting with empty deque working up to deque with 3
##entries. Test ensures pop back function creates correct deque structure
##with proper str format and len value
def test_pop_back_empty_deque(self):
with self.assertRaises(IndexError):
pop = self.__deque.pop_back()
self.assertEqual('[ ]', str(self.__deque))
def test_pop_back_single_deque_string(self):
self.__deque.push_back('Liam')
pop = self.__deque.pop_back()
self.assertEqual('[ ]', str(self.__deque))
def test_pop_back_single_deque_popvalue(self):
self.__deque.push_back('Liam')
pop = self.__deque.pop_back()
self.assertEqual('Liam', pop)
def test_pop_back_single_deque_length(self):
self.__deque.push_back('Liam')
pop = self.__deque.pop_back()
self.assertEqual(0, len(self.__deque))
def test_pop_back_double_deque_string(self):
self.__deque.push_back('Coolguy')
self.__deque.push_back('Liam')
pop = self.__deque.pop_back()
self.assertEqual('[ Coolguy ]', str(self.__deque))
def test_pop_back_double_deque_popvalue(self):
self.__deque.push_back('Coolguy')
self.__deque.push_back('Liam')
pop = self.__deque.pop_back()
self.assertEqual('Liam', pop)
def test_pop_back_double_deque_length(self):
self.__deque.push_back('Coolguy')
self.__deque.push_back('Liam')
pop = self.__deque.pop_back()
self.assertEqual(1, len(self.__deque))
##peek back tests starting with empty deque up to deque with 2 entries.
##Tests to ensure peek back function is pulling correct data without
##affecting entries in deque.
def test_peek_back_empty_deque_string(self):
with self.assertRaises(IndexError):
peek = self.__deque.peek_back()
self.assertEqual(0, len(self.__deque))
def test_peek_back_empty_deque_length(self):
with self.assertRaises(IndexError):
peek = self.__deque.peek_back()
self.assertEqual(0, len(self.__deque))
def test_peek_back_single_deque_string(self):
self.__deque.push_back('Coolguy')
peek = self.__deque.peek_back()
self.assertEqual('[ Coolguy ]', str(self.__deque))
def test_peek_back_single_deque_peekvalue(self):
self.__deque.push_back('Coolguy')
peek = self.__deque.peek_back()
self.assertEqual('Coolguy', peek)
def test_peek_back_single_deque_length(self):
self.__deque.push_back('Coolguy')
peek = self.__deque.peek_back()
self.assertEqual(1, len(self.__deque))
def test_peek_back_double_deque_string(self):
self.__deque.push_back('Coolguy')
self.__deque.push_back('Liam')
peek = self.__deque.peek_back()
self.assertEqual('[ Coolguy, Liam ]', str(self.__deque))
def test_peek_back_double_deque_peekvalue(self):
self.__deque.push_back('Coolguy')
self.__deque.push_back('Liam')
peek = self.__deque.peek_back()
self.assertEqual('Liam', peek)
def test_peek_back_double_deque_length(self):
self.__deque.push_back('Coolguy')
self.__deque.push_back('Liam')
peek = self.__deque.peek_back()
self.assertEqual(2, len(self.__deque))
#Stack Tests
##push tests starting with empty stack up to stack with 2 entries
def test_push_empty_stack_string(self):
self.__stack.push('Liam')
self.assertEqual('[ Liam ]', str(self.__stack))
def test_push_empty_stack_length(self):
self.__stack.push('Liam')
self.assertEqual(1, len(self.__stack))
def test_push_single_stack_string(self):
self.__stack.push('Liam')
self.__stack.push('Coolguy')
self.assertEqual('[ Coolguy, Liam ]', str(self.__stack))
def test_push_single_stack_length(self):
self.__stack.push('Liam')
self.__stack.push('Coolguy')
self.assertEqual(2, len(self.__stack))
def test_push_multiple_stack_string(self):
self.__stack.push('Liam')
self.__stack.push('Coolguy')
self.__stack.push('here')
self.assertEqual('[ here, Coolguy, Liam ]', str(self.__stack))
def test_push_multiple_stack_length(self):
self.__stack.push('Liam')
self.__stack.push('Coolguy')
self.__stack.push('here')
self.assertEqual(3, len(self.__stack))
##pop tests
def test_pop_empty_stack(self):
with self.assertRaises(IndexError):
pop = self.__stack.pop()
self.assertEqual('[ ]', str(self.__stack))
def test_pop_single_stack_string(self):
self.__stack.push('Liam')
self.__stack.pop()
self.assertEqual('[ ]', str(self.__stack))
def test_pop_single_stack_length(self):
self.__stack.push('Liam')
self.__stack.pop()
self.assertEqual(0, len(self.__stack))
def test_pop_double_stack_string(self):
self.__stack.push('Liam')
self.__stack.push('Coolguy')
pop = self.__stack.pop()
self.assertEqual('[ Liam ]', str(self.__stack))
def test_pop_double_stack_popvalue(self):
self.__stack.push('Liam')
self.__stack.push('Coolguy')
pop = self.__stack.pop()
self.assertEqual('Coolguy', pop)
def test_pop_double_stack_length(self):
self.__stack.push('Liam')
self.__stack.push('Coolguy')
pop = self.__stack.pop()
self.assertEqual(1, len(self.__stack))
##peek tests
def test_peek_empty_stack(self):
with self.assertRaises(IndexError):
peek = self.__stack.peek()
self.assertEqual('[ ]', str(self.__stack))
def test_peek_single_stack_string(self):
self.__stack.push('Liam')
peek = self.__stack.peek()
self.assertEqual('[ Liam ]', str(self.__stack))
def test_peek_single_stack_peekvalue(self):
self.__stack.push('Liam')
peek = self.__stack.peek()
self.assertEqual('Liam', peek)
def test_peek_single_stack_length(self):
self.__stack.push('Liam')
peek = self.__stack.peek()
self.assertEqual(1, len(self.__stack))
def test_peek_double_stack_string(self):
self.__stack.push('Coolguy')
self.__stack.push('Liam')
peek = self.__stack.peek()
self.assertEqual('[ Liam, Coolguy ]', str(self.__stack))
def test_peek_double_stack_peekvalue(self):
self.__stack.push('Coolguy')
self.__stack.push('Liam')
peek = self.__stack.peek()
self.assertEqual('Coolguy', peek)
def test_peek_double_stack_length(self):
self.__stack.push('Coolguy')
self.__stack.push('Liam')
peek = self.__stack.peek()
self.assertEqual(2, len(self.__stack))
#Queue Tests
##Enqueue tests
def test_enq_empty_string(self):
self.__queue.enqueue('Liam')
self.assertEqual('[ Liam ]', str(self.__queue))
def test_enq_empty_length(self):
self.__queue.enqueue('Liam')
self.assertEqual(1, len(self.__queue))
def test_enq_multiple_string(self):
self.__queue.enqueue('Liam')
self.__queue.enqueue('Coolguy')
self.assertEqual('[ Liam, Coolguy ]', str(self.__queue))
def test_enq_multiple_length(self):
self.__queue.enqueue('Liam')
self.__queue.enqueue('Coolguy')
self.assertEqual(2, len(self.__queue))
if __name__ == '__main__':
unittest.main()
| 33.615776
| 83
| 0.70464
| 1,812
| 13,211
| 4.699227
| 0.053532
| 0.13212
| 0.09313
| 0.071873
| 0.915796
| 0.896653
| 0.850499
| 0.757722
| 0.669524
| 0.572871
| 0
| 0.002902
| 0.165468
| 13,211
| 392
| 84
| 33.701531
| 0.769433
| 0.089092
| 0
| 0.686207
| 0
| 0
| 0.079927
| 0
| 0
| 0
| 0
| 0
| 0.262069
| 1
| 0.234483
| false
| 0
| 0.013793
| 0
| 0.251724
| 0.010345
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
bf9ff674e5be999babc1d3d67faf8f670827ac5f
| 68
|
py
|
Python
|
python/packages/isce3/geometry/__init__.py
|
isce3-testing/isce3-circleci-poc
|
ec1dfb6019bcdc7afb7beee7be0fa0ce3f3b87b3
|
[
"Apache-2.0"
] | null | null | null |
python/packages/isce3/geometry/__init__.py
|
isce3-testing/isce3-circleci-poc
|
ec1dfb6019bcdc7afb7beee7be0fa0ce3f3b87b3
|
[
"Apache-2.0"
] | 1
|
2021-12-23T00:00:31.000Z
|
2021-12-23T00:00:31.000Z
|
python/packages/isce3/geometry/__init__.py
|
isce3-testing/isce3-circleci-poc
|
ec1dfb6019bcdc7afb7beee7be0fa0ce3f3b87b3
|
[
"Apache-2.0"
] | 1
|
2021-12-02T21:10:11.000Z
|
2021-12-02T21:10:11.000Z
|
from isce3.ext.isce3.geometry import *
from .rdr2rdr import rdr2rdr
| 22.666667
| 38
| 0.808824
| 10
| 68
| 5.5
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 0.117647
| 68
| 2
| 39
| 34
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bfa1c24e323ab6dcab1e4f1477dd8e3ae5599e93
| 90
|
py
|
Python
|
lg_nav_to_device/src/lg_nav_to_device/__init__.py
|
FuriousJulius/lg_ros_nodes
|
15a84c5022ab2f5b038d11a5589cd4a34010b1d6
|
[
"Apache-2.0"
] | 16
|
2015-10-10T11:55:37.000Z
|
2022-02-24T22:47:48.000Z
|
lg_nav_to_device/src/lg_nav_to_device/__init__.py
|
FuriousJulius/lg_ros_nodes
|
15a84c5022ab2f5b038d11a5589cd4a34010b1d6
|
[
"Apache-2.0"
] | 292
|
2015-09-29T21:59:53.000Z
|
2022-03-31T15:59:31.000Z
|
lg_nav_to_device/src/lg_nav_to_device/__init__.py
|
constantegonzalez/lg_ros_nodes
|
1c7b08c42e90205922602c86805285508d1b7971
|
[
"Apache-2.0"
] | 5
|
2017-05-03T06:22:43.000Z
|
2021-08-19T16:54:14.000Z
|
from .device_writer import DeviceWriter
from .background_stopper import BackgroundStopper
| 30
| 49
| 0.888889
| 10
| 90
| 7.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 90
| 2
| 50
| 45
| 0.95122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bfaffb5d0ee51d60be42e8a880ef803120a48fb1
| 355
|
py
|
Python
|
mooncake_utils/everything.py
|
ericyue/mooncake_utils
|
e6809f0e4e0153a1cbc7de150813806ab394f7eb
|
[
"Apache-2.0"
] | 1
|
2019-01-02T10:18:07.000Z
|
2019-01-02T10:18:07.000Z
|
mooncake_utils/everything.py
|
ericyue/mooncake_utils
|
e6809f0e4e0153a1cbc7de150813806ab394f7eb
|
[
"Apache-2.0"
] | 1
|
2017-07-16T16:32:43.000Z
|
2017-07-16T16:32:43.000Z
|
mooncake_utils/everything.py
|
ericyue/mooncake_utils
|
e6809f0e4e0153a1cbc7de150813806ab394f7eb
|
[
"Apache-2.0"
] | null | null | null |
from mooncake_utils.date import *
from mooncake_utils.cmd import *
from mooncake_utils.file import *
from mooncake_utils.data import *
from mooncake_utils.log import *
from mooncake_utils.alert import *
from mooncake_utils.hadoop import *
#from mooncake_utils.cython_build import *
from mooncake_utils.config import *
from mooncake_utils.network import *
| 32.272727
| 42
| 0.828169
| 51
| 355
| 5.54902
| 0.294118
| 0.424028
| 0.600707
| 0.731449
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112676
| 355
| 10
| 43
| 35.5
| 0.898413
| 0.115493
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
bfbe72f53ca67da8f26591f378de6120997c18b8
| 212
|
py
|
Python
|
ably/util/unicodemixin.py
|
abordeau/ably-python
|
3fc0c1b99148005e5784384a331a24ac41a8207c
|
[
"Apache-2.0"
] | null | null | null |
ably/util/unicodemixin.py
|
abordeau/ably-python
|
3fc0c1b99148005e5784384a331a24ac41a8207c
|
[
"Apache-2.0"
] | null | null | null |
ably/util/unicodemixin.py
|
abordeau/ably-python
|
3fc0c1b99148005e5784384a331a24ac41a8207c
|
[
"Apache-2.0"
] | null | null | null |
import six
class UnicodeMixin(object):
if six.PY3:
def __str__(self):
return self.__unicode__()
else:
def __str__(self):
return self.__unicode__().encode('utf8')
| 19.272727
| 52
| 0.584906
| 23
| 212
| 4.695652
| 0.652174
| 0.111111
| 0.185185
| 0.296296
| 0.5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0.013605
| 0.306604
| 212
| 10
| 53
| 21.2
| 0.721088
| 0
| 0
| 0.25
| 0
| 0
| 0.018868
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.125
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
bfd7bf2656f85000ca3d0051fee721341f48d281
| 4,511
|
py
|
Python
|
resources.py
|
geometalab/QGisLayerStyleLoader
|
8cf23106ae31014001dd762b7841d129bb45cc9b
|
[
"MIT"
] | 5
|
2017-03-03T19:03:10.000Z
|
2022-01-26T18:50:49.000Z
|
resources.py
|
geometalab/QGisLayerStyleLoader
|
8cf23106ae31014001dd762b7841d129bb45cc9b
|
[
"MIT"
] | 4
|
2016-09-19T17:26:04.000Z
|
2021-07-26T12:05:14.000Z
|
resources.py
|
geometalab/QGisLayerStyleLoader
|
8cf23106ae31014001dd762b7841d129bb45cc9b
|
[
"MIT"
] | 2
|
2017-01-04T15:52:50.000Z
|
2021-07-20T06:32:18.000Z
|
# -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.11.2)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x02\x98\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x1a\x00\x00\x00\x1a\x08\x06\x00\x00\x00\xa9\x4a\x4c\xce\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\
\x01\x95\x2b\x0e\x1b\x00\x00\x02\x4a\x49\x44\x41\x54\x48\x89\xbd\
\xd6\xbf\x8f\x4c\x51\x18\xc6\xf1\xcf\x11\xc4\xaf\xd9\x59\x09\x8d\
\xca\xe8\xa9\xb7\xa2\xd5\xd1\x48\xfc\x0b\xaa\xa5\xd0\x68\xd8\x46\
\xa3\xa1\x60\x35\x0a\x8d\xc6\x16\xa8\x44\x23\x0a\x89\x4e\x24\x68\
\x6c\xc1\x6e\x76\x65\x83\x5d\xbf\x12\x22\x92\x57\x31\xef\xdd\xbd\
\x7b\xdd\x59\x33\xd9\xc4\x49\xde\x62\xce\x79\xde\xe7\x3b\xe7\x3d\
\xf7\xbc\xf7\x96\x88\xf0\x3f\xc6\x96\x51\xc4\xa5\x94\x4e\x29\xe5\
\x56\x46\x67\x24\x52\x44\xfc\x33\x30\x86\xeb\x58\xc4\xab\x8c\xc5\
\x9c\x1b\x1b\xc6\xa3\x6c\x54\xba\x52\x4a\x17\x97\x71\x12\x9f\xf1\
\x13\x55\x42\xc1\x0e\x8c\xe3\x1e\x2e\x44\xc4\x97\x81\x5e\x6d\xa0\
\x1a\xe0\x04\xbe\xe2\x47\x0d\xf0\x97\x1c\x3b\x73\xd7\xf7\x07\x01\
\xd7\x81\x46\x04\x8c\x04\x2c\x11\xd1\x04\x7c\xb1\xbe\x44\xa3\x8e\
\xaa\xa4\xdd\x3a\xb0\x60\x12\x57\xf0\x16\xdf\x36\x01\x68\x03\x76\
\x70\x10\xe7\xb7\x44\xc4\x35\x1c\xc5\x02\xf6\x63\x5b\x8a\x36\x03\
\xd8\x96\x5e\x0b\x38\x1a\x11\xd7\x9a\x67\x34\x81\x29\x1c\xc0\x2f\
\xa3\xef\xae\x60\xbb\xfe\xa3\x7f\x31\x22\x9e\xad\x2e\x34\x9f\xba\
\xbc\x88\x0f\xb1\x0f\x5b\xf1\x1d\xbf\x37\x80\x96\xd4\xed\x49\xdd\
\x47\x1c\x8f\x88\x6f\x75\xd1\x6a\x67\x28\xa5\xf4\x4a\x29\x33\x98\
\x47\x0f\xb7\x71\x1a\xcb\xd6\x4a\xda\x1c\x55\x89\x96\x53\x7b\x3b\
\x73\xe7\x4b\x29\x33\xa5\x94\x5e\x5d\x7c\x18\x0f\x72\xbb\xaf\x71\
\xae\xa5\x33\x4c\xe0\x09\xe6\xf0\x22\x63\x2e\xe7\x26\x5a\xf4\xe7\
\xd2\x6b\x31\xbd\x0f\x0f\xd5\xeb\x22\xe2\x59\x44\x1c\xc3\x29\xfd\
\x52\x7e\xc7\xa9\x88\x38\x56\x3f\x87\x7f\x99\x54\xff\xa2\x87\x19\
\xfd\x56\xb3\x88\x4b\xc3\xf4\xb0\xc6\x4e\x2e\x65\xee\xe7\xf4\xea\
\xad\xae\xb5\x88\x3b\x78\x8a\x59\x2c\x0d\x03\x4c\xc0\x52\xe6\x3c\
\x45\xe7\x2f\x4d\xcb\x59\x3c\xc2\x3b\xbc\xc4\x1d\xac\xa4\xc9\x54\
\x0b\x60\x2a\xd7\x56\x52\xfb\x32\x73\x1f\x35\xcf\xae\x0e\x78\xac\
\x7f\xc1\xde\xe0\x4c\xcd\xec\x10\xee\x66\x39\x96\x70\x3e\x63\x29\
\xe7\xee\xe2\x50\x4d\x7f\x26\x3d\x16\xd2\x73\xa2\xba\x42\x93\xfa\
\x97\x73\x16\xcf\xb1\x6b\x40\x79\x2a\xe0\x4a\xc6\x3a\x40\x43\xbb\
\x2b\xbd\x66\xd3\x7b\xb2\x5a\xe8\x5a\x7b\xb1\x7d\xc0\x4d\xec\x1d\
\x60\x72\x04\x47\x06\xac\xed\xcd\xdc\x0f\xd6\x5e\x8c\xdd\xb6\x33\
\xea\xe2\x06\x3e\x65\x4c\x0f\x02\xb6\x00\xa6\x6b\x79\x37\x2a\xc0\
\xc0\xa7\xae\xb1\xc3\x4f\xfa\xb7\x7e\x1a\xe3\x2d\xba\xf1\x5c\x5b\
\x4e\xed\xf5\x26\x60\x43\x50\x0b\x70\xb9\x06\xdc\x9d\x31\x5d\x9b\
\x1f\x08\x18\x0a\x54\x03\x8e\xd5\x80\xef\x33\x2a\xc0\xe6\x3f\x4e\
\x9a\x23\x3b\xfb\xd5\xfc\x79\x36\x1a\x1d\x7a\xc3\xdc\x51\x40\x9b\
\x19\x7f\x00\x65\xdb\xac\x69\xcb\x5e\x68\x6f\x00\x00\x00\x00\x49\
\x45\x4e\x44\xae\x42\x60\x82\
"
qt_resource_name = b"\
\x00\x07\
\x07\x3b\xe0\xb3\
\x00\x70\
\x00\x6c\x00\x75\x00\x67\x00\x69\x00\x6e\x00\x73\
\x00\x10\
\x02\xd4\x99\x62\
\x00\x4c\
\x00\x61\x00\x79\x00\x65\x00\x72\x00\x53\x00\x74\x00\x79\x00\x6c\x00\x65\x00\x4c\x00\x6f\x00\x61\x00\x64\x00\x65\x00\x72\
\x00\x08\
\x0a\x61\x5a\xa7\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct_v1 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x14\x00\x02\x00\x00\x00\x01\x00\x00\x00\x03\
\x00\x00\x00\x3a\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
"
qt_resource_struct_v2 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x14\x00\x02\x00\x00\x00\x01\x00\x00\x00\x03\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x3a\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x7a\xa4\x20\xaa\x38\
"
qt_version = [int(v) for v in QtCore.qVersion().split('.')]
if qt_version < [5, 8, 0]:
rcc_version = 1
qt_resource_struct = qt_resource_struct_v1
else:
rcc_version = 2
qt_resource_struct = qt_resource_struct_v2
def qInitResources():
QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| 42.556604
| 121
| 0.726225
| 1,009
| 4,511
| 3.209118
| 0.280476
| 0.194565
| 0.197344
| 0.151946
| 0.191785
| 0.18252
| 0.161828
| 0.161828
| 0.161828
| 0.161828
| 0
| 0.300371
| 0.042784
| 4,511
| 105
| 122
| 42.961905
| 0.449514
| 0.033695
| 0
| 0.123596
| 0
| 0.58427
| 0.00023
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0.022472
| false
| 0
| 0.011236
| 0
| 0.033708
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
44a6a949923f82c74bc82bab830bb9912bad3e6c
| 561
|
py
|
Python
|
src/forecastability/cov.py
|
PacktPublishing/Modern-Time-Series-Forecasting-with-Python-
|
391ae9c8c8c5b2fba20a8ada8e48e68eb46f118a
|
[
"MIT"
] | 10
|
2021-08-09T11:06:28.000Z
|
2022-03-07T14:47:36.000Z
|
src/forecastability/cov.py
|
PacktPublishing/Modern-Time-Series-Forecasting-with-Python-
|
391ae9c8c8c5b2fba20a8ada8e48e68eb46f118a
|
[
"MIT"
] | null | null | null |
src/forecastability/cov.py
|
PacktPublishing/Modern-Time-Series-Forecasting-with-Python-
|
391ae9c8c8c5b2fba20a8ada8e48e68eb46f118a
|
[
"MIT"
] | null | null | null |
import warnings
import numpy as np
def calc_norm_sd(x, original):
if (len(x) <= 2) and np.all(x==0):
warnings.warn("Array should not be all zeroes or should atleast more than 1 datapoint. COV will be NaN")
cov = np.nan
else:
cov = np.std(x) / np.mean(original)
return cov
def calc_cov(x):
    """Return the coefficient of variation (std / mean) of ``x``.

    Parameters
    ----------
    x : array-like
        Sample to characterise.

    Returns
    -------
    float
        ``std(x) / mean(x)``, or ``np.nan`` (with a warning) when ``x`` is
        degenerate: fewer than two datapoints (std is meaningless) or all
        zeroes (mean is 0, so the ratio is 0/0).

    Notes
    -----
    The original guard ``len(x) <= 2 and np.all(x == 0)`` let all-zero arrays
    longer than 2 reach a 0/0 division; the condition now implements the OR
    stated in the warning message.
    """
    if len(x) <= 1 or np.all(x == 0):
        warnings.warn("Array should not be all zeroes or should atleast more than 1 datapoint. COV will be NaN")
        return np.nan
    return np.std(x) / np.mean(x)
| 31.166667
| 112
| 0.613191
| 98
| 561
| 3.479592
| 0.367347
| 0.058651
| 0.035191
| 0.041056
| 0.739003
| 0.739003
| 0.739003
| 0.739003
| 0.739003
| 0.739003
| 0
| 0.01467
| 0.270945
| 561
| 18
| 113
| 31.166667
| 0.819071
| 0
| 0
| 0.625
| 0
| 0
| 0.309609
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.125
| 0
| 0.375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
44b925732a824addb50f17e3e2d647eff6025a50
| 90
|
py
|
Python
|
square_table.py
|
hashimas/learning_python
|
a6eba00031c79c5793e5bdc651374a41a7c3545f
|
[
"Apache-2.0"
] | null | null | null |
square_table.py
|
hashimas/learning_python
|
a6eba00031c79c5793e5bdc651374a41a7c3545f
|
[
"Apache-2.0"
] | null | null | null |
square_table.py
|
hashimas/learning_python
|
a6eba00031c79c5793e5bdc651374a41a7c3545f
|
[
"Apache-2.0"
] | null | null | null |
# Print a table of n, n^2 and n^3 for n = 1..10, right-aligned in
# 2-, 3- and 4-character columns respectively.
for value in range(1, 11):
    square = value * value
    cube = square * value
    print(repr(value).rjust(2), repr(square).rjust(3), repr(cube).rjust(4))
| 45
| 68
| 0.611111
| 22
| 90
| 2.5
| 0.545455
| 0.272727
| 0.218182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 0.1
| 90
| 2
| 68
| 45
| 0.604938
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
44d47e5543c7fb18b6e8c2bee85e85248b85e8cb
| 134
|
py
|
Python
|
examples/SimpleERC20/test/__init__.py
|
IC3Hydra/Hydra
|
ba42d62108bb1a374bc2e2290d6535c22062b2d3
|
[
"MIT"
] | 78
|
2017-11-02T19:25:04.000Z
|
2022-03-20T01:03:36.000Z
|
examples/SimpleERC20/test/__init__.py
|
IC3Hydra/Hydra
|
ba42d62108bb1a374bc2e2290d6535c22062b2d3
|
[
"MIT"
] | 2
|
2018-02-01T16:07:35.000Z
|
2018-08-29T14:57:03.000Z
|
examples/SimpleERC20/test/__init__.py
|
IC3Hydra/Hydra
|
ba42d62108bb1a374bc2e2290d6535c22062b2d3
|
[
"MIT"
] | 6
|
2017-11-02T16:27:58.000Z
|
2021-05-08T00:12:22.000Z
|
# Filesystem locations of the SimpleERC20 Hydra example, relative to the
# repository root.  All three live under the same example directory.
_EXAMPLE_ROOT = 'examples/SimpleERC20/'
PATH_TO_HEADS = _EXAMPLE_ROOT + 'heads/'
META_CONTRACT = _EXAMPLE_ROOT + 'Hydra.sol'
SPEC = _EXAMPLE_ROOT + 'Spec.sol'
| 33.5
| 48
| 0.798507
| 17
| 134
| 6.117647
| 0.588235
| 0.548077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.048
| 0.067164
| 134
| 4
| 49
| 33.5
| 0.784
| 0
| 0
| 0
| 0
| 0
| 0.641791
| 0.641791
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
781bf6b9c7753e124c1e9e6c749e51440a335224
| 199
|
py
|
Python
|
src/controllers/delivery.py
|
MaksonViini/Flask-Delivery-App
|
de131a08d7dd0b42ddd3ffd7dd395d9dc177d5fb
|
[
"MIT"
] | null | null | null |
src/controllers/delivery.py
|
MaksonViini/Flask-Delivery-App
|
de131a08d7dd0b42ddd3ffd7dd395d9dc177d5fb
|
[
"MIT"
] | null | null | null |
src/controllers/delivery.py
|
MaksonViini/Flask-Delivery-App
|
de131a08d7dd0b42ddd3ffd7dd395d9dc177d5fb
|
[
"MIT"
] | null | null | null |
from flask_restx import Resource
from server.instance import server
# Namespace for delivery endpoints, taken from the shared server instance.
delivery_ns = server.delivery_name_space
class Delivery(Resource):
    """Delivery resource exposing a placeholder GET endpoint."""

    def get(self):
        # Static stub payload; no request state is consulted.
        return {'hello': 'world'}
| 16.583333
| 40
| 0.713568
| 25
| 199
| 5.52
| 0.72
| 0.202899
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.201005
| 199
| 11
| 41
| 18.090909
| 0.867925
| 0
| 0
| 0
| 0
| 0
| 0.050251
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
7847af6bde3ef1a5424189ec30988c33acba35ee
| 1,214
|
py
|
Python
|
4-25/8.py
|
tonyyzy/ProjectEuler
|
f52de2f931ebd4df2020e32d12062866b1586e72
|
[
"MIT"
] | null | null | null |
4-25/8.py
|
tonyyzy/ProjectEuler
|
f52de2f931ebd4df2020e32d12062866b1586e72
|
[
"MIT"
] | null | null | null |
4-25/8.py
|
tonyyzy/ProjectEuler
|
f52de2f931ebd4df2020e32d12062866b1586e72
|
[
"MIT"
] | null | null | null |
number = 7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450
list = []
for i in str(number):
list.append(int(i))
a = []
for i in range(0,988):
b = 1
for j in range(0, 13):
b *= list[i+j]
a.append(b)
a.sort(reverse=True)
print(a)
| 93.384615
| 1,010
| 0.912685
| 40
| 1,214
| 27.7
| 0.525
| 0.00722
| 0.01083
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.879581
| 0.056013
| 1,214
| 13
| 1,011
| 93.384615
| 0.08726
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.083333
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
786dbd03b63c3402a3bbf979c36b94bb4258d6f6
| 74
|
py
|
Python
|
tests/__init__.py
|
icaropires/pdf2dataset
|
b070d656fa446c296458512515fc68fc43d949e1
|
[
"Apache-2.0"
] | 11
|
2020-06-30T03:22:57.000Z
|
2021-11-16T03:35:50.000Z
|
tests/__init__.py
|
icaropires/pdf2dataset
|
b070d656fa446c296458512515fc68fc43d949e1
|
[
"Apache-2.0"
] | 23
|
2020-07-21T19:03:37.000Z
|
2020-11-01T15:53:03.000Z
|
tests/__init__.py
|
icaropires/pdf2dataset
|
b070d656fa446c296458512515fc68fc43d949e1
|
[
"Apache-2.0"
] | 4
|
2020-07-15T20:16:28.000Z
|
2021-04-13T18:38:22.000Z
|
import pytest
# Enable pytest's assertion rewriting for the shared helper module so plain
# `assert`s inside tests.testing_dataframe produce rich failure introspection.
pytest.register_assert_rewrite('tests.testing_dataframe')
| 14.8
| 57
| 0.851351
| 9
| 74
| 6.666667
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067568
| 74
| 4
| 58
| 18.5
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0.310811
| 0.310811
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
787957a4d081235ae2e4c79c2fe142e10dc5a006
| 24
|
py
|
Python
|
django_common_utils/libraries/models/mixins/helpers/__init__.py
|
Myzel394/django-common-utils
|
038bc4481bd44d67545ce307170f530a464f678b
|
[
"MIT"
] | 2
|
2021-02-08T11:10:53.000Z
|
2021-03-14T15:34:21.000Z
|
django_common_utils/libraries/models/mixins/helpers/__init__.py
|
Myzel394/django-common-utils
|
038bc4481bd44d67545ce307170f530a464f678b
|
[
"MIT"
] | null | null | null |
django_common_utils/libraries/models/mixins/helpers/__init__.py
|
Myzel394/django-common-utils
|
038bc4481bd44d67545ce307170f530a464f678b
|
[
"MIT"
] | 1
|
2021-02-18T15:34:14.000Z
|
2021-02-18T15:34:14.000Z
|
from .queryset import *
| 12
| 23
| 0.75
| 3
| 24
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 24
| 1
| 24
| 24
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
787b1ac18aa3055f6f4aa187ae40919628127873
| 41
|
py
|
Python
|
api/tests/seed.py
|
ShubhamGG/Anubis
|
2c538ef258a1edf5463596a33bc66caa2ef7e35b
|
[
"MIT"
] | 65
|
2021-06-27T07:18:27.000Z
|
2021-09-17T16:58:24.000Z
|
api/tests/seed.py
|
efaraz27/Anubis
|
40a12933877df7f39dd75ca26148858774fcda7b
|
[
"MIT"
] | 114
|
2021-06-27T08:37:43.000Z
|
2021-10-24T00:51:01.000Z
|
api/tests/seed.py
|
efaraz27/Anubis
|
40a12933877df7f39dd75ca26148858774fcda7b
|
[
"MIT"
] | 15
|
2021-06-27T07:26:51.000Z
|
2021-10-06T18:42:39.000Z
|
from anubis.rpc.seed import seed
# Run the seeding routine immediately on import/execution of this script.
seed()
| 10.25
| 32
| 0.756098
| 7
| 41
| 4.428571
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146341
| 41
| 3
| 33
| 13.666667
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
78ac9fb91ae2ed231cd29ccc16e74d8716e7a797
| 319
|
py
|
Python
|
configs/gdrn/lmoSingleObj/resnest50d_online_AugCosyAAEGray_mlBCE_DoubleMask_lmoRealNBPbr_100e/resnest50d_online_AugCosyAAEGray_mlBCE_DoubleMask_lmoRealNBPbr_100e_12_holepuncher_bop_test.py
|
THU-DA-6D-Pose-Group/self6dpp
|
c267cfa55e440e212136a5e9940598720fa21d16
|
[
"Apache-2.0"
] | 33
|
2021-12-15T07:11:47.000Z
|
2022-03-29T08:58:32.000Z
|
configs/gdrn/lmoSingleObj/resnest50d_online_AugCosyAAEGray_mlBCE_DoubleMask_lmoRealNBPbr_100e/resnest50d_online_AugCosyAAEGray_mlBCE_DoubleMask_lmoRealNBPbr_100e_12_holepuncher_bop_test.py
|
THU-DA-6D-Pose-Group/self6dpp
|
c267cfa55e440e212136a5e9940598720fa21d16
|
[
"Apache-2.0"
] | 3
|
2021-12-15T11:39:54.000Z
|
2022-03-29T07:24:23.000Z
|
configs/gdrn/lmoSingleObj/resnest50d_online_AugCosyAAEGray_mlBCE_DoubleMask_lmoRealNBPbr_100e/resnest50d_online_AugCosyAAEGray_mlBCE_DoubleMask_lmoRealNBPbr_100e_12_holepuncher_bop_test.py
|
THU-DA-6D-Pose-Group/self6dpp
|
c267cfa55e440e212136a5e9940598720fa21d16
|
[
"Apache-2.0"
] | null | null | null |
# Inherit the shared single-object GDRN test config (the "ape" variant) and
# override only the object-specific fields below for "holepuncher".
_base_ = "./resnest50d_online_AugCosyAAEGray_mlBCE_DoubleMask_lmoRealNBPbr_100e_01_ape_bop_test.py"
# Output directory for this object's results.
OUTPUT_DIR = (
    "output/gdrn/lmoRealPbrSO/resnest50d_online_AugCosyAAEGray_mlBCE_DoubleMask_lmoRealNBPbr_100e_SO/holepuncher"
)
# Training splits: PBR-rendered holepuncher images plus real images outside
# the BOP test set — presumably consumed by the framework's dataset registry.
DATASETS = dict(TRAIN=("lmo_pbr_holepuncher_train", "lmo_NoBopTest_holepuncher_train"))
| 53.166667
| 113
| 0.868339
| 38
| 319
| 6.605263
| 0.657895
| 0.12749
| 0.239044
| 0.278884
| 0.486056
| 0.486056
| 0.486056
| 0
| 0
| 0
| 0
| 0.039604
| 0.050157
| 319
| 5
| 114
| 63.8
| 0.788779
| 0
| 0
| 0
| 0
| 0
| 0.786834
| 0.786834
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
152867b2061f6833c56aae302dc6f88839e144ea
| 1,385
|
py
|
Python
|
scripts/spring 2020/test_compute_route.py
|
ronan-keane/hav-sim
|
0aaf9674e987822ff2dc90c74613d5e68e8ef0ce
|
[
"Apache-2.0"
] | null | null | null |
scripts/spring 2020/test_compute_route.py
|
ronan-keane/hav-sim
|
0aaf9674e987822ff2dc90c74613d5e68e8ef0ce
|
[
"Apache-2.0"
] | null | null | null |
scripts/spring 2020/test_compute_route.py
|
ronan-keane/hav-sim
|
0aaf9674e987822ff2dc90c74613d5e68e8ef0ce
|
[
"Apache-2.0"
] | 2
|
2020-09-30T22:44:37.000Z
|
2021-05-09T07:36:28.000Z
|
"""
Test that the compute_route() function in the havsim.simulation.road package works correctly.
"""
from havsim.simulation.road import Road
from havsim.simulation.road import compute_route
def _build_network(road5_length, merge_interval):
    """Build the shared 5-road test network and return its entry road.

    Both tests use the same topology; only road5's length and the position
    interval of the road4->road5 merge differ.
    """
    road1 = Road(num_lanes=2, length=4, name='road1')
    road2 = Road(num_lanes=1, length=4, name='road2')
    road3 = Road(num_lanes=1, length=3, name='road3')
    road4 = Road(num_lanes=1, length=2, name='road4')
    road5 = Road(num_lanes=1, length=road5_length, name='road5')
    road1.connect(road2, [1], [0])
    road1.connect(road5, [0], [0])
    road2.connect(road3)
    road3.connect(road4)
    road4.merge(road5, 0, 0, (1, 2), merge_interval)
    road5.connect('exit', is_exit=True)
    return road1
def test1():
    """Long road5: the route should go via the road2-road3-road4 chain."""
    road1 = _build_network(24, (23, 24))
    assert compute_route(road1, 0, 'exit') == ['road2', 'road3', 'road4', 'road5', 'exit']
def test2():
    """Short road5: taking road5 directly should win."""
    road1 = _build_network(8, (7, 8))
    assert compute_route(road1, 0, 'exit') == ['road5', 'exit']
def test_all():
    """Run every compute_route test in this module."""
    test1()
    test2()
test_all()
| 30.777778
| 93
| 0.638267
| 209
| 1,385
| 4.143541
| 0.205742
| 0.080831
| 0.138568
| 0.120092
| 0.794457
| 0.725173
| 0.725173
| 0.725173
| 0.725173
| 0.725173
| 0
| 0.08589
| 0.176173
| 1,385
| 44
| 94
| 31.477273
| 0.673094
| 0.067148
| 0
| 0.5625
| 0
| 0
| 0.077103
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 1
| 0.09375
| false
| 0
| 0.0625
| 0
| 0.15625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
153dd62a028aebd33d87c7a739508d9a08947a25
| 730
|
py
|
Python
|
2_animal_printer.py
|
ak0029/Zookeeper
|
ad61bafb57078602c5ce0d1dec1ac56217f6d4ab
|
[
"MIT"
] | null | null | null |
2_animal_printer.py
|
ak0029/Zookeeper
|
ad61bafb57078602c5ce0d1dec1ac56217f6d4ab
|
[
"MIT"
] | null | null | null |
2_animal_printer.py
|
ak0029/Zookeeper
|
ad61bafb57078602c5ce0d1dec1ac56217f6d4ab
|
[
"MIT"
] | null | null | null |
print(r"""Switching on camera from habitat with camels...
___.-''''-.
/___ @ |
',,,,. | _.'''''''._
' | / \
| \ _.-' \
| '.-' '-.
| ',
| '',
',,-, ':;
',,| ;,, ,' ;;
! ; !'',,,',',,,,'! ; ;:
: ; ! ! ! ! ; ; :;
; ; ! ! ! ! ; ; ;,
; ; ! ! ! ! ; ;
; ; ! ! ! ! ; ;
;,, !,! !,! ;,;
/_I L_I L_I /_I
Yey, our little camel is sunbathing!""")
| 38.421053
| 58
| 0.126027
| 21
| 730
| 3.761905
| 0.809524
| 0.050633
| 0.075949
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.663014
| 730
| 19
| 59
| 38.421053
| 0.321138
| 0
| 0
| 0.105263
| 0
| 0
| 0.978962
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.052632
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ec72798e9996cbe6100a0aa377f790750337d843
| 105
|
py
|
Python
|
clouds/io/__init__.py
|
jchen42703/understanding-clouds-kaggle
|
6972deb25cdf363ae0d9a9ad26d538280613fc94
|
[
"Apache-2.0"
] | 1
|
2019-10-26T16:33:40.000Z
|
2019-10-26T16:33:40.000Z
|
clouds/io/__init__.py
|
jchen42703/understanding-clouds-kaggle
|
6972deb25cdf363ae0d9a9ad26d538280613fc94
|
[
"Apache-2.0"
] | 1
|
2019-11-08T02:50:25.000Z
|
2019-11-19T03:36:54.000Z
|
clouds/io/__init__.py
|
jchen42703/understanding-clouds-kaggle
|
6972deb25cdf363ae0d9a9ad26d538280613fc94
|
[
"Apache-2.0"
] | null | null | null |
from .dataset import CloudDataset, ClassificationCloudDataset, \
ClfSegCloudDataset
| 35
| 64
| 0.695238
| 6
| 105
| 12.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.266667
| 105
| 2
| 65
| 52.5
| 0.948052
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
eca736137690d10a1b35603e34eeba3d325c180b
| 443
|
py
|
Python
|
plugins/trello/komand_trello/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 46
|
2019-06-05T20:47:58.000Z
|
2022-03-29T10:18:01.000Z
|
plugins/trello/komand_trello/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 386
|
2019-06-07T20:20:39.000Z
|
2022-03-30T17:35:01.000Z
|
plugins/trello/komand_trello/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 43
|
2019-07-09T14:13:58.000Z
|
2022-03-28T12:04:46.000Z
|
# GENERATED BY KOMAND SDK - DO NOT EDIT
from .deactivate_user.action import DeactivateUser
from .deactivated_list.action import DeactivatedList
from .get_boards_by_member.action import GetBoardsByMember
from .member_list.action import MemberList
from .remove_member_from_board.action import RemoveMemberFromBoard
from .remove_member_from_cards.action import RemoveMemberFromCards
from .remove_member_from_org.action import RemoveMemberFromOrg
| 49.222222
| 66
| 0.880361
| 57
| 443
| 6.578947
| 0.491228
| 0.224
| 0.128
| 0.16
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083521
| 443
| 8
| 67
| 55.375
| 0.923645
| 0.083521
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
ecd636977f371e720da80f3fc621bd3ffb8b202d
| 5,164
|
py
|
Python
|
Binance/Trades.py
|
awaismemon26/BinanceRESTAPI
|
8aab315fd6ae5e25e526af2ed9a7c3f8dfd0fea2
|
[
"MIT"
] | 2
|
2022-01-30T19:26:56.000Z
|
2022-02-26T17:36:41.000Z
|
Binance/Trades.py
|
awaismemon26/BinanceRESTAPI
|
8aab315fd6ae5e25e526af2ed9a7c3f8dfd0fea2
|
[
"MIT"
] | null | null | null |
Binance/Trades.py
|
awaismemon26/BinanceRESTAPI
|
8aab315fd6ae5e25e526af2ed9a7c3f8dfd0fea2
|
[
"MIT"
] | null | null | null |
from Binance.BinanceClient import awais26_client, awais100_client, sarah12_client
import pandas as pd
from pandas import DataFrame as dataframe
def get_ticker_usd_value(ticker):
    """Return the current USDT price of *ticker* as a float."""
    pd.set_option('precision', 8)
    symbol = ticker + 'USDT'
    quote = awais26_client.get_symbol_ticker(symbol=symbol)
    frame = dataframe(quote, index=[0])
    frame.set_index('symbol', inplace=True)
    # Single-row frame: the 'price' column's first entry is the quote.
    return float(frame['price'][0])
def get_awais26_aggregated_trades(ticker):
    """Return aggregated trades for *ticker* as a DataFrame with friendly column names."""
    raw = awais26_client.get_aggregate_trades(symbol=ticker)
    frame = pd.json_normalize(raw)
    frame.columns = ['Id', 'Price', 'Quantity', 'FirstTradeId', 'LastTradeId', 'Date', 'IsBuyerMaker', 'IsBestPriceMatch']
    return frame
def get_awais26_historical_trades(ticker):
    """Return up to 1000 historical trades for *ticker* with Berlin-localised timestamps."""
    raw = awais26_client.get_historical_trades(symbol=ticker, limit=1000, fromId='329135120')
    frame = pd.json_normalize(raw)
    # Millisecond epoch -> aware datetime, then convert UTC to Europe/Berlin.
    frame['time'] = pd.to_datetime(frame['time'], unit='ms')
    frame['time'] = frame.time.dt.tz_localize('UTC').dt.tz_convert('Europe/Berlin')
    return frame
def get_awais26_recent_trades(ticker, count):
    """Return the account's own *count* most recent trades for *ticker*."""
    raw = awais26_client.get_my_trades(symbol=ticker, limit=count)
    frame = pd.json_normalize(raw)
    ordered = ['symbol', 'time', 'qty', 'price', 'quoteQty', 'commission', 'commissionAsset', 'isBuyer', 'isMaker', 'isBestMatch']
    # Restrict and reorder to the columns callers care about.
    return frame[ordered]
def _recent_trade_sum(client, ticker):
    """Shared implementation for the per-account recent-trade summaries.

    Fetches the last 500 of the account's own trades for *ticker*, groups the
    fills by (time, isBuyer, isMaker) and aggregates: quantities, quote
    quantities and USD fees are summed, prices averaged.  Commission paid in
    assets other than USDT is converted to USD via get_ticker_usd_value().

    Returns the aggregated DataFrame, or the string 'No Trades Found!' when
    the account has no trades for the symbol (preserved from the original
    behaviour, which callers may rely on).
    """
    json_data = client.get_my_trades(symbol=ticker, limit=500)
    tradesDF = pd.json_normalize(json_data)
    if tradesDF.empty:
        return 'No Trades Found!'
    # Millisecond epoch -> ISO-8601 string so equal-timestamp fills group together.
    tradesDF['time'] = pd.to_datetime(tradesDF['time'], unit='ms')
    tradesDF['time'] = tradesDF['time'].dt.strftime('%Y-%m-%dT%H:%M:%S').astype(str)
    cols = ['symbol', 'time', 'qty', 'price', 'quoteQty', 'commission', 'commissionAsset', 'isBuyer', 'isMaker']
    groupbyCols = ['time', 'isBuyer', 'isMaker']
    tradesDF = tradesDF[cols]
    tradesDF[['qty', 'quoteQty', 'price', 'commission']] = tradesDF[['qty', 'quoteQty', 'price', 'commission']].apply(pd.to_numeric, errors='ignore')
    # Convert non-USDT commissions to USD at the current ticker price.
    tradesDF['fees_usd'] = tradesDF.apply(lambda row: row['commission'] if row['commissionAsset'] == 'USDT' else (get_ticker_usd_value(row['commissionAsset']) * row['commission']), axis=1)
    return tradesDF.groupby(groupbyCols, as_index=False)[['qty', 'price', 'quoteQty', 'fees_usd']].agg(
        {
            'qty': 'sum',
            'price': 'mean',
            'quoteQty': 'sum',
            'fees_usd': 'sum',
        }
    )
def get_awais26_recent_trade_sum(ticker):
    """Aggregated recent-trade summary for the awais26 account."""
    return _recent_trade_sum(awais26_client, ticker)
def get_awais100_recent_trade_sum(ticker):
    """Aggregated recent-trade summary for the awais100 account."""
    return _recent_trade_sum(awais100_client, ticker)
def get_sarah12_recent_trade_sum(ticker):
    """Aggregated recent-trade summary for the sarah12 account."""
    return _recent_trade_sum(sarah12_client, ticker)
| 44.904348
| 188
| 0.651046
| 599
| 5,164
| 5.445743
| 0.175292
| 0.05886
| 0.034335
| 0.042305
| 0.808706
| 0.800429
| 0.749234
| 0.708155
| 0.673207
| 0.673207
| 0
| 0.014768
| 0.173896
| 5,164
| 115
| 189
| 44.904348
| 0.749883
| 0.023431
| 0
| 0.615385
| 0
| 0
| 0.229059
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0.032967
| 0
| 0.21978
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
01a4b5914d226c49a4b71dd7335ab2e8094358fc
| 32
|
py
|
Python
|
carrots/carrots.py
|
chrisgzf/kattis
|
7b66474c040a31cfc997863141f57a7c81f6ebab
|
[
"MIT"
] | null | null | null |
carrots/carrots.py
|
chrisgzf/kattis
|
7b66474c040a31cfc997863141f57a7c81f6ebab
|
[
"MIT"
] | null | null | null |
carrots/carrots.py
|
chrisgzf/kattis
|
7b66474c040a31cfc997863141f57a7c81f6ebab
|
[
"MIT"
] | null | null | null |
# Kattis "carrots": the input line holds two tokens; only the second
# (the number of carrots) is echoed back.
_contestants, carrots = input().split()
print(carrots)
| 10.666667
| 22
| 0.5625
| 6
| 32
| 3
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15625
| 32
| 2
| 23
| 16
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
01c8465df2c322efda9679660b609148a6ba86b0
| 99
|
py
|
Python
|
z_tests/a_gym_info.py
|
linklab/minimal_rl
|
382d99ca355ea405414c4ed1077fb4e8ed3532a9
|
[
"MIT"
] | null | null | null |
z_tests/a_gym_info.py
|
linklab/minimal_rl
|
382d99ca355ea405414c4ed1077fb4e8ed3532a9
|
[
"MIT"
] | null | null | null |
z_tests/a_gym_info.py
|
linklab/minimal_rl
|
382d99ca355ea405414c4ed1077fb4e8ed3532a9
|
[
"MIT"
] | 1
|
2021-10-17T14:09:05.000Z
|
2021-10-17T14:09:05.000Z
|
from gym import envs
# List every registered gym environment together with its running index.
for index, spec in enumerate(envs.registry.all()):
    print(index, spec)
| 24.75
| 52
| 0.737374
| 17
| 99
| 4.176471
| 0.764706
| 0.169014
| 0.28169
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151515
| 99
| 3
| 53
| 33
| 0.845238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
01e6f0e2ab79f1b7be4b30cacf5aec3b191421f8
| 154
|
py
|
Python
|
Wrapping/Python/SIMPL/__init__.py
|
v7t/SIMPL
|
41c941ac957960ec17d067ffe5c566390c4a2553
|
[
"NRL"
] | null | null | null |
Wrapping/Python/SIMPL/__init__.py
|
v7t/SIMPL
|
41c941ac957960ec17d067ffe5c566390c4a2553
|
[
"NRL"
] | 2
|
2019-02-23T20:46:12.000Z
|
2019-07-11T15:34:13.000Z
|
Wrapping/Python/SIMPL/__init__.py
|
v7t/SIMPL
|
41c941ac957960ec17d067ffe5c566390c4a2553
|
[
"NRL"
] | null | null | null |
""" Some Description """
# This imports the python module
from . import dream3d_py
#from . import utils
#__all__ = ['simpl_common','simpl_test_dirs']
| 15.4
| 45
| 0.714286
| 20
| 154
| 5.1
| 0.85
| 0.196078
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007752
| 0.162338
| 154
| 9
| 46
| 17.111111
| 0.782946
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bf192ca49ae1720cf8ce2e9b65791fbe97067479
| 1,113
|
py
|
Python
|
pypeln/sync/api/flat_map_sync_test.py
|
quarckster/pypeln
|
f4160d0f4d4718b67f79a0707d7261d249459a4b
|
[
"MIT"
] | 1,281
|
2018-09-20T05:35:27.000Z
|
2022-03-30T01:29:48.000Z
|
pypeln/sync/api/flat_map_sync_test.py
|
webclinic017/pypeln
|
5231806f2cac9d2019dacbbcf913484fd268b8c1
|
[
"MIT"
] | 78
|
2018-09-18T20:38:12.000Z
|
2022-03-30T20:16:02.000Z
|
pypeln/sync/api/flat_map_sync_test.py
|
webclinic017/pypeln
|
5231806f2cac9d2019dacbbcf913484fd268b8c1
|
[
"MIT"
] | 88
|
2018-09-24T10:46:14.000Z
|
2022-03-28T09:34:50.000Z
|
import hypothesis as hp
from hypothesis import strategies as st
import time
import pypeln as pl
import cytoolz as cz
MAX_EXAMPLES = 10
def _generator(x):
    """Fan one element out to three: x, x + 1, x + 2 (shared by both tests)."""
    yield x
    yield x + 1
    yield x + 2
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
def test_flat_map_square(nums):
    """Single-worker flat_map must match the cytoolz mapcat reference, in order."""
    nums_py = map(lambda x: x ** 2, nums)
    nums_py = cz.mapcat(_generator, nums_py)
    nums_py = list(nums_py)
    nums_pl = pl.sync.map(lambda x: x ** 2, nums)
    nums_pl = pl.sync.flat_map(_generator, nums_pl)
    nums_pl = list(nums_pl)
    assert nums_pl == nums_py
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
def test_flat_map_square_workers(nums):
    """With workers=3 output order is unspecified, so compare sorted results."""
    nums_py = map(lambda x: x ** 2, nums)
    nums_py = cz.mapcat(_generator, nums_py)
    nums_py = list(nums_py)
    nums_pl = pl.sync.map(lambda x: x ** 2, nums)
    nums_pl = pl.sync.flat_map(_generator, nums_pl, workers=3)
    nums_pl = list(nums_pl)
    assert sorted(nums_pl) == sorted(nums_py)
| 24.195652
| 62
| 0.665768
| 186
| 1,113
| 3.747312
| 0.209677
| 0.1033
| 0.05165
| 0.063128
| 0.76901
| 0.76901
| 0.705882
| 0.705882
| 0.705882
| 0.705882
| 0
| 0.012702
| 0.221923
| 1,113
| 45
| 63
| 24.733333
| 0.792148
| 0
| 0
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 1
| 0.117647
| false
| 0
| 0.147059
| 0
| 0.264706
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
bf240f6cfc336066dc7c1506c7651e020100a6e8
| 135
|
py
|
Python
|
osp/citations/models/__init__.py
|
davidmcclure/open-syllabus-project
|
078cfd4c5a257fbfb0901d43bfbc6350824eed4e
|
[
"Apache-2.0"
] | 220
|
2016-01-22T21:19:02.000Z
|
2022-01-25T04:33:55.000Z
|
osp/citations/models/__init__.py
|
davidmcclure/open-syllabus-project
|
078cfd4c5a257fbfb0901d43bfbc6350824eed4e
|
[
"Apache-2.0"
] | 14
|
2016-01-23T14:34:39.000Z
|
2016-09-19T19:58:37.000Z
|
osp/citations/models/__init__.py
|
davidmcclure/open-syllabus-project
|
078cfd4c5a257fbfb0901d43bfbc6350824eed4e
|
[
"Apache-2.0"
] | 14
|
2016-02-03T13:47:48.000Z
|
2019-03-27T13:09:05.000Z
|
from .text import Text
from .citation import Citation
from .text_index import Text_Index
from .citation_index import Citation_Index
| 16.875
| 42
| 0.82963
| 20
| 135
| 5.4
| 0.25
| 0.148148
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140741
| 135
| 7
| 43
| 19.285714
| 0.931034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bf3899fc7413db1583661e51609f4eefb0563c61
| 97
|
py
|
Python
|
open-codegen/opengen/functions/is_symbolic.py
|
elsuizo/optimization-engine
|
c264264dd2ca035670ccd3d5d87e53c2f98b7d35
|
[
"Apache-2.0",
"MIT"
] | 253
|
2019-03-02T03:47:54.000Z
|
2022-03-25T01:00:41.000Z
|
open-codegen/opengen/functions/is_symbolic.py
|
elsuizo/optimization-engine
|
c264264dd2ca035670ccd3d5d87e53c2f98b7d35
|
[
"Apache-2.0",
"MIT"
] | 157
|
2019-03-23T15:13:24.000Z
|
2022-03-04T19:13:22.000Z
|
open-codegen/opengen/functions/is_symbolic.py
|
elsuizo/optimization-engine
|
c264264dd2ca035670ccd3d5d87e53c2f98b7d35
|
[
"Apache-2.0",
"MIT"
] | 26
|
2019-03-05T01:48:35.000Z
|
2022-03-18T15:31:27.000Z
|
import casadi.casadi as cs
def is_symbolic(u):
    """Return True when *u* is one of CasADi's symbolic/matrix types (SX, MX, DM)."""
    casadi_types = (cs.SX, cs.MX, cs.DM)
    return isinstance(u, casadi_types)
| 16.166667
| 47
| 0.680412
| 18
| 97
| 3.611111
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175258
| 97
| 5
| 48
| 19.4
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
173995c2a0bd922f5a15dcf4c5d138e39f75d76f
| 920
|
py
|
Python
|
scripts/pac/crlpac/__init__.py
|
mkarim2017/insarzd
|
e7d05f836e7ca044166e38bad549629ed00d71f1
|
[
"ECL-2.0",
"Apache-2.0"
] | 28
|
2019-10-04T01:18:29.000Z
|
2022-02-15T11:18:18.000Z
|
scripts/pac/crlpac/__init__.py
|
mkarim2017/insarzd
|
e7d05f836e7ca044166e38bad549629ed00d71f1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
scripts/pac/crlpac/__init__.py
|
mkarim2017/insarzd
|
e7d05f836e7ca044166e38bad549629ed00d71f1
|
[
"ECL-2.0",
"Apache-2.0"
] | 11
|
2019-10-04T08:36:54.000Z
|
2021-06-21T08:47:28.000Z
|
#!/usr/bin/env python3
#class
from .funcs import Dummy
#functions
from .funcs import getWidth
from .funcs import getLength
from .funcs import getInfo
from .funcs import get_content
from .funcs import changeXmlName
from .funcs import writeSARconfig_ALOS2
from .funcs import create_xml
from .funcs import renderParXml
from .funcs import runCmd
from .funcs import run_record_cmd
from .funcs import writeOffset
from .funcs import meanOffset
from .funcs import cullOffsetbyMean
from .funcs import cullOffset
from .funcs import getOffset
from .funcs import cal_coherence
from .funcs import overlapFrequency
from .funcs import gaussian
from .funcs import create_multi_index
from .funcs import create_multi_index2
from .funcs import fit_surface
from .funcs import cal_surface
from .funcs import read_param_for_checking_overlap
from .funcs import check_overlap
from .funcs import read_insar_arg
from .funcs import set_filename
| 27.878788
| 50
| 0.83913
| 134
| 920
| 5.61194
| 0.365672
| 0.323138
| 0.538564
| 0.083777
| 0.069149
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003722
| 0.123913
| 920
| 32
| 51
| 28.75
| 0.92928
| 0.038043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1755f08b19ae9c1f60c4396357585c837264ec56
| 27
|
py
|
Python
|
aliyun-python-sdk-dts/aliyunsdkdts/__init__.py
|
bricklayer-Liu/aliyun-openapi-python-sdk
|
20da2554de22679fc7c5462c483663e4d79512aa
|
[
"Apache-2.0"
] | 1
|
2021-03-08T02:59:17.000Z
|
2021-03-08T02:59:17.000Z
|
aliyun-python-sdk-dts/aliyunsdkdts/__init__.py
|
bricklayer-Liu/aliyun-openapi-python-sdk
|
20da2554de22679fc7c5462c483663e4d79512aa
|
[
"Apache-2.0"
] | null | null | null |
aliyun-python-sdk-dts/aliyunsdkdts/__init__.py
|
bricklayer-Liu/aliyun-openapi-python-sdk
|
20da2554de22679fc7c5462c483663e4d79512aa
|
[
"Apache-2.0"
] | null | null | null |
# Package version marker (PEP 396-style module attribute).
__version__ = '5.0.78.19.4'
| 27
| 27
| 0.666667
| 6
| 27
| 2.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.28
| 0.074074
| 27
| 1
| 27
| 27
| 0.28
| 0
| 0
| 0
| 0
| 0
| 0.392857
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1772f04f6d1101d31d26cc4926a830a698fe0caa
| 93
|
py
|
Python
|
lumin/inference/__init__.py
|
choisant/lumin
|
c039136eb096e8f3800f13925f9325b99cf7e76b
|
[
"Apache-2.0"
] | 43
|
2019-02-11T16:16:42.000Z
|
2021-12-13T15:35:20.000Z
|
lumin/inference/__init__.py
|
choisant/lumin
|
c039136eb096e8f3800f13925f9325b99cf7e76b
|
[
"Apache-2.0"
] | 48
|
2020-05-21T02:40:50.000Z
|
2021-08-10T11:07:08.000Z
|
lumin/inference/__init__.py
|
choisant/lumin
|
c039136eb096e8f3800f13925f9325b99cf7e76b
|
[
"Apache-2.0"
] | 14
|
2019-05-02T15:09:41.000Z
|
2022-01-12T21:13:34.000Z
|
# from .summary_stat import * # noqa F304
# __all__ = [*summary_stat.__all__] # noqa F405
| 23.25
| 48
| 0.688172
| 12
| 93
| 4.5
| 0.666667
| 0.407407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 0.193548
| 93
| 3
| 49
| 31
| 0.64
| 0.913978
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
178c95c6914ba12d27bcf351df63da0162c398e6
| 96
|
py
|
Python
|
mica/archive/aca_dark/__init__.py
|
sot/mica
|
136a9b0d9521efda5208067b51cf0c8700b4def3
|
[
"BSD-3-Clause"
] | null | null | null |
mica/archive/aca_dark/__init__.py
|
sot/mica
|
136a9b0d9521efda5208067b51cf0c8700b4def3
|
[
"BSD-3-Clause"
] | 150
|
2015-01-23T17:09:53.000Z
|
2022-01-10T00:50:54.000Z
|
mica/archive/aca_dark/__init__.py
|
sot/mica
|
136a9b0d9521efda5208067b51cf0c8700b4def3
|
[
"BSD-3-Clause"
] | null | null | null |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from .dark_cal import * # noqa
| 32
| 63
| 0.739583
| 16
| 96
| 4.375
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012821
| 0.1875
| 96
| 2
| 64
| 48
| 0.884615
| 0.6875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
179cc507e6ce988530edd377750f8fff5e4dbeda
| 138
|
py
|
Python
|
jgf/__init__.py
|
filipinascimento/jgf
|
6558ba152937bdb4814190e4c1a89c7ade5bdfaf
|
[
"MIT"
] | null | null | null |
jgf/__init__.py
|
filipinascimento/jgf
|
6558ba152937bdb4814190e4c1a89c7ade5bdfaf
|
[
"MIT"
] | 1
|
2021-03-22T21:36:19.000Z
|
2021-03-22T21:36:19.000Z
|
jgf/__init__.py
|
filipinascimento/jgf
|
6558ba152937bdb4814190e4c1a89c7ade5bdfaf
|
[
"MIT"
] | 1
|
2020-08-03T15:54:16.000Z
|
2020-08-03T15:54:16.000Z
|
#!/usr/bin/python
# -*- coding: <utf-8> -*-
from .core import load,save
from . import igraph
from . import conmat
__version__ = "0.2.2"
| 15.333333
| 27
| 0.652174
| 21
| 138
| 4.095238
| 0.761905
| 0.232558
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034783
| 0.166667
| 138
| 8
| 28
| 17.25
| 0.713043
| 0.289855
| 0
| 0
| 0
| 0
| 0.052632
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bdb1a57cff3db8332ef70e5c3a7166cf920104ca
| 708
|
py
|
Python
|
trigonometry.py
|
Coder4OO/trigonometry
|
4f3e0ad32e2a5cbc5590acd139b88dcfc99838bf
|
[
"MIT"
] | null | null | null |
trigonometry.py
|
Coder4OO/trigonometry
|
4f3e0ad32e2a5cbc5590acd139b88dcfc99838bf
|
[
"MIT"
] | null | null | null |
trigonometry.py
|
Coder4OO/trigonometry
|
4f3e0ad32e2a5cbc5590acd139b88dcfc99838bf
|
[
"MIT"
] | null | null | null |
import math
def get_sine_angle(o, h):
    """Return the angle in degrees whose sine is opposite/hypotenuse."""
    ratio = o / h
    return math.degrees(math.asin(ratio))
def get_cosine_angle(a, h):
    """Return the angle in degrees whose cosine is adjacent/hypotenuse."""
    ratio = a / h
    return math.degrees(math.acos(ratio))
def get_tangent_angle(o, a):
    """Return the angle in degrees whose tangent is opposite/adjacent."""
    ratio = o / a
    return math.degrees(math.atan(ratio))
def get_opposite_from_sine(h, ang):
    """Return the opposite side from hypotenuse *h* and angle *ang* (degrees).

    Bug fix: the original returned ``math.degrees(math.sin(ang)) * h``, which
    treats the degree angle as radians and then unit-converts the
    dimensionless sine value. The correct relation is o = h * sin(ang), with
    *ang* converted from degrees to radians first.
    """
    return math.sin(math.radians(ang)) * h
def get_hypotenuse_from_sine(o, ang):
    """Return the hypotenuse from opposite side *o* and angle *ang* (degrees).

    Bug fix: the original divided by ``math.degrees(math.sin(ang))``, treating
    the degree angle as radians and unit-converting the sine value. The
    correct relation is h = o / sin(ang), with *ang* converted from degrees
    to radians first.
    """
    return o / math.sin(math.radians(ang))
def get_adjacent_from_cosine(h, ang):
    """Return the adjacent side from hypotenuse *h* and angle *ang* (degrees).

    Bug fix: the original returned ``math.degrees(math.cos(ang)) * h``,
    treating the degree angle as radians and unit-converting the cosine
    value. The correct relation is a = h * cos(ang), with *ang* converted
    from degrees to radians first.
    """
    return math.cos(math.radians(ang)) * h
def get_hypotenuse_from_cosine(a, ang):
    """Return the hypotenuse from adjacent side *a* and angle *ang* (degrees).

    Bug fix: the original divided by ``math.degrees(math.cos(ang))``, treating
    the degree angle as radians and unit-converting the cosine value. The
    correct relation is h = a / cos(ang), with *ang* converted from degrees
    to radians first.
    """
    return a / math.cos(math.radians(ang))
def get_opposite_from_tangent(a, ang):
    """Return the opposite side from adjacent side *a* and angle *ang* (degrees).

    Bug fix: the original returned ``math.degrees(math.tan(ang)) * a``,
    treating the degree angle as radians and unit-converting the tangent
    value. The correct relation is o = a * tan(ang), with *ang* converted
    from degrees to radians first.
    """
    return math.tan(math.radians(ang)) * a
def get_adjacent_from_tangent(o, ang):
    """Return the adjacent side from opposite side *o* and angle *ang* (degrees).

    Bug fix: the original divided by ``math.degrees(math.tan(ang))``, treating
    the degree angle as radians and unit-converting the tangent value. The
    correct relation is a = o / tan(ang), with *ang* converted from degrees
    to radians first.
    """
    return o / math.tan(math.radians(ang))
print(get_sine_angle(4,5))
| 23.6
| 39
| 0.748588
| 131
| 708
| 3.847328
| 0.206107
| 0.107143
| 0.267857
| 0.25
| 0.521825
| 0.297619
| 0.103175
| 0
| 0
| 0
| 0
| 0.003155
| 0.10452
| 708
| 30
| 40
| 23.6
| 0.791798
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.45
| false
| 0
| 0.05
| 0.45
| 0.95
| 0.05
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
bdeabd44a5b6e3fc302a3f44e936da7a6314482f
| 74
|
py
|
Python
|
yadialogs/urls.py
|
idlesign/django-yadialogs
|
e3964b3dd296c5b379f83287bf94cc50c4f4cfdd
|
[
"BSD-3-Clause"
] | null | null | null |
yadialogs/urls.py
|
idlesign/django-yadialogs
|
e3964b3dd296c5b379f83287bf94cc50c4f4cfdd
|
[
"BSD-3-Clause"
] | null | null | null |
yadialogs/urls.py
|
idlesign/django-yadialogs
|
e3964b3dd296c5b379f83287bf94cc50c4f4cfdd
|
[
"BSD-3-Clause"
] | null | null | null |
from .utils import get_yadialogs_urls
# Django URLconf entry point: delegate pattern construction to the helper
# in .utils so the dialog URLs stay defined in one place.
urlpatterns = get_yadialogs_urls()
| 18.5
| 37
| 0.837838
| 10
| 74
| 5.8
| 0.7
| 0.413793
| 0.551724
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 74
| 3
| 38
| 24.666667
| 0.878788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
da148ee8d9c091d98a86c3d78dbb095e933fe651
| 422
|
py
|
Python
|
mythx_models/request/version.py
|
ConsenSys/mythx-models
|
e912c2fc6e7d18041310d3b9f0f95085db47ed9b
|
[
"MIT"
] | 2
|
2019-08-26T13:42:28.000Z
|
2019-11-13T15:44:16.000Z
|
mythx_models/request/version.py
|
ConsenSys/mythx-models
|
e912c2fc6e7d18041310d3b9f0f95085db47ed9b
|
[
"MIT"
] | 22
|
2019-08-26T13:14:55.000Z
|
2021-04-18T14:22:52.000Z
|
mythx_models/request/version.py
|
ConsenSys/mythx-models
|
e912c2fc6e7d18041310d3b9f0f95085db47ed9b
|
[
"MIT"
] | 6
|
2019-08-29T15:51:38.000Z
|
2021-04-05T11:41:34.000Z
|
"""This module contains the VersionRequest domain model."""
from pydantic import BaseModel
class VersionRequest(BaseModel):
    """Domain model describing a request to the API version endpoint.

    The request carries no body, headers, or query parameters; only the
    endpoint route and the HTTP verb are meaningful.
    """

    @property
    def endpoint(self):
        """API route this request is sent to."""
        return "v1/version"

    @property
    def method(self):
        """HTTP verb used for the request."""
        return "GET"

    @property
    def payload(self):
        """Request body - empty for this endpoint."""
        return {}

    @property
    def headers(self):
        """Additional HTTP headers - none required."""
        return {}

    @property
    def parameters(self):
        """Query parameters - none required."""
        return {}
| 16.230769
| 59
| 0.606635
| 42
| 422
| 6.095238
| 0.571429
| 0.214844
| 0.140625
| 0.164063
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003367
| 0.296209
| 422
| 25
| 60
| 16.88
| 0.858586
| 0.125592
| 0
| 0.470588
| 0
| 0
| 0.035813
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.294118
| false
| 0
| 0.058824
| 0.294118
| 0.705882
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
da575c3aa3c5bcc23b6fc35bc33b8dfe0f1a4d82
| 304
|
py
|
Python
|
keycache/__init__.py
|
psytron/keycache
|
0b69e21719dbe76908476c01e3e487aae2612fd2
|
[
"Apache-2.0"
] | 2
|
2020-04-27T07:48:54.000Z
|
2020-10-21T17:47:54.000Z
|
keycache/__init__.py
|
psytron/keycache
|
0b69e21719dbe76908476c01e3e487aae2612fd2
|
[
"Apache-2.0"
] | null | null | null |
keycache/__init__.py
|
psytron/keycache
|
0b69e21719dbe76908476c01e3e487aae2612fd2
|
[
"Apache-2.0"
] | null | null | null |
# github.com/psytron/keycache
# USAGE :
# keycache.set_system_encrypt_key( sysfinger.generate() )
# keycache.get('healthity' , alias='healthity')
# future usage
# levels.set( 'alias L 0' ,'domain L 1' )
# def set( self , 'level_0' , 'level_1'):
# print(' next level ')
from .keycache import Keycache
| 25.333333
| 57
| 0.6875
| 41
| 304
| 4.97561
| 0.658537
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015504
| 0.151316
| 304
| 12
| 58
| 25.333333
| 0.775194
| 0.835526
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e505c8e2e1631d9e04edaf9ead93f01702af971a
| 204
|
py
|
Python
|
sip/examples/flask_processing_controller/app/tests/conftest.py
|
SKA-ScienceDataProcessor/integration-prototype
|
5875dc0489f707232534ce75daf3707f909bcd15
|
[
"BSD-3-Clause"
] | 3
|
2016-11-08T02:27:05.000Z
|
2018-01-22T13:26:11.000Z
|
sip/examples/flask_processing_controller/app/tests/conftest.py
|
SKA-ScienceDataProcessor/integration-prototype
|
5875dc0489f707232534ce75daf3707f909bcd15
|
[
"BSD-3-Clause"
] | 87
|
2016-11-24T11:09:01.000Z
|
2021-03-25T22:23:59.000Z
|
sip/examples/flask_processing_controller/app/tests/conftest.py
|
SKA-ScienceDataProcessor/integration-prototype
|
5875dc0489f707232534ce75daf3707f909bcd15
|
[
"BSD-3-Clause"
] | 10
|
2016-05-18T09:41:36.000Z
|
2019-07-04T10:19:24.000Z
|
# pylint: disable=unused-import
# coding=utf-8
"""Pytest configuration"""
import pytest
from .rest_api_fixtures import (get_test_app, get_db_client, init_db,
set_root_url)
| 29.142857
| 69
| 0.671569
| 27
| 204
| 4.740741
| 0.814815
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00641
| 0.235294
| 204
| 6
| 70
| 34
| 0.814103
| 0.313725
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e5355a644266f3bcfa8bf7cb259666b6c8987205
| 75
|
py
|
Python
|
PYTHON/Py3_Mundo1_Fundamental/exercicios/exer.30.py
|
Marciobroficial/CURSO-EM-VIDEO
|
37b10c26336a9744236603282af77661fdf8c61a
|
[
"MIT"
] | 1
|
2021-10-09T18:11:20.000Z
|
2021-10-09T18:11:20.000Z
|
PYTHON/Py3_Mundo1_Fundamental/exercicios/exer.35.py
|
Coppini21/CURSO-EM-VIDEO
|
37b10c26336a9744236603282af77661fdf8c61a
|
[
"MIT"
] | 1
|
2021-09-15T04:18:34.000Z
|
2022-03-02T23:16:26.000Z
|
PYTHON/Py3_Mundo1_Fundamental/exercicios/exer.35.py
|
Coppini21/CURSO-EM-VIDEO
|
37b10c26336a9744236603282af77661fdf8c61a
|
[
"MIT"
] | 3
|
2021-12-15T17:19:51.000Z
|
2022-03-29T02:19:00.000Z
|
# 1 "PREPARANDO O AMBIENTE".
#-------------------------------------------#
| 25
| 45
| 0.266667
| 4
| 75
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014493
| 0.08
| 75
| 2
| 46
| 37.5
| 0.275362
| 0.92
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e56ce1dc1b0f10e7eab6c733c49847aee69068c2
| 93
|
wsgi
|
Python
|
flask/recomenda.wsgi
|
ttm/pnud4
|
89e1fd866dbdea7afcb3d1020816370e303f258c
|
[
"Unlicense"
] | 1
|
2015-05-12T17:03:26.000Z
|
2015-05-12T17:03:26.000Z
|
flask/recomenda.wsgi
|
ttm/pnud4
|
89e1fd866dbdea7afcb3d1020816370e303f258c
|
[
"Unlicense"
] | null | null | null |
flask/recomenda.wsgi
|
ttm/pnud4
|
89e1fd866dbdea7afcb3d1020816370e303f258c
|
[
"Unlicense"
] | null | null | null |
import sys
# Make the Flask project directory importable before loading the app.
sys.path.insert(0, '/disco/pnud4/flask')
# WSGI servers (e.g. mod_wsgi) look for a module-level name "application".
from recomenda import app as application
| 23.25
| 40
| 0.784946
| 15
| 93
| 4.866667
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024096
| 0.107527
| 93
| 3
| 41
| 31
| 0.855422
| 0
| 0
| 0
| 0
| 0
| 0.193548
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e574230c7749ff95fa41ae51fd055ecd1078e267
| 84
|
py
|
Python
|
env/Lib/site-packages/countdowntimer_model/constants.py
|
gtkacz/fantasytrashtalk
|
24ed8ba6c4fae2eca5b15f66b62338a8c87debd2
|
[
"MIT"
] | 4
|
2021-03-29T07:35:41.000Z
|
2022-01-12T09:54:55.000Z
|
env/Lib/site-packages/countdowntimer_model/constants.py
|
gtkacz/fantasytrashtalk
|
24ed8ba6c4fae2eca5b15f66b62338a8c87debd2
|
[
"MIT"
] | 4
|
2020-08-06T14:51:06.000Z
|
2021-09-22T18:53:50.000Z
|
env/Lib/site-packages/countdowntimer_model/constants.py
|
gtkacz/fantasytrashtalk
|
24ed8ba6c4fae2eca5b15f66b62338a8c87debd2
|
[
"MIT"
] | 3
|
2020-04-20T18:54:10.000Z
|
2021-03-29T07:35:13.000Z
|
import pytz
# (name, name) pairs for every timezone pytz knows about — presumably used
# as the `choices` option of a model/form field; verify against callers.
TIMEZONE_CHOICES = tuple(zip(pytz.all_timezones, pytz.all_timezones))
| 16.8
| 69
| 0.809524
| 12
| 84
| 5.416667
| 0.666667
| 0.215385
| 0.492308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 84
| 4
| 70
| 21
| 0.855263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
e574c1a519b08425c19ca005f1f7cf9a60998555
| 27,898
|
py
|
Python
|
python/pyxir/frontend/tvm/relay_tools/relay_l2_convolution.py
|
anilmartha/pyxir
|
0972aed63748afd82ef414b67a6cceaedd738b38
|
[
"Apache-2.0"
] | 25
|
2020-06-17T22:41:13.000Z
|
2022-03-22T16:28:22.000Z
|
python/pyxir/frontend/tvm/relay_tools/relay_l2_convolution.py
|
anilmartha/pyxir
|
0972aed63748afd82ef414b67a6cceaedd738b38
|
[
"Apache-2.0"
] | 25
|
2021-03-16T06:26:44.000Z
|
2022-03-18T11:28:33.000Z
|
python/pyxir/frontend/tvm/relay_tools/relay_l2_convolution.py
|
anilmartha/pyxir
|
0972aed63748afd82ef414b67a6cceaedd738b38
|
[
"Apache-2.0"
] | 19
|
2020-07-30T10:03:02.000Z
|
2021-06-29T01:18:16.000Z
|
# Copyright 2020 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module for transforming Relay L2 operators to XLayer objects
L2: Convolution related operators
"""
import math
import logging
import numpy as np
import pyxir as px
from typing import Dict, List, Callable
import tvm
from tvm.relay.expr import Expr
from pyxir import graph
from pyxir.graph.layer import XLayer
from pyxir.graph.layer import xlayer_factory as xlf
from .util import Schedule
from .relay_2_xlayer_registry import register_relay_2_xlayer_converter,\
register_relay_2_xlayer_converter_base
logger = logging.getLogger("pyxir")
@register_relay_2_xlayer_converter('nn.avg_pool2d')
def nn_avg_pool2d(expr: Expr,
                  params: Dict[str, np.ndarray],
                  schedule: Schedule,
                  net: Dict[Expr, Expr],
                  op_idx: Dict[str, int],
                  RELAY_2_XLAYER: Dict[str, Callable],
                  **kwargs) -> XLayer:
    """
    TVM Avg Pool2d to XLayer
    Relay
    -----
    Type: tvm.relay.op.nn.nn.avg_pool2d
    Ref: https://docs.tvm.ai/api/python/relay/nn.html
    Parameters:
        - data (tvm.relay.Expr)
            The input data to the operator.
        - strides (tuple of int, optional)
            The strides of pooling.
        - padding (tuple of int, optional)
            The padding for pooling.
        - layout (str, optional)
            Layout of the input.
        - ceil_mode (bool, optional)
            To enable or disable ceil while pooling.
        - count_include_pad (bool, optional)
            To include padding to compute the average.
    """
    # `net` doubles as a memoization cache keyed by Relay expression.
    if expr in net:
        logger.debug("MEMORY: NN AVG POOL2D")
        # This expressions is already transformed so we reuse that one
        return net[expr]
    # Pull the pooling attributes off the Relay node as plain Python values.
    pool_size = [int(e) for e in list(expr.attrs.pool_size)]
    strides = [int(e) for e in list(expr.attrs.strides)]
    padding = [int(e) for e in list(expr.attrs.padding)]
    data_layout = str(expr.attrs.layout)
    ceil_mode = bool(expr.attrs.ceil_mode)
    count_include_pad = bool(expr.attrs.count_include_pad)
    # if count_include_pad:
    #     logger.debug("Padding: {}".format(padding))
    #     raise NotImplementedError("Including padding in avg pool2d
    #                               " computation"
    #                               " is not supported")
    # Recursively convert the input expression first (memoized via `net`).
    data_expr, data_expr_class = expr.args[0], expr.args[0].__class__.__name__
    data_layer = RELAY_2_XLAYER[data_expr_class](data_expr, params, schedule,
                                                 net, op_idx, RELAY_2_XLAYER,
                                                 **kwargs)
    logger.debug("nn_avg_pool2d: {}".format(hash(expr)))
    # Update schedule with input data layer
    if data_expr not in net:
        schedule.append(data_expr)
        net[data_expr] = data_layer
    # Create XLayer
    pool_type = 'Avg'
    # Convert NHWC -> NCHW TODO: remove data layout
    # The pooling XLayer itself always works in NCHW, so NHWC inputs are
    # wrapped with transpose layers on both sides.
    if data_layout == 'NHWC':
        t_name = 'nn_avg_pool2d_NHWC>NCHW-' + str(hash(expr))
        data_layer.tops.append(t_name)
        data_layer = xlf.get_xop_factory_func('Transpose', internal=True)(
            t_name, data_layer, [0, 3, 1, 2])
        schedule.append(t_name)
        net[t_name] = data_layer
    # Create name
    op_name = 'nn_avg_pool2d-' + str(hash(expr))
    X = xlf.get_xop_factory_func('Pooling')(
        op_name, data_layer, pool_type, pool_size,
        strides, padding, 'NCHW',
        ceil_mode, count_include_pad,
        relay_id=[hash(expr)])
    logger.debug("-- outshape: {}".format(list(X.shapes)))
    # !Important: set input layer tops
    data_layer.tops.append(X.name)
    # Convert to NCHW -> NHWC TODO: remove data layout
    if data_layout == 'NHWC':
        schedule.append(X.name)
        net[X.name] = X
        t_name = 'nn_avg_pool2d_NCHW>NHWC-' + str(hash(expr))
        X.tops.append(t_name)
        res_X = xlf.get_xop_factory_func('Transpose', internal=True)(
            t_name, X, [0, 2, 3, 1])
    else:
        res_X = X
    return res_X
@register_relay_2_xlayer_converter_base('nn.batch_flatten')
def nn_batch_flatten(op_name: str, expr: Expr, in_xlayers: List[XLayer]) -> XLayer:
    """
    Convert a TVM Relay nn.batch_flatten operation to an XLayer.

    Relay
    -----
    Type: tvm.relay.op.nn.nn.batch_flatten
    Ref: https://docs.tvm.ai/api/python/relay/nn.html
    Parameters:
        - data (tvm.relay.Expr)
            The input data to the operator.
    """
    return px.ops.batch_flatten(op_name, in_xlayers, relay_id=[hash(expr)])
@register_relay_2_xlayer_converter('nn.conv2d')
def nn_conv2d(expr: Expr,
              params: Dict[str, np.ndarray],
              schedule: Schedule,
              net: Dict[Expr, Expr],
              op_idx: Dict[str, int],
              RELAY_2_XLAYER: Dict[str, Callable],
              **kwargs) -> XLayer:
    """
    TVM Convolution to XLayer
    Relay
    -----
    Type: tvm.relay.op.nn.nn.conv2d
    Ref: https://docs.tvm.ai/api/python/relay/nn.html
    Parameters:
        - data (tvm.relay.Expr)
            The input data to the operator.
        - weight (tvm.relay.Expr)
            The weight expressions.
        - strides (tuple of int, optional)
            The strides of convolution.
        - padding (tuple of int, optional)
            The padding of convolution on both sides of inputs before
            convolution.
        - dilation (tuple of int, optional)
            Specifies the dilation rate to be used for dilated convolution.
        - groups (int, optional)
            Number of groups for grouped convolution.
        - channels (int, optional)
            Number of output channels of this convolution.
        - kernel_size (tuple of int, optional)
            The spatial of the convolution kernel.
        - data_layout (str, optional)
            Layout of the input.
        - kernel_layout (str, optional)
            Layout of the weight.
        - out_layout (str, optional)
            Layout of the output, by default, out_layout is the same as
            data_layout
        - out_dtype (str, optional)
            Specifies the output data type for mixed precision conv2d.
    """
    # `net` doubles as a memoization cache keyed by Relay expression.
    if expr in net:
        logger.debug("MEMORY: CONV2D")
        # This expressions is already transformed so we reuse that one
        return net[expr]
    # Recursively convert the data (args[0]) and weight (args[1]) inputs.
    data_expr, data_expr_class = \
        expr.args[0], expr.args[0].__class__.__name__
    weights_expr, weights_expr_class = \
        expr.args[1], expr.args[1].__class__.__name__
    data_layer = RELAY_2_XLAYER[data_expr_class](data_expr, params, schedule,
                                                 net, op_idx, RELAY_2_XLAYER,
                                                 **kwargs)
    weights_layer = RELAY_2_XLAYER[weights_expr_class](weights_expr, params,
                                                       schedule, net, op_idx,
                                                       RELAY_2_XLAYER,
                                                       **kwargs)
    weights_shape = weights_layer.shapes
    logger.debug("nn_conv2d: {}".format(hash(expr)))
    data_layout = str(expr.attrs.data_layout)
    kernel_layout = str(expr.attrs.kernel_layout)
    # Locate the spatial and output-channel axes inside the kernel layout
    # string so defaults can be derived from the weight shape.
    h_index, w_index = kernel_layout.index('H'), kernel_layout.index('W')
    o_index = kernel_layout.index('O')
    # HW
    kernel_size = [int(e) for e in list(expr.attrs.kernel_size)] \
        if expr.attrs.kernel_size is not None \
        else [weights_shape[h_index], weights_shape[w_index]]
    strides = [int(e) for e in list(expr.attrs.strides)]
    padding = [int(e) for e in list(expr.attrs.padding)]
    dilation = [int(e) for e in list(expr.attrs.dilation)]
    groups = int(expr.attrs.groups) if expr.attrs.groups is not None else 1
    channels = int(expr.attrs.channels) if expr.attrs.channels is not None \
        else weights_shape[o_index]
    # out_layout = str(expr.attrs.out_layout)
    # out_dtype = str(expr.attrs.out_dtype)
    assert len(data_layer.shapes) == 4
    assert weights_layer.data is not None
    # Update schedule with child layers
    # ! We don't add weights layer as this weight is precomputed
    # TODO What if weights layer can't be precomputed
    # TODO WHat if weights layer is shared
    if data_expr not in net:
        schedule.append(data_expr)
        net[data_expr] = data_layer
    # Create XLayer
    # Convert NHWC -> NCHW TODO: remove data layout
    if data_layout == 'NHWC':
        t_name = 'nn_conv2d_NHWC>NCHW-' + str(hash(expr))
        data_layer.tops.append(t_name)
        data_layer = \
            xlf.get_xop_factory_func('Transpose', internal=True)(
                t_name, data_layer, [0, 3, 1, 2])
        schedule.append(t_name)
        net[t_name] = data_layer
    # Create name
    op_name = 'nn_conv2d-' + str(hash(expr))
    # [pad_h, pad_w] or [pad_h_top, pad_h_bottom, pad_w_left, pad_w_right]
    # Relay 4-element padding is (top, left, bottom, right); reorder to the
    # (top, bottom, left, right) convention the Convolution factory expects.
    xpadding = padding if len(padding) == 2\
        else [padding[i] for i in [0, 2, 1, 3]]
    X = xlf.get_xop_factory_func('Convolution')(
        op_name, data_layer, weights_layer,
        kernel_size, strides, xpadding, dilation, groups,
        channels, 'NCHW', kernel_layout,
        relay_id=[hash(expr)])
    logger.debug("--outshape: {}".format(list(X.shapes)))
    # !Important: set input layer tops
    data_layer.tops.append(X.name)
    # Convert to NCHW -> NHWC TODO: remove data layout
    if data_layout == 'NHWC':
        schedule.append(X.name)
        net[X.name] = X
        t_name = 'nn_conv2d_NCHW>NHWC-' + str(hash(expr))
        X.tops.append(t_name)
        res_X = xlf.get_xop_factory_func('Transpose', internal=True)(
            t_name, X, [0, 2, 3, 1])
    else:
        res_X = X
    return res_X
@register_relay_2_xlayer_converter('nn.conv2d_transpose')
def nn_conv2d_transpose(expr: Expr,
                        params: Dict[str, np.ndarray],
                        schedule: Schedule,
                        net: Dict[Expr, Expr],
                        op_idx: Dict[str, int],
                        RELAY_2_XLAYER: Dict[str, Callable],
                        **kwargs) -> XLayer:
    """
    Convert Relay nn.conv2d_transpose to Conv2DTranspose XLayer
    Relay
    -----
    Type: tvm.relay.nn.conv2d_transpose
    Ref: https://docs.tvm.ai/langref/relay_op.html
    Parameters:
        - data (tvm.relay.Expr)
            The input data to the operator.
        - weight (tvm.relay.Expr)
            The weight expressions.
        - strides (Tuple[int], optional)
            The strides of convolution.
        - padding (Tuple[int], optional)
            The padding of convolution on both sides of inputs.
        - dilation (Tuple[int], optional)
            Specifies the dilation rate to be used for dilated convolution.
        - channels (int, optional)
            Number of output channels of this convolution.
        - kernel_size (tuple of int, optional)
            The spatial of the convolution kernel.
        - groups (int, optional)
            Number of groups for grouped convolution.
        - data_layout (str, optional)
            Layout of the input.
        - kernel_layout (str, optional)
            Layout of the weight.
        - out_layout (Optional[str])
            Layout of the output, by default, out_layout is the same as
            data_layout
        - output_padding (Tuple[int], optional)
            Additional zero-padding to be added to one side of the output.
        - out_dtype (str, optional)
            Specifies the output data type for mixed precision conv2d.
    """
    # `net` doubles as a memoization cache keyed by Relay expression.
    if expr in net:
        logger.debug("MEMORY: CONV2D_TRANSPOSE")
        return net[expr]
    # HW
    # kernel_size = [int(e) for e in list(expr.attrs.kernel_size)]
    # strides = [int(e) for e in list(expr.attrs.strides)]
    # padding = [int(e) for e in list(expr.attrs.padding)]
    # dilation = [int(e) for e in list(expr.attrs.dilation)]
    # groups = int(expr.attrs.groups) if expr.attrs.groups is not None else 1
    # channels = int(expr.attrs.channels) if expr.attrs.channels is not None \
    #     else None
    # data_layout = str(expr.attrs.data_layout)
    # kernel_layout = str(expr.attrs.kernel_layout)
    # Recursively convert the data (args[0]) and weight (args[1]) inputs.
    data_expr, data_expr_class = \
        expr.args[0], expr.args[0].__class__.__name__
    weights_expr, weights_expr_class = \
        expr.args[1], expr.args[1].__class__.__name__
    data_layer = RELAY_2_XLAYER[data_expr_class](data_expr, params, schedule,
                                                 net, op_idx, RELAY_2_XLAYER,
                                                 **kwargs)
    weights_layer = RELAY_2_XLAYER[weights_expr_class](weights_expr, params,
                                                       schedule, net, op_idx,
                                                       RELAY_2_XLAYER,
                                                       **kwargs)
    weights_shape = weights_layer.shapes
    logger.debug("nn_conv2d_transpose")
    data_layout = str(expr.attrs.data_layout)
    kernel_layout = str(expr.attrs.kernel_layout)
    # NOTE TVM uses different kernel layout description than we do and we have to switch O and I
    kernel_layout = ''.join(({'O': 'I', 'I': 'O', 'H': 'H', 'W': 'W'}[c] for c in kernel_layout))
    # Locate the spatial and output-channel axes inside the (swapped) kernel
    # layout so defaults can be derived from the weight shape.
    h_index, w_index = kernel_layout.index('H'), kernel_layout.index('W')
    o_index = kernel_layout.index('O')
    # HW
    kernel_size = [int(e) for e in list(expr.attrs.kernel_size)] \
        if expr.attrs.kernel_size is not None \
        else [weights_shape[h_index], weights_shape[w_index]]
    strides = [int(e) for e in list(expr.attrs.strides)]
    padding = [int(e) for e in list(expr.attrs.padding)]
    dilation = [int(e) for e in list(expr.attrs.dilation)]
    groups = int(expr.attrs.groups) if expr.attrs.groups is not None else 1
    channels = int(expr.attrs.channels) if expr.attrs.channels is not None \
        else weights_shape[o_index]
    # out_layout = str(expr.attrs.out_layout)
    # out_dtype = str(expr.attrs.out_dtype)
    logger.debug("-- kernel_size {}".format(kernel_size))
    logger.debug("-- strides {}, {}".format(strides, type(strides[0])))
    logger.debug("-- padding {}".format(padding))
    logger.debug("-- dilation {}".format(dilation))
    logger.debug("-- groups {}, {}".format(groups, type(groups)))
    logger.debug("-- channels {}".format(channels))
    logger.debug("-- data_layout {}".format(data_layout))
    logger.debug("-- kernel_layout {}".format(kernel_layout))
    assert len(data_layer.shapes) == 4
    assert weights_layer.data is not None
    # Update schedule with child layers
    # ! We don't add weights layer as this weight is precomputed
    # TODO What if weights layer can't be precomputed
    # TODO WHat if weights layer is shared
    if data_expr not in net:
        schedule.append(data_expr)
        net[data_expr] = data_layer
    # Create XLayer
    # Initialize relay idx with relay idx of weights
    relay_idx = weights_layer.attrs['relay_id'][:]
    # Relay converts a NHWC conv2d_transpose layer into a
    # transpose -> conv2d_transpose (NCHW) -> transpose. For partitioning we
    # keep track of those relay ids inside the conv2d_transpose operation
    if 'Transpose' in data_layer.type:
        relay_idx.append(data_layer.attrs['relay_id'][0])
    # TODO: NHWC
    op_name = 'nn_conv2d_transpose-' + str(hash(expr))
    relay_idx.append(hash(expr))
    # [pad_h, pad_w] or [pad_h_top, pad_h_bottom, pad_w_left, pad_w_right]
    xpadding = padding if len(padding) == 2\
        else [padding[i] for i in [0, 2, 1, 3]]
    X = xlf.get_xop_factory_func('Conv2DTranspose')(
        op_name, data_layer, weights_layer, kernel_size,
        strides, xpadding,
        dilation,
        groups, channels,
        data_layout, kernel_layout,
        relay_id=relay_idx
    )
    logger.debug("--outshape: {}".format(list(X.shapes)))
    # !Important: set input layer tops:
    data_layer.tops.append(op_name)
    return X
@register_relay_2_xlayer_converter('nn.global_avg_pool2d')
def nn_global_avg_pool2d(expr: Expr,
                         params: Dict[str, np.ndarray],
                         schedule: Schedule,
                         net: Dict[Expr, Expr],
                         op_idx: Dict[str, int],
                         RELAY_2_XLAYER: Dict[str, Callable],
                         **kwargs) -> XLayer:
    """
    TVM global Avg pooling to XLayer
    Relay
    -----
    Type: tvm.relay.op.nn.nn.global_avg_pool2d
    Ref: https://docs.tvm.ai/api/python/relay/nn.html
    Parameters:
        - data (tvm.relay.Expr)
            The input data to the operator.
        - layout (str, optional)
            Layout of the input.
    """
    # `net` doubles as a memoization cache keyed by Relay expression.
    if expr in net:
        logger.debug("MEMORY: GLOBAL AVG POOL2D")
        # This expressions is already transformed so we reuse that one
        return net[expr]
    data_layout = str(expr.attrs.layout)
    # Recursively convert the input expression first (memoized via `net`).
    data_expr, data_expr_class = expr.args[0], expr.args[0].__class__.__name__
    data_layer = RELAY_2_XLAYER[data_expr_class](data_expr, params, schedule,
                                                 net, op_idx, RELAY_2_XLAYER,
                                                 **kwargs)
    logger.debug("nn_global_avg_pool2d")
    # Update schedule with input data layer
    if data_expr not in net:
        schedule.append(data_expr)
        net[data_expr] = data_layer
    # Create XLayers
    # Convert NHWC -> NCHW TODO: remove data layout
    if data_layout == 'NHWC':
        t_name = 'nn_global_avg_pool2d_NHWC>NCHW-' + str(hash(expr))
        data_layer.tops.append(t_name)
        data_layer = \
            xlf.get_xop_factory_func('Transpose', internal=True)(
                t_name, data_layer, [0, 3, 1, 2])
        schedule.append(t_name)
        net[t_name] = data_layer
    # Create name
    op_name = 'nn_global_avg_pool2d-' + str(hash(expr))
    pool_type = 'Avg'
    X = xlf.get_xop_factory_func('GlobalPooling')(
        op_name, data_layer, pool_type, 'NCHW',
        relay_id=[hash(expr)])
    logger.debug("-- outshape: {}".format(list(X.shapes)))
    # !Important: set input layer tops
    data_layer.tops.append(X.name)
    # Convert to NCHW -> NHWC TODO: remove data layout
    if data_layout == 'NHWC':
        schedule.append(X.name)
        net[X.name] = X
        t_name = 'nn_global_avg_pool2d_NCHW>NHWC-' + str(hash(expr))
        X.tops.append(t_name)
        res_X = xlf.get_xop_factory_func('Transpose', internal=True)(
            t_name, X, [0, 2, 3, 1])
    else:
        res_X = X
    return res_X
@register_relay_2_xlayer_converter('nn.global_max_pool2d')
def nn_global_max_pool2d(expr: Expr,
                         params: Dict[str, np.ndarray],
                         schedule: Schedule,
                         net: Dict[Expr, Expr],
                         op_idx: Dict[str, int],
                         RELAY_2_XLAYER: Dict[str, Callable],
                         **kwargs) -> XLayer:
    """
    TVM global max pool to XLayer
    TODO Overlap with globale_avg_pool2d
    Relay
    -----
    Type: tvm.relay.op.nn.nn.global_max_pool2d
    Ref: https://docs.tvm.ai/api/python/relay/nn.html
    Parameters:
        - data (tvm.relay.Expr)
            The input data to the operator.
        - layout (str, optional)
            Layout of the input.
    """
    # `net` doubles as a memoization cache keyed by Relay expression.
    if expr in net:
        logger.debug("MEMORY: GLOBAL MAX POOL2D")
        # This expressions is already transformed so we reuse that one
        return net[expr]
    data_layout = str(expr.attrs.layout)
    # Recursively convert the input expression first (memoized via `net`).
    data_expr, data_expr_class = expr.args[0], expr.args[0].__class__.__name__
    data_layer = RELAY_2_XLAYER[data_expr_class](data_expr, params, schedule,
                                                 net, op_idx, RELAY_2_XLAYER,
                                                 **kwargs)
    logger.debug("nn_global_max_pool2d")
    # Update schedule with input data layer
    if data_expr not in net:
        schedule.append(data_expr)
        net[data_expr] = data_layer
    # Create XLayers
    # Convert NHWC -> NCHW TODO: remove data layout
    if data_layout == 'NHWC':
        t_name = 'nn_global_max_pool2d_NHWC>NCHW-' + str(hash(expr))
        data_layer.tops.append(t_name)
        data_layer = xlf.get_xop_factory_func('Transpose', internal=True)(
            t_name, data_layer, [0, 3, 1, 2])
        schedule.append(t_name)
        net[t_name] = data_layer
    # Create name
    op_name = 'nn_global_max_pool2d-' + str(hash(expr))
    pool_type = 'Max'
    X = xlf.get_xop_factory_func('GlobalPooling')(
        op_name, data_layer, pool_type, 'NCHW',
        relay_id=[hash(expr)])
    logger.debug("-- outshape: {}".format(list(X.shapes)))
    # !Important: set input layer tops:
    # NOTE(review): the avg variant appends X.name here; op_name is
    # presumably identical to X.name — confirm against the XLayer factory.
    data_layer.tops.append(op_name)
    # Convert to NCHW -> NHWC TODO: remove data layout
    if data_layout == 'NHWC':
        schedule.append(X.name)
        net[X.name] = X
        t_name = 'nn_global_max_pool2d_NCHW>NHWC-' + str(hash(expr))
        X.tops.append(t_name)
        res_X = xlf.get_xop_factory_func('Transpose', internal=True)(
            t_name, X, [0, 2, 3, 1])
    else:
        res_X = X
    return res_X
@register_relay_2_xlayer_converter('nn.max_pool2d')
def nn_max_pool2d(expr: Expr,
                  params: Dict[str, np.ndarray],
                  schedule: Schedule,
                  net: Dict[Expr, Expr],
                  op_idx: Dict[str, int],
                  RELAY_2_XLAYER: Dict[str, Callable],
                  **kwargs) -> XLayer:
    """
    Convert a TVM Relay 2D max pool operation to an XLayer

    Relay
    -----
    Type: tvm.relay.op.nn.nn.max_pool2d
    Ref: https://docs.tvm.ai/api/python/relay/nn.html
    Parameters:
        - data (tvm.relay.Expr)
            The input data to the operator.
        - strides (tuple of int, optional)
            The strides of pooling.
        - padding (tuple of int, optional)
            The padding for pooling.
        - layout (str, optional)
            Layout of the input.
        - ceil_mode (bool, optional)
            To enable or disable ceil while pooling.
    """
    if expr in net:
        logger.debug("MEMORY: MAX POOL2D")
        # Expression was already converted; reuse the cached layer
        return net[expr]

    attrs = expr.attrs
    pool_size = [int(v) for v in list(attrs.pool_size)]
    strides = [int(v) for v in list(attrs.strides)]
    padding = [int(v) for v in list(attrs.padding)]
    layout = str(attrs.layout)
    ceil_mode = bool(attrs.ceil_mode)
    expr_hash = hash(expr)

    in_expr = expr.args[0]
    in_layer = RELAY_2_XLAYER[in_expr.__class__.__name__](
        in_expr, params, schedule, net, op_idx, RELAY_2_XLAYER, **kwargs)

    logger.debug("nn_max_pool2d")

    # Register the input layer in the schedule if it is new
    if in_expr not in net:
        schedule.append(in_expr)
        net[in_expr] = in_layer

    # Pooling is created in NCHW internally; NHWC inputs are wrapped in
    # transposes. TODO: remove data layout handling
    if layout == 'NHWC':
        t_name = 'nn_max_pool2d_NHWC>NCHW-' + str(expr_hash)
        in_layer.tops.append(t_name)
        in_layer = xlf.get_xop_factory_func('Transpose', internal=True)(
            t_name, in_layer, [0, 3, 1, 2])
        schedule.append(t_name)
        net[t_name] = in_layer

    op_name = 'nn_max_pool2d-' + str(expr_hash)
    logger.debug("-- name: {}".format(op_name))
    X = xlf.get_xop_factory_func('Pooling')(
        op_name, in_layer, 'Max', pool_size,
        strides, padding, 'NCHW',
        ceil_mode, False,
        relay_id=[expr_hash])
    logger.debug("-- outshape: {}".format(list(X.shapes)))

    # !Important: register this operation as a consumer of its input layer
    in_layer.tops.append(X.name)

    if layout != 'NHWC':
        return X

    # Transpose the NCHW result back to NHWC. TODO: remove data layout
    schedule.append(X.name)
    net[X.name] = X
    t_name = 'nn_max_pool2d_NCHW>NHWC-' + str(expr_hash)
    X.tops.append(t_name)
    return xlf.get_xop_factory_func('Transpose', internal=True)(
        t_name, X, [0, 2, 3, 1])
@register_relay_2_xlayer_converter_base('nn.pad')
def pad(op_name: str, expr: Expr, in_xlayers: List[XLayer]) -> XLayer:
    """
    Convert a TVM Relay padding operation to an XLayer

    Relay
    -----
    Type: tvm.relay.op.nn.pad
    Ref: https://docs.tvm.ai/api/python/relay/nn.html
    Parameters:
        - data (tvm.relay.Expr)
            The input data to the operator
        - pad_width (tuple of <tuple of <int>>, required)
            Number of values padded to the edges of each axis, in the format
            of ((before_1, after_1), …, (before_N, after_N))
        - pad_value (float, optional, default=0.0)
            The value used for padding
    """
    pad_width = [[int(p) for p in axis_pad] for axis_pad in expr.attrs.pad_width]

    if hasattr(expr.attrs, "pad_value"):
        pad_value = float(expr.attrs.pad_value)
    else:
        # tvm >= v0.8.dev0 passes the pad value as a second input expression
        # instead of an operator attribute
        assert len(in_xlayers) > 1, "If pad_value is not a Relay operation attribute, it is expected"\
            " as an input expression"
        assert in_xlayers[1].type[0] == "Constant", "Only static padding is supported."
        pad_value = float(in_xlayers[1].data[0])

    logger.debug("nn_pad: {}".format(hash(expr)))
    logger.debug("-- pad width: {}".format(pad_width))
    logger.debug("-- pad value: {}".format(pad_value))

    return px.ops.pad(op_name, in_xlayers[0], pad_width, pad_value, relay_id=[hash(expr)])
@register_relay_2_xlayer_converter_base('nn.upsampling')
def nn_upsampling(op_name: str, expr: Expr, in_xlayers: List[XLayer]) -> XLayer:
    """
    Convert a TVM Relay 2D upsampling operation to an XLayer

    Relay
    -----
    Type: tvm.relay.op.nn.upsampling
    Desc:
        Upsampling.
        This operator takes data as input and does 2D scaling to the given
        scale factor. In the default case, where the data_layout is NCHW
        with data of shape (n, c, h, w) out will have a shape
        (n, c, h*scale_h, w*scale_w)
        method indicates the algorithm to be used while calculating the out
        value and method can be one of (bilinear, nearest_neighbor, bicubic)
    Ref: https://docs.tvm.ai/langref/relay_op.html
    Parameters:
        - data (tvm.relay.Expr)
            The input data to the operator.
        - scale_h (tvm.relay.Expr)
            The scale factor for height upsampling.
        - scale_w (tvm.relay.Expr)
            The scale factor for width upsampling.
        - layout (str, optional)
            Layout of the input.
        - method (str, optional)
            Scale method to used [nearest_neighbor, bilinear, bicubic].
        - align_corners (bool, optional)
            Whether to keep corners in proper place.
    """
    attrs = expr.attrs
    X = px.ops.upsampling2d(
        op_name,
        in_xlayers,
        scale_h=float(attrs.scale_h),
        scale_w=float(attrs.scale_w),
        data_layout=str(attrs.layout),
        method=str(attrs.method),
        align_corners=bool(attrs.align_corners),
        relay_id=[hash(expr)],
    )
    logger.debug("-- outshape: {}".format(list(X.shapes)))
    return X
| 34.916145
| 102
| 0.607104
| 3,744
| 27,898
| 4.328793
| 0.08734
| 0.031653
| 0.025174
| 0.009379
| 0.772012
| 0.758191
| 0.744863
| 0.73567
| 0.721602
| 0.7063
| 0
| 0.009552
| 0.286974
| 27,898
| 798
| 103
| 34.9599
| 0.805047
| 0.336081
| 0
| 0.696335
| 0
| 0
| 0.079776
| 0.014834
| 0
| 0
| 0
| 0.008772
| 0.015707
| 1
| 0.02356
| false
| 0
| 0.031414
| 0
| 0.094241
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e582175d61d7e850681673503c660583278dc60a
| 6,748
|
py
|
Python
|
misc/server.py
|
fabriciocgf/IOT_LoRa_Dashboard
|
ba6eb168c47f57f682a84a50ad366319d12e5126
|
[
"MIT"
] | 22
|
2017-05-02T21:23:27.000Z
|
2022-01-31T20:11:32.000Z
|
misc/server.py
|
fabriciocgf/IOT_LoRa_Dashboard
|
ba6eb168c47f57f682a84a50ad366319d12e5126
|
[
"MIT"
] | 6
|
2017-05-04T08:17:13.000Z
|
2017-06-28T09:39:22.000Z
|
misc/server.py
|
fabriciocgf/IOT_LoRa_Dashboard
|
ba6eb168c47f57f682a84a50ad366319d12e5126
|
[
"MIT"
] | 8
|
2017-05-04T09:03:40.000Z
|
2021-09-07T14:43:38.000Z
|
#!/usr/bin/python
import os, sys, requests, json
from datetime import datetime
import time
import random
# CLI contract (derived from the uses below): argv[1] = SensorThings server
# host(:port), argv[2] = command ('o', 't', 'tl', 'l', 'lv' or 'a');
# remaining arguments depend on the command.
myCMD = sys.argv[2];
def callWebservicePost(entity, jsonString): #set up the url and headers
    """POST a JSON entity to the SensorThings server given on the command line.

    Parameters:
    - entity: SensorThings collection path, e.g. "Things" or "Observations"
    - jsonString: dict that is serialized as the JSON request body

    The target host comes from sys.argv[1]. HTTP status 201 means the entity
    was created; anything else is reported as an error.
    """
    # Removed dead locals urlF/urlS/urlVM (hard-coded fast/slow/VM server
    # URLs that were never used) -- only the user-supplied host is needed.
    userURL = "http://" + sys.argv[1] + "/SensorThingsServer-1.0/v1.0/" + entity
    headers = {
        "Content-Type": "application/json",
        "Accept": "application/json"
    }
    #Use requests module to send a POST request
    request = requests.post(userURL, data = json.dumps(jsonString), headers = headers)
    # print the status code and the response as a json or text.
    print(request.status_code)
    # status - code should be 201, to let us know the entity has been created
    print("Content-Length: " + request.headers["Content-Length"])
    print(request.headers)
    if request.status_code != 201: #print error
        print(request.text)
        print("Error")
    else :#do something with response
        print("Success: " + str(random.randrange(0, 100, 2)))
# Observation: push a random reading to the given Datastream every 5 seconds
if myCMD == "o" and __name__ == '__main__':
    dsID = int(sys.argv[3])
    while True:
        # Build the Observation payload with the current timestamp
        stamp = datetime.now().isoformat()
        reading = random.randrange(0, 101, 2)
        print("Pushing to server: " + str(reading))
        payload = {"phenomenonTime": stamp,"resultTime": stamp,"result": reading,"Datastream": {"@iot.id": dsID}}
        # Perform a POST request to the webservice
        callWebservicePost("Observations", payload)
        time.sleep(5)
def _buildThingJson(myName, coord):
    """Build the full Thing creation payload (with nested Location, Sensor
    and Datastream entities) for a Thing named *myName* at *coord*
    ([lon, lat]).

    Extracted because the "t" and "tl" commands previously duplicated this
    payload construction verbatim, differing only in the coordinates.
    """
    properties = {"property1": "it's waterproof","property2": "it glows in the dark","property3": "it repels insects"}
    location = {"type": "Point","coordinates": coord}
    locations = [{"name": myName + " location","description": myName + "location description","encodingType": "application/vnd.geo+json","location": location}]
    sensor = {"name": myName + " Sensor","description": myName + " Sensor Description","encodingType": "http://schema.org/description","metadata": "Calibration date: Jan 11, 2015"}
    uoM = {"name": "Celsius","symbol": "C","definition": "http://www.qudt.org/qudt/owl/1.0.0/unit/Instances.html#Celsius"}
    observedP = {"name": "Temperature","definition": "http://www.qudt.org/qudt/owl/1.0.0/quantity/Instances.html#Temperature","description": "Temperature of the camping site"}
    obsType = "http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement"
    datastreams = [{"Sensor": sensor,"unitOfMeasurement": uoM,"name": myName + " Datastream","description": myName + " Datastream Description","observationType": obsType,"ObservedProperty": observedP}]
    return {"name": myName + "Thing","description": myName + "description","properties": properties,"Locations": locations,"Datastreams": datastreams}

# Thing: create a Thing with hard-coded demo coordinates
if myCMD == "t" and __name__ == '__main__':
    #Call method in order to perform a POST request to the webservice
    callWebservicePost("Things", _buildThingJson(sys.argv[3], [-117.123, 54.123]))

# Thing with location: same as "t" but coordinates come from argv[4]/argv[5]
if myCMD == "tl" and __name__ == '__main__':
    #Call method in order to perform a POST request to the webservice
    callWebservicePost("Things", _buildThingJson(sys.argv[3], [float(sys.argv[4]), float(sys.argv[5])]))
def _buildLocationJson(myName, coord):
    """Build a Location payload for *myName* at *coord* ([lon, lat]).

    Extracted because the "l" and "lv" commands previously duplicated this
    construction verbatim, differing only in the coordinates.
    """
    return {"name": myName + "Location","description": myName + "description","encodingType": "application/vnd.geo+json","location": {"type": "Point","coordinates": coord}}

# Location: push a randomly jittered location for a Thing every 5 seconds
if myCMD == "l" and __name__ == '__main__':
    myName = sys.argv[3]
    thingID = sys.argv[4]
    while True:
        # Jitter around (8.4, 49) in 0.002-degree steps
        coord = [8.4 + random.randrange(0, 100, 2) / 1000, 49 + random.randrange(0, 100, 2) / 1000]
        #Call method in order to perform a POST request to the webservice
        callWebservicePost("Things(" + thingID + ")/Locations", _buildLocationJson(myName, coord))
        time.sleep(5)

# Location (fixed value): push a single location taken from the command line
if myCMD == "lv" and __name__ == '__main__':
    myName = sys.argv[3]
    thingID = sys.argv[4]
    coord = [float(sys.argv[5]), float(sys.argv[6])]
    #Call method in order to perform a POST request to the webservice
    callWebservicePost("Things(" + thingID + ")/Locations", _buildLocationJson(myName, coord))
# all: in one loop, push a jittered Location and a random Observation
# every 5 seconds
if myCMD == "a" and __name__ == '__main__':
    myName = sys.argv[3]
    thingID = sys.argv[4]
    dsID = int(sys.argv[5])  # hoisted out of the loop: argv never changes
    while True:
        # Jitter around (8.4, 49) in 0.002-degree steps
        coord = [8.4 + random.randrange(0, 100, 2) / 1000, 49 + random.randrange(0, 100, 2) / 1000]
        jsonString = {"name": myName + "Location","description": myName + "description","encodingType": "application/vnd.geo+json","location": {"type": "Point","coordinates": coord}}
        #Call method in order to perform a POST request to the webservice
        callWebservicePost("Things(" + thingID + ")/Locations", jsonString)
        # Now the Observation with the current timestamp
        currentTime = datetime.now().isoformat()
        jsonString = {"phenomenonTime": currentTime,"resultTime": currentTime,"result": random.randrange(0, 101, 2),"Datastream": {"@iot.id": dsID}}
        #Call method in order to perform a POST request to the webservice
        callWebservicePost("Observations", jsonString)
        time.sleep(5)
| 53.555556
| 199
| 0.693242
| 845
| 6,748
| 5.474556
| 0.231953
| 0.024211
| 0.020752
| 0.025724
| 0.797665
| 0.784695
| 0.749676
| 0.742542
| 0.742542
| 0.723952
| 0
| 0.032547
| 0.139449
| 6,748
| 125
| 200
| 53.984
| 0.764078
| 0.135892
| 0
| 0.55814
| 0
| 0.104651
| 0.410548
| 0.025681
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011628
| false
| 0
| 0.046512
| 0
| 0.05814
| 0.081395
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e5ac0c186805f80c0d7b1b074089d80ca81860db
| 113,736
|
py
|
Python
|
src/NER/corpus/taxonomy_2021_for_NLP.py
|
ohikendoit/cbi_mrc_analytics
|
e2f4b8bfa60d032275543fe0e5c61f0c96444b8b
|
[
"MIT"
] | null | null | null |
src/NER/corpus/taxonomy_2021_for_NLP.py
|
ohikendoit/cbi_mrc_analytics
|
e2f4b8bfa60d032275543fe0e5c61f0c96444b8b
|
[
"MIT"
] | null | null | null |
src/NER/corpus/taxonomy_2021_for_NLP.py
|
ohikendoit/cbi_mrc_analytics
|
e2f4b8bfa60d032275543fe0e5c61f0c96444b8b
|
[
"MIT"
] | null | null | null |
# Keyword taxonomy for matching emergency/disaster language in text.
# Mixes disaster types with crisis-related n-grams; duplicate entries
# (e.g. 'volcano', 'flood damage', 'flood relief', 'flooding') are kept
# as in the original data.
# NOTE(review): the item 'financial donations' was split across physical
# lines mid-string in the extracted dump (a syntax error); re-joined here.
emergency = [
    'conflict', 'violence', 'displacement', 'drought', 'earthquake', 'fire',
    'flooding', 'freeze', 'health emergency', 'dengue', 'pneumonic plague',
    'measles', 'landslide', 'tropical storm', 'typhoon', 'cyclone',
    'hurricane', 'tsunami', 'urban disaster', 'volcanic eruption', 'volcano',
    'refugee', 'terrorist attack', 'cold wave', 'complex emergency',
    'epidemic', 'extratropical cyclone', 'flash flood', 'flood', 'heat wave',
    'insect infestation', 'land slide', 'mud slide', 'severe local storm',
    'snow avalanche', 'storm surge', 'technological disaster',
    'tropical cyclone', 'volcano', 'wild fire', 'flood crisis', 'victims',
    'flood victims', 'flood powerful', 'powerful storms', 'hoisted flood',
    'explosion', 'flood cost', 'affected tornado', 'tornado relief',
    'photos flood', 'water rises', 'flood waters', 'flood appeal',
    'victims explosion', 'bombing suspect', 'massive explosion',
    'affected areas', 'flood relief', 'flood affected', 'tornado victims',
    'explosions running', 'evacuated', 'relief', 'flood death',
    'deaths confirmed', 'affected flooding', 'people killed', 'dozens',
    'footage', 'survivor finds', 'flood worsens', 'flood damage',
    'major flood', 'rubble', 'another explosion', 'confirmed dead', 'rescue',
    'flood warnings', 'tornado survivor', 'damage', 'devastating',
    'flood toll', 'affected hurricane', 'prayers families', 'crisis',
    'text donation', 'redcross give', 'recede', 'bombing', 'massive',
    'bombing victims', 'explosion ripped', 'gets donated', 'donated victims',
    'relief efforts', 'news flood', 'flood emergency', 'fire flood',
    'huge explosion', 'bushfire', 'torrential rains', 'affected explosion',
    'disaster', 'tragedy', 'twister', 'blast', 'fatalities', 'dead explosion',
    'survivor', 'death', 'explosion reported', 'evacuees', 'large explosion',
    'firefighters', 'morning flood', 'praying', 'public safety', 'destroyed',
    'displaced', 'fertilizer explosion', 'donate tornado', 'retweet donate',
    'flood tornado', 'casualties', 'climate change', 'financial donations',
    'stay strong', 'dead hundreds', 'major explosion', 'bodies recovered',
    'waters recede', 'response disasters', 'victims donate', 'fire fighters',
    'explosion victims', 'prayers city', 'torrential', 'bomber',
    'explosion registered', 'missing flood', 'brought hurricane',
    'relief fund', 'help tornado', 'explosion fire', 'tragic',
    'enforcement official', 'dealing hurricane', 'flood recovery',
    'dead torrential', 'flood years', 'massive tornado', 'crisis rises',
    'flood peak', 'flood ravaged', 'missing explosion', 'floods kill',
    'tornado damage', 'cross tornado', 'facing flood', 'deadly explosion',
    'dead missing', 'floods force', 'flood disaster', 'tornado disaster',
    'medical examiner', 'fire explosion', 'storm', 'flood hits',
    'floodwaters', 'emergency', 'flood alerts', 'crisis unfolds',
    'daring rescue', 'tragic events', 'medical office', 'deadly tornado',
    'people trapped', 'lives hurricane', 'bombings reports',
    'breaking suspect', 'bombing investigation', 'praying affected',
    'surging floods', 'explosion injured', 'injured explosion',
    'responders killed', 'explosion caught', 'city tornado',
    'damaged hurricane', 'suspect bombing', 'massive manhunt',
    'releases images', 'shot killed', 'rains severely', 'house flood',
    'live coverage', 'devastating tornado', 'lost lives', 'reportedly dead',
    'following explosion', 'remember lives', 'tornado flood', 'want help',
    'seconds bombing', 'reported dead', 'safe hurricane', 'dead floods',
    'flood threat', 'flood situation', 'thousands homes', 'risk running',
    'dying hurricane', 'bombing shot', 'police people', 'terrible explosion',
    'prayers involved', 'reported injured', 'seismic', 'victims waters',
    'flood homeowners', 'flood claims', 'homeowners reconnect',
    'reconnect power', 'power supplies', 'rescuers help', 'free hotline',
    'hotline help', 'saddened loss', 'identified suspect',
    'bombings saddened', 'reported explosion', 'prepare hurricane',
    'landfall', 'bombing case', 'communities damaged', 'destruction', 'levy',
    'tornado', 'hurricane coming', 'toxins flood', 'release toxins',
    'toxins', 'supplies waters', 'crisis found', 'braces major',
    'government negligent', 'terror', 'memorial service', 'terror attack',
    'coast hurricane', 'terrified hurricane', 'hurricane category',
    'devastating fire', 'disaster area', 'disaster preparedness',
    'disaster recovery', 'disaster relief', 'disaster response',
    'disaster site', 'disaster situation', 'emergency response',
    'flood control', 'flood damage', 'flood relief', 'flooded', 'flooding',
    'heavy rainfall']
# Named past emergency events (storms, epidemics, explosions) for
# event-level keyword matching; includes common spelling variants.
emergency_event = ['haiti earthquake', 'earthquake in haiti', 'tropical storm grace', 'coronavirus', 'corona virus', 'coronavirus disease', 'covid-19', 'typhoon goni', 'typhoon rolly', 'typhoon vamco', 'typhoon ulysses', 'beirut port explosions', 'beirut explosion', 'beirut blast', 'cyclone harold', 'hurricane dorian', 'cyclone kenneth', 'cyclone idai', 'indonesia tsunami', 'indian ocean earthquake', 'indian ocean tsunami', 'typhoon manghut']
# Cross-cutting humanitarian theme keywords (policy/programming vocabulary).
humanitarian_theme = ['accountability to affected people', 'business continuity', 'civil military coordination', 'climate change', 'community engagement', 'conflict and fragility', 'disaster risk reduction', 'disaster risk', 'early warning', 'early warning system', 'gender', 'humanitarian development index', 'humanitarian development nexus', 'humanitarian development', 'human rights', 'impact measurement', 'innovation and new technologies', 'innovation and technologies', 'forced displacement', 'peace', 'preparedness', 'prevention', 'public private partnership', 'recovery', 'response', 'MSME', 'small and medium sized enterprise', 'small and medium sized enterprises', 'sustainable development', 'sustainable development goals', 'affected families', 'affected regions', 'aid agencies', 'aids', 'collapsed']
# Humanitarian response sector/cluster names (e.g. food security, shelter).
humanitarian_action_clusters = ['food security', 'health', 'logistic', 'logistics', 'nutrition', 'protection', 'shelter', 'water sanitation', 'water hygiene', 'hygiene', 'camp coordination', 'early recovery', 'education', 'emergency telecommunication', 'emergency telecommunications']
#disaster_management_theme = ['caution and advice', 'injured People', 'dead people', 'infrastructure damage', 'supplies needed or offered', 'services needed or offered', 'or trapped people', 'displaced and evacuated people', 'animal management', 'personal updates', 'sympathy', 'children and education', 'food and nutrition', 'logistic and transportation', 'camp and shelter', 'water', 'sanitation', 'and hygiene', 'safety and security', 'telecommunications', 'weather', 'response agencies in place', "witnesse's accounts", 'impact of the Crisis', 'million children effected', 'schools next to fertilizer plant', 'savethechildren', 'adoption', 'affected families', 'authorities advising children', 'babies', 'babies born months', 'babies rescued','baby born in tent', 'baby needs milk', 'can help children', 'cancelled due', 'cancelled due to strong winds', 'children', 'classes are canceled because of', 'classes are suspended', 'collapse families search trucks hauling debris', 'displaced families', 'families hit by floods', 'families hit by typhoon', 'family center', 'family killed', 'family needs rescue', 'family receive emergency shelter kit', 'hundreds of children', 'hundreds of families', 'hungry children', 'injured are children', 'kids', 'kids displaced', 'kids lessons', 'kids movies', 'national high school', 'no class tomorrow', 'no classes', 'no classes and office', 'no classes for both private and public schools', 'no school', 'nursery', 'private and public schools', 'public schools', 'relief assistance to families', 'relief assistance to families affected','schools are closed', 'schools are now accepting donations', 'schools extend class suspension until', 'schools football team', 'search for family & friends', 'sem break', 'students now', 'students return', 'teachers', 'the children', 'the families', 'the kids', 'the sandy hook kids', 'thousands of families', "victim's families", ' bags of rice', 'relief food packs', 'relief needs food and water', 'rescue trapped no 
food water', 'army providing mres', 'care and share food bank', 'distributes food', 'donate any supplies', 'donate/distribute food', 'donating binalot meals', 'donation pickup', 'donations especially rice', 'donations like canned goods', 'drop off donations', 'emergency food packages', 'family food packs', 'food', 'food packets and health services to affected people', 'food packs', 'food supplies', 'hospital food shortage', 'hospital is running out of food', 'hospital needs food', 'need canned', 'needs food', 'no food', 'non-food', "not confident it will last but maybe i won't lose all the food", 'packaged food', 'provided food', 'starving to death', 'victims identified', 'well-fed', 'working to buy million meal', 'allergies', 'another storm', 'army doctors treating patients', 'at least hurt', 'at least people injured', 'at least people injured by glass shards', 'at least people were injured', 'breathing problems', 'carcinogens', 'critical gunshot victim', 'dengue', 'dengue cases', 'doctors and patients', 'donate blood', 'donate blood following explosion', 'donate medicines', 'dozens are injured', 'dozens badly hurt', 'dozens hurt', 'dozens injured', 'dozens of injuries', 'emergency services working tonight', 'epidemics of communicable infectious diseases', 'female patients', 'haze', 'haze prevention', 'haze situation', 'health advisories', 'health advisory', 'help children recover from trauma', 'highway hypnosis', 'hospital', 'hospital address', 'hospital appeals', 'hospital ceo on west explosion', 'hospital director', 'hospital needs help', 'hospital patients', 'hospital staff', 'hundreds injured', 'hundreds likely injured after massive explosion at fertiliser plant', 'hundreds more injured', 'leaves injured', 'medical', 'medical assistance', 'medical camps', 'medical centers', 'medical facility', 'medical supplies', 'medical supplies/prescriptions', 'multiple injuries', 'multiple injuries reported', 'multiple injuries reported at', 'need to provide first aid 
while waiting on emergency services', 'no surgery', 'no surgery staff present', 'nurse among victims', 'officials fear increased risk of west nile virus', 'patients', 'people hospitalized', 'people hurt', 'people injured', 'people injured by falling', 'people injured by glass shards from shattered', 'people really suck', 'people who were injured', 'red crescent society', 'red cross', 'red cross emergency numbers', 'red cross flood relief', 'respiratory problems', 'search healthcare', 'snake bite vaccine', 'still aching this morning', 'suffering', 'suffering from', 'surgical capacity', 'sustaining burns', 'the headache', 'the hospital', 'the patients', 'those still recovering', 'those suffering', 'tons of people giving blood', 'took painkiller', 'trauma', 'victims treated', 'victims treated at', 'victims were treated', 'wildfire smoke chokes', 'without serious injury', ' boats washed', ' chopper', ' eastbound lanes', ' miles of roads damaged -no flush order', ' suspended', ' times over speed limit', ' trucks', 'a brief delay', 'a brief suspension', 'a huge gash', 'a huge gash through the road', 'a runaway train carrying light crude oil', 'a train', 'a transportation official', 'a vehicle', 'access opening up along highway ', 'aid chopper targeted', 'aid chopper targeted by militant rockets', 'aircraft', 'aircraft carrier heads to', 'aircraft flying over area', 'airport devastated by fire', 'airport terminal still closed', 'airstrip', 'airways', 'amtrak empire service', 'are cut off', 'army launched steel foot-bridge', 'army trucks', 'arrival section', 'arrival/departure roads closed at', 'at least flights canceled', 'at least terminal', 'boats', 'bridge', 'bridges destroyed', 'brief halt', 'building a bridge', 'bus services', 'buses', 'cargo', 'cars abandoned to flood', 'chopper', 'church and roads impacted', 'closed after the flood', 'coach crash', 'convoy', 'copters', 'costa fleet', 'cracks roads', 'cruise', 'cruise line', 'cruise ship', 'damaged', 'delayed', 
'delayed after shooting at los angeles airport', 'delayed flights at lax', 'delayed for', 'departures area', 'destruction at the national airport', 'devastating train explosion', 'disaster may get worse as ship appears', 'displaced baggage', 'disruptions', 'dozens of cars', 'dozens of fire trucks', 'emergency unit vehicles', 'family needs boat', 'ferries', 'fire has shut', 'fire ravages international arrivals', 'firefighting aircraft crash', 'flash floods cause traffic', 'flash floods cause traffic chaos', 'flights to', 'flood damage to area roads', 'flood strands airline passengers', 'flooded at queen station', 'flooded street', 'flooded streets make driving a nightmare', 'flooded track', 'forces closure', 'grounds flights', 'has closed', 'heavy damage', "helicopter's rotor blades", 'help build an emergency airport to replace', 'huge fire closes', 'huge fire shuts', 'hundreds of tourists stranded', 'impassable roads', 'inspected', 'inundated', 'landslides destroyed roads to towns', 'large blocks of cement ripped off roads', 'large fire shuts', 'los angeles airport', 'major delays', 'major fire closes', 'major fire shuts', 'major victims of car accidents', 'many rows of semi-trailers stranded waiting', 'massive fire closes', "massive fire closes kenya's international", 'massive fire engulfs major', 'massive fire shuts', 'metro-north crash', 'metro-north passenger train', 'military convoy', 'military truck', 'motorists', 'multiple tsa agents', 'navy ship', 'nearest passable road', 'no firetrucks', 'no flights', 'no trains tomorrow morning', 'no visibility', 'not functioning properly', 'not passable', 'not safe for light vehicles', 'over fire brigade vehicles', 'over flights canceled', 'over roads damaged', 'passengers evacuated', 'planes', 'police', 'police car', 'police chopper', 'police cruiser abandoned off memorial drive', 'police helicopter', 'police helicopter crash', 'police helicopter crash landed', 'police helicopter going', 'police helicopter lifted', 
'police helicopter winched', 'police van yesterday', 'racing my bike', 'rail line carrying fracked oil', 'rail safety', 'rail safety study', 'rail shipment', 'rail tanker', "railway's", 'relief goods stuck', 'relief helicopter', 'relief supplies flown to', 'remote area', 'rescue boats', 'resume normal operations', 'resume normal service', 'river walkway', 'road', 'road closure', 'roads', 'roads (destroyed)', 'roads are shredded', 'roads closed', 'roads damaged', 'roads now', 'roads torn up', 'roads turn into rivers', 'roads washed away', 'rope bridge', 'runaway train', 'search trucks hauling debris', 'sends chopper', 'service resumes', 'severe congestion', 'spanish train', 'stranded', 'stranded car', 'stranded communities', 'stranded passengers', 'stranded pilgrims', 'stranded traffic', 'stranded train', 'strands ', 'strands commuters', 'struggling to reach', 'struggling to reach remote areas worst hit', 'submerged towns', 'suspends vessels', 'taxis', 'terminals evacuated', 'the train', 'trains', 'trains today', 'updates travel advisory', 'vessels', 'vessels out-bound', 'wagons', ' flee homes', ' flee wildfire', ' homeless', ' homeless people', ' homes', ' homes are damaged', ' homes confirmed lost', ' homes destroyed', ' homes feared destroyed', ' homes have now been inundated', ' homes left uninhabitable', ' relief camps', ' tents', ' to flee', ' to flee homes', 'shelter has been established', 'army offering their camps', 'army rescue base camp', "army's special medical camp", 'at least homes destroyed', 'being moved to community centre', 'building housing', 'camps', 'can you help with long-term housing', 'collapsing homes', 'destroying homes', 'destroys homes', 'destroys more homes', 'destroys homes', 'estimated homes', 'evacuate homes', 'flood-relief camp', 'flooded house', 'flooding', 'force from homes', 'force people to leave their homes', 'homes at risk', 'homes burned', 'homes destroyed after flash floods', 'homes evacuated', 'homes lost', 'homes lost due', 
'hotel', 'house destroyed', 'house washed away by flooding', 'houses damaged by power . earthquake', 'housing', 'hundreds homes', 'hundreds homes lost', 'hundreds of homes', 'installed', 'many lost house', 'massive flooding has destroyed multiple homes', "navigators ' camps", "navigators' camps", 'nearest shelter', 'need shelter?', 'not full', 'not threatening homes', 'offering housing', 'over shelters', 'people losing homes', 'people use shelter kits distributed', 'relief camp', 'residents allowed to return to damaged homes', 'searching for housing', 'shelter', 'shelter material', 'shelter team', 'shelter team arrived', 'shelter to people', 'tents', 'those who have tragically lost their homes', 'those who lost their homes', 'thousands flee homes', 'thousands homeless', 'to house', 'without shelter', ' rivers converged', 'aid for private water infrastructure', 'bottled water', 'exceeds gallons', 'flash floods hit open-air fracking waste water ponds', 'flood', 'flood water', 'flood waters', 'flooded', 'floods recede', 'full of water', "half of india's rivers", 'have dried up', 'mostly contaminated', 'muddy torrents', 'need clean water', 'no concern over drinking water quality at', 'no drinking water', 'no flush order', 'no water', 'no water and electricity', 'polluting rivers', 'pollution', 'rain harvesting', 'release of flood waters', 'release of waters', 'releases more water', 'releasing more water', 'restoring water', 'river level', 'river pollution', 'river water', 'river waters', 'serious flooding', 'sewage water', 'tainted', 'the disposal', 'undrinkable', 'washing', 'water', 'water filtration systems', 'water purification material', 'water usage', 'webpage', 'arrests', ' girls charged', ' held over deadly brazil nightclub fire', ' more arrests made', ' people shot', ' shooter', 'suspect', 'fireworks', 'stay safe guys', 'a gunman', 'airport shooting', 'another shooting', 'army', 'army confirms dead', 'army deployed', 'army gunship helicopters are bombing the 
area where the earthquake wreaked havoc', 'army helpline no', 'army helpline numbers', 'army is doing', 'army leads rescue operations', 'army medical emergency helpline', 'army official', 'army operations', 'army personnel', 'army rescue operation', 'arrested', 'arsonist', 'arsonists', 'at least we are safe', 'authorities begin investigation', 'authorities got shooter immediately', 'be safe everyone', 'being shot', 'binding building safety agreement', 'bomb-making', 'building owner arrested', 'building safety agreement', 'checking car fuel tanks', 'claims first life', 'congressmen request classified briefing', 'considered armed and dangerous', 'coroner', 'criminal inquiry', 'criminal investigation', "dead suspect's", 'detained', 'disaster preparedness not only for high level security zones', 'driver suspected of', 'fatal shooting rampage', 'federal officer', 'federal source', 'fireworks', 'flood-triggered oil/gas spills', 'guns', 'had more guns to protect it', 'i hope everyone is ok !', 'investigators', 'investigators ask', 'investigators focus', 'investigators say military exercise may be cause', 'investigators search', "investigators search railway's offices", 'keeping safe', 'killing of man', 'lapd confirmed they shot the suspect', 'lawmakers debate gun control', 'lax shooter', 'lax shooter opened fire with assault rifle', 'lax shooter pulled assault rifle', 'lax shooting suspect', 'lax shooting suspect charged with murder', 'lax suspect', 'lax suspect shot', 'looting homes', 'making sure were all safe', 'military communities threatened', 'military live ordinance exercises', 'no alarms', 'no suspect in custody', 'no threat', 'not feeling safe to go anywhere anymore', 'not negotiable with rebels', 'not safe anyone', 'officials confirm more bombs', 'officials investigating', 'officials launch criminal investigation', 'officials say the threat', 'officials to conduct expedited investigation', 'police airport unit', 'police arrest nightclub owner', 'police asking 
people', 'police boss', 'police bravery award', 'police chase', 'police chief', 'police chief gannon confirms shooter pulled an assault rifle', 'police conference', 'police confirm', 'police custody', 'police engaged and neutralize lone shooter', 'police engagement', 'police fear rise', 'police force aerial shot of town', 'police friday confirmed a shooting took place', 'police harassed the relatives', 'police looting', 'police lower death toll', 'police make arrests', 'police officer', 'police officer killed', 'police official tells', 'police press conference expected', 'police probe motive', 'police probe motive of attack', 'police questioned over looting', 'police raid office of protest group', 'police reportedly question', 'police responding to incident', 'police service', 'police source', 'police urging everyone', 'police use personnel dressed', 'pray for the safety of the people', 'prosecutor', 'protected by taking supplements', 'radiation detector', 'raw sewage', 'rcmp statement', 'rcmp urge caution', 'rebels', 'rules out terror', 'safe place', 'safes', 'safety', 'safety advice', 'safety agreement', 'safety concerns', 'safety contact', 'safety debate', 'safety engineer', 'safety laws', 'safety lists', 'safety management system', 'safety margins', 'safety plan', 'safety problems', 'safety rules', 'safety spokesperson', 'safety trooper', 'saving lives', 'search for bomb suspect', 'search for killer', 'security', 'set fire', 'sheriff', 'sheriff dept', 'snake escaping', 'sprinklers', 'stay safe', 'stay safe everyone', 'strictest firearms laws', 'suspect shot', 'take necessary precautions', 'the bombing', 'the killer', "the people's safety", 'the safety', 'the war criminals', "today's airport shooting", 'took axes', 'troops aid', 'under investigation', 'unemployed motorcycle mechanic suspected of', 'unexploded devices', 'victims down', 'victims of the crazed shooter', 'video points to suspect', 'watery escape', 'bushfires live', 'call', 'call helpline at ', 'call 
triple ', 'call us', 'calls flooding', 'can call', 'can call the emergency operations center at {PhoneNumber}', 'can donate online', 'can retweet vital', 'can tweet', 'contact', 'delayed coverage', 'disaster distress helpline', 'disaster distress helpline', 'disaster response program public information', 'donate by text', 'donate on', 'donate pls text redcross', 'donate to ', 'dswd calls', 'emergency line', 'emergency number', 'fema hotline', 'fiber optic cuts', 'flood assistance call', 'flood coverage', 'flood helpline', 'flood helpline no', 'flood helpline numbers', 'flood news', 'fundraiser full number', 'government turns to social media', 'help online', 'hotline', 'hotlines to remember', 'information from twitter', 'iphone shelter view app', 'link fixed', 'live stream', 'local media reporting', 'message prior to', 'needing help may text', 'needing info', 'no connection', 'numbers to call', 'official emergency hotlines compiled', 'optus network update', 'people needing help may text', 'phone', 'phone app', 'phone call', 'phone services cut', 'pnp hotline', 'police casualty bureau number', 'police tweet', 'poor crisis communication', 'radio', 'radio reports', 'relief operations use the hashtag', 'require assistance may call', 'rescue database', 'social media', 'social media chaos', 'social media search', 'social network', 'text', 'text mess', 'texted', 'the link', 'typhoon relief telethon', 'united nations news centre', 'updated searchable database', ' degrees', ' direction of winds', ' more storms expected', ' mph', ' very dangerous storms', 'climatechange', 'fireseason', 'wind_gusts', 'a haze in the sky', 'a high chance of light rain', 'a major cyclone', 'after heavy rain', 'attack by fog', 'authorities are warning of tornadoes', 'avalanche', 'blanketed by haze', 'brutal wildfire season', 'calmer winds', 'can smell the smoke', 'chief meteorologist', 'climate change', 'cooler weather', 'copters grounded by rain', 'deadly cyclone', 'deaths caused by 
extreme-weather events last year', 'disaster zone typhoon', 'earthquake triggers small tsunami', 'earthquake triggers tsunami', 'expect rains', 'expect thunderstorm', 'explosive weather conditions', 'extreme rain', 'extreme rain and flooding', 'extreme rainfall', 'extreme weather', 'extreme weather conditions', 'extreme winds', 'fierce cyclone', 'fighting climate change', 'firefighters facing more wind', 'floods', 'forecast', 'forecasters', 'forecasts predicting', 'fresh mist', 'fresh rains due', 'gusty win', 'hard rain', 'hard rains', 'hard rains hit areas', 'harsh weather', 'have eased', 'haze casts shadow', 'haze free', 'haze hits unhealthy level', 'haze problem', 'haze suddenly comes', 'haze worsens', 'heat', 'heat haze', 'heatwave', 'heaviest rains', 'heavy clouds', 'heavy downpour of rain', 'heavy haze', 'heavy monsoon', 'heavy monsoon rains', 'heavy rain', 'heavy rain expected', 'heavy rain forecast', 'heavy rain started', 'heavy rainfall', 'heavy rainfall has caused flooding', 'heavy rains', 'heavy rains & flooding', 'heavy rains and flooding', 'heavy rains due to sw monsoon', 'heavy rains pounding', 'heavy rainstorm', 'heavy thunderstorms', 'heavy thunderstorms and major floods', 'heavy-intense rains', 'high chance', 'high temps', 'high temps expected', 'high winds', 'hit by its most powerful storm of the year', 'hitting temps of ', 'horrible storm approaches', 'horrible typhoon', 'hurricanes', 'intense rains', 'killer cyclone', 'killer smog', 'less than mph', 'light rain', 'light showers', 'lightening storm', 'lighting', 'long wildfire season', 'low around ', 'low humidity', 'mainly smokey', 'maintains strength', 'making these colorado skies all hazey', 'massive rain storm', 'massive rain storm hits', 'massive rainstorm', 'massive typhoon ravages', 'monsoon', 'monsoon flood', 'monsoon flooding', 'monsoon floods', 'monsoon rain floods', 'monsoon rainfall', 'monsoon rains', 'more areas under storm signals', 'more choked up', 'more rains forecast', 'more 
tornadoes', 'mostly cloudy', 'much-needed rain', 'nasty storm', 'never ending rains hounds us', 'new rains', 'no clouds in the sky', 'no more heavy rain', 'no tsunami', 'non-stop heavy pouring', 'non-stop heavy pouring since : am', 'not related to cloud seeding', 'not toxic sghail sghaze', 'pouring rain', 'quake triggers tsunami', 'quake triggers tsunami warning', 'raging', 'rain', 'rain monsoon', 'rain forecast', 'rain inflows', 'rain likely soon', 'rain pounds', 'rain slows rescue efforts', 'rain triggers flooding', 'rain-making technology', 'rain-to-be', 'rain-triggered floods', 'rain/flooding', 'rain/thunderstorms', 'rainbow', 'raindrops', 'rainfall', 'rainfall advisory', 'rainfall records', 'rainfall warning', 'raining', 'raining hard', 'raining hard here', 'rains', 'rains and flooding', 'rains as tropical cyclone', 'rains forecast', 'rainstorm', 'rainstorm causes', 'rainstorm floods', 'rainy season', 'rare cloud formation', 'ready for the thunder', 'record heat', 'record rain', 'red rainfall advisory is issued', 'red rainfall warning', 'rescue operations hit due to bad weather', 'residents prepare for storm tides', 'rough waves', 'severe rain', 'severe storms', 'severe thunder', 'severe thunderstorm', 'severe weather bulletin tropical cyclone warning', 'significant rain', 'southwest wind to mph', 'starting to rain', 'starts raining', 'still pouring down', 'storm', 'storm clouds rolling', 'storm flash flooding', 'storm floods', 'storm signal', 'storm warning signal', 'storms heading', 'storms/flooding', 'stormy weather', 'strong earthquake measuring .', 'strong haze environment', 'strong storm', 'strong wind', 'strong winds', 'strongest storm to hit mindanao in decades', 'summer is just starting', 'summer rain', 'summer solstice', 'temperatures', 'the haze', 'the rain storm', 'the skies', 'the storm', 'the thunder', 'the typhoon', 'thunderstorm', 'tidal waves', 'tons of rain', 'tornado', 'tornado warning', 'tornadoes', 'torrential flooding rain', 'torrential 
downpour', 'torrential downpours', 'torrential flash floods', 'torrential rain', 'torrential rains pounding the philippine capital', 'tropical depression', 'tropical storm', 'tsunami warnings issued', 'typhoon', 'typhoon haiyan expected', 'typhoon appeal', 'typhoon damage', 'typhoon death toll', 'typhoon deaths', 'typhoon disaster', 'typhoon haiyan', 'typhoon haiyan continues', 'typhoon haiyan heads', 'typhoon haiyan situation', 'typhoon hit your town', 'typhoon hits', 'typhoon mud', "typhoon pablo's return", 'typhoon path', 'typhoon pounds', 'typhoon status', "typhoon's trail", 'typhoons', 'usual snowfall', 'warmer than normal temperatures', 'weather', 'weather conditions', 'weather floods italian island', 'weather safety', 'weather system', 'weather weapon', 'wind', 'wind direction changed', 'wind gusts', 'wind gusts of miles per hour', 'wind never stops', 'wind still vicious', 'wind-driven', 'winds', 'winds turned nw', 'windy conditions', ' crews', ' faster rescue response', ' firefighters will be working today', ' groups working at the emergency operation centre', ' passengers slowly being rescued', 'personnel working the fire', 'rescue work', 'rmg workers', 'wildfire fighters', 'a humanitarian', 'a police helicopter', 'abc newsfirefighters', 'abdullah relief campaign', 'aerials', 'affected people', 'aid', 'aid agencies', 'aid agencies deliver relief to', 'aid begins trickling', 'aid campaigns', 'aid effort', 'aid provided', 'aid train passengers', 'aid workers', 'air nationalguard', 'air force', 'air force academy', 'air force academy officials', 'already there', 'amazing coordination', 'amazing fireman', 'appeals', 'are ready to respond', 'army criticised over australia fire', 'army general', 'army leads', 'army says search for bodies', "army's ambulance waiting to be", 'arriving to assist', 'authorities advise', 'authorities continue to prepare', 'battle wildfires', 'battling', 'battling the horrific wildfires', 'battling wildfires', 'being fought from air 
and ground', 'biggest firms', 'bomb dogs', 'boundaries are % contained', 'briefing', 'c- helping with', 'call for volunteers', 'call for volunteers/donations', 'can also volunteer', 'can anyone tell me how to reach out to the volunteers aiding the relief process', 'can help relief', 'can help support', 'can offer', 'can rescue', 'can somebody call rescueph team', 'can support', 'can volunteer', 'cfa fire crews', 'chief digital officer', 'cmdr hahnenberg', 'coast guard station', 'colorado national guard serviceman', 'colorado springs mayor', 'colorado wildfire effort', 'command center', 'command team', 'community services crews', 'community support centres are open to help citizens', 'construction workers who still have to work', 'consulate general', 'continuing rescue efforts', 'convoy of hope', 'coordinating volunteers', 'cops', 'core team', 'crew response', 'csfd', 'dealing with', 'dealing with the floods', 'dealing with the worst wildfire', 'deputy chief executive', 'destruction complicates relief efforts', 'dfa updates', 'disaster appeal', 'disaster assistance', 'disaster assistance providing support to', 'disaster assistance responds to', 'disaster assistance responds to the colorado flooding', 'disaster centers', 'disaster cleanup', 'disaster management at the red cross', 'disaster management institutions', 'disaster management officials', 'disaster path', 'disaster preparation', 'disaster preparedness', 'disaster ready kit', 'disaster recovery', 'disaster recovery center opened this morning', 'disaster relief', 'disaster relief efforts', 'disaster response', 'disaster response and recovery efforts', 'disaster response program public information map', 'disaster service is monitoring the', 'dispatched trucks loaded with relief goods', 'doctors', 'dswd operation center', 'earthquake relief effort', 'efforts continued friday to cope with fallout', 'efforts gain ground', 'efforts to fight', 'emergency aid', 'emergency crew driving', 'emergency hotline set', 
'emergency operation', 'emergency operations center', 'emergency order', 'emergency services involved', 'emergency services responded heroically', 'emergency team is monitoring', 'emergency teams', 'emergency warning', 'emergency workers', 'essential risk management', 'evac centers', 'evac centres are open', 'evac order', 'evac underway', 'evacuation centre abflood', "evacuation ctr's", 'evacuation efforts today', 'evacuation order', 'evacuation order issued', 'evacuation orders came', 'evacuation zone extended to th', 'evacuation/rescue plans', 'evacuations were declared', 'faster reaction', 'fbi', 'fbi director', 'federal aid', 'federal fighters', 'federal firefighters', 'federal help', 'feds', 'fema continues to support response', 'fema disaster assistance', 'fight the fires', 'fighters who are bravely combating the bushfires', 'fighting these fires', 'fights fires', 'fire authority', 'fire chief', 'fire dept', 'fire effort', 'fire info officer', 'fire managers update highparkfire', 'fire retardant', 'fire zone', 'fire-fighting plane tha crashed', 'fire', 'firefighters', 'firefighters again will battle inferno-like conditions', 'firefighters assist', 'firefighters battling colorado wildfires', 'firefighters battling these sudden shocking bush fires', 'firefighters continue rescue efforts', 'firefighters douse', 'firefighters fighting the fires', 'firefighters gain upper hand', 'firefighters plan', 'firefighters search door-to-door', 'firefighters search door-to-door for bodies', 'firefighters show spirit', 'firefighters will be working today', 'firefighters work', 'firefighting', 'firefighting c-', 'firefighting costs', 'firefighting efforts', 'flood cleanup', 'flood flag maps', 'flood monitoring', 'flood recovery', 'flood recovery efforts', 'flood relief', 'flood relief effort', 'flood rescue', 'flood treatment', 'flood-relief operations', 'flooding response', 'forced evacuation has begun', 'forcing rescue work to be suspended', 'forcing rescuers to abandon 
their search', 'fully contained', 'gained the upper hand', 'gov', 'gov hickenlooper', 'gov to help relief & rescue', 'gov. hickenlooper', "gov't", "gov't issues emergency order", "gov't offices", 'government action', 'government advisories', 'government agency', 'government declared a state of emergency', 'government information', 'government is accused of slow response', 'government is working', 'government official', 'government relief effort', 'government response', 'government slow', 'governor', 'governor general', 'govt', 'govt agencies', 'govt policy', 'govt scientists', 'hardworking power crews work', 'heavy demand for fire fighting', 'heli', 'helicopters', 'help is needed down there', 'help is on its way', 'help is on the way', 'help my friends', 'help my province', 'help rescue', 'help someone', 'help support', 'help survivors', 'help the victims of the deadly earthquake', 'help us', 'help us by planning ahead', 'helping', 'human chain', 'humanitarian efforts', 'hundreds await rescue', 'hundreds awaiting rescue', 'industry response', 'information meeting', 'islands emergency crews head to tsunami devastated villages', 'joining relief efforts', 'jumped firefighters{SpecialChar}; perimeter', 'light helicopters', 'local engineer', 'local fire depts', 'local flood affected and the army', 'looking to help', 'massive flood clean-up', 'massive recovery', 'mayor', 'mayor says', 'medical and rescue teams', 'medical team headed', 'military hospital', 'mmda teams', 'more forces coming today', 'most destructive fire nears containment', 'nasa', 'nasa scientists', 'national calamity', 'national disaster risk reduction and manag', 'national security issue', 'nato mission', 'nears containment', 'need help', 'need help asap', 'need to rapidly deploy', 'need your help', 'needing immediate rescue', 'needs for rescuing', 'needs rescue', 'needs to be rescued', 'night shift', 'no containment', 'no help', 'no rescuers', 'none but army', 'nypd', 'nypd calling level mobilization', 
'nypd officer', 'officers', 'officers appeal', 'official', 'official announcement', 'official confirms', 'official confirms deaths', 'official says', 'officials appeal', 'officials confirm', 'officials confirm police incident', 'officials have declared a state of emergency', 'officials honor firefighters', 'officials now confirm', 'officials now say', 'officials rushed body bags', 'officials said', 'officials say', 'officials say the threat to the blue mountains', 'operations', 'our emergency services', 'overflowing with help', 'peace team', 'people begin to dig sand', 'people rescued', 'perimeter map of the flagstafffire', 'pilgrims rescued', 'please coordinate with', 'please evacuate', 'police commissioner', 'police crucial information', 'police department', 'police gain access to site', 'police headquarters tonight', 'police helicopter crashed friday night', 'police helicopter crashing', 'police incident', 'police marine unit', 'police officer dead', 'police officers who were killed', 'police seeking to interview', 'possible flooding', 'post-quake relief effort', 'president', 'providing assistant', 'ready assist ', 'ready for action after earthquake', 'ready to assist', 'ready to dive', 'ready to evacuate if we need to', 'ready to respond', 'recovery operation', 'red cross flood recovery guide', 'red cross relief', 'red cross relief efforts', 'red flag', 'redcross', 'releases crisis response map', 'relief', 'relief & recovery', 'relief aid', 'relief aid to the tiny island', 'relief and rescue work', 'relief assistance', 'relief campaigns', 'relief campaigns to reach the affectees sooner', 'relief camps', 'relief centers', 'relief donation centers', 'relief drive', 'relief effort', 'relief effort begins after', 'relief efforts', 'relief efforts ?', 'relief efforts have stalled due to', 'relief efforts ramp', 'relief goods', 'relief goods operation holcimphils', 'relief goods will be accepted', 'relief guides', 'relief helicopters', 'relief help', 'relief 
management', 'relief map', 'relief mission', 'relief mission today', 'relief operation', 'relief operation underway', 'relief operation video', 'relief ops', 'relief ops continue', 'relief ops will', 'relief ops will start at am today', 'relief packs', 'relief politics', 'relief projects', 'relief steadily starting to flow', 'relief supplies', 'relief team', 'relief to thousands', 'relief volunteer', 'relief work', 'relief work carried out', 'rescue', 'rescue aid', 'rescue assistance', 'rescue crews', 'rescue dog', 'rescue efforts', 'rescue efforts today', 'rescue operation', 'rescue operations', 'rescue request', 'rescue team', 'rescue teams', 'rescue trapped people', 'rescue workers', 'rescue workers have freed', 'rescue workers working', 'rescuers battle australia floods', 'rescuers rush to evacuate stranded communities', 'responding', 'response team', 'retardant line', 'rubber gloves/boots', 'rush to help', 'scavenging for survival', 'search and rescue cameras', 'search and rescue operation', 'search crews clawed', 'search ongoing', 'search-and-rescue effort', 'sends hot shots', 'serious prep', 'shelter locations', 'sheriff court', 'shifted', 'slow response', 'staff and volunteers', 'starting to get under control', 'stranded passengers rescued last night', 'struggles to deliver quake relief', 'struggles to help', 'tests firefighters', 'the army', 'the firefighters', 'the govt', 'the indian army', 'the red cross', 'the skipper', 'those fighting', 'those fighting those horrible fires', 'those who want to volunteer', 'thousands of firefighters', 'thousands of firefighters who have so bravely fought fires', 'to help in rescue work', 'to help support efforts', 'to help the victims of the earthquake', 'top priority over', 'troops aid quake', 'troops aid quake victims', 'typhoon aid', 'typhoon haiyan aid', 'typhoon search-and-rescue efforts', 'uk rescue aid rejected', 'un agencies', 'unicef', 'unicef staff', 'uninsured', 'urgent need for', 'usgs', 'usgs model 
estimates % chance', 'usgs reports . magnitude earthquake off the coast', 'vandenberg sends hot shots', 'victims await relief help', 'victims need your support', 'victims r gtting more desperate', 'volunteer heads', 'volunteered', 'volunteers', 'volunteers aiding the relief process', 'volunteers and donations', 'volunteers and staff', 'volunteers help repack relief goods', 'volunteers in action', 'volunteers map', 'volunteers needed', 'volunteers right now', 'volunteers to help', 'volunteers who want to help', 'were diverted to help work', 'who r battling this horrible fire', 'wildfire fight', 'wildland firefighters', 'without borders is helping', 'working to contain spot fires', 'we are watching it', 'witness appeal', 'are affecting tons of people', 'flood impact', 'impact on ministries/churches', 'typhoon bears', 'typhoon bopha/pablo impact', 'typhoon haiyan aftermath', "'extreme' flames", "'monster' colorado fire", ' acres burned', ' bombs', ' emergencies', ' evac notices', ' explosions', ' explosions at the finish line', ' fires', ' fires burning', ' loud pops', ' miles away', ' more islands emerge', ' people', ' percent containment', ' powerful explosions detonated', ' ricther', ' schools next to fertilizer plant', ' villages damaged by tsunami wave', ' wildfires', '. -magnitude earthquake hits', '. earthquake', '. earthquake hit', '. earthquake hits', '. quake hits', '. earthquake', '. earthquake hits', '. earthquake recorded', '. earthquake struck', '. intensity', '. magnitude', '. magnitude earthquake', '. magnitude earthquake', '. magnitude earthquake rattles', '. magnitude quake', '. quake', '. quake hits', '. quake strikes', '. quake that struck', '.-earthquake', '.-earthquake hits', 'mag. 
quake', 'magnitude earthquake hits', 'magnitude quake', 'magnitude quake hits', '.-magnitude quake strikes', '.m quake', '.magnitude earthquake', 'bigelkmeadowsfire', 'boulderfire', 'breaking', 'breaking news', 'breakingnews', 'bushfire', 'cebuearthquake', 'cofire', 'cofires', 'coloradofires', 'coloradowildfire', 'coloradowildfires', 'earhquake', 'earthquake', 'earthquake .', 'earthquake .', 'earthquake hit', 'earthquake m .', 'earthquake m.', 'earthquake magnitude', 'earthquake magnitude .', 'earthquake magnitude m .', 'earthquake magnitude mb .', 'earthquake mb .', 'earthquake measuring .', 'earthquake measuring . hits', 'earthquake measuring . struck', 'earthquakeph', 'eartquake', 'estesparkfire', 'fire', 'fire extinguish', 'fire heads up', 'fires', 'flagstaff fire', 'flagstafffire', 'flagstafffire fire', 'flood disaster', 'haze fires', 'highparkfire wildfire', 'highparkfire burn area', 'highparkfire evacuation orders', 'highparkfire explodes to acres', 'highparkfire live briefing', 'highparkfire update', 'iaf rescue chopper crashes', 'jkiafire fire', 'jkiafire fire contained', 'lastchancefire', 'littlesandf', 'littlesandfire', 'magnitude', 'magnitude. earthquake', 'quake breaking', 'quake_watch', 'rescueph team', 'scary', 'shocking earthquake', 'spencerfire', 'springerfire', 'statelinefire', 'tourist attractions reopen', 'treasurefire', 'tsunami', 'visayanearthquake', 'visayasquake', 'waldocanyonfire', 'waldofire', 'weberfire', 'wildfire', 'wildfire activity', 'wildfire can harm', 'wildfire evacuations', 'wildfires', 'woodlandheightsfire', ' acres now', 'a . earthquake', 'a . magnitude earthquake', 'a . magnitude earthquake', 'a . 
magnitude earthquake hits', 'a .-magnitude', 'a .-magnitude earthquake', 'a .-magnitude earthquake struck', 'a big earthquake', 'a blast', 'a blaze', 'a burning smell around orchard road', 'a devastating earthquake', 'a disaster', 'a fertilizer plant explosion', 'a fierce wildfire', 'a fiery explosion', 'a fire', 'a frightening scene', 'a grassfire', 'a high flood peak', 'a long night aftershocks', 'a magnitude', 'a magnitude-. earthquake', 'a magnitude-. earthquake struck', 'a major . earthquake', 'a major ph earthquake', 'a major earthquake', 'a massive ', 'a massive wildfile', 'a plant explosion', 'a shooting', 'a spectacular meteor blast', 'a strong .-magnitude earthquake', 'a strong aftershock rattled', 'a strong earthquake', 'a stubborn and towering wildfire', 'a temblor', 'a terrible news', 'a terrible train accident', 'a very strong earthquake', 'about minutes ago', 'activated', 'active volcano', 'advice about prepping for a fire', 'affected areas', 'affected clients', 'after earthquake', 'after the earthquake', 'aftershock experience at :', 'aftershocks', 'aftershocks continue', 'aftershocks continue to rock', 'aftershocks have followed', 'aftershocks hit', 'aftershocks strike', 'air is smoky', 'alert meteor', 'all the fires', 'amazing sound shockwave', 'amends fire ban', 'an earthquake', 'an earthquake measuring . 
magnitude', 'an earthquake struck', 'announcements', 'another explosion', 'another metro north train derailed', 'another quake', 'another shock', 'are crazy', 'are opening at p', 'are spreading', 'asks', 'at least bush fires are raging', 'at least bush fires rage', 'at least large oil spills have been confirmed', 'authorities asking people to limit cell phone', 'authorities request texting only keep phone lines', 'awful fires', 'backing up to open space', 'badly affected', 'basic crisis', 'be alert', 'be aware', 'be careful', 'be safe', 'be shocked', 'before the crash', 'before travelling to the area', 'being asked to turn off all non-essential lights', 'being declared', 'big blaze', 'big danger zone', 'big disaster', 'big earthquake', 'big fire', 'big fire came', 'big flood', 'big pacific quake', 'big quake', 'big quake rattles', 'big wild fires', 'bigger flood', 'bigger', 'stronger', 'more devastating blazes', 'blaze', 'blaze causes closure', 'bloodied clothes', 'breaking news', 'breaking update', 'breathtaking colorado wildfire', 'brutal daylight murder', 'brutal the flood', 'brutal wildfires', 'brutal wildfires spread', 'building ablaze after explosion', 'building fire', 'building-collapse toll tops ', 'burglars', 'burn across', 'burned colorado home', 'burning', 'burning near', 'bush fire crisis', 'bush fire situation continues to worsen', 'bush fire zone', 'bush fires burning', 'bush fires flooding', 'bush-fires', 'bushfire blazes', 'bushfire disaster', 'businesses are under threat', 'call center for residents', 'call for', 'call for moratorium', 'calls for action to reduce emissions', 'can harm', 'can return', 'cascading water', 'category strength', 'chemical attack', 'chemical fire', 'chopper crashed', 'clear flammable debris', 'collapse blamed on swampy ground', 'colorado fire', 'colorado springs fire', 'colorado wildfire', 'colorado wildfire front lines', 'colorado wildfire!!!', 'colorado wildfires', "colorado's waldo canyon fire", 'come to an end', 'come 
under control', 'comet fragment', 'community recovery hotline {PhoneNumber}', 'costa rica earthquake', 'costa rica quake', 'country calling', 'crashed', 'crazy blackouts', 'crazy highparkfire', 'crazy fire', 'crazy fires', 'crazy wildfires', 'creates new island', 'customers affected by', 'damage', 'deadly blast', 'deadly crash', 'deadly earthquake', 'deadly earthquake hits', 'deadly earthquake strikes', 'deadly fertilizer plant blast', 'deadly fires', 'deadly flood', 'deadly flood waters rise', 'deadly flooding', 'deadly flooding strikes', 'deadly floods', 'deadly floods batter', 'deadly quake', 'deadly quake hits', 'deadly quake strikes', 'deadly tsunami', 'dealing with the horrific floods', 'death toll', 'death toll climbs', 'death toll climbs to at least ', 'death toll continues to climb', 'death toll reaches ', 'death toll rises', 'death toll tops ', 'debris fell', 'declares disaster', 'depth', 'depth km', 'depth:.km', 'destruction', 'destructive blaze', 'deteriorating conditions', 'devastated by a terrible explosion', 'devastating typhoonhaiyan', 'devastating blaze', 'devastating colorado wildfires', 'devastating fire', 'devastating flood', 'devastation', 'devastation feared', 'devasting update', 'disaster booklet', 'disaster declared', 'disaster flood', 'disaster hits australia?셲 east coast', 'disaster policy', 'disaster prone country', 'disaster situation', 'disaster situation declared', 'disaster strikes', 'disaster zone get notices', 'disaster zones', 'disastrous earthquake', "don't cancel co springs reservations", 'doubles in size', 'dozens of buildings destroyed', 'earthquake', 'earthquake . magnitude', 'earthquake alert', 'earthquake at .', 'earthquake destroys', 'earthquake disaster', 'earthquake heads up', 'earthquake hits', 'earthquake hitting', 'earthquake magnitude .', 'earthquake magnitude ..', 'earthquake measuring', 'earthquake measuring . magnitude', 'earthquake measuring . magnitude hits', 'earthquake measuring . 
magnitude strikes', 'earthquake rattles', 'earthquake rocked', 'earthquake strikes', 'earthquake that flattened', 'earthquake that hit', 'earthquake update', 'earthquakeph', 'emergency concerns', 'emergency warning issued', 'epicenter', 'epicentre', 'evacuate immediately', 'evacuation', 'evacuation areas abflood', 'evacuation order due to flash flooding', 'evacuation recommended', 'evacuees get out', 'excellent news!', 'expect death toll', 'experienced magnitude . earthquake', 'experienced strong earthquake', 'explosion', 'explosions', 'explosive growth', 'extreme fire danger warning', 'extreme flooding', 'fatal cyclone', 'fatalities now reported at texas plant explosion', 'ferocious fire', 'fierce', 'fierce wildfire', 'fight', 'fire', "fire ' kills last survivor", 'fire & evac information', 'fire area', 'fire ban', 'fire blazes', 'fire breaks out', 'fire burning', 'fire can now be seen', 'fire claims first victim', 'fire consuming homes', 'fire coverage', 'fire danger', 'fire danger ??', 'T01', 'fire danger areas', 'fire danger index', 'fire danger is high', 'fire danger update', 'fire danger warning', 'fire department', 'fire disaster', 'fire displaces hundreds', 'fire doubles in size', 'fire evacuation', 'fire evacuation area', 'fire ground', 'fire has destroyed houses', 'fire just broke out', 'fire kills', 'fire lines', 'fire moving east', 'fire news', 'fire over', 'fire rages', 'fire ravages', 'fire spreads to', 'fire sweeps', 'fire threatens', 'fire tragedy', 'fire under control', 'fire update', 'firefighters struggle with refinery blaze', 'fires', 'fires are destroying', 'fires are getting out of hand', 'fires are out of control', 'fires bad', 'fires blazing', 'fires burn', 'fires burn out of control', 'fires exporting smoky haze', 'fires going on', 'fires r serious', 'fires rage', 'fires raging', 'flagstaff fire', 'flash flood', 'flash flooding', 'flash flooding continues', 'flash flooding tonight', 'flash flooding traps', 'flash floods', 'flash floods kill 
at least ', 'flash floods swamp', 'flash floods wash away homes', 'flash floods wreaking havoc', 'flattened villages', 'flood advisory', 'flood affected', 'flood affected communities', 'flood affected families', 'flood alert', 'flood assistance', 'flood chaos', 'flood control', 'flood crisis', 'flood crisis deepens', 'flood crisis hits', 'flood death toll', 'flood disaster', 'flood emergency', 'flood evacuation', 'flood fury', 'flood helpline no', 'flood inundation', 'flood level', 'flood maps', 'flood now', 'flood peak', 'flood photos', 'flood prevented', 'flood protocol', 'flood ravaged', 'flood repairs', 'flood reports', 'flood resilient', 'flood risk', 'flood situation', 'flood situation continues unrelenting', 'flood subside', 'flood toll', 'flood update', 'flood warning', 'flood warning up', 'flood washing a building away', 'flood watch', 'flood water rises', 'flood waters force evacuations', 'flood waters move', 'flood zone', 'flood-affected people', 'flood-prone areas', 'flooded areas', 'flooded badly', 'flooded regions', 'flooded with rain', 'flooding chaos', 'flooding disaster', 'flooding disaster hits', 'flooding occurring', 'flooding potential', 'flooding river', 'flooding triggers', 'flooding warning', 'force . tremor hits', 'forces ', 'forces evacuation', 'forces town evacuation', 'forest fires', 'freaky', 'fresh flooding', 'frightening shooting', 'full containment expected', 'gas tanks', 'getting stronger', 'good sized storm', 'got hitted by', 'government releases close-up', 'governor lifts fire ban', 'growing increasingly worried major bushfires', 'gunfire', 'had tons w/no safety', 'had to evacuat', 'has been going on', 'has been hit by a . 
mag earthquake', 'has floods/bushfires', 'has hit', 'has rocked', 'has triggered', 'havoc', 'hazardous air', 'hazardous to be outside', 'hazards', "haze approaches ' danger zone", "haze approaches ' unhealthy ' level", 'he earthquake', 'headed toward', 'heavy', 'heavy activity', 'heavy flood damage', 'heavy flooding', 'heavy flooding hits', 'heavy floods', 'heavy floods hit', 'heavy rain & floods', 'heavy rain brings floods', 'heavy rain brought flooding', 'heavy smoke', 'helicopter tragedy', 'high fire alert', 'high flood', 'high flood peak', 'high park fire', 'high tide', 'high water', 'higher ground', 'historic colorado wildfire', 'historic flooding', 'hit', 'hit by . earthquake', 'hit by . magnitude earthquake', 'hit by a . magnitude quake', 'hit by fires', 'hit by flash flooding', 'hit by floods', 'hit by massive . -magnitude earthquake', 'hit by meteor shower', 'hit by second major earthquake', 'hit by the killer earthquake', 'hitting', 'homeowners', 'horrible collapse', 'horrible disaster', 'horrible fire', 'horrible fires', 'horrible flash floods', 'horrific fire', 'horrific floods', 'horrific nightclub fire', 'horrific photos', 'horrific shooting', 'horrific wildfires', 'how bad the wild fire is', 'huge blast', 'huge blaze', 'huge disaster', 'huge earthquake', 'huge fire', 'huge flood', 'huge quake', 'hurricane-force winds', 'immediate danger', 'in both north', 'south directions', 'in danger of devastating flood', 'in fiamme', 'info', 'info on', 'information center', 'information here', 'information map', 'infrared imagery of meteor', 'injures others at lax airport', 'intense flooding', 'intensifies', 'into panic', 'inundates mountain', 'is ridge away', 'is getting much worse', 'is in imminent danger', 'is now % contained', 'is now at acres', 'is on fi', 'is on fire', 'is out of control', 'island emerges after . magnitude earthquake', 'islands destroyed after . quake triggers tsunami', 'islands destroyed by . 
quake', 'islands tsunami leaves dead', "it's going to be a long night aftershocks", "it's not safe", 'jolts', 'jumps to ', 'just hit', 'just in', 'keep safe everyone', 'killer', 'killer earthquake', 'killer earthquakes that struck', 'killer floods', 'killer floods triggered', 'landslides', 'landslides threaten', 'landslides triggered by heavy rains', 'large . magnitude earthquake', 'large earthquake strikes', 'large blaze', 'large explosion reported at fertilizer plant', 'large fire', 'large fire rages', 'large flood protection barrier', 'large quake', 'large surge of water coming down', 'large wildfire burning', 'latest developments', 'latest flood warning', 'latest situation', 'latest update on', 'lax shooter unconscious after being shot', 'leave them on', 'leaves without power', 'leaves community devastated', 'leaves destruction', 'lifts statewide fire ban', 'local state of emergency', 'lost the count', 'm .', 'km nnw of', 'm', 'm quake', 'm. earthquake', 'mag . earthquake', 'magnitude', 'magnitude earthquake', 'magnitude .', 'magnitude . earthquake', 'magnitude . earthquake hits', 'magnitude . earthquake recorded', 'magnitude . quake', 'magnitude .m', 'magnitude was reported as .', 'magnitude- . earthquake', 'magnitude-. earthquake', 'magnitude-. earthquake', 'magnitude-. earthquake hits', 'major . magnitude quake strikes', 'major aftershock', 'major blast', 'major crime scene', 'major earthquake', 'major earthquake hits', 'major earthquake jolts', 'major earthquake strikes', 'major fire', 'major flood', 'major flood report', 'major flooding', 'major philippines quake', 'major quake', 'major quake hits', 'major quake shakes', 'major quake strikes', 'mass wildfires', 'massive . -magnitude earthquake', 'massive . earthquake', 'massive . 
magnitude earthquake', 'massive aftershock rocks quake-stricken', 'massive blast', 'massive blast at', 'massive deadly earthquake', 'massive deadly earthquake', 'massive earthquake', 'massive earthquake hits', 'massive earthquake m . strikes', 'massive earthquake rocks', 'massive explosion', 'massive fire', 'massive fire burn', 'massive fire engulfs', 'massive fire erupts', 'massive fire rages', 'massive fire rips', 'massive flood', 'massive flooding', 'massive floods', 'massive floods hit', 'massive meteor strike', 'massive meteor strike confirmed', 'massive quake', 'massive quake strikes', 'massive summer floods', 'mayor of high river asks province', 'mayor says city centre destroyed', 'medical supplies/prescriptions contact', 'meteor explosion', 'meteor fall', 'meteor fragments injuring people', 'meteor hit', 'meteor showers', 'meteorite', 'meteors', 'miles of smoke', 'millions battle manila flood', 'minacciata', 'monsoon rains cause massive landslides', 'monster fire', 'monster wildfire', 'more evacuated', 'more fires', 'more information', 'more spills', 'more updates', 'most dangerous piece of public infrastructure', 'most destructive wildfire', 'mudslide', 'multiple devastating wildfires burning', 'multiple wildfires currently burning', 'national crisis', 'nearby areas', 'neighbourhood destroyed by wildfires', 'new colorado wildfire erupts', 'new fire', 'nightclub fire', 'no boil water advisory', 'no california tsunami threat', 'no flooding', 'no more tsunami warning', 'no power', 'no problem', 'no respite', 'no sign of waldofire', 'no threat of', 'no threat of a tsunami', 'no tsunami expected', 'no tsunami warning', 'no tsunami warning issued', 'no warning', 'no widespread tsunami hazard', 'no word on victims', 'not expected to get stronger', 'not hazardous yet', 'now % contained', 'number coding advisory', 'obey signs', 'offered survivors', 'official says an earthquake', 'official statement', 'officials discuss the deadly train crash', 'oil train blast', 
'oil train explosion', 'older adults', 'on fire', 'on what to do', 'only exit shut', 'orange earthquake alert', 'other emergencies', 'other warnings ended', 'our lands starts shaking', 'out-of control', 'over aftershocks noted', 'over aftershocks recorded so far after . magnitude earthquake', 'over remain out of control', 'people affected', 'people affected by the flooding', 'people are mostly calm', 'people complaining', 'people seeking', 'phone battery', 'photo gallery of the multiple devastating wildfires burning', "pictures of colorado's worst fire season", 'please avoid', 'police calling', 'police helicopter crash victims', 'police helicopter crashes though the roof', 'police helicopter falls', 'police helicopter involved', 'possible arson', 'possible floods', 'power . earthquake', 'power equal to - atomic bombs', 'power outage', 'power outages', 'power outages all over', 'power outages galore !!', 'powerful', 'powerful . earthquake', 'powerful . earthquake strikes', 'powerful . magnitude earthquake', 'powerful quake struck', 'powerful earthquake', 'powerful magnitude- . 
earthquake', 'powerful quake', 'powerful quake strikes', 'powers back', 'prelim mag of .', 'presidential disaster declaration', 'pretty bad', 'prompting evacuation warnings', "prompting return to ' watch", 'prompts evacuations', 'quake', 'quake approaches', 'quake at am', 'quake epicentre', 'quake hit', 'quake hits', 'quake measures .', 'quake toll', 'quake toll leaps', 'quake toll reaches ', 'quake toll rises to ', 'quake update', 'quake with prelim mag of .', 'quake-triggered tsunami', 'raging colorado fire', 'raging colorado wildfire', 'raging colorado wildfires', 'raging fires', 'raging river', 'raging wildfire', 'raging wildfires', 'rail accident', 'rain floods', 'rain&flood situation', 'raise awareness', 'raised serious concerns', 'rapid flooding', 'rattling buildings and cutting power', 'reach for things blindly under water', 'ready for the worst', 'record colorado wildfire', 'record flooding', 'record-breaking flood', 'record-high rainfall dumped', 'record-smashing rainfall sparks flash floods', 'red deer river', 'red quake alert', 'relieved survivors tell', 'reporting another fire', 'residents clean-up', 'residents remain inside', 'residents should remain indoors', 'residents sign up county alerts', 'residents using refrigerators', 'reveal the scale', 'river foreshore', 'river rises', 'rivers converged', 'rivers keep rising', 'roads flooded', 'roads will be closed', 'rocked', 'rocked by', 'rocked by . 
richter earthquake', 'rolling outages', 'scammers', 'scarryyyyy', 'scary', 'sea water level', 'search ends', 'search for cause', 'search for clues', 'search suspended', 'seasonal rains have triggered massive flooding', 'second major earthquake', 'set off fires', 'several fires explode', 'several tips', 'severe fire danger', 'severe flood', 'severe flood damage', 'severe flooding', 'severe flooding hits', 'severe floods', 'severe floods expected', 'severe floods strike', 'severely affected by', 'severity', 'severity unclear', 'shakes', 'shocking information of the meteor', 'shocking twitter pictures', 'shut-off valves', 'significant flooding', 'significant loss', 'significant outages', 'significant preliminary . magnitude earthquake', 'significant rain and flooding', 'simple fire', 'sirens', 'smells like smoke', 'smoke', 'smoke billows', 'smoke covers', 'smoke from colorado wildfires', 'smoke haze', 'smoke mixing with rain', 'snakes', 'so consistent', 'starts shaking', 'stay safe!', 'still burning', 'still experiencing aftershocks', 'still fairly severe', 'strikes', 'strong aftershock', 'strong aftershock rattled', 'strong earthquake', 'strong earthquake hits', 'strong earthquake shook building', 'strong earthquakes', 'strong floods', 'strong quake', 'strong quake hits', 'strong quake rattles', 'strong quake shakes', 'strong winds likely to keep heightened fire danger', 'struck', 'struck by a major earthquake', 'stunning flooding', 'sudden fall in sea water level', 'suffering with floods', 'terrible earthquake', 'terrible earthquake that hit', 'terrible flooding', 'terrible news', 'terrifying colorado wildfires', 'terrifying fire', 'terrifying picture', 'terrorist attack', 'terrorizes', 'that hit', 'the . 
magnitude', 'the .-magnitude earthquake struck', 'the earthquake', 'the flagstafffire', 'the highparkfire', 'the aftershocks', 'the best way to protect yourself', 'the deadly earthquake', 'the devastation', 'the earthquake', 'the earthquake struck', 'the explosion', 'the fires', 'the fires are spreading rapidly', 'the flooding', 'the magnitude-. earth', 'the major', '.-magnitude earthquake', 'the quake', 'the shaky earthquake', 'the terrible earthquake', 'the updates', 'thefiretracker', "those affected by last night's tragic event", "those affected by today's earthquake", 'those fires', 'those impacted', 'those who are affected', 'those who have been impacted', 'thousands (killed', 'thousands displaced', 'thousands missing', 'thousands of affected', 'thousands of affected communities', 'thousands of tourists', "threaten to fan 'extreme' flames", 'threatened by flash floods', 'threatened by the wildfire', 'thunderstorm advisory', 'tips for disaster preparedness', 'to allow charcoal grills', 'to burn', 'to get under control', 'to play with', "today's earthquake", 'toll continues rising', 'took place today', 'top fire information', "toronto's flash floods", 'torrential rain', 'torrential rains', 'tragedy gets out of hand', 'tragedy struck', 'tragic accident', 'tragic earthquake', 'tragic fire', 'tragic flooding', 'tragic nightclub fire', 'train carrying oil', 'travel advice', 'tremor', 'tremor felt', 'tremors', 'trying to be careful', 'tsunami', 'tsunami hazard', 'tsunami hit', 'turn on sprinklers', 'typhoon haiyan awakens', 'typhoon haiyan hits', 'typhoon haiyan is about to destroy', 'typhoon haiyan smashes', 'unbelievable', 'updated collection', 'updated emergency hotlines', 'updated info', 'updates', 'updates conditions following an . 
-magnitude earthquake', 'updates on', 'use caution', 'usgs earthquake', 'usgs reports', 'usgs reports a magnitude .', 'usgs said quake struck', 'victims and survivors', 'victims of car accidents', 'victims urged', 'volcano', 'waldo canyon fire', 'war', 'watch for', 'waters have hit', 'waters have receded', 'waters rise', 'waters rising fast', 'west of downtown', 'western wildfires persist', 'when suspension will be lifted', 'widespread devastation', 'widespread flooding', 'widespread tsunami hazard', 'wild fire', 'wild fires', 'wildfile raging', 'wildfir', 'wildfire', "wildfire ' moving fast '", 'wildfire brings evacuations', 'wildfire burning', 'wildfire burns homes', 'wildfire continues to ravage', 'wildfire continues to threaten', 'wildfire doubles in size', 'wildfire explodes in size', 'wildfire forces evacuations', 'wildfire front lines', 'wildfire most destructive', "wildfire of 'epic proportions'", 'wildfire rages', 'wildfire raging', 'wildfire smoke blows', 'wildfire smoke smudges skies', 'wildfire tragedy', 'wildfire worsens', 'wildfires', 'wildfires blaze', 'wildfires burn', 'wildfires continue', 'wildfires explained', 'wildfires forcing evacuations', 'wildfires getting worse', 'wildfires moved', 'wildfires rage', 'wildfires ravage', 'wildfires spread', 'wildfires spread in', 'wildfires sweep', 'wildfires sweeping', 'wildfires threaten', 'wildfires turn deadly', 'wildfires wreck', 'wind blows', 'wind-driven colorado wildfire', 'wind-driven inferno', 'worsening', 'worst flood in decade', 'worst colorado wildfire', 'worst disaster', 'worst fire conditions', 'worst flooding', 'worst flooding in decades', 'worst wildfire', 'wreaked havoc', "yesterday's flood", 'young white male with rifle', ' cyclist', ' flood victims', ' injuries', ' others wounded', ' people injured', ' residents', ' sustained injuries', ' wounded', ' wounded evacuated/treated', 'wildfire victims', 'affected', 'affected by', 'aid survivors', 'are suffering', 'are suffering from', 'at least 
injured', 'at least of the injured are children', 'at least people injuried because of the meteor shower', 'bombing victims', 'building collapse victims', 'calamity victims', 'casualties', 'casualties feared', 'casualties reported', 'collapse victims', 'dead', 'disaster victims', 'dozens of people seriously injured', 'dozens seriously injured', 'dswd staff sustained major injuries', 'earthquake survivors', 'earthquake victims', 'earthquake victims here', 'explosion victims', 'fire victims', 'flood victims', 'help flash flood victims', 'hundreds feared hurt', 'hundreds hurt', 'hundreds likely injured', 'hundreds people hurt', 'injured', 'injured ', 'injured people', 'injured ', 'injured workers', 'injured survivors', 'injured train survivors', 'lax shooting leaves several people wounded', 'leaves several people wounded', 'many more injured', 'more victims are', 'more victims bringing total identified', 'multiple people injured', 'multiple victims', 'no injuries', 'over injured', 'over injured after fertilizer plant explosion', 'over injuries', 'over injured', 'police confirm names of the last ', 'poor victims', 'quake survivor', 'quake survivors', 'quake victims', 'quake victims of', 'scrambles to reach quake victims', 'search for possible other victims', 'search for survivors', 'search for victims', 'suffering people', 'the flood victims', 'the flooding victims', 'the lives impacted', 'the shooting victims', 'the survivors', 'the victims', 'the victims & families of the navyyardshooting', 'the victims and families', 'the victims of . 
earthquake last tuesday', 'the victims of bagyongmaring', 'the victims of earthquake', 'the victims of earthquake', 'the victims of the', 'the victims of the savar tragedy', 'the victims of the disaster', 'the victims of the drastic quake', 'the victims of the explosion', 'the victims of the floods', 'the victims of the helicopter crash nd the victims of the train crash', 'the victims of the horrible explosions', 'the victims of the magnitude .', 'the victims of the massive earthquake', 'the victims of the train accident', 'the victims of the typoon maring', 'those injured', 'those who are injured', 'typhoon haiyan victims', 'victims', 'victims ...!!!!', 'victims & families of this tragic event', 'victims affected', 'victims and families affected', 'victims and families of', 'victims and families of the bostonmarathon tragedy', 'victims and their families', 'victims and their families/friends that were affected', 'victims and those affected', 'victims and thousands affected', 'victims confirmed', 'victims feared', 'victims impacted', 'victims include loving dad', 'victims including', 'victims injured', 'victims need relief', 'victims of pabloph', 'victims of bomb tragedy', "victims of brazil's nightclub", 'victims of derailment', 'victims of devastating', 'victims of earthquake', 'victims of explosion', 'victims of fire', 'victims of flood', 'victims of flood fury', "victims of glasgow's", 'victims of lax shooter', 'victims of maring/habagat', 'victims of quake', 'victims of that . 
magnitude earthquake', 'victims of that devastating', 'victims of that devastating train explosion', 'victims of the clutha pub helicopter crash', 'victims of the deadly earthquake that struck', 'victims of the earthquake', 'victims of the explosion', 'victims of the fertilizer plant explosion', 'victims of the flood', 'victims of the helicopter crash', 'victims of the horrible fires', 'victims of the lax shooting', 'victims of the lax shooting today', 'victims of the marathon', 'victims of the metro-north', 'victims of the nsw fires', 'victims of the police helicopter crash', 'victims of the pub helicopter crash', 'victims of the recent', 'victims of the train accident', 'victims of the train accident that took place yesterday night', 'victims of the train derailment', 'victims of the typhoon', 'victims of this terrible incident', "victims of today's explosion", 'victims of tragic', 'victims reported', 'victims shot by suspect', 'victims that were affected', 'victims this week', 'victims today', 'victims waiting', 'victims with brain dead', 'wounded', 'wounded shown leaving airport', '', ' are believed to have died', ' bodies now found', ' bodies recovered', ' body was found', ' confirmed dead', ' days of mourning', ' dead', ' dead at', ' dead at clutha', ' dead so far', ' deaths', ' deaths confirmed', ' died', ' fatality', ' feared dead', ' have died', ' killed', ' life', ' more bodies are found', ' more bodies found', ' more bodies recovered', ' more die', ' more people who died', ' more tragedies', ' more victims', ' now dead', ' of the victims', ' of the dead', ' passenegers who died', ' passengers killed', ' people are killed', ' people confirmed dead', ' people died', ' people feared dead', ' people have died', ' people killed', ' people now confirmed dead', ' people shot at', ' people who died', ' person dies', ' persons feared dead', ' shot at', ' survivors', ' tsa agent dead', ' tsa agent has been shot', ' victims', ' victims identified', ' victims 
named', ' who died', '-year-old among the dead', ' lost lives', 'nd death', 'earthquake victim', 'earthquakevictims', 'flooding toll rises', 'another body', 'army is working to recover bodies', 'at least confirmed dead', 'at least dead', 'at least dead so far', 'at least deaths have been reported', 'at least die', 'at least fatalities', 'at least feared dead', 'at least have died', 'at least killed', 'at least now known to have died', 'at least people are dead', 'at least people are feared dead', 'at least people are killed', 'at least people are known to have died', 'at least people confirmed dead', 'at least people dead', 'at least people die', 'at least people have been killed', 'at least people have died', 'at least people killed', 'at least people now reported dead', 'at least people reported dead', 'at least people shot at lax airport terminal ', 'at least people were killed', 'at least person confirmed dead', 'at least person dead', 'at least ppl confirmed dead', 'at least reported dead', 'at least reportedly killed', 'atleast dead', 'blaze claims first life', 'building collapse', 'building collapse death toll', 'building collapse death toll at ', 'building collapse death toll hits ', 'building collapse death toll nears ', 'building collapse death toll passes ', 'building collapse death toll rises near', 'building collapse death toll rises to ', 'building collapse death toll tops ', 'building collapse kills ', 'building collapse kills at least ', 'building collapse kills more than ', 'building collapse leaves dead', 'building collapse toll hits', 'building collapse toll hits ', 'building collapse toll rises to ', 'building collapse toll tops ', 'building collapses claim lives', 'building toll rises to ', 'building-collapse toll', 'building-collapse toll climbs to ', 'buried in debris', 'casualties are low', 'church bells toll times', 'climbs', 'climbs to ', 'collapse becomes worst tragedy', 'collapse death toll crosses ', 'collapse death toll goes up to ', 
'collapse death toll hits', 'collapse death toll hits ', 'collapse death toll passes ', 'collapse death toll tops ', 'collapse leaves -plus dead', 'collapse toll', 'collapse toll hits ', 'collapse toll passes ', 'collapse toll passes killed', 'collapse toll still climbing', 'condolence', 'confirmed dead', 'confirmed death toll', 'confirmed killed', 'confirmed victims', 'dead bodies', 'dead bodies are handed', 'dead confirmed', 'dead expected', 'dead expected to be found', 'dead expected to rise', 'dead garment workers', 'dead or missing', 'dead peoples', 'dead photos', 'deadly', 'deadly collapse', 'deadly refinery blast', 'death', 'death toll at ', 'death toll breaches k', 'death toll climbs past ', 'death toll climbs to ', 'death toll exceeds ', 'death toll has risen to ', 'death toll is ', 'death toll is now ', 'death toll jumps to ', 'death toll now at ', 'death toll passes ', 'death toll rises ', 'death toll rises above ', 'death toll rises to', 'death toll rises to ', 'death toll rises to killed', 'death toll rises to at least ', 'death toll rises to nearly ', 'death toll rises to nearly people', 'death toll rising', 'death toll surges at', 'death toll surpass ', 'death toll to ', 'death toll touches ', 'deaths at ', 'deaths have been confirmed', 'deaths now ', 'die', 'died', 'died at that fertilizer plant', 'died this morning', 'dozens dead', 'dozens die', 'dozens feared dead', 'dozens killed', 'dozens more bodies found', 'earthquake death toll', 'earthquake death toll rises to ', 'earthquake kiling', 'earthquake that killed', 'earthquake toll reaches ', 'earthquake toll rises to', 'earthquake toll rises to ', 'earthquake victims will die', 'estimates at least died', 'factory-collapse toll passes ', 'fatal blaze', 'fatalities', 'fatalities has risen to ', 'fatalities reported', 'fatalities rise to ', 'fire claims first victim', 'fire deaths', 'flash floods kill ', 'flood death', 'flood death toll increases to ', 'flood death toll rises', 'flood fatalities hit 
', 'flood toll rises', 'flood toll rises to ', 'flooding death', 'flooding toll', 'flooding toll rises above', 'flooding victims', 'going to have way more causalities', 'government minister says death toll', 'government official says the death toll', 'has died', 'has killed people', 'has killed at least people', 'has killed more than people', 'has left people dead', 'has left at least dead', 'has risen to ', 'have killed', 'heavy death toll', 'her mom is of the casualties', 'hit by deadly flooding', 'horrible tragedy', 'horrible tragic news', 'horrific tragedy', 'hundreds dead', 'hundreds feared dead', 'hundreds have been killed', 'hundreds killed', "i'm still alive", 'is killing', 'is now ', 'islands tsunami deaths confirmed', 'kill', 'kill ', 'kill people', 'kill troops', 'kill at least ', 'kill at least tsa', 'kill dozens', 'kill more than ', 'kill more than people', 'killed', 'killed ', 'killed people', 'killed person', 'killed ppl', 'killed ', 'killed at lax shooting', 'killed at least', 'killed at least people', 'killed by flash flooding', 'killed more than people', 'killed nearly people', 'killed people this morning', 'killed today at lax', 'killer quake', 'killing', 'killing ', 'killing passengers', 'killing s of americans', 'killing at least', 'killing at least ', 'killing at least people', 'killing dozens', 'killing dozens of people', 'killing hundreds', 'killing more than people', 'killing people', 'kills ', 'kills at venezuela refinery', 'kills people', 'kills people at least', 'kills us troops', 'kills about ', 'kills at least', 'kills at least ', 'kills at least more', 'kills at least passengers', 'kills at least people', 'kills hundreds', 'kills last survivor of dhaka factory collapse', 'kills more than ', 'kills more than people', 'kills nearly ', 'kills nearly people', 'kills over ', 'kills over people', 'kills people', 'kills several', 'kills tsa officer', 'largest loss of life', 'leaves dead', 'leaves people dead', 'leaves -plus dead', 'leaves at 
least dead', 'leaves multiple people dead', 'leaves over dead', 'leaves scores dead', 'loss of life', 'many fatalities feared', 'meat vendor', 'meteor killing people', 'moment of silence', 'more bodies could be found', 'more than killed', 'more than people now confirmed dead', 'more victims brings toll', 'multiple deaths', 'multiple fatalities', 'multiple shooting victims confirmed', 'multiple victims reported', 'murder complaint over collapse', 'nightclub fire kills ', 'nightclub fire kills people', 'nightclub fire kills more than ', 'nightclub fire kills more than people', 'nightclub fire leaves people dead', 'nightclub tragedy', 'ninety- deaths', 'no casualties', 'no confirmed casualties', 'no deaths', 'no fatalities', 'nothing left .', 'number of dead', 'official death toll', 'official death toll now ', 'official says death toll', 'officials confirm people killed', 'officials confirm - deaths', 'officials say the death toll', 'over dead', 'over feared dead', 'over ppl are dead', 'people dead', 'people died', 'people killed', 'people who died', 'people who lost their lives', 'police confirm the final number of fatalities', 'police memorial', 'police say death toll from garment factory collapse', 'pray for the dead', 'quake toll rises', 'quake toll touches ', 'rains kill', 'raise death toll', 'raise death toll to ', 'raising the death toll', 'reported dead', 'reported killed', 'reports of at least dead', 'rip', 'rip the victims', 'scores dead', 'search for bodies', 'several dead', 'several deaths likely', 'several deaths reported', 'several killed', 'several lives', 'so many innocent lives', 'staggering toll', 'strong quake kills', 'takes lives', 'the death', 'the death toll', 'the deceased', 'the funeral', 'the lives', 'the toll', 'the tragedy', 'the victims of', 'the victims of bomb tragedy at', 'the victims of fire', 'those killed', 'those perished', 'those who died', 'those who have lost their lives', 'those who have lost their lives following building 
collapse', 'those who lost their life', 'thousands and thousands of people died', 'thousands presumed dead', 'thousands protest deadly collapse', 'toll climbs', 'toll rises to ', 'toll rises to dead', 'toll tops ', 'tragedy', 'unidentified victims', 'updates death toll', 'victims at lax', 'victims bringing total identified to ', 'victims drowned', 'victims families details', 'victims later today', 'victims of quake that left at least dead', 'victims of the', 'victims service under way', 'victims were thrown', 'young child', ' acres', ' acres burning', ' acres of land', ' buildings', ' cities', ' homes are in immediate danger', ' homes confirmed destroyed', ' homes lost', ' houses', ' kilometers', ' miles west of fc', ' percent contained', ' poultry houses', ' structures are currently threatened', ' structures burned so far', ' villa', ' villages flattened', ' wagons', ' year old tower collapsed', '- homes', '. earthquake devastates', '. sq miles', '.-magnitude quake collapses buildings', ' homes destroyed', 'km south of', 'earthquake damage', 'glass broken', 'highparkfire is at about sq miles', 'photos', 'a .-magnitude earthquake collap', 'a church breaking down', 'a historic church', 'a strong earthquake destroyed tons of infrastructures', 'a telling photo', 'affected by magnitude earthquake', 'affected by the earthquake', 'affected neighborhoods', 'affected regions', 'affected villages', 'after the quake', 'after-shocks of .', 'all connecting roads', 'all these photos', 'another villages', 'any damage', 'any photos', 'are destroyed by the quake', 'area affected', 'area demolished', 'area of the airport', 'assessing damage', 'at acres', 'at least structures', 'barrage', 'barricades', 'before & after earthquake', 'before and after', 'before and after image', 'before and after the earthquake', 'being destroyed', 'biggest refinery', 'bldg', 'bridges', 'roads damaged', 'building', 'building affected', 'building at zoo closing', 'building burnt down', 'building 
collapse has risen to ', 'building collapse identified', 'building collapse incident', 'building collapse live to vimeo', 'building collapse now above ', 'building collapse now exceeds ', 'building collapse reaches ', 'building collapse rises above ', 'building collapse rises to ', 'building collapse rises to more than ', 'building collapse surges past ', 'building collapse tops ', 'building collapses', 'building collapsing', 'building damage', 'building designs', 'building destroyed by the', 'building housing factories', 'building owner', 'building rubble', 'building wreckage', 'buildings', 'buildings collapsed', 'burned homes', 'burning homes filmed shortly after blast', 'burning to the ground', 'burns homes', "burns est'd acres of grass", 'burns nearly homes', 'burnt properties', 'bush fire threat to the blue mountains', 'businesses affected', 'businesses destroyed', 'can now build', 'captures devastating power', 'century old church', 'charred cities', 'charred material', 'church', 'church also collapsed', 'church buildings', 'church is destroyed by the earthquake', 'churches', 'churches destroyed', 'churches have collapsed', 'churches ruined', 'churches were destroyed', 'cleanup efforts', 'close-up aerial view', 'closed', 'collapse buried', 'collapse crosses ', 'collapse has topped ', 'collapse of buildings', 'collapse passes ', 'collapse toll above ', 'collapsed', 'collapsed buildings', 'collapsed factory compound', 'collapses buildings', 'collapses following earthquake', "colorado wildfire's destructive aftermath", 'completely destroyed', 'concrete debris', 'construction workers', 'cracks', 'cranes clear', "creates new island off pakistan's coast", 'crumble', 'crumble to the ground', 'crumbles parts of historic buildings', 'dam', 'damage caused', 'damaged areas', 'damaged towns', 'damages houses', 'dams', 'debris', 'depth: km', 'destroyed', 'destroyed by earthquake', 'destroys more than homes', 'destruction caused', 'destruction reported', 'destruction 
unbelievable', 'destructive', 'destructive history', 'devastated', 'devastated an area of', 'devastated at the aftermath', 'devastated by', 'devastated by huge fire', 'devastated by the flood', 'devastated parts of', 'devastates tight-knit town', 'devastating before/after comparison of', 'devastating earthquake', 'devastating loss', 'devastating scenes', 'devastation hampers', 'devastation hampers aid efforts', 'devastation impact', 'devasting floods', 'devours acres', 'disaster area', 'disaster leaves country', 'disaster site', 'disaster zone', 'earthquake clean-up', 'earthquake creates island off coast', 'earthquake creates new island', 'earthquake video', 'environmental toll', 'expected to resume full operations', 'facilities', 'factories reopen after collapse', 'factory building', 'family home', 'fire burning down different hillsides', 'fire claims homes', 'fire damage', 'fire destroys', 'fire near acres', 'flash flooding hit the west-end', 'flood affected areas', 'flood area', 'flood damage', 'flood havoc', 'flood hit areas', 'flood is almost at rd floor', 'flood torrent', 'flood videos', 'flood washing a building away at', 'flood water inundates', 'flood zone development', 'flood-area building rules', 'flood-hit', 'flood-hit areas', 'flood-ravaged', 'flood-ravaged areas', 'flooded fracking sites', 'flooded basement', 'flooded dvp', 'flooded house l', 'flooded landscape', 'flooded out after abflood', 'flooded right now', 'flooded saddledome', 'flooded the venue', 'forest management', 'going to have to build', 'has re-opened', 'has shutdown', 'have lost homes', 'heartbreaking photos', 'historic churches damaged', 'historical churches', 'historical loboc church', 'hit by a meteor', 'hit by another earthquake', 'hit by devastating', 'hit by magnitude . 
earthquake', 'home is being destroyed', 'homes destroyed', 'horrific building collapse', 'hospital are flooded', 'house', 'houses', 'houses are burning', 'houses brought down', 'howing damage', 'hundreds of homes are under water', 'hundreds of homes destroyed', 'hwy ', 'image', 'images of the destruction', 'in bad shape', 'infrastructure-damage', 'island', 'island metres high up', 'island created', 'island emerges', 'islands', 'islands destroyed by tsunami', "it's so devastating", 'landslides unleashed by early monsoon rains', 'lanslides', 'large number of homes at risk', 'largest cities', 'largest city flooded', 'latest photos', 'little damage', 'lodges', 'looks like a nuke went off', 'loon church', 'loses precious instruments', 'losses to agriculture', 'low-lying areas', 'malls', 'many buildings', 'many business and homes effe', 'map of highparkfire', 'map with before-and-after photos', 'massive . earthquake destroys parts of', 'massive aftershock rocks quake-stricken region', 'massive damage', 'massive damage to crops', 'massive destruction of ecological resources', 'massive earthquake devastates', 'massive fire destroys', 'ministries', 'minor damage', 'more buildings become compromised', 'most affected', 'most affected district by the earthquake', 'most destructive', 'most destructive on record', 'most destructive wildfire ever', 'most likely to continue', 'nasa aqua satellite image', 'nasa image', 'nasa satellite photo showing the nswfires today', 'nations impacted', 'neighborhood', 'neighborhoods', 'neighbourhood destroyed', 'neighbourhoods to re-open', 'nightmare scenario', 'no damage', 'no new homes destroyed overnight', 'northwest quadrant', 'now at acres', 'now at acres & growing', 'offices destroyed', 'old churches crumble', 'oldest bell tower collapses', 'oldest church', 'oldest churches', 'on fire right now', 'over acres burnt in wildfires', 'over acres of land & homes burned up', 'over shops', 'people are losing their homes', 'people being pulled 
out', 'people losing their property', 'people losing thierry homes', 'people who lost homes', 'photo', 'photo gallery', 'photo obtained', 'photo of damage', 'photo submitted', 'photos', 'photos of damage', 'photos of damages', 'photos of destruction', 'photos of flood', 'photos showing impact', 'pictures of cruise ship sinking off coast', 'pictures of explosion at finish line', 'pictures of metro north train derailment', 'pictures of the arrivals terminal on fire at', 'potholes', 'program launched to rebuild', 'quake collapses buildings', 'quake damage', 'quake rubble', 'quake-affected areas', 'quake-battered areas', 'quake-hit areas', 'quake-hit provinces', 'ranches avoid catastrophe', 'red circle is the train tunnel', 'remains at - acres in size', 'reservoirs', 'residents w/ property', 'restoration', 'return to normalcy', 'road damage', 'roads impacted', 'roofs', 'ruined by that earthquake', 'ruined province', 'ruins', 'ruptured pipeline', 'scenes', 'scenes of damage', 'schools and businesses', 'schools have been closed', 'search for collapse victims', 'search for survivors of a collapsed', 'seeing such scenes', 'severe earthquake', 'severely damaged', 'shocking damage', 'shocking photos', 'shocking video', 'shocking video of the moment of impact', 'significant damage', 'storm-hit areas', 'strong quake collapses', 'structure maps', 'structure withstand the floods', 'structures', 'structures burned', 'structures destroyed', 'stunning satellite image', 'submerged', 'suburbs threatened', 'tapes recount horrific scene', 'the . 
magnitude earthquake', 'the city', 'the damage', 'the damage inside the saddledome', 'the damage to the small town', 'the epicentre of the earthquake', 'the flooded areas', 'the historic churches collapsed', 'the historical loboc church', 'the images', 'the most affected', 'the most destructive', 'the most destructive fire', "the most destructive fire in colorado's history", 'the oldest church', 'the region', 'to protect this house', 'to rebuild', 'towns devastated', 'towns devastated by wildfire', 'tragic factory collapse', 'unable to withstand', 'unbelievable damage', 'unbelievable image', 'unbelievable photos', 'under flood', 'unknown damage', 'until further notice', 'video and photos', 'video captures', 'video captures devastating power', 'video coverage', 'video footage', 'video of situation', 'video released', 'video update', 'village', 'villages are cut off', 'villages destroyed', 'villages destroyed by quake', 'villages flattened', 'was destroyed in the earthquake', 'wildfire destroys', 'will be open', 'with some containment', 'without these churches', 'worst-hit area of quake', ' million dollars foreign aid', 'fire budget', 'salvationarmy', '$ billion', '$ million', '$ million fund', '$ million to assist', '$m', '짙 million aid', '짙 million raised', 'allocates $mn for', 'amount', 'an amount of million dollars', 'assist', 'authorities appealed', 'benefit', 'benefit concert', 'businesses record $ in flood losses', 'calamity funds to areas devasted', 'call for help', 'can donate', 'can donate $', 'cash support', 'charity', 'charity appeal', 'charity auction', 'charity car event', 'charity marathon', 'charity shirt', 'collecting money', 'disaster payment', 'displaced charities', 'donate ! reliefph', 'donate ! 
rescueph', 'donate day earnings', 'donate day salary', 'donate dollar', 'donate -pm', 'donate $ ', 'donate $', 'donate $ in support', 'donate $ to', 'donate 짙million euromillion prize', "donate a month's salary", 'donate anything', 'donate even small amounts', 'donate exchange gift budget', 'donate for', 'donate here', 'donate here to help victims', 'donate if you can !!', 'donate in-game currency', 'donate money', 'donate money online', 'donate my money', 'donate now to help', 'donate prize money', 'donate prize money to', 'donate salary', 'donate their aeroplan miles to red cross', 'donate their appearance fee', 'donate to help', 'donate to help clean-up', 'donate to help rebuild', 'donate to support', 'donate to the red cross', 'donate to victims', 'donate to wildfire relief', 'donate your spare change', 'donated lakhs', "donated month's salary", 'donated $ ', 'donated $ to relief efforts', 'donated over $ so far', 'donated to', 'donates $ to', 'donates $ to american red cross', 'donates $k to red cross', 'donates php ', 'donates to help', 'donates w million', 'donating', 'donating % of sales', 'donating $ per birdie', 'donating $ to support', 'donating $', 'donating $. per new follower', 'donating us $. 
million', 'donation channels', 'donation of $ to assist', 'donations', 'donations earth quake victims', 'donations in cash', 'donations flood', 'earthquake relief donations needed urgently', 'fake charities', 'federal disaster relief funds', 'financial aid', 'financial contributions', 'financial donations', 'financial support is the best way to help', 'flood appeal', 'flood appeal fall short', 'flood claims', 'flood donation drop off points', 'flood funding', 'flood insurance', 'flood relief fund', 'for charity', 'ford offering support to victims', 'fund raiser', 'fundraise', 'fundraiser', 'fundraising to help', 'funds', 'govt requesting fellow citizens to contribute', 'help', 'help them with a donation', 'how to donate', 'how to help', 'in cash', 'little donations', 'losses galore', 'lost revenue', 'more donations', 'needs $m typhoon aid', 'needs donations', 'no money', 'officers club donation', 'pledge $', 'pledge $ to', 'pools donations', 'province pledges billion to abflood relief', 'raise $ ', 'raise 짙 ', 'raise at least $m', 'raise funds', 'raise money', 'raise relief money', 'raised $ ', 'raised money for', 'raised more', 'raised over $k', 'raised thousands of $', 'raises funds', 'raising some funds', 'rate', 'red cross donate-a-load', 'red cross donations', 'red cross flood fund', 'red cross flood relief great sale', 'release aid money', 'release emergency funds', 'relief efforts/donations', 'relief fundraiser', 'relief fundraising tmrw', 'relief goods worth millions', 'relief to his swiss account', 'still accepting donations', 'stores price gouging', 'the charity', 'those who can donate', 'those who donated', 'those who want to donate', 'up to $m from state emg funds', 'us$ . milli', 'us$ . 
million', 'wildfire relief efforts', 'without charge', 'would you like to donate', 'you may drop your donations', ' bags', ' paper clips', ' paper towel', ' rubber bands and staples', 'accepted for distribution', 'aid the coloradoflood', 'aids', 'any relief goods', 'appeal', 'army choppers to carry wood', 'available', 'basic supplies', 'bug spray', 'call for cameras', 'can help', 'can help the coloradoflood victims', 'can you help', 'can you help?', 'candles', 'clothing', 'collect relief goods', 'collecting clothes', 'crisis response map', 'distributes relief goods', "don't ever recieve", 'donate coffins', 'donate clothes', 'donate goods', 'donate items', 'donate thousands of things', 'donate yun pork barrel', 'donated lots of clothes', 'donated sack', 'donation appointments', 'donation drop', 'donation drop off', 'donation drop points', 'donations brought to', 'essential items', 'evacuee relief cards', 'first batch of relief goods', 'flash flood relief', 'flood resource table', 'flooding resources', 'fresh-grilled tacos', 'gasoline', 'hand crews', 'heavy equipment', 'help here', 'help needed', 'help others', 'help us get', 'help victims', "hospital's generator", 'how you can help', 'impedes lending', 'information', 'load relief supplies', 'lost power', 'mangled pressure cooker used', 'need basic', 'household items', 'need help today for residents', 'need pillows', 'need resources', 'need urgent help', 'need you', 'needed', 'needed !', 'needed at the rideau community hall', 'needed down there', 'needed urgently', 'needs', 'needs household items', 'needs ongoing', 'new flood maps', 'no electricity', 'no food water & medicine', 'no fuel', 'not available', 'not financial help', 'now available', 'people have lost everything', 'people offering', 'people take rice', 'people who need rescuing', 'plastic crates', 'pls!!!', 'rail oil shipments', 'reach', 'relief cards are available today', 'relief drop off point', 'relief drop zones', 'relief goods maringph reliefph', 
'relief goods accepted', 'relief goods here', 'relief goods like clothes', 'relief goods to', 'relief goods will be accepted for distribution', 'relief items', 'relief material free', 'relief material is lying here', 'relief request', 'relief supplies comes', 'relief supplies comes under gunfire', 'relief to flood affected people', 'requires aid & relief services', 'resources', 'sandbags', 'sanitation', 'send your relief', 'struggling with coping', 'the goods', 'the most effective way to help', 'thermos', 'those who requires aid', 'trash bags', 'truckloads', 'typhoon haiyan relief', 'victims beg', 'victims need', 'water and clothing', 'we super need help', '@moorehospital', 'reliefph and rehab efforts', 'savethemountains', 'aid lacking', 'any assistance needed', 'await relief', 'begs for federal help', 'biggest firms rush to help', 'call ', 'can contribute', 'crisismanagement', 'disaster agency', 'fans helping', 'firefighters needed', 'flood forecasting division', 'flood relief camp', 'flood relief efforts', 'flood response', 'foreign aid', 'help bring attention to the flooding', 'help is needed', 'help is needed across the philippines right now', 'help the victims of', 'help them', 'help them out', 'helpful locals', 'hit areas need your help', 'industry response to floods', 'is taking small animals', 'need mechanical help', 'need to build', 'needing rescue', 'people needing rescue', 'people who are cleaning up need', 'people who want to volunteer', 'poor flood response', 'providing free board for pets', 'ready to aid', 'rehabilitation', 'relief activities', 'rescue work', 'shower', 'stranded get rescue', 'struggling with intense flooding', 'take measures', 'the support from', 'to help', 'to help flooding victims', 'us aid to', 'volunteered to help', 'volunteers help moving books', 'volunteers today to load', 'will be open overnight', 'with a helicopter', 'you may help', ' adults', ' missing', ' missing men', ' more baloch abducted', ' more found alive', ' person 
missing', ' rescued', ' women', ' missing', 's more still buried', 'army commandos locate stranded', 'army jawans rescued thousands', 'at least missing', 'at least people are missing', 'at least people still missing', 'at least unaccounted', 'being saved', 'being spotted', 'closed passengers evacuated after fire ravages', 'dozens missing', 'dozens trapped', 'earthquake victims of', 'frantic search', 'govt lists', 'govt taking innocent civilians hostage', 'hospital patients trapped', 'hundreds missing', 'hundreds more are missing', 'hundreds more missing', 'hundreds trapped', 'leaves many more still trapped', 'leaves stranded', 'lost at the clutha bar', 'many more feared trapped', 'many residents missing', 'members missing', 'missing', 'missing farm workers were found', 'missing men', 'more missing', 'multiple casualties', 'multiple people trapped', 'over still missing', 'over unaccounted', 'people missing', 'people remain trapped', 'people reportedly trapped inside their houses', 'people who all are still not rescued', 'people who are stranded', 'people who survived the kedarnath flood', 'quake missing', 'search', 'search capsized', 'search door-to-door', 'search for missing', 'search for survivors continues', 'search for survivors is', 'search missing', 'search of my mother', 'searches for quake survivors', 'searching for hundreds', 'searching for survivors', 'searching the submerged decks', 'several missing', 'survivors', 'tens of thousands of survivors', 'those people missing', 'those still stranded', 'thousands of survivors', 'thousands still missing', 'trapped', 'trapped at home', 'trapped people', 'typhoon survivors', 'victims found', 'victims of killer quake', 'victims return', 'victims stuck', 'victims stuck at', ' are evacuated', ' evacuated', ' evaculated', ' more people should evacuate immediately', ' people evacuated', ' people have been evacuated', ' pilgrims', '% homeless', 'evac beginning', 'wildfire activity prompts evacs', 'army evacuated persons 
today', 'at least people evacuated by', 'be evacuated', 'being evacuated', 'brings evacuations', 'buildings evacuated', 'calls for volunteers to man the evacuation centers', 'city lifts pre-evacuation notices', 'displaced civilians', 'displaced more than people', 'displaced thousands', 'displaced thousands of people', 'displacement', 'displaces', 'displaces ', 'displaces hundreds', 'displacing tens of thousands', 'displacing tens of thousands of people', 'divers evacuated', 'dozens of evacuations', 'evac notices', 'evacuate people', 'evacuate residents', 'evacuated people', 'evacuated persons today', 'evacuated animals', 'evacuates residents', 'evacuating', 'evacuating residents', 'evacuation bags', 'evacuation centers', 'evacuation centre', 'evacuation checklists', 'evacuation starts pm', 'evacuations', 'evacuations pic', 'evacuee', 'evacuee meeting tonight', 'evacuee pets', 'evacuees await return home', 'evacuees slowly return home', 'evacuees told they can return home', 'exacuated or not', 'family escapes', 'fire evacuation area expands', 'flee homes', 'flee their homes', 'force from homes today', 'force evacuations', 'force thousands to evacuate', 'force thousands to flee', 'forces from homes', 'forces to evacuate', 'forces to flee', 'forces to flee homes', 'forces more than from homes', 'forces thousands from homes', 'forces thousands to flee flames', 'forcing more from homes', 'forcing people to be e', 'forcing evacuations', 'forcing residents to flee', 'forcing tens of thousands to flee', 'had to back off our location', 'has forced the evacuation', 'hasnt evacuated yet', 'homeless', 'homeless man', 'hundreds are evacuated', 'hundreds evacuated', 'hundreds flee', 'if you are evacuating', 'keep receipts for living expenses', 'made thousands homeless', 'mass evacuation', 'mass evacuations', 'more evacuations ordered', 'more people missin', 'more than people are evacuated', 'more than people were forced to flee their homes', 'more than k displaced', 'new 
evacuation', 'no new evacuations', 'nobody evacuated', 'over are evacuated', 'people displaced', 'people evacuated', 'people refusing to leave', 'people return to', 'people return to charred cities', 'people who fled', 'people who fled the most destructive fire', 'pre-evac notice', 'pre-evacuation notice', 'pre-evacuation notices', 'previous evacuations', 'prompt mass evacuations', 'rapid evacuation', 'red cross evacuation', 'residents are evacuated', 'residents evacuate', 'residents evacuated', 'residents have returned to', 'residents return to find', 'residents to move to higher ground', 'tens of thousands forced', 'the evacuation', 'thousands flee', 'thousands forced to evacuate', 'thousands have been forced to flee the flames', 'thousands of residents evacuate', 'thousands still displaced', "to prepare' to evacuate", 'took refuge', 'updated evacuations', 'updated list of evacuation', 'victims crowd evacuation centers', 'were evacuating', 'wildfire forces evacuation of ', ' stolen', 'animalshelters', 'a pony', 'affected animals', 'all pets animal hospital', 'animal rescue', 'animal rescue hotline', 'animals', 'annotationirina', 'bears evacuated by chopper', 'bears evacuated by helicopter', 'can you help home pets/animals', 'demand action against dolphin slaughter', 'displaced animals', 'displaced kitty', 'dogs', 'donate for animal', 'donating $k per birdie', 'evacuated', 'evacuated dog runs to his owner waiting', 'farm animals', 'free boarding', 'got to get them out now', 'hippos', 'horses', 'horses displaced', 'hundreds of cats', 'kittens', 'livestock evac. 
center', 'lost blue burmese cat', 'lost french bulldog ??reward', 'millions of spiders', 'monkeys', 'moths', 'no pets', 'over horses', 'pets', 'pets affected by evacuations', 'pets displaced by colorado floods', 'sloths/gibbons/spider monkeys', 'small animals', 'spiders', 'the creatures', 'to escape', 'to take refuge in', 'vca animal hospitals', 'wilderness ranch sanctuary', 'your pets', 'a single prayer', 'a touching sight', 'a tragic story', 'all my support', 'all my support to the families of the victims', 'amazing job', 'amen!', 'an earthquake!', 'are safe', 'are there', 'aunts', 'be safe still', 'bed', 'doors and whole building are dancing', 'beside myself', 'beyond imagination', 'big hearts', 'bless your work', 'brave souls', 'breaks my heart', 'call home', 'call to prayer', "can't believe", "can't sleep", 'cannot believe', 'community', 'concerns', 'damn', 'dear god', 'deep condolences', 'devastated to hear', 'everything will be fine', "everything's gonna be alright!", 'faith in humanity', 'fam', 'family r safe', 'fear', 'felt sorry', 'get through this', 'god', 'god bless', 'god bless us all', 'god bless you!', 'god bless!', 'god guide all', 'god help', 'god please', 'god please spare', 'god preserve us', 'god will help us!', "god's grace", 'going to be okay', 'good vibes to', 'goosebumps', 'got scared', 'heart breaking', 'heart-wrenching', 'heartbre', 'heartbreaking to hear', 'heartbreaking to see', 'heartbreaking to watch the news', 'heartening', 'heavy heart', 'help me to pray', 'help pray', 'help the people & animals', 'help your brothers', 'hope', 'hope all friends', 'hope all is well', 'hope everyone is okay', 'hope everyone is safe', 'hope that my', "hope you're all well and safe", 'hoping', 'hoping they stop', 'hurts my heart', "i can't take it any longer", 'i feel for all of u', 'i feel so bad', 'i have lots of friends and fam in', 'i hope', 'i hope all is well w/ you and your family', 'i hope your safe', 'i know what it like', 'i pray', 'i think', 
'i was rocked by a m. earthquake', "i'm at a loss for words", "i'm in", "i'm so sorry to hear", 'in solidarity with', 'in your prayer', 'in your prayers tonight', 'in your thoughts', 'in your thoughts and prayers', 'is tht true??', 'it saddens me', "it's a good thing", "it's terrifying", 'jesus be the center', 'join me in praying', 'just a short prayer', 'just sent their messages to us', 'keep a blessing for them', 'keep colorado in your prayers', 'keep faith!', 'keep praying', 'keep safe', 'keep safe everyone!', 'keep safe everyone!!!', 'keep safe people', 'keep save people in there', 'keep the faith', 'keep the people safe', 'keep your fingers crossed', 'kills your family', 'let us pray', "let's all pray", "let's be united and pray", "let's include to our prayers", "let's pray", "let's pray ", "let's pray for our sistres and brothers", 'lets prayforvisayas', 'lord', 'lord god', 'have mercy', 'lots of prayers to', 'love', 'love you', 'make me want to cry', 'massive shout out', 'mercy', 'might', 'mom reported', 'most inspiring story', 'most inspiring thing', 'must share', 'my aunts are safe', 'my city is on fire', 'my country', 'my friends', 'my heart and thoughts go out to', 'my heart goes out to', 'my love & prayers', 'my papa and siblings', 'my prayers are w/', 'my prayers go out to everyone', 'my prayers go out to the people', 'my relatives', 'my thoughts', 'my thoughts & prayers', 'my thoughts & prayers go out to', 'my thoughts & prayers go out to the people', 'my thoughts and prayers', 'my thoughts and prayers are', 'my thoughts and prayers are with', 'my thoughts and prayers go out to', 'my thoughts and prayers go out to everyone', 'my thoughts and prayers go to', 'my thoughts and prayers go to the families', 'my thoughts and wishes go out to', 'my thoughts are', 'my thoughts are with', 'my thoughts go out to', 'my thoughts go out to everybody', 'my thoughts go out to everyone', 'my thoughts go out to the families', 'my thoughts goes', 'my thoughts to you', 
'my younger sister', 'need our prayers', 'need prayers', 'need to stop', 'needs prayer', 'no hope', 'no shame', 'no words', 'no words right now', 'oh lord help us!', 'oh my gosh', 'oh my!', 'omg', 'our brothers and sisters', 'our hearts', 'our prayers', 'our relatives', 'our thoughts & prayers are', 'our thoughts & prayers are with', 'our thoughts and prayers are with', 'our thoughts and prayers go out to', 'our thoughts and prayers remain', 'our thoughts are with you', 'our thoughts go', 'our thoughts go out to', 'our thoughts go out to everyone', 'our thoughts this morning', 'our thoughts with', 'personal message', 'please prayforvisayas', 'please be careful', 'please be safe', 'please bless', 'please help us', 'please keep praying for', 'please pray', 'please pray for', 'please protect those people', 'please stay safe', 'followers', 'rt!', 'pls prayforbohol', 'poor you', 'positive stories', 'positive vibes', 'pray', 'pray for', 'pray for all the people', 'pray for our dear', 'pray for rain', 'pray for the best', 'pray for the people affected', 'pray for the philippines', 'pray for the philippines especially', 'pray for the victims of the horrible explosions', 'pray for them', 'pray for u', 'pray!', 'prayer', 'prayer alert', 'prayers', 'prayers :(', 'prayers all', 'prayers and condolences for our brothers and sisters', 'prayers and thoughts', 'prayers are', 'prayers are needed for', 'prayers are with', 'prayers for', 'prayers for all', 'prayers for the families', 'prayers for the people', 'prayers for those affected', 'prayers for west', 'prayers go out to', 'prayers going out to', 'prayers going out to all', 'prayers going out to those', 'prayers please', 'prayers to', 'praying', 'praying for', 'praying for all', 'praying for everyone', "praying for everyone's safety", 'praying for my family', 'praying for the people', 'praying for those', 'praying for you', 'praying hard for', 'quite sad', 'r u okay??', 'really heartbreaking', 'relatives', 'remain in prayer', 
'residents stay safe', 'rest in peace', 'rip all', 'rip to our', 'rip today', 'sad', 'sad really', 'sad times for', 'sad to hear', 'sadness', 'safe', 'search of loved', 'send up prayers', 'sending good thoughts out to everyone', 'sending good vibes', 'sending out my love', 'sending prayers', 'sending thoughts and prayers', 'shocking', 'silent prayer', 'sincere prayer', 'so heartbreaking', 'so heartbreaking!!', 'so sad', 'so scary', 'so worried', 'spare a thought for us', 'special prayer', 'speechless', 'starting to panic coz', 'stay safe guys', 'stay strong', 'stay strong!', 'supporting', 'take care', 'take care of yourselves!', 'terrifying', 'thank god', 'thank goodness', 'thank goodness i live', 'thank you', 'thank you lord', 'thanks for', 'thanks!', "that's too much", 'the community', 'the most disgraceful exploitation', 'the ppl', 'the prayers', 'the safeness of our', 'the souls', 'their okay', 'they are fine', 'think positive', 'thinking about everyone', 'thinking of', 'thinking of a lot of families', 'thinking of all', 'thinking of all my friends', 'thinking of all our friends', 'thinking of everyone helping to', 'thinking of my friends and everyone', 'thinking of you', 'this is not good', 'this is over my house', 'this too shall pass', 'thnx god', 'those individuals', 'those who passed so suddenly', 'to my family & friends', 'to pray', 'to pray for', 'tragic event', 'tragic scene', 'traumatic time', 'uplifting', 'upsetting', 'urgh', 'we are asking all of you', "we are hoping that they're safe", 'we are praying for you', 'we getting slammed with your flood calls !?', 'we have to pray more', 'we pray', "we're staying", 'what did just happend', 'who matters the most', 'without parole', 'would be sweet', 'you texted', 'your prayers']
search_term = ['conflict', 'violence', 'displacement', 'drought', 'earthquake', 'fire', 'flooding', 'freeze', 'health emergency', 'dengue', 'pneumonic plague', 'measles', 'landslide', 'tropical storm', 'typhoon', 'cyclone', 'hurricane', 'tsunami', 'urban disaster', 'volcanic eruption', 'refugee', 'terrorist attack', 'cold wave', 'complex emergency', 'epidemic', 'extratropical cyclone', 'flash flood', 'flood', 'heat wave', 'insect infestation', 'land slide', 'mud slide','snow avalanche', 'storm surge', 'technological disaster', 'tropical cyclone', 'volcano', 'wild fire', 'flood crisis', 'explosion', 'affected tornado', 'affected', 'death toll', 'tornado relief', 'flood appeal', 'massive explosion', 'affected areas', 'praying victims', 'injured', 'lurches fire', 'flood relief', 'flood affected', 'tornado victims', 'deadly', 'evacuated', 'relief', 'flood death', 'deaths confirmed', 'affected flooding', 'people killed', 'flood damage', 'people dead', 'major flood', 'rubble', 'another explosion', 'flood warnings', 'tornado survivor', 'damage', 'devastating', 'flood toll', 'affected hurricane', 'crisis', 'relief efforts', 'flood emergency', 'fire flood', 'huge explosion', 'bushfire', 'torrential rains', 'affected explosion', 'disaster', 'twister', 'blast', 'injuries reported', 'fatalities', 'large explosion', 'destroyed', 'displaced', 'casualties', 'climate change', 'major explosion', 'response disasters', 'explosion victims', 'tragic', 'dealing hurricane', 'flood recovery', 'dead torrential', 'flood years', 'massive tornado', 'buried alive', 'alive rubble', 'crisis rises', 'flood ravaged', 'killed injured', 'killed people', 'people died', 'floods kill', 'tornado damage', 'facing flood', 'deadly explosion', 'flood disaster', 'tornado disaster', 'help victims', 'hundreds homes', 'severe flooding', 'magnitude', 'firefighters police', 'fire explosion', 'storm', 'flood hits', 'floodwaters', 'emergency', 'flood alerts', 'crisis unfolds', 'tragic events', 'deadly tornado', 
'people trapped', 'surging floods', 'city tornado', 'damaged hurricane', 'rains severely', 'house flood', 'devastating tornado', 'lost lives', 'reportedly dead', 'following explosion', 'tornado flood', 'early warninig', 'warning', 'dead floods', 'flood threat', 'flood situation', 'risk running', 'loss life', 'thoughts victims', 'terrible explosion', 'seismic', 'flood homeowners', 'flood claims', 'power supplies', 'free hotline', 'hotline help', 'registered magnitude', 'prepare hurricane', 'landfall', 'crisis worsens', 'communities damaged', 'destruction', 'tornado', 'hurricane coming', 'toxins flood', 'release toxins', 'toxins', 'supplies waters', 'crisis found', 'braces major', 'government negligent', 'attack', 'waiting hurricane', 'terror', 'memorial service', 'terror attack', 'coast hurricane', 'terrified hurricane', 'hurricane category', 'disaster relief', 'cleanup', 'troops lend', 'effected hurricane', 'time hurricane', 'saying hurricane', 'praying families', 'dramatic', 'path hurricane']
| 12,637.333333
| 104,677
| 0.729057
| 14,399
| 113,736
| 5.758108
| 0.186332
| 0.006163
| 0.005789
| 0.003257
| 0.069701
| 0.043432
| 0.032107
| 0.026366
| 0.024363
| 0.024363
| 0
| 0.00004
| 0.130777
| 113,736
| 8
| 104,678
| 14,217
| 0.838613
| 0.920342
| 0
| 0
| 0
| 0
| 0.758583
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e5b142ce1fe5c6d48f707e90d204987e528da2f3
| 9,319
|
py
|
Python
|
tests/integration/test_join.py
|
flcong/dask-sql
|
39980fd40f49ddf3c1910c8e36e8a88bd78b82de
|
[
"MIT"
] | null | null | null |
tests/integration/test_join.py
|
flcong/dask-sql
|
39980fd40f49ddf3c1910c8e36e8a88bd78b82de
|
[
"MIT"
] | null | null | null |
tests/integration/test_join.py
|
flcong/dask-sql
|
39980fd40f49ddf3c1910c8e36e8a88bd78b82de
|
[
"MIT"
] | null | null | null |
import numpy as np
import pandas as pd
import pytest
from pandas.testing import assert_frame_equal
def test_join(c):
    """A bare JOIN on user_id behaves as an inner join: only matching rows."""
    result = c.sql(
        "SELECT lhs.user_id, lhs.b, rhs.c FROM user_table_1 AS lhs JOIN user_table_2 AS rhs ON lhs.user_id = rhs.user_id"
    ).compute()
    expected = pd.DataFrame(
        {"user_id": [1, 1, 2, 2], "b": [3, 3, 1, 3], "c": [1, 2, 3, 3]}
    )
    # Row order from a distributed join is unspecified, so sort before comparing.
    ordered = result.sort_values(["user_id", "b", "c"]).reset_index(drop=True)
    assert_frame_equal(ordered, expected)
def test_join_inner(c):
    """An explicit INNER JOIN gives the same result as the bare JOIN form."""
    result = c.sql(
        "SELECT lhs.user_id, lhs.b, rhs.c FROM user_table_1 AS lhs INNER JOIN user_table_2 AS rhs ON lhs.user_id = rhs.user_id"
    ).compute()
    expected = pd.DataFrame(
        {"user_id": [1, 1, 2, 2], "b": [3, 3, 1, 3], "c": [1, 2, 3, 3]}
    )
    # Sort to make the comparison independent of partition ordering.
    ordered = result.sort_values(["user_id", "b", "c"]).reset_index(drop=True)
    assert_frame_equal(ordered, expected)
def test_join_outer(c):
    """FULL (outer) JOIN keeps unmatched rows from both sides.

    Dask fills the missing cells with NaN (not pandas' NA), which is why the
    expected columns are float-valued.
    """
    df = c.sql(
        "SELECT lhs.user_id, lhs.b, rhs.c FROM user_table_1 AS lhs FULL JOIN user_table_2 AS rhs ON lhs.user_id = rhs.user_id"
    )
    df = df.compute()
    expected_df = pd.DataFrame(
        {
            # That is strange. Unfortunately, it seems dask fills in the
            # missing rows with NaN, not with NA...
            # FIX: use np.nan — the np.NaN alias was removed in NumPy 2.0.
            "user_id": [1, 1, 2, 2, 3, np.nan],
            "b": [3, 3, 1, 3, 3, np.nan],
            "c": [1, 2, 3, 3, np.nan, 4],
        }
    )
    assert_frame_equal(
        df.sort_values(["user_id", "b", "c"]).reset_index(drop=True), expected_df
    )
def test_join_left(c):
    """LEFT JOIN keeps every left-side row; unmatched right columns become NaN.

    Dask fills the missing cells with NaN (not pandas' NA), hence the float
    expectation for ``c``.
    """
    df = c.sql(
        "SELECT lhs.user_id, lhs.b, rhs.c FROM user_table_1 AS lhs LEFT JOIN user_table_2 AS rhs ON lhs.user_id = rhs.user_id"
    )
    df = df.compute()
    expected_df = pd.DataFrame(
        {
            # That is strange. Unfortunately, it seems dask fills in the
            # missing rows with NaN, not with NA...
            # FIX: use np.nan — the np.NaN alias was removed in NumPy 2.0.
            "user_id": [1, 1, 2, 2, 3],
            "b": [3, 3, 1, 3, 3],
            "c": [1, 2, 3, 3, np.nan],
        }
    )
    assert_frame_equal(
        df.sort_values(["user_id", "b", "c"]).reset_index(drop=True), expected_df,
    )
def test_join_right(c):
    """RIGHT JOIN keeps every right-side row; unmatched left columns become NaN.

    Dask fills the missing cells with NaN (not pandas' NA), hence the float
    expectations for ``user_id`` and ``b``.
    """
    df = c.sql(
        "SELECT lhs.user_id, lhs.b, rhs.c FROM user_table_1 AS lhs RIGHT JOIN user_table_2 AS rhs ON lhs.user_id = rhs.user_id"
    )
    df = df.compute()
    expected_df = pd.DataFrame(
        {
            # That is strange. Unfortunately, it seems dask fills in the
            # missing rows with NaN, not with NA...
            # FIX: use np.nan — the np.NaN alias was removed in NumPy 2.0.
            "user_id": [1, 1, 2, 2, np.nan],
            "b": [3, 3, 1, 3, np.nan],
            "c": [1, 2, 3, 3, 4],
        }
    )
    assert_frame_equal(
        df.sort_values(["user_id", "b", "c"]).reset_index(drop=True), expected_df,
    )
def test_join_complex(c):
    """Theta joins: join conditions that are pure inequalities."""
    # Single inequality condition.
    result = c.sql(
        "SELECT lhs.a, rhs.b FROM df_simple AS lhs JOIN df_simple AS rhs ON lhs.a < rhs.b",
    ).compute()
    expected = pd.DataFrame(
        {"a": [1, 1, 1, 2, 2, 3], "b": [1.1, 2.2, 3.3, 2.2, 3.3, 3.3]}
    )
    assert_frame_equal(result.sort_values(["a", "b"]).reset_index(drop=True), expected)
    # Two inequality conditions combined with AND; duplicated column names
    # from the self-join are disambiguated as a0/b0.
    result = c.sql(
        """
        SELECT lhs.a, lhs.b, rhs.a, rhs.b
        FROM
        df_simple AS lhs
        JOIN df_simple AS rhs
        ON lhs.a < rhs.b AND lhs.b < rhs.a
        """
    ).compute()
    expected = pd.DataFrame(
        {
            "a": [1, 1, 2],
            "b": [1.1, 1.1, 2.2],
            "a0": [2, 3, 3],
            "b0": [2.2, 3.3, 3.3],
        }
    )
    assert_frame_equal(result.sort_values(["a", "b0"]).reset_index(drop=True), expected)
def test_join_complex_2(c):
    """An equality join combined with an arithmetic inequality condition."""
    result = c.sql(
        """
        SELECT
        lhs.user_id, lhs.b, rhs.user_id, rhs.c
        FROM user_table_1 AS lhs
        JOIN user_table_2 AS rhs
        ON rhs.user_id = lhs.user_id AND rhs.c - lhs.b >= 0
        """
    ).compute()
    # Only user_id 2 satisfies both the key match and c - b >= 0.
    expected = pd.DataFrame(
        {"user_id": [2, 2], "b": [1, 3], "user_id0": [2, 2], "c": [3, 3]}
    )
    assert_frame_equal(result.sort_values("b").reset_index(drop=True), expected)
def test_join_literal(c):
    """Constant join conditions: ON True is a cross product, ON False is empty."""
    result = c.sql(
        """
        SELECT
        lhs.user_id, lhs.b, rhs.user_id, rhs.c
        FROM user_table_1 AS lhs
        JOIN user_table_2 AS rhs
        ON True
        """
    ).compute()
    expected = pd.DataFrame(
        {
            "user_id": [2, 2, 2, 2, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3],
            "b": [1, 1, 1, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3],
            "user_id0": [1, 1, 2, 4, 1, 1, 2, 4, 1, 1, 2, 4, 1, 1, 2, 4],
            "c": [1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4],
        }
    )
    ordered = result.sort_values(["b", "user_id", "user_id0"]).reset_index(drop=True)
    assert_frame_equal(ordered, expected)
    # ON False must produce no rows at all.
    empty = c.sql(
        """
        SELECT
        lhs.user_id, lhs.b, rhs.user_id, rhs.c
        FROM user_table_1 AS lhs
        JOIN user_table_2 AS rhs
        ON False
        """
    ).compute()
    no_rows = pd.DataFrame({"user_id": [], "b": [], "user_id0": [], "c": []})
    # Dtypes of an empty result are not meaningful, so they are not compared.
    assert_frame_equal(empty.reset_index(), no_rows.reset_index(), check_dtype=False)
def test_join_lricomplex(c):
    """Left/right/inner joins with mixed equality + inequality conditions.

    Two scenarios are exercised:
    * panel data: equality on the id column plus ``startdate <= dates``;
    * time-series data: a single inequality condition only.
    For each, the left join, the mirrored right join, and the inner join are
    all compared against one hand-computed answer (the inner join result is
    the outer result minus its null-``startdate`` rows).
    """
    # ---------- Panel data (equality and inequality conditions)
    # Correct answer
    dfcorrpn = pd.DataFrame(
        [
            [0, 1, pd.NA, pd.NA, pd.NA, pd.NA],
            [1, 5, 32, 2, pd.NA, 112],
            [1, 5, 32, 4, 13, 113],
            [2, 1, 33, pd.NA, pd.NA, pd.NA],
        ],
        columns=["ids", "dates", "pn_nullint", "startdate", "lk_nullint", "lk_int",],
    )
    # Cast to pandas nullable integer dtypes so pd.NA round-trips correctly.
    change_types = {
        "pn_nullint": "Int32",
        "lk_nullint": "Int32",
        "startdate": "Int64",
        "lk_int": "Int64",
    }
    for k, v in change_types.items():
        dfcorrpn[k] = dfcorrpn[k].astype(v)
    # Left Join
    querypnl = """
        select a.*, b.startdate, b.lk_nullint, b.lk_int
        from user_table_pn a left join user_table_lk b
        on a.ids=b.id and b.startdate<=a.dates
        """
    dftestpnl = (
        c.sql(querypnl)
        .compute()
        .sort_values(["ids", "dates", "startdate"])
        .reset_index(drop=True)
    )
    assert_frame_equal(dftestpnl, dfcorrpn, check_dtype=False)
    # Right Join — tables swapped, so it must reproduce the same answer.
    querypnr = """
        select b.*, a.startdate, a.lk_nullint, a.lk_int
        from user_table_lk a right join user_table_pn b
        on b.ids=a.id and a.startdate<=b.dates
        """
    dftestpnr = (
        c.sql(querypnr)
        .compute()
        .sort_values(["ids", "dates", "startdate"])
        .reset_index(drop=True)
    )
    assert_frame_equal(dftestpnr, dfcorrpn, check_dtype=False)
    # Inner Join
    querypni = """
        select a.*, b.startdate, b.lk_nullint, b.lk_int
        from user_table_pn a inner join user_table_lk b
        on a.ids=b.id and b.startdate<=a.dates
        """
    dftestpni = (
        c.sql(querypni)
        .compute()
        .sort_values(["ids", "dates", "startdate"])
        .reset_index(drop=True)
    )
    # The inner join drops the null-startdate rows, after which startdate and
    # lk_int hold no NA values and become plain int64.
    assert_frame_equal(
        dftestpni,
        dfcorrpn.dropna(subset=["startdate"])
        .assign(
            startdate=lambda x: x["startdate"].astype("int64"),
            lk_int=lambda x: x["lk_int"].astype("int64"),
        )
        .reset_index(drop=True),
        check_dtype=False,
    )
    # ---------- Time-series data (inequality condition only)
    # # Correct answer
    dfcorrts = pd.DataFrame(
        [
            [1, 21, pd.NA, pd.NA, pd.NA],
            [3, pd.NA, 2, pd.NA, 112],
            [7, 23, 2, pd.NA, 112],
            [7, 23, 4, 13, 113],
        ],
        columns=["dates", "ts_nullint", "startdate", "lk_nullint", "lk_int",],
    )
    # Same nullable-dtype casting as for the panel answer above.
    change_types = {
        "ts_nullint": "Int32",
        "lk_nullint": "Int32",
        "startdate": "Int64",
        "lk_int": "Int64",
    }
    for k, v in change_types.items():
        dfcorrts[k] = dfcorrts[k].astype(v)
    # Left Join
    querytsl = """
        select a.*, b.startdate, b.lk_nullint, b.lk_int
        from user_table_ts a left join user_table_lk2 b
        on b.startdate<=a.dates
        """
    dftesttsl = (
        c.sql(querytsl)
        .compute()
        .sort_values(["dates", "startdate"])
        .reset_index(drop=True)
    )
    assert_frame_equal(dftesttsl, dfcorrts, check_dtype=False)
    # Right Join — mirrored form of the left join above.
    querytsr = """
        select b.*, a.startdate, a.lk_nullint, a.lk_int
        from user_table_lk2 a right join user_table_ts b
        on a.startdate<=b.dates
        """
    dftesttsr = (
        c.sql(querytsr)
        .compute()
        .sort_values(["dates", "startdate"])
        .reset_index(drop=True)
    )
    assert_frame_equal(dftesttsr, dfcorrts, check_dtype=False)
    # Inner Join
    querytsi = """
        select a.*, b.startdate, b.lk_nullint, b.lk_int
        from user_table_ts a inner join user_table_lk2 b
        on b.startdate<=a.dates
        """
    dftesttsi = (
        c.sql(querytsi)
        .compute()
        .sort_values(["dates", "startdate"])
        .reset_index(drop=True)
    )
    # As for the panel case: dropping null startdates restores int64 dtypes.
    assert_frame_equal(
        dftesttsi,
        dfcorrts.dropna(subset=["startdate"])
        .assign(
            startdate=lambda x: x["startdate"].astype("int64"),
            lk_int=lambda x: x["lk_int"].astype("int64"),
        )
        .reset_index(drop=True),
        check_dtype=False,
    )
| 27.984985
| 127
| 0.534607
| 1,385
| 9,319
| 3.419495
| 0.094585
| 0.046875
| 0.057432
| 0.064611
| 0.802576
| 0.761402
| 0.738598
| 0.730785
| 0.696579
| 0.672086
| 0
| 0.042441
| 0.307222
| 9,319
| 332
| 128
| 28.069277
| 0.69114
| 0.053654
| 0
| 0.410788
| 0
| 0.024896
| 0.262323
| 0
| 0
| 0
| 0
| 0
| 0.070539
| 1
| 0.037344
| false
| 0
| 0.016598
| 0
| 0.053942
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e5b2788ca138945d25146a5a065db8adb0b4c8a0
| 158
|
py
|
Python
|
ransomcare/config/__init__.py
|
Happyholic1203/ransomcare
|
81a8dd1e0ed2dee6549321624e8e311a69e727d9
|
[
"MIT"
] | 16
|
2016-07-20T15:57:14.000Z
|
2021-10-16T07:54:21.000Z
|
ransomcare/config/__init__.py
|
Happyholic1203/ransomcare
|
81a8dd1e0ed2dee6549321624e8e311a69e727d9
|
[
"MIT"
] | null | null | null |
ransomcare/config/__init__.py
|
Happyholic1203/ransomcare
|
81a8dd1e0ed2dee6549321624e8e311a69e727d9
|
[
"MIT"
] | 7
|
2016-07-22T09:30:16.000Z
|
2020-04-07T06:59:08.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Config selector: chooses between the dev and prod settings modules at
# import time, keyed off the RANSOMECARE_ENV environment variable (note the
# project's own spelling of the variable name). Production is the default.
import os
if os.environ.get('RANSOMECARE_ENV') == 'dev':
    from .dev import *
else:
    from .prod import *
| 15.8
| 46
| 0.607595
| 23
| 158
| 4.130435
| 0.73913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007937
| 0.202532
| 158
| 9
| 47
| 17.555556
| 0.746032
| 0.265823
| 0
| 0
| 0
| 0
| 0.157895
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f90c7db435a8d979ca6e193315d763ed475ff576
| 176
|
py
|
Python
|
features/steps/data/src/fails_isort.py
|
ldamewood/ly-python-tools
|
4c6e17357ddd0ce68dae539f4527ab3a40e58698
|
[
"MIT"
] | null | null | null |
features/steps/data/src/fails_isort.py
|
ldamewood/ly-python-tools
|
4c6e17357ddd0ce68dae539f4527ab3a40e58698
|
[
"MIT"
] | null | null | null |
features/steps/data/src/fails_isort.py
|
ldamewood/ly-python-tools
|
4c6e17357ddd0ce68dae539f4527ab3a40e58698
|
[
"MIT"
] | null | null | null |
# pylint: disable=all
# flake8: noqa
# NOTE(review): this file lives under features/steps/data/ and — per its name,
# fails_isort.py — appears to be a fixture that is *meant* to violate isort's
# import ordering (linters are silenced above). Do not "fix" the import order.
import logging
import collections
from typing import DefaultDict
foo: DefaultDict[str, str] = collections.defaultdict()
logging.info(foo)
| 19.555556
| 54
| 0.789773
| 22
| 176
| 6.318182
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006452
| 0.119318
| 176
| 8
| 55
| 22
| 0.890323
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0054c4655f1169c4430788ca1c7a5fe5e19de471
| 5,874
|
py
|
Python
|
test_code.py
|
Oliver-Chalkley/whole_cell_modelling_suite
|
dc5896635b88398210d0fd1d7bc3065ba716351a
|
[
"MIT"
] | null | null | null |
test_code.py
|
Oliver-Chalkley/whole_cell_modelling_suite
|
dc5896635b88398210d0fd1d7bc3065ba716351a
|
[
"MIT"
] | null | null | null |
test_code.py
|
Oliver-Chalkley/whole_cell_modelling_suite
|
dc5896635b88398210d0fd1d7bc3065ba716351a
|
[
"MIT"
] | null | null | null |
import os
from connections import Karr2012Bg as karr_conn
from analysis.genome import Genes as anal
# Open a connection to the Karr 2012 whole-cell model backend as user
# 'oc13378' on the 'bg' cluster. The meaning of the three trailing None
# arguments is not visible here — TODO confirm against Karr2012Bg's signature.
bc3_conn = karr_conn('oc13378', 'bg', 'Oliver', 'Chalkley', 'o.chalkley@bristol.ac.uk', None, None, None)
all_genes = ('MG_001', 'MG_002', 'MG_003', 'MG_004', 'MG_005', 'MG_006', 'MG_007', 'MG_008', 'MG_009', 'MG_010', 'MG_011', 'MG_012', 'MG_013', 'MG471', 'MG472', 'MG_014', 'MG_015', 'MG_018', 'MG_019', 'MG_020', 'MG_021', 'MG_022', 'MG_023', 'MG_024', 'MG_025', 'MG_026', 'MG_027', 'MG_028', 'MG_029', 'MG_030', 'MG_031', 'MG_032', 'MG_033', 'MG_034', 'MG_035', 'MG_036', 'MG_037', 'MG_038', 'MG_039', 'MG_040', 'MG_041', 'MG_042', 'MG_043', 'MG_044', 'MG_045', 'MG_046', 'MG_047', 'MG_048', 'MG_049', 'MG_050', 'MG_051', 'MG_052', 'MG_053', 'MG_054', 'MG_055', 'MG_473', 'MG_474', 'MG_056', 'MG_057', 'MG_058', 'MG_059', 'MG_060', 'MG_061', 'MG475', 'MG_062', 'MG_063', 'MG_064', 'MG_065', 'MG_066', 'MG_067', 'MG_068', 'MG_069', 'MG_070', 'MG_071', 'MG_072', 'MG_073', 'MG_074', 'MG_075', 'MG_076', 'MG_077', 'MG_078', 'MG_079', 'MG_080', 'MG_081', 'MG_082', 'MG_083', 'MG_084', 'MG_085', 'MG_086', 'MG_087', 'MG_088', 'MG_089', 'MG_090', 'MG_091', 'MG_092', 'MG_093', 'MG_094', 'MG_095', 'MG_096', 'MG_097', 'MG_098', 'MG_099', 'MG_100', 'MG_101', 'MG_102', 'MG_103', 'MG_476', 'MG_104', 'MG_105', 'MG_106', 'MG_107', 'MG_108', 'MG_109', 'MG_110', 'MG_111', 'MG_112', 'MG_113', 'MG_114', 'MG_115', 'MG_116', 'MG_117', 'MG_118', 'MG_119', 'MG_120', 'MG_121', 'MG_122', 'MG_123', 'MG_124', 'MG_125', 'MG_126', 'MG_127', 'MG_128', 'MG_129', 'MG_130', 'MG_131', 'MG_132', 'MG_133', 'MG_134', 'MG_135', 'MG_136', 'MG_137', 'MG_138', 'MG_139', 'MGrrnA16S', 'MGrrnA23S', 'MGrrnA5S', 'MG_140', 'MG_141', 'MG_477', 'MG_142', 'MG_143', 'MG_144', 'MG_145', 'MG_146', 'MG_147', 'MG_148', 'MG_149', 'MG_478', 'MG_150', 'MG_151', 'MG_152', 'MG_153', 'MG_154', 'MG_155', 'MG_156', 'MG_157', 'MG_158', 'MG_159', 'MG_160', 'MG_161', 'MG_162', 'MG_163', 'MG_164', 'MG_165', 'MG_166', 'MG_167', 'MG_168', 'MG_169', 'MG_170', 'MG_171', 'MG_172', 'MG_173', 'MG_174', 'MG_175', 'MG_176', 'MG_177', 'MG_178', 'MG_179', 'MG_180', 'MG_181', 'MG_182', 'MG_183', 'MG_184', 'MG_185', 'MG_186', 'MG_187', 'MG_188', 'MG_189', 
'MG_190', 'MG_191', 'MG_192', 'MG_194', 'MG_195', 'MG_196', 'MG_197', 'MG_198', 'MG_199', 'MG_200', 'MG_201', 'MG_202', 'MG479', 'MG_203', 'MG_204', 'MG_205', 'MG_206', 'MG_207', 'MG_208', 'MG_209', 'MG_210', 'MG_480', 'MG_481', 'MG_211', 'MG_482', 'MG_212', 'MG_213', 'MG_214', 'MG_215', 'MG_216', 'MG483', 'MG484', 'MG485', 'MG486', 'MG487', 'MG488', 'MG489', 'MG490', 'MG_217', 'MG_218', 'MG_491', 'MG_219', 'MG_220', 'MG492', 'MG_221', 'MG_222', 'MG_223', 'MG_224', 'MG_225', 'MG_226', 'MG_227', 'MG_228', 'MG_229', 'MG_230', 'MG_231', 'MG_232', 'MG_233', 'MG_234', 'MG_235', 'MG_236', 'MG_237', 'MG_238', 'MG_239', 'MG_240', 'MG_241', 'MG_242', 'MG_243', 'MG_244', 'MG_245', 'MG_246', 'MG_247', 'MG_248', 'MG_249', 'MG_250', 'MG_251', 'MG_252', 'MG_253', 'MG_254', 'MG493', 'MG_255', 'MG_494', 'MG495', 'MG496', 'MG_256', 'MG_257', 'MG_258', 'MG_259', 'MG_260', 'MG497', 'MG_261', 'MG_262', 'MG_498', 'MG_263', 'MG_264', 'MG_265', 'MG_266', 'MG_267', 'MG_268', 'MG_0001', 'MG_269', 'MG_270', 'MG_271', 'MG_272', 'MG_0002', 'MG_273', 'MG_274', 'MG_275', 'MG_276', 'MG_277', 'MG_278', 'MG_279', 'MG_280', 'MG_281', 'MG499', 'MG500', 'MG501', 'MG502', 'MG503', 'MG_282', 'MG_283', 'MG_284', 'MG_285', 'MG_286', 'MG_287', 'MG504', 'MG_288', 'MG_289', 'MG_290', 'MG_291', 'MG_505', 'MG_292', 'MG_293', 'MG_294', 'MG_295', 'MG_296', 'MG_297', 'MG_298', 'MG_299', 'MG_300', 'MG_301', 'MG_302', 'MG_303', 'MG_304', 'MG_305', 'MG_306', 'MG_307', 'MG_308', 'MG_309', 'MG_310', 'MG_311', 'MG_312', 'MG_313', 'MG_314', 'MG_315', 'MG_316', 'MG_317', 'MG_318', 'MG_319', 'MG_320', 'MG506', 'MG507', 'MG_321', 'MG508', 'MG509', 'MG510', 'MG511', 'MG512', 'MG513', 'MG514', 'MG_322', 'MG_323', 'MG_0003', 'MG_0004', 'MG_515', 'MG_324', 'MG_325', 'MG_326', 'MG_327', 'MG_328', 'MG_329', 'MG_330', 'MG_331', 'MG_332', 'MG_333', 'MG_334', 'MG_335', 'MG_516', 'MG_517', 'MG_336', 'MG_337', 'MG_338', 'MG_339', 'MG_340', 'MG_341', 'MG_342', 'MG_343', 'MG_344', 'MG_345', 'MG_346', 'MG_347', 'MG518', 'MG_348', 
'MG519', 'MG_349', 'MG_350', 'MG520', 'MG_521', 'MG_351', 'MG_352', 'MG_353', 'MG_354', 'MG_355', 'MG_356', 'MG_357', 'MG_358', 'MG_359', 'MG_360', 'MG_361', 'MG_362', 'MG_363', 'MG_522', 'MG_364', 'MG_365', 'MG_366', 'MG_367', 'MG_368', 'MG_369', 'MG_370', 'MG_371', 'MG_372', 'MG_373', 'MG_374', 'MG_375', 'MG_376', 'MG_377', 'MG_378', 'MG_379', 'MG_380', 'MG_381', 'MG523', 'MG_382', 'MG_383', 'MG_384', 'MG_524', 'MG_385', 'MG_386', 'MG_387', 'MG_388', 'MG_389', 'MG_390', 'MG_391', 'MG_392', 'MG_393', 'MG_394', 'MG_395', 'MG_396', 'MG_397', 'MG_398', 'MG_399', 'MG_400', 'MG_401', 'MG_402', 'MG_403', 'MG_404', 'MG_405', 'MG_406', 'MG_407', 'MG_408', 'MG_409', 'MG_410', 'MG_411', 'MG_412', 'MG_414', 'MG_525', 'MG_417', 'MG_418', 'MG_419', 'MG_421', 'MG_422', 'MG_423', 'MG_424', 'MG_425', 'MG_426', 'MG_427', 'MG_428', 'MG_429', 'MG_430', 'MG_431', 'MG_432', 'MG_433', 'MG_434', 'MG_435', 'MG_437', 'MG_438', 'MG_439', 'MG_440', 'MG_441', 'MG_442', 'MG_443', 'MG_444', 'MG_445', 'MG_446', 'MG_447', 'MG_448', 'MG_449', 'MG_450', 'MG_451', 'MG_452', 'MG_453', 'MG_454', 'MG_455', 'MG_456', 'MG_457', 'MG_458', 'MG_459', 'MG_460', 'MG_461', 'MG_462', 'MG_463', 'MG_464', 'MG_465', 'MG_466', 'MG_467', 'MG_468', 'MG_526', 'MG_469', 'MG_470')
# Build a Genes analysis object over the full gene tuple, load genome strings
# for experiment 17 from the local SQLite knock-out database, and preview them.
genomes = anal(bc3_conn, all_genes)
genomes.appendGenomeFromDb('/home/oli/git/published_libraries/whole_cell_modelling_suite/whole_cell_modelling_suite/analysis/ko.sqll', 'select gen.genome from mgKarr2012Genome as gen join growthAndDivision as gad on gad.genome_id = gen.id join batch on batch.id = gad.batch_id join experiment as exp on exp.id = batch.experiment_id where experiment_id = 17')
print(genomes.genomes.head())
| 451.846154
| 5,238
| 0.61985
| 1,112
| 5,874
| 2.821942
| 0.518885
| 0.005099
| 0.007011
| 0.014659
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.301458
| 0.100953
| 5,874
| 12
| 5,239
| 489.5
| 0.292748
| 0
| 0
| 0
| 0
| 0.125
| 0.595335
| 0.021791
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.375
| 0
| 0.375
| 0.125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
0097f88f2ffb9faa4f5c43b91f05774d3a135151
| 133
|
py
|
Python
|
cinema_system/userAccount/admin.py
|
SJPark94/E-Cinema-Booking-System
|
dbb92f615a3c5f63def2cc7247183555176d79ef
|
[
"MIT"
] | 1
|
2019-04-22T19:55:25.000Z
|
2019-04-22T19:55:25.000Z
|
cinema_system/userAccount/admin.py
|
SJPark94/E-Cinema-Booking-System
|
dbb92f615a3c5f63def2cc7247183555176d79ef
|
[
"MIT"
] | null | null | null |
cinema_system/userAccount/admin.py
|
SJPark94/E-Cinema-Booking-System
|
dbb92f615a3c5f63def2cc7247183555176d79ef
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
# Register your models here.
from userAccount.models import UserInfo
# Expose UserInfo in the Django admin site with the default ModelAdmin.
admin.site.register(UserInfo)
| 22.166667
| 39
| 0.827068
| 18
| 133
| 6.111111
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112782
| 133
| 6
| 40
| 22.166667
| 0.932203
| 0.195489
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
00d2895ce3bafd0e526da27212a435152a511b44
| 137
|
py
|
Python
|
tests/pipeline_example.py
|
jbn/vaquero
|
83d913c4f72f67cf0a48061de752134a44facb87
|
[
"MIT"
] | 1
|
2017-03-16T14:41:03.000Z
|
2017-03-16T14:41:03.000Z
|
tests/pipeline_example.py
|
jbn/vaquero
|
83d913c4f72f67cf0a48061de752134a44facb87
|
[
"MIT"
] | 5
|
2016-11-04T16:10:46.000Z
|
2016-11-04T16:19:58.000Z
|
tests/pipeline_example.py
|
jbn/vaquero
|
83d913c4f72f67cf0a48061de752134a44facb87
|
[
"MIT"
] | null | null | null |
def f(items):
    """Append the constant 100 to *items*, mutating it in place."""
    items += [100]
def g(items):
    """Double the last element of *items* in place."""
    last = items.pop()
    items.append(last * 2)
def h(items):
    """Replace the last element of *items* with ten copies of itself."""
    tail = items.pop()
    items += [tail] * 10
| 15.222222
| 36
| 0.59854
| 22
| 137
| 3.727273
| 0.5
| 0.365854
| 0.390244
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054545
| 0.19708
| 137
| 8
| 37
| 17.125
| 0.690909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9706c66218ea7265b0ed3711d87100ad7ec5169a
| 355
|
py
|
Python
|
pydiscordbio/exceptions.py
|
awersli99/pydiscordbio
|
adbe6853594f1ee700043f9520dfd9a893fa44f0
|
[
"MIT"
] | 7
|
2020-08-29T15:56:24.000Z
|
2021-02-21T22:30:37.000Z
|
pydiscordbio/exceptions.py
|
awersli99/pydiscordbio
|
adbe6853594f1ee700043f9520dfd9a893fa44f0
|
[
"MIT"
] | null | null | null |
pydiscordbio/exceptions.py
|
awersli99/pydiscordbio
|
adbe6853594f1ee700043f9520dfd9a893fa44f0
|
[
"MIT"
] | null | null | null |
class APIError(Exception):
    """Raised when there is an API error"""
class NotFound(Exception):
    """Raised on a 404 status code"""
class UserNotFound(Exception):
    """Raised when a specified user cannot be found by the API"""
class InvalidSearch(Exception):
    """Raised when a search query is invalid"""
| 18.684211
| 65
| 0.670423
| 47
| 355
| 5.06383
| 0.617021
| 0.252101
| 0.239496
| 0.168067
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010949
| 0.228169
| 355
| 18
| 66
| 19.722222
| 0.857664
| 0.43662
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
974ae41712d756f8ccc9ce875b61abceaf433da9
| 780
|
py
|
Python
|
test/test_field_length.py
|
smalbadger/kommander
|
eefa145cdabce8c8e3afdb1d6d857ebfff7a0735
|
[
"MIT"
] | 1
|
2021-03-18T00:20:53.000Z
|
2021-03-18T00:20:53.000Z
|
test/test_field_length.py
|
smalbadger/kommander
|
eefa145cdabce8c8e3afdb1d6d857ebfff7a0735
|
[
"MIT"
] | 65
|
2021-02-14T03:36:57.000Z
|
2022-03-04T08:33:10.000Z
|
test/test_field_length.py
|
smalbadger/pymessagelib
|
eefa145cdabce8c8e3afdb1d6d857ebfff7a0735
|
[
"MIT"
] | null | null | null |
import unittest
from pymessagelib import Field, Bit, Bits
class TestFieldLength(unittest.TestCase):
    """Check Field.length_as_format() for non-aligned and byte-aligned widths.

    BUG FIX: the original calls used ``self.assertTrue(value, expected)``,
    which treats the second argument as the failure *message* and only checks
    truthiness — the expected lengths were never actually compared. All
    assertions are switched to ``assertEqual``. NOTE(review): because these
    values were previously unchecked, they should be re-validated against
    pymessagelib's actual length_as_format behavior.
    """

    def testNonDWordAlignedField(self):
        # A 7-bit field, rendered in each supported format.
        field = Bits(7)
        self.assertEqual(field.length_as_format(Field.Format.Hex), 2)
        self.assertEqual(field.length_as_format(Field.Format.Dec), 1)
        self.assertEqual(field.length_as_format(Field.Format.Oct), 3)
        self.assertEqual(field.length_as_format(Field.Format.Bin), 7)

    def testDWordAlignedField(self):
        # An 8-bit (byte-aligned) field, rendered in each supported format.
        field = Bits(8)
        self.assertEqual(field.length_as_format(Field.Format.Hex), 2)
        self.assertEqual(field.length_as_format(Field.Format.Dec), 1)
        self.assertEqual(field.length_as_format(Field.Format.Oct), 3)
        self.assertEqual(field.length_as_format(Field.Format.Bin), 8)
| 41.052632
| 68
| 0.721795
| 104
| 780
| 5.259615
| 0.259615
| 0.204753
| 0.277879
| 0.365631
| 0.698355
| 0.698355
| 0.698355
| 0.698355
| 0.698355
| 0.698355
| 0
| 0.015314
| 0.162821
| 780
| 18
| 69
| 43.333333
| 0.822358
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.533333
| 1
| 0.133333
| false
| 0
| 0.133333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
97535751b1842cb083a73e6acb48d5eb9267d5eb
| 82
|
py
|
Python
|
High School/9th Grade APCSP (Python)/Unit 1/Unit 01.02 (The Basics)/01.02.04.py
|
SomewhereOutInSpace/Computer-Science-Class
|
f5d21850236a7a18dc53b4a650ecbe9a11781f1d
|
[
"Unlicense"
] | null | null | null |
High School/9th Grade APCSP (Python)/Unit 1/Unit 01.02 (The Basics)/01.02.04.py
|
SomewhereOutInSpace/Computer-Science-Class
|
f5d21850236a7a18dc53b4a650ecbe9a11781f1d
|
[
"Unlicense"
] | null | null | null |
High School/9th Grade APCSP (Python)/Unit 1/Unit 01.02 (The Basics)/01.02.04.py
|
SomewhereOutInSpace/Computer-Science-Class
|
f5d21850236a7a18dc53b4a650ecbe9a11781f1d
|
[
"Unlicense"
] | null | null | null |
# 01.02.04 ASCII KAT
# Draw a small ASCII-art cat, one row per line of output.
for row in ("(---)", "(o o)", "(=Y=)", " ( ) "):
    print(row)
| 11.714286
| 20
| 0.45122
| 12
| 82
| 3.083333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 0.158537
| 82
| 6
| 21
| 13.666667
| 0.449275
| 0.219512
| 0
| 0
| 0
| 0
| 0.322581
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
9769cd30baf99cacd5c6e81462a1193666c13796
| 197
|
py
|
Python
|
ajax/conf.py
|
joestump/django-ajax
|
b71619d5c00d8e0bb990ddbea2c93cf303dc2c80
|
[
"BSD-3-Clause"
] | 62
|
2015-01-09T23:02:06.000Z
|
2020-12-27T19:44:58.000Z
|
ajax/conf.py
|
joestump/django-ajax
|
b71619d5c00d8e0bb990ddbea2c93cf303dc2c80
|
[
"BSD-3-Clause"
] | 7
|
2015-03-26T21:52:54.000Z
|
2016-06-20T20:53:43.000Z
|
ajax/conf.py
|
joestump/django-ajax
|
b71619d5c00d8e0bb990ddbea2c93cf303dc2c80
|
[
"BSD-3-Clause"
] | 12
|
2015-02-23T11:58:44.000Z
|
2020-10-26T22:32:58.000Z
|
from __future__ import absolute_import
from django.conf import settings
from appconf import AppConf
class AjaxAppConf(AppConf):
AJAX_AUTHENTICATION = 'ajax.authentication.BaseAuthentication'
| 24.625
| 66
| 0.837563
| 22
| 197
| 7.227273
| 0.590909
| 0.226415
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116751
| 197
| 7
| 67
| 28.142857
| 0.913793
| 0
| 0
| 0
| 0
| 0
| 0.192893
| 0.192893
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.6
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9772efc4d2bba0fa82ae4cb3be4801fb89e754ba
| 59
|
py
|
Python
|
test_framework/fixtures/__init__.py
|
Gliger13/bdo_daily_bot
|
d569405fcae1978c2bb1ac34d1f75936040a3552
|
[
"MIT"
] | null | null | null |
test_framework/fixtures/__init__.py
|
Gliger13/bdo_daily_bot
|
d569405fcae1978c2bb1ac34d1f75936040a3552
|
[
"MIT"
] | null | null | null |
test_framework/fixtures/__init__.py
|
Gliger13/bdo_daily_bot
|
d569405fcae1978c2bb1ac34d1f75936040a3552
|
[
"MIT"
] | null | null | null |
from test_framework.fixtures.database.collections import *
| 29.5
| 58
| 0.864407
| 7
| 59
| 7.142857
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067797
| 59
| 1
| 59
| 59
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
97844b09085e3762495848bd7867fa91a2955270
| 139
|
py
|
Python
|
tests/some_python_package/module3.py
|
rmorshea/sphinx-resolve-py-references
|
c0a1d6ebee0582d7a9ac5c3661f4d18405c6607f
|
[
"BSD-2-Clause"
] | 1
|
2021-11-02T19:12:05.000Z
|
2021-11-02T19:12:05.000Z
|
tests/some_python_package/module3.py
|
rmorshea/sphinx-resolve-py-references
|
c0a1d6ebee0582d7a9ac5c3661f4d18405c6607f
|
[
"BSD-2-Clause"
] | 1
|
2021-11-02T19:19:01.000Z
|
2021-11-15T03:44:45.000Z
|
tests/some_python_package/module3.py
|
rmorshea/sphinx-resolve-py-references
|
c0a1d6ebee0582d7a9ac5c3661f4d18405c6607f
|
[
"BSD-2-Clause"
] | 1
|
2021-08-21T22:39:38.000Z
|
2021-08-21T22:39:38.000Z
|
GLOBAL_3 = "global 3"
"""Docs for global 3"""
class Class3:
"""Docs for class 3"""
def function_3():
"""Docs for function 3"""
| 12.636364
| 29
| 0.589928
| 21
| 139
| 3.809524
| 0.380952
| 0.2625
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065421
| 0.230216
| 139
| 10
| 30
| 13.9
| 0.682243
| 0.258993
| 0
| 0
| 0
| 0
| 0.115942
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
979dc3ecd80cdfb2f22305be4708fbe2145f1fd4
| 121
|
py
|
Python
|
ploopsnapshot/__init__.py
|
nexcess/python-ploopsnapshot
|
774ca48844b6edb5e1d900f30f7433534bacd9e4
|
[
"Apache-2.0"
] | null | null | null |
ploopsnapshot/__init__.py
|
nexcess/python-ploopsnapshot
|
774ca48844b6edb5e1d900f30f7433534bacd9e4
|
[
"Apache-2.0"
] | null | null | null |
ploopsnapshot/__init__.py
|
nexcess/python-ploopsnapshot
|
774ca48844b6edb5e1d900f30f7433534bacd9e4
|
[
"Apache-2.0"
] | null | null | null |
###
# Copyright (C) 2016 Nexcess.net L.L.C.
###
# import to make naming easier
from ploopsnapshot import ploopSnapshot
| 17.285714
| 40
| 0.719008
| 17
| 121
| 5.117647
| 0.764706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.039604
| 0.165289
| 121
| 6
| 41
| 20.166667
| 0.821782
| 0.553719
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
97aa2119b1c1f4a56ff8051bf19d763a097dbc77
| 21
|
py
|
Python
|
__init__.py
|
gunnarpope/pyjson
|
467e1fcceabe05d0703ce583bd46359a754e7352
|
[
"MIT"
] | null | null | null |
__init__.py
|
gunnarpope/pyjson
|
467e1fcceabe05d0703ce583bd46359a754e7352
|
[
"MIT"
] | null | null | null |
__init__.py
|
gunnarpope/pyjson
|
467e1fcceabe05d0703ce583bd46359a754e7352
|
[
"MIT"
] | null | null | null |
from pyjson import *
| 10.5
| 20
| 0.761905
| 3
| 21
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 21
| 1
| 21
| 21
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
97bc2532cfac05983c8f300297a239de7e1d5601
| 166
|
py
|
Python
|
kube_hunter/conf/__init__.py
|
mormamn/kube-hunter
|
14d73e201eda58eef6d873f023e39df13a9464fa
|
[
"Apache-2.0"
] | 2
|
2022-02-09T18:05:46.000Z
|
2022-03-11T06:39:01.000Z
|
kube_hunter/conf/__init__.py
|
mormamn/kube-hunter
|
14d73e201eda58eef6d873f023e39df13a9464fa
|
[
"Apache-2.0"
] | null | null | null |
kube_hunter/conf/__init__.py
|
mormamn/kube-hunter
|
14d73e201eda58eef6d873f023e39df13a9464fa
|
[
"Apache-2.0"
] | null | null | null |
from kube_hunter.conf.parser import parse_args
from kube_hunter.conf.logging import setup_logger
config = parse_args()
setup_logger(config.log)
__all__ = [config]
| 18.444444
| 49
| 0.813253
| 25
| 166
| 5
| 0.56
| 0.128
| 0.224
| 0.288
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108434
| 166
| 8
| 50
| 20.75
| 0.844595
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
8af7f5f832074326bcddb5b7006dba3c35f6c751
| 305
|
py
|
Python
|
Leetcode/0111. Minimum Depth of Binary Tree/0111.py
|
Next-Gen-UI/Code-Dynamics
|
a9b9d5e3f27e870b3e030c75a1060d88292de01c
|
[
"MIT"
] | null | null | null |
Leetcode/0111. Minimum Depth of Binary Tree/0111.py
|
Next-Gen-UI/Code-Dynamics
|
a9b9d5e3f27e870b3e030c75a1060d88292de01c
|
[
"MIT"
] | null | null | null |
Leetcode/0111. Minimum Depth of Binary Tree/0111.py
|
Next-Gen-UI/Code-Dynamics
|
a9b9d5e3f27e870b3e030c75a1060d88292de01c
|
[
"MIT"
] | null | null | null |
class Solution:
def minDepth(self, root: Optional[TreeNode]) -> int:
if not root:
return 0
if not root.left:
return self.minDepth(root.right) + 1
if not root.right:
return self.minDepth(root.left) + 1
return min(self.minDepth(root.left), self.minDepth(root.right)) + 1
| 30.5
| 71
| 0.655738
| 45
| 305
| 4.444444
| 0.377778
| 0.24
| 0.32
| 0.22
| 0.22
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016878
| 0.222951
| 305
| 9
| 72
| 33.888889
| 0.827004
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
8aff1f68ca51c0e05cbe1c3bb79d898a7c541e4f
| 62
|
py
|
Python
|
src/core/transactions/models/__init__.py
|
arnulfojr/simple-pos
|
119c4c52bf62f52004f4b2b031098ed71890d250
|
[
"MIT"
] | 1
|
2018-09-11T19:32:25.000Z
|
2018-09-11T19:32:25.000Z
|
src/core/transactions/models/__init__.py
|
arnulfojr/simple-pos
|
119c4c52bf62f52004f4b2b031098ed71890d250
|
[
"MIT"
] | null | null | null |
src/core/transactions/models/__init__.py
|
arnulfojr/simple-pos
|
119c4c52bf62f52004f4b2b031098ed71890d250
|
[
"MIT"
] | null | null | null |
from transactions import Transaction
from items import Item
| 12.4
| 36
| 0.83871
| 8
| 62
| 6.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 62
| 4
| 37
| 15.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c1221c14a3108b548a1cbf9f50a1b93c9161ccb0
| 258
|
py
|
Python
|
src/mlsafari/exceptions.py
|
THargreaves/machine-learning-safari
|
500f47531c424bb5df494c6db94bf63c578e9932
|
[
"MIT"
] | 1
|
2021-07-16T23:39:50.000Z
|
2021-07-16T23:39:50.000Z
|
src/mlsafari/exceptions.py
|
THargreaves/machine-learning-safari
|
500f47531c424bb5df494c6db94bf63c578e9932
|
[
"MIT"
] | 6
|
2021-07-15T22:24:56.000Z
|
2021-10-10T09:20:36.000Z
|
src/mlsafari/exceptions.py
|
THargreaves/machine-learning-safari
|
500f47531c424bb5df494c6db94bf63c578e9932
|
[
"MIT"
] | null | null | null |
"""Package-specific exceptions."""
class NotFittedError(Exception):
"""Exception raised when predicting using an unfitted estimator."""
pass
class ConvergenceError(Exception):
"""Exception raised when a model fails to converge."""
pass
| 18.428571
| 71
| 0.713178
| 27
| 258
| 6.814815
| 0.740741
| 0.195652
| 0.26087
| 0.304348
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.178295
| 258
| 13
| 72
| 19.846154
| 0.867925
| 0.53876
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
c130e12a8cf58886043c4e259a7512b1bab540ed
| 244
|
py
|
Python
|
src/2_exercises/3_BehavioralPatterns/1_ChainOfResponsibility/exercise_chain_of_responsibility.py
|
MilovanTomasevic/Python-Design-Patterns
|
2a30e24e062bc623e390f8ced9c228c3ff038e54
|
[
"MIT"
] | 2
|
2020-12-01T21:06:29.000Z
|
2022-01-11T12:40:06.000Z
|
src/2_exercises/3_BehavioralPatterns/1_ChainOfResponsibility/exercise_chain_of_responsibility.py
|
MilovanTomasevic/Python-Design-Patterns
|
2a30e24e062bc623e390f8ced9c228c3ff038e54
|
[
"MIT"
] | null | null | null |
src/2_exercises/3_BehavioralPatterns/1_ChainOfResponsibility/exercise_chain_of_responsibility.py
|
MilovanTomasevic/Python-Design-Patterns
|
2a30e24e062bc623e390f8ced9c228c3ff038e54
|
[
"MIT"
] | 1
|
2022-01-11T12:39:51.000Z
|
2022-01-11T12:39:51.000Z
|
class Goblin(Creature):
def __init__(self, game, attack=1, defense=1):
# todo
class GoblinKing(Goblin):
def __init__(self, game):
# todo
class Game:
def __init__(self):
self.creatures = []
| 22.181818
| 51
| 0.565574
| 27
| 244
| 4.666667
| 0.481481
| 0.166667
| 0.261905
| 0.238095
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012048
| 0.319672
| 244
| 11
| 52
| 22.181818
| 0.746988
| 0.036885
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c14ed16ac4e1198d13f9ac042e8cdbfc7c1823bf
| 194
|
py
|
Python
|
src/graph_transpiler/webdnn/backend/webgl/kernels/sinh.py
|
steerapi/webdnn
|
1df51cc094e5a528cfd3452c264905708eadb491
|
[
"MIT"
] | 1
|
2021-04-09T15:55:35.000Z
|
2021-04-09T15:55:35.000Z
|
src/graph_transpiler/webdnn/backend/webgl/kernels/sinh.py
|
steerapi/webdnn
|
1df51cc094e5a528cfd3452c264905708eadb491
|
[
"MIT"
] | null | null | null |
src/graph_transpiler/webdnn/backend/webgl/kernels/sinh.py
|
steerapi/webdnn
|
1df51cc094e5a528cfd3452c264905708eadb491
|
[
"MIT"
] | null | null | null |
from webdnn.backend.webgl.kernels.elementwise import register_elementwise_kernel
from webdnn.graph.operators.sinh import Sinh
register_elementwise_kernel(Sinh, "y = (exp(x0) - exp(-x0))/2.0;")
| 38.8
| 80
| 0.798969
| 28
| 194
| 5.392857
| 0.607143
| 0.13245
| 0.331126
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022346
| 0.07732
| 194
| 4
| 81
| 48.5
| 0.821229
| 0
| 0
| 0
| 0
| 0
| 0.149485
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c15f8e2ae5410b1fdb19115991fe5b5d438a1823
| 2,948
|
py
|
Python
|
ability-python/app/service/ability_service.py
|
corner4world/cubeai
|
ffe3ded358a70cd0a512420f7df2bb931eaae1c9
|
[
"Apache-2.0"
] | null | null | null |
ability-python/app/service/ability_service.py
|
corner4world/cubeai
|
ffe3ded358a70cd0a512420f7df2bb931eaae1c9
|
[
"Apache-2.0"
] | null | null | null |
ability-python/app/service/ability_service.py
|
corner4world/cubeai
|
ffe3ded358a70cd0a512420f7df2bb931eaae1c9
|
[
"Apache-2.0"
] | null | null | null |
import requests
from app.service import umm_client
from app.global_data.global_data import g
def forward_request(prev_request):
path = prev_request.path
if path.startswith('/model/'):
deployment_uuid = path[7:]
res = umm_client.find_ability(deployment_uuid)
if res['status'] != 'ok':
raise Exception('未找到部署实例')
deployment = res['value']
k8s_port = deployment.get('k8sPort')
if k8s_port is None:
raise Exception('k8s实例端口停止服务')
internal_ip = g.get_central_config()['kubernetes']['ability']['internalIP']
url = 'http://{}:{}/api/model'.format(internal_ip, k8s_port)
res = requests.post(url=url, data=prev_request.body, headers=prev_request.headers)
return {
'response': res,
}
if path.startswith('/file/'):
deployment_uuid, method = path[6:].split('/')
res = umm_client.find_ability(deployment_uuid)
if res['status'] != 'ok':
raise Exception('未找到部署实例')
deployment = res['value']
k8s_port = deployment.get('k8sPort')
if k8s_port is None:
raise Exception('k8s实例端口停止服务')
internal_ip = g.get_central_config()['kubernetes']['ability']['internalIP']
url = 'http://{}:{}/api/file/{}'.format(internal_ip, k8s_port, method)
res = requests.post(url=url, data=prev_request.body, headers=prev_request.headers)
return {
'response': res,
}
if path.startswith('/stream/'):
deployment_uuid, method = path[8:].split('/')
res = umm_client.find_ability(deployment_uuid)
if res['status'] != 'ok':
raise Exception('未找到部署实例')
deployment = res['value']
k8s_port = deployment.get('k8sPort')
if k8s_port is None:
raise Exception('k8s实例端口停止服务')
internal_ip = g.get_central_config()['kubernetes']['ability']['internalIP']
url = 'http://{}:{}/api/stream/{}'.format(internal_ip, k8s_port, method)
res = requests.post(url=url, data=prev_request.body, headers=prev_request.headers)
return {
'response': res,
}
if path.startswith('/web/'):
path = path[5:]
i = path.find('/')
deployment_uuid = path[:i]
filename = path[i+1:]
res = umm_client.find_ability(deployment_uuid)
if res['status'] != 'ok':
raise Exception('未找到部署实例')
deployment = res['value']
k8s_port = deployment.get('k8sPort')
if k8s_port is None:
raise Exception('k8s实例端口停止服务')
internal_ip = g.get_central_config()['kubernetes']['ability']['internalIP']
url = 'http://{}:{}/web/{}'.format(internal_ip, k8s_port, filename)
res = requests.get(url=url, headers=prev_request.headers)
return {
'response': res,
}
raise Exception('unsupported API name')
| 29.48
| 90
| 0.590231
| 330
| 2,948
| 5.1
| 0.19697
| 0.049911
| 0.038027
| 0.038027
| 0.788473
| 0.761141
| 0.761141
| 0.736185
| 0.736185
| 0.736185
| 0
| 0.011521
| 0.263908
| 2,948
| 99
| 91
| 29.777778
| 0.764055
| 0
| 0
| 0.623188
| 0
| 0
| 0.14654
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014493
| false
| 0
| 0.043478
| 0
| 0.115942
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c18a1cdf42fbb77ee1da8b2d9c8836265da88292
| 2,718
|
py
|
Python
|
tethysapp/modflow/tests/unit_tests/cli/link_databases.py
|
Aquaveo/tethysapp-modflow
|
5e662d8346f2ffd414ac912a531eef06c5ae79d9
|
[
"BSD-3-Clause"
] | null | null | null |
tethysapp/modflow/tests/unit_tests/cli/link_databases.py
|
Aquaveo/tethysapp-modflow
|
5e662d8346f2ffd414ac912a531eef06c5ae79d9
|
[
"BSD-3-Clause"
] | null | null | null |
tethysapp/modflow/tests/unit_tests/cli/link_databases.py
|
Aquaveo/tethysapp-modflow
|
5e662d8346f2ffd414ac912a531eef06c5ae79d9
|
[
"BSD-3-Clause"
] | null | null | null |
"""
********************************************************************************
* Name: init_command
* Author: nswain
* Created On: July 26, 2018
* Copyright: (c) Aquaveo 2018
********************************************************************************
"""
import mock
import unittest
import tethysapp.modflow.cli.link_databases as ld
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
class LinkDatabasesTests(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
@mock.patch('tethysapp.modflow.cli.link_databases.create_engine')
@mock.patch('sys.stdout', new_callable=StringIO)
def test_link_databases_no_models(self, mock_print, mock_create_engine):
mock_result = mock.MagicMock()
mock_result.__iter__.return_value = []
mock_create_engine().execute.return_value = mock_result
mock_arguments = mock.MagicMock(service_name="test_db")
ld.link_databases(mock_arguments)
print_value = mock_print.getvalue()
self.assertIn('No results found', print_value)
mock_result.close.assert_called()
mock_create_engine().execute.assert_called()
@mock.patch('tethys_apps.utilities.create_ps_database_setting')
@mock.patch('tethysapp.modflow.cli.link_databases.create_engine')
@mock.patch('sys.stdout', new_callable=StringIO)
def test_link_databases_create_success(self, mock_print, mock_create_engine, mock_cpds):
mock_result = mock.MagicMock()
mock_result.__iter__.return_value = ['eggs']
mock_create_engine().execute.return_value = mock_result
mock_cpds.return_value = True
mock_arguments = mock.MagicMock(service_name="test_db")
ld.link_databases(mock_arguments)
print_value = mock_print.getvalue()
self.assertNotIn('No results found', print_value)
self.assertIn('Successfully linked Modflow DB', print_value)
@mock.patch('tethys_apps.utilities.create_ps_database_setting')
@mock.patch('tethysapp.modflow.cli.link_databases.create_engine')
@mock.patch('sys.stdout', new_callable=StringIO)
def test_link_databases_create_failure(self, mock_print, mock_create_engine, mock_cpds):
mock_result = mock.MagicMock()
mock_result.__iter__.return_value = ['eggs']
mock_create_engine().execute.return_value = mock_result
mock_cpds.return_value = False
mock_arguments = mock.MagicMock(service_name="test_db")
ld.link_databases(mock_arguments)
print_value = mock_print.getvalue()
self.assertNotIn('No results found', print_value)
self.assertIn('Could not link the database', print_value)
| 37.75
| 92
| 0.679544
| 321
| 2,718
| 5.411215
| 0.255452
| 0.074842
| 0.064479
| 0.052965
| 0.762809
| 0.73057
| 0.73057
| 0.713875
| 0.713875
| 0.66091
| 0
| 0.004423
| 0.168138
| 2,718
| 71
| 93
| 38.28169
| 0.763821
| 0.094555
| 0
| 0.58
| 0
| 0
| 0.167142
| 0.100285
| 0
| 0
| 0
| 0
| 0.14
| 1
| 0.1
| false
| 0.04
| 0.12
| 0
| 0.24
| 0.22
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c19e0c5a8d5b28b5e539fde8decc9b5cb138a181
| 162
|
py
|
Python
|
cli/src/dolbyio_rest_apis_cli/__init__.py
|
dolbyio-samples/dolbyio-rest-apis-client-python
|
37354dc10f967c4656776f9e2651a2284a11f530
|
[
"MIT"
] | 1
|
2021-12-23T17:55:06.000Z
|
2021-12-23T17:55:06.000Z
|
client/src/dolbyio_rest_apis/__init__.py
|
dolbyio-samples/dolbyio-rest-apis-client-python
|
37354dc10f967c4656776f9e2651a2284a11f530
|
[
"MIT"
] | null | null | null |
client/src/dolbyio_rest_apis/__init__.py
|
dolbyio-samples/dolbyio-rest-apis-client-python
|
37354dc10f967c4656776f9e2651a2284a11f530
|
[
"MIT"
] | null | null | null |
"""Versioning"""
import importlib.metadata
try:
__version__ = importlib.metadata.version(__name__)
except importlib.metadata.PackageNotFoundError:
pass
| 18
| 54
| 0.777778
| 15
| 162
| 7.866667
| 0.666667
| 0.432203
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123457
| 162
| 8
| 55
| 20.25
| 0.830986
| 0.061728
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.2
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
c19fc136336de932459c56981c8af99f9204df91
| 4,595
|
py
|
Python
|
test/python/test_select.py
|
slyalin/openvino_tensorflow
|
37a2e5b6ff1e60217d31340ad3975b41faa39da0
|
[
"Apache-2.0"
] | null | null | null |
test/python/test_select.py
|
slyalin/openvino_tensorflow
|
37a2e5b6ff1e60217d31340ad3975b41faa39da0
|
[
"Apache-2.0"
] | null | null | null |
test/python/test_select.py
|
slyalin/openvino_tensorflow
|
37a2e5b6ff1e60217d31340ad3975b41faa39da0
|
[
"Apache-2.0"
] | 1
|
2021-05-12T07:35:34.000Z
|
2021-05-12T07:35:34.000Z
|
# ==============================================================================
# Copyright (C) 2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
# ==============================================================================
"""Openvino Tensorflow floor operation test
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import pytest
import numpy as np
import tensorflow as tf
tf.compat.v1.disable_eager_execution()
from common import NgraphTest
class TestSelect(NgraphTest):
def test_select_scalar(self):
a = [1.5]
p = tf.compat.v1.placeholder(dtype=tf.bool)
out = tf.where(p, x=[1], y=[0])
def run_test(sess):
return sess.run(out, feed_dict={p: a})
assert (
self.with_ngraph(run_test) == self.without_ngraph(run_test)).all()
def test_select_sameshape(self):
a = [True, False, True, True]
p = tf.compat.v1.placeholder(dtype=tf.bool)
out = tf.where(p, x=[1] * 4, y=[0] * 4)
def run_test(sess):
return sess.run(out, feed_dict={p: a})
assert (
self.with_ngraph(run_test) == self.without_ngraph(run_test)).all()
def test_select_diffrank(self):
a = [1, 1]
x = [[0, 0], [2, 2]]
y = [[2, 2], [1, 1]]
p = tf.compat.v1.placeholder(dtype=tf.bool)
out = tf.where(p, x, y)
def run_test(sess):
return sess.run(out, feed_dict={p: a})
assert (
self.with_ngraph(run_test) == self.without_ngraph(run_test)).all()
def test_select_complexshape1(self):
a = np.random.random(size=[7]).astype(np.float32)
x = np.random.random(size=[7, 3, 2, 1]).astype(np.float32)
p = tf.compat.v1.placeholder(dtype=tf.bool)
out = tf.where(p, x, x)
def run_test(sess):
return (sess.run(out, feed_dict={p: a}))
assert (
self.with_ngraph(run_test) == self.without_ngraph(run_test)).all()
def test_select_complexshape2(self):
a = np.random.random(size=[7]).astype(np.float32)
x = np.random.random(size=[7, 3, 2, 7]).astype(np.float32)
p = tf.compat.v1.placeholder(dtype=tf.bool)
out = tf.where(p, x, x)
def run_test(sess):
return (sess.run(out, feed_dict={p: a}))
assert (
self.with_ngraph(run_test) == self.without_ngraph(run_test)).all()
def test_select_complexshape3(self):
a = np.random.random(size=[5]).astype(np.float32)
x = np.random.random(size=[5, 3, 1]).astype(np.float32)
p = tf.compat.v1.placeholder(dtype=tf.bool)
out = tf.where(p, x, x)
def run_test(sess):
return (sess.run(out, feed_dict={p: a}))
assert (
self.with_ngraph(run_test) == self.without_ngraph(run_test)).all()
class TestWhere(NgraphTest):
env_map = None
def setup_method(self):
self.env_map = self.store_env_variables(
['OPENVINO_TF_CONSTANT_FOLDING'])
self.set_env_variable('OPENVINO_TF_CONSTANT_FOLDING', '1')
def teardown_method(self):
self.restore_env_variables(self.env_map)
def test_where(self):
a = np.array([1.1, 3.0], [2.2, 4.4]).astype(np.float32)
p = tf.compat.v1.placeholder(dtype=tf.float32, shape=(2, 2))
out = tf.where(tf.equal(p, 3.0))
def run_test(sess):
return sess.run(out, feed_dict={p: a})
assert (
self.with_ngraph(run_test) == self.without_ngraph(run_test)).all()
def test_where_scalar(self):
a = [1.5]
p = tf.compat.v1.placeholder(dtype=tf.bool)
out = tf.where(p)
def run_test(sess):
return sess.run(out, feed_dict={p: a})
assert (
self.with_ngraph(run_test) == self.without_ngraph(run_test)).all()
def test_where_bool(self):
a = [True, False, False, True, False]
p = tf.compat.v1.placeholder(dtype=tf.bool)
out = tf.where(p)
def run_test(sess):
return sess.run(out, feed_dict={p: a})
assert (
self.with_ngraph(run_test) == self.without_ngraph(run_test)).all()
def test_where_complexshape1(self):
a = np.random.random(size=[7]).astype(np.float32)
p = tf.compat.v1.placeholder(dtype=tf.bool)
out = tf.where(p)
def run_test(sess):
return (sess.run(out, feed_dict={p: a}))
assert (
self.with_ngraph(run_test) == self.without_ngraph(run_test)).all()
| 30.03268
| 80
| 0.574102
| 643
| 4,595
| 3.92846
| 0.144635
| 0.083135
| 0.10293
| 0.043547
| 0.718131
| 0.71734
| 0.708234
| 0.708234
| 0.694774
| 0.694774
| 0
| 0.025291
| 0.25136
| 4,595
| 152
| 81
| 30.230263
| 0.709012
| 0.059195
| 0
| 0.588235
| 0
| 0
| 0.013219
| 0.012987
| 0
| 0
| 0
| 0
| 0.098039
| 1
| 0.215686
| false
| 0
| 0.068627
| 0.098039
| 0.411765
| 0.009804
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c1e5e85284059d6aaef3993a41e3b7f6d6224cc2
| 99
|
py
|
Python
|
src/mathenjeu/apps/__init__.py
|
sdpython/mathenjeu
|
97fc9140ef89ac9c3c6ba46803121fd5d23eb8d1
|
[
"MIT"
] | 1
|
2019-10-12T00:48:35.000Z
|
2019-10-12T00:48:35.000Z
|
src/mathenjeu/apps/__init__.py
|
sdpython/mathenjeu
|
97fc9140ef89ac9c3c6ba46803121fd5d23eb8d1
|
[
"MIT"
] | 8
|
2019-01-13T11:52:55.000Z
|
2020-11-19T01:27:28.000Z
|
src/mathenjeu/apps/__init__.py
|
sdpython/mathenjeu
|
97fc9140ef89ac9c3c6ba46803121fd5d23eb8d1
|
[
"MIT"
] | null | null | null |
"""
@file
@brief Shortcut to *apps*.
"""
from .qcm import QCMApp
from .staticapp import StaticApp
| 12.375
| 32
| 0.707071
| 13
| 99
| 5.384615
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161616
| 99
| 7
| 33
| 14.142857
| 0.843373
| 0.323232
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
de08d1aeee706c880dd50b180b53482e7adb433b
| 231
|
py
|
Python
|
lib/__init__.py
|
JoshOrndorff/snippets
|
ef06e03de09897014f88d89a84b597aabde7edaa
|
[
"Unlicense"
] | null | null | null |
lib/__init__.py
|
JoshOrndorff/snippets
|
ef06e03de09897014f88d89a84b597aabde7edaa
|
[
"Unlicense"
] | null | null | null |
lib/__init__.py
|
JoshOrndorff/snippets
|
ef06e03de09897014f88d89a84b597aabde7edaa
|
[
"Unlicense"
] | null | null | null |
from .Cocurricular import Cocurricular
from .LetterGrade import LetterGrade
from .GeneralSchoolCourse import GeneralSchoolCourse
from .Cname import Cname
# Trying to make a better structured namespace here.
#from . import ctyENGE
| 28.875
| 52
| 0.835498
| 27
| 231
| 7.148148
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12987
| 231
| 7
| 53
| 33
| 0.960199
| 0.307359
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
de0fa734b8a22b55814931cf787643e117d0322a
| 38
|
py
|
Python
|
reviser/tests/deploying/__init__.py
|
rocketboosters/reviser
|
03ee5eadd35db78cf122e48fac4d48981518af11
|
[
"MIT"
] | null | null | null |
reviser/tests/deploying/__init__.py
|
rocketboosters/reviser
|
03ee5eadd35db78cf122e48fac4d48981518af11
|
[
"MIT"
] | null | null | null |
reviser/tests/deploying/__init__.py
|
rocketboosters/reviser
|
03ee5eadd35db78cf122e48fac4d48981518af11
|
[
"MIT"
] | null | null | null |
"""Tests for deploying subpackage."""
| 19
| 37
| 0.710526
| 4
| 38
| 6.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 38
| 1
| 38
| 38
| 0.794118
| 0.815789
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
de161aac762f73302d271978f27251dd6d34d2a7
| 99
|
py
|
Python
|
.eggs/py2app-0.14-py3.6.egg/py2app/recipes/matplotlib_prescript.py
|
stfbnc/mtsa_py
|
0dd14f0e51e3251f10b3da781867fbc7173608eb
|
[
"MIT"
] | 17
|
2018-08-28T04:40:07.000Z
|
2021-12-15T06:19:31.000Z
|
.eggs/py2app-0.14-py3.6.egg/py2app/recipes/matplotlib_prescript.py
|
stfbnc/mtsa_py
|
0dd14f0e51e3251f10b3da781867fbc7173608eb
|
[
"MIT"
] | 4
|
2019-05-17T09:35:30.000Z
|
2022-03-13T03:50:20.000Z
|
.eggs/py2app-0.14-py3.6.egg/py2app/recipes/matplotlib_prescript.py
|
stfbnc/mtsa_py
|
0dd14f0e51e3251f10b3da781867fbc7173608eb
|
[
"MIT"
] | 3
|
2019-01-15T07:13:53.000Z
|
2020-03-29T00:48:39.000Z
|
import os
os.environ['MATPLOTLIBDATA'] = os.path.join(
os.environ['RESOURCEPATH'], 'mpl-data')
| 24.75
| 44
| 0.69697
| 13
| 99
| 5.307692
| 0.692308
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 99
| 3
| 45
| 33
| 0.784091
| 0
| 0
| 0
| 0
| 0
| 0.343434
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
a9c0e776f1e7b634c239e9a0381a3419946cbc60
| 57
|
py
|
Python
|
aat/ui/__init__.py
|
mthomascarcamo/aat
|
fd86f513ccf79625516d2236be655498b24ec742
|
[
"Apache-2.0"
] | 305
|
2020-02-24T02:25:43.000Z
|
2022-03-26T22:53:43.000Z
|
aat/ui/__init__.py
|
mthomascarcamo/aat
|
fd86f513ccf79625516d2236be655498b24ec742
|
[
"Apache-2.0"
] | 79
|
2020-02-20T21:00:58.000Z
|
2022-03-27T14:06:26.000Z
|
aat/ui/__init__.py
|
mthomascarcamo/aat
|
fd86f513ccf79625516d2236be655498b24ec742
|
[
"Apache-2.0"
] | 71
|
2020-05-10T11:52:25.000Z
|
2022-03-29T07:51:48.000Z
|
from .application import ServerApplication # noqa: F401
| 28.5
| 56
| 0.807018
| 6
| 57
| 7.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061224
| 0.140351
| 57
| 1
| 57
| 57
| 0.877551
| 0.175439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e727f4f9620a374cf23f59d8d68ab0adec19ec67
| 225
|
py
|
Python
|
python/8kyu/repeat_it.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | 3
|
2021-06-08T01:57:13.000Z
|
2021-06-26T10:52:47.000Z
|
python/8kyu/repeat_it.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | null | null | null |
python/8kyu/repeat_it.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | 2
|
2021-06-10T21:20:13.000Z
|
2021-06-30T10:13:26.000Z
|
"""Kata url: https://www.codewars.com/kata/557af9418895e44de7000053."""
from typing import Optional
def repeat_it(string: Optional[str], n: int) -> str:
return string * n if isinstance(string, str) else 'Not a string'
| 28.125
| 71
| 0.72
| 32
| 225
| 5.03125
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098446
| 0.142222
| 225
| 7
| 72
| 32.142857
| 0.735751
| 0.288889
| 0
| 0
| 0
| 0
| 0.077922
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
e7305163c22fc37fd055e2cf4375d7b83785e03c
| 255
|
py
|
Python
|
psi/context/api.py
|
bburan/psiexperiment
|
9b70f7f0b4a4379d8c3fc463e1df272153afd247
|
[
"MIT"
] | 5
|
2016-05-26T13:46:00.000Z
|
2020-03-03T13:07:47.000Z
|
psi/context/api.py
|
bburan/psiexperiment
|
9b70f7f0b4a4379d8c3fc463e1df272153afd247
|
[
"MIT"
] | 2
|
2018-04-17T15:06:35.000Z
|
2019-03-25T18:13:10.000Z
|
psi/context/api.py
|
bburan/psiexperiment
|
9b70f7f0b4a4379d8c3fc463e1df272153afd247
|
[
"MIT"
] | 1
|
2016-05-28T19:36:38.000Z
|
2016-05-28T19:36:38.000Z
|
from .context_item import (
BoolParameter, ContextGroup, ContextMeta, EnumParameter, Expression,
FileParameter, OrderedContextMeta, Parameter, Result, UnorderedContextMeta
)
from .selector import CartesianProduct, SingleSetting, SequenceSelector
| 36.428571
| 78
| 0.823529
| 20
| 255
| 10.45
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 255
| 6
| 79
| 42.5
| 0.928889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
e764d73557a58b76166ed9cf6b44749ca62b8e68
| 144
|
py
|
Python
|
WEEKS/CD_Sata-Structures/_MISC/misc-examples/python3-book-examples/sys/package_dir_b/__init__.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
WEEKS/CD_Sata-Structures/_MISC/misc-examples/python3-book-examples/sys/package_dir_b/__init__.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
WEEKS/CD_Sata-Structures/_MISC/misc-examples/python3-book-examples/sys/package_dir_b/__init__.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2014 Doug Hellmann. All rights reserved.
#
#
# All Rights Reserved
#
#
#
"""
"""
# end_pymotw_header
| 11.076923
| 57
| 0.534722
| 14
| 144
| 5.357143
| 0.785714
| 0.24
| 0.453333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042105
| 0.340278
| 144
| 12
| 58
| 12
| 0.747368
| 0.8125
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e778cca89246c70fd08a4c9b98e8d471a1c6c386
| 181
|
py
|
Python
|
src/autograph/views.py
|
RobetSlovev39/AutoGraph
|
c8bdb358b95143ab0d8c6f7c475a6c21f7a76b95
|
[
"MIT"
] | null | null | null |
src/autograph/views.py
|
RobetSlovev39/AutoGraph
|
c8bdb358b95143ab0d8c6f7c475a6c21f7a76b95
|
[
"MIT"
] | null | null | null |
src/autograph/views.py
|
RobetSlovev39/AutoGraph
|
c8bdb358b95143ab0d8c6f7c475a6c21f7a76b95
|
[
"MIT"
] | null | null | null |
from .services.core import update_devices
from django.http import HttpResponse, HttpRequest
def index_view(request: HttpRequest) -> HttpResponse:
return HttpResponse('works')
| 25.857143
| 53
| 0.801105
| 21
| 181
| 6.809524
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121547
| 181
| 6
| 54
| 30.166667
| 0.899371
| 0
| 0
| 0
| 0
| 0
| 0.027624
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
e7831086c6096e715b8c0c4e2716db7db6e35fa5
| 466
|
py
|
Python
|
Layers/uni_lstm_layer.py
|
KaiQiangSong/Structure-Infused-Copy-Mechanism
|
da159ea47516894829d34d3db05bd87b0398bb02
|
[
"BSD-3-Clause"
] | 33
|
2018-05-31T00:58:07.000Z
|
2021-12-10T06:51:12.000Z
|
Layers/uni_lstm_layer.py
|
KaiQiangSong/Structure-Infused-Copy-Mechanism
|
da159ea47516894829d34d3db05bd87b0398bb02
|
[
"BSD-3-Clause"
] | 3
|
2018-10-31T15:55:16.000Z
|
2021-08-29T12:50:14.000Z
|
Layers/uni_lstm_layer.py
|
KaiQiangSong/Structure-Infused-Copy-Mechanism
|
da159ea47516894829d34d3db05bd87b0398bb02
|
[
"BSD-3-Clause"
] | 3
|
2018-05-30T22:03:08.000Z
|
2019-07-22T21:04:10.000Z
|
import theano
import theano.tensor as T
import numpy as np
from utility.utility import *
from lstm_layer import *
def uni_lstm_init(prefix, params, layer_setting):
return lstm_init(prefix+'_forward', params, layer_setting)
def uni_lstm_calc(prefix, params, layer_setting,state_below, h_init = None, c_init = None, mask = None, training = True):
return lstm_calc(prefix+'_forward', params, layer_setting, state_below, h_init, c_init, mask, training = True)
| 38.833333
| 121
| 0.772532
| 72
| 466
| 4.736111
| 0.388889
| 0.129032
| 0.211144
| 0.140762
| 0.322581
| 0.193548
| 0.193548
| 0
| 0
| 0
| 0
| 0
| 0.135193
| 466
| 12
| 122
| 38.833333
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0.034261
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.555556
| 0.222222
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
e78e4f9ae652756ae223cca8f8d396788f6a0c19
| 168
|
py
|
Python
|
agents/utils.py
|
tlbai/atari-agents
|
31ec79180a8a9c070b811984e06888bd5da8baf2
|
[
"MIT"
] | 3
|
2019-01-28T14:44:30.000Z
|
2019-05-07T06:07:03.000Z
|
agents/utils.py
|
happybai/atari-agents
|
31ec79180a8a9c070b811984e06888bd5da8baf2
|
[
"MIT"
] | null | null | null |
agents/utils.py
|
happybai/atari-agents
|
31ec79180a8a9c070b811984e06888bd5da8baf2
|
[
"MIT"
] | 1
|
2019-03-18T00:46:50.000Z
|
2019-03-18T00:46:50.000Z
|
import numpy as np
# https://en.wikipedia.org/wiki/Grayscale#Converting_color_to_grayscale
def rgb2gray(image):
return np.dot(image[...,:3], [0.299, 0.587, 0.114])
| 33.6
| 71
| 0.720238
| 28
| 168
| 4.214286
| 0.821429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092715
| 0.10119
| 168
| 5
| 72
| 33.6
| 0.688742
| 0.404762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
e793fbc5a06e88c8c137cbb67e83bf012036208f
| 2,753
|
py
|
Python
|
demo/tests/test_pages.py
|
SocialGouv/ecollecte
|
1bfce2e0700b563c111c11452356b46ecb2630e4
|
[
"MIT"
] | 9
|
2018-11-28T07:36:37.000Z
|
2022-02-04T12:56:11.000Z
|
demo/tests/test_pages.py
|
betagouv/e-controle
|
b6f790ca2590ac257a47930a1e521b86ce3edb29
|
[
"MIT"
] | 154
|
2018-11-22T14:41:17.000Z
|
2022-02-12T08:48:57.000Z
|
demo/tests/test_pages.py
|
betagouv/e-controle
|
b6f790ca2590ac257a47930a1e521b86ce3edb29
|
[
"MIT"
] | 10
|
2018-11-13T06:57:10.000Z
|
2022-03-21T13:04:49.000Z
|
import pytest
from django.shortcuts import reverse
from django.urls.exceptions import NoReverseMatch
from tests import factories, utils
pytestmark = pytest.mark.django_db
def test_demo_user_is_logged_in_when_requesting_demo_page(client, settings):
settings.DEBUG = True
settings.ALLOW_DEMO_LOGIN = True
settings.DEMO_INSPECTOR_USERNAME = 'inspector@test.com'
utils.reload_urlconf()
user = factories.UserFactory(username='inspector@test.com')
user.is_superuser = False
user.is_staff = False
user.save()
response = client.get(reverse('demo-inspector'), follow=True)
assert response.status_code == 200
session_user = response.context['user']
assert session_user.is_authenticated
def test_login_as_demo_user_is_not_available_if_debug_mode_if_off(client, settings):
settings.DEBUG = False
settings.ALLOW_DEMO_LOGIN = True
settings.DEMO_INSPECTOR_USERNAME = 'inspector@test.com'
utils.reload_urlconf()
with pytest.raises(NoReverseMatch):
reverse('demo-inspector')
def test_login_as_demo_user_is_not_available_if_setting_prevents(client, settings):
settings.DEBUG = True
settings.ALLOW_DEMO_LOGIN = False
settings.DEMO_INSPECTOR_USERNAME = 'inspector@test.com'
utils.reload_urlconf()
with pytest.raises(NoReverseMatch):
reverse('demo-inspector')
response = client.get('/demo-controleur/')
assert response.status_code == 404
response = client.get('/demo/')
assert response.status_code == 404
def test_demo_user_is_not_logged_in_if_superuser(client, settings):
settings.DEBUG = True
settings.ALLOW_DEMO_LOGIN = True
settings.DEMO_INSPECTOR_USERNAME = 'inspector@test.com'
utils.reload_urlconf()
user = factories.UserFactory(username='inspector@test.com')
user.is_superuser = True
user.is_staff = False
user.save()
response = client.get(reverse('demo-inspector'), follow=True)
assert response.status_code != 200
def test_demo_user_is_not_logged_in_if_staff(client, settings):
settings.DEBUG = True
settings.ALLOW_DEMO_LOGIN = True
settings.DEMO_INSPECTOR_USERNAME = 'inspector@test.com'
utils.reload_urlconf()
user = factories.UserFactory(username='inspector@test.com')
user.is_superuser = False
user.is_staff = True
user.save()
response = client.get(reverse('demo-inspector'), follow=True)
assert response.status_code != 200
def test_demo_user_is_not_logged_in_if_username_not_in_setting(client, settings):
settings.DEBUG = True
settings.ALLOW_DEMO_LOGIN = True
settings.DEMO_INSPECTOR_USERNAME = None
utils.reload_urlconf()
response = client.get(reverse('demo-inspector'), follow=True)
assert response.status_code != 200
| 33.573171
| 84
| 0.752997
| 357
| 2,753
| 5.515406
| 0.179272
| 0.039614
| 0.085323
| 0.097511
| 0.787202
| 0.751143
| 0.751143
| 0.751143
| 0.751143
| 0.708989
| 0
| 0.007725
| 0.153651
| 2,753
| 81
| 85
| 33.987654
| 0.837339
| 0
| 0
| 0.676923
| 0
| 0
| 0.092626
| 0
| 0
| 0
| 0
| 0
| 0.107692
| 1
| 0.092308
| false
| 0
| 0.061538
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e799da227391770e4722f65f64e95ca3940bc5e7
| 41
|
py
|
Python
|
tests/__init__.py
|
lfpratik/python_sbom
|
2129a0f551a9ed7e4859018011c33e2008d076f5
|
[
"Apache-2.0"
] | 1
|
2021-06-07T16:00:54.000Z
|
2021-06-07T16:00:54.000Z
|
tests/__init__.py
|
lfpratik/python_sbom
|
2129a0f551a9ed7e4859018011c33e2008d076f5
|
[
"Apache-2.0"
] | null | null | null |
tests/__init__.py
|
lfpratik/python_sbom
|
2129a0f551a9ed7e4859018011c33e2008d076f5
|
[
"Apache-2.0"
] | 1
|
2021-06-07T16:00:58.000Z
|
2021-06-07T16:00:58.000Z
|
"""Unit test package for python_sbom."""
| 20.5
| 40
| 0.707317
| 6
| 41
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121951
| 41
| 1
| 41
| 41
| 0.777778
| 0.829268
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
82095a36c546695d809c85e4b6818394c6433a92
| 37
|
py
|
Python
|
sq.py
|
darkdebo/python_codes
|
5644482b7a7cb4d775de0194bae84024e24bfcaf
|
[
"MIT"
] | null | null | null |
sq.py
|
darkdebo/python_codes
|
5644482b7a7cb4d775de0194bae84024e24bfcaf
|
[
"MIT"
] | 1
|
2019-09-03T10:15:36.000Z
|
2019-09-03T10:15:36.000Z
|
sq.py
|
darkdebo/python_codes
|
5644482b7a7cb4d775de0194bae84024e24bfcaf
|
[
"MIT"
] | null | null | null |
print(sum([i**2 for i in range(11)]))
| 37
| 37
| 0.621622
| 9
| 37
| 2.555556
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 0.108108
| 37
| 1
| 37
| 37
| 0.606061
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
823bdd7765cb8136008a8c54947dec23d4d51c24
| 280
|
py
|
Python
|
crestdsl/model/api/__init__.py
|
stklik/CREST
|
7fd97c50b0c6c923e1c477105bed4f0ea032bb99
|
[
"MIT"
] | 14
|
2019-08-06T10:17:46.000Z
|
2022-03-13T12:50:59.000Z
|
crestdsl/model/api/__init__.py
|
stklik/CREST
|
7fd97c50b0c6c923e1c477105bed4f0ea032bb99
|
[
"MIT"
] | 16
|
2018-01-20T00:54:24.000Z
|
2019-07-24T15:43:42.000Z
|
crestdsl/model/api/__init__.py
|
stklik/CREST
|
7fd97c50b0c6c923e1c477105bed4f0ea032bb99
|
[
"MIT"
] | 1
|
2021-02-01T15:33:24.000Z
|
2021-02-01T15:33:24.000Z
|
# TODO: make this more beautiful by using the __all__ in the entity module,
# rather than importing everything here
from .api import get_parent, get_name, get_current, get_root, get_children, get_sources, get_targets
from .convenienceAPI import pullup, relay, add, dependencies
| 40
| 100
| 0.807143
| 42
| 280
| 5.119048
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139286
| 280
| 6
| 101
| 46.666667
| 0.892116
| 0.396429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
41517b39849f7d892f4ec3a1e8aecb3557836ee2
| 85
|
py
|
Python
|
7_kyu/All_Star_Code_Challenge_1.py
|
UlrichBerntien/Codewars-Katas
|
bbd025e67aa352d313564d3862db19fffa39f552
|
[
"MIT"
] | null | null | null |
7_kyu/All_Star_Code_Challenge_1.py
|
UlrichBerntien/Codewars-Katas
|
bbd025e67aa352d313564d3862db19fffa39f552
|
[
"MIT"
] | null | null | null |
7_kyu/All_Star_Code_Challenge_1.py
|
UlrichBerntien/Codewars-Katas
|
bbd025e67aa352d313564d3862db19fffa39f552
|
[
"MIT"
] | null | null | null |
def sum_ppg(player_one, player_two):
return player_one['ppg'] + player_two['ppg']
| 42.5
| 48
| 0.729412
| 14
| 85
| 4.071429
| 0.5
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 85
| 2
| 48
| 42.5
| 0.76
| 0
| 0
| 0
| 0
| 0
| 0.069767
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
415fa2aaddf6a5244523209b3983e9c11e5afbc6
| 70
|
py
|
Python
|
Day8/ex3/package/subpackage2/module.py
|
ash2shukla/Python101-ABESIT
|
f6460fed42b3076ce2cb510e4bc09db758a81a0d
|
[
"MIT"
] | 36
|
2018-06-19T14:08:54.000Z
|
2020-01-06T14:58:03.000Z
|
Day8/ex3/package/subpackage2/module.py
|
ash2shukla/Python101-ABESIT
|
f6460fed42b3076ce2cb510e4bc09db758a81a0d
|
[
"MIT"
] | null | null | null |
Day8/ex3/package/subpackage2/module.py
|
ash2shukla/Python101-ABESIT
|
f6460fed42b3076ce2cb510e4bc09db758a81a0d
|
[
"MIT"
] | 19
|
2018-06-11T19:31:07.000Z
|
2020-10-05T12:42:34.000Z
|
def module_method(arg):
print('Module Method inside subpackage2',arg)
| 35
| 46
| 0.8
| 10
| 70
| 5.5
| 0.7
| 0.436364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015625
| 0.085714
| 70
| 2
| 46
| 35
| 0.84375
| 0
| 0
| 0
| 0
| 0
| 0.450704
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
4168c696cee489bfdfd5086c577e841027fca447
| 62
|
py
|
Python
|
tests/guinea-pig/lab_assistant.py
|
carocad/daVinci
|
4045bc7af9f0900d42d2576f7bdab98ab47d7ac2
|
[
"Apache-2.0"
] | 2
|
2016-04-17T11:20:26.000Z
|
2018-05-24T22:20:24.000Z
|
tests/guinea-pig/lab_assistant.py
|
carocad/daVinci
|
4045bc7af9f0900d42d2576f7bdab98ab47d7ac2
|
[
"Apache-2.0"
] | 16
|
2015-05-19T21:20:04.000Z
|
2015-06-27T12:41:19.000Z
|
tests/guinea-pig/lab_assistant.py
|
carocad/CodeInk
|
4045bc7af9f0900d42d2576f7bdab98ab47d7ac2
|
[
"Apache-2.0"
] | null | null | null |
from cage1 import pig1
from cage2.pig2 import eat
import os
| 10.333333
| 26
| 0.790323
| 11
| 62
| 4.454545
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 0.193548
| 62
| 5
| 27
| 12.4
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4177ce77dc6e423eb65e4787f4658d1a62eb0746
| 45
|
py
|
Python
|
addition.py
|
subhankarbehera/python
|
25c57abda91775fc60140e14a9a9621e4bd898c7
|
[
"MIT"
] | null | null | null |
addition.py
|
subhankarbehera/python
|
25c57abda91775fc60140e14a9a9621e4bd898c7
|
[
"MIT"
] | null | null | null |
addition.py
|
subhankarbehera/python
|
25c57abda91775fc60140e14a9a9621e4bd898c7
|
[
"MIT"
] | null | null | null |
x=19
y=20.35
print(x);
print(y);
print(x+y);
| 7.5
| 11
| 0.6
| 12
| 45
| 2.25
| 0.5
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 0.111111
| 45
| 5
| 12
| 9
| 0.525
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.6
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
41aad2b714bb01bcf7e57da5192f0407d2025a22
| 276
|
py
|
Python
|
swarmdjango/core/models/User.py
|
YCP-Swarm-Robotics-Capstone-2020-2021/swarm-website-backend
|
081d1930cc9283ee299d373f91f7c127f466c104
|
[
"MIT"
] | null | null | null |
swarmdjango/core/models/User.py
|
YCP-Swarm-Robotics-Capstone-2020-2021/swarm-website-backend
|
081d1930cc9283ee299d373f91f7c127f466c104
|
[
"MIT"
] | 51
|
2020-08-31T16:50:09.000Z
|
2021-05-10T03:04:18.000Z
|
swarmdjango/core/models/User.py
|
YCP-Swarm-Robotics-Capstone-2020-2021/swarm-website-backend
|
081d1930cc9283ee299d373f91f7c127f466c104
|
[
"MIT"
] | null | null | null |
from django.db import models
class User(models.Model):
username = models.TextField()
password = models.TextField()
email = models.EmailField()
firstName = models.TextField()
lastName = models.TextField()
accountLevel = models.IntegerField(default=0);
| 27.6
| 50
| 0.710145
| 29
| 276
| 6.758621
| 0.655172
| 0.306122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004405
| 0.177536
| 276
| 9
| 51
| 30.666667
| 0.859031
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.125
| 0.125
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
68dbbb00a7cfbda21af01002eb1510ee58cd606f
| 28
|
py
|
Python
|
graph-tool/doc/sphinxext/__init__.py
|
johankaito/fufuka
|
32a96ecf98ce305c2206c38443e58fdec88c788d
|
[
"Apache-2.0"
] | 1
|
2015-08-04T19:41:53.000Z
|
2015-08-04T19:41:53.000Z
|
graph-tool/doc/sphinxext/__init__.py
|
johankaito/fufuka
|
32a96ecf98ce305c2206c38443e58fdec88c788d
|
[
"Apache-2.0"
] | null | null | null |
graph-tool/doc/sphinxext/__init__.py
|
johankaito/fufuka
|
32a96ecf98ce305c2206c38443e58fdec88c788d
|
[
"Apache-2.0"
] | null | null | null |
from .numpydoc import setup
| 14
| 27
| 0.821429
| 4
| 28
| 5.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 28
| 1
| 28
| 28
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
68e2875068044e9c7bffef8433d0d649bf604c66
| 7,322
|
py
|
Python
|
hyperparameters/generator.py
|
ZHENTAN007/Chem-Graph-Kernel-Machine
|
963fdad2d0d6f2bfd962778caee0591b44ecbdcd
|
[
"MIT"
] | 7
|
2021-02-28T11:44:12.000Z
|
2021-12-13T07:17:05.000Z
|
hyperparameters/generator.py
|
ZHENTAN007/Chem-Graph-Kernel-Machine
|
963fdad2d0d6f2bfd962778caee0591b44ecbdcd
|
[
"MIT"
] | 2
|
2021-05-13T14:08:48.000Z
|
2021-06-14T12:28:20.000Z
|
hyperparameters/generator.py
|
ZHENTAN007/Chem-Graph-Kernel-Machine
|
963fdad2d0d6f2bfd962778caee0591b44ecbdcd
|
[
"MIT"
] | 4
|
2021-03-05T08:20:47.000Z
|
2021-12-13T07:17:39.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from typing import Dict, Iterator, List, Optional, Union, Literal, Tuple
class HyperJsonGenerator:
def tMGR(self, k: float = 0.9, k_bounds: Tuple[float, float] = (0.75, 1.0),
k_an=0.75):
return {
'Normalization': [10000, (1000, 30000)],
'a_type': ['Tensorproduct', 'fixed'],
'atom_AtomicNumber': {'kDelta': [k_an, k_bounds, 0.05]},
'atom_AtomicNumber_list_1': {'kConv': [k, k_bounds, 0.05]},
'atom_AtomicNumber_list_2': {'kConv': [k, k_bounds, 0.05]},
'atom_AtomicNumber_list_3': {'kConv': [k, k_bounds, 0.05]},
'atom_AtomicNumber_list_4': {'kConv': [k, k_bounds, 0.05]},
'atom_MorganHash': {'kDelta': [k, k_bounds, 0.05]},
'atom_Ring_count': {'kDelta': [k, k_bounds, 0.05]},
'atom_RingSize_list': {'kConv': [k, k_bounds, 0.05]},
'atom_Hcount': {'kDelta': [k, k_bounds, 0.05]},
'atom_AtomicNumber_count_1': {'kDelta': [k, k_bounds, 0.05]},
'atom_AtomicNumber_count_2': {'kDelta': [k, k_bounds, 0.05]},
'atom_Chiral': {'kDelta': [k, k_bounds, 0.05]},
'b_type': ['Tensorproduct', 'fixed'],
'bond_Order': {'kDelta': [k, k_bounds, 0.05]},
'bond_Stereo': {'kDelta': [k, k_bounds, 0.05]},
'bond_RingStereo': {'kDelta': [k, k_bounds, 0.05]},
'p_type': ['Additive_p', 'fixed'],
'probability_AtomicNumber': {'Const_p': [1.0, 'fixed']},
'q': [0.01, [0.01, 0.5], 0.01],
}
def add(self, k: float = 0.9, k_bounds: Tuple[float, float] = (0.75, 1.0),
c: float = 1.0, c_bounds: Tuple[float, float] = (1.0, 10.0)):
return {
'Normalization': [10000, (1000, 30000)],
'a_type': ['Additive', 'fixed'],
'atom_AtomicNumber': {'Const': [c, c_bounds, 1.0], 'kDelta': [k, k_bounds]},
'atom_AtomicNumber_list_1': {'Const': [c, c_bounds], 'kConv': [k, k_bounds]},
'atom_AtomicNumber_list_2': {'Const': [c, c_bounds], 'kConv': [k, k_bounds]},
'atom_AtomicNumber_list_3': {'Const': [c, c_bounds], 'kConv': [k, k_bounds]},
'atom_AtomicNumber_list_4': {'Const': [c, c_bounds], 'kConv': [k, k_bounds]},
'atom_MorganHash': {'Const': [c, c_bounds], 'kDelta': [k, k_bounds]},
'atom_Ring_count': {'Const': [c, c_bounds], 'kDelta': [k, k_bounds]},
'atom_RingSize_list': {'Const': [c, c_bounds], 'kConv': [k, k_bounds]},
'atom_Hcount': {'Const': [c, c_bounds], 'kDelta': [k, k_bounds]},
'atom_AtomicNumber_count_1': {'Const': [c, c_bounds], 'kDelta': [k, k_bounds]},
'atom_AtomicNumber_count_2': {'Const': [c, c_bounds], 'kDelta': [k, k_bounds]},
'atom_Chiral': {'Const': [c, c_bounds], 'kDelta': [k, k_bounds]},
'b_type': ['Additive', 'fixed'],
'bond_Order': {'Const': [c, c_bounds], 'kDelta': [k, k_bounds]},
'bond_Stereo': {'Const': [c, c_bounds], 'kDelta': [k, k_bounds]},
'bond_RingStereo': {'Const': [c, c_bounds], 'kDelta': [k, k_bounds]},
'p_type': ['Additive_p', 'fixed'],
'probability_AtomicNumber': {'Const_p': [1.0, "fixed"]},
'q': [0.01, (0.001, 0.5)],
}
def general(self, k: float = 0.9, k_bounds: Tuple[float, float] = (0.75, 1.0),
c: float = 1.0, c_bounds: Tuple[float, float] = (1.0, 10.0),
p: float = 1.0, p_bounds: Tuple[float, float] = (1.0, 10.0)):
k_uniform = 0.05
p_uniform = 1.0
return {
'Normalization': [10000, (1000, 50000), 1000],
'a_type': ['Additive', 'fixed'],
'atom_AtomicNumber': {'Const': [c, c_bounds, 1.0], 'kDelta': [k, k_bounds, k_uniform]},
'atom_AtomicNumber_list_1': {'Const': [c, c_bounds, 1.0], 'kConv': [k, k_bounds, k_uniform]},
'atom_AtomicNumber_list_2': {'Const': [c, c_bounds, 1.0], 'kConv': [k, k_bounds, k_uniform]},
'atom_AtomicNumber_list_3': {'Const': [c, c_bounds, 1.0], 'kConv': [k, k_bounds, k_uniform]},
'atom_AtomicNumber_list_4': {'Const': [c, c_bounds, 1.0], 'kConv': [k, k_bounds, k_uniform]},
'atom_MorganHash': {'Const': [c, c_bounds, 1.0], 'kDelta': [k, k_bounds, k_uniform]},
'atom_Ring_count': {'Const': [c, c_bounds, 1.0], 'kDelta': [k, k_bounds, k_uniform]},
'atom_RingSize_list': {'Const': [c, c_bounds, 1.0], 'kConv': [k, k_bounds, k_uniform]},
'atom_Hcount': {'Const': [c, c_bounds, 1.0], 'kDelta': [k, k_bounds, k_uniform]},
'atom_AtomicNumber_count_1': {'Const': [c, c_bounds, 1.0], 'kDelta': [k, k_bounds, k_uniform]},
'atom_AtomicNumber_count_2': {'Const': [c, c_bounds, 1.0], 'kDelta': [k, k_bounds, k_uniform]},
'atom_Chiral': {'Const': [c, c_bounds, 1.0], 'kDelta': [k, k_bounds, k_uniform]},
'b_type': ['Additive', 'fixed'],
'bond_Order': {'Const': [c, c_bounds, 1.0], 'kDelta': [k, k_bounds, k_uniform]},
'bond_Stereo': {'Const': [c, c_bounds, 1.0], 'kDelta': [k, k_bounds, k_uniform]},
'bond_RingStereo': {'Const': [c, c_bounds, 1.0], 'kDelta': [k, k_bounds, k_uniform]},
'bond_Conjugated': {'Const': [c, c_bounds, 1.0], 'kDelta': [k, k_bounds, k_uniform]},
'p_type': ['Additive_p', 'fixed'],
'probability_AtomicNumber': {'Const_p': [1.0, "fixed"]},
'probability_group_an5': {'Assign_p': [p, p_bounds, p_uniform]},
'probability_group_an6': {'Assign_p': [p, p_bounds, p_uniform]},
'probability_group_an7': {'Assign_p': [p, p_bounds, p_uniform]},
'probability_group_an8': {'Assign_p': [p, p_bounds, p_uniform]},
'probability_group_an9': {'Assign_p': [p, p_bounds, p_uniform]},
'probability_group_an14': {'Assign_p': [p, p_bounds, p_uniform]},
'probability_group_an15': {'Assign_p': [p, p_bounds, p_uniform]},
'probability_group_an16': {'Assign_p': [p, p_bounds, p_uniform]},
'probability_group_an17': {'Assign_p': [p, p_bounds, p_uniform]},
'probability_group_an35': {'Assign_p': [p, p_bounds, p_uniform]},
'probability_group_an53': {'Assign_p': [p, p_bounds, p_uniform]},
'q': [0.01, (0.01, 0.5), 0.01],
}
hyper_json = HyperJsonGenerator()
product_hyper = hyper_json.tMGR()
open('tMGR.json', 'w').write(
json.dumps(product_hyper, indent=1, sort_keys=False))
product_hyper['Normalization'] = [True, 'fixed']
open('tMGR-Norm.json', 'w').write(
json.dumps(product_hyper, indent=1, sort_keys=False))
product_hyper['Normalization'] = [False, 'fixed']
open('tMGR-non-Norm.json', 'w').write(
json.dumps(product_hyper, indent=1, sort_keys=False))
additive_hyper = hyper_json.general()
open('additive.json', 'w').write(
json.dumps(additive_hyper, indent=1, sort_keys=False))
additive_hyper['Normalization'] = [True, 'fixed']
open('additive-Norm.json', 'w').write(
json.dumps(additive_hyper, indent=1, sort_keys=False))
additive_hyper['Normalization'] = [False, 'fixed']
open('additive-non-Norm.json', 'w').write(
json.dumps(additive_hyper, indent=1, sort_keys=False))
| 60.512397
| 107
| 0.566375
| 993
| 7,322
| 3.889225
| 0.100705
| 0.088814
| 0.093216
| 0.10435
| 0.869498
| 0.848006
| 0.819524
| 0.737442
| 0.704039
| 0.498187
| 0
| 0.044916
| 0.22767
| 7,322
| 120
| 108
| 61.016667
| 0.638019
| 0.005736
| 0
| 0.178571
| 0
| 0
| 0.284694
| 0.105661
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026786
| false
| 0
| 0.017857
| 0.017857
| 0.080357
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6b7c75e2791a351df267d546fb8fe8519bdc217c
| 13,516
|
py
|
Python
|
tests/nlu_core_tests/namespace_tests.py
|
milyiyo/nlu
|
d209ed11c6a84639c268f08435552248391c5573
|
[
"Apache-2.0"
] | 480
|
2020-08-24T02:36:40.000Z
|
2022-03-30T08:09:43.000Z
|
tests/nlu_core_tests/namespace_tests.py
|
milyiyo/nlu
|
d209ed11c6a84639c268f08435552248391c5573
|
[
"Apache-2.0"
] | 28
|
2020-09-26T18:55:43.000Z
|
2022-03-26T01:05:45.000Z
|
tests/nlu_core_tests/namespace_tests.py
|
milyiyo/nlu
|
d209ed11c6a84639c268f08435552248391c5573
|
[
"Apache-2.0"
] | 76
|
2020-09-25T22:55:12.000Z
|
2022-03-17T20:25:52.000Z
|
import unittest
from tests.test_utils import get_sample_pdf_with_labels, get_sample_pdf, get_sample_sdf, get_sample_pdf_with_extra_cols, get_sample_pdf_with_no_text_col ,get_sample_spark_dataframe
from nlu import *
class TestNameSpace(unittest.TestCase):
def test_tokenize(self):
df = nlu.load('en.tokenize').predict('What a wonderful day!')
print(df)
df = nlu.load('tokenize').predict('What a wonderful day!')
print(df)
def test_pos(self):
df = nlu.load('pos', verbose=True).predict('What a wonderful day!')
print(df)
#
# def test_embed(self):
# # df = nlu.load('en.embed').predict('What a wonderful day!')
# #
# # print(df)
#
# df = nlu.load('embed').predict('What a wonderful day!')
# print(df)
#
#
# def test_embed_glove(self):
# df = nlu.load('en.embed.glove').predict('What a wonderful day!')
#
# print(df)
#
# df = nlu.load('embed.glove').predict('What a wonderful day!')
# print(df)
# df = nlu.load('glove').predict('What a wonderful day!')
# print(df)
#
def test_sentiment_twitter_out(self):
# res=nlu.load('en.sentiment.twitter',verbose=True).predict('@elonmusk Tesla stock price is too high imo') # ifninite loop ??
res = nlu.load('en.sentiment.imdb',verbose=True).predict('The Matrix was a pretty good movie')
print(res)
print(res.columns)
def test_output_levels(self):
print('token test')
df = nlu.load('sentiment',verbose=True).predict('What a wonderful day!', output_level='token')
print(df)
print('document test')
df = nlu.load('sentiment',verbose=True).predict('What a wonderful day!', output_level='document')
print(df)
print('sentence test')
df = nlu.load('sentiment',verbose=True).predict('What a wonderful day!', output_level='sentence')
print(df)
print('chunk test')
df = nlu.load('sentiment',verbose=True).predict('I like peanut butter and jelly!', output_level='chunk')
print(df)
def test_ner_multilingual(self):
df = nlu.load('ner',verbose=True).predict('New York is a great place and America aswell')
print(df)
def test_sentiment(self):
df = nlu.load('en.sentiment').predict('What a wonderful day!')
def test_emotion(self):
df = nlu.load('en.classify.emotion').predict('What a wonderful day!')
print(df)
def test_spell(self):
df = nlu.load('spell').predict('What a wonderful day!')
print(df)
#
def test_dependency(self):
df = nlu.load('dep', verbose=True).predict('What a wonderful day!')
print(df)
def test_dependency_untyped(self):
df = nlu.load('dep.untyped', verbose=True).predict('What a wonderful day!')
print(df)
def test_bert(self):
df = nlu.load('bert').predict('What a wonderful day!')
print(df)
def test_lang(self):
df = nlu.load('lang', verbose=True).predict('What a wonderful day!')
print(df)
print(df.columns)
print(df['language_de'])
print(df['language_fr'])
print(len(df['language_de'][0]))
# df = nlu.load('xx.classify.lang').predict('What a wonderful day!')
# print(df)
# df = nlu.load('classify.lang').predict('What a wonderful day!')
# print(df)
# print(df)
def test_explain(self):
df = nlu.load('en.explain').predict('What a wonderful day!')
print(df)
df = nlu.load('explain').predict('What a wonderful day!')
print(df)
def test_match(self):
df = nlu.load('match.date',verbose=True).predict('What a wonderful day!')
print(df)
# df = nlu.load('en.match.date').predict('What a wonderful day!')
# print(df)
def test_clean_stop(self):
# df = nlu.load('clean.stop').predict('What a wonderful day!')
# print(df)
df = nlu.load('en.clean.stop').predict('What a wonderful day!')
print(df)
def test_spell(self):
df = nlu.load('spell').predict('What a wonderful day!')
print(df)
df = nlu.load('en.spell').predict('What a wonderful day!')
print(df)
# def test_all_spell(self):
# df = nlu.load('en.spell.symmetric').predict('What a wonderful day!')
#
# print(df)
#
# df = nlu.load('en.spell.context').predict('What a wonderful day!')
# print(df)
# df = nlu.load('en.spell.norvig').predict('What a wonderful day!')
#
# print(df)
# df = nlu.load('spell').predict('What a wonderful day!')
#
# print(df)
#
# df = nlu.load('en.spell').predict('What a wonderful day!')
#
# print(df)
# def test_biobert(self):
# df = nlu.load('biobert').predict('What a wonderful day!')
#
# print(df)
#
# df = nlu.load('en.embed.biobert').predict('What a wonderful day!')
# print(df)
#
# def test_elmo(self):
# df = nlu.load('en.embed.elmo').predict('What a wonderful day!')
# print(df)
# df = nlu.load('elmo').predict('What a wonderful day!')
# print(df)
#
# def test_use(self):
# df = nlu.load('en.embed.use').predict('What a wonderful day!')
#
# print(df)
#
# df = nlu.load('use').predict('What a wonderful day!')
# print(df)
#
# def test_albert(self):
# df = nlu.load('en.embed.albert').predict('What a wonderful day!')
#
# print(df)
#
# df = nlu.load('albert').predict('What a wonderful day!')
# print(df)
#
# def test_xlnet(self):
# df = nlu.load('en.embed.xlnet').predict('What a wonderful day!')
#
# print(df)
#
# df = nlu.load('xlnet').predict('What a wonderful day!')
# print(df)
def test_lemma(self):
df = nlu.load('lemma').predict('What a wonderful day!')
print(df)
df = nlu.load('en.lemma').predict('What a wonderful day!')
print(df)
# def test_norm(self):
# df = nlu.load('lemma').predict('What a wonderful day!')
#
# print(df)
# df = nlu.load('en.lemma').predict('What a wonderful day!')
#
# print(df)
#
# def test_use(self):
# df = nlu.load('en.embed_sentence.use').predict('What a wonderful day!')
# print(df)
#
# def test_glove(self):
# df = nlu.load('nl.ner.wikiner.glove_6B_300').predict('What a wonderful day!')
#
# print(df)
def test_sentence_detector(self):
df = nlu.load('sentence_detector', verbose=True).predict('What a wonderful day! Tomorrow will be even better!')
print(df)
def test_stopwords(self):
df = nlu.load('match.chunk').predict('What a wonderful day!')
print(df)
def test_classify_lang(self):
df = nlu.load('xx.classify.wiki_7').predict('What a wonderful day!')
print(df)
def test_sentiment_on_datasets(self):
df = nlu.load('sentiment.twitter').predict('What a wonderful day!')
print(df)
# df = nlu.load('sentiment.imdb').predict('What a wonderful day!')
# print(df)
def test_multiple_nlu_references(self):
# df = nlu.load('elmo bert').predict('What a wonderful day!')
df = nlu.load('elmo').predict('What a wonderful day!')
print(df)
# df = nlu.load('sentiment.imdb').predict('What a wonderful day!')
# print(df)
def test_sentiment_output(self):
res = nlu.load('sentiment',verbose=True).predict('Your life is the sum of a remainder of an unbalanced equation inherent to the programming of the matrix. You are the eventuality of an anomaly, which despite my sincerest efforts I have been unable to eliminate from what is otherwise a harmony of mathematical precision. While it remains a burden assiduously avoided, it is not unexpected, and thus not beyond a measure of control. Which has led you, inexorably, here.', output_level='sentence')
# res = nlu.load('bert',verbose=True).predict('@Your life is the sum of a remainder of an unbalanced equation inherent to the programming of the matrix. You are the eventuality of an anomaly, which despite my sincerest efforts I have been unable to eliminate from what is otherwise a harmony of mathematical precision. While it remains a burden assiduously avoided, it is not unexpected, and thus not beyond a measure of control. Which has led you, inexorably, here.', output_level='sentence')
print(res)
print(res['sentiment'])
print(res.dtypes)
def test_stem(self):
pdf = get_sample_pdf()
res = nlu.load('stem',verbose=True).predict(pdf )
print(res)
res = nlu.load('en.stem',verbose=True).predict(pdf)
print(res)
def test_norm(self):
pdf = get_sample_pdf()
res = nlu.load('norm',verbose=True).predict(pdf, output_positions=True )
print(res)
# res = nlu.load('en.norm',verbose=True).predict(pdf)
# print(res)
def test_chunk(self):
res = nlu.load('chunk',verbose=True).predict('I like peanut butter and jelly!' )
print(res)
def test_ngram(self):
pdf = get_sample_pdf()
# res = nlu.load('ngram',verbose=True).predict(pdf )
pipe = nlu.load('ngram',verbose=True)
# print(res['ngrams'])
print("PIPE", pipe)
res = nlu.load('en.ngram',verbose=True).predict(pdf)
print(res['ngrams'])
def test_chunk_embeds(self):
pdf = get_sample_pdf()
res = nlu.load('embed_chunk',verbose=True).predict("What a wondful day!" )
print(res)
res = nlu.load('en.embed_chunk',verbose=True).predict(pdf)
print(res)
def test_regex_matcher(self):
pdf = get_sample_pdf()
res = nlu.load('match.regex',verbose=True).predict(pdf )
print(res)
def test_text_matcher(self):
pdf = get_sample_pdf()
res = nlu.load('match.text',verbose=True).predict(pdf )
print(res)
def test_auto_sentence_embed_bert(self): # TODO WIP
pdf = get_sample_pdf()
res = nlu.load('embed_sentence.bert',verbose=True).predict(pdf )
print(res)
def test_auto_sentence_embed_elmo(self): # TODO WIP
pdf = get_sample_pdf()
res = nlu.load('embed_sentence.elmo',verbose=True).predict(pdf )
print(res)
# def test_bad_pandas_column_datatype(self):
# sdf = get_sample_spark_dataframe()
# res = nlu.load('asdasj.asdas',verbose=True).predict(sdf, output_level='sentence')
# # res = nlu.load('bert',verbose=True).predict('@Your life is the sum of a remainder of an unbalanced equation inherent to the programming of the matrix. You are the eventuality of an anomaly, which despite my sincerest efforts I have been unable to eliminate from what is otherwise a harmony of mathematical precision. While it remains a burden assiduously avoided, it is not unexpected, and thus not beyond a measure of control. Which has led you, inexorably, here.', output_level='sentence')
#
# print(res)
#
# def test_bad_pandas_dataframe_datatype(self):
# sdf = get_sample_spark_dataframe()
# res = nlu.load('asdasj.asdas',verbose=True).predict(sdf, output_level='sentence')
# # res = nlu.load('bert',verbose=True).predict('@Your life is the sum of a remainder of an unbalanced equation inherent to the programming of the matrix. You are the eventuality of an anomaly, which despite my sincerest efforts I have been unable to eliminate from what is otherwise a harmony of mathematical precision. While it remains a burden assiduously avoided, it is not unexpected, and thus not beyond a measure of control. Which has led you, inexorably, here.', output_level='sentence')
#
# print(res)
#2.6 test
def test_electra(self):
pdf = get_sample_pdf()
res = nlu.load('en.embed.electra',verbose=True).predict(pdf )
print(res)
def test_embed_sentence_bert(self):
pdf = get_sample_pdf()
res = nlu.load('en.embed_sentence.small_bert_L2_128',verbose=True).predict(pdf )
print(res)
def test_embed_sentence_bert(self):
pdf = get_sample_pdf()
res = nlu.load('en.embed_sentence.biobert.pubmed_base_cased',verbose=True).predict(pdf )
print(res)
def test_toxic(self):
pdf = get_sample_pdf()
res = nlu.load('en.classify.toxic',verbose=True).predict(pdf )
print(res)
def test_e2e(self):
pdf = get_sample_pdf()
res = nlu.load('en.classify.e2e',verbose=True).predict(pdf )
print(res)
def test_labse(self):
pdf = get_sample_pdf()
res = nlu.load('xx.embed_sentence.labse',verbose=True).predict(pdf )
print(res)
def test_xx_bert(self):
pdf = get_sample_pdf()
res = nlu.load('xx.embed_sentence',verbose=True).predict(pdf )
print(res)
def test_26_bert(self):
res = nlu.load('en.ner.bert',verbose=True).predict('The NLU library is a machine learning library, simmilar to Tensorflow and Keras')
print(res)
if __name__ == '__main__':
unittest.main()
| 35.382199
| 503
| 0.610462
| 1,831
| 13,516
| 4.403605
| 0.117422
| 0.077267
| 0.065856
| 0.148456
| 0.807516
| 0.748729
| 0.718467
| 0.709537
| 0.637356
| 0.566414
| 0
| 0.001586
| 0.253625
| 13,516
| 381
| 504
| 35.475066
| 0.79768
| 0.356984
| 0
| 0.397661
| 0
| 0.005848
| 0.227236
| 0.011806
| 0
| 0
| 0
| 0.002625
| 0
| 1
| 0.233918
| false
| 0
| 0.017544
| 0
| 0.25731
| 0.350877
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6b975006473f3a1a27b8beec65cd2b431119ce42
| 20
|
py
|
Python
|
hello_word.py
|
codegenin/djangocourse-profiles-rest-api
|
9f26b27dd82593ed1de88d3c54fb6e97c7747c05
|
[
"MIT"
] | null | null | null |
hello_word.py
|
codegenin/djangocourse-profiles-rest-api
|
9f26b27dd82593ed1de88d3c54fb6e97c7747c05
|
[
"MIT"
] | null | null | null |
hello_word.py
|
codegenin/djangocourse-profiles-rest-api
|
9f26b27dd82593ed1de88d3c54fb6e97c7747c05
|
[
"MIT"
] | null | null | null |
print('Hellow Word')
| 20
| 20
| 0.75
| 3
| 20
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05
| 20
| 1
| 20
| 20
| 0.789474
| 0
| 0
| 0
| 0
| 0
| 0.52381
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
6bea389593d18f144baf635df00e828a46352027
| 425
|
py
|
Python
|
python/tests/test_counting_bits.py
|
kellyjoe256/coding-challenge-solutions
|
357e9c8e94cdcb7a3384ec8f4072c2e1facc9833
|
[
"MIT"
] | 1
|
2021-04-15T22:25:55.000Z
|
2021-04-15T22:25:55.000Z
|
python/tests/test_counting_bits.py
|
kellyjoe256/coding-challenge-solutions
|
357e9c8e94cdcb7a3384ec8f4072c2e1facc9833
|
[
"MIT"
] | null | null | null |
python/tests/test_counting_bits.py
|
kellyjoe256/coding-challenge-solutions
|
357e9c8e94cdcb7a3384ec8f4072c2e1facc9833
|
[
"MIT"
] | 1
|
2021-04-15T22:26:10.000Z
|
2021-04-15T22:26:10.000Z
|
from assertpy import assert_that
from counting_bits import count_bits
def test_correct_count():
assert_that(count_bits(0)).is_equal_to(0)
assert_that(count_bits(4)).is_equal_to(1)
assert_that(count_bits(7)).is_equal_to(3)
assert_that(count_bits(9)).is_equal_to(2)
assert_that(count_bits(10)).is_equal_to(2)
assert_that(count_bits(15)).is_equal_to(4)
assert_that(count_bits(1234)).is_equal_to(5)
| 32.692308
| 48
| 0.767059
| 78
| 425
| 3.75641
| 0.320513
| 0.273038
| 0.358362
| 0.453925
| 0.197952
| 0.197952
| 0.197952
| 0.197952
| 0
| 0
| 0
| 0.050265
| 0.110588
| 425
| 12
| 49
| 35.416667
| 0.724868
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.8
| 1
| 0.1
| true
| 0
| 0.2
| 0
| 0.3
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d42a6390040bc6db3f0f5c3d631103c318c73921
| 106
|
py
|
Python
|
example/backers/admin.py
|
feinheit/zipfelchappe
|
1bbe56c910d2f33a047f8215ed432aac2988b70e
|
[
"BSD-3-Clause"
] | 25
|
2015-05-17T22:13:37.000Z
|
2021-05-31T17:17:57.000Z
|
example/backers/admin.py
|
feinheit/zipfelchappe
|
1bbe56c910d2f33a047f8215ed432aac2988b70e
|
[
"BSD-3-Clause"
] | 16
|
2015-03-12T09:19:27.000Z
|
2020-11-23T08:41:37.000Z
|
example/backers/admin.py
|
feinheit/zipfelchappe
|
1bbe56c910d2f33a047f8215ed432aac2988b70e
|
[
"BSD-3-Clause"
] | 5
|
2015-04-27T15:17:53.000Z
|
2020-08-11T10:38:40.000Z
|
from django.contrib import admin
from .models import ExtendedBacker
admin.site.register(ExtendedBacker)
| 17.666667
| 35
| 0.839623
| 13
| 106
| 6.846154
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103774
| 106
| 5
| 36
| 21.2
| 0.936842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d444bc4ec7af475d3fdfa1292231dc720c884591
| 72
|
py
|
Python
|
authlib/specs/rfc7516/models.py
|
tk193192/authlib
|
4c60a628f64c6d385a06ea55e416092726b94d07
|
[
"BSD-3-Clause"
] | 2
|
2021-04-26T18:17:37.000Z
|
2021-04-28T21:39:45.000Z
|
authlib/specs/rfc7516/models.py
|
tk193192/authlib
|
4c60a628f64c6d385a06ea55e416092726b94d07
|
[
"BSD-3-Clause"
] | 4
|
2021-03-19T08:17:59.000Z
|
2021-06-10T19:34:36.000Z
|
authlib/specs/rfc7516/models.py
|
tk193192/authlib
|
4c60a628f64c6d385a06ea55e416092726b94d07
|
[
"BSD-3-Clause"
] | 2
|
2021-05-24T20:34:12.000Z
|
2022-03-26T07:46:17.000Z
|
from authlib.jose import JWEAlgorithm, JWEEncAlgorithm, JWEZipAlgorithm
| 36
| 71
| 0.875
| 7
| 72
| 9
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 72
| 1
| 72
| 72
| 0.954545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d4468459c4213f0ec3625d6ce3bfc629b8faa6b1
| 4,514
|
py
|
Python
|
tests/unit/system_status/status_group_unit_test.py
|
BMeu/Orchard
|
cd595c9942e4e1ad0032193059f2b39fdf3bcfba
|
[
"MIT"
] | 2
|
2016-10-06T21:19:32.000Z
|
2016-10-06T21:58:04.000Z
|
tests/unit/system_status/status_group_unit_test.py
|
BMeu/Orchard
|
cd595c9942e4e1ad0032193059f2b39fdf3bcfba
|
[
"MIT"
] | 392
|
2016-10-06T17:13:30.000Z
|
2021-01-15T04:15:38.000Z
|
tests/unit/system_status/status_group_unit_test.py
|
BMeu/Orchard
|
cd595c9942e4e1ad0032193059f2b39fdf3bcfba
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Unit Test: orchard.system_status.status_group
"""
import unittest
import orchard.system_status
class StatusGroupUnitTest(unittest.TestCase):
def setUp(self):
app = orchard.create_app('Testing')
self.app_context = app.app_context()
self.app_context.push()
self.client = app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
def test_initialization(self):
status_group = orchard.system_status.StatusGroup('Group 1')
self.assertEqual(status_group._label, 'Group 1')
self.assertListEqual(status_group._items, [])
self.assertFalse(status_group._has_subgroups)
self.assertFalse(status_group._is_subgroup)
def test_label(self):
status_group = orchard.system_status.StatusGroup('Group 1')
self.assertEqual(status_group.label, 'Group 1')
def test_append(self):
status_item_1 = orchard.system_status.StatusItem('Item 1', str)
status_item_2 = orchard.system_status.StatusItem('Item 2', str)
status_item_3 = orchard.system_status.StatusItem('Item 3', str)
status_group_1 = orchard.system_status.StatusGroup('Group 1')
status_group_2 = orchard.system_status.StatusGroup('Group 2')
status_group_3 = orchard.system_status.StatusGroup('Group 3')
self.assertListEqual(status_group_1._items, [])
success = status_group_1.append(status_item_2)
self.assertListEqual(status_group_1._items, [status_item_2])
self.assertTrue(success)
success = status_group_1.append(status_item_3)
self.assertListEqual(status_group_1._items, [status_item_2, status_item_3])
self.assertTrue(success)
success = status_group_1.append(status_item_1)
self.assertListEqual(status_group_1._items, [status_item_2, status_item_3, status_item_1])
self.assertTrue(success)
success = status_group_1.append(status_item_3)
self.assertListEqual(status_group_1._items,
[status_item_2, status_item_3, status_item_1, status_item_3])
self.assertTrue(success)
status_group_1._items = []
success = status_group_1.append(status_group_2)
self.assertListEqual(status_group_1._items, [status_group_2])
self.assertTrue(success)
self.assertTrue(status_group_1._has_subgroups)
self.assertFalse(status_group_1._is_subgroup)
self.assertFalse(status_group_2._has_subgroups)
self.assertTrue(status_group_2._is_subgroup)
success = status_group_2.append(status_group_3)
self.assertListEqual(status_group_2._items, [])
self.assertFalse(success)
self.assertFalse(status_group_2._has_subgroups)
self.assertTrue(status_group_2._is_subgroup)
self.assertFalse(status_group_3._has_subgroups)
self.assertFalse(status_group_3._is_subgroup)
success = status_group_3.append(status_group_1)
self.assertListEqual(status_group_3._items, [])
self.assertFalse(success)
self.assertFalse(status_group_3._has_subgroups)
self.assertFalse(status_group_3._is_subgroup)
self.assertTrue(status_group_1._has_subgroups)
self.assertFalse(status_group_1._is_subgroup)
success = status_group_1.append(status_group_3)
self.assertListEqual(status_group_1._items, [status_group_2, status_group_3])
self.assertTrue(success)
self.assertTrue(status_group_1._has_subgroups)
self.assertFalse(status_group_1._is_subgroup)
self.assertFalse(status_group_3._has_subgroups)
self.assertTrue(status_group_3._is_subgroup)
def test_iterator(self):
status_item_1 = orchard.system_status.StatusItem('Item 1', str)
status_item_2 = orchard.system_status.StatusItem('Item 2', str)
status_item_3 = orchard.system_status.StatusItem('Item 3', str)
status_group = orchard.system_status.StatusGroup('Group 1')
items = []
for status_item in status_group:
items.append(status_item.label)
self.assertEqual(items, [])
status_group.append(status_item_2)
status_group.append(status_item_3)
status_group.append(status_item_1)
status_group.append(status_item_3)
items = []
for status_item in status_group:
items.append(status_item.label)
self.assertEqual(items, ['Item 2', 'Item 3', 'Item 1', 'Item 3'])
| 39.946903
| 98
| 0.70381
| 570
| 4,514
| 5.173684
| 0.089474
| 0.220075
| 0.089522
| 0.105799
| 0.85351
| 0.788403
| 0.703967
| 0.694473
| 0.632418
| 0.583927
| 0
| 0.02412
| 0.20093
| 4,514
| 112
| 99
| 40.303571
| 0.793457
| 0.015064
| 0
| 0.488372
| 0
| 0
| 0.02774
| 0
| 0
| 0
| 0
| 0
| 0.465116
| 1
| 0.069767
| false
| 0
| 0.023256
| 0
| 0.104651
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d453702fe5955b8c7f1bde6f60ded7a799dc2c7f
| 1,939
|
py
|
Python
|
tpa/src/ognjen/typed_priority_array.py
|
savara94/TypedPriorityArray
|
8486184a21f767496978272a4861b9f3f7fd7495
|
[
"MIT"
] | null | null | null |
tpa/src/ognjen/typed_priority_array.py
|
savara94/TypedPriorityArray
|
8486184a21f767496978272a4861b9f3f7fd7495
|
[
"MIT"
] | null | null | null |
tpa/src/ognjen/typed_priority_array.py
|
savara94/TypedPriorityArray
|
8486184a21f767496978272a4861b9f3f7fd7495
|
[
"MIT"
] | null | null | null |
"""
Module containing TypedPriorityArray
"""
class TypedPriorityArray(object):
"""
Typed data structure that keeps elements in order.
"""
def __init__(self, *args, **kwargs):
self.arg = list(args)
self.kwarg = kwargs
#self.my_list = list(self.arg)
#raise NotImplementedError
@property
def length(self):
return self.__len__()
#return len(self.my_list)
#raise NotImplementedError
@property
def array_type(self):
return type(self.arg)
#raise NotImplementedError
@property
def reversed(self):
raise NotImplementedError
@reversed.setter
def reversed(self, descending):
raise NotImplementedError
def insert(self, element):
raise NotImplementedError
def pop(self, index):
#self.arg = self.arg[:index] + self.arg[index+1:]
return self.arg.pop(index)
def contains(self, element):
return self.__contains__(element)
#raise NotImplementedError
def index_of(self, element):
return self.__getitem__(element)
#ne vraca -1
#raise NotImplementedError
def __contains__(self, element):
cont = element in self.arg
return cont
#raise NotImplementedError
def __iter__(self):
i = self.arg.__iter__()
return i
#raise NotImplementedError
def __getitem__(self, key):
index = self.arg.index(key)
return index
# ne vraca -1
#raise NotImplementedError
def __len__(self):
return self.arg.__len__()
#raise NotImplementedError
def __str__(self):
if reversed == True:
return '<='.join(str(x) for x in self.arg)
else:
return '=>'.join(str(x) for x in self.arg)
#raise NotImplementedError
def __repr__(self):
return self.__str__()
#raise NotImplementedError
| 23.938272
| 57
| 0.610108
| 205
| 1,939
| 5.497561
| 0.268293
| 0.298137
| 0.215617
| 0.08252
| 0.184561
| 0.184561
| 0.047915
| 0.047915
| 0.047915
| 0
| 0
| 0.002199
| 0.296545
| 1,939
| 80
| 58
| 24.2375
| 0.824047
| 0.25116
| 0
| 0.142857
| 0
| 0
| 0.002829
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.357143
| false
| 0
| 0
| 0.166667
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
2e0ccafb9ccfefea9bacac37872be87a641998c5
| 39
|
py
|
Python
|
gui/__init__.py
|
skylogic004/pylinkcloud
|
018535058d508a36c1ea5acf906bcb7114cd914c
|
[
"BSD-3-Clause"
] | 2
|
2017-02-02T04:39:32.000Z
|
2017-12-11T07:09:18.000Z
|
gui/__init__.py
|
skylogic004/pylinkcloud
|
018535058d508a36c1ea5acf906bcb7114cd914c
|
[
"BSD-3-Clause"
] | null | null | null |
gui/__init__.py
|
skylogic004/pylinkcloud
|
018535058d508a36c1ea5acf906bcb7114cd914c
|
[
"BSD-3-Clause"
] | null | null | null |
# from .gui_controls import GuiControls
| 39
| 39
| 0.846154
| 5
| 39
| 6.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.914286
| 0.948718
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
2e1785242c49aa97ac355e9f58271dec8e156fd1
| 264
|
py
|
Python
|
src/pandas_profiling/report/presentation/flavours/html/collapse.py
|
anurag-gandhi/pandas-profiling
|
2373f3a299264f7b312dbe4b92edc14d36e8140e
|
[
"MIT"
] | 76
|
2020-07-06T14:44:05.000Z
|
2022-02-14T15:30:21.000Z
|
src/pandas_profiling/report/presentation/flavours/html/collapse.py
|
anurag-gandhi/pandas-profiling
|
2373f3a299264f7b312dbe4b92edc14d36e8140e
|
[
"MIT"
] | 11
|
2020-08-09T02:30:14.000Z
|
2022-03-12T00:50:14.000Z
|
src/pandas_profiling/report/presentation/flavours/html/collapse.py
|
anurag-gandhi/pandas-profiling
|
2373f3a299264f7b312dbe4b92edc14d36e8140e
|
[
"MIT"
] | 11
|
2020-07-12T16:18:07.000Z
|
2022-02-05T16:48:35.000Z
|
from pandas_profiling.report.presentation.core import Collapse
from pandas_profiling.report.presentation.flavours.html import templates
class HTMLCollapse(Collapse):
def render(self):
return templates.template("collapse.html").render(**self.content)
| 33
| 73
| 0.799242
| 31
| 264
| 6.741935
| 0.612903
| 0.095694
| 0.181818
| 0.239234
| 0.354067
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106061
| 264
| 7
| 74
| 37.714286
| 0.885593
| 0
| 0
| 0
| 0
| 0
| 0.049242
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0.2
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.