hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b19fb30f6fe93d41f8a1e4166f4bc04e5287845b
| 48
|
py
|
Python
|
src/olymptester/__main__.py
|
jrojer/easy-stdio-tester
|
e3ecf4261859cc3d7fe93142ed0e0d773cff60ed
|
[
"MIT"
] | 1
|
2021-09-20T17:02:24.000Z
|
2021-09-20T17:02:24.000Z
|
src/olymptester/__main__.py
|
jrojer/easy-stdio-tester
|
e3ecf4261859cc3d7fe93142ed0e0d773cff60ed
|
[
"MIT"
] | 1
|
2021-11-21T14:35:29.000Z
|
2021-12-08T17:23:44.000Z
|
src/olymptester/__main__.py
|
jrojer/easy-stdio-tester
|
e3ecf4261859cc3d7fe93142ed0e0d773cff60ed
|
[
"MIT"
] | null | null | null |
from olymptester.olymptester import main
main()
| 16
| 40
| 0.833333
| 6
| 48
| 6.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104167
| 48
| 3
| 41
| 16
| 0.930233
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
b1a2fd793a5651e030eaa4d87f7d6f5b9a18fb5b
| 196
|
py
|
Python
|
couchdbkit/designer/__init__.py
|
gelnior/couchdbkit
|
8277d6ffd00553ae0b0b2368636460d40f8d8225
|
[
"MIT"
] | 51
|
2015-04-01T14:53:46.000Z
|
2022-03-16T09:16:10.000Z
|
couchdbkit/designer/__init__.py
|
gelnior/couchdbkit
|
8277d6ffd00553ae0b0b2368636460d40f8d8225
|
[
"MIT"
] | 35
|
2015-07-17T15:39:33.000Z
|
2020-10-22T11:55:20.000Z
|
couchdbkit/designer/__init__.py
|
gelnior/couchdbkit
|
8277d6ffd00553ae0b0b2368636460d40f8d8225
|
[
"MIT"
] | 40
|
2015-01-13T23:38:01.000Z
|
2022-02-26T22:08:01.000Z
|
# -*- coding: utf-8 -
#
# This file is part of couchdbkit released under the MIT license.
# See the NOTICE for more information.
from .fs import FSDoc, document, push, pushdocs, pushapps, clone
| 28
| 66
| 0.72449
| 29
| 196
| 4.896552
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00625
| 0.183673
| 196
| 6
| 67
| 32.666667
| 0.88125
| 0.617347
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4917c4070dcbe1a60b5c2434201175798f5c0e82
| 32,782
|
py
|
Python
|
lib/instruction.py
|
scott-zhou/pyjvm
|
56b22a0fee77d1586bb3fda301896231aded0170
|
[
"MIT"
] | 12
|
2017-01-17T13:47:30.000Z
|
2022-02-10T07:01:55.000Z
|
lib/instruction.py
|
scott-zhou/pyjvm
|
56b22a0fee77d1586bb3fda301896231aded0170
|
[
"MIT"
] | null | null | null |
lib/instruction.py
|
scott-zhou/pyjvm
|
56b22a0fee77d1586bb3fda301896231aded0170
|
[
"MIT"
] | 2
|
2018-02-24T07:56:02.000Z
|
2018-07-11T03:49:35.000Z
|
import logging
from enum import Enum, unique
from lib import constant_pool
from lib import run_time_data
from lib import descriptor
from lib import frame as FRAME
from lib.hijack_jre_methods import get_native_method
from lib import class_loader
OPCODES = {}
def bytecode(code):
def bytecode_decorator(klass):
OPCODES[code] = klass
return klass
return bytecode_decorator
@unique
class NextStep(Enum):
next_instruction = 0
jump_to = 1
invoke_method = 2
method_return = 3
class _instruction(object):
def __init__(self, address):
self.address = address
# For method internal loop
self.need_jump = False
self.jump_to_address = None
# For call other method
self.invoke_method = False
self.invoke_class_name = None
self.invoke_method_name = None
self.invoke_method_descriptor = None
self.invoke_objectref = None
self.invoke_parameters = []
# For return
self.method_return = False
self.return_value = None
def init_jump(self):
self.need_jump = False
self.jump_to_address = None
def init_invoke_method(self):
self.invoke_method = False
self.invoke_class_name = None
self.invoke_method_name = None
self.invoke_method_descriptor = None
self.invoke_objectref = None
self.invoke_parameters = []
def len_of_operand(self):
return 0
def put_operands(self, operand_bytes):
pass
def class_name_and_address(self):
return '{name} (addr:{address})'.format(name=type(self).__name__, address=self.address)
def next_step(self):
if self.invoke_method:
return NextStep.invoke_method
elif self.need_jump:
return NextStep.jump_to
elif self.method_return:
return NextStep.method_return
else:
return NextStep.next_instruction
def execute(self, frame):
raise NotImplementedError('execute in base instruction is not implemented, instruction {name}'.format(name=self.class_name_and_address()))
@bytecode(0x01)
class aconst_null(_instruction):
def execute(self, frame):
frame.operand_stack.append(None)
logging.debug(
f'Instruction {self.class_name_and_address()}: '
f'push null onto operand stack\n'
f'\t{frame.operand_debug_str()}'
)
class iconst_i(_instruction):
def __init__(self, address, i=0):
super().__init__(address)
self.i = i
def execute(self, frame):
frame.operand_stack.append(self.i)
logging.debug(
f'Instruction {self.class_name_and_address()}: '
f'push {self.i} onto operand stack\n'
f'\t{frame.operand_debug_str()}'
)
@bytecode(0x02)
class iconst_m1(iconst_i):
def __init__(self, address):
super().__init__(address, -1)
@bytecode(0x03)
class iconst_0(iconst_i):
def __init__(self, address):
super().__init__(address, 0)
@bytecode(0x04)
class iconst_1(iconst_i):
def __init__(self, address):
super().__init__(address, 1)
@bytecode(0x05)
class iconst_2(iconst_i):
def __init__(self, address):
super().__init__(address, 2)
@bytecode(0x06)
class iconst_3(iconst_i):
def __init__(self, address):
super().__init__(address, 3)
@bytecode(0x07)
class iconst_4(iconst_i):
def __init__(self, address):
super().__init__(address, 4)
@bytecode(0x08)
class iconst_5(iconst_i):
def __init__(self, address):
super().__init__(address, 5)
@bytecode(0x10)
class bipush(iconst_i):
def __init__(self, address):
super().__init__(address)
def len_of_operand(self):
return 1
def put_operands(self, operand_bytes):
assert type(operand_bytes[0]) is int
self.i = operand_bytes[0]
@bytecode(0x11)
class sipush(iconst_i):
def __init__(self, address):
super().__init__(address)
def len_of_operand(self):
return 2
def put_operands(self, operand_bytes):
assert len(operand_bytes) == 2
self.i = int.from_bytes(operand_bytes, byteorder='big', signed=False)
@bytecode(0x12)
class ldc(_instruction):
def len_of_operand(self):
return 1
def put_operands(self, operand_bytes):
assert type(operand_bytes[0]) is int
self.index = operand_bytes[0]
def execute(self, frame):
constant = frame.klass.constant_pool[self.index]
if type(constant) is constant_pool.ConstantString:
frame.operand_stack.append(
frame.klass.constant_pool[constant.string_index].value())
elif type(constant) in (
constant_pool.ConstantInteger,
constant_pool.ConstantFloat
):
frame.operand_stack.append(constant.value)
else:
assert False, \
f'constant type is {type(constant)}, '\
'not know what is used for yet'
class iload_n(_instruction):
def __init__(self, address, n=0):
super().__init__(address)
self.n = n
def execute(self, frame):
assert type(frame.local_variables[self.n]) is int
frame.operand_stack.append(frame.local_variables[self.n])
logging.debug(
f'Instruction {self.class_name_and_address()}: '
f'push {frame.local_variables[self.n]} onto operand stack '
f'from local variable {self.n}\n'
f'\t{frame.operand_debug_str()}\n'
f'\t{frame.local_variable_debug_str()}'
)
@bytecode(0x15)
class iload(iload_n):
def __init__(self, address):
super().__init__(address)
def len_of_operand(self):
return 1
def put_operands(self, operand_bytes):
assert type(operand_bytes[0]) is int
self.n = operand_bytes[0]
@bytecode(0x1a)
class iload_0(iload_n):
def __init__(self, address):
super().__init__(address, 0)
@bytecode(0x1b)
class iload_1(iload_n):
def __init__(self, address):
super().__init__(address, 1)
@bytecode(0x1c)
class iload_2(iload_n):
def __init__(self, address):
super().__init__(address, 2)
@bytecode(0x1d)
class iload_3(iload_n):
def __init__(self, address):
super().__init__(address, 3)
class astore_n(_instruction):
def __init__(self, address, n=0):
super().__init__(address)
self.n = n
def execute(self, frame):
objectref = frame.operand_stack.pop()
# TODO: type can be returnAddress reference, what is returnAddress?
assert type(objectref) is FRAME.Object,\
f'Type of ref in astore is type(objectref)'
frame.local_variables[self.n] = objectref
logging.debug(
f'Instruction {self.class_name_and_address()}: '
f'pop {objectref} from operand stack and store into '
f'local variable {self.n}\n'
f'\t{frame.operand_debug_str()}\n'
f'\t{frame.local_variable_debug_str()}'
)
@bytecode(0x3a)
class astore(astore_n):
def __init__(self, address):
super().__init__(address)
def len_of_operand(self):
return 1
def put_operands(self, operand_bytes):
assert type(operand_bytes[0]) is int
self.n = operand_bytes[0]
@bytecode(0x4b)
class astore_0(astore_n):
def __init__(self, address):
super().__init__(address, 0)
@bytecode(0x4c)
class astore_1(astore_n):
def __init__(self, address):
super().__init__(address, 1)
@bytecode(0x4d)
class astore_2(astore_n):
def __init__(self, address):
super().__init__(address, 2)
@bytecode(0x4e)
class astore_3(astore_n):
def __init__(self, address):
super().__init__(address, 3)
class aload_n(_instruction):
def __init__(self, address, n=0):
super().__init__(address)
self.n = n
def execute(self, frame):
assert type(frame.local_variables[self.n]) is FRAME.Object,\
f'Type of ref in aload is {type(frame.local_variables[self.n])}'
frame.operand_stack.append(frame.local_variables[self.n])
logging.debug(
f'Instruction {self.class_name_and_address()}: '
f'push {frame.local_variables[self.n]} onto operand stack '
f'from local variable {self.n}\n'
f'\t{frame.operand_debug_str()}\n'
f'\t{frame.local_variable_debug_str()}'
)
@bytecode(0x25)
class aload(aload_n):
def __init__(self, address):
super().__init__(address)
def len_of_operand(self):
return 1
def put_operands(self, operand_bytes):
assert type(operand_bytes[0]) is int
self.n = operand_bytes[0]
@bytecode(0x2a)
class aload_0(aload_n):
def __init__(self, address):
super().__init__(address, 0)
@bytecode(0x2b)
class aload_1(aload_n):
def __init__(self, address):
super().__init__(address, 1)
@bytecode(0x2c)
class aload_2(aload_n):
def __init__(self, address):
super().__init__(address, 2)
@bytecode(0x2d)
class aload_3(aload_n):
def __init__(self, address):
super().__init__(address, 3)
class istore_n(_instruction):
def __init__(self, address, n=0):
super().__init__(address)
self.n = n
def execute(self, frame):
i = frame.operand_stack.pop()
assert type(i) is int
frame.local_variables[self.n] = i
logging.debug(
f'Instruction {self.class_name_and_address()}: '
f'pop {i} from operand stack and set to local variable {self.n}\n'
f'\t{frame.operand_debug_str()}\n'
f'\t{frame.local_variable_debug_str()}'
)
@bytecode(0x36)
class istore(istore_n):
def __init__(self, address):
super().__init__(address)
def len_of_operand(self):
return 1
def put_operands(self, operand_bytes):
assert type(operand_bytes[0]) is int
self.n = operand_bytes[0]
@bytecode(0x3b)
class istore_0(istore_n):
def __init__(self, address):
super().__init__(address, 0)
@bytecode(0x3c)
class istore_1(istore_n):
def __init__(self, address):
super().__init__(address, 1)
@bytecode(0x3d)
class istore_2(istore_n):
def __init__(self, address):
super().__init__(address, 2)
@bytecode(0x3e)
class istore_3(istore_n):
def __init__(self, address):
super().__init__(address, 3)
@bytecode(0x57)
class pop(_instruction):
def execute(self, frame):
frame.operand_stack.pop()
logging.debug(
f'Instruction {self.class_name_and_address()}: '
'Pop the top value from the operand stack\n'
f'\t{frame.operand_debug_str()}'
)
@bytecode(0x59)
class dup(_instruction):
def execute(self, frame):
frame.operand_stack.append(frame.operand_stack[-1])
logging.debug(
f'Instruction {self.class_name_and_address()}: '
'Duplicate the top operand stack value\n'
f'\t{frame.operand_debug_str()}'
)
@bytecode(0x60)
class iadd(_instruction):
def execute(self, frame):
value2 = frame.operand_stack.pop()
value1 = frame.operand_stack.pop()
assert type(value1) is int
assert type(value2) is int
value = value1 + value2
frame.operand_stack.append(value)
logging.debug(
f'Instruction {self.class_name_and_address()}: '
f'add value1 and value2, push {value} onto operand stack\n'
f'\t{frame.operand_debug_str()}'
)
@bytecode(0x70)
class irem(_instruction):
def execute(self, frame):
value2 = frame.operand_stack.pop()
value1 = frame.operand_stack.pop()
assert type(value1) is int
assert type(value2) is int
# That the defination in JRE document, but we can use % operator
# value = int(value1 - int(value1 / value2) * value2)
value = value1 % value2
frame.operand_stack.append(value)
logging.debug(
f'Instruction {self.class_name_and_address()}: Remainder int, '
f'value1 is {value1}, value2 is {value2}, '
f'push result value {value} onto operand stack\n'
f'\t{frame.operand_debug_str()}'
)
@bytecode(0x64)
class isub(_instruction):
def execute(self, frame):
value2 = frame.operand_stack.pop()
value1 = frame.operand_stack.pop()
assert type(value1) is int
assert type(value2) is int
value = value1 - value2
frame.operand_stack.append(value)
logging.debug(
f'Instruction {self.class_name_and_address()}: '
f'Subtract value1 and value2, push {value} onto operand stack\n'
f'\t{frame.operand_debug_str()}'
)
@bytecode(0x68)
class imul(_instruction):
def execute(self, frame):
value2 = frame.operand_stack.pop()
value1 = frame.operand_stack.pop()
assert type(value1) is int
assert type(value2) is int
value = value1 * value2
frame.operand_stack.append(value)
logging.debug(
f'Instruction {self.class_name_and_address()}: '
f'multiply value1 and value2, push {value} onto operand stack\n'
f'\t{frame.operand_debug_str()}'
)
@bytecode(0x6c)
class idiv(_instruction):
def execute(self, frame):
value2 = frame.operand_stack.pop()
value1 = frame.operand_stack.pop()
assert type(value1) is int
assert type(value2) is int
if value2 == 0:
raise NotImplementedError(
'Exception have not implemented. '
'Should through ArithmeticException'
)
value = value1 // value2
frame.operand_stack.append(value)
logging.debug(
f'Instruction {self.class_name_and_address()}: '
f'Divide value1 and value2, push {value} onto operand stack\n'
f'\t{frame.operand_debug_str()}'
)
@bytecode(0x84)
class iinc(_instruction):
def len_of_operand(self):
return 2
def put_operands(self, operand_bytes):
assert len(operand_bytes) == 2
self.index = int.from_bytes(operand_bytes[:1], byteorder='big', signed=False)
self.const = int.from_bytes(operand_bytes[1:], byteorder='big', signed=True)
def execute(self, frame):
frame.local_variables[self.index] = frame.local_variables[self.index] + self.const
logging.debug(
'Instruction {na}: increate local value {i} by {v} to value {fv}'.format(
na=self.class_name_and_address(),
i=self.index,
v=self.const,
fv=frame.local_variables[self.index]
)
)
class if_icmpcond(_instruction):
def len_of_operand(self):
return 2
def put_operands(self, operand_bytes):
assert len(operand_bytes) == 2
self.offset = int.from_bytes(operand_bytes, byteorder='big', signed=True)
def execute(self, frame):
self.init_jump()
value2 = frame.operand_stack.pop()
value1 = frame.operand_stack.pop()
if self.cmp(value1, value2):
self.need_jump = True
self.jump_to_address = self.address + self.offset
logging.debug(
'Instruction {na}: compare value1 and value2 from stack, result need {j}'.format(
na=self.class_name_and_address(),
j='jump to address {0}'.format(self.jump_to_address) if self.need_jump else 'not jump'
)
)
def cmp(self, value1, value2):
raise NotImplementedError('cmp function in if_icmpcond will not be implement.')
@bytecode(0x9f)
class if_icmpeq(if_icmpcond):
def cmp(self, value1, value2):
return value1 == value2
@bytecode(0xa0)
class if_icmpne(if_icmpcond):
def cmp(self, value1, value2):
return value1 != value2
@bytecode(0xa1)
class if_icmplt(if_icmpcond):
def cmp(self, value1, value2):
return value1 < value2
@bytecode(0xa2)
class if_icmpge(if_icmpcond):
def cmp(self, value1, value2):
return value1 >= value2
@bytecode(0xa3)
class if_icmpgt(if_icmpcond):
def cmp(self, value1, value2):
return value1 > value2
@bytecode(0xa4)
class if_icmple(if_icmpcond):
def cmp(self, value1, value2):
return value1 <= value2
@bytecode(0xa7)
class goto(_instruction):
def len_of_operand(self):
return 2
def put_operands(self, operand_bytes):
assert len(operand_bytes) == 2
self.offset = int.from_bytes(operand_bytes, byteorder='big', signed=True)
def execute(self, frame):
self.need_jump = True
self.jump_to_address = self.address + self.offset
logging.debug(
'Instruction {na}: jump to address {a}'.format(
na=self.class_name_and_address(),
a=self.jump_to_address
)
)
@bytecode(0xac)
class ireturn(_instruction):
def execute(self, frame):
self.method_return = True
self.return_value = frame.operand_stack.pop()
assert type(self.return_value) is int, 'ireturn, but get value from operand in type {t}'.format(type(self.return_value))
logging.debug(
'Instruction {na}: return value {v}'.format(
na=self.class_name_and_address(),
v=self.return_value
)
)
@bytecode(0xb0)
class areturn(_instruction):
def execute(self, frame):
self.method_return = True
self.return_value = frame.operand_stack.pop()
assert type(self.return_value) is FRAME.Object, \
f'areturn, but get value from operand in type {type(self.return_value)}'
logging.debug(
'Instruction {na}: return value {v}'.format(
na=self.class_name_and_address(),
v=self.return_value
)
)
@bytecode(0xb1)
class instruction_return(_instruction):
def execute(self, frame):
self.method_return = True
logging.debug(
'Instruction {na}: void return'.format(
na=self.class_name_and_address()
)
)
@bytecode(0xb2)
class getstatic(_instruction):
def len_of_operand(self):
return 2
def put_operands(self, operand_bytes):
assert len(operand_bytes) == 2
self.index = int.from_bytes(
operand_bytes, byteorder='big', signed=False)
def execute(self, frame):
field_ref = frame.klass.constant_pool[self.index]
assert type(field_ref) is constant_pool.ConstantFieldref
class_name = field_ref.get_class(frame.klass.constant_pool)
assert run_time_data.method_area[class_name],\
f'Can\'t load class {class_name}'
name, field = field_ref.get_name_descriptor(frame.klass.constant_pool)
value = run_time_data.class_static_fields[class_name][(name, field)]
logging.debug(
f'Instruction {self.class_name_and_address()}: '
f'get static filed {class_name}.{name}({field}) '
'and push onto operand stack'
)
frame.operand_stack.append(value)
logging.debug(
f'After exec getstatic, operand stack: {frame.operand_debug_str()}'
)
@bytecode(0xb3)
class putstatic(_instruction):
def len_of_operand(self):
return 2
def put_operands(self, operand_bytes):
assert len(operand_bytes) == 2
self.index = int.from_bytes(
operand_bytes, byteorder='big', signed=False)
def execute(self, frame):
field_ref = frame.klass.constant_pool[self.index]
assert type(field_ref) is constant_pool.ConstantFieldref
class_name = field_ref.get_class(frame.klass.constant_pool)
assert run_time_data.method_area[class_name],\
f'Can\'t load class {class_name}'
name, field = field_ref.get_name_descriptor(frame.klass.constant_pool)
value = frame.operand_stack.pop()
logging.debug(
f'Instruction {self.class_name_and_address()}: '
f'Put {value} on filed {class_name}.{name}({field})'
)
run_time_data.class_static_fields[class_name][(name, field)] = value
logging.debug(
f'After exec putstatic, operand stack: {frame.operand_debug_str()}'
)
@bytecode(0xb4)
class getfield(_instruction):
def len_of_operand(self):
return 2
def put_operands(self, operand_bytes):
assert len(operand_bytes) == 2
self.index = int.from_bytes(
operand_bytes, byteorder='big', signed=False)
def execute(self, frame):
'''According JVM document, there are lots of checks for putfield
instruction, for type check and for access permission. But they are
all ignored, as we assume this is correct JAVA class file.
'''
field_ref = frame.klass.constant_pool[self.index]
assert type(field_ref) is constant_pool.ConstantFieldref
class_name = field_ref.get_class(frame.klass.constant_pool)
name, field = field_ref.get_name_descriptor(frame.klass.constant_pool)
obj = frame.operand_stack.pop()
value = obj.get_field(class_name, field, name)
logging.debug(
f'Instruction {self.class_name_and_address()}: '
f'Get {obj}(id:{id(obj)}) filed {name} value {value}'
)
frame.operand_stack.append(value)
logging.debug(
f'After exec putfield, operand stack: {frame.operand_debug_str()}'
)
@bytecode(0xb5)
class putfield(_instruction):
def len_of_operand(self):
return 2
def put_operands(self, operand_bytes):
assert len(operand_bytes) == 2
self.index = int.from_bytes(
operand_bytes, byteorder='big', signed=False)
def execute(self, frame):
'''According JVM document, there are lots of checks for putfield
instruction, for type check and for access permission. But they are
all ignored, as we assume this is correct JAVA class file.
'''
field_ref = frame.klass.constant_pool[self.index]
assert type(field_ref) is constant_pool.ConstantFieldref
class_name = field_ref.get_class(frame.klass.constant_pool)
name, field = field_ref.get_name_descriptor(frame.klass.constant_pool)
value = frame.operand_stack.pop()
obj = frame.operand_stack.pop()
logging.debug(
f'Instruction {self.class_name_and_address()}: '
f'Set {obj}(id:{id(obj)}) filed {name} as value {value}'
)
obj.set_field(class_name, field, name, value)
logging.debug(
f'After exec putfield, operand stack: {frame.operand_debug_str()}'
)
@bytecode(0xb7)
class invokespecial(_instruction):
'''Currently only for call super class constructioin
'''
def len_of_operand(self):
return 2
def put_operands(self, operand_bytes):
assert len(operand_bytes) == 2
self.index = int.from_bytes(
operand_bytes, byteorder='big', signed=False)
def execute(self, frame):
self.init_invoke_method()
method_ref = frame.klass.constant_pool[self.index]
assert type(method_ref) in (
constant_pool.ConstantMethodref,
constant_pool.ConstantInterfaceMethodref
)
class_name = method_ref.get_class(frame.klass.constant_pool)
method_name, method_describ = method_ref.get_method(
frame.klass.constant_pool)
# Find klass is not correct implemented now, but enough for invoke
# super class construction
klass = run_time_data.method_area[class_name]
method = klass.get_method(method_name, method_describ)
is_initialization_method = method_name in ['<init>', '<clinit>']
super_class_name = frame.klass.constant_pool[
frame.klass.constant_pool[frame.klass.super_class].name_index
]
is_super_class = type(method_ref) is constant_pool.ConstantMethodref\
and class_name == super_class_name
if not is_initialization_method and is_super_class\
and frame.klass.access_flags.super():
klass = run_time_data.method_area[super_class_name]
method = klass.get_method(method_name, method_describ)
else:
# Otherwise, let C be the class or interface named by the symbolic
# reference. Which don't need do anything
pass
assert not method.access_flags.native(),\
'Not support native method yet.'
assert not method.access_flags.synchronized(),\
'Not support synchronized method yet.'
logging.debug(
'Instruction {na}: {kl}:{me}'.format(
na=self.class_name_and_address(),
kl=class_name,
me=method_name
)
)
self.invoke_method = True
self.invoke_class_name = class_name
self.invoke_method_name = method_name
self.invoke_method_descriptor = method_describ
parameters, _ = descriptor.parse_method_descriptor(method_describ)
for _ in range(len(parameters)):
self.invoke_parameters.append(frame.operand_stack.pop())
# Pop objectref from operand stack
self.invoke_objectref = frame.operand_stack.pop()
self.invoke_parameters.reverse()
@bytecode(0xb8)
class invokestatic(_instruction):
def len_of_operand(self):
return 2
def put_operands(self, operand_bytes):
assert len(operand_bytes) == 2
self.index = int.from_bytes(
operand_bytes, byteorder='big', signed=False)
def execute(self, frame):
self.init_invoke_method()
method_ref = frame.klass.constant_pool[self.index]
assert type(method_ref) in (
constant_pool.ConstantMethodref,
constant_pool.ConstantInterfaceMethodref
)
class_name = method_ref.get_class(frame.klass.constant_pool)
method_name, method_describ = method_ref.get_method(
frame.klass.constant_pool)
logging.debug(
'Instruction {na}: {kl}:{me}'.format(
na=self.class_name_and_address(),
kl=class_name,
me=method_name
)
)
klass = run_time_data.method_area[class_name]
assert klass, f'Can\'t load class {class_name}'
method = klass.get_method(method_name, method_describ)
if method.access_flags.native():
fake_method = get_native_method(
class_name, method_name, method_describ)
if fake_method:
fake_method(frame.operand_stack)
return
else:
assert False, \
'Not support native method yet: '\
f'{class_name}.{method_name}, descriptor {method_describ}'
assert not method.access_flags.synchronized(),\
'Not support synchronized method yet.'
self.invoke_method = True
self.invoke_class_name = class_name
self.invoke_method_name = method_name
self.invoke_method_descriptor = method_describ
parameters, _ = descriptor.parse_method_descriptor(method_describ)
for _ in range(len(parameters)):
self.invoke_parameters.append(frame.operand_stack.pop())
self.invoke_parameters.reverse()
@bytecode(0xb9)
class invokeinterface(_instruction):
def len_of_operand(self):
return 4
def put_operands(self, operand_bytes):
assert len(operand_bytes) == 4
self.index = int.from_bytes(
operand_bytes[:2], byteorder='big', signed=False)
assert operand_bytes[2] > 0
assert operand_bytes[3] == 0
def execute(self, frame):
self.init_invoke_method()
method_ref = frame.klass.constant_pool[self.index]
assert type(method_ref) is constant_pool.ConstantInterfaceMethodref
# class_name = method_ref.get_class(frame.klass.constant_pool)
method_name, method_describ = method_ref.get_method(
frame.klass.constant_pool)
assert method_name not in ['<init>', '<clinit>'],\
'Invoke initialization method in invokeinterface'
self.invoke_method_name = method_name
self.invoke_method_descriptor = method_describ
parameters, _ = descriptor.parse_method_descriptor(method_describ)
for _ in range(len(parameters)):
self.invoke_parameters.append(frame.operand_stack.pop())
self.invoke_parameters.reverse()
# Pop objectref from operand stack
self.invoke_objectref = frame.operand_stack.pop()
klass, method = self.invoke_objectref.klass.interface_resolution(
method_name, method_describ
)
if not method:
# Not resoluve method
assert False, 'Method resolve exception not implemented yet.'
if method.access_flags.private() or method.access_flags.static():
assert False, \
'IncompatibleClassChangeError exception not implemented yet.'
assert not method.access_flags.native(),\
'Not support native method yet.'
assert not method.access_flags.synchronized(),\
'Not support synchronized method yet.'
logging.debug(
f'Instruction {self.class_name_and_address()}: {method.name}'
)
self.invoke_method = True
self.invoke_class_name = klass.name()
@bytecode(0xb6)
class invokevirtual(_instruction):
    """JVM `invokevirtual` (0xb6): invoke an instance method with virtual
    dispatch on the runtime class of the objectref."""
    def len_of_operand(self):
        # One two-byte constant-pool index follows the opcode.
        return 2
    def put_operands(self, operand_bytes):
        """Decode the 2-byte big-endian constant-pool index."""
        assert len(operand_bytes) == 2
        self.index = int.from_bytes(
            operand_bytes[:2], byteorder='big', signed=False)
    def execute(self, frame):
        """Resolve the named method and stage the invocation
        (arguments, objectref, resolved class) on this instruction."""
        self.init_invoke_method()
        method_ref = frame.klass.constant_pool[self.index]
        assert type(method_ref) is constant_pool.ConstantMethodref
        class_name = method_ref.get_class(frame.klass.constant_pool)
        method_name, method_describ = method_ref.get_method(
            frame.klass.constant_pool)
        klass = run_time_data.method_area[class_name]
        method = klass.get_method(method_name, method_describ)
        if method.isSignaturePolymorphic():
            raise NotImplementedError(
                'Invoke signature polymorphic method is not implemented.')
        self.invoke_method_name = method_name
        self.invoke_method_descriptor = method_describ
        # Arguments were pushed left-to-right, so pop then reverse to
        # restore declaration order.
        parameters, _ = descriptor.parse_method_descriptor(method_describ)
        for _ in range(len(parameters)):
            self.invoke_parameters.append(frame.operand_stack.pop())
        self.invoke_parameters.reverse()
        # Pop objectref from operand stack
        self.invoke_objectref = frame.operand_stack.pop()
        # NOTE(review): virtual dispatch reuses interface_resolution here,
        # mirroring invokeinterface — confirm a dedicated virtual method
        # resolution (superclass walk per JVMS 5.4.3.3) is not intended.
        klass, method = self.invoke_objectref.klass.interface_resolution(
            method_name, method_describ
        )
        if not method:
            # Method was not resolved.
            assert False, 'Method resolve exception not implemented yet.'
        if method.access_flags.static():
            assert False, \
                'IncompatibleClassChangeError exception not implemented yet.'
        assert not method.access_flags.native(),\
            'Not support native method yet.'
        assert not method.access_flags.synchronized(),\
            'Not support synchronized method yet.'
        logging.debug(
            f'Instruction {self.class_name_and_address()}: {method.name}'
        )
        # Signal the interpreter loop to push a new frame for this call.
        self.invoke_method = True
        self.invoke_class_name = klass.name()
@bytecode(0xbb)
class new(_instruction):
    """JVM `new` (0xbb): allocate an instance of the class named by a
    constant-pool entry and push its reference onto the operand stack."""
    def len_of_operand(self):
        # One two-byte constant-pool index follows the opcode.
        return 2
    def put_operands(self, operand_bytes):
        """Decode the 2-byte big-endian constant-pool index."""
        assert len(operand_bytes) == 2
        self.index = int.from_bytes(operand_bytes, 'big', signed=False)
    def execute(self, frame):
        """Look up the target class, build an object for it, and push it."""
        pool = frame.klass.constant_pool
        # CONSTANT_Class entry -> CONSTANT_Utf8 entry holding the class name.
        const_class = pool[self.index]
        assert type(const_class) is constant_pool.ConstantClass
        name_entry = pool[const_class.name_index]
        assert type(name_entry) is constant_pool.ConstantUtf8
        # The class must already be loaded into the method area.
        target_class = run_time_data.method_area[name_entry.str_value]
        obj = FRAME.Object(target_class)
        class_loader.init_class_object(target_class, obj)
        frame.operand_stack.append(obj)
        logging.debug(
            f'Instruction {self.class_name_and_address()}: '
            f'push reference {obj} onto operand stack\n'
            f'\t{frame.operand_debug_str()}'
        )
| 31.612343
| 146
| 0.63727
| 4,030
| 32,782
| 4.910422
| 0.080397
| 0.032745
| 0.038658
| 0.031836
| 0.787357
| 0.761282
| 0.742483
| 0.72318
| 0.708778
| 0.667492
| 0
| 0.013474
| 0.261973
| 32,782
| 1,036
| 147
| 31.642857
| 0.804456
| 0.032548
| 0
| 0.589461
| 0
| 0
| 0.137321
| 0.049599
| 0
| 0
| 0.007334
| 0.000965
| 0.078431
| 1
| 0.144608
| false
| 0.002451
| 0.009804
| 0.033088
| 0.281863
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4948559bc0558c029fa6e64321b816cc3f908c21
| 71
|
py
|
Python
|
discovery-provider/src/queries/get_genre_metrics_unit_test.py
|
Tenderize/audius-protocol
|
aa15844e3f12812fe8aaa81e2cb6e5c5fa89ff51
|
[
"Apache-2.0"
] | 1
|
2022-03-27T21:40:36.000Z
|
2022-03-27T21:40:36.000Z
|
discovery-provider/src/queries/get_genre_metrics_unit_test.py
|
Tenderize/audius-protocol
|
aa15844e3f12812fe8aaa81e2cb6e5c5fa89ff51
|
[
"Apache-2.0"
] | null | null | null |
discovery-provider/src/queries/get_genre_metrics_unit_test.py
|
Tenderize/audius-protocol
|
aa15844e3f12812fe8aaa81e2cb6e5c5fa89ff51
|
[
"Apache-2.0"
] | null | null | null |
def test():
    """Unit-test placeholder.

    The actual coverage lives in /integration_tests/test_get_genre_metrics.py.
    """
| 23.666667
| 58
| 0.71831
| 10
| 71
| 4.7
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112676
| 71
| 2
| 59
| 35.5
| 0.746032
| 0.676056
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| true
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
49513927adb9078fb8aebf50adc200a2441f899f
| 276
|
py
|
Python
|
course/views.py
|
pakponj/coursing-field
|
0368c2fc546b3955dc1fef1fc00252d8f015f56d
|
[
"Apache-2.0"
] | null | null | null |
course/views.py
|
pakponj/coursing-field
|
0368c2fc546b3955dc1fef1fc00252d8f015f56d
|
[
"Apache-2.0"
] | null | null | null |
course/views.py
|
pakponj/coursing-field
|
0368c2fc546b3955dc1fef1fc00252d8f015f56d
|
[
"Apache-2.0"
] | null | null | null |
from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse
# Create your views here.
def createCourse(req):
    # Render the static course-creation form page.
    return render(req, 'course/createCourse.html')
def createNewCourse(req):
    # NOTE(review): simply redirects back to the creation form — confirm
    # whether form processing was meant to happen here.
    return redirect(reverse('course:createCourse'))
| 30.666667
| 64
| 0.786232
| 36
| 276
| 5.944444
| 0.638889
| 0.093458
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012346
| 0.119565
| 276
| 8
| 65
| 34.5
| 0.868313
| 0.083333
| 0
| 0
| 0
| 0
| 0.171315
| 0.095618
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
49962c39a0402e360e9f58c4da9e2072f5bc95fa
| 89
|
py
|
Python
|
collectors/3/test.py
|
parker-pu/octopus
|
18171127fea7f7337d121b6042e1308e4dff6668
|
[
"Apache-2.0"
] | null | null | null |
collectors/3/test.py
|
parker-pu/octopus
|
18171127fea7f7337d121b6042e1308e4dff6668
|
[
"Apache-2.0"
] | null | null | null |
collectors/3/test.py
|
parker-pu/octopus
|
18171127fea7f7337d121b6042e1308e4dff6668
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Emit three numbered probe lines (same output as the original three prints).
for n in (1, 2, 3):
    print(f"test1 -- > {n}")
| 14.833333
| 21
| 0.539326
| 13
| 89
| 3.692308
| 0.692308
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081081
| 0.168539
| 89
| 5
| 22
| 17.8
| 0.567568
| 0.224719
| 0
| 0
| 0
| 0
| 0.529412
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
62042a52bde645c9eb141f2ffc8225b3002f8aff
| 96
|
py
|
Python
|
pipeline/__init__.py
|
gmrukwa/msi-preprocessing-pipeline
|
bc6d26daba42575babcdf5287999f1f844cf2e8e
|
[
"Apache-2.0"
] | null | null | null |
pipeline/__init__.py
|
gmrukwa/msi-preprocessing-pipeline
|
bc6d26daba42575babcdf5287999f1f844cf2e8e
|
[
"Apache-2.0"
] | 5
|
2019-11-26T19:13:32.000Z
|
2019-11-29T08:14:28.000Z
|
pipeline/__init__.py
|
gmrukwa/msi-preprocessing-pipeline
|
bc6d26daba42575babcdf5287999f1f844cf2e8e
|
[
"Apache-2.0"
] | null | null | null |
"""Definition of batch jobs pipeline with Luigi"""
from ._pipeline import PreprocessingPipeline
| 32
| 50
| 0.8125
| 11
| 96
| 7
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114583
| 96
| 2
| 51
| 48
| 0.905882
| 0.458333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
620ad208f69fa9c202bfdbad208e9db1115e271d
| 115
|
py
|
Python
|
Testpage/admin.py
|
Hu-toetsregistratie/OICT-Toetsregistratie
|
0b918feeeb23149dc64d55f80cf9f2048fbcf63c
|
[
"Apache-2.0"
] | null | null | null |
Testpage/admin.py
|
Hu-toetsregistratie/OICT-Toetsregistratie
|
0b918feeeb23149dc64d55f80cf9f2048fbcf63c
|
[
"Apache-2.0"
] | null | null | null |
Testpage/admin.py
|
Hu-toetsregistratie/OICT-Toetsregistratie
|
0b918feeeb23149dc64d55f80cf9f2048fbcf63c
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib import admin
# Register your models here.
from .models import Test
# Expose the Test model in the Django admin with default options.
admin.site.register(Test)
| 16.428571
| 32
| 0.791304
| 17
| 115
| 5.352941
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13913
| 115
| 7
| 33
| 16.428571
| 0.919192
| 0.226087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6261bd8ca13bb27bda52cd8046f1d83f0dbae56d
| 10,748
|
py
|
Python
|
api/tests/test_user.py
|
cjmash/art-backend
|
fb1dfd69cca9cda1d8714bd7066c3920d1a97312
|
[
"MIT"
] | null | null | null |
api/tests/test_user.py
|
cjmash/art-backend
|
fb1dfd69cca9cda1d8714bd7066c3920d1a97312
|
[
"MIT"
] | null | null | null |
api/tests/test_user.py
|
cjmash/art-backend
|
fb1dfd69cca9cda1d8714bd7066c3920d1a97312
|
[
"MIT"
] | null | null | null |
from unittest.mock import patch
from django.contrib.auth import get_user_model
from rest_framework.test import APIClient
from api.tests import APIBaseTestCase
User = get_user_model()
client = APIClient()
class UserTestCase(APIBaseTestCase):
    """Tests for the custom User model and the /api/v1/users/ endpoint.

    Authentication is faked by patching api.authentication.auth.verify_id_token,
    so requests carry arbitrary bearer tokens and the patched verifier maps
    them to a user email.
    """
    def setUp(self):
        super(UserTestCase, self).setUp()
        # One regular user and one admin, each paired with a fake token.
        self.user = User.objects.create(
            email='test@site.com', cohort=20,
            slack_handle='@test_user', password='devpassword'
        )
        self.token_user = 'testtoken'
        self.admin_user = User.objects.create_superuser(
            email='admin@site.com', cohort=20,
            slack_handle='@admin', password='devpassword'
        )
        self.token_admin = 'admintesttoken'
        self.users_url = "/api/v1/users/"
    def test_can_add_user(self):
        users_count_before = User.objects.count()
        new_user = User.objects.create(
            email='test-1@site.com', cohort=20,
            slack_handle='@test_user-1', password='devpassword'
        )
        users_count_after = User.objects.count()
        self.assertEqual(new_user.email, 'test-1@site.com')
        self.assertEqual(new_user.cohort, 20)
        self.assertEqual(new_user.slack_handle, '@test_user-1')
        self.assertEqual(new_user.password, 'devpassword')
        self.assertEqual(users_count_before, users_count_after - 1)
    def test_add_user_without_password(self):
        # Password is optional at the model level; it defaults to None.
        users_count_before = User.objects.count()
        new_user = User.objects.create(
            email='test-1@site.com', cohort=20,
            slack_handle='@test_user-1'
        )
        users_count_after = User.objects.count()
        self.assertEqual(new_user.password, None)
        self.assertEqual(users_count_before, users_count_after - 1)
    def test_can_update_user(self):
        self.user.name = 'edited_name'
        self.user.save()
        self.assertIn("edited_name", self.user.name)
    def test_can_delete_a_user(self):
        new_user = User.objects.create(
            email='test-1@site.com', cohort=20,
            slack_handle='@test_user-1', password='devpassword'
        )
        users_count_before = User.objects.count()
        new_user.delete()
        users_count_after = User.objects.count()
        self.assertEqual(users_count_before, users_count_after + 1)
    def test_user_model_string_representation(self):
        # NOTE(review): assertEquals is a deprecated alias of assertEqual.
        self.assertEquals(str(self.user), 'test@site.com')
    def test_user_email_is_required(self):
        with self.assertRaises(ValueError):
            User.objects.create_user(
                email='', name='test_user1',
                cohort=20, slack_handle='@test_user1',
                password='devpassword')
    def test_user_cohort_is_required(self):
        with self.assertRaises(ValueError):
            User.objects.create_user(
                email='test1@site.com', name='test_name',
                cohort='', slack_handle='@test_user1',
                password='devpassword')
    def test_user_slack_handle_is_required(self):
        with self.assertRaises(ValueError):
            User.objects.create_user(
                email='test1@site.com', name='test_name',
                cohort=20, slack_handle='',
                password='devpassword')
    def test_create_normal_user(self):
        # Exercises both the public create_user and the private _create_user.
        new_user_1 = User.objects.create_user(
            email='test-1@site.com', cohort=20,
            slack_handle='@test_user-1', password='devpassword'
        )
        new_user_2 = User.objects._create_user(
            email='test-2@site.com', cohort=20,
            slack_handle='@test_user-2', password='devpassword'
        )
        self.assertFalse(new_user_1.is_staff)
        self.assertFalse(new_user_1.is_superuser)
        self.assertFalse(new_user_2.is_staff)
        self.assertFalse(new_user_2.is_superuser)
    def test_create_superuser(self):
        new_user_1 = User.objects.create_superuser(
            email='test-2@site.com', cohort=20,
            slack_handle='@test_user-2', password='devpassword'
        )
        self.assertTrue(new_user_1.is_staff)
        self.assertTrue(new_user_1.is_superuser)
    def test_create_superuser_with_staff_false(self):
        # Superusers must have both flags set; contradictory flags raise.
        with self.assertRaises(ValueError):
            User.objects.create_superuser(
                email='test-2@site.com', cohort=20,
                slack_handle='@test_user-2', password='devpassword',
                is_staff=False, is_superuser=True
            )
    def test_create_superuser_with_superuser_false(self):
        with self.assertRaises(ValueError):
            User.objects.create_superuser(
                email='test-2@site.com', cohort=20,
                slack_handle='@test_user-2', password='devpassword',
                is_staff=True, is_superuser=False
            )
    def test_non_authenticated_user_add_user_from_api_endpoint(self):
        response = client.post(self.users_url)
        self.assertEqual(response.data, {
            'detail': 'Authentication credentials were not provided.'
        })
        self.assertEqual(response.status_code, 401)
    def test_non_authenticated_user_get_user_from_api_endpoint(self):
        response = client.get(self.users_url)
        self.assertEqual(response.data, {
            'detail': 'Authentication credentials were not provided.'
        })
        self.assertEqual(response.status_code, 401)
    @patch('api.authentication.auth.verify_id_token')
    def test_non_admin_add_user_from_api_endpoint(self, mock_verify_token):
        mock_verify_token.return_value = {'email': self.user.email}
        response = client.post(
            self.users_url,
            HTTP_AUTHORIZATION="Token {}".format(self.token_user))
        self.assertEqual(response.data, {
            'detail': 'You do not have permission to perform this action.'
        })
        self.assertEqual(response.status_code, 403)
    @patch('api.authentication.auth.verify_id_token')
    def test_non_admin_user_et_user_from_api_endpoint(self, mock_verify_token):
        # NOTE(review): "et" in the name looks like a typo for "get".
        mock_verify_token.return_value = {'email': self.user.email}
        response = client.get(
            self.users_url,
            HTTP_AUTHORIZATION="Token {}".format(self.token_user))
        self.assertEqual(response.data, {
            'detail': 'You do not have permission to perform this action.'
        })
        self.assertEqual(response.status_code, 403)
    @patch('api.authentication.auth.verify_id_token')
    def test_admin_user_add_users_from_api_endpoint(self, mock_verify_token):
        mock_verify_token.return_value = {'email': self.admin_user.email}
        users_count_before = User.objects.count()
        data = {
            "password": "devpassword",
            "email": "test_user@mail.com",
        }
        response = client.post(
            self.users_url,
            data=data,
            format='json',
            HTTP_AUTHORIZATION="Token {}".format(self.token_admin))
        users_count_after = User.objects.count()
        self.assertEqual(response.status_code, 201)
        self.assertEqual(users_count_after, users_count_before + 1)
    @patch('api.authentication.auth.verify_id_token')
    def test_admin_user_get_users_from_api_endpoint(self, mock_verify_token):
        mock_verify_token.return_value = {'email': self.admin_user.email}
        response = client.get(
            self.users_url,
            HTTP_AUTHORIZATION="Token {}".format(self.token_admin))
        self.assertEqual(len(response.data['results']), User.objects.count())
        self.assertEqual(response.status_code, 200)
    @patch('api.authentication.auth.verify_id_token')
    def test_user_not_found_from_api_endpoint(self, mock_verify_token):
        # Token verifies but the email matches no user record.
        mock_verify_token.return_value = {'email': 'unavailable@email.com'}
        response = client.get(
            self.users_url,
            HTTP_AUTHORIZATION="Token {}".format('sometoken'))
        self.assertEqual(response.data, {
            'detail': 'Unable to authenticate.'
        })
    @patch('api.authentication.auth.verify_id_token')
    def test_inactive_user_from_api_endpoint(self, mock_verify_token):
        self.admin_user.is_active = False
        self.admin_user.save()
        mock_verify_token.return_value = {'email': self.admin_user.email}
        response = client.get(
            self.users_url,
            HTTP_AUTHORIZATION="Token {}".format(self.token_admin))
        self.assertEqual(response.data, {
            'detail': 'User inactive or deleted.'
        })
    @patch('api.authentication.auth.verify_id_token')
    def test_add_user_from_api_endpoint_without_email(self, mock_verify_token):
        mock_verify_token.return_value = {'email': self.admin_user.email}
        data = {
            "password": "devpassword",
            "email": "",
        }
        response = client.post(
            self.users_url,
            data=data,
            format='json',
            HTTP_AUTHORIZATION="Token {}".format(self.token_admin))
        self.assertEqual(response.data, {
            'email': ['This field may not be blank.']
        })
        self.assertEqual(response.status_code, 400)
    @patch('api.authentication.auth.verify_id_token')
    def test_add_user_api_endpoint_cant_allow_put(self, mock_verify_token):
        mock_verify_token.return_value = {'email': self.admin_user.email}
        user = User.objects.filter(
            email='test@site.com').first()
        response = client.put(
            '{}{}/'.format(self.users_url, user.id),
            HTTP_AUTHORIZATION="Token {}".format(self.token_admin))
        self.assertEqual(response.data, {
            'detail': 'Method "PUT" not allowed.'
        })
    @patch('api.authentication.auth.verify_id_token')
    def test_add_user_api_endpoint_cant_allow_patch(self, mock_verify_token):
        mock_verify_token.return_value = {'email': self.admin_user.email}
        user = User.objects.filter(
            email='test@site.com').first()
        response = client.patch(
            '{}{}/'.format(self.users_url, user.id),
            HTTP_AUTHORIZATION="Token {}".format(self.token_admin))
        self.assertEqual(response.data, {
            'detail': 'Method "PATCH" not allowed.'
        })
    @patch('api.authentication.auth.verify_id_token')
    def test_add_user_api_endpoint_cant_allow_delete(self, mock_verify_token):
        mock_verify_token.return_value = {'email': self.admin_user.email}
        user = User.objects.filter(
            email='test@site.com').first()
        response = client.delete(
            '{}{}/'.format(self.users_url, user.id),
            HTTP_AUTHORIZATION="Token {}".format(self.token_admin))
        self.assertEqual(response.data, {
            'detail': 'Method "DELETE" not allowed.'
        })
| 41.180077
| 79
| 0.642538
| 1,273
| 10,748
| 5.135899
| 0.108405
| 0.061946
| 0.045886
| 0.034873
| 0.814775
| 0.773325
| 0.718262
| 0.699296
| 0.661823
| 0.632456
| 0
| 0.010312
| 0.242092
| 10,748
| 260
| 80
| 41.338462
| 0.792291
| 0
| 0
| 0.55794
| 0
| 0
| 0.155285
| 0.03824
| 0
| 0
| 0
| 0
| 0.171674
| 1
| 0.107296
| false
| 0.072961
| 0.017167
| 0
| 0.128755
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
62738247273ff8fc405ad2519e35e73b2cd75876
| 225
|
py
|
Python
|
j.py
|
shng5175/Random-Python-Stuff
|
6b6687da9195a36244f7b22de7ba2984ce78995d
|
[
"bzip2-1.0.6"
] | null | null | null |
j.py
|
shng5175/Random-Python-Stuff
|
6b6687da9195a36244f7b22de7ba2984ce78995d
|
[
"bzip2-1.0.6"
] | null | null | null |
j.py
|
shng5175/Random-Python-Stuff
|
6b6687da9195a36244f7b22de7ba2984ce78995d
|
[
"bzip2-1.0.6"
] | null | null | null |
x = 3.14
print "Our favorite ratio:" + x
File "<stdin>", line 1, in <module>
TypeError: cannot concatenate 'str' and 'float' objects
xAsString = str(x)
print "Our favorite ratio:" + xAsString
Our favorite ratio:3.14
| 28.125
| 56
| 0.684444
| 34
| 225
| 4.529412
| 0.617647
| 0.214286
| 0.311688
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038462
| 0.191111
| 225
| 7
| 57
| 32.142857
| 0.807692
| 0
| 0
| 0
| 0
| 0
| 0.243119
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.285714
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
654f2dd2ab50f5b26f16bfe78154d19397a47f00
| 275
|
py
|
Python
|
api/config/database.py
|
keithshum/python-flask-oradb-restful-helloworld
|
5dd6a24c6ac98675dc1b126486be6787da5c3ab1
|
[
"Apache-2.0"
] | null | null | null |
api/config/database.py
|
keithshum/python-flask-oradb-restful-helloworld
|
5dd6a24c6ac98675dc1b126486be6787da5c3ab1
|
[
"Apache-2.0"
] | null | null | null |
api/config/database.py
|
keithshum/python-flask-oradb-restful-helloworld
|
5dd6a24c6ac98675dc1b126486be6787da5c3ab1
|
[
"Apache-2.0"
] | null | null | null |
import os
# Oracle connection settings, read from the environment at import time.
db_user = os.environ.get('POC_LS2PDB1_USER')
db_password = os.environ.get('POC_LS2PDB1_PASSWORD')
db_connectstring = os.environ.get('POC_LS2PDB1_CONNECTIONSTRING')
# Pool bounds default to 5. NOTE(review): when the env var IS set these are
# strings, not ints — confirm the consumer coerces them.
db_min = os.environ.get('POC_LS2PDB1_POOLMIN', 5)
db_max = os.environ.get('POC_LS2PDB1_POOLMAX', 5)
| 34.375
| 65
| 0.792727
| 44
| 275
| 4.613636
| 0.363636
| 0.221675
| 0.295567
| 0.369458
| 0.541872
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047059
| 0.072727
| 275
| 7
| 66
| 39.285714
| 0.74902
| 0
| 0
| 0
| 0
| 0
| 0.370909
| 0.101818
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.166667
| 0.166667
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
6563c5d4e0aa540a966684825aaee85160d1c404
| 52
|
py
|
Python
|
qq_bot_service/plugin/group_message_plugin/test_plugin/__init__.py
|
HynemanKan/qq_bot
|
3bca4cbeabee4dec6647a281a08184add9647dcb
|
[
"MIT"
] | 23
|
2019-11-05T14:08:09.000Z
|
2022-02-21T13:07:28.000Z
|
qq_bot_service/plugin/group_message_plugin/test_plugin/__init__.py
|
HynemanKan/qq_bot
|
3bca4cbeabee4dec6647a281a08184add9647dcb
|
[
"MIT"
] | null | null | null |
qq_bot_service/plugin/group_message_plugin/test_plugin/__init__.py
|
HynemanKan/qq_bot
|
3bca4cbeabee4dec6647a281a08184add9647dcb
|
[
"MIT"
] | 6
|
2020-06-03T15:34:03.000Z
|
2021-11-16T00:22:16.000Z
|
from .app import blueprint,handle,plugin_name, setup
| 52
| 52
| 0.846154
| 8
| 52
| 5.375
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 52
| 1
| 52
| 52
| 0.895833
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
65b45fa6163e7adbbb90baf26698b44517823bb0
| 63
|
py
|
Python
|
lib/hachoir/parser/network/__init__.py
|
0x20Man/Watcher3
|
4656b42bc5879a3741bb95f534b7c6612a25264d
|
[
"Apache-2.0"
] | 320
|
2017-03-28T23:33:45.000Z
|
2022-02-17T08:45:01.000Z
|
lib/hachoir/parser/network/__init__.py
|
0x20Man/Watcher3
|
4656b42bc5879a3741bb95f534b7c6612a25264d
|
[
"Apache-2.0"
] | 300
|
2017-03-28T19:22:54.000Z
|
2021-12-01T01:11:55.000Z
|
lib/hachoir/parser/network/__init__.py
|
0x20Man/Watcher3
|
4656b42bc5879a3741bb95f534b7c6612a25264d
|
[
"Apache-2.0"
] | 90
|
2017-03-29T16:12:43.000Z
|
2022-03-01T06:23:48.000Z
|
from hachoir.parser.network.tcpdump import TcpdumpFile # noqa
| 31.5
| 62
| 0.825397
| 8
| 63
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 63
| 1
| 63
| 63
| 0.928571
| 0.063492
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
65e27005cb21f53aba233f2f4313e34c35cf9bab
| 119
|
py
|
Python
|
DICOMOFFIS/admin.py
|
pfagomez/DICOMOFFIS
|
9d7c5d17933544c0d33004d019fbf96a81ffa9ce
|
[
"BSD-3-Clause"
] | null | null | null |
DICOMOFFIS/admin.py
|
pfagomez/DICOMOFFIS
|
9d7c5d17933544c0d33004d019fbf96a81ffa9ce
|
[
"BSD-3-Clause"
] | null | null | null |
DICOMOFFIS/admin.py
|
pfagomez/DICOMOFFIS
|
9d7c5d17933544c0d33004d019fbf96a81ffa9ce
|
[
"BSD-3-Clause"
] | null | null | null |
from django.contrib import admin
from .models import eintrag
# Register your models here.
# Expose the eintrag model in the Django admin with default options.
admin.site.register(eintrag)
| 23.8
| 32
| 0.815126
| 17
| 119
| 5.705882
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 119
| 5
| 33
| 23.8
| 0.92381
| 0.218487
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
02a808da78a3f62ad421bd1280886141b03bd733
| 39
|
py
|
Python
|
BasicExerciseAndKnowledge/w3cschool/n54.py
|
Jonathan1214/learn-python
|
19d0299b30e953069f19402bff5c464c4d5580be
|
[
"MIT"
] | null | null | null |
BasicExerciseAndKnowledge/w3cschool/n54.py
|
Jonathan1214/learn-python
|
19d0299b30e953069f19402bff5c464c4d5580be
|
[
"MIT"
] | null | null | null |
BasicExerciseAndKnowledge/w3cschool/n54.py
|
Jonathan1214/learn-python
|
19d0299b30e953069f19402bff5c464c4d5580be
|
[
"MIT"
] | null | null | null |
#coding:utf-8
# Problem: extract digits 4 through 7 of an integer a, counting from the right end.
| 7.8
| 22
| 0.692308
| 7
| 39
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 0.128205
| 39
| 4
| 23
| 9.75
| 0.705882
| 0.846154
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
02bebbcd40ae0a95dcc146dc7cc26cb85b027a50
| 63
|
py
|
Python
|
components/__init__.py
|
daqcri/Fahes_GUI
|
a37f0d3dfdbcd3162bae30ae284aab1197ce9f8b
|
[
"MIT"
] | 1
|
2020-11-10T16:13:12.000Z
|
2020-11-10T16:13:12.000Z
|
components/__init__.py
|
daqcri/PFD_Demo
|
caf5e51dcc884ebd0f57203d26f797a1ba8c145e
|
[
"MIT"
] | null | null | null |
components/__init__.py
|
daqcri/PFD_Demo
|
caf5e51dcc884ebd0f57203d26f797a1ba8c145e
|
[
"MIT"
] | 1
|
2020-12-11T14:13:52.000Z
|
2020-12-11T14:13:52.000Z
|
from .header import get_logo, Header, make_dash_table, get_menu
| 63
| 63
| 0.84127
| 11
| 63
| 4.454545
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 63
| 1
| 63
| 63
| 0.859649
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
02c3ff90ff9b3b54efa17f88820ff771ae8cb02f
| 129
|
py
|
Python
|
cogs/utils/__init__.py
|
nickalaskreynolds/nano-chan
|
0af993b78cf7c22e5e29ea1d2d86475cbc1737bd
|
[
"MIT"
] | 15
|
2017-11-28T12:00:13.000Z
|
2020-09-10T06:23:29.000Z
|
cogs/utils/__init__.py
|
nickalaskreynolds/nano-chan
|
0af993b78cf7c22e5e29ea1d2d86475cbc1737bd
|
[
"MIT"
] | 25
|
2018-09-17T17:52:01.000Z
|
2019-12-02T04:42:28.000Z
|
cogs/utils/__init__.py
|
nickalaskreynolds/nano-chan
|
0af993b78cf7c22e5e29ea1d2d86475cbc1737bd
|
[
"MIT"
] | 10
|
2017-11-28T11:55:55.000Z
|
2019-12-23T19:04:55.000Z
|
from .db_utils import PostgresController
from .enums import Action, Change
__all__ = ['PostgresController', 'Action', 'Change']
| 25.8
| 52
| 0.775194
| 14
| 129
| 6.785714
| 0.642857
| 0.252632
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116279
| 129
| 4
| 53
| 32.25
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.232558
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
02dc4410ff806387013a0e3ecfb353143c171aa3
| 255
|
py
|
Python
|
imeerk/calendars/icalendar/IcalCalendar.py
|
nikialeksey/imeerk
|
fdf9cbdf9c139418ec872489f9615dbd88b378c2
|
[
"MIT"
] | null | null | null |
imeerk/calendars/icalendar/IcalCalendar.py
|
nikialeksey/imeerk
|
fdf9cbdf9c139418ec872489f9615dbd88b378c2
|
[
"MIT"
] | 9
|
2018-10-19T18:35:53.000Z
|
2018-11-14T08:34:52.000Z
|
imeerk/calendars/icalendar/IcalCalendar.py
|
nikialeksey/imeerk
|
fdf9cbdf9c139418ec872489f9615dbd88b378c2
|
[
"MIT"
] | null | null | null |
import abc
import typing
class IcalCalendar(metaclass=abc.ABCMeta):
    """Abstract interface for an iCalendar-backed calendar."""

    @abc.abstractmethod
    def as_html(self, sync_url: typing.Callable[[str], str]) -> str:
        """Render the calendar as HTML; sync_url maps a name to its sync URL."""

    @abc.abstractmethod
    def sync(self, folder: str) -> None:
        """Synchronize calendar contents into the given folder."""
| 19.615385
| 68
| 0.658824
| 32
| 255
| 5.1875
| 0.59375
| 0.204819
| 0.240964
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.227451
| 255
| 12
| 69
| 21.25
| 0.84264
| 0
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0.222222
| 0.222222
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
02fd4d34718049a408c3707a2c02232b37e50420
| 92
|
py
|
Python
|
wsgi.py
|
rwolande/site_api
|
6de42936789c75ffc3896f1fd3f2cb3e91e02862
|
[
"MIT"
] | null | null | null |
wsgi.py
|
rwolande/site_api
|
6de42936789c75ffc3896f1fd3f2cb3e91e02862
|
[
"MIT"
] | null | null | null |
wsgi.py
|
rwolande/site_api
|
6de42936789c75ffc3896f1fd3f2cb3e91e02862
|
[
"MIT"
] | null | null | null |
import sys
# Make the application package importable for the WSGI server process.
sys.path.insert(0, '/var/www/html/site_api')
# `application` is the entry-point name mod_wsgi looks for.
from app import app as application
| 18.4
| 44
| 0.76087
| 17
| 92
| 4.058824
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012346
| 0.119565
| 92
| 4
| 45
| 23
| 0.839506
| 0
| 0
| 0
| 0
| 0
| 0.23913
| 0.23913
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f31c9fed4dc4b02fb304d43b84d6f6862ca9e2b0
| 231
|
py
|
Python
|
mindhome_alpha/erpnext/patches/v5_0/update_material_transfer_for_manufacture.py
|
Mindhome/field_service
|
3aea428815147903eb9af1d0c1b4b9fc7faed057
|
[
"MIT"
] | 1
|
2021-04-29T14:55:29.000Z
|
2021-04-29T14:55:29.000Z
|
mindhome_alpha/erpnext/patches/v5_0/update_material_transfer_for_manufacture.py
|
Mindhome/field_service
|
3aea428815147903eb9af1d0c1b4b9fc7faed057
|
[
"MIT"
] | null | null | null |
mindhome_alpha/erpnext/patches/v5_0/update_material_transfer_for_manufacture.py
|
Mindhome/field_service
|
3aea428815147903eb9af1d0c1b4b9fc7faed057
|
[
"MIT"
] | 1
|
2021-04-29T14:39:01.000Z
|
2021-04-29T14:39:01.000Z
|
from __future__ import unicode_literals
import frappe
def execute():
    """Patch: relabel 'Material Transfer' Stock Entries that belong to a
    work order as 'Material Transfer for Manufacture'."""
    frappe.db.sql("""update `tabStock Entry` set purpose='Material Transfer for Manufacture'
        where ifnull(work_order, '')!='' and purpose='Material Transfer'""")
| 33
| 89
| 0.757576
| 29
| 231
| 5.827586
| 0.827586
| 0.177515
| 0.272189
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112554
| 231
| 6
| 90
| 38.5
| 0.82439
| 0
| 0
| 0
| 0
| 0
| 0.597403
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
b834fa5d34475ef25ef1168922249fef9fade83f
| 122
|
py
|
Python
|
colossus/apps/notifications/admin.py
|
hrithik098/colossus
|
9544838dfc2ab75895d8605d1480fd019b107828
|
[
"MIT"
] | 6
|
2021-02-08T02:46:48.000Z
|
2021-03-29T10:26:58.000Z
|
colossus/apps/notifications/admin.py
|
qube-ai/colossus
|
9544838dfc2ab75895d8605d1480fd019b107828
|
[
"MIT"
] | null | null | null |
colossus/apps/notifications/admin.py
|
qube-ai/colossus
|
9544838dfc2ab75895d8605d1480fd019b107828
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from colossus.apps.notifications import models as m
# Expose the Notification model in the Django admin with default options.
admin.site.register(m.Notification)
| 24.4
| 51
| 0.836066
| 18
| 122
| 5.666667
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098361
| 122
| 4
| 52
| 30.5
| 0.927273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b86c779f9b84ecb86e1e3048d711c7d0990223e1
| 116
|
py
|
Python
|
homeassistant/generated/__init__.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 30,023
|
2016-04-13T10:17:53.000Z
|
2020-03-02T12:56:31.000Z
|
homeassistant/generated/__init__.py
|
jagadeeshvenkatesh/core
|
1bd982668449815fee2105478569f8e4b5670add
|
[
"Apache-2.0"
] | 31,101
|
2020-03-02T13:00:16.000Z
|
2022-03-31T23:57:36.000Z
|
homeassistant/generated/__init__.py
|
jagadeeshvenkatesh/core
|
1bd982668449815fee2105478569f8e4b5670add
|
[
"Apache-2.0"
] | 11,956
|
2016-04-13T18:42:31.000Z
|
2020-03-02T09:32:12.000Z
|
"""All files in this module are automatically generated by hassfest.
To update, run python3 -m script.hassfest
"""
| 23.2
| 68
| 0.758621
| 17
| 116
| 5.176471
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010204
| 0.155172
| 116
| 4
| 69
| 29
| 0.887755
| 0.931034
| 0
| null | 1
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b87611cc1fbe6678adf8046a89e4887e0f53cd60
| 386
|
py
|
Python
|
app/core/tests/test_models.py
|
grotvignelli/pecel_lele_records
|
c879c2f84ca1cb44e5ffde7d6000bf87ed3f6903
|
[
"MIT"
] | null | null | null |
app/core/tests/test_models.py
|
grotvignelli/pecel_lele_records
|
c879c2f84ca1cb44e5ffde7d6000bf87ed3f6903
|
[
"MIT"
] | null | null | null |
app/core/tests/test_models.py
|
grotvignelli/pecel_lele_records
|
c879c2f84ca1cb44e5ffde7d6000bf87ed3f6903
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
from core.models import Artist
class RecordsModel(TestCase):
def test_create_artist(self):
"""Test create a new artist on the db"""
pass
def test_create_album(self):
"""Test create a new album on the db"""
pass
def test_create_single(self):
"""Test create a new single on the db"""
pass
| 20.315789
| 48
| 0.634715
| 55
| 386
| 4.345455
| 0.381818
| 0.251046
| 0.16318
| 0.188285
| 0.426778
| 0.200837
| 0.200837
| 0
| 0
| 0
| 0
| 0
| 0.282383
| 386
| 18
| 49
| 21.444444
| 0.862816
| 0.266839
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.333333
| 0.222222
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
b25a6ac5d32dbcb2f7c4395c55a46c5e1f28cf7d
| 307
|
py
|
Python
|
Desafio 78.py
|
MisaelGuilherme/100_Exercicios_Em_Python
|
8c4cdad7e60201abcdd2c4a5646f52aed4e7041e
|
[
"MIT"
] | null | null | null |
Desafio 78.py
|
MisaelGuilherme/100_Exercicios_Em_Python
|
8c4cdad7e60201abcdd2c4a5646f52aed4e7041e
|
[
"MIT"
] | null | null | null |
Desafio 78.py
|
MisaelGuilherme/100_Exercicios_Em_Python
|
8c4cdad7e60201abcdd2c4a5646f52aed4e7041e
|
[
"MIT"
] | null | null | null |
print('====== DESAFIO 78 ======')
lista = list()
cont = 0
for c in range(1,3):
lista.append(int(input('Digite um número: ')))
print(f'O maior número digitado foi: {max(lista)} na posição {lista.index(max(lista))}')
print(f'O menor número digitado foi: {min(lista)} na posição {lista.index(min(lista))}')
| 43.857143
| 88
| 0.65798
| 50
| 307
| 4.04
| 0.6
| 0.059406
| 0.069307
| 0.188119
| 0.237624
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018657
| 0.127036
| 307
| 7
| 89
| 43.857143
| 0.735075
| 0
| 0
| 0
| 0
| 0.285714
| 0.642857
| 0.162338
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.428571
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
b2a42154992db6108de0d713ceae6e9c99ce3d03
| 159
|
py
|
Python
|
litcoin/script/validator.py
|
odonnellnoel/litcoin
|
cebe745df97d060c16b8d9dfa9e58a0418f75560
|
[
"MIT"
] | null | null | null |
litcoin/script/validator.py
|
odonnellnoel/litcoin
|
cebe745df97d060c16b8d9dfa9e58a0418f75560
|
[
"MIT"
] | null | null | null |
litcoin/script/validator.py
|
odonnellnoel/litcoin
|
cebe745df97d060c16b8d9dfa9e58a0418f75560
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
def validate_script(script):
"""
Validate compiled script
"""
assert type(script) == bytes
# TODO - more validation
| 19.875
| 32
| 0.63522
| 18
| 159
| 5.555556
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008264
| 0.238994
| 159
| 8
| 33
| 19.875
| 0.818182
| 0.433962
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0.5
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a2642d95bee9a8d3c308510e720e7fe6296f5e16
| 1,017
|
py
|
Python
|
tests/test_protocol_methods.py
|
aratz-lasa/globalCounter
|
9ac0841b0e7d1bc71cd6205649c1b07bcf77e01f
|
[
"MIT"
] | 2
|
2019-03-24T19:09:59.000Z
|
2019-03-25T07:15:06.000Z
|
tests/test_protocol_methods.py
|
aratz-lasa/globalCounter
|
9ac0841b0e7d1bc71cd6205649c1b07bcf77e01f
|
[
"MIT"
] | null | null | null |
tests/test_protocol_methods.py
|
aratz-lasa/globalCounter
|
9ac0841b0e7d1bc71cd6205649c1b07bcf77e01f
|
[
"MIT"
] | null | null | null |
from globalCounter.protocol.methods import *
def test_build_message():
op_code = COUNT
data = "topic"
message = build_message(op_code, data)
assert type(message) is bytes
assert message[0] == op_code
assert message[1:].decode(DATA_ENCODING) == data
op_code = RE_COUNT
data = 1
message = build_message(op_code, data)
assert type(message) is bytes
assert message[0] == op_code
assert message[1] == data
def test_parse_message():
op_code = COUNT
data = "topic"
test_message = bytes([op_code]) + data.encode(DATA_ENCODING)
re_op_code, re_data = parse_msg(test_message)
assert type(re_op_code) is int
assert type(re_data) is str
assert re_op_code == op_code
assert re_data == data
op_code = RE_COUNT
data = 1
test_message = bytes([op_code, data])
re_op_code, re_data = parse_msg(test_message)
assert type(re_op_code) is int
assert type(re_data) is int
assert re_op_code == op_code
assert re_data == data
| 26.763158
| 64
| 0.682399
| 157
| 1,017
| 4.127389
| 0.178344
| 0.166667
| 0.074074
| 0.083333
| 0.833333
| 0.833333
| 0.669753
| 0.608025
| 0.608025
| 0.608025
| 0
| 0.007634
| 0.227139
| 1,017
| 37
| 65
| 27.486486
| 0.816794
| 0
| 0
| 0.709677
| 0
| 0
| 0.009833
| 0
| 0
| 0
| 0
| 0
| 0.451613
| 1
| 0.064516
| false
| 0
| 0.032258
| 0
| 0.096774
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a26e0c233a9af82169791c848388887700beadc7
| 12,344
|
py
|
Python
|
test/devices_tests/switch_test.py
|
onkelbeh/xknx
|
b7c7427b77b1a709aef8e25b39bbbb62ace6f708
|
[
"MIT"
] | 1
|
2020-12-17T21:16:52.000Z
|
2020-12-17T21:16:52.000Z
|
test/devices_tests/switch_test.py
|
onkelbeh/xknx
|
b7c7427b77b1a709aef8e25b39bbbb62ace6f708
|
[
"MIT"
] | null | null | null |
test/devices_tests/switch_test.py
|
onkelbeh/xknx
|
b7c7427b77b1a709aef8e25b39bbbb62ace6f708
|
[
"MIT"
] | null | null | null |
"""Unit test for Switch objects."""
import asyncio
import unittest
from unittest.mock import MagicMock, Mock, patch
from xknx import XKNX
from xknx.devices import Switch
from xknx.dpt import DPTBinary
from xknx.telegram import GroupAddress, Telegram
from xknx.telegram.apci import GroupValueRead, GroupValueResponse, GroupValueWrite
class AsyncMock(MagicMock):
"""Async Mock."""
# pylint: disable=invalid-overridden-method
async def __call__(self, *args, **kwargs):
return super().__call__(*args, **kwargs)
class TestSwitch(unittest.TestCase):
"""Test class for Switch object."""
def setUp(self):
"""Set up test class."""
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(self.loop)
def tearDown(self):
"""Tear down test class."""
self.loop.close()
#
# SYNC
#
def test_sync(self):
"""Test sync function / sending group reads to KNX bus."""
xknx = XKNX()
switch = Switch(xknx, "TestOutlet", group_address_state="1/2/3")
self.loop.run_until_complete(switch.sync())
self.assertEqual(xknx.telegrams.qsize(), 1)
telegram = xknx.telegrams.get_nowait()
self.assertEqual(
telegram,
Telegram(
destination_address=GroupAddress("1/2/3"), payload=GroupValueRead()
),
)
def test_sync_state_address(self):
"""Test sync function / sending group reads to KNX bus. Test with Switch with explicit state address."""
xknx = XKNX()
switch = Switch(
xknx, "TestOutlet", group_address="1/2/3", group_address_state="1/2/4"
)
self.loop.run_until_complete(switch.sync())
self.assertEqual(xknx.telegrams.qsize(), 1)
telegram = xknx.telegrams.get_nowait()
self.assertEqual(
telegram,
Telegram(
destination_address=GroupAddress("1/2/4"), payload=GroupValueRead()
),
)
#
# TEST PROCESS
#
def test_process(self):
"""Test process / reading telegrams from telegram queue. Test if device was updated."""
xknx = XKNX()
callback_mock = AsyncMock()
switch1 = Switch(
xknx, "TestOutlet", group_address="1/2/3", device_updated_cb=callback_mock
)
switch2 = Switch(
xknx, "TestOutlet", group_address="1/2/3", device_updated_cb=callback_mock
)
self.assertEqual(switch1.state, None)
self.assertEqual(switch2.state, None)
callback_mock.assert_not_called()
telegram_on = Telegram(
destination_address=GroupAddress("1/2/3"),
payload=GroupValueWrite(DPTBinary(1)),
)
telegram_off = Telegram(
destination_address=GroupAddress("1/2/3"),
payload=GroupValueWrite(DPTBinary(0)),
)
self.loop.run_until_complete(switch1.process(telegram_on))
self.assertEqual(switch1.state, True)
callback_mock.assert_called_once()
callback_mock.reset_mock()
self.loop.run_until_complete(switch1.process(telegram_off))
self.assertEqual(switch1.state, False)
callback_mock.assert_called_once()
callback_mock.reset_mock()
# test setting switch2 to False with first telegram
self.loop.run_until_complete(switch2.process(telegram_off))
self.assertEqual(switch2.state, False)
callback_mock.assert_called_once()
callback_mock.reset_mock()
self.loop.run_until_complete(switch2.process(telegram_on))
self.assertEqual(switch2.state, True)
callback_mock.assert_called_once()
callback_mock.reset_mock()
def test_process_state(self):
"""Test process / reading telegrams from telegram queue. Test if device was updated."""
xknx = XKNX()
callback_mock = AsyncMock()
switch1 = Switch(
xknx,
"TestOutlet",
group_address="1/2/3",
group_address_state="1/2/4",
device_updated_cb=callback_mock,
)
switch2 = Switch(
xknx,
"TestOutlet",
group_address="1/2/3",
group_address_state="1/2/4",
device_updated_cb=callback_mock,
)
self.assertEqual(switch1.state, None)
self.assertEqual(switch2.state, None)
callback_mock.assert_not_called()
telegram_on = Telegram(
destination_address=GroupAddress("1/2/4"),
payload=GroupValueResponse(DPTBinary(1)),
)
telegram_off = Telegram(
destination_address=GroupAddress("1/2/4"),
payload=GroupValueResponse(DPTBinary(0)),
)
self.loop.run_until_complete(switch1.process(telegram_on))
self.assertEqual(switch1.state, True)
callback_mock.assert_called_once()
callback_mock.reset_mock()
self.loop.run_until_complete(switch1.process(telegram_off))
self.assertEqual(switch1.state, False)
callback_mock.assert_called_once()
callback_mock.reset_mock()
# test setting switch2 to False with first telegram
self.loop.run_until_complete(switch2.process(telegram_off))
self.assertEqual(switch2.state, False)
callback_mock.assert_called_once()
callback_mock.reset_mock()
self.loop.run_until_complete(switch2.process(telegram_on))
self.assertEqual(switch2.state, True)
callback_mock.assert_called_once()
callback_mock.reset_mock()
def test_process_invert(self):
"""Test process / reading telegrams from telegram queue with inverted switch."""
xknx = XKNX()
switch = Switch(xknx, "TestOutlet", group_address="1/2/3", invert=True)
self.assertEqual(switch.state, None)
telegram_inv_on = Telegram(
destination_address=GroupAddress("1/2/3"),
payload=GroupValueWrite(DPTBinary(0)),
)
telegram_inv_off = Telegram(
destination_address=GroupAddress("1/2/3"),
payload=GroupValueWrite(DPTBinary(1)),
)
self.loop.run_until_complete(switch.process(telegram_inv_on))
self.assertEqual(switch.state, True)
self.loop.run_until_complete(switch.process(telegram_inv_off))
self.assertEqual(switch.state, False)
def test_process_reset_after(self):
"""Test process reset_after."""
xknx = XKNX()
reset_after_sec = 0.001
switch = Switch(
xknx, "TestInput", group_address="1/2/3", reset_after=reset_after_sec
)
telegram_on = Telegram(
destination_address=GroupAddress("1/2/3"),
payload=GroupValueWrite(DPTBinary(1)),
)
self.loop.run_until_complete(switch.process(telegram_on))
self.assertTrue(switch.state)
self.assertEqual(xknx.telegrams.qsize(), 0)
self.loop.run_until_complete(asyncio.sleep(reset_after_sec * 2))
self.assertEqual(xknx.telegrams.qsize(), 1)
self.loop.run_until_complete(switch.process(xknx.telegrams.get_nowait()))
self.assertFalse(switch.state)
def test_process_reset_after_cancel_existing(self):
"""Test process reset_after cancels existing reset tasks."""
xknx = XKNX()
reset_after_sec = 0.01
switch = Switch(
xknx, "TestInput", group_address="1/2/3", reset_after=reset_after_sec
)
telegram_on = Telegram(
destination_address=GroupAddress("1/2/3"),
payload=GroupValueResponse(DPTBinary(1)),
)
self.loop.run_until_complete(switch.process(telegram_on))
self.assertTrue(switch.state)
self.assertEqual(xknx.telegrams.qsize(), 0)
self.loop.run_until_complete(asyncio.sleep(reset_after_sec / 2))
# half way through the reset timer
self.loop.run_until_complete(switch.process(telegram_on))
self.assertTrue(switch.state)
self.loop.run_until_complete(asyncio.sleep(reset_after_sec / 2))
self.assertEqual(xknx.telegrams.qsize(), 0)
def test_process_callback(self):
"""Test process / reading telegrams from telegram queue. Test if callback was called."""
# pylint: disable=no-self-use
xknx = XKNX()
switch = Switch(xknx, "TestOutlet", group_address="1/2/3")
after_update_callback = Mock()
async def async_after_update_callback(device):
"""Async callback."""
after_update_callback(device)
switch.register_device_updated_cb(async_after_update_callback)
telegram = Telegram(
destination_address=GroupAddress("1/2/3"),
payload=GroupValueWrite(DPTBinary(1)),
)
self.loop.run_until_complete(switch.process(telegram))
after_update_callback.assert_called_with(switch)
#
# TEST SET ON
#
def test_set_on(self):
"""Test switching on switch."""
xknx = XKNX()
switch = Switch(xknx, "TestOutlet", group_address="1/2/3")
self.loop.run_until_complete(switch.set_on())
self.assertEqual(xknx.telegrams.qsize(), 1)
telegram = xknx.telegrams.get_nowait()
self.assertEqual(
telegram,
Telegram(
destination_address=GroupAddress("1/2/3"),
payload=GroupValueWrite(DPTBinary(1)),
),
)
#
# TEST SET OFF
#
def test_set_off(self):
"""Test switching off switch."""
xknx = XKNX()
switch = Switch(xknx, "TestOutlet", group_address="1/2/3")
self.loop.run_until_complete(switch.set_off())
self.assertEqual(xknx.telegrams.qsize(), 1)
telegram = xknx.telegrams.get_nowait()
self.assertEqual(
telegram,
Telegram(
destination_address=GroupAddress("1/2/3"),
payload=GroupValueWrite(DPTBinary(0)),
),
)
#
# TEST SET INVERT
#
def test_set_invert(self):
"""Test switching on/off inverted switch."""
xknx = XKNX()
switch = Switch(xknx, "TestOutlet", group_address="1/2/3", invert=True)
self.loop.run_until_complete(switch.set_on())
self.assertEqual(xknx.telegrams.qsize(), 1)
telegram = xknx.telegrams.get_nowait()
self.assertEqual(
telegram,
Telegram(
destination_address=GroupAddress("1/2/3"),
payload=GroupValueWrite(DPTBinary(0)),
),
)
self.loop.run_until_complete(switch.set_off())
self.assertEqual(xknx.telegrams.qsize(), 1)
telegram = xknx.telegrams.get_nowait()
self.assertEqual(
telegram,
Telegram(
destination_address=GroupAddress("1/2/3"),
payload=GroupValueWrite(DPTBinary(1)),
),
)
#
# TEST DO
#
def test_do(self):
"""Test 'do' functionality."""
xknx = XKNX()
switch = Switch(xknx, "TestOutlet", group_address="1/2/3")
self.loop.run_until_complete(switch.do("on"))
self.loop.run_until_complete(xknx.devices.process(xknx.telegrams.get_nowait()))
self.assertTrue(switch.state)
self.loop.run_until_complete(switch.do("off"))
self.loop.run_until_complete(xknx.devices.process(xknx.telegrams.get_nowait()))
self.assertFalse(switch.state)
def test_wrong_do(self):
"""Test wrong do command."""
xknx = XKNX()
switch = Switch(xknx, "TestOutlet", group_address="1/2/3")
with patch("logging.Logger.warning") as mock_warn:
self.loop.run_until_complete(switch.do("execute"))
mock_warn.assert_called_with(
"Could not understand action %s for device %s", "execute", "TestOutlet"
)
self.assertEqual(xknx.telegrams.qsize(), 0)
#
# TEST has_group_address
#
def test_has_group_address(self):
"""Test has_group_address."""
xknx = XKNX()
switch = Switch(xknx, "TestOutlet", group_address="1/2/3")
self.assertTrue(switch.has_group_address(GroupAddress("1/2/3")))
self.assertFalse(switch.has_group_address(GroupAddress("2/2/2")))
| 35.168091
| 112
| 0.625081
| 1,386
| 12,344
| 5.362193
| 0.101732
| 0.009419
| 0.011706
| 0.062433
| 0.788617
| 0.766416
| 0.755786
| 0.749596
| 0.731701
| 0.71838
| 0
| 0.018447
| 0.262233
| 12,344
| 350
| 113
| 35.268571
| 0.797628
| 0.091137
| 0
| 0.671815
| 0
| 0
| 0.03909
| 0.001986
| 0
| 0
| 0
| 0
| 0.200772
| 1
| 0.061776
| false
| 0
| 0.030888
| 0
| 0.104247
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a293a315712d3bdc71862b9ef2ee40e0d915391a
| 39
|
py
|
Python
|
errors.py
|
pruh/nserv
|
b28625636889d70102e6e5ceee72706a8a3fdd0e
|
[
"MIT"
] | null | null | null |
errors.py
|
pruh/nserv
|
b28625636889d70102e6e5ceee72706a8a3fdd0e
|
[
"MIT"
] | 4
|
2020-01-27T04:34:41.000Z
|
2020-01-27T05:28:19.000Z
|
errors.py
|
pruh/nserv
|
b28625636889d70102e6e5ceee72706a8a3fdd0e
|
[
"MIT"
] | null | null | null |
class ApiError(BaseException):
pass
| 19.5
| 30
| 0.769231
| 4
| 39
| 7.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 39
| 2
| 31
| 19.5
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
0c2d5933cfdf08db56bacb502453931d2b1a3f3b
| 198
|
py
|
Python
|
src/ufdl/jobcontracts/base/__init__.py
|
waikato-ufdl/ufdl-job-contracts
|
4d414fc79e110de044e2b8377556d3134c0b5dcc
|
[
"Apache-2.0"
] | null | null | null |
src/ufdl/jobcontracts/base/__init__.py
|
waikato-ufdl/ufdl-job-contracts
|
4d414fc79e110de044e2b8377556d3134c0b5dcc
|
[
"Apache-2.0"
] | null | null | null |
src/ufdl/jobcontracts/base/__init__.py
|
waikato-ufdl/ufdl-job-contracts
|
4d414fc79e110de044e2b8377556d3134c0b5dcc
|
[
"Apache-2.0"
] | null | null | null |
from ._Input import Input
from ._InputConstructor import InputConstructor
from ._Output import Output
from ._OutputConstructor import OutputConstructor
from ._UFDLJobContract import UFDLJobContract
| 33
| 49
| 0.873737
| 20
| 198
| 8.4
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10101
| 198
| 5
| 50
| 39.6
| 0.94382
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0c4891166c0603759ed664941ac359b6cc9e4028
| 80
|
py
|
Python
|
magellan_models/config/__init__.py
|
3mcloud/magellan-models
|
aae47496f240a5211e650a5c0efcbc95a15f7bb0
|
[
"BSD-3-Clause"
] | 2
|
2021-08-11T18:15:28.000Z
|
2021-08-11T18:33:38.000Z
|
magellan_models/config/__init__.py
|
3mcloud/magellan-models
|
aae47496f240a5211e650a5c0efcbc95a15f7bb0
|
[
"BSD-3-Clause"
] | null | null | null |
magellan_models/config/__init__.py
|
3mcloud/magellan-models
|
aae47496f240a5211e650a5c0efcbc95a15f7bb0
|
[
"BSD-3-Clause"
] | null | null | null |
""" Init file for config module"""
from .magellan_config import MagellanConfig
| 20
| 43
| 0.775
| 10
| 80
| 6.1
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1375
| 80
| 3
| 44
| 26.666667
| 0.884058
| 0.3375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0c658d4e6cd53741f4acdbaeb1a3bcc503fc4d9c
| 117
|
py
|
Python
|
src/torchprune/torchprune/method/thres_weight/__init__.py
|
dani3l125/torchprune
|
f2589ec7514bd531ddaa7da3aed6388bb13712d3
|
[
"MIT"
] | 74
|
2021-03-05T01:25:00.000Z
|
2022-03-26T06:15:32.000Z
|
src/torchprune/torchprune/method/thres_weight/__init__.py
|
dani3l125/torchprune
|
f2589ec7514bd531ddaa7da3aed6388bb13712d3
|
[
"MIT"
] | 4
|
2021-05-25T06:01:22.000Z
|
2022-01-24T22:38:09.000Z
|
src/torchprune/torchprune/method/thres_weight/__init__.py
|
dani3l125/torchprune
|
f2589ec7514bd531ddaa7da3aed6388bb13712d3
|
[
"MIT"
] | 7
|
2021-03-24T14:14:32.000Z
|
2022-02-19T17:27:56.000Z
|
# flake8: noqa: F403,F401
"""The package for classic weight thresholding."""
from .thres_weight_net import ThresNet
| 23.4
| 50
| 0.769231
| 16
| 117
| 5.5
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068627
| 0.128205
| 117
| 4
| 51
| 29.25
| 0.794118
| 0.589744
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a76fdbea44c9b500c38c326130595d2b30f17175
| 6,838
|
py
|
Python
|
mech/tests/test_mech_box.py
|
theenoob/mech
|
f77b56b585fca5261e2f6a77f8e28597126a8cff
|
[
"MIT"
] | 17
|
2020-01-24T01:08:25.000Z
|
2021-07-12T19:53:34.000Z
|
mech/tests/test_mech_box.py
|
whoopsjohnnie/mech
|
f03f23ccef95f5a7f0c7f83b95db865711a57996
|
[
"MIT"
] | 6
|
2020-02-19T02:50:46.000Z
|
2021-02-14T09:50:32.000Z
|
mech/tests/test_mech_box.py
|
whoopsjohnnie/mech
|
f03f23ccef95f5a7f0c7f83b95db865711a57996
|
[
"MIT"
] | 4
|
2020-06-28T00:03:12.000Z
|
2021-01-28T21:47:23.000Z
|
# Copyright (c) 2020 Mike Kinney
"""Unit tests for 'mech box'."""
import re
from unittest.mock import patch
from click.testing import CliRunner
from mech.mech_cli import cli
def test_mech_box_add_with_cloud():
"""Test 'mech box add' with cloud."""
runner = CliRunner()
with patch('mech.utils.cloud_run') as mock_cloud_run:
runner.invoke(cli, ['--cloud', 'foo', 'box', 'add', 'bento/ubuntu-18.04'])
mock_cloud_run.assert_called()
def test_mech_box_list_with_cloud():
"""Test 'mech box list' with cloud."""
runner = CliRunner()
with patch('mech.utils.cloud_run') as mock_cloud_run:
runner.invoke(cli, ['--cloud', 'foo', 'box', 'list'])
mock_cloud_run.assert_called()
def test_mech_box_remove_with_cloud():
"""Test 'mech box remove' with cloud."""
runner = CliRunner()
with patch('mech.utils.cloud_run') as mock_cloud_run:
runner.invoke(cli, ['--cloud', 'foo', 'box', 'remove', '--version', 'somever',
'--name', 'bento/ubuntu-18.04'])
mock_cloud_run.assert_called()
@patch('os.getcwd')
def test_mech_box_list_no_mechdir(mock_os_getcwd):
"""Test 'mech box list' with no '.mech' directory."""
mock_os_getcwd.return_value = '/tmp'
runner = CliRunner()
with patch('os.walk') as mock_walk:
# root, dirs, files
mock_walk.return_value = [('./tmp', [], []), ]
result = runner.invoke(cli, ['box', 'list'])
mock_walk.assert_called()
# ensure a header prints out
assert re.search(r'BOX', result.output, re.MULTILINE)
@patch('os.getcwd')
def test_mech_box_list_empty_boxes_dir(mock_os_getcwd):
"""Test 'mech box list' with no directories in '.mech/boxes' directory."""
mock_os_getcwd.return_value = '/tmp'
runner = CliRunner()
with patch('os.walk') as mock_walk:
# root, dirs, files
mock_walk.return_value = [('/tmp', ['boxes', ], []), ]
result = runner.invoke(cli, ['box', 'list'])
mock_walk.assert_called()
# ensure a header prints out
assert re.search(r'BOX', result.output, re.MULTILINE)
@patch('os.getcwd')
def test_mech_box_list_one_box(mock_os_getcwd):
"""Test 'mech box list' with one box present."""
mock_os_getcwd.return_value = '/tmp'
runner = CliRunner()
with patch('os.walk') as mock_walk:
# simulate: vmware/bento/ubuntu-18.04/201912.04.0/vmware_desktop.box
mock_walk.return_value = [
('/tmp', ['.mech'], []),
('/tmp/.mech', ['boxes'], []),
('/tmp/.mech/boxes', ['vmware'], []),
('/tmp/.mech/boxes/vmware', ['bento'], []),
('/tmp/.mech/boxes/vmware/bento', ['ubuntu-18.04'], []),
('/tmp/.mech/boxes/vmware/bento/ubuntu-18.04', ['201912.04.0'], []),
('/tmp/.mech/boxes/vmware/bento/ubuntu-18.04/201912.04.0', [], ['vmware_desktop.box']),
]
result = runner.invoke(cli, ['box', 'list'])
mock_walk.assert_called()
print('result.output:{}'.format(result.output))
assert re.search(r'ubuntu-18.04', result.output, re.MULTILINE)
@patch('os.getcwd')
def test_mech_box_list_one_box_legacy(mock_os_getcwd):
"""Test 'mech box list' with a legacy box present.
This is so we can handle the initial box files. (before provider was added)
"""
mock_os_getcwd.return_value = '/tmp'
runner = CliRunner()
with patch('os.walk') as mock_walk:
# simulate: bento/ubuntu-18.04/201912.04.0/vmware_desktop.box
mock_walk.return_value = [
('/tmp/.mech/boxes/bento/ubuntu-18.04/201912.04.0', [], ['vmware_desktop.box']),
]
result = runner.invoke(cli, ['box', 'list'])
mock_walk.assert_called()
print('result.output:{}'.format(result.output))
assert re.search(r'ubuntu-18.04', result.output, re.MULTILINE)
@patch('requests.get')
@patch('os.path.exists')
@patch('os.getcwd')
def test_mech_box_add_new(mock_os_getcwd, mock_os_path_exists,
mock_requests_get, catalog_as_json):
"""Test 'mech box add' from Hashicorp'."""
mock_os_path_exists.return_value = False
mock_os_getcwd.return_value = '/tmp'
runner = CliRunner()
mock_requests_get.return_value.status_code = 200
mock_requests_get.return_value.json.return_value = catalog_as_json
result = runner.invoke(cli, ['box', 'add', '--provider', 'vmware', 'bento/ubuntu-18.04'])
assert re.search(r'Checking integrity', result.output, re.MULTILINE)
def test_mech_box_add_with_invalid_provider():
"""Test 'mech box add'."""
runner = CliRunner()
result = runner.invoke(cli, ['box', 'add', '--provider', 'atari', 'bento/ubuntu-18.04'])
assert re.search(r'Need to provide valid provider', result.output, re.MULTILINE)
def test_mech_box_remove_with_invalid_provider():
"""Test 'mech box remove'."""
runner = CliRunner()
result = runner.invoke(cli, ['box', 'remove', '--version', 'somever',
'--provider', 'atari', '--name', 'bento/ubuntu-18.04'])
assert re.search(r'Need to provide valid provider', result.output, re.MULTILINE)
@patch('requests.get')
@patch('os.path.exists')
@patch('os.getcwd')
def test_mech_box_add_existing(mock_os_getcwd, mock_os_path_exists,
mock_requests_get, catalog_as_json):
"""Test 'mech box add' from Hashicorp'."""
mock_os_getcwd.return_value = '/tmp'
mock_os_path_exists.return_value = True
runner = CliRunner()
mock_requests_get.return_value.status_code = 200
mock_requests_get.return_value.json.return_value = catalog_as_json
result = runner.invoke(cli, ['box', 'add', 'bento/ubuntu-18.04'])
assert re.search(r'Loading metadata', result.output, re.MULTILINE)
@patch('shutil.rmtree')
@patch('os.path.exists')
def test_mech_box_remove_exists(mock_os_path_exists, mock_rmtree):
"""Test 'mech box remove'."""
mock_os_path_exists.return_value = True
mock_rmtree.return_value = True
runner = CliRunner()
result = runner.invoke(cli, ['--debug', 'box', 'remove', '--version', 'somever',
'--provider', 'vmware', '--name', 'bento/ubuntu-18.04'])
mock_os_path_exists.assert_called()
mock_rmtree.assert_called()
assert re.search(r'Removed ', result.output, re.MULTILINE)
@patch('os.path.exists')
def test_mech_box_remove_does_not_exists(mock_os_path_exists):
"""Test 'mech box remove'."""
mock_os_path_exists.return_value = False
runner = CliRunner()
result = runner.invoke(cli, ['box', 'remove', '--version', 'somever', '--provider',
'vmware', '--name', 'bento/ubuntu-18.04'])
mock_os_path_exists.assert_called()
assert re.search(r'No boxes were removed', result.output, re.MULTILINE)
| 39.526012
| 99
| 0.640977
| 919
| 6,838
| 4.541893
| 0.132753
| 0.04528
| 0.068519
| 0.050311
| 0.889075
| 0.850503
| 0.779348
| 0.749641
| 0.686632
| 0.621945
| 0
| 0.021644
| 0.195964
| 6,838
| 172
| 100
| 39.755814
| 0.737541
| 0.120796
| 0
| 0.591667
| 0
| 0.008333
| 0.203444
| 0.032923
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.108333
| false
| 0
| 0.033333
| 0
| 0.141667
| 0.016667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a7710067b0d44fea24447721d8f9eb33a892a361
| 49
|
py
|
Python
|
pyinsar/__init__.py
|
MITeaps/pyinsar
|
4d22e3ef90ef842d6b390074a8b5deedc7658a2b
|
[
"MIT"
] | 8
|
2019-03-15T19:51:27.000Z
|
2022-02-16T07:27:36.000Z
|
pyinsar/__init__.py
|
MITeaps/pyinsar
|
4d22e3ef90ef842d6b390074a8b5deedc7658a2b
|
[
"MIT"
] | 1
|
2022-02-08T03:48:56.000Z
|
2022-02-09T01:33:27.000Z
|
pyinsar/__init__.py
|
MITeaps/pyinsar
|
4d22e3ef90ef842d6b390074a8b5deedc7658a2b
|
[
"MIT"
] | 2
|
2021-01-12T05:32:21.000Z
|
2021-01-13T08:35:26.000Z
|
__all__ = ["data_import", "processing", "output"]
| 49
| 49
| 0.693878
| 5
| 49
| 5.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 49
| 1
| 49
| 49
| 0.644444
| 0
| 0
| 0
| 0
| 0
| 0.54
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a7a534233b0990f1bfb0a70cb84d6aab2e4762c9
| 68
|
py
|
Python
|
ffeatools/modules/rod/__init__.py
|
zzalscv2/FFEA
|
da8a09dadb1b3978a3d230dc79d9b163d7889242
|
[
"Apache-2.0"
] | null | null | null |
ffeatools/modules/rod/__init__.py
|
zzalscv2/FFEA
|
da8a09dadb1b3978a3d230dc79d9b163d7889242
|
[
"Apache-2.0"
] | null | null | null |
ffeatools/modules/rod/__init__.py
|
zzalscv2/FFEA
|
da8a09dadb1b3978a3d230dc79d9b163d7889242
|
[
"Apache-2.0"
] | 1
|
2021-04-03T16:08:21.000Z
|
2021-04-03T16:08:21.000Z
|
import rod_math_core
from ndc_extractor import main as cc_extractor
| 22.666667
| 46
| 0.882353
| 12
| 68
| 4.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 68
| 2
| 47
| 34
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a7ce3c57530cd724da417601b69a01f50210216a
| 871
|
py
|
Python
|
examples/blog/articles/mongoadmin.py
|
Erenshtein/django-mongonaut
|
eb158f01fc02dd9845d807c0d5044b2c5b577674
|
[
"MIT"
] | 69
|
2016-03-30T17:55:08.000Z
|
2021-11-29T17:44:02.000Z
|
examples/blog/articles/mongoadmin.py
|
Erenshtein/django-mongonaut
|
eb158f01fc02dd9845d807c0d5044b2c5b577674
|
[
"MIT"
] | 22
|
2016-03-30T17:01:31.000Z
|
2020-04-19T08:39:35.000Z
|
examples/blog/articles/mongoadmin.py
|
Erenshtein/django-mongonaut
|
eb158f01fc02dd9845d807c0d5044b2c5b577674
|
[
"MIT"
] | 34
|
2016-04-04T14:11:06.000Z
|
2021-06-25T11:24:33.000Z
|
from mongonaut.sites import MongoAdmin
from articles.models import Post, User, NewUser
class PostAdmin(MongoAdmin):
def has_view_permission(self, request):
return True
def has_edit_permission(self, request):
return True
def has_add_permission(self, request):
return True
def has_delete_permission(self, request):
return True
search_fields = ('title', 'id')
list_fields = ('title', 'author', "published", "pub_date", "update_times")
class UserAdmin(MongoAdmin):
def has_view_permission(self, request):
return True
def has_edit_permission(self, request):
return True
def has_add_permission(self, request):
return True
list_fields = ('first_name', "last_name", "email")
Post.mongoadmin = PostAdmin()
User.mongoadmin = UserAdmin()
NewUser.mongoadmin = UserAdmin()
| 21.775
| 78
| 0.688863
| 103
| 871
| 5.621359
| 0.378641
| 0.072539
| 0.253886
| 0.326425
| 0.519862
| 0.466321
| 0.466321
| 0.455959
| 0.455959
| 0.455959
| 0
| 0
| 0.2124
| 871
| 39
| 79
| 22.333333
| 0.844023
| 0
| 0
| 0.541667
| 0
| 0
| 0.081516
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.291667
| false
| 0
| 0.083333
| 0.291667
| 0.875
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
38efd3fc52128125624df59fe13f7cec1b531de7
| 213
|
py
|
Python
|
src/webfrontend.py
|
smlng/lbv
|
b8a584eac413ac85bd363154c69036cddc328477
|
[
"MIT"
] | 1
|
2016-03-09T14:40:40.000Z
|
2016-03-09T14:40:40.000Z
|
src/webfrontend.py
|
smlng/lbv
|
b8a584eac413ac85bd363154c69036cddc328477
|
[
"MIT"
] | 2
|
2016-03-23T07:46:03.000Z
|
2016-04-19T15:05:55.000Z
|
src/webfrontend.py
|
smlng/lbv
|
b8a584eac413ac85bd363154c69036cddc328477
|
[
"MIT"
] | null | null | null |
import os
import sys

# Make sibling modules (app, settings) importable when run as a script.
# BUG FIX: the original called os.path.dirname(__name__); __name__ is the
# module *name* (e.g. "__main__"), not a path, so dirname() returned "" and
# the script's directory was never actually added. __file__ is the script's
# path, which is what dirname() expects.
sys.path.append(os.path.dirname(__file__))

from app import app
from settings import DEFAULT_WEB_SERVER

# Start the web server on the configured host/port.
# NOTE(review): debug=True enables the interactive debugger and is unsafe in
# production — confirm this entry point is dev-only.
app.run(host=DEFAULT_WEB_SERVER['host'], port=DEFAULT_WEB_SERVER['port'], debug=True)
| 21.3
| 85
| 0.798122
| 35
| 213
| 4.571429
| 0.514286
| 0.1875
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089202
| 213
| 9
| 86
| 23.666667
| 0.824742
| 0
| 0
| 0
| 0
| 0
| 0.037559
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
38faf4f4134d18fd6d6abb8af4f18cc55e3f6b0f
| 50
|
py
|
Python
|
carla/__init__.py
|
Philoso-Fish/CARLA
|
beb0a8b5f04b30acd3b617d4443941f815601ba0
|
[
"MIT"
] | null | null | null |
carla/__init__.py
|
Philoso-Fish/CARLA
|
beb0a8b5f04b30acd3b617d4443941f815601ba0
|
[
"MIT"
] | null | null | null |
carla/__init__.py
|
Philoso-Fish/CARLA
|
beb0a8b5f04b30acd3b617d4443941f815601ba0
|
[
"MIT"
] | null | null | null |
# flake8: noqa
from .evaluation import distances
| 12.5
| 33
| 0.78
| 6
| 50
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02381
| 0.16
| 50
| 3
| 34
| 16.666667
| 0.904762
| 0.24
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ac0be0c6b8f9f2e25c6eb4172507b2662571c5f9
| 5,009
|
py
|
Python
|
libraries/botframework-streaming/tests/test_header_serializer.py
|
andreikop/botbuilder-python
|
5e073e0c68fcbdc558133bdbd59a02453e597abe
|
[
"MIT"
] | 388
|
2019-05-07T15:53:21.000Z
|
2022-03-28T20:29:46.000Z
|
libraries/botframework-streaming/tests/test_header_serializer.py
|
andreikop/botbuilder-python
|
5e073e0c68fcbdc558133bdbd59a02453e597abe
|
[
"MIT"
] | 1,286
|
2019-05-07T23:38:19.000Z
|
2022-03-31T10:44:16.000Z
|
libraries/botframework-streaming/tests/test_header_serializer.py
|
andreikop/botbuilder-python
|
5e073e0c68fcbdc558133bdbd59a02453e597abe
|
[
"MIT"
] | 168
|
2019-05-14T20:23:25.000Z
|
2022-03-16T06:49:14.000Z
|
from typing import List
from unittest import TestCase
from uuid import uuid4, UUID
import pytest
from botframework.streaming.payloads import HeaderSerializer
from botframework.streaming.payloads.models import Header, PayloadTypes
from botframework.streaming.transport import TransportConstants
class TestHeaderSerializer(TestCase):
    """Unit tests for HeaderSerializer: round-trip, ASCII wire format, and
    rejection of malformed headers.

    Wire format (as these tests exercise it):
    ``<type>.<6-digit length>.<36-char uuid>.<end flag>\\n``, e.g.
    ``A.000168.<uuid>.1\\n``.

    The original file repeated the same build-buffer/expect-ValueError
    boilerplate in every test; it is factored into private helpers here.
    Public test method names are unchanged.
    """

    @staticmethod
    def _request_header() -> Header:
        """Build a representative REQUEST header with a fresh id."""
        header = Header()
        header.type = PayloadTypes.REQUEST
        header.payload_length = 168
        header.id = uuid4()
        header.end = True
        return header

    @staticmethod
    def _serialize(header: Header):
        """Serialize into a max-size byte-list buffer; return (buffer, length)."""
        buffer: List[int] = [None] * TransportConstants.MAX_PAYLOAD_LENGTH
        length = HeaderSerializer.serialize(header, buffer, 0)
        return buffer, length

    @staticmethod
    def _to_buffer(header_text: str) -> List[int]:
        """ASCII-encode a wire-format string into the byte list deserialize() takes."""
        return list(bytes(header_text, "ascii"))

    def _assert_rejects(self, header_text, length=None):
        """Assert that deserializing `header_text` raises ValueError.

        `length` overrides the byte count handed to deserialize() (used by the
        too-short / too-long tests); defaults to the full buffer length.
        """
        buffer = self._to_buffer(header_text)
        with pytest.raises(ValueError):
            HeaderSerializer.deserialize(
                buffer, 0, len(buffer) if length is None else length
            )

    def test_can_round_trip(self):
        header = self._request_header()
        buffer, length = self._serialize(header)
        result = HeaderSerializer.deserialize(buffer, 0, length)
        self.assertEqual(header.type, result.type)
        self.assertEqual(header.payload_length, result.payload_length)
        self.assertEqual(header.id, result.id)
        self.assertEqual(header.end, result.end)

    def test_serializes_to_ascii(self):
        header = self._request_header()
        buffer, length = self._serialize(header)
        decoded = bytes(buffer[:length]).decode("ascii")
        self.assertEqual(f"A.000168.{str(header.id)}.1\n", decoded)

    def test_deserializes_from_ascii(self):
        header_id: UUID = uuid4()
        buffer = self._to_buffer(f"A.000168.{str(header_id)}.1\n")
        result = HeaderSerializer.deserialize(buffer, 0, len(buffer))
        self.assertEqual("A", result.type)
        self.assertEqual(168, result.payload_length)
        self.assertEqual(header_id, result.id)
        self.assertTrue(result.end)

    def test_deserialize_unknown_type(self):
        # An unrecognized type character is passed through, not rejected.
        header_id: UUID = uuid4()
        buffer = self._to_buffer(f"Z.000168.{str(header_id)}.1\n")
        result = HeaderSerializer.deserialize(buffer, 0, len(buffer))
        self.assertEqual("Z", result.type)
        self.assertEqual(168, result.payload_length)

    def test_deserialize_length_too_short_throws(self):
        self._assert_rejects(f"A.000168.{uuid4()}.1\n", length=5)

    def test_deserialize_length_too_long_throws(self):
        self._assert_rejects(f"A.000168.{uuid4()}.1\n", length=55)

    def test_deserialize_bad_type_delimiter_throws(self):
        self._assert_rejects(f"Ax000168.{uuid4()}.1\n")

    def test_deserialize_bad_length_delimiter_throws(self):
        self._assert_rejects(f"A.000168x{uuid4()}.1\n")

    def test_deserialize_bad_id_delimiter_throws(self):
        self._assert_rejects(f"A.000168.{uuid4()}x1\n")

    def test_deserialize_bad_terminator_throws(self):
        self._assert_rejects(f"A.000168.{uuid4()}.1c")

    def test_deserialize_bad_length_throws(self):
        # Non-digit inside the 6-character length field.
        self._assert_rejects(f"A.00p168.{uuid4()}.1\n")

    def test_deserialize_bad_id_throws(self):
        # 'p' is not a valid hex digit in the UUID.
        self._assert_rejects("A.000168.68e9p9ca-a651-40f4-ad8f-3aaf781862b4.1\n")

    def test_deserialize_bad_end_throws(self):
        # End flag must be '0' or '1'.
        self._assert_rejects(f"A.000168.{uuid4()}.z\n")
| 36.562044
| 74
| 0.658814
| 606
| 5,009
| 5.306931
| 0.136964
| 0.062189
| 0.05255
| 0.126866
| 0.778296
| 0.749067
| 0.749067
| 0.749067
| 0.709577
| 0.709577
| 0
| 0.036667
| 0.221401
| 5,009
| 136
| 75
| 36.830882
| 0.787949
| 0
| 0
| 0.588235
| 0
| 0
| 0.085646
| 0.073268
| 0
| 0
| 0
| 0
| 0.107843
| 1
| 0.127451
| false
| 0
| 0.068627
| 0
| 0.205882
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ac7310b49e080a4f14ed384393fb879b330580a7
| 94
|
py
|
Python
|
graph_sage.py
|
ejhill24/compound-pcfg
|
f871541d4a462d4bf37d3349f4746a139411a6e1
|
[
"MIT"
] | 1
|
2021-01-08T20:16:16.000Z
|
2021-01-08T20:16:16.000Z
|
graph_sage.py
|
ejhill24/compound-pcfg
|
f871541d4a462d4bf37d3349f4746a139411a6e1
|
[
"MIT"
] | null | null | null |
graph_sage.py
|
ejhill24/compound-pcfg
|
f871541d4a462d4bf37d3349f4746a139411a6e1
|
[
"MIT"
] | null | null | null |
import numpy as np
import tensorflow as tf
from tensorflow import keras

# Smoke test: print the installed TensorFlow version to confirm the imports
# resolve. NOTE(review): `np` and `keras` are imported but unused here —
# presumably placeholders for upcoming model code; confirm before removing.
print(tf.__version__)
| 18.8
| 28
| 0.829787
| 15
| 94
| 4.933333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138298
| 94
| 4
| 29
| 23.5
| 0.91358
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0.25
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ac7ae35b01506cbd6a67674e8ac06d14cfb943f6
| 30,138
|
py
|
Python
|
seed/tests/test_graphql.py
|
erick-rivas/django-reference
|
3195de635419a0c2ac8eee92742bb98365f614d8
|
[
"MIT"
] | null | null | null |
seed/tests/test_graphql.py
|
erick-rivas/django-reference
|
3195de635419a0c2ac8eee92742bb98365f614d8
|
[
"MIT"
] | 11
|
2020-02-11T23:57:45.000Z
|
2022-02-17T07:03:39.000Z
|
seed/tests/test_graphql.py
|
erick-rivas/django-reference
|
3195de635419a0c2ac8eee92742bb98365f614d8
|
[
"MIT"
] | null | null | null |
"""
__Seed builder__
AUTO_GENERATED (Read only)
Modify via builder
"""
import json
from graphene_django.utils.testing import GraphQLTestCase
from seed.tests.util_test import fill_test_database
class TestGraphql(GraphQLTestCase):
    """GraphQL CRUD tests for the six seed entities (matches, players,
    player positions, scores, teams, users).

    Each entity gets the same five checks: list queries (filtered, bare,
    paginated, counted twice), single-object query, save (new id == 2),
    set (id stays 1), and delete. The original file spelled all of that
    out 24 times; the per-entity boilerplate is factored into the
    parameterized `_check_*` helpers below. All public `test_*` names,
    `GRAPHQL_URL` and `setUp` are unchanged, and the fixture `fill_test_database`
    is still assumed to seed exactly one row (id == 1) per entity.

    NOTE(review): the module header marks this file AUTO_GENERATED / "Modify
    via builder" — keep this refactor in sync with (or feed it back into)
    the builder templates.
    """

    GRAPHQL_URL = "/graphql"

    # Per-entity field selections (returned on queries and mutations).
    MATCH_FIELDS = "id date type local { id } visitor { id }"
    PLAYER_FIELDS = "id name isActive photo { id } team { id } position { id }"
    POSITION_FIELDS = "id name details"
    SCORE_FIELDS = "id min player { id } match { id }"
    TEAM_FIELDS = "id name description marketValue logo { id } rival { id }"
    USER_FIELDS = (
        "id username firstName lastName email isActive "
        "teams { id } profileImage { id }"
    )

    # Per-entity mutation argument lists (identical for save and set,
    # except users, where the email must be unique on save vs. set).
    MATCH_ARGS = (
        'date: "2020-01-01T12:00:00+00:00", type: "FRIENDSHIP", '
        'local: 1, visitor: 1,'
    )
    PLAYER_ARGS = 'name: "", photo: 1, isActive: false, team: 1, position: 1,'
    POSITION_ARGS = 'name: "", details: "{}",'
    SCORE_ARGS = 'min: 128, player: 1, match: 1,'
    TEAM_ARGS = 'name: "", logo: 1, description: "", marketValue: 128.0, rival: 1,'
    _USER_ARGS_TEMPLATE = (
        'username: "{email}", firstName: "FirstName", lastName: "LastName", '
        'email: "{email}", '
        'password: "pbkdf2_sha256$150000$jMOqkdOUpor5'
        '$kU/QofjBsopM+CdCnU2+pROhtnxd5CZc7NhUiXNTMc0=", '
        'isActive: true, teams: [1], profileImage: 1,'
    )
    USER_SAVE_ARGS = _USER_ARGS_TEMPLATE.format(email="email@test.com")
    USER_SET_ARGS = _USER_ARGS_TEMPLATE.format(email="email_1@test.com")

    def setUp(self):
        fill_test_database()

    # ------------------------------------------------------------------
    # Helpers
    # ------------------------------------------------------------------

    def _data(self, response):
        """Assert the response carries no GraphQL errors; return its "data"."""
        data = json.loads(response.content)["data"]
        self.assertResponseNoErrors(response)
        return data

    def _check_list_queries(self, plural, pagination_field, count_field, fields):
        """Run the five list-style checks the original repeated per entity."""
        # 01: filtered / ordered / limited list returns the seeded object.
        data = self._data(self.query(
            '{ %s(query: "id=1", orderBy: "id", limit: 1){ %s } }'
            % (plural, fields)))
        with self.subTest():
            self.assertEqual(data[plural][0]["id"], 1)
        # 02: bare list.
        data = self._data(self.query('{ %s{ id } }' % plural))
        with self.subTest():
            self.assertEqual(data[plural][0]["id"], 1)
        # 03: pagination metadata plus page content.
        data = self._data(self.query(
            '{ %s(pageNum: 1, pageSize: 1){ pageNum pageSize totalPages '
            'totalCount %s { id } } }' % (pagination_field, plural)))
        with self.subTest():
            self.assertEqual(data[pagination_field]["totalPages"], 1)
            self.assertEqual(data[pagination_field]["totalCount"], 1)
            self.assertEqual(data[pagination_field][plural][0]["id"], 1)
        # 04: filtered count.
        data = self._data(self.query(
            '{ %s(query: "id=1"){ count } }' % count_field))
        with self.subTest():
            self.assertEqual(data[count_field]["count"], 1)
        # 05: unfiltered count.
        data = self._data(self.query('{ %s { count } }' % count_field))
        with self.subTest():
            self.assertEqual(data[count_field]["count"], 1)

    def _check_single_query(self, singular, fields):
        """Query one object by id and check it is the seeded row (id == 1)."""
        data = self._data(self.query('{ %s(id: 1){ %s } }' % (singular, fields)))
        self.assertEqual(data[singular]["id"], 1)

    def _check_save(self, mutation, result_key, args, fields):
        """Create a new object; the second row gets id == 2."""
        data = self._data(self.query(
            'mutation { %s(%s) { %s { %s } } }'
            % (mutation, args, result_key, fields)))
        self.assertEqual(data[mutation][result_key]["id"], 2)

    def _check_set(self, mutation, result_key, args, fields):
        """Update object 1 in place; its id must remain 1."""
        data = self._data(self.query(
            'mutation { %s(id:1 %s) { %s { %s } } }'
            % (mutation, args, result_key, fields)))
        self.assertEqual(data[mutation][result_key]["id"], 1)

    def _check_delete(self, mutation):
        """Delete object 1; the mutation echoes the deleted id."""
        data = self._data(self.query('mutation { %s(id:1) { id } }' % mutation))
        self.assertEqual(data[mutation]["id"], 1)

    # ------------------------------------------------------------------
    # Match
    # ------------------------------------------------------------------

    def test_query_matches(self):
        self._check_list_queries(
            "matches", "matchPagination", "matchCount", self.MATCH_FIELDS)

    def test_query_match(self):
        self._check_single_query("match", self.MATCH_FIELDS)

    def test_save_match(self):
        self._check_save("saveMatch", "match", self.MATCH_ARGS, self.MATCH_FIELDS)

    def test_set_match(self):
        self._check_set("setMatch", "match", self.MATCH_ARGS, self.MATCH_FIELDS)

    def test_delete_match(self):
        self._check_delete("deleteMatch")

    # ------------------------------------------------------------------
    # Player
    # ------------------------------------------------------------------

    def test_query_players(self):
        self._check_list_queries(
            "players", "playerPagination", "playerCount", self.PLAYER_FIELDS)

    def test_query_player(self):
        self._check_single_query("player", self.PLAYER_FIELDS)

    def test_save_player(self):
        self._check_save(
            "savePlayer", "player", self.PLAYER_ARGS, self.PLAYER_FIELDS)

    def test_set_player(self):
        self._check_set(
            "setPlayer", "player", self.PLAYER_ARGS, self.PLAYER_FIELDS)

    def test_delete_player(self):
        self._check_delete("deletePlayer")

    # ------------------------------------------------------------------
    # PlayerPosition
    # ------------------------------------------------------------------

    def test_query_player_positions(self):
        self._check_list_queries(
            "playerPositions", "playerPositionPagination",
            "playerPositionCount", self.POSITION_FIELDS)

    def test_query_player_position(self):
        self._check_single_query("playerPosition", self.POSITION_FIELDS)

    def test_save_player_position(self):
        self._check_save("savePlayerPosition", "playerPosition",
                         self.POSITION_ARGS, self.POSITION_FIELDS)

    def test_set_player_position(self):
        self._check_set("setPlayerPosition", "playerPosition",
                        self.POSITION_ARGS, self.POSITION_FIELDS)

    def test_delete_player_position(self):
        self._check_delete("deletePlayerPosition")

    # ------------------------------------------------------------------
    # Score
    # ------------------------------------------------------------------

    def test_query_scores(self):
        self._check_list_queries(
            "scores", "scorePagination", "scoreCount", self.SCORE_FIELDS)

    def test_query_score(self):
        self._check_single_query("score", self.SCORE_FIELDS)

    def test_save_score(self):
        self._check_save("saveScore", "score", self.SCORE_ARGS, self.SCORE_FIELDS)

    def test_set_score(self):
        self._check_set("setScore", "score", self.SCORE_ARGS, self.SCORE_FIELDS)

    def test_delete_score(self):
        self._check_delete("deleteScore")

    # ------------------------------------------------------------------
    # Team
    # ------------------------------------------------------------------

    def test_query_teams(self):
        self._check_list_queries(
            "teams", "teamPagination", "teamCount", self.TEAM_FIELDS)

    def test_query_team(self):
        self._check_single_query("team", self.TEAM_FIELDS)

    def test_save_team(self):
        self._check_save("saveTeam", "team", self.TEAM_ARGS, self.TEAM_FIELDS)

    def test_set_team(self):
        self._check_set("setTeam", "team", self.TEAM_ARGS, self.TEAM_FIELDS)

    def test_delete_team(self):
        self._check_delete("deleteTeam")

    # ------------------------------------------------------------------
    # User
    # ------------------------------------------------------------------

    def test_query_users(self):
        self._check_list_queries(
            "users", "userPagination", "userCount", self.USER_FIELDS)

    def test_query_user(self):
        self._check_single_query("user", self.USER_FIELDS)

    def test_save_user(self):
        self._check_save("saveUser", "user", self.USER_SAVE_ARGS, self.USER_FIELDS)

    def test_set_user(self):
        self._check_set("setUser", "user", self.USER_SET_ARGS, self.USER_FIELDS)

    def test_delete_user(self):
        self._check_delete("deleteUser")
| 29.576055
| 111
| 0.387683
| 2,098
| 30,138
| 5.453289
| 0.0653
| 0.017306
| 0.103837
| 0.174635
| 0.885937
| 0.80701
| 0.685692
| 0.638231
| 0.594528
| 0.585875
| 0
| 0.037966
| 0.510585
| 30,138
| 1,019
| 112
| 29.576055
| 0.737695
| 0.00219
| 0
| 0.550143
| 1
| 0
| 0.092487
| 0.0049
| 0
| 0
| 0
| 0
| 0.34384
| 1
| 0.088825
| false
| 0
| 0.008596
| 0
| 0.103152
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3be0cdef7c256d01e21b24295c456bbbf248aec0
| 45
|
py
|
Python
|
helpers/__init__.py
|
d-laub/dlaub_helpers
|
f2005d6ec3a5f4293109b4b70115fa1e67aad27a
|
[
"MIT"
] | 1
|
2021-12-10T21:54:46.000Z
|
2021-12-10T21:54:46.000Z
|
helpers/__init__.py
|
d-laub/dlaub_helpers
|
f2005d6ec3a5f4293109b4b70115fa1e67aad27a
|
[
"MIT"
] | null | null | null |
helpers/__init__.py
|
d-laub/dlaub_helpers
|
f2005d6ec3a5f4293109b4b70115fa1e67aad27a
|
[
"MIT"
] | null | null | null |
"""Helper functions."""
from . import rnaseq
| 15
| 23
| 0.688889
| 5
| 45
| 6.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 45
| 3
| 24
| 15
| 0.794872
| 0.377778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
3bfa95c35fba21ebb548494d678ddd7521365b35
| 4,379
|
py
|
Python
|
blacksheep/testing/client.py
|
q0w/BlackSheep
|
2936cdd3ba6fceacd230a02c99241bde1d06b265
|
[
"MIT"
] | 420
|
2021-02-13T20:00:42.000Z
|
2022-03-31T19:25:39.000Z
|
blacksheep/testing/client.py
|
q0w/BlackSheep
|
2936cdd3ba6fceacd230a02c99241bde1d06b265
|
[
"MIT"
] | 125
|
2021-02-15T09:29:51.000Z
|
2022-03-25T19:48:23.000Z
|
blacksheep/testing/client.py
|
q0w/BlackSheep
|
2936cdd3ba6fceacd230a02c99241bde1d06b265
|
[
"MIT"
] | 27
|
2021-03-20T16:17:58.000Z
|
2022-03-02T19:37:42.000Z
|
from typing import Optional
from blacksheep.contents import Content
from blacksheep.server.application import Application
from blacksheep.server.responses import Response
from blacksheep.testing.simulator import AbstractTestSimulator, TestSimulator
from .helpers import CookiesType, HeadersType, QueryType
class TestClient:
# Setting this dunder variable
# We tell to pytest don't discover this up
__test__ = False
def __init__(
self, app: Application, test_simulator: Optional[AbstractTestSimulator] = None
):
self._test_simulator = test_simulator or TestSimulator(app)
async def get(
self,
path: str,
headers: HeadersType = None,
query: QueryType = None,
cookies: CookiesType = None,
) -> Response:
"""Simulates HTTP GET method"""
return await self._test_simulator.send_request(
method="GET",
path=path,
headers=headers,
query=query,
cookies=cookies,
content=None,
)
async def post(
self,
path: str,
headers: HeadersType = None,
query: QueryType = None,
content: Optional[Content] = None,
cookies: CookiesType = None,
) -> Response:
"""Simulates HTTP POST method"""
return await self._test_simulator.send_request(
method="POST",
path=path,
headers=headers,
query=query,
cookies=cookies,
content=content,
)
async def patch(
self,
path: str,
headers: HeadersType = None,
query: QueryType = None,
content: Optional[Content] = None,
cookies: CookiesType = None,
) -> Response:
"""Simulates HTTP PATCH method"""
return await self._test_simulator.send_request(
method="PATCH",
path=path,
headers=headers,
query=query,
cookies=cookies,
content=content,
)
async def put(
self,
path: str,
headers: HeadersType = None,
query: QueryType = None,
content: Optional[Content] = None,
cookies: CookiesType = None,
) -> Response:
"""Simulates HTTP PUT method"""
return await self._test_simulator.send_request(
method="PUT",
path=path,
headers=headers,
query=query,
content=content,
cookies=cookies,
)
async def delete(
self,
path: str,
headers: HeadersType = None,
query: QueryType = None,
content: Optional[Content] = None,
cookies: CookiesType = None,
) -> Response:
"""Simulates HTTP DELETE method"""
return await self._test_simulator.send_request(
method="DELETE",
path=path,
headers=headers,
query=query,
content=content,
cookies=cookies,
)
async def options(
self,
path: str,
headers: HeadersType = None,
query: QueryType = None,
cookies: CookiesType = None,
) -> Response:
"""Simulates HTTP OPTIONS method"""
return await self._test_simulator.send_request(
method="OPTIONS",
path=path,
headers=headers,
query=query,
content=None,
cookies=cookies,
)
async def head(
self,
path: str,
headers: HeadersType = None,
query: QueryType = None,
cookies: CookiesType = None,
) -> Response:
"""Simulates HTTP HEAD method"""
return await self._test_simulator.send_request(
method="HEAD",
path=path,
headers=headers,
query=query,
content=None,
cookies=cookies,
)
async def trace(
self,
path: str,
headers: HeadersType = None,
query: QueryType = None,
cookies: CookiesType = None,
) -> Response:
"""Simulates HTTP TRACE method"""
return await self._test_simulator.send_request(
method="TRACE",
path=path,
headers=headers,
query=query,
content=None,
cookies=cookies,
)
| 27.36875
| 86
| 0.550582
| 397
| 4,379
| 5.982368
| 0.156171
| 0.060211
| 0.064421
| 0.060632
| 0.747789
| 0.747789
| 0.747789
| 0.747789
| 0.747789
| 0.553684
| 0
| 0
| 0.363097
| 4,379
| 159
| 87
| 27.540881
| 0.85156
| 0.015757
| 0
| 0.735294
| 0
| 0
| 0.009145
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.007353
| false
| 0
| 0.044118
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ce1309947fc9bb567732162030e0321526b65513
| 28
|
py
|
Python
|
kao_parser/__init__.py
|
cloew/KaoParser
|
475cbf27cbadb10a425aa9cd27764e2d635667ee
|
[
"MIT"
] | null | null | null |
kao_parser/__init__.py
|
cloew/KaoParser
|
475cbf27cbadb10a425aa9cd27764e2d635667ee
|
[
"MIT"
] | null | null | null |
kao_parser/__init__.py
|
cloew/KaoParser
|
475cbf27cbadb10a425aa9cd27764e2d635667ee
|
[
"MIT"
] | null | null | null |
from .grammar import Grammar
| 28
| 28
| 0.857143
| 4
| 28
| 6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 28
| 1
| 28
| 28
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
ce415e00024054a90fbce51e5e0c87eacb949f93
| 86
|
py
|
Python
|
ktrain/text/ner/anago/__init__.py
|
RobWillison/ktrain
|
4c690bad3046a43c0cae7b86a8e28463f8cba0a8
|
[
"Apache-2.0"
] | 1,013
|
2019-06-04T14:25:24.000Z
|
2022-03-26T05:52:00.000Z
|
ktrain/text/ner/anago/__init__.py
|
Shifath472533/ktrain
|
3228f336ba5be4d317538c1b79f8ad0259892b2d
|
[
"Apache-2.0"
] | 427
|
2019-06-17T13:45:50.000Z
|
2022-03-25T16:23:49.000Z
|
ktrain/text/ner/anago/__init__.py
|
Shifath472533/ktrain
|
3228f336ba5be4d317538c1b79f8ad0259892b2d
|
[
"Apache-2.0"
] | 272
|
2019-06-05T03:19:07.000Z
|
2022-03-28T02:23:37.000Z
|
from .tagger import Tagger
from .trainer import Trainer
from .wrapper import Sequence
| 21.5
| 29
| 0.825581
| 12
| 86
| 5.916667
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139535
| 86
| 3
| 30
| 28.666667
| 0.959459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0229e4d170ffa1e83e64c8b27f49cc7d02bc6d5b
| 153
|
py
|
Python
|
line_search/__init__.py
|
konstmish/opt_methods
|
ae73d9bd89ae5c463e70328d73cbd190175df98c
|
[
"MIT"
] | 13
|
2020-07-19T12:02:43.000Z
|
2022-03-02T14:34:03.000Z
|
line_search/__init__.py
|
konstmish/opt_methods
|
ae73d9bd89ae5c463e70328d73cbd190175df98c
|
[
"MIT"
] | 1
|
2020-12-25T02:05:00.000Z
|
2021-01-01T11:24:51.000Z
|
line_search/__init__.py
|
konstmish/opt_methods
|
ae73d9bd89ae5c463e70328d73cbd190175df98c
|
[
"MIT"
] | 2
|
2020-07-17T08:45:48.000Z
|
2021-12-10T03:24:57.000Z
|
from .armijo import Armijo
from .best_grid import BestGrid
from .goldstein import Goldstein
from .nest_armijo import NestArmijo
from .wolfe import Wolfe
| 25.5
| 35
| 0.836601
| 22
| 153
| 5.727273
| 0.454545
| 0.190476
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130719
| 153
| 5
| 36
| 30.6
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
025172388edb4c2dd9f68420cda5c079475f89ee
| 3,535
|
py
|
Python
|
tests/test_parser/test_responses.py
|
christhekeele/openapi-python-client
|
b7193165815419b9a0b4f05032a2a091bfc5ebfe
|
[
"MIT"
] | null | null | null |
tests/test_parser/test_responses.py
|
christhekeele/openapi-python-client
|
b7193165815419b9a0b4f05032a2a091bfc5ebfe
|
[
"MIT"
] | 19
|
2021-05-10T10:33:46.000Z
|
2022-02-14T03:14:59.000Z
|
tests/test_parser/test_responses.py
|
christhekeele/openapi-python-client
|
b7193165815419b9a0b4f05032a2a091bfc5ebfe
|
[
"MIT"
] | null | null | null |
from unittest.mock import MagicMock
import openapi_python_client.schema as oai
from openapi_python_client.parser.errors import ParseError, PropertyError
from openapi_python_client.parser.properties import NoneProperty, Schemas, StringProperty
MODULE_NAME = "openapi_python_client.parser.responses"
def test_response_from_data_no_content():
from openapi_python_client.parser.responses import Response, response_from_data
response, schemas = response_from_data(
status_code=200,
data=oai.Response.construct(description=""),
schemas=Schemas(),
parent_name="parent",
config=MagicMock(),
)
assert response == Response(
status_code=200,
prop=NoneProperty(name="response_200", default=None, nullable=False, required=True),
source="None",
)
def test_response_from_data_unsupported_content_type():
from openapi_python_client.parser.responses import response_from_data
data = oai.Response.construct(description="", content={"blah": None})
response, schemas = response_from_data(
status_code=200, data=data, schemas=Schemas(), parent_name="parent", config=MagicMock()
)
assert response == ParseError(data=data, detail="Unsupported content_type {'blah': None}")
def test_response_from_data_no_content_schema():
from openapi_python_client.parser.responses import Response, response_from_data
data = oai.Response.construct(description="", content={"application/json": oai.MediaType.construct()})
response, schemas = response_from_data(
status_code=200, data=data, schemas=Schemas(), parent_name="parent", config=MagicMock()
)
assert response == Response(
status_code=200,
prop=NoneProperty(name="response_200", default=None, nullable=False, required=True),
source="None",
)
def test_response_from_data_property_error(mocker):
from openapi_python_client.parser import responses
property_from_data = mocker.patch.object(responses, "property_from_data", return_value=(PropertyError(), Schemas()))
data = oai.Response.construct(
description="", content={"application/json": oai.MediaType.construct(media_type_schema="something")}
)
config = MagicMock()
response, schemas = responses.response_from_data(
status_code=400, data=data, schemas=Schemas(), parent_name="parent", config=config
)
assert response == PropertyError()
property_from_data.assert_called_once_with(
name="response_400", required=True, data="something", schemas=Schemas(), parent_name="parent", config=config
)
def test_response_from_data_property(mocker):
from openapi_python_client.parser import responses
prop = StringProperty(name="prop", required=True, nullable=False, default=None)
property_from_data = mocker.patch.object(responses, "property_from_data", return_value=(prop, Schemas()))
data = oai.Response.construct(
description="", content={"application/json": oai.MediaType.construct(media_type_schema="something")}
)
config = MagicMock()
response, schemas = responses.response_from_data(
status_code=400, data=data, schemas=Schemas(), parent_name="parent", config=config
)
assert response == responses.Response(
status_code=400,
prop=prop,
source="response.json()",
)
property_from_data.assert_called_once_with(
name="response_400", required=True, data="something", schemas=Schemas(), parent_name="parent", config=config
)
| 37.210526
| 120
| 0.729279
| 409
| 3,535
| 6.046455
| 0.156479
| 0.061464
| 0.084108
| 0.080873
| 0.838253
| 0.788516
| 0.771128
| 0.745249
| 0.68702
| 0.644157
| 0
| 0.012109
| 0.158982
| 3,535
| 94
| 121
| 37.606383
| 0.819711
| 0
| 0
| 0.457143
| 0
| 0
| 0.089958
| 0.01075
| 0
| 0
| 0
| 0
| 0.1
| 1
| 0.071429
| false
| 0
| 0.128571
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0278525a98f9428dd26a37cc37dc802c892667ca
| 77
|
py
|
Python
|
Desafio01.py
|
WestenPy/Curso_em_video
|
9f6a9775d27e1b86d54b381aba5da69b2ae21b27
|
[
"MIT"
] | null | null | null |
Desafio01.py
|
WestenPy/Curso_em_video
|
9f6a9775d27e1b86d54b381aba5da69b2ae21b27
|
[
"MIT"
] | null | null | null |
Desafio01.py
|
WestenPy/Curso_em_video
|
9f6a9775d27e1b86d54b381aba5da69b2ae21b27
|
[
"MIT"
] | null | null | null |
'''Crie um programa que escreva "Olá, Mundo" na tela.'''
print('Olá, Mundo')
| 25.666667
| 56
| 0.662338
| 12
| 77
| 4.25
| 0.833333
| 0.313725
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 77
| 2
| 57
| 38.5
| 0.772727
| 0.649351
| 0
| 0
| 0
| 0
| 0.47619
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
65fac203af4421f57183c8c6ac27545095f0ac52
| 162
|
py
|
Python
|
modulos y paquetes/paquetes.py
|
MiGueAJM9724/Python
|
436975a3ccef5a922afa7e3f14747322f2979e06
|
[
"Apache-2.0"
] | null | null | null |
modulos y paquetes/paquetes.py
|
MiGueAJM9724/Python
|
436975a3ccef5a922afa7e3f14747322f2979e06
|
[
"Apache-2.0"
] | null | null | null |
modulos y paquetes/paquetes.py
|
MiGueAJM9724/Python
|
436975a3ccef5a922afa7e3f14747322f2979e06
|
[
"Apache-2.0"
] | null | null | null |
"""
folder == paquete
Un paquete agrupa modulos
"""
#from paquete.modulo import objeto
from animales.aves import Pinguino
pinguino = Pinguino()
pinguino.nadar()
| 16.2
| 34
| 0.759259
| 20
| 162
| 6.15
| 0.65
| 0.390244
| 0.390244
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135802
| 162
| 9
| 35
| 18
| 0.878571
| 0.475309
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
5a08b805e0a20a37263bec4113155e02b3c11afc
| 39
|
py
|
Python
|
blocklint/__main__.py
|
bruth/blocklint
|
65d8a2842bbb27742b2c61b9bc02f73c0dc1f066
|
[
"MIT"
] | 4
|
2020-08-19T17:11:58.000Z
|
2021-09-06T18:29:48.000Z
|
blocklint/__main__.py
|
bruth/blocklint
|
65d8a2842bbb27742b2c61b9bc02f73c0dc1f066
|
[
"MIT"
] | 11
|
2020-08-13T18:17:34.000Z
|
2021-11-04T12:48:16.000Z
|
blocklint/__main__.py
|
boblloyd/inclusivitylint
|
037981255bf1eac959fd1471cf35162977e1de3f
|
[
"MIT"
] | 5
|
2020-08-19T17:11:17.000Z
|
2021-11-12T01:57:14.000Z
|
from blocklint.main import main
main()
| 13
| 31
| 0.794872
| 6
| 39
| 5.166667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 39
| 2
| 32
| 19.5
| 0.911765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
5a3e473c6476692481b340b87c8c70a5940a98a0
| 71
|
py
|
Python
|
example_pkg/example.py
|
WilliamMolina/example-package
|
a0fbda0955020f584da64ebef20a5afc70ffe254
|
[
"MIT"
] | null | null | null |
example_pkg/example.py
|
WilliamMolina/example-package
|
a0fbda0955020f584da64ebef20a5afc70ffe254
|
[
"MIT"
] | null | null | null |
example_pkg/example.py
|
WilliamMolina/example-package
|
a0fbda0955020f584da64ebef20a5afc70ffe254
|
[
"MIT"
] | null | null | null |
def example(name):
return "Hi {}, this is my example!".format(name)
| 35.5
| 52
| 0.661972
| 11
| 71
| 4.272727
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169014
| 71
| 2
| 52
| 35.5
| 0.79661
| 0
| 0
| 0
| 0
| 0
| 0.361111
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
5a4c4900b0153d38d220ab9031323b115d624ba4
| 87
|
py
|
Python
|
viewlet/tests/__init__.py
|
5monkeys/django-viewlet
|
aaed573a115dc3de3e05673093d77b9d218730e7
|
[
"MIT"
] | 10
|
2015-02-16T12:09:18.000Z
|
2019-10-14T15:30:37.000Z
|
viewlet/tests/__init__.py
|
5monkeys/django-viewlet
|
aaed573a115dc3de3e05673093d77b9d218730e7
|
[
"MIT"
] | 12
|
2015-10-12T12:51:03.000Z
|
2018-06-07T18:05:42.000Z
|
viewlet/tests/__init__.py
|
5monkeys/django-viewlet
|
aaed573a115dc3de3e05673093d77b9d218730e7
|
[
"MIT"
] | 12
|
2015-10-06T19:11:11.000Z
|
2021-12-25T06:36:45.000Z
|
from .test_viewlet import ViewletCacheBackendTest, ViewletKeyTest, ViewletTest # NOQA
| 43.5
| 86
| 0.850575
| 8
| 87
| 9.125
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 87
| 1
| 87
| 87
| 0.935897
| 0.045977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ce8ad1b5ddeead32c4a5124364286f63664f5166
| 87
|
py
|
Python
|
slot_racer/server/__init__.py
|
mgreenw/slot-racer
|
ccb456cf489616e14d95c34c7398fb3e04307b02
|
[
"MIT"
] | 1
|
2018-12-08T03:18:00.000Z
|
2018-12-08T03:18:00.000Z
|
slot_racer/server/__init__.py
|
mgreenw/slot-racer
|
ccb456cf489616e14d95c34c7398fb3e04307b02
|
[
"MIT"
] | null | null | null |
slot_racer/server/__init__.py
|
mgreenw/slot-racer
|
ccb456cf489616e14d95c34c7398fb3e04307b02
|
[
"MIT"
] | null | null | null |
"""Module to define how a server will use our game_code"""
from .server import Server
| 21.75
| 58
| 0.747126
| 15
| 87
| 4.266667
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172414
| 87
| 3
| 59
| 29
| 0.888889
| 0.597701
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ceb53ef132f8d7f36bd501940459dea350906aab
| 85
|
py
|
Python
|
enthought/enable/qt4/cairo.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/enable/qt4/cairo.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/enable/qt4/cairo.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from __future__ import absolute_import
from enable.qt4.cairo import *
| 21.25
| 38
| 0.823529
| 12
| 85
| 5.416667
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013514
| 0.129412
| 85
| 3
| 39
| 28.333333
| 0.864865
| 0.141176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
cecc9175e6960e8d54d60142cce301292f1e16e9
| 44
|
py
|
Python
|
cv/supervised/__init__.py
|
ShkalikovOleh/cv-labs
|
dda27a4f19b7e86c774397d7cc8de39461f34ff1
|
[
"MIT"
] | null | null | null |
cv/supervised/__init__.py
|
ShkalikovOleh/cv-labs
|
dda27a4f19b7e86c774397d7cc8de39461f34ff1
|
[
"MIT"
] | 1
|
2022-02-15T14:06:22.000Z
|
2022-02-15T14:06:22.000Z
|
cv/supervised/__init__.py
|
ShkalikovOleh/cv-labs
|
dda27a4f19b7e86c774397d7cc8de39461f34ff1
|
[
"MIT"
] | 1
|
2021-11-04T16:30:57.000Z
|
2021-11-04T16:30:57.000Z
|
from .GaussPerceptron import GaussPerceptron
| 44
| 44
| 0.909091
| 4
| 44
| 10
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068182
| 44
| 1
| 44
| 44
| 0.97561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
0c76165909d372c09aa091eb9a49ae49cea53976
| 386
|
py
|
Python
|
arm_basic_samples/utilities/models.py
|
shwetams/arm-samples-py
|
37cb88c23acca5d8c14ce51aea38fe17e94cc740
|
[
"MIT"
] | 4
|
2015-08-13T16:55:44.000Z
|
2020-09-22T07:37:36.000Z
|
arm_basic_samples/utilities/models.py
|
shwetams/arm-samples-py
|
37cb88c23acca5d8c14ce51aea38fe17e94cc740
|
[
"MIT"
] | null | null | null |
arm_basic_samples/utilities/models.py
|
shwetams/arm-samples-py
|
37cb88c23acca5d8c14ce51aea38fe17e94cc740
|
[
"MIT"
] | 2
|
2019-06-06T10:32:34.000Z
|
2020-05-15T16:31:15.000Z
|
from django.db import models
# Create your models here.
class DefaultNetworkSettings(models.Model):
setting_type_id = models.CharField(max_length=20,default="default")
default_subnet_name = models.CharField(max_length=24,blank=True)
default_address_range = models.CharField(max_length=100,blank=True)
default_address_space = models.CharField(max_length=100,blank=True)
| 48.25
| 72
| 0.800518
| 53
| 386
| 5.603774
| 0.528302
| 0.20202
| 0.242424
| 0.323232
| 0.242424
| 0.242424
| 0.242424
| 0
| 0
| 0
| 0
| 0.028902
| 0.103627
| 386
| 8
| 72
| 48.25
| 0.82948
| 0.062176
| 0
| 0
| 0
| 0
| 0.019391
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
0c88d4b575ec27e09dd022ad385474f8c70dbd77
| 227
|
py
|
Python
|
DX/result.py
|
TheDXNetwork/dx-sdk-python
|
c3747ff85280c6771fe93d1a4c379c7deff7d205
|
[
"MIT"
] | 1
|
2018-11-22T09:52:34.000Z
|
2018-11-22T09:52:34.000Z
|
DX/result.py
|
TheDXNetwork/dx-sdk-python
|
c3747ff85280c6771fe93d1a4c379c7deff7d205
|
[
"MIT"
] | null | null | null |
DX/result.py
|
TheDXNetwork/dx-sdk-python
|
c3747ff85280c6771fe93d1a4c379c7deff7d205
|
[
"MIT"
] | null | null | null |
from .utils import prettify, highlight
class Result:
def __init__(self, data):
self.data = data
def __repr__(self):
return highlight(prettify(self.data))
def json(self):
return self.data
| 17.461538
| 45
| 0.638767
| 28
| 227
| 4.892857
| 0.5
| 0.233577
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.268722
| 227
| 12
| 46
| 18.916667
| 0.825301
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0
| 0.125
| 0.25
| 0.875
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
0c9159f93640d1e3ac687e449984922f647cb423
| 227
|
py
|
Python
|
util/custom_filters/pretty_time.py
|
gautamk/private-journal
|
3027bff58aafc1a41f97e2be00f84516a1c2712d
|
[
"MIT"
] | 1
|
2015-11-06T00:01:36.000Z
|
2015-11-06T00:01:36.000Z
|
util/custom_filters/pretty_time.py
|
gautamk/private-journal
|
3027bff58aafc1a41f97e2be00f84516a1c2712d
|
[
"MIT"
] | null | null | null |
util/custom_filters/pretty_time.py
|
gautamk/private-journal
|
3027bff58aafc1a41f97e2be00f84516a1c2712d
|
[
"MIT"
] | null | null | null |
from datetime import datetime
from pretty_timedelta import pretty_timedelta
__author__ = 'gautam'
def pretty_time(datetime_value):
now = datetime.now()
delta = datetime_value - now
return pretty_timedelta(delta)
| 22.7
| 45
| 0.770925
| 28
| 227
| 5.892857
| 0.464286
| 0.272727
| 0.193939
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.167401
| 227
| 9
| 46
| 25.222222
| 0.873016
| 0
| 0
| 0
| 0
| 0
| 0.026432
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.285714
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
0cac78e15ae1ee9c1acc294a314398471b0e47fb
| 266
|
py
|
Python
|
backend/users/admin.py
|
crowdbotics-apps/ezride-28420
|
9dad5e710b3cf0c7d81c01505b400b21a840f268
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/users/admin.py
|
crowdbotics-apps/ezride-28420
|
9dad5e710b3cf0c7d81c01505b400b21a840f268
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/users/admin.py
|
crowdbotics-apps/ezride-28420
|
9dad5e710b3cf0c7d81c01505b400b21a840f268
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
from django.contrib import admin
from .models import User
from django.contrib.auth import admin as auth_admin
from django.contrib.auth import get_user_model
from users.forms import UserChangeForm, UserCreationForm
User = get_user_model()
admin.site.register(User)
| 26.6
| 56
| 0.834586
| 40
| 266
| 5.425
| 0.425
| 0.138249
| 0.235023
| 0.193548
| 0.248848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109023
| 266
| 9
| 57
| 29.555556
| 0.915612
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.714286
| 0
| 0.714286
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0ccc45868a290c27723b6afd5b258c9f312ec7f2
| 281
|
py
|
Python
|
Fun/Cdrom.py
|
fakegit/CrazyPy
|
9ac12baf96380d23ac3204089d5192965158f160
|
[
"MIT"
] | 1
|
2021-01-26T22:50:52.000Z
|
2021-01-26T22:50:52.000Z
|
Fun/Cdrom.py
|
fakegit/CrazyPy
|
9ac12baf96380d23ac3204089d5192965158f160
|
[
"MIT"
] | null | null | null |
Fun/Cdrom.py
|
fakegit/CrazyPy
|
9ac12baf96380d23ac3204089d5192965158f160
|
[
"MIT"
] | null | null | null |
# Import modules
from ctypes import windll
""" Open cdrom """
def Open():
return windll.WINMM.mciSendStringW(u"set cdaudio door open", None, 0, None)
""" Close cdrom """
def Close():
return windll.WINMM.mciSendStringW(u"set cdaudio door closed", None, 0, None)
| 25.545455
| 81
| 0.672598
| 38
| 281
| 4.973684
| 0.5
| 0.084656
| 0.179894
| 0.328042
| 0.486772
| 0.486772
| 0.486772
| 0.486772
| 0
| 0
| 0
| 0.00885
| 0.19573
| 281
| 11
| 81
| 25.545455
| 0.827434
| 0.049822
| 0
| 0
| 0
| 0
| 0.201835
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| true
| 0
| 0.2
| 0.4
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
0b37f892c4f1c15e540c8d4a49adbc857e1c4766
| 2,454
|
py
|
Python
|
Clustering/store_cluster.py
|
luoshao23/ML_algorithm
|
6e94fdd0718cd892118fd036c7c5851cf3e6d796
|
[
"MIT"
] | 4
|
2017-06-19T06:33:38.000Z
|
2019-01-31T12:07:12.000Z
|
Clustering/store_cluster.py
|
luoshao23/ML_algorithm
|
6e94fdd0718cd892118fd036c7c5851cf3e6d796
|
[
"MIT"
] | null | null | null |
Clustering/store_cluster.py
|
luoshao23/ML_algorithm
|
6e94fdd0718cd892118fd036c7c5851cf3e6d796
|
[
"MIT"
] | 1
|
2017-12-06T08:41:06.000Z
|
2017-12-06T08:41:06.000Z
|
import pandas as pd
import plotly.plotly as py
import plotly
df2 = pd.read_csv('input.csv', header=0)
df2['promo_dep15'].astype(float)
color = pd.Series(['rgb(100,100,100)', 'rgb(38,17,235)', 'rgb(17,93,235)', 'rgb(17,235,220)',
'rgb(49,235,17)', 'rgb(188,235,17)', 'rgb(235,202,17)', 'rgb(235,115,17)', 'rgb(255,0,0)'])
# cities = []
# for i in range(9):
# df_sub = df2[df2['cat_tot'] == i]
# city = dict(
# type='scattergeo',
# locationmode='USA-states',
# lon=df_sub['long'],
# lat=df_sub['lat'],
# text=df_sub['store_nbr'],
# marker=dict(
# size=(df_sub['cat_tot'] + 1) * 5,
# color=color[df_sub['cat_tot']],
# line=dict(width=0.5, color='rgb(40,40,40)'),
# sizemode='area'
# ),
# name='Cluster %d' % i)
# cities.append(city)
# layout = dict(
# title='Sale cluster',
# showlegend=True,
# geo=dict(
# scope='usa',
# projection=dict(type='albers usa'),
# showland=True,
# landcolor='rgb(217, 217, 217)',
# subunitwidth=1,
# countrywidth=1,
# subunitcolor="rgb(255, 255, 255)",
# countrycolor="rgb(255, 255, 255)"
# ),
# )
# fig = dict(data=cities, layout=layout)
# # py.plot( fig, validate=False, filename='store_cluster' )
# plotly.offline.plot(fig, validate=False, filename='store_cluster')
promo = []
for i in range(9):
df_sub = df2[df2['cat_tot'] == i]
city = dict(
type='scattergeo',
locationmode='USA-states',
lon=df_sub['long'],
lat=df_sub['lat'],
text=df_sub['store_nbr'],
marker=dict(
size=(df_sub['promo_dep15'] *100) ,
color=color[df_sub['cat_tot']],
line=dict(width=0.5, color='rgb(40,40,40)'),
sizemode='area'
),
name='Cluster %d' % i)
promo.append(city)
layout = dict(
title='Promotion depth cluster',
showlegend=True,
geo=dict(
scope='usa',
projection=dict(type='albers usa'),
showland=True,
landcolor='rgb(217, 217, 217)',
subunitwidth=1,
countrywidth=1,
subunitcolor="rgb(255, 255, 255)",
countrycolor="rgb(255, 255, 255)"
),
)
fig = dict(data=promo, layout=layout)
# py.plot( fig, validate=False, filename='store_cluster' )
plotly.offline.plot(fig, validate=False, filename='store_cluster_promo')
| 28.534884
| 110
| 0.550937
| 318
| 2,454
| 4.166667
| 0.286164
| 0.045283
| 0.02717
| 0.036226
| 0.792453
| 0.754717
| 0.754717
| 0.754717
| 0.754717
| 0.754717
| 0
| 0.087748
| 0.261614
| 2,454
| 86
| 111
| 28.534884
| 0.643488
| 0.433985
| 0
| 0.05
| 0
| 0
| 0.256467
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.075
| 0
| 0.075
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0b3cc76cf207f96dc49dee925daa2e913946b8aa
| 47
|
py
|
Python
|
run.py
|
citruspi/Alexandria
|
c7761a3b8a090e24b68b1318f1451752e34078e9
|
[
"MIT"
] | null | null | null |
run.py
|
citruspi/Alexandria
|
c7761a3b8a090e24b68b1318f1451752e34078e9
|
[
"MIT"
] | null | null | null |
run.py
|
citruspi/Alexandria
|
c7761a3b8a090e24b68b1318f1451752e34078e9
|
[
"MIT"
] | 1
|
2019-08-08T23:43:28.000Z
|
2019-08-08T23:43:28.000Z
|
from alexandria import app
app.run(port=5001)
| 11.75
| 26
| 0.787234
| 8
| 47
| 4.625
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 0.12766
| 47
| 3
| 27
| 15.666667
| 0.804878
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
e7f669c84ebc1bddbd112a57943dd4c3331a7d73
| 2,421
|
py
|
Python
|
tests/test_nested.py
|
grantps/superhelp
|
d8e861bf1ad91571ac23b9c833a8cd461bb1952f
|
[
"MIT"
] | 27
|
2020-05-17T20:48:43.000Z
|
2022-01-08T21:32:30.000Z
|
tests/test_nested.py
|
grantps/superhelp
|
d8e861bf1ad91571ac23b9c833a8cd461bb1952f
|
[
"MIT"
] | null | null | null |
tests/test_nested.py
|
grantps/superhelp
|
d8e861bf1ad91571ac23b9c833a8cd461bb1952f
|
[
"MIT"
] | null | null | null |
from textwrap import dedent
from tests import check_as_expected, get_repeated_lines
ROOT = 'superhelp.helpers.nested_help.'
def test_misc():
test_conf = [
(
dedent("""\
pet = 'cat'
"""),
{
ROOT + 'bloated_nested_block': 0,
}
),
(
dedent("""\
if 1 == 1:
pass
"""),
{
ROOT + 'bloated_nested_block': 0,
}
),
(
dedent(f"""\
if 1 == 1:
{get_repeated_lines(item='pass', lpad=16, n_lines=40)}
"""),
{
ROOT + 'bloated_nested_block': 1,
}
),
(
dedent(f"""\
for i in range(2):
{get_repeated_lines(item='pass', lpad=16, n_lines=40)}
"""),
{
ROOT + 'bloated_nested_block': 1,
}
),
(
dedent(f"""\
while True:
{get_repeated_lines(item='pass', lpad=16, n_lines=40)}
break
"""),
{
ROOT + 'bloated_nested_block': 1,
}
),
(
dedent(f"""\
while True:
{get_repeated_lines(item='pass', lpad=16, n_lines=40)}
break
for i in range(2):
{get_repeated_lines(item='pass', lpad=16, n_lines=40)}
"""),
{
ROOT + 'bloated_nested_block': 2,
}
),
(
dedent(f"""\
while True:
{get_repeated_lines(item='pass', lpad=16, n_lines=2)}
break
for i in range(2):
{get_repeated_lines(item='pass', lpad=16, n_lines=40)}
"""),
{
ROOT + 'bloated_nested_block': 1,
}
),
(
dedent(f"""\
while True:
for i in range(2):
{get_repeated_lines(item='pass', lpad=16, n_lines=40)}
break
"""),
{
ROOT + 'bloated_nested_block': 1, ## consolidated message
}
),
]
check_as_expected(test_conf, execute_code=True)
check_as_expected(test_conf, execute_code=False)
# test_misc()
| 25.755319
| 74
| 0.389095
| 218
| 2,421
| 4.06422
| 0.215596
| 0.111738
| 0.162528
| 0.198646
| 0.794582
| 0.794582
| 0.72912
| 0.65237
| 0.65237
| 0.65237
| 0
| 0.037873
| 0.487402
| 2,421
| 93
| 75
| 26.032258
| 0.676068
| 0.013218
| 0
| 0.590909
| 0
| 0
| 0.53501
| 0.119916
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011364
| false
| 0.102273
| 0.022727
| 0
| 0.034091
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
f020c46941262e5c69b31e4dca4d89cd29cce41d
| 28
|
py
|
Python
|
login.py
|
mars-zhoulifeng/42_01
|
782b1a35ba470417e9a21d3ff29a493230bb7696
|
[
"MIT"
] | null | null | null |
login.py
|
mars-zhoulifeng/42_01
|
782b1a35ba470417e9a21d3ff29a493230bb7696
|
[
"MIT"
] | null | null | null |
login.py
|
mars-zhoulifeng/42_01
|
782b1a35ba470417e9a21d3ff29a493230bb7696
|
[
"MIT"
] | null | null | null |
num1=100
num2=200
num3=300
| 5.6
| 8
| 0.75
| 6
| 28
| 3.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 0.142857
| 28
| 4
| 9
| 7
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f058be9815e944a41f4ae22e7d729e7ba944d98b
| 82
|
py
|
Python
|
sparv/modules/stanford/__init__.py
|
heatherleaf/sparv-pipeline
|
0fe5f27d0d82548ecc6cb21a69289668aac54cf1
|
[
"MIT"
] | 17
|
2018-09-21T07:01:45.000Z
|
2022-02-24T23:26:49.000Z
|
sparv/modules/stanford/__init__.py
|
heatherleaf/sparv-pipeline
|
0fe5f27d0d82548ecc6cb21a69289668aac54cf1
|
[
"MIT"
] | 146
|
2018-11-13T19:13:25.000Z
|
2022-03-31T09:57:56.000Z
|
sparv/modules/stanford/__init__.py
|
heatherleaf/sparv-pipeline
|
0fe5f27d0d82548ecc6cb21a69289668aac54cf1
|
[
"MIT"
] | 5
|
2019-02-14T00:50:38.000Z
|
2021-03-29T15:37:41.000Z
|
"""Annotations from Stanford Parser for English texts."""
from . import stanford
| 20.5
| 57
| 0.756098
| 10
| 82
| 6.2
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146341
| 82
| 3
| 58
| 27.333333
| 0.885714
| 0.621951
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b2d86716f0a36526f9cd958c6b5d6b131bf69988
| 78
|
py
|
Python
|
rss_reader/database/redis/__init__.py
|
hfstylite/rss_reader
|
6f821ca5d2418dd88d761ccc853ee79da631588d
|
[
"Apache-2.0"
] | 1
|
2018-07-15T15:32:14.000Z
|
2018-07-15T15:32:14.000Z
|
rss_reader/database/redis/__init__.py
|
hfstylite/rss_reader
|
6f821ca5d2418dd88d761ccc853ee79da631588d
|
[
"Apache-2.0"
] | null | null | null |
rss_reader/database/redis/__init__.py
|
hfstylite/rss_reader
|
6f821ca5d2418dd88d761ccc853ee79da631588d
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# _*_ coding:utf-8 _*_
from .base_set import MySettings
| 19.5
| 32
| 0.730769
| 12
| 78
| 4.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014706
| 0.128205
| 78
| 4
| 32
| 19.5
| 0.75
| 0.525641
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
650835429a5e070b4397c2f44b7eafb4fc832a5c
| 190
|
py
|
Python
|
tests/web_platform/css_flexbox_1/test_ttwf_reftest_flex_align_content_center.py
|
jonboland/colosseum
|
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
|
[
"BSD-3-Clause"
] | 71
|
2015-04-13T09:44:14.000Z
|
2019-03-24T01:03:02.000Z
|
tests/web_platform/css_flexbox_1/test_ttwf_reftest_flex_align_content_center.py
|
jonboland/colosseum
|
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
|
[
"BSD-3-Clause"
] | 35
|
2019-05-06T15:26:09.000Z
|
2022-03-28T06:30:33.000Z
|
tests/web_platform/css_flexbox_1/test_ttwf_reftest_flex_align_content_center.py
|
jonboland/colosseum
|
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
|
[
"BSD-3-Clause"
] | 139
|
2015-05-30T18:37:43.000Z
|
2019-03-27T17:14:05.000Z
|
from tests.utils import W3CTestCase
class TestTtwfReftestFlexAlignContentCenter(W3CTestCase):
vars().update(W3CTestCase.find_tests(__file__, 'ttwf-reftest-flex-align-content-center'))
| 31.666667
| 93
| 0.821053
| 20
| 190
| 7.55
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017045
| 0.073684
| 190
| 5
| 94
| 38
| 0.840909
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0.2
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6519281cf5d6150fea829160da81f180d2b204f6
| 193
|
py
|
Python
|
systems/__init__.py
|
stylekilla/syncmrt
|
816bb57d80d6595719b8b9d7f027f4f17d0a6c0a
|
[
"Apache-2.0"
] | null | null | null |
systems/__init__.py
|
stylekilla/syncmrt
|
816bb57d80d6595719b8b9d7f027f4f17d0a6c0a
|
[
"Apache-2.0"
] | 25
|
2019-03-05T05:56:35.000Z
|
2019-07-24T13:11:57.000Z
|
systems/__init__.py
|
stylekilla/syncmrt
|
816bb57d80d6595719b8b9d7f027f4f17d0a6c0a
|
[
"Apache-2.0"
] | 1
|
2019-11-27T05:10:47.000Z
|
2019-11-27T05:10:47.000Z
|
# syncmrt __init__.py
# Files.
from . import treatmentDelivery
from .theBrain import Brain
from .patient import Patient
from .imageGuidance import optimise, solver
from .control import hardware
| 27.571429
| 43
| 0.818653
| 24
| 193
| 6.416667
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129534
| 193
| 7
| 44
| 27.571429
| 0.916667
| 0.134715
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
652b12951660f323baa41debd1942a63ea6242d7
| 5,625
|
py
|
Python
|
defining_classes _exercise/spoopify/project/test.py
|
Xamaneone/Python-OOP
|
7514cdc92bb4f7adf27666516739cbf42a35453c
|
[
"MIT"
] | null | null | null |
defining_classes _exercise/spoopify/project/test.py
|
Xamaneone/Python-OOP
|
7514cdc92bb4f7adf27666516739cbf42a35453c
|
[
"MIT"
] | null | null | null |
defining_classes _exercise/spoopify/project/test.py
|
Xamaneone/Python-OOP
|
7514cdc92bb4f7adf27666516739cbf42a35453c
|
[
"MIT"
] | null | null | null |
from .song import Song
from .album import Album
from .band import Band
import unittest
class SongTest(unittest.TestCase):
    """Unit tests for the Song, Album and Band classes.

    Fixes applied in review:
    * the album ``details`` test built ``expected`` but never asserted it;
    * two methods were both named ``test_details`` — the second definition
      shadowed the first, so the album-details test never ran. The first
      one is renamed ``test_album_details_with_song`` so both execute.
    """

    # --- Song ---------------------------------------------------------

    def test_song_init(self):
        song = Song("A", 3.15, False)
        message = song.get_info()
        expected = "A - 3.15"
        self.assertEqual(message, expected)

    # --- Album --------------------------------------------------------

    def test_album_init(self):
        album = Album("The Sound of Perseverance")
        message = album.details()
        expected = "Album The Sound of Perseverance\n"
        self.assertEqual(message, expected)

    def test_add_song_working(self):
        album = Album("The Sound of Perseverance")
        song = Song("Scavenger of Human Sorrow", 6.56, False)
        message = album.add_song(song)
        expected = "Song Scavenger of Human Sorrow has been added to the album The Sound of Perseverance."
        self.assertEqual(message, expected)

    def test_add_song_already_added(self):
        album = Album("The Sound of Perseverance")
        song = Song("Scavenger of Human Sorrow", 6.56, False)
        album.add_song(song)
        message = album.add_song(song)
        expected = "Song is already in the album."
        self.assertEqual(message, expected)

    def test_add_song_single(self):
        album = Album("The Sound of Perseverance")
        song = Song("Scavenger of Human Sorrow", 6.56, True)
        message = album.add_song(song)
        expected = "Cannot add Scavenger of Human Sorrow. It's a single"
        self.assertEqual(message, expected)

    def test_add_song_published_album(self):
        album = Album("The Sound of Perseverance")
        song = Song("Scavenger of Human Sorrow", 6.56, False)
        album.publish()
        message = album.add_song(song)
        expected = "Cannot add songs. Album is published."
        self.assertEqual(message, expected)

    def test_remove_song_working(self):
        album = Album("The Sound of Perseverance")
        song = Song("Scavenger of Human Sorrow", 6.56, False)
        album.add_song(song)
        message = album.remove_song("Scavenger of Human Sorrow")
        expected = "Removed song Scavenger of Human Sorrow from album The Sound of Perseverance."
        self.assertEqual(message, expected)

    def test_remove_song_not_in_album(self):
        album = Album("The Sound of Perseverance")
        song = Song("Scavenger of Human Sorrow", 6.56, False)
        message = album.remove_song("Scavenger of Human Sorrow")
        expected = "Song is not in the album."
        self.assertEqual(message, expected)

    def test_remove_song_album_published(self):
        album = Album("The Sound of Perseverance")
        song = Song("Scavenger of Human Sorrow", 6.56, False)
        album.add_song(song)
        album.publish()
        message = album.remove_song("Scavenger of Human Sorrow")
        expected = "Cannot remove songs. Album is published."
        self.assertEqual(message, expected)

    def test_publish(self):
        album = Album("The Sound of Perseverance")
        message = album.publish()
        expected = album.published
        self.assertTrue(expected)

    def test_publish_message(self):
        album = Album("The Sound of Perseverance")
        message = album.publish()
        expected = "Album The Sound of Perseverance has been published."
        self.assertEqual(message, expected)

    def test_album_details_with_song(self):
        # Renamed from ``test_details`` — the Band ``test_details`` below
        # used to shadow this method, so it never ran.
        album = Album("The Sound of Perseverance")
        song = Song("Scavenger of Human Sorrow", 6.56, False)
        album.add_song(song)
        message = album.details()
        expected = "Album The Sound of Perseverance\n== Scavenger of Human Sorrow - 6.56\n"
        # Bug fix: the original built ``expected`` but never asserted it.
        self.assertEqual(message, expected)

    # --- Band ---------------------------------------------------------

    def test_init(self):
        band = Band("Death")
        message = f"{band.name} - {len(band.albums)}"
        expected = "Death - 0"
        self.assertEqual(message, expected)

    def test_add_album_working(self):
        band = Band("Death")
        album = Album("The Sound of Perseverance")
        message = band.add_album(album)
        expected = "Band Death has added their newest album The Sound of Perseverance."
        self.assertEqual(message, expected)

    def test_add_album_already_added(self):
        band = Band("Death")
        album = Album("The Sound of Perseverance")
        band.add_album(album)
        message = band.add_album(album)
        expected = "Band Death already has The Sound of Perseverance in their library."
        self.assertEqual(message, expected)

    def test_remove_album_working(self):
        band = Band("Death")
        album = Album("The Sound of Perseverance")
        band.add_album(album)
        message = band.remove_album("The Sound of Perseverance")
        expected = "Album The Sound of Perseverance has been removed."
        self.assertEqual(message, expected)

    def test_remove_album_not_found(self):
        band = Band("Death")
        album = Album("The Sound of Perseverance")
        message = band.remove_album("The Sound of Perseverance")
        expected = "Album The Sound of Perseverance is not found."
        self.assertEqual(message, expected)

    def test_remove_album_published(self):
        band = Band("Death")
        album = Album("The Sound of Perseverance")
        album.publish()
        band.add_album(album)
        message = band.remove_album("The Sound of Perseverance")
        expected = "Album has been published. It cannot be removed."
        self.assertEqual(message, expected)

    def test_details(self):
        band = Band("Death")
        message = band.details()
        expected = "Band Death\n"
        self.assertEqual(message, expected)
# Entry point: run this module's tests when executed directly as a script.
if __name__ == "__main__":
    unittest.main()
| 38.265306
| 106
| 0.649778
| 700
| 5,625
| 5.105714
| 0.095714
| 0.062675
| 0.078344
| 0.172356
| 0.823167
| 0.788752
| 0.771405
| 0.752098
| 0.591774
| 0.518467
| 0
| 0.008105
| 0.254222
| 5,625
| 147
| 107
| 38.265306
| 0.843862
| 0
| 0
| 0.604839
| 0
| 0
| 0.288838
| 0
| 0
| 0
| 0
| 0
| 0.145161
| 1
| 0.153226
| false
| 0
| 0.032258
| 0
| 0.193548
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6544376f2e24243a3cadf94c1dbe2698aaa2b51e
| 8,865
|
py
|
Python
|
tests/test_classifier_runnable.py
|
rakovskij-stanislav/karton-classifier
|
271566c36051a914e75a5efac3100bc37953109e
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_classifier_runnable.py
|
rakovskij-stanislav/karton-classifier
|
271566c36051a914e75a5efac3100bc37953109e
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_classifier_runnable.py
|
rakovskij-stanislav/karton-classifier
|
271566c36051a914e75a5efac3100bc37953109e
|
[
"BSD-3-Clause"
] | null | null | null |
from karton.core import Task
from karton.core.test import ConfigMock, KartonBackendMock, KartonTestCase
from .mock_helper import mock_classifier, mock_resource, mock_task
class TestClassifier(KartonTestCase):
    """Tests for the classifier's handling of runnable file formats.

    Each test feeds a mocked resource, whose libmagic/MIME result is fixed
    by ``mock_classifier``, through the classifier and checks the produced
    "recognized" task. The ~25-line expected-task scaffold that was
    duplicated across all ten tests is factored into
    :meth:`_assert_classified`; each test now states only what varies.
    """

    def setUp(self):
        self.config = ConfigMock()
        self.backend = KartonBackendMock()

    def _assert_classified(self, magic, mime, filename, extra_headers, tag):
        """Run one sample through the classifier and assert the output task.

        :param magic: libmagic description the mocked classifier reports
        :param mime: MIME type the mocked classifier reports
        :param filename: name of the mocked input resource
        :param extra_headers: headers expected beyond the common ones
            (typically ``extension`` and ``platform``)
        :param tag: classification tag expected in the task payload
        """
        self.karton = mock_classifier(magic, mime)
        resource = mock_resource(filename)
        res = self.run_task(mock_task(resource))
        headers = {
            "type": "sample",
            "stage": "recognized",
            "origin": "karton.classifier",
            "quality": "high",
            "kind": "runnable",
            "mime": mime,
        }
        headers.update(extra_headers)
        expected = Task(
            headers=headers,
            payload={
                "sample": resource,
                "tags": [tag],
                "magic": magic,
            },
        )
        self.assertTasksEqual(res, [expected])

    def test_process_runnable_android_dec(self):
        self._assert_classified(
            "Dalvik dex file version 035",
            "application/octet-stream",
            "file",
            {"extension": "dex", "platform": "android"},
            "runnable:android:dex",
        )

    def test_process_runnable_linux(self):
        # Note: no "extension" header is expected for ELF binaries.
        self._assert_classified(
            "ELF 32-bit MSB executable...",
            "application/x-executable",
            "file",
            {"platform": "linux"},
            "runnable:linux",
        )

    def test_process_runnable_win32_dll(self):
        self._assert_classified(
            "PE32 executable (DLL) (console) Intel 80386...",
            "application/x-dosexec",
            "file",
            {"extension": "dll", "platform": "win32"},
            "runnable:win32:dll",
        )

    def test_process_runnable_win32_exe(self):
        self._assert_classified(
            "PE32 executable (GUI) Intel 80386 Mono/.Net assembly...",
            "application/x-dosexec",
            "file",
            {"extension": "exe", "platform": "win32"},
            "runnable:win32:exe",
        )

    def test_process_runnable_win32_jar(self):
        self._assert_classified(
            "Zip archive data...",
            "application/zip",
            "file.jar",
            {"extension": "jar", "platform": "win32"},
            "runnable:win32:jar",
        )

    def test_process_runnable_win32_lnk(self):
        self._assert_classified(
            "MS Windows shortcut...",
            "application/octet-stream",
            "file.lnk",
            {"extension": "lnk", "platform": "win32"},
            "runnable:win32:lnk",
        )

    def test_process_runnable_win32_msi(self):
        self._assert_classified(
            "Composite Document File V2 Document, MSI Installer...",
            "application/x-msi",
            "file.msi",
            {"extension": "msi", "platform": "win32"},
            "runnable:win32:msi",
        )

    def test_process_runnable_win32_swf(self):
        self._assert_classified(
            "Macromedia Flash data (compressed)...",
            "application/x-shockwave-flash",
            "file.swf",
            {"extension": "swf", "platform": "win32"},
            "runnable:win32:swf",
        )

    def test_process_runnable_win64_dll(self):
        self._assert_classified(
            "PE32+ executable (DLL) (GUI) x86-64...",
            "application/x-dosexec",
            "file",
            {"extension": "dll", "platform": "win64"},
            "runnable:win64:dll",
        )

    def test_process_runnable_win64_exe(self):
        self._assert_classified(
            "PE32+ executable (console) x86-64...",
            "application/x-dosexec",
            "file",
            {"extension": "exe", "platform": "win64"},
            "runnable:win64:exe",
        )
| 32.591912
| 87
| 0.465877
| 707
| 8,865
| 5.724187
| 0.128713
| 0.044477
| 0.034594
| 0.054361
| 0.821102
| 0.804794
| 0.789968
| 0.762046
| 0.669138
| 0.669138
| 0
| 0.015066
| 0.401015
| 8,865
| 271
| 88
| 32.712177
| 0.747081
| 0
| 0
| 0.66129
| 0
| 0
| 0.220192
| 0.020869
| 0
| 0
| 0
| 0
| 0.040323
| 1
| 0.044355
| false
| 0
| 0.012097
| 0
| 0.060484
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e8f9bf5fc8773e839b62ae9db9283a6ba61a84df
| 79
|
py
|
Python
|
exceptions.py
|
MatCast/idealista_api_python
|
9c00a97365cae676ea265cb6f6b3e0535167b3f4
|
[
"BSD-3-Clause"
] | null | null | null |
exceptions.py
|
MatCast/idealista_api_python
|
9c00a97365cae676ea265cb6f6b3e0535167b3f4
|
[
"BSD-3-Clause"
] | null | null | null |
exceptions.py
|
MatCast/idealista_api_python
|
9c00a97365cae676ea265cb6f6b3e0535167b3f4
|
[
"BSD-3-Clause"
] | null | null | null |
class AuthFailed(Exception):
    """Raised when an authentication attempt against the API fails."""
class SearchFailed(Exception):
    """Raised when a search request fails."""
| 13.166667
| 30
| 0.734177
| 8
| 79
| 7.25
| 0.625
| 0.448276
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189873
| 79
| 5
| 31
| 15.8
| 0.90625
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
3349777c1292ddec458aa35a0ae023d836979fcf
| 8,658
|
py
|
Python
|
test/test_api.py
|
mondeja/potojson
|
fda0db176b7ffb5e28f2229b4e19e20e0e08a4ce
|
[
"BSD-3-Clause"
] | null | null | null |
test/test_api.py
|
mondeja/potojson
|
fda0db176b7ffb5e28f2229b4e19e20e0e08a4ce
|
[
"BSD-3-Clause"
] | 5
|
2020-10-29T20:12:45.000Z
|
2021-08-25T09:12:56.000Z
|
test/test_api.py
|
mondeja/potojson
|
fda0db176b7ffb5e28f2229b4e19e20e0e08a4ce
|
[
"BSD-3-Clause"
] | 2
|
2020-11-01T10:24:20.000Z
|
2020-11-01T10:41:08.000Z
|
from collections import OrderedDict
from test import POFILE_START
import pytest
from potojson import pofile_to_json
# Table-driven test: each case below is a 10-tuple whose elements match the
# argument names declared first, in the same order. ``content`` is the PO
# file text fed to ``pofile_to_json``; ``output`` is the exact expected
# result (a JSON string, or a dict/OrderedDict when ``as_dict`` is True);
# the remaining eight values are the keyword arguments under test.
@pytest.mark.parametrize(
    (
        "content",
        "output",
        "fallback_to_msgid",
        "fuzzy",
        "pretty",
        "indent",
        "language",
        "plural_forms",
        "as_dict",
        "sort_keys",
    ),
    (
        (
            POFILE_START + 'msgid "Hello"\nmsgstr "Hola"\n',
            '{"Hello": "Hola"}',
            False,
            False,
            False,
            None,
            None,
            None,
            False,
            False,
        ),
        # msgctxt
        (
            POFILE_START + 'msgctxt "Month"\nmsgid "May"\nmsgstr "Mayo"',
            '{"Month": {"May": "Mayo"}}',
            False,
            False,
            False,
            None,
            None,
            None,
            False,
            False,
        ),
        # obsolete
        (
            POFILE_START + '#~ msgid "May"\n#~ msgstr "Mayo"',
            "{}",
            False,
            False,
            False,
            None,
            None,
            None,
            False,
            False,
        ),
        # fallback_to_msgid
        # True
        (
            POFILE_START + 'msgid "Hello"\nmsgstr ""\n',
            '{"Hello": "Hello"}',
            True,
            False,
            False,
            None,
            None,
            None,
            False,
            False,
        ),
        # False
        (
            POFILE_START + 'msgid "Hello"\nmsgstr ""\n',
            '{"Hello": ""}',
            False,
            False,
            False,
            None,
            None,
            None,
            False,
            False,
        ),
        # msgid_plural
        (
            (
                POFILE_START + 'msgid "$n word"\nmsgid_plural "$n words"\n'
                'msgstr[0] "$n palabra"\nmsgstr[1] "$n palabras"\n'
            ),
            '{"$n word": ["$n palabra", "$n palabras"]}',
            False,
            False,
            False,
            None,
            None,
            None,
            False,
            False,
        ),
        # msgid_plural + msgctxt
        (
            (
                POFILE_START + 'msgctxt "a context"\nmsgid "$n word"\n'
                'msgid_plural "$n words"\nmsgstr[0] "$n palabra"\n'
                'msgstr[1] "$n palabras"\n'
            ),
            ('{"a context": {"$n word": ["$n palabra", "$n palabras"]}}'),
            False,
            False,
            False,
            None,
            None,
            None,
            False,
            False,
        ),
        # fallback_to_msgid + msgid_plural
        # True
        (
            (
                POFILE_START + 'msgid "$n word"\nmsgid_plural "$n words"\n'
                'msgstr[0] ""\nmsgstr[1] ""\n'
            ),
            '{"$n word": ["$n word", "$n words"]}',
            True,
            False,
            False,
            None,
            None,
            None,
            False,
            False,
        ),
        # False
        (
            (
                POFILE_START + 'msgid "$n word"\nmsgid_plural "$n words"\n'
                'msgstr[0] ""\nmsgstr[1] ""\n'
            ),
            '{"$n word": ["", ""]}',
            False,
            False,
            False,
            None,
            None,
            None,
            False,
            False,
        ),
        # fallback_to_msgid + msgid_plural + msgctxt
        # True
        (
            (
                POFILE_START + 'msgctxt "a context"\nmsgid "$n word"\n'
                'msgid_plural "$n words"\nmsgstr[0] ""\nmsgstr[1] ""\n'
            ),
            ('{"a context": {"$n word": ["$n word", "$n words"]}}'),
            True,
            False,
            False,
            None,
            None,
            None,
            False,
            False,
        ),
        # False
        (
            (
                POFILE_START + 'msgctxt "a context"\nmsgid "$n word"\n'
                'msgid_plural "$n words"\nmsgstr[0] ""\nmsgstr[1] ""\n'
            ),
            ('{"a context": {"$n word": ["", ""]}}'),
            False,
            False,
            False,
            None,
            None,
            None,
            False,
            False,
        ),
        # fuzzy
        # True
        (
            POFILE_START + '#, fuzzy\nmsgid "Hello"\nmsgstr "Hola"\n',
            '{"Hello": "Hola"}',
            False,
            True,
            False,
            None,
            None,
            None,
            False,
            False,
        ),
        # False
        (
            POFILE_START + '#, fuzzy\nmsgid "Hello"\nmsgstr "Hola"\n',
            "{}",
            False,
            False,
            False,
            None,
            None,
            None,
            False,
            False,
        ),
        # pretty
        (
            POFILE_START + 'msgid "Hello"\nmsgstr "Hola"\n',
            '{\n "Hello": "Hola"\n}',
            False,
            False,
            True,
            None,
            None,
            None,
            False,
            False,
        ),
        # pretty with custom indent
        (
            POFILE_START + 'msgid "Hello"\nmsgstr "Hola"\n',
            '{\n "Hello": "Hola"\n}',
            False,
            False,
            True,
            3,
            None,
            None,
            False,
            False,
        ),
        # language
        # discover from pofile
        (
            POFILE_START + '"Language: es\\n"\n\n',
            '{"": {"language": "es"}}',
            False,
            False,
            False,
            None,
            None,
            None,
            False,
            False,
        ),
        # specified in keyword argument
        (
            POFILE_START,
            '{"": {"language": "es"}}',
            False,
            False,
            False,
            None,
            "es",
            None,
            False,
            False,
        ),
        # plural_forms
        # discover from pofile
        (
            POFILE_START + '"Plural-Forms: nplurals=2; plural=n != 1;\\n"\n\n',
            '{"": {"plural-forms": "nplurals=2; plural=n != 1;"}}',
            False,
            False,
            False,
            None,
            None,
            None,
            False,
            False,
        ),
        # specified in keyword argument
        (
            POFILE_START,
            '{"": {"plural-forms": "nplurals=2; plural=n != 1;"}}',
            False,
            False,
            False,
            None,
            None,
            "nplurals=2; plural=n != 1;",
            False,
            False,
        ),
        # as dict
        (
            POFILE_START + 'msgid "Hello"\nmsgstr "Hola"\n',
            {"Hello": "Hola"},
            False,
            False,
            False,
            None,
            None,
            None,
            True,
            False,
        ),
        # sort_keys
        # as JSON
        (
            (POFILE_START + 'msgid "Hello"\nmsgstr "Hola"\nmsgid "A"\nmsgstr "B"\n'),
            '{"A": "B", "Hello": "Hola"}',
            False,
            False,
            False,
            None,
            None,
            None,
            False,
            True,
        ),
        # as_dict
        (
            (POFILE_START + 'msgid "Hello"\nmsgstr "Hola"\nmsgid "A"\nmsgstr "B"\n'),
            OrderedDict({"A": "B", "Hello": "Hola"}),
            False,
            False,
            False,
            None,
            None,
            None,
            True,
            True,
        ),
        # Non ASCII characters
        (
            (POFILE_START + 'msgid "Coal"\nmsgstr "Carbón"\n'),
            '{"Coal": "Carbón"}',
            False,
            False,
            False,
            None,
            None,
            None,
            False,
            False,
        ),
    ),
)
def test_pofile_content_to_json(
    content,
    output,
    fallback_to_msgid,
    fuzzy,
    pretty,
    indent,
    language,
    plural_forms,
    as_dict,
    sort_keys,
):
    """Check ``pofile_to_json`` converts each PO snippet to its expected output.

    Every keyword argument is forwarded verbatim from the parametrize table,
    so each case exercises exactly one (or one combination) of the converter's
    options against a hand-written expected result.
    """
    assert (
        pofile_to_json(
            content,
            fallback_to_msgid=fallback_to_msgid,
            fuzzy=fuzzy,
            pretty=pretty,
            indent=indent,
            language=language,
            plural_forms=plural_forms,
            as_dict=as_dict,
            sort_keys=sort_keys,
        )
        == output
    )
| 23.088
| 85
| 0.346847
| 653
| 8,658
| 4.490046
| 0.104135
| 0.21487
| 0.107435
| 0.116644
| 0.768076
| 0.752046
| 0.724079
| 0.69236
| 0.638472
| 0.589018
| 0
| 0.005153
| 0.529337
| 8,658
| 374
| 86
| 23.149733
| 0.714356
| 0.050358
| 0
| 0.721408
| 0
| 0
| 0.216309
| 0
| 0
| 0
| 0
| 0
| 0.002933
| 1
| 0.002933
| false
| 0
| 0.01173
| 0
| 0.014663
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
336adfdef70fa5bff41e4384061eb9ddf703477f
| 52,895
|
py
|
Python
|
srez_input.py
|
giladddd/MLN
|
52876e75671d3fee8905b16931aadc9ecdc7bd4f
|
[
"MIT"
] | 2
|
2019-04-16T05:04:23.000Z
|
2020-05-20T15:31:19.000Z
|
srez_input.py
|
giladddd/MLN
|
52876e75671d3fee8905b16931aadc9ecdc7bd4f
|
[
"MIT"
] | null | null | null |
srez_input.py
|
giladddd/MLN
|
52876e75671d3fee8905b16931aadc9ecdc7bd4f
|
[
"MIT"
] | 2
|
2018-12-30T14:16:02.000Z
|
2019-08-06T16:43:46.000Z
|
import sys
import tensorflow as tf
import pdb
import numpy as np
import myParams
import GTools as GT
import scipy.io
import h5py
import time
# Module-level handle to TensorFlow's command-line flag registry (TF 1.x API).
FLAGS = tf.app.flags.FLAGS
def setup_inputs(sess, filenames, image_size=None, capacity_factor=3, TestStuff=False):
batch_size=myParams.myDict['batch_size']
channelsIn=myParams.myDict['channelsIn']
channelsOut=myParams.myDict['channelsOut']
DataH=myParams.myDict['DataH']
DataW=myParams.myDict['DataW']
LabelsH=myParams.myDict['LabelsH']
LabelsW=myParams.myDict['LabelsW']
if myParams.myDict['InputMode'] == 'I2I_ApplySens':
print('I2I loading labels ' + time.strftime("%Y-%m-%d %H:%M:%S"))
DatasetMatFN=myParams.myDict['LabelsMatFN']
f = h5py.File(DatasetMatFN, 'r')
nToLoad=myParams.myDict['nToLoad']
LoadAndRunOnData=myParams.myDict['LoadAndRunOnData']>0
if LoadAndRunOnData:
nToLoad=3
labels=f['Data'][1:nToLoad]
print('Loaded images ' + time.strftime("%Y-%m-%d %H:%M:%S"))
SensFN='/media/a/H2/home/a/gUM/ESensCC128.mat'
SensCC=scipy.io.loadmat(SensFN)
Sens=SensCC['ESensCC128']
SensMsk=SensCC['MskS']
SensMsk=np.reshape(SensMsk,(SensMsk.shape[0],SensMsk.shape[1],1))
def ConcatCOnDim(X,dim): return tf.cast(tf.concat([tf.real(X),tf.imag(X)],axis=dim),tf.float32)
def myrot90(X): return tf.transpose(X, perm=[1,0,2])
with tf.device('/gpu:0'):
TFL = tf.constant(np.int32(labels))
Idx=tf.random_uniform([1],minval=0,maxval=TFL.shape[0],dtype=tf.int32)
labelR=tf.slice(TFL,[Idx[0],0,0,0],[1,-1,-1,1])
labelI=tf.slice(TFL,[Idx[0],0,0,1],[1,-1,-1,1])
labelR=tf.cast(labelR,tf.complex64)
labelI=tf.cast(labelI,tf.complex64)
label=tf.cast((labelR + 1j*labelI)/30000.0, tf.complex64)
myParams.myDict['channelsOut']=1
myParams.myDict['LabelsH']=labels.shape[1]
myParams.myDict['LabelsW']=labels.shape[2]
myParams.myDict['DataH']=labels.shape[1]
myParams.myDict['DataW']=labels.shape[2]
label = tf.reshape(label, [LabelsH, LabelsW, 1])
label = tf.image.random_flip_left_right(label)
label = tf.image.random_flip_up_down(label)
u1=tf.random_uniform([1])
label=tf.cond(u1[0]<0.5, lambda: tf.identity(label), lambda: myrot90(label))
TFMsk = tf.constant(np.complex64(SensMsk))
TFSens = tf.constant(np.complex64(Sens))
label=tf.multiply(label,TFMsk)
feature=label
# label=ConcatCOnDim(label,2)
label = tf.cast(tf.abs(label),tf.float32)
feature=tf.multiply(feature,TFSens)
feature=ConcatCOnDim(feature,2)
features, labels = tf.train.batch([feature, label],batch_size=batch_size,num_threads=4,capacity = capacity_factor*batch_size,name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
if myParams.myDict['InputMode'] == 'I2I_B0':
print('I2I loading labels ' + time.strftime("%Y-%m-%d %H:%M:%S"))
DatasetMatFN=myParams.myDict['LabelsMatFN']
f = h5py.File(DatasetMatFN, 'r')
nToLoad=myParams.myDict['nToLoad']
LoadAndRunOnData=myParams.myDict['LoadAndRunOnData']>0
if LoadAndRunOnData:
nToLoad=3
labels=f['Data'][1:nToLoad]
LMin=np.float32(f['Min'])
LRange=np.float32(f['Range'])
print('Min, Range: %f,%f' % (LMin,LRange))
print('Loaded images ' + time.strftime("%Y-%m-%d %H:%M:%S"))
print('I2I loading features ' + time.strftime("%Y-%m-%d %H:%M:%S"))
DatasetMatFN=myParams.myDict['FeaturesMatFN']
f = h5py.File(DatasetMatFN, 'r')
features=f['Data'][1:nToLoad]
FMin=np.float32(f['Min'])
FRange=np.float32(f['Range'])
print('Min, Range: %f,%f' % (FMin,FRange))
print('Loaded featuress ' + time.strftime("%Y-%m-%d %H:%M:%S"))
TFL = tf.constant(np.int16(labels))
TFF = tf.constant(np.int16(features))
Idx=tf.random_uniform([1],minval=0,maxval=TFL.shape[0],dtype=tf.int32)
label=tf.slice(TFL,[Idx[0],0,0],[1,-1,-1,])
feature=tf.slice(TFF,[Idx[0],0,0,0],[1,-1,-1,-1])
label = tf.cast(label, tf.float32)
feature = tf.cast(feature, tf.float32)
label=(label*LRange/30000.0)+LMin
feature=(feature*FRange/30000.0)+FMin
if labels.ndim==4:
label = tf.reshape(label, [LabelsH, LabelsW, TFL.shape[3]])
else:
label = tf.reshape(label, [LabelsH, LabelsW, 1])
if features.ndim==4:
feature = tf.reshape(feature, [LabelsH, LabelsW, TFF.shape[3]])
else:
feature = tf.reshape(feature, [LabelsH, LabelsW, 1])
features, labels = tf.train.batch([feature, label],batch_size=batch_size,num_threads=4,capacity = capacity_factor*batch_size,name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
if myParams.myDict['InputMode'] == 'I2I':
print('I2I loading labels ' + time.strftime("%Y-%m-%d %H:%M:%S"))
DatasetMatFN=myParams.myDict['LabelsMatFN']
# DatasetMatFN='/media/a/H2/home/a/gUM/GRE_U1.4_Labels.mat'
f = h5py.File(DatasetMatFN, 'r')
nToLoad=myParams.myDict['nToLoad']
LoadAndRunOnData=myParams.myDict['LoadAndRunOnData']>0
if LoadAndRunOnData:
nToLoad=3
labels=f['labels'][1:nToLoad]
print('Loaded images ' + time.strftime("%Y-%m-%d %H:%M:%S"))
print('I2I loading features ' + time.strftime("%Y-%m-%d %H:%M:%S"))
DatasetMatFN=myParams.myDict['FeaturesMatFN']
# DatasetMatFN='/media/a/H2/home/a/gUM/GRE_U1.4_Features.mat'
f = h5py.File(DatasetMatFN, 'r')
features=f['features'][1:nToLoad]
print('Loaded featuress ' + time.strftime("%Y-%m-%d %H:%M:%S"))
TFL = tf.constant(np.int16(labels))
TFF = tf.constant(np.int16(features))
Idx=tf.random_uniform([1],minval=0,maxval=TFL.shape[0],dtype=tf.int32)
# label=tf.slice(TFL,[Idx[0],0,0],[1,-1,-1])
label=tf.slice(TFL,[Idx[0],0,0,0],[1,-1,-1,-1])
feature=tf.slice(TFF,[Idx[0],0,0,0],[1,-1,-1,-1])
label = tf.cast(label, tf.float32)
feature = tf.cast(feature, tf.float32)
if labels.ndim==4:
label = tf.reshape(label, [LabelsH, LabelsW, TFL.shape[3]])
else:
label = tf.reshape(label, [LabelsH, LabelsW, 1])
if features.ndim==4:
feature = tf.reshape(feature, [LabelsH, LabelsW, TFF.shape[3]])
else:
feature = tf.reshape(feature, [LabelsH, LabelsW, 1])
features, labels = tf.train.batch([feature, label],batch_size=batch_size,num_threads=4,capacity = capacity_factor*batch_size,name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
if myParams.myDict['InputMode'] == 'RegridTry3FMB':
BaseTSDataP=myParams.myDict['BaseTSDataP']
BaseNUFTDataP=myParams.myDict['BaseNUFTDataP']
B0Data=scipy.io.loadmat(BaseTSDataP + 'B0TS.mat')
TSBFA=B0Data['TSBFA']
TSCA=B0Data['TSCA']
TSBFB=B0Data['TSBFB']
TSCB=B0Data['TSCB']
SensCC=scipy.io.loadmat(BaseTSDataP + 'SensCC1.mat')
SensA=SensCC['SensCCA']
SensMskA=SensCC['SensMskA']
SensB=SensCC['SensCCB']
SensMskB=SensCC['SensMskB']
SensMskA=np.reshape(SensMskA,(SensMskA.shape[0],SensMskA.shape[1],1))
SensMskB=np.reshape(SensMskB,(SensMskB.shape[0],SensMskB.shape[1],1))
TFMskA = tf.constant(np.complex64(SensMskA))
TFMskB = tf.constant(np.complex64(SensMskB))
print('loading images ' + time.strftime("%Y-%m-%d %H:%M:%S"))
# f = h5py.File('/media/a/H1/HCPData_256x256_int16.mat', 'r')
DatasetMatFN=myParams.myDict['DatasetMatFN']
f = h5py.File(DatasetMatFN, 'r')
nToLoad=myParams.myDict['nToLoad']
# nToLoad=10000
LoadAndRunOnData=myParams.myDict['LoadAndRunOnData']>0
if LoadAndRunOnData:
nToLoad=3
I=f['HCPData'][1:nToLoad]
print('Loaded images ' + time.strftime("%Y-%m-%d %H:%M:%S"))
H=LabelsH
W=LabelsW
TFI = tf.constant(np.int16(I))
IdxA=tf.random_uniform([1],minval=0,maxval=I.shape[0],dtype=tf.int32)
IdxB=tf.random_uniform([1],minval=0,maxval=I.shape[0],dtype=tf.int32)
featureA=tf.slice(TFI,[IdxA[0],0,0],[1,-1,-1])
featureB=tf.slice(TFI,[IdxB[0],0,0],[1,-1,-1])
featureA=tf.transpose(featureA, perm=[1,2,0])
featureB=tf.transpose(featureB, perm=[1,2,0])
featureA = tf.image.random_flip_left_right(featureA)
featureA = tf.image.random_flip_up_down(featureA)
u1=tf.random_uniform([1])
featureA=tf.cond(u1[0]<0.5, lambda: tf.identity(featureA), lambda: tf.image.rot90(featureA))
featureB = tf.image.random_flip_left_right(featureB)
featureB = tf.image.random_flip_up_down(featureB)
u1=tf.random_uniform([1])
featureB=tf.cond(u1[0]<0.5, lambda: tf.identity(featureB), lambda: tf.image.rot90(featureB))
featureA = tf.random_crop(featureA, [H, W, 1])
featureB = tf.random_crop(featureB, [H, W, 1])
featureA = tf.cast(featureA, tf.int32)
featureB = tf.cast(featureB, tf.int32)
mxA=tf.maximum(tf.reduce_max(featureA),1)
mxB=tf.maximum(tf.reduce_max(featureB),1)
featureA = tf.cast(featureA/mxA, tf.complex64)
featureB = tf.cast(featureB/mxB, tf.complex64)
featureA=tf.multiply(featureA,TFMskA)
featureB=tf.multiply(featureB,TFMskB)
LFac=myParams.myDict['RandomPhaseLinearFac']
QFac=myParams.myDict['RandomPhaseQuadraticFac']
SFac=myParams.myDict['RandomPhaseScaleFac']
QA=GT.TFGenerateRandomSinPhase(H, W,LFac,QFac,SFac) # (nx=100,ny=120,LFac=5,QFac=0.1,SFac=2):
QB=GT.TFGenerateRandomSinPhase(H, W,LFac,QFac,SFac)
CurIWithPhaseA=featureA*tf.reshape(QA,[H,W,1])
CurIWithPhaseB=featureB*tf.reshape(QB,[H,W,1])
NUFTData=scipy.io.loadmat(BaseNUFTDataP + 'TrajForNUFT.mat')
# --- Tail of a multiband (two-slice A/B) TS-NUFFT input branch; the enclosing
# 'if' for this InputMode begins before this chunk.
# NOTE(review): original indentation was lost in extraction; nesting of the
# statements below must be confirmed against the full file.
Kd=NUFTData['Kd']
P=NUFTData['P']
SN=NUFTData['SN']
Trajm2=NUFTData['Trajm2']
# nTraj: number of trajectory samples; nCh/nTSC: channel and time-segment counts.
nTraj=Trajm2.shape[1]
nCh=SensA.shape[2]
nTSC=TSCA.shape[2]
# ggg Arrived till here. CAIPI supposed to be into TSB anyway
# Precompute NUFFT interpolation state separately for slice A and slice B.
SNcA,paddings,sp_R,sp_I,TSBFXA=GT.TF_TSNUFFT_Prepare(SN,SensA,TSCA,TSBFA,Kd,P)
SNcB,paddings,sp_R,sp_I,TSBFXB=GT.TF_TSNUFFT_Prepare(SN,SensB,TSCB,TSBFB,Kd,P)
# Stack real and imaginary parts along axis 0 (signal vectors) / axis 2 (images).
def ConcatCI(X): return tf.concat([tf.real(X),tf.imag(X)],axis=0)
def ConcatCIOn2(X): return tf.concat([tf.real(X),tf.imag(X)],axis=2)
# Signal-bank mode: cache simulated signals in a persistent variable ("Bank")
# and reuse them BankK-1 times out of BankK instead of re-simulating each step.
if myParams.myDict['BankSize']>0:
BankSize=myParams.myDict['BankSize']
BankK=myParams.myDict['BankK']
label_indexes = tf.constant(np.int32(np.arange(0,BankSize)),dtype=tf.int32)
BankK_indexes = tf.constant(np.int32(np.arange(0,BankSize*BankK)),dtype=tf.int32)
# Cyclic counter over bank slots (stride 2: each draw fills slots i and i+1
# with the A and B slices respectively).
Bankdataset = tf.data.Dataset.from_tensor_slices(label_indexes)
Bankdataset = Bankdataset.repeat(count=None)
Bankiter = Bankdataset.make_one_shot_iterator()
label_index = Bankiter.get_next()
label_index=tf.cast(label_index,tf.int32)
label_index=label_index*2
# Longer cyclic counter; once label_indexK overtakes label_index we only read
# from the bank and skip the (expensive) NUFFT simulation.
BankKdataset = tf.data.Dataset.from_tensor_slices(BankK_indexes)
BankKdataset = BankKdataset.repeat(count=None)
BankKiter = BankKdataset.make_one_shot_iterator()
label_indexK = BankKiter.get_next()
label_indexK=tf.cast(label_indexK,tf.int32)
label_indexK=label_indexK*2
# Random slots to read when serving from the bank.
IdxAX=tf.random_uniform([1],minval=0,maxval=BankSize,dtype=tf.int32)
IdxBX=tf.random_uniform([1],minval=0,maxval=BankSize,dtype=tf.int32)
with tf.device('/gpu:0'):
OnlyTakeFromBank=tf.greater(label_indexK,label_index)
with tf.variable_scope("aaa", reuse=True):
Bank=tf.get_variable("Bank",dtype=tf.float32)
LBank=tf.get_variable("LBank",dtype=tf.float32)
# f2/f2L: simulate fresh A/B signal and label pairs and scatter them into the banks.
def f2(): return tf.scatter_nd_update(Bank,[[label_index],[label_index+1]], [ConcatCI(tf.reshape(tf.transpose(GT.TF_TSNUFFT_Run(CurIWithPhaseA,SNcA,paddings,nTraj,nTSC,nCh,sp_R,sp_I,TSBFXA), perm=[1,0]),[nTraj*nCh,1,1])),ConcatCI(tf.reshape(tf.transpose(GT.TF_TSNUFFT_Run(CurIWithPhaseB,SNcB,paddings,nTraj,nTSC,nCh,sp_R,sp_I,TSBFXB), perm=[1,0]),[nTraj*nCh,1,1]))])
def f2L(): return tf.scatter_nd_update(LBank,[[label_index],[label_index+1]], [ConcatCIOn2(CurIWithPhaseA),ConcatCIOn2(CurIWithPhaseB)])
Bank = tf.cond(OnlyTakeFromBank, lambda: tf.identity(Bank), f2)
LBank = tf.cond(OnlyTakeFromBank, lambda: tf.identity(LBank), f2L)
# Slot to serve: random bank slot when reusing, the just-written slot otherwise.
IdxAF = tf.cond(OnlyTakeFromBank, lambda: tf.identity(IdxAX[0]*2), lambda: tf.identity(label_index))
IdxBF = tf.cond(OnlyTakeFromBank, lambda: tf.identity(IdxBX[0]*2+1), lambda: tf.identity(label_index+1))
# Take from bank in any case
featureAX = tf.slice(Bank,[IdxAF,0,0,0],[1,-1,-1,-1])
featureAX = tf.reshape(featureAX, [DataH, 1, 1])
featureBX = tf.slice(Bank,[IdxBF,0,0,0],[1,-1,-1,-1])
featureBX = tf.reshape(featureBX, [DataH, 1, 1])
featureX=featureAX+featureBX # That's MB
labelAX = tf.slice(LBank,[IdxAF,0,0,0],[1,-1,-1,-1])
labelAX = tf.reshape(labelAX, [H, W, 2])
labelBX = tf.slice(LBank,[IdxBF,0,0,0],[1,-1,-1,-1])
labelBX = tf.reshape(labelBX, [H, W, 2])
# Labels of the two slices are placed side by side along the width axis.
labelX = tf.concat([labelAX,labelBX],axis=1);
features, labels = tf.train.batch([featureX, labelX],batch_size=batch_size,num_threads=4,capacity = capacity_factor*batch_size,name='labels_and_features')
else:
# No bank: simulate the multiband signal directly every step.
featureA=GT.TF_TSNUFFT_Run(CurIWithPhaseA,SNcA,paddings,nTraj,nTSC,nCh,sp_R,sp_I,TSBFXA)
featureB=GT.TF_TSNUFFT_Run(CurIWithPhaseB,SNcB,paddings,nTraj,nTSC,nCh,sp_R,sp_I,TSBFXB)
feature=featureA+featureB # That's MB
feature=tf.transpose(feature, perm=[1,0])
F=tf.reshape(feature,[nTraj*nCh,1,1])
feature=ConcatCI(F)
CurIWithPhase=tf.concat([CurIWithPhaseA,CurIWithPhaseB],axis=1);
label=tf.concat([tf.real(CurIWithPhase),tf.imag(CurIWithPhase)],axis=2)
features, labels = tf.train.batch([feature, label],batch_size=batch_size,num_threads=4,capacity = capacity_factor*batch_size,name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
# RegridTry3F: single-slice TS-NUFFT simulation from HCP magnitude images,
# with random flips/rotation/crop, a random synthetic sine phase, and an
# optional signal bank that amortizes the NUFFT cost over BankK steps.
# NOTE(review): original indentation was lost in extraction; verify nesting.
if myParams.myDict['InputMode'] == 'RegridTry3F':
BaseTSDataP=myParams.myDict['BaseTSDataP']
BaseNUFTDataP=myParams.myDict['BaseNUFTDataP']
# Time-segmented B0 data: basis functions (TSBF) and coefficients (TSC).
B0Data=scipy.io.loadmat(BaseTSDataP + 'B0TS.mat')
# Sens=B0Data['Sens']
TSBF=B0Data['TSBF']
TSC=B0Data['TSC']
# Coil sensitivities (coil-compressed) and the spatial support mask.
SensCC=scipy.io.loadmat(BaseTSDataP + 'SensCC1.mat')
Sens=SensCC['SensCC']
SensMsk=SensCC['SensMsk']
SensMsk=np.reshape(SensMsk,(SensMsk.shape[0],SensMsk.shape[1],1))
TFMsk = tf.constant(np.complex64(SensMsk))
print('loading images ' + time.strftime("%Y-%m-%d %H:%M:%S"))
# I=scipy.io.loadmat('/media/a/H1/First3kIm256x256Magint16.mat')
# I=I['First3kIm256x256Magint16']
DatasetMatFN=myParams.myDict['DatasetMatFN']
# f = h5py.File('/media/a/H1/HCPData_256x256_int16.mat', 'r')
f = h5py.File(DatasetMatFN, 'r')
# nToLoad=10000
nToLoad=myParams.myDict['nToLoad']
LoadAndRunOnData=myParams.myDict['LoadAndRunOnData']>0
if LoadAndRunOnData:
nToLoad=3
# NOTE(review): slice starts at 1, so image 0 is never used — confirm intent.
I=f['HCPData'][1:nToLoad]
print('Loaded images ' + time.strftime("%Y-%m-%d %H:%M:%S"))
# I=scipy.io.loadmat('/media/a/H1/First1kIm256x256Magint16.mat')
# I=I['First1kIm256x256Magint16']
H=LabelsH
W=LabelsW
# Pick a random image and apply random augmentations (flips, 50% rot90, crop).
TFI = tf.constant(np.int16(I))
Idx=tf.random_uniform([1],minval=0,maxval=I.shape[0],dtype=tf.int32)
feature=tf.slice(TFI,[Idx[0],0,0],[1,-1,-1])
feature=tf.transpose(feature, perm=[1,2,0])
feature = tf.image.random_flip_left_right(feature)
feature = tf.image.random_flip_up_down(feature)
# u1 = tf.distributions.Uniform(low=0.0, high=1.0)
u1=tf.random_uniform([1])
feature=tf.cond(u1[0]<0.5, lambda: tf.identity(feature), lambda: tf.image.rot90(feature))
# tf.image.rot90( image, k=1, name=None)
# MYGlobalStep = tf.Variable(0, trainable=False, name='Myglobal_step')
# MYGlobalStep = MYGlobalStep+1
# feature=tf.cond(MYGlobalStep>0, lambda: tf.identity(feature), lambda: tf.identity(feature))
# feature = tf.Print(feature,[MYGlobalStep,],message='MYGlobalStep:')
# image = tf.image.random_saturation(image, .95, 1.05)
# image = tf.image.random_brightness(image, .05)
#image = tf.image.random_contrast(image, .95, 1.05)
feature = tf.random_crop(feature, [H, W, 1])
feature = tf.cast(feature, tf.int32)
# Normalize to [0,1] by the per-image max (clamped to >=1 to avoid div-by-zero).
mx=tf.reduce_max(feature)
mx=tf.maximum(mx,1)
feature = tf.cast(feature/mx, tf.complex64)
feature=tf.multiply(feature,TFMsk)
# Multiply by a random smooth sine phase so the network sees complex images.
Q=GT.TFGenerateRandomSinPhase(H, W)
CurIWithPhase=feature*tf.reshape(Q,[H,W,1])
label=tf.concat([tf.real(CurIWithPhase),tf.imag(CurIWithPhase)],axis=2)
NUFTData=scipy.io.loadmat(BaseNUFTDataP + 'TrajForNUFT.mat')
Kd=NUFTData['Kd']
P=NUFTData['P']
SN=NUFTData['SN']
Trajm2=NUFTData['Trajm2']
nTraj=Trajm2.shape[1]
nCh=Sens.shape[2]
nTSC=TSC.shape[2]
SNc,paddings,sp_R,sp_I,TSBFX=GT.TF_TSNUFFT_Prepare(SN,Sens,TSC,TSBF,Kd,P)
# feature=GT.TF_TSNUFFT_Run(CurIWithPhase,SNc,paddings,nTraj,nTSC,nCh,sp_R,sp_I,TSBFX)
# feature=tf.transpose(feature, perm=[1,0])
# F=tf.reshape(feature,[nTraj*nCh,1,1])
# feature=tf.concat([tf.real(F),tf.imag(F)],axis=0)
def ConcatCI(X): return tf.concat([tf.real(X),tf.imag(X)],axis=0)
# feature=ConcatCI(F)
# feature=ConcatCI(tf.reshape(tf.transpose(GT.TF_TSNUFFT_Run(CurIWithPhase,SNc,paddings,nTraj,nTSC,nCh,sp_R,sp_I,TSBFX), perm=[1,0]),[nTraj*nCh,1,1]))
# ggg Signal Bank stuff:
# Same reuse scheme as the multiband branch, but single-slice (stride 1 slots).
if myParams.myDict['BankSize']>0:
BankSize=myParams.myDict['BankSize']
BankK=myParams.myDict['BankK']
label_indexes = tf.constant(np.int32(np.arange(0,BankSize)),dtype=tf.int32)
BankK_indexes = tf.constant(np.int32(np.arange(0,BankSize*BankK)),dtype=tf.int32)
Bankdataset = tf.data.Dataset.from_tensor_slices(label_indexes)
Bankdataset = Bankdataset.repeat(count=None)
Bankiter = Bankdataset.make_one_shot_iterator()
label_index = Bankiter.get_next()
label_index=tf.cast(label_index,tf.int32)
BankKdataset = tf.data.Dataset.from_tensor_slices(BankK_indexes)
BankKdataset = BankKdataset.repeat(count=None)
BankKiter = BankKdataset.make_one_shot_iterator()
label_indexK = BankKiter.get_next()
label_indexK=tf.cast(label_indexK,tf.int32)
with tf.device('/gpu:0'):
OnlyTakeFromBank=tf.greater(label_indexK,label_index)
with tf.variable_scope("aaa", reuse=True):
Bank=tf.get_variable("Bank",dtype=tf.float32)
LBank=tf.get_variable("LBank",dtype=tf.float32)
# f2/f2L: simulate a fresh signal/label and write it into the bank slot.
def f2(): return tf.scatter_nd_update(Bank,[[label_index]], [ConcatCI(tf.reshape(tf.transpose(GT.TF_TSNUFFT_Run(CurIWithPhase,SNc,paddings,nTraj,nTSC,nCh,sp_R,sp_I,TSBFX), perm=[1,0]),[nTraj*nCh,1,1]))])
def f2L(): return tf.scatter_nd_update(LBank,[[label_index]], [label])
Bank = tf.cond(OnlyTakeFromBank, lambda: tf.identity(Bank), f2)
LBank = tf.cond(OnlyTakeFromBank, lambda: tf.identity(LBank), f2L)
# Take from bank in any case
featureX = tf.slice(Bank,[label_index,0,0,0],[1,-1,-1,-1])
featureX = tf.reshape(featureX, [DataH, 1, 1])
# featureX = tf.Print(featureX,[label_index,label_indexK],message='Taking from bank:')
labelX = tf.slice(LBank,[label_index,0,0,0],[1,-1,-1,-1])
labelX = tf.reshape(labelX, [H, W, 2])
features, labels = tf.train.batch([featureX, labelX],batch_size=batch_size,num_threads=4,capacity = capacity_factor*batch_size,name='labels_and_features')
# feature = tf.cond(TakeFromBank, lambda: tf.identity(Bfeature), lambda: tf.identity(Afeature))
# label = tf.cond(TakeFromBank, lambda: tf.identity(Blabel), lambda: tf.identity(Alabel))
else:
# No bank: run the TS-NUFFT forward model every step.
feature=ConcatCI(tf.reshape(tf.transpose(GT.TF_TSNUFFT_Run(CurIWithPhase,SNc,paddings,nTraj,nTSC,nCh,sp_R,sp_I,TSBFX), perm=[1,0]),[nTraj*nCh,1,1]))
features, labels = tf.train.batch([feature, label],batch_size=batch_size,num_threads=4,capacity = capacity_factor*batch_size,name='labels_and_features')
# ggg end Signal Bank stuff:
tf.train.start_queue_runners(sess=sess)
return features, labels
# RegridTry3M: pre-measured data mode — loads acquired signal chunks and
# matching complex labels from .mat files and serves random pairs.
# NOTE(review): original indentation was lost in extraction; verify nesting.
if myParams.myDict['InputMode'] == 'RegridTry3M':
Msk=scipy.io.loadmat('/media/a/DATA/meas_MID244_gBP_VD11_U19_G35S155_4min_FID22439/Sli08/Msk.mat')
Msk=Msk['Msk']
TFMsk = tf.constant(Msk)
FN='/media/a/H1/meas_MID244_gBP_VD11_U19_G35S155_4min_FID22439/AllData_Sli8_6k.mat'
# Test mode uses a small 100-sample file; train mode uses the full 6k file.
if TestStuff:
print('setup_inputs Test')
ChunkSize=100
ChunkSizeL=400
FN='/media/a/H1/meas_MID244_gBP_VD11_U19_G35S155_4min_FID22439/AllData_Sli8_100.mat'
else:
print('setup_inputs Train')
ChunkSize=1000
ChunkSizeL=4000
f = h5py.File(FN, 'r')
print('loading Data ' + time.strftime("%Y-%m-%d %H:%M:%S"))
I=f['AllDatax'][:]
print('Loaded labels ' + time.strftime("%Y-%m-%d %H:%M:%S"))
f.close()
I=I.astype(np.float32)
f = h5py.File('/media/a/H1/AllImWithPhaseComplexSingle_h5.mat', 'r')
print('Loading labels ' + time.strftime("%Y-%m-%d %H:%M:%S"))
L=f['AllLh5'][0:(ChunkSizeL)]
print('Loaded labels ' + time.strftime("%Y-%m-%d %H:%M:%S"))
f.close()
L=L.astype(np.float32)
# Data split into 4 constant tensors — presumably to stay under the 2GB
# protobuf limit for a single tf.constant; TODO confirm.
TFI = tf.constant(I[0:ChunkSize])
TFIb = tf.constant(I[(ChunkSize):(2*ChunkSize)])
TFIc = tf.constant(I[(2*ChunkSize):(3*ChunkSize)])
TFId = tf.constant(I[(3*ChunkSize):(4*ChunkSize)])
TFL = tf.constant(L)
# place = tf.placeholder(tf.float32, shape=(DataH, DataW, channelsIn))
# placeL = tf.placeholder(tf.float32, shape=(LabelsH, LabelsW, channelsOut))
# Random sample index; nested conds route it to the right quarter-chunk.
Idx=tf.random_uniform([1],minval=0,maxval=ChunkSizeL,dtype=tf.int32)
def f1(): return tf.cond(Idx[0]<ChunkSize, lambda: tf.slice(TFI,[Idx[0],0],[1,-1]), lambda: tf.slice(TFIb,[Idx[0]-ChunkSize,0],[1,-1]))
def f2(): return tf.cond(Idx[0]<(3*ChunkSize), lambda: tf.slice(TFIc,[Idx[0]-2*ChunkSize,0],[1,-1]), lambda: tf.slice(TFId,[Idx[0]-3*ChunkSize,0],[1,-1]))
feature=tf.cond(Idx[0]<(2*ChunkSize), f1, f2)
# feature=tf.cond(Idx[0]<ChunkSize, lambda: tf.slice(TFI,[Idx[0],0],[1,-1]), lambda: tf.slice(TFIb,[Idx[0]-ChunkSize,0],[1,-1]))
# feature=tf.slice(TFI,[Idx[0],0],[1,-1])
# feature = tmp.assign(place)
feature = tf.reshape(feature, [DataH, DataW, channelsIn])
feature = tf.cast(feature, tf.float32)
labels = tf.slice(TFL,[Idx[0],0,0,0],[1,-1,-1,-1])
# feature = tmpL.assign(placeL)
labels = tf.reshape(labels, [LabelsH, LabelsW, channelsOut])
label = tf.cast(labels, tf.float32)
# Mask the label to the valid spatial support.
label=tf.multiply(label,TFMsk)
# Using asynchronous queues
features, labels = tf.train.batch([feature, label],
batch_size=batch_size,
num_threads=4,
capacity = capacity_factor*batch_size,
name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
# SPEN_Local: simulate SPEN acquisition by multiplying the (phase-augmented)
# image with the super-resolution matrix SR, then expand with local copies.
# NOTE(review): original indentation was lost in extraction; verify nesting.
if myParams.myDict['InputMode'] == 'SPEN_Local':
SR=scipy.io.loadmat('/media/a/H1/SR.mat')
SR=SR['SR']
SR=np.reshape(SR,[DataH,DataH,1])
SR=np.transpose(SR, (2,0,1))
SR_TF=tf.constant(SR)
# I=scipy.io.loadmat('/media/a/H1/First1kIm256x256Magint16.mat')
# I=I['First1kIm256x256Magint16']
I=scipy.io.loadmat('/media/a/H1/First3kIm256x256Magint16.mat')
I=I['First3kIm256x256Magint16']
TFI = tf.constant(np.float32(I))
# Random image, random crop, normalize by per-image max (clamped to >=1).
Idx=tf.random_uniform([1],minval=0,maxval=3000,dtype=tf.int32)
feature=tf.slice(TFI,[Idx[0],0,0],[1,-1,-1])
feature=tf.transpose(feature, perm=[1,2,0])
feature = tf.random_crop(feature, [DataH, DataW, 1])
mx=tf.reduce_max(feature)
mx=tf.maximum(mx,1)
feature = tf.cast(feature/mx, tf.complex64)
# Random smooth sine phase makes the label complex-valued.
Q=GT.TFGenerateRandomSinPhase(DataH, DataW)
CurIWithPhase=feature*tf.reshape(Q,[DataH,DataW,1])
label=tf.concat([tf.real(CurIWithPhase),tf.imag(CurIWithPhase)],axis=2)
# Apply SR along the SPEN axis via batched matmul.
P=tf.transpose(CurIWithPhase, perm=[2,1,0])
F=tf.matmul(P,SR_TF)
F=tf.transpose(F, perm=[2,1,0])
SPENLocalFactor=myParams.myDict['SPENLocalFactor']
F=GT.ExpandWithCopiesOn2(F,DataH,SPENLocalFactor)
feature=tf.concat([tf.real(F),tf.imag(F)],axis=2)
features, labels = tf.train.batch([feature, label],batch_size=batch_size,num_threads=4,capacity = capacity_factor*batch_size,name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
# SPEN_FC: same simulation as SPEN_Local but without the local-copies
# expansion (fully-connected variant), using the 1k image set.
if myParams.myDict['InputMode'] == 'SPEN_FC':
SR=scipy.io.loadmat('/media/a/H1/SR.mat')
SR=SR['SR']
SR=np.reshape(SR,[DataH,DataH,1])
SR=np.transpose(SR, (2,0,1))
SR_TF=tf.constant(SR)
I=scipy.io.loadmat('/media/a/H1/First1kIm256x256Magint16.mat')
I=I['First1kIm256x256Magint16']
TFI = tf.constant(np.float32(I))
Idx=tf.random_uniform([1],minval=0,maxval=1000,dtype=tf.int32)
feature=tf.slice(TFI,[Idx[0],0,0],[1,-1,-1])
feature=tf.transpose(feature, perm=[1,2,0])
feature = tf.random_crop(feature, [DataH, DataW, 1])
mx=tf.reduce_max(feature)
mx=tf.maximum(mx,1)
feature = tf.cast(feature/mx, tf.complex64)
Q=GT.TFGenerateRandomSinPhase(DataH, DataW)
CurIWithPhase=feature*tf.reshape(Q,[DataH,DataW,1])
label=tf.concat([tf.real(CurIWithPhase),tf.imag(CurIWithPhase)],axis=2)
P=tf.transpose(CurIWithPhase, perm=[2,1,0])
F=tf.matmul(P,SR_TF)
F=tf.transpose(F, perm=[2,1,0])
feature=tf.concat([tf.real(F),tf.imag(F)],axis=2)
features, labels = tf.train.batch([feature, label],batch_size=batch_size,num_threads=4,capacity = capacity_factor*batch_size,name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
# SMASH1DFTxyC: multi-coil simulation — image x coil maps, 2D FFT via
# gfft_TFOn3D, then 2x undersampling along the first k-space axis.
# NOTE(review): original indentation was lost in extraction; verify nesting.
if myParams.myDict['InputMode'] == 'SMASH1DFTxyC':
I=scipy.io.loadmat('/media/a/H1/First3kIm128x128MagSinglex.mat')
I=I['First3kIm128x128MagSingle']
Maps=scipy.io.loadmat('/media/a/H1/maps128x128x8.mat')
Mask=Maps['Msk']
Maps=Maps['maps']
nChannels=8
Mask=np.reshape(Mask,[128, 128, 1])
Maps = tf.constant(Maps)
Mask = tf.constant(np.float32(Mask))
# Maps = tf.constant(np.float32(Maps))
TFI = tf.constant(np.float32(I))
Idx=tf.random_uniform([1],minval=0,maxval=3000,dtype=tf.int32)
feature=tf.slice(TFI,[Idx[0],0,0],[1,-1,-1])
feature = tf.reshape(feature, [128, 128, 1])
feature = tf.multiply(feature,Mask)
feature = tf.cast(feature, tf.complex64)
# Random smooth sine phase; label is the (single-coil) complex image.
Q=GT.TFGenerateRandomSinPhase(DataH, DataW)
CurIWithPhase=feature*tf.reshape(Q,[DataH,DataW,1])
WithPhaseAndMaps=tf.multiply(CurIWithPhase,Maps)
label=tf.concat([tf.real(CurIWithPhase),tf.imag(CurIWithPhase)],axis=2)
F=GT.gfft_TFOn3D(WithPhaseAndMaps,DataH,0)
F=GT.gfft_TFOn3D(F,DataW,1)
# now subsample 2
# Keep every other line along axis 0 (R=2 undersampling): 128 -> 64 rows.
F = tf.reshape(F, [64,2, 128, nChannels])
F=tf.slice(F,[0,0,0,0],[-1,1,-1,-1])
F = tf.reshape(F, [64, 128, nChannels])
feature=tf.concat([tf.real(F),tf.imag(F)],axis=2)
features, labels = tf.train.batch([feature, label],batch_size=batch_size,num_threads=4,capacity = capacity_factor*batch_size,name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
# 1DFTxyCMaps: same multi-coil simulation but fully sampled (no subsampling).
if myParams.myDict['InputMode'] == '1DFTxyCMaps':
I=scipy.io.loadmat('/media/a/H1/First3kIm128x128MagSinglex.mat')
I=I['First3kIm128x128MagSingle']
Maps=scipy.io.loadmat('/media/a/H1/maps128x128x8.mat')
Mask=Maps['Msk']
Maps=Maps['maps']
nChannels=8
Mask=np.reshape(Mask,[128, 128, 1])
Maps = tf.constant(Maps)
Mask = tf.constant(np.float32(Mask))
# Maps = tf.constant(np.float32(Maps))
TFI = tf.constant(np.float32(I))
Idx=tf.random_uniform([1],minval=0,maxval=3000,dtype=tf.int32)
feature=tf.slice(TFI,[Idx[0],0,0],[1,-1,-1])
feature = tf.reshape(feature, [128, 128, 1])
feature = tf.multiply(feature,Mask)
feature = tf.cast(feature, tf.complex64)
Q=GT.TFGenerateRandomSinPhase(DataH, DataW)
CurIWithPhase=feature*tf.reshape(Q,[DataH,DataW,1])
WithPhaseAndMaps=tf.multiply(CurIWithPhase,Maps)
label=tf.concat([tf.real(CurIWithPhase),tf.imag(CurIWithPhase)],axis=2)
F=GT.gfft_TFOn3D(WithPhaseAndMaps,DataH,0)
F=GT.gfft_TFOn3D(F,DataW,1)
feature=tf.concat([tf.real(F),tf.imag(F)],axis=2)
features, labels = tf.train.batch([feature, label],batch_size=batch_size,num_threads=4,capacity = capacity_factor*batch_size,name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
# M2DFT / M1DFTxy / M1DFTx / M1DFTy: single-coil Fourier-simulation modes.
# All four: random image + random sine phase -> label = [real, imag];
# they differ in which gfft_TF axes are transformed and in the feature shape.
# NOTE(review): original indentation was lost in extraction; verify nesting.
if myParams.myDict['InputMode'] == 'M2DFT':
I=scipy.io.loadmat('/media/a/H1/First3kIm128x128MagSinglex.mat')
I=I['First3kIm128x128MagSingle']
TFI = tf.constant(np.float32(I))
Idx=tf.random_uniform([1],minval=0,maxval=3000,dtype=tf.int32)
feature=tf.slice(TFI,[Idx[0],0,0],[1,-1,-1])
feature = tf.reshape(feature, [128, 128, 1])
feature = tf.random_crop(feature, [DataH, DataW, 1])
feature = tf.cast(feature, tf.complex64)
Q=GT.TFGenerateRandomSinPhase(DataH, DataW)
IQ=feature*tf.reshape(Q,[DataH,DataW,1])
label=tf.concat([tf.real(IQ),tf.imag(IQ)],axis=2)
# 2D FFT (both axes); feature flattened to a k-space vector.
IQ2=tf.reshape(IQ,IQ.shape[0:2])
IQ2=GT.gfft_TF(IQ2,DataH,0)
IQ2=GT.gfft_TF(IQ2,DataW,1)
feature=tf.reshape(IQ2,[DataH*DataW,1,1])
feature=tf.concat([tf.real(feature),tf.imag(feature)],axis=2)
features, labels = tf.train.batch([feature, label],batch_size=batch_size,num_threads=4,capacity = capacity_factor*batch_size,name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
# M1DFTxy: 2D FFT but feature keeps the [DataH, DataW] image layout.
if myParams.myDict['InputMode'] == 'M1DFTxy':
I=scipy.io.loadmat('/media/a/H1/First3kIm128x128MagSinglex.mat')
I=I['First3kIm128x128MagSingle']
TFI = tf.constant(np.float32(I))
Idx=tf.random_uniform([1],minval=0,maxval=3000,dtype=tf.int32)
feature=tf.slice(TFI,[Idx[0],0,0],[1,-1,-1])
feature = tf.reshape(feature, [128, 128, 1])
feature = tf.random_crop(feature, [DataH, DataW, 1])
feature = tf.cast(feature, tf.complex64)
Q=GT.TFGenerateRandomSinPhase(DataH, DataW)
IQ=feature*tf.reshape(Q,[DataH,DataW,1])
label=tf.concat([tf.real(IQ),tf.imag(IQ)],axis=2)
IQ2=tf.reshape(IQ,IQ.shape[0:2])
IQ2=GT.gfft_TF(IQ2,DataH,0)
IQ2=GT.gfft_TF(IQ2,DataW,1)
feature=tf.reshape(IQ2,[DataH,DataW,1])
feature=tf.concat([tf.real(feature),tf.imag(feature)],axis=2)
features, labels = tf.train.batch([feature, label],batch_size=batch_size,num_threads=4,capacity = capacity_factor*batch_size,name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
# M1DFTx: FFT along axis 1 only (no crop — image used at full size).
if myParams.myDict['InputMode'] == 'M1DFTx':
I=scipy.io.loadmat('/media/a/H1/First3kIm128x128MagSinglex.mat')
I=I['First3kIm128x128MagSingle']
TFI = tf.constant(np.float32(I))
Idx=tf.random_uniform([1],minval=0,maxval=3000,dtype=tf.int32)
feature=tf.slice(TFI,[Idx[0],0,0],[1,-1,-1])
feature = tf.reshape(feature, [DataH, DataW, 1])
feature = tf.cast(feature, tf.complex64)
Q=GT.TFGenerateRandomSinPhase(DataH, DataW)
IQ=feature*tf.reshape(Q,[DataH,DataW,1])
label=tf.concat([tf.real(IQ),tf.imag(IQ)],axis=2)
IQ2=tf.reshape(IQ,IQ.shape[0:2])
IQ2=GT.gfft_TF(IQ2,DataW,1)
feature=tf.reshape(IQ2,[DataH,DataW,1])
feature=tf.concat([tf.real(feature),tf.imag(feature)],axis=2)
features, labels = tf.train.batch([feature, label],batch_size=batch_size,num_threads=4,capacity = capacity_factor*batch_size,name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
# M1DFTy: FFT along axis 0 only.
if myParams.myDict['InputMode'] == 'M1DFTy':
I=scipy.io.loadmat('/media/a/H1/First3kIm128x128MagSinglex.mat')
I=I['First3kIm128x128MagSingle']
TFI = tf.constant(np.float32(I))
Idx=tf.random_uniform([1],minval=0,maxval=3000,dtype=tf.int32)
feature=tf.slice(TFI,[Idx[0],0,0],[1,-1,-1])
feature = tf.reshape(feature, [DataH, DataW, 1])
feature = tf.cast(feature, tf.complex64)
Q=GT.TFGenerateRandomSinPhase(DataH, DataW)
IQ=feature*tf.reshape(Q,[DataH,DataW,1])
label=tf.concat([tf.real(IQ),tf.imag(IQ)],axis=2)
IQ2=tf.reshape(IQ,IQ.shape[0:2])
IQ2=GT.gfft_TF(IQ2,DataH,0)
feature=tf.reshape(IQ2,[DataH,DataW,1])
feature=tf.concat([tf.real(feature),tf.imag(feature)],axis=2)
features, labels = tf.train.batch([feature, label],batch_size=batch_size,num_threads=4,capacity = capacity_factor*batch_size,name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
# --- TFRecord-based input path. Everything below reads serialized examples
# from `filenames` instead of simulating data with constants.
# NOTE(review): original indentation was lost in extraction; verify nesting.
#if image_size is None:
# image_size = FLAGS.sample_size
#pdb.set_trace()
reader = tf.TFRecordReader()
filename_queue = tf.train.string_input_producer(filenames)
key, value = reader.read(filename_queue)
# Modes that synthesize the label from the data record don't parse 'labels_raw'.
AlsoLabel=True
kKick= myParams.myDict['InputMode'] == 'kKick'
if kKick or myParams.myDict['InputMode'] == '1DFTx' or myParams.myDict['InputMode'] == '1DFTy' or myParams.myDict['InputMode'] == '2DFT':
AlsoLabel=False
# AAA: debugging mode — echoes record keys/values via tf.Print and returns
# the decoded 'Labels' field as both feature and label.
if myParams.myDict['InputMode'] == 'AAA':
#filename_queue = tf.Print(filename_queue,[filename_queue,],message='ZZZZZZZZZ:')
keyX=key
value = tf.Print(value,[keyX,],message='QQQ:')
featuresA = tf.parse_single_example(
value,
features={
'CurIs': tf.FixedLenFeature([], tf.string),
'Labels': tf.FixedLenFeature([], tf.string)
})
feature = tf.decode_raw(featuresA['Labels'], tf.float32)
CurIs = tf.decode_raw(featuresA['CurIs'], tf.float32)
CurIs = tf.cast(CurIs, tf.int64)
mx=CurIs
# mx='qwe'+
# Repeated tf.Print calls — debug spam left in on purpose, apparently.
feature = tf.Print(feature,[keyX,mx],message='QQQ:')
feature = tf.Print(feature,[keyX,mx],message='QQQ:')
feature = tf.Print(feature,[keyX,mx],message='QQQ:')
feature = tf.Print(feature,[keyX,mx],message='QQQ:')
feature = tf.Print(feature,[keyX,mx],message='QQQ:')
feature = tf.reshape(feature, [DataH, DataW, channelsIn])
feature = tf.cast(feature, tf.float32)
label=feature
features, labels = tf.train.batch([feature, label],
batch_size=batch_size,
num_threads=4,
capacity = capacity_factor*batch_size,
name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
#image = tf.image.decode_jpeg(value, channels=channels, name="dataset_image")
#print('1')
# Parse the record: with labels (data_raw + labels_raw) or data-only.
if AlsoLabel:
featuresA = tf.parse_single_example(
value,
features={
'DataH': tf.FixedLenFeature([], tf.int64),
'DataW': tf.FixedLenFeature([], tf.int64),
'channelsIn': tf.FixedLenFeature([], tf.int64),
'LabelsH': tf.FixedLenFeature([], tf.int64),
'LabelsW': tf.FixedLenFeature([], tf.int64),
'channelsOut': tf.FixedLenFeature([], tf.int64),
'data_raw': tf.FixedLenFeature([], tf.string),
'labels_raw': tf.FixedLenFeature([], tf.string)
})
labels = tf.decode_raw(featuresA['labels_raw'], tf.float32)
else:
featuresA = tf.parse_single_example(
value,
features={
'DataH': tf.FixedLenFeature([], tf.int64),
'DataW': tf.FixedLenFeature([], tf.int64),
'channelsIn': tf.FixedLenFeature([], tf.int64),
'data_raw': tf.FixedLenFeature([], tf.string)
})
# 'data_raw' exists in both record layouts; decoded for both paths.
feature = tf.decode_raw(featuresA['data_raw'], tf.float32)
print('setup_inputs')
print('Data H,W,#ch: %d,%d,%d -> Labels H,W,#ch %d,%d,%d' % (DataH,DataW,channelsIn,LabelsH,LabelsW,channelsOut))
print('------------------')
# 1DFTy / 1DFTx / 2DFT: build k-space features from the decoded record image
# using tf.fft, with an fftshift emulated by tf.gather over a rolled index.
# NOTE(review): original indentation was lost in extraction; verify nesting.
if myParams.myDict['InputMode'] == '1DFTy':
feature = tf.reshape(feature, [256, 256, 1])
feature = tf.random_crop(feature, [DataH, DataW, channelsIn])
mm=tf.reduce_mean(feature)
# Normalize by the per-image max, clamped to >=1 to avoid division by zero.
mx=tf.reduce_max(feature)
mx=tf.maximum(mx,1)
#feature = tf.Print(feature,[mm,mx],message='QQQ:')
#assert_op = tf.Assert(tf.greater(mx, 0), [mx])
#with tf.control_dependencies([assert_op]):
feature = tf.cast(feature/mx, tf.complex64)
Q=GT.TFGenerateRandomSinPhase(DataH, DataW)
IQ=feature*tf.reshape(Q,[DataH,DataW,channelsIn])
label=tf.concat([tf.real(IQ),tf.imag(IQ)],axis=2)
feature=label
# Id: index permutation implementing an fftshift along the width axis.
HalfDataW=DataW/2
Id=np.hstack([np.arange(HalfDataW,DataW), np.arange(0,HalfDataW)])
Id=Id.astype(int)
IQ2=tf.reshape(IQ,IQ.shape[0:2])
feature=tf.fft(IQ2)
feature = tf.gather(feature,Id,axis=1)
feature = tf.reshape(feature, [DataH, DataW, channelsIn])
feature=tf.concat([tf.real(feature),tf.imag(feature)],axis=2)
features, labels = tf.train.batch([feature, label],
batch_size=batch_size,
num_threads=4,
capacity = capacity_factor*batch_size,
name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
# 1DFTx: same as 1DFTy but FFT applied along the other axis via transpose.
if myParams.myDict['InputMode'] == '1DFTx':
feature = tf.reshape(feature, [256, 256, 1])
feature = tf.random_crop(feature, [DataH, DataW, channelsIn])
mm=tf.reduce_mean(feature)
mx=tf.reduce_max(feature)
mx=tf.maximum(mx,1)
#feature = tf.Print(feature,[mm,mx],message='QQQ:')
#assert_op = tf.Assert(tf.greater(mx, 0), [mx])
#with tf.control_dependencies([assert_op]):
feature = tf.cast(feature/mx, tf.complex64)
Q=GT.TFGenerateRandomSinPhase(DataH, DataW)
IQ=feature*tf.reshape(Q,[DataH,DataW,channelsIn])
label=tf.concat([tf.real(IQ),tf.imag(IQ)],axis=2)
feature=label
HalfDataH=DataH/2
Id=np.hstack([np.arange(HalfDataH,DataH), np.arange(0,HalfDataH)])
Id=Id.astype(int)
IQ2=tf.reshape(IQ,IQ.shape[0:2])
IQ2 = tf.transpose(IQ2, perm=[1, 0])
feature=tf.fft(IQ2)
feature = tf.gather(feature,Id,axis=1)
feature = tf.transpose(feature, perm=[1,0])
feature = tf.reshape(feature, [DataH, DataW, channelsIn])
feature=tf.concat([tf.real(feature),tf.imag(feature)],axis=2)
features, labels = tf.train.batch([feature, label],
batch_size=batch_size,
num_threads=4,
capacity = capacity_factor*batch_size,
name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
# 2DFT: FFT + shift along both axes (width first, then height via transpose).
if myParams.myDict['InputMode'] == '2DFT':
feature = tf.reshape(feature, [256, 256, 1])
feature = tf.random_crop(feature, [DataH, DataW, channelsIn])
mm=tf.reduce_mean(feature)
mx=tf.reduce_max(feature)
mx=tf.maximum(mx,1)
#feature = tf.Print(feature,[mm,mx],message='QQQ:')
#assert_op = tf.Assert(tf.greater(mx, 0), [mx])
#with tf.control_dependencies([assert_op]):
feature = tf.cast(feature/mx, tf.complex64)
Q=GT.TFGenerateRandomSinPhase(DataH, DataW)
IQ=feature*tf.reshape(Q,[DataH,DataW,channelsIn])
label=tf.concat([tf.real(IQ),tf.imag(IQ)],axis=2)
feature=label
HalfDataH=DataH/2
HalfDataW=DataW/2
IdH=np.hstack([np.arange(HalfDataH,DataH), np.arange(0,HalfDataH)])
IdH=IdH.astype(int)
IdW=np.hstack([np.arange(HalfDataW,DataW), np.arange(0,HalfDataW)])
IdW=IdW.astype(int)
IQ2=tf.reshape(IQ,IQ.shape[0:2])
IQ2=tf.fft(IQ2)
IQ2=tf.gather(IQ2,IdW,axis=1)
IQ2 = tf.transpose(IQ2, perm=[1, 0])
feature=tf.fft(IQ2)
feature = tf.gather(feature,IdH,axis=1)
feature = tf.transpose(feature, perm=[1,0])
feature = tf.reshape(feature, [DataH, DataW, channelsIn])
feature=tf.concat([tf.real(feature),tf.imag(feature)],axis=2)
features, labels = tf.train.batch([feature, label],
batch_size=batch_size,
num_threads=4,
capacity = capacity_factor*batch_size,
name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
# kKick: reads a second record from the same files and stacks both (with
# zero-filled imaginary channels) into one feature; label equals feature.
# NOTE(review): original indentation was lost in extraction; verify nesting.
if kKick:
filename_queue2 = tf.train.string_input_producer(filenames)
key2, value2 = reader.read(filename_queue2)
featuresA2 = tf.parse_single_example(
value2,
features={
'DataH': tf.FixedLenFeature([], tf.int64),
'DataW': tf.FixedLenFeature([], tf.int64),
'channelsIn': tf.FixedLenFeature([], tf.int64),
'data_raw': tf.FixedLenFeature([], tf.string)
})
feature2 = tf.decode_raw(featuresA2['data_raw'], tf.float32)
feature = tf.reshape(feature, [DataH, DataW, channelsIn])
feature2 = tf.reshape(feature2, [DataH, DataW, channelsIn])
feature.set_shape([None, None, channelsIn])
feature2.set_shape([None, None, channelsIn])
feature = tf.cast(feature, tf.float32)/tf.reduce_max(feature)
# NOTE(review): feature2 is divided by reduce_max(feature), not
# reduce_max(feature2) — possibly an intentional shared scale, possibly a
# copy-paste bug; confirm before changing.
feature2 = tf.cast(feature2, tf.float32)/tf.reduce_max(feature)
feature= tf.concat([feature,feature*0,feature2,feature2*0], 2)
label=feature
features, labels = tf.train.batch([feature, label],
batch_size=batch_size,
num_threads=4,
capacity = capacity_factor*batch_size,
name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
# RegridTry3: plain passthrough — reshape decoded data/labels and batch.
if myParams.myDict['InputMode'] == 'RegridTry3':
feature = tf.reshape(feature, [DataH, DataW, channelsIn])
feature = tf.cast(feature, tf.float32)
labels = tf.reshape(labels, [LabelsH, LabelsW, channelsOut])
label = tf.cast(labels, tf.float32)
# Using asynchronous queues
features, labels = tf.train.batch([feature, label],
batch_size=batch_size,
num_threads=4,
capacity = capacity_factor*batch_size,
name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
# RegridTry2: regrid via a precomputed neighbor map (NMapCR gather), labels
# resized from fixed 128x128 to [LabelsH, LabelsW].
if myParams.myDict['InputMode'] == 'RegridTry2':
FullData=scipy.io.loadmat(myParams.myDict['NMAP_FN'])
NMapCR=FullData['NMapCR']
NMapCR = tf.constant(NMapCR)
feature=tf.gather(feature,NMapCR,validate_indices=None,name=None)
feature = tf.reshape(feature, [DataH, DataW, channelsIn])
feature = tf.cast(feature, tf.float32)
labels = tf.reshape(labels, [128, 128, channelsOut])
# scipy.misc.imresize(arr, size, interp='bilinear', mode=None)
labels = tf.image.resize_images(labels,[LabelsH, LabelsW]) #,method=tf.ResizeMethod.BICUBIC,align_corners=False) # or BILINEAR
label = tf.cast(labels, tf.float32)
# Using asynchronous queues
features, labels = tf.train.batch([feature, label],
batch_size=batch_size,
num_threads=4,
capacity = capacity_factor*batch_size,
name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
# RegridTry1: same neighbor-map gather, labels used at native size.
if myParams.myDict['InputMode'] == 'RegridTry1':
# FullData=scipy.io.loadmat('/media/a/f38a5baa-d293-4a00-9f21-ea97f318f647/home/a/TF/NMapIndTesta.mat')
FullData=scipy.io.loadmat(myParams.myDict['NMAP_FN'])
NMapCR=FullData['NMapCR']
NMapCR = tf.constant(NMapCR)
feature=tf.gather(feature,NMapCR,validate_indices=None,name=None)
feature = tf.reshape(feature, [DataH, DataW, channelsIn])
feature = tf.cast(feature, tf.float32)
labels = tf.reshape(labels, [LabelsH, LabelsW, channelsOut])
label = tf.cast(labels, tf.float32)
# Using asynchronous queues
features, labels = tf.train.batch([feature, label],
batch_size=batch_size,
num_threads=4,
capacity = capacity_factor*batch_size,
name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
# SMASHTry1: plain passthrough, identical in structure to RegridTry3.
if myParams.myDict['InputMode'] == 'SMASHTry1':
feature = tf.reshape(feature, [DataH, DataW, channelsIn])
feature = tf.cast(feature, tf.float32)
labels = tf.reshape(labels, [LabelsH, LabelsW, channelsOut])
label = tf.cast(labels, tf.float32)
# Using asynchronous queues
features, labels = tf.train.batch([feature, label],
batch_size=batch_size,
num_threads=4,
capacity = capacity_factor*batch_size,
name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
"""if myParams.myDict['Mode'] == 'RegridTry1C2':
FullData=scipy.io.loadmat('/media/a/f38a5baa-d293-4a00-9f21-ea97f318f647/home/a/TF/NMapIndC.mat')
NMapCR=FullData['NMapCRC']
NMapCR = tf.constant(NMapCR)
feature=tf.gather(feature,NMapCR,validate_indices=None,name=None)
feature = tf.reshape(feature, [DataH, DataW, channelsIn,2])
feature = tf.cast(feature, tf.float32)
labels = tf.reshape(labels, [LabelsH, LabelsW, channelsOut])
label = tf.cast(labels, tf.float32)
# Using asynchronous queues
features, labels = tf.train.batch([feature, label],
batch_size=batch_size,
num_threads=4,
capacity = capacity_factor*batch_size,
name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels"""
feature = tf.reshape(feature, [DataH, DataW, channelsIn])
labels = tf.reshape(labels, [LabelsH, LabelsW, channelsOut])
#print('44')
#example.ParseFromString(serialized_example)
#x_1 = np.array(example.features.feature['X'].float_list.value)
# Convert from [depth, height, width] to [height, width, depth].
#result.uint8image = tf.transpose(depth_major, [1, 2, 0])
feature.set_shape([None, None, channelsIn])
labels.set_shape([None, None, channelsOut])
# Crop and other random augmentations
#image = tf.image.random_flip_left_right(image)
#image = tf.image.random_saturation(image, .95, 1.05)
#image = tf.image.random_brightness(image, .05)
#image = tf.image.random_contrast(image, .95, 1.05)
#print('55')
#wiggle = 8
#off_x, off_y = 25-wiggle, 60-wiggle
#crop_size = 128
#crop_size_plus = crop_size + 2*wiggle
#print('56')
#image = tf.image.crop_to_bounding_box(image, off_y, off_x, crop_size_plus, crop_size_plus)
#print('57')
#image = tf.image.crop_to_bounding_box(image, 1, 2, crop_size, crop_size)
#image = tf.random_crop(image, [crop_size, crop_size, 3])
feature = tf.reshape(feature, [DataH, DataW, channelsIn])
feature = tf.cast(feature, tf.float32) #/255.0
labels = tf.reshape(labels, [LabelsH, LabelsW, channelsOut])
label = tf.cast(labels, tf.float32) #/255.0
#if crop_size != image_size:
# image = tf.image.resize_area(image, [image_size, image_size])
# The feature is simply a Kx downscaled version
#K = 1
#downsampled = tf.image.resize_area(image, [image_size//K, image_size//K])
#feature = tf.reshape(downsampled, [image_size//K, image_size//K, 3])
#feature = tf.reshape(downsampled, [image_size//K, image_size//K, 3])
#label = tf.reshape(image, [image_size, image_size, 3])
#feature = tf.reshape(image, [image_size, image_size, channelsIn])
#feature = tf.reshape(image, [1, image_size*image_size*2, channelsIn])
#label = tf.reshape(labels, [image_size, image_size, channelsOut])
# Using asynchronous queues
features, labels = tf.train.batch([feature, label],
batch_size=batch_size,
num_threads=4,
capacity = capacity_factor*batch_size,
name='labels_and_features')
tf.train.start_queue_runners(sess=sess)
return features, labels
| 39.950906
| 382
| 0.596049
| 6,638
| 52,895
| 4.658783
| 0.076378
| 0.048601
| 0.0238
| 0.013129
| 0.793209
| 0.765271
| 0.734745
| 0.712078
| 0.705061
| 0.692805
| 0
| 0.039367
| 0.255639
| 52,895
| 1,323
| 383
| 39.981104
| 0.74607
| 0.095
| 0
| 0.68523
| 0
| 0.001211
| 0.076842
| 0.019783
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014528
| false
| 0
| 0.010896
| 0.013317
| 0.054479
| 0.030266
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
682a7da3140371c3e82f52b7fa63044d70248ea0
| 230
|
py
|
Python
|
pyrtshm/metrics.py
|
pappacena/pyrtshm
|
4330e4838582946aa3386d8f3485064f4e0b2b6e
|
[
"Unlicense"
] | null | null | null |
pyrtshm/metrics.py
|
pappacena/pyrtshm
|
4330e4838582946aa3386d8f3485064f4e0b2b6e
|
[
"Unlicense"
] | null | null | null |
pyrtshm/metrics.py
|
pappacena/pyrtshm
|
4330e4838582946aa3386d8f3485064f4e0b2b6e
|
[
"Unlicense"
] | null | null | null |
class Metrics:
    """Plain counter bag for packet/forwarding statistics.

    All counters default to 0 at the class level.  Field names suggest
    packet accounting and key-forwarding events; the increment sites are
    outside this file — confirm exact semantics against callers.
    Note: incrementing through an instance creates per-instance values,
    while incrementing through the class mutates the shared defaults.
    """
    received_packets: int = 0
    sent_packets: int = 0
    forward_key_set: int = 0
    forward_key_del: int = 0
    lost_packet_count: int = 0
    out_of_order_count: int = 0
    delete_unknown_key_count: int = 0
| 25.555556
| 37
| 0.682609
| 37
| 230
| 3.864865
| 0.513514
| 0.195804
| 0.188811
| 0.195804
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.040698
| 0.252174
| 230
| 8
| 38
| 28.75
| 0.790698
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
683b93e607e7684591eb8bc41c9e690720852a8e
| 1,088
|
py
|
Python
|
backend/massiliarp/migrations/0003_auto_20210925_0129.py
|
KonstantinosVasilopoulos/massiliarp
|
143cf04f76d282b1d09546d7e7fcaea259cc9b1e
|
[
"MIT"
] | null | null | null |
backend/massiliarp/migrations/0003_auto_20210925_0129.py
|
KonstantinosVasilopoulos/massiliarp
|
143cf04f76d282b1d09546d7e7fcaea259cc9b1e
|
[
"MIT"
] | null | null | null |
backend/massiliarp/migrations/0003_auto_20210925_0129.py
|
KonstantinosVasilopoulos/massiliarp
|
143cf04f76d282b1d09546d7e7fcaea259cc9b1e
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.7 on 2021-09-24 22:29
from django.db import migrations, models
class Migration(migrations.Migration):
    """Set the cost fields of army/navy units to DecimalField(max_digits=5, decimal_places=3)."""

    dependencies = [
        ('massiliarp', '0002_auto_20210925_0116'),
    ]

    # All four fields get the same DecimalField shape; only the model,
    # field name, and verbose label differ.
    operations = [
        migrations.AlterField(
            model_name=model,
            name=field,
            field=models.DecimalField(decimal_places=3, max_digits=5,
                                      verbose_name=label),
        )
        for model, field, label in (
            ('armyunit', 'recruitment_cost', 'Recruitment cost'),
            ('armyunit', 'upkeep_cost', 'Unit upkeep'),
            ('navyunit', 'recruitment_cost', 'Recruitment cost'),
            ('navyunit', 'upkeep_cost', 'Unit upkeep'),
        )
    ]
| 32
| 103
| 0.617647
| 113
| 1,088
| 5.743363
| 0.380531
| 0.123267
| 0.154083
| 0.178737
| 0.764253
| 0.764253
| 0.625578
| 0.625578
| 0.625578
| 0.625578
| 0
| 0.049118
| 0.270221
| 1,088
| 33
| 104
| 32.969697
| 0.768262
| 0.04136
| 0
| 0.740741
| 1
| 0
| 0.166186
| 0.022094
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.037037
| 0
| 0.148148
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
685328ecbb497ef0044b1c8d917116f8c481fe23
| 4,918
|
py
|
Python
|
tests/test_alg3d_bppt.py
|
RI-imaging/ODTbrain
|
063f9d1cf7803dd0dda9d68d2847f16c2496c205
|
[
"BSD-3-Clause"
] | 15
|
2016-01-22T20:08:10.000Z
|
2022-03-24T17:00:27.000Z
|
tests/test_alg3d_bppt.py
|
RI-imaging/ODTbrain
|
063f9d1cf7803dd0dda9d68d2847f16c2496c205
|
[
"BSD-3-Clause"
] | 15
|
2017-01-17T12:07:58.000Z
|
2022-02-02T22:30:33.000Z
|
tests/test_alg3d_bppt.py
|
RI-imaging/ODTbrain
|
063f9d1cf7803dd0dda9d68d2847f16c2496c205
|
[
"BSD-3-Clause"
] | 6
|
2017-10-29T20:05:42.000Z
|
2021-02-19T23:23:36.000Z
|
"""Test tilted backpropagation algorithm"""
import numpy as np
import odtbrain
from common_methods import create_test_sino_3d, create_test_sino_3d_tilted, \
cutout, get_test_parameter_set
def test_3d_backprop_phase_real():
    """Tilted backpropagation must match the untilted variant (real part only)."""
    sino, angles = create_test_sino_3d()
    parameters = get_test_parameter_set(2)

    def reconstruct(backprop):
        # Run `backprop` for every parameter set, cut out the region of
        # interest, and flatten everything into one float vector.
        volumes = [cutout(backprop(sino, angles, padval=0,
                                   dtype=np.float64, onlyreal=True, **p))
                   for p in parameters]
        return np.array(volumes).flatten().view(float)

    dataref = reconstruct(odtbrain.backpropagate_3d)
    data = reconstruct(odtbrain.backpropagate_3d_tilted)
    assert np.allclose(data, dataref)
def test_3d_backprop_pad():
    """Edge-padded tilted backpropagation must match the untilted variant."""
    sino, angles = create_test_sino_3d()
    parameters = get_test_parameter_set(2)

    def reconstruct(backprop):
        # Run `backprop` for every parameter set (complex output this
        # time: onlyreal=False) and flatten into one float vector.
        volumes = [cutout(backprop(sino, angles, padval="edge",
                                   dtype=np.float64, onlyreal=False, **p))
                   for p in parameters]
        return np.array(volumes).flatten().view(float)

    dataref = reconstruct(odtbrain.backpropagate_3d)
    data = reconstruct(odtbrain.backpropagate_3d_tilted)
    assert np.allclose(data, dataref)
def test_3d_backprop_plane_rotation():
    """
    A very soft test to check if planar rotation works fine
    in the reconstruction with tilted angles.
    """
    parameters = get_test_parameter_set(1)
    # These are specially selected angles that don't give high results.
    # Probably due to phase-wrapping, errors >2 may appear. Hence, we
    # call it a soft test.
    tilts = [1.1, 0.0, 0.234, 2.80922, -.29, 9.87]
    results = []
    for angz in tilts:
        sino, angles = create_test_sino_3d_tilted(tilt_plane=angz, A=21)
        cos_a, sin_a = np.cos(angz), np.sin(angz)
        rotmat = np.array([
            [cos_a, -sin_a, 0],
            [sin_a, cos_a, 0],
            [0, 0, 1],
        ])
        # rotate `tilted_axis` onto the y-z plane.
        tilted_axis = np.dot(rotmat, [0, 1, 0])
        volumes = [cutout(odtbrain.backpropagate_3d_tilted(
                       sino, angles,
                       padval="edge",
                       tilted_axis=tilted_axis,
                       padding=(False, False),
                       dtype=np.float64,
                       onlyreal=False,
                       **p))
                   for p in parameters]
        results.append(np.array(volumes).flatten().view(float))
    # Every tilt should reconstruct roughly the same object; compare each
    # result with its cyclic predecessor (ii-1 wraps to the last element).
    for ii in np.arange(len(results)):
        assert np.allclose(results[ii], results[ii - 1], atol=.2, rtol=.2)
def test_3d_backprop_plane_alignment_along_axes():
    """
    Tests whether the reconstruction is always aligned with
    the rotational axis (and not antiparallel).
    """
    p = get_test_parameter_set(1)[0]
    # These are specially selected angles that don't give high results.
    # Probably due to phase-wrapping, errors >2 may appear. Hence, we
    # call it a soft test.
    tilts = [0, np.pi/2, np.pi, 3*np.pi/2, 2*np.pi]
    results = []
    for angz in tilts:
        sino, angles = create_test_sino_3d_tilted(tilt_plane=angz, A=21)
        cos_a, sin_a = np.cos(angz), np.sin(angz)
        rotmat = np.array([
            [cos_a, -sin_a, 0],
            [sin_a, cos_a, 0],
            [0, 0, 1],
        ])
        # rotate `tilted_axis` onto the y-z plane.
        tilted_axis = np.dot(rotmat, [0, 1, 0])
        results.append(odtbrain.backpropagate_3d_tilted(
            sino, angles,
            padval="edge",
            tilted_axis=tilted_axis,
            padding=(False, False),
            dtype=np.float64,
            onlyreal=True,
            **p))
    # Compare each reconstruction with its cyclic predecessor.
    for ii in np.arange(len(results)):
        assert np.allclose(results[ii], results[ii - 1], atol=.2, rtol=.2)
if __name__ == "__main__":
    # Run every module-level callable whose name marks it as a test.
    _namespace = dict(locals())
    for _name, _obj in _namespace.items():
        if _name.startswith("test_") and hasattr(_obj, "__call__"):
            _obj()
| 36.42963
| 79
| 0.522367
| 577
| 4,918
| 4.30156
| 0.237435
| 0.04029
| 0.033844
| 0.038678
| 0.786865
| 0.767123
| 0.730056
| 0.730056
| 0.730056
| 0.730056
| 0
| 0.028092
| 0.370272
| 4,918
| 134
| 80
| 36.701493
| 0.773329
| 0.132981
| 0
| 0.684783
| 0
| 0
| 0.008789
| 0
| 0
| 0
| 0
| 0
| 0.043478
| 1
| 0.043478
| false
| 0
| 0.032609
| 0
| 0.076087
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6867d23e6666bc8226d27174a7b03b95bd337ddc
| 107
|
py
|
Python
|
prototypes/test-examples/nose/test_generator.py
|
mikej888/recipy-test
|
6db030fb4013baf57ec7fb78c287f9f0fbbc28a0
|
[
"Apache-2.0"
] | null | null | null |
prototypes/test-examples/nose/test_generator.py
|
mikej888/recipy-test
|
6db030fb4013baf57ec7fb78c287f9f0fbbc28a0
|
[
"Apache-2.0"
] | null | null | null |
prototypes/test-examples/nose/test_generator.py
|
mikej888/recipy-test
|
6db030fb4013baf57ec7fb78c287f9f0fbbc28a0
|
[
"Apache-2.0"
] | 2
|
2016-08-17T12:17:56.000Z
|
2021-12-17T09:58:22.000Z
|
def test_even():
    # Nose-style test generator: yields one (check, argument) pair per
    # integer in 0..5; the runner calls is_even(i) for each.
    yield from ((is_even, n) for n in range(6))
def is_even(i):
    """Nose check helper: fail with AssertionError unless *i* is even."""
    remainder = i % 2
    assert remainder == 0
| 15.285714
| 25
| 0.542056
| 21
| 107
| 2.619048
| 0.619048
| 0.218182
| 0.254545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 0.327103
| 107
| 6
| 26
| 17.833333
| 0.708333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.4
| false
| 0
| 0
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
68683d081825f52a805993ccdd630e858374685c
| 61
|
py
|
Python
|
effects/__init__.py
|
wegfawefgawefg/MonsterCatcher
|
e34a584acc0b0a3ad5ccdaf053569371687c417f
|
[
"BSD-3-Clause"
] | 1
|
2021-03-28T02:14:29.000Z
|
2021-03-28T02:14:29.000Z
|
effects/__init__.py
|
wegfawefgawefg/MonsterCatcher
|
e34a584acc0b0a3ad5ccdaf053569371687c417f
|
[
"BSD-3-Clause"
] | null | null | null |
effects/__init__.py
|
wegfawefgawefg/MonsterCatcher
|
e34a584acc0b0a3ad5ccdaf053569371687c417f
|
[
"BSD-3-Clause"
] | 1
|
2021-03-28T02:30:53.000Z
|
2021-03-28T02:30:53.000Z
|
from .regening import Regening
from .swelling import Swelling
| 30.5
| 30
| 0.852459
| 8
| 61
| 6.5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114754
| 61
| 2
| 31
| 30.5
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d7d0d8e1771abefd9e2a64024422e0ebba391c2c
| 168
|
py
|
Python
|
qa_tools/urls.py
|
celelstine/best-flight
|
287a13b795594e2a90a75fd89b1a693a742e6796
|
[
"MIT"
] | null | null | null |
qa_tools/urls.py
|
celelstine/best-flight
|
287a13b795594e2a90a75fd89b1a693a742e6796
|
[
"MIT"
] | 9
|
2020-02-12T00:21:32.000Z
|
2021-09-08T01:09:32.000Z
|
qa_tools/urls.py
|
celelstine/best-flight
|
287a13b795594e2a90a75fd89b1a693a742e6796
|
[
"MIT"
] | null | null | null |
from django.urls import path
from qa_tools.views import create_test_user
# URL routes for the QA tooling app: a single endpoint that creates a
# test user (view imported above).
urlpatterns = [
    path('create_test_user/', create_test_user, name='create_test_user'),
]
| 18.666667
| 73
| 0.77381
| 25
| 168
| 4.84
| 0.52
| 0.330579
| 0.46281
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130952
| 168
| 8
| 74
| 21
| 0.828767
| 0
| 0
| 0
| 0
| 0
| 0.196429
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
0bc87cf7c814a356edbdbea49ba294b674ab0a9b
| 251
|
py
|
Python
|
Draw/Speed.py
|
pydys/rasberry-inav-fpv-osd
|
c7203a866ea88f6ba94642450b033bd65a4664f7
|
[
"MIT"
] | 5
|
2018-12-08T06:58:42.000Z
|
2021-12-28T05:53:38.000Z
|
Draw/Speed.py
|
pydys/rasberry-inav-fpv-osd
|
c7203a866ea88f6ba94642450b033bd65a4664f7
|
[
"MIT"
] | null | null | null |
Draw/Speed.py
|
pydys/rasberry-inav-fpv-osd
|
c7203a866ea88f6ba94642450b033bd65a4664f7
|
[
"MIT"
] | 1
|
2018-12-08T18:54:52.000Z
|
2018-12-08T18:54:52.000Z
|
import cv2
class Speed:
    """OSD widget that draws the speed display's screen region."""

    def __init__(self):
        pass

    # ((x1, y1), (x2, y2)): opposite corners of the widget's rectangle.
    drawing_area = ((30, 100), (130, 360))

    @staticmethod
    def draw(img, hud, color):
        """Draw the widget's rectangular outline (thickness 1) onto *img*.

        `hud` is accepted but unused here — presumably the data source for
        the speed value; confirm against callers.
        NOTE(review): cv2.CV_AA exists only in OpenCV 2.x (renamed
        cv2.LINE_AA in 3.x+) — verify the targeted cv2 version.
        """
        cv2.rectangle(img, Speed.drawing_area[0], Speed.drawing_area[1], color, 1, cv2.CV_AA)
| 19.307692
| 93
| 0.621514
| 36
| 251
| 4.111111
| 0.666667
| 0.222973
| 0.216216
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089005
| 0.239044
| 251
| 12
| 94
| 20.916667
| 0.685864
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.125
| 0.125
| 0
| 0.625
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
0bd2fd17b7df86fb9e6c8d6b69ff15ec352174de
| 121
|
py
|
Python
|
teacher/admin.py
|
Swarda6/TCS-Project
|
e29e2b136f333128b9169f8ad284e019b1cd7fb4
|
[
"MIT"
] | null | null | null |
teacher/admin.py
|
Swarda6/TCS-Project
|
e29e2b136f333128b9169f8ad284e019b1cd7fb4
|
[
"MIT"
] | null | null | null |
teacher/admin.py
|
Swarda6/TCS-Project
|
e29e2b136f333128b9169f8ad284e019b1cd7fb4
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Teacher
# Make the Teacher model manageable through the Django admin
# (default ModelAdmin options).
admin.site.register(Teacher)
# Register your models here.
| 17.285714
| 32
| 0.801653
| 17
| 121
| 5.705882
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132231
| 121
| 6
| 33
| 20.166667
| 0.92381
| 0.214876
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
040a2c8bcc4b2b7e85a96a3dfc3450f34be0136d
| 141
|
py
|
Python
|
polymorphism_and_abstraction/animals/cat.py
|
ivan-yosifov88/python_oop_june_2021
|
7ae6126065abbcce7ce97c86d1150ae307360249
|
[
"MIT"
] | 1
|
2021-08-03T19:14:24.000Z
|
2021-08-03T19:14:24.000Z
|
polymorphism_and_abstraction/animals/cat.py
|
ivan-yosifov88/python_oop_june_2021
|
7ae6126065abbcce7ce97c86d1150ae307360249
|
[
"MIT"
] | null | null | null |
polymorphism_and_abstraction/animals/cat.py
|
ivan-yosifov88/python_oop_june_2021
|
7ae6126065abbcce7ce97c86d1150ae307360249
|
[
"MIT"
] | null | null | null |
from animals.animal import Animal
class Cat(Animal):
    """Concrete Animal whose sound is a fixed meow string."""

    # Literal returned by make_sound().
    MAKE_SOUND = "Meow meow!"

    def make_sound(self):
        """Return this species' fixed sound string."""
        return Cat.MAKE_SOUND
| 17.625
| 33
| 0.687943
| 20
| 141
| 4.7
| 0.6
| 0.287234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.22695
| 141
| 8
| 34
| 17.625
| 0.862385
| 0
| 0
| 0
| 0
| 0
| 0.070423
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.2
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
041cb5d4f07fa1e4939bcadca3b1a9932c9aac04
| 178
|
py
|
Python
|
manet/transform/__init__.py
|
jonasteuwen/manet-old
|
fb20c98f7e5c89a5ffe89d851ee84e7b65c5e229
|
[
"BSD-2-Clause"
] | 1
|
2021-02-23T04:51:19.000Z
|
2021-02-23T04:51:19.000Z
|
manet/transform/__init__.py
|
jonasteuwen/manet-old
|
fb20c98f7e5c89a5ffe89d851ee84e7b65c5e229
|
[
"BSD-2-Clause"
] | null | null | null |
manet/transform/__init__.py
|
jonasteuwen/manet-old
|
fb20c98f7e5c89a5ffe89d851ee84e7b65c5e229
|
[
"BSD-2-Clause"
] | 1
|
2021-02-23T04:51:20.000Z
|
2021-02-23T04:51:20.000Z
|
# encoding: utf-8
from .rescale_transform import random_rescale_2d
from .rotate_transform import random_rotate_2d
# Public API of the transform package (the two 2-D augmentations above).
__all__ = ['random_rescale_2d',
           'random_rotate_2d']
| 25.428571
| 48
| 0.775281
| 24
| 178
| 5.166667
| 0.458333
| 0.241935
| 0.33871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033113
| 0.151685
| 178
| 6
| 49
| 29.666667
| 0.788079
| 0.08427
| 0
| 0
| 0
| 0
| 0.204969
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
f0944ec47e7d399c04541b1a3ed41b4c560b7db1
| 134
|
py
|
Python
|
accounts/admin.py
|
bodealamu/create_simple_blog_using_django
|
2fcf060e1d940b437eaabd45c452aa5f9257fad1
|
[
"MIT"
] | null | null | null |
accounts/admin.py
|
bodealamu/create_simple_blog_using_django
|
2fcf060e1d940b437eaabd45c452aa5f9257fad1
|
[
"MIT"
] | null | null | null |
accounts/admin.py
|
bodealamu/create_simple_blog_using_django
|
2fcf060e1d940b437eaabd45c452aa5f9257fad1
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from accounts.models import CustomUser
# Register your models here.
admin.site.register(CustomUser)
| 22.333333
| 38
| 0.828358
| 18
| 134
| 6.166667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11194
| 134
| 5
| 39
| 26.8
| 0.932773
| 0.19403
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f0be42e24437da59de466928e7fd8ed49c92559d
| 146
|
py
|
Python
|
mlcollection/datasets/__init__.py
|
posborne/mlcollection
|
65e1d0902ad0a3e5a53d98fb68432ce98ff970a3
|
[
"MIT"
] | 2
|
2015-07-24T23:53:18.000Z
|
2015-08-18T10:35:16.000Z
|
mlcollection/datasets/__init__.py
|
posborne/mlcollection
|
65e1d0902ad0a3e5a53d98fb68432ce98ff970a3
|
[
"MIT"
] | null | null | null |
mlcollection/datasets/__init__.py
|
posborne/mlcollection
|
65e1d0902ad0a3e5a53d98fb68432ce98ff970a3
|
[
"MIT"
] | null | null | null |
# TODO: add some datasets that can be used in examples, testing, and for those
# just getting started out with the library (to get off the ground)
| 73
| 78
| 0.767123
| 26
| 146
| 4.307692
| 0.961538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184932
| 146
| 2
| 79
| 73
| 0.941176
| 0.972603
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0.5
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f0ca21b103fba08e983d6ddce357fdde736bf141
| 52
|
py
|
Python
|
test2.py
|
vishabsingh/Python
|
04514c2e6fd8471a299860d6457146bf961ec86b
|
[
"Apache-2.0"
] | null | null | null |
test2.py
|
vishabsingh/Python
|
04514c2e6fd8471a299860d6457146bf961ec86b
|
[
"Apache-2.0"
] | null | null | null |
test2.py
|
vishabsingh/Python
|
04514c2e6fd8471a299860d6457146bf961ec86b
|
[
"Apache-2.0"
] | 2
|
2020-10-27T06:19:16.000Z
|
2020-10-27T13:42:08.000Z
|
import keyword

# Print the list of Python reserved words.  The original also evaluated
# `keyword.kwlist` on a line of its own — a no-op expression statement —
# which has been removed.
print(keyword.kwlist)
| 13
| 21
| 0.846154
| 7
| 52
| 6.285714
| 0.571429
| 0.590909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 52
| 3
| 22
| 17.333333
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
f0d48fa491fc4192fa53ed4241586dbab4b4b180
| 173
|
py
|
Python
|
gitsome_test/testdata/gitsome/tmp.py
|
rahman-mahmudur/PyART
|
36591cd10b2b7a560bbcb47a6cf744b72466f92a
|
[
"Apache-2.0"
] | null | null | null |
gitsome_test/testdata/gitsome/tmp.py
|
rahman-mahmudur/PyART
|
36591cd10b2b7a560bbcb47a6cf744b72466f92a
|
[
"Apache-2.0"
] | null | null | null |
gitsome_test/testdata/gitsome/tmp.py
|
rahman-mahmudur/PyART
|
36591cd10b2b7a560bbcb47a6cf744b72466f92a
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import unicode_literals
from __future__ import print_function
from .githubcli import GitHubCli
def cli():
    """Type-checker fixture: bind a GitHubCli and reveal its inferred type.

    `reveal_type` exists only during static analysis (mypy-style tools);
    calling this function at runtime raises NameError.  That appears
    intentional — the file lives under a `testdata` directory — but
    confirm with the test harness.
    """
    github = GitHubCli()
    reveal_type(github)
| 19.222222
| 39
| 0.786127
| 21
| 173
| 5.952381
| 0.619048
| 0.16
| 0.256
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16185
| 173
| 9
| 40
| 19.222222
| 0.862069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.5
| 0
| 0.666667
| 0.166667
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0b160b45378229e010540babaed9b003242b822b
| 37
|
py
|
Python
|
server/sockets/__init__.py
|
noanflaherty/self-replicating-repo
|
4977f24e0554cd160944f1449f3928e9f156606c
|
[
"MIT"
] | null | null | null |
server/sockets/__init__.py
|
noanflaherty/self-replicating-repo
|
4977f24e0554cd160944f1449f3928e9f156606c
|
[
"MIT"
] | 3
|
2021-03-08T22:57:06.000Z
|
2022-02-12T04:06:35.000Z
|
server/sockets/__init__.py
|
noanflaherty/self-replicating-repo
|
4977f24e0554cd160944f1449f3928e9f156606c
|
[
"MIT"
] | null | null | null |
from server.sockets.sockets import *
| 18.5
| 36
| 0.810811
| 5
| 37
| 6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
9bf25baab2e9ffc493799d81e00929e493d33798
| 735
|
py
|
Python
|
Python/app.python/aula7/aula7a.py
|
jacksontenorio8/python
|
a484f019960faa5aa29177eff44a1bb1e3f3b9d0
|
[
"MIT"
] | null | null | null |
Python/app.python/aula7/aula7a.py
|
jacksontenorio8/python
|
a484f019960faa5aa29177eff44a1bb1e3f3b9d0
|
[
"MIT"
] | null | null | null |
Python/app.python/aula7/aula7a.py
|
jacksontenorio8/python
|
a484f019960faa5aa29177eff44a1bb1e3f3b9d0
|
[
"MIT"
] | null | null | null |
'''
Por convenção:
- Função é tudo que retorna valor
- Método não retorna valor
'''
class Calculadora:
    """Two-operand calculator.

    Per the original notes' convention: a "function" returns a value,
    a "method" does not — everything here returns a value.
    """

    def __init__(self, num1, num2):
        # Keep both operands for the arithmetic accessors below.
        self.valorA, self.valorB = num1, num2

    def soma(self):
        """Return the sum of the two operands."""
        return self.valorA + self.valorB

    def subtracao(self):
        """Return valorA minus valorB."""
        return self.valorA - self.valorB

    def multiplicacao(self):
        """Return the product of the two operands."""
        return self.valorA * self.valorB

    def divisao(self):
        """Return valorA divided by valorB (true division)."""
        return self.valorA / self.valorB
if __name__ == '__main__':
    # Demo: show both operands, then the result of every operation,
    # in the same order as before (soma, subtracao, multiplicacao, divisao).
    calc = Calculadora(10, 20)
    print(calc.valorA)
    print(calc.valorB)
    for operacao in (calc.soma, calc.subtracao,
                     calc.multiplicacao, calc.divisao):
        print(operacao())
| 22.272727
| 40
| 0.647619
| 82
| 735
| 5.658537
| 0.365854
| 0.206897
| 0.12069
| 0.172414
| 0.278017
| 0.278017
| 0.213362
| 0
| 0
| 0
| 0
| 0.01444
| 0.246259
| 735
| 33
| 41
| 22.272727
| 0.823105
| 0.102041
| 0
| 0
| 0
| 0
| 0.012251
| 0
| 0
| 0
| 0
| 0.030303
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.2
| 0.5
| 0.3
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
5019d16fc9f91f6dee1f7938b8b9671d8cdab643
| 44
|
py
|
Python
|
tests/__init__.py
|
pete-twibill/alertlogic-sdk-python
|
5449dc3db312ba42de43cd8c9d86a68732c4c319
|
[
"MIT"
] | 4
|
2020-05-14T11:18:07.000Z
|
2021-09-30T13:20:34.000Z
|
tests/__init__.py
|
pete-twibill/alertlogic-sdk-python
|
5449dc3db312ba42de43cd8c9d86a68732c4c319
|
[
"MIT"
] | 26
|
2020-05-18T14:58:12.000Z
|
2021-11-29T16:57:04.000Z
|
tests/__init__.py
|
pete-twibill/alertlogic-sdk-python
|
5449dc3db312ba42de43cd8c9d86a68732c4c319
|
[
"MIT"
] | 23
|
2020-02-10T09:14:05.000Z
|
2022-01-27T23:44:54.000Z
|
"""Unit tests for alertlogic-sdk-python."""
| 22
| 43
| 0.704545
| 6
| 44
| 5.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 44
| 1
| 44
| 44
| 0.775
| 0.840909
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
50399c8e2b615d919b5e9a41a2b21ab75ce23438
| 25
|
py
|
Python
|
sim/__init__.py
|
AdrienBenamira/k_coloring_graph_AlphaZeroGo
|
c8f3271a2b117c95616b5752e134114ee8b20294
|
[
"MIT"
] | 1
|
2020-04-05T03:12:22.000Z
|
2020-04-05T03:12:22.000Z
|
sim/__init__.py
|
AdrienBenamira/k_coloring_graph_AlphaZeroGo
|
c8f3271a2b117c95616b5752e134114ee8b20294
|
[
"MIT"
] | null | null | null |
sim/__init__.py
|
AdrienBenamira/k_coloring_graph_AlphaZeroGo
|
c8f3271a2b117c95616b5752e134114ee8b20294
|
[
"MIT"
] | null | null | null |
from sim.EnvTest import *
| 25
| 25
| 0.8
| 4
| 25
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12
| 25
| 1
| 25
| 25
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
5044c4b3d8cc11b1a47761d566e20645b2695f40
| 262
|
py
|
Python
|
pysectprop/general/__init__.py
|
Pretsdaya/pysectprop
|
e01a04c13a99e5430b235d745975c27ac38de5ac
|
[
"MIT"
] | 1
|
2022-01-30T05:59:50.000Z
|
2022-01-30T05:59:50.000Z
|
pysectprop/general/__init__.py
|
Pretsdaya/pysectprop
|
e01a04c13a99e5430b235d745975c27ac38de5ac
|
[
"MIT"
] | null | null | null |
pysectprop/general/__init__.py
|
Pretsdaya/pysectprop
|
e01a04c13a99e5430b235d745975c27ac38de5ac
|
[
"MIT"
] | 1
|
2021-07-01T12:37:33.000Z
|
2021-07-01T12:37:33.000Z
|
from .generalsection import GeneralSection
from .materialsection import MaterialSection
from .compositesection import CompositeSection
from .cripplingsection import CripplingSection
from .thinwalledsection import ThinWalledSection
from .material import Material
| 37.428571
| 48
| 0.885496
| 24
| 262
| 9.666667
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091603
| 262
| 6
| 49
| 43.666667
| 0.97479
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5051b24b8ced7d1aadb892cc34574b6440ed0a24
| 81
|
py
|
Python
|
engineer/render/registry.py
|
lingtengqiu/Open-PIFuhd
|
3a66b647bcf5591e818af62735e64a93c4aaef85
|
[
"MIT"
] | 191
|
2021-03-18T08:09:06.000Z
|
2022-03-21T05:48:02.000Z
|
engineer/render/registry.py
|
lingtengqiu/Open-PIFuhd
|
3a66b647bcf5591e818af62735e64a93c4aaef85
|
[
"MIT"
] | 9
|
2021-03-18T10:34:25.000Z
|
2022-01-05T19:22:48.000Z
|
engineer/render/registry.py
|
lingtengqiu/Open-PIFuhd
|
3a66b647bcf5591e818af62735e64a93c4aaef85
|
[
"MIT"
] | 26
|
2021-03-18T08:09:08.000Z
|
2022-03-28T01:07:19.000Z
|
from engineer.registry import Registry
# Module-level registry instance named 'normal_render'; presumably render
# implementations register themselves into it — confirm against users.
NORMAL_RENDER = Registry('normal_render')
| 27
| 41
| 0.839506
| 10
| 81
| 6.6
| 0.6
| 0.424242
| 0.606061
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08642
| 81
| 3
| 41
| 27
| 0.891892
| 0
| 0
| 0
| 0
| 0
| 0.158537
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
5063c6cf6e477fe6323a892196f1cab3cecd941a
| 351
|
py
|
Python
|
play.py
|
akhtarhameed/CP_PROJECT_521
|
4c7a95cdaa9394c73705f8b63efbebbf72dda54e
|
[
"MIT"
] | null | null | null |
play.py
|
akhtarhameed/CP_PROJECT_521
|
4c7a95cdaa9394c73705f8b63efbebbf72dda54e
|
[
"MIT"
] | 1
|
2019-05-05T17:18:40.000Z
|
2019-05-05T17:18:40.000Z
|
play.py
|
akhtarhameed/CP_PROJECT_521
|
4c7a95cdaa9394c73705f8b63efbebbf72dda54e
|
[
"MIT"
] | null | null | null |
import RPi.GPIO as GPIO  # package name is case-sensitive: "RPi", not "RPI"
import pygame

pygame.init()

# Raw string: the original plain string contained "\U...", an invalid
# unicode escape (SyntaxError).  The original also never *called* Sound —
# it assigned the class and left the path stranded on the next line.
paino_tile_1 = pygame.mixer.Sound(
    r"C:\Users\cc\Downloads\Music_Notes\C_S.wav")

# NOTE(review): btn1..btn6 are not defined in this file — presumably
# button objects created elsewhere; confirm.  `when_pressed` needs a
# callable: the original assigned print(...)'s return value (None) and
# printed 'btn1' six times immediately.  Each handler now reports its
# own button when pressed.
btn1.when_pressed = lambda: print('btn1')
btn2.when_pressed = lambda: print('btn2')
btn3.when_pressed = lambda: print('btn3')
btn4.when_pressed = lambda: print('btn4')
btn5.when_pressed = lambda: print('btn5')
btn6.when_pressed = lambda: print('btn6')
| 27
| 46
| 0.720798
| 54
| 351
| 4.5
| 0.518519
| 0.271605
| 0.395062
| 0.493827
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042345
| 0.125356
| 351
| 13
| 47
| 27
| 0.749186
| 0
| 0
| 0
| 0
| 0
| 0.191176
| 0.120588
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.181818
| null | null | 0.545455
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
aca04a47a5bb36b07681f38eb90039559c379629
| 1,095
|
py
|
Python
|
src/rapidapi/symbols.py
|
Spy7Dragon/rapidapi_python
|
f69efa17eb867ab25e12926bf4a69d88400ae231
|
[
"MIT"
] | null | null | null |
src/rapidapi/symbols.py
|
Spy7Dragon/rapidapi_python
|
f69efa17eb867ab25e12926bf4a69d88400ae231
|
[
"MIT"
] | null | null | null |
src/rapidapi/symbols.py
|
Spy7Dragon/rapidapi_python
|
f69efa17eb867ab25e12926bf4a69d88400ae231
|
[
"MIT"
] | null | null | null |
import requests
class Symbols:
    """Thin client for the RapidAPI `symbols/` endpoints.

    `client` must provide `build_url(extension, method)` and a `headers`
    mapping (used as HTTP request headers).
    """

    url_extension = 'symbols/'

    def __init__(self, client):
        # Keep the API client; all requests are built and signed through it.
        self.client = client

    def _request(self, url_method, query_string):
        """Issue a GET to `url_method` with `query_string` and return the decoded JSON.

        Shared by every public getter — the three originals duplicated
        this build-url/request/json sequence verbatim.
        """
        url = self.client.build_url(Symbols.url_extension, url_method)
        response = requests.request("GET", url, headers=self.client.headers, params=query_string)
        return response.json()

    def get_meta_data(self, symbol):
        """Return metadata (decoded JSON) for a single `symbol` string."""
        return self._request('get-meta-data', {'symbol': symbol})

    def get_profile(self, symbols):
        """Return profiles (decoded JSON) for an iterable of symbol strings."""
        return self._request('get-profile', {"symbols": ",".join(symbols)})

    def get_summary(self, symbols):
        """Return summaries (decoded JSON) for an iterable of symbol strings."""
        return self._request('get-summary', {"symbols": ",".join(symbols)})
| 36.5
| 97
| 0.663927
| 132
| 1,095
| 5.30303
| 0.204545
| 0.114286
| 0.108571
| 0.077143
| 0.768571
| 0.702857
| 0.702857
| 0.702857
| 0.702857
| 0.702857
| 0
| 0
| 0.212785
| 1,095
| 29
| 98
| 37.758621
| 0.812065
| 0
| 0
| 0.478261
| 0
| 0
| 0.06758
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.173913
| false
| 0
| 0.043478
| 0
| 0.434783
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
acc83c98c434f04f90afb8366dbacd56383c6622
| 148
|
py
|
Python
|
petit_downloader/__init__.py
|
Plawn/petit_downloader
|
9fa93a54142509de3fc7da5cf0f01b1c18f328ae
|
[
"Apache-2.0"
] | 1
|
2018-11-29T21:10:45.000Z
|
2018-11-29T21:10:45.000Z
|
petit_downloader/__init__.py
|
Plawn/Fancy_downloader
|
9fa93a54142509de3fc7da5cf0f01b1c18f328ae
|
[
"Apache-2.0"
] | 1
|
2021-06-02T14:46:39.000Z
|
2021-06-02T14:46:39.000Z
|
petit_downloader/__init__.py
|
Plawn/petit_downloader
|
9fa93a54142509de3fc7da5cf0f01b1c18f328ae
|
[
"Apache-2.0"
] | null | null | null |
from .download import Download, from_save
from .download_container import DownloadContainer
from .download_methods import METHODS
# Version identifiers are conventionally strings (PEP 396 / packaging
# tools parse component-wise); a float like 0.2 breaks such tooling.
__version__ = "0.2"
| 29.6
| 49
| 0.851351
| 19
| 148
| 6.263158
| 0.526316
| 0.302521
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015152
| 0.108108
| 148
| 5
| 50
| 29.6
| 0.886364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
acd8d694ff24c1d742ed2b47a36b7a2124808cf1
| 79
|
py
|
Python
|
telegram/services/__init__.py
|
LucasBiason/TelegramRobot
|
91d62ad329d6620530617f1ba4f994bf00e7f156
|
[
"MIT"
] | null | null | null |
telegram/services/__init__.py
|
LucasBiason/TelegramRobot
|
91d62ad329d6620530617f1ba4f994bf00e7f156
|
[
"MIT"
] | null | null | null |
telegram/services/__init__.py
|
LucasBiason/TelegramRobot
|
91d62ad329d6620530617f1ba4f994bf00e7f156
|
[
"MIT"
] | null | null | null |
from telegram import TelegramBot
from stackoverflow import StackOverFlowService
| 39.5
| 46
| 0.911392
| 8
| 79
| 9
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088608
| 79
| 2
| 46
| 39.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
acfa874cefa76e760e3a55e3edff89a795cb5be9
| 166
|
py
|
Python
|
sideboard/run_server.py
|
bitbyt3r/sideboard
|
45e13011a664543352d51ce073cfa9635c748bb7
|
[
"BSD-3-Clause"
] | 4
|
2015-02-18T20:38:42.000Z
|
2021-11-17T10:10:34.000Z
|
sideboard/run_server.py
|
bitbyt3r/sideboard
|
45e13011a664543352d51ce073cfa9635c748bb7
|
[
"BSD-3-Clause"
] | 84
|
2015-07-23T12:23:24.000Z
|
2018-08-04T05:09:30.000Z
|
sideboard/run_server.py
|
bitbyt3r/sideboard
|
45e13011a664543352d51ce073cfa9635c748bb7
|
[
"BSD-3-Clause"
] | 10
|
2015-02-10T13:38:18.000Z
|
2020-05-23T20:01:36.000Z
|
from __future__ import unicode_literals
import cherrypy
import sideboard.server

# Entry point: start the CherryPy engine and block until shutdown.
# NOTE(review): sideboard.server appears to be imported for its side
# effects (registering the application) — confirm against that module.
if __name__ == '__main__':
    engine = cherrypy.engine
    engine.start()
    engine.block()
| 16.6
| 39
| 0.76506
| 19
| 166
| 6
| 0.736842
| 0.245614
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150602
| 166
| 9
| 40
| 18.444444
| 0.808511
| 0
| 0
| 0
| 0
| 0
| 0.048193
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
acfd205dc844bafd8d4c9ea6d7e53dc768c5c8ff
| 57
|
py
|
Python
|
recipe/run_test.py
|
regro-cf-autotick-bot/lxml-stubs-feedstock
|
273100ab1ea7657519c14a9b5c96de0760570263
|
[
"BSD-3-Clause"
] | null | null | null |
recipe/run_test.py
|
regro-cf-autotick-bot/lxml-stubs-feedstock
|
273100ab1ea7657519c14a9b5c96de0760570263
|
[
"BSD-3-Clause"
] | 4
|
2021-05-21T11:59:20.000Z
|
2022-03-04T12:46:51.000Z
|
recipe/run_test.py
|
regro-cf-autotick-bot/lxml-stubs-feedstock
|
273100ab1ea7657519c14a9b5c96de0760570263
|
[
"BSD-3-Clause"
] | 1
|
2021-05-21T11:58:56.000Z
|
2021-05-21T11:58:56.000Z
|
# Smoke test for the conda package: succeed iff the installed package can
# be located and imported.
import importlib
# "lxml-stubs" contains a hyphen, which is not a valid Python identifier,
# so a plain `import` statement cannot be used; importlib.import_module
# accepts the literal distribution/module name instead.
importlib.import_module("lxml-stubs")
| 11.4
| 37
| 0.807018
| 7
| 57
| 6.428571
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087719
| 57
| 4
| 38
| 14.25
| 0.865385
| 0
| 0
| 0
| 0
| 0
| 0.175439
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4a044e312836b61e6a92e1fece0daa1d977fa5f6
| 80
|
py
|
Python
|
Python-Course/Workshops/October3rd/factorial.py
|
cmlimm/uni-projects
|
b63ac71cc0b971c7f035096a6bd15b0cbb5bb9f6
|
[
"MIT"
] | null | null | null |
Python-Course/Workshops/October3rd/factorial.py
|
cmlimm/uni-projects
|
b63ac71cc0b971c7f035096a6bd15b0cbb5bb9f6
|
[
"MIT"
] | null | null | null |
Python-Course/Workshops/October3rd/factorial.py
|
cmlimm/uni-projects
|
b63ac71cc0b971c7f035096a6bd15b0cbb5bb9f6
|
[
"MIT"
] | 1
|
2020-10-29T18:31:32.000Z
|
2020-10-29T18:31:32.000Z
|
def factorial(n):
    """Return n! (the factorial of n) for a non-negative integer n.

    Args:
        n: non-negative integer.

    Returns:
        n! as an int (1 for n == 0).

    Raises:
        ValueError: if n is negative. The original recursive version
            recursed forever (until RecursionError) on negative input.
    """
    if n < 0:
        raise ValueError("factorial() not defined for negative values")
    # Iterative product instead of recursion: same results, but no
    # RecursionError for large n.
    result = 1
    for k in range(2, n + 1):
        result *= k
    return result
| 16
| 29
| 0.525
| 13
| 80
| 3.230769
| 0.538462
| 0.47619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057692
| 0.35
| 80
| 4
| 30
| 20
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
4a11372478483a8159ac0267ea70862a9765a2b0
| 23,546
|
py
|
Python
|
SDK/test_integration_config_helpers.py
|
queueit/KnownUser.V3.Python
|
2e9e429451221b650209dabd6df6b3e420a8ac34
|
[
"MIT"
] | 2
|
2019-07-04T11:09:45.000Z
|
2021-04-02T17:28:15.000Z
|
SDK/test_integration_config_helpers.py
|
queueit/KnownUser.V3.Python
|
2e9e429451221b650209dabd6df6b3e420a8ac34
|
[
"MIT"
] | null | null | null |
SDK/test_integration_config_helpers.py
|
queueit/KnownUser.V3.Python
|
2e9e429451221b650209dabd6df6b3e420a8ac34
|
[
"MIT"
] | 3
|
2019-06-30T18:51:32.000Z
|
2021-11-15T19:57:11.000Z
|
import unittest
from queueit_knownuserv3.integration_config_helpers import *
from queueit_knownuserv3.http_context_providers import HttpContextProvider
class HttpContextProviderMock(HttpContextProvider):
    """Test double for HttpContextProvider backed by plain dicts.

    Tests assign ``headers``, ``cookies`` and ``body`` directly before
    exercising the validator helpers.
    """
    def __init__(self):
        self.headers = {}
        self.cookies = {}
        self.body = ""
    def getHeader(self, header_name):
        # dict.get yields None for a missing key — same contract as the
        # explicit membership check it replaces.
        return self.headers.get(header_name)
    def getCookie(self, cookie_name):
        return self.cookies.get(cookie_name)
    def getRequestBodyAsString(self):
        return self.body
class TestIntegrationEvaluator(unittest.TestCase):
    """Tests for IntegrationEvaluator.getMatchedIntegrationConfig.

    Each test builds a literal integration-config dict, evaluates it
    against a fixed URL and a mocked HTTP context, and asserts which
    integration (if any) is returned.
    """
    def test_getMatchedIntegrationConfig_oneTrigger_and_notMatched(self):
        """AND trigger whose cookie part fails -> evaluator returns None."""
        integrationConfig = {
            "Integrations": [{
                "Triggers": [{
                    "LogicalOperator":
                    "And",
                    "TriggerParts": [{
                        "CookieName": "c1",
                        "Operator": "Equals",
                        "ValueToCompare": "value1",
                        "ValidatorType": "CookieValidator",
                        "IsIgnoreCase": False,
                        "IsNegative": False
                    }, {
                        "UrlPart": "PageUrl",
                        "ValidatorType": "UrlValidator",
                        "ValueToCompare": "test",
                        "Operator": "Contains",
                        "IsIgnoreCase": False,
                        "IsNegative": False
                    }]
                }]
            }]
        }
        url = "http://test.testdomain.com:8080/test?q=2"
        testObject = IntegrationEvaluator()
        matchedConfig = testObject.getMatchedIntegrationConfig(
            integrationConfig, url, HttpContextProviderMock())
        assert (matchedConfig == None)
    def test_getMatchedIntegrationConfig_oneTrigger_and_matched(self):
        """AND trigger where both parts pass -> named integration returned."""
        integrationConfig = {
            "Integrations": [{
                "Name":
                "integration1",
                "Triggers": [{
                    "LogicalOperator":
                    "And",
                    "TriggerParts": [{
                        "CookieName": "c1",
                        "Operator": "Equals",
                        "ValueToCompare": "value1",
                        "ValidatorType": "CookieValidator",
                        "IsIgnoreCase": True,
                        "IsNegative": False
                    }, {
                        "UrlPart": "PageUrl",
                        "ValidatorType": "UrlValidator",
                        "ValueToCompare": "test",
                        "Operator": "Contains",
                        "IsIgnoreCase": False,
                        "IsNegative": False
                    }]
                }]
            }]
        }
        url = "http://test.testdomain.com:8080/test?q=2"
        hcpMock = HttpContextProviderMock()
        hcpMock.cookies = {"c2": "ddd", "c1": "Value1"}
        testObject = IntegrationEvaluator()
        matchedConfig = testObject.getMatchedIntegrationConfig(
            integrationConfig, url, hcpMock)
        assert (matchedConfig["Name"] == "integration1")
    def test_getMatchedIntegrationConfig_oneTrigger_and_notmatched_UserAgent(
            self):
        """Negative user-agent part vetoes an otherwise-matching AND trigger."""
        integrationConfig = {
            "Integrations": [{
                "Name":
                "integration1",
                "Triggers": [{
                    "LogicalOperator":
                    "And",
                    "TriggerParts": [{
                        "CookieName": "c1",
                        "Operator": "Equals",
                        "ValueToCompare": "value1",
                        "ValidatorType": "CookieValidator",
                        "IsIgnoreCase": True,
                        "IsNegative": False
                    }, {
                        "UrlPart": "PageUrl",
                        "ValidatorType": "UrlValidator",
                        "ValueToCompare": "test",
                        "Operator": "Contains",
                        "IsIgnoreCase": False,
                        "IsNegative": False
                    }, {
                        "ValidatorType": "userAgentValidator",
                        "ValueToCompare": "Googlebot",
                        "Operator": "Contains",
                        "IsIgnoreCase": True,
                        "IsNegative": True
                    }]
                }]
            }]
        }
        url = "http://test.testdomain.com:8080/test?q=2"
        hcpMock = HttpContextProviderMock()
        hcpMock.headers = {"user-agent": "bot.html google.com googlebot test"}
        hcpMock.cookies = {"c2": "ddd", "c1": "Value1"}
        testObject = IntegrationEvaluator()
        matchedConfig = testObject.getMatchedIntegrationConfig(
            integrationConfig, url, hcpMock)
        assert (matchedConfig == None)
    def test_getMatchedIntegrationConfig_oneTrigger_or_notMatched(self):
        """OR trigger where neither part passes -> evaluator returns None."""
        integrationConfig = {
            "Integrations": [{
                "Name":
                "integration1",
                "Triggers": [{
                    "LogicalOperator":
                    "Or",
                    "TriggerParts": [{
                        "CookieName": "c1",
                        "Operator": "Equals",
                        "ValueToCompare": "value1",
                        "ValidatorType": "CookieValidator",
                        "IsIgnoreCase": True,
                        "IsNegative": True
                    }, {
                        "UrlPart": "PageUrl",
                        "ValidatorType": "UrlValidator",
                        "ValueToCompare": "test",
                        "Operator": "Equals",
                        "IsIgnoreCase": False,
                        "IsNegative": False
                    }]
                }]
            }]
        }
        url = "http://test.testdomain.com:8080/test?q=2"
        hcpMock = HttpContextProviderMock()
        hcpMock.cookies = {"c2": "ddd", "c1": "Value1"}
        testObject = IntegrationEvaluator()
        matchedConfig = testObject.getMatchedIntegrationConfig(
            integrationConfig, url, hcpMock)
        assert (matchedConfig == None)
    def test_getMatchedIntegrationConfig_oneTrigger_or_matched(self):
        """OR trigger where one (negated URL) part passes -> match returned."""
        integrationConfig = {
            "Integrations": [{
                "Name":
                "integration1",
                "Triggers": [{
                    "LogicalOperator":
                    "Or",
                    "TriggerParts": [{
                        "CookieName": "c1",
                        "Operator": "Equals",
                        "ValueToCompare": "value1",
                        "ValidatorType": "CookieValidator",
                        "IsIgnoreCase": True,
                        "IsNegative": True
                    }, {
                        "UrlPart": "PageUrl",
                        "ValidatorType": "UrlValidator",
                        "ValueToCompare": "test",
                        "Operator": "Equals",
                        "IsIgnoreCase": False,
                        "IsNegative": True
                    }]
                }]
            }]
        }
        url = "http://test.testdomain.com:8080/test?q=2"
        hcpMock = HttpContextProviderMock()
        hcpMock.cookies = {"c2": "ddd", "c1": "Value1"}
        testObject = IntegrationEvaluator()
        matchedConfig = testObject.getMatchedIntegrationConfig(
            integrationConfig, url, hcpMock)
        assert (matchedConfig["Name"] == "integration1")
    def test_getMatchedIntegrationConfig_twoTriggers_matched(self):
        """Integration with two triggers: second trigger satisfies -> match."""
        integrationConfig = {
            "Integrations": [{
                "Name":
                "integration1",
                "Triggers": [{
                    "LogicalOperator":
                    "And",
                    "TriggerParts": [{
                        "CookieName": "c1",
                        "Operator": "Equals",
                        "ValueToCompare": "value1",
                        "ValidatorType": "CookieValidator",
                        "IsIgnoreCase": True,
                        "IsNegative": True
                    }]
                }, {
                    "LogicalOperator":
                    "And",
                    "TriggerParts": [{
                        "CookieName": "c1",
                        "Operator": "Equals",
                        "ValueToCompare": "Value1",
                        "ValidatorType": "CookieValidator",
                        "IsIgnoreCase": False,
                        "IsNegative": False
                    }, {
                        "UrlPart": "PageUrl",
                        "ValidatorType": "UrlValidator",
                        "ValueToCompare": "test",
                        "Operator": "Contains",
                        "IsIgnoreCase": False,
                        "IsNegative": False
                    }]
                }]
            }]
        }
        url = "http://test.testdomain.com:8080/test?q=2"
        hcpMock = HttpContextProviderMock()
        hcpMock.cookies = {"c2": "ddd", "c1": "Value1"}
        testObject = IntegrationEvaluator()
        matchedConfig = testObject.getMatchedIntegrationConfig(
            integrationConfig, url, hcpMock)
        assert (matchedConfig["Name"] == "integration1")
    def test_getMatchedIntegrationConfig_threeIntegrationsInOrder_secondMatched(
            self):
        """Integrations are evaluated in order; first matching one wins."""
        integrationConfig = {
            "Integrations": [{
                "Name":
                "integration0",
                "Triggers": [{
                    "LogicalOperator":
                    "And",
                    "TriggerParts": [{
                        "UrlPart": "PageUrl",
                        "ValidatorType": "UrlValidator",
                        "ValueToCompare": "Test",
                        "Operator": "Contains",
                        "IsIgnoreCase": False,
                        "IsNegative": False
                    }]
                }]
            }, {
                "Name":
                "integration1",
                "Triggers": [{
                    "LogicalOperator":
                    "And",
                    "TriggerParts": [{
                        "UrlPart": "PageUrl",
                        "ValidatorType": "UrlValidator",
                        "ValueToCompare": "test",
                        "Operator": "Contains",
                        "IsIgnoreCase": False,
                        "IsNegative": False
                    }]
                }]
            }, {
                "Name":
                "integration2",
                "Triggers": [{
                    "LogicalOperator":
                    "And",
                    "TriggerParts": [{
                        "CookieName": "c1",
                        "ValidatorType": "CookieValidator",
                        "ValueToCompare": "c1",
                        "Operator": "Equals",
                        "IsIgnoreCase": True,
                        "IsNegative": False
                    }]
                }]
            }]
        }
        url = "http://test.testdomain.com:8080/test?q=2"
        hcpMock = HttpContextProviderMock()
        hcpMock.cookies = {"c2": "ddd", "c1": "Value1"}
        testObject = IntegrationEvaluator()
        matchedConfig = testObject.getMatchedIntegrationConfig(
            integrationConfig, url, hcpMock)
        assert (matchedConfig["Name"] == "integration1")
class TestUrlValidatorHelper(unittest.TestCase):
    """Tests for UrlValidatorHelper.evaluate over PageUrl/PagePath/HostName."""
    def test_evaluate(self):
        """None/empty trigger parts fail; each UrlPart is matched per its operator."""
        # A missing or empty trigger part must never match.
        assert (not UrlValidatorHelper.evaluate(None, "notimportant"))
        assert (not UrlValidatorHelper.evaluate({}, "notimportant"))
        triggerPart = {
            "UrlPart": "PageUrl",
            "Operator": "Contains",
            "IsIgnoreCase": True,
            "IsNegative": False,
            "ValueToCompare": "http://test.testdomain.com:8080/test?q=1"
        }
        assert (not UrlValidatorHelper.evaluate(
            triggerPart, "http://test.testdomain.com:8080/test?q=2"))
        # PagePath compares only the path component, case-insensitively here.
        triggerPart = {
            "UrlPart": "PagePath",
            "Operator": "Equals",
            "IsIgnoreCase": True,
            "IsNegative": False,
            "ValueToCompare": "/Test/t1"
        }
        assert (UrlValidatorHelper.evaluate(
            triggerPart, "http://test.testdomain.com:8080/test/t1?q=2&y02"))
        triggerPart = {
            "UrlPart": "HostName",
            "Operator": "Contains",
            "IsIgnoreCase": True,
            "IsNegative": False,
            "ValueToCompare": "test.testdomain.com"
        }
        assert (UrlValidatorHelper.evaluate(
            triggerPart, "http://m.test.testdomain.com:8080/test?q=2"))
        # IsNegative inverts the outcome of the same comparison.
        triggerPart = {
            "UrlPart": "HostName",
            "Operator": "Contains",
            "IsIgnoreCase": True,
            "IsNegative": True,
            "ValueToCompare": "test.testdomain.com"
        }
        assert (not UrlValidatorHelper.evaluate(
            triggerPart, "http://m.test.testdomain.com:8080/test?q=2"))
class TestCookieValidatorHelper(unittest.TestCase):
    """Tests for CookieValidatorHelper.evaluate against mocked cookies."""
    def test_evaluate(self):
        """None/empty parts fail; cookie value matched per operator and flags."""
        hcpMock = HttpContextProviderMock()
        assert (not CookieValidatorHelper.evaluate(None, hcpMock))
        assert (not CookieValidatorHelper.evaluate({}, hcpMock))
        triggerPart = {
            "CookieName": "c1",
            "Operator": "Contains",
            "IsIgnoreCase": True,
            "IsNegative": False,
            "ValueToCompare": "1"
        }
        hcpMock.cookies = {"c1": "hhh"}
        assert (not CookieValidatorHelper.evaluate(triggerPart, hcpMock))
        # Trigger part without the IsIgnoreCase/IsNegative keys still evaluates.
        triggerPart = {
            "CookieName": "c1",
            "Operator": "Contains",
            "ValueToCompare": "1"
        }
        hcpMock.cookies = {"c2": "ddd", "c1": "3"}
        assert (not CookieValidatorHelper.evaluate(triggerPart, hcpMock))
        triggerPart = {
            "CookieName": "c1",
            "Operator": "Contains",
            "IsIgnoreCase": True,
            "IsNegative": False,
            "ValueToCompare": "1"
        }
        hcpMock.cookies = {"c2": "ddd", "c1": "1"}
        assert (CookieValidatorHelper.evaluate(triggerPart, hcpMock))
        # IsNegative inverts an otherwise-matching comparison.
        triggerPart = {
            "CookieName": "c1",
            "Operator": "Contains",
            "IsIgnoreCase": True,
            "IsNegative": True,
            "ValueToCompare": "1"
        }
        hcpMock.cookies = {"c2": "ddd", "c1": "1"}
        assert (not CookieValidatorHelper.evaluate(triggerPart, hcpMock))
class TestUserAgentValidatorHelper(unittest.TestCase):
    """Tests for UserAgentValidatorHelper.evaluate on the user-agent header."""
    def test_evaluate(self):
        """None/empty parts fail; user-agent matched per operator and flags."""
        hcpMock = HttpContextProviderMock()
        assert (not UserAgentValidatorHelper.evaluate(None, hcpMock))
        assert (not UserAgentValidatorHelper.evaluate({}, hcpMock))
        # Case-sensitive Contains: "googlebot" is not in "Googlebot ...".
        triggerPart = {
            "Operator": "Contains",
            "IsIgnoreCase": False,
            "IsNegative": False,
            "ValueToCompare": "googlebot"
        }
        hcpMock.headers = {"user-agent": "Googlebot sample useraagent"}
        assert (not UserAgentValidatorHelper.evaluate(triggerPart, hcpMock))
        triggerPart = {
            "Operator": "Equals",
            "IsIgnoreCase": True,
            "IsNegative": True,
            "ValueToCompare": "googlebot"
        }
        hcpMock.headers = {"user-agent": "ooglebot sample useraagent"}
        assert (UserAgentValidatorHelper.evaluate(triggerPart, hcpMock))
        triggerPart = {
            "Operator": "Contains",
            "IsIgnoreCase": False,
            "IsNegative": True,
            "ValueToCompare": "googlebot"
        }
        hcpMock.headers = {"user-agent": "googlebot"}
        assert (not UserAgentValidatorHelper.evaluate(triggerPart, hcpMock))
        triggerPart = {
            "Operator": "Contains",
            "IsIgnoreCase": True,
            "IsNegative": False,
            "ValueToCompare": "googlebot"
        }
        hcpMock.headers = {"user-agent": "Googlebot"}
        assert (UserAgentValidatorHelper.evaluate(triggerPart, hcpMock))
class TestHttpHeaderValidatorHelper(unittest.TestCase):
    """Tests for HttpHeaderValidatorHelper.evaluate on an arbitrary header."""
    def test_evaluate(self):
        """None/empty parts fail; named header matched per operator and flags."""
        hcpMock = HttpContextProviderMock()
        assert (not HttpHeaderValidatorHelper.evaluate(None, hcpMock))
        assert (not HttpHeaderValidatorHelper.evaluate({}, hcpMock))
        triggerPart = {
            "HttpHeaderName": "a-header",
            "Operator": "Contains",
            "IsIgnoreCase": True,
            "IsNegative": False,
            "ValueToCompare": "value"
        }
        hcpMock.headers = {'a-header': "VaLuE"}
        assert (HttpHeaderValidatorHelper.evaluate(triggerPart, hcpMock))
        triggerPart = {
            "HttpHeaderName": "a-header",
            "Operator": "Contains",
            "IsIgnoreCase": True,
            "IsNegative": False,
            "ValueToCompare": "value"
        }
        hcpMock.headers = {'a-header': "not"}
        assert (not HttpHeaderValidatorHelper.evaluate(triggerPart, hcpMock))
        # IsNegative turns the non-matching comparison into a pass.
        triggerPart = {
            "HttpHeaderName": "a-header",
            "Operator": "Contains",
            "IsNegative": True,
            "IsIgnoreCase": False,
            "ValueToCompare": "value"
        }
        hcpMock.headers = {'a-header': "not"}
        assert (HttpHeaderValidatorHelper.evaluate(triggerPart, hcpMock))
class TestComparisonOperatorHelper(unittest.TestCase):
    """Tests for ComparisonOperatorHelper.evaluate.

    NOTE(review): from the cases below the positional arguments appear to
    be (operator, isNegative, isIgnoreCase, value, valueToCompare,
    valuesToCompare) — confirm against integration_config_helpers.
    """
    def test_evaluate_equals_operator(self):
        """'Equals' across case-sensitivity and negation combinations."""
        assert (ComparisonOperatorHelper.evaluate("Equals", False, False, None,
                                                  None, None))
        assert (ComparisonOperatorHelper.evaluate("Equals", False, False,
                                                  "test1", "test1", None))
        assert (not ComparisonOperatorHelper.evaluate("Equals", False, False,
                                                      "test1", "Test1", None))
        assert (ComparisonOperatorHelper.evaluate("Equals", False, True,
                                                  "test1", "Test1", None))
        assert (ComparisonOperatorHelper.evaluate("Equals", True, False,
                                                  "test1", "Test1", None))
        assert (not ComparisonOperatorHelper.evaluate("Equals", True, False,
                                                      "test1", "test1", None))
        assert (not ComparisonOperatorHelper.evaluate("Equals", True, True,
                                                      "test1", "Test1", None))
    def test_evaluate_contains_operator(self):
        """'Contains' including the "*" wildcard and the empty-string case."""
        assert (ComparisonOperatorHelper.evaluate("Contains", False, False,
                                                  None, None, None))
        assert (ComparisonOperatorHelper.evaluate(
            "Contains", False, False, "test_test1_test", "test1", None))
        assert (not ComparisonOperatorHelper.evaluate(
            "Contains", False, False, "test_test1_test", "Test1", None))
        assert (ComparisonOperatorHelper.evaluate(
            "Contains", False, True, "test_test1_test", "Test1", None))
        assert (ComparisonOperatorHelper.evaluate(
            "Contains", True, False, "test_test1_test", "Test1", None))
        assert (not ComparisonOperatorHelper.evaluate(
            "Contains", True, True, "test_test1", "Test1", None))
        assert (not ComparisonOperatorHelper.evaluate(
            "Contains", True, False, "test_test1", "test1", None))
        assert (ComparisonOperatorHelper.evaluate(
            "Contains", False, False, "test_dsdsdsdtest1", "*", None))
        assert (not ComparisonOperatorHelper.evaluate(
            "Contains", False, False, "", "*", None))
    def test_evaluate_equalsAny_operator(self):
        """'EqualsAny' compares against a list of candidate values."""
        assert (ComparisonOperatorHelper.evaluate("EqualsAny", False, False,
                                                  "test1", None, ["test1"]))
        assert (not ComparisonOperatorHelper.evaluate(
            "EqualsAny", False, False, "test1", None, ["Test1"]))
        assert (ComparisonOperatorHelper.evaluate("EqualsAny", False, True,
                                                  "test1", None, ["Test1"]))
        assert (ComparisonOperatorHelper.evaluate("EqualsAny", True, False,
                                                  "test1", None, ["Test1"]))
        assert (not ComparisonOperatorHelper.evaluate(
            "EqualsAny", True, False, "test1", None, ["test1"]))
        assert (not ComparisonOperatorHelper.evaluate(
            "EqualsAny", True, True, "test1", None, ["Test1"]))
    def test_evaluate_containsAny_operator(self):
        """'ContainsAny' substring-matches against a list, "*" matching all."""
        assert (ComparisonOperatorHelper.evaluate(
            "ContainsAny", False, False, "test_test1_test", None, ["test1"]))
        assert (not ComparisonOperatorHelper.evaluate(
            "ContainsAny", False, False, "test_test1_test", None, ["Test1"]))
        assert (ComparisonOperatorHelper.evaluate(
            "ContainsAny", False, True, "test_test1_test", None, ["Test1"]))
        assert (ComparisonOperatorHelper.evaluate(
            "ContainsAny", True, False, "test_test1_test", None, ["Test1"]))
        assert (not ComparisonOperatorHelper.evaluate(
            "ContainsAny", True, True, "test_test1", None, ["Test1"]))
        assert (not ComparisonOperatorHelper.evaluate(
            "ContainsAny", True, False, "test_test1", None, ["test1"]))
        assert (ComparisonOperatorHelper.evaluate(
            "ContainsAny", False, False, "test_dsdsdsdtest1", None, ["*"]))
    def test_evaluate_unsupported_operator(self):
        """Unknown operator names never match."""
        assert (not ComparisonOperatorHelper.evaluate("-not-supported-", False,
                                                      False, None, None, None))
class TestRequestBodyValidatorHelper(unittest.TestCase):
    """Tests for RequestBodyValidatorHelper.evaluate on the request body."""
    def test_evaluate(self):
        """None/empty parts fail; body matched per operator, case and negation."""
        hcp_mock = HttpContextProviderMock()
        assert (not RequestBodyValidatorHelper.evaluate(None, hcp_mock))
        assert (not RequestBodyValidatorHelper.evaluate({}, hcp_mock))
        trigger_part = {
            "Operator": "Contains",
            "IsIgnoreCase": True,
            "IsNegative": False,
            "ValueToCompare": "test body"
        }
        # Body is still "" here, so Contains cannot match.
        assert (not RequestBodyValidatorHelper.evaluate(trigger_part, hcp_mock))
        hcp_mock.body = "my test body is here"
        assert (RequestBodyValidatorHelper.evaluate(trigger_part, hcp_mock))
        trigger_part = {
            "Operator": "Equals",
            "IsIgnoreCase": True,
            "IsNegative": False,
            "ValueToCompare": "Test"
        }
        assert (not RequestBodyValidatorHelper.evaluate(trigger_part, hcp_mock))
        trigger_part = {
            "Operator": "Contains",
            "IsIgnoreCase": True,
            "IsNegative": True,
            "ValueToCompare": "Test"
        }
        # IsNegative: body contains "Test" (ignoring case), so this fails.
        assert (not RequestBodyValidatorHelper.evaluate(trigger_part, hcp_mock))
        trigger_part = {
            "Operator": "Contains",
            "IsIgnoreCase": True,
            "IsNegative": True,
            "ValueToCompare": "BTest"
        }
        hcp_mock.body = "my test body is here"
        assert (RequestBodyValidatorHelper.evaluate(trigger_part, hcp_mock))
| 39.440536
| 80
| 0.499193
| 1,480
| 23,546
| 7.871622
| 0.088514
| 0.027039
| 0.049099
| 0.04927
| 0.849528
| 0.81382
| 0.775966
| 0.72309
| 0.631073
| 0.543949
| 0
| 0.012466
| 0.386775
| 23,546
| 596
| 81
| 39.506711
| 0.794376
| 0
| 0
| 0.756957
| 0
| 0
| 0.212478
| 0
| 0
| 0
| 0
| 0
| 0.124304
| 1
| 0.038961
| false
| 0
| 0.009276
| 0.001855
| 0.072356
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.