hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
396be9b8e76a36fa6d51ae0f674f69f4c1dcf376
| 1,217
|
py
|
Python
|
pydouyu/packet_util.py
|
Kexiii/pydouyu
|
494732159980b7b71575e6757899c48052c6c2e0
|
[
"MIT"
] | 11
|
2019-02-22T01:02:32.000Z
|
2021-12-15T08:50:26.000Z
|
pydouyu/packet_util.py
|
Kexiii/pydouyu
|
494732159980b7b71575e6757899c48052c6c2e0
|
[
"MIT"
] | 2
|
2020-07-05T01:26:18.000Z
|
2021-01-07T15:22:57.000Z
|
pydouyu/packet_util.py
|
Kexiii/pydouyu
|
494732159980b7b71575e6757899c48052c6c2e0
|
[
"MIT"
] | 3
|
2019-04-23T01:22:20.000Z
|
2021-12-04T09:09:16.000Z
|
import time
# Message type for client-to-server packets (0x02B1), written into the packet header.
client_msg_type = 689
# Reserved header field; always zero.
reserved_data_field = 0
def assemble_login_str(room_id):
    """Build the login request payload for the given room id."""
    return "".join(("type@=loginreq/roomid@=", str(room_id), "/"))
def assemble_join_group_str(room_id):
    """Build the join-group request payload (group id is fixed at -9999)."""
    return "".join(("type@=joingroup/rid@=", str(room_id), "/gid@=-9999/"))
def assemble_heartbeat_str():
    """Build the keep-alive payload stamped with the current unix time.

    NOTE(review): the "%s/" in the format string plus the appended "/" yields
    a double trailing slash ("...//"); preserved as-is — confirm the server
    tolerates it before changing.
    """
    tick = int(time.time())
    return "type@=keeplive/tick@=%s/" % tick + "/"
def assemble_transfer_data(ori_str):
    """Frame *ori_str* in the binary packet layout expected by the server.

    Layout (all little-endian): declared packet size twice (4 bytes each),
    message type (2 bytes), reserved field (2 bytes), payload, NUL terminator.

    NOTE(review): the declared size counts characters (len(ori_str)), not
    encoded bytes — these differ for non-ASCII payloads; verify intent.
    """
    packet_size = 4 * 2 + len(ori_str) + 1
    header = b''.join((
        packet_size.to_bytes(4, byteorder='little'),
        packet_size.to_bytes(4, byteorder='little'),
        client_msg_type.to_bytes(2, byteorder='little'),
        reserved_data_field.to_bytes(2, byteorder='little'),
    ))
    return header + ori_str.encode() + b'\0'
def extract_str_from_data(data):
    """Return the payload text of a framed packet, or "" on a size mismatch.

    The first 4 little-endian bytes hold the declared packet size, which must
    equal the actual buffer length; the 8-byte header is then skipped and the
    remainder decoded as UTF-8 with undecodable bytes dropped.
    """
    declared_size = int.from_bytes(data[0:4], byteorder='little')
    if declared_size != len(data):
        return ""
    return data[8:].decode("utf8", "ignore")
def parse_str_to_dict(ori_str):
    """Parse a "key@=value/" style message into a dict.

    Segments that do not split into exactly one key and one value around
    "@=" are silently skipped.
    """
    pairs = (segment.split("@=") for segment in ori_str.split("/"))
    return {kv[0]: kv[1] for kv in pairs if len(kv) == 2}
| 23.403846
| 66
| 0.632703
| 180
| 1,217
| 4.011111
| 0.327778
| 0.058172
| 0.049862
| 0.083102
| 0.227147
| 0.182825
| 0.108033
| 0.108033
| 0.108033
| 0
| 0
| 0.023983
| 0.211997
| 1,217
| 51
| 67
| 23.862745
| 0.728884
| 0
| 0
| 0.114286
| 0
| 0
| 0.104613
| 0.056013
| 0
| 0
| 0
| 0
| 0
| 1
| 0.171429
| false
| 0
| 0.028571
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
396fa59895ef035568d0b517a96fd649c4c2ec84
| 4,364
|
py
|
Python
|
xyw_macro/win32.py
|
xue0228/keyboard
|
dcb0def1d87a9197676c0f405b980a67e128ab24
|
[
"MIT"
] | null | null | null |
xyw_macro/win32.py
|
xue0228/keyboard
|
dcb0def1d87a9197676c0f405b980a67e128ab24
|
[
"MIT"
] | null | null | null |
xyw_macro/win32.py
|
xue0228/keyboard
|
dcb0def1d87a9197676c0f405b980a67e128ab24
|
[
"MIT"
] | null | null | null |
import ctypes
from ctypes import wintypes, windll
import win32api
import win32con
import win32gui
# Pointer-sized type used for the dwExtraInfo fields.
# Originally declared as ctypes.POINTER(ctypes.c_ulong); a plain void
# pointer has the same size and avoids constructing pointer objects.
PUL = ctypes.c_void_p
class KeyBdMsg(ctypes.Structure):
    """
    Event data handed to the low-level keyboard hook callback
    (field layout matches the Win32 KBDLLHOOKSTRUCT).
    """
    _fields_ = [
        ('vkCode', wintypes.DWORD),
        ('scanCode', wintypes.DWORD),
        ('flags', wintypes.DWORD),
        ('time', wintypes.DWORD),
        ('dwExtraInfo', PUL)]
class KeyBdInput(ctypes.Structure):
    """
    Keyboard-input structure for SendInput (cf. Win32 KEYBDINPUT).

    The class attributes are the dwFlags bit values (KEYEVENTF_*).
    """
    EXTENDEDKEY = 0x0001
    KEYUP = 0x0002
    SCANCODE = 0x0008
    UNICODE = 0x0004
    _fields_ = [("wVk", ctypes.c_ushort),
                ("wScan", ctypes.c_ushort),
                ("dwFlags", ctypes.c_ulong),
                ("time", ctypes.c_ulong),
                ("dwExtraInfo", PUL)]
class HardwareInput(ctypes.Structure):
    """
    Hardware-input structure for SendInput (cf. Win32 HARDWAREINPUT).
    """
    _fields_ = [("uMsg", ctypes.c_ulong),
                ("wParamL", ctypes.c_short),
                ("wParamH", ctypes.c_ushort)]
class MouseInput(ctypes.Structure):
    """
    Mouse-input structure for SendInput (cf. Win32 MOUSEINPUT).

    MOVE..ABSOLUTE are dwFlags bit values (MOUSEEVENTF_*); XBUTTON1 and
    XBUTTON2 are mouseData values used with the XDOWN/XUP flags.
    """
    MOVE = 0x0001
    LEFTDOWN = 0x0002
    LEFTUP = 0x0004
    RIGHTDOWN = 0x0008
    RIGHTUP = 0x0010
    MIDDLEDOWN = 0x0020
    MIDDLEUP = 0x0040
    XDOWN = 0x0080
    XUP = 0x0100
    WHEEL = 0x0800
    HWHEEL = 0x1000
    ABSOLUTE = 0x8000
    XBUTTON1 = 0x0001
    XBUTTON2 = 0x0002
    _fields_ = [("dx", ctypes.c_long),
                ("dy", ctypes.c_long),
                ("mouseData", ctypes.c_ulong),
                ("dwFlags", ctypes.c_ulong),
                ("time", ctypes.c_ulong),
                ("dwExtraInfo", PUL)]
class InputUnion(ctypes.Union):
    """Union of the three input payloads carried by the Input structure."""
    _fields_ = [("ki", KeyBdInput),
                ("mi", MouseInput),
                ("hi", HardwareInput)]
class Input(ctypes.Structure):
    """
    Top-level structure passed to SendInput (cf. Win32 INPUT).

    `type` (MOUSE / KEYBOARD / HARDWARE) selects which member of the
    InputUnion is meaningful.
    """
    MOUSE = 0
    KEYBOARD = 1
    HARDWARE = 2
    _fields_ = [("type", ctypes.c_ulong),
                ("ii", InputUnion)]
# Callback prototype for keyboard hook procedures:
# LPARAM HookProc(int nCode, WPARAM wParam, KeyBdMsg *lParam)
HookProc = ctypes.WINFUNCTYPE(
    wintypes.LPARAM,
    ctypes.c_int32, wintypes.WPARAM, ctypes.POINTER(KeyBdMsg))

# Post synthetic input events to the system input queue.
SendInput = windll.user32.SendInput
SendInput.argtypes = (
    wintypes.UINT,
    ctypes.POINTER(Input),
    ctypes.c_int)

# Retrieve (blocking on) messages from the calling thread's message queue.
# NOTE(review): GetMessageA's first parameter is an LPMSG pointer in the Win32
# API; it is declared here as wintypes.MSG by value — verify callers pass
# ctypes.byref(msg) and that this works as intended.
GetMessage = windll.user32.GetMessageA
GetMessage.argtypes = (
    wintypes.MSG,
    wintypes.HWND,
    wintypes.UINT,
    wintypes.UINT)

# Install a hook procedure into the hook chain.
SetWindowsHookEx = windll.user32.SetWindowsHookExA
SetWindowsHookEx.argtypes = (
    ctypes.c_int,
    HookProc,
    wintypes.HINSTANCE,
    wintypes.DWORD)

# Remove a previously installed hook.
UnhookWindowsHookEx = windll.user32.UnhookWindowsHookEx
UnhookWindowsHookEx.argtypes = (
    wintypes.HHOOK,)

# Forward the event to the next procedure in the hook chain.
# NOTE(review): the last parameter is declared as KeyBdMsg by value; the API
# takes an LPARAM (typically the lParam pointer) — verify against call sites.
CallNextHookEx = windll.user32.CallNextHookEx
CallNextHookEx.argtypes = (
    wintypes.HHOOK,
    ctypes.c_int,
    wintypes.WPARAM,
    KeyBdMsg)

# Query the async (physical) state of a virtual key.
GetAsyncKeyState = windll.user32.GetAsyncKeyState
GetAsyncKeyState.argtypes = (
    ctypes.c_int,
    )

# Read/write the extra-info value attached to queued messages (used to tag
# and later filter input that this module generated itself).
GetMessageExtraInfo = windll.user32.GetMessageExtraInfo
SetMessageExtraInfo = windll.user32.SetMessageExtraInfo
SetMessageExtraInfo.argtypes = (
    wintypes.LPARAM,
    )
def send_kb_event(v_key, is_pressed):
    """Post a synthetic keyboard event to the system input queue.

    dwExtraInfo is set to 228 so the hook callback can recognise and filter
    input generated by this module.

    :param v_key: virtual-key code
    :param is_pressed: True for key-down, False for key-up
    :return: number of events successfully inserted (from SendInput)
    """
    marker = 228
    if is_pressed:
        flags = 0
    else:
        flags = KeyBdInput.KEYUP
    union = InputUnion()
    union.ki = KeyBdInput(v_key, 0x48, flags, 0, marker)
    event = Input(Input.KEYBOARD, union)
    return SendInput(1, ctypes.pointer(event), ctypes.sizeof(event))
def send_unicode(unicode):
    """Send a single character as a KEYEVENTF_UNICODE keyboard event.

    dwExtraInfo is set to 228 so the hook callback can filter our own input.

    :param unicode: a one-character string to inject
    :return: number of events successfully inserted (from SendInput)
    """
    marker = 228
    union = InputUnion()
    union.ki = KeyBdInput(0, ord(unicode), KeyBdInput.UNICODE, 0, marker)
    event = Input(Input.KEYBOARD, union)
    return SendInput(1, ctypes.pointer(event), ctypes.sizeof(event))
def change_language_layout(language):
    """Ask the foreground window to switch to the given keyboard layout.

    Loads the layout first if it is not already installed.

    :param language: layout identifier as an int (compared via hex())
    :return: True when SendMessage reports success (returns 0), else False
    """
    target = hex(language)
    foreground = win32gui.GetForegroundWindow()
    layouts = [hex(handle) for handle in win32api.GetKeyboardLayoutList()]
    # print(layouts)
    if target not in layouts:
        win32api.LoadKeyboardLayout('0000' + target[-4:], 1)
        layouts = [hex(handle) for handle in win32api.GetKeyboardLayoutList()]
        if target not in layouts:
            return False
    result = win32api.SendMessage(
        foreground,
        win32con.WM_INPUTLANGCHANGEREQUEST,
        0,
        language)
    return result == 0
| 21.82
| 68
| 0.632676
| 441
| 4,364
| 6.138322
| 0.362812
| 0.051718
| 0.035464
| 0.026598
| 0.192833
| 0.192833
| 0.167713
| 0.167713
| 0.167713
| 0.147765
| 0
| 0.048145
| 0.25275
| 4,364
| 199
| 69
| 21.929648
| 0.781969
| 0.060266
| 0
| 0.214286
| 0
| 0
| 0.033325
| 0
| 0
| 0
| 0.028063
| 0
| 0
| 1
| 0.02381
| false
| 0
| 0.039683
| 0
| 0.357143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
3975e522eae96a6443ccb6146ef3bb31b2d6df06
| 1,320
|
py
|
Python
|
examples/bruker_processed_1d/bruker_processed_1d.py
|
genematx/nmrglue
|
8a24cf6cbd18451e552fc0673b84c42d1dcb69a2
|
[
"BSD-3-Clause"
] | 150
|
2015-01-16T12:24:13.000Z
|
2022-03-03T18:01:18.000Z
|
examples/bruker_processed_1d/bruker_processed_1d.py
|
genematx/nmrglue
|
8a24cf6cbd18451e552fc0673b84c42d1dcb69a2
|
[
"BSD-3-Clause"
] | 129
|
2015-01-13T04:58:56.000Z
|
2022-03-02T13:39:16.000Z
|
examples/bruker_processed_1d/bruker_processed_1d.py
|
genematx/nmrglue
|
8a24cf6cbd18451e552fc0673b84c42d1dcb69a2
|
[
"BSD-3-Clause"
] | 88
|
2015-02-16T20:04:12.000Z
|
2022-03-10T06:50:30.000Z
|
#! /usr/bin/env python
"""
Compare bruker read_pdata to read.

Reads the same Bruker data set twice — once from the pre-processed pdata
files and once from the raw FID (processed here to match) — and overlays
both spectra on one plot.
"""
import nmrglue as ng
import matplotlib.pyplot as plt

# read in the data
data_dir = "data/bruker_exp/1/pdata/1"

# From pre-procced data.
dic, data = ng.bruker.read_pdata(data_dir, scale_data=True)
udic = ng.bruker.guess_udic(dic, data)
uc = ng.fileiobase.uc_from_udic(udic)
ppm_scale = uc.ppm_scale()

# From FID
dic1, data1 = ng.bruker.read(data_dir)
# remove the digital filter, this data is from an analog spectrometer.
# data = ng.bruker.remove_digital_filter(dic, data)

# process the spectrum
data1 = ng.proc_base.ls(data1, 1)                # left shift
data1 = ng.proc_base.gm(data1, g2=1/2.8e3)       # To match proc data...
data1 = ng.proc_base.zf_size(data1, 1024*32)     # zero fill
data1 = ng.proc_base.fft_positive(data1)         # FT
data1 = ng.proc_base.ps(data1, p0=93)            # phase is 180 off Bruker
data1 = ng.proc_base.di(data1)                   # discard
udic1 = ng.bruker.guess_udic(dic1, data1)
uc1 = ng.fileiobase.uc_from_udic(udic1)
ppm_scale1 = uc1.ppm_scale()

# plot the spectrum
# NOTE: plt.hold() was deprecated in matplotlib 2.0 and removed in 3.0;
# successive plot() calls draw on the same axes by default, so the old
# hold(True)/hold(False) pair is simply dropped.
fig = plt.figure()
plt.plot(ppm_scale, data)
plt.plot(ppm_scale1, data1)
plt.xlim([50, -50])   # reversed ppm axis, high field to the right
plt.xlabel('Carbon Chemical shift (ppm from neat TMS)')
plt.title('bruker.read_pdata vs bruker.read, note ppm axis')
plt.show()
| 28.085106
| 71
| 0.712121
| 225
| 1,320
| 4.044444
| 0.413333
| 0.053846
| 0.072527
| 0.098901
| 0.048352
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042266
| 0.157576
| 1,320
| 46
| 72
| 28.695652
| 0.776079
| 0.258333
| 0
| 0
| 0
| 0
| 0.117954
| 0.026096
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
397645cb5f3148b59ab74fb77253d9299c79d101
| 4,404
|
py
|
Python
|
tests/unit/test_posts_get_logic.py
|
claranet-ch/aws-sam-application-template-python
|
b835ef9295e4820110fd53f50619e4fea7493155
|
[
"CC-BY-4.0"
] | null | null | null |
tests/unit/test_posts_get_logic.py
|
claranet-ch/aws-sam-application-template-python
|
b835ef9295e4820110fd53f50619e4fea7493155
|
[
"CC-BY-4.0"
] | null | null | null |
tests/unit/test_posts_get_logic.py
|
claranet-ch/aws-sam-application-template-python
|
b835ef9295e4820110fd53f50619e4fea7493155
|
[
"CC-BY-4.0"
] | null | null | null |
import io
import os
import unittest
import boto3
from botocore.response import StreamingBody
from botocore.stub import Stubber
from functions.posts_get.posts_get_logic import posts_get_logic
class GetSomethingLogicTest(unittest.TestCase):
    """Unit tests for posts_get_logic using stubbed DynamoDB and S3 clients."""

    def setUp(self):
        # https://docs.python.org/3/library/unittest.html#unittest.TestCase.setUp
        # boto3 clients require a region even though every call is stubbed.
        os.environ['AWS_DEFAULT_REGION'] = 'eu-west-1'

    def tearDown(self):
        # https://docs.python.org/3/library/unittest.html#unittest.TestCase.tearDown
        pass

    # Renamed from __creeate_s3_object_body: fixed typo in the private helper
    # name (name-mangled, so only used inside this class).
    def __create_s3_object_body(self, content: str):
        """Wrap *content* in a StreamingBody, as botocore returns S3 bodies."""
        return StreamingBody(
            io.BytesIO(content.encode()),
            len(content)
        )

    def test_get_something(self):
        """posts_get_logic returns one entry per post found in the bucket."""
        DYNAMODB_TABLE = 'test-posts-meta'
        S3_BUCKET = 'test-posts'
        POST_1_KEY = 'post_2021-11-15T10:00:00Z.html'
        POST_2_KEY = 'post_2021-11-16T10:00:00Z.html'

        # region ------------------------------------------------- DynamoDB Stub
        dynamodb_client = boto3.client('dynamodb')
        dynamodb_stubber = Stubber(dynamodb_client)
        # region ------------------------------------------------------ 1st call
        dynamodb_stubber.add_response(
            'get_item',
            {
                'Item': {
                    'author': {'S': 'Elia Contini'},
                    'id': {'S': POST_1_KEY}
                }
            },
            {
                'Key': {'id': {'S': POST_1_KEY}},
                'TableName': DYNAMODB_TABLE
            }
        )
        # endregion ------------------------------------------------------------
        # region ------------------------------------------------------ 2nd call
        dynamodb_stubber.add_response(
            'get_item',
            {
                'Item': {
                    'author': {'S': 'Piero Bozzolo'},
                    'id': {'S': POST_2_KEY}
                }
            },
            {
                'Key': {'id': {'S': POST_2_KEY}},
                'TableName': DYNAMODB_TABLE
            }
        )
        # endregion ------------------------------------------------------------
        dynamodb_stubber.activate()
        # endregion ------------------------------------------------------------

        # region ------------------------------------------------------- S3 Stub
        s3_client = boto3.client('s3')
        s3_stubber = Stubber(s3_client)
        # region ------------------------------------------------------ 1st call
        list_objects_v2_expected_params = {'Bucket': S3_BUCKET}
        list_objects_v2_expected_result = {
            'Contents': [{'Key': POST_1_KEY}, {'Key': POST_2_KEY}]
        }
        s3_stubber.add_response(
            'list_objects_v2',
            list_objects_v2_expected_result,
            list_objects_v2_expected_params
        )
        # endregion ------------------------------------------------------------
        # region ------------------------------------------------------ 2nd call
        get_object_expected_params = {'Bucket': S3_BUCKET, 'Key': POST_1_KEY}
        get_object_expected_result = {
            'Body': self.__create_s3_object_body(
                '<h1>Post 1</h1><p>Content 1.</p>'
            )
        }
        s3_stubber.add_response(
            'get_object',
            get_object_expected_result,
            get_object_expected_params
        )
        # endregion ------------------------------------------------------------
        # region ------------------------------------------------------ 3rd call
        get_object_expected_params = {'Bucket': S3_BUCKET, 'Key': POST_2_KEY}
        get_object_expected_result = {
            'Body': self.__create_s3_object_body(
                '<h1>Post 2</h1><p>Content 2.</p>'
            )
        }
        s3_stubber.add_response(
            'get_object',
            get_object_expected_result,
            get_object_expected_params
        )
        # endregion ------------------------------------------------------------
        s3_stubber.activate()
        # endregion ------------------------------------------------------------

        result = posts_get_logic(
            dynamodb_client, DYNAMODB_TABLE, s3_client, S3_BUCKET)

        self.assertEqual(len(result), 2)
        dynamodb_stubber.assert_no_pending_responses()
        s3_stubber.assert_no_pending_responses()
| 35.516129
| 84
| 0.449818
| 378
| 4,404
| 4.89418
| 0.246032
| 0.048649
| 0.073514
| 0.045405
| 0.482162
| 0.322162
| 0.322162
| 0.322162
| 0.322162
| 0.322162
| 0
| 0.026123
| 0.287239
| 4,404
| 123
| 85
| 35.804878
| 0.563237
| 0.258401
| 0
| 0.233333
| 0
| 0
| 0.112719
| 0.018479
| 0
| 0
| 0
| 0
| 0.033333
| 1
| 0.044444
| false
| 0.011111
| 0.077778
| 0.011111
| 0.144444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
3978056ea17d8290a8897ffe9ef1bc60af963d5f
| 21,050
|
py
|
Python
|
firepy/model/geometry.py
|
KBeno/firefly-lca
|
a081b05f5d66951792bd00d2bb6ae1f8e43235e0
|
[
"MIT"
] | 3
|
2020-06-16T13:39:31.000Z
|
2022-01-10T09:34:52.000Z
|
firepy/model/geometry.py
|
KBeno/boblica
|
a081b05f5d66951792bd00d2bb6ae1f8e43235e0
|
[
"MIT"
] | null | null | null |
firepy/model/geometry.py
|
KBeno/boblica
|
a081b05f5d66951792bd00d2bb6ae1f8e43235e0
|
[
"MIT"
] | null | null | null |
from typing import Union, List
import copy
import math
import numpy as np
"""
Principles:
- geometry objects are defined by the minimum required information
- Points are made of coordinates (floats), everything else is based on Points except for Vectors
"""
class Point:
    """A point in 3D space; coordinates are stored as floats, z defaults to 0."""

    def __init__(self, x: float, y: float, z: float = 0):
        self.x = float(x)
        self.y = float(y)
        self.z = float(z)

    def __str__(self):
        return self.pretty_print()

    def pretty_print(self, indentation=''):
        # Single-line representation, optionally indented.
        return "{ind}{x}, {y}, {z} (Point)".format(x=self.x, y=self.y, z=self.z, ind=indentation)

    def coordinates(self):
        """Return (x, y, z) as a tuple."""
        return self.x, self.y, self.z

    def __sub__(self, other):
        # Point - Point -> Vector between them; Point - Vector -> translated Point.
        # (Any other operand type falls through and yields None, as before.)
        if isinstance(other, Point):
            return Vector(x=self.x - other.x, y=self.y - other.y, z=self.z - other.z)
        elif isinstance(other, Vector):
            return Point(x=self.x - other.x, y=self.y - other.y, z=self.z - other.z)

    def __add__(self, other):
        # Point + Vector -> translated Point.
        if isinstance(other, Vector):
            return Point(x=self.x + other.x, y=self.y + other.y, z=self.z + other.z)

    def __eq__(self, other):
        # Exact coordinate-wise float comparison.
        return self.x == other.x and self.y == other.y and self.z == other.z
class Vector:
    """A 3D vector; coordinates are stored as floats, z defaults to 0."""

    def __init__(self, x, y, z: float = 0):
        self.x = float(x)
        self.y = float(y)
        self.z = float(z)

    def __str__(self):
        return self.pretty_print()

    def pretty_print(self, indentation=''):
        # Single-line representation, optionally indented.
        return "{ind}{x}, {y}, {z} (Vector)".format(x=self.x, y=self.y, z=self.z, ind=indentation)

    def coordinates(self):
        """Return (x, y, z) as a tuple."""
        return self.x, self.y, self.z

    def length(self) -> float:
        """Euclidean norm of the vector."""
        return math.sqrt(self.x ** 2 + self.y ** 2 + self.z ** 2)

    def unitize(self):
        """Return a unit-length vector with the same direction (undefined for the zero vector)."""
        return Vector(self.x / self.length(), self.y / self.length(), self.z / self.length())

    def cross_product(self, vector2):
        """Return self x vector2."""
        return Vector(
            self.y * vector2.z - self.z * vector2.y,
            -self.x * vector2.z + self.z * vector2.x,
            self.x * vector2.y - self.y * vector2.x)

    def scalar_product(self, vector2):
        """Return the dot product self . vector2."""
        total = 0
        for a, b in zip(self.coordinates(), vector2.coordinates()):
            total += a * b
        return total

    def __mul__(self, other):
        # Vector * Vector -> dot product; Vector * scalar -> scaled Vector.
        if isinstance(other, Vector):
            total = 0
            for a, b in zip(self.coordinates(), other.coordinates()):
                total += a * b
            return total
        elif isinstance(other, (float, int)):
            return Vector(self.x * other, self.y * other, self.z * other)

    def angle(self, vector2):
        """Angle between this vector and vector2, in degrees (always 0..180)."""
        cos_value = self.scalar_product(vector2) / self.length() / vector2.length()
        return math.degrees(math.acos(cos_value))

    def __add__(self, other):
        return Vector(self.x + other.x, self.y + other.y, self.z + other.z)

    def __sub__(self, other):
        return Vector(self.x - other.x, self.y - other.y, self.z - other.z)

    def __truediv__(self, other: float):
        # Division by a scalar, implemented via multiplication by its reciprocal.
        return self * other ** -1

    def __eq__(self, other):
        # Exact coordinate-wise float comparison.
        return self.x == other.x and self.y == other.y and self.z == other.z
class Plane:
    """An infinite plane defined by a normal Vector and one Point on the plane."""

    def __init__(self, normal: Vector, point: Point):
        self.normal = normal
        self.point = point

    def __str__(self):
        return self.pretty_print()

    def pretty_print(self, indentation=''):
        # Tree-style multi-line description, optionally indented.
        return '{ind}Plane:\n'.format(ind=indentation) +\
            '{ind}|--Normal: {s}\n'.format(s=self.normal.pretty_print(), ind=indentation) +\
            '{ind}`--Point: {e}\n'.format(e=self.point.pretty_print(), ind=indentation)

    def intersect(self, other: Union['Ray', 'Plane']):
        """Intersect this plane with a Ray or another Plane.

        Returns a Point (plane x ray), a Ray (plane x plane), or None when
        the two objects are parallel. Any other operand type falls through
        and returns None implicitly.
        """
        if isinstance(other, Ray):
            # solve the linear equation system aX = b
            # (1 plane equation + 2 ray equations -> 3x3 system)
            plane_eq, plane_ord = self.get_equation(standardize=True)
            ray_eq, ray_ord = other.get_equation(standardize=True)
            a = np.append(plane_eq, ray_eq, axis=0)
            b = np.append(plane_ord, ray_ord, axis=0)
            try:
                solution = np.linalg.solve(a, b)
            except np.linalg.LinAlgError:
                # parallel (singular system)
                return None
            return Point(
                x=solution[0, 0],
                y=solution[1, 0],
                z=solution[2, 0]
            )
        if isinstance(other, Plane):
            # direction of intersection ray
            vector = self.normal.cross_product(other.normal)
            if vector == Vector(0, 0, 0):
                # parallel
                return None
            else:
                # get largest absolute coordinate value
                xyz = [abs(vector.x), abs(vector.y), abs(vector.z)]
                set_0_coord = xyz.index(max(xyz))
                # set this coordinate to 0 to solve the equation of the two planes
                # (pinning the coordinate with the largest direction component
                # guarantees the reduced 2x2 system is solvable)
                eq1, ord1 = self.get_equation(standardize=True)
                eq2, ord2 = other.get_equation(standardize=True)
                a = np.append(eq1, eq2, axis=0)
                b = np.append(ord1, ord2, axis=0)
                # delete the corresponding column from the matrix
                i = [True, True, True]
                i[set_0_coord] = False
                a = a[:, i]
                # we should be able to solve this, because parallel case was checked already
                solution = np.linalg.solve(a, b)
                # re-insert the pinned coordinate (0) at its original position
                if set_0_coord == 0:
                    point = Point(0, solution[0, 0], solution[1, 0])
                elif set_0_coord == 1:
                    point = Point(solution[0, 0], 0, solution[1, 0])
                else:
                    point = Point(solution[0, 0], solution[1, 0], 0)
                return Ray(
                    vector=vector,
                    point=point
                )

    def get_equation(self, standardize=False):
        """Return the plane equation aX + bY + cZ = d.

        With standardize=True: a pair of numpy arrays ([[a, b, c]], [[d]])
        ready to stack into a linear system; otherwise a dict of the
        coefficients.
        """
        # http://tutorial.math.lamar.edu/Classes/CalcIII/EqnsOfPlanes.aspx
        a = self.normal.x
        b = self.normal.y
        c = self.normal.z
        d = a * self.point.x + b * self.point.y + c * self.point.z
        if standardize:
            # return the coefficients of the equation in this form aX + bY + cZ = d
            return (
                np.array([
                    [a, b, c]
                ]),
                np.array([
                    [d]
                ])
            )
        return {
            'a': a, 'b': b, 'c': c, 'd': d
        }

    def print_equation(self):
        """Return the plane equation as a human-readable string."""
        return '{a}x + {b}y + {c}z = {d}'.format(**self.get_equation())
class Ray:
    """An infinite line defined by a direction Vector and one Point on it."""

    def __init__(self, vector: Vector, point: Point):
        self.vector = vector
        self.point = point

    def get_equation(self, standardize=False):
        """Return the line's equation(s).

        With standardize=True: two plane equations whose intersection is this
        line, as numpy arrays ([[a1,b1,c1],[a2,b2,c2]], [[d1],[d2]]) ready to
        stack into a linear system. The branches below pick equations that
        avoid dividing by zero direction components. Otherwise: a dict of the
        parametric coefficients {x0, y0, z0, a, b, c}.
        """
        # http://tutorial.math.lamar.edu/Classes/CalcIII/EqnsOfLines.aspx
        x0 = self.point.x
        y0 = self.point.y
        z0 = self.point.z
        a = self.vector.x
        b = self.vector.y
        c = self.vector.z
        if standardize:
            # return the coefficients of the equations in this form aX + bY + cZ + d = 0
            if a == 0:
                # 1X + 0Y + 0Z = x0
                a1, b1, c1, d1 = 1, 0, 0, x0
                if b == 0:
                    # 0X + 1Y + 0Z = y0
                    a2, b2, c2, d2 = 0, 1, 0, y0
                elif c == 0:
                    # 0X + 0Y + 1Z = z0
                    a2, b2, c2, d2 = 0, 0, 1, z0
                else:
                    # 0X + cY - bZ = y0*c - z0*b
                    a2, b2, c2, d2 = 0, c, -b, y0 * c - z0 * b
            elif b == 0:
                # 0X + 1Y + 0Z = y0
                a1, b1, c1, d1 = 0, 1, 0, y0
                if c == 0:
                    # 0X + 0Y + 1Z = z0
                    a2, b2, c2, d2 = 0, 0, 1, z0
                else:
                    # cX + 0Y - aZ = x0*c - z0*a
                    a2, b2, c2, d2 = c, 0, -a, x0 * c - z0 * a
            else:
                # bX - aY + 0Z = x0*b - y0*a
                a1, b1, c1, d1 = b, -a, 0, x0 * b - y0 * a
                if c == 0:
                    # 0X + 0Y + 1Z = z0
                    a2, b2, c2, d2 = 0, 0, 1, z0
                else:
                    # cX + 0Y - aZ = x0*c - z0*a
                    a2, b2, c2, d2 = c, 0, -a, x0 * c - z0 * a
            return (
                np.array([
                    [a1, b1, c1],
                    [a2, b2, c2]
                ]),
                np.array([
                    [d1],
                    [d2]
                ])
            )
        else:
            return {
                'x0': x0, 'y0': y0, 'z0': z0, 'a': a, 'b': b, 'c': c,
            }

    def print_equation(self):
        """Return the line's symmetric equations as a human-readable string.

        NOTE(review): several branches emit a stray trailing ", " after the
        single-coordinate equations (e.g. 'y = {y0}, ') — looks unintended,
        but left untouched since it only affects display output.
        """
        coeffs = self.get_equation()
        if coeffs['a'] == 0:
            eq1 = 'x = {x0}'.format(**coeffs)
            if coeffs['b'] == 0:
                eq2 = 'y = {y0}, '.format(**coeffs)
            elif coeffs['c'] == 0:
                eq2 = 'z = {z0}, '.format(**coeffs)
            else:
                eq2 = '(y - {y0}) / {b} = (z - {z0}) / {c}'.format(**coeffs)
        elif coeffs['b'] == 0:
            eq1 = 'y = {y0}'.format(**coeffs)
            if coeffs['c'] == 0:
                eq2 = 'z = {z0}, '.format(**coeffs)
            else:
                eq2 = '(x - {x0}) / {a} = (z - {z0}) / {c}'.format(**coeffs)
        else:
            eq1 = '(x - {x0}) / {a} = (y - {y0}) / {b}'.format(**coeffs)
            if coeffs['c'] == 0:
                eq2 = 'z = {z0}, '.format(**coeffs)
            else:
                eq2 = '(x - {x0}) / {a} = (z - {z0}) / {c}'.format(**coeffs)
        return eq1 + '\n' + eq2

    def intersect(self, other: Plane) -> Point:
        """Intersect this ray with a plane (delegates to Plane.intersect)."""
        return other.intersect(self)
class Line:
def __init__(self, start: Point, end: Point):
self.start = start
self.end = end
def __str__(self):
return self.pretty_print()
def pretty_print(self, indentation=''):
return '{ind}Line:\n'.format(ind=indentation) +\
'{ind}|--Start: {s}\n'.format(s=self.start.pretty_print(), ind=indentation) +\
'{ind}`--End: {e}\n'.format(e=self.end.pretty_print(), ind=indentation)
def length(self):
return self.to_vector().length()
def to_points(self):
return [self.start, self.end]
def to_vector(self, reverse=False):
if reverse:
return Vector(x=self.start.x - self.end.x,
y=self.start.y - self.end.y,
z=self.start.z - self.end.z)
else:
return Vector(x=self.end.x - self.start.x,
y=self.end.y - self.start.y,
z=self.end.z - self.start.z)
def midpoint(self) -> Point:
return Point(
x=(self.start.x + self.end.x) / 2,
y=(self.start.y + self.end.y) / 2,
z=(self.start.z + self.end.z) / 2,
)
def __eq__(self, other):
if self.start == other.start and self.end == other.end:
return True
elif self.start == other.end and self.end == other.start:
return True
else:
return False
def to_ray(self) -> Ray:
return Ray(
vector=self.to_vector(),
point=self.start
)
def flip(self) -> 'Line':
return Line(start=self.end, end=self.start)
class Rectangle:
    """A rectangle defined by one side (a Line) and a point on the opposite side."""

    def __init__(self, side: Line, external_point: Point):
        self.side = side
        self.external_point = external_point

    def __str__(self):
        return self.pretty_print()

    def pretty_print(self, indentation=''):
        # Tree-style multi-line description, optionally indented.
        parts = [
            '{ind}Rectangle:\n'.format(ind=indentation),
            '{ind}|--Side:\n'.format(ind=indentation),
            self.side.pretty_print(indentation=indentation + '| '),
            '{ind}`--External Point: {p}\n'.format(p=self.external_point.pretty_print(), ind=indentation),
        ]
        return ''.join(parts)

    def height(self):
        """Distance of the external point from the base side."""
        base = self.side.to_vector()
        to_ext = self.external_point - self.side.start
        # parallelogram area / base length = height
        return to_ext.cross_product(base).length() / base.length()

    def height_vector(self):
        """Vector from the base side to the opposite side (perpendicular component)."""
        base = self.side.to_vector()
        to_ext = self.external_point - self.side.start
        projection = base * ((to_ext * base) / (base * base))
        return to_ext - projection

    def normal_vector(self):
        """Unit normal of the rectangle's plane (side x height direction)."""
        return self.side.to_vector().cross_product(self.height_vector()).unitize()

    def area(self):
        return self.side.length() * self.height()

    def to_points(self) -> List[Point]:
        """
        :return: a list of all vertices as Point instances
        """
        lift = self.height_vector()
        base_points = self.side.to_points()
        return base_points + [corner + lift for corner in base_points[::-1]]

    def to_lines(self) -> List[Line]:
        """
        :return: a list of all edges as Line instances
        """
        corners = self.to_points()
        return [Line(s, e) for s, e in zip(corners, corners[1:] + corners[:1])]

    def center(self) -> Point:
        """Center point of the rectangle."""
        return self.side.midpoint() + (self.height_vector() / 2)
class Box:
    """A cuboid defined by a base Rectangle and a point on the opposite face."""

    def __init__(self, base: Rectangle, external_point: Point):
        self.base = base
        self.external_point = external_point

    def __str__(self):
        return self.pretty_print()

    def pretty_print(self, indentation=''):
        # Tree-style multi-line description, optionally indented.
        parts = [
            '{ind}Box:\n'.format(ind=indentation),
            '{ind}|--Base:\n'.format(ind=indentation),
            self.base.pretty_print(indentation=indentation + '| '),
            '{ind}`--External Point: {p}\n'.format(p=self.external_point.pretty_print(), ind=indentation),
        ]
        return ''.join(parts)

    def height(self):
        """Signed height: scalar projection of the diagonal onto the base normal."""
        diagonal = self.external_point - self.base.side.start
        return diagonal * self.base.normal_vector()

    def height_vector(self) -> Vector:
        """Vector from the base face to the top face."""
        return self.base.normal_vector() * self.height()

    def to_rects(self) -> List[Rectangle]:
        """
        :return: a list of all faces of the box as Rectangle instances [bottom, sides..., top]
        """
        lift = self.height_vector()
        side_faces = [Rectangle(edge, move(edge.start, lift)) for edge in self.base.to_lines()]
        return [self.base] + side_faces + [move(self.base, lift)]
class Face:
    """
    General type of face with any number of points.

    Face is treated as the projection of its points to the plane defined by
    the first 2 points and the last point in the list of vertices.
    """
    def __init__(self, points: List[Point]):
        self.vertices = points

    def __str__(self):
        return self.pretty_print()

    def pretty_print(self, indentation=''):
        # Tree-style multi-line description, optionally indented.
        return '{ind}Face:\n'.format(ind=indentation) +\
            ''.join([
                '{ind}|--{p}\n'.format(p=po.pretty_print(), ind=indentation)
                for po in self.vertices[:-1]
            ]) + \
            '{ind}`--{p}\n'.format(p=self.vertices[-1].pretty_print(), ind=indentation)

    def normal_vector(self) -> Vector:
        """
        Normal vector of the projection plane of the face
        If we see the vertices in counter-clockwise order, the normal
        is pointing towards us
        Note: we assume VertexEntryDirection == "CounterClockWise" in the idf
        Note: if vertices are in random order we don't know what will happen :-)
        Returns None implicitly if all vertex pairs are collinear.
        :return: Vector
        """
        # TODO normal should be flipped if the three points represent a concave edge
        # look for two lines in the face that are not parallel
        # BUGFIX: iterate only while vertices[i + 2] exists; the previous
        # range(len(self.vertices)) raised IndexError on (near-)degenerate
        # faces whose leading vertex pairs are all collinear.
        for i in range(len(self.vertices) - 2):
            vector1 = self.vertices[i + 1] - self.vertices[0]
            vector2 = self.vertices[i + 2] - self.vertices[0]
            normal = vector1.cross_product(vector2)
            if normal != Vector(0, 0, 0):
                return normal.unitize()

    def area(self, signed=False) -> float:
        """
        returns the area of the specified surface
        method described here: http://geomalgorithms.com/a01-_area.html
        :param signed: when True, keep the sign (negative for clockwise faces)
        :return: area of the face
        """
        # close the loop of vertices without modifying the object itself
        point_vectors = [Vector(v.x, v.y, v.z) for v in self.vertices]
        # add the first point
        point_vectors += point_vectors[:1]
        normal_vector = self.normal_vector()
        area = 0
        for point_count in range(0, len(point_vectors) - 1):
            area += normal_vector.scalar_product(
                point_vectors[point_count].cross_product(point_vectors[point_count + 1]))
        area /= 2
        if signed:
            return area
        else:
            return abs(area)

    def perimeter(self) -> float:
        """Sum of the edge lengths."""
        return sum([side.length() for side in self.to_lines()])

    def to_lines(self) -> List[Line]:
        """Return the edges as Line instances, closing the loop back to the first vertex."""
        return [Line(s, e) for s, e in zip(self.vertices, self.vertices[1:] + self.vertices[:1])]

    def __eq__(self, other):
        # Faces are equal if their vertex loops match after rotation,
        # in either winding direction.
        if self.vertices[0] in other.vertices:
            start_index = other.vertices.index(self.vertices[0])
            if self.vertices == other.vertices[start_index:] + other.vertices[:start_index]:
                return True
            elif self.vertices == other.vertices[start_index::-1] + other.vertices[:start_index:-1]:
                return True
            else:
                return False
        else:
            return False

    def centroid(self) -> Point:
        """Area-weighted centroid of the (possibly non-convex) polygon."""
        # https://math.stackexchange.com/questions/90463/how-can-i-calculate-the-centroid-of-polygon
        # triangulation with signed areas and centroids
        start_corner = self.vertices[0]
        triangle_centroids = []
        areas = []
        for k in range(len(self.vertices) - 2):
            # get vectors from first corner point pointing to next two corner points
            a_k = self.vertices[k + 1] - start_corner
            a_l = self.vertices[k + 2] - start_corner
            # get centroid of the triangle between the two vectors
            triangle_centroids.append(start_corner + (a_k + a_l) / 3)
            # get signed area of the triangle
            areas.append(self.normal_vector() * a_k.cross_product(a_l) / 2)
        # total area
        area = sum(areas)
        # return weighted average of centroids (centroid of face)
        return Point(
            x=sum([c.x * w for c, w in zip(triangle_centroids, areas)]) / area,
            y=sum([c.y * w for c, w in zip(triangle_centroids, areas)]) / area,
            z=sum([c.z * w for c, w in zip(triangle_centroids, areas)]) / area,
        )

    def to_plane(self) -> Plane:
        """Plane through the face's first vertex with the face's normal."""
        return Plane(
            normal=self.normal_vector(),
            point=self.vertices[0]
        )
def move(obj: Union[Point, Line, Rectangle, Box, Face], vector: Vector, inplace=False):
    """Translate a geometry object by *vector*.

    Points are immutable here: a new translated Point is always returned,
    regardless of *inplace*. Composite objects are walked recursively and
    every geometric attribute (or list of them) is translated.

    :param obj: the geometry object to translate
    :param vector: translation vector
    :param inplace: if True, mutate *obj* instead of a deep copy
    :return: the translated object
    """
    if isinstance(obj, Point):
        return obj + vector
    target = obj if inplace else copy.deepcopy(obj)
    for name, value in target.__dict__.items():
        if isinstance(value, (Point, Line, Rectangle, Box, Face)):
            # recurse into nested geometry
            setattr(target, name, move(value, vector))
        elif isinstance(value, list):
            # NOTE(review): assumes list attributes only hold geometry objects
            setattr(target, name, [move(item, vector) for item in value])
    return target
def rotate_xy(obj: Union[Point, Line, Rectangle, Box, Face], angle: float,
              center: Point = Point(0, 0, 0), inplace=False):
    """
    Rotate objects in the xy plane (around z axis)
    :param obj: object to rotate
    :param angle: angle to rotate with
    :param center: center to rotate around
    :param inplace: set True to modify the object instance itself
    :return: rotated object
    """
    if isinstance(obj, Point):
        theta = math.radians(angle)
        cos_t = math.cos(theta)
        sin_t = math.sin(theta)
        # translate so the rotation center sits at the origin
        shifted = move(obj, Point(0, 0, 0) - center)
        # standard 2D rotation in the xy plane; z is untouched
        rotated = Point(
            x=shifted.x * cos_t - shifted.y * sin_t,
            y=shifted.x * sin_t + shifted.y * cos_t,
            z=shifted.z
        )
        # translate back to the original frame
        return move(rotated, center - Point(0, 0, 0))
    target = obj if inplace else copy.deepcopy(obj)
    for name, value in target.__dict__.items():
        if isinstance(value, (Point, Line, Rectangle, Box, Face)):
            # recurse into nested geometry
            setattr(target, name, rotate_xy(value, angle, center))
        elif isinstance(value, list):
            setattr(target, name, [rotate_xy(item, angle, center) for item in value])
    return target
| 34.850993
| 113
| 0.526366
| 2,703
| 21,050
| 3.990381
| 0.120607
| 0.024476
| 0.016874
| 0.018543
| 0.409605
| 0.325515
| 0.287595
| 0.251622
| 0.230113
| 0.226034
| 0
| 0.021504
| 0.346081
| 21,050
| 603
| 114
| 34.908789
| 0.762005
| 0.125226
| 0
| 0.368675
| 0
| 0.009639
| 0.031854
| 0
| 0
| 0
| 0
| 0.001658
| 0
| 1
| 0.166265
| false
| 0
| 0.009639
| 0.086747
| 0.385542
| 0.062651
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
3978db58ab61262a3273d3565d293223c2d9c041
| 556
|
py
|
Python
|
danmu/log.py
|
awesome-archive/danmu
|
2f4e943d859cecd31b289e21984e35a34515b71f
|
[
"WTFPL"
] | null | null | null |
danmu/log.py
|
awesome-archive/danmu
|
2f4e943d859cecd31b289e21984e35a34515b71f
|
[
"WTFPL"
] | null | null | null |
danmu/log.py
|
awesome-archive/danmu
|
2f4e943d859cecd31b289e21984e35a34515b71f
|
[
"WTFPL"
] | null | null | null |
"""Logging setup for danmu: DEBUG-level records are written to config/run.log."""
import logging
import os

# makedirs with exist_ok is race-free, unlike the previous exists()+mkdir().
os.makedirs('config', exist_ok=True)

log = logging.getLogger('danmu')
log.setLevel(logging.DEBUG)

fileHandler = logging.FileHandler(os.path.join('config', 'run.log'), encoding='utf8')
fileHandler.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)-17s <%(message)s> %(levelname)s %(filename)s[%(lineno)d]',
                              datefmt='%Y%m%d %H:%M:%S')
fileHandler.setFormatter(formatter)
log.addHandler(fileHandler)

if __name__ == '__main__':
    # quick manual check that both levels reach the log file
    log.debug('This is debug')
    log.info('This is info')
| 34.75
| 101
| 0.690647
| 74
| 556
| 5.081081
| 0.527027
| 0.031915
| 0.106383
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006148
| 0.122302
| 556
| 15
| 102
| 37.066667
| 0.764344
| 0
| 0
| 0
| 0
| 0.076923
| 0.275416
| 0.044362
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
3978e2b002dc50ec5e34788e51f2d661aefcb01f
| 2,016
|
py
|
Python
|
vector_env_comparison.py
|
neuroevolution-ai/NaturalNets-PerformanceTests
|
de7d99424cc9ab29fdc3691c12d20d0a35afe0fe
|
[
"MIT"
] | null | null | null |
vector_env_comparison.py
|
neuroevolution-ai/NaturalNets-PerformanceTests
|
de7d99424cc9ab29fdc3691c12d20d0a35afe0fe
|
[
"MIT"
] | 1
|
2021-02-13T18:55:40.000Z
|
2021-02-13T18:55:40.000Z
|
vector_env_comparison.py
|
neuroevolution-ai/NaturalNets-PerformanceTests
|
de7d99424cc9ab29fdc3691c12d20d0a35afe0fe
|
[
"MIT"
] | null | null | null |
import multiprocessing
import time
import gym
import gym3
import numpy as np
from gym.vector import make as make_vec_env
from procgen import ProcgenGym3Env
population_size = 112  # number of parallel environments / episodes per benchmark
number_env_steps = 1000  # random steps taken in every episode
def run_episode_full(u):
    """Run one episode in a single (non-vectorized) procgen env.

    :param u: ignored; present so the function is usable with pool.map
    :return: cumulative reward over number_env_steps random actions
    """
    env = gym.make('procgen:procgen-heist-v0')
    env.reset()
    total_reward = 0
    for _ in range(number_env_steps):
        _obs, step_reward, _done, _info = env.step(env.action_space.sample())
        total_reward += step_reward
    return total_reward
def run_episode_vec_env(u):
    """Run one batched episode using gym's vectorized environment API.

    :param u: ignored; kept for signature symmetry with run_episode_full
    :return: per-environment cumulative rewards (length population_size)
    """
    env = make_vec_env(id="procgen:procgen-heist-v0", num_envs=population_size, asynchronous=True)
    env.reset()
    totals = np.zeros(population_size)
    for _ in range(number_env_steps):
        _obs, step_rewards, _done, _info = env.step(env.action_space.sample())
        totals += step_rewards
    return totals
def run_episode_gym3_vec_env(u):
    """Run one batched episode using gym3's ProcgenGym3Env.

    :param u: ignored; kept for signature symmetry with the other runners
    :return: per-environment cumulative rewards (length population_size)
    """
    env = ProcgenGym3Env(num=population_size, env_name="heist")
    totals = np.zeros(population_size)
    for _ in range(number_env_steps):
        # gym3 API: act() then observe() (returns reward first)
        env.act(gym3.types_np.sample(env.ac_space, bshape=(env.num,)))
        step_rewards, _obs, _first = env.observe()
        totals += step_rewards
    return totals
def main():
    """Benchmark three ways of running procgen episodes and print timings.

    Compares (1) multiprocessing over single envs, (2) gym's vectorized env
    and (3) gym3's vectorized env, then sanity-checks the result lengths.
    """
    inputs = np.zeros(population_size)

    # Multiprocessing — create the pool before timing (as before) so only
    # map() is measured; close/join fixes the worker-process leak.
    pool = multiprocessing.Pool()
    t_start = time.time()
    result_mp = pool.map(run_episode_full, inputs)
    print("Multi-Processing map took: {:6.3f}s".format(time.time()-t_start))
    pool.close()
    pool.join()

    # Vectorized environment
    t_start = time.time()
    result_vec = run_episode_vec_env([])
    print("Vectorized environment took: {:6.3f}s".format(time.time()-t_start))

    # Gym3 Vectorized environment
    t_start = time.time()
    result_gym3_vec = run_episode_gym3_vec_env([])
    print("Gym3 vec environment took: {:6.3f}s".format(time.time()-t_start))

    # all three approaches must produce one reward per population member
    assert (len(result_mp) == len(result_vec)
            and len(result_mp) == len(result_gym3_vec)
            and len(result_mp) == population_size)


if __name__ == "__main__":
    main()
| 22.651685
| 98
| 0.671627
| 282
| 2,016
| 4.553191
| 0.280142
| 0.076324
| 0.043614
| 0.037383
| 0.464953
| 0.32866
| 0.32866
| 0.264798
| 0.264798
| 0.242991
| 0
| 0.016383
| 0.212798
| 2,016
| 88
| 99
| 22.909091
| 0.792691
| 0.032738
| 0
| 0.346154
| 0
| 0
| 0.086331
| 0.024666
| 0
| 0
| 0
| 0
| 0.019231
| 1
| 0.076923
| false
| 0
| 0.134615
| 0
| 0.269231
| 0.057692
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
397b7ca45c3f9235af0d2fa52c9c29634429cebe
| 1,641
|
py
|
Python
|
raiden_api/model/requests.py
|
kelsos/test-enviroment-scripts
|
ab8d9f1e9a1deed048dcc93ec9d014bf6b58252d
|
[
"MIT"
] | 1
|
2019-03-28T00:24:48.000Z
|
2019-03-28T00:24:48.000Z
|
raiden_api/model/requests.py
|
kelsos/test-enviroment-scripts
|
ab8d9f1e9a1deed048dcc93ec9d014bf6b58252d
|
[
"MIT"
] | 4
|
2019-03-26T15:27:20.000Z
|
2019-04-29T10:46:08.000Z
|
raiden_api/model/requests.py
|
kelsos/test-enviroment-scripts
|
ab8d9f1e9a1deed048dcc93ec9d014bf6b58252d
|
[
"MIT"
] | 2
|
2019-03-26T14:27:24.000Z
|
2019-03-29T10:28:40.000Z
|
import time
import typing
class PaymentRequest:
    """Request payload for initiating a payment.

    When no identifier is given, the current unix timestamp is used.
    """

    def __init__(self, amount: int, identifier: int = None):
        self.amount = amount
        if identifier is None:
            identifier = int(time.time())
        self.identifier = identifier

    def to_dict(self) -> typing.Dict[str, typing.Any]:
        """Serialize the request to a plain dict."""
        return {
            'amount': self.amount,
            'identifier': self.identifier,
        }
class OpenChannelRequest:
    """Request payload for opening a payment channel."""

    def __init__(
            self,
            partner_address: str,
            token_address: str,
            total_deposit: int,
            settle_timeout: int = 500,
    ):
        self.partner_address = partner_address
        self.token_address = token_address
        self.total_deposit = total_deposit
        self.settle_timeout = settle_timeout

    def to_dict(self) -> typing.Dict[str, typing.Any]:
        """Serialize all four channel parameters to a plain dict."""
        keys = ('partner_address', 'token_address', 'total_deposit', 'settle_timeout')
        return {key: getattr(self, key) for key in keys}
class ManageChannelRequest:
    """Request payload for updating a channel (deposit and/or state change).

    Only fields that were actually provided are serialized by to_dict().
    """

    def __init__(self, total_deposit: int = None, state: str = None):
        # 'closed' is the only state transition this request supports
        assert state is None or state == 'closed'
        self.total_deposit = total_deposit
        self.state = state

    def to_dict(self) -> typing.Dict[str, typing.Any]:
        """Serialize only the fields that were set.

        Uses 'is not None' so a legitimate total_deposit of 0 is kept
        (the previous truthiness check silently dropped it).
        """
        result: typing.Dict[str, typing.Any] = {}
        if self.total_deposit is not None:
            result['total_deposit'] = self.total_deposit
        if self.state is not None:
            result['state'] = self.state
        return result
| 26.467742
| 69
| 0.597806
| 178
| 1,641
| 5.280899
| 0.191011
| 0.140426
| 0.102128
| 0.080851
| 0.281915
| 0.198936
| 0.130851
| 0.130851
| 0.130851
| 0.130851
| 0
| 0.002648
| 0.309567
| 1,641
| 61
| 70
| 26.901639
| 0.827008
| 0
| 0
| 0.217391
| 0
| 0
| 0.057892
| 0
| 0
| 0
| 0
| 0
| 0.021739
| 1
| 0.130435
| false
| 0
| 0.043478
| 0
| 0.304348
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
397c69961dfa90f232f4ac9c29a73bc3e9510c76
| 823
|
py
|
Python
|
Dynamic/KnapNoRep.py
|
mladuke/Algorithms
|
eab5d89c5f496b2849f0646dbfa3a4db93a0b391
|
[
"MIT"
] | null | null | null |
Dynamic/KnapNoRep.py
|
mladuke/Algorithms
|
eab5d89c5f496b2849f0646dbfa3a4db93a0b391
|
[
"MIT"
] | null | null | null |
Dynamic/KnapNoRep.py
|
mladuke/Algorithms
|
eab5d89c5f496b2849f0646dbfa3a4db93a0b391
|
[
"MIT"
] | null | null | null |
def zeroOneKnapsack(v, w, W):
    """Solve the 0/1 knapsack problem by dynamic programming.

    :param v: list of item values
    :param w: list of item weights (parallel to v)
    :param W: knapsack capacity (non-negative int)
    :return: [best total value, flags] where flags[i] is 1 if item i is used
    """
    n = len(v)
    # c[i][j] = best value achievable with items 0..i and capacity j
    # (removed the dead 'c = []' assignment that preceded this)
    c = [[0 for _ in range(W + 1)] for _ in range(n)]
    for i in range(0, n):
        for j in range(0, W + 1):
            if w[i] > j:
                # item i does not fit: inherit the best without it
                c[i][j] = c[i - 1][j]
            else:
                # either skip item i, or take it and fill the remaining space
                c[i][j] = max(c[i - 1][j], v[i] + c[i - 1][j - w[i]])
    return [c[n - 1][W], getUsedItems(w, c)]


def getUsedItems(w, c):
    """Backtrack through the DP table to recover which items were used.

    :param w: list of item weights
    :param c: DP table produced by zeroOneKnapsack
    :return: list of 0/1 flags, one per item
    """
    i = len(c) - 1
    currentW = len(c[0]) - 1
    marked = [0] * len(c)
    while i >= 0 and currentW >= 0:
        # row 0 is special-cased: c[i - 1] would wrap around to the last row
        if (i == 0 and c[i][currentW] > 0) or c[i][currentW] != c[i - 1][currentW]:
            marked[i] = 1
            currentW = currentW - w[i]
        i = i - 1
    return marked
# adapted from https://sites.google.com/site/mikescoderama/Home/0-1-knapsack-problem-in-p
# Demo run: capacity 10 with four items; prints the best value and item flags.
W = 10
v = [9, 14, 16, 30]
w = [2, 3, 4, 6]
print(zeroOneKnapsack(v, w, W))
| 24.939394
| 90
| 0.509113
| 160
| 823
| 2.61875
| 0.3
| 0.042959
| 0.02864
| 0.02864
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060403
| 0.27582
| 823
| 33
| 91
| 24.939394
| 0.642617
| 0.106926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074074
| false
| 0
| 0
| 0
| 0.148148
| 0.037037
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
397e9f0c2652f385de08911a9951e3eb07c5c86a
| 874
|
py
|
Python
|
tools/one-offs/convert-genres.py
|
DrDos0016/z2
|
b63e77129fefcb4f990ee1cb9952f4f708ee3a2b
|
[
"MIT"
] | 3
|
2017-05-01T19:53:57.000Z
|
2018-08-27T20:14:43.000Z
|
tools/one-offs/convert-genres.py
|
DrDos0016/z2
|
b63e77129fefcb4f990ee1cb9952f4f708ee3a2b
|
[
"MIT"
] | null | null | null |
tools/one-offs/convert-genres.py
|
DrDos0016/z2
|
b63e77129fefcb4f990ee1cb9952f4f708ee3a2b
|
[
"MIT"
] | 1
|
2018-08-27T20:14:46.000Z
|
2018-08-27T20:14:46.000Z
|
import os
import sys
import django
# Make the project root importable and bootstrap Django before the model imports below.
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "museum.settings")
django.setup()
from django.contrib.auth.models import User # noqa: E402
from museum_site.models import * # noqa: E402
def main():
    """Convert every File's delimited genre string into Genre m2m associations."""
    print("This script will convert the SSV field file.genre to proper Genre object associations")
    input("Press Enter to begin... ")
    for entry in File.objects.all().order_by("id"):
        genre_titles = entry.genre.split("/")
        expected = len(genre_titles)
        for title in genre_titles:
            # m2m add() persists immediately; no explicit save() needed
            entry.genres.add(Genre.objects.get(title=title))
        if len(entry.genres.all()) != expected:
            print("UH OH", entry.title)
        print(entry.title, len(entry.genres.all()), expected)
    return True


if __name__ == '__main__':
    main()
| 23.621622
| 98
| 0.639588
| 127
| 874
| 4.251969
| 0.519685
| 0.033333
| 0.048148
| 0.055556
| 0.125926
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00885
| 0.224256
| 874
| 36
| 99
| 24.277778
| 0.787611
| 0.024027
| 0
| 0
| 0
| 0
| 0.190588
| 0.025882
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0
| 0.208333
| 0
| 0.291667
| 0.125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
397ee9d80cbe93ca71977088ed64acae351304fd
| 553
|
py
|
Python
|
python/learn/PythonDataVisualizationCookbookSE_Code/Chapter 04/ch04_rec03_plot_with_table.py
|
flyingwjw/Documentation
|
567608f388ca369b864c2d75a94647801b5dfa1e
|
[
"Unlicense"
] | 26
|
2016-08-25T01:33:36.000Z
|
2022-03-20T11:33:31.000Z
|
python/learn/PythonDataVisualizationCookbookSE_Code/Chapter 04/ch04_rec03_plot_with_table.py
|
flyingwjw/Documentation
|
567608f388ca369b864c2d75a94647801b5dfa1e
|
[
"Unlicense"
] | null | null | null |
python/learn/PythonDataVisualizationCookbookSE_Code/Chapter 04/ch04_rec03_plot_with_table.py
|
flyingwjw/Documentation
|
567608f388ca369b864c2d75a94647801b5dfa1e
|
[
"Unlicense"
] | 31
|
2016-08-16T15:32:46.000Z
|
2021-01-26T19:16:48.000Z
|
import matplotlib.pylab as plt
import numpy as np

# Plot random data with an attached data table (matplotlib cookbook example).
plt.figure()
axes=plt.gca()  # NOTE(review): 'axes' is never used below
y= np.random.randn(9)  # nine random samples to plot
# table contents: 3x3 cell values plus row/column labels and row colors
col_labels=['col1','col2','col3']
row_labels=['row1','row2','row3']
table_vals=[[11,12,13],[21,22,23],[28,29,30]]
row_colors=['red','gold','green']
the_table = plt.table(cellText=table_vals,
colWidths = [0.1]*3,
rowLabels=row_labels,
colLabels=col_labels,
rowColours=row_colors,
loc='upper right')
plt.text(12,3.4,'Table Title',size=8)  # title placed in data coordinates
plt.plot(y)
plt.show()
| 24.043478
| 45
| 0.593128
| 82
| 553
| 3.890244
| 0.695122
| 0.056426
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077103
| 0.22604
| 553
| 23
| 46
| 24.043478
| 0.668224
| 0
| 0
| 0
| 0
| 0
| 0.104693
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
3982bd3c6134c4bd9c5526d9392f74c9c724e7ab
| 556
|
py
|
Python
|
makahiki/apps/widgets/energy_power_meter/views.py
|
justinslee/Wai-Not-Makahiki
|
4b7dd685012ec64758affe0ecee3103596d16aa7
|
[
"MIT"
] | 1
|
2015-07-22T11:31:20.000Z
|
2015-07-22T11:31:20.000Z
|
makahiki/apps/widgets/energy_power_meter/views.py
|
justinslee/Wai-Not-Makahiki
|
4b7dd685012ec64758affe0ecee3103596d16aa7
|
[
"MIT"
] | null | null | null |
makahiki/apps/widgets/energy_power_meter/views.py
|
justinslee/Wai-Not-Makahiki
|
4b7dd685012ec64758affe0ecee3103596d16aa7
|
[
"MIT"
] | null | null | null |
"""Handle rendering of the Energy Power Meter widget."""
from apps.widgets.resource_goal import resource_goal
def supply(request, page_name):
    """Build the widget context: the realtime meter interval plus fixed dimensions."""
    _ = page_name  # unused; kept for the supply() signature convention
    team = request.user.get_profile().team
    interval = None
    if team:
        interval = resource_goal.team_goal_settings(team, "energy").realtime_meter_interval
    return {"interval": interval,
            "width": 300,
            "height": 100
            }
| 26.47619
| 91
| 0.645683
| 67
| 556
| 5.179104
| 0.641791
| 0.103746
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014528
| 0.257194
| 556
| 20
| 92
| 27.8
| 0.825666
| 0.201439
| 0
| 0
| 0
| 0
| 0.057737
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.071429
| 0
| 0.214286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
3983bdef6c20e9a6ac20cbeb01a996a5e1766f34
| 4,855
|
py
|
Python
|
hkpy/hkpyo/reasoners/simple_reasoner.py
|
renan-souza/hkpy
|
1fdcd3da3520e876f95295bf6d15e40581b2bb49
|
[
"MIT"
] | 7
|
2019-12-23T17:59:36.000Z
|
2022-02-17T19:35:32.000Z
|
hkpy/hkpyo/reasoners/simple_reasoner.py
|
renan-souza/hkpy
|
1fdcd3da3520e876f95295bf6d15e40581b2bb49
|
[
"MIT"
] | 9
|
2019-12-30T13:34:41.000Z
|
2021-07-16T22:46:06.000Z
|
hkpy/hkpyo/reasoners/simple_reasoner.py
|
renan-souza/hkpy
|
1fdcd3da3520e876f95295bf6d15e40581b2bb49
|
[
"MIT"
] | 2
|
2020-03-14T21:34:02.000Z
|
2021-06-12T00:10:43.000Z
|
###
# Copyright (c) 2019-present, IBM Research
# Licensed under The MIT License [see LICENSE for details]
###
from collections import defaultdict
from hkpy.hkpyo.model import HKOContext, HKOContextManager, HKOConcept, HKOSubConceptAxiom, HKOConjunctionExpression, \
HKODisjunctionExpression, HKOConceptAssertion, HKOIndividual, HKOPropertyAssertion, HKOLiteral, Union, HKOAxiom, \
HKOAssertion, HKOProperty
class HKAssertedContextReasoner:
    """Answer queries from the axioms/assertions explicitly asserted in one context.

    Builds in-memory indexes of the context's elements up front and answers
    from those indexes only — no inference beyond what is directly asserted.
    """

    def __init__(self, context: HKOContext):
        self.mgr = HKOContextManager.get_global_context_manager()
        self.context = context
        self.reset_caches()

    def reset_caches(self):
        """Rebuild every lookup index from self.context.elements."""
        # flat lists of the context's axioms and assertions
        self.cache_axioms = []
        self.cache_assertions = []
        # individual <-> concept membership, indexed in both directions
        self.cache_individual_concept = defaultdict(lambda: {})
        self.cache_concept_individual = defaultdict(lambda: {})
        # property assertions indexed (arg1, property) -> arg2 and the reverse
        self.cache_individual_prop_value = defaultdict(lambda: defaultdict(lambda: {}))
        self.cache_value_prop_individual = defaultdict(lambda: defaultdict(lambda: {}))
        for e in self.context.elements:
            if isinstance(e, HKOConceptAssertion):
                self.cache_individual_concept[e.individual][e.concept] = True
                self.cache_concept_individual[e.concept][e.individual] = True
            elif isinstance(e, HKOPropertyAssertion):
                self.cache_individual_prop_value[e.arg1][e.property][e.arg2] = True
                self.cache_value_prop_individual[e.arg2][e.property][e.arg1] = True
            # an element may also be classified as axiom or assertion
            if isinstance(e, HKOAxiom):
                self.cache_axioms.append(e)
            elif isinstance(e, HKOAssertion):
                self.cache_assertions.append(e)

    def get_direct_sub_concepts_of(self, super_concept: HKOConcept) -> [HKOConcept]:
        """Directly asserted sub-concepts of *super_concept* (no transitivity)."""
        print("Warning: incomplete implementation of get_direct_sub_concepts_of")
        sub_concepts = set()
        for e in self.cache_axioms:
            if isinstance(e, HKOSubConceptAxiom):
                if e.sup == super_concept:
                    sub_concepts.add(e.sub)
            # TODO: should look recursively into conjunctive expressions
            # elif isinstance(e.sub, HKOConjunctionExpression):
            #     # sub = (and c1 c2 super c3 ... cn)
            #     for exp in e.sub.concepts:
            #         if exp == super_concept:
            #             sub_concepts.add(e.sub)
        return list(sub_concepts)

    def get_direct_instances_of(self, concept: HKOConcept) -> [HKOIndividual]:
        """Individuals directly asserted as instances of *concept*."""
        # NOTE(review): the warning names the wrong function (copy/paste);
        # it is a runtime string so it is left untouched here.
        print("Warning: incomplete implementation of get_direct_sub_concepts_of")
        return list(self.cache_concept_individual[concept].keys())

    def is_direct_instance_of(self, individual: HKOIndividual, concept: HKOConcept) -> bool:
        """True if the concept assertion was directly asserted in the context."""
        return self.cache_concept_individual[concept].get(individual, False)

    def is_instance_of(self, individual: HKOIndividual = None, concept: HKOConcept = None) -> bool:
        """Alias for is_direct_instance_of — no indirect reasoning is done."""
        return self.is_direct_instance_of(individual=individual, concept=concept)

    def get_concept_assertion_pattern(self, concept: HKOConcept = None, individual: HKOIndividual = None) -> object:
        """Concept assertions matching the given pattern; None acts as a wildcard."""
        matched_assertions = set()
        for e in self.context.elements:
            if isinstance(e, HKOConceptAssertion):
                if concept is not None and e.concept != concept: continue
                if individual is not None and e.individual != individual: continue
                # match!
                matched_assertions.add(e)
        return list(matched_assertions)

    def get_related_values(self, property: HKOProperty, arg1: HKOIndividual) -> [Union[HKOIndividual, HKOLiteral]]:
        """All values v such that (arg1, property, v) is asserted."""
        return list(self.cache_individual_prop_value.get(arg1, {}).get(property, {}).keys())

    def get_entities_relating_to(self, property: HKOProperty, arg2: HKOIndividual) -> [
        Union[HKOIndividual, HKOLiteral]]:
        """All entities e such that (e, property, arg2) is asserted."""
        return list(self.cache_value_prop_individual.get(arg2, {}).get(property, {}).keys())

    def get_related_value(self, property, arg1) -> Union[HKOIndividual, HKOLiteral]:
        """The single value related to arg1 via property.

        :return: the value, or None when no assertion exists
        :raises Exception: when more than one value is asserted
        """
        lst = self.get_related_values(property, arg1)
        if len(lst) == 1:
            return lst[0]
        elif len(lst) == 0:
            return None
        else:
            raise Exception('Property returned more related values than expected.')

    def get_property_assertion_pattern(self, property=None, arg1=None, arg2=None) -> [HKOPropertyAssertion]:
        """Property assertions matching the given pattern; None acts as a wildcard."""
        matched_assertions = set()
        for e in self.cache_assertions:
            if isinstance(e, HKOPropertyAssertion):
                if property is not None and e.property != property: continue
                if arg1 is not None and e.arg1 != arg1: continue
                if arg2 is not None and e.arg2 != arg2: continue
                # match!
                matched_assertions.add(e)
        return list(matched_assertions)
| 45.801887
| 119
| 0.65829
| 532
| 4,855
| 5.827068
| 0.231203
| 0.052258
| 0.030645
| 0.019355
| 0.331613
| 0.196452
| 0.189032
| 0.153548
| 0.114839
| 0.114839
| 0
| 0.007684
| 0.249434
| 4,855
| 105
| 120
| 46.238095
| 0.84303
| 0.073944
| 0
| 0.166667
| 0
| 0
| 0.040197
| 0.011612
| 0
| 0
| 0
| 0.009524
| 0.263889
| 1
| 0.152778
| false
| 0
| 0.027778
| 0.055556
| 0.333333
| 0.027778
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
3985a0d08f66c16279006e5cf92a0a215003522a
| 8,031
|
py
|
Python
|
prediction-experiments/python-nb/ov-predict/src/api/model_loader.py
|
ouyangzhiping/Info-extract
|
d8a7ca47201dad4d28b9b96861b0b1b3fc27c63a
|
[
"Apache-2.0"
] | 15
|
2019-02-25T09:53:37.000Z
|
2022-03-22T05:13:24.000Z
|
prediction-experiments/python-nb/ov-predict/src/api/model_loader.py
|
ouyangzhiping/Info-extract
|
d8a7ca47201dad4d28b9b96861b0b1b3fc27c63a
|
[
"Apache-2.0"
] | 8
|
2019-06-12T10:14:58.000Z
|
2021-08-15T08:04:10.000Z
|
prediction-experiments/python-nb/ov-predict/src/api/model_loader.py
|
ouyangzhiping/Info-extract
|
d8a7ca47201dad4d28b9b96861b0b1b3fc27c63a
|
[
"Apache-2.0"
] | 1
|
2022-03-15T16:45:35.000Z
|
2022-03-15T16:45:35.000Z
|
import sys
import numpy as np
import os
import requests
import json
import logging
from json import JSONEncoder
from keras.models import model_from_json
sys.path.append('..')
from preprocessing.InputHelper import InputHelper
from model.lstm import rmse
from model.lstm import buildModel
from keras.preprocessing.sequence import pad_sequences
sys.path.append('..')
'''
This is a stand-alone test for the python API service. It doesn't use Flask.
'''
# Model / serving configuration constants.
OPTIMIZER = 'rmsprop'  # NOTE(review): not referenced in this module — presumably used by training code
NUM_CLASSES = 0  # number of output classes passed to buildModel; 0 here — TODO confirm meaning
MAXLEN = 50  # padded token-sequence length fed to the model
SAVED_MODEL_FILE = '../../saved_models/model.h5'  # default weights location
PUBMED_DIM = 200  # presumably the PubMed word-embedding width — confirm against InputHelper
VAL_DIMENSIONS = 5  # trailing 'value' dimensions of each embedding vector (see replaceAVPSeqWithNN)
# When both env vars are set, predictions go to a TF Serving instance over
# HTTP instead of a locally loaded Keras model.
TF_SERVING_HOSTNAME = os.environ.get("TF_SERVING_HOSTNAME", "")
TF_SERVING_PORT = os.environ.get("TF_SERVING_PORT", "")
USES_TF_SERVING = TF_SERVING_HOSTNAME != "" and TF_SERVING_PORT != ""
class FuzzyMatchInfo:
    """Record of a fuzzy vocabulary match.

    closestToken  -- vocabulary token closest to the queried node
    origValue     -- numeric value parsed from the query token
    replacedValue -- numeric value taken from the matched vocabulary token
    """

    def __init__(self, closestToken, origValue, replacedValue):
        self.closestToken, self.origValue, self.replacedValue = \
            closestToken, origValue, replacedValue
class NumpyArrayEncoder(JSONEncoder):
    """JSONEncoder that serializes numpy arrays as plain Python lists."""

    def default(self, obj):
        # only ndarrays get special treatment; defer everything else
        if not isinstance(obj, np.ndarray):
            return JSONEncoder.default(self, obj)
        return obj.tolist()
def get_model_json(saved_model):
    """Read and return the serialized model architecture from *saved_model*.

    :param saved_model: path to a JSON model file
    :return: the file's contents as a string
    """
    print("Loading model from file {}".format(saved_model))
    # 'with' guarantees the handle is closed even if read() raises
    # (the previous version leaked the handle on error).
    with open(saved_model, 'r') as json_file:
        return json_file.read()
def predict_outcome(inpH, model, test_instance_str):
    """Tokenize one instance string, pad it to MAXLEN and predict.

    :return: the model's prediction for the single instance
    """
    sequences = inpH.tokenizer.texts_to_sequences([test_instance_str])
    padded = pad_sequences(sequences, padding='post', maxlen=MAXLEN)
    predictions = model.predict(padded, steps=1)
    return predictions[0]
def predict_regression_outcome(model, model_name, test_input_batch):
    """Predict scalar regression outcomes: the first output column per instance."""
    predictions = predict_outcome_local_or_api(model, model_name, test_input_batch)
    return predictions[:, 0]
def predict_confidence(model, model_name, test_input_batch):
    """Per-instance confidence: the maximum value across the output columns."""
    predictions = predict_outcome_local_or_api(model, model_name, test_input_batch)
    return np.max(predictions, axis=1)
def predict_outcome_local_or_api(model, model_name, test_input_batch):
    """Dispatch prediction to TF Serving when configured, else run locally."""
    if not USES_TF_SERVING:
        # in this case, "model" is the in-process keras model
        return predict_outcome_with_dynamic_vocabchange(model, test_input_batch)
    return call_tf_serving_predict(model_name, test_input_batch)
def predict_outcome_with_dynamic_vocabchange(model, test_input_batch):
    """Run the in-process model on one batch, logging inputs and outputs."""
    batch = test_input_batch
    print("x_test = {}".format(batch))
    predictions = model.predict_on_batch(batch)
    print('y_preds = {}'.format(predictions))
    return predictions
def call_tf_serving_predict(model_name, test_input_batch):
    """POST the batch to the TF Serving REST endpoint and return predictions.

    :return: numpy array of the server's "predictions" field
    """
    x_test = test_input_batch
    logging.debug("x_test = {}".format(x_test))
    url = get_tf_serving_predict_endpoint(model_name)
    # the whole batch is sent as one request body; NumpyArrayEncoder turns
    # ndarrays into plain lists for JSON
    body = json.dumps({"instances": x_test}, cls=NumpyArrayEncoder)
    r = requests.post(url, body)
    logging.info(f"Response from {url}")
    logging.info(r.text)
    return np.array(r.json()["predictions"])
def get_tf_serving_predict_endpoint(model_name):
    """Compose the TF Serving REST predict URL for *model_name*."""
    return f"http://{TF_SERVING_HOSTNAME}:{TF_SERVING_PORT}/v1/models/{model_name}:predict"
def init_embedding(embfile):
    """Load the word-embedding file into a fresh InputHelper and return it."""
    helper = InputHelper()
    print("converting words to ids...")
    helper.convertWordsToIds(embfile)
    print("vocab size = {}".format(helper.vocab_size))
    helper.loadW2V(embfile)
    return helper
# Replace a node of the form C:<x>:0.1 with C:<x>:0.2 (the closest value with the same attrib-id in our vocabulary)
def getClosestNode(inpH, node):
    """Find the vocabulary token with the same attribute id as *node* whose
    numeric value part is closest to node's value.

    :param inpH: InputHelper whose pre_emb vocabulary is scanned
    :param node: token of the form '<type>:<attrib-id>:<value>'
    :return: FuzzyMatchInfo, or None when node's value part is not numeric
    """
    # NOTE(review): assumes node always has at least three ':'-separated
    # parts — a malformed token would raise IndexError. TODO confirm inputs.
    keytokens = node.split(':')
    keynode = keytokens[1]
    keyvalue = keytokens[2]
    if not is_number(keyvalue):
        return None
    keyvalue = float(keyvalue)
    # float('inf') instead of the previous magic 10000: a value further than
    # 10000 from every vocabulary entry would otherwise never be matched.
    mindiff = float('inf')
    closestFound = None
    tobeReplacedWith = 0
    # Match the AttribType:Id part
    for token in inpH.pre_emb:
        parts = token.split(':')
        if parts[1] != keynode:
            continue
        if not is_number(parts[2]):
            continue
        x = float(parts[2])
        diff = abs(keyvalue - x)
        if diff < mindiff:
            mindiff = diff
            closestFound = token
            tobeReplacedWith = x
    return FuzzyMatchInfo(closestFound, keyvalue, tobeReplacedWith)
def is_number(s):
    """Return True when *s* parses as a float, False otherwise."""
    try:
        float(s)
    except ValueError:
        return False
    return True
def build_input_sequence(inpH, x_text, nodevec_dim):
    """Turn an AVP-sequence string into a padded integer id sequence.

    Temporarily rewrites fuzzy-matched vocabulary vectors, builds the padded
    sequence, then restores the original values.
    """
    changeLogDict, modified_x_text = replaceAVPSeqWithNN(inpH, x_text, nodevec_dim)
    # Convert the (modified) node-name sequence into padded integer ids.
    padded = pad_sequences(
        inpH.tokenizer.texts_to_sequences([modified_x_text]),
        padding='post',
        maxlen=MAXLEN,
    )
    # Revert the vocabulary-vector values that replaceAVPSeqWithNN changed.
    # NOTE(review): only index -VAL_DIMENSIONS is restored — presumably the
    # single dimension that was overwritten; confirm against replaceAVPSeqWithNN.
    for changeInfo in changeLogDict.values():
        inpH.pre_emb[changeInfo.closestToken][-VAL_DIMENSIONS] = changeInfo.origValue
    return padded
def replaceAVPSeqWithNN(inpH, avpseq, nodevec_dim):
    """Replace each token of *avpseq* with its closest vocabulary token.

    For every matched token a fresh embedding vector is assembled: the
    nodevec + context parts are copied from the matched vocabulary entry and
    the value dimension (index -VAL_DIMENSIONS) is overwritten with the
    query's value. Mutates inpH.pre_emb in place; the returned dict lets the
    caller revert the change (see build_input_sequence).

    :param inpH: InputHelper whose pre_emb vocabulary is temporarily modified
    :param avpseq: space-separated attribute-value-pair token sequence
    :param nodevec_dim: width of the node-vector prefix of each embedding
    :return: (dict of FuzzyMatchInfo keyed by token, modified sequence string)
    """
    tokens = avpseq.split(' ')
    modified_avpseq = []
    changedTokens = {}  # to keep track of the changes for reverting back
    for token in tokens:
        fuzzyMatchInfo = getClosestNode(inpH, token)
        if fuzzyMatchInfo == None:
            continue  # tokens without a numeric value part are dropped entirely
        changedTokens[fuzzyMatchInfo.closestToken] = fuzzyMatchInfo
        instvec = []
        attrvec = inpH.pre_emb[fuzzyMatchInfo.closestToken]
        # change the dimension corresponding to the value in our vocabulary dict
        # replace the nodevec part of instvec with attrvec
        for i in range(nodevec_dim):
            instvec.append(float(attrvec[i]))
        # context part comes from the current instance
        for i in range(nodevec_dim, nodevec_dim + PUBMED_DIM + VAL_DIMENSIONS):
            instvec.append(float(inpH.pre_emb[fuzzyMatchInfo.closestToken][i]))
        instvec_array = np.asarray(instvec)
        instvec_array[-VAL_DIMENSIONS] = fuzzyMatchInfo.replacedValue  # new followup value
        inpH.pre_emb[fuzzyMatchInfo.closestToken] = instvec_array  # modified instvec
        modified_avpseq.append(fuzzyMatchInfo.closestToken)
    return changedTokens, ' '.join(modified_avpseq)
def init_model(inpH, saved_model_wts_file=SAVED_MODEL_FILE, num_classes=NUM_CLASSES):
    """Rebuild the network architecture and load the trained weights into it.

    :param inpH: InputHelper carrying the embedding matrix and vocab size
    :param saved_model_wts_file: path to the saved Keras weights file
    :param num_classes: number of output classes for buildModel
    :return: the ready-to-use Keras model
    """
    print("DEBUG: During API call - emb matrix o/p dimension: {}".format(inpH.embedding_matrix.shape[1]))
    print("DEBUG: During API call - emb matrix shape: {}".format(inpH.embedding_matrix.shape))
    # rebuild the original architecture, then restore the trained weights
    model = buildModel(
        num_classes,
        inpH.vocab_size,
        inpH.embedding_matrix.shape[1],
        MAXLEN,
        inpH.embedding_matrix,
    )
    model.load_weights(saved_model_wts_file)
    model.summary()
    return model
def init_model_and_embedding(embfile, modelfile=SAVED_MODEL_FILE):
    """Load the embeddings and the trained model in one call.

    :return: (InputHelper, trained Keras model)
    """
    helper = init_embedding(embfile)
    return helper, init_model(helper, modelfile)
def main(argv):
    """Smoke-test: load the model and run two sample instances through it.

    Bug fix: the previous version called
    predict_outcome_with_dynamic_vocabchange() with four arguments although
    it only accepts (model, batch) — a guaranteed TypeError. The raw rows
    are now converted with build_input_sequence() first.
    """
    NODEVEC_DIM = 100
    EMBFILE = "../../../../../core/prediction/graphs/nodevecs/embfile4api.merged.vec"
    # one sample line from test data file
    TESTDATA_ROW = "C:5579689:18 I:3675717:1"
    TESTDATA_ROW2 = "C:5579689:18 I:3675717:1 C:5579088:35 I:3673272:1"
    inpH, trained_model = init_model_and_embedding(EMBFILE)
    # try executing a test instance on the loaded model
    for row in (TESTDATA_ROW, TESTDATA_ROW2):
        x = build_input_sequence(inpH, row, NODEVEC_DIM)
        predicted_val = predict_outcome_with_dynamic_vocabchange(trained_model, x)
        print(predicted_val)


if __name__ == "__main__":
    main(sys.argv[1:])
| 32.383065
| 115
| 0.707757
| 1,051
| 8,031
| 5.160799
| 0.258801
| 0.02323
| 0.028392
| 0.02323
| 0.279314
| 0.196165
| 0.151917
| 0.126844
| 0.106195
| 0.070059
| 0
| 0.013805
| 0.197236
| 8,031
| 247
| 116
| 32.51417
| 0.827517
| 0.119786
| 0
| 0.074074
| 0
| 0
| 0.071808
| 0.013787
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.074074
| 0.006173
| 0.320988
| 0.055556
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
3986fe60405cf4775e3e7c28b77f8afe1fba2cf3
| 599
|
py
|
Python
|
tests/test_fails.py
|
Alviner/wsrpc-aiohttp
|
12387f68b74587e52ae4b10f28892dbbb2afc32f
|
[
"MIT"
] | null | null | null |
tests/test_fails.py
|
Alviner/wsrpc-aiohttp
|
12387f68b74587e52ae4b10f28892dbbb2afc32f
|
[
"MIT"
] | null | null | null |
tests/test_fails.py
|
Alviner/wsrpc-aiohttp
|
12387f68b74587e52ae4b10f28892dbbb2afc32f
|
[
"MIT"
] | null | null | null |
from aiohttp import ClientConnectionError
from wsrpc_aiohttp.testing import BaseTestCase, async_timeout
class TestDisconnect(BaseTestCase):
    """Verify that calling over a closed socket raises ClientConnectionError."""

    @async_timeout
    async def test_call_error(self):
        class Store:
            def get_data(self, _):
                return 1000

        self.WebSocketHandler.add_route('get_data', Store().get_data)
        client = await self.get_ws_client()
        # Imitation of server connection has been closed
        client.socket._closed = True
        with self.assertRaises(ClientConnectionError):
            await client.call('get_data')
| 28.52381
| 73
| 0.689482
| 66
| 599
| 6.045455
| 0.575758
| 0.070175
| 0.120301
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008811
| 0.24207
| 599
| 20
| 74
| 29.95
| 0.870044
| 0.076795
| 0
| 0
| 0
| 0
| 0.029038
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 1
| 0.076923
| false
| 0
| 0.153846
| 0.076923
| 0.461538
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
398a3a700f8b78eced80ede2546a27f9c162d1aa
| 2,325
|
py
|
Python
|
devops/python/issuebot/applog.py
|
simahao/lily
|
c22ec37cb02374e94b41822eccc5e6d6aa7d0d25
|
[
"MIT"
] | 4
|
2020-11-16T06:24:19.000Z
|
2021-05-19T02:10:01.000Z
|
devops/python/issuebot/applog.py
|
simahao/lily
|
c22ec37cb02374e94b41822eccc5e6d6aa7d0d25
|
[
"MIT"
] | 5
|
2021-05-05T14:17:27.000Z
|
2021-09-30T08:47:23.000Z
|
devops/python/issuebot/applog.py
|
simahao/lily
|
c22ec37cb02374e94b41822eccc5e6d6aa7d0d25
|
[
"MIT"
] | 3
|
2021-02-22T01:38:49.000Z
|
2021-06-03T08:52:37.000Z
|
# Logging setup: everything reaches the console through the root logger, while
# the named loggers "Octopus" and "Surveillance" additionally append to their
# own files next to this module.  Applied via dictConfig at import time.
import logging
import logging.config
import os

# Directory holding this module; the per-logger log files are created here.
LOG_DIR = os.path.dirname(os.path.abspath(__file__))


def _formatter(fmt):
    """Build a %-style Formatter entry with the shared date format."""
    return {
        'class': 'logging.Formatter',
        'format': fmt,
        'datefmt': '%Y-%m-%d %H:%M:%S',
        'style': '%'
    }


def _file_handler(filename):
    """Build an appending, INFO-level FileHandler entry (verbose format, UTF-8)."""
    return {
        'class': 'logging.FileHandler',
        'level': 'INFO',
        'filename': filename,
        'mode': 'a',
        'formatter': 'verbose',
        'encoding': 'utf-8'
    }


# The rotating variant shares every field with a plain file handler except the
# class and the rotation limits (10 MiB per file, three backups).
_rotating = _file_handler('app.log')
_rotating['class'] = 'logging.handlers.RotatingFileHandler'
_rotating['maxBytes'] = 10485760
_rotating['backupCount'] = 3

log_config = {
    'version': 1,
    'formatters': {
        'verbose': _formatter('%(asctime)s [%(name)s] %(levelname)-8s %(pathname)s:%(lineno)d - %(message)s'),
        'simple': _formatter('%(asctime)s %(levelname)-8s - %(message)s'),
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'level': 'DEBUG',
            'formatter': 'simple'
        },
        'octopus': _file_handler(os.path.join(LOG_DIR, 'octopus.log')),
        'surveillance': _file_handler(os.path.join(LOG_DIR, 'surveillance.log')),
        'file': _file_handler('app.log'),
        'rotate_file': _rotating,
    },
    'loggers': {
        # propagate defaults to True, so records sent to these loggers also
        # bubble up to the root logger and hence to the console handler,
        # e.g. Octopus writes to its file AND is echoed on the console.
        'Octopus': {
            'handlers': ['octopus']
        },
        'Surveillance': {
            'handlers': ['surveillance']
        }
    },
    'root': {
        'level': 'INFO',
        'handlers': ['console']
    }
}

logging.config.dictConfig(log_config)
| 29.43038
| 101
| 0.455054
| 202
| 2,325
| 5.188119
| 0.366337
| 0.080153
| 0.064886
| 0.064886
| 0.409351
| 0.409351
| 0.320611
| 0.320611
| 0.240458
| 0.156489
| 0
| 0.010804
| 0.363011
| 2,325
| 78
| 102
| 29.807692
| 0.696826
| 0.075699
| 0
| 0.373333
| 0
| 0.013333
| 0.3863
| 0.037279
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.04
| 0
| 0.04
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
398d56540cd3fb4efa42ef33aee42fa70cf89afe
| 3,024
|
py
|
Python
|
datasets/thuc_news/thuc_news.py
|
jhxu-org/datasets
|
e78e81ff2aec2928506a42c3312799acd6c5e807
|
[
"Apache-2.0"
] | null | null | null |
datasets/thuc_news/thuc_news.py
|
jhxu-org/datasets
|
e78e81ff2aec2928506a42c3312799acd6c5e807
|
[
"Apache-2.0"
] | null | null | null |
datasets/thuc_news/thuc_news.py
|
jhxu-org/datasets
|
e78e81ff2aec2928506a42c3312799acd6c5e807
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors and the HuggingFace Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""THUNews"""
import csv
import ctypes
import os
import datasets
csv.field_size_limit(int(ctypes.c_ulong(-1).value // 2))
_CITATION = """\
@misc{xujianhua,
title={page xxx},
author={Xiang Zhang and Junbo Zhao and Yann LeCun},
year={2015},
eprint={1509.01626},
archivePrefix={arXiv},
primaryClass={cs.LG}
}
"""
_DESCRIPTION = """\
THUCTC(THU Chinese Text Classification)是由清华大学自然语言处理实验室推出的中文文本分类工具包,能够自动高效地实现用户自定义的文本分类语料的训练、\
评测、分类功能。文本分类通常包括特征选取、特征降维、分类模型学习三个步骤。如何选取合适的文本特征并进行降维,是中文文本分类的挑战性问题。、
我组根据多年在中文文本分类的研究经验,在THUCTC中选取二字串bigram作为特征单元,特征降维方法为Chi-square,权重计算方法为tfidf,、
分类模型使用的是LibSVM或LibLinear。THUCTC对于开放领域的长文本具有良好的普适性,不依赖于任何中文分词工具的性能,具有准确率高、测试速度快的优点。
"""
_DATA_URL = "http://127.0.0.1/thuc_news.zip"
_CLS = ['体育', '娱乐', '家居', '彩票', '房产', '教育', '时尚', '时政', '星座', '游戏', '社会', '科技', '股票', '财经']
class THUC_News(datasets.GeneratorBasedBuilder):
    """THUC news dataset: Chinese news text classification over 14 categories.

    (The original docstring said "Sogou News dataset" — a copy-paste error.)
    """

    def _info(self):
        """Return dataset metadata: a string ``content`` field plus a class label."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "content": datasets.Value("string"),
                    "label": datasets.features.ClassLabel(
                        names=_CLS
                    ),
                }
            ),
            # No default supervised_keys (as we have to pass both premise
            # and hypothesis as input).
            supervised_keys=None,
            homepage="",  # didn't find a real homepage
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Download/extract the archive and declare the test and train splits."""
        dl_dir = dl_manager.download_and_extract(_DATA_URL)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TEST, gen_kwargs={"filepath": os.path.join(dl_dir, "thuc_news", "test.txt")}
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN, gen_kwargs={"filepath": os.path.join(dl_dir, "thuc_news", "train.txt")}
            ),
        ]

    def _generate_examples(self, filepath):
        """Yield ``(id, example)`` pairs from a tab-separated label/content file."""
        # Stream the file line by line instead of materializing everything with
        # readlines(): same ids and examples, bounded memory for large splits.
        with open(filepath, encoding="utf-8") as txt_file:
            for id_, row in enumerate(txt_file):
                # NOTE(review): split without maxsplit keeps only the text up to
                # a second tab — assumes article bodies contain no tabs.
                parts = row.split('\t')
                yield id_, {"content": parts[1], "label": _CLS.index(parts[0])}
| 33.977528
| 114
| 0.638889
| 354
| 3,024
| 5.350282
| 0.607345
| 0.031679
| 0.013728
| 0.016895
| 0.083421
| 0.042239
| 0.042239
| 0.042239
| 0.042239
| 0.042239
| 0
| 0.01486
| 0.243386
| 3,024
| 88
| 115
| 34.363636
| 0.812937
| 0.276786
| 0
| 0.145455
| 0
| 0
| 0.309192
| 0.148097
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054545
| false
| 0
| 0.072727
| 0.018182
| 0.181818
| 0.018182
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
3990560a6bff336fd21ff88b51780152f5105716
| 1,215
|
py
|
Python
|
mundo3/ex115/lib/arquivo/__init__.py
|
dilsonm/CeV
|
8043be36b2da187065691d23ed5cb40fd65f806f
|
[
"MIT"
] | null | null | null |
mundo3/ex115/lib/arquivo/__init__.py
|
dilsonm/CeV
|
8043be36b2da187065691d23ed5cb40fd65f806f
|
[
"MIT"
] | null | null | null |
mundo3/ex115/lib/arquivo/__init__.py
|
dilsonm/CeV
|
8043be36b2da187065691d23ed5cb40fd65f806f
|
[
"MIT"
] | null | null | null |
from lib.interface import cabecalho
def arquivoExiste(arq):
    """Return True when the file *arq* exists and can be opened for reading.

    Only a missing file yields False; other I/O errors (e.g. permissions)
    propagate to the caller, matching the original behaviour.
    """
    try:
        # Opening (rather than os.path.exists) also proves read access;
        # the context manager guarantees the handle is closed.
        with open(arq, 'rt'):
            pass
    except FileNotFoundError:
        return False
    return True
def criarArquivo(arq):
    """Create (truncating if it exists) the file *arq* and report the outcome."""
    try:
        # 'wt+' truncates/creates the file for read+write; the context manager
        # replaces the explicit close of the original.
        with open(arq, 'wt+'):
            pass
    except OSError:
        # Narrowed from a bare except: only I/O failures are expected here.
        print('Houve um erro na criação do arquivo.')
    else:
        print(f'Arquivo {arq} criado com sucesso.')
def lerarquivo(arq):
    """Print the people registered in *arq* (semicolon-separated name;age lines)."""
    try:
        a = open(arq, 'rt')
    except OSError:
        # Narrowed from a bare except; covers missing file and permission errors.
        print('Erro ao abrir o arquivo.')
    else:
        # Close only on the success path: the original closed in a ``finally``
        # clause, which raised NameError when open() itself failed because
        # ``a`` was never bound.
        try:
            cabecalho('PESSOAS CADASTRADAS')
            for linha in a:
                dado = linha.split(';')
                dado1 = dado[1].replace('\n', '')
                print(f'{dado[0]:<30} {dado1:>3}')
        finally:
            a.close()
def cadastrar(arq, nome='desconhecido', idade=0):
    """Append a ``nome;idade`` record to *arq*, reporting success or failure."""
    try:
        a = open(arq, 'at')
    except OSError:
        # Narrowed from a bare except: only I/O failures are expected.
        print('Houve um ERRO na abertura do arquivo.')
    else:
        try:
            a.write(f'{nome};{idade}\n')
        except OSError:
            print('Não foi possivel gravar no arquivo.')
        else:
            print(f'Novo cadastro de {nome} adicionado.')
        finally:
            # Always release the handle once it was successfully opened
            # (the original leaked it when the write failed).
            a.close()
| 23.365385
| 57
| 0.516872
| 148
| 1,215
| 4.243243
| 0.472973
| 0.031847
| 0.050955
| 0.070064
| 0.191083
| 0.127389
| 0
| 0
| 0
| 0
| 0
| 0.017391
| 0.337449
| 1,215
| 52
| 58
| 23.365385
| 0.762733
| 0.046091
| 0
| 0.465116
| 0
| 0
| 0.244598
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.093023
| false
| 0
| 0.023256
| 0
| 0.162791
| 0.162791
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39965ea3888f463b999a6106ce07def8d9adf4ac
| 4,010
|
py
|
Python
|
carts/views.py
|
yun-mh/uniwalk
|
f5307f6970b24736d13b56b4792c580398c35b3a
|
[
"Apache-2.0"
] | null | null | null |
carts/views.py
|
yun-mh/uniwalk
|
f5307f6970b24736d13b56b4792c580398c35b3a
|
[
"Apache-2.0"
] | 9
|
2020-01-10T14:10:02.000Z
|
2022-03-12T00:08:19.000Z
|
carts/views.py
|
yun-mh/uniwalk
|
f5307f6970b24736d13b56b4792c580398c35b3a
|
[
"Apache-2.0"
] | null | null | null |
from django.core.exceptions import ObjectDoesNotExist
from django.shortcuts import render, redirect, get_object_or_404
from designs import models as design_models
from feet import models as foot_models
from products import models as product_models
from .models import Cart, CartItem
# 現在のセッション宛にカートを生成するための関数
def _session_key(request):
cart = request.session.session_key
if not cart:
cart = request.session.create()
return cart
def add_cart(request, pk, design_pk):
    """View that adds a product, with its chosen design, to the session's cart."""
    product = product_models.Product.objects.get(pk=pk)
    # Check whether this session already owns a cart.
    try:
        cart = Cart.objects.get(session_key=_session_key(request))
    # No cart yet: create one (attached to the user when authenticated).
    except Cart.DoesNotExist:
        if request.user.is_authenticated:
            cart = Cart.objects.create(
                session_key=_session_key(request), user_id=request.user.pk
            )
            cart.save()
        else:
            cart = Cart.objects.create(session_key=_session_key(request))
            cart.save()
    # Check whether the cart already holds this product with the same design.
    try:
        cart_item = CartItem.objects.get(product=product, cart=cart, design=design_pk)
        # If the stored foot sizes differ from the session's size values, the
        # user went back (browser back button) and corrected the measurements,
        # so only update the sizes.
        if (
            cart_item.length_left != request.session["length_left"]
            or cart_item.length_right != request.session["length_right"]
            or cart_item.width_left != request.session["width_left"]
            or cart_item.width_right != request.session["width_right"]
        ):
            cart_item.length_left = request.session["length_left"]
            cart_item.length_right = request.session["length_right"]
            cart_item.width_left = request.session["width_left"]
            cart_item.width_right = request.session["width_right"]
        # Sizes match too, so this is exactly the same item being added again:
        # just increase the quantity.
        else:
            cart_item.quantity += 1
        cart_item.save()
    # Not in the cart yet: create a new cart item from the session measurements.
    except CartItem.DoesNotExist:
        cart_item = CartItem.objects.create(
            product=product,
            design=design_models.Design.objects.get(pk=design_pk),
            length_left=request.session["length_left"],
            length_right=request.session["length_right"],
            width_left=request.session["width_left"],
            width_right=request.session["width_right"],
            quantity=1,
            cart=cart,
        )
        cart_item.save()
    return redirect("carts:cart")
def cart_display(request, amount=0, counter=0, cart_items=None):
    """Render the cart page with its items, the total amount and item count."""
    try:
        # A cart already exists for this session key: total up its lines.
        active_cart = Cart.objects.get(session_key=_session_key(request))
        cart_items = CartItem.objects.filter(cart=active_cart)
        for item in cart_items:
            amount += item.product.price * item.quantity
            counter += item.quantity
    except ObjectDoesNotExist:
        # No cart yet for this session: render an empty cart.
        pass
    context = {"cart_items": cart_items, "amount": amount, "counter": counter}
    return render(request, "carts/cart.html", context)
def remove_item(request, pk, design_pk):
    """Decrease the quantity of a cart line, deleting it at the last unit."""
    # Resolve the cart, product and matching cart line.
    cart = Cart.objects.get(session_key=_session_key(request))
    product = get_object_or_404(product_models.Product, pk=pk)
    item = CartItem.objects.get(product=product, cart=cart, design=design_pk)
    if item.quantity > 1:
        # More than one unit left: just decrement.
        item.quantity -= 1
        item.save()
    else:
        # Last unit: remove the line entirely.
        item.delete()
    return redirect("carts:cart")
def delete_cartitem(request, pk, design_pk):
    """Remove a product line from the cart regardless of its quantity."""
    # Resolve the cart, product and matching cart line, then delete it.
    cart = Cart.objects.get(session_key=_session_key(request))
    product = get_object_or_404(product_models.Product, pk=pk)
    item = CartItem.objects.get(product=product, cart=cart, design=design_pk)
    item.delete()
    return redirect("carts:cart")
| 33.983051
| 86
| 0.672319
| 453
| 4,010
| 5.724062
| 0.19426
| 0.074046
| 0.045893
| 0.046278
| 0.468955
| 0.457771
| 0.395681
| 0.344774
| 0.215195
| 0.178172
| 0
| 0.005502
| 0.229426
| 4,010
| 117
| 87
| 34.273504
| 0.833657
| 0.106733
| 0
| 0.304878
| 0
| 0
| 0.056417
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060976
| false
| 0.012195
| 0.073171
| 0
| 0.195122
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
3996a072b5270c64e9a774f3c2758ba1336ec30d
| 13,515
|
py
|
Python
|
deploy.py
|
j-benson/Deploy
|
9fb2bd1c383949521967a672ac76fcdcaced503f
|
[
"MIT"
] | null | null | null |
deploy.py
|
j-benson/Deploy
|
9fb2bd1c383949521967a672ac76fcdcaced503f
|
[
"MIT"
] | null | null | null |
deploy.py
|
j-benson/Deploy
|
9fb2bd1c383949521967a672ac76fcdcaced503f
|
[
"MIT"
] | null | null | null |
"""
Script to deploy a website to the server by ftp.
- Compares local directory with remote directory
- Updates modified files
- Adds new files
- Optionally, removes deleted files from remote
Requires: python 3.3+
Due to use of ftplib.mlsd()
The MIT License (MIT)
Copyright (c) 2015 James Benson
"""
"""
TODO: FTP response codes to look out for:
- 502 unknown command
- 550 empty directory
- 451 can't remove directory
Good ones:
- 226 transfer complete
"""
# File extensions uploaded in ASCII mode when storMode == STOR_AUTO.
asciiExt = ['coffee', 'css', 'erb', 'haml', 'handlebars', 'hb', 'htm', 'html',
'js', 'less', 'markdown', 'md', 'ms', 'mustache', 'php', 'rb', 'sass', 'scss',
'slim', 'txt', 'xhtml', 'xml'];
# Remote paths that must never be removed during a sync with remoteDelete on.
deleteIgnoreFiles = ["/.ftpquota"];
deleteIgnoreDirs = ["/cgi-bin"];
remoteSep = "/";
dLogName = "debug.txt";
# Transfer-mode constants consumed by stor().
STOR_AUTO = 0;
STOR_BINARY = 1;
STOR_ASCII = 2;
# Upload policies: push everything vs. only files newer than the remote copy.
UPLOAD_OVERWRITE = 0;
UPLOAD_MODIFIED = 1;
######################### SETUP ##########################
remoteHost = "127.0.0.1";
remoteUser = "Benson";
remotePassword = "benson";
localPath = "D:\\test\\ftp";
remotePath = "/";
### OPTIONS ###
verbose = True;
remoteTLS = False; # SSL/TLS doesn't work invalid certificate error
remoteDelete = True;
remoteIgnoreHidden = False; # TODO: Implement hidden.
storMode = STOR_BINARY; # only binary currently works
uploadMode = UPLOAD_MODIFIED;
debug = True;
##########################################################
import os;
from datetime import datetime, timedelta;
from ftplib import FTP, FTP_TLS, error_reply, error_temp, error_perm, error_proto, all_errors;
if remoteTLS:
    import ssl;
# Module-level FTP connection and debug-log handle, initialised lazily by
# connect() and dprint() respectively and torn down in main().
ftp = None;
dLog = None;
# === FTP Functions ===
def connect():
    """Open the module-global FTP (or FTPS) connection with the configured credentials."""
    global ftp
    if remoteTLS:
        # Explicit TLS: secure the control channel, then protect data transfers.
        context = ssl.create_default_context()
        ftp = FTP_TLS(remoteHost, remoteUser, remotePassword, acct="",
                      keyfile=None, certfile=None, context=context, timeout=20)
        ftp.prot_p()
    else:
        ftp = FTP(remoteHost, remoteUser, remotePassword, 20)
    print(ftp.getwelcome())
def stor(dirpath, file):
    """Store *file* into *dirpath* on the server.

    Chooses ASCII vs binary transfer from ``storMode`` (with an extension
    lookup in auto mode) and stamps the remote modification time afterwards.
    Upload failures are reported, not raised.
    """
    ext = (os.path.splitext(file.name())[1]).lstrip('.');
    storpath = remoteJoin(dirpath, file.name());
    try:
        if (storMode == STOR_ASCII) or (storMode == STOR_AUTO and ext in asciiExt):
            # Store in ASCII mode.  The original leaked the local file handle;
            # a context manager guarantees it is closed after the transfer.
            if verbose: print("[asc] ", end="");
            with open(file.path) as fh:
                ftp.storlines("STOR %s" % storpath, fh);
        else:
            # Store in binary mode (same leak fixed).
            if verbose: print("[bin] ", end="");
            with open(file.path, "rb") as fh:
                ftp.storbinary("STOR %s" % storpath, fh);
        setModified(dirpath, file);
        if verbose: print("Uploaded: %s -> %s" % (file.path, storpath));
    except OSError as oserror:
        print("Failed Upload: %s\n %s" % (file.path, oserror));
def setModified(dirpath, file):
    """Ask the server to set the remote file's mtime via the MFMT extension."""
    target = remoteJoin(dirpath, file.name())
    ftp.voidcmd("MFMT %s %s" % (file.getModified(), target))

def rm(dirpath, file):
    """Delete *file* from *dirpath* on the server."""
    target = remoteJoin(dirpath, file.name())
    _rm(target)
    if verbose: print("Deleted: %s" % target)

def _rm(filepath):
    # Thin wrapper so recursive deletion can reuse the same primitive.
    ftp.delete(filepath)

def mkDir(dirpath, name):
    """Create directory *name* under *dirpath* on the server."""
    created = remoteJoin(dirpath, name)
    ftp.mkd(created)
    if verbose: print("Created: %s" % created)

def rmDir(dirpath, name, recursive = False):
    """Delete directory *name* under *dirpath*, recursing into it on request."""
    target = remoteJoin(dirpath, name)
    # Both branches of the original ended with _rmDir, so empty the directory
    # first when recursive, then remove the (now empty) directory itself.
    if recursive:
        _rmDirR(target)
    _rmDir(target)
    if verbose: print("Deleted: %s" % remoteJoin(target, "*"))

def _rmDir(dirpath):
    """Delete the directory at *dirpath*; only empty directories can be removed."""
    ftp.rmd(dirpath)  # TODO: What if fails to delete?
def _rmDirR(dirpath):
    """Remove the contents of *dirpath* recursively (files first, then subdirs).

    The directory itself is left for the caller (rmDir) to delete.
    """
    try:
        dirs, files = listRemote(dirpath);
        for f in files:
            _rm(f.path);
        for d in dirs:
            _rmDirR(d.path);
            _rmDir(d.path);
    except all_errors as err:
        # Narrowed from a bare except (which also swallowed KeyboardInterrupt
        # and programming errors) and chained so the underlying FTP failure
        # is preserved on the raised error.
        raise error_temp("451 Can't remove directory") from err;
# === End FTP Functions ===
# === Traversal Functions ===
def traverse(localPath, remotePath = remoteSep):
    """Synchronise one directory level, then recurse into its subdirectories."""
    dprint("TRAVERSING: local %s | remote %s"%(localPath, remotePath))
    localDirs, localFiles = listLocal(localPath)
    remoteDirs, remoteFiles = listRemote(remotePath)
    newF, modifiedF, unmodifiedF, deletedF = compareFiles(localFiles, remoteFiles, remoteDelete)
    newD, existingD, deletedD = compareDirs(localDirs, remoteDirs, remoteDelete)
    # Upload new and changed files, then create missing directories.
    for upload in newF + modifiedF:
        stor(remotePath, upload)
    for created in newD:
        mkDir(remotePath, created)
    # Recurse into every directory that now exists on both sides.
    for sub in newD + existingD:
        subName = sub.name()
        traverse(os.path.join(localPath, subName), remoteJoin(remotePath, subName))
    # Optionally mirror deletions: directories recursively, then stray files.
    if remoteDelete:
        for gone in deletedD:
            rmDir(remotePath, gone, True)
        for gone in deletedF:
            rm(remotePath, gone)
def listLocal(path):
    """Return ``(directories, files)`` found directly under local *path*.

    Files carry their modification time (taken from ``os.stat``).
    """
    dirs = []
    files = []
    for entry in os.listdir(path):
        full = os.path.join(path, entry)
        if os.path.isdir(full):
            dirs.append(Directory(full))
        if os.path.isfile(full):
            localFile = File(full)
            localFile.setModifiedTimestamp(os.stat(full).st_mtime)
            files.append(localFile)
    return (dirs, files)
def listRemote(path = ""):
    """Return ``(directories, files)`` reported by the server's MLSD at *path*.

    Remote files carry the server-supplied UTC modification timestamp.
    """
    dirs = []
    files = []
    for name, fact in ftp.mlsd(path):
        kind = fact["type"]
        if kind == "dir":
            dirs.append(Directory(remoteJoin(path, name)))
        if kind == "file":
            remoteFile = File(remoteJoin(path, name))
            remoteFile.setModifiedUTCStr(fact["modify"])
            files.append(remoteFile)
    return (dirs, files)
# === End Traversal Functions ===
def remoteJoin(pathA, pathB, sep=None):
    """Join two remote path fragments with exactly one separator between them.

    *sep* defaults to the module-wide ``remoteSep``; passing it explicitly
    generalizes the helper to other separators without changing existing calls.
    """
    if sep is None:
        sep = remoteSep;
    if not pathA.endswith(sep) and not pathB.startswith(sep):
        pathA += sep;
    elif pathA.endswith(sep) and pathB.startswith(sep):
        # Both sides supplied separators: strip the left side's trailing ones.
        pathA = pathA.rstrip(sep);
    return pathA + pathB;
# === Structures ===
class File(object):
    """A local or remote file identified by *path*.

    Equality compares base names (files are only ever compared within one
    directory); ordering compares modification times.
    """

    def __init__(self, path):
        self.path = str(path)
        # Timestamp layout shared with the FTP MDTM/MFMT commands.
        self.datetimeFormat = "%Y%m%d%H%M%S"

    def __str__(self):
        return self.name()

    # -- comparisons ------------------------------------------------------
    def __eq__(self, other):
        """Name-based identity: another File, or anything convertible to str."""
        other_name = other.name() if isinstance(other, File) else str(other)
        return self.name() == other_name

    def __lt__(self, other):
        """True when this file is strictly older than *other*."""
        return self.modified < other.modified

    def __gt__(self, other):
        """True when this file is strictly newer than *other*."""
        return self.modified > other.modified

    def __le__(self, other):
        """True when this file is not newer than *other*."""
        return self.modified <= other.modified

    def __ge__(self, other):
        """True when this file is not older than *other*."""
        return self.modified >= other.modified
    # -- end comparisons --------------------------------------------------

    def name(self):
        """Base name of the file, without any directory part."""
        return os.path.basename(self.path)

    def setModifiedUTCStr(self, modified):
        """Set the modified time from a UTC ``YYYYMMDDHHMMSS`` string."""
        self.modified = datetime.strptime(modified, self.datetimeFormat)

    def setModifiedTimestamp(self, modified):
        """Set the modified time from a POSIX timestamp, dropping microseconds
        (FTP timestamps carry no sub-second precision)."""
        utc = datetime.utcfromtimestamp(modified)
        self.modified = utc - timedelta(microseconds=utc.microsecond)

    def getModified(self):
        """Return the modified time formatted for the MFMT command."""
        return datetime.strftime(self.modified, self.datetimeFormat)
class Directory(object):
    """A local or remote directory identified by *path*, compared by base name."""

    def __init__(self, path):
        self.path = path

    def __str__(self):
        return self.name()

    def __eq__(self, other):
        """Name-based identity: another Directory, or anything convertible to str."""
        other_name = other.name() if isinstance(other, Directory) else str(other)
        return self.name() == other_name

    def name(self):
        """Base name of the directory; guards against a Directory stored as path."""
        if isinstance(self.path, Directory):
            raise Exception("Expected str found Directory")
        return os.path.basename(self.path)
# === End Structures ===
def compareFiles(localList, remoteList, checkDeleted = True):
    """Compares localList with remoteList gets the tuple containing File objects:
    (new, modified, unmodified, deleted)
    new: Files that are in localList but not in remoteList.
    modified: Files that are newer in localList than remoteList.
    unmodified: Files that are the same in both lists.
    deleted: Files that are in the remoteList but not in localList.
    *newer is defined by the file's date modified attribute.
    New, Modified and Unmodified will contain local files objects that need to
    be uploaded to the remote location.
    Deleted will contain remote file objects that need to be deleted from
    the remote location."""
    new = [];
    modified = [];
    unmodified = [];
    deleted = [];
    dprint("COMPARE FILES");
    # Classify every local file against the remote listing.
    for lfile in localList:
        dprint("LOCAL: %s - %s" % (lfile.path, lfile.modified));
        existsInRemote = False;
        for rfile in remoteList:
            if lfile == rfile:  # File.__eq__ compares base names only
                dprint("REMOTE: %s - %s" % (rfile.path, rfile.modified));
                existsInRemote = True;
                # Overwrite mode uploads everything; otherwise only files whose
                # local mtime is strictly newer than the remote copy.
                if uploadMode == UPLOAD_OVERWRITE or lfile > rfile:
                    dprint("Upload Mode: %s | Modified: lfile > rfile" % uploadMode);
                    modified.append(lfile);
                else:
                    dprint("Not Modified: lfile <= rfile");
                    unmodified.append(lfile);
                break;
        if not existsInRemote:
            dprint("New local file");
            new.append(lfile);
        dprint("--------------------------------------");
    # Check for deleted files
    if checkDeleted:
        dprint("CHECK FOR DELETED FILES");
        # Remote files absent locally are deletion candidates, except paths
        # explicitly protected by deleteIgnoreFiles.
        for rfile in remoteList:
            existsInLocal = False;
            for lfile in localList:
                if rfile == lfile:
                    existsInLocal = True;
                    break;
            if not existsInLocal and not rfile.path in deleteIgnoreFiles:
                dprint("DELETED: %s" % rfile.path);
                deleted.append(rfile);
            dprint("--------------------------------------");
    return (new, modified, unmodified, deleted);
def compareDirs(localList, remoteList, checkDeleted = True):
    """Compares localList with remoteList gets the tuple containing string
    names of the directories: (new, existing, deleted)
    new: Directories that are in localList but not in remoteList.
    existing: Directories that are in both lists.
    deleted: Directories that are in the remoteList but not in localList.
    localList - list of strings of the directory names in the local location.
    remoteList - list of strings of the directory name in the remote location."""
    new = [];
    existing = [];
    deleted = [];
    dprint("COMPARE DIRECTORIES");
    # Classify every local directory against the remote listing.
    for ldir in localList:
        dprint("LOCAL DIR: %s"%ldir.path);
        existsInRemote = False;
        for rdir in remoteList:
            if ldir == rdir:  # Directory.__eq__ compares base names only
                dprint("REMOTE DIR: %s"%rdir.path);
                dprint("Exists On Local and Remote");
                existsInRemote = True;
                existing.append(ldir)
                break;
        if not existsInRemote:
            dprint("New Local Directory");
            new.append(ldir);
    # Check for deleted directories
    if checkDeleted:
        dprint("CHECK FOR DELETED DIRECTORIES");
        # Remote directories absent locally are deletion candidates, except
        # paths explicitly protected by deleteIgnoreDirs.
        for rdir in remoteList:
            existsInLocal = False;
            for ldir in localList:
                if rdir == ldir:
                    existsInLocal = True;
                    break;
            if not existsInLocal and not rdir.path in deleteIgnoreDirs:
                dprint("DELETED: %s" % rdir.path);
                deleted.append(rdir);
            dprint("--------------------------------------");
    return (new, existing, deleted);
def dprint(line, end="\n"):
    """Append *line* to the debug log, lazily starting a fresh log on first use.

    No-op unless the module-level ``debug`` flag is set.  The handle is left
    open on purpose; main() flushes and closes it at shutdown.
    """
    global dLog;
    if debug:
        if dLog is None:  # idiom fix: identity check instead of ``== None``
            # First call of this run: discard any log from a previous run.
            if os.path.exists(dLogName):
                os.remove(dLogName);
            dLog = open(dLogName, "w")
        dLog.write(line + end);
def main():
    """Run the deployment: validate config, connect, mirror, then clean up.

    Returns -1 when the configured local path does not exist.
    """
    if not os.path.isdir(localPath):
        print("Path Not Found: %s" % localPath);
        return -1;
    try:
        connect();
        traverse(localPath, remotePath);
    except (error_reply, error_temp, error_perm, error_proto) as err:
        # The four originally-separate handlers all just printed the error;
        # consolidated behaviour-identically.
        print(err);
    except all_errors as err:
        # Remaining ftplib errors (Error, OSError, EOFError).
        print(err);
    finally:
        # Idiom fixes: ``is not None`` instead of ``not x == None``.
        if ftp is not None:
            try:
                ftp.quit();
            except Exception:
                # Best effort (was a bare except): the connection may already
                # be dead; close() below releases the socket regardless.
                pass;
            ftp.close();
        if dLog is not None and not dLog.closed:
            dLog.flush();
            dLog.close();
if __name__ == "__main__":
    main();
| 35.565789
| 129
| 0.592379
| 1,547
| 13,515
| 5.117001
| 0.238526
| 0.012633
| 0.010611
| 0.010106
| 0.206417
| 0.178247
| 0.136306
| 0.119378
| 0.081481
| 0.071627
| 0
| 0.003881
| 0.275472
| 13,515
| 379
| 130
| 35.659631
| 0.804534
| 0.207547
| 0
| 0.251773
| 0
| 0
| 0.080228
| 0.011181
| 0
| 0
| 0
| 0.007916
| 0
| 1
| 0.113475
| false
| 0.014184
| 0.014184
| 0.014184
| 0.202128
| 0.120567
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
3997e398937ee03af443d926f755e2d9046ee9c6
| 1,740
|
py
|
Python
|
wataru/commands/models/project.py
|
risuoku/wataru
|
63be36d15454abd0636f67eaf1e80728b8c5a9bd
|
[
"MIT"
] | null | null | null |
wataru/commands/models/project.py
|
risuoku/wataru
|
63be36d15454abd0636f67eaf1e80728b8c5a9bd
|
[
"MIT"
] | null | null | null |
wataru/commands/models/project.py
|
risuoku/wataru
|
63be36d15454abd0636f67eaf1e80728b8c5a9bd
|
[
"MIT"
] | null | null | null |
from wataru.commands.models.base import CommandBase
from wataru.logging import getLogger
import wataru.rules.models as rmodels
import os
import sys
logger = getLogger(__name__)
class Create(CommandBase):
    """Command that scaffolds a new project from a theme."""

    def apply_arguments(self, parser):
        """Register the CLI options understood by the create command.

        The original statements ended with stray trailing commas, wrapping
        each ``add_argument`` call in a throwaway one-element tuple; removed.
        """
        parser.add_argument('--name', action='store', dest='projectname')
        parser.add_argument('--root-dir', action='store', dest='rootdir')
        parser.add_argument('--enable-virtualenv', action='store_true', default=False, dest='virtualenv_enabled')
        parser.add_argument('--theme-dir', action='store', dest='themedir')

    def pre_execute(self, namespace):
        """No preparation is required before execute()."""
        pass

    def execute(self, namespace):
        """Materialise the project described by the theme plus CLI overrides."""
        # Resolve the theme: the default unless --theme-dir was given.
        from wataru.rules import themes
        tm = themes.get_default() if namespace.themedir is None else themes.get(namespace.themedir)
        # Apply CLI overrides onto the theme's project settings.
        if namespace.projectname is not None:
            tm.update_project('name', namespace.projectname)
        if namespace.rootdir is not None:
            tm.update_project('rootdir', namespace.rootdir)
        if namespace.virtualenv_enabled:
            tm.update_project('virtualenv', True)
        # Point the template loader at the theme's template directory.
        from wataru.rules import templates
        templates.setenv(tm.abs_tpldir)
        # Build the rule graph for this theme and take its project node.
        from wataru.rules import graph
        rg = graph.get_by_theme(tm)
        project = rg.project
        # Attach the metadata directory node, then converge the project.
        mddir = rmodels.get_metadatadirectory(project)
        project.add_node(mddir)
        project.converge()
        # Finally set up Jupyter support from the theme's meta configuration.
        mt = tm.config['meta']
        jobj = rmodels.SetupJupyter(mddir, project.abspath, mt.get('jupyter'))
        jobj.converge()
| 32.222222
| 114
| 0.65977
| 202
| 1,740
| 5.574257
| 0.391089
| 0.044405
| 0.060391
| 0.05595
| 0.042629
| 0.042629
| 0
| 0
| 0
| 0
| 0
| 0
| 0.237356
| 1,740
| 53
| 115
| 32.830189
| 0.848531
| 0.064368
| 0
| 0
| 0
| 0
| 0.090741
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088235
| false
| 0.029412
| 0.235294
| 0
| 0.352941
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
3998894acc2c2f5b50a8cd1451c55bffb80880f7
| 2,914
|
py
|
Python
|
UnityExamples/Assets/StreamingAssets/Python/BlockLibraries/UnityExamples/FingerTrace.py
|
6henrykim/UnityExamples
|
3d4d782e6e67fee1ede902998c2df1b5b90b074a
|
[
"Apache-2.0"
] | 9
|
2020-04-02T10:33:37.000Z
|
2021-12-03T17:14:40.000Z
|
UnityExamples/Assets/StreamingAssets/Python/BlockLibraries/UnityExamples/FingerTrace.py
|
ultrahaptics/UnityExamples
|
3d4d782e6e67fee1ede902998c2df1b5b90b074a
|
[
"Apache-2.0"
] | 2
|
2019-11-06T10:37:18.000Z
|
2021-09-20T14:31:13.000Z
|
UnityExamples/Assets/StreamingAssets/Python/BlockLibraries/UnityExamples/FingerTrace.py
|
ultrahaptics/UnityExamples
|
3d4d782e6e67fee1ede902998c2df1b5b90b074a
|
[
"Apache-2.0"
] | 1
|
2022-02-25T16:38:52.000Z
|
2022-02-25T16:38:52.000Z
|
# A Sensation that animates a Circle Path along a Polyline whose 35 vertices
# are the finger-joint positions of the hand.
from pysensationcore import *
import sensation_helpers as sh
import HandOperations

# Joint keyframe inputs: one per (finger, bone) pair, traversed in order so the
# circle sweeps out and back along each finger.
fingers = ["thumb", "indexFinger", "middleFinger", "ringFinger", "pinkyFinger"]
bones = ["metacarpal", "proximal", "intermediate", "distal", "intermediate","proximal","metacarpal"]

# Polyline path that the circle is animated along.
animPath = createInstance("PolylinePath", "PolylinePathInstance")

# Input names for every bone joint, fingers outer / bones inner (same order as
# the original nested loops).
jointKeyFrames = ["%s_%s_position" % (finger, bone)
                  for finger in fingers
                  for bone in bones]

numPoints = len(jointKeyFrames)
points = sh.createList(numPoints)
# Feed the joint-point list into the animation polyline.
connect(points["output"], animPath.points)

translateAlongPath = createInstance("TranslateAlongPath", "translateAlongPath")
connect(Constant((1,0,0)), translateAlongPath.direction)
connect(animPath.out, translateAlongPath.animationPath)

# The object path (a circle) is oriented to match the palm, then translated
# along the joint polyline: Circle -> OrientPathToPalm -> TranslateAlongPath.
circlePath = createInstance("CirclePath", "objectPath")
orientToPalmInstance = createInstance("OrientPathToPalm", "orientToPalm")
connect(circlePath.out, orientToPalmInstance.path)
connect(orientToPalmInstance.out, translateAlongPath.objectPath)

# Top-level inputs: every joint keyframe, plus timing/size/palm parameters.
topLevelInputs = {}
for idx in range(0, numPoints):
    topLevelInputs[(jointKeyFrames[idx], points["inputs"][idx])] = (0,0,0)
topLevelInputs[("t", translateAlongPath.t)] = (0, 0, 0)
topLevelInputs[("duration", translateAlongPath.duration)] = (2.5,0,0)
topLevelInputs[("dotSize", circlePath.radius)] = (0.01, 0, 0)
topLevelInputs[("palm_direction", orientToPalmInstance.palm_direction)] = (0, 0, 0)
topLevelInputs[("palm_normal", orientToPalmInstance.palm_normal)] = (0, 0, 0)

fingerScan = sh.createSensationFromPath("Finger Trace",
                                        topLevelInputs,
                                        output = translateAlongPath.out,
                                        drawFrequency = 120,
                                        renderMode=sh.RenderMode.Loop,
                                        definedInVirtualSpace = True
                                        )

# Hide every input except the user-facing ones.
visibleInputs = ("duration", "dotSize")
for inputName, _node in topLevelInputs.keys():
    if inputName not in visibleInputs:
        setMetaData(getattr(fingerScan, inputName), "Input-Visibility", False)

setMetaData(fingerScan.duration, "Type", "Scalar")
setMetaData(fingerScan.dotSize, "Type", "Scalar")
| 42.231884
| 112
| 0.710707
| 306
| 2,914
| 6.745098
| 0.411765
| 0.010659
| 0.03876
| 0.024709
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01302
| 0.18291
| 2,914
| 69
| 113
| 42.231884
| 0.853843
| 0.217914
| 0
| 0
| 0
| 0
| 0.159171
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.069767
| 0
| 0.069767
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
3998e8576c81d8620613973a3fcb28ca0f349137
| 2,053
|
py
|
Python
|
scripts/extarct_from_videos.py
|
corenel/yt8m-feature-extractor
|
3f658749fd365478f1f26daa78b3e7b8d4844047
|
[
"MIT"
] | 18
|
2017-09-12T07:02:28.000Z
|
2021-06-07T13:38:51.000Z
|
scripts/extarct_from_videos.py
|
corenel/yt8m-feature-extractor
|
3f658749fd365478f1f26daa78b3e7b8d4844047
|
[
"MIT"
] | 1
|
2017-10-19T13:51:41.000Z
|
2017-12-30T08:49:08.000Z
|
scripts/extarct_from_videos.py
|
corenel/yt8m-feature-extractor
|
3f658749fd365478f1f26daa78b3e7b8d4844047
|
[
"MIT"
] | 3
|
2017-09-07T07:07:22.000Z
|
2018-09-18T15:49:29.000Z
|
"""Extract inception_v3_feats from videos for Youtube-8M feature extractor."""
import os
import torch
import init_path
import misc.config as cfg
from misc.utils import (concat_feat_var, get_dataloader, make_cuda,
make_variable)
from models import inception_v3
if __name__ == '__main__':
    # Init model: inception_v3 configured to return pooled features
    # instead of classification logits (extract_feat=True).
    model = make_cuda(inception_v3(pretrained=True,
                                   transform_input=True,
                                   extract_feat=True))
    model.eval()
    # Get video list: keep only files whose extension is a known video
    # format (single pass instead of listdir + separate filter).
    video_list = [v for v in os.listdir(cfg.video_root)
                  if os.path.splitext(v)[1] in cfg.video_ext]
    # Extract features by inception_v3, one video at a time.
    # (The enumerate() index in the original loop was unused — removed.)
    for video_file in video_list:
        vid = os.path.splitext(video_file)[0]
        filepath = os.path.join(cfg.video_root, video_file)
        if os.path.exists(cfg.inception_v3_feats_path.format(vid)):
            # Features already on disk — skip re-extraction.
            print("skip {}".format(vid))
        else:
            print("processing {}".format(vid))
            # Data loader yielding batches of frames from a single video.
            data_loader = get_dataloader(dataset="VideoFrame",
                                         path=filepath,
                                         num_frames=cfg.num_frames,
                                         batch_size=cfg.batch_size)
            # Extract features batch by batch; accumulate on the CPU to
            # keep GPU memory bounded.
            feats = None
            for step, frames in enumerate(data_loader):
                print("--> extract features [{}/{}]".format(step + 1,
                                                            len(data_loader)))
                feat = model(make_variable(frames))
                feats = concat_feat_var(feats, feat.data.cpu())
            print("--> save feats to {}"
                  .format(cfg.inception_v3_feats_path.format(vid)))
            torch.save(feats, cfg.inception_v3_feats_path.format(vid))
            # Optional cleanup of the source video, disabled by default:
            # print("--> delete original video file: {}".format(filepath))
            # os.remove(filepath)
| 40.254902
| 78
| 0.560156
| 234
| 2,053
| 4.688034
| 0.350427
| 0.080219
| 0.072926
| 0.05196
| 0.147675
| 0.096627
| 0.096627
| 0.067457
| 0
| 0
| 0
| 0.008929
| 0.345348
| 2,053
| 50
| 79
| 41.06
| 0.807292
| 0.146128
| 0
| 0
| 0
| 0
| 0.049397
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.171429
| 0
| 0.171429
| 0.114286
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
399fd36bf8e08b05046794370fe69a0ebbb1e2b1
| 4,208
|
py
|
Python
|
wc_rules/simulator/simulator.py
|
KarrLab/wc_rules
|
5c6d8ec7f3152f2d234107d6fec3e2bc8d9ff518
|
[
"MIT"
] | 5
|
2018-12-24T16:20:27.000Z
|
2022-02-12T23:07:42.000Z
|
wc_rules/simulator/simulator.py
|
KarrLab/wc_rules
|
5c6d8ec7f3152f2d234107d6fec3e2bc8d9ff518
|
[
"MIT"
] | 7
|
2019-01-14T23:08:52.000Z
|
2021-06-03T02:38:43.000Z
|
wc_rules/simulator/simulator.py
|
KarrLab/wc_rules
|
5c6d8ec7f3152f2d234107d6fec3e2bc8d9ff518
|
[
"MIT"
] | 3
|
2018-12-15T00:51:56.000Z
|
2020-04-29T14:12:34.000Z
|
from collections import deque
from ..utils.collections import DictLike
from ..matcher.core import ReteNet
from ..matcher.actions import make_node_token, make_edge_token, make_attr_token
from .sampler import NextReactionMethod
class SimulationState:
    """Mutable state of a rule-based simulation.

    Holds the node cache, the LIFO action/rollback stacks, the Rete
    matcher and the stochastic sampler, and drives the
    push-action -> execute -> rematch -> resample loop.
    """

    def __init__(self, nodes=None, **kwargs):
        """Initialize simulation state.

        Args:
            nodes: optional iterable of initial nodes for the cache.
                (Default changed from a shared mutable ``[]`` to ``None``;
                caller-visible behavior is unchanged.)
            **kwargs: recognized keys are ``rollback`` (bool), ``matcher``,
                ``start_time`` and ``end_time``.
        """
        self.cache = DictLike(nodes if nodes is not None else [])
        # for both stacks, use LIFO semantics using appendleft and popleft
        # BUGFIX: this flag used to be stored as ``self.rollback``, which
        # shadowed the rollback() method on every instance and made it
        # uncallable (TypeError: 'bool' object is not callable).
        self.rollback_enabled = kwargs.get('rollback', False)
        self.action_stack = deque()
        self.rollback_stack = deque()
        self.matcher = kwargs.get('matcher', ReteNet.default_initialization())
        self.start_time = kwargs.get('start_time', 0.0)
        self.end_time = kwargs.get('end_time', 0.0)
        self.sampler = NextReactionMethod(time=self.start_time)

    # These are elementary methods, used as
    # the final step in adding/removing a node
    def resolve(self, idx):
        """Return the cached node with id ``idx``."""
        return self.cache.get(idx)

    def update(self, node):
        """Insert ``node`` into the cache; returns self for chaining."""
        self.cache.add(node)
        return self

    def remove(self, node):
        """Delete ``node`` from the cache; returns self for chaining."""
        self.cache.remove(node)
        return self

    def get_contents(self, ignore_id=True, ignore_None=True,
                     use_id_for_related=True, sort_for_printing=True):
        """Return a dict ``{node_id: attribute_dict}`` for the whole cache.

        With ``sort_for_printing``, list attributes, attribute-dict keys
        and the outer dict keys are sorted for deterministic output.
        """
        d = {x.id: x.get_attrdict(ignore_id=ignore_id,
                                  ignore_None=ignore_None,
                                  use_id_for_related=use_id_for_related)
             for k, x in self.cache.items()}
        if sort_for_printing:
            for idx, adict in d.items():
                # sort list attributes in place
                for k, v in adict.items():
                    if isinstance(v, list):
                        adict[k] = list(sorted(v))
                # BUGFIX: the key-sorted dict used to be bound only to the
                # loop variable and was silently discarded; write it back.
                d[idx] = dict(sorted(adict.items()))
            d = dict(sorted(d.items()))
        return d

    def push_to_stack(self, action):
        """Push one action (or a list of actions, executed left-to-right)
        onto the front of the action stack."""
        if isinstance(action, list):
            # assume list has to be executed left to right
            self.action_stack = deque(action) + self.action_stack
        else:
            self.action_stack.appendleft(action)
        return self

    def simulate(self):
        """Drain the action stack: execute each action, feed the derived
        tokens through the matcher, and update the sampler."""
        while self.action_stack:
            action = self.action_stack.popleft()
            if hasattr(action, 'expand'):
                # Composite action: expand into primitive actions.
                self.push_to_stack(action.expand())
            elif action.__class__.__name__ == 'RemoveNode':
                if self.rollback_enabled:
                    self.rollback_stack.appendleft(action)
                # Tokens must be compiled while the node is still
                # resolvable, i.e. before execute() removes it.
                matcher_tokens = self.compile_to_matcher_tokens(action)
                action.execute(self)
                outtokens = self.matcher.process(matcher_tokens)
                # NOTE(review): the original code does not forward these
                # tokens to the sampler on this branch — confirm whether
                # that is intended.
            else:
                if self.rollback_enabled:
                    self.rollback_stack.appendleft(action)
                action.execute(self)
                matcher_tokens = self.compile_to_matcher_tokens(action)
                outtokens = self.matcher.process(matcher_tokens)
                self.update_sampler(outtokens)
        return self

    def rollback(self):
        """Drain the rollback stack, executing each recorded action.

        This method is now reachable: previously the boolean flag stored
        at ``self.rollback`` shadowed it on every instance.
        NOTE(review): recorded actions are re-executed as-is; verify that
        execute() of a recorded action performs the intended undo.
        """
        while self.rollback_stack:
            action = self.rollback_stack.popleft()
            action.execute(self)
        return self

    def compile_to_matcher_tokens(self, action):
        """Translate a simulator action into the token list consumed by
        the Rete matcher; unknown actions yield no tokens."""
        action_name = action.__class__.__name__
        # NOTE: WE"RE ATTACHING ACTUAL NODES HERE, NOT IDS, FIX action.idx,idx1,idx2 later
        if action_name in ['AddNode', 'RemoveNode']:
            return [make_node_token(action._class, self.resolve(action.idx), action_name)]
        if action_name in ['SetAttr']:
            _class = self.resolve(action.idx).__class__
            return [make_attr_token(_class, self.resolve(action.idx), action.attr, action.value, action_name)]
        if action_name in ['AddEdge', 'RemoveEdge']:
            i1, a1, i2, a2 = [getattr(action, x) for x in ['source_idx', 'source_attr', 'target_idx', 'target_attr']]
            c1, c2 = [self.resolve(x).__class__ for x in [i1, i2]]
            # Edges are symmetric: emit one token per direction.
            return [
                make_edge_token(c1, self.resolve(i1), a1, self.resolve(i2), a2, action_name),
                make_edge_token(c2, self.resolve(i2), a2, self.resolve(i1), a1, action_name)
            ]
        return []

    def update_sampler(self, tokens):
        """Push propensity updates from matcher output tokens into the
        sampler."""
        for token in tokens:
            self.sampler.update_propensity(reaction=token['source'], propensity=token['propensity'])
        return self

    def sample_next_event(self):
        """Draw the next reaction event, push its actions onto the stack
        and run the simulation loop; no-op on a null (infinite-time) event."""
        rule, time = self.sampler.next_event()
        if time == float('inf'):
            print('Null event!')
            return self
        sample = self.matcher.function_sample_rule(rule)
        rule_node = self.matcher.get_node(core=rule, type='rule')
        for act in rule_node.data.actions:
            if act.deps.declared_variable is not None:
                # Variable-declaring actions are evaluated eagerly so later
                # actions of the same rule can reference the result.
                sample[act.deps.declared_variable] = act.exec(sample, rule_node.data.helpers)
            else:
                self.push_to_stack(act.exec(sample, rule_node.data.helpers))
        self.sampler.update_time(time)
        self.simulate()
        return self
| 34.491803
| 140
| 0.736217
| 617
| 4,208
| 4.810373
| 0.247974
| 0.030323
| 0.030323
| 0.015162
| 0.153976
| 0.145553
| 0.083558
| 0.061995
| 0
| 0
| 0
| 0.006623
| 0.138783
| 4,208
| 121
| 141
| 34.77686
| 0.812362
| 0.088403
| 0
| 0.226804
| 0
| 0
| 0.04341
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.113402
| false
| 0
| 0.051546
| 0.010309
| 0.319588
| 0.030928
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39a16a05ac36a9db042c0bce00dc04a5a657ef37
| 1,370
|
py
|
Python
|
__private__/temp_dev/testshapefile.py
|
karimbahgat/PyA
|
4d62a0850ba1dca93f7362ef23e18a13938fce4f
|
[
"MIT"
] | 16
|
2016-02-26T15:24:28.000Z
|
2021-06-16T21:00:22.000Z
|
__private__/temp_dev/testshapefile.py
|
karimbahgat/PyA
|
4d62a0850ba1dca93f7362ef23e18a13938fce4f
|
[
"MIT"
] | 5
|
2016-02-27T20:13:26.000Z
|
2018-09-12T23:08:36.000Z
|
__private__/temp_dev/testshapefile.py
|
karimbahgat/PyA
|
4d62a0850ba1dca93f7362ef23e18a13938fce4f
|
[
"MIT"
] | 7
|
2015-07-08T12:51:57.000Z
|
2019-12-05T19:07:27.000Z
|
import Tkinter as tk
from PIL import Image, ImageTk
import aggdraw
# Main Tk window with a single label that serves as the drawing surface;
# update() pushes rendered frames into this label.
window = tk.Tk()
label = tk.Label(window)
label.pack()
# schedule changing images
import itertools, random, time
def agg2tkimg(aggimage):
    """Convert an aggdraw image to a Tk PhotoImage via a PIL image.

    Removed dead locals from the original: ``t = time.clock()`` (never
    read, and time.clock() was removed in Python 3.12) and
    ``colorlength`` (never used).

    NOTE(review): Image.fromstring / tostring belong to the old PIL API;
    modern Pillow renamed them to frombytes / tobytes — confirm which
    library version this project pins.
    """
    width, height = aggimage.size
    imgbytes = aggimage.tostring()
    # via PIL/PILLOW for fast window updates
    tempimg = Image.fromstring("RGBA", (width, height), data=imgbytes)
    tkimg = ImageTk.PhotoImage(image=tempimg)
    return tkimg
def random_n(minval, maxval, n=1):
    """Return a tuple of n random integers drawn from [minval, maxval).

    Fixed: the original used ``xrange``, which is Python-2-only; ``range``
    iterates identically here and works on both Python 2 and 3.
    """
    return tuple(random.randrange(minval, maxval) for _ in range(n))
def draw_polygon(img, coords):
    """Fill the polygon given by flat coords on img, using a randomly
    colored pen (width proportional to image width) and brush."""
    outline = aggdraw.Pen(random_n(0, 222, n=3), width=int(img.size[0] * 0.001))
    fill = aggdraw.Brush(random_n(0, 222, n=3))
    img.polygon(coords, outline, fill)
def update(img):
    """Flush pending aggdraw commands and push the frame into the Tk label."""
    img.flush()
    photo = agg2tkimg(img)
    label["image"] = photo
    # keep a Python-side reference so Tk's image is not garbage collected
    label.img = photo
# Begin #
# Blank RGBA canvas with a random background colour.
img = aggdraw.Draw("RGBA", (1000,600), random_n(0,222,n=3) )
import geovis
sf = geovis.shapefile_fork.Reader("D:/Test Data/cshapes/cshapes.shp")
for shape in sf.iterShapes():
    if shape.__geo_interface__["type"] == "Polygon":
        # Flatten the exterior ring into [x1, y1, x2, y2, ...]; the +350
        # shifts both axes — presumably to position the map on the
        # canvas (NOTE(review): shifting y by the same amount as x looks
        # arbitrary; confirm intended offset).
        flatcoords = [xory+350 for xy in shape.__geo_interface__["coordinates"][0] for xory in xy]
        draw_polygon(img, flatcoords)
        update(img)
window.mainloop()
| 22.096774
| 98
| 0.674453
| 197
| 1,370
| 4.598985
| 0.451777
| 0.030905
| 0.02649
| 0.036424
| 0.043046
| 0.043046
| 0
| 0
| 0
| 0
| 0
| 0.030603
| 0.189051
| 1,370
| 61
| 99
| 22.459016
| 0.784878
| 0.059124
| 0
| 0
| 0
| 0
| 0.052426
| 0.018779
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.138889
| 0
| 0.305556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39a902062ca7512880d1818276ec6c8f4ed11b57
| 693
|
py
|
Python
|
aoc10.py
|
roscroft/aoc-2020
|
3f37f6b29ec66bac5610bccd6de5ebb000bde312
|
[
"MIT"
] | 1
|
2020-12-07T22:16:17.000Z
|
2020-12-07T22:16:17.000Z
|
aoc10.py
|
roscroft/aoc-2020
|
3f37f6b29ec66bac5610bccd6de5ebb000bde312
|
[
"MIT"
] | null | null | null |
aoc10.py
|
roscroft/aoc-2020
|
3f37f6b29ec66bac5610bccd6de5ebb000bde312
|
[
"MIT"
] | null | null | null |
from utils import utils
def part_1(data):
    """Return (#1-jolt gaps) * (#3-jolt gaps) between consecutive adapters.

    `data` must be sorted ascending (the caller prepends 0 and appends
    max+3). Fixed an off-by-one: the original iterated from i=0, which
    compared data[0] - data[-1] (a wraparound pair); iteration now starts
    at 1 so only genuine consecutive gaps are counted.
    """
    diffs = [data[i] - data[i - 1] for i in range(1, len(data))]
    return diffs.count(1) * diffs.count(3)
def part_2(data):
    """Count distinct valid adapter arrangements via dynamic programming.

    dynm[i] = number of ways to reach adapter i, summing over the up to
    three predecessors within 3 jolts. Fixed a latent bug: for i < 3 the
    original indexed dynm[i-j] with a negative index, silently wrapping
    to the end of the list; the i - j >= 0 guard prevents that.
    """
    dynm = [1] + [0] * (len(data) - 1)
    for i in range(1, len(data)):
        dynm[i] = sum(
            dynm[i - j]
            for j in range(1, 4)
            if i - j >= 0 and data[i] - data[i - j] <= 3
        )
    return dynm[-1]
if __name__ == "__main__":
    # Advent of Code 2020, day 10.
    day = 10
    # Puzzle input: one adapter joltage (integer) per line.
    data = utils.get_ints_from_file(f"data/aoc{day}_data.txt")
    data = sorted(data)
    # Prepend the outlet (0 jolts) and append the device (max + 3 jolts).
    data = [0] + data + [data[-1]+3]
    print(f"Part 1 solution: {part_1(data)}")
    print(f"Part 2 solution: {part_2(data)}")
| 34.65
| 90
| 0.588745
| 134
| 693
| 2.895522
| 0.268657
| 0.07732
| 0.054124
| 0.085052
| 0.237113
| 0.206186
| 0.206186
| 0.206186
| 0
| 0
| 0
| 0.059369
| 0.222222
| 693
| 20
| 91
| 34.65
| 0.660482
| 0
| 0
| 0
| 0
| 0
| 0.132565
| 0.0317
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117647
| false
| 0
| 0.058824
| 0
| 0.294118
| 0.117647
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39ac7cdc9dcc48e4f5e6e8db36ab648730a99cc2
| 20,366
|
py
|
Python
|
source/python/brick_characterizer/CellRiseFall_Char.py
|
electronicvisions/brick
|
9ad14f9d2912e70191f4711f359e3912c8cef837
|
[
"BSD-3-Clause"
] | 1
|
2016-08-02T15:23:16.000Z
|
2016-08-02T15:23:16.000Z
|
source/python/brick_characterizer/CellRiseFall_Char.py
|
ahartel/brick
|
9ad14f9d2912e70191f4711f359e3912c8cef837
|
[
"BSD-3-Clause"
] | null | null | null |
source/python/brick_characterizer/CellRiseFall_Char.py
|
ahartel/brick
|
9ad14f9d2912e70191f4711f359e3912c8cef837
|
[
"BSD-3-Clause"
] | 1
|
2016-05-27T21:22:14.000Z
|
2016-05-27T21:22:14.000Z
|
from timingsignal import TimingSignal
from brick_characterizer.CharBase import CharBase
class CellRiseFall_Char(CharBase):
    """Clock-to-output delay and output-transition characterization run.

    For one (input transition time, load capacitance) point this class
    generates clock and stimulus pwl() waveforms, runs one simulation,
    then extracts per-signal rising/falling delays and transition times
    from the printed waveform edges.

    NOTE(review): Python-2 code (iterkeys/iteritems/has_key). Many members
    are inherited from CharBase and not visible here (clock_period,
    timing_offset, low_value/high_value, slew thresholds, state,
    append_out, add_probe, set_initial_condition, run, logger_*, ...) —
    their semantics are assumed, confirm against CharBase.
    """
    def __init__(self,toplevel,output_filename,temperature,use_spectre=False):
        # toplevel: circuit under test; output_filename: base name for
        # generated netlist/result files.
        self.toplevel = toplevel
        self.output_filename = output_filename
        self.load_capacitance = 0.01
        self.clock_rise_time = 0.1 #ns
        self.signal_rise_time = 0.1 #ns
        self.stimulus_signals = []
        # Results keyed by probed signal name: [rising, falling] pairs.
        self.delays = {}
        self.transitions = {}
        super(CellRiseFall_Char,self).__init__(temperature,use_spectre)
        # The following assignments have to be after the super constructor
        self.initial_delay = self.clock_period/2.0
        self.simulation_length = 9.0 #ns
    def get_delays(self):
        """Return {signal: [rising_delay, falling_delay]} from check_timing()."""
        return self.delays
    def get_transitions(self):
        """Return {signal: [rising_transition, falling_transition]}."""
        return self.transitions
    def get_first_table_param(self):
        # First table axis: the (derated) input/clock transition time.
        return round(self.get_clock_rise_time(),5)
    def get_second_table_param(self):
        # Second table axis: the output load capacitance.
        return self.get_load_capacitance()
    def get_clock_rise_time(self):
        # Externally visible value is derated; internal value is raw ramp time.
        return self.clock_rise_time*self.slew_derate_factor
    def set_clock_rise_time(self,value):
        # Store the un-derated value so get/set round-trip.
        self.clock_rise_time = value/self.slew_derate_factor
    def get_load_capacitance(self):
        return self.load_capacitance
    def set_load_capacitance(self,value):
        self.load_capacitance = value
    def whats_my_name(self):
        """Unique identifier of this characterization point."""
        return 'CellRiseFall_Char_inTr'+str(self.get_clock_rise_time())+'_cap'+str(self.load_capacitance)
    def log_my_name(self):
        """Tab-separated progress line: state, input transition, load cap."""
        return self.state+'\tin'+str(self.get_clock_rise_time())+'\tcap'+str(self.load_capacitance)
    def next_step(self):
        """Advance the characterization state machine.

        Returns 0 on success (or when already done), 1 if the simulation
        or the timing extraction failed.
        """
        # this class has only one step
        if self.state == 'init':
            self.state = 'delay'
            self.write_spice_file()
            if not self.run() == 0:
                return 1
            if not self.check_timing() == 0:
                return 1
            self.state = 'done'
            return 0
        return 0
    def get_current_filename(self):
        """Netlist filename for the current state, derived from output_filename."""
        import os
        name,ext = os.path.splitext(self.output_filename)
        return name+'_inTr'+str(self.get_clock_rise_time())+'_cap' \
                +str(self.load_capacitance)+'_'+self.state+ext
    def add_clock_signals(self,clocks):
        """Register clock signals ({name: 'R'|'F'} direction mapping)."""
        # Add clock signals
        self.clocks = clocks
        # Check if one of the clocks is already given as a static signal
        if self.added_static_signals:
            for name in clocks.iterkeys():
                if self.static_signals.has_key(name):
                    raise Exception('Clock signal '+name+' has already been'
                            + ' defined as a static signal.')
    def add_timing_signals(self,tim_sig):
        """This function adds the timing signals for this characterization run.
        The parameter tim_sig has the following data structure:
        {
            'd_out[1:0]' : ['clk', 'd_out_ff[=index=]', 'positive_unate'],
            'd_in_ff[1:0]' : ['clk', 'd_in[=index=]', 'positive_unate'],
        }
        There are two signals involved: The measured signal (in this case
        d_out[1:0] and d_in_ff[1:0]) and the stimulus_signal (in this case
        d_out_ff[1:0] and d_in[1:0])."""
        # Add the actual timing signals
        for signal, related in self.itersignals(tim_sig,
                eval_index_expression=True):
            # Check if one of the clocks is already given as a static signal
            if self.added_static_signals:
                if self.static_signals.has_key(signal):
                    raise Exception('Timing signal '+signal+' has ' \
                            + 'already been defined as a ' \
                            + 'static signal.')
            t = TimingSignal(signal,related)
            self.timing_signals[signal] = t
            # The following list stores a unique list of the stimulus
            # signals for later pulse source generation in the net list
            self.stimulus_signals.append(t.stimulus())
            self.delays[signal] = []
            self.transitions[signal] = []
        self.stimulus_signals = set(self.stimulus_signals)
        self.added_timing_signals = True
    def generate_timing_signals(self):
        """Emit clock/stimulus waveforms, probes, loads and initial
        conditions for all registered signals into the netlist."""
        for name,direction in self.clocks.iteritems():
            self.generate_clock_edge(name,direction)
            self.add_probe(name)
        for signal in self.stimulus_signals:
            self.generate_two_edges(signal,self.signal_rise_time,self.initial_delay,self.initial_delay)
            #self.logger_debug("Generating edge for "+signal+" with rising delay "+str(self.initial_delay)+ " and falling delay "+str(self.initial_delay))
            self.add_probe(signal)
            self.set_initial_condition(signal,self.low_value)
        for signal_name,signal_obj in self.timing_signals.iteritems():
            self.add_probe(signal_name)
            self.add_capacitance(signal_name,self.load_capacitance)
            # Initial level depends on unateness so the active clock edge
            # produces the transition that is measured.
            if signal_obj.unateness() == 'positive_unate':
                self.set_initial_condition(signal_name,self.low_value)
            elif signal_obj.unateness() == 'negative_unate':
                self.set_initial_condition(signal_name,self.high_value)
            else:
                raise Exception('Probe signal '+signal_name+' has unknown unate-ness. Please specify \'positive_unate\' or \'negative_unate\'')
    def generate_clock_edge(self,name,direction):
        """Emit a pwl() voltage source for clock `name`.

        The waveform toggles every half clock_period over roughly four
        periods around timing_offset (times in ns, hence the e-9/e-09
        suffixes). direction 'R' starts low (rising active edge); any
        other direction emits the mirrored falling-edge waveform.
        """
        self.append_out('V'+name+' '+name+' 0 pwl(')
        if direction == 'R':
            self.append_out('+ 0.0000000e+00 0.0000000e+00')
            self.append_out('+ '+str(self.timing_offset-self.clock_period*1.0 - self.clock_rise_time*0.5)+'e-9 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset-self.clock_period*1.0 + self.clock_rise_time*0.5)+'e-09 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset-self.clock_period*0.5)+'e-9 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset-self.clock_period*0.5 + self.clock_rise_time)+'e-09 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset - self.clock_rise_time*0.5)+'e-9 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset + self.clock_rise_time*0.5)+'e-09 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*0.5)+'e-9 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*0.5 + self.clock_rise_time)+'e-09 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*1.0 - self.clock_rise_time*0.5)+'e-9 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*1.0 + self.clock_rise_time*0.5)+'e-09 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*1.5)+'e-9 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*1.5 + self.clock_rise_time)+'e-09 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*2.0 - self.clock_rise_time*0.5)+'e-9 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*2.0 + self.clock_rise_time*0.5)+'e-09 '+str(self.high_value))
        else:
            self.append_out('+ 0.0000000e+00 '+str(self.high_value)+'000000e+00')
            self.append_out('+ '+str(self.timing_offset-self.clock_period*1.0 - self.clock_rise_time*0.5)+'e-9 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset-self.clock_period*1.0 + self.clock_rise_time*0.5)+'e-09 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset-self.clock_period*0.5)+'e-9 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset-self.clock_period*0.5 + self.clock_rise_time)+'e-09 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset - self.clock_rise_time*0.5)+'e-9 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset + self.clock_rise_time*0.5)+'e-09 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*0.5)+'e-9 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*0.5 + self.clock_rise_time)+'e-09 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*1.0 - self.clock_rise_time*0.5)+'e-9 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*1.0 + self.clock_rise_time*0.5)+'e-09 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*1.5)+'e-9 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*1.5 + self.clock_rise_time)+'e-09 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*2.0 - self.clock_rise_time*0.5)+'e-9 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*2.0 + self.clock_rise_time*0.5)+'e-09 '+str(self.low_value))
    def generate_two_edges(self,signal,transition_time,rising_delay,falling_delay):
        """Emit a pwl() source for `signal`: one rising edge rising_delay
        before timing_offset and one falling edge a clock period later."""
        self.append_out('V'+signal+' '+signal+' 0 pwl(')
        start_time = self.timing_offset - rising_delay
        start_time_2 = self.timing_offset+self.clock_period - falling_delay
        first_value = self.low_value
        second_value = self.high_value
        self.append_out('+ 0.0000000e+00 '+str(first_value)+'e+00')
        self.append_out('+ '+str(start_time)+'e-9 '+str(first_value)+'e+0')
        self.append_out('+ '+str(start_time+transition_time)+'e-09 '+str(second_value)+'e+00')
        self.append_out('+ '+str(start_time_2)+'e-9 '+str(second_value)+'e+00')
        self.append_out('+ '+str(start_time_2+transition_time)+'e-09 '+str(first_value)+'e+00)')
    def add_capacitance(self,signal_name,capacitance):
        # Load capacitance given in pF (scaled by e-12 in the netlist).
        self.append_out('C'+signal_name+' '+signal_name \
                +' 0 '+str(capacitance)+'e-12')
    def add_pseudo_static_signals(self,signals):
        """Pseudo-Static signals in the case of an Output timing
        characterization are the input timing signals. The function
        *do_characterization* passes the input timing signals to this function.
        It assigns zero to all of them during simulation."""
        if not self.added_timing_signals:
            raise Exception('Cannot add pseudo-static signals before' \
                    + ' timing_signals have been added. Please call' \
                    + ' this function afterwards.')
        # A signal is only pinned to zero if it is neither a stimulus nor a clock.
        not_known = lambda name: not name in self.stimulus_signals and not self.clocks.has_key(name)
        for signal,related in self.itersignals(signals,
                eval_index_expression=True):
            if not_known(signal):
                self.static_signals[signal] = 0
        self.added_static_signals = True
    def check_timing(self):
        """Extract delays and transition times from the simulated edges.

        Edge lists come from parse_print_file() as flat lists in groups of
        three (lower/middle/upper threshold crossings per edge, hence the
        [k*3+1] indexing for the middle crossing). Returns 0 on success,
        1 on any parse/extraction failure.
        """
        # parse result file
        # after this step, all edges are identified
        if not self.parse_print_file() == 0:
            return 1
        # find clock edge
        clock_edges = {}
        try:
            for clock_name, clock_dir in self.clocks.iteritems():
                if not clock_edges.has_key(clock_name):
                    clock_edges[clock_name] = []
                self.logger_debug(str(self.get_rising_edges(clock_name)))
                if (clock_dir == 'R'):
                    # Middle crossing of the 2nd and 3rd rising edge.
                    clock_edges[clock_name].append(self.get_rising_edges(clock_name)[1*3+1])
                    clock_edges[clock_name].append(self.get_rising_edges(clock_name)[2*3+1])
                    # cnt = 0
                    # for edge in self.get_rising_edges(clock_name)[1,4,2]:
                    #     if cnt == 1:
                    #         clock_edges[clock_name].append(edge)
                    #     cnt = cnt + 1 if cnt < 2 else 0
                    self.logger_debug( "Rising edge of "+clock_name+" at "+" ".join([str(x) for x in clock_edges[clock_name]]))
                else:
                    clock_edges[clock_name].append(self.get_falling_edges(clock_name)[1*3+1])
                    clock_edges[clock_name].append(self.get_falling_edges(clock_name)[2*3+1])
                    # cnt = 0
                    # for edge in self.get_falling_edges(clock_name):
                    #     if cnt == 1:
                    #         clock_edges[clock_name].append(edge)
                    #     cnt = cnt + 1 if cnt < 2 else 0
                    self.logger_debug( "Falling edge of "+clock_name+" at "+" ".join([str(x) for x in clock_edges[clock_name]]))
        except:
            # NOTE(review): bare except silently maps any failure (even a
            # typo-level bug) to "simulation failed" — consider narrowing.
            self.logger_debug("Died")
            return 1
        for timing_signal in self.timing_signals.itervalues():
            # some alias pointers
            stimulus = timing_signal.stimulus()
            probe = timing_signal.name()
            probe_lc = probe
            if not self.use_spectre:
                probe_lc = probe.lower()
            # initial timing values
            delta_t = [0,0]
            tran = [0,0]
            self.logger_debug( "Rising edges of "+probe+" at "+" ".join([str(x) for x in self.get_rising_edges(probe_lc)]))
            self.logger_debug( "Falling edges of "+probe+" at "+" ".join([str(x) for x in self.get_falling_edges(probe_lc)]))
            if timing_signal.unateness() == 'positive_unate':
                # Positive unate: output rises with the first active clock
                # edge, falls with the second.
                r_edges_probe = self.get_rising_edges(probe_lc)
                if r_edges_probe:
                    while len(r_edges_probe) > 0:
                        lower = r_edges_probe.pop(0)
                        middle = r_edges_probe.pop(0)
                        upper = r_edges_probe.pop(0)
                        # get switching point
                        delta_t[0] = middle - clock_edges[timing_signal.clock()][0]
                        # get rising transition
                        tran[0] = upper - lower
                        if delta_t[0] < 0 or delta_t[0] > self.timing_offset*1.e-9:
                            self.logger_debug("Rising edge at "+str(middle)+" for signal " \
                                    +probe+" too far away from clock edge")
                            delta_t[0] = self.infinity
                        else:
                            self.logger_debug("Rising Delay: "+str(delta_t[0]))
                            break
                else:
                    self.logger_error("Rising edge for signal "+probe+" not found but expected.")
                    return 1
                f_edges_probe = self.get_falling_edges(probe_lc)
                if f_edges_probe:
                    while len(f_edges_probe) > 0:
                        lower = f_edges_probe.pop(0)
                        middle = f_edges_probe.pop(0)
                        upper = f_edges_probe.pop(0)
                        # get threshold time for switching point
                        delta_t[1] = middle - clock_edges[timing_signal.clock()][1]
                        # get threshold time for falling transition upper
                        tran[1] = upper-lower
                        if delta_t[1] < 0 or delta_t[1] > self.timing_offset*1.e-9:
                            self.logger_debug("Falling edge at "+str(middle)+" for signal " \
                                    +probe+" too far away from clock edge")
                            delta_t[1] = self.infinity
                        else:
                            self.logger_debug( "Falling Delay: "+str(delta_t[1]))
                            break
                else:
                    self.logger_error("Falling edge for signal "+probe+" not found but expected.")
                    return 1
            elif timing_signal.unateness() == 'negative_unate':
                # Negative unate: output falls with the first active clock
                # edge, rises with the second.
                f_edges_probe = self.get_falling_edges(probe_lc)
                if f_edges_probe:
                    while len(f_edges_probe) > 0:
                        lower = f_edges_probe.pop(0)
                        middle = f_edges_probe.pop(0)
                        upper = f_edges_probe.pop(0)
                        # get threshold time for switching point
                        delta_t[1] = middle - clock_edges[timing_signal.clock()][0]
                        # get threshold time for rising transition upper
                        tran[1] = upper - lower
                        if delta_t[1] < 0 or delta_t[1] > self.timing_offset*1.e-9:
                            self.logger_debug("Falling edge at "+str(middle)+" for signal " \
                                    +probe+" too far away from clock edge")
                            delta_t[1] = self.infinity
                        else:
                            self.logger_debug( "Falling Delay: "+str(delta_t[1]))
                            break
                else:
                    self.logger_error("Falling edge for signal "+probe_lc+" not found but expected.")
                    return 1
                r_edges_probe = self.get_rising_edges(probe_lc)
                if r_edges_probe:
                    while len(r_edges_probe) > 0:
                        lower = r_edges_probe.pop(0)
                        middle = r_edges_probe.pop(0)
                        upper = r_edges_probe.pop(0)
                        # get threshold time for switching point
                        delta_t[0] = middle - clock_edges[timing_signal.clock()][1]
                        # get threshold time for rising transition upper
                        tran[0] = upper - lower
                        if delta_t[0] < 0 or delta_t[0] > self.timing_offset*1.e-9:
                            self.logger_debug("Rising edge at "+str(middle)+" for signal " \
                                    +probe+" too far away from clock edge")
                            delta_t[0] = self.infinity
                        else:
                            self.logger_debug( "Rising Delay: "+str(delta_t[0]))
                            break
                else:
                    self.logger_error("Rising edge for signal "+probe_lc+" not found but expected.")
                    return 1
            self.delays[probe] = delta_t
            self.transitions[probe] = tran
            self.logger_debug('Delays for signal \''+probe+'\' are rising: '+str(self.delays[probe][0])+' and falling: '+str(self.delays[probe][1]))
            self.logger_debug('Transition times for signal \''+probe+'\' are rising: '+str(self.transitions[probe][0])+' and falling: '+str(self.transitions[probe][1]))
        return 0
    def parse_print_file(self):
        """Run the external print-file parser and unpickle its edge lists.

        Delegates threshold detection to a helper script selected by
        use_spectre, then loads the pickled rising/falling edge
        dictionaries it writes. Returns 0 on success, 1 on parser failure.
        """
        import subprocess,os
        call = ''
        if self.use_spectre:
            call = ['python', os.environ['BRICK_DIR']+'/source/python/brick_characterizer/parse_print_file_spectre.py', self.get_printfile_name(), str(self.high_value*self.rise_threshold), str(self.high_value*self.fall_threshold), str(self.high_value*self.slew_lower_rise), str(self.high_value*self.slew_upper_rise), str(self.high_value*self.slew_lower_fall), str(self.high_value*self.slew_upper_fall)]
        else:
            call = ['python', os.environ['BRICK_DIR']+'/source/python/brick_characterizer/parse_print_file.py', self.get_printfile_name(), str(self.high_value*self.rise_threshold), str(self.high_value*self.fall_threshold), str(self.high_value*self.slew_lower_rise), str(self.high_value*self.slew_upper_rise), str(self.high_value*self.slew_lower_fall), str(self.high_value*self.slew_upper_fall)]
        self.logger_debug(" ".join(call))
        returncode = subprocess.call(call)
        if not returncode == 0:
            self.logger_error("Error in Parse print file")
            return 1
        import pickle
        # NOTE(review): the local name `input` shadows the builtin here
        # (harmless within this method, but worth renaming).
        with open(self.get_printfile_name()+'_rising') as input:
            self.rising_edges = pickle.load(input)
        with open(self.get_printfile_name()+'_falling') as input:
            self.falling_edges = pickle.load(input)
        # self.logger_debug(str(self.rising_edges))
        # self.logger_debug(str(self.falling_edges))
        return 0
| 50.78803
| 402
| 0.584847
| 2,670
| 20,366
| 4.225843
| 0.096255
| 0.052114
| 0.043783
| 0.045378
| 0.612958
| 0.576708
| 0.533014
| 0.519188
| 0.500842
| 0.497917
| 0
| 0.022716
| 0.299666
| 20,366
| 400
| 403
| 50.915
| 0.768352
| 0.09781
| 0
| 0.293478
| 0
| 0
| 0.087285
| 0.007566
| 0
| 0
| 0
| 0
| 0
| 1
| 0.07971
| false
| 0
| 0.018116
| 0.028986
| 0.181159
| 0.025362
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39aefe4ed5c77eadc14e52071c40e7bf0197d590
| 332
|
py
|
Python
|
covid mail/main.py
|
rahul263-stack/PROJECT-Dump
|
d8b1cfe0da8cad9fe2f3bbd427334b979c7d2c09
|
[
"MIT"
] | 1
|
2020-04-06T04:41:56.000Z
|
2020-04-06T04:41:56.000Z
|
covid mail/main.py
|
rahul263-stack/quarantine
|
d8b1cfe0da8cad9fe2f3bbd427334b979c7d2c09
|
[
"MIT"
] | null | null | null |
covid mail/main.py
|
rahul263-stack/quarantine
|
d8b1cfe0da8cad9fe2f3bbd427334b979c7d2c09
|
[
"MIT"
] | null | null | null |
import os
from sendDetailedEmail.email import MailAttachment
def sendMail(clientEmail):
    """Build a MailAttachment for `clientEmail` and send it.

    Exceptions from the mail layer propagate to the caller unchanged.
    (The original wrapped the body in `try/except Exception as e: raise e`,
    a no-op that only added a frame to the traceback — removed.)
    """
    sender = MailAttachment(clientEmail=clientEmail)
    sender.send()
if __name__=="__main__":
    # Prompt for the recipient address and send the mail.
    # NOTE(review): the address is not validated before use — consider a
    # format check before handing it to the mail layer.
    clientEmail = input("input a valid client email ID: ")
    sendMail(clientEmail)
| 22.133333
| 58
| 0.698795
| 36
| 332
| 6.222222
| 0.694444
| 0.169643
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.222892
| 332
| 14
| 59
| 23.714286
| 0.868217
| 0
| 0
| 0
| 0
| 0
| 0.11747
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0.181818
| 0
| 0.272727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39af2956611d454e6abd79bee5b3ec4243b86cd1
| 2,933
|
py
|
Python
|
pyodide_importer/api.py
|
ryanking13/pyodide-importer
|
fb9f83e54eb307fcdb2590588f0b75db1c87ca97
|
[
"MIT"
] | 1
|
2021-11-16T11:55:54.000Z
|
2021-11-16T11:55:54.000Z
|
pyodide_importer/api.py
|
ryanking13/pyodide-importer
|
fb9f83e54eb307fcdb2590588f0b75db1c87ca97
|
[
"MIT"
] | null | null | null |
pyodide_importer/api.py
|
ryanking13/pyodide-importer
|
fb9f83e54eb307fcdb2590588f0b75db1c87ca97
|
[
"MIT"
] | null | null | null |
from contextlib import contextmanager
import pathlib
import sys
from typing import Union, List
from .import_hook import PyFinder, PyHTTPFinder
# Singleton instance of PyFinder
pyfinder: PyFinder = None
def _update_syspath(path: str):
"""
Append `path` to sys.path so that files in path can be imported
"""
path = pathlib.Path(path).resolve().as_posix()
if path not in sys.path:
sys.path.append(path)
def register_hook(
    base_url: Union[str, List[str]],
    download_path: str = "",
    modules: List[str] = None,
    update_syspath: bool = True,
):
    """
    Install the HTTP import hook into ``sys.meta_path``.

    Args:
        base_url (str or List[str]): URL(s) serving a directory of Python
            packages over HTTP/S.
        download_path (str): virtual-filesystem directory packages are
            downloaded into; defaults to the current working directory.
        modules (List[str]): names of root modules/packages importable from
            the URL. Without this whitelist, every FAILED import statement
            probes the URL and produces multiple HTTP 404 round-trips, so
            supplying it is recommended when the module set is fixed.
        update_syspath (bool): whether to append ``download_path`` to
            ``sys.path``.

    Returns:
        The registered ``PyHTTPFinder`` singleton.

    Raises:
        RuntimeError: if an import hook is already registered.
    """
    global pyfinder
    hook_active = pyfinder is not None and pyfinder._registered()
    if hook_active:
        raise RuntimeError(
            "import hook is already registered, if you want to register a new hook, unregister the existing hook with unregister_hook() first"
        )
    pyfinder = PyHTTPFinder(base_url, download_path, modules)
    pyfinder.register()
    if update_syspath:
        _update_syspath(download_path)
    return pyfinder
def unregister_hook():
    """
    Remove the import hook from ``sys.meta_path``.

    Afterwards no new external modules can be downloaded and imported;
    modules that were already imported stay usable. Calling this when no
    hook is registered is a no-op.
    """
    global pyfinder
    if pyfinder is None:
        return
    pyfinder.unregister()
    pyfinder = None
def add_module(module: Union[str, List[str]]):
    """
    Whitelist additional module(s) importable from the URL.

    Args:
        module (str or List[str]): module/package name(s) to allow.

    Raises:
        RuntimeError: if no import hook is currently registered.
    """
    global pyfinder
    if not (pyfinder is not None and pyfinder._registered()):
        raise RuntimeError("import hook is not registered")
    pyfinder.add_module(module)
def available_modules():
    """
    Return the list of modules that can be imported from the URL.

    Raises:
        RuntimeError: if no import hook is currently registered.
    """
    finder = pyfinder
    if finder is None or not finder._registered():
        raise RuntimeError("import hook is not registered")
    return finder.available_modules()
| 31.880435
| 142
| 0.699284
| 398
| 2,933
| 5.080402
| 0.30402
| 0.029674
| 0.032146
| 0.03363
| 0.21365
| 0.203264
| 0.203264
| 0.147379
| 0.120673
| 0.120673
| 0
| 0.001323
| 0.22673
| 2,933
| 91
| 143
| 32.230769
| 0.890212
| 0.453461
| 0
| 0.195122
| 0
| 0.02439
| 0.126102
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.121951
| false
| 0
| 0.195122
| 0
| 0.365854
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39af8dcb80c383fcd4bfdd52b3cd4d36dce1df8f
| 1,982
|
py
|
Python
|
rastervision/new_version/batch_submit.py
|
carderne/raster-vision
|
915fbcd3263d8f2193e65c2cd0eb53e050a47a01
|
[
"Apache-2.0"
] | 1
|
2019-11-07T10:02:23.000Z
|
2019-11-07T10:02:23.000Z
|
rastervision/new_version/batch_submit.py
|
carderne/raster-vision
|
915fbcd3263d8f2193e65c2cd0eb53e050a47a01
|
[
"Apache-2.0"
] | null | null | null |
rastervision/new_version/batch_submit.py
|
carderne/raster-vision
|
915fbcd3263d8f2193e65c2cd0eb53e050a47a01
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
import uuid
import click
from rastervision.rv_config import RVConfig
def _batch_submit(cmd,
                  debug=False,
                  profile=False,
                  attempts=5,
                  parent_job_ids=None,
                  num_array_jobs=None,
                  use_gpu=False):
    """Submit a command to AWS Batch and return the new job id.

    Args:
        cmd (str): space-separated command line to run in the container.
        debug (bool): wrap the command with ptvsd so a debugger can attach.
        profile (bool): wrap the command with kernprof line profiling.
        attempts (int): AWS Batch retry attempts for the job.
        parent_job_ids (list or None): job ids this job depends on.
        num_array_jobs (int or None): if set, submit an array job of this size.
        use_gpu (bool): use the GPU job queue/definition instead of the CPU one.

    Returns:
        str: the AWS Batch job id of the submitted job.
    """
    rv_config = RVConfig.get_instance()
    batch_config = rv_config.get_subconfig('AWS_BATCH')

    # Default to the CPU queue/definition; switch to GPU on request.
    job_queue = batch_config('cpu_job_queue')
    job_def = batch_config('cpu_job_definition')
    if use_gpu:
        job_queue = batch_config('job_queue')
        job_def = batch_config('job_definition')

    import boto3  # local import: only needed when actually submitting
    client = boto3.client('batch')
    job_name = 'ffda-{}'.format(uuid.uuid4())
    cmd_list = cmd.split(' ')
    if debug:
        cmd_list = [
            'python', '-m', 'ptvsd', '--host', '0.0.0.0', '--port', '6006',
            '--wait', '-m'
        ] + cmd_list
    if profile:
        cmd_list = ['kernprof', '-v', '-l'] + cmd_list

    kwargs = {
        'jobName': job_name,
        'jobQueue': job_queue,
        'jobDefinition': job_def,
        'containerOverrides': {
            'command': cmd_list
        },
        'retryStrategy': {
            'attempts': attempts
        },
    }
    if parent_job_ids:
        # Loop variable renamed: the original shadowed the builtin `id`.
        kwargs['dependsOn'] = [{'jobId': parent_id} for parent_id in parent_job_ids]
    if num_array_jobs:
        kwargs['arrayProperties'] = {'size': num_array_jobs}

    job_id = client.submit_job(**kwargs)['jobId']
    msg = 'submitted job with jobName={} and jobId={}'.format(job_name, job_id)
    print(cmd_list)
    print(msg)
    return job_id
@click.command()
@click.argument('cmd')
@click.option('--debug', is_flag=True)
@click.option('--profile', is_flag=True)
@click.option('--attempts', default=5)
@click.option('--gpu', is_flag=True)
def batch_submit(cmd, debug, profile, attempts, gpu):
    # Thin CLI wrapper: forward to _batch_submit with explicit keywords.
    # (No docstring on purpose — click would surface it as --help text.)
    return _batch_submit(
        cmd,
        debug=debug,
        profile=profile,
        attempts=attempts,
        use_gpu=gpu,
    )
if __name__ == '__main__':
    # Entry point when executed as a script (click parses sys.argv).
    batch_submit()
| 26.783784
| 79
| 0.589808
| 239
| 1,982
| 4.60251
| 0.351464
| 0.044545
| 0.038182
| 0.051818
| 0.168182
| 0.107273
| 0
| 0
| 0
| 0
| 0
| 0.009609
| 0.264884
| 1,982
| 73
| 80
| 27.150685
| 0.745367
| 0.010595
| 0
| 0
| 0
| 0
| 0.167347
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034483
| false
| 0
| 0.068966
| 0.017241
| 0.137931
| 0.034483
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39b0985dcd907af2111c10e4b763175f9a26f8fe
| 311
|
py
|
Python
|
app/api/item.py
|
peterentroprise/entro-tad
|
b074d4810bcc7fb71b467da8dfaa19be66a41fa2
|
[
"MIT"
] | null | null | null |
app/api/item.py
|
peterentroprise/entro-tad
|
b074d4810bcc7fb71b467da8dfaa19be66a41fa2
|
[
"MIT"
] | null | null | null |
app/api/item.py
|
peterentroprise/entro-tad
|
b074d4810bcc7fb71b467da8dfaa19be66a41fa2
|
[
"MIT"
] | null | null | null |
from fastapi import APIRouter
from models.item_model import Payload
from service import item_service
router = APIRouter()
@router.get("/")
async def read_root():
    """Root endpoint; responds with a static greeting payload."""
    payload = {"Hello": "Universe"}
    return payload
@router.post("/indexitem")
async def index_item(payload: Payload):
    """Index an item: delegate the posted Payload to the item service."""
    return item_service.index_item(payload)
| 19.4375
| 43
| 0.752412
| 41
| 311
| 5.560976
| 0.512195
| 0.096491
| 0.140351
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135048
| 311
| 16
| 43
| 19.4375
| 0.847584
| 0
| 0
| 0
| 0
| 0
| 0.07717
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.3
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39b1dd9a2298bcc4fe7df8fe5dd5e695bcdaca18
| 6,867
|
py
|
Python
|
scripts/docker_configurator/docker_configurator.py
|
PlenusPyramis/dockerfiles
|
0c1b19faa33e944c66f3762fe49d7f954aa60b12
|
[
"MIT"
] | 1
|
2020-01-10T16:26:32.000Z
|
2020-01-10T16:26:32.000Z
|
scripts/docker_configurator/docker_configurator.py
|
PlenusPyramis/dockerfiles
|
0c1b19faa33e944c66f3762fe49d7f954aa60b12
|
[
"MIT"
] | null | null | null |
scripts/docker_configurator/docker_configurator.py
|
PlenusPyramis/dockerfiles
|
0c1b19faa33e944c66f3762fe49d7f954aa60b12
|
[
"MIT"
] | 2
|
2020-02-22T23:25:24.000Z
|
2020-11-04T05:09:48.000Z
|
"""
Docker Configurator
http://www.github.com/EnigmaCurry/docker-configurator
This tool creates self-configuring docker containers given a single
YAML file.
Run this script before your main docker CMD. It will write fresh
config files on every startup of the container, based off of Mako
templates embedded in the docker image, as well as values specified in
a YAML file provided in a mounted volume.
The idea of this is that container configuration is kind of hard
because everyone does it differently. This creates a standard way of
doing it for containers that I write. A single file to configure
everything.
See the included example project: `docker_configurator_example`
---------------------------------------------------------------------------
Copyright (c) 2019 PlenusPyramis
Copyright (c) 2015 Ryan McGuire
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import yaml
from mako.template import Template
from mako.lookup import TemplateLookup
from mako import exceptions as mako_exceptions
import logging
import argparse
import os
import shutil
import collections
logging.basicConfig(level=logging.INFO)
logger=logging.getLogger("docker_configurator")
__version__ = "v0.9.0"
def deep_merge(*dicts):
    """
    Non-destructive deep-merge of multiple dictionary-like objects.
    Later dictionaries win on conflicting scalar keys; nested dicts are
    merged recursively. None of the inputs is modified.

    >>> a = { 'first' : { 'all_rows' : { 'pass' : 'dog', 'number' : '1', 'recipe':['one','two'] } } }
    >>> b = { 'first' : { 'all_rows' : { 'fail' : 'cat', 'number' : '5', 'recipe':['three'] } } }
    >>> c = deep_merge(a, b)
    >>> a == { 'first' : { 'all_rows' : { 'pass' : 'dog', 'number' : '1', 'recipe':['one','two'] } } }
    True
    >>> b == { 'first' : { 'all_rows' : { 'fail' : 'cat', 'number' : '5', 'recipe':['three'] } } }
    True
    >>> c == { 'first' : { 'all_rows' : { 'pass' : 'dog', 'fail' : 'cat', 'number' : '5', 'recipe':['three'] } } }
    True
    >>> c == deep_merge(a, b, c)
    True
    """
    def _merge_into(src, dst):
        # Fold ``src`` into ``dst`` in place; only mapping destinations
        # are descended into, everything else is left untouched.
        if not isinstance(dst, collections.abc.Mapping):
            return
        for key, value in src.items():
            if isinstance(value, dict):
                _merge_into(value, dst.setdefault(key, {}))
            else:
                dst[key] = value

    merged = {}
    for candidate in dicts:
        _merge_into(candidate, merged)
    return merged
def load_merged_config(config_path="/config"):
    """Load default.yaml merged with an optional config.yaml.

    The user config (config.yaml) overrides the mandatory default config
    (default.yaml); both are looked up inside ``config_path``.

    Raises:
        AssertionError: if default.yaml parses to an empty document.
    """
    default_path = os.path.join(config_path, "default.yaml")
    user_path = os.path.join(config_path, "config.yaml")

    with open(default_path) as fh:
        defaults = yaml.safe_load(fh)
    if defaults is None:
        raise AssertionError('Default config is empty: {}'.format(default_path))
    logger.info("Default configuration loaded from {}".format(default_path))

    overrides = {}
    if os.path.exists(user_path):
        with open(user_path) as fh:
            overrides = yaml.safe_load(fh)
        logger.info("User configuration loaded from {}".format(user_path))
    else:
        logger.warning("User configuration was not found. Using default config only.")

    return deep_merge(defaults, overrides)
def render_to_files(template, output, **params):
    """Render a Mako template and write the result to one or more files.

    Args:
        template: Mako template object (must provide ``.uri`` and ``.render()``).
        output (str or iterable of str): destination file path(s).
        **params: rendering context passed to ``template.render()``.

    Returns:
        str: the rendered text.

    Raises:
        Re-raises whatever rendering/writing raised, after printing Mako's
        annotated error traceback.
    """
    def write(path, data):
        if os.path.exists(path):
            logger.warning("Overwriting existing file: {}".format(path))
        with open(path, 'w') as f:
            f.write(data)
    try:
        logging.info("Rendering template: {} to file(s): {}".format(template.uri, output))
        data = template.render(**params)
        # isinstance replaces the original `type(output) == str` comparison.
        if isinstance(output, str):
            write(output, data)
        else:
            for out in output:
                write(out, data)
        return data
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are
        # not intercepted; the original error is still re-raised after
        # printing Mako's annotated traceback.
        print(mako_exceptions.text_error_template().render())
        raise
class DockerConfigurator(object):
    """Turn a YAML config plus Mako templates into application config files.

    The merged config must contain a ``template_map`` key mapping template
    names (looked up under ``<config_path>/templates``) to final paths.

    # Example yaml for config.yaml or default.yaml:
    template_map:
      - my_config.mako: /etc/my_config
      - my_script.sh.mako: /usr/local/bin/cool_script
    """
    def __init__(self, config_path="/config"):
        self.config = load_merged_config(config_path)
        template_dir = os.path.join(config_path, "templates")
        self.template_lookup = TemplateLookup(directories=[template_dir])

    def write_configs(self, template_map=None):
        """Render every template in the map to its configured location.

        template_map maps template file names to output file paths; when
        omitted it is read from the loaded config ('template_map' key).
        """
        if template_map is None:
            try:
                template_map = self.config['template_map']
            except KeyError:
                logger.error("Missing template_map from config.yaml")
                raise
        for template_name, dest_path in template_map.items():
            template = self.template_lookup.get_template(template_name)
            dest_dir = os.path.dirname(dest_path)
            if not os.path.exists(dest_dir):
                logger.info("Creating directory: {}".format(dest_dir))
                os.makedirs(dest_dir)
            render_to_files(template, dest_path, **self.config)
def main():
    """CLI entry point: parse arguments and render all configured templates."""
    parser = argparse.ArgumentParser(
        description='Docker Configurator',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument("-c", "--config-path", default="/config",
                        help="Path to config and templates directory")
    options = parser.parse_args()
    configurator = DockerConfigurator(options.config_path)
    configurator.write_configs()
if __name__ == "__main__":
    # Allow running the configurator as a standalone script.
    main()
| 39.24
| 114
| 0.663026
| 882
| 6,867
| 5.049887
| 0.346939
| 0.042658
| 0.013471
| 0.010777
| 0.090031
| 0.055905
| 0.055905
| 0.042434
| 0.034576
| 0.034576
| 0
| 0.005821
| 0.224407
| 6,867
| 174
| 115
| 39.465517
| 0.830454
| 0.447794
| 0
| 0.081395
| 0
| 0
| 0.123228
| 0
| 0
| 0
| 0
| 0
| 0.011628
| 1
| 0.093023
| false
| 0
| 0.104651
| 0
| 0.244186
| 0.011628
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39b8f43a4fc39e9ee986451845affe8860e4df82
| 381
|
py
|
Python
|
setup.py
|
kervi/kervi-hal-win
|
adb0d93f63b3ed36fd6527c69dc301a63a30138f
|
[
"MIT"
] | null | null | null |
setup.py
|
kervi/kervi-hal-win
|
adb0d93f63b3ed36fd6527c69dc301a63a30138f
|
[
"MIT"
] | null | null | null |
setup.py
|
kervi/kervi-hal-win
|
adb0d93f63b3ed36fd6527c69dc301a63a30138f
|
[
"MIT"
] | null | null | null |
# Import the submodule explicitly: `import distutils` alone does not
# guarantee that `distutils.dir_util` is available as an attribute.
import distutils.dir_util
from setuptools import setup

# The real version lives in the kervi package; fall back when it is not
# installed (e.g. first build in a clean environment).
try:
    from kervi.platforms.windows.version import VERSION
except ImportError:
    VERSION = "0.0"

# Best-effort cleanup of a previous build. Narrowed from a bare `except:`
# so KeyboardInterrupt/SystemExit are no longer swallowed.
try:
    distutils.dir_util.remove_tree("dist")
except Exception:
    pass

setup(
    name='kervi-hal-win',
    version=VERSION,
    packages=[
        "kervi/platforms/windows",
    ],
    install_requires=[
        'psutil',
        'inputs'
    ],
)
| 15.24
| 55
| 0.627297
| 42
| 381
| 5.619048
| 0.619048
| 0.118644
| 0.177966
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007042
| 0.254593
| 381
| 25
| 56
| 15.24
| 0.823944
| 0
| 0
| 0.285714
| 0
| 0
| 0.143979
| 0.060209
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.047619
| 0.142857
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39b9562e1c7649e5f232cd655226d45528bdfb68
| 877
|
py
|
Python
|
examples/minimize_koopman_error.py
|
kijanac/Materia
|
b49af518c8eff7d3a8c6caff39783e3daf80a7a0
|
[
"MIT"
] | null | null | null |
examples/minimize_koopman_error.py
|
kijanac/Materia
|
b49af518c8eff7d3a8c6caff39783e3daf80a7a0
|
[
"MIT"
] | null | null | null |
examples/minimize_koopman_error.py
|
kijanac/Materia
|
b49af518c8eff7d3a8c6caff39783e3daf80a7a0
|
[
"MIT"
] | null | null | null |
import argparse
import materia as mtr
import dask.distributed
if __name__ == "__main__":
    # CLI flags: Q-Chem environment/scratch paths, dask scratch space, and
    # the number of optimizer evaluations.
    parser = argparse.ArgumentParser()
    parser.add_argument("--qcenv", type=str)
    parser.add_argument("--scratch", type=str)
    parser.add_argument("--dask_scratch", type=str)
    parser.add_argument("--num_evals", type=int)
    args = parser.parse_args()
    # Build a benzene molecule and a Q-Chem-backed task that minimizes the
    # Koopman error for it.
    m = mtr.Molecule("benzene")
    qchem = mtr.QChem(qcenv=args.qcenv, scratch_dir=args.scratch)
    io = mtr.IO("gs.in", "gs.out", "minimize_koopman_error")
    min_ke = qchem.minimize_koopman_error(io, name="min_ke")
    min_ke.requires(molecule=m, num_evals=args.num_evals)
    # Run the single-task workflow on a local dask cluster and print the
    # result keyed by the task name.
    wf = mtr.Workflow(min_ke)
    cluster = dask.distributed.LocalCluster()
    with dask.config.set(temporary_directory=args.dask_scratch):
        with dask.distributed.Client(cluster) as client:
            print(wf.compute()["min_ke"])
| 31.321429
| 65
| 0.698974
| 119
| 877
| 4.907563
| 0.420168
| 0.042808
| 0.116438
| 0.082192
| 0.14726
| 0.106164
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159635
| 877
| 27
| 66
| 32.481481
| 0.792402
| 0
| 0
| 0
| 0
| 0
| 0.115165
| 0.025086
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.15
| 0
| 0.15
| 0.05
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39ba8a8ab31258dd5face8cc99e1f8cec294b091
| 300
|
py
|
Python
|
simple/__init__.py
|
jbrid867/SIMPLE
|
56e88c8271c22f7c41bd5d6b148b01e11a9e3713
|
[
"Apache-2.0"
] | 1
|
2019-01-19T06:44:29.000Z
|
2019-01-19T06:44:29.000Z
|
simple/__init__.py
|
jbrid867/SIMPLE
|
56e88c8271c22f7c41bd5d6b148b01e11a9e3713
|
[
"Apache-2.0"
] | 179
|
2018-10-02T21:07:19.000Z
|
2020-09-08T17:38:44.000Z
|
simple/__init__.py
|
johnbridstrup/simple
|
56e88c8271c22f7c41bd5d6b148b01e11a9e3713
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Top-level package for simple."""
# Package metadata; presumably also read by packaging tooling — confirm
# against setup.py/setup.cfg before changing.
__author__ = """John Bridstrup"""
__email__ = 'john.bridstrup@gmail.com'
__version__ = '0.1.8'
# NOTE(review): the submodule imports below are intentionally disabled;
# confirm whether the package API should re-export them before re-enabling.
# import Data
# import data_analysis
# import kernels
# import KMC
# import running
# import simple
# import simulations
# import statevector
| 17.647059
| 38
| 0.703333
| 37
| 300
| 5.351351
| 0.675676
| 0.131313
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015748
| 0.153333
| 300
| 16
| 39
| 18.75
| 0.76378
| 0.593333
| 0
| 0
| 0
| 0
| 0.398148
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39baf90e3f5d1892dbfa7337958aae37f41a76bf
| 13,482
|
py
|
Python
|
emarket/views.py
|
MerlinEmris/eBazar
|
f159314183a8a95afd97d36b0d3d8cf22015a512
|
[
"MIT"
] | null | null | null |
emarket/views.py
|
MerlinEmris/eBazar
|
f159314183a8a95afd97d36b0d3d8cf22015a512
|
[
"MIT"
] | null | null | null |
emarket/views.py
|
MerlinEmris/eBazar
|
f159314183a8a95afd97d36b0d3d8cf22015a512
|
[
"MIT"
] | null | null | null |
# from traceback import TracebackException
from django.contrib.auth.forms import UserCreationForm
# from django.contrib.auth.models import User
from django.contrib.auth import login, authenticate
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.contrib.postgres.search import SearchVector
from django.core import serializers
from django.http import JsonResponse
from django.views import View
# import os
# from django.contrib.sites.shortcuts import get_current_site
# from django.utils.encoding import force_bytes
# from django.utils.encoding import force_text
# from django.utils.http import urlsafe_base64_encode
# from django.utils.http import urlsafe_base64_decode
# from django.template.loader import render_to_string
from django.http import HttpResponse
import django_filters.rest_framework
from django.shortcuts import render, redirect
from .forms import ProfilePhotoForm, PhotoForm, SignUpForm, ProfileForm, ItemForm, SearchForm
from .models import User, Profile, Item, Category, Item_Image, Favorite_item
from ebazar import settings
from .serializers import ( CategorySerializer,
ItemSerializer,
UserSerializer,
Item_ImageSerializer,)
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from rest_framework import viewsets, status
# import django_filters.rest_framework
from rest_framework.generics import (
DestroyAPIView,
ListAPIView,
UpdateAPIView,
RetrieveAPIView,
CreateAPIView
)
from rest_framework.views import APIView
import shutil
import os
import datetime
import json
# print console logs
# Prefix/suffix strings used by the ad-hoc print-based logging below.
# NOTE(review): these are evaluated once at import time, so log_prefix and
# log_date reflect process start, not the moment of each log call — confirm
# that is intended.
log_prefix = '['+datetime.datetime.now().strftime("%d-%m-%y %H:%M:%S")+']'
log_end = '********'
log_date = datetime.datetime.now().strftime("%d-%m-%y_%H:%M")
# redirect to create user (url(r'^$'))
def index(request):
    """Redirect the site root to the home page.

    The original branched on ``request.user`` but both branches redirected
    to 'home', so the condition was dead code and has been removed.
    """
    return redirect('home')
# create user with min information
def create_user(request):
    """Handle user sign-up with minimal information.

    GET renders an empty sign-up form. POST validates the submission,
    creates the user, authenticates and logs them in, then redirects to
    'home'; invalid POSTs re-render the bound form.
    """
    if request.method == 'POST':
        form = SignUpForm(request.POST)
        # form = UserCreationForm(request.POST)
        if form.is_valid():
            user = form.save()
            print(log_prefix+'user '+form.cleaned_data['username']+'is created'+log_end)
            # Disabled email-activation flow, kept for reference:
            # user.is_active = False
            # user.refresh_from_db()
            # user.profile.birth_date = form.cleaned_data.get('birth_date')
            # user.profile.bio = form.cleaned_data.get('bio')
            # user.profile.location = form.cleaned_data.get('location')
            # current_site = get_current_site(request)
            # subject = 'Activate Your MySite Account'
            # message = render_to_string('account_activation_email.html', {
            # 'user': user,
            # 'domain': current_site.domain,
            # 'uid': urlsafe_base64_encode(force_bytes(user.pk)),
            # 'token': account_activation_token.make_token(user),
            # })
            # user.email_user(subject, message)
            # return redirect('account_activation_sent')
            # Re-authenticate with the raw credentials so login() receives a
            # backend-annotated user object.
            username = form.cleaned_data.get('username')
            raw_password = form.cleaned_data.get('password1')
            user = authenticate(username=username, password=raw_password)
            login(request, user)
            print(log_prefix + 'user ' + username + 'is logged in' + log_end)
            return redirect('home')
        else:
            form = SignUpForm(request.POST)
            return render(request, 'registration/create_user.html', {'form': form})
    else:
        form = SignUpForm()
        return render(request, 'registration/create_user.html', {'form': form})
@login_required
def edit_profile(request):
    """Create or edit the logged-in user's profile.

    ``exist`` is 1 when the user already has a profile, 0 otherwise; the
    template uses it to vary the page.
    """
    exist = 0
    try:
        profile = request.user.profile
        exist = 1
    except Profile.DoesNotExist:
        # First visit: build an unsaved Profile bound to this user.
        profile = Profile(user=request.user)
    if request.method == 'POST':
        form = ProfileForm(request.POST, request.FILES, instance=profile)
        if form.is_valid():
            form.save()
            print(log_prefix + ' user ' + request.user.username + ' profile is changed ' + log_end)
            return redirect('home')
        else:
            return render(request, 'emarket/profile.html', {'form': form})
    else:
        form = ProfileForm(instance=profile)
        return render(request, 'emarket/profile.html', {'form': form,'exist':exist})
def profile_change_photo(request, prof_id):
    """Replace the photo of the profile belonging to user ``prof_id``.

    POST: delete the old image, save the uploaded one, log, and redirect to
    'profile'; an invalid form re-renders the upload page. GET renders an
    empty upload form.

    Fixes vs. original: the log print sat after both returns (reachable only
    on an invalid POST, where it crashed on int concatenation and then
    returned None/HTTP 500); it now runs on success, with str(prof_id).
    """
    if request.method == 'POST':
        profile = Profile.objects.filter(user_id=prof_id)[0]
        form = ProfilePhotoForm(request.POST, request.FILES, instance=profile)
        profile.img.delete(False)
        if form.is_valid():
            form.save()
            print(log_prefix + 'user ' + str(prof_id) + 'profile img is changed' + log_end)
            return redirect('profile')
    else:
        form = ProfilePhotoForm()
    return render(request, 'emarket/profile_add_image.html', {'form': form, })
def user(request, user_id):
    """List a user's items, paginated nine per page.

    NOTE(review): ``pics`` contains EVERY item image in the system, not just
    this user's — confirm the template filters them, and that rendering an
    unpaginated empty queryset when the user has no items is intended.
    """
    items = Item.objects.filter(user_id=user_id)
    pics = Item_Image.objects.all()
    if items:
        paginator = Paginator(items, 9)
        page = request.GET.get('page')
        try:
            items = paginator.page(page)
        except PageNotAnInteger:
            # Non-numeric page parameter -> first page.
            items = paginator.page(1)
        except EmptyPage:
            # Page past the end -> last page.
            items = paginator.page(paginator.num_pages)
    return render(request, 'emarket/user.html', {'items': items, 'pics': pics, })
@login_required
def create_item(request):
    """Create a new item owned by the requesting user.

    On success redirects to 'add_item_img' for the freshly saved item;
    invalid POSTs re-render the bound form, GET renders an empty one.
    """
    if request.method == 'POST':
        # Bind the form to an unsaved Item so user ownership is set.
        item = Item(user=request.user)
        form = ItemForm(request.POST, instance=item)
        if form.is_valid():
            form.save()
            print(log_prefix+'item:'+form.cleaned_data['name']+' is created at '+log_date+log_end)
            return redirect('add_item_img', item.id)
        else:
            return render(request, 'emarket/item_create.html', {'form': form})
    else:
        form = ItemForm()
        return render(request, 'emarket/item_create.html', {'form': form})
@login_required
def edit_item(request, it_id):
    """Edit item ``it_id``: POST saves valid changes, otherwise render form.

    Fixes vs. original: ``filter(id=...)[0]`` raises IndexError (not
    Item.DoesNotExist), so the wrong exception was caught, and the fallback
    ``redirect('home')`` was never returned (view fell through with an
    unbound ``item``). Ownership is still not checked — NOTE(review).
    """
    try:
        item = Item.objects.filter(id=it_id)[0]
    except IndexError:
        return redirect('home')
    if request.method == 'POST':
        form = ItemForm(request.POST, instance=item)
        if form.is_valid():
            form.save()
            print(log_prefix + ' item ' + str(it_id) + ' is changed ' + log_end)
            return redirect('show_item', it_id)
        # Invalid POST: re-render with a clean instance-bound form
        # (original behavior: submitted errors are discarded).
        form = ItemForm(instance=item)
        return render(request, 'emarket/item_edit.html',{'form':form})
    form = ItemForm(instance=item)
    return render(request, 'emarket/item_edit.html',{'form':form})
def show_item(request, item_id):
    """Render the detail page for one item.

    NOTE(review): ``exist`` is hardwired to 1 (the favorite check below is
    commented out) and ``Item_Image.objects.filter()`` fetches every image
    in the database, not just this item's — confirm both are intended.
    An unknown item_id raises IndexError (HTTP 500).
    """
    user = request.user
    exist = 1
    # if user and request.method == "GET":
    #     favs = Favorite_item.objects.filter(user=user)
    #
    #     for fav in favs:
    #         if fav.item_id == int(item_id):
    #             print(fav.item_id)
    #             exist = 1
    #         else:
    #             exist = 0
    item = Item.objects.filter(id=item_id)[0]
    item_images = Item_Image.objects.filter()
    return render(request, 'emarket/item_detail.html', {'item': item,
                                                        'pics': item_images,
                                                        'exist': exist})
@login_required
def favorite_items(request, user_id):
    """List the favorite items of user ``user_id``.

    Fix vs. original: ``User.objects.filter(id=user_id)`` yields a QuerySet,
    and Django rejects an unsliced QuerySet as the value of an exact lookup;
    ``.first()`` yields a single User (or None) instead.
    """
    user = User.objects.filter(id=user_id).first()
    fav_items = Favorite_item.objects.filter(user=user)
    item_images = Item_Image.objects.filter()
    return render(request, 'emarket/favorite_items.html', {'fav_items': fav_items,
                                                           'pics': item_images})
# @login_required
# def add_to_fav(request):
# return redirect('home')
def show_category(request, cat_id):
    """List a category's items, paginated nine per page.

    NOTE(review): ``Category.objects.get`` raises (HTTP 500) for an unknown
    cat_id, and ``pics`` contains every item image in the system — confirm
    both are intended.
    """
    cat = Category.objects.get(id=cat_id)
    items = Item.objects.filter(category=cat)
    pics = Item_Image.objects.all()
    if items:
        paginator = Paginator(items, 9)
        page = request.GET.get('page')
        try:
            items = paginator.page(page)
        except PageNotAnInteger:
            # Non-numeric page parameter -> first page.
            items = paginator.page(1)
        except EmptyPage:
            # Page past the end -> last page.
            items = paginator.page(paginator.num_pages)
    return render(request, 'emarket/show_category.html', {'cat':cat, 'items':items, 'pics':pics})
def home(request):
    """Render the home page: all categories plus the nine priciest items."""
    context = {
        'cats': Category.objects.all(),
        'items': Item.objects.order_by('-price')[0:9],
        'pics': Item_Image.objects.filter(),
    }
    return render(request, 'emarket/home.html', context)
def search(request, search_word=None):
    """Search items by name, optionally narrowed by location and/or user.

    The user-visible messages appear to be Turkmen ('Ähli goşlar:' ~ all
    items, 'Netijeler:' ~ results, 'Hiç zat ýok' ~ nothing found) — confirm.
    Results are paginated 18 per page; a GET shows every item.
    """
    message = 'Ähli goşlar:'
    pics = Item_Image.objects.all()
    items = Item.objects.all()
    form = SearchForm
    if request.method == 'POST':
        form = SearchForm(request.POST)
        search_word = request.POST.get('search')
        location = request.POST.get('location')
        user = request.POST.get('user')
        # Combine the optional filters; all branches share the name match.
        if location and user:
            items = Item.objects.filter(name__icontains=search_word).filter(user=user).filter(location=location)
        elif user:
            items = Item.objects.filter(name__icontains=search_word).filter(user=user)
        elif location:
            items = Item.objects.filter(name__icontains=search_word).filter(location=location)
        else:
            items = Item.objects.filter(name__icontains=search_word)
        if items:
            message = 'Netijeler:'
        else:
            message = 'Hiç zat ýok'
            items = None
    if items:
        paginator = Paginator(items, 18)
        page = request.GET.get('page')
        try:
            items = paginator.page(page)
        except PageNotAnInteger:
            items = paginator.page(1)
        except EmptyPage:
            items = paginator.page(paginator.num_pages)
    return render(request, 'emarket/expo.html', {'items': items, 'pics': pics, 'ms': message, 's_word': search_word, 'form':form})
@login_required
def add_item_img(request, it_id):
    """Attach an uploaded image to item ``it_id``.

    NOTE(review): ``photos`` is every Item_Image row, not just this item's,
    and the log line concatenates ``it_id`` assuming it is a str — confirm
    the URLconf captures it as a string.
    """
    photos = Item_Image.objects.filter()
    if request.method == 'POST':
        # Bind the form to an unsaved image row linked to the item.
        item_img = Item_Image(item_id=it_id)
        form = PhotoForm(request.POST, request.FILES, instance=item_img)
        if form.is_valid():
            form.save()
            print(log_prefix+'item_'+it_id+' added image'+str(form.cleaned_data['img'])+log_end)
            return redirect('show_item', it_id)
        else:
            return render(request, 'emarket/item_add_image.html', {'form': form, 'photos': photos})
    else:
        form = PhotoForm()
        return render(request, 'emarket/item_add_image.html', {'form':form, 'photos': photos})
@login_required
def delete_item(request, it_id):
    """Delete item ``it_id`` and its media directory, then redirect home.

    NOTE(review): ownership is never checked (any authenticated user can
    delete any item), and shutil.rmtree raises if the media directory
    media/items/item_<id> does not exist — confirm both are acceptable.
    """
    item = Item.objects.filter(id=it_id)
    if item:
        item.delete()
        # Remove the item's uploaded-image directory from MEDIA_ROOT.
        items_path = os.path.join(settings.MEDIA_ROOT, 'items')
        item_id = 'item_'+str(it_id)
        item_path = os.path.join(items_path, item_id)
        shutil.rmtree(item_path)
        print(log_prefix+item_id+' is deleted'+log_end)
        return redirect('home')
    else:
        return redirect('home')
class UserCreate(APIView):
    """API endpoint that registers a user and immediately logs them in."""

    def post(self, request, format='json'):
        """Create a user from the posted data.

        Returns 201 with the serialized user on success, 400 with the
        validation errors otherwise. Fixes vs. original: every failure path
        returned None (an implicit HTTP 500), the first parameter was
        misspelled ``selfs``, and the raw password was printed — a
        credential leak into the logs, now removed.
        """
        serializer = UserSerializer(data=request.data)
        if not serializer.is_valid():
            print('user validation failed')
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        user = serializer.save()
        if not user:
            print('user create error')
            return Response(status=status.HTTP_400_BAD_REQUEST)
        # Re-authenticate with the submitted credentials so login() gets a
        # backend-annotated user object.
        username = serializer.data.get('username')
        raw_password = serializer.data.get('password')
        user_log = authenticate(username=username, password=raw_password)
        login(request, user_log)
        return Response(serializer.data, status=status.HTTP_201_CREATED)
# api for item
class ItemViewSet(ListAPIView):
    """List all items; supports django-filter filtering, name search, and
    ordering on any field."""
    filter_backends = (django_filters.rest_framework.DjangoFilterBackend,)
    queryset = Item.objects.all()
    serializer_class = ItemSerializer
    search_fields = ('name',)
    ordering_fields = '__all__'
class Item_ImageViewSet(ListAPIView):
    """List all item images; supports django-filter filtering."""
    filter_backends = (django_filters.rest_framework.DjangoFilterBackend,)
    queryset = Item_Image.objects.all()
    serializer_class = Item_ImageSerializer
class Item_ImageDetailViewSet(ListAPIView):
    """List the images belonging to one item (item pk taken from the URL)."""
    queryset = Item_Image.objects.all()
    serializer_class = Item_ImageSerializer
    def get_queryset(self):
        # Narrow to the 'item' kwarg captured by the URL pattern.
        item = self.kwargs['item']
        return Item_Image.objects.filter(item=item)
class ItemCreateViewSet(CreateAPIView):
    """Create a new item via POST."""
    queryset = Item.objects.all()
    serializer_class = ItemSerializer
class ItemDetailViewSet(RetrieveAPIView):
    """Retrieve a single item by pk."""
    queryset = Item.objects.all()
    serializer_class = ItemSerializer
class ItemUpdateViewSet(UpdateAPIView):
    """Update an item via PUT/PATCH."""
    queryset = Item.objects.all()
    serializer_class = ItemSerializer
class ItemDeleteViewSet(DestroyAPIView):
    """Delete an item via DELETE.

    NOTE(review): none of these item API views require authentication or
    check ownership — confirm permission classes are applied elsewhere.
    """
    queryset = Item.objects.all()
    serializer_class = ItemSerializer
# api for category
class CategoryViewSet(viewsets.ModelViewSet):
    """Full CRUD API for categories."""
    queryset = Category.objects.all()
    serializer_class = CategorySerializer
| 34.480818
| 130
| 0.641893
| 1,548
| 13,482
| 5.434755
| 0.156331
| 0.027814
| 0.038393
| 0.046357
| 0.437418
| 0.384643
| 0.333888
| 0.28872
| 0.252704
| 0.17473
| 0
| 0.002656
| 0.246032
| 13,482
| 390
| 131
| 34.569231
| 0.824988
| 0.116377
| 0
| 0.414286
| 0
| 0
| 0.085497
| 0.026223
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057143
| false
| 0.017857
| 0.089286
| 0
| 0.353571
| 0.046429
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39bdb6e5ac777c1dbb29e8d29b5d3a629b8f1d14
| 3,683
|
py
|
Python
|
cogs/misc.py
|
DoggieLicc/doggie-bot
|
31400a32916e08cd5b7909cce17db66ea927d2e3
|
[
"MIT"
] | 3
|
2021-08-30T16:51:04.000Z
|
2021-09-13T17:04:29.000Z
|
cogs/misc.py
|
DoggieLicc/doggie-bot
|
31400a32916e08cd5b7909cce17db66ea927d2e3
|
[
"MIT"
] | 1
|
2021-08-30T15:29:37.000Z
|
2021-09-09T23:59:47.000Z
|
cogs/misc.py
|
DoggieLicc/doggie-bot
|
31400a32916e08cd5b7909cce17db66ea927d2e3
|
[
"MIT"
] | null | null | null |
import discord
import utils
import inspect
from discord.ext import commands
from io import StringIO
class Misc(commands.Cog):
    """Commands that show info about the bot"""
    def __init__(self, bot: utils.CustomBot):
        self.bot: utils.CustomBot = bot
    @commands.command(aliases=['i', 'ping'])
    async def info(self, ctx: utils.CustomContext):
        """Shows information for the bot!"""
        # Invite URL with a fixed permission bitmask.
        # NOTE(review): confirm 1375866285270 still matches the permissions
        # the bot actually needs.
        invite_url = discord.utils.oauth_url(ctx.me.id, permissions=discord.Permissions(1375866285270))
        embed = utils.create_embed(
            ctx.author,
            title='Info for Doggie Bot!',
            description='This bot is a multi-purpose bot!'
        )
        embed.add_field(
            name="Invite this bot!",
            value=f"[Invite]({invite_url})",
            inline=False
        )
        embed.add_field(
            name="Join support server!",
            value="[Support Server](https://discord.gg/Uk6fg39cWn)",
            inline=False
        )
        embed.add_field(
            name='Bot Creator:',
            value='[Doggie 2#8512](https://github.com/DoggieLicc/)',
            inline=True
        )
        embed.add_field(
            name='Source Code:',
            value='[Github Repo](https://github.com/DoggieLicc/doggie-bot)'
        )
        embed.add_field(
            name='Bot Online Since:',
            value=utils.user_friendly_dt(self.bot.start_time),
            inline=False
        )
        embed.add_field(
            name='Ping:',
            # Websocket latency, converted from seconds to milliseconds.
            value='{} ms'.format(round(1000 * self.bot.latency)),
            inline=False
        )
        await ctx.send(embed=embed)
    # Rate limit: 3 uses per 86,400 seconds (24 h) per user.
    @commands.cooldown(3, 86_400, commands.BucketType.user)
    @commands.command(aliases=['report', 'bug'])
    async def suggest(self, ctx: utils.CustomContext, *, suggestion):
        """Send a suggestion or bug report to the bot owner!"""
        owner: discord.User = await self.bot.get_owner()
        owner_embed = utils.create_embed(
            ctx.author,
            title='New suggestion!:',
            description=suggestion
        )
        await owner.send(embed=owner_embed)
        user_embed = utils.create_embed(
            ctx.author,
            title=f'👍 Suggestion has been sent to {owner}! 💖'
        )
        await ctx.send(embed=user_embed)
    @commands.command(aliases=['code'])
    async def source(self, ctx, *, command: str = None):
        """Look at the code of this bot!"""
        # No argument: link to the repository instead of sending code.
        if command is None:
            embed = utils.create_embed(
                ctx.author,
                title='Source Code:',
                description='[Github for **Doggie Bot**](https://github.com/DoggieLicc/doggie-bot)'
            )
            return await ctx.send(embed=embed)
        if command == 'help':
            # The help command is a class, not a regular command callback.
            src = type(self.bot.help_command)
        else:
            # Accept dotted subcommand syntax, e.g. "group.sub".
            obj = self.bot.get_command(command.replace('.', ' ').lower())
            if obj is None:
                embed = utils.create_embed(
                    ctx.author,
                    title='Command not found!',
                    description='This command wasn\'t found in this bot.',
                    color=discord.Color.red()
                )
                return await ctx.send(embed=embed)
            src = obj.callback.__code__
        # Upload the command's source as a file attachment.
        lines, _ = inspect.getsourcelines(src)
        src_code = ''.join(lines)
        buffer = StringIO(src_code)
        file = discord.File(fp=buffer, filename=f'{command.replace(" ", "_").lower()}.py')
        await ctx.send(f'Here you go, {ctx.author.mention}. (You should view this on a PC)', file=file)
def setup(bot):
    # Entry point used by discord.py's extension loader (bot.load_extension);
    # registers the Misc cog on the bot.
    bot.add_cog(Misc(bot))
| 29
| 103
| 0.555525
| 419
| 3,683
| 4.797136
| 0.343675
| 0.024378
| 0.038806
| 0.050746
| 0.227861
| 0.195522
| 0.093035
| 0.040796
| 0.040796
| 0
| 0
| 0.01237
| 0.319576
| 3,683
| 126
| 104
| 29.230159
| 0.788907
| 0.010046
| 0
| 0.222222
| 0
| 0
| 0.173591
| 0.012237
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022222
| false
| 0
| 0.055556
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39c16bfed4316959a8bb44396e89b0248bfc5ee5
| 719
|
py
|
Python
|
URI/multiplicador.py
|
LuccasTraumer/pythonRepositorio
|
52d4455cea0615c8eba7ab4c6224ce3350bbcf47
|
[
"MIT"
] | null | null | null |
URI/multiplicador.py
|
LuccasTraumer/pythonRepositorio
|
52d4455cea0615c8eba7ab4c6224ce3350bbcf47
|
[
"MIT"
] | null | null | null |
URI/multiplicador.py
|
LuccasTraumer/pythonRepositorio
|
52d4455cea0615c8eba7ab4c6224ce3350bbcf47
|
[
"MIT"
] | null | null | null |
'''
Read 2 integer values (A and B). Then the program must print the message
"Sao Multiplos" or "Nao sao Multiplos", indicating whether the two values
are multiples of each other. (URI Online Judge, translated from Portuguese.)
'''
# Both numbers come on a single line; split() also tolerates extra whitespace.
first_value, second_value = (int(token) for token in input().split())

# Order the pair so only one direction of divisibility has to be tested.
smaller, larger = sorted((first_value, second_value))

# A and B are multiples of each other iff the larger is divisible by the
# smaller. Guard the smaller == 0 case (the original crashed dividing by
# zero): 0 is a multiple of every integer, since n * 0 == 0.
if smaller == 0 or larger % smaller == 0:
    print('Sao Multiplos')
else:
    print('Nao sao Multiplos')
| 27.653846
| 94
| 0.673157
| 101
| 719
| 4.633663
| 0.425743
| 0.17094
| 0.096154
| 0.134615
| 0.311966
| 0.200855
| 0.200855
| 0.200855
| 0.200855
| 0.200855
| 0
| 0.008834
| 0.212796
| 719
| 25
| 95
| 28.76
| 0.818021
| 0.235049
| 0
| 0.4375
| 0
| 0
| 0.112754
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39c247e8b1fdf8e3efae1a8994e7cba05bbc1477
| 2,767
|
py
|
Python
|
app/listeners.py
|
seratch/slack_learning_app_ja
|
9552489b1d5d3adc61a7c73645a1ae09abc9d933
|
[
"MIT"
] | 11
|
2020-10-28T08:04:16.000Z
|
2022-03-18T09:12:29.000Z
|
app/listeners.py
|
seratch/slack_learning_app_ja
|
9552489b1d5d3adc61a7c73645a1ae09abc9d933
|
[
"MIT"
] | 1
|
2020-10-29T23:10:52.000Z
|
2020-10-29T23:37:00.000Z
|
app/listeners.py
|
seratch/slack_learning_app_ja
|
9552489b1d5d3adc61a7c73645a1ae09abc9d933
|
[
"MIT"
] | null | null | null |
import re
from slack_bolt import App
from app.onboarding import (
message_multi_users_select,
message_multi_users_select_lazy,
)
from app.tutorials import (
tutorial_page_transition,
tutorial_page_transition_lazy,
app_home_opened,
app_home_opened_lazy,
page1_home_tab_button_click,
page1_home_tab_button_click_lazy,
page1_home_tab_users_select_lazy,
page1_home_tab_users_select,
page2_modal,
page2_modal_lazy,
page2_modal_submission,
page4_create_channel,
page4_create_channel_lazy,
page4_create_channel_submission,
page4_create_channel_submission_lazy,
page4_create_channel_setup,
page4_create_channel_setup_lazy,
global_shortcut_handler,
global_shortcut_view_submission,
global_shortcut_view_submission_lazy,
message_shortcut_handler,
message_shortcut_handler_lazy,
external_data_source_handler,
)
def register_listeners(app: App):
    """Attach every action/event/view/shortcut listener to the Bolt app.

    Slow handlers use Bolt's lazy-listener pattern: ``ack`` responds within
    Slack's 3-second window and the ``lazy`` functions run afterwards.
    """
    # URL-only buttons still need their action acknowledged.
    app.action("link_button")(lambda ack: ack())
    # ----------------------------------------------
    # message
    app.action("message_multi_users_select")(
        ack=message_multi_users_select, lazy=[message_multi_users_select_lazy]
    )
    # ----------------------------------------------
    # home tab
    app.event("app_home_opened")(ack=app_home_opened, lazy=[app_home_opened_lazy])
    # BUGFIX: raw strings so "\d" is a regex digit class, not an invalid
    # escape sequence (SyntaxWarning on modern Python).
    app.action(re.compile(r"tutorial_page_transition_\d+"))(
        ack=tutorial_page_transition, lazy=[tutorial_page_transition_lazy]
    )
    app.action(re.compile(r"page1_home_tab_button_\d"))(
        ack=page1_home_tab_button_click, lazy=[page1_home_tab_button_click_lazy]
    )
    app.action("page1_home_tab_users_select")(
        ack=page1_home_tab_users_select, lazy=[page1_home_tab_users_select_lazy]
    )
    app.action("page2_modal")(ack=page2_modal, lazy=[page2_modal_lazy])
    app.view("page2_modal_submission")(page2_modal_submission)
    app.action("page4_create_channel")(
        ack=page4_create_channel, lazy=[page4_create_channel_lazy]
    )
    app.view("page4_create_channel_submission")(
        ack=page4_create_channel_submission, lazy=[page4_create_channel_submission_lazy]
    )
    app.event("channel_created")(
        ack=page4_create_channel_setup, lazy=[page4_create_channel_setup_lazy]
    )
    app.shortcut("global-shortcut-example")(global_shortcut_handler)
    app.view("global-shortcut-example_submission")(
        ack=global_shortcut_view_submission, lazy=[global_shortcut_view_submission_lazy]
    )
    app.shortcut("message-shortcut-example")(
        ack=message_shortcut_handler, lazy=[message_shortcut_handler_lazy]
    )
    app.options("external-data-source-example")(external_data_source_handler)
    app.action("external-data-source-example")(lambda ack: ack())
| 30.744444
| 88
| 0.734731
| 344
| 2,767
| 5.383721
| 0.142442
| 0.083153
| 0.136069
| 0.062095
| 0.472462
| 0.211663
| 0.188985
| 0.143629
| 0.089633
| 0.053996
| 0
| 0.013571
| 0.147814
| 2,767
| 89
| 89
| 31.089888
| 0.771841
| 0.039754
| 0
| 0
| 0
| 0
| 0.138386
| 0.111237
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015152
| false
| 0
| 0.060606
| 0
| 0.075758
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39c310b2a22377850644e8e3e7bb4274bb90e2dd
| 1,213
|
py
|
Python
|
project2/redactor.py
|
m-harikiran/cs5293sp21-project2
|
48547543001813aee17731399f617f82043e4a8f
|
[
"MIT"
] | null | null | null |
project2/redactor.py
|
m-harikiran/cs5293sp21-project2
|
48547543001813aee17731399f617f82043e4a8f
|
[
"MIT"
] | null | null | null |
project2/redactor.py
|
m-harikiran/cs5293sp21-project2
|
48547543001813aee17731399f617f82043e4a8f
|
[
"MIT"
] | null | null | null |
import nltk
import re
from nltk.corpus import wordnet
# Reads the file, redacts PERSON names, and writes the result next to the
# input with a .redacted extension.
def redactNames(path):
    """Redact PERSON names in the text file at `path`.

    Every token NLTK's named-entity chunker tags as PERSON is replaced by a
    run of full-block characters of equal length. The redacted text is
    written to ``<name>.redacted`` and that new path is returned.
    """
    # BUGFIX: use `with` so the file handle is always closed (the original
    # leaked both the input and output handles).
    with open(path) as source:
        data = source.read()  # Reading the file to be redacted

    tokenized_data = nltk.word_tokenize(data)  # Splitting data into words
    # Generating the parts of speech of each word
    pos_tokenized_data = nltk.pos_tag(tokenized_data)
    # Chunking the tagged words using the named entity chunker
    chk_tagged_tokens = nltk.chunk.ne_chunk(pos_tokenized_data)

    for chk in chk_tagged_tokens.subtrees():
        if chk.label().upper() == 'PERSON':  # Extracting the words with tag PERSON
            for name in chk:
                # BUGFIX: re.escape so names containing regex metacharacters
                # (e.g. "O'Brien (Jr.)") can't break or widen the pattern.
                data = re.sub(
                    r'\b{}\b'.format(re.escape(name[0])),
                    '\u2588' * len(name[0]),  # Replace with block characters
                    data,
                )

    redacted_path = path.replace('.txt', '.redacted')
    with open(redacted_path, 'w') as redacted_doc:
        redacted_doc.write(data)  # Writing redacted data to file
    return redacted_path
| 32.783784
| 117
| 0.660346
| 162
| 1,213
| 4.864198
| 0.512346
| 0.06599
| 0.035533
| 0.045685
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007659
| 0.246496
| 1,213
| 36
| 118
| 33.694444
| 0.854486
| 0.380874
| 0
| 0
| 0
| 0
| 0.063599
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0
| 0.176471
| 0
| 0.294118
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39c3360de5ed5436c13f0b5c11ff3ff8f4c1e5e8
| 935
|
py
|
Python
|
python3/max_area_of_island.py
|
joshiaj7/CodingChallenges
|
f95dd79132f07c296e074d675819031912f6a943
|
[
"MIT"
] | 1
|
2020-10-08T09:17:40.000Z
|
2020-10-08T09:17:40.000Z
|
python3/max_area_of_island.py
|
joshiaj7/CodingChallenges
|
f95dd79132f07c296e074d675819031912f6a943
|
[
"MIT"
] | null | null | null |
python3/max_area_of_island.py
|
joshiaj7/CodingChallenges
|
f95dd79132f07c296e074d675819031912f6a943
|
[
"MIT"
] | null | null | null |
# Space : O(n)
# Time : O(m*n)
class Solution:
    """Max-area-of-island: grid of 0/1 cells; islands are 4-connected 1s."""

    def crawl(self, grid, x, y):
        """Flood-fill the island containing (x, y), zeroing visited cells.

        Returns the island's cell count (0 if (x, y) is water).
        BUGFIX: iterative with an explicit stack, so a large island can't
        exceed Python's recursion limit like the recursive version could.
        """
        rows, cols = len(grid), len(grid[0])
        area = 0
        stack = [(x, y)]
        while stack:
            cx, cy = stack.pop()
            if grid[cy][cx] != 1:
                continue  # already consumed, or water
            grid[cy][cx] = 0  # mark visited by sinking the cell
            area += 1
            for ax, ay in ((0, 1), (0, -1), (1, 0), (-1, 0)):
                nx, ny = cx + ax, cy + ay
                if 0 <= ny < rows and 0 <= nx < cols and grid[ny][nx] == 1:
                    stack.append((nx, ny))
        return area

    def maxAreaOfIsland(self, grid: List[List[int]]) -> int:
        """Return the area of the largest island in `grid` (0 if none).

        Note: `grid` is consumed - visited land cells are zeroed in place.
        """
        ans = 0
        for y in range(len(grid)):
            for x in range(len(grid[0])):
                if grid[y][x] == 1:
                    ans = max(ans, self.crawl(grid, x, y))
        return ans
| 25.972222
| 61
| 0.37754
| 130
| 935
| 2.715385
| 0.292308
| 0.067989
| 0.067989
| 0.050992
| 0.11898
| 0.11898
| 0.11898
| 0
| 0
| 0
| 0
| 0.040568
| 0.472727
| 935
| 35
| 62
| 26.714286
| 0.675456
| 0.037433
| 0
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.107143
| false
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39c80db6883ae8bab680917b15a4a104eed100d2
| 4,888
|
py
|
Python
|
vl/h5/mg_genome/norm_h5.py
|
hurwitzlab/viral-learning
|
8d7aebc0d58fa32a429f4a47593452ee2722ba82
|
[
"MIT"
] | 1
|
2018-02-23T16:49:30.000Z
|
2018-02-23T16:49:30.000Z
|
vl/h5/mg_genome/norm_h5.py
|
hurwitzlab/viral-learning
|
8d7aebc0d58fa32a429f4a47593452ee2722ba82
|
[
"MIT"
] | null | null | null |
vl/h5/mg_genome/norm_h5.py
|
hurwitzlab/viral-learning
|
8d7aebc0d58fa32a429f4a47593452ee2722ba82
|
[
"MIT"
] | null | null | null |
"""
1. Normalizing the entire dataset with mean and variance, shuffle, compression=9 runs for more than 8 hours on
ocelote and results in a file of more than 150GB.
2. Try normalizing with only variance and without shuffle.
"""
import os.path
import sys
import time
import h5py
import numpy as np
def calculate_mean_variance(dsets):
    """
    Given a list of datasets calculate the mean and variance for all rows in all datasets.

    Uses Welford's online algorithm, so only one chunk of rows is held in
    memory at a time and a single pass over each dataset suffices.

    Arguments:
        dsets: sequence of datasets with matching column counts
    Returns:
        (mean, variance): tuple of mean vector and (sample) variance vector
    """
    print('calculating mean and variance for "{}"'.format([dset.name for dset in dsets]))
    t0 = time.time()
    # Welford accumulators: running mean and sum of squared deviations (M2).
    mean = np.zeros((1, dsets[0].shape[1]))
    M2 = np.zeros((1, dsets[0].shape[1]))
    count = 0
    for dset in dsets:
        # Load at most 10,000 rows at a time to bound memory use.
        dsubset = np.zeros((min(10000, dset.shape[0]), dset.shape[1]))
        print(' working on "{}"'.format(dset.name))
        for n in range(0, dset.shape[0], dsubset.shape[0]):
            m = min(n + dsubset.shape[0], dset.shape[0])
            dset.read_direct(dsubset, source_sel=np.s_[n:m, :])
            t00 = time.time()
            # BUGFIX: iterate only over the m - n rows actually read. The
            # final slice can be shorter than the buffer; the old loop ran
            # over the whole buffer and re-counted stale rows left over from
            # the previous read, skewing count, mean and variance.
            for i in range(0, m - n):
                count = count + 1
                delta = dsubset[i, :] - mean
                mean += delta / count
                delta2 = dsubset[i, :] - mean
                M2 += delta * delta2
            print(' processed slice [{}:{}] {:5.2f}s'.format(n, m, time.time()-t00))
    print(' finished mean and variance in {:5.2f}s'.format(time.time()-t0))
    # Sample variance: M2 / (count - 1).
    return (mean, M2/(count - 1))
def normalize_datasets(input_h5_fp, norm_h5_fp):
    """Normalize every dataset in `input_h5_fp` and write the result to `norm_h5_fp`.

    Mean and variance are estimated from the two hard-coded training
    datasets, then applied to all datasets found in the file. The mean and
    variance vectors are also stored in the output under /mean and /variance.
    """
    dset_paths = []

    def find_data(name, obj):
        # visititems callback: datasets have a dtype attribute, groups don't.
        if hasattr(obj, 'dtype'):
            print('found dataset "{}"'.format(name))
            dset_paths.append(obj.name)
        else:
            pass

    with h5py.File(input_h5_fp, 'r', libver='latest', swmr=True) as input_h5_file:
        input_h5_file.visititems(find_data)
        # Statistics come from the two training datasets only - presumably the
        # other datasets share their distribution; TODO confirm intended.
        mean, variance = calculate_mean_variance((
            input_h5_file['/clean-bact/training1/extract/kmers'],
            input_h5_file['/clean-vir/training1/extract/kmers']))
        zero_mean_column_count = len(mean[mean == 0.0])
        print('{} column(s) have zero mean'.format(zero_mean_column_count))
        # NOTE(review): any zero-variance column divides by zero below.
        zero_var_column_count = len(variance[variance == 0.0])
        print('{} column(s) have zero variance'.format(zero_var_column_count))
        with h5py.File(norm_h5_fp, 'w') as norm_h5_file:
            print('writing normalized data to "{}"'.format(norm_h5_fp))
            # Persist the statistics so they can be reused later.
            mean_dset = norm_h5_file.require_dataset(
                name='/mean',
                shape=mean.shape,
                dtype=mean.dtype,
                chunks=mean.shape,
                compression='gzip')
            mean_dset[:, :] = mean
            variance_dset = norm_h5_file.require_dataset(
                name='/variance',
                shape=variance.shape,
                dtype=variance.dtype,
                chunks=variance.shape,
                compression='gzip')
            variance_dset[:, :] = variance
            for dset_path in dset_paths:
                dset = input_h5_file[dset_path]
                print(' normalizing "{}"'.format(dset.name))
                normalized_dset = norm_h5_file.require_dataset(
                    name=dset.name,
                    shape=dset.shape,
                    dtype=dset.dtype,
                    chunks=mean.shape,
                    compression='gzip',
                    compression_opts=6)
                t0 = time.time()
                # Normalize 10,000 rows at a time to bound memory use.
                n = 10000
                for i in range(0, dset.shape[0], n):
                    j = i + n
                    t00 = time.time()
                    # NOTE(review): divides by the variance, not the standard
                    # deviation - confirm this scaling is intended.
                    normalized_dset[i:j, :] = (dset[i:j, :] - mean) / variance
                    ##normalized_dset[i:j, :] = dset[i:j, :] / variance
                    print(' normalized slice {}:{} in {:5.2f}s'.format(i, j, time.time()-t00))
                print('normalized "{}" in {:5.2f}s'.format(dset.name, time.time()-t0))
def main():
    """Normalize the HDF5 file named on the command line.

    The output is written next to the input with a ``norm_`` filename prefix.
    """
    source_path = sys.argv[1]  # e.g. '../data/training_testing.h5'
    print(source_path)

    # Quick sanity listing of the expected training group before doing work.
    with h5py.File(source_path, 'r') as source_file:
        print(list(source_file['/clean-bact/training1/extract'].items()))

    directory, filename = os.path.split(source_path)
    target_path = os.path.join(directory, 'norm_' + filename)
    normalize_datasets(source_path, target_path)


if __name__ == '__main__':
    main()
| 35.678832
| 110
| 0.557897
| 629
| 4,888
| 4.171701
| 0.240064
| 0.048018
| 0.024009
| 0.015244
| 0.22218
| 0.202363
| 0.155488
| 0.025915
| 0
| 0
| 0
| 0.035499
| 0.319967
| 4,888
| 137
| 111
| 35.678832
| 0.753911
| 0.148527
| 0
| 0.090909
| 0
| 0
| 0.114355
| 0.023844
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| false
| 0.011364
| 0.056818
| 0
| 0.113636
| 0.147727
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39c9516fadde5be713c7c8c8f3a12e5d1178fce7
| 780
|
py
|
Python
|
app/controller/api/fields/comment.py
|
Arianxx/LoniceraBlog
|
1f13d336f42c7041b16293dc8f1af62cc98ce2f4
|
[
"MIT"
] | 8
|
2018-09-08T04:41:01.000Z
|
2018-09-08T13:15:59.000Z
|
app/controller/api/fields/comment.py
|
Arianxx/LoniceraBlog
|
1f13d336f42c7041b16293dc8f1af62cc98ce2f4
|
[
"MIT"
] | null | null | null |
app/controller/api/fields/comment.py
|
Arianxx/LoniceraBlog
|
1f13d336f42c7041b16293dc8f1af62cc98ce2f4
|
[
"MIT"
] | 6
|
2018-09-08T08:51:50.000Z
|
2018-09-11T00:29:20.000Z
|
from flask_restful import fields
from .custom import Num, EdgeUrl, PaginateUrl
# flask_restful marshalling spec for a single comment.
getCommentField = {
    "id": fields.Integer,
    # Model attribute is `timestamp`; exposed to clients as `time`.
    "time": fields.DateTime(attribute="timestamp"),
    "author_name": fields.String(attribute="username"),
    "article_id": fields.Integer(attribute="postid"),
    "body": fields.String,
    "urls": {
        # absolute=True emits full URLs rather than site-relative paths.
        # NOTE(review): "arthor" looks like a typo for "author", but clients
        # may already rely on this key - left unchanged.
        "arthor": fields.Url("api.user", absolute=True),
        "post": fields.Url("api.post", absolute=True),
    },
}

# Marshalling spec for a paginated list of a post's comments.
getPostCommentsField = {
    # Previous/next page links; presumably the 0/1 flag selects the edge -
    # confirm against the EdgeUrl implementation.
    "prev": EdgeUrl("api.post_comments", 0),
    "next": EdgeUrl("api.post_comments", 1),
    "all_comments": fields.Integer(attribute="total"),
    "all_pages": fields.Integer(attribute="pages"),
    "urls": fields.List(
        PaginateUrl("api.comment", "commentid", "id"), attribute="items"
    ),
}
| 31.2
| 72
| 0.65
| 85
| 780
| 5.882353
| 0.517647
| 0.104
| 0.132
| 0.088
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003086
| 0.169231
| 780
| 24
| 73
| 32.5
| 0.768519
| 0
| 0
| 0
| 0
| 0
| 0.241026
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39cc957ec5fbf6dc9322a11520c340004afd7af2
| 1,132
|
py
|
Python
|
faq/templatetags/faq_tags.py
|
HerbyDE/jagdreisencheck-webapp
|
9af5deda2423b787da88a0c893f3c474d8e4f73f
|
[
"BSD-3-Clause"
] | null | null | null |
faq/templatetags/faq_tags.py
|
HerbyDE/jagdreisencheck-webapp
|
9af5deda2423b787da88a0c893f3c474d8e4f73f
|
[
"BSD-3-Clause"
] | null | null | null |
faq/templatetags/faq_tags.py
|
HerbyDE/jagdreisencheck-webapp
|
9af5deda2423b787da88a0c893f3c474d8e4f73f
|
[
"BSD-3-Clause"
] | null | null | null |
from django import template
from faq.forms import FaqInstanceForm, FaqAnswerForm
from faq.models import FaqInstance, FaqAnswer
register = template.Library()
@register.inclusion_tag('faq/jagdreisencheck/create-question-form.html', takes_context=True)
def create_question_form(context, model, identifier):
    """Expose an empty FaqInstanceForm plus its target model/identifier to the template."""
    for key, value in (('form', FaqInstanceForm()),
                       ('model', model),
                       ('identifier', identifier)):
        context[key] = value
    return context
@register.inclusion_tag('faq/jagdreisencheck/answer-question-form.html', takes_context=True)
def answer_question_form(context, identifier, parent):
    """Expose an empty FaqAnswerForm for replying to the question `parent`."""
    for key, value in (('form', FaqAnswerForm()),
                       ('identifier', identifier),
                       ('parent', parent)):
        context[key] = value
    return context
@register.inclusion_tag('faq/jagdreisencheck/render-questions.html', takes_context=True)
def render_questions(context, model, identifier):
    """List an object's questions (newest first) plus the forms to add more."""
    matching = FaqInstance.objects.filter(model=model, identifier=identifier).order_by("-date_created")
    for key, value in (('questions', matching),
                       ('qe_form', FaqInstanceForm),
                       ('aw_form', FaqAnswerForm)):
        context[key] = value
    return context
| 28.3
| 104
| 0.754417
| 125
| 1,132
| 6.712
| 0.312
| 0.057211
| 0.071514
| 0.082241
| 0.277712
| 0.205006
| 0.205006
| 0
| 0
| 0
| 0
| 0
| 0.134276
| 1,132
| 40
| 105
| 28.3
| 0.856122
| 0
| 0
| 0.28
| 0
| 0
| 0.181818
| 0.115622
| 0
| 0
| 0
| 0
| 0
| 1
| 0.12
| false
| 0
| 0.12
| 0
| 0.36
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39cfddaaca78d75a0a19c8026c9b58cbdca9cec8
| 18,099
|
py
|
Python
|
contracts/crawler.py
|
waldyrious/public-contracts
|
3107ddc007f3574ce19aaa2223399484bc6b1382
|
[
"BSD-3-Clause"
] | 25
|
2015-03-05T00:15:11.000Z
|
2021-04-04T18:50:43.000Z
|
contracts/crawler.py
|
waldyrious/public-contracts
|
3107ddc007f3574ce19aaa2223399484bc6b1382
|
[
"BSD-3-Clause"
] | 36
|
2015-03-21T17:04:54.000Z
|
2017-07-06T10:35:51.000Z
|
contracts/crawler.py
|
waldyrious/public-contracts
|
3107ddc007f3574ce19aaa2223399484bc6b1382
|
[
"BSD-3-Clause"
] | 7
|
2015-03-24T16:18:02.000Z
|
2019-05-29T11:51:01.000Z
|
import json
import logging
from django.core.exceptions import ValidationError
from django.db import transaction
from django.forms import DateField, CharField
import requests
import requests.exceptions
from . import models
from contracts.crawler_forms import EntityForm, ContractForm, \
TenderForm, clean_place, PriceField
logger = logging.getLogger(__name__)
class JSONLoadError(Exception):
    """Raised when the JSON fetched from `url` could not be parsed/used."""

    def __init__(self, url):
        # Keep the offending URL so callers can report or retry it.
        self.url = url
class JSONCrawler:
    """
    A crawler specific for retrieving JSON content.
    """
    user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_5) ' \
                 'AppleWebKit/537.36 (KHTML, like Gecko)'

    def __init__(self):
        # One shared session: connection pooling plus persistent headers.
        self.session = requests.Session()
        self.session.headers.update({'User-Agent': self.user_agent})

    def get_response(self, url, headers=None):
        """GET `url`, first merging any extra `headers` into the session."""
        if headers:
            self.session.headers.update(headers)
        return self.session.get(url)

    def get_json(self, url, headers=None):
        """GET `url` and parse the response body as JSON."""
        response = self.get_response(url, headers)
        return json.loads(response.text)
class ContractsStaticDataCrawler(JSONCrawler):
    """Downloads BASE's static reference data (types, countries, districts,
    councils) and mirrors it into the database."""

    def _save_simple_types(self, url, model):
        """Create one `model` row per item at `url` that is not stored yet.

        Rows are matched on BASE's `base_id`; BASE's id '0' ("All")
        pseudo-item is skipped and existing rows are never modified.
        """
        data = self.get_json(url)
        for element in data['items']:
            if element['id'] == '0':  # id = 0 is "All" that we don't use.
                continue
            try:
                # if it already exists, leave it alone
                model.objects.get(base_id=element['id'])
            except model.DoesNotExist:
                model(name=element['description'],
                      base_id=element['id']).save()

    def save_contracts_types(self):
        self._save_simple_types(
            'http://www.base.gov.pt/base2/rest/lista/tipocontratos',
            models.ContractType)

    def save_procedures_types(self):
        # NOTE: previously this method alone matched existing rows on `name`;
        # it now matches on `base_id` for consistency with the other tables.
        self._save_simple_types(
            'http://www.base.gov.pt/base2/rest/lista/tipoprocedimentos',
            models.ProcedureType)

    def save_act_types(self):
        self._save_simple_types(
            'http://www.base.gov.pt/base2/rest/lista/tiposacto',
            models.ActType)

    def save_model_types(self):
        self._save_simple_types(
            'http://www.base.gov.pt/base2/rest/lista/tiposmodelo',
            models.ModelType)

    def save_all_countries(self):
        # Countries are keyed on name: BASE's country list carries no usable id.
        url = 'http://www.base.gov.pt/base2/rest/lista/paises'
        data = self.get_json(url)
        for element in data['items']:
            try:
                # if it exists, we pass
                models.Country.objects.get(name=element['description'])
            except models.Country.DoesNotExist:
                models.Country(name=element['description']).save()

    def save_all_districts(self):
        base_url = 'http://www.base.gov.pt/base2/rest/lista/distritos?pais=%d'
        # 187 is BASE's id for Portugal; districts only exist for Portugal.
        portugal = models.Country.objects.get(name="Portugal")
        data = self.get_json(base_url % 187)
        for element in data['items']:
            if element['id'] == '0':  # id = 0 is "All" that we don't use.
                continue
            try:
                # if it exists, we pass
                models.District.objects.get(base_id=element['id'])
            except models.District.DoesNotExist:
                models.District(name=element['description'],
                                base_id=element['id'],
                                country=portugal).save()

    def save_councils(self, district):
        base_url = 'http://www.base.gov.pt/base2/rest/lista/concelhos?distrito=%d'
        data = self.get_json(base_url % district.base_id)
        for element in data['items']:
            if element['id'] == '0':  # id = 0 is "All", that we don't use.
                continue
            try:
                # if it exists, we pass
                models.Council.objects.get(base_id=element['id'])
            except models.Council.DoesNotExist:
                models.Council(name=element['description'],
                               base_id=element['id'],
                               district=district).save()

    def retrieve_and_save_all(self):
        """Fetch everything, respecting FK order: country -> district -> council."""
        self.save_contracts_types()
        self.save_procedures_types()
        self.save_model_types()
        self.save_act_types()
        # Countries first
        self.save_all_countries()
        # Districts second
        self.save_all_districts()
        # Councils third
        for district in models.District.objects.all():
            self.save_councils(district)
class DynamicCrawler(JSONCrawler):
    """Base class for crawlers that mirror one BASE object type into a Django
    model. Subclasses set the four class attributes below and implement
    `clean_data`, `_hasher` and `_values_list`."""
    object_url = None        # per-object endpoint containing a %d placeholder for base_id
    object_list_url = None   # list endpoint that honours the HTTP Range header
    object_name = None       # human-readable name used in log messages
    object_model = None      # Django model this crawler maintains

    def get_json(self, url, headers=None):
        """
        Raises a `JSONLoadError` if all entries are `None`,
        the BASE way of saying that the object doesn't exist in its database.
        """
        data = super(DynamicCrawler, self).get_json(url, headers)
        # ensures that data is not None
        if not isinstance(data, list) and data['id'] == 0:
            raise JSONLoadError(url)
        return data

    @staticmethod
    def clean_data(data):
        # Subclasses turn a raw BASE payload into validated model kwargs.
        raise NotImplementedError

    def save_instance(self, cleaned_data):
        """
        Saves or updates the instance using cleaned_data

        Returns (instance, created) where `created` is True when a new row
        was inserted rather than an existing one updated.
        """
        try:
            instance = self.object_model.objects.get(
                base_id=cleaned_data['base_id'])
            for (key, value) in cleaned_data.items():
                setattr(instance, key, value)
            action = 'updated'
        except self.object_model.DoesNotExist:
            instance = self.object_model(**cleaned_data)
            action = 'created'
        instance.save()
        logger.info('%s "%d" %s' % (self.object_name, cleaned_data['base_id'],
                                    action))
        return instance, (action == 'created')

    @transaction.atomic
    def update_instance(self, base_id):
        """
        Retrieves data of object base_id from BASE,
        cleans, and saves it as an instance of a Django model.
        Returns the instance
        """
        data = self.get_json(self.object_url % base_id)
        cleaned_data = self.clean_data(data)
        return self.save_instance(cleaned_data)

    def get_instances_count(self):
        """
        Hits BASE to get the total number of instances in BASE db.
        """
        # A minimal 2-item Range request is enough: the total arrives in the
        # content-range response header.
        response = self.get_response(self.object_list_url,
                                     headers={'Range': 'items=0-1'})
        results_range = response.headers['content-range']
        # in "items 0-%d/%d", we want the second %d, the total.
        return int(results_range.split('/')[1])

    def _hasher(self, instance):
        """
        Hashes a entry of BASE response to a tuple. E.g. `(instance['id'], )`.
        Add more values to better identify if the instance changed.
        """
        raise NotImplementedError

    def _values_list(self):
        """
        Returns a list of tuples that are retrieved from the database to match
        the tuple returned by `_hasher`. E.g. `('base_id',)`.
        """
        raise NotImplementedError

    def get_base_ids(self, row1, row2):
        # Fetch BASE rows [row1, row2] and hash each entry for comparison
        # against our database.
        items = self.get_json(self.object_list_url,
                              headers={'Range': 'items=%d-%d' % (row1, row2)})
        return [self._hasher(instance) for instance in items]

    def _update_batch(self, row1, row2):
        """
        Updates items from row1 to row2 of BASE db with our db.
        """
        c1s = self.get_base_ids(row1, row2)
        # Pull our rows inside the same base_id window so both sides compare
        # like-for-like tuples (shape defined by _hasher/_values_list).
        c2s = set(self.object_model.objects.filter(base_id__gte=c1s[0][0],
                                                   base_id__lte=c1s[-1][0])
                  .order_by('base_id').values_list(*self._values_list()))
        c1s = set(c1s)
        # just the ids
        c1_ids = set(item[0] for item in c1s)
        c2_ids = set(item[0] for item in c2s)
        aggregated_modifications = {'deleted': 0, 'added': 0, 'updated': 0}
        # Tuples present in BASE but not identical in our db: new or changed.
        for item in c1s - c2s:
            id1 = item[0]
            self.update_instance(id1)
            if id1 in c2_ids:
                aggregated_modifications['updated'] += 1
            else:
                aggregated_modifications['added'] += 1
        # Ids we hold that BASE no longer lists in this window: deleted upstream.
        for id2 in c2_ids - c1_ids:
            self.object_model.objects.get(base_id=id2).delete()
            logger.info('contract "%d" deleted' % id2)
            aggregated_modifications['deleted'] += 1
        return aggregated_modifications

    def update(self, start=0, end=None, items_per_batch=1000):
        """
        The method retrieves count of all items in BASE (1 hit), and
        synchronizes items from `start` until `min(end, count)` in batches
        of `items_per_batch`.
        If `end=None` (default), it retrieves until the last item.
        if `start < 0`, the start is counted from the end.
        Use e.g. `start=-2000` for a quick retrieve of new items;
        Use `start=0` (default) to synchronize all items in database
        (it takes time!)

        Returns a dict totalling the 'deleted'/'added'/'updated' counts.
        """
        aggregated = {'deleted': 0, 'added': 0, 'updated': 0}
        count = self.get_instances_count()
        if end is None:
            end = count
        else:
            end = min(count, end)
        if end <= 0:
            return aggregated
        # if start < 0, start is as if it was from the maximum
        if start < 0:
            start += end
        if start > end:
            return aggregated
        # + 1 because it is [start, end]
        total_items = end - start
        # 103 // 100 = 1; we want 2 to also get the 3 in the next batch.
        batches = total_items // items_per_batch + 1
        logger.info('update of \'%s\' started: %d items in %d batches.' %
                    (self.object_name, total_items, batches))
        for i in range(batches):
            logger.info('Batch %d/%d started.' % (i + 1, batches))
            batch_aggr = self._update_batch(
                start + i*items_per_batch,
                min(end, start + (i+1)*items_per_batch))
            logger.info('Batch %d/%d finished: %s' % (i + 1, batches, batch_aggr))
            for key in aggregated:
                aggregated[key] += batch_aggr[key]
        logger.info('update of \'%s\' finished: %s' %
                    (self.object_name, aggregated))
        return aggregated
class EntitiesCrawler(DynamicCrawler):
    """
    Crawler used to retrieve entities.
    """
    object_url = 'http://www.base.gov.pt/base2/rest/entidades/%d'
    object_list_url = 'http://www.base.gov.pt/base2/rest/entidades'
    object_name = 'entity'
    object_model = models.Entity

    @staticmethod
    def clean_data(data):
        """Map a raw BASE entity payload onto EntityForm and return its cleaned data."""
        form = EntityForm({
            'base_id': data['id'],
            'name': data['description'],
            'nif': data['nif'],
            'country': data['location'],
        })
        if not form.is_valid():
            logger.error('Validation of entity "%d" failed' %
                         data['id'])
            raise ValidationError(form.errors)
        return form.cleaned_data

    def _hasher(self, instance):
        # Fingerprint on id, NIF and name so renamed entities count as changed.
        nif = CharField().clean(instance['nif'])
        name = CharField().clean(instance['description'])
        return instance['id'], nif, name

    def _values_list(self):
        return 'base_id', 'nif', 'name'
class ContractsCrawler(DynamicCrawler):
    """
    Crawler used to retrieve contracts.
    """
    object_url = 'http://www.base.gov.pt/base2/rest/contratos/%d'
    object_list_url = 'http://www.base.gov.pt/base2/rest/contratos'
    object_name = 'contract'
    object_model = models.Contract

    @staticmethod
    def clean_data(data):
        """Map a raw BASE contract payload onto ContractForm and return its cleaned data."""
        def fix_exceptions(payload):
            # this is confirmed from the official contract in PDF
            if payload['base_id'] in (1892486, 1892453, 1892392):
                payload['contractors'] = [{'id': 8468}]
            elif payload['base_id'] in (2377732, 2377789, 2377777):
                payload['contractors'] = [{'id': 2154}]
            return payload

        places = clean_place(data['executionPlace'])
        payload = {
            'base_id': data['id'],
            'procedure_type': data['contractingProcedureType'],
            'contract_type': data[u'contractTypes'],
            'contract_description': data['objectBriefDescription'],
            'description': data['description'],
            'signing_date': data['signingDate'],
            'added_date': data['publicationDate'],
            'cpvs': data['cpvs'],
            'category': data['cpvs'],
            'price': data['initialContractualPrice'],
            'country': places[0],
            'district': places[1],
            'council': {'district': places[1], 'council': places[2]},
            'contractors': data['contracting'],
            'contracted': data['contracted'],
        }
        form = ContractForm(fix_exceptions(payload))
        if not form.is_valid():
            logger.error('Validation of contract "%d" failed' %
                         data['id'])
            raise ValidationError(form.errors)
        return form.cleaned_data

    def save_instance(self, cleaned_data):
        """Persist the contract, then rebuild its m2m contractor/contracted links."""
        contractors = cleaned_data.pop('contractors')
        contracted = cleaned_data.pop('contracted')
        contract, created = super(ContractsCrawler, self)\
            .save_instance(cleaned_data)
        # Reset both relations so updates drop stale links.
        contract.contracted.clear()
        contract.contracted.add(*list(contracted))
        contract.contractors.clear()
        contract.contractors.add(*list(contractors))
        return contract, created

    def _hasher(self, instance):
        # id + price + signing date fingerprint a contract for change detection.
        to_date = DateField(input_formats=["%d-%m-%Y"], required=False)
        return (instance['id'],
                PriceField().clean(instance['initialContractualPrice']),
                to_date.clean(instance['signingDate']))

    def _values_list(self):
        return 'base_id', 'price', 'signing_date'
class TendersCrawler(DynamicCrawler):
    """
    Crawler used to retrieve tenders.
    """
    object_url = 'http://www.base.gov.pt/base2/rest/anuncios/%d'
    object_list_url = 'http://www.base.gov.pt/base2/rest/anuncios'
    object_name = 'tender'
    object_model = models.Tender

    @staticmethod
    def clean_data(data):
        """Map a raw BASE tender payload onto TenderForm and return its cleaned data."""
        payload = {
            'base_id': data['id'],
            'act_type': data['type'],
            'model_type': data['modelType'],
            'contract_type': data['contractType'],
            'description': data['contractDesignation'],
            'announcement_number': data['announcementNumber'],
            'dre_url': data['reference'],
            'publication_date': data['drPublicationDate'],
            'deadline_date': data['proposalDeadline'],
            'cpvs': data['cpvs'],
            'category': data['cpvs'],
            'price': data['basePrice'],
            'contractors': data['contractingEntities'],
        }
        payload['publication_date'] = \
            TenderForm.prepare_publication_date(payload)
        form = TenderForm(payload)
        if not form.is_valid():
            logger.error('Validation of tender "%d" failed' %
                         data['id'])
            raise ValidationError(form.errors)
        return form.cleaned_data

    def save_instance(self, cleaned_data):
        """Persist the tender, then rebuild its m2m contractors relation."""
        contractors = cleaned_data.pop('contractors')
        tender, created = super(TendersCrawler, self).save_instance(cleaned_data)
        tender.contractors.clear()
        tender.contractors.add(*list(contractors))
        return tender, created

    def _hasher(self, instance):
        to_date = DateField(input_formats=["%d-%m-%Y"])
        # e.g. tender 81558 has no price set
        price = None
        if instance['basePrice'] is not None:
            price = PriceField(required=False).clean(instance['basePrice'])
        return (instance['id'], price,
                to_date.clean(instance['drPublicationDate']))

    def _values_list(self):
        return 'base_id', 'price', 'publication_date'
| 35.627953
| 82
| 0.560749
| 1,999
| 18,099
| 4.938969
| 0.168584
| 0.018839
| 0.013167
| 0.018434
| 0.353388
| 0.29211
| 0.277727
| 0.234377
| 0.201256
| 0.185658
| 0
| 0.014671
| 0.325874
| 18,099
| 507
| 83
| 35.698225
| 0.794525
| 0.110945
| 0
| 0.274627
| 0
| 0.00597
| 0.142057
| 0.005855
| 0
| 0
| 0
| 0
| 0
| 1
| 0.101493
| false
| 0.002985
| 0.026866
| 0.014925
| 0.268657
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39d50b087b533ec75540f6aeefa21a97dbda7cfa
| 7,392
|
py
|
Python
|
tests/unit/test_resources.py
|
butla/PyDAS
|
39df5abbe9563b58da7caaa191b89852fb122ab7
|
[
"MIT"
] | 13
|
2016-06-29T13:35:05.000Z
|
2021-05-25T09:47:31.000Z
|
tests/unit/test_resources.py
|
butla/PyDAS
|
39df5abbe9563b58da7caaa191b89852fb122ab7
|
[
"MIT"
] | 1
|
2016-07-11T23:11:33.000Z
|
2016-07-11T23:11:33.000Z
|
tests/unit/test_resources.py
|
butla/PyDAS
|
39df5abbe9563b58da7caaa191b89852fb122ab7
|
[
"MIT"
] | 3
|
2017-10-17T15:54:25.000Z
|
2022-03-24T01:11:37.000Z
|
import copy
import json
import os
from unittest.mock import MagicMock, call
from bravado.client import SwaggerClient
import bravado.exception
from bravado_falcon import FalconHttpClient
import falcon
import pytest
import pytest_falcon.plugin
import responses
import yaml
from data_acquisition.acquisition_request import AcquisitionRequest, RequestNotFoundError
from data_acquisition.consts import (ACQUISITION_PATH, DOWNLOAD_CALLBACK_PATH,
METADATA_PARSER_CALLBACK_PATH, GET_REQUEST_PATH)
from data_acquisition.resources import (get_download_callback_url, get_metadata_callback_url,
AcquisitionResource)
import tests
from tests.consts import (TEST_DOWNLOAD_REQUEST, TEST_DOWNLOAD_CALLBACK, TEST_ACQUISITION_REQ,
TEST_ACQUISITION_REQ_JSON)
# Frozen clock value installed by the ``fake_time`` fixture, and the integer
# timestamp the application is expected to persist for it.
FAKE_TIME = 234.25
FAKE_TIMESTAMP = 234
@pytest.fixture(scope='function')
def client(falcon_api):
    """Falcon test client whose ``post`` JSON-encodes its payload."""
    test_client = pytest_falcon.plugin.Client(falcon_api)
    original_post = test_client.post

    def json_post(path, data):
        return original_post(path, json.dumps(data),
                             headers={'Content-Type': 'application/json'})

    test_client.post = json_post
    return test_client
@pytest.fixture(scope='session')
def swagger_spec():
    """Parsed Swagger spec loaded from the repository's api_doc.yaml.

    Uses ``yaml.safe_load``: calling ``yaml.load`` without an explicit Loader
    is deprecated and can construct arbitrary Python objects from the document.
    """
    spec_file_path = os.path.join(tests.__path__[0], '../api_doc.yaml')
    with open(spec_file_path) as spec_file:
        return yaml.safe_load(spec_file)
@pytest.fixture(scope='function')
def client_no_req_validation(falcon_api, swagger_spec):
    """Bravado client bound to the API with request validation disabled."""
    http_client = FalconHttpClient(falcon_api)
    config = {'validate_requests': False}
    return SwaggerClient.from_spec(swagger_spec,
                                   http_client=http_client,
                                   config=config)
@pytest.fixture(scope='function')
def client_swagger(falcon_api, swagger_spec):
    """Bravado client bound to the API with default (validating) settings."""
    http_client = FalconHttpClient(falcon_api)
    return SwaggerClient.from_spec(swagger_spec, http_client=http_client)
@pytest.fixture(scope='function')
def acquisition_requests_resource(das_config, mock_executor, mock_req_store, fake_time):
    """AcquisitionResource wired to the mocked store/executor and frozen time."""
    resource = AcquisitionResource(mock_req_store, mock_executor, das_config)
    return resource
@pytest.fixture(scope='function')
def req_store_get(mock_req_store):
    """Make the mocked store return a private copy of the canned request."""
    canned_request = copy.deepcopy(TEST_ACQUISITION_REQ)
    mock_req_store.get.return_value = canned_request
    return mock_req_store.get
@pytest.fixture(scope='function')
def fake_time(monkeypatch):
    # Freeze time.time() to the FAKE_TIME constant so stored timestamps are
    # deterministic (see FAKE_TIMESTAMP assertions in the callback tests).
    monkeypatch.setattr('time.time', lambda: FAKE_TIME)
def test_get_download_callback_url():
    """Download callback URL embeds the DAS base URL and the request id."""
    url = get_download_callback_url('https://some-test-das-url', 'some-test-id')
    expected = 'https://some-test-das-url/v1/das/callback/downloader/some-test-id'
    assert url == expected
def test_get_metadata_callback_url():
    """Metadata callback URL embeds the DAS base URL and the request id."""
    url = get_metadata_callback_url('https://some-test-das-url', 'some-test-id')
    expected = 'https://some-test-das-url/v1/das/callback/metadata/some-test-id'
    assert url == expected
@responses.activate
def test_external_service_call_not_ok(acquisition_requests_resource):
    # A POST the downstream service answers with 404 must be reported as a
    # failure (falsy return value) rather than raising.
    test_url = 'https://some-fake-url/'
    responses.add(responses.POST, test_url, status=404)
    assert not acquisition_requests_resource._external_service_call(
        url=test_url, data={'a': 'b'}, token='bearer fake-token', request_id='some-fake-id')
def test_processing_acquisition_request_for_hdfs(acquisition_requests_resource, mock_req_store):
    """An hdfs:// source skips the download step: the request is marked
    downloaded immediately and goes straight to metadata extraction."""
    # arrange
    mock_enqueue_metadata_req = MagicMock()
    acquisition_requests_resource._enqueue_metadata_request = mock_enqueue_metadata_req
    hdfs_acquisition_req = copy.deepcopy(TEST_ACQUISITION_REQ)
    hdfs_acquisition_req.source = TEST_ACQUISITION_REQ.source.replace('http://', 'hdfs://')
    proper_saved_request = copy.deepcopy(hdfs_acquisition_req)
    proper_saved_request.set_downloaded()
    fake_token = 'bearer asdasdasdasd'
    # act
    acquisition_requests_resource._process_acquisition_request(hdfs_acquisition_req, fake_token)
    # assert: metadata was enqueued for the already-"downloaded" request and
    # the updated request was persisted.
    mock_enqueue_metadata_req.assert_called_with(proper_saved_request, None, fake_token)
    mock_req_store.put.assert_called_with(proper_saved_request)
def test_acquisition_bad_request(client_no_req_validation):
    """Submitting a request without the 'category' field is rejected."""
    incomplete_request = dict(TEST_DOWNLOAD_REQUEST)
    incomplete_request.pop('category')
    with pytest.raises(bravado.exception.HTTPError):
        client_no_req_validation.rest.submitAcquisitionRequest(
            body=incomplete_request).result()
def test_downloader_callback_failed(client, fake_time, mock_req_store, req_store_get):
    """A downloader callback in ERROR state flips the stored request to ERROR
    and records the (frozen) error timestamp."""
    failed_callback_req = dict(TEST_DOWNLOAD_CALLBACK)
    failed_callback_req['state'] = 'ERROR'
    response = client.post(
        path=DOWNLOAD_CALLBACK_PATH.format(req_id=TEST_ACQUISITION_REQ.id),
        data=failed_callback_req)
    assert response.status == falcon.HTTP_200
    updated_request = AcquisitionRequest(**TEST_ACQUISITION_REQ_JSON)
    updated_request.state = 'ERROR'
    updated_request.timestamps['ERROR'] = FAKE_TIMESTAMP
    mock_req_store.put.assert_called_with(updated_request)
def test_metadata_callback_failed(client, fake_time, mock_req_store, req_store_get):
    """A FAILED metadata callback flips the stored request into ERROR state."""
    callback_path = METADATA_PARSER_CALLBACK_PATH.format(
        req_id=TEST_ACQUISITION_REQ.id)
    response = client.post(path=callback_path, data={'state': 'FAILED'})
    assert response.status == falcon.HTTP_200
    expected_request = AcquisitionRequest(**TEST_ACQUISITION_REQ_JSON)
    expected_request.state = 'ERROR'
    expected_request.timestamps['ERROR'] = FAKE_TIMESTAMP
    mock_req_store.put.assert_called_with(expected_request)
def test_get_request(das_api, client_swagger, req_store_get):
    # Bypass org authorization so the lookup itself is what is under test.
    das_api.request_management_res._org_checker = MagicMock()
    acquisition_request = client_swagger.rest.getRequest(req_id=TEST_ACQUISITION_REQ.id).result()
    assert AcquisitionRequest(**acquisition_request.__dict__) == TEST_ACQUISITION_REQ
def test_get_request_not_found(client, mock_req_store):
    """Fetching an unknown request id yields 404."""
    mock_req_store.get.side_effect = RequestNotFoundError()
    response = client.get(GET_REQUEST_PATH.format(req_id='some-fake-id'))
    assert falcon.HTTP_404 == response.status
def test_delete_request(das_api, client, mock_req_store, req_store_get):
    """DELETE on an existing request removes it from the store."""
    das_api.request_management_res._org_checker = MagicMock()
    delete_path = GET_REQUEST_PATH.format(req_id=TEST_ACQUISITION_REQ.id)
    response = client.delete(delete_path)
    assert falcon.HTTP_200 == response.status
    mock_req_store.delete.assert_called_with(TEST_ACQUISITION_REQ)
def test_delete_request_not_found(client, mock_req_store):
    """DELETE on an unknown request id yields 404."""
    mock_req_store.get.side_effect = RequestNotFoundError()
    response = client.delete(GET_REQUEST_PATH.format(req_id='fake-id'))
    assert falcon.HTTP_404 == response.status
@pytest.mark.parametrize('org_ids', [
    ['id-1'],
    ['id-1', 'id-2'],
    ['id-1', 'id-2', 'id-3'],
])
@pytest.mark.parametrize('acquisition_requests', [
    [TEST_ACQUISITION_REQ],
    [TEST_ACQUISITION_REQ, TEST_ACQUISITION_REQ]
])
def test_get_requests_for_org(org_ids, acquisition_requests,
                              das_api, client, mock_req_store):
    """Requests for every org in the query string are fetched and concatenated."""
    das_api.acquisition_res._org_checker = MagicMock()
    mock_req_store.get_for_org.return_value = acquisition_requests
    response = client.get(path=ACQUISITION_PATH,
                          query_string='orgs=' + ','.join(org_ids))
    returned_requests = [AcquisitionRequest(**req_json) for req_json in response.json]
    assert response.status == falcon.HTTP_200
    assert returned_requests == acquisition_requests * len(org_ids)
    # Fixed: the comprehension variable previously shadowed the builtin ``id``.
    assert mock_req_store.get_for_org.call_args_list == [
        call(org_id) for org_id in org_ids]
| 38.103093
| 97
| 0.759199
| 959
| 7,392
| 5.465068
| 0.164755
| 0.038161
| 0.045793
| 0.029765
| 0.441519
| 0.390002
| 0.285823
| 0.274757
| 0.256058
| 0.241557
| 0
| 0.006014
| 0.145157
| 7,392
| 193
| 98
| 38.300518
| 0.82339
| 0.002435
| 0
| 0.220588
| 0
| 0.014706
| 0.072863
| 0
| 0.007353
| 0
| 0
| 0
| 0.125
| 1
| 0.139706
| false
| 0
| 0.125
| 0.022059
| 0.308824
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39d6297dad17364278641be6d1ed6ea276348300
| 886
|
py
|
Python
|
Medium/279. Perfect Squares/solution (2).py
|
czs108/LeetCode-Solutions
|
889f5b6a573769ad077a6283c058ed925d52c9ec
|
[
"MIT"
] | 3
|
2020-05-09T12:55:09.000Z
|
2022-03-11T18:56:05.000Z
|
Medium/279. Perfect Squares/solution (2).py
|
czs108/LeetCode-Solutions
|
889f5b6a573769ad077a6283c058ed925d52c9ec
|
[
"MIT"
] | null | null | null |
Medium/279. Perfect Squares/solution (2).py
|
czs108/LeetCode-Solutions
|
889f5b6a573769ad077a6283c058ed925d52c9ec
|
[
"MIT"
] | 1
|
2022-03-11T18:56:16.000Z
|
2022-03-11T18:56:16.000Z
|
# 279. Perfect Squares
# Runtime: 60 ms, faster than 96.81% of Python3 online submissions for Perfect Squares.
# Memory Usage: 14.7 MB, less than 42.95% of Python3 online submissions for Perfect Squares.
class Solution:
    # Greedy enumeration: try to express n as a sum of `count` perfect squares
    # for count = 1, 2, ... and return the first count that succeeds.
    def numSquares(self, n: int) -> int:
        """Return the least number of perfect squares that sum to n (n >= 1)."""
        square_nums = {i * i for i in range(1, int(n**0.5) + 1)}

        def is_divided_by(n: int, count: int) -> bool:
            '''
            Return `True` if `n` can be decomposed into `count` number of
            perfect square numbers.
            '''
            if count == 1:
                return n in square_nums
            return any(is_divided_by(n - k, count - 1) for k in square_nums)

        for count in range(1, n + 1):
            if is_divided_by(n, count):
                return count
        # By Lagrange's four-square theorem the loop returns by count == 4.
        # Raise instead of `assert False`: asserts are stripped under `-O`.
        raise ValueError(f"no square decomposition found for n={n}")
| 30.551724
| 97
| 0.555305
| 123
| 886
| 3.926829
| 0.471545
| 0.086957
| 0.068323
| 0.074534
| 0.236025
| 0.178054
| 0.178054
| 0
| 0
| 0
| 0
| 0.045534
| 0.35553
| 886
| 29
| 98
| 30.551724
| 0.80035
| 0.341986
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 1
| 0.142857
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39d6fc42a60ee57ea74155e98d6216d785fa855c
| 2,720
|
py
|
Python
|
server/perform_action/common.py
|
darrenfoong/battleships
|
2866207b3a55d24fc085beedbd735d489990e487
|
[
"MIT"
] | 11
|
2020-01-15T14:25:48.000Z
|
2021-11-25T04:21:18.000Z
|
server/perform_action/common.py
|
darrenfoong/battleships
|
2866207b3a55d24fc085beedbd735d489990e487
|
[
"MIT"
] | 8
|
2021-02-04T16:41:57.000Z
|
2022-03-29T21:57:15.000Z
|
esp8266/common.py
|
pythings/PythingsOS
|
276b41a32af7fa0d5395b2bb308e611f784f9711
|
[
"Apache-2.0"
] | null | null | null |
# Replication factor: number of storage copies kept per uploaded file.
MAX_COPIES = 2
# Socket receive/send buffer sizes in bytes.
RECV_SIZE = 1024
SEND_SIZE = 1024
# Address of the central coordination server.
SERVER_IP = "172.24.1.107"
SERVER_PORT = 10000
def recv_line(conn):
    """Receive one chunk (up to RECV_SIZE bytes) from *conn* and return it as text.

    The original concatenated ``conn.recv``'s ``bytes`` result onto a ``str``,
    which raises TypeError on Python 3; the payload is decoded instead (the
    commented-out line in the original shows this was the intent).
    """
    return conn.recv(RECV_SIZE).decode("utf-8")
def make_request(entity_type, type, filename = None, auth = None, filesize = None, ip = None, ip_list = None , response_code = None, storage_space = None, used_space = None, port_no = None):
    """Serialize a protocol message as the str() of an ordered dict.

    Returns ``str(request)`` for a known message *type*, or ``0`` otherwise.
    The per-type field tables below preserve the exact key insertion order of
    the original hand-written branches, so the emitted strings are unchanged.
    Message directions: download/upload (client -> server), *_ack
    (server/storage -> client), copy (server -> storage), add_storage
    (storage -> server), storage_added_ack (server -> storage).
    """
    layouts = {
        "download": (("filename", filename), ("ip", ip), ("auth", auth)),
        "upload": (("filename", filename), ("filesize", filesize), ("ip", ip),
                   ("auth", auth), ("response_code", response_code)),
        "download_ack": (("ip_list", ip_list), ("response_code", response_code),
                         ("filename", filename), ("filesize", filesize),
                         ("auth", auth)),
        "upload_ack": (("ip", ip), ("response_code", response_code),
                       ("filename", filename), ("filesize", filesize),
                       ("auth", auth)),
        "upload_complete_ack": (("filename", filename),
                                ("response_code", response_code),
                                ("filesize", filesize), ("auth", auth),
                                ("ip", ip)),
        "copy": (("filename", filename), ("filesize", filesize), ("ip", ip),
                 ("auth", auth)),
        "add_storage": (("auth", auth), ("storage_space", storage_space),
                        ("used_space", used_space), ("port", port_no)),
        "storage_added_ack": (("response_code", response_code), ("auth", auth)),
    }
    if type not in layouts:
        return 0
    request = {}
    request['entity_type'] = entity_type
    request['type'] = type
    for key, value in layouts[type]:
        request[key] = value
    return str(request)
def read_request(req):
    """Parse a request string produced by make_request back into a dict.

    Uses ``ast.literal_eval`` instead of ``eval``: the payload arrives over
    the network, and ``eval`` would execute arbitrary expressions from a peer.
    ``literal_eval`` accepts exactly the dict-of-literals that make_request
    emits.
    """
    import ast  # local import: the module has no top-level import block to extend
    return ast.literal_eval(req)
# Protocol status codes carried in the ``response_code`` field of *_ack messages.
CODE_SUCCESS = 300
CODE_FAILURE = 400
| 28.93617
| 190
| 0.683824
| 342
| 2,720
| 5.216374
| 0.178363
| 0.095291
| 0.085762
| 0.103139
| 0.592489
| 0.461323
| 0.386771
| 0.34417
| 0.243274
| 0.190583
| 0
| 0.013596
| 0.161765
| 2,720
| 93
| 191
| 29.247312
| 0.76886
| 0.120956
| 0
| 0.493333
| 0
| 0
| 0.225074
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0
| 0
| 0.013333
| 0.093333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39da37adde81c90589b9c7e68358e7bc3b53628e
| 1,361
|
py
|
Python
|
repeat_samples.py
|
xiz675/OpenNMT-py
|
eaee466437d6a2f7c06a2401f9a8ef6c7757cabd
|
[
"MIT"
] | null | null | null |
repeat_samples.py
|
xiz675/OpenNMT-py
|
eaee466437d6a2f7c06a2401f9a8ef6c7757cabd
|
[
"MIT"
] | null | null | null |
repeat_samples.py
|
xiz675/OpenNMT-py
|
eaee466437d6a2f7c06a2401f9a8ef6c7757cabd
|
[
"MIT"
] | null | null | null |
def repeat(srcs, convs, tags):
    """Expand each (src, conv) pair once per ';'-separated tag.

    Sample i is duplicated ``len(tags[i].split(';'))`` times and the tags are
    flattened, so the three returned lists stay aligned element-for-element.
    Returns (new_src, new_conv, new_tag).
    """
    new_src = []
    new_conv = []
    new_tag = []
    print("size before repeat: " + str(len(srcs)))
    for src, conv, tag in zip(srcs, convs, tags):
        tag_list = tag.split(";")
        # One copy of the sample per tag (idiomatic list multiplication
        # instead of an index loop whose counter was never used).
        new_src.extend([src] * len(tag_list))
        new_conv.extend([conv] * len(tag_list))
        new_tag += tag_list
    assert len(new_conv) == len(new_src) == len(new_tag)
    print("size after repeat: " + str(len(new_src)))
    return new_src, new_conv, new_tag
def write_to_file(file_path, entities):
    """Write each entity on its own line to *file_path* (UTF-8).

    Uses a ``with`` block so the file is closed even if a write raises.
    """
    with open(file_path, "w", encoding='utf-8') as f:
        for entity in entities:
            f.write(entity)
            f.write("\n")
def read_file(file_path):
    """Return the file's lines (UTF-8) with trailing newlines stripped.

    Uses a ``with`` block so the handle is closed even if reading raises.
    """
    with open(file_path, "r", encoding='utf-8') as f:
        return [line.rstrip("\n") for line in f]
if __name__ == '__main__':
    # Expand the training split of the Twitter data: one copy of each
    # post/conversation per tag, written alongside the originals.
    key = "train"
    base_path = "./data/Twitter/"
    src_path = base_path + key + "_post.txt"
    conv_path = base_path + key + "_conv.txt"
    tag_path = base_path + key + "_tag.txt"
    srcs = read_file(src_path)
    convs = read_file(conv_path)
    tags = read_file(tag_path)
    new_data = repeat(srcs, convs, tags)
    # NOTE(review): output names concatenate as e.g. "trainnew_post.txt" —
    # presumably an underscore ("_new_post.txt") was intended; confirm.
    write_to_file(base_path + key + "new_post.txt", new_data[0])
    write_to_file(base_path + key + "new_conv.txt", new_data[1])
    write_to_file(base_path + key + "new_tag.txt", new_data[2])
| 28.957447
| 64
| 0.603968
| 216
| 1,361
| 3.513889
| 0.273148
| 0.073781
| 0.086957
| 0.059289
| 0.14888
| 0.14888
| 0.098814
| 0
| 0
| 0
| 0
| 0.007744
| 0.240999
| 1,361
| 46
| 65
| 29.586957
| 0.727009
| 0
| 0
| 0.052632
| 0
| 0
| 0.106696
| 0
| 0
| 0
| 0
| 0
| 0.026316
| 1
| 0.078947
| false
| 0
| 0
| 0
| 0.131579
| 0.052632
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39df74f7e7ea40de0f014c2a1bd6b468baf99ae0
| 974
|
py
|
Python
|
matching.py
|
siweiwang24/marriage
|
d0f041ef380562885177418944791491949d024e
|
[
"MIT"
] | null | null | null |
matching.py
|
siweiwang24/marriage
|
d0f041ef380562885177418944791491949d024e
|
[
"MIT"
] | null | null | null |
matching.py
|
siweiwang24/marriage
|
d0f041ef380562885177418944791491949d024e
|
[
"MIT"
] | null | null | null |
"""
Stable Marriage Problem solution using Gale-Shapley.
Copyright 2020. Siwei Wang.
"""
# pylint: disable=no-value-for-parameter
from typing import Optional
from click import command, option, Path
from read_validate import get_smp
from marriage import compute_smp
from write import print_results
@command()
@option('--filename', '-f', required=True,
        type=Path(exists=True, file_okay=True, dir_okay=False),
        help='Path to input json on which to run SMP algorithm.')
@option('--output', '-o', required=False,
        type=Path(exists=False, file_okay=True, dir_okay=False),
        help='Path to output file in which to print results.')
def main(filename: str, output: Optional[str]):
    """Execute smp algorithm on input and print results to output."""
    # Pipeline: read/validate preferences, run Gale-Shapley, print matchings.
    # When --output is omitted, print_results presumably writes to stdout —
    # confirm in write.print_results.
    men_pref, women_pref = get_smp(filename)
    men_engage, women_engage = compute_smp(men_pref, women_pref)
    print_results(men_engage, women_engage, output)


if __name__ == '__main__':
    main()
| 32.466667
| 69
| 0.724846
| 140
| 974
| 4.85
| 0.464286
| 0.070692
| 0.041237
| 0.044183
| 0.100147
| 0.100147
| 0.100147
| 0.100147
| 0.100147
| 0
| 0
| 0.004902
| 0.162218
| 974
| 29
| 70
| 33.586207
| 0.827206
| 0.185832
| 0
| 0
| 0
| 0
| 0.160256
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0
| 0.277778
| 0
| 0.333333
| 0.166667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39e0cfb770931442146ef89aab0fb46b52dd6602
| 7,908
|
py
|
Python
|
chimeric_blacklist.py
|
regnveig/juicer1.6_compact
|
21cd24f4c711640584965704f4fa72e5a25b76e3
|
[
"MIT"
] | null | null | null |
chimeric_blacklist.py
|
regnveig/juicer1.6_compact
|
21cd24f4c711640584965704f4fa72e5a25b76e3
|
[
"MIT"
] | null | null | null |
chimeric_blacklist.py
|
regnveig/juicer1.6_compact
|
21cd24f4c711640584965704f4fa72e5a25b76e3
|
[
"MIT"
] | null | null | null |
import pysam
import json
import bisect
import subprocess
def LoadFragmentMap(RestrSitesMap):
    """Parse a restriction-site map file into {contig_name: [site positions]}.

    Each line is "<contig> <pos> <pos> ...". Trailing newlines are removed
    with ``rstrip('\\n')``: the original sliced off the last character
    (``Contig[:-1]``), which corrupts the final line whenever the file has no
    trailing newline.
    """
    FragmentMap = {}
    with open(RestrSitesMap, 'rt') as MapFile:
        for Contig in MapFile:
            List = Contig.rstrip('\n').split(' ')
            FragmentMap[List[0]] = [int(item) for item in List[1:]]
    return FragmentMap
def CalcDist(Item1, Item2):
    """Distance between two aligned ends.

    Returns None when either end is missing or ambiguous (a list of
    candidates), +inf when the ends sit on different chromosomes, and the
    absolute position difference otherwise.
    """
    unusable = (Item1 is None or Item2 is None or
                type(Item1) == list or type(Item2) == list)
    if unusable:
        return None
    if Item1["Chr"] != Item2["Chr"]:
        return float("+inf")
    return abs(Item1["Pos"] - Item2["Pos"])
def SortItems(Item1, Item2): return tuple([(item["ID"], item["Pos"]) for item in sorted([Item1, Item2], key=lambda x: (x["RefID"], x["Pos"]))])
def ProcessQuery(Query, ChromSizes, MinMAPQ):
    """Classify one read-name group of alignments.

    Returns a dict with the original "ReadBlock", a "Type" tag (Unmapped,
    MappingQualityFailed, ChimericPaired, NormalPaired, ChimericAmbiguous)
    and, for the *Paired types, a two-element "Pair" with the chosen reads
    and their soft-clip-adjusted positions.
    """
    # Filter unmapped
    if any([item[1].is_unmapped for item in Query["ReadBlock"]]): return { "ReadBlock": Query["ReadBlock"], "Type": "Unmapped" }
    if any([item[1].mapping_quality < MinMAPQ for item in Query["ReadBlock"]]): return { "ReadBlock": Query["ReadBlock"], "Type": "MappingQualityFailed" }
    # Create Sorter: bucket alignments as read1/read2, primary/secondary.
    TypeDict = { index: list() for index in ("1p", "1s", "2p", "2s") }
    # Annotation: extend each alignment's span over its soft/hard clips
    # (CIGAR ops 4 and 5), clamped to the chromosome bounds.
    for index, item in Query["ReadBlock"]:
        Start = item.reference_start + 1
        End = item.reference_end
        CigarFirst = item.cigar[0]
        CigarLast = item.cigar[-1]
        SoftHard = (4, 5)
        if CigarFirst[0] in SoftHard:
            Start -= CigarFirst[1]
            if Start <= 0: Start = 1
        if CigarLast[0] in SoftHard:
            End += CigarLast[1]
            if End >= ChromSizes[item.reference_name]: End = ChromSizes[item.reference_name]
        Type = ("1" if item.is_read1 else "2") + ("s" if (item.is_secondary or item.is_supplementary) else "p")
        # Position is the 5' end of the alignment: End on reverse strand.
        TypeDict[Type].append({ "ID": int(index), "Chr": str(item.reference_name), "RefID": int(item.reference_id), "Pos": int(End) if item.is_reverse else int(Start) })
    # Create Pattern: counts per bucket, e.g. (1, 1, 1, 1) = both mates have
    # exactly one primary and one secondary alignment.
    Pattern = tuple([len(item) for index, item in TypeDict.items()])
    # Collapse singleton buckets to their dict; empty -> None; >1 stays a list
    # (CalcDist then yields None for list/None operands).
    TypeDict = { index: (None if not item else (item[0] if len(item) == 1 else item)) for index, item in TypeDict.items() }
    Dist = { f"1{index1}2{index2}": CalcDist(TypeDict[f"1{index1}"], TypeDict[f"2{index2}"]) for index1, index2 in ('pp', 'ps', 'sp', 'ss')}
    # Norm Chimera 4 Ends: accept when the four ends pair off within 1 kb.
    if Pattern == (1, 1, 1, 1):
        if ((Dist["1p2p"] < 1000) and (Dist["1s2s"] < 1000)) or ((Dist["1p2s"] < 1000) and (Dist["1s2p"] < 1000)):
            Sorted = SortItems(TypeDict["1p"], TypeDict["1s"])
            Pair = [{ "Read": Query["ReadBlock"][Sorted[0][0]][1], "Pos": Sorted[0][1] }, { "Read": Query["ReadBlock"][Sorted[1][0]][1], "Pos": Sorted[1][1] }]
            return { "ReadBlock": Query["ReadBlock"], "Type": "ChimericPaired", "Pair": Pair }
        else: return { "ReadBlock": Query["ReadBlock"], "Type": "ChimericAmbiguous" }
    # Norm Chimera 3 Ends: one mate has a single end; pick the far end of the
    # split mate as the pair partner.
    elif Pattern in ((1, 0, 1, 1), (1, 1, 1, 0)):
        if TypeDict["1s"] is None:
            if ((Dist["1p2p"] < 1000) or (Dist["1p2s"] < 1000)): Sorted = SortItems(TypeDict["1p"], TypeDict["2p"] if Dist["1p2p"] > Dist["1p2s"] else TypeDict["2s"])
            else: Sorted = None
        if TypeDict["2s"] is None:
            if ((Dist["1p2p"] < 1000) or (Dist["1s2p"] < 1000)): Sorted = SortItems(TypeDict["2p"], TypeDict["1p"] if Dist["1p2p"] > Dist["1s2p"] else TypeDict["1s"])
            else: Sorted = None
        if Sorted is None: return { "ReadBlock": Query["ReadBlock"], "Type": "ChimericAmbiguous" }
        Pair = [{ "Read": Query["ReadBlock"][Sorted[0][0]][1], "Pos": Sorted[0][1] }, { "Read": Query["ReadBlock"][Sorted[1][0]][1], "Pos": Sorted[1][1] }]
        return { "ReadBlock": Query["ReadBlock"], "Type": "ChimericPaired", "Pair": Pair }
    # Regular Pair: one primary alignment per mate, no splits.
    elif Pattern == (1, 0, 1, 0):
        Sorted = SortItems(TypeDict["1p"], TypeDict["2p"])
        Pair = [{ "Read": Query["ReadBlock"][Sorted[0][0]][1], "Pos": Sorted[0][1] }, { "Read": Query["ReadBlock"][Sorted[1][0]][1], "Pos": Sorted[1][1] }]
        return { "ReadBlock": Query["ReadBlock"], "Type": "NormalPaired", "Pair": Pair }
    # Collisions: 3+ alignments for a mate; not implemented yet.
    elif (Pattern[1] > 1) or (Pattern[3] > 1):
        pass # TODO Collisions
    # Other (including empty blocks and unhandled patterns)
    return { "ReadBlock": Query["ReadBlock"], "Type": "ChimericAmbiguous" }
def Main(InputFileSAM, OutputFileTXT, InterPairsTXT, ChimericAmbiguousFileSAM, UnmappedSAM, MappingQualityFailedSAM, StatsTXT, RestrictionSiteFile = None, MinMAPQ = 0):
    """Split a name-grouped SAM stream into paired / ambiguous / unmapped outputs.

    Reads alignments from InputFileSAM, groups consecutive records by query
    name, classifies each group with ProcessQuery, streams accepted pairs into
    a sort/tee/gzip shell pipeline (merged-nodups + inter-pairs files) and
    writes per-class BAMs plus a JSON stats file.
    """
    Input = pysam.AlignmentFile(InputFileSAM, 'r', check_sq=False)
    # Sort pair lines and tee them into the merged-nodups file and a 4-column
    # inter-pairs summary, both gzip-compressed. Requires bash (process
    # substitution).
    SortCommand = (f'sort -k2,2d -k6,6d -k4,4n -k8,8n -k1,1n -k5,5n -k3,3n | tee >( gzip -c > "{OutputFileTXT}" ) |' +
                   f' awk -F " " \'{{print $2 "\\t" $3 "\\t" $6 "\\t" $7}}\' | gzip -c > "{InterPairsTXT}"')
    Output = subprocess.Popen(SortCommand, shell=True, executable="/bin/bash", stdin=subprocess.PIPE)
    if RestrictionSiteFile is not None: FragmentMap = LoadFragmentMap(RestrictionSiteFile)
    TechInfo = {
        "ChimericAmbiguous": pysam.AlignmentFile(ChimericAmbiguousFileSAM, "wb", template = Input),
        "Unmapped": pysam.AlignmentFile(UnmappedSAM, "wb", template = Input),
        # Bug fix: this stream previously re-opened UnmappedSAM, so the
        # MappingQualityFailedSAM argument was silently never written.
        "MappingQualityFailed": pysam.AlignmentFile(MappingQualityFailedSAM, "wb", template = Input)
    }
    ChromSizes = { Input.references[i]: Input.lengths[i] for i in range(Input.nreferences) }
    Stats = { "SequencedReadPairs": 0, "NormalPaired": 0, "ChimericPaired": 0, "ChimericAmbiguous": 0, "MappingQualityFailed": 0, "Unmapped": 0, "Ligation": { "Motif": None, "LineCount": 0, "PresentCount": 0 } }
    Query = { "ReadName": None, "ReadBlock": [] }

    def BlockProcess():
        # Classify the accumulated read-name group and route it to an output.
        if not Query["ReadBlock"]:
            # Bug fix: the very first record (ReadName is None) used to flush
            # an empty block here, counting a phantom read pair.
            return
        Stats["SequencedReadPairs"] += 1
        Query["ReadBlock"] = list(enumerate(Query["ReadBlock"]))
        Result = ProcessQuery(Query, ChromSizes, MinMAPQ)
        Stats[Result["Type"]] += 1
        if Result["Type"] in ("Unmapped", "ChimericAmbiguous", "MappingQualityFailed"):
            for index, Rec in Query["ReadBlock"]: TechInfo[Result["Type"]].write(Rec)
        if Result["Type"] in ("ChimericPaired", "NormalPaired"):
            # Emit one merged-nodups line: strand, chr, pos, fragment (from the
            # restriction map when available), then quality/cigar/seq/name for
            # both reads.
            Read1, Read2 = Result["Pair"]
            Line = ' '.join([
                '16' if Read1["Read"].is_reverse else '0',
                str(Read1["Read"].reference_name),
                str(Read1["Pos"]),
                '0' if RestrictionSiteFile is None else str(bisect.bisect(FragmentMap[Read1["Read"].reference_name], Read1["Pos"])),
                '16' if Read2["Read"].is_reverse else '0',
                str(Read2["Read"].reference_name),
                str(Read2["Pos"]),
                '1' if RestrictionSiteFile is None else str(bisect.bisect(FragmentMap[Read2["Read"].reference_name], Read2["Pos"])),
                str(Read1["Read"].mapping_quality),
                str(Read1["Read"].cigarstring),
                str(Read1["Read"].seq.__str__()),
                str(Read2["Read"].mapping_quality),
                str(Read2["Read"].cigarstring),
                str(Read2["Read"].seq.__str__()),
                str(Read1["Read"].query_name),
                str(Read2["Read"].query_name)
            ]) + '\n'
            Output.stdin.write(Line.encode('utf-8'))

    # Stream records; StopIteration flushes the last block and finalizes.
    while 1:
        try:
            Record = next(Input)
            if not (Record.is_secondary or Record.is_supplementary):
                Stats["Ligation"]["LineCount"] += 1
                # TODO Add ligation counter
            if Record.query_name == Query["ReadName"]: Query["ReadBlock"].append(Record)
            else:
                BlockProcess()
                Query["ReadName"] = Record.query_name
                Query["ReadBlock"].clear()
                Query["ReadBlock"].append(Record)
        except StopIteration:
            BlockProcess()
            Input.close()
            Output.stdin.close()
            Output.wait()
            Stats["Alignable"] = Stats["ChimericPaired"] + Stats["NormalPaired"]
            for stat in ("ChimericPaired", "ChimericAmbiguous", "NormalPaired", "Unmapped", "Alignable", "MappingQualityFailed"): Stats[stat] = { "Count": Stats[stat], "%": Stats[stat] / Stats["SequencedReadPairs"] * 100 }
            Stats["Ligation"]["%"] = Stats["Ligation"]["PresentCount"] / Stats["SequencedReadPairs"] * 100 # BUG WTF?
            # TODO Postprocessing? Library Complexity?
            # Fixed: the stats file handle is now closed deterministically.
            with open(StatsTXT, 'wt') as StatsFile:
                json.dump(Stats, StatsFile, indent=4, ensure_ascii=False)
            break
Main(InputFileSAM = "/Data/NGS_Data/20211228_NGS_MinjaF_Pool/Results/Human_HiC/K1/splits/8_S73_L003.fastq.gz.filtered.sam", OutputFileTXT = "test_mergednodups.txt.gz", InterPairsTXT = "test_interpairs.txt.gz", MappingQualityFailedSAM = "/dev/null", ChimericAmbiguousFileSAM = "/dev/null", UnmappedSAM = "/dev/null", StatsTXT = "test.stats.txt", RestrictionSiteFile = None, MinMAPQ = 30)
| 55.300699
| 386
| 0.661861
| 1,012
| 7,908
| 5.12747
| 0.241107
| 0.062054
| 0.030834
| 0.04471
| 0.244941
| 0.222201
| 0.139333
| 0.127385
| 0.117364
| 0.095394
| 0
| 0.037266
| 0.141502
| 7,908
| 142
| 387
| 55.690141
| 0.727058
| 0.027441
| 0
| 0.073171
| 0
| 0.01626
| 0.209066
| 0.019018
| 0
| 0
| 0
| 0.007042
| 0
| 1
| 0.04878
| false
| 0.00813
| 0.03252
| 0.00813
| 0.130081
| 0.00813
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39e198255bc72ec3d147506eb38e23671a7f0cb4
| 4,088
|
py
|
Python
|
bot.py
|
gilgamezh/registration_desk
|
98303a6f96be78e0c1898a523db761f6d19866fc
|
[
"MIT"
] | null | null | null |
bot.py
|
gilgamezh/registration_desk
|
98303a6f96be78e0c1898a523db761f6d19866fc
|
[
"MIT"
] | null | null | null |
bot.py
|
gilgamezh/registration_desk
|
98303a6f96be78e0c1898a523db761f6d19866fc
|
[
"MIT"
] | null | null | null |
import csv
import logging
import os
import discord
from discord.ext import commands, tasks
from discord.utils import get
# logging config: registration events go to a local file with timestamps.
logging.basicConfig(
    filename=".log/reg.log",
    format="%(asctime)s - %(message)s",
    level=logging.INFO,
    datefmt="%d-%b-%y %H:%M:%S",
)
# Set up channel ids and environment variables. REG_CHANNEL_ID is mandatory;
# the other two are optional with safe defaults. The bare ``except:`` clauses
# were narrowed so real errors (e.g. KeyboardInterrupt) are no longer swallowed.
reg_channel_id = int(os.environ["REG_CHANNEL_ID"])
try:
    log_channel_id = int(os.environ["LOG_CHANNEL_ID"])
except (KeyError, ValueError):
    # Unset or non-numeric: no separate log channel.
    log_channel_id = None
try:
    only_respond_reg = int(os.environ["ONLY_RESPOND_REG"])
except (KeyError, ValueError):
    # Unset or non-numeric: respond in any channel.
    only_respond_reg = False
# TODO: separate customization into a conf file
# Event branding and the registration help text posted in the channel.
event_name = "EuroPython"
instruction = f"Welcome to {event_name}! Please use `!register <Full Name>, <Ticket Number>` to register.\nE.g. `!register James Brown, 99999`\nNOTE: please ONLY register for YOURSELF."
def welcome_msg(mention, roles):
    """Build the greeting listing the roles just granted to *mention*.

    *roles* must be non-empty. Bug fix: the 3+ roles branch previously called
    ``roles[1:-1].join(", ")`` — ``join`` on a list, which raises
    AttributeError; ``", ".join(roles[1:-1])`` is what was intended.
    """
    if len(roles) == 1:
        return f"Welcome {mention}, you now have the {roles[0]} role."
    if len(roles) == 2:
        return f"Welcome {mention}, you now have the {roles[0]} and {roles[1]} roles."
    middle = ", ".join(roles[1:-1])
    return f"Welcome {mention}, you now have the {roles[0]}, {middle} and {roles[-1]} roles."
# The Discord bot instance; commands are prefixed with "!" and the built-in
# help command is disabled in favor of the custom ``help`` below.
bot = commands.Bot(
    command_prefix="!",
    description=f"Registration Desk for {event_name}",
    help_command=None,
)
def roles_given(name, ticket_no):
    """Look up (name, ticket number) in the registration CSV; return role names.

    CSV columns used: 0 = full name, 2 = speaker flag ("yes"), 3 = ticket kind
    ("sprint" for sprint-only tickets), 4 = ticket number.
    Returns a list of role names, or None when no row matches.
    The bare ``except: continue`` was narrowed to the two errors a header or
    malformed row can actually raise.
    """
    with open(os.environ["DATA_PATH"], newline="") as csvfile:
        datareader = csv.reader(csvfile, delimiter=",")
        for row in datareader:
            try:
                if int(row[4]) == int(ticket_no) and row[0] == name:
                    if row[3] == "sprint":
                        return ["sprinter"]
                    if row[2] == "yes":
                        return ["speaker", "attendee"]
                    return ["attendee"]
            except (ValueError, IndexError):
                # Header row or malformed line: skip it.
                continue
    return None
@bot.event
async def on_ready():
    """Startup hook: set presence and post the registration instructions."""
    await bot.change_presence(
        status=discord.Status.online,
        activity=discord.Activity(type=discord.ActivityType.listening, name="!help"),
    )
    # Post the how-to message into the registration channel.
    await bot.get_channel(reg_channel_id).send(instruction)
    print("Bot is ready")
    logging.info("Bot logged in")
@bot.command()
async def register(ctx, *, info):
    """Handle `!register <Full Name>, <Ticket Number>`.

    Looks the pair up in the registration CSV; on success renames the member,
    reacts to the message and grants the matching roles; on failure replies
    with guidance.
    """
    if not only_respond_reg or ctx.channel.id == reg_channel_id:
        # info is "<Full Name>, <Ticket Number>". NOTE(review): no guard for a
        # missing comma — info[1] would raise IndexError; and any space after
        # the comma is kept in info[1] (int() tolerates it downstream).
        info = info.split(",")
        roles = roles_given(info[0], info[1])
        if roles is None:
            logging.info(
                f"FAIL: Cannot find request form user {ctx.author} with name={info[0]}, ticket_no={info[1]}"
            )
            await ctx.send(
                f"{ctx.author.mention} Sorry cannot find the ticket #{info[1]} with name: {info[0]}.\nPlease check and make sure you put down your full name same as the one you used in registering your ticket then try again.\nIf you want a team member to help you, please reply to this message with '@registration'"
            )
        else:
            log_msg = f"SUCCESS: Register user {ctx.author} name={info[0]}, ticket_no={info[1]} with roles={roles}"
            logging.info(log_msg)
            if log_channel_id is not None:
                await bot.get_channel(log_channel_id).send(log_msg)
            # Acknowledge with reactions, rename the member to their ticket name,
            # then grant the attendee role plus whatever roles_given returned.
            await ctx.message.add_reaction("🎟️")
            await ctx.message.add_reaction("🤖")
            await ctx.author.edit(nick=info[0])
            attendee_role = get(ctx.author.guild.roles, name="attendee")
            await ctx.author.add_roles(attendee_role)
            for role in roles:
                role_id = get(ctx.author.guild.roles, name=role)
                await ctx.author.add_roles(role_id)
            await ctx.author.send(welcome_msg(ctx.author.mention, roles))
@bot.command()
async def help(ctx):
    """Custom `!help`: repost the registration instructions."""
    if not only_respond_reg or ctx.channel.id == reg_channel_id:
        await ctx.send(instruction)


# Start the bot; the token comes from the environment, never from source.
bot.run(os.environ["REG_BOT_SECRET"])
| 34.066667
| 315
| 0.613748
| 562
| 4,088
| 4.36121
| 0.327402
| 0.044064
| 0.02448
| 0.025704
| 0.199102
| 0.142799
| 0.103631
| 0.085679
| 0.085679
| 0.085679
| 0
| 0.008988
| 0.265166
| 4,088
| 119
| 316
| 34.352941
| 0.805925
| 0.052104
| 0
| 0.142857
| 0
| 0.065934
| 0.282058
| 0
| 0
| 0
| 0
| 0.008403
| 0
| 1
| 0.021978
| false
| 0
| 0.065934
| 0
| 0.153846
| 0.032967
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39e1a049e695d46df354014950cf2221cf9cdc1c
| 1,551
|
py
|
Python
|
src/gameServer.py
|
LesGameDevToolsMagique/GameEditor
|
06bed29845ded5cca35e57a3dd457dc72c2a2e8e
|
[
"MIT"
] | null | null | null |
src/gameServer.py
|
LesGameDevToolsMagique/GameEditor
|
06bed29845ded5cca35e57a3dd457dc72c2a2e8e
|
[
"MIT"
] | null | null | null |
src/gameServer.py
|
LesGameDevToolsMagique/GameEditor
|
06bed29845ded5cca35e57a3dd457dc72c2a2e8e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# skeleton from http://kmkeen.com/socketserver/2009-04-03-13-45-57-003.html
import socketserver, subprocess, sys
from threading import Thread
from pprint import pprint
import json
# NOTE(review): leftover from the socketserver skeleton; appears unused below —
# confirm and remove.
my_unix_command = ['bc']
HOST = 'localhost'
PORT = 12321
# Load the command -> response lookup table once at import time.
with open('storage.json') as data_file:
    JSONdata = json.load(data_file)['commands']
class JSONSearchHandler:
    """Resolves a command key to its response string via the loaded JSON table."""

    def search(self, rule):
        """Return the response mapped to *rule*, or '0' when no entry matches."""
        for entry in JSONdata:
            if entry['key'] == rule:
                return entry['response']
        return '0'
class SingleTCPHandler(socketserver.BaseRequestHandler):
    "One instance per connection. Override handle(self) to customize action."

    def handle(self):
        """Request loop: read UTF-8 commands, answer via JSONSearchHandler."""
        while True:
            data = self.request.recv(1024)
            if not data:
                break
            text = data.decode('utf-8')
            print("Client wrote: ", text)
            response = JSONSearchHandler().search(text)
            # sendall guarantees the whole response is transmitted; plain
            # send may write only part of the buffer.
            self.request.sendall(response.encode())
        # Bug fix: print() does not do %-interpolation — the original printed
        # the literal "%s disconnected" plus the address as a second argument.
        print("%s disconnected" % self.client_address[0])
class SimpleServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
    """Threaded TCP server whose worker threads die with the main thread."""

    daemon_threads = True
    allow_reuse_address = True

    def __init__(self, server_address, RequestHandlerClass):
        # Nothing extra to initialize here; the subclass only sets the two
        # class flags above. ThreadingMixIn has no __init__, so this resolves
        # to TCPServer.__init__ exactly as in the original explicit call.
        super().__init__(server_address, RequestHandlerClass)
if __name__ == "__main__":
    # Bind the configured host/port and serve until interrupted.
    server = SimpleServer((HOST, PORT), SingleTCPHandler)
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        # Exit quietly on Ctrl-C instead of printing a traceback.
        sys.exit(0)
| 31.02
| 78
| 0.648614
| 168
| 1,551
| 5.827381
| 0.613095
| 0.024515
| 0.028601
| 0.042901
| 0.081716
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025663
| 0.246293
| 1,551
| 49
| 79
| 31.653061
| 0.811805
| 0.107672
| 0
| 0
| 0
| 0
| 0.107904
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.081081
| false
| 0
| 0.108108
| 0
| 0.351351
| 0.081081
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39e3fc7a595793dc10754a5adbe8f528668e75d2
| 360
|
py
|
Python
|
src/keycloakclient/aio/openid_connect.py
|
phoebebright/python-keycloak-client
|
8590fbcdbda8edbe993a01bbff06d9d9be679c5e
|
[
"MIT"
] | null | null | null |
src/keycloakclient/aio/openid_connect.py
|
phoebebright/python-keycloak-client
|
8590fbcdbda8edbe993a01bbff06d9d9be679c5e
|
[
"MIT"
] | null | null | null |
src/keycloakclient/aio/openid_connect.py
|
phoebebright/python-keycloak-client
|
8590fbcdbda8edbe993a01bbff06d9d9be679c5e
|
[
"MIT"
] | null | null | null |
from keycloakclient.aio.mixins import WellKnownMixin
from keycloakclient.openid_connect import (
KeycloakOpenidConnect as SyncKeycloakOpenidConnect,
PATH_WELL_KNOWN,
)
__all__ = (
'KeycloakOpenidConnect',
)
class KeycloakOpenidConnect(WellKnownMixin, SyncKeycloakOpenidConnect):
    """Async (aio) variant of KeycloakOpenidConnect.

    Combines the aio WellKnownMixin with the synchronous implementation;
    only the well-known discovery path hook is overridden here.
    """

    def get_path_well_known(self):
        # Path template for the OpenID Connect discovery document, shared
        # with the synchronous client.
        return PATH_WELL_KNOWN
| 24
| 71
| 0.8
| 33
| 360
| 8.363636
| 0.606061
| 0.086957
| 0.141304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144444
| 360
| 14
| 72
| 25.714286
| 0.896104
| 0
| 0
| 0
| 0
| 0
| 0.058333
| 0.058333
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0.181818
| 0.090909
| 0.454545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39e4afc96a10bdb1d7dfe165b5b83d57bfbc7c47
| 9,987
|
py
|
Python
|
multi_script_editor/jedi/evaluate/precedence.py
|
paulwinex/pw_multiScriptEditor
|
e447e99f87cb07e238baf693b7e124e50efdbc51
|
[
"MIT"
] | 142
|
2015-03-21T12:56:21.000Z
|
2022-02-08T04:42:46.000Z
|
jedi/evaluate/precedence.py
|
blueyed/jedi
|
a01e4c6b375795bb8c8ee0d4e86d4c535456f5b4
|
[
"MIT"
] | 18
|
2015-05-06T21:14:14.000Z
|
2015-08-29T18:24:43.000Z
|
jedi/evaluate/precedence.py
|
blueyed/jedi
|
a01e4c6b375795bb8c8ee0d4e86d4c535456f5b4
|
[
"MIT"
] | 51
|
2016-05-07T14:27:42.000Z
|
2022-02-10T05:55:11.000Z
|
"""
Handles operator precedence.
"""
from jedi._compatibility import unicode
from jedi.parser import representation as pr
from jedi import debug
from jedi.common import PushBackIterator
from jedi.evaluate.compiled import CompiledObject, create, builtin
from jedi.evaluate import analysis
class PythonGrammar(object):
    """
    Some kind of mirror of http://docs.python.org/3/reference/grammar.html.
    """
    class MultiPart(str):
        # A two-word operator ('not in', 'is not'): compares equal to its
        # first word, but remembers the second word so the parser can merge
        # the pair into one MergedOperator.
        def __new__(cls, first, second):
            self = str.__new__(cls, first)
            self.second = second
            return self

        def __str__(self):
            # Full spelling, e.g. 'not in'.
            return str.__str__(self) + ' ' + self.second

    # One operator set per grammar rule. ORDER below lists them from the
    # tightest-binding (POWER) to the loosest (SLICE); FACTOR and NOT_TEST
    # are unary and handled separately in _get_number.
    FACTOR = '+', '-', '~'
    POWER = '**',
    TERM = '*', '/', '%', '//'
    ARITH_EXPR = '+', '-'
    SHIFT_EXPR = '<<', '>>'
    AND_EXPR = '&',
    XOR_EXPR = '^',
    EXPR = '|',
    COMPARISON = ('<', '>', '==', '>=', '<=', '!=', 'in',
                  MultiPart('not', 'in'), MultiPart('is', 'not'), 'is')

    NOT_TEST = 'not',
    AND_TEST = 'and',
    OR_TEST = 'or',

    #TEST = or_test ['if' or_test 'else' test] | lambdef
    TERNARY = 'if',
    SLICE = ':',

    ORDER = (POWER, TERM, ARITH_EXPR, SHIFT_EXPR, AND_EXPR, XOR_EXPR,
             EXPR, COMPARISON, AND_TEST, OR_TEST, TERNARY, SLICE)

    FACTOR_PRIORITY = 0  # highest priority
    LOWEST_PRIORITY = len(ORDER)
    NOT_TEST_PRIORITY = LOWEST_PRIORITY - 4  # priority only lower for `and`/`or`
    SLICE_PRIORITY = LOWEST_PRIORITY - 1  # priority only lower for `and`/`or`
class Precedence(object):
    """A binary operator node: ``left <operator> right``."""

    def __init__(self, left, operator, right):
        self.left = left
        self.operator = operator
        self.right = right

    def parse_tree(self, strip_literals=False):
        """Return this tree as nested ``(left, op_string, right)`` tuples.

        With ``strip_literals=True``, pr.Literal leaves are replaced by
        their raw values.
        """
        def unwrap(node):
            try:
                node = node.parse_tree(strip_literals)
            except AttributeError:
                pass  # plain leaf, not a Precedence node
            if strip_literals and isinstance(node, pr.Literal):
                node = node.value
            return node

        return (unwrap(self.left), self.operator.string, unwrap(self.right))

    def __repr__(self):
        return '(%s %s %s)' % (self.left, self.operator, self.right)
class TernaryPrecedence(Precedence):
    # Node for ``left if check else right``: *check* holds the condition
    # parsed between 'if' and 'else'; left/right are the two value branches.
    def __init__(self, left, operator, right, check):
        super(TernaryPrecedence, self).__init__(left, operator, right)
        self.check = check
def create_precedence(expression_list):
    """Build a Precedence tree from a flat list of operands and operators."""
    iterator = PushBackIterator(iter(expression_list))
    return _check_operator(iterator)
def _syntax_error(element, msg='SyntaxError in precedence'):
    # Static analysis must not crash on malformed code: log and carry on.
    debug.warning('%s: %s, %s' % (msg, element, element.start_pos))
def _get_number(iterator, priority=PythonGrammar.LOWEST_PRIORITY):
    """Consume one operand (with any leading unary operators) from *iterator*.

    Returns a Precedence node (left=None) for unary prefixes ('+', '-', '~',
    'not'), the raw element for plain operands, or None when a ':' slice
    operator should be handled by the caller instead.
    """
    el = next(iterator)
    if isinstance(el, pr.Operator):
        if el in PythonGrammar.FACTOR:
            # Unary +/-/~ binds tighter than every binary operator.
            right = _get_number(iterator, PythonGrammar.FACTOR_PRIORITY)
        elif el in PythonGrammar.NOT_TEST \
                and priority >= PythonGrammar.NOT_TEST_PRIORITY:
            right = _get_number(iterator, PythonGrammar.NOT_TEST_PRIORITY)
        elif el in PythonGrammar.SLICE \
                and priority >= PythonGrammar.SLICE_PRIORITY:
            # A ':' in operand position starts a slice; hand it back so
            # _check_operator can treat it as the (empty-operand) operator.
            iterator.push_back(el)
            return None
        else:
            # Operator where an operand was expected: report and skip it.
            _syntax_error(el)
            return _get_number(iterator, priority)
        return Precedence(None, el, right)
    elif isinstance(el, pr.tokenize.Token):
        # NOTE(review): plain tokenizer tokens are skipped here — presumably
        # whitespace/utility tokens with no value; confirm in pr.tokenize.
        return _get_number(iterator, priority)
    else:
        return el
class MergedOperator(pr.Operator):
    """
    A way to merge the two operators `is not` and `not in`, which are two
    words instead of one.
    Maybe there's a better way (directly in the tokenizer/parser? but for now
    this is fine.)
    """
    def __init__(self, first, second):
        # Combined spelling, e.g. 'is not'; module/parent/position metadata
        # all come from the first word.
        string = first.string + ' ' + second.string
        super(MergedOperator, self).__init__(first._sub_module, string,
                                             first.parent, first.start_pos)
        self.first = first
        self.second = second
def _check_operator(iterator, priority=PythonGrammar.LOWEST_PRIORITY):
    """Parse a (sub)expression from *iterator* into a Precedence tree.

    *priority* is the loosest grammar level this call may consume; tighter
    operators recurse with a smaller level, giving the usual
    precedence-climbing behaviour.
    """
    try:
        left = _get_number(iterator, priority)
    except StopIteration:
        return None

    for el in iterator:
        if not isinstance(el, pr.Operator):
            _syntax_error(el)
            continue

        operator = None
        for check_prio, check in enumerate(PythonGrammar.ORDER):
            if check_prio >= priority:
                # respect priorities.
                iterator.push_back(el)
                return left

            try:
                match_index = check.index(el)
            except ValueError:
                continue

            match = check[match_index]
            if isinstance(match, PythonGrammar.MultiPart):
                # 'not in' / 'is not': peek at the next token and merge, or
                # push it back when it doesn't complete the two-word form.
                next_tok = next(iterator)
                if next_tok == match.second:
                    el = MergedOperator(el, next_tok)
                else:
                    iterator.push_back(next_tok)
                    if el == 'not':
                        # a lone 'not' here is unary, not a comparison
                        continue

            operator = el
            break

        if operator is None:
            _syntax_error(el)
            continue

        if operator in PythonGrammar.POWER:
            check_prio += 1  # to the power of is right-associative
        elif operator in PythonGrammar.TERNARY:
            # Collect everything up to 'else' as the ternary condition.
            try:
                middle = []
                for each in iterator:
                    if each == 'else':
                        break
                    middle.append(each)
                middle = create_precedence(middle)
            except StopIteration:
                _syntax_error(operator, 'SyntaxError ternary incomplete')
        right = _check_operator(iterator, check_prio)
        if right is None and not operator in PythonGrammar.SLICE:
            _syntax_error(iterator.current, 'SyntaxError operand missing')
        else:
            if operator in PythonGrammar.TERNARY:
                left = TernaryPrecedence(left, operator, right, middle)
            else:
                left = Precedence(left, operator, right)
    return left
def _literals_to_types(evaluator, result):
    """Replace literal objects in *result* with value-free type instances."""
    # Changes literals ('a', 1, 1.0, etc) to its type instances (str(),
    # int(), float(), etc).
    for i, r in enumerate(result):
        if is_literal(r):
            # Literals are only valid as long as the operations are
            # correct. Otherwise add a value-free instance.
            cls = builtin.get_by_name(r.name)
            result[i] = evaluator.execute(cls)[0]
    # Deduplicate; NOTE(review): set() does not preserve order — presumably
    # result order is irrelevant to callers.
    return list(set(result))
def calculate(evaluator, left_result, operator, right_result):
    """Evaluate ``left <operator> right`` over two lists of possibilities.

    left_result is None for unary expressions; either side may be empty for
    broken expressions. Otherwise the element-wise cartesian product is
    computed, unless it would be too large to stay meaningful.
    """
    result = []
    if left_result is None and right_result:
        # cases like `-1` or `1 + ~1`
        for right in right_result:
            result.append(_factor_calculate(evaluator, operator, right))
        return result
    else:
        if not left_result or not right_result:
            # illegal slices e.g. cause left/right_result to be None
            result = (left_result or []) + (right_result or [])
            result = _literals_to_types(evaluator, result)
        else:
            # I don't think there's a reasonable chance that a string
            # operation is still correct, once we pass something like six
            # objects.
            if len(left_result) * len(right_result) > 6:
                result = _literals_to_types(evaluator, left_result + right_result)
            else:
                for left in left_result:
                    for right in right_result:
                        result += _element_calculate(evaluator, left, operator, right)
        return result
def _factor_calculate(evaluator, operator, right):
    """Apply a unary operator; only numeric negation is actually computed.

    '+', '~' and non-numeric operands pass through unchanged.
    """
    if _is_number(right):
        if operator == '-':
            return create(evaluator, -right.obj)
    return right
def _is_number(obj):
    # True for compiled objects wrapping a plain int or float.
    return isinstance(obj, CompiledObject) \
        and isinstance(obj.obj, (int, float))
def _is_string(obj):
    # `unicode` comes from jedi._compatibility (imported at module top), so
    # this works on both Python 2 and 3.
    return isinstance(obj, CompiledObject) \
        and isinstance(obj.obj, (str, unicode))
def is_literal(obj):
    """True when *obj* wraps a plain number or string literal."""
    return _is_number(obj) or _is_string(obj)
def _is_tuple(obj):
    # Local import — NOTE(review): presumably avoids a circular import
    # between the evaluate submodules; confirm.
    from jedi.evaluate import iterable
    return isinstance(obj, iterable.Array) and obj.type == pr.Array.TUPLE
def _is_list(obj):
    # Local import — NOTE(review): presumably avoids a circular import
    # between the evaluate submodules; confirm.
    from jedi.evaluate import iterable
    return isinstance(obj, iterable.Array) and obj.type == pr.Array.LIST
def _element_calculate(evaluator, left, operator, right):
    """Evaluate one ``left <operator> right`` pair; returns a result list."""
    from jedi.evaluate import iterable, representation as er
    l_is_num = _is_number(left)
    r_is_num = _is_number(right)
    if operator == '*':
        # for iterables, ignore * operations
        if isinstance(left, iterable.Array) or _is_string(left):
            return [left]
        elif isinstance(right, iterable.Array) or _is_string(right):
            return [right]
    elif operator == '+':
        if l_is_num and r_is_num or _is_string(left) and _is_string(right):
            # Both sides are concrete numbers or strings: fold the values.
            return [create(evaluator, left.obj + right.obj)]
        elif _is_tuple(left) and _is_tuple(right) or _is_list(left) and _is_list(right):
            return [iterable.MergedArray(evaluator, (left, right))]
    elif operator == '-':
        if l_is_num and r_is_num:
            return [create(evaluator, left.obj - right.obj)]
    elif operator == '%':
        # With strings and numbers the left type typically remains. Except for
        # `int() % float()`.
        return [left]

    def check(obj):
        """Checks if a Jedi object is either a float or an int."""
        return isinstance(obj, er.Instance) and obj.name in ('int', 'float')

    # Static analysis, one is a number, the other one is not.
    if operator in ('+', '-') and l_is_num != r_is_num \
            and not (check(left) or check(right)):
        # NOTE(review): the message hard-codes '+' even when operator is '-';
        # the actual operator is passed to analysis.add separately — confirm
        # whether the message should interpolate it.
        message = "TypeError: unsupported operand type(s) for +: %s and %s"
        analysis.add(evaluator, 'type-error-operation', operator,
                     message % (left, right))
    # Fallback: keep both possibilities when nothing could be folded.
    return [left, right]
| 33.513423
| 88
| 0.601382
| 1,163
| 9,987
| 4.979364
| 0.200344
| 0.022449
| 0.020549
| 0.015196
| 0.240373
| 0.134001
| 0.091867
| 0.091867
| 0.078052
| 0.059057
| 0
| 0.001856
| 0.298588
| 9,987
| 297
| 89
| 33.626263
| 0.824839
| 0.110744
| 0
| 0.203791
| 0
| 0
| 0.028951
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.104265
| false
| 0.004739
| 0.042654
| 0.023697
| 0.412322
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39e817d468144ef60c9cbbd969d60eec454c7689
| 1,967
|
py
|
Python
|
search.py
|
manimaul/mxmcc
|
923458b759c8daa74dd969e968bc72b17fdffe02
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 1
|
2016-08-24T21:30:45.000Z
|
2016-08-24T21:30:45.000Z
|
search.py
|
manimaul/mxmcc
|
923458b759c8daa74dd969e968bc72b17fdffe02
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 5
|
2021-03-18T23:25:15.000Z
|
2022-03-11T23:44:20.000Z
|
search.py
|
manimaul/mxmcc
|
923458b759c8daa74dd969e968bc72b17fdffe02
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# Recursive search for chart/map files; see MapPathSearch below.
__author__ = 'Will Kamp'
__copyright__ = 'Copyright 2013, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development'  # 'Prototype', 'Development', or 'Production'
import os
class MapPathSearch:
    """Recursively collects map-file paths beneath a directory."""

    def __init__(self, directory, map_extensions=None, include_only=None):
        """Searches for files ending with <map_extensions> in <directory> and
        all subdirectories.

        :param directory: root directory to walk; a message is printed (and
            file_paths stays empty) when it is not a directory.
        :param map_extensions: iterable of extensions (without the dot) to
            accept, compared case-insensitively; defaults to ('kap', 'tif').
        :param include_only: optional set of file names; when given, only
            paths of files contained in the set are returned,
            e.g. {file1.kap, file2.tif}.

        file_paths is a list of all full paths found.
        """
        # BUG FIX: the original used a mutable list as the default value for
        # map_extensions; a shared default list is a classic Python pitfall.
        if map_extensions is None:
            map_extensions = ('kap', 'tif')
        self.file_paths = []
        extensions = {ext.upper() for ext in map_extensions}
        if include_only is not None:
            include_only = set(include_only)
        if os.path.isdir(directory):
            for root, dirs, files in os.walk(directory):
                for f in files:
                    include = False
                    i = f.rfind(".")
                    if i > 0:  # i > 0 also rejects dot-files like ".hidden"
                        ext = f[i + 1:].upper()
                        include = ext in extensions
                        if include and include_only is not None:
                            include = f in include_only
                    if include:
                        self.file_paths.append(os.path.join(root, f))
        else:
            print(directory, 'is not a directory.')
if __name__ == '__main__':
    # Smoke-test placeholder; this module is meant to be imported.
    print("foo")
| 34.508772
| 120
| 0.56482
| 241
| 1,967
| 4.360996
| 0.39834
| 0.136061
| 0.049477
| 0.045671
| 0.170314
| 0.170314
| 0.144624
| 0.089439
| 0.089439
| 0.089439
| 0
| 0.006154
| 0.339095
| 1,967
| 56
| 121
| 35.125
| 0.802308
| 0.37214
| 0
| 0
| 0
| 0
| 0.095925
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033333
| false
| 0
| 0.033333
| 0
| 0.1
| 0.066667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39e9b24961999fcc48a120276aefb45a23005614
| 1,585
|
py
|
Python
|
ggui/style.py
|
arthur-hav/GGUI
|
b64495546541bafa168daa150a4de86569fe1242
|
[
"MIT"
] | 1
|
2021-02-03T13:33:14.000Z
|
2021-02-03T13:33:14.000Z
|
ggui/style.py
|
arthur-hav/GGUI
|
b64495546541bafa168daa150a4de86569fe1242
|
[
"MIT"
] | null | null | null |
ggui/style.py
|
arthur-hav/GGUI
|
b64495546541bafa168daa150a4de86569fe1242
|
[
"MIT"
] | null | null | null |
class Style:
    """Resolved widget style: premultiplied colors, border and fade settings."""

    def __init__(self,
                 parent_styles=None,
                 color=(0, 0, 0, 0),
                 hover_color=None,
                 click_color=None,
                 disabled_color=None,
                 border_color=None,
                 border_line_w=0,
                 fade_in_time=0.0,
                 fade_out_time=0.0,
                 transparent=None):
        # Inherit attributes from the parents first (iterated in reverse, so
        # earlier parents win last); the explicit arguments below override.
        if parent_styles:
            for ancestor in reversed(parent_styles):
                for name, value in ancestor.__dict__.items():
                    setattr(self, name, value)
        self.default_color = self.premultiply(color)
        self.hover_color = self.premultiply(hover_color)
        self.click_color = self.premultiply(click_color)
        self.disabled_color = self.premultiply(disabled_color)
        # Unless stated explicitly, the style is transparent when the base
        # color's alpha is below 1.
        if transparent is None:
            transparent = self.default_color[3] < 1.0
        self.transparent = transparent
        self.fade_in_time = fade_in_time
        self.fade_out_time = fade_out_time
        self.border_color = border_color
        self.border_line_w = border_line_w

    @property
    def background(self):
        # First truthy of hover color / border color.
        return self.hover_color or self.border_color

    def premultiply(self, color):
        """Return *color* with RGB premultiplied by alpha; falsy input passes through."""
        if not color:
            return color
        alpha = color[3]
        return color[0] * alpha, color[1] * alpha, color[2] * alpha, alpha

    def __str__(self):
        """Hex '#RRGGBBAA' representation of the (premultiplied) default color."""
        return '#' + ''.join(f'{int(255 * channel):02X}'
                             for channel in self.default_color)
| 38.658537
| 98
| 0.581073
| 203
| 1,585
| 4.26601
| 0.231527
| 0.093533
| 0.110855
| 0.078522
| 0.110855
| 0.110855
| 0
| 0
| 0
| 0
| 0
| 0.039889
| 0.319874
| 1,585
| 40
| 99
| 39.625
| 0.763451
| 0
| 0
| 0
| 0
| 0.055556
| 0.096591
| 0.078283
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0
| 0.055556
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39ebe1a3f9b6deca1adc431db80e1a994f12644b
| 5,041
|
py
|
Python
|
fsh_validator/cli.py
|
glichtner/fsh-validator
|
c3b16546221c8d43c24bcee426ec7882938305bd
|
[
"BSD-3-Clause"
] | null | null | null |
fsh_validator/cli.py
|
glichtner/fsh-validator
|
c3b16546221c8d43c24bcee426ec7882938305bd
|
[
"BSD-3-Clause"
] | 1
|
2022-03-01T16:06:09.000Z
|
2022-03-01T16:06:09.000Z
|
fsh_validator/cli.py
|
glichtner/fsh-validator
|
c3b16546221c8d43c24bcee426ec7882938305bd
|
[
"BSD-3-Clause"
] | null | null | null |
"""Command line interface for fsh-validator."""
import os
import sys
import argparse
from pathlib import Path
import yaml
from .fsh_validator import (
print_box,
run_sushi,
validate_all_fsh,
validate_fsh,
download_validator,
bcolors,
VALIDATOR_BASENAME,
store_log,
assert_sushi_installed,
get_fsh_base_path,
get_fhir_version_from_sushi_config,
)
from .fshpath import FshPath
def get_config(base_path: Path):
    """
    Get the config file from the base path.

    :param base_path: The base path to the .fsh-validator.yml File.
    :return: Configuration dict; empty when no config file exists.
    """
    config_file = base_path / ".fsh-validator.yml"
    if not config_file.exists():
        return {}
    # BUG FIX: the original handed a bare open() handle to yaml.safe_load and
    # never closed it; use a context manager so the file is always released.
    with open(config_file) as f:
        return yaml.safe_load(f)
def main():
    """
    fsh-validator command line interface main.

    Parses the CLI arguments, ensures the java validator is available,
    optionally runs SUSHI, validates either all FSH files or the given ones,
    and exits with status 1 when any profile fails validation.

    :return: None
    """
    parser = argparse.ArgumentParser(
        description="Validate a fsh file",
        formatter_class=argparse.RawTextHelpFormatter,
    )
    # Kept so the mutual-constraint error below can reference this argument.
    arg_fname = parser.add_argument(
        "filename", help="fsh file names (basename only - no path)", nargs="*"
    )
    parser.add_argument(
        "--all",
        dest="all",
        action="store_true",
        help="if set, all detected profiles will be validated",
        required=False,
        default=False,
    )
    parser.add_argument(
        "--subdir",
        dest="subdir",
        type=str,
        help="Specifies the subdirectory (relative to input/fsh/) in which to search for profiles if --all is set",
        required=False,
        default="",
    )
    parser.add_argument(
        "--validator-path",
        dest="path_validator",
        type=str,
        help="path to validator",
        required=False,
        default=None,
    )
    parser.add_argument(
        "--verbose",
        dest="verbose",
        action="store_true",
        help="Be verbose",
        required=False,
        default=False,
    )
    parser.add_argument(
        "--no-sushi",
        dest="no_sushi",
        action="store_true",
        help="Do not run sushi before validating",
        required=False,
        default=False,
    )
    parser.add_argument(
        "--log-path",
        dest="log_path",
        type=str,
        help="log file path - if supplied, log files will be written",
        required=False,
        default=None,
    )
    args = parser.parse_args()
    # Either explicit filenames or --all must be given; --all without
    # filenames scans from the current working directory.
    if not args.all and len(args.filename) == 0:
        raise argparse.ArgumentError(
            arg_fname, "filename must be set if --all is not specified"
        )
    elif args.all and len(args.filename) == 0:
        # Use current working dir as input path
        filenames = [FshPath(os.getcwd())]
    else:
        filenames = [FshPath(filename) for filename in args.filename]
    # All inputs must belong to the same FSH project.
    base_paths = set(filename.fsh_base_path() for filename in filenames)
    if len(base_paths) > 1:
        raise ValueError(
            "Found multiple base paths for fsh project, expecting exactly one"
        )
    base_path = base_paths.pop()
    validator_path = (
        args.path_validator if args.path_validator is not None else base_path
    )
    fname_validator = Path(validator_path) / VALIDATOR_BASENAME
    # Fetch the java validator on first use.
    if not fname_validator.exists():
        print_box("Downloading java validator")
        download_validator(fname_validator.resolve())
    if not args.no_sushi:
        print_box("Running SUSHI")
        run_sushi(base_path)
    fhir_version = get_fhir_version_from_sushi_config(base_path)
    # Optional exclusions from .fsh-validator.yml.
    config = get_config(base_path)
    if "exclude_code_systems" in config:
        exclude_code_systems = set(config["exclude_code_systems"])
    else:
        exclude_code_systems = set()
    if "exclude_resource_type" in config:
        exclude_resource_types = set(config["exclude_resource_type"])
    else:
        exclude_resource_types = set()
    if args.all:
        print_box("Validating all FSH files")
        results = validate_all_fsh(
            base_path,
            args.subdir,
            str(fname_validator),
            exclude_code_systems=exclude_code_systems,
            exclude_resource_types=exclude_resource_types,
            fhir_version=fhir_version,
            verbose=args.verbose,
        )
    else:
        print_box("Validating FSH files")
        results = validate_fsh(
            filenames,
            str(fname_validator),
            fhir_version=fhir_version,
            exclude_code_systems=exclude_code_systems,
            exclude_resource_types=exclude_resource_types,
            verbose=args.verbose,
        )
    # Persist per-profile logs when requested.
    if args.log_path is not None:
        log_path = Path(args.log_path)
        if not log_path.exists():
            log_path.mkdir()
        store_log(results, log_path)
    # Exit status reflects overall validation success.
    if any([r.failed() for r in results]):
        print_box("Errors during profile validation", col=bcolors.FAIL)
        sys.exit(1)
    else:
        print_box("All profiles successfully validated", col=bcolors.OKGREEN)
        sys.exit(0)
if __name__ == "__main__":
    # Script entry point; the console_scripts wrapper calls main() directly.
    main()
| 26.671958
| 115
| 0.623686
| 598
| 5,041
| 5.031773
| 0.249164
| 0.034563
| 0.047856
| 0.033234
| 0.128946
| 0.128946
| 0.109671
| 0.050515
| 0.050515
| 0.050515
| 0
| 0.001381
| 0.281889
| 5,041
| 188
| 116
| 26.81383
| 0.829834
| 0.052767
| 0
| 0.255034
| 0
| 0.006711
| 0.175799
| 0.008885
| 0
| 0
| 0
| 0
| 0.006711
| 1
| 0.013423
| false
| 0
| 0.04698
| 0
| 0.073826
| 0.04698
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39ec0f238d1f205a702d9a13cc4aec9895df5afa
| 475
|
py
|
Python
|
core/util/read_input.py
|
SimoneABNto/Progetto_ASD_py
|
b415bcc3581121c5c39e044ac3fbb92420964e68
|
[
"MIT"
] | null | null | null |
core/util/read_input.py
|
SimoneABNto/Progetto_ASD_py
|
b415bcc3581121c5c39e044ac3fbb92420964e68
|
[
"MIT"
] | null | null | null |
core/util/read_input.py
|
SimoneABNto/Progetto_ASD_py
|
b415bcc3581121c5c39e044ac3fbb92420964e68
|
[
"MIT"
] | null | null | null |
def read_input():
    """Read one comma-separated line of numbers from stdin.

    Spaces are stripped, the line is split on ',', and the final character
    of the last field is dropped (the input is expected to end with a
    terminator character). Returns the values as floats, or [] on any
    parse error.
    """
    try:
        raw = input().replace(" ", "")  # take the input and remove the extra spaces
        fields = raw.split(",")
        # drop the trailing terminator character from the last field
        fields[-1] = fields[-1][:-1]
        return [float(field) for field in fields]
    except Exception as err:
        print(err)
        print('ERROR: bad input')
        return []
| 26.388889
| 85
| 0.555789
| 61
| 475
| 4.245902
| 0.57377
| 0.15444
| 0.084942
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009404
| 0.328421
| 475
| 17
| 86
| 27.941176
| 0.802508
| 0.223158
| 0
| 0
| 0
| 0
| 0.049315
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0
| 0
| 0.230769
| 0.153846
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39ef5804d073f8e1a8698f5b8f98bbb0a09926ef
| 7,170
|
py
|
Python
|
src/asit.py
|
6H057WH1P3/Asit
|
4dce80e3c4c05c4f56563110c59bae55e61aeaae
|
[
"MIT"
] | null | null | null |
src/asit.py
|
6H057WH1P3/Asit
|
4dce80e3c4c05c4f56563110c59bae55e61aeaae
|
[
"MIT"
] | 3
|
2015-09-16T17:54:13.000Z
|
2015-09-18T06:54:33.000Z
|
src/asit.py
|
6H057WH1P3/Asit
|
4dce80e3c4c05c4f56563110c59bae55e61aeaae
|
[
"MIT"
] | null | null | null |
import random
import time
import requests
class Account:
    """One Freewar account: can log in, train an ability, pick up oil, log out."""

    # C'tor
    def __init__(self, language, world, user, password, ability):
        """Store credentials and precompute the base URL/headers for the world.

        :param language: 'de' or 'en' (selects the .de / .com game servers)
        :param world: world number as a string (becomes part of the hostname)
        :param user: login name
        :param password: login password
        :param ability: id of the ability to train
        """
        # def standard class variables
        self.cookie = ""
        self.language = language
        self.world = world
        self.user = user
        self.password = password
        self.ability = ability
        # preparing header and basic url for get and post requests
        if language == "de":
            self.basic_url = "http://welt" + self.world + ".freewar.de/freewar/internal/"
            self.header = {"Host": "welt" + self.world + ".freewar.de", "Connection": "keep-alive", "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64)"}
        elif language == "en":
            self.basic_url = "http://world" + self.world + ".freewar.com/freewar/internal/"
            self.header = {"Host": "world" + self.world + ".freewar.com", "Connection": "keep-alive", "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64)"}

    def login(self):
        """POST the credentials and keep the session cookie for later requests."""
        print("\t[*] Logging in")
        login_url = self.basic_url + "index.php"
        # the submit button label is localized, so it differs per server
        if self.language == "de":
            login_submit = "Einloggen"
        elif self.language == "en":
            login_submit = "Login"
        # login payload / post parameters
        login_payload = {"name": self.user, "password": self.password, "submit": login_submit}
        # login request
        login_request = requests.post(login_url, data = login_payload, headers = self.header)
        # necessary for session management in other requests
        self.cookie = login_request.cookies
        print("\t[+] Login successful")
        return 0

    # necessary to access all other links in fw main window after login
    def redirect(self):
        """Fetch the frameset page; required before any other in-game request."""
        print("\t[*] Redirecting")
        redirect_url = self.basic_url + "frset.php"
        requests.get(redirect_url, headers = self.header, cookies = self.cookie)
        print("\t[+] Redirect successful")
        return 0

    # function to train characters abilities
    def train(self):
        """Trigger one training of the configured ability and print its level.

        :return: 0 on success, 1 when the level text cannot be found.
        """
        # the training sequence
        print("\t[*] Training")
        train_url = self.basic_url + "ability.php"
        train_payload = {"action": "train", "ability_id": self.ability}
        requests.get(train_url, params = train_payload, headers = self.header, cookies = self.cookie)
        print("\t[+] Training successful")
        # preparing for the training status request
        status_payload = {"action": "show_ability", "ability_id": self.ability}
        # requesting content of main frame
        status_request = requests.get(train_url, params = status_payload, headers = self.header, cookies = self.cookie)
        if self.language == "de":
            search_parameters = ["Aktuelle Stufe: ", "Maximale Stufe: "]
            # TODO: look up the exact text online
        elif self.language == "en":
            search_parameters = ["actual level: ", "maximal level: "]
        output = "\t[*] Actual level: "
        first = True
        # looking for search parameters in http response
        for search_text in search_parameters:
            # exception handling
            try:
                position = status_request.text.find(search_text)
                if (position == -1):
                    raise RuntimeError("Bad Request")
            except RuntimeError:
                print("\t[-] Could not found ability level.")
                return 1
            # TODO: continue here
            # Grab up to 3 characters after the label, then strip the HTML
            # tag fragments ('<', '/', 'b') that may trail the number.
            text_length = len(search_text)
            ability_level = status_request.text[position + text_length : position + text_length + 3]
            # getting a clean output
            ability_level = ability_level.strip("<")
            ability_level = ability_level.strip("/")
            ability_level = ability_level.strip("b")
            output += ability_level
            if first:
                first = False
                output += " / "
        print(output)
        return 0

    # function to pick up accounts oil if he's on the right field for that
    def oil(self):
        """Pick up the account's oil when standing on the right field.

        :return: 0 on success, 1 when no checkid is present in the page.
        """
        print("\t[*] Picking up oil")
        # requesting content of main frame
        main_url = self.basic_url + "main.php"
        main_request = requests.get(main_url, headers = self.header, cookies = self.cookie)
        # the checkid token is only present when oil can be picked up here
        try:
            position = main_request.text.find("checkid=")
            if (position == -1):
                raise RuntimeError("wrong position")
        except RuntimeError:
            print("\t[-] Oil isn't ready yet or account is on the wrong position.")
            return 1
        # picking up the oil (checkid is the 7 characters after 'checkid=')
        oil_url = self.basic_url + "main.php"
        oil_payload = {"arrive_eval": "drink", "checkid": main_request.text[position + 8 : position + 15]}
        requests.get(oil_url, params = oil_payload, headers = self.header, cookies = self.cookie)
        return 0

    # for a clean session
    def logout(self):
        """Invalidate the server-side session."""
        print("\t[*] Logging out")
        logout_url = self.basic_url + "logout.php"
        requests.get(logout_url, headers = self.header, cookies = self.cookie)
        print("\t[+] Logged out")
        return 0

    def automatic_sit(self):
        """Run the full login/redirect/train/oil/logout cycle.

        :return: 1 on any error, otherwise None.
        """
        try:
            self.login()
            self.redirect()
            self.train()
            self.oil()
            self.logout()
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt /
            # SystemExit; consider narrowing to requests' exceptions.
            print("[!] Connection Error.")
            return 1
class ManageAccounts:
    """Reads account credentials from a file and sits each account in turn.

    Credentials that share a world are never handled in the same round; the
    extras are deferred and retried after a random pause.
    """

    def __init__(self, account_path):
        """Load credentials from *account_path*.

        Each valid line holds five ', '-separated fields:
        language, world, user, password, ability. Other lines are ignored.
        """
        self.accounts = []
        self.later = []  # same-world credentials deferred to a later round
        # filling the list of credentials
        with open(account_path, "r") as account_file:
            for line in account_file:
                splitted_line = line.strip("\n").split(", ")
                if len(splitted_line) == 5:
                    self.accounts.append(splitted_line)

    def manage(self):
        """Handle every credential, one account per world per round.

        :return: 1 as soon as any account's automatic_sit() fails,
                 otherwise None once all credentials are processed.
        """
        while len(self.accounts) > 0:
            # Partition the batch: first credential per world runs now, the
            # rest wait. (BUG FIX: the original removed entries from
            # self.accounts while iterating over it, which skips elements.)
            run_now = []
            seen_worlds = set()
            for account in self.accounts:
                if account[1] in seen_worlds:
                    self.later.append(account)
                else:
                    seen_worlds.add(account[1])
                    run_now.append(account)
            for language, world, user, password, ability in run_now:
                print("\n[*] World: " + world + " Account: " + user + " Server: " + language)
                FWAccount = Account(language, world, user, password, ability)
                if FWAccount.automatic_sit():
                    return 1
            # writing memorized credentials back to be handled
            if len(self.later) > 0:
                random_time = random.randint(180, 300)
                print("[*] Wating " + str(random_time) + " Seconds to log other accounts savely.")
                time.sleep(random_time)
                # BUG FIX: the original did `self.accounts = self.later` and
                # then `self.later.clear()`, clearing BOTH lists (they were
                # the same object), so deferred accounts were never handled.
                self.accounts = self.later
                self.later = []
            else:
                self.accounts = []
| 39.61326
| 155
| 0.565969
| 791
| 7,170
| 5.026549
| 0.275601
| 0.0166
| 0.024145
| 0.022636
| 0.226107
| 0.123491
| 0.112425
| 0.080734
| 0.070674
| 0.049044
| 0
| 0.008287
| 0.326778
| 7,170
| 180
| 156
| 39.833333
| 0.815413
| 0.128452
| 0
| 0.152672
| 0
| 0
| 0.149518
| 0.009486
| 0
| 0
| 0
| 0.005556
| 0
| 1
| 0.068702
| false
| 0.038168
| 0.022901
| 0
| 0.175573
| 0.114504
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39f2718894e3565b21d9ad13de2638c2e9273b26
| 270
|
py
|
Python
|
euler_7_nth_prime.py
|
igorakkerman/euler-challenge
|
1fdedce439520fc31a2e5fb66abe23b6f99f04db
|
[
"MIT"
] | null | null | null |
euler_7_nth_prime.py
|
igorakkerman/euler-challenge
|
1fdedce439520fc31a2e5fb66abe23b6f99f04db
|
[
"MIT"
] | null | null | null |
euler_7_nth_prime.py
|
igorakkerman/euler-challenge
|
1fdedce439520fc31a2e5fb66abe23b6f99f04db
|
[
"MIT"
] | null | null | null |
# https://projecteuler.net/problem=7
import math
def sieve(xmax):
    """Return all primes <= xmax in ascending order.

    Classic Sieve of Eratosthenes on a boolean table: O(n log log n) and
    integer-only arithmetic. The original built set differences per divisor
    and used ``int(xmax / i)``, whose float division loses precision for
    very large xmax.
    """
    if xmax < 2:
        return []
    is_prime = bytearray([1]) * (xmax + 1)
    is_prime[0] = is_prime[1] = 0
    for i in range(2, math.isqrt(xmax) + 1):
        if is_prime[i]:
            # Cross off i*i, i*i+i, ... in one C-level slice assignment.
            count = (xmax - i * i) // i + 1
            is_prime[i * i::i] = bytearray(count)
    return [i for i in range(2, xmax + 1) if is_prime[i]]
print(sum(sieve(2000000)))
| 20.769231
| 57
| 0.533333
| 46
| 270
| 3.130435
| 0.543478
| 0.145833
| 0.166667
| 0.152778
| 0.222222
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0.069892
| 0.311111
| 270
| 12
| 58
| 22.5
| 0.704301
| 0.125926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.125
| 0
| 0.375
| 0.125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39f3a173967eb82662e3417309654bea4d1eda7a
| 3,066
|
py
|
Python
|
docker/ubuntu/16-04/ub_limonero/migrations/versions/32053847c4db_add_new_types.py
|
eubr-atmosphere/jenkins
|
a9065584d810238c6fa101d92d12c131d1d317cb
|
[
"Apache-2.0"
] | null | null | null |
docker/ubuntu/16-04/ub_limonero/migrations/versions/32053847c4db_add_new_types.py
|
eubr-atmosphere/jenkins
|
a9065584d810238c6fa101d92d12c131d1d317cb
|
[
"Apache-2.0"
] | null | null | null |
docker/ubuntu/16-04/ub_limonero/migrations/versions/32053847c4db_add_new_types.py
|
eubr-atmosphere/jenkins
|
a9065584d810238c6fa101d92d12c131d1d317cb
|
[
"Apache-2.0"
] | null | null | null |
"""Add new types
Revision ID: 32053847c4db
Revises: 05a62958a9cc
Create Date: 2019-06-11 10:36:14.456629
"""
from alembic import context
from sqlalchemy.orm import sessionmaker
# revision identifiers, used by Alembic.
revision = '32053847c4db'
down_revision = '05a62958a9cc'
branch_labels = None
depends_on = None
# Paired SQL statements: element [0] is executed by upgrade(), element [1]
# by downgrade(). Each pair widens an ENUM column's value set on upgrade and
# restores the previous set on downgrade.
all_commands = [
    (""" ALTER TABLE data_source CHANGE `format` `format` ENUM(
'CSV','CUSTOM','GEO_JSON','HAR_IMAGE_FOLDER','HDF5','DATA_FOLDER',
'IMAGE_FOLDER', 'JDBC','JSON','NETCDF4','PARQUET','PICKLE','SHAPEFILE',
'TAR_IMAGE_FOLDER','TEXT', 'VIDEO_FOLDER',
'UNKNOWN','XML_FILE') CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;""",
     """ ALTER TABLE data_source CHANGE `format` `format` ENUM(
'CSV','CUSTOM','GEO_JSON','HDF5','JDBC','JSON',
'NETCDF4','PARQUET','PICKLE','SHAPEFILE','TEXT',
'UNKNOWN','XML_FILE') CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;"""
     ),
    ("""
ALTER TABLE `storage` CHANGE `type` `type` ENUM(
'HDFS','OPHIDIA','ELASTIC_SEARCH','MONGODB','POSTGIS','HBASE',
'CASSANDRA','JDBC','LOCAL') CHARSET utf8 COLLATE
utf8_unicode_ci NOT NULL;""",
     """
ALTER TABLE `storage` CHANGE `type` `type` ENUM(
'HDFS','OPHIDIA','ELASTIC_SEARCH','MONGODB','POSTGIS','HBASE',
'CASSANDRA','JDBC') CHARSET utf8 COLLATE
utf8_unicode_ci NOT NULL;""",
     ),
    (
        """ALTER TABLE `model` CHANGE `type` `type` ENUM(
'KERAS','SPARK_ML_REGRESSION','SPARK_MLLIB_CLASSIFICATION',
'SPARK_ML_CLASSIFICATION','UNSPECIFIED')
CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL; """,
        """ALTER TABLE `model` CHANGE `type` `type` ENUM(
'KERAS','SPARK_ML_REGRESSION','SPARK_MLLIB_CLASSIFICATION',
'SPARK_ML_CLASSIFICATION','UNSPECIFIED')
CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL; """
    )
]
def upgrade():
    """Run the upgrade half (element 0) of every entry in ``all_commands``.

    Each element may be a single SQL string, a list of SQL strings, or a
    callable.  All commands run in one session; any failure rolls the
    whole migration back before re-raising.
    """
    ctx = context.get_context()
    session = sessionmaker(bind=ctx.bind)()
    connection = session.connection()
    try:
        for cmd in all_commands:
            # Fix: the original tested `isinstance(cmd[0], (unicode, str))`;
            # `unicode` does not exist on Python 3 and raised NameError
            # there.  Plain `str` covers every literal stored above.
            if isinstance(cmd[0], str):
                connection.execute(cmd[0])
            elif isinstance(cmd[0], list):
                for row in cmd[0]:
                    connection.execute(row)
            else:
                cmd[0]()
    except BaseException:
        # Explicit equivalent of the original bare `except:` -- even
        # KeyboardInterrupt triggers a rollback before re-raising.
        session.rollback()
        raise
    session.commit()
def downgrade():
    """Run the downgrade half (element 1) of every entry, newest first.

    Foreign-key checks are suspended around the ENUM narrowing and
    restored afterwards.
    """
    ctx = context.get_context()
    session = sessionmaker(bind=ctx.bind)()
    connection = session.connection()
    connection.execute('SET foreign_key_checks = 0;')
    try:
        for cmd in reversed(all_commands):
            # Fix: `unicode` removed -- it raised NameError on Python 3.
            if isinstance(cmd[1], str):
                connection.execute(cmd[1])
            elif isinstance(cmd[1], list):
                for row in cmd[1]:
                    connection.execute(row)
            else:
                cmd[1]()
    except BaseException:
        session.rollback()
        raise
    connection.execute('SET foreign_key_checks = 1;')
    session.commit()
| 32.967742
| 80
| 0.599152
| 338
| 3,066
| 5.284024
| 0.349112
| 0.033595
| 0.06047
| 0.073908
| 0.712206
| 0.602464
| 0.520717
| 0.520717
| 0.520717
| 0.520717
| 0
| 0.036219
| 0.261579
| 3,066
| 92
| 81
| 33.326087
| 0.75265
| 0.046314
| 0
| 0.366667
| 0
| 0
| 0.327264
| 0.129995
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033333
| false
| 0
| 0.033333
| 0
| 0.066667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
39f5a45cf3414a12f90b8d040d893593304736d0
| 2,836
|
py
|
Python
|
sets-master/sets-master/sets/utility.py
|
FedericoMolinaChavez/tesis-research
|
d77cc621d452c9ecf48d9ac80349b41aeb842412
|
[
"MIT"
] | null | null | null |
sets-master/sets-master/sets/utility.py
|
FedericoMolinaChavez/tesis-research
|
d77cc621d452c9ecf48d9ac80349b41aeb842412
|
[
"MIT"
] | 4
|
2021-03-09T20:33:57.000Z
|
2022-02-18T12:56:32.000Z
|
sets-master/sets-master/sets/utility.py
|
FedericoMolinaChavez/tesis-research
|
d77cc621d452c9ecf48d9ac80349b41aeb842412
|
[
"MIT"
] | null | null | null |
import os
import pickle
import functools
import errno
import shutil
from urllib.request import urlopen
#import definitions
def read_config(schema='data/schema.yaml', name='sets'):
    """Locate and parse the first available '.{name}rc' configuration file.

    Search order: current directory, the user's home directory, then the
    path named by the ${NAME}_CONFIG environment variable.  Falls back to
    parsing the empty document '{}' when no file is found.

    NOTE(review): this relies on `definitions.Parser`, but the module-level
    `import definitions` above is commented out -- as written, calling this
    function raises NameError at the `parser = ...` line.  Re-enable the
    import (or inject a parser) before using it.
    """
    filename = '.{}rc'.format(name)
    paths = [
        os.path.join(os.curdir, filename),
        os.path.expanduser(os.path.join('~', filename)),
        # May be None when the env var is unset; filtered by `if path` below.
        os.environ.get('{}_CONFIG'.format(name.upper())),
    ]
    # Schema path is resolved relative to this module, not the CWD.
    schema = os.path.join(os.path.dirname(__file__), schema)
    parser = definitions.Parser(schema)
    for path in paths:
        if path and os.path.isfile(path):
            return parser(path)
    return parser('{}')
def disk_cache(basename, directory, method=False):
    """
    Function decorator for caching pickleable return values on disk. Uses a
    digest computed from the function arguments for invalidation. If
    'method', skip the first argument, usually being self or cls. The cache
    filepath is 'directory/basename-digest.pickle'.
    """
    directory = os.path.expanduser(directory)
    os.makedirs(directory, exist_ok=True)
    def wrapper(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            key = (tuple(args), tuple(kwargs.items()))
            # Don't use self or cls for the invalidation digest.
            # Fix: the original `key = key[1:]` discarded the whole args
            # tuple (key[0]), not just the first argument.
            if method and key[0]:
                key = (key[0][1:], key[1])
            # Fix: built-in hash() is randomized per interpreter process
            # (PYTHONHASHSEED), so entries written by one run were never
            # found by the next -- useless for an on-disk cache.  A repr
            # based sha1 digest is stable across processes.
            digest = hashlib.sha1(repr(key).encode('utf-8')).hexdigest()
            filename = '{}-{}.pickle'.format(basename, digest)
            filepath = os.path.join(directory, filename)
            if os.path.isfile(filepath):
                with open(filepath, 'rb') as handle:
                    return pickle.load(handle)
            result = func(*args, **kwargs)
            with open(filepath, 'wb') as handle:
                pickle.dump(result, handle)
            return result
        return wrapped
    return wrapper
def download(url, directory, filename=None):
    """
    Download *url* into *directory* and return the local filepath.

    An already-present file is reused without contacting the network.
    When *filename* is omitted it is taken from the last path segment of
    the url.
    """
    if not filename:
        _, filename = os.path.split(url)
    directory = os.path.expanduser(directory)
    # Create the target directory if needed (tolerating pre-existence).
    try:
        os.makedirs(directory)
    except OSError as err:
        if err.errno != errno.EEXIST:
            raise err
    filepath = os.path.join(directory, filename)
    if os.path.isfile(filepath):
        return filepath
    print('Download', filepath)
    with urlopen(url) as response, open(filepath, 'wb') as sink:
        shutil.copyfileobj(response, sink)
    return filepath
def ensure_directory(directory):
    """
    Create *directory* and any missing parents, expanding a leading '~'.

    A path that already exists is tolerated silently; every other OSError
    propagates to the caller.
    """
    expanded = os.path.expanduser(directory)
    try:
        os.makedirs(expanded)
    except OSError as err:
        if err.errno != errno.EEXIST:
            raise err
| 35.012346
| 80
| 0.619182
| 340
| 2,836
| 5.126471
| 0.364706
| 0.048193
| 0.028686
| 0.043029
| 0.151463
| 0.131956
| 0.131956
| 0.131956
| 0.065404
| 0.065404
| 0
| 0.000489
| 0.278209
| 2,836
| 80
| 81
| 35.45
| 0.851001
| 0.211566
| 0
| 0.189655
| 0
| 0
| 0.030158
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.103448
| false
| 0
| 0.103448
| 0
| 0.344828
| 0.017241
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2cdba45917fad7ff9ab33f608fa9dbb603aec4b
| 1,984
|
py
|
Python
|
src/test_fps.py
|
pjenpoomjai/tfpose-herokuNEW
|
7d1085a3fcb02c0f6d16ed7f2cf1ad8daff103ea
|
[
"Apache-2.0"
] | null | null | null |
src/test_fps.py
|
pjenpoomjai/tfpose-herokuNEW
|
7d1085a3fcb02c0f6d16ed7f2cf1ad8daff103ea
|
[
"Apache-2.0"
] | null | null | null |
src/test_fps.py
|
pjenpoomjai/tfpose-herokuNEW
|
7d1085a3fcb02c0f6d16ed7f2cf1ad8daff103ea
|
[
"Apache-2.0"
] | null | null | null |
# Live-webcam motion detection demo: MOG2 background subtraction, then one
# merged bounding box drawn around all moving contours.  Press 'q' to quit.
import cv2
import time
import numpy as np
import imutils
camera= 0
cam = cv2.VideoCapture(camera)
# varThreshold=0 marks even tiny intensity changes as foreground.
fgbg = cv2.createBackgroundSubtractorMOG2(history=1000,varThreshold=0,detectShadows=False)
width=600
height=480
fps_time = 0
while True:
    ret_val,image = cam.read()
    image = cv2.resize(image,(width,height))
    # Blur before subtraction to suppress per-pixel sensor noise.
    image = cv2.GaussianBlur(image, (5, 5), 0)
    fgmask = fgbg.apply(image)
    # image = fgbg.apply(image,learningRate=0.001)
    # image = imutils.resize(image, width=500)
    # gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    cnts = cv2.findContours(fgmask.copy(), cv2.RETR_EXTERNAL,
                            cv2.CHAIN_APPROX_SIMPLE)
    # NOTE(review): this only distinguishes OpenCV 2 from 3; OpenCV >= 4
    # returns a 2-tuple again where cnts[1] is the *hierarchy*, not the
    # contour list -- confirm the installed cv2 version before relying on it.
    cnts = cnts[0] if imutils.is_cv2() else cnts[1]
    # loop over the contours; -1 means "no contour seen yet"
    x_left = -1
    y_left = -1
    x_right = -1
    y_right = -1
    for c in cnts:
        # if the contour is too small, ignore it
        # if cv2.contourArea(c) > 500:
        # continue
        # compute the bounding box for the contour and grow the merged
        # box (x_left, y_left) - (x_right, y_right) to include it
        (x, y, w, h) = cv2.boundingRect(c)
        if x_left ==-1 :
            x_left = x
            y_left = y
        if x < x_left:
            x_left = x
        if y < y_left:
            y_left = y
        if x+w > x_right:
            x_right = x+w
        if y+h > y_right:
            y_right = y+h
        # cv2.rectangle(image, (x, y), (x+w, y+h), (255, 0, 0), 2)
    # Skip drawing when the merged box covers the entire frame (a global
    # change such as a lighting shift, not localized motion).
    if (x_left==0 and y_left==0 and x_right==width and y_right==height)==False:
        cv2.rectangle(image, (x_left, y_left), (x_right, y_right), (0, 255, 0), 2)
    # cv2.putText(image,
    # "FPS: %f [press 'q'to quit]" % (1.0 / (time.time() - fps_time)),
    # (10, 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5,
    # (0, 255, 0), 2)
    cv2.imshow('tf-pose-estimation result',fgmask)
    cv2.imshow('tf-pose-estimation result2',image)
    fps_time = time.time()
    if cv2.waitKey(1)==ord('q'):
        cam.release()
        cv2.destroyAllWindows()
        break
| 28.342857
| 90
| 0.579133
| 298
| 1,984
| 3.741611
| 0.35906
| 0.03139
| 0.016144
| 0.01435
| 0.074439
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060777
| 0.286794
| 1,984
| 69
| 91
| 28.753623
| 0.727208
| 0.285786
| 0
| 0.090909
| 0
| 0
| 0.03709
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2ce254695f631034aa335be9147cb99e06d1cfc
| 999
|
py
|
Python
|
Python/367.ValidPerfectSquare.py
|
nizD/LeetCode-Solutions
|
7f4ca37bab795e0d6f9bfd9148a8fe3b62aa5349
|
[
"MIT"
] | 263
|
2020-10-05T18:47:29.000Z
|
2022-03-31T19:44:46.000Z
|
Python/367.ValidPerfectSquare.py
|
nizD/LeetCode-Solutions
|
7f4ca37bab795e0d6f9bfd9148a8fe3b62aa5349
|
[
"MIT"
] | 1,264
|
2020-10-05T18:13:05.000Z
|
2022-03-31T23:16:35.000Z
|
Python/367.ValidPerfectSquare.py
|
nizD/LeetCode-Solutions
|
7f4ca37bab795e0d6f9bfd9148a8fe3b62aa5349
|
[
"MIT"
] | 760
|
2020-10-05T18:22:51.000Z
|
2022-03-29T06:06:20.000Z
|
# Given a positive integer num, write a function which returns True if num
# is a perfect square else False.
class Solution(object):
    def isPerfectSquare(self, num):
        """Binary-search for an integer whose square equals num.

        Searches candidates in [0, num]; O(log num) time, O(1) space.
        """
        lo, hi = 0, num
        while lo <= hi:
            candidate = (lo + hi) // 2
            square = candidate * candidate
            if square == num:
                return True
            # Narrow the half that can still contain the exact root.
            if square < num:
                lo = candidate + 1
            else:
                hi = candidate - 1
        return False
| 47.571429
| 140
| 0.617618
| 146
| 999
| 4.226027
| 0.458904
| 0.084279
| 0.048622
| 0.077796
| 0.158833
| 0.094003
| 0.094003
| 0.094003
| 0
| 0
| 0
| 0.006033
| 0.336336
| 999
| 20
| 141
| 49.95
| 0.924585
| 0.581582
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0
| 0
| 0.307692
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2d5d419d88204df9613b1050b9f75f4f36ef80c
| 20,923
|
py
|
Python
|
naspi/naspi.py
|
fgiroult321/simple-nas-pi
|
6d1a13523f1f20ebe26f780c758a3ff15be899ff
|
[
"MIT"
] | null | null | null |
naspi/naspi.py
|
fgiroult321/simple-nas-pi
|
6d1a13523f1f20ebe26f780c758a3ff15be899ff
|
[
"MIT"
] | null | null | null |
naspi/naspi.py
|
fgiroult321/simple-nas-pi
|
6d1a13523f1f20ebe26f780c758a3ff15be899ff
|
[
"MIT"
] | null | null | null |
import os
import boto3
# import subprocess
from subprocess import Popen, PIPE
from time import sleep
import json
import ast
from datetime import datetime, time, timedelta, date
import logging
import logging.handlers
import sys, getopt
import glob
import shutil
# Module-level root-logger bootstrap; main() later attaches a rotating file
# handler and re-binds `logger` to the "naspi.<module>" child logger.
logger = logging.getLogger()
logger.setLevel(logging.INFO)
def main():
    """CLI entry point: parse -m/--mode and -c/--config, then dispatch.

    Valid modes: system | analyze | sync | syncdelete | synclocal | syncs3 |
    backup | osbackup | init_config.  Exits with status 2 on usage errors.
    """
    ### Order of tasks
    # # 0 check disks are here, catch output
    # # 1 sync to replica disk, catch output
    # # 2 sync to aws, catch output
    # # 3 compare disks files vs replica, catch output
    # # 4 compare disks files vs s3, catch output
    # # # Run option
    # -l, --system : only analyze_disks & get_server_metrics , every 5m
    # -a, --analyze : analyze_s3_files & analyze_local_files, every 1 or 3 hours
    # -s, --sync : run_s3_syncs & run_local_syncs, every night
    # -d, --syncdelete : run_s3_syncs & run_local_syncs with delete no cron
    #### exception handling in logger:
    sys.excepthook = handle_exception
    valid_modes = ["system","analyze","sync","syncdelete","synclocal","syncs3","backup","osbackup","init_config"]
    mode = ''
    config = ''
    usage_message = 'naspi -c /path/to/config.json -m <system|analyze|sync|syncdelete|synclocal|syncs3|backup|osbackup|init_config>'
    try:
        opts, args = getopt.getopt(sys.argv[1:],"hm:c:",["mode=","config="])
    # except getopt.GetoptError:
    except Exception as e:
        print(usage_message)
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print(usage_message)
            sys.exit()
        elif opt in ("-m", "--mode"):
            mode = arg
        elif opt in ("-c", "--config"):
            config = arg
    # # # checking values passed
    if not mode:
        print("Error, mode is mandatory !!")
        print(usage_message)
        sys.exit(2)
    elif not config:
        print("Error, config file is mandatory !!")
        print(usage_message)
        sys.exit(2)
    elif mode not in valid_modes:
        print("Wrong mode selected, correct modes are : {}".format(valid_modes))
        print(usage_message)
        sys.exit(2)
    # logger.info("Context info : ")
    # logger.info(os.getcwd())
    # logger.info(__file__)
    if mode == "init_config":
        # init_config only writes the skeleton file; no logging setup needed.
        output = init_config_file(config)
        sys.exit(0)
    else:
        #### Configuration loading
        disks_list,folder_to_sync_locally,folders_to_sync_s3,configuration = load_configuration(config)
        # Retention/throttle settings are published as module globals so the
        # helper functions can read them without being passed explicitly.
        global NUMBER_DAYS_RETENTION
        global MIN_DELAY_BETWEEN_SYNCS_SECONDS
        global working_dir
        NUMBER_DAYS_RETENTION = configuration.get('NUMBER_DAYS_RETENTION')
        MIN_DELAY_BETWEEN_SYNCS_SECONDS = configuration.get('MIN_DELAY_BETWEEN_SYNCS_SECONDS')
        working_dir = configuration.get('working_dir')
        home_dir = os.environ['HOME']
        global export_path_cmd
        # Prefixed to shell commands so pip-installed CLIs (aws) are found.
        export_path_cmd = 'export PATH={}/.local/bin:$PATH'.format(home_dir)
        ### Logging setup
        # Change root logger level from WARNING (default) to NOTSET in order for all messages to be delegated.
        logging.getLogger('').setLevel(logging.NOTSET)
        # Add file rotating handler, with level INFO
        rotatingHandler = logging.handlers.RotatingFileHandler(filename='{}/nas_monitor.log'.format(working_dir), maxBytes=1000000, backupCount=5)
        rotatingHandler.setLevel(logging.INFO)
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        rotatingHandler.setFormatter(formatter)
        logging.getLogger('').addHandler(rotatingHandler)
        # Re-bind the module-level logger now that handlers exist.
        global logger
        logger = logging.getLogger("naspi." + __name__)
        logger.info("")
        logger.info("")
        logger.info("----------------------------------------------------------------------------------------")
        logger.info("----------------------------------------------------------------------------------------")
        logger.info("### Starting Nas Monitor")
        logger.info('Mode is {} and config file is {}'.format(mode,config))
        output = open_or_init_output_file(working_dir)
        if mode == "backup":
            output = backup_naspi(configuration['backup'],output)
        if mode == "osbackup":
            output = os_backup(configuration['backup'],output)
        if mode == "system":
            output = analyze_disks(disks_list,output)
            output = get_server_metrics(output)
        if mode == "synclocal":
            output = analyze_local_files(folder_to_sync_locally, output)
            output = run_local_syncs(folder_to_sync_locally,configuration,output)
            output = analyze_local_files(folder_to_sync_locally, output)
            # File stored to s3 once per hour like local sync (TODO can be improved with a dedicated mode and cron)
            res_s3 = write_and_cleanup_output_file_to_s3(output,'archive-fgi')
        if mode == "syncs3":
            output = analyze_s3_files(folders_to_sync_s3, output)
            output = run_s3_syncs(folders_to_sync_s3,configuration,output)
            output = analyze_s3_files(folders_to_sync_s3, output)
        if mode == "sync":
            output = run_s3_syncs(folders_to_sync_s3,configuration,output)
            output = run_local_syncs(folder_to_sync_locally,configuration,output)
        if mode == "analyze" or mode == "sync":
            output = analyze_s3_files(folders_to_sync_s3, output)
            output = analyze_local_files(folder_to_sync_locally, output)
        result = write_and_cleanup_output_file(output,configuration)
        # res_s3 = write_and_cleanup_output_file_to_s3(output,'archive-fgi')
        logger.info(json.dumps(output))
####
#### function defs
####
def handle_exception(etype, evalue, etb):
    """sys.excepthook replacement: route uncaught exceptions to the log.

    KeyboardInterrupt keeps the interpreter's default behaviour so Ctrl-C
    still prints the usual traceback instead of being logged.
    """
    if issubclass(etype, KeyboardInterrupt):
        sys.__excepthook__(etype, evalue, etb)
        return
    logger.error("Uncaught exception", exc_info=(etype, evalue, etb))
def load_configuration(conf_file):
    """Load the JSON config file and return its four sections.

    Returns the tuple (disks_list, folder_to_sync_locally,
    folders_to_sync_s3, naspi_configuration).  Prints a hint and re-raises
    when the file is missing; a KeyError propagates when a section is
    absent.
    """
    try:
        # `with` closes the handle even when the JSON is malformed
        # (the original open/read/close leaked it in that case).
        with open(conf_file, "r") as handle:
            dict_conf = json.load(handle)
    except FileNotFoundError as e:
        print("Conf file not found, provide a file named {}".format(conf_file))
        raise(e)
    return (
        dict_conf['disks_list'],
        dict_conf['folder_to_sync_locally'],
        dict_conf['folders_to_sync_s3'],
        dict_conf['naspi_configuration'],
    )
def today_time():
    """Current local timestamp formatted as 'YYYY-MM-DD HH:MM:SS'."""
    return datetime.today().strftime("%Y-%m-%d %H:%M:%S")
def today_date():
    """Current local date formatted as 'YYYY-MM-DD'."""
    return datetime.today().strftime("%Y-%m-%d")
def date_diff_in_seconds(dt2, dt1):
    """Whole seconds elapsed from dt1 to dt2 (sub-second part discarded)."""
    delta = dt2 - dt1
    return delta.days * 86400 + delta.seconds
def run_shell_command(command):
    """Run *command* through the shell and return (returncode, message).

    On success (returncode 0) *message* is the decoded stdout; on failure
    it is the decoded stderr.  Both are logged either way.

    NOTE: shell=True means *command* must never contain untrusted input.
    """
    logger.info("### Running {}".format(command))
    proc = Popen(command,
                 shell=True,
                 stdout=PIPE,
                 stderr=PIPE
                 )
    # Fix: communicate() drains both pipes while waiting, so a command
    # producing more output than the pipe buffer can no longer deadlock
    # (the old poll/sleep loop never read the pipes while polling), and
    # completion is detected immediately instead of on a 10 s tick.
    stdout_data, stderr_data = proc.communicate()
    retcode = proc.returncode
    if retcode != 0:
        """Error handling."""
        logger.info("### Error !")
        message = stderr_data.decode("utf-8")
        logger.info(retcode)
        logger.info(message)
        return (retcode, message)
    message = stdout_data.decode("utf-8")
    logger.info(retcode)
    logger.info(message)
    return (retcode, message)
def open_or_init_output_file(working_dir):
    """Load today's status JSON or build a fresh skeleton when absent.

    Returns a dict with the 'disks', 'local_sync', 's3_sync' and 'server'
    sections expected by the analyze/sync routines.
    """
    today = today_date()
    try:
        # `with` guarantees the handle closes even on malformed JSON
        # (the original open/read/close leaked it in that case).
        with open("{}/naspi_status_{}.json".format(working_dir, today), "r") as handle:
            dict_output = json.load(handle)
    except FileNotFoundError:
        logger.info("File for today does not exist, initializing it")
        dict_output = {}
        dict_output['disks'] = {}
        dict_output['disks']['disk-list'] = []
        dict_output['local_sync'] = {}
        dict_output['local_sync']['success'] = True
        dict_output['s3_sync'] = {}
        dict_output['s3_sync']['success'] = True
        dict_output['server'] = {}
    return (dict_output)
def init_config_file(file_name):
    """Write a skeleton naspi JSON configuration to *file_name*.

    Never overwrites: exits with status 2 when the file already exists.
    Returns "ok" on success.
    """
    print("initializing config file {}".format(file_name))
    if os.path.exists(file_name):
        print("Error, config file {} already exists !!".format(file_name))
        sys.exit(2)
    # One literal instead of the incremental key-by-key build-up; the
    # resulting JSON document is identical.
    dict_conf = {
        'disks_list': [],
        'folder_to_sync_locally': [],
        'folders_to_sync_s3': [],
        'naspi_configuration': {
            'working_dir': "",
            'NUMBER_DAYS_RETENTION': 7,
            'MIN_DELAY_BETWEEN_SYNCS_SECONDS': 14400,
            'backup': {
                'files_to_backup': [],
                'backup_location': "",
                'os_backup_location': "",
            },
        },
    }
    # `with` closes the handle even if serialization fails mid-write.
    with open("{}".format(file_name), "w") as handle:
        handle.write(json.dumps(dict_conf, indent=4))
    return ("ok")
def write_and_cleanup_output_file_to_s3(output, bucket):
    """Upload today's status dict as JSON to s3://<bucket>/status/...

    Returns the raw put_object response.  Region is fixed to eu-west-1.
    """
    client = boto3.client('s3', region_name='eu-west-1')
    object_key = "status/naspi_status_{}.json".format(today_date())
    return client.put_object(Body=json.dumps(output),
                             Bucket=bucket,
                             Key=object_key)
def write_and_cleanup_output_file(output, configuration):
    """Write today's status JSON into working_dir and prune old files.

    Keeps the NUMBER_DAYS_RETENTION most recent naspi_status_*.json files;
    lexicographic sort matches chronological order for the YYYY-MM-DD
    suffix.  Returns "done".
    """
    NUMBER_DAYS_RETENTION = configuration.get('NUMBER_DAYS_RETENTION')
    working_dir = configuration.get('working_dir')
    today = today_date()
    # `with` closes the handle even when the dump fails mid-write.
    with open("{}/naspi_status_{}.json".format(working_dir, today), "w") as handle:
        handle.write(json.dumps(output, indent=4))
    existing_output_files = glob.glob('{}/naspi_status_*.json'.format(working_dir))
    existing_output_files.sort()
    # NOTE(review): a missing NUMBER_DAYS_RETENTION key yields None here
    # and would make the slice below raise TypeError -- confirm the config
    # always provides it.
    for out_file in existing_output_files:
        if out_file not in existing_output_files[-NUMBER_DAYS_RETENTION:]:
            logger.info("Deleting {}".format(out_file))
            os.remove(out_file)
    return ("done")
def analyze_disks(disks_list, output):
    """Check each expected disk against `df` output; record presence/usage.

    Fills output['disks'] with a per-disk entry and an aggregate
    all_disks_ok flag, then timestamps the run.
    """
    disks_section = output['disks']
    disks_section['all_disks_ok'] = True
    disks_section['disk-list'] = []
    _, df_text = run_shell_command('df -kh | tail -n +2')
    missing_any = False
    for disk in disks_list:
        entry = {'name': disk}
        if disk in df_text:
            logger.info("### disk {} is here".format(disk))
            # The usage percentage sits just before the mount point in the
            # df line, hence the last-4-characters slice.
            usage = df_text.split(disk)[0][-4:]
            logger.info("### usage : {}".format(usage))
            entry['occupied_%'] = usage
            entry['present'] = True
        else:
            logger.info("### disk {} not here".format(disk))
            missing_any = True
            entry['occupied_%'] = "NA"
            entry['present'] = False
        disks_section['disk-list'].append(entry)
    if missing_any:
        logger.info("### some disks are missing")
        disks_section['all_disks_ok'] = False
    disks_section['last_run'] = today_time()
    return (output)
def acquire_sync_lock(output, local_or_s3, configuration):
    """Best-effort mutual exclusion for sync runs, persisted in the status file.

    Returns (can_run, output).  A held lock is treated as stale -- and is
    re-acquired -- once MIN_DELAY_BETWEEN_SYNCS_SECONDS have elapsed since
    'last_started', so a crashed run cannot block syncs forever.

    NOTE(review): check-then-write is not atomic; two processes starting at
    the same moment can both acquire the lock.
    """
    # Make sure only one sync process runs at a time
    can_run = True
    MIN_DELAY_BETWEEN_SYNCS_SECONDS = configuration.get('MIN_DELAY_BETWEEN_SYNCS_SECONDS')
    if 'last_started' in output[local_or_s3]:
        started_time = datetime.strptime(output[local_or_s3]['last_started'], '%Y-%m-%d %H:%M:%S')
    else:
        # Arbitrary date far in the past so a fresh status file never
        # looks recently-started.
        started_time = datetime.strptime('2020-12-25 12:00:00', '%Y-%m-%d %H:%M:%S')
    now_time = datetime.now()
    logger.info(" %d seconds from previous run" %(date_diff_in_seconds(now_time, started_time)))
    if 'locked' in output[local_or_s3] and output[local_or_s3]['locked'] == True and date_diff_in_seconds(now_time, started_time) < MIN_DELAY_BETWEEN_SYNCS_SECONDS:
        logger.info("Can't run sync as another process might be running")
        can_run = False
    else:
        logger.info("Acquiring lock for {}".format(local_or_s3))
        output[local_or_s3]['locked'] = True
        output[local_or_s3]['last_started'] = today_time()
        logger.info(output)
        # Acquire lock and write it to disk *before* returning, so a
        # concurrent process started afterwards sees the lock.
        result = write_and_cleanup_output_file(output,configuration)
    return(can_run,output)
def run_s3_syncs(folders_to_sync_s3, configuration, output):
    """`aws s3 sync` every configured folder pair under the shared sync lock.

    Objects are stored in DEEP_ARCHIVE; configured 'exclude' sub-folders
    are skipped.  Records success/last_run and releases the lock.
    """
    can_run, output = acquire_sync_lock(output, 's3_sync', configuration)
    if not can_run:
        logger.info("/!\ Cant run the sync, there is a sync process ongoing")
        return (output)
    success = True
    for folder in folders_to_sync_s3:
        exclusions_flags = ''
        for exclusion in folder.get('exclude', []):
            exclusions_flags += ' --exclude "{}/*" '.format(exclusion)
        command = 'aws s3 sync {} {} {} --storage-class DEEP_ARCHIVE --only-show-errors'.format(
            folder['source_folder'], folder['dest_folder'], exclusions_flags)
        # export_path_cmd makes the pip-installed aws CLI reachable.
        ret, msg = run_shell_command('{}; {}'.format(export_path_cmd, command))
        if ret != 0:
            success = False
    output['s3_sync']['success'] = success
    output['s3_sync']['last_run'] = today_time()
    output['s3_sync']['locked'] = False
    return (output)
def count_files_in_dir(folder, exclude_list):
    """Recursively count files under *folder*.

    Directory *names* listed in *exclude_list* are pruned from the walk
    (together with everything beneath them).
    """
    skipped_names = set(exclude_list)
    total = 0
    for _dirpath, dirnames, filenames in os.walk(folder):
        # In-place pruning makes os.walk skip the excluded subtrees.
        dirnames[:] = [name for name in dirnames if name not in skipped_names]
        total += len(filenames)
    logger.info("Files in {} : {}".format(folder, total))
    return (total)
def analyze_s3_files(folders_to_sync_s3, output):
    """Count files locally and in S3 for each sync pair; record the delta.

    Populates output['s3_sync'] with per-folder source/dest counts plus
    aggregate files_source, files_dest and files_delta.
    """
    output['s3_sync']['files_source'] = 0
    output['s3_sync']['files_dest'] = 0
    output['s3_sync']['folders'] = []
    for folder in folders_to_sync_s3:
        one_folder = {}
        one_folder['source_folder'] = folder['source_folder']
        # Get local files count
        if 'exclude' in folder:
            exclude_directories = set(folder['exclude']) # directory (only names) to exclude
        else:
            exclude_directories = []
        total_file = 0
        for dname, dirs, files in os.walk(folder['source_folder']): # walks directories recursively
            dirs[:] = [d for d in dirs if d not in exclude_directories] # prune excluded directories in place
            # print(len(files))
            total_file += len(files)
        logger.info("Files in {} : {}".format(folder['source_folder'],total_file))
        one_folder['source_count'] = total_file
        output['s3_sync']['files_source'] += total_file
        # Get s3 files count
        # NOTE(review): parses the human-readable "Total Objects: N" line of
        # `aws s3 ls --summarize`; this raises (IndexError/ValueError) if the
        # command fails or the CLI output format changes.
        ret,msg = run_shell_command('{}; aws s3 ls {} --recursive --summarize | grep "Total Objects"'.format(export_path_cmd,folder['dest_folder']))
        output['s3_sync']['files_dest'] += int(msg.split(': ')[1])
        one_folder['dest_folder'] = folder['dest_folder']
        one_folder['dest_count'] = int(msg.split(': ')[1])
        output['s3_sync']['folders'].append(one_folder)
    output['s3_sync']['files_delta'] = output['s3_sync']['files_source'] - output['s3_sync']['files_dest']
    logger.info("Analyze s3 file output : {}".format(json.dumps(output)))
    return(output)
def run_local_syncs(folder_to_sync_locally, configuration, output):
    """rsync every configured local folder pair under the shared sync lock.

    rsync places the source directory *inside* the destination
    (dest/<source-basename>); '--delete' is added per-folder when its
    config asks for mirroring.
    """
    # rsync -anv dir1 dir2  (n = dryrun, v = verbose)
    can_run, output = acquire_sync_lock(output, 'local_sync', configuration)
    if not can_run:
        logger.info("/!\ Cant run the sync, there is a sync process ongoing")
        return (output)
    success = True
    for folder in folder_to_sync_locally:
        delete_flag = "--delete" if folder['delete'] else ""
        ret, msg = run_shell_command('mkdir -p {}'.format(folder['dest_folder']))
        ret, msg = run_shell_command('rsync -aq {} {} {}'.format(folder['source_folder'], folder['dest_folder'], delete_flag))
        if ret != 0:
            success = False
    output['local_sync']['success'] = success
    output['local_sync']['last_run'] = today_time()
    output['local_sync']['locked'] = False
    return (output)
def analyze_local_files(folder_to_sync_locally, output):
    """Count source vs destination files for every local sync pair.

    Populates output['local_sync'] with per-folder stats and the aggregate
    files_source / files_dest / files_delta values.
    """
    section = output['local_sync']
    section['files_source'] = 0
    section['files_dest'] = 0
    section['folders'] = []
    for folder in folder_to_sync_locally:
        stats = {'source_folder': folder['source_folder']}
        src_count = count_files_in_dir(folder['source_folder'], [''])
        section['files_source'] += src_count
        stats['source_count'] = src_count
        # rsync copies the source into dest/<basename-of-source>.
        replica = "{}/{}".format(folder['dest_folder'], folder['source_folder'].split("/")[-1])
        stats['dest_folder'] = replica
        dest_count = count_files_in_dir(replica, [''])
        section['files_dest'] += dest_count
        stats['dest_count'] = dest_count
        section['folders'].append(stats)
    section['files_delta'] = section['files_source'] - section['files_dest']
    logger.info("Analyze local file output : {}".format(json.dumps(output)))
    return (output)
def get_server_metrics(output):
    """Sample CPU %, free RAM (MB) and SoC temperature via shell one-liners.

    Values are stored as the raw text slices the pipelines produce;
    `vcgencmd` is Raspberry-Pi specific.
    """
    probes = (
        ('cpu_%', 'top -bn 1 | grep Cpu | head -c 14 | tail -c 5'),
        ('ram_Mo', 'free -m | grep Mem | head -c 32 | tail -c 5'),
        ('temp_c', 'vcgencmd measure_temp | head -c 11 | tail -c 6'),
    )
    for key, probe in probes:
        _ret, value = run_shell_command(probe)
        output['server'][key] = value
    output['server']['last_run'] = today_time()
    return (output)
def backup_naspi(backup, output):
    """rsync the configured files/directories into a dated backup folder.

    The target is <backup_location><YYYY-MM-DD>; only the 10 most recent
    dated folders are kept afterwards.
    """
    backup_location = backup.get('backup_location')
    backup_dir = "{}{}".format(backup_location, today_date())
    run_shell_command('mkdir -p {}'.format(backup_dir))
    for entry in backup.get("files_to_backup"):
        if os.path.isdir(entry):
            # -R recreates the full source path under backup_dir.
            run_shell_command('rsync -aqR {} {}'.format(entry, backup_dir))
        else:
            parent = entry.rsplit('/', 1)[0]
            run_shell_command('mkdir -p {}{}'.format(backup_dir, parent))
            run_shell_command('rsync -aq {} {}{}'.format(entry, backup_dir, entry))
    # old bkp cleanup: drop everything but the 10 newest dated directories
    dated_dirs = glob.glob('{}/*'.format(backup_location))
    dated_dirs.sort()
    survivors = dated_dirs[-10:]
    for candidate in dated_dirs:
        if candidate not in survivors:
            print("Deleting {}".format(candidate))
            shutil.rmtree(candidate, ignore_errors=True)
    return (output)
def os_backup(backup, output):
    """Image the SD card with dd, then shrink the image with PiShrink.

    WARNING: runs `sudo dd if=/dev/mmcblk0` -- the device path, sudo
    rights and the `pi` user are assumed.  Keeps only the 4 most recent
    entries in os_backup_location.
    """
    os_backup_location = backup.get('os_backup_location')
    backup_name = "osbkp-{}.img".format(today_date())
    # sudo dd if=/dev/mmcblk0 of=/disks/Elements/os_bkp/osbkp18082021.img bs=1M
    # sudo ./pishrink.sh -z osbkp18082021.img
    ret,msg = run_shell_command('sudo dd if=/dev/mmcblk0 of={}/{} bs=1M'.format(os_backup_location,backup_name))
    # Fetch pishrink.sh once and cache it in working_dir.
    if not os.path.exists("{}/pishrink.sh".format(working_dir)):
        ret,msg = run_shell_command('wget https://raw.githubusercontent.com/Drewsif/PiShrink/master/pishrink.sh -P {}'.format(working_dir))
        # wget https://raw.githubusercontent.com/Drewsif/PiShrink/master/pishrink.sh
        ret,msg = run_shell_command('sudo chmod +x {}/pishrink.sh'.format(working_dir))
        # sudo chmod +x pishrink.sh
    ret,msg = run_shell_command('sudo bash {}/pishrink.sh -z {}/{}'.format(working_dir,os_backup_location,backup_name))
    # NOTE(review): the glob *.img.gz is resolved relative to the process
    # CWD, not os_backup_location -- confirm this chown targets the right files.
    ret,msg = run_shell_command('sudo chown pi:pi *.img.gz')
    # old bkp cleanup
    existing_backup_dir = glob.glob('{}/*'.format(os_backup_location))
    existing_backup_dir.sort()
    for out_file in existing_backup_dir:
        if out_file not in existing_backup_dir[-4:]:
            print("Deleting {}".format(out_file))
            shutil.rmtree(out_file,ignore_errors=True)
    return(output)
# Script entry point; main() reads sys.argv itself.
if __name__=='__main__':
    main()
    # main(sys.argv[1:])
| 38.461397
| 164
| 0.633322
| 2,647
| 20,923
| 4.75255
| 0.157159
| 0.027027
| 0.021463
| 0.017806
| 0.523927
| 0.42973
| 0.340143
| 0.300556
| 0.252703
| 0.182671
| 0
| 0.011469
| 0.22492
| 20,923
| 544
| 165
| 38.461397
| 0.76426
| 0.099842
| 0
| 0.27665
| 0
| 0.005076
| 0.205692
| 0.030544
| 0
| 0
| 0
| 0.001838
| 0
| 1
| 0.053299
| false
| 0
| 0.030457
| 0
| 0.088832
| 0.032995
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2d7e6c6a86e1314f1b2716ac6227b1dc354be91
| 14,328
|
py
|
Python
|
fawkes/differentiator_lowkey.py
|
biergaiqiao/Oriole-Thwarting-Privacy-against-Trustworthy-Deep-Learning-Models
|
ffadb82b666e8c1561a036a10d9922db8a3266cc
|
[
"MIT"
] | 1
|
2021-05-18T01:14:44.000Z
|
2021-05-18T01:14:44.000Z
|
fawkes/differentiator_lowkey.py
|
biergaiqiao/Oriole-Thwarting-Privacy-against-Trustworthy-Deep-Learning-Models
|
ffadb82b666e8c1561a036a10d9922db8a3266cc
|
[
"MIT"
] | null | null | null |
fawkes/differentiator_lowkey.py
|
biergaiqiao/Oriole-Thwarting-Privacy-against-Trustworthy-Deep-Learning-Models
|
ffadb82b666e8c1561a036a10d9922db8a3266cc
|
[
"MIT"
] | 1
|
2021-05-18T01:14:47.000Z
|
2021-05-18T01:14:47.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2020-10-21
# @Author : Emily Wenger (ewenger@uchicago.edu)
import time
import numpy as np
import tensorflow as tf
import tensorflow_addons as tfa
from keras.utils import Progbar
class FawkesMaskGeneration:
    """Gradient-based image cloak/mask generator (Fawkes / LowKey style).

    The class attributes below are defaults; __init__ copies most of them
    onto the instance so every run can override them.
    """
    # if the attack is trying to mimic a target image or a neuron vector
    MIMIC_IMG = True
    # number of iterations to perform gradient descent
    MAX_ITERATIONS = 10000
    # larger values converge faster to less accurate results
    LEARNING_RATE = 1e-2
    # the initial constant c to pick as a first guess
    INITIAL_CONST = 1
    # pixel intensity range
    INTENSITY_RANGE = 'imagenet'
    # threshold for distance
    L_THRESHOLD = 0.03
    # whether keep the final result or the best result
    KEEP_FINAL = False
    # max_val of image
    MAX_VAL = 255
    MAXIMIZE = False
    IMAGE_SHAPE = (224, 224, 3)
    RATIO = 1.0
    LIMIT_DIST = False
    LOSS_TYPE = 'features'  # use features (original Fawkes) or gradients (Witches Brew) to run Fawkes?
def __init__(self, bottleneck_model_ls, mimic_img=MIMIC_IMG,
             batch_size=1, learning_rate=LEARNING_RATE,
             max_iterations=MAX_ITERATIONS, initial_const=INITIAL_CONST,
             intensity_range=INTENSITY_RANGE, l_threshold=L_THRESHOLD,
             max_val=MAX_VAL, keep_final=KEEP_FINAL, maximize=MAXIMIZE, image_shape=IMAGE_SHAPE, verbose=1,
             ratio=RATIO, limit_dist=LIMIT_DIST, loss_method=LOSS_TYPE):
    """Copy run parameters onto the instance and allocate optimizer state.

    bottleneck_model_ls: list of feature-extractor models used by the loss.
    The tf.Variables created at the end (modifier / const / mask) are the
    quantities optimized during the attack.
    """
    assert intensity_range in {'raw', 'imagenet', 'inception', 'mnist'}
    # constant used for tanh transformation to avoid corner cases
    self.it = 0
    self.tanh_constant = 2 - 1e-6
    self.MIMIC_IMG = mimic_img
    self.LEARNING_RATE = learning_rate
    self.MAX_ITERATIONS = max_iterations
    self.initial_const = initial_const
    self.batch_size = batch_size
    self.intensity_range = intensity_range
    self.l_threshold = l_threshold
    self.max_val = max_val
    self.keep_final = keep_final
    self.verbose = verbose
    self.maximize = maximize
    self.learning_rate = learning_rate
    self.ratio = ratio
    self.limit_dist = limit_dist
    self.single_shape = list(image_shape)
    self.bottleneck_models = bottleneck_model_ls
    self.loss_method = loss_method
    self.input_shape = tuple([self.batch_size] + self.single_shape)
    self.bottleneck_shape = tuple([self.batch_size] + self.single_shape)
    # the variable we're going to optimize over
    self.modifier = tf.Variable(np.ones(self.input_shape, dtype=np.float32) * 1e-6)
    self.const = tf.Variable(np.ones(batch_size) * self.initial_const, dtype=np.float32)
    # Fix: the bare alias np.bool was deprecated in NumPy 1.20 and removed
    # in 1.24, which made this line raise AttributeError on current NumPy;
    # np.bool_ is the supported boolean scalar dtype.
    self.mask = tf.Variable(np.ones(batch_size), dtype=np.bool_)
@staticmethod
def resize_tensor(input_tensor, model_input_shape):
    """Resize a batch of images to the model's spatial input size.

    Returned unchanged when the per-image shape already matches, or when
    the model accepts variable-size input (model_input_shape[1] is None).
    """
    if input_tensor.shape[1:] == model_input_shape or model_input_shape[1] is None:
        return input_tensor
    # Only the spatial dims (H, W) are passed to tf.image.resize.
    resized_tensor = tf.image.resize(input_tensor, model_input_shape[:2])
    return resized_tensor
def input_space_process(self, img):
    """Map raw pixel values into the network input space.

    For the 'imagenet' range this subtracts fixed per-channel means,
    broadcast over the batch; every other range passes through unchanged.
    """
    if self.intensity_range != 'imagenet':
        return img
    channel_means = np.repeat([[[[103.939, 116.779, 123.68]]]], self.batch_size, axis=0)
    return img - channel_means
def reverse_input_space_process(self, img):
    """Inverse of input_space_process: map back to raw pixel values.

    For the 'imagenet' range the per-channel means are added back; every
    other range passes through unchanged.
    """
    if self.intensity_range != 'imagenet':
        return img
    channel_means = np.repeat([[[[103.939, 116.779, 123.68]]]], self.batch_size, axis=0)
    return img + channel_means
def clipping(self, imgs):
    """Clamp images to the valid [0, max_val] range in raw pixel space.

    Converts out of the model input space, clips, and converts back, so
    the returned batch stays in the model's input space.
    """
    raw = self.reverse_input_space_process(imgs)
    clipped = np.clip(raw, 0, self.max_val)
    return self.input_space_process(clipped)
def calc_dissim(self, source_raw, source_mod_raw):
return 0.0, 0.0, 0.0
# msssim_split = tf.image.ssim(source_raw, source_mod_raw, max_val=255.0)
# dist_raw = (1.0 - tf.stack(msssim_split)) / 2.0
# dist = tf.maximum(dist_raw - self.l_threshold, 0.0)
# # dist_raw_sum = tf.reduce_sum(tf.where(self.mask, dist_raw, tf.zeros_like(dist_raw)))
# dist_raw_sum = tf.reduce_sum(dist_raw)
# # dist_sum = tf.reduce_sum(tf.where(self.mask, dist, tf.zeros_like(dist)))
# dist_sum = tf.reduce_sum(dist)
# return dist, dist_sum, dist_raw_sum
    def calc_bottlesim(self, tape, source_raw, target_raw, source_filtered, original_raw):
        """ original Fawkes loss function.

        Sums, over all bottleneck (feature-extractor) models, a normalized
        squared feature-space distance between the perturbed images and the
        originals — once for the raw perturbation and once for the
        Gaussian-filtered perturbation.

        :param tape: active tf.GradientTape or None; model variables are
            watched so gradients can flow through the extractors.
        :param source_raw: perturbed images (model input space).
        :param target_raw: target images — currently unused (the resize for
            it is commented out below).
        :param source_filtered: filtered perturbed images.
        :param original_raw: unperturbed source images.
        :return: (bottlesim, bottlesim_sum) — per-image loss tensor and the
            scalar sum over images and models.
        """
        bottlesim = 0.0
        bottlesim_sum = 0.0
        # make sure everything is the right size.
        model_input_shape = self.single_shape
        cur_aimg_input = self.resize_tensor(source_raw, model_input_shape)
        cur_source_filtered = self.resize_tensor(source_filtered, model_input_shape)
        # cur_timg_input = self.resize_tensor(target_raw, model_input_shape)
        for bottleneck_model in self.bottleneck_models:
            if tape is not None:
                # Some entries expose .variables directly, others wrap a
                # Keras model — fall back accordingly.
                try:
                    tape.watch(bottleneck_model.variables)
                except AttributeError:
                    tape.watch(bottleneck_model.model.variables)
            # get the respective feature space reprs.
            bottleneck_a = bottleneck_model(cur_aimg_input)
            bottleneck_filter = bottleneck_model(cur_source_filtered)
            bottleneck_s = bottleneck_model(original_raw)
            # compute the differences.
            bottleneck_diff = bottleneck_a - bottleneck_s
            bottleneck_diff_filter = bottleneck_filter - bottleneck_s
            # get scale factor.
            # NOTE(review): the two normalizers are asymmetric — the first is
            # the norm of the ORIGINAL features, the second the norm of the
            # filtered DIFFERENCE itself. Confirm this is intentional.
            scale_factor = tf.sqrt(tf.reduce_sum(tf.square(bottleneck_s), axis=1))
            scale_factor_filter = tf.sqrt(tf.reduce_sum(tf.square(bottleneck_diff_filter), axis=1))
            # compute the loss
            cur_bottlesim = tf.reduce_sum(tf.square(bottleneck_diff), axis=1)
            cur_bottlesim_filter = tf.reduce_sum(tf.square(bottleneck_diff_filter), axis=1)
            cur_bottlesim = cur_bottlesim / scale_factor
            cur_bottlesim_filter = cur_bottlesim_filter / scale_factor_filter
            bottlesim += cur_bottlesim + cur_bottlesim_filter
            bottlesim_sum += tf.reduce_sum(cur_bottlesim) + tf.reduce_sum(cur_bottlesim_filter)
        return bottlesim, bottlesim_sum
    def compute_feature_loss(self, tape, aimg_raw, simg_raw, aimg_input, timg_input, simg_input, aimg_filtered):
        """ Compute input space + feature space loss.

        Combines the (currently disabled) input-space dissimilarity from
        calc_dissim with the bottleneck feature-space distance from
        calc_bottlesim into a single scalar loss.

        :param tape: active tf.GradientTape (or None).
        :param aimg_raw: adversarial images, raw pixel space.
        :param simg_raw: source images, raw pixel space.
        :param aimg_input: adversarial images after input preprocessing.
        :param timg_input: target images after input preprocessing.
        :param simg_input: source images after input preprocessing.
        :param aimg_filtered: Gaussian-filtered adversarial images.
        :return: (loss_sum, 0, feature_space_loss, 0, 0,
            feature_space_loss_sum); the zero entries are placeholders for
            the disabled input-space terms.
        """
        # calc_dissim is currently stubbed to return (0.0, 0.0, 0.0), so the
        # input-space term contributes nothing below.
        input_space_loss, input_space_loss_sum, input_space_loss_raw_sum = self.calc_dissim(aimg_raw, simg_raw)
        feature_space_loss, feature_space_loss_sum = self.calc_bottlesim(tape, aimg_input, timg_input, aimg_filtered, simg_input)
        if self.maximize:
            loss = self.const * input_space_loss - feature_space_loss
        else:
            # NOTE(review): attack_batch increments self.it up to
            # MAX_ITERATIONS, so this branch holds on every step except
            # (possibly) the last — the 1000x feature weighting dominates.
            if self.it < self.MAX_ITERATIONS:
                loss = self.const * input_space_loss + 1000 * feature_space_loss  # - feature_space_loss_orig
            else:
                loss = self.const * 100 * input_space_loss + feature_space_loss
        # loss_sum = tf.reduce_sum(tf.where(self.mask, loss, tf.zeros_like(loss)))
        loss_sum = tf.reduce_sum(loss)
        # return loss_sum, input_space_loss, feature_space_loss, input_space_loss_sum, input_space_loss_raw_sum, feature_space_loss_sum
        return loss_sum, 0, feature_space_loss, 0, 0, feature_space_loss_sum
def attack(self, source_imgs, target_imgs, weights=None):
""" Main function that runs cloak generation. """
if weights is None:
weights = np.ones([source_imgs.shape[0]] +
list(self.bottleneck_shape[1:]))
assert weights.shape[1:] == self.bottleneck_shape[1:]
assert source_imgs.shape[1:] == self.input_shape[1:]
assert source_imgs.shape[0] == weights.shape[0]
if self.MIMIC_IMG:
assert target_imgs.shape[1:] == self.input_shape[1:]
assert source_imgs.shape[0] == target_imgs.shape[0]
else:
assert target_imgs.shape[1:] == self.bottleneck_shape[1:]
assert source_imgs.shape[0] == target_imgs.shape[0]
start_time = time.time()
adv_imgs = []
print('%d batches in total'
% int(np.ceil(len(source_imgs) / self.batch_size)))
for idx in range(0, len(source_imgs), self.batch_size):
# print('processing image %d at %s' % (idx + 1, datetime.datetime.now()))
adv_img = self.attack_batch(source_imgs[idx:idx + self.batch_size],
target_imgs[idx:idx + self.batch_size])
adv_imgs.extend(adv_img)
elapsed_time = time.time() - start_time
print('protection cost %f s' % elapsed_time)
return np.array(adv_imgs)
def attack_batch(self, source_imgs, target_imgs):
""" TF2 method to generate the cloak. """
# preprocess images.
global progressbar
nb_imgs = source_imgs.shape[0]
mask = [True] * nb_imgs + [False] * (self.batch_size - nb_imgs)
self.mask = np.array(mask, dtype=np.bool)
LR = self.learning_rate
# make sure source/target images are an array
source_imgs = np.array(source_imgs, dtype=np.float32)
target_imgs = np.array(target_imgs, dtype=np.float32)
# metrics to test
best_bottlesim = [0] * nb_imgs if self.maximize else [np.inf] * nb_imgs
best_adv = np.zeros(source_imgs.shape)
total_distance = [0] * nb_imgs
finished_idx = set()
# make the optimizer
optimizer = tf.keras.optimizers.Adam(self.learning_rate)
# optimizer = tf.keras.optimizers.Adadelta(self.learning_rate)
# get the modifier
self.modifier = tf.Variable(np.ones(self.input_shape, dtype=np.float32) * 1e-4)
# self.modifier = tf.Variable(np.random.uniform(-8.0, 8.0, self.input_shape), dtype=tf.float32)
if self.verbose == 0:
progressbar = Progbar(
self.MAX_ITERATIONS, width=30, verbose=1
)
# watch relevant variables.
simg_tanh = tf.Variable(source_imgs, dtype=np.float32)
timg_tanh = tf.Variable(target_imgs, dtype=np.float32)
# simg_tanh = self.reverse_input_space_process(simg_tanh)
# timg_tanh = self.reverse_input_space_process(timg_tanh)
# run the attack
self.it = 0
below_thresh = False
while self.it < self.MAX_ITERATIONS:
self.it += 1
with tf.GradientTape(persistent=True) as tape:
tape.watch(self.modifier)
tape.watch(simg_tanh)
tape.watch(timg_tanh)
aimg_raw = simg_tanh + self.modifier
aimg_filtered_raw = simg_tanh + tfa.image.gaussian_filter2d(self.modifier, [7, 7], 3.0)
final_filtered_raw = simg_tanh + tfa.image.gaussian_filter2d(self.modifier, [1, 1], 2.0)
simg_raw = simg_tanh
timg_raw = timg_tanh
# Convert further preprocess for bottleneck
aimg_input = self.input_space_process(aimg_raw)
aimg_filtered = self.input_space_process(aimg_filtered_raw)
timg_input = self.input_space_process(timg_raw)
simg_input = self.input_space_process(simg_raw)
# aimg_input = aimg_raw
# timg_input = timg_raw
# simg_input = simg_raw
# get the feature space loss.
loss, input_dist, internal_dist, input_dist_sum, input_dist_raw_sum, internal_dist_sum = self.compute_feature_loss(
tape, aimg_raw, simg_raw, aimg_input, timg_input, simg_input, aimg_filtered)
# compute gradients
grad = tape.gradient(loss, [self.modifier])
# grad[0] = grad[0] * 1e11
grad[0] = tf.sign(grad[0]) * 0.6375
# optimizer.apply_gradients(zip(grad, [self.modifier]))
self.modifier = self.modifier - grad[0]
self.modifier = tf.clip_by_value(self.modifier, -12.0, 12.0)
for e, (feature_d, mod_img) in enumerate(zip(internal_dist, final_filtered_raw)):
if e >= nb_imgs:
break
if (feature_d < best_bottlesim[e] and (not self.maximize)) or (
feature_d > best_bottlesim[e] and self.maximize):
# print('found improvement')
best_bottlesim[e] = feature_d
best_adv[e] = mod_img
# compute whether or not your perturbation is too big.
# thresh_over = input_dist_sum / self.batch_size / self.l_threshold * 100
# if self.it != 0 and (self.it % (self.MAX_ITERATIONS // 3) == 0):
# LR = LR * 0.8 # np.array([LR * 0.8])
# optimizer.learning_rate = LR
# print("LR: {}".format(LR))
# print iteration result
# if self.it % 10 == 0:
if self.verbose == 1:
thresh_over = input_dist_sum / self.batch_size / self.l_threshold * 100
# import pdb
# pdb.set_trace()
print(
"ITER {:0.0f} Total Loss: {:.4f} perturb: {:0.4f} ({:0.4f} over, {:0.4f} raw); sim: {:.4f}".format(
self.it, loss, input_dist_sum, thresh_over, input_dist_raw_sum,
internal_dist_sum / nb_imgs))
if self.verbose == 0:
progressbar.update(self.it)
# DONE: print results
if self.verbose == 1:
thresh_over = input_dist_sum / self.batch_size / self.l_threshold * 100
print(
"END after {} iterations: Total Loss: {} perturb: {:0.4f} ({:0.4f} over, {:0.4f} raw); sim: {}".format(
self.it,
loss, input_dist_sum, thresh_over, input_dist_raw_sum, internal_dist_sum / nb_imgs))
print("\n")
best_adv = self.clipping(best_adv[:nb_imgs])
return best_adv
| 44.775
| 135
| 0.620254
| 1,864
| 14,328
| 4.501609
| 0.168991
| 0.022524
| 0.020141
| 0.011679
| 0.350614
| 0.273984
| 0.20081
| 0.193302
| 0.175903
| 0.168037
| 0
| 0.022559
| 0.285315
| 14,328
| 319
| 136
| 44.915361
| 0.796875
| 0.194724
| 0
| 0.121359
| 0
| 0.009709
| 0.024548
| 0
| 0
| 0
| 0
| 0
| 0.038835
| 1
| 0.048544
| false
| 0
| 0.024272
| 0.004854
| 0.18932
| 0.024272
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2dd43c40f9fe338eecf074d6dac1c0de992c516
| 798
|
py
|
Python
|
chess.py
|
jrj92280/python-eve-backend
|
c0566cdef5e5c75e2b75e59bde804e0d4ce407e3
|
[
"MIT"
] | null | null | null |
chess.py
|
jrj92280/python-eve-backend
|
c0566cdef5e5c75e2b75e59bde804e0d4ce407e3
|
[
"MIT"
] | null | null | null |
chess.py
|
jrj92280/python-eve-backend
|
c0566cdef5e5c75e2b75e59bde804e0d4ce407e3
|
[
"MIT"
] | null | null | null |
from chess_game._board import make_board
from chess_game.chess_game import ChessGame
from chess_game.play_game import get_user_input, game_event_loop
if __name__ == "__main__":
    # Console chess: build the board, read player names, run the event loop.
    game_board = make_board()
    print('Chess')
    print(' : Rules')
    # BUG FIX: 'piece''s' was two adjacent string literals and printed
    # "pieces"; use an escaped apostrophe so the prompt reads "piece's".
    print(" : input - piece's position x,y, second x,y = destination")
    # Typo fix in user-facing text: "though" -> "through".
    print(" : x = row number 1 through 8")
    print(" : y = column number 1 through 8")
    player1_name = get_user_input(' : Enter player one name', is_move=False)
    player2_name = get_user_input(' : Enter player two name', is_move=False)
    print('------------------------------------------------')
    chess_game = ChessGame(game_board, player1_name, player2_name)
    game_event_loop(chess_game)
| 33.25
| 76
| 0.639098
| 110
| 798
| 4.309091
| 0.381818
| 0.113924
| 0.082278
| 0.059072
| 0.113924
| 0.113924
| 0
| 0
| 0
| 0
| 0
| 0.012384
| 0.190476
| 798
| 23
| 77
| 34.695652
| 0.721362
| 0.06391
| 0
| 0
| 0
| 0
| 0.319892
| 0.064516
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0.4
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2de6356f341ba86e79ed1873bc9d766068dfedf
| 1,589
|
py
|
Python
|
strstr/3-2.py
|
stonemary/lintcode_solutions
|
f41fd0e56fb88ab54d0ab624977bff1623a6d33a
|
[
"Apache-2.0"
] | null | null | null |
strstr/3-2.py
|
stonemary/lintcode_solutions
|
f41fd0e56fb88ab54d0ab624977bff1623a6d33a
|
[
"Apache-2.0"
] | null | null | null |
strstr/3-2.py
|
stonemary/lintcode_solutions
|
f41fd0e56fb88ab54d0ab624977bff1623a6d33a
|
[
"Apache-2.0"
] | null | null | null |
# time 15 mins
# used time 15 mins
# time 15 mins
# used time 15 mins
# this is actually a correct solution
# the code i submitted a day ago, which passed lintcode, is actually wrong after i looked KMP up
# the previous version does not take care of the situations where the target contains repetitive elements
class Solution:
    def strStr(self, source, target):
        """Return the index of the first occurrence of target in source.

        Returns -1 when either argument is None or target does not occur.
        An empty target matches at index 0 (same as the old behavior).

        BUG FIX: the previous hand-rolled matcher remembered only a single
        fallback position inside the target, so it mis-handled targets with
        repeated prefixes — e.g. strStr("aaabaaaa", "aaaa") returned 2
        (source[2:6] == "abaa") instead of 4. str.find performs the search
        correctly and already returns -1 on no match.
        """
        if source is None or target is None:
            return -1
        return source.find(target)
| 34.543478
| 118
| 0.570799
| 186
| 1,589
| 4.682796
| 0.33871
| 0.164179
| 0.123995
| 0.158439
| 0.293915
| 0.174512
| 0.174512
| 0.119403
| 0.119403
| 0.119403
| 0
| 0.020222
| 0.377596
| 1,589
| 45
| 119
| 35.311111
| 0.860465
| 0.200126
| 0
| 0.481481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037037
| false
| 0
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2e37e6fb52ee6d2e740ecb159b5517384b2a2c4
| 324
|
py
|
Python
|
www/async_flask/__init__.py
|
StarAhri/flask
|
facd476065c945f3467d4bfd7bc4ca910cc27d74
|
[
"BSD-3-Clause"
] | null | null | null |
www/async_flask/__init__.py
|
StarAhri/flask
|
facd476065c945f3467d4bfd7bc4ca910cc27d74
|
[
"BSD-3-Clause"
] | null | null | null |
www/async_flask/__init__.py
|
StarAhri/flask
|
facd476065c945f3467d4bfd7bc4ca910cc27d74
|
[
"BSD-3-Clause"
] | null | null | null |
from flask import Flask
import time
from _thread import get_ident
app = Flask(__name__)


@app.route("/")
def hello_world():
    """Root endpoint: blocks for 20 s, then returns a greeting + thread id."""
    time.sleep(20)
    return f"hello world!{get_ident()}"
@app.route("/index")
def hello():
    """'/index' endpoint: blocks for 1 s, then returns a greeting + thread id."""
    time.sleep(1)
    return f"Hello{get_ident()}"
# Run Flask's development server when executed directly.
if __name__=="__main__":
    app.run(port=6003)
| 17.052632
| 42
| 0.675926
| 48
| 324
| 4.208333
| 0.5
| 0.118812
| 0.108911
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025735
| 0.160494
| 324
| 19
| 43
| 17.052632
| 0.716912
| 0
| 0
| 0
| 0
| 0
| 0.098765
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.214286
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2ed016efef1c89871a2e33d8718c95390697abc
| 3,545
|
py
|
Python
|
vk_bot/needrework/relation.py
|
triangle1984/vk-bot
|
39dea7bf8043e791ef079ea1ac6616f95d5b5312
|
[
"BSD-3-Clause"
] | 3
|
2019-11-05T12:32:04.000Z
|
2019-11-15T14:29:46.000Z
|
vk_bot/needrework/relation.py
|
anar66/vk-bot
|
39dea7bf8043e791ef079ea1ac6616f95d5b5312
|
[
"BSD-3-Clause"
] | 1
|
2019-12-11T20:26:31.000Z
|
2019-12-11T20:26:31.000Z
|
vk_bot/needrework/relation.py
|
triangle1984/vk-bot
|
39dea7bf8043e791ef079ea1ac6616f95d5b5312
|
[
"BSD-3-Clause"
] | 5
|
2019-11-20T14:20:30.000Z
|
2022-02-05T10:37:01.000Z
|
import vk_api
from vk_api.utils import get_random_id
from vk_bot.core.sql.vksql import *
def relationmeet(text, vk, event):
    """Handle a "start dating" proposal from the current user.

    Validates that neither party is already in (or waiting for) a
    relationship, records the pending proposal in 'waitmeet' and notifies
    the target user. Returns an error string for the caller to send back,
    or None on success (the notification is sent directly).

    Rewritten with guard clauses and PEP 8 `is None` comparisons
    (was `== None`).
    """
    if checkrelation('waitmeet', event.object.from_id) is not None:
        return "Ты уже отправил приглашение!"
    if checkrelation('relation', event.object.from_id) is not None:
        return "Ай-яй-яй! Изменять нехорошо"
    # text[2] looks like a VK mention; strip the leading '[id' prefix and
    # the '|name]' suffix to get the numeric target id.
    userid = "".join(text[2][3:])
    userid = userid.split('|')[0]
    if checkrelation('relation', userid) is not None:
        return "Этот пользователь уже встречается с кем-то!"
    if checkrelation('waitmeet', userid) is not None:
        return "Этому пользователю уже кто-то предложил встречатся!"
    tableadd("waitmeet", "id, id2", f"{event.object.from_id}, {userid}")
    vk.messages.send(user_id=int(userid), random_id=get_random_id(),
                     message=f"*id{event.object.from_id}(Пользователь) предложил тебе встречаться!\nНапиши: '/отношения принять' или '/отношения отклонить'")
def reject(event, vk):
    """Decline a pending dating proposal addressed to the current user.

    Notifies the proposer, removes the pending row from 'waitmeet' and
    returns a confirmation string. Fixes PEP 8 `== None` comparison and
    removes the duplicated checktable() lookup.
    """
    userid = checktable('waitmeet', 'id2', event.object.from_id)
    if userid is None:
        return 'У тебя нет предложений встречаться!'
    vk.messages.send(user_id=int(userid['id']), random_id=get_random_id(),
                     message=f"*id{event.object.from_id}(Пользователь) отклонил твое предложение :()")
    tablerm('waitmeet', "id2", event.object.from_id)
    return "Вы отклонили предложение"
def accept(event, vk):
    """Accept a pending dating proposal addressed to the current user.

    Moves the pair from 'waitmeet' into 'relation', notifies the proposer
    and returns a confirmation string. Fixes PEP 8 `== None` comparison.
    """
    check = checktable('waitmeet', 'id2', event.object.from_id)
    if check is None:
        return 'У тебя нет предложений встречаться!'
    relationaccept(event.object.from_id)
    tablerm('waitmeet', "id2", event.object.from_id)
    userid = checktable('relation', 'id2', event.object.from_id)
    vk.messages.send(user_id=int(userid['id']), random_id=get_random_id(),
                     message=f"*id{event.object.from_id}(Пользователь) принял твое предложение! Поздравляем!")
    return "Вы приняли предложение! Поздравляем!"
def test(event, vk, message, case):
    """Build a message describing the current user's partner.

    :param message: message prefix, e.g. "Ты встречаешься с".
    :param case: grammatical case for vk.users.get(name_case=...).
    :return: dict with a 'message' key, or None (see NOTE below).

    Fixes PEP 8 `== None` comparisons; logic otherwise unchanged.
    """
    check = checkrelation('relation', event.object.from_id)
    if check is None:
        return {'message': 'Ты ни с кем не встречаешься :('}
    # The relation row may store the current user in either column.
    userid = checktable('relation', 'id', event.object.from_id)
    if userid is None:
        userid = checktable('relation', 'id2', event.object.from_id)
    if userid['id2'] == event.object.from_id:
        partner = f"*id{userid['id']}({vk.users.get(user_ids=userid['id'], name_case=case)[0]['first_name']})"
        return {'message': f"{message} {partner}"}
    elif userid['id'] == event.object.from_id:
        partner = f"*id{userid['id2']}({vk.users.get(user_ids=userid['id2'], name_case=case)[0]['first_name']})"
        return {'message': f"{message} {partner}"}
    # NOTE(review): falls through to an implicit None if neither column
    # matches — presumably unreachable given the checks above; confirm.
def relation(event, vk, text):
    """Dispatch '/отношения' subcommands to accept/reject/relationmeet.

    :param text: tokenized command, e.g. ['/отношения', 'принять'].
    :return: dict with a 'message' key, or None for unknown subcommands.
    """
    try:
        if text[1] == "принять":
            return {"message": accept(event, vk)}
        elif text[1] == "отклонить":
            return {"message": reject(event, vk)}
        elif text[:2] == ['/отношения', 'встречаться']:
            return {"message": relationmeet(text, vk, event)}
    except IndexError:
        # Bare '/отношения' with no subcommand: describe the current
        # relationship instead (EAFP on the missing text[1]).
        return test(event, vk, "Ты встречаешься с", "ins")
| 48.561644
| 172
| 0.598025
| 422
| 3,545
| 4.92654
| 0.229858
| 0.095238
| 0.12987
| 0.147186
| 0.531506
| 0.50457
| 0.465127
| 0.395382
| 0.329004
| 0.329004
| 0
| 0.007205
| 0.256135
| 3,545
| 73
| 173
| 48.561644
| 0.781191
| 0
| 0
| 0.4
| 0
| 0.042857
| 0.304005
| 0.095601
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.042857
| 0
| 0.328571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2ed7a6bb514c982bc41d3c33e724e9e6365650e
| 1,746
|
py
|
Python
|
wallpaperdownloader/main.py
|
k-vinogradov/wallpaper-downloader
|
568c6a1e3a2307f710bf6fe313b39da2d620213a
|
[
"MIT"
] | null | null | null |
wallpaperdownloader/main.py
|
k-vinogradov/wallpaper-downloader
|
568c6a1e3a2307f710bf6fe313b39da2d620213a
|
[
"MIT"
] | null | null | null |
wallpaperdownloader/main.py
|
k-vinogradov/wallpaper-downloader
|
568c6a1e3a2307f710bf6fe313b39da2d620213a
|
[
"MIT"
] | null | null | null |
"""Wallpaper Downloader Main Module."""
import argparse
import asyncio
import logging
import sys
from datetime import datetime
from wallpaperdownloader.downloader import download, LOGGER_NAME
def abort(*args):
    """Print a message to stderr and terminate with exit status 1."""
    print(*args, file=sys.stderr)
    raise SystemExit(1)
def check_args(args):
    """Check if arguments are valid; abort the program otherwise.

    :param args: parsed CLI namespace with ``month`` and ``year`` ints.

    BUG FIXES:
    - the month error message was passed as ``abort("... %d", month)``,
      which printed the raw "%d" instead of the number — now formatted;
    - ``strftime("%Y%M")`` used %M (minutes) instead of %m (month), so the
      "too early" check compared against year+minute.
    """
    month, year = (args.month, args.year)
    if month < 1 or month > 12:
        abort("Invalid month number %d" % month)
    date_string = f"{year:04}{month:02}"
    if date_string < "201205":
        abort("There are no wallpapers older than May 2012")
    if date_string > datetime.now().strftime("%Y%m"):
        abort("Too early... come a bit later")
def configure_logger(level):
    """Attach a console handler to the package logger at the given level."""
    console = logging.StreamHandler()
    console.setLevel(level)
    package_logger = logging.getLogger(LOGGER_NAME)
    package_logger.setLevel(level)
    package_logger.addHandler(console)
def main():
    """Run WD main routine: parse CLI args, validate, then download."""
    cli = argparse.ArgumentParser(
        description="Download wallpapers from www.smashingmagazine.com"
    )
    # Positional argument order is part of the CLI contract — keep it.
    cli.add_argument("month", type=int, help="Month number")
    cli.add_argument("year", type=int, help="Year")
    cli.add_argument("resolution", type=str, help="Image resolution")
    cli.add_argument(
        "-v", "--verbose", action="store_true", help="Enable verbose output"
    )
    args = cli.parse_args()
    check_args(args)
    log_level = logging.DEBUG if args.verbose else logging.INFO
    configure_logger(log_level)
    coro = download(args.year, args.month, args.resolution)
    asyncio.get_event_loop().run_until_complete(coro)
# Script entry point.
if __name__ == "__main__":
    main()
| 29.59322
| 76
| 0.683849
| 225
| 1,746
| 5.186667
| 0.471111
| 0.030848
| 0.058269
| 0.029135
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012667
| 0.18614
| 1,746
| 58
| 77
| 30.103448
| 0.808586
| 0.093929
| 0
| 0
| 0
| 0
| 0.176093
| 0.015424
| 0
| 0
| 0
| 0
| 0
| 1
| 0.097561
| false
| 0
| 0.146341
| 0
| 0.243902
| 0.02439
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2ee02add396584dc919e32b6bdd9a63f34df039
| 4,512
|
py
|
Python
|
Lib/site-packages/hackedit/app/common.py
|
fochoao/cpython
|
3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9
|
[
"bzip2-1.0.6",
"0BSD"
] | null | null | null |
Lib/site-packages/hackedit/app/common.py
|
fochoao/cpython
|
3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9
|
[
"bzip2-1.0.6",
"0BSD"
] | 20
|
2021-05-03T18:02:23.000Z
|
2022-03-12T12:01:04.000Z
|
Lib/site-packages/hackedit/app/common.py
|
fochoao/cpython
|
3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9
|
[
"bzip2-1.0.6",
"0BSD"
] | null | null | null |
"""
Functions shared across the main window, the welcome window and the system
tray.
"""
import os
import qcrash.api as qcrash
from PyQt5 import QtWidgets
from hackedit.app import templates, settings
from hackedit.app.dialogs.dlg_about import DlgAbout
from hackedit.app.dialogs.dlg_template_answers import DlgTemplateVars
from hackedit.app.dialogs.preferences import DlgPreferences
from hackedit.app.wizards.new import WizardNew
def show_about(window):
    """
    Shows the about dialog on the parent window.

    :param window: parent window.
    """
    DlgAbout.show_about(window)
def check_for_update(*args, **kwargs):
    """
    Checks for update.

    Currently a no-op: the pip-based self-update flow was removed pending a
    proper update wizard that updates both hackedit and its packages (to
    ensure compatibility). The ~30 lines of dead, commented-out
    implementation were deleted; recover them from version control if the
    wizard work resurrects them.

    :param args: ignored (kept so existing signal/slot connections still work).
    :param kwargs: ignored.
    """
    # todo: make an update wizard that updates both hackedit and its
    # packages (to ensure compatibility).
    pass
def open_folder(window, app):
    """Prompt the user for a directory and open it in the application.

    Remembers the parent of the chosen path as the last-open directory; does
    nothing if the dialog is cancelled (empty path).

    NOTE(review): `_` is assumed to be an installed gettext-style translation
    helper — confirm it is set up at application startup.
    """
    path = QtWidgets.QFileDialog.getExistingDirectory(
        window, _('Open directory'), settings.last_open_dir())
    if path:
        settings.set_last_open_dir(os.path.dirname(path))
        app.open_path(path, sender=window)
def report_bug(window, title='', traceback=None, issue_description=''):
    """Open the qcrash bug-report dialog.

    :param window: parent window.
    :param title: pre-filled issue title.
    :param traceback: traceback string when reporting a crash; when present,
        the application log and system information are attached.
    :param issue_description: pre-filled issue body.
    :return: always True.
    """
    qcrash.show_report_dialog(
        issue_title=title, traceback=traceback, parent=window,
        include_log=traceback is not None,
        include_sys_info=traceback is not None,
        issue_description=issue_description)
    return True
def edit_preferences(window, app):
    """Open the application preferences dialog on the parent window.

    :param window: parent window.
    :param app: application instance.
    """
    DlgPreferences.edit_preferences(window, app)
def not_implemented_action(window):
    """Show a placeholder message box for actions not implemented yet.

    NOTE(review): the user-facing string "Not implementeded" contains a typo;
    it is also a translation key, so fixing it should be coordinated with the
    translation catalogs rather than changed here.
    """
    QtWidgets.QMessageBox.information(
        window, _('Not implementeded'),
        _('This action has not been implemented yet...'))
def create_new(app, window, current_project=None):
    """Run the 'new file/project' wizard and create from the chosen template.

    Does nothing if the wizard was cancelled (no source selected).
    """
    params = WizardNew.get_parameters(window, current_project)
    source, template, dest_dir, single_file = params
    if source is None:
        return
    create_new_from_template(source, template, dest_dir, single_file,
                             window, app)
def create_new_from_template(source, template, dest_dir, single_file, window,
                             app):
    """Instantiate a template into dest_dir and open the result.

    Asks the user for template variable values when the template declares
    any; returns the opened path, or None if the user cancelled or the
    template produced no files.
    """
    from .main_window import MainWindow
    # EAFP: templates without a 'variables' key need no answers.
    try:
        variables = template['variables']
    except KeyError:
        answers = {}
    else:
        answers = DlgTemplateVars.get_answers(variables, parent=window)
        if answers is None:
            # canceled by user
            return None
    created = templates.create(template, dest_dir, answers)
    if not created:
        # should not happen unless the template is empty
        return None
    target = created[0] if single_file else dest_dir
    # Imported here (not at the top of the function) to keep the original
    # lazy-import behavior.
    from hackedit.app.welcome_window import WelcomeWindow
    sender = None if isinstance(window, WelcomeWindow) else window
    if single_file and isinstance(window, MainWindow):
        window.open_file(target)
    else:
        app.open_path(target, sender=sender)
    return target
| 32
| 77
| 0.623005
| 503
| 4,512
| 5.437376
| 0.326044
| 0.051188
| 0.032907
| 0.024132
| 0.186837
| 0.14223
| 0.130896
| 0.093601
| 0.093601
| 0.093601
| 0
| 0.000627
| 0.292775
| 4,512
| 140
| 78
| 32.228571
| 0.856471
| 0.38852
| 0
| 0.092308
| 0
| 0
| 0.031086
| 0
| 0
| 0
| 0
| 0.007143
| 0
| 1
| 0.123077
| false
| 0.015385
| 0.153846
| 0
| 0.338462
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2ee858e562eab312d062843fa52105cd18f06ef
| 4,778
|
py
|
Python
|
pygame_menu/locals.py
|
apuly/pygame-menu
|
77bf8f2c8913de5a24674ee0d0d2c7c9b816a58b
|
[
"MIT"
] | 419
|
2017-05-01T20:00:08.000Z
|
2022-03-29T13:49:16.000Z
|
pygame_menu/locals.py
|
apuly/pygame-menu
|
77bf8f2c8913de5a24674ee0d0d2c7c9b816a58b
|
[
"MIT"
] | 363
|
2017-11-05T17:42:48.000Z
|
2022-03-27T21:13:33.000Z
|
pygame_menu/locals.py
|
apuly/pygame-menu
|
77bf8f2c8913de5a24674ee0d0d2c7c9b816a58b
|
[
"MIT"
] | 167
|
2017-05-02T20:42:24.000Z
|
2022-03-24T16:17:38.000Z
|
"""
pygame-menu
https://github.com/ppizarror/pygame-menu
LOCALS
Local constants.
License:
-------------------------------------------------------------------------------
The MIT License (MIT)
Copyright 2017-2021 Pablo Pizarro R. @ppizarror
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-------------------------------------------------------------------------------
"""
__all__ = [

    # Alignment
    'ALIGN_CENTER',
    'ALIGN_LEFT',
    'ALIGN_RIGHT',

    # Data types
    'INPUT_FLOAT',
    'INPUT_INT',
    'INPUT_TEXT',

    # Positioning
    'POSITION_CENTER',
    'POSITION_EAST',
    'POSITION_NORTH',
    'POSITION_NORTHEAST',
    'POSITION_SOUTHWEST',
    'POSITION_SOUTH',
    'POSITION_SOUTHEAST',
    'POSITION_NORTHWEST',
    'POSITION_WEST',

    # Orientation
    'ORIENTATION_HORIZONTAL',
    'ORIENTATION_VERTICAL',

    # Scrollarea
    'SCROLLAREA_POSITION_BOTH_HORIZONTAL',
    'SCROLLAREA_POSITION_BOTH_VERTICAL',
    'SCROLLAREA_POSITION_FULL',

    # Cursors
    'CURSOR_ARROW',
    'CURSOR_CROSSHAIR',
    'CURSOR_HAND',
    'CURSOR_IBEAM',
    'CURSOR_NO',
    'CURSOR_SIZEALL',
    'CURSOR_SIZENESW',
    'CURSOR_SIZENS',
    'CURSOR_SIZENWSE',
    'CURSOR_SIZEWE',
    'CURSOR_WAIT',
    'CURSOR_WAITARROW',

    # Event compatibility
    'FINGERDOWN',
    'FINGERMOTION',
    'FINGERUP'

]

import pygame as __pygame

# Alignment
ALIGN_CENTER = 'align-center'
ALIGN_LEFT = 'align-left'
ALIGN_RIGHT = 'align-right'

# Input data type
INPUT_FLOAT = 'input-float'
INPUT_INT = 'input-int'
INPUT_TEXT = 'input-text'

# Position
POSITION_CENTER = 'position-center'
POSITION_EAST = 'position-east'
POSITION_NORTH = 'position-north'
POSITION_NORTHEAST = 'position-northeast'
POSITION_NORTHWEST = 'position-northwest'
POSITION_SOUTH = 'position-south'
POSITION_SOUTHEAST = 'position-southeast'
POSITION_SOUTHWEST = 'position-southwest'
POSITION_WEST = 'position-west'

# Menu ScrollArea position
SCROLLAREA_POSITION_BOTH_HORIZONTAL = 'scrollarea-position-both-horizontal'
# NOTE(review): the value below uses an underscore ('scrollarea_position-...')
# while its siblings use hyphens; kept as-is because external code may compare
# against the raw string — fix only with a deprecation path.
SCROLLAREA_POSITION_BOTH_VERTICAL = 'scrollarea_position-both-vertical'
SCROLLAREA_POSITION_FULL = 'scrollarea-position-full'

# Orientation
ORIENTATION_HORIZONTAL = 'orientation-horizontal'
ORIENTATION_VERTICAL = 'orientation-vertical'

# Cursors: use the SDL2 system cursors when this pygame build provides them,
# otherwise None. getattr(obj, name, default) replaces the previous
# `None if not hasattr(...) else ...` chains — identical semantics, one call.
CURSOR_ARROW = getattr(__pygame, 'SYSTEM_CURSOR_ARROW', None)
CURSOR_CROSSHAIR = getattr(__pygame, 'SYSTEM_CURSOR_CROSSHAIR', None)
CURSOR_HAND = getattr(__pygame, 'SYSTEM_CURSOR_HAND', None)
CURSOR_IBEAM = getattr(__pygame, 'SYSTEM_CURSOR_IBEAM', None)
CURSOR_NO = getattr(__pygame, 'SYSTEM_CURSOR_NO', None)
CURSOR_SIZEALL = getattr(__pygame, 'SYSTEM_CURSOR_SIZEALL', None)
CURSOR_SIZENESW = getattr(__pygame, 'SYSTEM_CURSOR_SIZENESW', None)
CURSOR_SIZENS = getattr(__pygame, 'SYSTEM_CURSOR_SIZENS', None)
CURSOR_SIZENWSE = getattr(__pygame, 'SYSTEM_CURSOR_SIZENWSE', None)
CURSOR_SIZEWE = getattr(__pygame, 'SYSTEM_CURSOR_SIZEWE', None)
CURSOR_WAIT = getattr(__pygame, 'SYSTEM_CURSOR_WAIT', None)
CURSOR_WAITARROW = getattr(__pygame, 'SYSTEM_CURSOR_WAITARROW', None)

# Events compatibility with lower pygame versions (-1 when unavailable)
FINGERDOWN = getattr(__pygame, 'FINGERDOWN', -1)
FINGERMOTION = getattr(__pygame, 'FINGERMOTION', -1)
FINGERUP = getattr(__pygame, 'FINGERUP', -1)
| 35.392593
| 113
| 0.75429
| 588
| 4,778
| 5.807823
| 0.27551
| 0.084334
| 0.126501
| 0.079063
| 0.298682
| 0.186823
| 0.186823
| 0.046852
| 0.046852
| 0.046852
| 0
| 0.002664
| 0.135831
| 4,778
| 134
| 114
| 35.656716
| 0.824413
| 0.325241
| 0
| 0
| 0
| 0
| 0.354043
| 0.105838
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.013699
| 0
| 0.013699
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2f29f0872d8843eb8b228cb03ec5eb0946af9b8
| 32,864
|
py
|
Python
|
tracklib/model/model.py
|
xueyuelei/tracklib
|
d33912baf1bebd1605d5e9c8dfc31484c96628cc
|
[
"MIT"
] | 5
|
2020-03-04T11:36:19.000Z
|
2020-06-21T16:49:45.000Z
|
tracklib/model/model.py
|
xueyuelei/tracklib
|
d33912baf1bebd1605d5e9c8dfc31484c96628cc
|
[
"MIT"
] | null | null | null |
tracklib/model/model.py
|
xueyuelei/tracklib
|
d33912baf1bebd1605d5e9c8dfc31484c96628cc
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
'''
REFERENCES:
[1] Y. Bar-Shalom, X. R. Li, and T. Kirubarajan, "Estimation with Applications to Tracking and Navigation," New York: John Wiley and Sons, Inc, 2001.
[2] R. A. Singer, "Estimating Optimal Tracking Filter Performance for Manned Maneuvering Targets," in IEEE Transactions on Aerospace and Electronic Systems, vol. AES-6, no. 4, pp. 473-483, July 1970.
[3] X. Rong Li and V. P. Jilkov, "Survey of maneuvering target tracking. Part I. Dynamic models," in IEEE Transactions on Aerospace and Electronic Systems, vol. 39, no. 4, pp. 1333-1364, Oct. 2003.
[4] W. Koch, "Tracking and Sensor Data Fusion: Methodological Framework and Selected Applications," Heidelberg, Germany: Springer, 2014.
[5] Mo Longbin, Song Xiaoquan, Zhou Yiyu, Sun Zhong Kang and Y. Bar-Shalom, "Unbiased converted measurements for tracking," in IEEE Transactions on Aerospace and Electronic Systems, vol. 34, no. 3, pp. 1023-1027, July 1998
'''
from __future__ import division, absolute_import, print_function
__all__ = [
'F_poly', 'F_singer', 'F_van_keuk', 'Q_poly_dc', 'Q_poly_dd', 'Q_singer',
'Q_van_keuk', 'H_pos_only', 'R_pos_only', 'F_cv', 'f_cv', 'f_cv_jac',
'Q_cv_dc', 'Q_cv_dd', 'H_cv', 'h_cv', 'h_cv_jac', 'R_cv', 'F_ca', 'f_ca',
'f_ca_jac', 'Q_ca_dc', 'Q_ca_dd', 'H_ca', 'h_ca', 'h_ca_jac', 'R_ca',
'F_ct', 'f_ct', 'f_ct_jac', 'Q_ct', 'h_ct', 'h_ct_jac', 'R_ct',
'convert_meas', 'model_switch', 'trajectory_cv', 'trajectory_ca',
'trajectory_ct', 'trajectory_generator', 'trajectory_with_pd',
'trajectory_to_meas'
]
import numbers
import numpy as np
import scipy.linalg as lg
import scipy.stats as st
import scipy.special as sl
from tracklib.utils import sph2cart, pol2cart
def F_poly(order, axis, T):
    '''
    Polynomial state transition matrix, used with discretized continuous-time
    models as well as direct discrete-time models, see sections 6.2 and 6.3
    in [1].

    Parameters
    ----------
    order : int
        Order of the model: 2 for constant velocity, 3 for constant
        acceleration, 4 for constant jerk, etc.
    axis : int
        Number of Cartesian motion directions: 1 for x only, 2 for x and y,
        etc.
    T : float
        Time duration of the propagation interval.

    Returns
    -------
    F : ndarray
        State transition matrix of the linear dynamic model for the given
        order and axis.
    '''
    assert (order >= 1)
    assert (axis >= 1)
    # First row holds T**k / k!; every following row is that sequence
    # shifted one column to the right (upper-triangular Taylor structure).
    powers = np.arange(order)
    top = T**powers / sl.factorial(powers)
    single = np.zeros((order, order))
    for r in range(order):
        single[r, r:] = top[:order - r]
    # Replicate the per-axis block along the diagonal, one copy per axis.
    return np.kron(np.eye(axis), single)
def F_singer(axis, T, tau=20):
    '''
    Singer (Gauss-Markov acceleration) model transition matrix, see
    section 8.2 in [1].

    Parameters
    ----------
    axis : int
        Number of Cartesian motion directions: 1 for x only, 2 for x and y,
        etc.
    T : float
        Time duration of the propagation interval.
    tau : float
        Time constant of the target acceleration autocorrelation; the
        decorrelation time is approximately 2*tau. A reasonable range for
        the Singer model is 5 to 20 seconds; typical aircraft values are
        20s for a slow turn and 5s for an evasive maneuver. Defaults to 20.
        The same constant is assumed for all motion dimensions, so it is a
        scalar.

    Returns
    -------
    F : ndarray
        State transition matrix of the Gauss-Markov dynamic model for the
        given axis.
    '''
    assert (axis >= 1)
    aT = T / tau
    decay = np.exp(-aT)
    # Per-axis block over (position, velocity, acceleration).
    block = np.array([[1.0, T, (aT - 1 + decay) * tau**2],
                      [0.0, 1.0, (1 - decay) * tau],
                      [0.0, 0.0, decay]])
    return np.kron(np.eye(axis), block)
def F_van_keuk(axis, T, tau=20):
    '''
    State transition matrix of the van Keuk dynamic model. This is a direct
    discrete-time model in which the acceleration advances per dimension as
    a[k+1] = exp(-T/tau)*a[k] + std*sqrt(1 - exp(-2*T/tau))*v[k],
    see section 2.2.1 in [4].

    Parameters
    ----------
    axis : int
        Number of Cartesian motion directions: 1 for x only, 2 for x and y,
        etc.
    T : float
        Time duration of the propagation interval.
    tau : float
        Time constant of the target acceleration autocorrelation; the
        decorrelation time is approximately 2*tau. Typical aircraft values
        are 20s for a slow turn and 5s for an evasive maneuver. Defaults to
        20 and is the same scalar for all motion dimensions.

    Returns
    -------
    F : ndarray
        State transition matrix of the Gauss-Markov dynamic model for the
        given axis.
    '''
    assert (axis >= 1)
    # Constant-acceleration Taylor block with the acceleration entry
    # damped by exp(-T/tau) (equivalent to F_poly(3, 1, T) with the
    # bottom-right element replaced).
    block = np.array([[1.0, T, T**2 / 2],
                      [0.0, 1.0, T],
                      [0.0, 0.0, np.exp(-T / tau)]])
    return np.kron(np.eye(axis), block)
def Q_poly_dc(order, axis, T, std):
    '''
    Process noise covariance matrix used with discretized continuous-time
    models, see section 6.2 in [1].

    Parameters
    ----------
    order : int
        Order of the model: 2 for constant velocity, 3 for constant
        acceleration, 4 for constant jerk, etc.
    axis : int
        Number of Cartesian motion directions: 1 for x only, 2 for x and y,
        etc.
    T : float
        Time duration of the propagation interval.
    std : number, list
        Standard deviation (square root of intensity) of the
        continuous-time process noise; a scalar applies to every axis.

    Returns
    -------
    Q : ndarray
        Process noise covariance.
    '''
    assert (order >= 1)
    assert (axis >= 1)
    if isinstance(std, numbers.Number):
        std = [std] * axis
    # Exponents run from order-1 down to 0 so position comes first within
    # each per-axis block; entry (i, j) is T^(i+j+1) / (i! j! (i+j+1)).
    exps = np.arange(order - 1, -1, -1)
    c, r = np.meshgrid(exps, exps)
    base = T**(c + r + 1) / (sl.factorial(c) * sl.factorial(r) * (c + r + 1))
    return np.kron(np.diag(std)**2, base)
def Q_poly_dd(order, axis, T, std, ht=0):
    '''
    Process noise covariance matrix used with direct discrete-time models,
    see section 6.3 in [1].

    Parameters
    ----------
    order : int
        Order of the model: 2 for constant velocity, 3 for constant
        acceleration, 4 for constant jerk, etc.
    axis : int
        Number of Cartesian motion directions: 1 for x only, 2 for x and y,
        etc.
    T : float
        Time duration of the propagation interval.
    std : number, list
        Standard deviation of the discrete-time process noise; a scalar
        applies to every axis.
    ht : int
        Order of the noise above the highest state order. With a
        constant-acceleration state, ht=0 means acceleration noise (DWPA,
        section 6.3.3 in [1]); with a constant-velocity state, ht=1 means
        acceleration noise (DWNA, section 6.3.2 in [1]).

    Returns
    -------
    Q : ndarray
        Process noise covariance.

    Notes
    -----
    For the model to which the alpha filter applies, order=0 and ht=2;
    for the alpha-beta filter, order=1 and ht=1; for the alpha-beta-gamma
    filter, order=2 and ht=0.
    '''
    assert (order >= 1)
    assert (axis >= 1)
    if isinstance(std, numbers.Number):
        std = [std] * axis
    # Noise gain vector: T^k / k! for k descending from ht+order-1 to ht,
    # so the per-axis block is the rank-one outer product gain*gain^T.
    exps = np.arange(ht + order - 1, ht - 1, -1)
    gain = T**exps / sl.factorial(exps)
    return np.kron(np.diag(std)**2, np.outer(gain, gain))
def Q_singer(axis, T, std, tau=20):
    '''
    Process noise covariance matrix of the Singer model, see section 8.2
    in [1].

    Parameters
    ----------
    axis : int
        Number of Cartesian motion directions: 1 for x only, 2 for x and y,
        etc.
    T : float
        Time duration of the propagation interval.
    std : number, list
        Instantaneous standard deviation of the acceleration, modeled as an
        Ornstein-Uhlenbeck process. Assuming the acceleration equals a
        maximum a_M with probability p_M (and -a_M with the same
        probability), zero with probability p_0, and is otherwise uniform
        on [-a_M, a_M], the expected std^2 is (a_M^2 / 3)*(1 + 4*p_M - p_0).
        A scalar applies to every axis.
    tau : float
        Time constant of the target acceleration autocorrelation; the
        decorrelation time is approximately 2*tau. Typical aircraft values
        are 20s for a slow turn and 5s for an evasive maneuver. Defaults to
        20 and is the same scalar for all motion dimensions.

    Returns
    -------
    Q : ndarray
        Process noise covariance.
    '''
    assert (axis >= 1)
    if isinstance(std, numbers.Number):
        std = [std] * axis
    aT = T / tau
    e1 = np.exp(-aT)
    e2 = np.exp(-2 * aT)
    # Closed-form entries of the symmetric per-axis (pos, vel, acc) block.
    q11 = tau**4 * (1 - e2 + 2 * aT + 2 * aT**3 / 3 - 2 * aT**2 - 4 * aT * e1)
    q12 = tau**3 * (e2 + 1 - 2 * e1 + 2 * aT * e1 - 2 * aT + aT**2)
    q13 = tau**2 * (1 - e2 - 2 * aT * e1)
    q22 = tau**2 * (4 * e1 - 3 - e2 + 2 * aT)
    q23 = tau * (e2 + 1 - 2 * e1)
    q33 = 1 - e2
    block = np.array([[q11, q12, q13],
                      [q12, q22, q23],
                      [q13, q23, q33]], dtype=float)
    return np.kron(np.diag(std)**2, block)
def Q_van_keuk(axis, T, std, tau=20):
    '''
    Process noise covariance matrix of the van Keuk dynamic model, see
    section 2.2.1 in [4].

    Parameters
    ----------
    axis : int
        Number of Cartesian motion directions: 1 for x only, 2 for x and y,
        etc.
    T : float
        Time duration of the propagation interval.
    std : number, list
        Instantaneous standard deviation of the acceleration (see Q_singer
        for how it may be chosen); a scalar applies to every axis.
    tau : float
        Time constant of the target acceleration autocorrelation; the
        decorrelation time is approximately 2*tau. Typical aircraft values
        are 20s for a slow turn and 5s for an evasive maneuver. Defaults to
        20 and is the same scalar for all motion dimensions.

    Returns
    -------
    Q : ndarray
        Process noise covariance.
    '''
    assert (axis >= 1)
    if isinstance(std, numbers.Number):
        std = [std] * axis
    # Only the acceleration component receives noise, with variance
    # scaled by (1 - exp(-2T/tau)) per the van Keuk recursion.
    base = (1 - np.exp(-2 * T / tau)) * np.diag([0.0, 0.0, 1.0])
    return np.kron(np.diag(std)**2, base)
def H_pos_only(order, axis):
    '''
    Position-only measurement matrix, used with discretized continuous-time
    models as well as direct discrete-time models, see section 6.5 in [1].

    Parameters
    ----------
    order : int
        Order of the model: 2 for constant velocity, 3 for constant
        acceleration, 4 for constant jerk, etc.
    axis : int
        Number of Cartesian motion directions: 1 for x only, 2 for x and y,
        etc.

    Returns
    -------
    H : ndarray
        The measurement (observation) matrix.
    '''
    assert (order >= 1)
    assert (axis >= 1)
    # Keep every `order`-th row of the identity: exactly the position
    # component of each axis.
    return np.eye(order * axis)[::order]
def R_pos_only(axis, std):
    '''
    Position-only measurement noise covariance matrix; the noise of each
    axis is assumed uncorrelated.

    Parameters
    ----------
    axis : int
        Number of Cartesian motion directions: 1 for x only, 2 for x and y,
        etc.
    std : number, list
        Standard deviation of the measurement noise per axis; a scalar
        applies to every axis.

    Returns
    -------
    R : ndarray
        The measurement noise covariance matrix.
    '''
    assert (axis >= 1)
    stds = [std] * axis if isinstance(std, numbers.Number) else std
    return np.diag(stds)**2
def F_cv(axis, T):
    '''Constant-velocity transition matrix (order-2 F_poly).'''
    return F_poly(2, axis, T)
def f_cv(axis, T):
    '''Return the CV transition function x -> F_cv(axis, T) @ x.'''
    F = F_cv(axis, T)
    def f(x, u=None):
        return np.dot(F, x)
    return f
def f_cv_jac(axis, T):
    '''Return the (constant) Jacobian function of the CV transition.'''
    F = F_cv(axis, T)
    def fjac(x, u=None):
        return F
    return fjac
def Q_cv_dc(axis, T, std):
    '''CV process noise covariance, discretized continuous-time form.'''
    return Q_poly_dc(2, axis, T, std)
def Q_cv_dd(axis, T, std):
    '''CV process noise covariance, direct discrete-time form (DWNA, ht=1).'''
    return Q_poly_dd(2, axis, T, std, ht=1)
def H_cv(axis):
    '''Position-only measurement matrix for the CV model.'''
    return H_pos_only(2, axis)
def h_cv(axis):
    '''Return the (linear) CV measurement function x -> H_cv(axis) @ x.'''
    H = H_cv(axis)
    def h(x):
        return np.dot(H, x)
    return h
def h_cv_jac(axis):
    '''Return the (constant) Jacobian function of the CV measurement.'''
    H = H_cv(axis)
    def hjac(x):
        return H
    return hjac
def R_cv(axis, std):
    '''Measurement noise covariance for the CV model (position only).'''
    return R_pos_only(axis, std)
def F_ca(axis, T):
    '''Constant-acceleration transition matrix (order-3 F_poly).'''
    return F_poly(3, axis, T)
def f_ca(axis, T):
    '''Return the CA transition function x -> F_ca(axis, T) @ x.'''
    F = F_ca(axis, T)
    def f(x, u=None):
        return np.dot(F, x)
    return f
def f_ca_jac(axis, T):
    '''Return the (constant) Jacobian function of the CA transition.'''
    F = F_ca(axis, T)
    def fjac(x, u=None):
        return F
    return fjac
def Q_ca_dc(axis, T, std):
    '''CA process noise covariance, discretized continuous-time form.'''
    return Q_poly_dc(3, axis, T, std)
def Q_ca_dd(axis, T, std):
    '''CA process noise covariance, direct discrete-time form (DWPA, ht=0).'''
    return Q_poly_dd(3, axis, T, std, ht=0)
def H_ca(axis):
    '''Position-only measurement matrix for the CA model.'''
    return H_pos_only(3, axis)
def h_ca(axis):
    '''Return the (linear) CA measurement function x -> H_ca(axis) @ x.'''
    H = H_ca(axis)
    def h(x):
        return np.dot(H, x)
    return h
def h_ca_jac(axis):
    '''Return the (constant) Jacobian function of the CA measurement.'''
    H = H_ca(axis)
    def hjac(x):
        return H
    return hjac
def R_ca(axis, std):
    '''Measurement noise covariance for the CA model (position only).'''
    return R_pos_only(axis, std)
def F_ct(axis, turnrate, T):
    '''
    Coordinated-turn transition matrix with a known turn rate, over the
    state [x, vx, y, vy] (plus a constant-velocity [z, vz] block when
    axis is 3).

    Parameters
    ----------
    axis : int
        Number of Cartesian motion directions, at least 2. The turn takes
        place in the x-y plane.
    turnrate : float
        Turn rate in degrees per unit time.
    T : float
        Time duration of the propagation interval.

    Returns
    -------
    F : ndarray
        Coordinated-turn state transition matrix.
    '''
    assert (axis >= 2)
    w = np.deg2rad(turnrate)
    # Near-zero turn rate: use the constant-velocity limit to avoid
    # dividing by (almost) zero.
    if np.fabs(w) < np.sqrt(np.finfo(w).eps):
        s, c = 0, 1
        s_w, c_w = T, 0
    else:
        s = np.sin(w * T)
        c = np.cos(w * T)
        s_w = s / w
        c_w = (c - 1) / w
    F = np.array([[1, s_w, 0, c_w],
                  [0, c, 0, -s],
                  [0, -c_w, 1, s_w],
                  [0, s, 0, c]], dtype=float)
    if axis == 3:
        # The z axis evolves with an independent constant-velocity block.
        F = lg.block_diag(F, np.array([[1.0, T], [0.0, 1.0]]))
    return F
def f_ct(axis, T):
    '''
    Return the coordinated-turn transition function over the augmented
    state [x, vx, y, vy, omega(, z, vz)], where the turn rate omega is
    part of the state and stored in degrees (converted via np.deg2rad
    before use), see [3].
    '''
    assert (axis >= 2)
    def f(x, u=None):
        omega = np.deg2rad(x[4])
        # Near-zero turn rate: fall back to the CV limit to avoid
        # dividing by (almost) zero.
        if np.fabs(omega) >= np.sqrt(np.finfo(omega).eps):
            wt = omega * T
            sin_wt = np.sin(wt)
            cos_wt = np.cos(wt)
            sin_div = sin_wt / omega
            cos_div = (cos_wt - 1) / omega
        else:
            sin_wt = 0
            cos_wt = 1
            sin_div = T
            cos_div = 0
        F = np.array([[1, sin_div, 0, cos_div], [0, cos_wt, 0, -sin_wt],
                      [0, -cos_div, 1, sin_div], [0, sin_wt, 0, cos_wt]],
                     dtype=float)
        # The turn rate itself propagates unchanged (identity block).
        F = lg.block_diag(F, 1)
        if axis == 3:
            # z axis: constant-velocity block.
            zblock = F_cv(1, T)
            F = lg.block_diag(F, zblock)
        return np.dot(F, x)
    return f
def f_ct_jac(axis, T):
    '''
    Return the Jacobian function of the coordinated-turn transition f_ct
    with respect to the augmented state [x, vx, y, vy, omega(, z, vz)].
    The last column (f0..f3) holds the partial derivatives of position and
    velocity with respect to omega; np.deg2rad appears because omega is
    stored in degrees in the state.
    '''
    assert (axis >= 2)
    def fjac(x, u=None):
        omega = np.deg2rad(x[4])
        if np.fabs(omega) >= np.sqrt(np.finfo(omega).eps):
            wt = omega * T
            sin_wt = np.sin(wt)
            cos_wt = np.cos(wt)
            sin_div = sin_wt / omega
            cos_div = (cos_wt - 1) / omega
            f0 = np.deg2rad(((wt * cos_wt - sin_wt) * x[1] + (1 - cos_wt - wt * sin_wt) * x[3]) / omega**2)
            f1 = np.deg2rad((-x[1] * sin_wt - x[3] * cos_wt) * T)
            f2 = np.deg2rad((wt * (x[1] * sin_wt + x[3] * cos_wt) - (x[1] * (1 - cos_wt) + x[3] * sin_wt)) / omega**2)
            f3 = np.deg2rad((x[1]*cos_wt - x[3]*sin_wt) * T)
        else:
            # Limits of the partials as omega -> 0.
            sin_wt = 0
            cos_wt = 1
            sin_div = T
            cos_div = 0
            f0 = np.deg2rad(-x[3] * T**2 / 2)
            f1 = np.deg2rad(-x[3] * T)
            f2 = np.deg2rad(x[1] * T**2 / 2)
            f3 = np.deg2rad(x[1] * T)
        F = np.array([[1, sin_div, 0, cos_div], [0, cos_wt, 0, -sin_wt],
                      [0, -cos_div, 1, sin_div], [0, sin_wt, 0, cos_wt]],
                     dtype=float)
        F = lg.block_diag(F, 1)
        # Fill the d(state)/d(omega) column.
        F[0, -1] = f0
        F[1, -1] = f1
        F[2, -1] = f2
        F[3, -1] = f3
        if axis == 3:
            zblock = F_cv(1, T)
            F = lg.block_diag(F, zblock)
        return F
    return fjac
def Q_ct(axis, T, std):
    '''
    Process noise covariance of the coordinated-turn model with the turn
    rate as part of the state [x, vx, y, vy, omega(, z, vz)].

    Parameters
    ----------
    axis : int
        Number of Cartesian motion directions, at least 2.
    T : float
        Time duration of the propagation interval.
    std : number, list
        Noise standard deviations, one per axis plus one for the turn rate
        (ordered x, y, omega[, z]); a scalar applies to all of them.

    Returns
    -------
    Q : ndarray
        Process noise covariance matrix.
    '''
    assert (axis >= 2)
    if isinstance(std, numbers.Number):
        std = [std] * (axis + 1)  # extra entry for the turn rate noise
    # DWNA-style noise gain per axis: position gets T^2/2, velocity gets T;
    # the turn rate gets a plain T gain.
    gain = np.array([T**2 / 2, T], dtype=float).reshape(-1, 1)
    G = lg.block_diag(gain, gain, T)
    if axis == 3:
        G = lg.block_diag(G, gain)
    return G @ np.diag(std)**2 @ G.T
def h_ct(axis):
    '''
    Return the measurement function of the coordinated-turn model, which
    extracts the positions from the state [x, vx, y, vy, omega(, z, vz)].
    '''
    assert (axis >= 2)
    # Position-only selector over 2 or 3 axes, with a zero column spliced
    # in at index 4 to skip the turn-rate state.
    n = 3 if axis == 3 else 2
    H = np.insert(np.eye(2 * n)[::2], 4, 0, axis=1)
    def h(x):
        return np.dot(H, x)
    return h
def h_ct_jac(axis):
    '''
    Return the (constant) Jacobian function of the coordinated-turn
    measurement: the position-selection matrix with a zero column for
    the turn-rate state.
    '''
    assert (axis >= 2)
    n = 3 if axis == 3 else 2
    H = np.insert(np.eye(2 * n)[::2], 4, 0, axis=1)
    def hjac(x):
        return H
    return hjac
def R_ct(axis, std):
    '''
    Measurement noise covariance for the coordinated-turn model
    (position only, per-axis uncorrelated).
    '''
    assert (axis >= 2)
    stds = [std] * axis if isinstance(std, numbers.Number) else std
    return np.diag(stds)**2
def convert_meas(z, R, elev=False):
    '''
    Unbiased conversion of a polar/spherical measurement and its noise
    covariance into Cartesian coordinates, see [5].

    Parameters
    ----------
    z : array_like
        Measurement [range, azimuth(, elevation)].
        NOTE(review): angle units of radians are assumed here — inferred
        from the direct use of np.sin/np.cos; confirm against callers.
    R : ndarray
        Measurement noise covariance in polar/spherical coordinates; the
        diagonal holds the variances of range, azimuth(, elevation).
    elev : bool
        True for a 3-D spherical measurement with elevation, False for a
        2-D polar measurement.

    Returns
    -------
    z_cart, R_cart : ndarray
        Debiased Cartesian measurement and the corresponding converted
        covariance.
    '''
    if elev:
        # converted measurement, debiased by the multiplicative factors
        # lamb_az/lamb_el = E[cos(noise)] of the angle noises
        r, az, el = z[0], z[1], z[2]
        var_r, var_az, var_el = R[0, 0], R[1, 1], R[2, 2]
        lamb_az = np.exp(-var_az / 2)
        lamb_el = np.exp(-var_el / 2)
        z_cart = np.array(sph2cart(r, az, el), dtype=float)
        z_cart[0] = z_cart[0] / lamb_az / lamb_el
        z_cart[1] = z_cart[1] / lamb_az / lamb_el
        z_cart[2] = z_cart[2] / lamb_el
        # converted covariance (closed-form second moments from [5])
        r11 = (1 / (lamb_az * lamb_el)**2 - 2) * (r * np.cos(az) * np.cos(el))**2 + (r**2 + var_r) * (1 + lamb_az**4 * np.cos(2 * az)) * (1 + lamb_el**4 * np.cos(2 * el)) / 4
        r22 = (1 / (lamb_az * lamb_el)**2 - 2) * (r * np.sin(az) * np.cos(el))**2 + (r**2 + var_r) * (1 - lamb_az**4 * np.cos(2 * az)) * (1 + lamb_el**4 * np.cos(2 * el)) / 4
        r33 = (1 / lamb_el**2 - 2) * (r * np.sin(el))**2 + (r**2 + var_r) * (1 - lamb_el**4 * np.cos(2 * el)) / 2
        r12 = (1 / (lamb_az * lamb_el)**2 - 2) * r**2 * np.sin(az) * np.cos(az) * np.cos(el)**2 + (r**2 + var_r) * lamb_az**4 * np.sin(2 * az) * (1 + lamb_el**4 * np.cos(2 * el)) / 4
        r13 = (1 / (lamb_az * lamb_el**2) - 1 / lamb_az - lamb_az) * r**2 * np.cos(az) * np.sin(el) * np.cos(el) + (r**2 + var_r) * lamb_az * lamb_el**4 * np.cos(az) * np.sin(2 * el) / 2
        r23 = (1 / (lamb_az * lamb_el**2) - 1 / lamb_az - lamb_az) * r**2 * np.sin(az) * np.sin(el) * np.cos(el) + (r**2 + var_r) * lamb_az * lamb_el**4 * np.sin(az) * np.sin(2 * el) / 2
        R_cart = np.array([[r11, r12, r13], [r12, r22, r23], [r13, r23, r33]], dtype=float)
    else:
        # converted measurement (2-D polar case)
        r, az = z[0], z[1]
        var_r, var_az = R[0, 0], R[1, 1]
        lamb_az = np.exp(-var_az / 2)
        z_cart = np.array(pol2cart(r, az), dtype=float) / lamb_az
        # converted covariance
        r11 = (r**2 + var_r) / 2 * (1 + lamb_az**4 * np.cos(2 * az)) + (1 / lamb_az**2 - 2) * (r * np.cos(az))**2
        r22 = (r**2 + var_r) / 2 * (1 - lamb_az**4 * np.cos(2 * az)) + (1 / lamb_az**2 - 2) * (r * np.sin(az))**2
        r12 = (r**2 + var_r) / 2 * lamb_az**4 * np.sin(2 * az) + (1 / lamb_az**2 - 2) * r**2 * np.sin(az) * np.cos(az)
        R_cart = np.array([[r11, r12], [r12, r22]], dtype=float)
    return z_cart, R_cart
def state_switch(state, type_in, type_out):
    '''
    Convert a state vector between model representations.

    Supported types are 'cv' ([x, vx, y, vy, ...]), 'ca'
    ([x, vx, ax, y, vy, ay, ...]) and 'ct' ([x, vx, y, vy, omega(, z, vz)]).
    Components missing in the target model (accelerations, turn rate) are
    filled with zeros; components absent from it are dropped. The input is
    copied, not modified.
    '''
    dim = len(state)
    state = state.copy()
    if type_in == 'cv':
        axis = dim // 2
        if type_out == 'cv':
            return state
        elif type_out == 'ca':
            # Insert zero acceleration rows at every third position.
            ca_dim = 3 * axis
            sel = np.setdiff1d(range(ca_dim), range(2, ca_dim, 3))
            slct = np.eye(ca_dim)[:, sel]
            stmp = np.dot(slct, state)
            return stmp
        elif type_out == 'ct':
            # Append a zero turn-rate entry after [x, vx, y, vy].
            slct = np.eye(5, 4)
            if axis == 3:
                slct = lg.block_diag(slct, np.eye(2))
            stmp = np.dot(slct, state)
            return stmp
        else:
            raise ValueError('unknown output type: %s' % type_out)
    elif type_in == 'ca':
        axis = dim // 3
        if type_out == 'cv':
            # Drop the acceleration rows.
            ca_dim = 3 * axis
            sel = np.setdiff1d(range(ca_dim), range(2, ca_dim, 3))
            slct = np.eye(ca_dim)[sel]
            stmp = np.dot(slct, state)
            return stmp
        elif type_out == 'ca':
            return state
        elif type_out == 'ct':
            # ca to cv
            ca_dim = 3 * axis
            sel = np.setdiff1d(range(ca_dim), range(2, ca_dim, 3))
            slct = np.eye(ca_dim)[sel]
            stmp = np.dot(slct, state)
            # cv to ct
            slct = np.eye(5, 4)
            if axis == 3:
                slct = lg.block_diag(slct, np.eye(2))
            stmp = np.dot(slct, stmp)
            return stmp
        else:
            raise ValueError('unknown output type: %s' % type_out)
    elif type_in == 'ct':
        # ct state length is 2*axis + 1 (turn rate), so integer division
        # by 2 still recovers the axis count (5//2 == 2, 7//2 == 3).
        axis = dim // 2
        if type_out == 'cv':
            # Drop the turn-rate entry.
            slct = np.eye(4, 5)
            if axis == 3:
                slct = lg.block_diag(slct, np.eye(2))
            stmp = np.dot(slct, state)
            return stmp
        elif type_out == 'ca':
            # ct to cv
            slct = np.eye(4, 5)
            if axis == 3:
                slct = lg.block_diag(slct, np.eye(2))
            stmp = np.dot(slct, state)
            # cv to ca
            ca_dim = 3 * axis
            sel = np.setdiff1d(range(ca_dim), range(2, ca_dim, 3))
            slct = np.eye(ca_dim)[:, sel]
            stmp = np.dot(slct, stmp)
            return stmp
        elif type_out == 'ct':
            return state
        else:
            raise ValueError('unknown output type: %s' % type_out)
    else:
        raise ValueError('unknown input type: %s' % type_in)
def cov_switch(cov, type_in, type_out):
    '''
    Convert a covariance matrix between the 'cv', 'ca' and 'ct' model
    representations (companion of state_switch).

    Rows/columns of components missing in the target model are added with
    a fixed diagonal variance `uncertainty`; rows/columns absent from it
    are dropped. The input is copied, not modified.
    '''
    dim = len(cov)
    cov = cov.copy()
    # Variance assigned to state components newly introduced by the switch
    # (accelerations, turn rate).
    uncertainty = 100
    if type_in == 'cv':
        axis = dim // 2
        if type_out == 'cv':
            return cov
        elif type_out == 'ca':
            ca_dim = 3 * axis
            sel_diff = range(2, ca_dim, 3)
            sel = np.setdiff1d(range(ca_dim), sel_diff)
            slct = np.eye(ca_dim)[:, sel]
            ctmp = slct @ cov @ slct.T
            ctmp[sel_diff, sel_diff] = uncertainty
            return ctmp
        elif type_out == 'ct':
            slct = np.eye(5, 4)
            if axis == 3:
                slct = lg.block_diag(slct, np.eye(2))
            ctmp = slct @ cov @ slct.T
            ctmp[4, 4] = uncertainty
            return ctmp
        else:
            raise ValueError('unknown output type: %s' % type_out)
    elif type_in == 'ca':
        axis = dim // 3
        if type_out == 'cv':
            ca_dim = 3 * axis
            sel = np.setdiff1d(range(ca_dim), range(2, ca_dim, 3))
            slct = np.eye(ca_dim)[sel]
            ctmp = slct @ cov @ slct.T
            return ctmp
        elif type_out == 'ca':
            return cov
        elif type_out == 'ct':
            # ca to cv
            ca_dim = 3 * axis
            sel = np.setdiff1d(range(ca_dim), range(2, ca_dim, 3))
            slct = np.eye(ca_dim)[sel]
            ctmp = slct @ cov @ slct.T
            # cv to ct
            slct = np.eye(5, 4)
            if axis == 3:
                slct = lg.block_diag(slct, np.eye(2))
            ctmp = slct @ ctmp @ slct.T
            ctmp[4, 4] = uncertainty
            return ctmp
        else:
            raise ValueError('unknown output type: %s' % type_out)
    elif type_in == 'ct':
        # ct dimension is 2*axis + 1; dim // 2 recovers the axis count.
        axis = dim // 2
        if type_out == 'cv':
            slct = np.eye(4, 5)
            if axis == 3:
                slct = lg.block_diag(slct, np.eye(2))
            ctmp = slct @ cov @ slct.T
            return ctmp
        elif type_out == 'ca':
            # ct to cv
            slct = np.eye(4, 5)
            if axis == 3:
                slct = lg.block_diag(slct, np.eye(2))
            ctmp = slct @ cov @ slct.T
            # cv to ca
            ca_dim = 3 * axis
            sel_diff = range(2, ca_dim, 3)
            sel = np.setdiff1d(range(ca_dim), sel_diff)
            slct = np.eye(ca_dim)[:, sel]
            ctmp = slct @ ctmp @ slct.T
            ctmp[sel_diff, sel_diff] = uncertainty
            return ctmp
        elif type_out == 'ct':
            return cov
        else:
            raise ValueError('unknown output type: %s' % type_out)
    else:
        raise ValueError('unknown input type: %s' % type_in)
def model_switch(x, type_in, type_out):
    '''
    Convert a state vector, a covariance matrix, or a (state, covariance)
    pair between the 'cv', 'ca' and 'ct' model representations.

    Parameters
    ----------
    x : ndarray or sequence
        A 1-D ndarray is treated as a state, a 2-D ndarray as a covariance,
        and any other indexable object as the pair (state, covariance).
    type_in, type_out : str
        Model types, one of 'cv', 'ca' or 'ct'.

    Returns
    -------
    The converted state, covariance, or (state, covariance) tuple.

    Raises
    ------
    ValueError
        If an ndarray input is neither 1-D nor 2-D.
    TypeError
        If `x` is neither an ndarray nor indexable.
    '''
    # The original computed `dim = len(x)` here but never used it; it could
    # raise an uninformative TypeError for unsized inputs before the
    # explicit checks below, so it has been removed.
    if isinstance(x, np.ndarray):
        if x.ndim == 1:
            return state_switch(x, type_in, type_out)
        elif x.ndim == 2:
            return cov_switch(x, type_in, type_out)
        else:
            raise ValueError("shape of 'x' must be 1 or 2")
    elif hasattr(x, '__getitem__'):
        state = state_switch(x[0], type_in, type_out)
        cov = cov_switch(x[1], type_in, type_out)
        return state, cov
    else:
        raise TypeError("error 'x' type: '%s'" % x.__class__.__name__)
def trajectory_cv(state, interval, length, velocity):
    '''
    Propagate a constant-velocity segment of `length` steps from `state`.

    Parameters
    ----------
    state : ndarray
        CV state [x, vx, y, vy(, z, vz)] at the start of the segment.
    interval : float
        Time step between consecutive states.
    length : int
        Number of propagation steps.
    velocity : number or list
        A scalar sets the speed along the current velocity direction; a
        list sets per-axis velocities, with None entries keeping the
        current per-axis value.

    Returns
    -------
    traj_cv : ndarray
        The `length` propagated states, one per row.
    head_cv : ndarray
        The (velocity-adjusted) start state of the segment.
    '''
    head = state.copy()
    dim = head.size
    order = 2
    axis = dim // order
    traj_cv = np.zeros((length, dim))
    vel = velocity
    # View onto the velocity entries of `head` (strided slice).
    cur_vel = head[1:dim:order]
    if isinstance(vel, numbers.Number):
        # Keep the current direction, rescale the speed.
        vel *= (cur_vel / lg.norm(cur_vel))
    else:
        vel = [cur_vel[i] if vel[i] is None else vel[i] for i in range(axis)]
    cur_vel[:] = vel  # it will also change the head
    head_cv = head
    F = F_cv(axis, interval)
    for i in range(length):
        head = np.dot(F, head)
        traj_cv[i] = head
    return traj_cv, head_cv
def trajectory_ca(state, interval, length, acceleration):
    '''
    Propagate a constant-acceleration segment of `length` steps from
    `state`.

    Parameters
    ----------
    state : ndarray
        CA state [x, vx, ax, y, vy, ay(, z, vz, az)] at the segment start.
    interval : float
        Time step between consecutive states.
    length : int
        Number of propagation steps.
    acceleration : number or list
        A scalar sets the acceleration magnitude; a list sets per-axis
        accelerations, with None entries keeping the current value.

    Returns
    -------
    traj_ca : ndarray
        The `length` propagated states, one per row.
    head_ca : ndarray
        The (acceleration-adjusted) start state of the segment.
    '''
    head = state.copy()
    dim = state.size
    order = 3
    axis = dim // order
    traj_ca = np.zeros((length, dim))
    acc = acceleration
    cur_vel = head[1:dim:order]
    cur_acc = head[2:dim:order]
    if isinstance(acc, numbers.Number):
        # NOTE(review): a scalar acceleration is aligned with the current
        # velocity direction (cur_vel), not the current acceleration
        # direction — confirm this is intended.
        acc *= (cur_vel / lg.norm(cur_vel))
    else:
        acc = [cur_acc[i] if acc[i] is None else acc[i] for i in range(axis)]
    cur_acc[:] = acc  # it will also change the head
    head_ca = head
    F = F_ca(axis, interval)
    for i in range(length):
        head = np.dot(F, head)
        traj_ca[i] = head
    return traj_ca, head_ca
def trajectory_ct(state, interval, length, turnrate, velocity=None):
    '''
    Propagate a coordinated-turn segment of `length` steps from `state`.

    Parameters
    ----------
    state : ndarray
        CV-layout state [x, vx, y, vy(, z, vz)]; the turn rate is supplied
        separately, not stored in the state.
    interval : float
        Time step between consecutive states.
    length : int
        Number of propagation steps.
    turnrate : float
        Turn rate in degrees per unit time (passed to F_ct).
    velocity : number, list or None
        Optional velocity override applied before propagation, with the
        same semantics as in trajectory_cv; None keeps the current
        velocity.

    Returns
    -------
    traj_ct : ndarray
        The `length` propagated states, one per row.
    head_ct : ndarray
        The (velocity-adjusted) start state of the segment.
    '''
    head = state.copy()
    dim = state.size
    order = 2
    axis = dim // order
    traj_ct = np.zeros((length, dim))
    if velocity is not None:
        vel = velocity
        cur_vel = head[1:dim:order]
        if isinstance(vel, numbers.Number):
            # Keep the current direction, rescale the speed.
            vel *= (cur_vel / lg.norm(cur_vel))
        else:
            vel = [cur_vel[i] if vel[i] is None else vel[i] for i in range(axis)]
        cur_vel[:] = vel
    head_ct = head
    F = F_ct(axis, turnrate, interval)
    for i in range(length):
        head = np.dot(F, head)
        traj_ct[i] = head
    return traj_ct, head_ct
def trajectory_generator(record):
    '''
    Generate multi-segment trajectories (and noisy measurements) from a
    declarative `record`, e.g.:

    record = {
        'interval': [1, 1],
        'start':
        [
            [0, 0, 0],
            [0, 5, 0]
        ],
        'pattern':
        [
            [
                {'model': 'cv', 'length': 100, 'velocity': [250, 250, 0]},
                {'model': 'ct', 'length': 25, 'turnrate': 30}
            ],
            [
                {'model': 'cv', 'length': 100, 'velocity': [250, 250, 0]},
                {'model': 'ct', 'length': 30, 'turnrate': 30, 'velocity': 30}
            ]
        ],
        'noise':
        [
            10 * np.eye(3), 10 * np.eye(3)
        ],
        'pd':
        [
            0.9, 0.9
        ],
        'entries': 2
    }

    Returns (trajs_state, trajs_meas): per entry, the 9-dim CA-layout
    states and the position measurements with additive Gaussian noise.
    '''
    # Internal layout: 9-dim CA state [x, vx, ax, y, vy, ay, z, vz, az].
    dim, order, axis = 9, 3, 3
    ca_sel = range(dim)
    acc_sel = range(2, dim, order)       # acceleration components
    cv_sel = np.setdiff1d(ca_sel, acc_sel)   # position/velocity components
    ct_sel = np.setdiff1d(ca_sel, acc_sel)   # ct uses the same 6-dim subset
    # Column indices at which zero accelerations are re-inserted to lift a
    # 6-dim CV/CT result back to the 9-dim CA layout.
    insert_sel = [2, 4, 6]
    interval = record['interval']
    start = record['start']
    pattern = record['pattern']
    noise = record['noise']
    entries = record['entries']
    trajs_state = []
    for i in range(entries):
        # kron with [1, 0, 0] puts the start positions at indices 0, 3, 6
        # with zero velocity and acceleration.
        head = np.kron(start[i], [1., 0., 0.])
        state = np.kron(start[i], [1., 0., 0.]).reshape(1, -1)
        for pat in pattern[i]:
            if pat['model'] == 'cv':
                ret, head_cv = trajectory_cv(head[cv_sel], interval[i], pat['length'], pat['velocity'])
                ret = np.insert(ret, insert_sel, 0, axis=1)
                head = ret[-1, ca_sel]
                state[-1, acc_sel] = 0  # set the acceleration of previous state to zero
                state[-1, cv_sel] = head_cv  # change the velocity of previous state
                state = np.vstack((state, ret))
            elif pat['model'] == 'ca':
                ret, head_ca = trajectory_ca(head, interval[i], pat['length'], pat['acceleration'])
                head = ret[-1, ca_sel]
                state[-1, ca_sel] = head_ca  # change the acceleration of previous state
                state = np.vstack((state, ret))
            elif pat['model'] == 'ct':
                if 'velocity' in pat:
                    ret, head_ct = trajectory_ct(head[ct_sel], interval[i], pat['length'], pat['turnrate'], pat['velocity'])
                else:
                    ret, head_ct = trajectory_ct(head[ct_sel], interval[i], pat['length'], pat['turnrate'])
                ret = np.insert(ret, insert_sel, 0, axis=1)
                head = ret[-1, ca_sel]
                state[-1, acc_sel] = 0
                state[-1, ct_sel] = head_ct
                state = np.vstack((state, ret))
            else:
                raise ValueError('invalid model')
        trajs_state.append(state)
    # add noise: position measurements with additive Gaussian noise
    trajs_meas = []
    for i in range(entries):
        H = H_ca(axis)
        traj_len = trajs_state[i].shape[0]
        noi = st.multivariate_normal.rvs(cov=noise[i], size=traj_len)
        trajs_meas.append(np.dot(trajs_state[i], H.T) + noi)
    return trajs_state, trajs_meas
def trajectory_with_pd(trajs_meas, pd=0.8):
    '''
    Simulate missed detections: each measurement row of every trajectory
    in `trajs_meas` is kept with probability `pd` and replaced by NaN
    otherwise. The arrays are modified in place and the list is returned.
    '''
    for measurements in trajs_meas:
        draws = st.uniform.rvs(size=measurements.shape[0])
        measurements[draws >= pd] = np.nan
    return trajs_meas
def trajectory_to_meas(trajs_meas, lamb=0):
    '''
    Regroup per-trajectory measurements into per-scan measurement sets and
    add Poisson-distributed clutter.

    Parameters
    ----------
    trajs_meas : list of ndarray
        One (length, 3) measurement array per trajectory; NaN rows mark
        missed detections and are skipped.
    lamb : float
        Mean number of clutter points per scan; clutter is drawn uniformly
        over the bounding box of all measurements.

    Returns
    -------
    trajs : list of ndarray
        One (num_meas, 3) array per scan (time index), containing the
        detected trajectory measurements plus clutter.
    '''
    trajs_num = len(trajs_meas)
    min_x, max_x = np.inf, -np.inf
    min_y, max_y = np.inf, -np.inf
    min_z, max_z = np.inf, -np.inf
    max_traj_len = 0
    # Bounding box over all trajectories, used as the clutter region.
    # NOTE(review): min()/max() propagate NaN if missed detections were
    # already inserted by trajectory_with_pd — consider np.nanmin/np.nanmax.
    for traj in trajs_meas:
        min_x, max_x = min(min_x, traj[:, 0].min()), max(max_x, traj[:, 0].max())
        min_y, max_y = min(min_y, traj[:, 1].min()), max(max_y, traj[:, 1].max())
        min_z, max_z = min(min_z, traj[:, 2].min()), max(max_z, traj[:, 2].max())
        max_traj_len = max(max_traj_len, len(traj))
    trajs = []
    for i in range(max_traj_len):
        tmp = []
        for j in range(trajs_num):
            # Skip trajectories that ended or were not detected at scan i.
            if i >= len(trajs_meas[j]) or np.any(np.isnan(trajs_meas[j][i])):
                continue
            tmp.append(trajs_meas[j][i])
        clutter_num = st.poisson.rvs(lamb)
        for j in range(clutter_num):
            x = np.random.uniform(min_x, max_x)
            y = np.random.uniform(min_y, max_y)
            z = np.random.uniform(min_z, max_z)
            tmp.append(np.array([x, y, z], dtype=float))
        tmp = np.array(tmp, dtype=float).reshape(-1, 3)
        trajs.append(tmp)
    return trajs
| 32.538614
| 222
| 0.549781
| 5,103
| 32,864
| 3.423672
| 0.084852
| 0.009158
| 0.012363
| 0.011848
| 0.685021
| 0.654456
| 0.616049
| 0.583081
| 0.565222
| 0.541469
| 0
| 0.0359
| 0.320229
| 32,864
| 1,009
| 223
| 32.570862
| 0.74615
| 0.297651
| 0
| 0.587459
| 0
| 0
| 0.034658
| 0
| 0
| 0
| 0
| 0
| 0.033003
| 1
| 0.092409
| false
| 0
| 0.011551
| 0.033003
| 0.226073
| 0.00165
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2f3e2812670f2833f39a5b2980f1ac2b7819f19
| 1,229
|
py
|
Python
|
benchbuild/engine.py
|
sturmianseq/benchbuild
|
e3cc1a24e877261e90baf781aa67a9d6f6528dac
|
[
"MIT"
] | 11
|
2017-10-05T08:59:35.000Z
|
2021-05-29T01:43:07.000Z
|
benchbuild/engine.py
|
sturmianseq/benchbuild
|
e3cc1a24e877261e90baf781aa67a9d6f6528dac
|
[
"MIT"
] | 326
|
2016-07-12T08:11:43.000Z
|
2022-03-28T07:10:11.000Z
|
benchbuild/engine.py
|
sturmianseq/benchbuild
|
e3cc1a24e877261e90baf781aa67a9d6f6528dac
|
[
"MIT"
] | 13
|
2016-06-17T12:13:35.000Z
|
2022-01-04T16:09:12.000Z
|
"""
Orchestrate experiment execution.
"""
import typing as tp
import attr
from benchbuild.experiment import Experiment
from benchbuild.project import Project
from benchbuild.utils import actions, tasks
# Type aliases used throughout this module for readability.
ExperimentCls = tp.Type[Experiment]
Experiments = tp.List[ExperimentCls]
ProjectCls = tp.Type[Project]
Projects = tp.List[ProjectCls]
ExperimentProject = tp.Tuple[ExperimentCls, ProjectCls]
Actions = tp.Sequence[actions.Step]
StepResults = tp.List[actions.StepResult]
@attr.s
class Experimentator:
    """Drive a set of experiments over a set of projects.

    The action plan is generated lazily from (experiments x projects)
    via ``tasks.generate_plan`` and cached in ``_plan``.
    """
    experiments: Experiments = attr.ib()
    projects: Projects = attr.ib()
    # Cached action plan; populated on the first call to plan().
    _plan: tp.Sequence[actions.Step] = attr.ib(init=False, default=None)
    def plan(self) -> Actions:
        """Return the cached action plan, generating it on first use."""
        # NOTE(review): `not self._plan` also regenerates when the plan is
        # an empty sequence — confirm that this is intended.
        if not self._plan:
            self._plan = tasks.generate_plan(self.experiments, self.projects)
        return self._plan
    @property
    def num_actions(self) -> int:
        """Total number of leaf actions across all top-level plan steps."""
        p = self.plan()
        return sum([len(child) for child in p])
    def start(self) -> StepResults:
        """Execute the plan and return the step results."""
        p = self.plan()
        # Prepare project environment.
        return tasks.execute_plan(p)
    def print_plan(self) -> None:
        """Print the number of actions followed by the plan itself."""
        p = self.plan()
        print("Number of actions to execute: {}".format(self.num_actions))
        print(*p)
| 25.604167
| 77
| 0.68511
| 151
| 1,229
| 5.516556
| 0.390728
| 0.057623
| 0.032413
| 0.05042
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.204231
| 1,229
| 47
| 78
| 26.148936
| 0.851738
| 0.051261
| 0
| 0.09375
| 0
| 0
| 0.027634
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.15625
| 0
| 0.5
| 0.09375
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2f4e04a8614d8edbaff0777a5f1c47f01d09f5f
| 6,751
|
py
|
Python
|
misc_code/fcn_loss_layer.py
|
kbardool/mrcnn3
|
f4cbb1e34de97ab08558b56fb7362647436edbd7
|
[
"MIT"
] | 7
|
2018-08-07T13:56:32.000Z
|
2021-04-06T11:07:20.000Z
|
misc_code/fcn_loss_layer.py
|
kbardool/Contextual-Inference-V2
|
f4cbb1e34de97ab08558b56fb7362647436edbd7
|
[
"MIT"
] | null | null | null |
misc_code/fcn_loss_layer.py
|
kbardool/Contextual-Inference-V2
|
f4cbb1e34de97ab08558b56fb7362647436edbd7
|
[
"MIT"
] | 1
|
2019-02-01T06:52:18.000Z
|
2019-02-01T06:52:18.000Z
|
"""
Mask R-CNN
Dataset functions and classes.
Copyright (c) 2017 Matterport, Inc.
Licensed under the MIT License (see LICENSE for details)
Written by Waleed Abdulla
"""
import numpy as np
import tensorflow as tf
import keras.backend as KB
import keras.layers as KL
import keras.initializers as KI
import keras.engine as KE
import mrcnn.utils as utils
from mrcnn.loss import smooth_l1_loss
import pprint
pp = pprint.PrettyPrinter(indent=2, width=100)
##-----------------------------------------------------------------------
## FCN loss
##-----------------------------------------------------------------------
def fcn_loss_graph(target_masks, pred_masks):
# def fcn_loss_graph(input):
#     target_masks, pred_masks = input
    """Smooth-L1 loss between target and predicted masks for the FCN head.

    target_masks: [batch, height, width, num_classes].
    pred_masks: [batch, height, width, num_classes] float32 tensor

    NOTE(review): an earlier docstring described this as "binary
    cross-entropy", but the implementation below uses smooth_l1_loss.
    """
    # Reshape for simplicity. Merge first two dimensions into one.
    print('\n    fcn_loss_graph ' )
    print('    target_masks shape :', target_masks.get_shape())
    print('    pred_masks shape :', pred_masks.get_shape())
    mask_shape = tf.shape(target_masks)
    print('    mask_shape shape :', mask_shape.shape)
    target_masks = KB.reshape(target_masks, (-1, mask_shape[1], mask_shape[2]))
    print('    target_masks shape :', target_masks.shape)
    pred_shape = tf.shape(pred_masks)
    print('    pred_shape shape :', pred_shape.shape)
    pred_masks = KB.reshape(pred_masks, (-1, pred_shape[1], pred_shape[2]))
    print('    pred_masks shape :', pred_masks.get_shape())
    # Compute binary cross entropy. If no positive ROIs, then return 0.
    # shape: [batch, roi, num_classes]
    # Smooth-L1 Loss
    # Guard against an empty target tensor: return 0 loss in that case.
    loss = KB.switch(tf.size(target_masks) > 0,
                     smooth_l1_loss(y_true=target_masks, y_pred=pred_masks),
                     tf.constant(0.0))
    loss = KB.mean(loss)
    loss = KB.reshape(loss, [1, 1])
    print('    loss type is :', type(loss))
    return loss
##-----------------------------------------------------------------------
## FCN loss for L2 Normalized graph
##-----------------------------------------------------------------------
def fcn_norm_loss_graph(target_masks, pred_masks):
    '''
    Smooth-L1 loss between L2-normalized target and predicted heatmaps.

    NOTE(review): the original docstring said "binary cross-entropy";
    the code below applies smooth_l1_loss after L2-normalizing both
    inputs over the flattened spatial axis.

    target_masks: [batch, height, width, num_classes].
    pred_masks: [batch, height, width, num_classes] float32 tensor

    Returns a [1, 1] tensor holding the mean loss (0.0 when the target
    is empty).
    '''
    print(type(target_masks))
    pp.pprint(dir(target_masks))
    # Reshape for simplicity. Merge first two dimensions into one.
    print('\n fcn_norm_loss_graph ' )
    print(' target_masks shape :', target_masks.shape)
    print(' pred_masks shape :', pred_masks.shape)
    # L2-normalize predictions: flatten each sample per channel,
    # normalize along the flattened axis, restore the original shape.
    print('\n L2 normalization ------------------------------------------------------')
    output_shape=KB.int_shape(pred_masks)
    print(' output shape is :' , output_shape, ' ', pred_masks.get_shape(), pred_masks.shape, tf.shape(pred_masks))
    output_flatten = KB.reshape(pred_masks, (pred_masks.shape[0], -1, pred_masks.shape[-1]) )
    output_norm1 = KB.l2_normalize(output_flatten, axis = 1)
    output_norm = KB.reshape(output_norm1, KB.shape(pred_masks) )
    print(' output_flatten : ', KB.int_shape(output_flatten) , ' Keras tensor ', KB.is_keras_tensor(output_flatten) )
    print(' output_norm1 : ', KB.int_shape(output_norm1) , ' Keras tensor ', KB.is_keras_tensor(output_norm1) )
    print(' output_norm final : ', KB.int_shape(output_norm) , ' Keras tensor ', KB.is_keras_tensor(output_norm) )
    pred_masks1 = output_norm
    # Same L2 normalization for the (gaussian) target heatmaps.
    print('\n L2 normalization ------------------------------------------------------')
    gauss_flatten = KB.reshape(target_masks, (target_masks.shape[0], -1, target_masks.shape[-1]) )
    gauss_norm1 = KB.l2_normalize(gauss_flatten, axis = 1)
    gauss_norm = KB.reshape(gauss_norm1, KB.shape(target_masks))
    print(' guass_flatten : ', KB.int_shape(gauss_flatten), 'Keras tensor ', KB.is_keras_tensor(gauss_flatten) )
    print(' gauss_norm shape : ', KB.int_shape(gauss_norm1) , 'Keras tensor ', KB.is_keras_tensor(gauss_norm1) )
    print(' gauss_norm final shape: ', KB.int_shape(gauss_norm) , 'Keras tensor ', KB.is_keras_tensor(gauss_norm) )
    print(' complete')
    target_masks1 = gauss_norm
    # Merge dims as in fcn_loss_graph (row-major reshape; note it
    # interleaves the channel axis -- TODO confirm intended).
    mask_shape = tf.shape(target_masks1)
    print(' mask_shape shape :', mask_shape.shape)
    target_masks1 = KB.reshape(target_masks1, (-1, mask_shape[1], mask_shape[2]))
    print(' target_masks shape :', target_masks1.shape)
    pred_shape = tf.shape(pred_masks1)
    print(' pred_shape shape :', pred_shape.shape)
    pred_masks1 = KB.reshape(pred_masks1, (-1, pred_shape[1], pred_shape[2]))
    print(' pred_masks shape :', pred_masks1.get_shape())
    # Guard against an empty target, then take the mean smooth-L1 loss
    # reshaped to [1, 1] for the downstream loss layer.
    loss = KB.switch(tf.size(target_masks1) > 0,
                     smooth_l1_loss(y_true=target_masks1, y_pred=pred_masks1),
                     tf.constant(0.0))
    loss = KB.mean(loss)
    loss = KB.reshape(loss, [1, 1])
    print(' loss type is :', type(loss))
    return loss
class FCNLossLayer(KE.Layer):
    """Keras layer computing the two FCN mask losses.

    call() takes [target_masks, pred_masks] and returns
    [fcn_loss, fcn_norm_loss], each a [1, 1] tensor (see fcn_loss_graph
    and fcn_norm_loss_graph).
    """

    def __init__(self, config=None, **kwargs):
        super().__init__(**kwargs)
        print('>>> FCN Loss Layer : initialization')
        # Stored for parity with the other model layers; not read here.
        self.config = config

    def call(self, inputs):
        """inputs[0]: target heatmaps, inputs[1]: predicted heatmaps."""
        print('\n FCN Loss Layer : call')
        print(' target_masks .shape/type :', inputs[0].shape)  # , type(inputs[0]))
        print(' pred_masks shape/type :', inputs[1].shape)  # , type(inputs[1]))
        target_masks = inputs[0]
        pred_masks = inputs[1]
        # Fix: removed two KB.placeholder() assignments that were dead
        # code -- they were unconditionally overwritten by the two graph
        # calls below.
        loss = fcn_loss_graph(target_masks, pred_masks)
        norm_loss = fcn_norm_loss_graph(target_masks, pred_masks)
        return [loss, norm_loss]

    def compute_output_shape(self, input_shape):
        # may need to change dimensions of first return from IMAGE_SHAPE to MAX_DIM
        # NOTE(review): (1) is the int 1, not a tuple; the losses are
        # [1, 1] tensors -- confirm against the Keras version in use.
        return [(1), (1)]
| 40.915152
| 123
| 0.578877
| 821
| 6,751
| 4.518879
| 0.168088
| 0.067925
| 0.037736
| 0.024259
| 0.53558
| 0.486523
| 0.474394
| 0.371698
| 0.278167
| 0.255526
| 0
| 0.017889
| 0.254777
| 6,751
| 165
| 124
| 40.915152
| 0.719539
| 0.20708
| 0
| 0.229885
| 0
| 0
| 0.19083
| 0.020548
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057471
| false
| 0
| 0.103448
| 0.011494
| 0.218391
| 0.390805
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2f6b4c27e7561e29dbb147f768e0c58a7d09bb7
| 2,150
|
py
|
Python
|
mysticbit/plots.py
|
Connossor/mystic-bit
|
f57f471d3d154560d23bc9eff17fd5b8f284684c
|
[
"MIT"
] | 6
|
2018-11-23T20:13:53.000Z
|
2019-02-25T15:54:55.000Z
|
mysticbit/plots.py
|
Connossor/mystic-bit
|
f57f471d3d154560d23bc9eff17fd5b8f284684c
|
[
"MIT"
] | null | null | null |
mysticbit/plots.py
|
Connossor/mystic-bit
|
f57f471d3d154560d23bc9eff17fd5b8f284684c
|
[
"MIT"
] | 11
|
2018-11-23T20:55:44.000Z
|
2021-12-20T17:25:24.000Z
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
def plot_well_map(df_logs, fig_size=(10, 10)):
    """Scatter map of nearby well locations, labelled with well names."""
    fig, axis = plt.subplots(figsize=fig_size)
    # One marker per unique (well, x, y) combination.
    wells = df_logs.drop_duplicates(subset=['HACKANAME', 'X', 'Y'])
    plt.scatter(wells['X'], wells['Y'])
    plt.axis('scaled')
    # Label each marker, offset slightly so text does not cover the point.
    for name, well_x, well_y in zip(wells['HACKANAME'], wells['X'], wells['Y']):
        plt.annotate(name,
                     xy=(well_x, well_y),
                     xytext=(-10, 10),
                     textcoords='offset points')
    return fig, axis
def make_log_plot(df_logs, well_name, cols=('GR', 'DT', 'CALI'), ztop=None, zbot=None, fig_size=(8, 12)):
    """Plot one well's logs as side-by-side depth tracks.

    Args:
        df_logs: DataFrame with 'HACKANAME', 'TVDSS' and the log columns.
        well_name: value of 'HACKANAME' selecting the well to plot.
        cols: log columns to draw, one subplot per column.
        ztop, zbot: depth range; default to the well's min/max TVDSS.
        fig_size: matplotlib figure size.

    Returns:
        (figure, axes) tuple.
    """
    # Fix: default was a mutable list; a tuple is safer as a default arg.
    logs = df_logs[df_logs['HACKANAME'] == well_name]
    logs = logs.sort_values(by='TVDSS')
    # Fix: explicit None checks -- the original "if not ztop" silently
    # replaced a legitimate depth of 0 with the data minimum.
    if ztop is None:
        ztop = logs.TVDSS.min()
    if zbot is None:
        zbot = logs.TVDSS.max()
    f, ax = plt.subplots(nrows=1, ncols=len(cols), figsize=fig_size)
    for i, log_name in enumerate(cols):
        ax[i].scatter(logs[log_name], logs['TVDSS'], marker='+')
        ax[i].set_xlabel(log_name)
        ax[i].set_ylim(ztop, zbot)
        ax[i].invert_yaxis()
        ax[i].grid()
        ax[i].locator_params(axis='x', nbins=3)
        if i > 0:
            # Only the first track keeps its depth labels.
            ax[i].set_yticklabels([])
    f.suptitle('Well: {}'.format(well_name), fontsize=14, y=0.94)
    return f, ax
def add_predictions(ax, predictions):
    """ Add predicted bands onto plt axes"""
    # Individual predicted points.
    ax.scatter(predictions['value'], predictions['TVDSS'], marker='+')
    # Shaded band between the 'low' and 'high' model predictions.
    high = predictions[predictions.model_name == 'high']
    low = predictions[predictions.model_name == 'low']
    tvds = high['TVDSS']
    band_x = np.concatenate([low['value'], high['value'][::-1]])
    band_y = np.concatenate([tvds, tvds[::-1]])
    ax.fill(band_x, band_y, alpha=0.5)
| 28.289474
| 105
| 0.58093
| 315
| 2,150
| 3.838095
| 0.384127
| 0.01737
| 0.019851
| 0.076923
| 0.072787
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023853
| 0.239535
| 2,150
| 76
| 106
| 28.289474
| 0.715596
| 0.137209
| 0
| 0.046512
| 0
| 0
| 0.063934
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.069767
| false
| 0
| 0.093023
| 0
| 0.209302
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2fc8f6f95ceeb8cf32d3eeed59de008b87d73f7
| 556
|
py
|
Python
|
src/appi/oop/classes/class_attributes.py
|
Kaju-Bubanja/APPI
|
011afc872a0055ff56001547be6da73017042ad5
|
[
"MIT"
] | null | null | null |
src/appi/oop/classes/class_attributes.py
|
Kaju-Bubanja/APPI
|
011afc872a0055ff56001547be6da73017042ad5
|
[
"MIT"
] | null | null | null |
src/appi/oop/classes/class_attributes.py
|
Kaju-Bubanja/APPI
|
011afc872a0055ff56001547be6da73017042ad5
|
[
"MIT"
] | null | null | null |
class Student:
    """A student with a name and an age.

    school_name is a class variable shared by all instances.
    """

    school_name = 'ABC School'  # shared across every Student

    def __init__(self, name, age):
        """Store the per-instance name and age."""
        self.name = name
        self.age = age
s1 = Student("Harry", 12)
# access instance variables
print('Student:', s1.name, s1.age)
# access class variable (through the class itself)
print('School name:', Student.school_name)
# Modify instance variables -- affects only this instance.
s1.name = 'Jessa'
s1.age = 14
print('Student:', s1.name, s1.age)
# Modify class variables -- affects all instances that have not
# shadowed the attribute with their own.
Student.school_name = 'XYZ School'
print('School name:', Student.school_name)
| 20.592593
| 42
| 0.676259
| 73
| 556
| 5.041096
| 0.30137
| 0.163043
| 0.138587
| 0.097826
| 0.298913
| 0.298913
| 0
| 0
| 0
| 0
| 0
| 0.024775
| 0.201439
| 556
| 26
| 43
| 21.384615
| 0.804054
| 0.257194
| 0
| 0.307692
| 0
| 0
| 0.17284
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0
| 0
| 0.230769
| 0.307692
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2feb8df0aea648f82fd8f4f86ab95ad219d052f
| 1,878
|
py
|
Python
|
hamster2pdf.py
|
vleg1991/hamster2pdf
|
1dda22a39b65a0f24b76d278f3d708ac51d3c262
|
[
"MIT"
] | null | null | null |
hamster2pdf.py
|
vleg1991/hamster2pdf
|
1dda22a39b65a0f24b76d278f3d708ac51d3c262
|
[
"MIT"
] | null | null | null |
hamster2pdf.py
|
vleg1991/hamster2pdf
|
1dda22a39b65a0f24b76d278f3d708ac51d3c262
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import datetime
import hamster.client
import reports
import argparse
import pdfkit
import gettext
# Install gettext's _() into builtins, loading the 'brainz' translation
# domain from the given directory.
gettext.install('brainz', '../datas/translations/')
# custom settings:
# NOTE(review): neither constant is read below -- presumably consumed by
# the reports module; confirm before removing.
reportTitle = "My Activities Report"
activityFilter = "unfiled"
def valid_date(s):
    """Parse an ISO ``YYYY-MM-DD`` string into a datetime.date.

    Raises argparse.ArgumentTypeError for anything else, so this can be
    used directly as an argparse ``type=`` converter.
    """
    try:
        parsed = datetime.datetime.strptime(s, "%Y-%m-%d")
    except ValueError:
        raise argparse.ArgumentTypeError("Not a valid date: '{0}'.".format(s))
    return parsed.date()
# find dates:
today = datetime.date.today()
first = today.replace(day=1)                       # first day of current month
previousLast = first - datetime.timedelta(days=1)  # last day of previous month
previousFirst = previousLast.replace(day=1)        # first day of previous month
# assign arguments:
parser = argparse.ArgumentParser(description="export the hamster database to pdf")
parser.add_argument("--thismonth", action="store_true", help="export this month's records")
parser.add_argument("--lastmonth", action="store_true", help="export last month's records")
parser.add_argument("-s", dest="startDate", default=today, help="start date (default: today)", type=valid_date)
parser.add_argument("-e", dest="endDate", default=today, help="end date (default: today)", type=valid_date)
parser.add_argument("-o", dest="reportFile", default="report.pdf", help="output file (default: report.pdf)")
# parse arguments:
args = parser.parse_args()
# --thismonth / --lastmonth override any explicit -s/-e dates.
if args.thismonth:
    args.startDate = first
    args.endDate = today
if args.lastmonth:
    args.startDate = previousFirst
    args.endDate = previousLast
# prepare filenames: the report is rendered to HTML first, then converted.
htmlFilename = os.path.splitext(args.reportFile)[0]+".html"
pdfFilename = os.path.splitext(args.reportFile)[0]+".pdf"
# Fetch activity facts from the hamster time-tracker database.
storage = hamster.client.Storage()
facts = storage.get_facts(args.startDate, args.endDate)
# generate report
reports.simple(facts, args.startDate, args.endDate, htmlFilename)
# convert .html to .pdf file:
pdfkit.from_file(htmlFilename, pdfFilename)
| 27.617647
| 111
| 0.736954
| 243
| 1,878
| 5.641975
| 0.444444
| 0.032823
| 0.061999
| 0.027717
| 0.231947
| 0.153173
| 0.067104
| 0.067104
| 0.067104
| 0
| 0
| 0.004225
| 0.117678
| 1,878
| 67
| 112
| 28.029851
| 0.823174
| 0.087859
| 0
| 0
| 0
| 0
| 0.20716
| 0.012911
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025641
| false
| 0
| 0.179487
| 0
| 0.230769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
f2ff24739f7d32b20b931df9776f794aac82539a
| 589
|
py
|
Python
|
SingleTon.py
|
SuperLeis/meituan
|
71d521826bc50cb8e7bee5617f84e2c26dce1394
|
[
"MIT"
] | 1
|
2020-05-02T14:30:18.000Z
|
2020-05-02T14:30:18.000Z
|
SingleTon.py
|
SuperLeis/meituan
|
71d521826bc50cb8e7bee5617f84e2c26dce1394
|
[
"MIT"
] | null | null | null |
SingleTon.py
|
SuperLeis/meituan
|
71d521826bc50cb8e7bee5617f84e2c26dce1394
|
[
"MIT"
] | null | null | null |
from functools import wraps
# created by PL
# git hello world
def single_ton(cls):
    """Class decorator: the first call constructs the one instance of
    *cls*; every later call returns that same cached instance (later
    constructor arguments are ignored).
    """
    cache = {}

    @wraps(cls)
    def get_instance(*args, **kwargs):
        # EAFP: the cache hit is the common case after the first call.
        try:
            return cache[cls]
        except KeyError:
            instance = cls(*args, **kwargs)
            cache[cls] = instance
            return instance

    return get_instance
@single_ton
class SingleTon(object):
    """Demo singleton: one shared class attribute, one instance attribute.

    Because of @single_ton, only the first constructor call's *a* is kept.
    """

    val = 123  # class attribute shared by the (single) instance

    def __init__(self, a):
        self.a = a
if __name__ == '__main__':
    # Both calls return the same cached instance; the second constructor
    # argument (2) is ignored by the decorator.
    s = SingleTon(1)
    t = SingleTon(2)
    print (s is t)        # True: same object
    print (s.a, t.a)      # 1 1: first call's argument wins
    print (s.val, t.val)
    print ('test')
    print ("git test")
| 19.633333
| 50
| 0.550085
| 78
| 589
| 3.923077
| 0.487179
| 0.058824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012626
| 0.327674
| 589
| 30
| 51
| 19.633333
| 0.760101
| 0.049236
| 0
| 0
| 0
| 0
| 0.037807
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.136364
| false
| 0
| 0.045455
| 0
| 0.363636
| 0.227273
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
8401761cbdcacb5f4d5eb5531d513247beb5261b
| 10,254
|
py
|
Python
|
datatest/differences.py
|
ajhynes7/datatest
|
78742e98de992807286655f5685a2dc33a7b452e
|
[
"Apache-2.0"
] | 277
|
2016-05-12T13:22:49.000Z
|
2022-03-11T00:18:32.000Z
|
datatest/differences.py
|
ajhynes7/datatest
|
78742e98de992807286655f5685a2dc33a7b452e
|
[
"Apache-2.0"
] | 57
|
2016-05-18T01:03:32.000Z
|
2022-02-17T13:48:43.000Z
|
datatest/differences.py
|
ajhynes7/datatest
|
78742e98de992807286655f5685a2dc33a7b452e
|
[
"Apache-2.0"
] | 16
|
2016-05-22T11:35:19.000Z
|
2021-12-01T19:41:42.000Z
|
"""Difference classes."""
__all__ = [
'BaseDifference',
'Missing',
'Extra',
'Invalid',
'Deviation',
]
from cmath import isnan
from datetime import timedelta
from ._compatibility.builtins import *
from ._compatibility import abc
from ._compatibility.contextlib import suppress
from ._utils import _make_token
from ._utils import pretty_timedelta_repr
# Sentinel used where "no value was given" must be distinguishable
# from a legitimate None; it is falsy so it reads naturally in tests.
NOVALUE = _make_token(
    'NoValueType',
    '<no value>',
    'Token to mark when a value does not exist.',
    truthy=False,
)
# Stand-in substituted for NaN when comparing differences, so that two
# differences containing NaN can compare equal (NaN != NaN normally).
NANTOKEN = _make_token(
    'NanTokenType',
    '<nan token>',
    'Token for comparing differences that contain not-a-number values.',
)
def _nan_to_token(value):
"""Return NANTOKEN if *value* is NaN else return value unchanged."""
def func(x):
with suppress(TypeError):
if isnan(x):
return NANTOKEN
return x
if isinstance(value, tuple):
return tuple(func(x) for x in value)
return func(value)
def _safe_isnan(x):
"""Wrapper for isnan() so it won't fail on non-numeric values."""
try:
return isnan(x)
except TypeError:
return False
class BaseDifference(abc.ABC):
    """The base class for "difference" objects---all other difference
    classes are derived from this base.

    Equality, hashing, and repr are all driven by the :attr:`args`
    tuple that concrete subclasses must provide.
    """
    __slots__ = ()

    @property
    @abc.abstractmethod
    def args(self):
        """The tuple of arguments given to the difference constructor.
        Some difference (like :class:`Deviation`) expect a certain
        number of arguments and assign a special meaning to the
        elements of this tuple, while others are called with only
        a single value.
        """
        # Concrete method should return tuple of args used in __init__().
        raise NotImplementedError

    def __eq__(self, other):
        # Equal iff same concrete class and same args. NaN values are
        # mapped to a shared token first, so NaN compares equal to NaN
        # here (unlike ordinary float comparison).
        if self.__class__ != other.__class__:
            return False
        self_args = tuple(_nan_to_token(x) for x in self.args)
        other_args = tuple(_nan_to_token(x) for x in other.args)
        return self_args == other_args

    def __ne__(self, other):            # <- For Python 2.x support. There is
        return not self.__eq__(other)   # no implicit relationship between
                                        # __eq__() and __ne__() in Python 2.

    def __hash__(self):
        # Defining __eq__ suppresses inherited hashing, so it is
        # restored here explicitly; keyed on (class, args).
        try:
            return hash((self.__class__, self.args))
        except TypeError as err:
            # Unhashable args: re-raise with a message naming the args tuple.
            msg = '{0} in args tuple {1!r}'.format(str(err), self.args)
            hashfail = TypeError(msg)
            hashfail.__cause__ = getattr(err, '__cause__', None)  # getattr for 2.x support
            raise hashfail

    def __repr__(self):
        cls_name = self.__class__.__name__
        args_repr = ', '.join(
            getattr(x, '__name__', repr(x)) for x in self.args)
        return '{0}({1})'.format(cls_name, args_repr)
class Missing(BaseDifference):
    """Created when a required *value* is absent from the data under test.

    For example, validating ``['B', 'C']`` against the requirement
    ``{'A', 'B', 'C'}`` raises a ValidationError containing
    ``Missing('A')``:

    .. code-block:: none
        :emphasize-lines: 2

        ValidationError: does not satisfy set membership (1 difference): [
            Missing('A'),
        ]
    """
    __slots__ = ('_args',)

    def __init__(self, value):
        # Stored as a one-tuple to satisfy the BaseDifference.args contract.
        self._args = (value,)

    @property
    def args(self):
        return self._args
class Extra(BaseDifference):
    """Created when *value* appears in the data under test but is not
    part of the requirement.

    For example, validating ``['A', 'B', 'C']`` against the requirement
    ``{'A', 'B'}`` raises a ValidationError containing ``Extra('C')``:

    .. code-block:: none
        :emphasize-lines: 2

        ValidationError: does not satisfy set membership (1 difference): [
            Extra('C'),
        ]
    """
    __slots__ = ('_args',)

    def __init__(self, value):
        # Stored as a one-tuple to satisfy the BaseDifference.args contract.
        self._args = (value,)

    @property
    def args(self):
        return self._args
class Invalid(BaseDifference):
    """Created when a value does not satisfy a function, equality, or
    regular expression requirement.

    For example, validating ``[2, 4, 6, 9]`` with an ``is_even``
    function raises a ValidationError containing ``Invalid(9)``:

    .. code-block:: none
        :emphasize-lines: 2

        ValidationError: does not satisfy is_even() (1 difference): [
            Invalid(9),
        ]
    """
    __slots__ = ('_invalid', '_expected')

    def __init__(self, invalid, expected=NOVALUE):
        # The two values must actually differ; the comparison itself may
        # raise TypeError for incomparable types, which counts as unequal.
        try:
            is_same = invalid == expected
        except TypeError:
            is_same = False
        if is_same:
            msg = 'expects unequal values, got {0!r} and {1!r}'
            raise ValueError(msg.format(invalid, expected))
        self._invalid = invalid
        self._expected = expected

    @property
    def args(self):
        # The *expected* slot is only part of args when it was given.
        if self._expected is NOVALUE:
            return (self._invalid,)
        return (self._invalid, self._expected)

    @property
    def invalid(self):
        """The invalid value under test."""
        return self._invalid

    @property
    def expected(self):
        """The expected value (optional)."""
        return self._expected

    def __repr__(self):
        cls_name = self.__class__.__name__
        invalid_repr = getattr(self._invalid, '__name__', repr(self._invalid))
        if self._expected is NOVALUE:
            expected_repr = ''
        else:
            expected_repr = ', expected={0}'.format(
                getattr(self._expected, '__name__', repr(self._expected)))
        return '{0}({1}{2})'.format(cls_name, invalid_repr, expected_repr)
def _slice_datetime_repr_prefix(obj_repr):
"""Takes a default "datetime", "date", or "timedelta" repr and
returns it with the module prefix sliced-off::
>>> _slice_datetime_repr_prefix('datetime.date(2020, 12, 25)')
'date(2020, 12, 25)'
"""
# The following implementation (using "startswith" and "[9:]")
# may look clumsy but it can run up to 10 times faster than a
# more concise "re.compile()" and "regex.sub()" approach. In
# some situations, this function can get called many, many
# times. DON'T GET CLEVER--KEEP THIS FUNCTION FAST.
if obj_repr.startswith('datetime.datetime(') \
or obj_repr.startswith('datetime.date(') \
or obj_repr.startswith('datetime.timedelta('):
return obj_repr[9:]
return obj_repr
class Deviation(BaseDifference):
    """Created when a quantative value deviates from its expected value.

    In the following example, the dictionary item ``'C': 33`` does
    not satisfy the required item ``'C': 30``::

        data = {'A': 10, 'B': 20, 'C': 33}
        requirement = {'A': 10, 'B': 20, 'C': 30}
        datatest.validate(data, requirement)

    Running this example raises the following error:

    .. code-block:: none
        :emphasize-lines: 2

        ValidationError: does not satisfy mapping requirement (1 difference): {
            'C': Deviation(+3, 30),
        }
    """
    __slots__ = ('_deviation', '_expected')

    def __init__(self, deviation, expected):
        try:
            # A deviation that leaves *expected* unchanged when added
            # (e.g. 0, timedelta(0)) is "empty" and is not allowed.
            if deviation + expected == expected:
                msg = 'deviation quantity must not be empty, got {0!r}'
                exc = ValueError(msg.format(deviation))
                raise exc
        except TypeError:
            # Arguments that cannot even be added/compared are not
            # quantitative; raise a clean TypeError (context suppressed).
            msg = ('Deviation arguments must be quantitative, '
                   'got deviation={0!r}, expected={1!r}')
            exc = TypeError(msg.format(deviation, expected))
            exc.__cause__ = None
            raise exc
        self._deviation = deviation
        self._expected = expected

    @property
    def args(self):
        return (self._deviation, self._expected)

    @property
    def deviation(self):
        """Quantative deviation from expected value."""
        return self._deviation

    @property
    def expected(self):
        """The expected value."""
        return self._expected

    def __repr__(self):
        cls_name = self.__class__.__name__
        deviation = self._deviation
        # NaN and timedelta need special-cased reprs; everything else
        # gets an explicit +/- sign when the format spec supports it.
        if _safe_isnan(deviation):
            deviation_repr = "float('nan')"
        elif isinstance(deviation, timedelta):
            deviation_repr = pretty_timedelta_repr(deviation)
        else:
            try:
                deviation_repr = '{0:+}'.format(deviation)  # Apply +/- sign
            except (TypeError, ValueError):
                deviation_repr = repr(deviation)
        expected = self._expected
        if _safe_isnan(expected):
            expected_repr = "float('nan')"
        else:
            expected_repr = repr(expected)
        # Strip the "datetime." module prefix for more readable output.
        if expected_repr.startswith('datetime.'):
            expected_repr = _slice_datetime_repr_prefix(expected_repr)
        return '{0}({1}, {2})'.format(cls_name, deviation_repr, expected_repr)
def _make_difference(actual, expected, show_expected=True):
    """Returns an appropriate difference for *actual* and *expected*
    values that are known to be unequal.

    Setting *show_expected* to False, signals that the *expected*
    argument should be omitted when creating an Invalid difference
    (this is useful for reducing duplication when validating data
    against a single function or object).
    """
    if actual is NOVALUE:
        return Missing(expected)
    if expected is NOVALUE:
        return Extra(actual)

    # Booleans are not treated as quantities, and subtraction can fail
    # for non-numeric types -- both cases fall through to Invalid.
    if not (isinstance(expected, bool) or isinstance(actual, bool)):
        try:
            return Deviation(actual - expected, expected)
        except (TypeError, ValueError):
            pass
    return Invalid(actual, expected) if show_expected else Invalid(actual)
| 29.048159
| 91
| 0.610396
| 1,194
| 10,254
| 5.025126
| 0.215243
| 0.022
| 0.016333
| 0.004667
| 0.282833
| 0.238167
| 0.222
| 0.189
| 0.183833
| 0.1505
| 0
| 0.010242
| 0.28584
| 10,254
| 352
| 92
| 29.130682
| 0.809095
| 0.346987
| 0
| 0.324022
| 0
| 0
| 0.094929
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.134078
| false
| 0
| 0.039106
| 0.022346
| 0.402235
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
8403354322f3d276144123191c8e910a521e71d2
| 1,945
|
py
|
Python
|
VQ2D/vq2d/baselines/predictor.py
|
emulhall/episodic-memory
|
27bafec6e09c108f0efe5ac899eabde9d1ac40cc
|
[
"MIT"
] | 27
|
2021-10-16T02:39:17.000Z
|
2022-03-31T11:16:11.000Z
|
VQ2D/vq2d/baselines/predictor.py
|
emulhall/episodic-memory
|
27bafec6e09c108f0efe5ac899eabde9d1ac40cc
|
[
"MIT"
] | 5
|
2022-03-23T04:53:36.000Z
|
2022-03-29T23:39:07.000Z
|
VQ2D/vq2d/baselines/predictor.py
|
emulhall/episodic-memory
|
27bafec6e09c108f0efe5ac899eabde9d1ac40cc
|
[
"MIT"
] | 13
|
2021-11-25T19:17:29.000Z
|
2022-03-25T14:01:47.000Z
|
from typing import Any, Dict, List, Sequence
import numpy as np
import torch
from detectron2.engine import DefaultPredictor
class SiamPredictor(DefaultPredictor):
    """Batch predictor pairing each query image with a visual-crop
    "reference" that the model receives alongside it.
    """

    def __call__(
        self,
        original_images: Sequence[np.ndarray],
        visual_crops: Sequence[np.ndarray],
    ) -> List[Dict[str, Any]]:
        """
        Args:
            original_images (np.ndarray): a list of images of shape (H, W, C) (in BGR order).
            visual_crops (np.ndarray): a list of images of shape (H, W, C) (in BGR order)

        Returns:
            predictions (list[dict]):
                the output of the model for a list of images.
                See :doc:`/tutorials/models` for details about the format.
        """
        with torch.no_grad():  # https://github.com/sphinx-doc/sphinx/issues/4258
            # Apply pre-processing to image.
            inputs = []
            for original_image, visual_crop in zip(original_images, visual_crops):
                if self.input_format == "RGB":
                    # whether the model expects BGR inputs or RGB;
                    # [:, :, ::-1] flips the channel order.
                    original_image = original_image[:, :, ::-1]
                    visual_crop = visual_crop[:, :, ::-1]
                height, width = original_image.shape[:2]
                # Resize/augment only the search image; the reference
                # crop is passed through at its native resolution.
                image = self.aug.get_transform(original_image).apply_image(
                    original_image
                )
                # HWC -> CHW float32 tensors for the model.
                image = torch.as_tensor(image.astype("float32").transpose(2, 0, 1))
                reference = torch.as_tensor(
                    visual_crop.astype("float32").transpose(2, 0, 1)
                )
                inputs.append(
                    {
                        "image": image,
                        "height": height,   # original size, for rescaling outputs
                        "width": width,
                        "reference": reference,
                    }
                )
            predictions = self.model(inputs)
        return predictions
| 38.9
| 93
| 0.519794
| 202
| 1,945
| 4.876238
| 0.425743
| 0.079188
| 0.02132
| 0.039594
| 0.136041
| 0.136041
| 0.085279
| 0.085279
| 0.085279
| 0.085279
| 0
| 0.015
| 0.383033
| 1,945
| 49
| 94
| 39.693878
| 0.805833
| 0.23599
| 0
| 0
| 0
| 0
| 0.029745
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029412
| false
| 0
| 0.117647
| 0
| 0.205882
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
840519afb7f020a56b84911fb8113394b9946381
| 7,626
|
py
|
Python
|
mutagene/benchmark/multiple_benchmark.py
|
neksa/pymutagene
|
1122d64a5ab843a4960124933f78f3c2e388a792
|
[
"CC0-1.0"
] | 3
|
2020-05-18T07:00:46.000Z
|
2022-02-20T02:55:48.000Z
|
mutagene/benchmark/multiple_benchmark.py
|
neksa/pymutagene
|
1122d64a5ab843a4960124933f78f3c2e388a792
|
[
"CC0-1.0"
] | 31
|
2020-03-13T16:28:34.000Z
|
2021-02-27T22:12:15.000Z
|
mutagene/benchmark/multiple_benchmark.py
|
neksa/pymutagene
|
1122d64a5ab843a4960124933f78f3c2e388a792
|
[
"CC0-1.0"
] | 3
|
2020-03-24T20:01:44.000Z
|
2020-11-26T17:30:39.000Z
|
import glob
import random
import uuid
import numpy as np
from multiprocessing import Pool
from sklearn.metrics import (
recall_score, precision_score, accuracy_score, f1_score, mean_squared_error)
from mutagene.io.profile import read_profile_file, write_profile, read_signatures
from mutagene.signatures.identify import NegLogLik
from mutagene.benchmark.deconstructsigs import deconstruct_sigs_custom
from mutagene.benchmark.generate_benchmark import *
# from mutagene.identify import decompose_mutational_profile_counts
def multiple_benchmark_helper(j):
    """Generate one set of synthetic mutational profiles and write
    reference decompositions for them.

    *j* is the worker index supplied by Pool.map; it is not used
    directly -- each call draws its own random exposures and counts.
    """
    dirname = "data/benchmark/multiple"
    # for i in [5, 10, 30]:
    for i in [30, ]:
        # i: signature-set size; W: signature matrix (columns = signatures).
        W, signature_names = read_signatures(i)
        N = W.shape[1]
        # r: number of active signatures to simulate.
        # r = random.randrange(2, i // 3 + 2)
        r = random.randrange(2, min(i + 1, 15))
        # print(np.random.choice(N, r), .05 + np.random.dirichlet(np.ones(r), 1))
        # Draw a sparse exposure vector h0 with exactly r components
        # above the 0.05 floor (np.random.choice may repeat indices, so
        # re-draw until exactly r components qualify).
        while True:
            h0 = np.zeros(N)
            h0[np.random.choice(N, r)] = 0.05 + np.random.dirichlet(np.ones(r), 1)
            if np.greater(h0, 0.05).sum() == r:
                break
        h0 /= h0.sum()
        # v0: expected profile implied by the exposures.
        v0 = W.dot(h0)
        # print(h0)
        # Sample an observed count profile with a random total count.
        n_mutations = random.randrange(10, 50)
        v0_counts = np.random.multinomial(n_mutations, v0 / v0.sum())
        # print(v0_counts)
        # File name encodes: <sigtype>_<r>_<nmut>_<random suffix>.
        random_name = str(uuid.uuid4())[:4]
        fname = dirname + "/{:02d}_{}_{}_{}".format(i, r, n_mutations, random_name)
        print(fname)
        profile_fname = fname + ".profile"
        info_fname = fname + ".info"
        mle_info = fname + ".MLE.info"
        mlez_info = fname + ".MLEZ.info"
        ds_info = fname + ".ds.info"
        # Persist the sampled profile and its ground-truth decomposition.
        write_profile(profile_fname, v0_counts)
        write_decomposition(info_fname, h0, signature_names)
        # DeconstructSigs baseline decomposition.
        results = deconstruct_sigs_custom(profile_fname, signatures=i)
        write_decomposition(ds_info, results, signature_names)
        # MLE / MLEZ decompositions of the same profile.
        profile = read_profile_file(profile_fname)
        for method, method_fname in [("MLE", mle_info), ("MLEZ", mlez_info)]:
            _, _, results = decompose_mutational_profile_counts(
                profile,
                (W, signature_names),
                method,
                debug=False,
                others_threshold=0.0)
            write_decomposition(method_fname, results, signature_names)
def multiple_benchmark():
    """Generate 100 benchmark profile sets in parallel (10 workers)."""
    # Fixed seed keeps the generated benchmark set reproducible.
    random.seed(13425)
    with Pool(10) as workers:
        workers.map(multiple_benchmark_helper, range(100))
def multiple_benchmark_run_helper(data):
    """Decompose one profile file with each method, writing *.info files.

    *data* is a (fname, signature_ids, W, force) tuple so the function
    can be used with Pool.map.  A method is skipped when its output file
    already exists, unless *force* is true.
    """
    fname, signature_ids, W, force = data
    # methods = ['MLE', 'MLEZ', 'AICc', 'BIC', 'AICcZ', 'BICZ']
    methods = ['AICc', 'AICcZ']
    # print(fname)
    profile = read_profile_file(fname)
    for method in methods:
        info = "{}.{}.info".format(fname.split(".")[0], method)
        # NOTE(review): isfile() presumably comes from the star-import of
        # generate_benchmark (os.path.isfile) -- confirm.
        if isfile(info) and not force:
            continue
        print(info)
        _, _, results = decompose_mutational_profile_counts(
            profile,
            (W, signature_ids),
            method,
            debug=False,
            others_threshold=0.0)
        # Re-order the scores to match signature_ids before writing.
        exposure_dict = {x['name']: x['score'] for x in results}
        exposure = [exposure_dict[name] for name in signature_ids]
        write_decomposition(info, np.array(exposure), signature_ids)
def multiple_benchmark_run(N, signature_ids, W, force=False):
    """Run the decomposition methods over every generated profile for
    signature-set size *N*, in parallel (10 workers, chunksize 100)."""
    def job_args():
        # Lazily yield one argument tuple per matching profile file.
        pattern = "data/benchmark/multiple/{:02d}_*.profile".format(N)
        for profile_path in glob.glob(pattern, recursive=True):
            yield (profile_path, signature_ids, W, force)

    random.seed(13425)
    with Pool(10) as workers:
        workers.map(multiple_benchmark_run_helper, job_args(), 100)
def aggregate_multiple_benchmarks():
    """Collect per-profile decomposition results into one TSV report.

    For every generated *.profile under data/benchmark/multiple,
    compares each method's estimated exposures against the ground truth
    and writes RMSE / log-likelihood / detection metrics per row.
    """
    # Map method key -> file suffix of its saved decomposition.
    methods = {
        "mle": ".MLE.info",
        "mlez": ".MLEZ.info",
        "ds": ".ds.info",
        'aicc': '.AICc.info',
        'bic': '.BIC.info',
        'aiccz': '.AICcz.info',
        'bicz': '.BICz.info',
    }
    # signatures_thresholds = {
    #     5: 0.06,
    #     10: 0.03,
    #     30: 0.01,
    # }
    # Exposure cut-off per signature-set size (currently 0.06 for all).
    # NOTE(review): this dict is not read below -- the literal 0.06 is
    # used directly; keep them in sync or remove one.
    signatures_thresholds = {
        5: 0.06,
        10: 0.06,
        30: 0.06,
    }
    # signatures_thresholds = {
    #     5: 0.0001,
    #     10: 0.0001,
    #     30: 0.0001,
    # }
    # only report the signature 2 value (as in DeconstructSigs benchmark)
    with open("data/benchmark/multiple/res1.txt", 'w') as o:
        o.write("file_id\tsigtype\tnsig\tnmut\tmethod\tSRMSE\tPRMSE\tSTRMSE\tLLIK\tLLIK0\tTLLIK\tTLLIK0\tprecision\trecall\taccuracy\tf1\n")
        for fname in glob.glob("data/benchmark/multiple/*.profile", recursive=True):
            # File name encodes: <sigtype>_<r>_<nmut>_<replica>.profile
            file_id = fname.split("/")[-1].split(".")[0]
            sigtype, r, nmut, replica = fname.split("/")[-1].split(".")[0].split("_")
            sigtype = int(sigtype)
            if sigtype != 30:
                continue
            W, signature_names = read_signatures(sigtype)
            info_fname = fname.split(".")[0] + '.info'
            orig_profile = read_profile_file(fname)
            # h0: ground-truth exposures written at generation time.
            h0, names = read_decomposition(info_fname)
            # threshold = 0.06
            threshold = 0.06
            # threshold = 1.0 / np.sqrt(int(nmut)) if method != "ds" else 0.06
            h0_threshold = np.where(h0 > threshold, h0, 0.0)  # zero below threshold
            h0_binary = np.array(h0_threshold) > 0.0  # true / false for threshold
            nsig = np.count_nonzero(h0_binary)
            # Diagnostics: thresholding can change the apparent number of
            # active signatures vs the generated count r.
            if nsig < int(r):
                print("LESS", sigtype, nsig, r)
            if nsig > int(r):
                print("MORE", sigtype, nsig, r)
            # Restrict the report to 2..10 active signatures.
            if nsig <= 1:
                continue
            if nsig > 10:
                continue
            for method in methods:
                method_fname = fname.split(".")[0] + methods[method]
                values, names = read_decomposition(method_fname)
                # print(method_fname)
                # Skip methods that produced no (or an all-zero) result.
                if values is None:
                    continue
                h = np.array(values)
                if h.sum() == 0:
                    continue
                h_threshold = np.where(h > threshold, h, 0.0)  # zero below threshold
                reconstructed_profile = W.dot(h)
                # PRMSE: RMSE between normalized observed/reconstructed
                # profiles; SRMSE/STRMSE: RMSE between exposure vectors
                # (T-variant uses the thresholded exposures).
                PRMSE = np.sqrt(mean_squared_error(
                    np.array(orig_profile) / np.array(orig_profile).sum(),
                    np.array(reconstructed_profile) / np.array(reconstructed_profile).sum()))
                SRMSE = np.sqrt(mean_squared_error(h0, h))
                STRMSE = np.sqrt(mean_squared_error(h0_threshold, h_threshold))
                # Log-likelihoods of truth (…0) and estimate, plus
                # thresholded (T…) variants.
                LLIK0 = - NegLogLik(h0, W, orig_profile)
                TLLIK0 = - NegLogLik(h0_threshold, W, orig_profile)
                LLIK = - NegLogLik(h, W, orig_profile)
                TLLIK = - NegLogLik(h_threshold, W, orig_profile)
                # Binary signature-detection metrics vs ground truth.
                h_binary = np.array(h_threshold) > 0.0  # true / false for threshold
                precision = precision_score(h0_binary, h_binary)
                recall = recall_score(h0_binary, h_binary)
                accuracy = accuracy_score(h0_binary, h_binary)
                f1 = f1_score(h0_binary, h_binary)
                o.write("{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\n".format(
                    file_id, sigtype, nsig, nmut, method, SRMSE, PRMSE, STRMSE, LLIK, LLIK0, TLLIK, TLLIK0, precision, recall, accuracy, f1))
| 35.142857
| 141
| 0.560976
| 902
| 7,626
| 4.569845
| 0.211752
| 0.006793
| 0.009461
| 0.011645
| 0.257885
| 0.154537
| 0.133188
| 0.088549
| 0.056526
| 0.027414
| 0
| 0.034102
| 0.300157
| 7,626
| 216
| 142
| 35.305556
| 0.738242
| 0.108707
| 0
| 0.156028
| 0
| 0.014184
| 0.077719
| 0.046962
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042553
| false
| 0
| 0.070922
| 0
| 0.113475
| 0.028369
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
8407722043fe4e1043792c735a7c99de2eae2b6e
| 1,807
|
py
|
Python
|
ckl/run.py
|
damianbrunold/checkerlang-py
|
97abe5eda5f692ef61acf906a5f596c65688b582
|
[
"MIT"
] | null | null | null |
ckl/run.py
|
damianbrunold/checkerlang-py
|
97abe5eda5f692ef61acf906a5f596c65688b582
|
[
"MIT"
] | null | null | null |
ckl/run.py
|
damianbrunold/checkerlang-py
|
97abe5eda5f692ef61acf906a5f596c65688b582
|
[
"MIT"
] | null | null | null |
import argparse
import os
import sys
from ckl.values import (
ValueList,
ValueString,
NULL
)
from ckl.errors import (
CklSyntaxError,
CklRuntimeError
)
from ckl.interpreter import Interpreter
def main():
    """Entry point for the CKL run command.

    Parses command-line options, prepares the interpreter environment
    (script arguments, script name, module path), interprets the given
    script file, and prints the result or any CKL error.
    """
    arg_parser = argparse.ArgumentParser(description="CKL run command")
    arg_parser.add_argument("-s", "--secure", action="store_true")
    arg_parser.add_argument("-l", "--legacy", action="store_true")
    arg_parser.add_argument("-m", "--modulepath", nargs="?")
    arg_parser.add_argument("script")
    arg_parser.add_argument("args", nargs="*")
    options = arg_parser.parse_args(sys.argv[1:])
    # Optional module search path, wrapped in CKL value types.
    module_path = ValueList()
    if options.modulepath:
        module_path.addItem(ValueString(options.modulepath))
    interpreter = Interpreter(options.secure, options.legacy)
    if not os.path.exists(options.script):
        print(f"File not found '{options.script}'", file=sys.stderr)
        sys.exit(1)
    # Expose the script's own arguments and metadata to the interpreted code.
    script_args = ValueList()
    for value in options.args:
        script_args.addItem(ValueString(value))
    env = interpreter.environment
    env.put("args", script_args)
    env.put("scriptname", ValueString(options.script))
    env.put("checkerlang_module_path", module_path)
    with open(options.script, encoding="utf-8") as infile:
        source = infile.read()
    try:
        result = interpreter.interpret(source, options.script)
        if result != NULL:
            print(str(result))
    except CklRuntimeError as e:
        print(f"{e.value.asString().value}: {e.msg} (Line {e.pos})")
        for frame in e.stacktrace or []:
            print(str(frame))
    except CklSyntaxError as e:
        location = f" (Line {e.pos})" if e.pos else ""
        print(e.msg + location)
if __name__ == "__main__":
    main()
| 28.234375
| 72
| 0.633094
| 208
| 1,807
| 5.413462
| 0.389423
| 0.039964
| 0.075488
| 0.0373
| 0.08881
| 0.08881
| 0.031972
| 0.031972
| 0
| 0
| 0
| 0.002146
| 0.226342
| 1,807
| 63
| 73
| 28.68254
| 0.80329
| 0
| 0
| 0
| 0
| 0
| 0.099059
| 0.012728
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019608
| false
| 0
| 0.117647
| 0
| 0.137255
| 0.098039
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
840a373b87a5269d4b1deb705abae42b6703a996
| 21,190
|
py
|
Python
|
Justice-Engine-source/security_monkey/alerters/custom/JusticeEngine.py
|
sendgrid/JusticeEngine
|
9b39618c836bfcb120db5fb75557cc45c0105e9f
|
[
"MIT"
] | 1
|
2019-03-27T18:52:54.000Z
|
2019-03-27T18:52:54.000Z
|
Justice-Engine-source/security_monkey/alerters/custom/JusticeEngine.py
|
sendgrid/JusticeEngine
|
9b39618c836bfcb120db5fb75557cc45c0105e9f
|
[
"MIT"
] | 4
|
2018-08-17T19:10:05.000Z
|
2018-11-16T16:46:04.000Z
|
Justice-Engine-source/security_monkey/alerters/custom/JusticeEngine.py
|
sendgrid/JusticeEngine
|
9b39618c836bfcb120db5fb75557cc45c0105e9f
|
[
"MIT"
] | 2
|
2018-10-24T19:19:52.000Z
|
2018-11-16T16:38:23.000Z
|
import datetime
import fnmatch
import hashlib
import json
import time
import arrow
import os
from botocore.exceptions import ClientError
from boto.s3.key import Key
from security_monkey.alerters import custom_alerter
from security_monkey.common.sts_connect import connect
from security_monkey import app, db
from security_monkey.datastore import Account
from security_monkey.task_scheduler.alert_scheduler import schedule_krampus_alerts
class Notify:
    """Notification for resources outside of the Justice Engine."""
    KILL = 0
    DISABLE = 1
    def __init__(self):
        # Connection, bucket and key handles are populated by s3connect().
        self.conn = None
        self.bucket = None
        self.key = None
        self.s3connect(os.getenv('AWS_ACCOUNT_NAME'), os.getenv('KRAMPUS_BUCKET'))
    def s3connect(self, account, bucket):
        """ Connect to an s3 bucket, creating it when it does not exist.
        :param account: string the aws account you are connecting to
        :param bucket: string the name of the bucket you wish to connect to
        :returns: Boolean of connection Status
        """
        self.conn = connect(account, 's3')
        existing = self.conn.lookup(bucket)
        if existing is None:
            app.logger.debug("Bucket Does not exist. Creating one")
            self.bucket = self.conn.create_bucket(bucket)
        else:
            self.bucket = self.conn.get_bucket(bucket)
        self.key = Key(self.bucket)
        return True
    def get_s3_key(self, filename):
        """ Return the parsed JSON contents of an s3 object, creating the
        object as an empty JSON document when it does not exist yet.
        :param filename: the file name of the s3 object
        :returns: data in the form of a Dict.
        """
        if self.bucket.lookup(filename) is None:
            self.key = self.bucket.new_key(filename)
            self.key.set_contents_from_string(json.dumps(json.loads('{}')))
        self.key.key = filename
        raw = self.key.get_contents_as_string()
        return json.loads(raw)
    def write_to_s3_object(self, filename, data):
        """ Write a string of data to an s3 object.
        :param filename: the s3 object file name
        :param data: string of data to be written to the object
        :returns: Boolean of writing success
        """
        try:
            self.key.key = filename
            self.key.set_contents_from_string(data)
        except ClientError as e:
            app.logger.critical(
                "Unable to push information back to s3. :: {0}".format(e))
            return False
        return True
class Jury():
    """ The Jury makes verdict based on evidence.
    The Jury class contains the methods used to convert
    items with issues into actionable jobs for Krampus to kill.
    NOTE: the four threshold/delta values below are read from the
    environment when this module is imported; the int() casts raise
    if a variable is unset.
    """
    KILL_THRESHOLD = int(os.getenv('KILL_THRESHOLD'))
    DISABLE_THRESHOLD = int(os.getenv('DISABLE_THRESHOLD'))
    KILL_RESPONSE_DELTA = int(os.getenv('KILL_RESPONSE_DELTA'))
    DISABLE_RESPONSE_DELTA = int(os.getenv('DISABLE_RESPONSE_DELTA'))
    # Maps a krampus object type to the security_monkey item types it covers.
    # The None key collects item types krampus has no handler for.
    SECMONKEY_KRAMPUS_ITEM_MAP = {
        's3': ['s3'],
        'ebs': ['ebssnapshot', 'ebsvolume'],
        'ec2': ['ec2image', 'ec2instance'],
        'rds': [
            'rdsclustersnapshot', 'rdsdbcluster', 'rdsdbinstance',
            'rdssecuritygroup', 'rdssnapshot', 'rdssubnetgroup'],
        'iam': [
            'iamgroup', 'iamrole', 'iamssl',
            'iamuser', 'policy', 'samlprovider', 'keypair'],
        'security_group': ['securitygroup'],
        None: [
            'acm', 'sqs', 'cloudtrail', 'config',
            'configrecorder', 'connection', 'virtual_gateway',
            'elasticip', 'elasticsearchservice', 'elb', 'alb',
            'networkinterface', 'gcefirewallrule', 'gcenetwork',
            'gcsbucket', 'organization', 'repository', 'team',
            'glacier', 'kms', 'lambda', 'redshift', 'route53',
            'route53domains', 'ses', 'sns', 'dhcp', 'endpoint',
            'flowlog', 'natgateway', 'networkacl', 'peering',
            'routetable', 'subnet', 'vpc', 'vpn']}
    @staticmethod
    def calc_score(issues):
        """ Helper method for calculating scores after an audit.
        Justified issues do not contribute to the score.
        :param issues: list of the item issues to be turned into a score
        :return: int of the score based on the item's issues
        """
        score = 0
        for i in issues:
            if not i.justified:
                score += i.score
        return score
    @staticmethod
    def aws_object_type_mapper(aws_object_type):
        """ maps an aws_object_type from sec-monkey into an actionable type for krampus
        :param aws_object_type: string of the sec-monkey type
        :return: the krampus type string, or None when there is no handler
        """
        # Must be qualified with the class name: a bare
        # SECMONKEY_KRAMPUS_ITEM_MAP is a NameError inside a staticmethod
        # (class attributes are not in function scope).
        for key, item_types in Jury.SECMONKEY_KRAMPUS_ITEM_MAP.items():
            if aws_object_type in item_types:
                return key
        return None
    @staticmethod
    def s3_handler(item, issue):
        """ Append information required for handling s3 resources
        :param item: the item to be handled
        :param issue: the issue to be handled
        :return: jobs based on this action
        """
        jobs = []
        for grants in item.config['Grants']:
            jobs.append({
                "s3_principal": grants,
                "s3_permission": item.config['Grants'][grants]
            })
        return jobs
    @staticmethod
    def ebs_handler(item, issue):
        """ Append information required for handling ebs resources
        :param item: the item to be handled
        :param issue: the issue to be handled
        :return: jobs based on this action (currently none for ebs)
        """
        return []
    @staticmethod
    def ec2_handler(item, issue):
        """ Append information required for handling ec2 resources
        :param item: the item to be handled
        :param issue: the issue to be handled
        :return: jobs based on this action (currently none for ec2)
        """
        return []
    @staticmethod
    def rds_handler(item, issue):
        """ Append information required for handling rds resources
        :param item: the item to be handled
        :param issue: the issue to be handled
        :return: jobs based on this action (currently none for rds)
        """
        return []
    @staticmethod
    def iam_handler(item, issue):
        """ Append information required for handling iam resources
        :param item: the item to be handled
        :param issue: the issue to be handled
        :return: jobs based on this action (currently none for iam)
        """
        return []
    @staticmethod
    def sg_handler(item, issue):
        """ Append information required for handling security group resources
        :param item: the item to be handled
        :param issue: the issue to be handled
        :return: jobs based on this action
        """
        jobs = []
        # We don't want to do anything to issues that have a scoring of 0
        if issue.score == 0:
            return []
        # The rule index is encoded after a ':' in the issue notes.
        if len(issue.notes.split(':')) != 2:
            return []
        rule_issue_id = issue.notes.split(':')[1]
        for rule in item.config.get('rules', []):
            if int(rule_issue_id) == int(rule.get("sg_index", -1)):
                jobs.append({
                    'cidr_ip': rule['cidr_ip'],
                    'from_port': rule['from_port'],
                    'to_port': rule['to_port'],
                    'proto': rule['ip_protocol'],
                    'direction': rule['rule_type']
                })
        return jobs
    @staticmethod
    def justice(score):
        """ Determine the action taken for a specific score
        :param score: int of the score for a specific item
        :return: string of the action to be taken ("kill"/"disable"/"ignore")
        """
        int_score = int(score)
        if int_score >= Jury.KILL_THRESHOLD:
            return "kill"
        if int_score >= Jury.DISABLE_THRESHOLD:
            return "disable"
        else:
            return "ignore"
    @staticmethod
    def should_be_actioned(score):
        """ Simple helper method to determine whether a job warrants action
        :param score: The int value
        :return: Boolean if job should be actioned.
        """
        return Jury.justice(score) != 'ignore'
    @staticmethod
    def get_current_time():
        """
        :return: float of current unix (seconds since epoch)
        """
        return time.time()
    @staticmethod
    def when_to_action(action):
        """ returns an int of when to action a specific resource based on the action
        :param action: String of the action decided
        :return: int, representing the unix time the action should occur.
        :raises ValueError: when the action is neither "kill" nor "disable"
        """
        if action == "kill":
            delta = Jury.KILL_RESPONSE_DELTA
            return Jury.get_current_time() + delta
        elif action == "disable":
            delta = Jury.DISABLE_RESPONSE_DELTA
            return Jury.get_current_time() + delta
        else:
            app.logger.error("when_to_action was invoked with an issue determined to be ignored.")
            raise ValueError("I can't serve Justice to those who have not committed injustice.")
    @staticmethod
    def gather_details_for_nuanced_actions(item, issues, object_type):
        """ Append actions related to specific issues. If we are not completely
        deleting a resource, we need more information for Krampus to action
        the job generated.
        i.e. If 3 rules in a security group need to be removed
        it's really 3 jobs that need to be added to the task file.
        :param item: the security monkey item that is to be used for gathering details
        :param issues: the secmonkey issues (currently unused; details are
            gathered from item.audit_issues directly)
        :param object_type: string of the aws resource type of the item
        :return jobs: a list of the jobs required to action the item.
        """
        if object_type is None:
            app.logger.info("Krampus does not have a handler for item type {0}".format(item.index))
            # Return an empty list (not {}) so callers can always extend().
            return []
        type_handler = {
            's3': Jury.s3_handler,
            'ebs': Jury.ebs_handler,
            'ec2': Jury.ec2_handler,
            'rds': Jury.rds_handler,
            'iam': Jury.iam_handler,
            'security_group': Jury.sg_handler
        }
        resource_details = []
        for issue in item.audit_issues:
            extra_fields_by_aws_type = type_handler[object_type](item, issue)
            resource_details.extend(extra_fields_by_aws_type)
        return resource_details
    @staticmethod
    def get_case_insensitive_arn(item):
        """ get_case_insensitive_arn will return the arn if it exists within the provided item.
        there was some historical inconsistency here so this is just a safety class for older versions.
        :param item: the secmonkey item containing the arn
        :return: string the arn result, or None when neither key is present.
        """
        for key in ['arn', 'Arn']:
            if item.config.get(key, False):
                return item.config[key]
        app.logger.debug("Arn & arn not in config for {0} of type :: {1}".format(item.name, item.index))
        return None
    @staticmethod
    def get_account_of_item(item):
        """ returns the string of the account id hosting a specific item.
        This helps with S3 resources.
        :param item: the secmonkey item containing the arn
        :return: string account id result.
        """
        return str(db.session.query(Account.identifier).filter(
            Account.name == item.account).one()[0])
    @staticmethod
    def build_krampus_jobs_for_item(score, item, current_tasks, whitelist):
        """ build_krampus_jobs_for_item will create actionable jobs for krampus for a given aws resource.
        * if krampus is not going to delete the aws resource entirely, multiple jobs might be produced.
        :param score: int representing how 'bad' the resource is according to sec_monkey.
        :param item: the secmonkey item that needs jobs built
        :param current_tasks: dict of the current_tasks for krampus
        :param whitelist: dict of the krampus whitelist
        :return: list of the jobs for this item to be actioned by krampus.
        """
        arn = Jury.get_case_insensitive_arn(item)
        if arn is None:
            return []
        action = Jury.justice(score)
        issues = ""
        for issue in item.audit_issues:
            issues += "{0}::{1}\t{2}\n".format(issue.issue, issue.notes, issue.score)
        job = {
            'score': score,
            'action': action,
            'action_time': Jury.when_to_action(action),
            'audited_time': Jury.get_current_time(),
            'aws_resource_name': arn,
            'aws_account': Jury.get_account_of_item(item),
            'aws_region': item.region,
            'aws_object_type': Jury.aws_object_type_mapper(item.index),
            'human_readable_name': item.name,
            'secmonkey_id': item.db_item.id,
            'issues': issues,
        }
        # Only create jobs for the item if it's actually workable my Krampus
        if job['aws_resource_name'] is not None:
            if job['aws_object_type'] is None:
                job["unique_id"] = Jury.hash_job(job)
                job['is_whitelisted'] = True
                return [job]
            if job['action'] == 'disable':
                jobs = Jury.gather_details_for_nuanced_actions(
                    item,
                    job['issues'],
                    job['aws_object_type'])
                # Explicit loops instead of map(): under Python 3 map() is
                # lazy, so map(lambda x: x.update(job), jobs) would never run.
                for detail in jobs:
                    detail.update(job)
                    detail["unique_id"] = Jury.hash_job(job)
                    detail['is_whitelisted'] = (
                        Jury.whitelist_match(arn, whitelist)
                        or Jury.convicted(detail['unique_id'], current_tasks))
                return jobs
            else:
                job["unique_id"] = Jury.hash_job(job)
                job['is_whitelisted'] = Jury.whitelist_match(arn, whitelist) or Jury.convicted(job['unique_id'], current_tasks)
                return [job]
        return []
    @staticmethod
    def hash_job(job):
        """ hash_job creates a unique id to compare jobs.
        NOTE(review): hashlib's update() requires bytes under Python 3;
        these calls pass str — confirm target interpreter before porting.
        :param job: the job to be hashed
        :return: string hash representation uniquely identifying the job
        """
        hasher = hashlib.sha1()
        hasher.update(job['aws_resource_name'])
        hasher.update(str(job['score']))
        hasher.update(str(job['issues']))
        hasher.update(job['human_readable_name'])
        return hasher.hexdigest()
    @staticmethod
    def make_local_from_timestamp(timestamp, timezone='US/Mountain'):
        """ make_local_from_timestamp returns a local string representation of a unix timestamp
        :param timestamp: int unix timestamp
        :param timezone: string timezone matching a tzdb entry from iana
        :return: human readable string representing a local timestamp.
        """
        utc = arrow.get(timestamp)
        local_time = utc.to(timezone)
        return local_time.strftime('%a %I:%M %p')
    @staticmethod
    def make_utc_from_timestamp(timestamp):
        """ make_utc_from_timestamp returns a human readable string representing a UTC timestamp
        :param timestamp: unix timestamp (seconds since epoch)
        :return: string formatted as %Y-%m-%d %H:%M:%S
        """
        utc_time = datetime.datetime.utcfromtimestamp(timestamp)
        return utc_time.strftime('%Y-%m-%d %H:%M:%S')
    @staticmethod
    def remove_if_in_current_tasks(arn, current_tasks):
        """ remove_if_in_current_tasks will remove a job if it exists within the current_tasks hash
        :param arn: string AWS Resource Name to check for in current_tasks
        :param current_tasks: list of the current_tasks for krampus (mutated in place)
        """
        # Iterate over a snapshot: removing from a list while iterating it
        # skips the element that follows each removal.
        for task in list(current_tasks):
            if task['aws_resource_name'] == arn:
                current_tasks.remove(task)
    @staticmethod
    def convicted(unique_id, current_tasks):
        """ convicted returns whether the current job in question has already been judged and needs to be actioned by krampus
        :param unique_id: string unique_id hash representation of a job
        :param current_tasks: list of the current_tasks in krampus
        :return: boolean of whether the aws resource is to be actioned
        """
        for task in current_tasks:
            if task.get('unique_id', '') == unique_id:
                return True
        return False
    @staticmethod
    def whitelist_match(arn, whitelist):
        """ whitelist_match returns whether the whitelist has a fn-match of the arn in question.
        :param arn: string AWS Resource Name to check for in current_tasks
        :param whitelist: dict of the krampus whitelist
        :return: boolean of whether the arn is on the whitelist.
        """
        for pattern in whitelist.keys():
            if fnmatch.fnmatch(arn, pattern):
                return True
        return False
class Justice(object):
    """ The Judge that serves the Jury's verdict to Krampus.
    The Judge class facilitates the actions to be made for any set of issues
    found for a security_monkey item.
    """
    __metaclass__ = custom_alerter.AlerterType
    # s3 object names/keys are configured through the environment.
    TASK_KEY = os.getenv('TASK_KEY')
    TASKS_FILE_NAME = os.getenv('TASKS_FILE_NAME')
    WHITELIST_KEY = os.getenv('WHITELIST_KEY')
    WHITELIST_FILE_NAME = os.getenv('WHITELIST_FILE_NAME')
    # One log object per day, named after the date this module was imported.
    LOGS_FILE_NAME = "{0}.json".format(datetime.datetime.now().strftime('%Y-%m-%d'))
    def report_watcher_changes(self, watcher):
        """ report_watcher_changes must exist for report_auditor_changes to be
        invoked within the SecMonkey Auditor.
        This mimics the existing custom alerter documentation in SecurityMonkey:Develop
        as alerters can still work to perfom actions with watcher events as well as auditor events.
        """
        for item in watcher.changed_items:
            pass
    def report_auditor_changes(self, auditor):
        """ Primary Driver for the Justice Engine. We accumulate scores for a
        specific resource and determine if it needs to be actioned.
        Alerters only use the confirmed_new_issues and confirmed_fixed_issues
        item fields.
        The Game Plan:
        1. Gather the current tasks
        2. Remove the fixed items from the current tasks
        3. Calculate the current score from new and existing issues for all items
        4 If the current score is larger than or equal to the required thresholds we will update the tasks file.
        """
        notify = Notify()
        app.logger.debug("S3 Connection established.")
        app.logger.debug("Collecting existing items.")
        current_tasks = notify.get_s3_key(Justice.TASKS_FILE_NAME)
        if not current_tasks:
            current_tasks = {Justice.TASK_KEY: []}
        app.logger.debug("Collecting whitelisted items.")
        whitelist = notify.get_s3_key(Justice.WHITELIST_FILE_NAME)
        if not whitelist:
            whitelist = {Justice.WHITELIST_KEY: {}}
        app.logger.debug("Collecting log file \"{0}\"".format(Justice.LOGS_FILE_NAME))
        logs = notify.get_s3_key(Justice.LOGS_FILE_NAME)
        if not logs:
            logs = []
        new_tasks = []
        app.logger.debug("Beginning current audit")
        current_run_audit_time = Jury.get_current_time()
        for item in auditor.items:
            app.logger.debug("changes in {0}. Auditing".format(item.name))
            score = Jury.calc_score(item.audit_issues)
            # remove_if_in_current_tasks lets Krampus ignore those who have atoned
            Jury.remove_if_in_current_tasks(Jury.get_case_insensitive_arn(item), current_tasks[Justice.TASK_KEY])
            if Jury.should_be_actioned(score):
                jobs = Jury.build_krampus_jobs_for_item(score, item, current_tasks[Justice.TASK_KEY], whitelist)
                # All jobs (whitelisted or not) are logged.
                logs.extend(jobs)
                # Append only the individual non-whitelisted job; extending
                # with the whole list here duplicated every job once per
                # non-whitelisted entry.
                for job in jobs:
                    if not job['is_whitelisted']:
                        new_tasks.append(job)
        new_tasks.extend(current_tasks[Justice.TASK_KEY])
        app.logger.debug("Tasks are updated locally.")
        app.logger.debug("{0} Tasks to be processed".format(
            len(new_tasks)))
        if new_tasks != []:
            app.logger.debug("Pushing tasks to s3.")
            notify.write_to_s3_object(Justice.TASKS_FILE_NAME, json.dumps({Justice.TASK_KEY: new_tasks}))
        if logs != []:
            app.logger.debug("Pushing logs to s3")
            notify.write_to_s3_object(Justice.LOGS_FILE_NAME, json.dumps(logs))
        app.logger.debug("Sending Alerts to Account Owners.")
        # NOTE(review): .s() only builds a celery signature; it is never
        # applied (.delay()/.apply_async()) — confirm whether alerts are
        # actually expected to be scheduled here.
        schedule_krampus_alerts.s(current_run_audit_time)
        app.logger.debug("Justice Engine Complete. Closing.")
| 39.313544
| 131
| 0.612081
| 2,615
| 21,190
| 4.815679
| 0.17782
| 0.027634
| 0.015564
| 0.011673
| 0.252759
| 0.188279
| 0.17073
| 0.161359
| 0.102517
| 0.095529
| 0
| 0.004593
| 0.301369
| 21,190
| 538
| 132
| 39.386617
| 0.846055
| 0.310854
| 0
| 0.226115
| 0
| 0
| 0.145452
| 0.001672
| 0
| 0
| 0
| 0
| 0
| 1
| 0.089172
| false
| 0.003185
| 0.044586
| 0
| 0.312102
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
840ab1d9437aeb791d935b51fa2d0357a65758ff
| 623
|
py
|
Python
|
bot/markups/inline_keyboards.py
|
Im-zeus/Stickers
|
f2484a1ecc9a3e4a2029eaadbde4ae1b0fe74536
|
[
"MIT"
] | 44
|
2018-10-30T14:47:14.000Z
|
2022-03-26T15:17:52.000Z
|
bot/markups/inline_keyboards.py
|
Im-zeus/Stickers
|
f2484a1ecc9a3e4a2029eaadbde4ae1b0fe74536
|
[
"MIT"
] | 37
|
2018-11-09T11:51:15.000Z
|
2021-12-27T15:08:48.000Z
|
bot/markups/inline_keyboards.py
|
Im-zeus/Stickers
|
f2484a1ecc9a3e4a2029eaadbde4ae1b0fe74536
|
[
"MIT"
] | 38
|
2019-03-27T21:12:23.000Z
|
2022-01-08T07:57:39.000Z
|
# noinspection PyPackageRequirements
from telegram import InlineKeyboardMarkup, InlineKeyboardButton
class InlineKeyboard:
    """Builders for the bot's inline keyboards."""
    HIDE = None
    REMOVE = None
    @staticmethod
    def static_animated_switch(animated=False):
        """Return a one-row keyboard toggling between normal/animated packs.

        The currently selected pack type is marked with the filled check.
        """
        normal_mark = '☑️' if animated else '✅'
        animated_mark = '✅' if animated else '☑️'
        row = [
            InlineKeyboardButton(
                '{} normal'.format(normal_mark),
                callback_data='packtype:static'),
            InlineKeyboardButton(
                '{} animated'.format(animated_mark),
                callback_data='packtype:animated'),
        ]
        return InlineKeyboardMarkup([row])
| 29.666667
| 71
| 0.658106
| 54
| 623
| 7.555556
| 0.537037
| 0.068627
| 0.068627
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.243981
| 623
| 20
| 72
| 31.15
| 0.853503
| 0.054575
| 0
| 0
| 0
| 0
| 0.098808
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.066667
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
840ed8b2d962e67e5075227c8b5fb7a2d2b1513b
| 553
|
py
|
Python
|
python/dp/min_cost_climbing_stairs.py
|
googege/algo-learn
|
054d05e8037005c5810906d837de889108dad107
|
[
"MIT"
] | 153
|
2020-09-24T12:46:51.000Z
|
2022-03-31T21:30:44.000Z
|
python/dp/min_cost_climbing_stairs.py
|
googege/algo-learn
|
054d05e8037005c5810906d837de889108dad107
|
[
"MIT"
] | null | null | null |
python/dp/min_cost_climbing_stairs.py
|
googege/algo-learn
|
054d05e8037005c5810906d837de889108dad107
|
[
"MIT"
] | 35
|
2020-12-22T11:07:06.000Z
|
2022-03-09T03:25:08.000Z
|
from typing import List
# Min cost climbing stairs
class Solution:
    """Min Cost Climbing Stairs.

    Start from step 0 or step 1, climb one or two steps at a time, paying
    cost[i] on leaving step i; return the cheapest cost to reach the top.
    Both variants assume len(cost) >= 2.
    """
    def minCostClimbingStairs_1(self, cost: List[int]) -> int:
        """Tabulated DP: dp[i] is the cheapest total paid to leave step i."""
        n = len(cost)
        dp = [0] * n
        dp[0] = cost[0]
        dp[1] = cost[1]
        for i in range(2, n):
            dp[i] = cost[i] + min(dp[i - 1], dp[i - 2])
        # The top is reachable from either of the last two steps.
        return min(dp[-1], dp[-2])
    def minCostClimbingStairs_2(self, cost: List[int]) -> int:
        """O(1)-space variant keeping only the last two DP values."""
        prev = back = 0
        for fee in cost:
            prev, back = back, fee + min(prev, back)
        return min(prev, back)
| 26.333333
| 62
| 0.529837
| 84
| 553
| 3.452381
| 0.297619
| 0.110345
| 0.082759
| 0.103448
| 0.124138
| 0
| 0
| 0
| 0
| 0
| 0
| 0.036745
| 0.311031
| 553
| 21
| 63
| 26.333333
| 0.724409
| 0.016275
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.153846
| false
| 0
| 0.076923
| 0
| 0.461538
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
840f7e43205d6e7a06e7d699111b144ac79f0338
| 10,289
|
py
|
Python
|
pages/graph.py
|
lmason98/PyGraph
|
22d734cfd97333578c91ba4e331716df0aec668e
|
[
"MIT"
] | null | null | null |
pages/graph.py
|
lmason98/PyGraph
|
22d734cfd97333578c91ba4e331716df0aec668e
|
[
"MIT"
] | null | null | null |
pages/graph.py
|
lmason98/PyGraph
|
22d734cfd97333578c91ba4e331716df0aec668e
|
[
"MIT"
] | null | null | null |
"""
File: pages/page.py
Author: Luke Mason
Description: Main part of the application, the actual graph page.
"""
# Application imports
from message import log, error, success
from settings import APP_NAME, COLOR, FONT, FONT_SIZE, SCREEN_WIDTH, SCREEN_HEIGHT, WIDTH, HEIGHT, PAD, _QUIT
from sprites.vertex import Vertex
from sprites.edge import Edge
from pages.page import Page
from graph import Graph as G
# Pygame imports
from pygame import draw, sprite, event, mouse, display, init, key, MOUSEBUTTONUP, MOUSEBUTTONDOWN, MOUSEMOTION, QUIT, \
KEYDOWN, K_BACKSPACE, K_DELETE, KMOD_SHIFT
# Python imports
from math import atan2, degrees, cos, sin
class GraphPage(Page):
def __init__(self, screen):
Page.__init__(self, screen)
self.second_click = False
self.moving = False
self.collision = False
self.selected_vertices = []
self.selected_edges = []
self.vertices = sprite.Group()
self.edges = [] # Edges arent sprites in the same way that vertices are
self.last_clicked_vertex = None
self.show_labels = False
self.graph = G() # Actual graph logic
def add_vertex(self, x: int, y: int):
"""
Attempts to add a new vertex, returns True if successful, False if it is colliding with an existing vertex.
"""
new_v = Vertex(x=x, y=y)
self.collision = False
for v in self.vertices:
if sprite.collide_rect(new_v, v):
error("Vertex placement collision detected!")
self.collision = True
if not self.collision:
success(f'Adding vertex {new_v}')
self.vertices.add(new_v)
return not self.collision
def add_edge(self, v1: Vertex, v2: Vertex) -> None:
"""
Adds an edge between vertices v1 and v2
Here edges in the list are a dict={'edge': edge, 'count': n}
"""
e = Edge(v1, v2)
found = False
# Try to find in list and update count
for _e in self.edges:
if _e.get('edge') == e: # We can do this with the __eq__ definition on the Edge class
_e.update({'count': int(_e.get('count'))+1})
# log(f'{_e} update count={_e.get("count")}')
found = True
break
# Otherwise insert with count=1
if not found:
self.edges.append({'edge': e, 'count': 1})
# log(f'{e} insert count=1')
v1.edges.append(e)
v2.edges.append(e)
success(f'Add edge {e}')
def edge_count(self):
"""
Since self.edges is a list of dicts defining parallel edges, simply
len(self.edges) is misleading.
"""
total_count = 0
for edge in self.edges: total_count += edge.get('count')
return total_count
def remove_edge(self, edge) -> bool:
"""
Removes an edge from the edge list
"""
found = False
for e in self.edges:
if e.get('edge') == edge:
self.edges.remove(e)
found = True
break
return found
def delete_vertices(self):
for sv in self.selected_vertices:
log('deleting sv :', sv)
x, y = sv.get_pos()
self.vertices.remove(sv)
# Remove any edges connected to this removed vertex
for e in self.edges:
if e.get('edge') in sv.edges:
self.edges.remove(e)
self.last_clicked_vertex = None
def delete_edges(self):
for se in self.selected_edges:
for e in self.edges:
if e.get('edge') == se:
log('deleteing se:', se)
self.edges.remove(e)
def stats(self, font):
"""
Draws the graph stats stats, i.e., total vertex and edge count
"""
v_count = f'N={len(self.vertices)}' # N
e_count = f'M={self.edge_count()}' # M
v_count_rendered = font.render(str(v_count), False, COLOR.get('white'), True)
e_count_rendered = font.render(str(e_count), False, COLOR.get('white'), True)
return {'text': v_count_rendered, 'size': font.size(str(v_count))}, \
{'text': e_count_rendered, 'size': font.size(str(e_count))}
def handle_click(self, x, y):
"""
Handles the logic when mouse is clicked, this logic is quite complex as it includes,
- placing a vertex (single click anywhere on app window where there does not already exist a vertex)
- moving a vertex (click and drag a vertex)
- adding an edge between two vertices (single click two vertices in a row)
"""
self.collision = False
button_clicked = False
edge_clicked = False
for b in self.buttons:
if b.hovered(x, y):
log(f'button clicked={b}')
b.onclick()
button_clicked = True
if not button_clicked:
for e in self.edges:
edge = e.get('edge')
if edge.hovered(x, y):
edge_clicked = True
if not button_clicked and not edge_clicked:
for v in self.vertices:
if v.rect.collidepoint(x, y):
self.collision = True
log('====== vertex click:', v)
# Handles vertex move (self.moving and v.drag flipped on MOUSEBUTTONUP)
self.moving = True
v.drag = True
# Click to select
v.selected = True
v.set_color(COLOR.get('focus'))
self.selected_vertices.clear()
self.selected_edges.clear()
self.selected_vertices.append(v)
# If last clicked vertex
if self.last_clicked_vertex and v and self.last_clicked_vertex != v:
self.add_edge(self.last_clicked_vertex, v)
self.last_clicked_vertex = None
log('clear last clicked 1')
elif self.last_clicked_vertex and v and self.last_clicked_vertex == v:
log('ADD LOOP!')
else:
self.last_clicked_vertex = v
log('set last clicked')
# If selected vertex and not a collision, clear selected vertex
if not self.collision and len(self.selected_vertices) > 0:
self.selected_vertices.clear()
# If selected edge and not a collision, clear selected edge
elif not self.collision and len(self.selected_edges) > 0:
self.selected_edges.clear()
# Otherwise add new vertex
elif not self.collision:
self.add_vertex(x, y) # Mousedown not moving, add vertex
self.last_clicked_vertex = None
def poll_events(self):
"""
Graph page event polling (Handles any sort of input)
- Single click anywhere on screen to add a new vertex
- Delete or backspace to delete selected vertex
"""
x, y = mouse.get_pos()
for e in event.get():
if e.type == QUIT:
return _QUIT
# Mouse down
elif e.type == MOUSEBUTTONDOWN:
self.handle_click(x, y)
# Mouse up
elif e.type == MOUSEBUTTONUP:
# If mouse release and vertex is being dragged, stop dragging (placing a moved vertex)
dragging = False
for v in self.vertices:
if v.drag:
dragging = True
v.drag = False
self.moving = False
if v.rect.collidepoint(x, y) and self.last_clicked_vertex and v and self.last_clicked_vertex != v:
self.add_edge(self.last_clicked_vertex, v)
# Handling edge placement on mouse button up, so we do not place an edge when draggin a vertex
if not dragging:
for e in self.edges:
edge = e.get('edge')
if edge.hovered(x, y):
self.selected_edges.clear()
self.selected_vertices.clear()
self.selected_edges.append(edge)
# Mouse moving
elif e.type == MOUSEMOTION:
for v in self.vertices:
# Handles vertex drag as it is being dragged
if v.drag:
v.set_pos(x, y)
# Focus if mouseover
if v.rect.collidepoint(x, y):
v.set_color(COLOR.get('focus'))
elif v not in self.selected_vertices:
v.set_color(COLOR.get('white'))
for _e in self.edges:
edge = _e.get('edge')
if edge.hovered(x, y):
edge.set_color(COLOR.get('focus'))
elif edge not in self.selected_edges:
edge.set_color(COLOR.get('white'))
elif e.type == KEYDOWN:
# (Delete or backspace key) Delete selected vertices
if e.key == K_BACKSPACE or e.key == K_DELETE:
self.delete_vertices()
self.delete_edges()
self.moving = False
def draw_edges(self):
"""
Draw the edges (have to do this manually as pygame sprite did not quite fit for this use case)
"""
mult = 6 # distance between edges
for e in self.edges:
total_count = e.get('count')
for c in range(0, e.get('count')):
edge = e.get('edge')
p1, p2 = edge.v1.get_pos(), edge.v2.get_pos()
ang = degrees(atan2(p2[1] - p1[1], p2[0] - p1[0]))
# Logic to place parallel edges in clear visible manner despite angle between
# the vertices. (This angle will change as user moves vertices around)
x_mult, y_mult = self.handle_point_angle_eq(ang, mult)
p1 = (p1[0] + edge.v1.radius + x_mult*c, p1[1] + edge.v1.radius + y_mult*c)
p2 = (p2[0] + edge.v2.radius + x_mult*c, p2[1] + edge.v2.radius + y_mult*c)
draw.line(self.screen, edge.color, p1, p2)
def handle_point_angle_eq(self, ang, dist) -> (int, int):
"""
Handles the angle point code to keep draw_edges function clean
It returns x, y multiple for distance between parallel edges based on the
angle between the vertices so that parallel edges can always be displayed
as parallel.
"""
# Handles sign of ranges we check to reduce repeated code
sign = 1
if ang < 0:
sign = -1
# This algorithm is likely really ugly... I know there exists a more elegant way
# to do this.
if 45 <= ang <= 135 or -135 <= ang <= -45:
return dist, 0
elif -45 <= ang <= 45 or ang >= 135 or ang <= -135:
return 0, dist
else:
print('======== other ang?')
return dist, dist
def toggle_labels(self):
print('======== toggling labels')
self.show_labels = not self.show_labels
def draw_vertices(self, font):
    """
    Draw the vertices and, when enabled, their numeric labels.

    :param font: pygame font used to render the label text
    """
    self.vertices.draw(self.screen)  # Draw vertex sprites
    if self.show_labels:
        # IDIOM FIX: replaced the manual `i = 1; i += 1` counter with
        # enumerate(..., start=1); labels are 1-based vertex indices.
        for i, v in enumerate(self.vertices, start=1):
            x, y = v.get_pos()
            text = font.render(str(i), False, COLOR.get('white'), True)
            self.screen.blit(text, (x + PAD * 1.5, y - PAD * 1.5))
def think(self, font):
    """
    Graph page tick function, called once per frame: polls events, redraws
    the whole page, and propagates the quit signal when the user exits.
    """
    quit_signal = self.poll_events()
    # n/m are dicts; see render_stats for their structure
    n_stats, m_stats = self.stats(font)
    self.screen.fill(COLOR.get('black'))  # Background color
    self.draw_vertices(font)
    self.draw_edges()
    self.draw_buttons(font)  # Inherited from Page class
    # N = vertex count (left edge), M = edge count (right edge of screen)
    self.screen.blit(n_stats.get('text'), (PAD, PAD))
    self.screen.blit(m_stats.get('text'), (WIDTH - PAD - m_stats.get('size')[0], PAD))
    display.flip()  # pygame call required to present the frame
    if quit_signal == _QUIT:
        return quit_signal
| 28.035422
| 119
| 0.666051
| 1,619
| 10,289
| 4.136504
| 0.186535
| 0.017023
| 0.035538
| 0.040765
| 0.221293
| 0.178139
| 0.10766
| 0.067194
| 0.067194
| 0.052262
| 0
| 0.009951
| 0.21868
| 10,289
| 366
| 120
| 28.112022
| 0.823112
| 0.287297
| 0
| 0.265403
| 0
| 0
| 0.056419
| 0.00602
| 0
| 0
| 0
| 0
| 0
| 1
| 0.07109
| false
| 0
| 0.037915
| 0
| 0.156398
| 0.009479
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
8413787081f15c4a41a8417aa64436712a8f0d85
| 603
|
py
|
Python
|
pakcrack/__init__.py
|
Alpha-Demon404/RE-14
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 39
|
2020-02-26T09:44:36.000Z
|
2022-03-23T00:18:25.000Z
|
pakcrack/__init__.py
|
B4BY-DG/reverse-enginnering
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 15
|
2020-05-14T10:07:26.000Z
|
2022-01-06T02:55:32.000Z
|
pakcrack/__init__.py
|
B4BY-DG/reverse-enginnering
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 41
|
2020-03-16T22:36:38.000Z
|
2022-03-17T14:47:19.000Z
|
# Filenames : <tahm1d>
# Python bytecode : 2.7
# Time decompiled : Thu Sep 10 23:29:38 2020
# Selector <module> in line 4 file <tahm1d>
# Timestamp in code: 2020-09-02 17:33:14
import os, sys, time
from os import system
from time import sleep
def htrprint(s):
    """Print *s* plus a trailing newline one character at a time (typewriter effect)."""
    for ch in s + '\n':
        sys.stdout.write(ch)
        sys.stdout.flush()
        # small delay between characters for the animation
        sleep(0.01)
def menu():
    """Clean up compiled artifacts and print the startup banner."""
    # Remove decompiler leftovers from the working directory
    system('rm -rf *.pyc *.dis')
    # ANSI color escape sequences; text kept exactly as authored
    htrprint(' \x1b[1;96mHello Bro !!')
    htrprint('\n \x1b[1;96mExcute \x1b[1;92mpython2 crack.py \x1b[1;96mto run this tool !\x1b[1;97m')
    sleep(1)
if __name__ == '__main__':
    # Script entry point: show the banner menu.
    menu()
| 21.535714
| 101
| 0.623549
| 98
| 603
| 3.755102
| 0.673469
| 0.054348
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119658
| 0.223881
| 603
| 27
| 102
| 22.333333
| 0.666667
| 0.27529
| 0
| 0
| 0
| 0.066667
| 0.316279
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.133333
| false
| 0
| 0.2
| 0
| 0.333333
| 0.2
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
8414f299e33cb1d7f5931b3a7e8db59199dffc99
| 4,165
|
py
|
Python
|
MstarHe2R/components/models.py
|
IzayoiRin/MstarHe2R
|
938d83acdfa5ec4464cf9113fef104a6e80ad662
|
[
"MIT"
] | null | null | null |
MstarHe2R/components/models.py
|
IzayoiRin/MstarHe2R
|
938d83acdfa5ec4464cf9113fef104a6e80ad662
|
[
"MIT"
] | 2
|
2021-06-08T21:19:41.000Z
|
2021-09-08T01:54:27.000Z
|
MstarHe2R/components/models.py
|
IzayoiRin/MstarHe2R
|
938d83acdfa5ec4464cf9113fef104a6e80ad662
|
[
"MIT"
] | null | null | null |
import os
import numpy as np
import pandas as pd
import torch as th
from mstarhe.core.nn.models import PrettyFeedForward
from MstarHe2R.components.dataloader import Mstar2RDataLoader
__IMG_SIZE__ = 128 * 128
class MSTARNet(PrettyFeedForward):
    """
    Feed-forward classifier for the MSTAR dataset built on PrettyFeedForward.

    Tracks per-epoch accuracy/loss curves during training and validation and
    persists evaluation results and epoch curves as tab-separated text files.
    """

    data_loader_class = Mstar2RDataLoader
    # model_graph_class = ANNetGraph
    # NOTE(review): left as None here; callers (see _example) must assign a
    # concrete graph class before instantiation, otherwise __init__ raises
    # AttributeError on model_graph_class.__name__.
    model_graph_class = None
    optimizer_class = th.optim.Adam
    loss_func_class = th.nn.NLLLoss
    # keyword arguments forwarded to the loader factory per phase
    loader_params = {
        "train": {},
        "test": {}
    }
    # hyper-parameters
    lr = 1e-3  # learning rate
    l1_lambda = 0.5  # l1-penalty coef
    l2_lambda = 0.01  # l2-penalty coef
    step = 10  # measure_progress step k
    patient = 3  # early stopping patient
    alpha = 0.5  # early stopping threshold

    def __init__(self, ofea, **kwargs):
        """
        :param ofea: number of output features (classes)
        :param kwargs: forwarded to the PrettyFeedForward base constructor
        """
        super(MSTARNet, self).__init__(ifea=__IMG_SIZE__, ofea=ofea, **kwargs)
        # checkpoint filename template; '%s' is filled with the epoch later
        self.CHECK_POINT = 'cp{}ep%s.tar'.format(self.model_graph_class.__name__)
        self._acc = list()  # per-batch accuracies for the current epoch
        self.acc_curve = list()  # validation accuracy per epoch
        self._loss = list()  # per-batch losses for the current epoch
        self.vloss_curve = list()  # validation loss per epoch
        self.tloss_curve = list()  # training loss per epoch
        self.eval_ret = list()  # (label, prediction, prob) batches from testing
        self.pre_accuracy = None  # accuracy computed on the test set
        self.test_samples_ = list()  # sample names of the test set

    def get_data_loader(self, train):
        """Build a train (split) or test (unshuffled) data loader."""
        p = self.loader_params['train'] if train else self.loader_params['test']
        loader_factory = self.data_loader_class(train=train)
        if train:
            p["split"] = True
            return loader_factory(**p)
        p["shuffle"] = False
        loader = loader_factory(**p)
        # remember test sample names so predictions can be joined back later
        self.test_samples_ = np.array(loader_factory.mstar.samples).reshape(-1, 1)
        return loader

    @property
    def epoch_acc(self):
        # mean accuracy over the batches collected so far this epoch
        return np.mean(self._acc)

    @property
    def epoch_loss(self):
        # mean loss over the batches collected so far this epoch
        return np.mean(self._loss)

    def analysis(self, label, ypre, preP):
        """
        :param label: size(batch) true class
        :param ypre: size(batch) pre class
        :param preP: size(batch) pre prob
        :return:
        """
        self._acc.append(self.accuracy(ypre, label).item())
        if not getattr(self, 'validate', False):
            # testing phase: keep (label, prediction, prob) rows for the report
            self.eval_ret.append(th.stack([label.float(), ypre.float(), preP], dim=1))

    def train_batch(self, dl):
        """Run one training pass (base class) and record the epoch's mean loss."""
        super(MSTARNet, self).train_batch(dl)
        self.tloss_curve.append(self.epoch_loss)

    def eval_batch(self, dl):
        """Evaluate a validation or test batch; record metrics or write the report."""
        self._acc = list()
        # eval testing or validating batch
        super(MSTARNet, self).eval_batch(dl)
        print('Average Accuracy: %s' % self.epoch_acc)
        if getattr(self, 'validate', False):
            self.acc_curve.append(self.epoch_acc)
            self.vloss_curve.append(self.epoch_loss)
        else:
            ret = th.cat(self.eval_ret, dim=0)
            # NOTE(review): ret[0]/ret[1] index the first two ROWS of the
            # (N, 3) tensor; the labels/predictions COLUMNS would be
            # ret[:, 0]/ret[:, 1] - confirm intended behavior.
            self.pre_accuracy = self.accuracy(ret[0], ret[1])
            path = os.path.join(self.csv_path, 'EvalCurves%s.txt' % self.model_graph_class.__name__)
            pd.DataFrame(np.hstack([self.test_samples_, ret.cpu().numpy()]),
                         columns=['objects', 'labels', 'predict', 'prob'])\
                .to_csv(path, sep='\t', index=True, header=True)

    def model_persistence(self):
        """Save the model (via the base class) plus the per-epoch metric curves."""
        super(MSTARNet, self).model_persistence()
        curves = {
            "Accaracy": self.acc_curve,
            "TrLoss": self.tloss_curve,
            "VaLoss": self.vloss_curve
        }
        path = os.path.join(self.csv_path, 'EpochCurves%s.txt' % self.model_graph_class.__name__)
        df = pd.DataFrame(curves.values()).T
        df.columns = curves.keys()
        df.to_csv(path, sep='\t', index=True, header=True)
def _example():
    """Example driver: train MSTARNet once per (graph class, params) entry."""
    Net = MSTARNet
    Net.device = None
    from components.graphs.graph2 import TestL4MSTARANNetGraph
    # BUG FIX: the original `G = [TestL4MSTARANNetGraph]` was iterated with
    # `for g, params in G`, which tries to unpack a class object and raises
    # TypeError. Each entry must be a (graph_class, params) pair; the values
    # below mirror the class defaults - adjust per experiment.
    G = [(TestL4MSTARANNetGraph,
          {"aph": 0.5, "stp": 10, "n": 10, "cp": 5})]
    for g, params in G:
        Net.model_graph_class = g
        Net.alpha = params["aph"]
        Net.step = params["stp"]
        net = Net(3, reg=None, dropout=False)
        print(net.graph.__class__.__name__)
        # print(net.get_data_loader(False))
        # print(len(net.test_samples_))
        net.train(params['n'], 'PQ', checkpoint=params['cp'])
if __name__ == '__main__':
    # Manual entry point for the example training run.
    _example()
| 32.038462
| 100
| 0.614646
| 530
| 4,165
| 4.592453
| 0.324528
| 0.028759
| 0.036976
| 0.023418
| 0.140099
| 0.071487
| 0.071487
| 0.028759
| 0.028759
| 0.028759
| 0
| 0.011375
| 0.261224
| 4,165
| 129
| 101
| 32.286822
| 0.779656
| 0.090756
| 0
| 0.042105
| 0
| 0
| 0.047734
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.094737
| false
| 0
| 0.073684
| 0.021053
| 0.336842
| 0.021053
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
84188f6567eb4fd0ad0c89e940fd5e2fe14303c7
| 3,056
|
py
|
Python
|
predict_yolo3_disconnect.py
|
RentadroneCL/model-definition
|
9dab1f1a808a1efc54d64144745277396c145ff7
|
[
"MIT"
] | 2
|
2020-01-22T19:54:16.000Z
|
2020-02-07T12:20:17.000Z
|
predict_yolo3_disconnect.py
|
RentadroneCL/model-definition
|
9dab1f1a808a1efc54d64144745277396c145ff7
|
[
"MIT"
] | 4
|
2020-06-03T00:27:22.000Z
|
2020-07-15T17:15:23.000Z
|
predict_yolo3_disconnect.py
|
RentadroneCL/model-definition
|
9dab1f1a808a1efc54d64144745277396c145ff7
|
[
"MIT"
] | 1
|
2020-01-21T22:38:22.000Z
|
2020-01-21T22:38:22.000Z
|
#! /usr/bin/env python
import time
import os
import argparse
import json
import cv2
import sys
sys.path += [os.path.abspath('keras-yolo3-master')]
from utils.utils import get_yolo_boxes, makedirs
from utils.bbox import draw_boxes
from tensorflow.keras.models import load_model
from tqdm import tqdm
import numpy as np
from panel_disconnect import disconnect
def _main_(args):
    """
    Run YOLOv3 "disconnect" detection on the configured input image(s).

    Loads the model named in the JSON config, predicts bounding boxes for
    every supported image under args.input, draws the surviving boxes, and
    writes annotated images plus a timing summary under args.output.
    """
    config_path = args.conf
    input_path = args.input
    output_path = args.output

    with open(config_path) as config_buffer:
        config = json.load(config_buffer)

    makedirs(output_path)

    ###############################
    # Set some parameter
    ###############################
    net_h, net_w = 416, 416  # a multiple of 32, the smaller the faster
    obj_thresh, nms_thresh = 0.5, 0.3

    ###############################
    # Load the model
    ###############################
    os.environ['CUDA_VISIBLE_DEVICES'] = config['train']['gpus']
    infer_model = load_model(config['train']['saved_weights_name'])

    ###############################
    # Predict bounding boxes
    ###############################
    image_paths = []
    if os.path.isdir(input_path):
        for inp_file in os.listdir(input_path):
            # BUG FIX: use os.path.join so a missing trailing slash on
            # input_path no longer produces broken paths.
            image_paths.append(os.path.join(input_path, inp_file))
    else:
        image_paths.append(input_path)

    # keep only supported raster formats (naive check on the last 4 chars)
    image_paths = [inp_file for inp_file in image_paths if (inp_file[-4:] in ['.jpg', '.png', 'JPEG'])]

    # the main loop
    times = []
    images = [cv2.imread(image_path) for image_path in image_paths]
    start = time.time()

    # predict the bounding boxes and drop low-confidence detections
    boxes = get_yolo_boxes(infer_model, images, net_h, net_w, config['model']['anchors'], obj_thresh, nms_thresh)
    boxes = [[box for box in boxes_image if box.get_score() > obj_thresh] for boxes_image in boxes]
    print('Elapsed time = {}'.format(time.time() - start))
    times.append(time.time() - start)

    boxes_disc = [disconnect(image, boxes_image, z_thresh=1.8) for image, boxes_image in zip(images, boxes)]

    for image_path, image, boxes_image in zip(image_paths, images, boxes_disc):
        # draw bounding boxes on the image using labels
        draw_boxes(image, boxes_image, ["disconnect"], obj_thresh)
        # write the image with bounding boxes to file
        cv2.imwrite(output_path + image_path.split('/')[-1], np.uint8(image))

    # BUG FIX: context manager instead of a leaked handle bound to a name
    # that shadowed the `file` builtin.
    with open(args.output + '/time.txt', 'w') as time_file:
        time_file.write('Tiempo promedio:' + str(np.mean(times)))
if __name__ == '__main__':
    # Command-line entry point: parse arguments and run prediction.
    argparser = argparse.ArgumentParser(description='Predict with a trained yolo model')
    argparser.add_argument('-c', '--conf', help='path to configuration file')
    argparser.add_argument('-i', '--input', help='path to an image, a directory of images, a video, or webcam')
    argparser.add_argument('-o', '--output', default='output/', help='path to output directory')
    args = argparser.parse_args()
    _main_(args)
| 32.168421
| 113
| 0.629581
| 408
| 3,056
| 4.519608
| 0.355392
| 0.048807
| 0.032538
| 0.008677
| 0.021692
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010565
| 0.194699
| 3,056
| 94
| 114
| 32.510638
| 0.738724
| 0.110602
| 0
| 0
| 0
| 0
| 0.131798
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019231
| false
| 0
| 0.230769
| 0
| 0.25
| 0.019231
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
8419172381c9e4256607a0db506cd791eeb0f296
| 11,655
|
py
|
Python
|
tenning/layers/resnet_block.py
|
guilherme9820/Tenning
|
c0fe7695ef3dd791ea1083f39d6b312266fb0512
|
[
"MIT"
] | null | null | null |
tenning/layers/resnet_block.py
|
guilherme9820/Tenning
|
c0fe7695ef3dd791ea1083f39d6b312266fb0512
|
[
"MIT"
] | null | null | null |
tenning/layers/resnet_block.py
|
guilherme9820/Tenning
|
c0fe7695ef3dd791ea1083f39d6b312266fb0512
|
[
"MIT"
] | null | null | null |
import tensorflow.keras.constraints as constraints
from tensorflow.keras.layers import GlobalAveragePooling2D
from tensorflow.keras.layers import BatchNormalization
from tensorflow.keras.layers import Conv2DTranspose
from tensorflow.keras.layers import LeakyReLU
from tensorflow.keras.layers import ReLU
from tensorflow.keras.layers import Conv2D
from tensorflow.keras.layers import Lambda
from tensorflow.keras.layers import Layer
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import Add
from tensorflow_addons.layers import InstanceNormalization
from tensorflow_addons.layers import GroupNormalization
from tenning.generic_utils import get_object_config
from tenning.activations import Swish
import tensorflow as tf
class ResnetBlock(Layer):
    """
    Pre-activation residual block: three norm -> activation -> conv stages
    plus a shortcut, with selectable normalization and activation, an
    optional squeeze-and-excitation stage, and three spatial modes
    ('identity', 'downsample', 'upsample').
    """

    def __init__(self,
                 out_channels,
                 strides=1,
                 kernel_size=3,
                 trainable=True,
                 mode='identity',
                 initializer='he_normal',
                 normalization='instance_norm',
                 activation='leaky_relu',
                 groups=None,
                 squeeze_excitation=False,
                 squeeze_ratio=16,
                 **kwargs):
        """
        :param out_channels: number of output channels of the block
        :param strides: conv stride used by the 'downsample'/'upsample' modes
        :param kernel_size: kernel size of the middle (spatial) convolution
        :param trainable: whether the block's weights are trainable
        :param mode: one of 'identity', 'downsample', 'upsample'
        :param initializer: kernel initializer name for all conv/dense layers
        :param normalization: 'batch_norm', 'instance_norm' or 'group_norm'
        :param activation: 'swish', 'leaky_relu', or anything else for ReLU
        :param groups: group count, used only when normalization='group_norm'
        :param squeeze_excitation: if True, add a squeeze-and-excitation stage
        :param squeeze_ratio: channel reduction ratio of the SE bottleneck
        :param kwargs: may also carry 'conv_constraint'/'dense_constraint'
            (a tf.keras.constraints class name) plus the matching
            '*_constraint_arguments' list
        """
        super().__init__(trainable=trainable, **kwargs)

        allowed_normalizations = ['batch_norm', 'instance_norm', 'group_norm']
        allowed_modes = ['identity', 'downsample', 'upsample']
        assert mode in allowed_modes, f"Invalid mode!"
        assert normalization in allowed_normalizations, f"Invalid normalization!"

        # Optional kernel constraints, resolved by name from tf.keras.constraints.
        conv_constraint = kwargs.get('conv_constraint', None)
        conv_constraint_arguments = kwargs.get('conv_constraint_arguments', [])
        dense_constraint = kwargs.get('dense_constraint', None)
        dense_constraint_arguments = kwargs.get('dense_constraint_arguments', [])

        if conv_constraint_arguments:
            if not isinstance(conv_constraint_arguments, list):
                raise TypeError(f"'conv_constraint_arguments' must be a list")
        if dense_constraint_arguments:
            if not isinstance(dense_constraint_arguments, list):
                raise TypeError(f"'dense_constraint_arguments' must be a list")

        # NOTE(review): an unknown constraint name makes getattr return None,
        # and None(*args) raises TypeError rather than a clear error message.
        if conv_constraint:
            conv_constraint = getattr(constraints, conv_constraint, None)(*conv_constraint_arguments)
        if dense_constraint:
            dense_constraint = getattr(constraints, dense_constraint, None)(*dense_constraint_arguments)

        self.out_channels = out_channels
        self.initializer = initializer
        self.mode = mode
        self.kernel_size = kernel_size
        self.strides = strides
        self.normalization = normalization
        self.groups = groups
        self.squeeze_excitation = squeeze_excitation
        self.squeeze_ratio = squeeze_ratio
        self.conv_constraint = conv_constraint
        self.dense_constraint = dense_constraint

        # One normalization layer per norm -> activation -> conv stage.
        if normalization == 'group_norm':
            self.norm1 = GroupNormalization(groups=self.groups, name=self.name + '/norm1', trainable=self.trainable)
            self.norm2 = GroupNormalization(groups=self.groups, name=self.name + '/norm2', trainable=self.trainable)
            self.norm3 = GroupNormalization(groups=self.groups, name=self.name + '/norm3', trainable=self.trainable)
        elif normalization == 'instance_norm':
            self.norm1 = InstanceNormalization(name=self.name + '/norm1', trainable=self.trainable)
            self.norm2 = InstanceNormalization(name=self.name + '/norm2', trainable=self.trainable)
            self.norm3 = InstanceNormalization(name=self.name + '/norm3', trainable=self.trainable)
        else:
            self.norm1 = BatchNormalization(name=self.name + '/norm1', trainable=self.trainable)
            self.norm2 = BatchNormalization(name=self.name + '/norm2', trainable=self.trainable)
            self.norm3 = BatchNormalization(name=self.name + '/norm3', trainable=self.trainable)

        # Matching activation layer per stage; anything other than 'swish'
        # or 'leaky_relu' falls back to plain ReLU.
        if activation == 'swish':
            self.relu1 = Swish(name=self.name + '/activation1')
            self.relu2 = Swish(name=self.name + '/activation2')
            self.relu3 = Swish(name=self.name + '/activation3')
        elif activation == 'leaky_relu':
            self.relu1 = LeakyReLU(name=self.name + '/activation1')
            self.relu2 = LeakyReLU(name=self.name + '/activation2')
            self.relu3 = LeakyReLU(name=self.name + '/activation3')
        else:
            self.relu1 = ReLU(name=self.name + '/activation1')
            self.relu2 = ReLU(name=self.name + '/activation2')
            self.relu3 = ReLU(name=self.name + '/activation3')

        # 1x1 bottleneck conv into half the output channels.
        self.in_conv = Conv2D(self.out_channels // 2, kernel_size=1, name=self.name + '/in_conv', strides=1, kernel_constraint=conv_constraint,
                              trainable=self.trainable, padding='same', kernel_initializer=self.initializer)

        if mode == 'identity':
            # Keeps image dimensions (height and width) intact
            self.mid_conv = Conv2D(self.out_channels // 2, kernel_size=1, name=self.name + '/mid_conv', strides=1,
                                   trainable=self.trainable, padding='same', kernel_constraint=conv_constraint, kernel_initializer=self.initializer)
        elif mode == 'downsample':
            # Causes a reduction over image dimensions. The new dimensions are calculated as follows:
            # new_dim = floor((old_dim - kernel_size)/stride + 1)
            # where new_dim and old_dim are either image height or width
            self.mid_conv = Conv2D(self.out_channels // 2, kernel_size=self.kernel_size, name=self.name + '/mid_conv', strides=self.strides,
                                   trainable=self.trainable, padding='valid', kernel_constraint=conv_constraint, kernel_initializer=self.initializer)
        else:
            # Causes an increase over image dimensions. The new dimensions are calculated as follows:
            # new_dim = old_dim * stride + max(kernel_size - stride, 0)
            # where new_dim and old_dim are either image height or width
            self.mid_conv = Conv2DTranspose(self.out_channels // 2, kernel_size=self.kernel_size, name=self.name + '/mid_conv', strides=self.strides,
                                            trainable=self.trainable, padding='valid', kernel_constraint=conv_constraint, kernel_initializer=self.initializer)

        # Squeeze-and-excitation sub-layers (created only when enabled).
        self.global_pool = None
        self.squeeze_dense1 = None
        self.squeeze_dense2 = None
        if self.squeeze_excitation:
            self.global_pool = GlobalAveragePooling2D(name=self.name + "/global_pool")
            self.squeeze_dense1 = Dense(self.out_channels // self.squeeze_ratio,
                                        activation='relu',
                                        kernel_initializer=self.initializer,
                                        kernel_constraint=dense_constraint,
                                        trainable=self.trainable,
                                        name=self.name + "/squeeze_dense1")
            self.squeeze_dense2 = Dense(self.out_channels,
                                        activation='sigmoid',
                                        kernel_constraint=dense_constraint,
                                        kernel_initializer=self.initializer,
                                        trainable=self.trainable,
                                        name=self.name + "/squeeze_dense2")

        # Final 1x1 conv back up to the full output channel count.
        self.out_conv = Conv2D(self.out_channels, kernel_size=1, name=self.name + '/out_conv', strides=1,
                               trainable=self.trainable, padding='same', kernel_constraint=conv_constraint, kernel_initializer=self.initializer)

    def build(self, input_shape):
        """Create the shortcut branch once the input shape is known."""
        if self.mode == 'identity':
            if input_shape[-1] != self.out_channels:
                # This mode is used when the image dimensions (height and width) don't change, but only its channel dimension
                self.shortcut = Conv2D(self.out_channels, kernel_size=1, name=self.name + '/shortcut', strides=1,
                                       trainable=self.trainable, padding='same', kernel_constraint=self.conv_constraint, kernel_initializer=self.initializer)
            else:
                # If the shapes are equal then returns the input data itself
                self.shortcut = Lambda(lambda x: x, output_shape=input_shape, name=self.name + '/shortcut')
        elif self.mode == 'downsample':
            self.shortcut = Conv2D(self.out_channels, kernel_size=self.kernel_size, name=self.name + '/shortcut', strides=self.strides,
                                   trainable=self.trainable, padding='valid', kernel_constraint=self.conv_constraint, kernel_initializer=self.initializer)
        else:
            self.shortcut = Conv2DTranspose(self.out_channels, kernel_size=self.kernel_size, name=self.name + '/shortcut', strides=self.strides,
                                            trainable=self.trainable, padding='valid', kernel_constraint=self.conv_constraint, kernel_initializer=self.initializer)

    def call(self, input_tensor, training=True):
        """
        Forward pass: three pre-activation conv stages, optional SE channel
        re-weighting, then the residual add with the shortcut branch.
        """
        norm1 = self.norm1(input_tensor, training=training)
        relu1 = self.relu1(norm1)
        in_conv = self.in_conv(relu1)

        norm2 = self.norm2(in_conv, training=training)
        relu2 = self.relu2(norm2)
        mid_conv = self.mid_conv(relu2)

        norm3 = self.norm3(mid_conv, training=training)
        relu3 = self.relu3(norm3)
        out_conv = self.out_conv(relu3)

        if self.squeeze_excitation:
            # Re-scale channels by the learned SE gate.
            global_pool = self.global_pool(out_conv)
            squeeze_dense1 = self.squeeze_dense1(global_pool)
            squeeze_dense2 = self.squeeze_dense2(squeeze_dense1)
            out_conv = tf.keras.layers.Multiply()([out_conv, squeeze_dense2])

        shortcut = self.shortcut(input_tensor)
        add = Add(name=self.name + '/add')([out_conv, shortcut])
        return add

    def get_config(self):
        """Return the serializable configuration, including sub-layer configs."""
        config = super().get_config()
        config.update({'out_channels': self.out_channels,
                       'initializer': self.initializer,
                       'mode': self.mode,
                       'kernel_size': self.kernel_size,
                       'strides': self.strides,
                       'trainable': self.trainable,
                       'normalization': self.normalization,
                       'groups': self.groups,
                       'squeeze_excitation': self.squeeze_excitation,
                       'squeeze_ratio': self.squeeze_ratio,
                       # 'conv_constraint': self.conv_constraint,
                       # 'dense_constraint': self.dense_constraint,
                       'name': self.name,
                       'norm1': get_object_config(self.norm1),
                       'norm2': get_object_config(self.norm2),
                       'norm3': get_object_config(self.norm3),
                       'relu1': get_object_config(self.relu1),
                       'relu2': get_object_config(self.relu2),
                       'relu3': get_object_config(self.relu3),
                       'global_pool': get_object_config(self.global_pool),
                       'squeeze_dense1': get_object_config(self.squeeze_dense1),
                       'squeeze_dense2': get_object_config(self.squeeze_dense2),
                       'in_conv': get_object_config(self.in_conv),
                       'mid_conv': get_object_config(self.mid_conv),
                       'out_conv': get_object_config(self.out_conv)})
        return config
| 51.8
| 163
| 0.620764
| 1,222
| 11,655
| 5.725041
| 0.134206
| 0.036592
| 0.054889
| 0.03259
| 0.483133
| 0.383219
| 0.309177
| 0.255575
| 0.251286
| 0.20426
| 0
| 0.01358
| 0.286057
| 11,655
| 224
| 164
| 52.03125
| 0.827184
| 0.063406
| 0
| 0.111111
| 0
| 0
| 0.083456
| 0.009721
| 0
| 0
| 0
| 0
| 0.011696
| 1
| 0.023392
| false
| 0
| 0.093567
| 0
| 0.134503
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
841dd327848fd2568a5c74230c7b659174fee507
| 2,961
|
py
|
Python
|
saefportal/datastores/util.py
|
harry-consulting/SAEF1
|
055d6e492ba76f90e3248b9da2985fdfe0c6b430
|
[
"BSD-2-Clause"
] | null | null | null |
saefportal/datastores/util.py
|
harry-consulting/SAEF1
|
055d6e492ba76f90e3248b9da2985fdfe0c6b430
|
[
"BSD-2-Clause"
] | null | null | null |
saefportal/datastores/util.py
|
harry-consulting/SAEF1
|
055d6e492ba76f90e3248b9da2985fdfe0c6b430
|
[
"BSD-2-Clause"
] | 1
|
2020-12-16T15:02:52.000Z
|
2020-12-16T15:02:52.000Z
|
import json
from collections import defaultdict
import fastavro
import pandas as pd
from django.contrib import messages
from django.http import HttpResponseRedirect
from django.urls import reverse
from datasets.models import Connection
from users.models import User
def get_supported_file_types():
    """Return a list of the viable file type extensions."""
    return [
        "csv",
        "avro",
        "parquet",
        "xlsx",
        "xls",
        "xlsm",
        "xlsb",
    ]
def initialize_connection(datastore, connection_name, connection_owner_id, connection_type, request):
    """Create a connection and save the datastore on the connection object for later use."""
    owner = User.objects.get(id=connection_owner_id)
    new_connection = Connection.objects.create(
        name=connection_name,
        owner=owner,
        type=connection_type,
    )
    # Attach the (non-model) datastore object and persist the connection.
    new_connection.datastore = datastore
    new_connection.save()

    messages.success(request, "Connection was created.")
    return HttpResponseRedirect(reverse("datasets:index"))
def get_query(dataset, query):
    """Go through the potentially None valued given dataset and query and extract the query."""
    # Guard clauses: an explicit query wins, then the dataset's stored query,
    # and finally a full-table select as the fallback.
    if query:
        return query
    if dataset.query:
        return dataset.query
    return f"SELECT * FROM {dataset.table}"
def structure_tables_views(table, views):
    """
    Return a structured dictionary containing the given tables and views.

    :param table: iterable of (schema, table_name) pairs
    :param views: iterable of (schema, view_name) pairs
    :return: {"Tables": {schema: [{"value", "display"}, ...]},
              "Views": {schema: [{"value", "display"}, ...]}}
    """
    # IDIOM FIX: the original built these with side-effecting list
    # comprehensions, allocating throwaway lists of None; plain loops
    # express the intent directly.
    table_dict = defaultdict(list)
    for schema, table_name in table:
        table_dict[schema].append({"value": f"{schema}.{table_name}", "display": table_name})

    view_dict = defaultdict(list)
    for schema, view_name in views:
        view_dict[schema].append({"value": f"{schema}.{view_name}", "display": view_name})

    return {"Tables": dict(table_dict), "Views": dict(view_dict)}
def convert_to_dataframe(file_type, data):
    """Convert the given bytes data into a dataframe based on the given file type."""
    if file_type == "csv":
        # sep=None lets pandas sniff the delimiter.
        return pd.read_csv(data, sep=None)
    if file_type == "avro":
        return pd.DataFrame.from_records(fastavro.reader(data))
    if file_type == "parquet":
        return pd.read_parquet(data)
    # Every remaining supported extension is an Excel variant.
    return pd.read_excel(data)
def get_viable_blob_datasets(blobs, name_attr):
    """
    Used to get the viable datasets for blob datastores. Used for Google Cloud Storage, Azure Blob Storage,
    Azure Data Lake and Amazon S3 datastores.
    """
    # Keep only blobs whose extension is a supported file type.
    supported = get_supported_file_types()
    viable_blobs = [
        blob for blob in blobs
        if getattr(blob, name_attr).split(".")[-1].lower() in supported
    ]

    # Group the survivors under their immediate parent folder ("root" when
    # the blob path has no folder component).
    viable_datasets = defaultdict(list)
    for blob in viable_blobs:
        path_parts = getattr(blob, name_attr).split("/")
        parent_folder = path_parts[-2] if len(path_parts) >= 2 else "root"

        value = json.dumps({"id": getattr(blob, name_attr), "name": path_parts[-1].split(".")[0]})
        viable_datasets[parent_folder].append({"value": value, "display": path_parts[-1]})

    return {"Files": dict(viable_datasets)}
| 34.835294
| 110
| 0.695373
| 394
| 2,961
| 5.088832
| 0.319797
| 0.02394
| 0.01197
| 0.028429
| 0.05187
| 0.02793
| 0
| 0
| 0
| 0
| 0
| 0.002902
| 0.18541
| 2,961
| 84
| 111
| 35.25
| 0.828358
| 0.172239
| 0
| 0.037736
| 0
| 0
| 0.085204
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.113208
| false
| 0
| 0.169811
| 0
| 0.433962
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
841fba8a3c7dd4e8b6e7d2a9101dcfe6a12ffb43
| 637
|
py
|
Python
|
count_div.py
|
odellus/year_of_code
|
bfa2b30893bcc12f46e73ac34c63b5b05b27af5f
|
[
"MIT"
] | 1
|
2017-01-03T02:24:34.000Z
|
2017-01-03T02:24:34.000Z
|
count_div.py
|
odellus/year_of_code
|
bfa2b30893bcc12f46e73ac34c63b5b05b27af5f
|
[
"MIT"
] | null | null | null |
count_div.py
|
odellus/year_of_code
|
bfa2b30893bcc12f46e73ac34c63b5b05b27af5f
|
[
"MIT"
] | null | null | null |
#! /usr/bin/python
def solution(A, B, K):
    """
    Count the integers i in [A, B] that are divisible by K.

    :param A: lower bound, 0 <= A <= B
    :param B: upper bound
    :param K: divisor, K >= 1
    :return: number of multiples of K in the inclusive range

    BUG FIX: the original used `/`, which is Python-2 integer division and
    returns floats under Python 3; `//` keeps the result integral. The
    four-way case analysis also collapses to a closed form: multiples up
    to B minus multiples strictly below A.
    """
    return B // K - (A - 1) // K
| 21.233333
| 37
| 0.400314
| 113
| 637
| 2.044248
| 0.159292
| 0.121212
| 0.08658
| 0.138528
| 0.623377
| 0.623377
| 0.623377
| 0.5671
| 0.5671
| 0.493506
| 0
| 0.045045
| 0.477237
| 637
| 29
| 38
| 21.965517
| 0.648649
| 0.026688
| 0
| 0.48
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0
| 0
| 0
| 0.08
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
842128da3d89d5f7a471cc4a5a88b8952b188592
| 7,216
|
py
|
Python
|
models/DGIFullPipeline.py
|
nicolas-racchi/hpc2020-graphML
|
7f0d8b7c18469e1c793c7097bd10a9e0322e75be
|
[
"Apache-2.0"
] | null | null | null |
models/DGIFullPipeline.py
|
nicolas-racchi/hpc2020-graphML
|
7f0d8b7c18469e1c793c7097bd10a9e0322e75be
|
[
"Apache-2.0"
] | null | null | null |
models/DGIFullPipeline.py
|
nicolas-racchi/hpc2020-graphML
|
7f0d8b7c18469e1c793c7097bd10a9e0322e75be
|
[
"Apache-2.0"
] | null | null | null |
import time
import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.manifold import TSNE
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import f1_score
import stellargraph as sg
from stellargraph.mapper import CorruptedGenerator, HinSAGENodeGenerator
from stellargraph.layer import DeepGraphInfomax, HinSAGE
import tensorflow as tf
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras import Model, optimizers, losses, metrics
'''
Runs the entire pipeline:
- Takes preprocessed data as input
- Outputs predictions on the test_set nodes.
'''
def DGIPipeline(v_sets, e_sets, v_data, e_data, core_targets, ext_targets, core_testing):
print("HINSAGE DGI FULL PIPELINE STARTED")
tin = time.time()
#? Sort based on testingFlag
# data_splits[i].iloc[INDEX].values[0]
# where INDEX:
# [0] testingFlag=NaN
# [1] testingFlag=0
# [2] testingFlag=1
data_splits = dict()
for i in v_sets:
v_sets[i] = v_sets[i].sort_values('testingFlag')
data_splits[i] = v_sets[i].testingFlag.value_counts().to_frame()
v_sets[i] = v_sets[i].drop('testingFlag', axis=1)
#? Removing ExtendedCaseGraphID
for i in v_sets:
v_sets[i] = v_sets[i].drop('ExtendedCaseGraphID', axis=1)
#? Create the graph object
G = sg.StellarDiGraph(v_sets, e_sets)
'''
Iterate through the algotithm for every node type.
This is because HinSAGE can predict on one node type at a time, even though
it uses all the graph to compute the embeddings.
'''
# Parameters
batch_size = 200
dropout = 0.4
verbose = 1
visualize = False
def run_for_node_type(v_type, hinsage_layer_sizes, num_samples, activations, epochs):
    """Train DGI/HinSAGE embeddings for one node type, then classify them.

    HinSAGE predicts on a single head node type at a time (while still
    sampling the whole graph for the embeddings), so this is invoked once
    per node type.

    Args:
        v_type: node type name; key into v_sets / data_splits.
        hinsage_layer_sizes: hidden sizes, one per HinSAGE layer.
        num_samples: per-layer sample counts passed to the node generator.
        activations: per-layer activation names.
        epochs: number of DGI training epochs.

    Returns:
        DataFrame of test-set predictions (node_id -> ExtendedCaseGraphID),
        also written to ./output/<v_type>_predictions.csv; or 1 when
        `visualize` is True (plot-only mode, classification is skipped).
    """
    # Partition sizes recorded earlier in data_splits.
    # NOTE(review): assumes rows are ordered NaN / train / test, but
    # value_counts() sorts by frequency and drops NaN -- confirm upstream.
    nan_tflag = data_splits[v_type].iloc[0].values[0]
    train_tflag = data_splits[v_type].iloc[1].values[0]
    test_tflag = data_splits[v_type].iloc[2].values[0]
    # Positional slices work because v_sets[v_type] was sorted by testingFlag.
    train_cv_set = v_sets[v_type][nan_tflag:nan_tflag+train_tflag]
    train_cv_ids = train_cv_set.index.values.tolist()
    train_cv_labels = v_data.loc[[int(node_id) for node_id in train_cv_ids]].ExtendedCaseGraphID
    test_set = v_sets[v_type][-test_tflag:]
    test_ids = test_set.index.values.tolist()
    # Neighbourhood sampler producing batches for the chosen head node type.
    generator = HinSAGENodeGenerator(
        G,
        batch_size,
        num_samples,
        head_node_type=v_type
    )
    hinsage = HinSAGE(
        layer_sizes=hinsage_layer_sizes,
        activations=activations,
        generator=generator,
        bias=True,
        normalize="l2",
        dropout=dropout
    )

    def run_deep_graph_infomax(base_model, generator, epochs):
        """Train base_model unsupervised via Deep Graph Infomax; return the
        embedding sub-model's in/out tensors and the trained DGI model."""
        print(f"Starting training for {v_type} type: ")
        t0 = time.time()
        # DGI learns by contrasting true node features with corrupted ones.
        corrupted_generator = CorruptedGenerator(generator)
        gen = corrupted_generator.flow(G.nodes(node_type=v_type))
        infomax = DeepGraphInfomax(base_model, corrupted_generator)
        x_in, x_out = infomax.in_out_tensors()
        # Train with DGI
        model = Model(inputs=x_in, outputs=x_out)
        model.compile(loss=tf.nn.sigmoid_cross_entropy_with_logits, optimizer=Adam(lr=1e-3))
        es = EarlyStopping(monitor="loss", min_delta=0, patience=10)
        history = model.fit(gen, epochs=epochs, verbose=verbose, callbacks=[es])
        #sg.utils.plot_history(history)
        # Recover the embedding model without the DGI discriminator head.
        x_emb_in, x_emb_out = base_model.in_out_tensors()
        if generator.num_batch_dims() == 2:
            x_emb_out = tf.squeeze(x_emb_out, axis=0)
        t1 = time.time()
        print(f'Time required: {t1-t0:.2f} s ({(t1-t0)/60:.1f} min)')
        return x_emb_in, x_emb_out, model

    #? Train HinSAGE model:
    x_emb_in, x_emb_out, _model = run_deep_graph_infomax(hinsage, generator, epochs=epochs)
    emb_model = Model(inputs=x_emb_in, outputs=x_emb_out)
    train_cv_embs = emb_model.predict(
        generator.flow(train_cv_set.index.values)
    )
    #? Optional: Plot embeddings of training and CV set of current node type
    if (visualize == True):
        train_cv_embs_2d = pd.DataFrame(
            TSNE(n_components=2).fit_transform(train_cv_embs),
            index=train_cv_set.index.values
        )
        label_map = {l: i*10 for i, l in enumerate(np.unique(train_cv_labels), start=10) if pd.notna(l)}
        node_colours = [label_map[target] if pd.notna(target) else 0 for target in train_cv_labels]
        alpha = 0.7
        fig, ax = plt.subplots(figsize=(15, 15))
        ax.scatter(
            train_cv_embs_2d[0],
            train_cv_embs_2d[1],
            c=node_colours,
            cmap="jet",
            alpha=alpha,
        )
        ax.set(aspect="equal")
        plt.title(f"TSNE of HinSAGE {v_type} embeddings with DGI- coloring on ExtendedCaseGraphID")
        plt.show()
        # Plot-only mode: stop before classification.
        return 1

    #? Split training and cross valuation set using 80% 20% simple ordered split
    n_embs = train_cv_embs.shape[0]
    train_size = int(n_embs*0.80)
    cv_size = int(n_embs*0.20)
    train_set = train_cv_embs[:train_size]
    train_labels = np.ravel(pd.DataFrame(train_cv_labels.values[:train_size]).fillna(0))
    cv_set = train_cv_embs[-cv_size:]
    cv_labels = np.ravel(pd.DataFrame(train_cv_labels.values[-cv_size:]).fillna(0))
    #? CLASSIFY
    print(f"Running Classifier for {v_type} type")
    classifier = DecisionTreeClassifier()
    classifier.fit(
        X=train_set,
        y=train_labels,
    )
    cv_pred = classifier.predict(cv_set)
    f1_avg = f1_score(cv_labels, cv_pred, average='weighted')
    acc = (cv_pred == cv_labels).mean()
    print(f"{v_type} CV Metrics: f1: {f1_avg:.6f} - acc: {acc:.6f}")
    #? Now Run on test set
    test_embs = emb_model.predict(
        generator.flow(test_set.index.values)
    )
    test_pred = classifier.predict(test_embs)
    #? Save predictions
    outdir = './output'
    outname = f"{v_type}_predictions.csv"
    if not os.path.exists(outdir):
        os.mkdir(outdir)
    fullname = os.path.join(outdir, outname)
    output = pd.DataFrame(test_ids)
    output = output.rename(columns={0: 'node_id'})
    output['ExtendedCaseGraphID'] = test_pred
    output = output.set_index('node_id')
    output.to_csv(fullname)
    return output
#? Run for each node type
full_predictions = pd.DataFrame()
for v_type in v_sets:
if v_type == 'Account':
epochs = 12
num_samples = [8, 4]
hinsage_layer_sizes = [32, 32]
activations = ['relu', 'relu']
else:
epochs = 30
num_samples = [12]
hinsage_layer_sizes = [72]
activations = ['relu']
if v_type != 'External Entity' and v_type != 'Address':
predictions = run_for_node_type(v_type, hinsage_layer_sizes, num_samples, activations, epochs)
full_predictions = full_predictions.append(predictions)
full_predictions.to_csv("./output/full_predictions.csv")
tout = time.time()
print(f"HINSAGE DGI FULL PIPELINE COMPLETED: {(tin-tout)/60:.0f} min")
return 1
| 33.877934
| 106
| 0.651746
| 991
| 7,216
| 4.513623
| 0.280525
| 0.029734
| 0.019674
| 0.00626
| 0.133691
| 0.10351
| 0.070199
| 0.057679
| 0.057679
| 0.038453
| 0
| 0.017431
| 0.244734
| 7,216
| 213
| 107
| 33.877934
| 0.803303
| 0.067212
| 0
| 0.027397
| 0
| 0.013699
| 0.085233
| 0.008273
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020548
| false
| 0
| 0.10274
| 0
| 0.150685
| 0.041096
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
842586bea147f3e4d054e06882c5e5cefb545add
| 1,222
|
py
|
Python
|
physics_planning_games/mujoban/mujoban_level_test.py
|
mitchchristow/deepmind-research
|
49c7ebe6acc48dd276ca09eca6924ba6cb5ec3a3
|
[
"Apache-2.0"
] | 10,110
|
2019-08-27T20:05:30.000Z
|
2022-03-31T16:31:56.000Z
|
physics_planning_games/mujoban/mujoban_level_test.py
|
ibex-training/deepmind-research
|
6f8ae40b2626b30f5f80dfc92f5676689eff5599
|
[
"Apache-2.0"
] | 317
|
2019-11-09T10:19:10.000Z
|
2022-03-31T00:05:19.000Z
|
physics_planning_games/mujoban/mujoban_level_test.py
|
ibex-training/deepmind-research
|
6f8ae40b2626b30f5f80dfc92f5676689eff5599
|
[
"Apache-2.0"
] | 2,170
|
2019-08-28T12:53:36.000Z
|
2022-03-31T13:15:11.000Z
|
# Copyright 2020 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for mujoban_level."""
from absl.testing import absltest
from physics_planning_games.mujoban import mujoban_level
# Sokoban level in ASCII notation and the expected converter output
# (apparently: #/* wall, @/P player, $/B box, ./G goal).
# NOTE(review): leading whitespace inside these literals is load-bearing for
# the equality test below; it may have been collapsed during extraction --
# verify against the upstream file before trusting these exact bytes.
_LEVEL = """
#####
# @####
# $. #
###$.# #
# $.# #
# #$. #
# ###
######"""
_GRID_LEVEL = """********
*..P****
*..BG..*
***BG*.*
*..BG*.*
*.*BG..*
*....***
********
"""
class MujobanLevelTest(absltest.TestCase):
  """Checks the ASCII-to-text-grid conversion of a Sokoban level."""

  def test_ascii_to_text_grid_level(self):
    converted = mujoban_level._ascii_to_text_grid_level(_LEVEL)
    self.assertEqual(_GRID_LEVEL, converted)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
  absltest.main()
| 22.62963
| 78
| 0.636661
| 149
| 1,222
| 5.013423
| 0.604027
| 0.080321
| 0.034806
| 0.042838
| 0.053548
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007722
| 0.152209
| 1,222
| 53
| 79
| 23.056604
| 0.71332
| 0.545827
| 0
| 0
| 0
| 0
| 0.274254
| 0
| 0
| 0
| 0
| 0
| 0.038462
| 1
| 0.038462
| false
| 0
| 0.076923
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
842f1947d1778a3623e9a7a62865a578b298416e
| 2,027
|
py
|
Python
|
comment/views/blocker.py
|
Italo-Carvalho/Comment
|
86424d02a901b74ccbcaa438fffc38f352535301
|
[
"MIT"
] | 75
|
2018-09-08T14:29:35.000Z
|
2022-03-25T16:17:06.000Z
|
comment/views/blocker.py
|
p0-oya/Comment
|
39f6fb6c40314d97391d36fc25112d6420c96991
|
[
"MIT"
] | 165
|
2018-10-07T21:55:31.000Z
|
2022-02-27T14:44:32.000Z
|
comment/views/blocker.py
|
p0-oya/Comment
|
39f6fb6c40314d97391d36fc25112d6420c96991
|
[
"MIT"
] | 37
|
2019-12-01T19:44:23.000Z
|
2022-02-13T16:46:14.000Z
|
from django.views import View
from comment.models import BlockedUser, BlockedUserHistory, Comment
from comment.mixins import CanBlockUsersMixin
from comment.responses import UTF8JsonResponse, DABResponseData
from comment.messages import BlockUserError
class BaseToggleBlockingView(DABResponseData):
    """Base view that toggles the blocked state of a comment's author.

    Subclasses must set `response_class` (or override
    `get_response_class()`); POSTing a valid `comment_id` flips the
    author's blocked flag and records a history entry.
    """

    response_class = None

    def get_response_class(self):
        """Return the configured response class, asserting one is set."""
        assert self.response_class is not None, (
            "'%s' should either include a `response_class` attribute, "
            "or override the `get_response_class()` method."
            % self.__class__.__name__
        )
        return self.response_class

    def post(self, request, *args, **kwargs):
        """Toggle blocking for the author of the comment in the request."""
        response_cls = self.get_response_class()
        # Accept form-encoded POST data or a DRF-style `request.data` payload.
        payload = request.POST or getattr(request, 'data', {})
        target_id = payload.get('comment_id', None)

        # A missing, non-numeric, or unknown id is all reported the same way.
        try:
            comment = Comment.objects.get(id=int(target_id))
        except (Comment.DoesNotExist, ValueError, TypeError):
            self.error = {
                'detail': BlockUserError.INVALID
            }
            self.status = 400
            return response_cls(self.json(), status=self.status)

        blocked_user, created = BlockedUser.objects.get_or_create_blocked_user_for_comment(comment)
        # A freshly created record is already "blocked"; otherwise flip it.
        if not created:
            blocked_user.blocked = not blocked_user.blocked
            blocked_user.save()

        # Default the audit reason to the offending comment's content.
        reason = payload.get('reason', None)
        if blocked_user.blocked and not reason:
            reason = comment.content
        BlockedUserHistory.objects.create_history(
            blocked_user=blocked_user,
            blocker=request.user,
            reason=reason,
        )

        self.data = {
            'blocked_user': comment.get_username(),
            'blocked': blocked_user.blocked,
            'urlhash': comment.urlhash,
        }
        return response_cls(self.json())
class ToggleBlockingView(CanBlockUsersMixin, BaseToggleBlockingView, View):
    """Concrete toggle-blocking endpoint returning UTF-8 JSON responses
    (CanBlockUsersMixin is presumably the permission gate -- see mixin)."""
    response_class = UTF8JsonResponse
| 34.948276
| 99
| 0.655649
| 208
| 2,027
| 6.173077
| 0.350962
| 0.111371
| 0.070093
| 0.035826
| 0.042056
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003344
| 0.262457
| 2,027
| 57
| 100
| 35.561404
| 0.855518
| 0
| 0
| 0
| 0
| 0
| 0.076468
| 0.010853
| 0
| 0
| 0
| 0
| 0.021739
| 1
| 0.043478
| false
| 0
| 0.108696
| 0
| 0.304348
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
843c2a9f5e722e97bca056334565acff3143bb58
| 3,112
|
py
|
Python
|
finetune/TensorFlow/download_model_and_dataset.py
|
cgouttham/microsoft-hackathon
|
7e50981e0f165543676504592ad26818db13432f
|
[
"MIT"
] | 340
|
2019-05-15T06:42:37.000Z
|
2022-02-23T13:29:34.000Z
|
finetune/TensorFlow/download_model_and_dataset.py
|
cgouttham/microsoft-hackathon
|
7e50981e0f165543676504592ad26818db13432f
|
[
"MIT"
] | 43
|
2019-05-14T21:26:06.000Z
|
2022-02-13T02:42:57.000Z
|
finetune/TensorFlow/download_model_and_dataset.py
|
cgouttham/microsoft-hackathon
|
7e50981e0f165543676504592ad26818db13432f
|
[
"MIT"
] | 113
|
2019-05-23T08:21:48.000Z
|
2022-03-03T19:18:17.000Z
|
from __future__ import print_function

import argparse
import os
import shutil
import sys
import urllib
import urllib.request
import zipfile
# Command-line interface. All three parameters are required.
parser = argparse.ArgumentParser()
parser.add_argument(
    "--bert_model_name",
    default=None,
    type=str,
    required=True,
    help="Name of pretrained BERT model. Possible values: "
         "uncased_L-12_H-768_A-12,uncased_L-24_H-1024_A-16,cased_L-12_H-768_A-12,"
         "multilingual_L-12_H-768_A-12,chinese_L-12_H-768_A-12")
parser.add_argument(
    "--model_dump_path",
    default=None,
    type=str,
    required=True,
    help="Path to the output model.")
parser.add_argument(
    "--glue_data_path",
    default=None,
    type=str,
    required=True,
    help="Path to store downloaded GLUE dataset")
args = parser.parse_args()
# Official download URL for each supported pretrained BERT model.
bert_model_url_map = {
    'uncased_L-12_H-768_A-12': 'https://storage.googleapis.com/bert_models/2018_10_18/uncased_L-12_H-768_A-12.zip',
    'uncased_L-24_H-1024_A-16': 'https://storage.googleapis.com/bert_models/2018_10_18/uncased_L-24_H-1024_A-16.zip',
    'cased_L-12_H-768_A-12': 'https://storage.googleapis.com/bert_models/2018_10_18/cased_L-12_H-768_A-12.zip',
    'multilingual_L-12_H-768_A-12': 'https://storage.googleapis.com/bert_models/2018_11_03/multilingual_L-12_H-768_A-12.zip',
    'chinese_L-12_H-768_A-12': 'https://storage.googleapis.com/bert_models/2018_11_03/chinese_L-12_H-768_A-12.zip',
}
# Fail fast on an unsupported model name (EAFP lookup).
try:
    pretrained_model_url = bert_model_url_map[args.bert_model_name]
except KeyError:
    sys.stderr.write('Unknown BERT model name ' + args.bert_model_name)
    sys.exit(1)
# make local directory for pretrained tensorflow BERT model
tensorflow_model_dir = './tensorflow_model'
if not os.path.exists(tensorflow_model_dir):
    os.makedirs(tensorflow_model_dir)
# download and extract pretrained tensorflow BERT model
# NOTE(review): this requires `urllib.request` to be importable as an
# attribute of urllib; a bare `import urllib` does not guarantee that --
# confirm the import block loads the submodule.
download_file_name = 'tensorflow_model.zip'
urllib.request.urlretrieve(pretrained_model_url, filename=download_file_name)
print('Extracting pretrained model...')
with zipfile.ZipFile(download_file_name, 'r') as z:
    z.extractall(tensorflow_model_dir)
# make destination path
if not os.path.exists(args.model_dump_path):
    os.makedirs(args.model_dump_path)
# Copy the checkpoint shards, model config and vocabulary to the dump path.
files = ['bert_model.ckpt.meta', 'bert_model.ckpt.index', 'bert_model.ckpt.data-00000-of-00001', 'bert_config.json', 'vocab.txt']
for file in files:
    shutil.copy(os.path.join(tensorflow_model_dir, args.bert_model_name, file), os.path.join(args.model_dump_path, file))
# Fetch the community GLUE download helper script and delegate to it.
# NOTE(review): glue_data_path is interpolated unquoted into a shell
# command; paths containing spaces would break -- verify callers.
print('Start to download GLUE dataset...\n')
urllib.request.urlretrieve(
    'https://gist.githubusercontent.com/W4ngatang/60c2bdb54d156a41194446737ce03e2e/raw/17b8dd0d724281ed7c3b2aeeda662b92809aadd5/download_glue_data.py',
    filename='download_glue_data.py')
if os.system('python download_glue_data.py --data_dir {0} --tasks all'.format(args.glue_data_path)) != 0: sys.exit(1)
| 43.222222
| 151
| 0.70662
| 459
| 3,112
| 4.501089
| 0.272331
| 0.065344
| 0.023233
| 0.040658
| 0.314618
| 0.298161
| 0.298161
| 0.173766
| 0.173766
| 0.173766
| 0
| 0.080796
| 0.176735
| 3,112
| 72
| 152
| 43.222222
| 0.725605
| 0.049165
| 0
| 0.163636
| 0
| 0.109091
| 0.42654
| 0.115098
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.127273
| 0
| 0.127273
| 0.054545
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
843d9417ba37601232cb640d55f1d03f38cd7f76
| 3,226
|
py
|
Python
|
python/examples/imagenet/image_reader.py
|
gongweibao/Serving
|
d234a1421e8b964c5fa3e9901f57f24aa49e3a91
|
[
"Apache-2.0"
] | null | null | null |
python/examples/imagenet/image_reader.py
|
gongweibao/Serving
|
d234a1421e8b964c5fa3e9901f57f24aa49e3a91
|
[
"Apache-2.0"
] | null | null | null |
python/examples/imagenet/image_reader.py
|
gongweibao/Serving
|
d234a1421e8b964c5fa3e9901f57f24aa49e3a91
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cv2
import numpy as np
class ImageReader():
    """Decodes and preprocesses ImageNet-style images for serving.

    Pipeline of process_image: decode bytes -> resize so the short side
    is `resize_short_size` -> center-crop to `image_shape` -> BGR->RGB ->
    CHW float32 scaled to [0, 1] -> normalize with ImageNet mean/std.
    """

    def __init__(self):
        # Per-channel ImageNet statistics (applied after /255 scaling).
        self.image_mean = [0.485, 0.456, 0.406]
        self.image_std = [0.229, 0.224, 0.225]
        self.image_shape = [3, 224, 224]
        self.resize_short_size = 256
        self.interpolation = None  # None -> cv2.resize default interpolation

    def resize_short(self, img, target_size, interpolation=None):
        """Resize `img` so its shorter side equals `target_size`.

        Args:
            img: HxWxC image array.
            target_size: desired length of the shorter side, in pixels.
            interpolation: cv2 interpolation flag, or None for the default.
        Returns:
            The resized image array (aspect ratio preserved).
        """
        percent = float(target_size) / min(img.shape[0], img.shape[1])
        resized_width = int(round(img.shape[1] * percent))
        resized_height = int(round(img.shape[0] * percent))
        if interpolation:
            resized = cv2.resize(
                img, (resized_width, resized_height),
                interpolation=interpolation)
        else:
            resized = cv2.resize(img, (resized_width, resized_height))
        return resized

    def crop_image(self, img, target_size, center):
        """Crop a `target_size` x `target_size` patch from `img`.

        Args:
            img: HxWxC image array.
            target_size: crop side length, in pixels.
            center: truthy for a deterministic center crop, falsy for a
                random crop (training-style augmentation).
        Returns:
            The cropped image array.
        """
        height, width = img.shape[:2]
        size = target_size
        if center:  # was `center == True`; truthiness is the intended check
            w_start = (width - size) // 2
            h_start = (height - size) // 2
        else:
            w_start = np.random.randint(0, width - size + 1)
            h_start = np.random.randint(0, height - size + 1)
        w_end = w_start + size
        h_end = h_start + size
        img = img[h_start:h_end, w_start:w_end, :]
        return img

    def process_image(self, sample):
        """Decode raw image bytes and return a normalized CHW float32 array.

        Args:
            sample: encoded image bytes (e.g. JPEG/PNG file content).
        Returns:
            A (3, 224, 224) float32 array, or None if decoding fails.
        """
        mean = self.image_mean
        std = self.image_std
        crop_size = self.image_shape[1]
        # np.fromstring is deprecated for binary input; frombuffer is the
        # supported zero-copy replacement with identical results here.
        data = np.frombuffer(sample, np.uint8)
        img = cv2.imdecode(data, cv2.IMREAD_COLOR)
        if img is None:
            print("img is None, pass it.")
            return None
        if crop_size > 0:
            target_size = self.resize_short_size
            img = self.resize_short(
                img, target_size, interpolation=self.interpolation)
            img = self.crop_image(img, target_size=crop_size, center=True)
        img = img[:, :, ::-1]  # cv2 decodes BGR; convert to RGB
        img = img.astype('float32').transpose((2, 0, 1)) / 255
        img_mean = np.array(mean).reshape((3, 1, 1))
        img_std = np.array(std).reshape((3, 1, 1))
        img -= img_mean
        img /= img_std
        return img
| 32.26
| 74
| 0.591135
| 419
| 3,226
| 4.420048
| 0.329356
| 0.059395
| 0.028078
| 0.017279
| 0.084233
| 0.047516
| 0.047516
| 0.047516
| 0
| 0
| 0
| 0.034127
| 0.309671
| 3,226
| 99
| 75
| 32.585859
| 0.797485
| 0.274334
| 0
| 0.074074
| 0
| 0
| 0.012739
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074074
| false
| 0.018519
| 0.037037
| 0
| 0.203704
| 0.018519
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|